Lines Matching +full:center +full:- +full:spread

1 // SPDX-License-Identifier: BSD-3-Clause-Clear
3 * Copyright (c) 2018-2021 The Linux Foundation. All rights reserved.
4 * Copyright (c) 2021-2024 Qualcomm Innovation Center, Inc. All rights reserved.
27 if (!ab->hal_rx_ops->rx_desc_encrypt_valid(desc)) in ath12k_dp_rx_h_enctype()
30 return ab->hal_rx_ops->rx_desc_get_encrypt_type(desc); in ath12k_dp_rx_h_enctype()
36 return ab->hal_rx_ops->rx_desc_get_decap_type(desc); in ath12k_dp_rx_h_decap_type()
42 return ab->hal_rx_ops->rx_desc_get_mesh_ctl(desc); in ath12k_dp_rx_h_mesh_ctl_present()
48 return ab->hal_rx_ops->rx_desc_get_mpdu_seq_ctl_vld(desc); in ath12k_dp_rx_h_seq_ctrl_valid()
54 return ab->hal_rx_ops->rx_desc_get_mpdu_fc_valid(desc); in ath12k_dp_rx_h_fc_valid()
62 hdr = (struct ieee80211_hdr *)(skb->data + ab->hal.hal_desc_sz); in ath12k_dp_rx_h_more_frags()
63 return ieee80211_has_morefrags(hdr->frame_control); in ath12k_dp_rx_h_more_frags()
71 hdr = (struct ieee80211_hdr *)(skb->data + ab->hal.hal_desc_sz); in ath12k_dp_rx_h_frag_no()
72 return le16_to_cpu(hdr->seq_ctrl) & IEEE80211_SCTL_FRAG; in ath12k_dp_rx_h_frag_no()
78 return ab->hal_rx_ops->rx_desc_get_mpdu_start_seq_no(desc); in ath12k_dp_rx_h_seq_no()
84 return ab->hal_rx_ops->dp_rx_h_msdu_done(desc); in ath12k_dp_rx_h_msdu_done()
90 return ab->hal_rx_ops->dp_rx_h_l4_cksum_fail(desc); in ath12k_dp_rx_h_l4_cksum_fail()
96 return ab->hal_rx_ops->dp_rx_h_ip_cksum_fail(desc); in ath12k_dp_rx_h_ip_cksum_fail()
102 return ab->hal_rx_ops->dp_rx_h_is_decrypted(desc); in ath12k_dp_rx_h_is_decrypted()
108 return ab->hal_rx_ops->dp_rx_h_mpdu_err(desc); in ath12k_dp_rx_h_mpdu_err()
114 return ab->hal_rx_ops->rx_desc_get_msdu_len(desc); in ath12k_dp_rx_h_msdu_len()
120 return ab->hal_rx_ops->rx_desc_get_msdu_sgi(desc); in ath12k_dp_rx_h_sgi()
126 return ab->hal_rx_ops->rx_desc_get_msdu_rate_mcs(desc); in ath12k_dp_rx_h_rate_mcs()
132 return ab->hal_rx_ops->rx_desc_get_msdu_rx_bw(desc); in ath12k_dp_rx_h_rx_bw()
138 return ab->hal_rx_ops->rx_desc_get_msdu_freq(desc); in ath12k_dp_rx_h_freq()
144 return ab->hal_rx_ops->rx_desc_get_msdu_pkt_type(desc); in ath12k_dp_rx_h_pkt_type()
150 return hweight8(ab->hal_rx_ops->rx_desc_get_msdu_nss(desc)); in ath12k_dp_rx_h_nss()
156 return ab->hal_rx_ops->rx_desc_get_mpdu_tid(desc); in ath12k_dp_rx_h_tid()
162 return ab->hal_rx_ops->rx_desc_get_mpdu_peer_id(desc); in ath12k_dp_rx_h_peer_id()
168 return ab->hal_rx_ops->rx_desc_get_l3_pad_bytes(desc); in ath12k_dp_rx_h_l3pad()
174 return ab->hal_rx_ops->rx_desc_get_first_msdu(desc); in ath12k_dp_rx_h_first_msdu()
180 return ab->hal_rx_ops->rx_desc_get_last_msdu(desc); in ath12k_dp_rx_h_last_msdu()
187 ab->hal_rx_ops->rx_desc_copy_end_tlv(fdesc, ldesc); in ath12k_dp_rx_desc_end_tlv_copy()
194 ab->hal_rx_ops->rx_desc_set_msdu_len(desc, len); in ath12k_dp_rxdesc_set_msdu_len()
201 ab->hal_rx_ops->rx_desc_is_da_mcbc(desc)); in ath12k_dp_rx_h_is_da_mcbc()
207 return ab->hal_rx_ops->rx_desc_mac_addr2_valid(desc); in ath12k_dp_rxdesc_mac_addr2_valid()
213 return ab->hal_rx_ops->rx_desc_mpdu_start_addr2(desc); in ath12k_dp_rxdesc_get_mpdu_start_addr2()
220 ab->hal_rx_ops->rx_desc_get_dot11_hdr(desc, hdr); in ath12k_dp_rx_desc_get_dot11_hdr()
228 ab->hal_rx_ops->rx_desc_get_crypto_header(desc, crypto_hdr, enctype); in ath12k_dp_rx_desc_get_crypto_header()
234 return ab->hal_rx_ops->rx_desc_get_mpdu_frame_ctl(desc); in ath12k_dp_rxdesc_get_mpdu_frame_ctrl()
240 return ab->hal_rx_ops->rx_desc_get_msdu_src_link_id(desc); in ath12k_dp_rx_get_msdu_src_link()
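The run above (file lines 27-240) is a block of one-line accessors that forward every descriptor field lookup through the per-chip ab->hal_rx_ops table, which is how a single datapath copes with differing hardware rx descriptor layouts. A minimal user-space sketch of the pattern, with invented field names and a hypothetical chip_a ops table rather than the driver's real layout:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical descriptor and ops table; names are illustrative,
 * not the driver's real layout. */
struct rx_desc { uint8_t tid; uint16_t msdu_len; };

struct rx_ops {
	uint8_t  (*get_tid)(const struct rx_desc *d);
	uint16_t (*get_msdu_len)(const struct rx_desc *d);
};

static uint8_t chip_a_get_tid(const struct rx_desc *d) { return d->tid; }
static uint16_t chip_a_get_msdu_len(const struct rx_desc *d) { return d->msdu_len; }

static const struct rx_ops chip_a_ops = {
	.get_tid = chip_a_get_tid,
	.get_msdu_len = chip_a_get_msdu_len,
};

/* Thin wrapper in the style of the ath12k_dp_rx_h_*() accessors above. */
static uint8_t dp_rx_h_tid(const struct rx_ops *ops, const struct rx_desc *d)
{
	return ops->get_tid(d);
}

int main(void)
{
	struct rx_desc d = { .tid = 5, .msdu_len = 1500 };
	printf("tid=%u len=%u\n", dp_rx_h_tid(&chip_a_ops, &d),
	       chip_a_ops.get_msdu_len(&d));
	return 0;
}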
269 rx_desc->in_use = true; in ath12k_dp_list_cut_nodes()
271 count--; in ath12k_dp_list_cut_nodes()
287 rx_desc->in_use = false; in ath12k_dp_rx_enqueue_free()
289 spin_lock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_enqueue_free()
290 list_splice_tail(used_list, &dp->rx_desc_free_list); in ath12k_dp_rx_enqueue_free()
291 spin_unlock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_enqueue_free()
307 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_bufs_replenish()
309 enum hal_rx_buf_return_buf_manager mgr = ab->hw_params->hal_params->rx_buf_rbm; in ath12k_dp_rx_bufs_replenish()
311 req_entries = min(req_entries, rx_ring->bufs_max); in ath12k_dp_rx_bufs_replenish()
313 srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id]; in ath12k_dp_rx_bufs_replenish()
315 spin_lock_bh(&srng->lock); in ath12k_dp_rx_bufs_replenish()
320 if (!req_entries && (num_free > (rx_ring->bufs_max * 3) / 4)) in ath12k_dp_rx_bufs_replenish()
331 spin_lock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_bufs_replenish()
333 &dp->rx_desc_free_list, in ath12k_dp_rx_bufs_replenish()
335 spin_unlock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_bufs_replenish()
345 if (!IS_ALIGNED((unsigned long)skb->data, in ath12k_dp_rx_bufs_replenish()
348 PTR_ALIGN(skb->data, DP_RX_BUFFER_ALIGN_SIZE) - in ath12k_dp_rx_bufs_replenish()
349 skb->data); in ath12k_dp_rx_bufs_replenish()
352 paddr = dma_map_single(ab->dev, skb->data, in ath12k_dp_rx_bufs_replenish()
353 skb->len + skb_tailroom(skb), in ath12k_dp_rx_bufs_replenish()
355 if (dma_mapping_error(ab->dev, paddr)) in ath12k_dp_rx_bufs_replenish()
364 rx_desc->skb = skb; in ath12k_dp_rx_bufs_replenish()
365 cookie = rx_desc->cookie; in ath12k_dp_rx_bufs_replenish()
371 list_del(&rx_desc->list); in ath12k_dp_rx_bufs_replenish()
372 ATH12K_SKB_RXCB(skb)->paddr = paddr; in ath12k_dp_rx_bufs_replenish()
374 num_remain--; in ath12k_dp_rx_bufs_replenish()
382 dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb), in ath12k_dp_rx_bufs_replenish()
392 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_bufs_replenish()
394 return req_entries - num_remain; in ath12k_dp_rx_bufs_replenish()
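ath12k_dp_rx_bufs_replenish() aligns each freshly allocated skb's data pointer to DP_RX_BUFFER_ALIGN_SIZE before DMA-mapping it (file lines 345-353). A standalone sketch of the round-up arithmetic that PTR_ALIGN() performs; the buffer and alignment values are illustrative:

#include <stdint.h>
#include <stdio.h>

/* Round a pointer up to the next 'align' boundary (align must be a
 * power of two), the same arithmetic PTR_ALIGN() performs before the
 * buffer is DMA-mapped in the replenish loop above. */
static inline void *ptr_align(void *p, uintptr_t align)
{
	return (void *)(((uintptr_t)p + align - 1) & ~(align - 1));
}

int main(void)
{
	char buf[256];
	void *p = buf + 3;              /* deliberately misaligned */
	void *a = ptr_align(p, 8);      /* e.g. DP_RX_BUFFER_ALIGN_SIZE */

	/* The driver then advances the skb data pointer by
	 * (aligned - data) so the payload starts on the boundary. */
	printf("misaligned %p -> aligned %p (skipped %zu bytes)\n",
	       p, a, (size_t)((char *)a - (char *)p));
	return 0;
}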
403 spin_lock_bh(&rx_ring->idr_lock); in ath12k_dp_rxdma_mon_buf_ring_free()
404 idr_for_each_entry(&rx_ring->bufs_idr, skb, buf_id) { in ath12k_dp_rxdma_mon_buf_ring_free()
405 idr_remove(&rx_ring->bufs_idr, buf_id); in ath12k_dp_rxdma_mon_buf_ring_free()
409 dma_unmap_single(ab->dev, ATH12K_SKB_RXCB(skb)->paddr, in ath12k_dp_rxdma_mon_buf_ring_free()
410 skb->len + skb_tailroom(skb), DMA_FROM_DEVICE); in ath12k_dp_rxdma_mon_buf_ring_free()
414 idr_destroy(&rx_ring->bufs_idr); in ath12k_dp_rxdma_mon_buf_ring_free()
415 spin_unlock_bh(&rx_ring->idr_lock); in ath12k_dp_rxdma_mon_buf_ring_free()
422 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rxdma_buf_free()
424 ath12k_dp_rxdma_mon_buf_ring_free(ab, &dp->rxdma_mon_buf_ring); in ath12k_dp_rxdma_buf_free()
435 num_entries = rx_ring->refill_buf_ring.size / in ath12k_dp_rxdma_mon_ring_buf_setup()
438 rx_ring->bufs_max = num_entries; in ath12k_dp_rxdma_mon_ring_buf_setup()
449 rx_ring->bufs_max = rx_ring->refill_buf_ring.size / in ath12k_dp_rxdma_ring_buf_setup()
459 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rxdma_buf_setup()
462 ret = ath12k_dp_rxdma_ring_buf_setup(ab, &dp->rx_refill_buf_ring); in ath12k_dp_rxdma_buf_setup()
469 if (ab->hw_params->rxdma1_enable) { in ath12k_dp_rxdma_buf_setup()
471 &dp->rxdma_mon_buf_ring, in ath12k_dp_rxdma_buf_setup()
485 struct ath12k_pdev_dp *dp = &ar->dp; in ath12k_dp_rx_pdev_srng_free()
486 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_pdev_srng_free()
489 for (i = 0; i < ab->hw_params->num_rxdma_per_pdev; i++) in ath12k_dp_rx_pdev_srng_free()
490 ath12k_dp_srng_cleanup(ab, &dp->rxdma_mon_dst_ring[i]); in ath12k_dp_rx_pdev_srng_free()
495 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_pdev_reo_cleanup()
499 ath12k_dp_srng_cleanup(ab, &dp->reo_dst_ring[i]); in ath12k_dp_rx_pdev_reo_cleanup()
504 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_pdev_reo_setup()
509 ret = ath12k_dp_srng_setup(ab, &dp->reo_dst_ring[i], in ath12k_dp_rx_pdev_reo_setup()
528 struct ath12k_pdev_dp *dp = &ar->dp; in ath12k_dp_rx_pdev_srng_alloc()
529 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_pdev_srng_alloc()
532 u32 mac_id = dp->mac_id; in ath12k_dp_rx_pdev_srng_alloc()
534 for (i = 0; i < ab->hw_params->num_rxdma_per_pdev; i++) { in ath12k_dp_rx_pdev_srng_alloc()
535 ret = ath12k_dp_srng_setup(ar->ab, in ath12k_dp_rx_pdev_srng_alloc()
536 &dp->rxdma_mon_dst_ring[i], in ath12k_dp_rx_pdev_srng_alloc()
541 ath12k_warn(ar->ab, in ath12k_dp_rx_pdev_srng_alloc()
552 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_reo_cmd_list_cleanup()
556 spin_lock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_reo_cmd_list_cleanup()
557 list_for_each_entry_safe(cmd, tmp, &dp->reo_cmd_list, list) { in ath12k_dp_rx_reo_cmd_list_cleanup()
558 list_del(&cmd->list); in ath12k_dp_rx_reo_cmd_list_cleanup()
559 dma_unmap_single(ab->dev, cmd->data.paddr, in ath12k_dp_rx_reo_cmd_list_cleanup()
560 cmd->data.size, DMA_BIDIRECTIONAL); in ath12k_dp_rx_reo_cmd_list_cleanup()
561 kfree(cmd->data.vaddr); in ath12k_dp_rx_reo_cmd_list_cleanup()
566 &dp->reo_cmd_cache_flush_list, list) { in ath12k_dp_rx_reo_cmd_list_cleanup()
567 list_del(&cmd_cache->list); in ath12k_dp_rx_reo_cmd_list_cleanup()
568 dp->reo_cmd_cache_flush_count--; in ath12k_dp_rx_reo_cmd_list_cleanup()
569 dma_unmap_single(ab->dev, cmd_cache->data.paddr, in ath12k_dp_rx_reo_cmd_list_cleanup()
570 cmd_cache->data.size, DMA_BIDIRECTIONAL); in ath12k_dp_rx_reo_cmd_list_cleanup()
571 kfree(cmd_cache->data.vaddr); in ath12k_dp_rx_reo_cmd_list_cleanup()
574 spin_unlock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_reo_cmd_list_cleanup()
583 ath12k_warn(dp->ab, "failed to flush rx tid hw desc, tid %d status %d\n", in ath12k_dp_reo_cmd_free()
584 rx_tid->tid, status); in ath12k_dp_reo_cmd_free()
586 dma_unmap_single(dp->ab->dev, rx_tid->paddr, rx_tid->size, in ath12k_dp_reo_cmd_free()
588 kfree(rx_tid->vaddr); in ath12k_dp_reo_cmd_free()
589 rx_tid->vaddr = NULL; in ath12k_dp_reo_cmd_free()
598 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_reo_cmd_send()
603 cmd_ring = &ab->hal.srng_list[dp->reo_cmd_ring.ring_id]; in ath12k_dp_reo_cmd_send()
612 return -EINVAL; in ath12k_dp_reo_cmd_send()
624 return -ENOMEM; in ath12k_dp_reo_cmd_send()
626 memcpy(&dp_cmd->data, rx_tid, sizeof(*rx_tid)); in ath12k_dp_reo_cmd_send()
627 dp_cmd->cmd_num = cmd_num; in ath12k_dp_reo_cmd_send()
628 dp_cmd->handler = cb; in ath12k_dp_reo_cmd_send()
630 spin_lock_bh(&dp->reo_cmd_lock); in ath12k_dp_reo_cmd_send()
631 list_add_tail(&dp_cmd->list, &dp->reo_cmd_list); in ath12k_dp_reo_cmd_send()
632 spin_unlock_bh(&dp->reo_cmd_lock); in ath12k_dp_reo_cmd_send()
644 tot_desc_sz = rx_tid->size; in ath12k_dp_reo_cache_flush()
648 tot_desc_sz -= desc_sz; in ath12k_dp_reo_cache_flush()
649 cmd.addr_lo = lower_32_bits(rx_tid->paddr + tot_desc_sz); in ath12k_dp_reo_cache_flush()
650 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath12k_dp_reo_cache_flush()
657 rx_tid->tid, ret); in ath12k_dp_reo_cache_flush()
661 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath12k_dp_reo_cache_flush()
662 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath12k_dp_reo_cache_flush()
669 rx_tid->tid, ret); in ath12k_dp_reo_cache_flush()
670 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath12k_dp_reo_cache_flush()
672 kfree(rx_tid->vaddr); in ath12k_dp_reo_cache_flush()
673 rx_tid->vaddr = NULL; in ath12k_dp_reo_cache_flush()
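ath12k_dp_reo_cache_flush() (file lines 644-673) flushes a large REO queue descriptor in fixed-size pieces, walking down from the tail before finally flushing the base address. A sketch of that chunking loop under assumed sizes; flush() stands in for issuing a HAL_REO_CMD_FLUSH_CACHE command:

#include <stdint.h>
#include <stdio.h>

static void flush(uint64_t paddr)
{
	printf("flush chunk at 0x%llx\n", (unsigned long long)paddr);
}

int main(void)
{
	uint64_t paddr = 0x1000;   /* rx_tid->paddr (illustrative) */
	uint32_t size = 1024;      /* rx_tid->size  (illustrative) */
	uint32_t desc_sz = 256;    /* HW queue descriptor chunk size */
	uint32_t tot = size;

	while (tot > desc_sz) {
		tot -= desc_sz;
		flush(paddr + tot);  /* higher chunks first */
	}
	flush(paddr);                /* finally the base descriptor */
	return 0;
}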
680 struct ath12k_base *ab = dp->ab; in ath12k_dp_rx_tid_del_func()
689 rx_tid->tid, status); in ath12k_dp_rx_tid_del_func()
697 elem->ts = jiffies; in ath12k_dp_rx_tid_del_func()
698 memcpy(&elem->data, rx_tid, sizeof(*rx_tid)); in ath12k_dp_rx_tid_del_func()
700 spin_lock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_tid_del_func()
701 list_add_tail(&elem->list, &dp->reo_cmd_cache_flush_list); in ath12k_dp_rx_tid_del_func()
702 dp->reo_cmd_cache_flush_count++; in ath12k_dp_rx_tid_del_func()
705 list_for_each_entry_safe(elem, tmp, &dp->reo_cmd_cache_flush_list, in ath12k_dp_rx_tid_del_func()
707 if (dp->reo_cmd_cache_flush_count > ATH12K_DP_RX_REO_DESC_FREE_THRES || in ath12k_dp_rx_tid_del_func()
708 time_after(jiffies, elem->ts + in ath12k_dp_rx_tid_del_func()
710 list_del(&elem->list); in ath12k_dp_rx_tid_del_func()
711 dp->reo_cmd_cache_flush_count--; in ath12k_dp_rx_tid_del_func()
719 * delete to this list. Hence unlock-lock is safe here. in ath12k_dp_rx_tid_del_func()
721 spin_unlock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_tid_del_func()
723 ath12k_dp_reo_cache_flush(ab, &elem->data); in ath12k_dp_rx_tid_del_func()
725 spin_lock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_tid_del_func()
728 spin_unlock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_tid_del_func()
732 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath12k_dp_rx_tid_del_func()
734 kfree(rx_tid->vaddr); in ath12k_dp_rx_tid_del_func()
735 rx_tid->vaddr = NULL; in ath12k_dp_rx_tid_del_func()
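ath12k_dp_rx_tid_del_func() parks freed TID descriptors on reo_cmd_cache_flush_list with a jiffies timestamp and reaps them once the list grows past ATH12K_DP_RX_REO_DESC_FREE_THRES or an entry ages out (file lines 697-711). The aging test relies on the kernel's wraparound-safe time_after(); a self-contained sketch of that comparison on an unsigned tick counter:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Wraparound-safe "has a passed b" check, same idea as the kernel's
 * time_after(): cast the difference to signed. */
static bool time_after_u32(uint32_t a, uint32_t b)
{
	return (int32_t)(b - a) < 0;
}

int main(void)
{
	uint32_t now = 10;
	uint32_t ts = 0xfffffffb;   /* timestamp taken just before wrap */
	uint32_t timeout = 5;

	/* 15 ticks have elapsed despite the counter wrapping. */
	printf("aged out: %s\n",
	       time_after_u32(now, ts + timeout) ? "yes" : "no");
	return 0;
}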
742 struct ath12k_dp *dp = &ab->dp; in ath12k_peer_rx_tid_qref_setup()
745 if (!ab->hw_params->reoq_lut_support) in ath12k_peer_rx_tid_qref_setup()
754 qref = (struct ath12k_reo_queue_ref *)dp->ml_reoq_lut.vaddr + in ath12k_peer_rx_tid_qref_setup()
757 qref = (struct ath12k_reo_queue_ref *)dp->reoq_lut.vaddr + in ath12k_peer_rx_tid_qref_setup()
760 qref->info0 = u32_encode_bits(lower_32_bits(paddr), in ath12k_peer_rx_tid_qref_setup()
762 qref->info1 = u32_encode_bits(upper_32_bits(paddr), in ath12k_peer_rx_tid_qref_setup()
770 struct ath12k_dp *dp = &ab->dp; in ath12k_peer_rx_tid_qref_reset()
773 if (!ab->hw_params->reoq_lut_support) in ath12k_peer_rx_tid_qref_reset()
782 qref = (struct ath12k_reo_queue_ref *)dp->ml_reoq_lut.vaddr + in ath12k_peer_rx_tid_qref_reset()
785 qref = (struct ath12k_reo_queue_ref *)dp->reoq_lut.vaddr + in ath12k_peer_rx_tid_qref_reset()
788 qref->info0 = u32_encode_bits(0, BUFFER_ADDR_INFO0_ADDR); in ath12k_peer_rx_tid_qref_reset()
789 qref->info1 = u32_encode_bits(0, BUFFER_ADDR_INFO1_ADDR) | in ath12k_peer_rx_tid_qref_reset()
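The qref helpers (file lines 742-789) publish a TID's 64-bit queue descriptor address into a REO lookup-table entry as two 32-bit words, and zero it again on reset. A sketch of that split; the two-word entry layout is illustrative and does not reproduce the exact BUFFER_ADDR_INFO bit fields:

#include <stdint.h>
#include <stdio.h>

struct qref_entry {
	uint32_t info0;   /* low 32 bits of the address */
	uint32_t info1;   /* high bits (plus other fields in hardware) */
};

static void qref_set(struct qref_entry *q, uint64_t paddr)
{
	q->info0 = (uint32_t)(paddr & 0xffffffff);  /* lower_32_bits() */
	q->info1 = (uint32_t)(paddr >> 32);         /* upper_32_bits() */
}

int main(void)
{
	struct qref_entry q;

	qref_set(&q, 0x123456789abcdef0ULL);
	printf("info0=0x%08x info1=0x%08x\n", q.info0, q.info1);

	qref_set(&q, 0);   /* the reset path writes zeros, cf. qref_reset() */
	return 0;
}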
797 struct ath12k_dp_rx_tid *rx_tid = &peer->rx_tid[tid]; in ath12k_dp_rx_peer_tid_delete()
800 if (!rx_tid->active) in ath12k_dp_rx_peer_tid_delete()
804 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath12k_dp_rx_peer_tid_delete()
805 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath12k_dp_rx_peer_tid_delete()
807 ret = ath12k_dp_reo_cmd_send(ar->ab, rx_tid, in ath12k_dp_rx_peer_tid_delete()
811 ath12k_err(ar->ab, "failed to send HAL_REO_CMD_UPDATE_RX_QUEUE cmd, tid %d (%d)\n", in ath12k_dp_rx_peer_tid_delete()
813 dma_unmap_single(ar->ab->dev, rx_tid->paddr, rx_tid->size, in ath12k_dp_rx_peer_tid_delete()
815 kfree(rx_tid->vaddr); in ath12k_dp_rx_peer_tid_delete()
816 rx_tid->vaddr = NULL; in ath12k_dp_rx_peer_tid_delete()
819 if (peer->mlo) in ath12k_dp_rx_peer_tid_delete()
820 ath12k_peer_rx_tid_qref_reset(ar->ab, peer->ml_id, tid); in ath12k_dp_rx_peer_tid_delete()
822 ath12k_peer_rx_tid_qref_reset(ar->ab, peer->peer_id, tid); in ath12k_dp_rx_peer_tid_delete()
824 rx_tid->active = false; in ath12k_dp_rx_peer_tid_delete()
837 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_link_desc_return()
841 srng = &ab->hal.srng_list[dp->wbm_desc_rel_ring.ring_id]; in ath12k_dp_rx_link_desc_return()
843 spin_lock_bh(&srng->lock); in ath12k_dp_rx_link_desc_return()
849 ret = -ENOBUFS; in ath12k_dp_rx_link_desc_return()
858 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_link_desc_return()
866 struct ath12k_base *ab = rx_tid->ab; in ath12k_dp_rx_frags_cleanup()
868 lockdep_assert_held(&ab->base_lock); in ath12k_dp_rx_frags_cleanup()
870 if (rx_tid->dst_ring_desc) { in ath12k_dp_rx_frags_cleanup()
872 ath12k_dp_rx_link_desc_return(ab, rx_tid->dst_ring_desc, in ath12k_dp_rx_frags_cleanup()
874 kfree(rx_tid->dst_ring_desc); in ath12k_dp_rx_frags_cleanup()
875 rx_tid->dst_ring_desc = NULL; in ath12k_dp_rx_frags_cleanup()
878 rx_tid->cur_sn = 0; in ath12k_dp_rx_frags_cleanup()
879 rx_tid->last_frag_no = 0; in ath12k_dp_rx_frags_cleanup()
880 rx_tid->rx_frag_bitmap = 0; in ath12k_dp_rx_frags_cleanup()
881 __skb_queue_purge(&rx_tid->rx_frags); in ath12k_dp_rx_frags_cleanup()
889 lockdep_assert_held(&ar->ab->base_lock); in ath12k_dp_rx_peer_tid_cleanup()
892 rx_tid = &peer->rx_tid[i]; in ath12k_dp_rx_peer_tid_cleanup()
897 spin_unlock_bh(&ar->ab->base_lock); in ath12k_dp_rx_peer_tid_cleanup()
898 del_timer_sync(&rx_tid->frag_timer); in ath12k_dp_rx_peer_tid_cleanup()
899 spin_lock_bh(&ar->ab->base_lock); in ath12k_dp_rx_peer_tid_cleanup()
912 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath12k_peer_rx_tid_reo_update()
913 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath12k_peer_rx_tid_reo_update()
923 ret = ath12k_dp_reo_cmd_send(ar->ab, rx_tid, in ath12k_peer_rx_tid_reo_update()
927 ath12k_warn(ar->ab, "failed to update rx tid queue, tid %d (%d)\n", in ath12k_peer_rx_tid_reo_update()
928 rx_tid->tid, ret); in ath12k_peer_rx_tid_reo_update()
932 rx_tid->ba_win_sz = ba_win_sz; in ath12k_peer_rx_tid_reo_update()
941 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_peer_tid_setup()
942 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_peer_tid_setup()
951 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
955 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
957 return -ENOENT; in ath12k_dp_rx_peer_tid_setup()
960 if (!peer->primary_link) { in ath12k_dp_rx_peer_tid_setup()
961 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
965 if (ab->hw_params->reoq_lut_support && in ath12k_dp_rx_peer_tid_setup()
966 (!dp->reoq_lut.vaddr || !dp->ml_reoq_lut.vaddr)) { in ath12k_dp_rx_peer_tid_setup()
967 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
969 return -EINVAL; in ath12k_dp_rx_peer_tid_setup()
972 if (peer->peer_id > DP_MAX_PEER_ID || tid > IEEE80211_NUM_TIDS) { in ath12k_dp_rx_peer_tid_setup()
974 peer->peer_id, tid); in ath12k_dp_rx_peer_tid_setup()
975 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
976 return -EINVAL; in ath12k_dp_rx_peer_tid_setup()
979 rx_tid = &peer->rx_tid[tid]; in ath12k_dp_rx_peer_tid_setup()
981 if (rx_tid->active) { in ath12k_dp_rx_peer_tid_setup()
982 paddr = rx_tid->paddr; in ath12k_dp_rx_peer_tid_setup()
985 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
991 if (!ab->hw_params->reoq_lut_support) { in ath12k_dp_rx_peer_tid_setup()
1006 rx_tid->tid = tid; in ath12k_dp_rx_peer_tid_setup()
1008 rx_tid->ba_win_sz = ba_win_sz; in ath12k_dp_rx_peer_tid_setup()
1018 vaddr = kzalloc(hw_desc_sz + HAL_LINK_DESC_ALIGN - 1, GFP_ATOMIC); in ath12k_dp_rx_peer_tid_setup()
1020 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
1021 return -ENOMEM; in ath12k_dp_rx_peer_tid_setup()
1029 paddr = dma_map_single(ab->dev, addr_aligned, hw_desc_sz, in ath12k_dp_rx_peer_tid_setup()
1032 ret = dma_mapping_error(ab->dev, paddr); in ath12k_dp_rx_peer_tid_setup()
1034 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
1038 rx_tid->vaddr = vaddr; in ath12k_dp_rx_peer_tid_setup()
1039 rx_tid->paddr = paddr; in ath12k_dp_rx_peer_tid_setup()
1040 rx_tid->size = hw_desc_sz; in ath12k_dp_rx_peer_tid_setup()
1041 rx_tid->active = true; in ath12k_dp_rx_peer_tid_setup()
1043 if (ab->hw_params->reoq_lut_support) { in ath12k_dp_rx_peer_tid_setup()
1047 if (peer->mlo) in ath12k_dp_rx_peer_tid_setup()
1048 ath12k_peer_rx_tid_qref_setup(ab, peer->ml_id, tid, paddr); in ath12k_dp_rx_peer_tid_setup()
1050 ath12k_peer_rx_tid_qref_setup(ab, peer->peer_id, tid, paddr); in ath12k_dp_rx_peer_tid_setup()
1052 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
1054 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
1071 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_ampdu_start()
1072 struct ath12k_sta *ahsta = ath12k_sta_to_ahsta(params->sta); in ath12k_dp_rx_ampdu_start()
1077 lockdep_assert_wiphy(ath12k_ar_to_hw(ar)->wiphy); in ath12k_dp_rx_ampdu_start()
1079 arsta = wiphy_dereference(ath12k_ar_to_hw(ar)->wiphy, in ath12k_dp_rx_ampdu_start()
1080 ahsta->link[link_id]); in ath12k_dp_rx_ampdu_start()
1082 return -ENOLINK; in ath12k_dp_rx_ampdu_start()
1084 vdev_id = arsta->arvif->vdev_id; in ath12k_dp_rx_ampdu_start()
1086 ret = ath12k_dp_rx_peer_tid_setup(ar, arsta->addr, vdev_id, in ath12k_dp_rx_ampdu_start()
1087 params->tid, params->buf_size, in ath12k_dp_rx_ampdu_start()
1088 params->ssn, arsta->ahsta->pn_type); in ath12k_dp_rx_ampdu_start()
1099 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_ampdu_stop()
1101 struct ath12k_sta *ahsta = ath12k_sta_to_ahsta(params->sta); in ath12k_dp_rx_ampdu_stop()
1107 lockdep_assert_wiphy(ath12k_ar_to_hw(ar)->wiphy); in ath12k_dp_rx_ampdu_stop()
1109 arsta = wiphy_dereference(ath12k_ar_to_hw(ar)->wiphy, in ath12k_dp_rx_ampdu_stop()
1110 ahsta->link[link_id]); in ath12k_dp_rx_ampdu_stop()
1112 return -ENOLINK; in ath12k_dp_rx_ampdu_stop()
1114 vdev_id = arsta->arvif->vdev_id; in ath12k_dp_rx_ampdu_stop()
1116 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_ampdu_stop()
1118 peer = ath12k_peer_find(ab, vdev_id, arsta->addr); in ath12k_dp_rx_ampdu_stop()
1120 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_ampdu_stop()
1122 return -ENOENT; in ath12k_dp_rx_ampdu_stop()
1125 active = peer->rx_tid[params->tid].active; in ath12k_dp_rx_ampdu_stop()
1128 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_ampdu_stop()
1132 ret = ath12k_peer_rx_tid_reo_update(ar, peer, peer->rx_tid, 1, 0, false); in ath12k_dp_rx_ampdu_stop()
1133 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_ampdu_stop()
1136 params->tid, ret); in ath12k_dp_rx_ampdu_stop()
1148 struct ath12k *ar = arvif->ar; in ath12k_dp_rx_peer_pn_replay_config()
1149 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_peer_pn_replay_config()
1160 if (!(key->flags & IEEE80211_KEY_FLAG_PAIRWISE)) in ath12k_dp_rx_peer_pn_replay_config()
1170 switch (key->cipher) { in ath12k_dp_rx_peer_pn_replay_config()
1185 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_peer_pn_replay_config()
1187 peer = ath12k_peer_find(ab, arvif->vdev_id, peer_addr); in ath12k_dp_rx_peer_pn_replay_config()
1189 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_pn_replay_config()
1192 return -ENOENT; in ath12k_dp_rx_peer_pn_replay_config()
1196 rx_tid = &peer->rx_tid[tid]; in ath12k_dp_rx_peer_pn_replay_config()
1197 if (!rx_tid->active) in ath12k_dp_rx_peer_pn_replay_config()
1199 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath12k_dp_rx_peer_pn_replay_config()
1200 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath12k_dp_rx_peer_pn_replay_config()
1211 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_pn_replay_config()
1221 for (i = 0; i < HTT_PPDU_STATS_MAX_USERS - 1; i++) { in ath12k_get_ppdu_user_index()
1222 if (ppdu_stats->user_stats[i].is_valid_peer_id) { in ath12k_get_ppdu_user_index()
1223 if (peer_id == ppdu_stats->user_stats[i].peer_id) in ath12k_get_ppdu_user_index()
1230 return -EINVAL; in ath12k_get_ppdu_user_index()
1252 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1254 memcpy(&ppdu_info->ppdu_stats.common, ptr, in ath12k_htt_tlv_ppdu_stats_parse()
1261 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1264 peer_id = le16_to_cpu(user_rate->sw_peer_id); in ath12k_htt_tlv_ppdu_stats_parse()
1265 cur_user = ath12k_get_ppdu_user_index(&ppdu_info->ppdu_stats, in ath12k_htt_tlv_ppdu_stats_parse()
1268 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1269 user_stats = &ppdu_info->ppdu_stats.user_stats[cur_user]; in ath12k_htt_tlv_ppdu_stats_parse()
1270 user_stats->peer_id = peer_id; in ath12k_htt_tlv_ppdu_stats_parse()
1271 user_stats->is_valid_peer_id = true; in ath12k_htt_tlv_ppdu_stats_parse()
1272 memcpy(&user_stats->rate, ptr, in ath12k_htt_tlv_ppdu_stats_parse()
1274 user_stats->tlv_flags |= BIT(tag); in ath12k_htt_tlv_ppdu_stats_parse()
1280 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1284 peer_id = le16_to_cpu(cmplt_cmn->sw_peer_id); in ath12k_htt_tlv_ppdu_stats_parse()
1285 cur_user = ath12k_get_ppdu_user_index(&ppdu_info->ppdu_stats, in ath12k_htt_tlv_ppdu_stats_parse()
1288 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1289 user_stats = &ppdu_info->ppdu_stats.user_stats[cur_user]; in ath12k_htt_tlv_ppdu_stats_parse()
1290 user_stats->peer_id = peer_id; in ath12k_htt_tlv_ppdu_stats_parse()
1291 user_stats->is_valid_peer_id = true; in ath12k_htt_tlv_ppdu_stats_parse()
1292 memcpy(&user_stats->cmpltn_cmn, ptr, in ath12k_htt_tlv_ppdu_stats_parse()
1294 user_stats->tlv_flags |= BIT(tag); in ath12k_htt_tlv_ppdu_stats_parse()
1301 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1305 peer_id = le16_to_cpu(ba_status->sw_peer_id); in ath12k_htt_tlv_ppdu_stats_parse()
1306 cur_user = ath12k_get_ppdu_user_index(&ppdu_info->ppdu_stats, in ath12k_htt_tlv_ppdu_stats_parse()
1309 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1310 user_stats = &ppdu_info->ppdu_stats.user_stats[cur_user]; in ath12k_htt_tlv_ppdu_stats_parse()
1311 user_stats->peer_id = peer_id; in ath12k_htt_tlv_ppdu_stats_parse()
1312 user_stats->is_valid_peer_id = true; in ath12k_htt_tlv_ppdu_stats_parse()
1313 memcpy(&user_stats->ack_ba, ptr, in ath12k_htt_tlv_ppdu_stats_parse()
1315 user_stats->tlv_flags |= BIT(tag); in ath12k_htt_tlv_ppdu_stats_parse()
1329 int ret = -EINVAL; in ath12k_dp_htt_tlv_iter()
1334 ptr - begin, len, sizeof(*tlv)); in ath12k_dp_htt_tlv_iter()
1335 return -EINVAL; in ath12k_dp_htt_tlv_iter()
1338 tlv_tag = le32_get_bits(tlv->header, HTT_TLV_TAG); in ath12k_dp_htt_tlv_iter()
1339 tlv_len = le32_get_bits(tlv->header, HTT_TLV_LEN); in ath12k_dp_htt_tlv_iter()
1341 len -= sizeof(*tlv); in ath12k_dp_htt_tlv_iter()
1345 tlv_tag, ptr - begin, len, tlv_len); in ath12k_dp_htt_tlv_iter()
1346 return -EINVAL; in ath12k_dp_htt_tlv_iter()
1349 if (ret == -ENOMEM) in ath12k_dp_htt_tlv_iter()
1353 len -= tlv_len; in ath12k_dp_htt_tlv_iter()
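ath12k_dp_htt_tlv_iter() (file lines 1329-1353) walks a buffer of little-endian {tag, len} TLVs, rejecting a truncated header or a payload that overruns the remaining length before handing each TLV to the parse callback; the parser above then stashes per-user stats keyed by sw_peer_id. A compact user-space sketch of the same walk; the 12-bit tag and length fields (and a little-endian host) are assumptions for the demo, not the real HTT_TLV_* widths:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define TLV_TAG(h)  ((h) & 0xfff)
#define TLV_LEN(h)  (((h) >> 12) & 0xfff)

static int tlv_iter(const uint8_t *ptr, size_t len,
		    void (*cb)(uint16_t tag, const uint8_t *p, uint16_t l))
{
	while (len > 0) {
		uint32_t hdr;
		uint16_t tag, tlen;

		if (len < sizeof(hdr))
			return -1;            /* truncated header */
		memcpy(&hdr, ptr, sizeof(hdr));
		tag = TLV_TAG(hdr);
		tlen = TLV_LEN(hdr);
		ptr += sizeof(hdr);
		len -= sizeof(hdr);
		if (tlen > len)
			return -1;            /* payload overruns buffer */
		cb(tag, ptr, tlen);
		ptr += tlen;
		len -= tlen;
	}
	return 0;
}

static void dump(uint16_t tag, const uint8_t *p, uint16_t l)
{
	(void)p;
	printf("tag %u len %u\n", tag, l);
}

int main(void)
{
	/* one TLV: tag=1, len=2, payload {0xaa, 0xbb} */
	uint8_t buf[] = { 0x01, 0x20, 0x00, 0x00, 0xaa, 0xbb };
	return tlv_iter(buf, sizeof(buf), dump);
}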
1362 struct ath12k_base *ab = ar->ab; in ath12k_update_per_peer_tx_stats()
1368 struct ath12k_per_peer_tx_stats *peer_stats = &ar->peer_tx_stats; in ath12k_update_per_peer_tx_stats()
1369 struct htt_ppdu_user_stats *usr_stats = &ppdu_stats->user_stats[user]; in ath12k_update_per_peer_tx_stats()
1370 struct htt_ppdu_stats_common *common = &ppdu_stats->common; in ath12k_update_per_peer_tx_stats()
1379 if (!(usr_stats->tlv_flags & BIT(HTT_PPDU_STATS_TAG_USR_RATE))) in ath12k_update_per_peer_tx_stats()
1382 if (usr_stats->tlv_flags & BIT(HTT_PPDU_STATS_TAG_USR_COMPLTN_COMMON)) in ath12k_update_per_peer_tx_stats()
1384 HTT_USR_CMPLTN_IS_AMPDU(usr_stats->cmpltn_cmn.flags); in ath12k_update_per_peer_tx_stats()
1386 if (usr_stats->tlv_flags & in ath12k_update_per_peer_tx_stats()
1388 succ_bytes = le32_to_cpu(usr_stats->ack_ba.success_bytes); in ath12k_update_per_peer_tx_stats()
1389 succ_pkts = le32_get_bits(usr_stats->ack_ba.info, in ath12k_update_per_peer_tx_stats()
1391 tid = le32_get_bits(usr_stats->ack_ba.info, in ath12k_update_per_peer_tx_stats()
1395 if (common->fes_duration_us) in ath12k_update_per_peer_tx_stats()
1396 tx_duration = le32_to_cpu(common->fes_duration_us); in ath12k_update_per_peer_tx_stats()
1398 user_rate = &usr_stats->rate; in ath12k_update_per_peer_tx_stats()
1399 flags = HTT_USR_RATE_PREAMBLE(user_rate->rate_flags); in ath12k_update_per_peer_tx_stats()
1400 bw = HTT_USR_RATE_BW(user_rate->rate_flags) - 2; in ath12k_update_per_peer_tx_stats()
1401 nss = HTT_USR_RATE_NSS(user_rate->rate_flags) + 1; in ath12k_update_per_peer_tx_stats()
1402 mcs = HTT_USR_RATE_MCS(user_rate->rate_flags); in ath12k_update_per_peer_tx_stats()
1403 sgi = HTT_USR_RATE_GI(user_rate->rate_flags); in ath12k_update_per_peer_tx_stats()
1404 dcm = HTT_USR_RATE_DCM(user_rate->rate_flags); in ath12k_update_per_peer_tx_stats()
1437 spin_lock_bh(&ab->base_lock); in ath12k_update_per_peer_tx_stats()
1438 peer = ath12k_peer_find_by_id(ab, usr_stats->peer_id); in ath12k_update_per_peer_tx_stats()
1440 if (!peer || !peer->sta) { in ath12k_update_per_peer_tx_stats()
1441 spin_unlock_bh(&ab->base_lock); in ath12k_update_per_peer_tx_stats()
1446 sta = peer->sta; in ath12k_update_per_peer_tx_stats()
1448 arsta = &ahsta->deflink; in ath12k_update_per_peer_tx_stats()
1450 memset(&arsta->txrate, 0, sizeof(arsta->txrate)); in ath12k_update_per_peer_tx_stats()
1454 arsta->txrate.legacy = rate; in ath12k_update_per_peer_tx_stats()
1457 arsta->txrate.legacy = rate; in ath12k_update_per_peer_tx_stats()
1460 arsta->txrate.mcs = mcs + 8 * (nss - 1); in ath12k_update_per_peer_tx_stats()
1461 arsta->txrate.flags = RATE_INFO_FLAGS_MCS; in ath12k_update_per_peer_tx_stats()
1463 arsta->txrate.flags |= RATE_INFO_FLAGS_SHORT_GI; in ath12k_update_per_peer_tx_stats()
1466 arsta->txrate.mcs = mcs; in ath12k_update_per_peer_tx_stats()
1467 arsta->txrate.flags = RATE_INFO_FLAGS_VHT_MCS; in ath12k_update_per_peer_tx_stats()
1469 arsta->txrate.flags |= RATE_INFO_FLAGS_SHORT_GI; in ath12k_update_per_peer_tx_stats()
1472 arsta->txrate.mcs = mcs; in ath12k_update_per_peer_tx_stats()
1473 arsta->txrate.flags = RATE_INFO_FLAGS_HE_MCS; in ath12k_update_per_peer_tx_stats()
1474 arsta->txrate.he_dcm = dcm; in ath12k_update_per_peer_tx_stats()
1475 arsta->txrate.he_gi = ath12k_he_gi_to_nl80211_he_gi(sgi); in ath12k_update_per_peer_tx_stats()
1476 tones = le16_to_cpu(user_rate->ru_end) - in ath12k_update_per_peer_tx_stats()
1477 le16_to_cpu(user_rate->ru_start) + 1; in ath12k_update_per_peer_tx_stats()
1479 arsta->txrate.he_ru_alloc = v; in ath12k_update_per_peer_tx_stats()
1483 arsta->txrate.nss = nss; in ath12k_update_per_peer_tx_stats()
1484 arsta->txrate.bw = ath12k_mac_bw_to_mac80211_bw(bw); in ath12k_update_per_peer_tx_stats()
1485 arsta->tx_duration += tx_duration; in ath12k_update_per_peer_tx_stats()
1486 memcpy(&arsta->last_txrate, &arsta->txrate, sizeof(struct rate_info)); in ath12k_update_per_peer_tx_stats()
1493 peer_stats->succ_pkts = succ_pkts; in ath12k_update_per_peer_tx_stats()
1494 peer_stats->succ_bytes = succ_bytes; in ath12k_update_per_peer_tx_stats()
1495 peer_stats->is_ampdu = is_ampdu; in ath12k_update_per_peer_tx_stats()
1496 peer_stats->duration = tx_duration; in ath12k_update_per_peer_tx_stats()
1497 peer_stats->ba_fails = in ath12k_update_per_peer_tx_stats()
1498 HTT_USR_CMPLTN_LONG_RETRY(usr_stats->cmpltn_cmn.flags) + in ath12k_update_per_peer_tx_stats()
1499 HTT_USR_CMPLTN_SHORT_RETRY(usr_stats->cmpltn_cmn.flags); in ath12k_update_per_peer_tx_stats()
1502 spin_unlock_bh(&ab->base_lock); in ath12k_update_per_peer_tx_stats()
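When filling arsta->txrate above, HT folds the stream count into the MCS index as mcs + 8 * (nss - 1) (file line 1460). A short demonstration of that arithmetic:

#include <stdio.h>

/* HT rate index packs spatial streams into the MCS number: each extra
 * stream adds 8. */
static int ht_rate_idx(int mcs, int nss)
{
	return mcs + 8 * (nss - 1);
}

int main(void)
{
	printf("MCS7 x1ss -> HT MCS %d\n", ht_rate_idx(7, 1));  /* 7  */
	printf("MCS7 x2ss -> HT MCS %d\n", ht_rate_idx(7, 2));  /* 15 */
	printf("MCS5 x4ss -> HT MCS %d\n", ht_rate_idx(5, 4));  /* 29 */
	return 0;
}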
1511 for (user = 0; user < HTT_PPDU_STATS_MAX_USERS - 1; user++) in ath12k_htt_update_ppdu_stats()
1521 lockdep_assert_held(&ar->data_lock); in ath12k_dp_htt_get_ppdu_desc()
1522 if (!list_empty(&ar->ppdu_stats_info)) { in ath12k_dp_htt_get_ppdu_desc()
1523 list_for_each_entry(ppdu_info, &ar->ppdu_stats_info, list) { in ath12k_dp_htt_get_ppdu_desc()
1524 if (ppdu_info->ppdu_id == ppdu_id) in ath12k_dp_htt_get_ppdu_desc()
1528 if (ar->ppdu_stat_list_depth > HTT_PPDU_DESC_MAX_DEPTH) { in ath12k_dp_htt_get_ppdu_desc()
1529 ppdu_info = list_first_entry(&ar->ppdu_stats_info, in ath12k_dp_htt_get_ppdu_desc()
1531 list_del(&ppdu_info->list); in ath12k_dp_htt_get_ppdu_desc()
1532 ar->ppdu_stat_list_depth--; in ath12k_dp_htt_get_ppdu_desc()
1533 ath12k_htt_update_ppdu_stats(ar, &ppdu_info->ppdu_stats); in ath12k_dp_htt_get_ppdu_desc()
1542 list_add_tail(&ppdu_info->list, &ar->ppdu_stats_info); in ath12k_dp_htt_get_ppdu_desc()
1543 ar->ppdu_stat_list_depth++; in ath12k_dp_htt_get_ppdu_desc()
1551 peer->ppdu_stats_delayba.sw_peer_id = le16_to_cpu(usr_stats->rate.sw_peer_id); in ath12k_copy_to_delay_stats()
1552 peer->ppdu_stats_delayba.info0 = le32_to_cpu(usr_stats->rate.info0); in ath12k_copy_to_delay_stats()
1553 peer->ppdu_stats_delayba.ru_end = le16_to_cpu(usr_stats->rate.ru_end); in ath12k_copy_to_delay_stats()
1554 peer->ppdu_stats_delayba.ru_start = le16_to_cpu(usr_stats->rate.ru_start); in ath12k_copy_to_delay_stats()
1555 peer->ppdu_stats_delayba.info1 = le32_to_cpu(usr_stats->rate.info1); in ath12k_copy_to_delay_stats()
1556 peer->ppdu_stats_delayba.rate_flags = le32_to_cpu(usr_stats->rate.rate_flags); in ath12k_copy_to_delay_stats()
1557 peer->ppdu_stats_delayba.resp_rate_flags = in ath12k_copy_to_delay_stats()
1558 le32_to_cpu(usr_stats->rate.resp_rate_flags); in ath12k_copy_to_delay_stats()
1560 peer->delayba_flag = true; in ath12k_copy_to_delay_stats()
1566 usr_stats->rate.sw_peer_id = cpu_to_le16(peer->ppdu_stats_delayba.sw_peer_id); in ath12k_copy_to_bar()
1567 usr_stats->rate.info0 = cpu_to_le32(peer->ppdu_stats_delayba.info0); in ath12k_copy_to_bar()
1568 usr_stats->rate.ru_end = cpu_to_le16(peer->ppdu_stats_delayba.ru_end); in ath12k_copy_to_bar()
1569 usr_stats->rate.ru_start = cpu_to_le16(peer->ppdu_stats_delayba.ru_start); in ath12k_copy_to_bar()
1570 usr_stats->rate.info1 = cpu_to_le32(peer->ppdu_stats_delayba.info1); in ath12k_copy_to_bar()
1571 usr_stats->rate.rate_flags = cpu_to_le32(peer->ppdu_stats_delayba.rate_flags); in ath12k_copy_to_bar()
1572 usr_stats->rate.resp_rate_flags = in ath12k_copy_to_bar()
1573 cpu_to_le32(peer->ppdu_stats_delayba.resp_rate_flags); in ath12k_copy_to_bar()
1575 peer->delayba_flag = false; in ath12k_copy_to_bar()
1591 msg = (struct ath12k_htt_ppdu_stats_msg *)skb->data; in ath12k_htt_pull_ppdu_stats()
1592 len = le32_get_bits(msg->info, HTT_T2H_PPDU_STATS_INFO_PAYLOAD_SIZE); in ath12k_htt_pull_ppdu_stats()
1593 if (len > (skb->len - struct_size(msg, data, 0))) { in ath12k_htt_pull_ppdu_stats()
1596 len, skb->len); in ath12k_htt_pull_ppdu_stats()
1597 return -EINVAL; in ath12k_htt_pull_ppdu_stats()
1600 pdev_id = le32_get_bits(msg->info, HTT_T2H_PPDU_STATS_INFO_PDEV_ID); in ath12k_htt_pull_ppdu_stats()
1601 ppdu_id = le32_to_cpu(msg->ppdu_id); in ath12k_htt_pull_ppdu_stats()
1606 ret = -EINVAL; in ath12k_htt_pull_ppdu_stats()
1610 spin_lock_bh(&ar->data_lock); in ath12k_htt_pull_ppdu_stats()
1613 spin_unlock_bh(&ar->data_lock); in ath12k_htt_pull_ppdu_stats()
1614 ret = -EINVAL; in ath12k_htt_pull_ppdu_stats()
1618 ppdu_info->ppdu_id = ppdu_id; in ath12k_htt_pull_ppdu_stats()
1619 ret = ath12k_dp_htt_tlv_iter(ab, msg->data, len, in ath12k_htt_pull_ppdu_stats()
1623 spin_unlock_bh(&ar->data_lock); in ath12k_htt_pull_ppdu_stats()
1628 if (ppdu_info->ppdu_stats.common.num_users >= HTT_PPDU_STATS_MAX_USERS) { in ath12k_htt_pull_ppdu_stats()
1629 spin_unlock_bh(&ar->data_lock); in ath12k_htt_pull_ppdu_stats()
1632 ppdu_info->ppdu_stats.common.num_users, in ath12k_htt_pull_ppdu_stats()
1634 ret = -EINVAL; in ath12k_htt_pull_ppdu_stats()
1639 if (ppdu_info->frame_type == HTT_STATS_PPDU_FTYPE_DATA && in ath12k_htt_pull_ppdu_stats()
1640 (ppdu_info->tlv_bitmap & (1 << HTT_PPDU_STATS_TAG_USR_COMMON)) && in ath12k_htt_pull_ppdu_stats()
1641 ppdu_info->delay_ba) { in ath12k_htt_pull_ppdu_stats()
1642 for (i = 0; i < ppdu_info->ppdu_stats.common.num_users; i++) { in ath12k_htt_pull_ppdu_stats()
1643 peer_id = ppdu_info->ppdu_stats.user_stats[i].peer_id; in ath12k_htt_pull_ppdu_stats()
1644 spin_lock_bh(&ab->base_lock); in ath12k_htt_pull_ppdu_stats()
1647 spin_unlock_bh(&ab->base_lock); in ath12k_htt_pull_ppdu_stats()
1651 usr_stats = &ppdu_info->ppdu_stats.user_stats[i]; in ath12k_htt_pull_ppdu_stats()
1652 if (usr_stats->delay_ba) in ath12k_htt_pull_ppdu_stats()
1654 spin_unlock_bh(&ab->base_lock); in ath12k_htt_pull_ppdu_stats()
1658 /* restore all peers' data rate tlv to mu-bar tlv */ in ath12k_htt_pull_ppdu_stats()
1659 if (ppdu_info->frame_type == HTT_STATS_PPDU_FTYPE_BAR && in ath12k_htt_pull_ppdu_stats()
1660 (ppdu_info->tlv_bitmap & (1 << HTT_PPDU_STATS_TAG_USR_COMMON))) { in ath12k_htt_pull_ppdu_stats()
1661 for (i = 0; i < ppdu_info->bar_num_users; i++) { in ath12k_htt_pull_ppdu_stats()
1662 peer_id = ppdu_info->ppdu_stats.user_stats[i].peer_id; in ath12k_htt_pull_ppdu_stats()
1663 spin_lock_bh(&ab->base_lock); in ath12k_htt_pull_ppdu_stats()
1666 spin_unlock_bh(&ab->base_lock); in ath12k_htt_pull_ppdu_stats()
1670 usr_stats = &ppdu_info->ppdu_stats.user_stats[i]; in ath12k_htt_pull_ppdu_stats()
1671 if (peer->delayba_flag) in ath12k_htt_pull_ppdu_stats()
1673 spin_unlock_bh(&ab->base_lock); in ath12k_htt_pull_ppdu_stats()
1677 spin_unlock_bh(&ar->data_lock); in ath12k_htt_pull_ppdu_stats()
1693 msg = (struct ath12k_htt_mlo_offset_msg *)skb->data; in ath12k_htt_mlo_offset_event_handler()
1694 pdev_id = u32_get_bits(__le32_to_cpu(msg->info), in ath12k_htt_mlo_offset_event_handler()
1708 spin_lock_bh(&ar->data_lock); in ath12k_htt_mlo_offset_event_handler()
1709 pdev = ar->pdev; in ath12k_htt_mlo_offset_event_handler()
1711 pdev->timestamp.info = __le32_to_cpu(msg->info); in ath12k_htt_mlo_offset_event_handler()
1712 pdev->timestamp.sync_timestamp_lo_us = __le32_to_cpu(msg->sync_timestamp_lo_us); in ath12k_htt_mlo_offset_event_handler()
1713 pdev->timestamp.sync_timestamp_hi_us = __le32_to_cpu(msg->sync_timestamp_hi_us); in ath12k_htt_mlo_offset_event_handler()
1714 pdev->timestamp.mlo_offset_lo = __le32_to_cpu(msg->mlo_offset_lo); in ath12k_htt_mlo_offset_event_handler()
1715 pdev->timestamp.mlo_offset_hi = __le32_to_cpu(msg->mlo_offset_hi); in ath12k_htt_mlo_offset_event_handler()
1716 pdev->timestamp.mlo_offset_clks = __le32_to_cpu(msg->mlo_offset_clks); in ath12k_htt_mlo_offset_event_handler()
1717 pdev->timestamp.mlo_comp_clks = __le32_to_cpu(msg->mlo_comp_clks); in ath12k_htt_mlo_offset_event_handler()
1718 pdev->timestamp.mlo_comp_timer = __le32_to_cpu(msg->mlo_comp_timer); in ath12k_htt_mlo_offset_event_handler()
1720 spin_unlock_bh(&ar->data_lock); in ath12k_htt_mlo_offset_event_handler()
1728 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_htt_htc_t2h_msg_handler()
1729 struct htt_resp_msg *resp = (struct htt_resp_msg *)skb->data; in ath12k_dp_htt_htc_t2h_msg_handler()
1738 type = le32_get_bits(resp->version_msg.version, HTT_T2H_MSG_TYPE); in ath12k_dp_htt_htc_t2h_msg_handler()
1744 dp->htt_tgt_ver_major = le32_get_bits(resp->version_msg.version, in ath12k_dp_htt_htc_t2h_msg_handler()
1746 dp->htt_tgt_ver_minor = le32_get_bits(resp->version_msg.version, in ath12k_dp_htt_htc_t2h_msg_handler()
1748 complete(&dp->htt_tgt_version_received); in ath12k_dp_htt_htc_t2h_msg_handler()
1752 vdev_id = le32_get_bits(resp->peer_map_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1754 peer_id = le32_get_bits(resp->peer_map_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1756 peer_mac_h16 = le32_get_bits(resp->peer_map_ev.info1, in ath12k_dp_htt_htc_t2h_msg_handler()
1758 ath12k_dp_get_mac_addr(le32_to_cpu(resp->peer_map_ev.mac_addr_l32), in ath12k_dp_htt_htc_t2h_msg_handler()
1763 vdev_id = le32_get_bits(resp->peer_map_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1765 peer_id = le32_get_bits(resp->peer_map_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1767 peer_mac_h16 = le32_get_bits(resp->peer_map_ev.info1, in ath12k_dp_htt_htc_t2h_msg_handler()
1769 ath12k_dp_get_mac_addr(le32_to_cpu(resp->peer_map_ev.mac_addr_l32), in ath12k_dp_htt_htc_t2h_msg_handler()
1771 ast_hash = le32_get_bits(resp->peer_map_ev.info2, in ath12k_dp_htt_htc_t2h_msg_handler()
1773 hw_peer_id = le32_get_bits(resp->peer_map_ev.info1, in ath12k_dp_htt_htc_t2h_msg_handler()
1779 vdev_id = le32_get_bits(resp->peer_map_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1781 peer_id = le32_get_bits(resp->peer_map_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1783 peer_mac_h16 = le32_get_bits(resp->peer_map_ev.info1, in ath12k_dp_htt_htc_t2h_msg_handler()
1785 ath12k_dp_get_mac_addr(le32_to_cpu(resp->peer_map_ev.mac_addr_l32), in ath12k_dp_htt_htc_t2h_msg_handler()
1792 peer_id = le32_get_bits(resp->peer_unmap_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1819 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_msdu_coalesce()
1825 u32 hal_rx_desc_sz = ar->ab->hal.hal_desc_sz; in ath12k_dp_rx_msdu_coalesce()
1827 /* As the msdu is spread across multiple rx buffers, in ath12k_dp_rx_msdu_coalesce()
1832 buf_first_len = DP_RX_BUFFER_SIZE - buf_first_hdr_len; in ath12k_dp_rx_msdu_coalesce()
1840 ldesc = (struct hal_rx_desc *)last->data; in ath12k_dp_rx_msdu_coalesce()
1841 rxcb->is_first_msdu = ath12k_dp_rx_h_first_msdu(ab, ldesc); in ath12k_dp_rx_msdu_coalesce()
1842 rxcb->is_last_msdu = ath12k_dp_rx_h_last_msdu(ab, ldesc); in ath12k_dp_rx_msdu_coalesce()
1845 * exceeds DP_RX_BUFFER_SIZE - HAL_RX_DESC_SIZE. So assume the data in ath12k_dp_rx_msdu_coalesce()
1846 * in the first buf is of length DP_RX_BUFFER_SIZE - HAL_RX_DESC_SIZE. in ath12k_dp_rx_msdu_coalesce()
1851 /* When an MSDU is spread over multiple buffers, MSDU_END in ath12k_dp_rx_msdu_coalesce()
1854 ath12k_dp_rx_desc_end_tlv_copy(ab, rxcb->rx_desc, ldesc); in ath12k_dp_rx_msdu_coalesce()
1856 space_extra = msdu_len - (buf_first_len + skb_tailroom(first)); in ath12k_dp_rx_msdu_coalesce()
1862 if (!rxcb->is_continuation) { in ath12k_dp_rx_msdu_coalesce()
1868 return -ENOMEM; in ath12k_dp_rx_msdu_coalesce()
1871 rem_len = msdu_len - buf_first_len; in ath12k_dp_rx_msdu_coalesce()
1874 if (rxcb->is_continuation) in ath12k_dp_rx_msdu_coalesce()
1875 buf_len = DP_RX_BUFFER_SIZE - hal_rx_desc_sz; in ath12k_dp_rx_msdu_coalesce()
1879 if (buf_len > (DP_RX_BUFFER_SIZE - hal_rx_desc_sz)) { in ath12k_dp_rx_msdu_coalesce()
1882 return -EINVAL; in ath12k_dp_rx_msdu_coalesce()
1891 rem_len -= buf_len; in ath12k_dp_rx_msdu_coalesce()
1892 if (!rxcb->is_continuation) in ath12k_dp_rx_msdu_coalesce()
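The coalesce path (file lines 1827-1892) computes how much of the MSDU landed in each buffer: the first buffer loses the HAL descriptor plus L3 pad, continuation buffers lose only the descriptor, and the last carries whatever remains. A sketch of that bookkeeping with stand-in values for DP_RX_BUFFER_SIZE and the descriptor size:

#include <stdio.h>

#define RX_BUF_SIZE 2048   /* stands in for DP_RX_BUFFER_SIZE  */
#define HAL_DESC_SZ 128    /* stands in for ab->hal.hal_desc_sz */

int main(void)
{
	int l3pad = 2, msdu_len = 5000;
	int first_hdr = HAL_DESC_SZ + l3pad;
	int first_len = RX_BUF_SIZE - first_hdr;
	int rem = msdu_len - first_len;
	int nbuf = 1;

	printf("first buffer payload: %d\n", first_len);
	while (rem > 0) {
		/* continuation buffers carry RX_BUF_SIZE - HAL_DESC_SZ,
		 * the last one carries the remainder */
		int buf_len = rem > RX_BUF_SIZE - HAL_DESC_SZ ?
			      RX_BUF_SIZE - HAL_DESC_SZ : rem;
		printf("buffer %d payload: %d\n", ++nbuf, buf_len);
		rem -= buf_len;
	}
	return 0;
}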
1905 if (!rxcb->is_continuation) in ath12k_dp_rx_get_msdu_last_buf()
1910 if (!rxcb->is_continuation) in ath12k_dp_rx_get_msdu_last_buf()
1920 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_csum_offload()
1923 ip_csum_fail = ath12k_dp_rx_h_ip_cksum_fail(ab, rxcb->rx_desc); in ath12k_dp_rx_h_csum_offload()
1924 l4_csum_fail = ath12k_dp_rx_h_l4_cksum_fail(ab, rxcb->rx_desc); in ath12k_dp_rx_h_csum_offload()
1926 msdu->ip_summed = (ip_csum_fail || l4_csum_fail) ? in ath12k_dp_rx_h_csum_offload()
1953 ath12k_warn(ar->ab, "unsupported encryption type %d for mic len\n", enctype); in ath12k_dp_rx_crypto_mic_len()
1981 ath12k_warn(ar->ab, "unsupported encryption type %d\n", enctype); in ath12k_dp_rx_crypto_param_len()
2006 ath12k_warn(ar->ab, "unsupported encryption type %d\n", enctype); in ath12k_dp_rx_crypto_icv_len()
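The three helpers ending here map an enctype to MIC, crypto parameter (IV) and ICV lengths. A condensed sketch of the lookup using the standard 802.11 sizes for three common ciphers; treat these as textbook values, not a copy of the driver's tables:

#include <stdio.h>

enum enc { ENC_WEP, ENC_TKIP, ENC_CCMP };

struct enc_lens { int iv, icv, mic; };

static struct enc_lens enc_lens(enum enc e)
{
	switch (e) {
	case ENC_WEP:  return (struct enc_lens){ .iv = 4, .icv = 4, .mic = 0 };
	case ENC_TKIP: return (struct enc_lens){ .iv = 8, .icv = 4, .mic = 8 };
	case ENC_CCMP: return (struct enc_lens){ .iv = 8, .icv = 0, .mic = 8 };
	}
	return (struct enc_lens){ 0 };  /* unsupported: the driver warns */
}

int main(void)
{
	struct enc_lens l = enc_lens(ENC_TKIP);

	printf("TKIP: iv %d icv %d mic %d\n", l.iv, l.icv, l.mic);
	return 0;
}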
2015 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_undecap_nwifi()
2024 hdr = (struct ieee80211_hdr *)msdu->data; in ath12k_dp_rx_h_undecap_nwifi()
2025 hdr_len = ieee80211_hdrlen(hdr->frame_control); in ath12k_dp_rx_h_undecap_nwifi()
2029 hdr->frame_control |= __cpu_to_le16(IEEE80211_STYPE_QOS_DATA); in ath12k_dp_rx_h_undecap_nwifi()
2032 hdr->frame_control &= ~(__cpu_to_le16(IEEE80211_FCTL_ORDER)); in ath12k_dp_rx_h_undecap_nwifi()
2034 qos_ctl = rxcb->tid; in ath12k_dp_rx_h_undecap_nwifi()
2036 if (ath12k_dp_rx_h_mesh_ctl_present(ab, rxcb->rx_desc)) in ath12k_dp_rx_h_undecap_nwifi()
2045 if (!(status->flag & RX_FLAG_IV_STRIPPED)) { in ath12k_dp_rx_h_undecap_nwifi()
2047 ath12k_dp_rx_desc_get_crypto_header(ar->ab, in ath12k_dp_rx_h_undecap_nwifi()
2048 rxcb->rx_desc, crypto_hdr, in ath12k_dp_rx_h_undecap_nwifi()
2068 if (!rxcb->is_first_msdu || in ath12k_dp_rx_h_undecap_raw()
2069 !(rxcb->is_first_msdu && rxcb->is_last_msdu)) { in ath12k_dp_rx_h_undecap_raw()
2074 skb_trim(msdu, msdu->len - FCS_LEN); in ath12k_dp_rx_h_undecap_raw()
2079 hdr = (void *)msdu->data; in ath12k_dp_rx_h_undecap_raw()
2082 if (status->flag & RX_FLAG_IV_STRIPPED) { in ath12k_dp_rx_h_undecap_raw()
2083 skb_trim(msdu, msdu->len - in ath12k_dp_rx_h_undecap_raw()
2086 skb_trim(msdu, msdu->len - in ath12k_dp_rx_h_undecap_raw()
2090 if (status->flag & RX_FLAG_MIC_STRIPPED) in ath12k_dp_rx_h_undecap_raw()
2091 skb_trim(msdu, msdu->len - in ath12k_dp_rx_h_undecap_raw()
2095 if (status->flag & RX_FLAG_ICV_STRIPPED) in ath12k_dp_rx_h_undecap_raw()
2096 skb_trim(msdu, msdu->len - in ath12k_dp_rx_h_undecap_raw()
2101 if ((status->flag & RX_FLAG_MMIC_STRIPPED) && in ath12k_dp_rx_h_undecap_raw()
2102 !ieee80211_has_morefrags(hdr->frame_control) && in ath12k_dp_rx_h_undecap_raw()
2104 skb_trim(msdu, msdu->len - IEEE80211_CCMP_MIC_LEN); in ath12k_dp_rx_h_undecap_raw()
2107 if (status->flag & RX_FLAG_IV_STRIPPED) { in ath12k_dp_rx_h_undecap_raw()
2108 hdr_len = ieee80211_hdrlen(hdr->frame_control); in ath12k_dp_rx_h_undecap_raw()
2111 memmove(msdu->data + crypto_len, msdu->data, hdr_len); in ath12k_dp_rx_h_undecap_raw()
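ath12k_dp_rx_h_undecap_raw() first trims the MIC/ICV off the tail, then removes the IV sitting between the 802.11 header and the payload by sliding the header forward crypto_len bytes and pulling the front (file lines 2107-2111). The same move on a plain byte buffer, with made-up sizes:

#include <stdio.h>
#include <string.h>

int main(void)
{
	/* layout: [hdr(4)][iv(2)][payload(5)], sizes are illustrative */
	char buf[] = "HDR!" "IV" "DATA!";
	size_t hdr_len = 4, crypto_len = 2;
	size_t len = sizeof(buf) - 1;

	memmove(buf + crypto_len, buf, hdr_len);  /* slide header right */
	/* skb_pull() equivalent: the frame now starts crypto_len in */
	printf("%.*s\n", (int)(len - crypto_len), buf + crypto_len);
	return 0;
}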
2122 struct hal_rx_desc *rx_desc = rxcb->rx_desc; in ath12k_get_dot11_hdr_from_rx_desc()
2123 struct ath12k_base *ab = ar->ab; in ath12k_get_dot11_hdr_from_rx_desc()
2130 if (!(status->flag & RX_FLAG_IV_STRIPPED)) { in ath12k_get_dot11_hdr_from_rx_desc()
2139 hdr = (struct ieee80211_hdr *)msdu->data; in ath12k_get_dot11_hdr_from_rx_desc()
2140 hdr->frame_control = fc; in ath12k_get_dot11_hdr_from_rx_desc()
2145 if (rxcb->is_mcbc) in ath12k_get_dot11_hdr_from_rx_desc()
2146 status->flag &= ~RX_FLAG_PN_VALIDATED; in ath12k_get_dot11_hdr_from_rx_desc()
2149 if (ieee80211_is_data_qos(hdr->frame_control)) { in ath12k_get_dot11_hdr_from_rx_desc()
2150 qos_ctl = rxcb->tid; in ath12k_get_dot11_hdr_from_rx_desc()
2155 memcpy(msdu->data + (hdr_len - IEEE80211_QOS_CTL_LEN), in ath12k_get_dot11_hdr_from_rx_desc()
2172 eth = (struct ethhdr *)msdu->data; in ath12k_dp_rx_h_undecap_eth()
2173 ether_addr_copy(da, eth->h_dest); in ath12k_dp_rx_h_undecap_eth()
2174 ether_addr_copy(sa, eth->h_source); in ath12k_dp_rx_h_undecap_eth()
2175 rfc.snap_type = eth->h_proto; in ath12k_dp_rx_h_undecap_eth()
2184 hdr = (struct ieee80211_hdr *)msdu->data; in ath12k_dp_rx_h_undecap_eth()
2195 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_undecap()
2210 ehdr = (struct ethhdr *)msdu->data; in ath12k_dp_rx_h_undecap()
2213 if (ehdr->h_proto == cpu_to_be16(ETH_P_PAE)) { in ath12k_dp_rx_h_undecap()
2214 ATH12K_SKB_RXCB(msdu)->is_eapol = true; in ath12k_dp_rx_h_undecap()
2222 if (ATH12K_SKB_RXCB(msdu)->is_mcbc && decrypted) in ath12k_dp_rx_h_undecap()
2235 struct hal_rx_desc *rx_desc = rxcb->rx_desc; in ath12k_dp_rx_h_find_peer()
2238 lockdep_assert_held(&ab->base_lock); in ath12k_dp_rx_h_find_peer()
2240 if (rxcb->peer_id) in ath12k_dp_rx_h_find_peer()
2241 peer = ath12k_peer_find_by_id(ab, rxcb->peer_id); in ath12k_dp_rx_h_find_peer()
2261 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_mpdu()
2271 fill_crypto_hdr = ath12k_dp_rx_h_is_da_mcbc(ar->ab, rx_desc); in ath12k_dp_rx_h_mpdu()
2272 rxcb->is_mcbc = fill_crypto_hdr; in ath12k_dp_rx_h_mpdu()
2274 if (rxcb->is_mcbc) in ath12k_dp_rx_h_mpdu()
2275 rxcb->peer_id = ath12k_dp_rx_h_peer_id(ar->ab, rx_desc); in ath12k_dp_rx_h_mpdu()
2277 spin_lock_bh(&ar->ab->base_lock); in ath12k_dp_rx_h_mpdu()
2278 peer = ath12k_dp_rx_h_find_peer(ar->ab, msdu); in ath12k_dp_rx_h_mpdu()
2280 if (rxcb->is_mcbc) in ath12k_dp_rx_h_mpdu()
2281 enctype = peer->sec_type_grp; in ath12k_dp_rx_h_mpdu()
2283 enctype = peer->sec_type; in ath12k_dp_rx_h_mpdu()
2287 spin_unlock_bh(&ar->ab->base_lock); in ath12k_dp_rx_h_mpdu()
2293 /* Clear per-MPDU flags while leaving per-PPDU flags intact */ in ath12k_dp_rx_h_mpdu()
2294 rx_status->flag &= ~(RX_FLAG_FAILED_FCS_CRC | in ath12k_dp_rx_h_mpdu()
2301 rx_status->flag |= RX_FLAG_FAILED_FCS_CRC; in ath12k_dp_rx_h_mpdu()
2303 rx_status->flag |= RX_FLAG_MMIC_ERROR; in ath12k_dp_rx_h_mpdu()
2306 rx_status->flag |= RX_FLAG_DECRYPTED | RX_FLAG_MMIC_STRIPPED; in ath12k_dp_rx_h_mpdu()
2309 rx_status->flag |= RX_FLAG_MIC_STRIPPED | in ath12k_dp_rx_h_mpdu()
2312 rx_status->flag |= RX_FLAG_IV_STRIPPED | in ath12k_dp_rx_h_mpdu()
2323 if (ath12k_dp_rx_h_decap_type(ar->ab, rx_desc) != in ath12k_dp_rx_h_mpdu()
2325 hdr = (void *)msdu->data; in ath12k_dp_rx_h_mpdu()
2326 hdr->frame_control &= ~__cpu_to_le16(IEEE80211_FCTL_PROTECTED); in ath12k_dp_rx_h_mpdu()
2333 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_rate()
2351 sband = &ar->mac.sbands[rx_status->band]; in ath12k_dp_rx_h_rate()
2352 rx_status->rate_idx = ath12k_mac_hw_rate_to_idx(sband, rate_mcs, in ath12k_dp_rx_h_rate()
2356 rx_status->encoding = RX_ENC_HT; in ath12k_dp_rx_h_rate()
2358 ath12k_warn(ar->ab, in ath12k_dp_rx_h_rate()
2363 rx_status->rate_idx = rate_mcs + (8 * (nss - 1)); in ath12k_dp_rx_h_rate()
2365 rx_status->enc_flags |= RX_ENC_FLAG_SHORT_GI; in ath12k_dp_rx_h_rate()
2366 rx_status->bw = ath12k_mac_bw_to_mac80211_bw(bw); in ath12k_dp_rx_h_rate()
2369 rx_status->encoding = RX_ENC_VHT; in ath12k_dp_rx_h_rate()
2370 rx_status->rate_idx = rate_mcs; in ath12k_dp_rx_h_rate()
2372 ath12k_warn(ar->ab, in ath12k_dp_rx_h_rate()
2377 rx_status->nss = nss; in ath12k_dp_rx_h_rate()
2379 rx_status->enc_flags |= RX_ENC_FLAG_SHORT_GI; in ath12k_dp_rx_h_rate()
2380 rx_status->bw = ath12k_mac_bw_to_mac80211_bw(bw); in ath12k_dp_rx_h_rate()
2383 rx_status->rate_idx = rate_mcs; in ath12k_dp_rx_h_rate()
2385 ath12k_warn(ar->ab, in ath12k_dp_rx_h_rate()
2390 rx_status->encoding = RX_ENC_HE; in ath12k_dp_rx_h_rate()
2391 rx_status->nss = nss; in ath12k_dp_rx_h_rate()
2392 rx_status->he_gi = ath12k_he_gi_to_nl80211_he_gi(sgi); in ath12k_dp_rx_h_rate()
2393 rx_status->bw = ath12k_mac_bw_to_mac80211_bw(bw); in ath12k_dp_rx_h_rate()
2401 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_ppdu()
2406 rx_status->freq = 0; in ath12k_dp_rx_h_ppdu()
2407 rx_status->rate_idx = 0; in ath12k_dp_rx_h_ppdu()
2408 rx_status->nss = 0; in ath12k_dp_rx_h_ppdu()
2409 rx_status->encoding = RX_ENC_LEGACY; in ath12k_dp_rx_h_ppdu()
2410 rx_status->bw = RATE_INFO_BW_20; in ath12k_dp_rx_h_ppdu()
2411 rx_status->enc_flags = 0; in ath12k_dp_rx_h_ppdu()
2413 rx_status->flag |= RX_FLAG_NO_SIGNAL_VAL; in ath12k_dp_rx_h_ppdu()
2421 rx_status->band = NL80211_BAND_6GHZ; in ath12k_dp_rx_h_ppdu()
2422 rx_status->freq = center_freq; in ath12k_dp_rx_h_ppdu()
2424 rx_status->band = NL80211_BAND_2GHZ; in ath12k_dp_rx_h_ppdu()
2426 rx_status->band = NL80211_BAND_5GHZ; in ath12k_dp_rx_h_ppdu()
2428 spin_lock_bh(&ar->data_lock); in ath12k_dp_rx_h_ppdu()
2429 channel = ar->rx_channel; in ath12k_dp_rx_h_ppdu()
2431 rx_status->band = channel->band; in ath12k_dp_rx_h_ppdu()
2433 ieee80211_frequency_to_channel(channel->center_freq); in ath12k_dp_rx_h_ppdu()
2435 spin_unlock_bh(&ar->data_lock); in ath12k_dp_rx_h_ppdu()
2436 ath12k_dbg_dump(ar->ab, ATH12K_DBG_DATA, NULL, "rx_desc: ", in ath12k_dp_rx_h_ppdu()
2440 if (rx_status->band != NL80211_BAND_6GHZ) in ath12k_dp_rx_h_ppdu()
2441 rx_status->freq = ieee80211_channel_to_frequency(channel_num, in ath12k_dp_rx_h_ppdu()
2442 rx_status->band); in ath12k_dp_rx_h_ppdu()
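When the descriptor carries no usable frequency, ath12k_dp_rx_h_ppdu() infers the band and recomputes the frequency from the channel number (file lines 2440-2442). A simplified sketch of the 2.4/5 GHz arithmetic behind ieee80211_channel_to_frequency(); 6 GHz and other special cases are omitted:

#include <stdio.h>

static int chan_to_freq(int chan)
{
	if (chan == 14)
		return 2484;            /* Japan-only 2.4 GHz channel */
	if (chan >= 1 && chan <= 13)
		return 2407 + chan * 5; /* 2.4 GHz: 5 MHz spacing */
	return 5000 + chan * 5;         /* 5 GHz */
}

int main(void)
{
	printf("chan 6  -> %d MHz\n", chan_to_freq(6));    /* 2437 */
	printf("chan 36 -> %d MHz\n", chan_to_freq(36));   /* 5180 */
	return 0;
}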
2451 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_deliver_msdu()
2463 bool is_mcbc = rxcb->is_mcbc; in ath12k_dp_rx_deliver_msdu()
2464 bool is_eapol = rxcb->is_eapol; in ath12k_dp_rx_deliver_msdu()
2466 if (status->encoding == RX_ENC_HE && !(status->flag & RX_FLAG_RADIOTAP_HE) && in ath12k_dp_rx_deliver_msdu()
2467 !(status->flag & RX_FLAG_SKIP_MONITOR)) { in ath12k_dp_rx_deliver_msdu()
2470 status->flag |= RX_FLAG_RADIOTAP_HE; in ath12k_dp_rx_deliver_msdu()
2473 if (!(status->flag & RX_FLAG_ONLY_MONITOR)) in ath12k_dp_rx_deliver_msdu()
2474 decap = ath12k_dp_rx_h_decap_type(ab, rxcb->rx_desc); in ath12k_dp_rx_deliver_msdu()
2476 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_deliver_msdu()
2479 pubsta = peer ? peer->sta : NULL; in ath12k_dp_rx_deliver_msdu()
2481 if (pubsta && pubsta->valid_links) { in ath12k_dp_rx_deliver_msdu()
2482 status->link_valid = 1; in ath12k_dp_rx_deliver_msdu()
2483 status->link_id = peer->link_id; in ath12k_dp_rx_deliver_msdu()
2486 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_deliver_msdu()
2489 …s%s%s%s%s%s rate_idx %u vht_nss %u freq %u band %u flag 0x%x fcs-err %i mic-err %i amsdu-more %i\n… in ath12k_dp_rx_deliver_msdu()
2491 msdu->len, in ath12k_dp_rx_deliver_msdu()
2492 peer ? peer->addr : NULL, in ath12k_dp_rx_deliver_msdu()
2493 rxcb->tid, in ath12k_dp_rx_deliver_msdu()
2495 ath12k_dp_rx_h_seq_no(ab, rxcb->rx_desc), in ath12k_dp_rx_deliver_msdu()
2496 (status->encoding == RX_ENC_LEGACY) ? "legacy" : "", in ath12k_dp_rx_deliver_msdu()
2497 (status->encoding == RX_ENC_HT) ? "ht" : "", in ath12k_dp_rx_deliver_msdu()
2498 (status->encoding == RX_ENC_VHT) ? "vht" : "", in ath12k_dp_rx_deliver_msdu()
2499 (status->encoding == RX_ENC_HE) ? "he" : "", in ath12k_dp_rx_deliver_msdu()
2500 (status->bw == RATE_INFO_BW_40) ? "40" : "", in ath12k_dp_rx_deliver_msdu()
2501 (status->bw == RATE_INFO_BW_80) ? "80" : "", in ath12k_dp_rx_deliver_msdu()
2502 (status->bw == RATE_INFO_BW_160) ? "160" : "", in ath12k_dp_rx_deliver_msdu()
2503 (status->bw == RATE_INFO_BW_320) ? "320" : "", in ath12k_dp_rx_deliver_msdu()
2504 status->enc_flags & RX_ENC_FLAG_SHORT_GI ? "sgi " : "", in ath12k_dp_rx_deliver_msdu()
2505 status->rate_idx, in ath12k_dp_rx_deliver_msdu()
2506 status->nss, in ath12k_dp_rx_deliver_msdu()
2507 status->freq, in ath12k_dp_rx_deliver_msdu()
2508 status->band, status->flag, in ath12k_dp_rx_deliver_msdu()
2509 !!(status->flag & RX_FLAG_FAILED_FCS_CRC), in ath12k_dp_rx_deliver_msdu()
2510 !!(status->flag & RX_FLAG_MMIC_ERROR), in ath12k_dp_rx_deliver_msdu()
2511 !!(status->flag & RX_FLAG_AMSDU_MORE)); in ath12k_dp_rx_deliver_msdu()
2514 msdu->data, msdu->len); in ath12k_dp_rx_deliver_msdu()
2527 !(is_mcbc && rx_status->flag & RX_FLAG_DECRYPTED)) in ath12k_dp_rx_deliver_msdu()
2528 rx_status->flag |= RX_FLAG_8023; in ath12k_dp_rx_deliver_msdu()
2545 hdr = (struct ieee80211_hdr *)msdu->data; in ath12k_dp_rx_check_nwifi_hdr_len_valid()
2546 hdr_len = ieee80211_hdrlen(hdr->frame_control); in ath12k_dp_rx_check_nwifi_hdr_len_valid()
2551 ab->soc_stats.invalid_rbm++; in ath12k_dp_rx_check_nwifi_hdr_len_valid()
2561 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_process_msdu()
2568 u32 hal_rx_desc_sz = ar->ab->hal.hal_desc_sz; in ath12k_dp_rx_process_msdu()
2574 ret = -EIO; in ath12k_dp_rx_process_msdu()
2578 rx_desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_process_msdu()
2579 lrx_desc = (struct hal_rx_desc *)last_buf->data; in ath12k_dp_rx_process_msdu()
2582 ret = -EIO; in ath12k_dp_rx_process_msdu()
2587 rxcb->rx_desc = rx_desc; in ath12k_dp_rx_process_msdu()
2591 if (rxcb->is_frag) { in ath12k_dp_rx_process_msdu()
2593 } else if (!rxcb->is_continuation) { in ath12k_dp_rx_process_msdu()
2595 ret = -EINVAL; in ath12k_dp_rx_process_msdu()
2615 ret = -EINVAL; in ath12k_dp_rx_process_msdu()
2622 rx_status->flag |= RX_FLAG_SKIP_MONITOR | RX_FLAG_DUP_VALIDATED; in ath12k_dp_rx_process_msdu()
2635 struct ath12k_hw_group *ag = ab->ag; in ath12k_dp_rx_process_received_packets()
2640 struct ath12k_hw_link *hw_links = ag->hw_links; in ath12k_dp_rx_process_received_packets()
2652 hw_link_id = rxcb->hw_link_id; in ath12k_dp_rx_process_received_packets()
2655 pdev_id = ath12k_hw_mac_id_to_pdev_id(partner_ab->hw_params, in ath12k_dp_rx_process_received_packets()
2657 ar = partner_ab->pdevs[pdev_id].ar; in ath12k_dp_rx_process_received_packets()
2658 if (!rcu_dereference(partner_ab->pdevs_active[pdev_id])) { in ath12k_dp_rx_process_received_packets()
2663 if (test_bit(ATH12K_FLAG_CAC_RUNNING, &ar->dev_flags)) { in ath12k_dp_rx_process_received_packets()
2708 struct ath12k_hw_group *ag = ab->ag; in ath12k_dp_rx_process()
2710 struct ath12k_hw_link *hw_links = ag->hw_links; in ath12k_dp_rx_process()
2713 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_process()
2714 struct dp_rxdma_ring *rx_ring = &dp->rx_refill_buf_ring; in ath12k_dp_rx_process()
2731 srng = &ab->hal.srng_list[dp->reo_dst_ring[ring_id].ring_id]; in ath12k_dp_rx_process()
2733 spin_lock_bh(&srng->lock); in ath12k_dp_rx_process()
2744 cookie = le32_get_bits(desc->buf_addr_info.info1, in ath12k_dp_rx_process()
2747 hw_link_id = le32_get_bits(desc->info0, in ath12k_dp_rx_process()
2750 desc_va = ((u64)le32_to_cpu(desc->buf_va_hi) << 32 | in ath12k_dp_rx_process()
2751 le32_to_cpu(desc->buf_va_lo)); in ath12k_dp_rx_process()
2757 if (desc_info->skb) { in ath12k_dp_rx_process()
2758 dev_kfree_skb_any(desc_info->skb); in ath12k_dp_rx_process()
2759 desc_info->skb = NULL; in ath12k_dp_rx_process()
2775 if (desc_info->magic != ATH12K_DP_RX_DESC_MAGIC) in ath12k_dp_rx_process()
2778 msdu = desc_info->skb; in ath12k_dp_rx_process()
2779 desc_info->skb = NULL; in ath12k_dp_rx_process()
2781 list_add_tail(&desc_info->list, &rx_desc_used_list[device_id]); in ath12k_dp_rx_process()
2784 dma_unmap_single(partner_ab->dev, rxcb->paddr, in ath12k_dp_rx_process()
2785 msdu->len + skb_tailroom(msdu), in ath12k_dp_rx_process()
2790 push_reason = le32_get_bits(desc->info0, in ath12k_dp_rx_process()
2795 ab->soc_stats.hal_reo_error[ring_id]++; in ath12k_dp_rx_process()
2799 msdu_info = &desc->rx_msdu_info; in ath12k_dp_rx_process()
2800 mpdu_info = &desc->rx_mpdu_info; in ath12k_dp_rx_process()
2802 rxcb->is_first_msdu = !!(le32_to_cpu(msdu_info->info0) & in ath12k_dp_rx_process()
2804 rxcb->is_last_msdu = !!(le32_to_cpu(msdu_info->info0) & in ath12k_dp_rx_process()
2806 rxcb->is_continuation = !!(le32_to_cpu(msdu_info->info0) & in ath12k_dp_rx_process()
2808 rxcb->hw_link_id = hw_link_id; in ath12k_dp_rx_process()
2809 rxcb->peer_id = ath12k_dp_rx_get_peer_id(ab, dp->peer_metadata_ver, in ath12k_dp_rx_process()
2810 mpdu_info->peer_meta_data); in ath12k_dp_rx_process()
2811 rxcb->tid = le32_get_bits(mpdu_info->info0, in ath12k_dp_rx_process()
2816 if (!rxcb->is_continuation) { in ath12k_dp_rx_process()
2840 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_process()
2850 rx_ring = &partner_ab->dp.rx_refill_buf_ring; in ath12k_dp_rx_process()
2868 spin_lock_bh(&rx_tid->ab->base_lock); in ath12k_dp_rx_frag_timer()
2869 if (rx_tid->last_frag_no && in ath12k_dp_rx_frag_timer()
2870 rx_tid->rx_frag_bitmap == GENMASK(rx_tid->last_frag_no, 0)) { in ath12k_dp_rx_frag_timer()
2871 spin_unlock_bh(&rx_tid->ab->base_lock); in ath12k_dp_rx_frag_timer()
2875 spin_unlock_bh(&rx_tid->ab->base_lock); in ath12k_dp_rx_frag_timer()
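The fragment timer above guards reassembly: it is re-armed for every fragment that does not complete the MPDU and flushes the partial queue on expiry. A minimal sketch of the timer pattern, with a hypothetical holder struct in place of the driver's per-TID state:

#include <linux/timer.h>
#include <linux/jiffies.h>

struct frag_tid {
	struct timer_list frag_timer;
	/* fragment queue, rx_frag_bitmap, last_frag_no, ... */
};

static void frag_timeout(struct timer_list *timer)
{
	struct frag_tid *tid = from_timer(tid, timer, frag_timer);

	/* drop the partially reassembled MPDU tracked by @tid */
}

static void frag_tid_init(struct frag_tid *tid)
{
	timer_setup(&tid->frag_timer, frag_timeout, 0);
}

static void frag_tid_arm(struct frag_tid *tid, unsigned int timeout_ms)
{
	mod_timer(&tid->frag_timer, jiffies + msecs_to_jiffies(timeout_ms));
}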
2880 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_peer_frag_setup()
2890 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_peer_frag_setup()
2894 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_frag_setup()
2897 return -ENOENT; in ath12k_dp_rx_peer_frag_setup()
2900 if (!peer->primary_link) { in ath12k_dp_rx_peer_frag_setup()
2901 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_frag_setup()
2907 rx_tid = &peer->rx_tid[i]; in ath12k_dp_rx_peer_frag_setup()
2908 rx_tid->ab = ab; in ath12k_dp_rx_peer_frag_setup()
2909 timer_setup(&rx_tid->frag_timer, ath12k_dp_rx_frag_timer, 0); in ath12k_dp_rx_peer_frag_setup()
2910 skb_queue_head_init(&rx_tid->rx_frags); in ath12k_dp_rx_peer_frag_setup()
2913 peer->tfm_mmic = tfm; in ath12k_dp_rx_peer_frag_setup()
2914 peer->dp_setup_done = true; in ath12k_dp_rx_peer_frag_setup()
2915 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_frag_setup()
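peer->tfm_mmic above holds the crypto_shash transform used later for the software TKIP Michael MIC check; it is allocated once per peer during fragment setup. A hedged sketch of the allocation:

#include <crypto/hash.h>

/* Allocate the "michael_mic" shash used to verify TKIP MICs in software.
 * Returns an ERR_PTR on failure, per the crypto API contract. */
static struct crypto_shash *alloc_mmic_tfm(void)
{
	return crypto_alloc_shash("michael_mic", 0, 0);
}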
2930 return -EINVAL; in ath12k_dp_rx_h_michael_mic()
2932 desc->tfm = tfm; in ath12k_dp_rx_h_michael_mic()
2945 if (ieee80211_is_data_qos(hdr->frame_control)) in ath12k_dp_rx_h_michael_mic()
2964 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_verify_tkip_mic()
2965 struct hal_rx_desc *rx_desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_h_verify_tkip_mic()
2972 u32 hdr_len, hal_rx_desc_sz = ar->ab->hal.hal_desc_sz; in ath12k_dp_rx_h_verify_tkip_mic()
2979 hdr = (struct ieee80211_hdr *)(msdu->data + hal_rx_desc_sz); in ath12k_dp_rx_h_verify_tkip_mic()
2980 hdr_len = ieee80211_hdrlen(hdr->frame_control); in ath12k_dp_rx_h_verify_tkip_mic()
2984 if (!is_multicast_ether_addr(hdr->addr1)) in ath12k_dp_rx_h_verify_tkip_mic()
2985 key_idx = peer->ucast_keyidx; in ath12k_dp_rx_h_verify_tkip_mic()
2987 key_idx = peer->mcast_keyidx; in ath12k_dp_rx_h_verify_tkip_mic()
2989 key_conf = peer->keys[key_idx]; in ath12k_dp_rx_h_verify_tkip_mic()
2991 data = msdu->data + head_len; in ath12k_dp_rx_h_verify_tkip_mic()
2992 data_len = msdu->len - head_len - tail_len; in ath12k_dp_rx_h_verify_tkip_mic()
2993 key = &key_conf->key[NL80211_TKIP_DATA_OFFSET_RX_MIC_KEY]; in ath12k_dp_rx_h_verify_tkip_mic()
2995 ret = ath12k_dp_rx_h_michael_mic(peer->tfm_mmic, key, hdr, data, data_len, mic); in ath12k_dp_rx_h_verify_tkip_mic()
3002 (ATH12K_SKB_RXCB(msdu))->is_first_msdu = true; in ath12k_dp_rx_h_verify_tkip_mic()
3003 (ATH12K_SKB_RXCB(msdu))->is_last_msdu = true; in ath12k_dp_rx_h_verify_tkip_mic()
3005 rxs->flag |= RX_FLAG_MMIC_ERROR | RX_FLAG_MMIC_STRIPPED | in ath12k_dp_rx_h_verify_tkip_mic()
3010 return -EINVAL; in ath12k_dp_rx_h_verify_tkip_mic()
3016 return -EINVAL; in ath12k_dp_rx_h_verify_tkip_mic()
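The MIC verification above keys the Michael transform with the RX MIC key, runs it over a pseudo-header plus payload, and compares the 8-byte digest with the MIC carried at the frame's tail. A minimal sketch of that shash usage; the pseudo-header construction is elided and all names are illustrative:

#include <crypto/hash.h>
#include <linux/errno.h>
#include <linux/string.h>

static int mic_check(struct crypto_shash *tfm, const u8 key[8],
		     const u8 *mic_hdr, size_t hdr_len,
		     const u8 *data, size_t data_len, const u8 rx_mic[8])
{
	SHASH_DESC_ON_STACK(desc, tfm);
	u8 mic[8];
	int ret;

	ret = crypto_shash_setkey(tfm, key, 8);
	if (ret)
		return ret;

	desc->tfm = tfm;
	ret = crypto_shash_init(desc);
	if (!ret)
		ret = crypto_shash_update(desc, mic_hdr, hdr_len);
	if (!ret)
		ret = crypto_shash_update(desc, data, data_len);
	if (!ret)
		ret = crypto_shash_final(desc, mic);
	if (ret)
		return ret;

	/* a mismatch marks the frame with RX_FLAG_MMIC_ERROR upstream */
	return memcmp(mic, rx_mic, sizeof(mic)) ? -EINVAL : 0;
}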
3025 u32 hal_rx_desc_sz = ar->ab->hal.hal_desc_sz; in ath12k_dp_rx_h_undecap_frag()
3030 hdr = (struct ieee80211_hdr *)(msdu->data + hal_rx_desc_sz); in ath12k_dp_rx_h_undecap_frag()
3033 skb_trim(msdu, msdu->len - in ath12k_dp_rx_h_undecap_frag()
3037 skb_trim(msdu, msdu->len - in ath12k_dp_rx_h_undecap_frag()
3041 hdr_len = ieee80211_hdrlen(hdr->frame_control); in ath12k_dp_rx_h_undecap_frag()
3044 memmove(msdu->data + hal_rx_desc_sz + crypto_len, in ath12k_dp_rx_h_undecap_frag()
3045 msdu->data + hal_rx_desc_sz, hdr_len); in ath12k_dp_rx_h_undecap_frag()
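Undecapping a fragment above means trimming the trailing ICV and sliding the 802.11 header forward over the per-frame IV. A sketch of that byte surgery; desc_sz, crypto_len and icv_len are assumed inputs derived from the HAL descriptor size and the cipher suite:

#include <linux/ieee80211.h>
#include <linux/skbuff.h>
#include <linux/string.h>

static void strip_frag_crypto(struct sk_buff *skb, u32 desc_sz,
			      u32 crypto_len, u32 icv_len)
{
	struct ieee80211_hdr *hdr = (void *)(skb->data + desc_sz);
	u32 hdr_len = ieee80211_hdrlen(hdr->frame_control);

	/* drop the integrity check value from the tail ... */
	skb_trim(skb, skb->len - icv_len);
	/* ... then close the IV gap between header and payload */
	memmove(skb->data + desc_sz + crypto_len, skb->data + desc_sz, hdr_len);
	skb_pull(skb, crypto_len);
}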
3055 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_defrag()
3063 u32 flags, hal_rx_desc_sz = ar->ab->hal.hal_desc_sz; in ath12k_dp_rx_h_defrag()
3065 first_frag = skb_peek(&rx_tid->rx_frags); in ath12k_dp_rx_h_defrag()
3066 last_frag = skb_peek_tail(&rx_tid->rx_frags); in ath12k_dp_rx_h_defrag()
3068 skb_queue_walk(&rx_tid->rx_frags, skb) { in ath12k_dp_rx_h_defrag()
3070 rx_desc = (struct hal_rx_desc *)skb->data; in ath12k_dp_rx_h_defrag()
3071 hdr = (struct ieee80211_hdr *)(skb->data + hal_rx_desc_sz); in ath12k_dp_rx_h_defrag()
3088 skb_trim(skb, skb->len - FCS_LEN); in ath12k_dp_rx_h_defrag()
3093 ieee80211_hdrlen(hdr->frame_control)); in ath12k_dp_rx_h_defrag()
3094 msdu_len += skb->len; in ath12k_dp_rx_h_defrag()
3097 extra_space = msdu_len - (DP_RX_BUFFER_SIZE + skb_tailroom(first_frag)); in ath12k_dp_rx_h_defrag()
3100 return -ENOMEM; in ath12k_dp_rx_h_defrag()
3102 __skb_unlink(first_frag, &rx_tid->rx_frags); in ath12k_dp_rx_h_defrag()
3103 while ((skb = __skb_dequeue(&rx_tid->rx_frags))) { in ath12k_dp_rx_h_defrag()
3104 skb_put_data(first_frag, skb->data, skb->len); in ath12k_dp_rx_h_defrag()
3108 hdr = (struct ieee80211_hdr *)(first_frag->data + hal_rx_desc_sz); in ath12k_dp_rx_h_defrag()
3109 hdr->frame_control &= ~__cpu_to_le16(IEEE80211_FCTL_MOREFRAGS); in ath12k_dp_rx_h_defrag()
3110 ATH12K_SKB_RXCB(first_frag)->is_frag = 1; in ath12k_dp_rx_h_defrag()
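The loop above coalesces all queued fragments into the first one: grow its tailroom if the combined MSDU would not fit, then append each remaining fragment's bytes and free it. A minimal sketch under those assumptions:

#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/skbuff.h>

static int coalesce_frags(struct sk_buff_head *frags, u32 msdu_len,
			  u32 rx_buf_size)
{
	struct sk_buff *first = skb_peek(frags);
	struct sk_buff *skb;
	int extra = msdu_len - (rx_buf_size + skb_tailroom(first));

	if (extra > 0 && pskb_expand_head(first, 0, extra, GFP_ATOMIC) < 0)
		return -ENOMEM;

	__skb_unlink(first, frags);
	while ((skb = __skb_dequeue(frags))) {
		skb_put_data(first, skb->data, skb->len);
		dev_kfree_skb_any(skb);
	}

	return 0;
}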
3123 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_defrag_reo_reinject()
3124 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_h_defrag_reo_reinject()
3125 struct hal_rx_desc *rx_desc = (struct hal_rx_desc *)defrag_skb->data; in ath12k_dp_rx_h_defrag_reo_reinject()
3137 enum hal_rx_buf_return_buf_manager idle_link_rbm = dp->idle_link_rbm; in ath12k_dp_rx_h_defrag_reo_reinject()
3140 hal_rx_desc_sz = ab->hal.hal_desc_sz; in ath12k_dp_rx_h_defrag_reo_reinject()
3141 link_desc_banks = dp->link_desc_banks; in ath12k_dp_rx_h_defrag_reo_reinject()
3142 reo_dest_ring = rx_tid->dst_ring_desc; in ath12k_dp_rx_h_defrag_reo_reinject()
3144 ath12k_hal_rx_reo_ent_paddr_get(ab, &reo_dest_ring->buf_addr_info, in ath12k_dp_rx_h_defrag_reo_reinject()
3149 (link_paddr - link_desc_banks[desc_bank].paddr)); in ath12k_dp_rx_h_defrag_reo_reinject()
3150 msdu0 = &msdu_link->msdu_link[0]; in ath12k_dp_rx_h_defrag_reo_reinject()
3151 msdu_ext_info = le32_to_cpu(msdu0->rx_msdu_ext_info.info0); in ath12k_dp_rx_h_defrag_reo_reinject()
3159 u32_encode_bits(defrag_skb->len - hal_rx_desc_sz, in ath12k_dp_rx_h_defrag_reo_reinject()
3163 msdu0->rx_msdu_info.info0 = cpu_to_le32(msdu_info); in ath12k_dp_rx_h_defrag_reo_reinject()
3164 msdu0->rx_msdu_ext_info.info0 = cpu_to_le32(msdu_ext_info); in ath12k_dp_rx_h_defrag_reo_reinject()
3167 ath12k_dp_rxdesc_set_msdu_len(ab, rx_desc, defrag_skb->len - hal_rx_desc_sz); in ath12k_dp_rx_h_defrag_reo_reinject()
3169 buf_paddr = dma_map_single(ab->dev, defrag_skb->data, in ath12k_dp_rx_h_defrag_reo_reinject()
3170 defrag_skb->len + skb_tailroom(defrag_skb), in ath12k_dp_rx_h_defrag_reo_reinject()
3172 if (dma_mapping_error(ab->dev, buf_paddr)) in ath12k_dp_rx_h_defrag_reo_reinject()
3173 return -ENOMEM; in ath12k_dp_rx_h_defrag_reo_reinject()
3175 spin_lock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3176 desc_info = list_first_entry_or_null(&dp->rx_desc_free_list, in ath12k_dp_rx_h_defrag_reo_reinject()
3180 spin_unlock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3182 ret = -ENOMEM; in ath12k_dp_rx_h_defrag_reo_reinject()
3186 desc_info->skb = defrag_skb; in ath12k_dp_rx_h_defrag_reo_reinject()
3187 desc_info->in_use = true; in ath12k_dp_rx_h_defrag_reo_reinject()
3189 list_del(&desc_info->list); in ath12k_dp_rx_h_defrag_reo_reinject()
3190 spin_unlock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3192 ATH12K_SKB_RXCB(defrag_skb)->paddr = buf_paddr; in ath12k_dp_rx_h_defrag_reo_reinject()
3194 ath12k_hal_rx_buf_addr_info_set(&msdu0->buf_addr_info, buf_paddr, in ath12k_dp_rx_h_defrag_reo_reinject()
3195 desc_info->cookie, in ath12k_dp_rx_h_defrag_reo_reinject()
3199 srng = &ab->hal.srng_list[dp->reo_reinject_ring.ring_id]; in ath12k_dp_rx_h_defrag_reo_reinject()
3201 spin_lock_bh(&srng->lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3207 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3208 ret = -ENOSPC; in ath12k_dp_rx_h_defrag_reo_reinject()
3213 ath12k_hal_rx_buf_addr_info_set(&reo_ent_ring->buf_addr_info, link_paddr, in ath12k_dp_rx_h_defrag_reo_reinject()
3221 u32_encode_bits(rx_tid->tid, RX_MPDU_DESC_INFO0_TID); in ath12k_dp_rx_h_defrag_reo_reinject()
3223 reo_ent_ring->rx_mpdu_info.info0 = cpu_to_le32(mpdu_info); in ath12k_dp_rx_h_defrag_reo_reinject()
3224 reo_ent_ring->rx_mpdu_info.peer_meta_data = in ath12k_dp_rx_h_defrag_reo_reinject()
3225 reo_dest_ring->rx_mpdu_info.peer_meta_data; in ath12k_dp_rx_h_defrag_reo_reinject()
3227 reo_ent_ring->queue_addr_lo = cpu_to_le32(lower_32_bits(rx_tid->paddr)); in ath12k_dp_rx_h_defrag_reo_reinject()
3228 queue_addr_hi = upper_32_bits(rx_tid->paddr); in ath12k_dp_rx_h_defrag_reo_reinject()
3229 reo_ent_ring->info0 = le32_encode_bits(queue_addr_hi, in ath12k_dp_rx_h_defrag_reo_reinject()
3234 reo_ent_ring->info1 = le32_encode_bits(rx_tid->cur_sn, in ath12k_dp_rx_h_defrag_reo_reinject()
3236 dest_ring_info0 = le32_get_bits(reo_dest_ring->info0, in ath12k_dp_rx_h_defrag_reo_reinject()
3238 reo_ent_ring->info2 = in ath12k_dp_rx_h_defrag_reo_reinject()
3243 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3248 spin_lock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3249 desc_info->in_use = false; in ath12k_dp_rx_h_defrag_reo_reinject()
3250 desc_info->skb = NULL; in ath12k_dp_rx_h_defrag_reo_reinject()
3251 list_add_tail(&desc_info->list, &dp->rx_desc_free_list); in ath12k_dp_rx_h_defrag_reo_reinject()
3252 spin_unlock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3254 dma_unmap_single(ab->dev, buf_paddr, defrag_skb->len + skb_tailroom(defrag_skb), in ath12k_dp_rx_h_defrag_reo_reinject()
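Reinjection hands the rebuilt frame back to the REO ring, so the skb must be mapped for device access up front and unmapped again on any later failure, which is what the dma_unmap_single() in the error path above does. A sketch of the mapping step; the DMA_TO_DEVICE direction is an assumption of this sketch, matching how the reinject path feeds an already-filled buffer to the device:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/skbuff.h>

static int map_for_reinject(struct device *dev, struct sk_buff *skb,
			    dma_addr_t *paddr)
{
	*paddr = dma_map_single(dev, skb->data,
				skb->len + skb_tailroom(skb), DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *paddr))
		return -ENOMEM;

	return 0;
}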
3267 return frag1 - frag2; in ath12k_dp_rx_h_cmp_frags()
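A comparator like the one above (negative when frag1 precedes frag2) is enough to keep the fragment queue ordered on insert. A sketch of the ordered insertion; the names are illustrative, not the driver's:

#include <linux/skbuff.h>

static void sorted_frag_insert(struct sk_buff_head *frags,
			       struct sk_buff *new_frag,
			       int (*cmp)(struct sk_buff *a, struct sk_buff *b))
{
	struct sk_buff *skb;

	skb_queue_walk(frags, skb) {
		if (cmp(new_frag, skb) < 0) {
			__skb_queue_before(frags, skb, new_frag);
			return;
		}
	}

	__skb_queue_tail(frags, new_frag);
}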
3292 u32 hal_rx_desc_sz = ar->ab->hal.hal_desc_sz; in ath12k_dp_rx_h_get_pn()
3294 hdr = (struct ieee80211_hdr *)(skb->data + hal_rx_desc_sz); in ath12k_dp_rx_h_get_pn()
3295 ehdr = skb->data + hal_rx_desc_sz + ieee80211_hdrlen(hdr->frame_control); in ath12k_dp_rx_h_get_pn()
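ehdr above points at the extended (crypto) header right behind the 802.11 header; for CCMP/GCMP the 48-bit packet number lives in IV bytes 0, 1, 4, 5, 6 and 7, while bytes 2 and 3 carry the reserved and key-ID/ExtIV fields. A sketch of the extraction:

#include <linux/types.h>

static u64 ccmp_pn_from_iv(const u8 *iv)
{
	return (u64)iv[0] |
	       ((u64)iv[1] << 8) |
	       ((u64)iv[4] << 16) |
	       ((u64)iv[5] << 24) |
	       ((u64)iv[6] << 32) |
	       ((u64)iv[7] << 40);
}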
3310 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_defrag_validate_incr_pn()
3317 first_frag = skb_peek(&rx_tid->rx_frags); in ath12k_dp_rx_h_defrag_validate_incr_pn()
3318 desc = (struct hal_rx_desc *)first_frag->data; in ath12k_dp_rx_h_defrag_validate_incr_pn()
3328 skb_queue_walk(&rx_tid->rx_frags, skb) { in ath12k_dp_rx_h_defrag_validate_incr_pn()
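The walk above enforces PN continuity: every later fragment of a protected MPDU must carry exactly the previous fragment's packet number plus one, or the whole MPDU is rejected. A sketch of the rule, assuming the per-fragment PNs have already been extracted into an array:

#include <linux/types.h>

static bool pn_strictly_incrementing(const u64 *pn, int nfrags)
{
	int i;

	for (i = 1; i < nfrags; i++)
		if (pn[i] != pn[i - 1] + 1)
			return false;

	return true;
}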
3344 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_frag_h_mpdu()
3355 rx_desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_frag_h_mpdu()
3365 return -EINVAL; in ath12k_dp_rx_frag_h_mpdu()
3373 return -EINVAL; in ath12k_dp_rx_frag_h_mpdu()
3375 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_frag_h_mpdu()
3378 ath12k_warn(ab, "failed to find the peer to de-fragment received fragment peer_id %d\n", in ath12k_dp_rx_frag_h_mpdu()
3380 ret = -ENOENT; in ath12k_dp_rx_frag_h_mpdu()
3384 if (!peer->dp_setup_done) { in ath12k_dp_rx_frag_h_mpdu()
3386 peer->addr, peer_id); in ath12k_dp_rx_frag_h_mpdu()
3387 ret = -ENOENT; in ath12k_dp_rx_frag_h_mpdu()
3391 rx_tid = &peer->rx_tid[tid]; in ath12k_dp_rx_frag_h_mpdu()
3393 if ((!skb_queue_empty(&rx_tid->rx_frags) && seqno != rx_tid->cur_sn) || in ath12k_dp_rx_frag_h_mpdu()
3394 skb_queue_empty(&rx_tid->rx_frags)) { in ath12k_dp_rx_frag_h_mpdu()
3397 rx_tid->cur_sn = seqno; in ath12k_dp_rx_frag_h_mpdu()
3400 if (rx_tid->rx_frag_bitmap & BIT(frag_no)) { in ath12k_dp_rx_frag_h_mpdu()
3402 ret = -EINVAL; in ath12k_dp_rx_frag_h_mpdu()
3406 if (!rx_tid->rx_frag_bitmap || frag_no > __fls(rx_tid->rx_frag_bitmap)) in ath12k_dp_rx_frag_h_mpdu()
3407 __skb_queue_tail(&rx_tid->rx_frags, msdu); in ath12k_dp_rx_frag_h_mpdu()
3409 ath12k_dp_rx_h_sort_frags(ab, &rx_tid->rx_frags, msdu); in ath12k_dp_rx_frag_h_mpdu()
3411 rx_tid->rx_frag_bitmap |= BIT(frag_no); in ath12k_dp_rx_frag_h_mpdu()
3413 rx_tid->last_frag_no = frag_no; in ath12k_dp_rx_frag_h_mpdu()
3416 rx_tid->dst_ring_desc = kmemdup(ring_desc, in ath12k_dp_rx_frag_h_mpdu()
3417 sizeof(*rx_tid->dst_ring_desc), in ath12k_dp_rx_frag_h_mpdu()
3419 if (!rx_tid->dst_ring_desc) { in ath12k_dp_rx_frag_h_mpdu()
3420 ret = -ENOMEM; in ath12k_dp_rx_frag_h_mpdu()
3428 if (!rx_tid->last_frag_no || in ath12k_dp_rx_frag_h_mpdu()
3429 rx_tid->rx_frag_bitmap != GENMASK(rx_tid->last_frag_no, 0)) { in ath12k_dp_rx_frag_h_mpdu()
3430 mod_timer(&rx_tid->frag_timer, jiffies + in ath12k_dp_rx_frag_h_mpdu()
3435 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_frag_h_mpdu()
3436 del_timer_sync(&rx_tid->frag_timer); in ath12k_dp_rx_frag_h_mpdu()
3437 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_frag_h_mpdu()
3462 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_frag_h_mpdu()
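Reassembly above starts only once the fragment bitmap is dense up to the final fragment (the one received without the more-frags bit, recorded in last_frag_no); the same test gates the timeout handler earlier. A sketch of the completeness check:

#include <linux/bits.h>
#include <linux/types.h>

static bool mpdu_complete(u32 rx_frag_bitmap, u8 last_frag_no)
{
	/* every fragment 0..last_frag_no seen, and the final one known */
	return last_frag_no &&
	       rx_frag_bitmap == GENMASK(last_frag_no, 0);
}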
3471 struct ath12k_base *ab = ar->ab; in ath12k_dp_process_rx_err_buf()
3476 u32 hal_rx_desc_sz = ab->hal.hal_desc_sz; in ath12k_dp_process_rx_err_buf()
3480 desc_va = ((u64)le32_to_cpu(desc->buf_va_hi) << 32 | in ath12k_dp_process_rx_err_buf()
3481 le32_to_cpu(desc->buf_va_lo)); in ath12k_dp_process_rx_err_buf()
3490 return -EINVAL; in ath12k_dp_process_rx_err_buf()
3494 if (desc_info->magic != ATH12K_DP_RX_DESC_MAGIC) in ath12k_dp_process_rx_err_buf()
3497 msdu = desc_info->skb; in ath12k_dp_process_rx_err_buf()
3498 desc_info->skb = NULL; in ath12k_dp_process_rx_err_buf()
3500 list_add_tail(&desc_info->list, used_list); in ath12k_dp_process_rx_err_buf()
3503 dma_unmap_single(ar->ab->dev, rxcb->paddr, in ath12k_dp_process_rx_err_buf()
3504 msdu->len + skb_tailroom(msdu), in ath12k_dp_process_rx_err_buf()
3513 if (!rcu_dereference(ar->ab->pdevs_active[ar->pdev_idx])) { in ath12k_dp_process_rx_err_buf()
3518 if (test_bit(ATH12K_FLAG_CAC_RUNNING, &ar->dev_flags)) { in ath12k_dp_process_rx_err_buf()
3523 rx_desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_process_rx_err_buf()
3524 msdu_len = ath12k_dp_rx_h_msdu_len(ar->ab, rx_desc); in ath12k_dp_process_rx_err_buf()
3526 ath12k_warn(ar->ab, "invalid msdu len %u", msdu_len); in ath12k_dp_process_rx_err_buf()
3527 ath12k_dbg_dump(ar->ab, ATH12K_DBG_DATA, NULL, "", rx_desc, in ath12k_dp_process_rx_err_buf()
3537 ath12k_dp_rx_link_desc_return(ar->ab, desc, in ath12k_dp_process_rx_err_buf()
3548 struct ath12k_hw_group *ag = ab->ag; in ath12k_dp_rx_process_err()
3559 struct ath12k_hw_link *hw_links = ag->hw_links; in ath12k_dp_rx_process_err()
3576 reo_except = &ab->dp.reo_except_ring; in ath12k_dp_rx_process_err()
3578 srng = &ab->hal.srng_list[reo_except->ring_id]; in ath12k_dp_rx_process_err()
3580 spin_lock_bh(&srng->lock); in ath12k_dp_rx_process_err()
3587 ab->soc_stats.err_ring_pkts++; in ath12k_dp_rx_process_err()
3597 hw_link_id = le32_get_bits(reo_desc->info0, in ath12k_dp_rx_process_err()
3602 pdev_id = ath12k_hw_mac_id_to_pdev_id(partner_ab->hw_params, in ath12k_dp_rx_process_err()
3604 ar = partner_ab->pdevs[pdev_id].ar; in ath12k_dp_rx_process_err()
3606 link_desc_banks = partner_ab->dp.link_desc_banks; in ath12k_dp_rx_process_err()
3608 (paddr - link_desc_banks[desc_bank].paddr); in ath12k_dp_rx_process_err()
3611 if (rbm != partner_ab->dp.idle_link_rbm && in ath12k_dp_rx_process_err()
3613 rbm != partner_ab->hw_params->hal_params->rx_buf_rbm) { in ath12k_dp_rx_process_err()
3614 ab->soc_stats.invalid_rbm++; in ath12k_dp_rx_process_err()
3621 is_frag = !!(le32_to_cpu(reo_desc->rx_mpdu_info.info0) & in ath12k_dp_rx_process_err()
3626 * Dynamic fragmentation is not supported for multi-link clients, so drop the in ath12k_dp_rx_process_err()
3630 partner_ab->device_id != ab->device_id) { in ath12k_dp_rx_process_err()
3653 budget = quota - tot_n_bufs_reaped; in ath12k_dp_rx_process_err()
3659 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_process_err()
3666 rx_ring = &partner_ab->dp.rx_refill_buf_ring; in ath12k_dp_rx_process_err()
3685 (DP_RX_BUFFER_SIZE - ar->ab->hal.hal_desc_sz)); in ath12k_dp_rx_null_q_desc_sg_drop()
3689 if (rxcb->err_rel_src == HAL_WBM_REL_SRC_MODULE_REO && in ath12k_dp_rx_null_q_desc_sg_drop()
3690 rxcb->err_code == HAL_REO_DEST_RING_ERROR_CODE_DESC_ADDR_ZERO) { in ath12k_dp_rx_null_q_desc_sg_drop()
3695 n_buffs--; in ath12k_dp_rx_null_q_desc_sg_drop()
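The drop loop above walks one buffer per hardware scatter segment. The count follows from each rx buffer losing hal_desc_sz bytes to the descriptor TLVs, so an oversized MSDU occupies ceil(len / usable bytes) buffers. A sketch of that arithmetic:

#include <linux/kernel.h>
#include <linux/types.h>

static u32 msdu_buf_count(u32 msdu_len, u32 rx_buf_size, u32 hal_desc_sz)
{
	/* usable payload per buffer = buffer size minus descriptor TLVs */
	return DIV_ROUND_UP(msdu_len, rx_buf_size - hal_desc_sz);
}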
3704 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_null_q_desc()
3706 struct hal_rx_desc *desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_h_null_q_desc()
3709 u32 hal_rx_desc_sz = ar->ab->hal.hal_desc_sz; in ath12k_dp_rx_h_null_q_desc()
3713 if (!rxcb->is_frag && ((msdu_len + hal_rx_desc_sz) > DP_RX_BUFFER_SIZE)) { in ath12k_dp_rx_h_null_q_desc()
3715 msdu_len = msdu_len - (DP_RX_BUFFER_SIZE - hal_rx_desc_sz); in ath12k_dp_rx_h_null_q_desc()
3717 return -EINVAL; in ath12k_dp_rx_h_null_q_desc()
3724 if (rxcb->is_continuation) in ath12k_dp_rx_h_null_q_desc()
3725 return -EINVAL; in ath12k_dp_rx_h_null_q_desc()
3728 ath12k_warn(ar->ab, in ath12k_dp_rx_h_null_q_desc()
3731 return -EIO; in ath12k_dp_rx_h_null_q_desc()
3739 * non-QOS TID queue, in the absence of any other default TID queue. in ath12k_dp_rx_h_null_q_desc()
3743 if (rxcb->is_frag) { in ath12k_dp_rx_h_null_q_desc()
3749 return -EINVAL; in ath12k_dp_rx_h_null_q_desc()
3755 return -EINVAL; in ath12k_dp_rx_h_null_q_desc()
3761 rxcb->tid = ath12k_dp_rx_h_tid(ab, desc); in ath12k_dp_rx_h_null_q_desc()
3777 ar->ab->soc_stats.reo_error[rxcb->err_code]++; in ath12k_dp_rx_h_reo_err()
3779 switch (rxcb->err_code) { in ath12k_dp_rx_h_reo_err()
3804 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_tkip_mic_err()
3806 struct hal_rx_desc *desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_h_tkip_mic_err()
3809 u32 hal_rx_desc_sz = ar->ab->hal.hal_desc_sz; in ath12k_dp_rx_h_tkip_mic_err()
3811 rxcb->is_first_msdu = ath12k_dp_rx_h_first_msdu(ab, desc); in ath12k_dp_rx_h_tkip_mic_err()
3812 rxcb->is_last_msdu = ath12k_dp_rx_h_last_msdu(ab, desc); in ath12k_dp_rx_h_tkip_mic_err()
3824 status->flag |= (RX_FLAG_MMIC_STRIPPED | RX_FLAG_MMIC_ERROR | in ath12k_dp_rx_h_tkip_mic_err()
3835 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_rxdma_err()
3837 struct hal_rx_desc *rx_desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_h_rxdma_err()
3841 ar->ab->soc_stats.rxdma_error[rxcb->err_code]++; in ath12k_dp_rx_h_rxdma_err()
3843 switch (rxcb->err_code) { in ath12k_dp_rx_h_rxdma_err()
3872 switch (rxcb->err_rel_src) { in ath12k_dp_rx_wbm_err()
3896 struct ath12k_hw_group *ag = ab->ag; in ath12k_dp_rx_process_wbm_err()
3898 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_process_wbm_err()
3909 struct ath12k_hw_link *hw_links = ag->hw_links; in ath12k_dp_rx_process_wbm_err()
3921 srng = &ab->hal.srng_list[dp->rx_rel_ring.ring_id]; in ath12k_dp_rx_process_wbm_err()
3922 spin_lock_bh(&srng->lock); in ath12k_dp_rx_process_wbm_err()
3951 if (desc_info->magic != ATH12K_DP_RX_DESC_MAGIC) in ath12k_dp_rx_process_wbm_err()
3954 msdu = desc_info->skb; in ath12k_dp_rx_process_wbm_err()
3955 desc_info->skb = NULL; in ath12k_dp_rx_process_wbm_err()
3957 device_id = desc_info->device_id; in ath12k_dp_rx_process_wbm_err()
3969 list_add_tail(&desc_info->list, &rx_desc_used_list[device_id]); in ath12k_dp_rx_process_wbm_err()
3972 dma_unmap_single(partner_ab->dev, rxcb->paddr, in ath12k_dp_rx_process_wbm_err()
3973 msdu->len + skb_tailroom(msdu), in ath12k_dp_rx_process_wbm_err()
3980 budget--; in ath12k_dp_rx_process_wbm_err()
3988 msdu_data = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_process_wbm_err()
3989 rxcb->err_rel_src = err_info.err_rel_src; in ath12k_dp_rx_process_wbm_err()
3990 rxcb->err_code = err_info.err_code; in ath12k_dp_rx_process_wbm_err()
3991 rxcb->is_first_msdu = err_info.first_msdu; in ath12k_dp_rx_process_wbm_err()
3992 rxcb->is_last_msdu = err_info.last_msdu; in ath12k_dp_rx_process_wbm_err()
3993 rxcb->is_continuation = err_info.continuation; in ath12k_dp_rx_process_wbm_err()
3994 rxcb->rx_desc = msdu_data; in ath12k_dp_rx_process_wbm_err()
4018 rxcb->hw_link_id = hw_link_id; in ath12k_dp_rx_process_wbm_err()
4026 rxcb->hw_link_id = hw_link_id; in ath12k_dp_rx_process_wbm_err()
4037 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_process_wbm_err()
4047 rx_ring = &partner_ab->dp.rx_refill_buf_ring; in ath12k_dp_rx_process_wbm_err()
4057 hw_link_id = rxcb->hw_link_id; in ath12k_dp_rx_process_wbm_err()
4069 pdev_id = ath12k_hw_mac_id_to_pdev_id(partner_ab->hw_params, in ath12k_dp_rx_process_wbm_err()
4071 ar = partner_ab->pdevs[pdev_id].ar; in ath12k_dp_rx_process_wbm_err()
4073 if (!ar || !rcu_dereference(ar->ab->pdevs_active[pdev_id])) { in ath12k_dp_rx_process_wbm_err()
4078 if (test_bit(ATH12K_FLAG_CAC_RUNNING, &ar->dev_flags)) { in ath12k_dp_rx_process_wbm_err()
4091 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_process_reo_status()
4099 srng = &ab->hal.srng_list[dp->reo_status_ring.ring_id]; in ath12k_dp_rx_process_reo_status()
4103 spin_lock_bh(&srng->lock); in ath12k_dp_rx_process_reo_status()
4108 tag = le64_get_bits(hdr->tl, HAL_SRNG_TLV_HDR_TAG); in ath12k_dp_rx_process_reo_status()
4144 spin_lock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_process_reo_status()
4145 list_for_each_entry_safe(cmd, tmp, &dp->reo_cmd_list, list) { in ath12k_dp_rx_process_reo_status()
4146 if (reo_status.uniform_hdr.cmd_num == cmd->cmd_num) { in ath12k_dp_rx_process_reo_status()
4148 list_del(&cmd->list); in ath12k_dp_rx_process_reo_status()
4152 spin_unlock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_process_reo_status()
4155 cmd->handler(dp, (void *)&cmd->data, in ath12k_dp_rx_process_reo_status()
4165 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_process_reo_status()
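REO status processing above pairs each status TLV with the queued command carrying the same cmd_num: the match is unlinked under the command lock and its handler runs afterwards. A minimal sketch of that completion pattern, with illustrative struct names:

#include <linux/list.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct pending_cmd {
	struct list_head list;
	u32 cmd_num;
	void (*handler)(void *ctx);
	void *ctx;
};

static void complete_pending_cmd(spinlock_t *lock, struct list_head *cmds,
				 u32 cmd_num)
{
	struct pending_cmd *cmd, *tmp, *found = NULL;

	spin_lock_bh(lock);
	list_for_each_entry_safe(cmd, tmp, cmds, list) {
		if (cmd->cmd_num == cmd_num) {
			found = cmd;
			list_del(&cmd->list);
			break;
		}
	}
	spin_unlock_bh(lock);

	if (found && found->handler)
		found->handler(found->ctx);
}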
4170 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_free()
4173 ath12k_dp_srng_cleanup(ab, &dp->rx_refill_buf_ring.refill_buf_ring); in ath12k_dp_rx_free()
4175 for (i = 0; i < ab->hw_params->num_rxdma_per_pdev; i++) { in ath12k_dp_rx_free()
4176 if (ab->hw_params->rx_mac_buf_ring) in ath12k_dp_rx_free()
4177 ath12k_dp_srng_cleanup(ab, &dp->rx_mac_buf_ring[i]); in ath12k_dp_rx_free()
4180 for (i = 0; i < ab->hw_params->num_rxdma_dst_ring; i++) in ath12k_dp_rx_free()
4181 ath12k_dp_srng_cleanup(ab, &dp->rxdma_err_dst_ring[i]); in ath12k_dp_rx_free()
4183 ath12k_dp_srng_cleanup(ab, &dp->rxdma_mon_buf_ring.refill_buf_ring); in ath12k_dp_rx_free()
4190 struct ath12k *ar = ab->pdevs[mac_id].ar; in ath12k_dp_rx_pdev_free()
4197 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rxdma_ring_sel_config_qcn9274()
4201 u32 hal_rx_desc_sz = ab->hal.hal_desc_sz; in ath12k_dp_rxdma_ring_sel_config_qcn9274()
4203 ring_id = dp->rx_refill_buf_ring.refill_buf_ring.ring_id; in ath12k_dp_rxdma_ring_sel_config_qcn9274()
4214 ab->hal_rx_ops->rx_desc_get_mpdu_start_offset(); in ath12k_dp_rxdma_ring_sel_config_qcn9274()
4216 ab->hal_rx_ops->rx_desc_get_msdu_end_offset(); in ath12k_dp_rxdma_ring_sel_config_qcn9274()
4220 ab->hw_params->hal_ops->rxdma_ring_wmask_rx_mpdu_start(); in ath12k_dp_rxdma_ring_sel_config_qcn9274()
4222 ab->hw_params->hal_ops->rxdma_ring_wmask_rx_msdu_end(); in ath12k_dp_rxdma_ring_sel_config_qcn9274()
4238 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rxdma_ring_sel_config_wcn7850()
4242 u32 hal_rx_desc_sz = ab->hal.hal_desc_sz; in ath12k_dp_rxdma_ring_sel_config_wcn7850()
4245 ring_id = dp->rx_refill_buf_ring.refill_buf_ring.ring_id; in ath12k_dp_rxdma_ring_sel_config_wcn7850()
4258 ab->hal_rx_ops->rx_desc_get_mpdu_start_offset(); in ath12k_dp_rxdma_ring_sel_config_wcn7850()
4260 ab->hal_rx_ops->rx_desc_get_msdu_end_offset(); in ath12k_dp_rxdma_ring_sel_config_wcn7850()
4267 for (i = 0; i < ab->hw_params->num_rxdma_per_pdev; i++) { in ath12k_dp_rxdma_ring_sel_config_wcn7850()
4268 ring_id = dp->rx_mac_buf_ring[i].ring_id; in ath12k_dp_rxdma_ring_sel_config_wcn7850()
4280 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_htt_setup()
4285 ring_id = dp->rx_refill_buf_ring.refill_buf_ring.ring_id; in ath12k_dp_rx_htt_setup()
4293 if (ab->hw_params->rx_mac_buf_ring) { in ath12k_dp_rx_htt_setup()
4294 for (i = 0; i < ab->hw_params->num_rxdma_per_pdev; i++) { in ath12k_dp_rx_htt_setup()
4295 ring_id = dp->rx_mac_buf_ring[i].ring_id; in ath12k_dp_rx_htt_setup()
4306 for (i = 0; i < ab->hw_params->num_rxdma_dst_ring; i++) { in ath12k_dp_rx_htt_setup()
4307 ring_id = dp->rxdma_err_dst_ring[i].ring_id; in ath12k_dp_rx_htt_setup()
4317 if (ab->hw_params->rxdma1_enable) { in ath12k_dp_rx_htt_setup()
4318 ring_id = dp->rxdma_mon_buf_ring.refill_buf_ring.ring_id; in ath12k_dp_rx_htt_setup()
4328 ret = ab->hw_params->hw_ops->rxdma_ring_sel_config(ab); in ath12k_dp_rx_htt_setup()
4339 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_alloc()
4342 idr_init(&dp->rxdma_mon_buf_ring.bufs_idr); in ath12k_dp_rx_alloc()
4343 spin_lock_init(&dp->rxdma_mon_buf_ring.idr_lock); in ath12k_dp_rx_alloc()
4346 &dp->rx_refill_buf_ring.refill_buf_ring, in ath12k_dp_rx_alloc()
4354 if (ab->hw_params->rx_mac_buf_ring) { in ath12k_dp_rx_alloc()
4355 for (i = 0; i < ab->hw_params->num_rxdma_per_pdev; i++) { in ath12k_dp_rx_alloc()
4357 &dp->rx_mac_buf_ring[i], in ath12k_dp_rx_alloc()
4368 for (i = 0; i < ab->hw_params->num_rxdma_dst_ring; i++) { in ath12k_dp_rx_alloc()
4369 ret = ath12k_dp_srng_setup(ab, &dp->rxdma_err_dst_ring[i], in ath12k_dp_rx_alloc()
4378 if (ab->hw_params->rxdma1_enable) { in ath12k_dp_rx_alloc()
4380 &dp->rxdma_mon_buf_ring.refill_buf_ring, in ath12k_dp_rx_alloc()
4400 struct ath12k *ar = ab->pdevs[mac_id].ar; in ath12k_dp_rx_pdev_alloc()
4401 struct ath12k_pdev_dp *dp = &ar->dp; in ath12k_dp_rx_pdev_alloc()
4406 if (!ab->hw_params->rxdma1_enable) in ath12k_dp_rx_pdev_alloc()
4415 for (i = 0; i < ab->hw_params->num_rxdma_per_pdev; i++) { in ath12k_dp_rx_pdev_alloc()
4416 ring_id = dp->rxdma_mon_dst_ring[i].ring_id; in ath12k_dp_rx_pdev_alloc()
4433 struct ath12k_pdev_dp *dp = &ar->dp; in ath12k_dp_rx_pdev_mon_status_attach()
4434 struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&dp->mon_data; in ath12k_dp_rx_pdev_mon_status_attach()
4436 skb_queue_head_init(&pmon->rx_status_q); in ath12k_dp_rx_pdev_mon_status_attach()
4438 pmon->mon_ppdu_status = DP_PPDU_STATUS_START; in ath12k_dp_rx_pdev_mon_status_attach()
4440 memset(&pmon->rx_mon_stats, 0, in ath12k_dp_rx_pdev_mon_status_attach()
4441 sizeof(pmon->rx_mon_stats)); in ath12k_dp_rx_pdev_mon_status_attach()
4447 struct ath12k_pdev_dp *dp = &ar->dp; in ath12k_dp_rx_pdev_mon_attach()
4448 struct ath12k_mon_data *pmon = &dp->mon_data; in ath12k_dp_rx_pdev_mon_attach()
4453 ath12k_warn(ar->ab, "pdev_mon_status_attach() failed"); in ath12k_dp_rx_pdev_mon_attach()
4460 if (!ar->ab->hw_params->rxdma1_enable) in ath12k_dp_rx_pdev_mon_attach()
4463 pmon->mon_last_linkdesc_paddr = 0; in ath12k_dp_rx_pdev_mon_attach()
4464 pmon->mon_last_buf_cookie = DP_RX_DESC_COOKIE_MAX + 1; in ath12k_dp_rx_pdev_mon_attach()
4465 spin_lock_init(&pmon->mon_lock); in ath12k_dp_rx_pdev_mon_attach()