Lines matching full:lp — every reference to the identifier lp (the driver's struct axienet_local private data) in the Xilinx AXI Ethernet driver, xilinx_axienet_main.c. Each entry gives the source line number, the matching line, and the enclosing function; the trailing "argument"/"local" tags mark lines where lp is declared as a function parameter or local variable.
133 static struct skbuf_dma_descriptor *axienet_get_rx_desc(struct axienet_local *lp, int i) in axienet_get_rx_desc() argument
135 return lp->rx_skb_ring[i & (RX_BUF_NUM_DEFAULT - 1)]; in axienet_get_rx_desc()
138 static struct skbuf_dma_descriptor *axienet_get_tx_desc(struct axienet_local *lp, int i) in axienet_get_tx_desc() argument
140 return lp->tx_skb_ring[i & (TX_BD_NUM_MAX - 1)]; in axienet_get_tx_desc()
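The two ring getters above wrap a free-running index with a bitmask; that is only equivalent to a modulo when the ring size is a power of two, which is why TX_BD_NUM_MAX and RX_BUF_NUM_DEFAULT must stay powers of two. A minimal sketch of the same pattern (the names here are illustrative, not from the driver):

    #define RING_SIZE 64                    /* must be a power of two */

    struct ring {
            void *slot[RING_SIZE];
    };

    /* i & (RING_SIZE - 1) equals i % RING_SIZE for power-of-two sizes,
     * but compiles to a single AND, so the index can grow without
     * bound and never needs an explicit wrap. */
    static void *ring_slot(struct ring *r, unsigned int i)
    {
            return r->slot[i & (RING_SIZE - 1)];
    }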
145 * @lp: Pointer to axienet local structure
152 static inline u32 axienet_dma_in32(struct axienet_local *lp, off_t reg) in axienet_dma_in32() argument
154 return ioread32(lp->dma_regs + reg); in axienet_dma_in32()
157 static void desc_set_phys_addr(struct axienet_local *lp, dma_addr_t addr, in desc_set_phys_addr() argument
161 if (lp->features & XAE_FEATURE_DMA_64BIT) in desc_set_phys_addr()
165 static dma_addr_t desc_get_phys_addr(struct axienet_local *lp, in desc_get_phys_addr() argument
170 if (lp->features & XAE_FEATURE_DMA_64BIT) in desc_get_phys_addr()
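desc_set_phys_addr()/desc_get_phys_addr() split a dma_addr_t across two 32-bit descriptor words when the DMA core was synthesized with 64-bit addressing (XAE_FEATURE_DMA_64BIT). A hedged sketch of that split; the abbreviated struct and helper names are mine, while lower_32_bits()/upper_32_bits() are the kernel's:

    struct bd {                     /* abbreviated buffer descriptor */
            u32 phys;               /* low 32 bits of the buffer address */
            u32 phys_msb;           /* high 32 bits, valid only on 64-bit DMA */
    };

    static void bd_set_addr(struct bd *d, dma_addr_t addr, bool dma64)
    {
            d->phys = lower_32_bits(addr);
            if (dma64)
                    d->phys_msb = upper_32_bits(addr);
    }

    static dma_addr_t bd_get_addr(const struct bd *d, bool dma64)
    {
            dma_addr_t addr = d->phys;

            if (dma64)              /* dma_addr_t is 64-bit whenever this is set */
                    addr |= (u64)d->phys_msb << 32;
            return addr;
    }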
187 struct axienet_local *lp = netdev_priv(ndev); in axienet_dma_bd_release() local
190 dma_free_coherent(lp->dev, in axienet_dma_bd_release()
191 sizeof(*lp->tx_bd_v) * lp->tx_bd_num, in axienet_dma_bd_release()
192 lp->tx_bd_v, in axienet_dma_bd_release()
193 lp->tx_bd_p); in axienet_dma_bd_release()
195 if (!lp->rx_bd_v) in axienet_dma_bd_release()
198 for (i = 0; i < lp->rx_bd_num; i++) { in axienet_dma_bd_release()
204 if (!lp->rx_bd_v[i].skb) in axienet_dma_bd_release()
207 dev_kfree_skb(lp->rx_bd_v[i].skb); in axienet_dma_bd_release()
213 if (lp->rx_bd_v[i].cntrl) { in axienet_dma_bd_release()
214 phys = desc_get_phys_addr(lp, &lp->rx_bd_v[i]); in axienet_dma_bd_release()
215 dma_unmap_single(lp->dev, phys, in axienet_dma_bd_release()
216 lp->max_frm_size, DMA_FROM_DEVICE); in axienet_dma_bd_release()
220 dma_free_coherent(lp->dev, in axienet_dma_bd_release()
221 sizeof(*lp->rx_bd_v) * lp->rx_bd_num, in axienet_dma_bd_release()
222 lp->rx_bd_v, in axienet_dma_bd_release()
223 lp->rx_bd_p); in axienet_dma_bd_release()
228 * @lp: Pointer to the axienet_local structure
231 static u32 axienet_usec_to_timer(struct axienet_local *lp, u32 coalesce_usec) in axienet_usec_to_timer() argument
236 if (lp->axi_clk) in axienet_usec_to_timer()
237 clk_rate = clk_get_rate(lp->axi_clk); in axienet_usec_to_timer()
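axienet_usec_to_timer() scales the microsecond coalesce setting by the AXI clock rate so the DMA's 8-bit timeout field holds the right number of ticks; per the AXI DMA documentation one tick is 125 SG-clock periods. A back-of-the-envelope sketch of that conversion (constants follow the driver; treat this as an approximation, not the exact source):

    #include <linux/math64.h>       /* DIV64_U64_ROUND_CLOSEST() */

    static u32 usec_to_timer(u64 clk_rate_hz, u32 usec)
    {
            /* ticks = usec * clk_rate / (125 cycles/tick * 1e6 usec/sec) */
            u64 ticks = DIV64_U64_ROUND_CLOSEST((u64)usec * clk_rate_hz,
                                                125000000ULL);

            return min_t(u64, ticks, 255);  /* timeout field is 8 bits wide */
    }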
247 * @lp: Pointer to the axienet_local structure
249 static void axienet_dma_start(struct axienet_local *lp) in axienet_dma_start() argument
252 lp->rx_dma_cr = (lp->coalesce_count_rx << XAXIDMA_COALESCE_SHIFT) | in axienet_dma_start()
257 if (lp->coalesce_count_rx > 1) in axienet_dma_start()
258 lp->rx_dma_cr |= (axienet_usec_to_timer(lp, lp->coalesce_usec_rx) in axienet_dma_start()
261 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr); in axienet_dma_start()
264 lp->tx_dma_cr = (lp->coalesce_count_tx << XAXIDMA_COALESCE_SHIFT) | in axienet_dma_start()
269 if (lp->coalesce_count_tx > 1) in axienet_dma_start()
270 lp->tx_dma_cr |= (axienet_usec_to_timer(lp, lp->coalesce_usec_tx) in axienet_dma_start()
273 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, lp->tx_dma_cr); in axienet_dma_start()
278 axienet_dma_out_addr(lp, XAXIDMA_RX_CDESC_OFFSET, lp->rx_bd_p); in axienet_dma_start()
279 lp->rx_dma_cr |= XAXIDMA_CR_RUNSTOP_MASK; in axienet_dma_start()
280 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr); in axienet_dma_start()
281 axienet_dma_out_addr(lp, XAXIDMA_RX_TDESC_OFFSET, lp->rx_bd_p + in axienet_dma_start()
282 (sizeof(*lp->rx_bd_v) * (lp->rx_bd_num - 1))); in axienet_dma_start()
288 axienet_dma_out_addr(lp, XAXIDMA_TX_CDESC_OFFSET, lp->tx_bd_p); in axienet_dma_start()
289 lp->tx_dma_cr |= XAXIDMA_CR_RUNSTOP_MASK; in axienet_dma_start()
290 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, lp->tx_dma_cr); in axienet_dma_start()
307 struct axienet_local *lp = netdev_priv(ndev); in axienet_dma_bd_init() local
310 lp->tx_bd_ci = 0; in axienet_dma_bd_init()
311 lp->tx_bd_tail = 0; in axienet_dma_bd_init()
312 lp->rx_bd_ci = 0; in axienet_dma_bd_init()
315 lp->tx_bd_v = dma_alloc_coherent(lp->dev, in axienet_dma_bd_init()
316 sizeof(*lp->tx_bd_v) * lp->tx_bd_num, in axienet_dma_bd_init()
317 &lp->tx_bd_p, GFP_KERNEL); in axienet_dma_bd_init()
318 if (!lp->tx_bd_v) in axienet_dma_bd_init()
321 lp->rx_bd_v = dma_alloc_coherent(lp->dev, in axienet_dma_bd_init()
322 sizeof(*lp->rx_bd_v) * lp->rx_bd_num, in axienet_dma_bd_init()
323 &lp->rx_bd_p, GFP_KERNEL); in axienet_dma_bd_init()
324 if (!lp->rx_bd_v) in axienet_dma_bd_init()
327 for (i = 0; i < lp->tx_bd_num; i++) { in axienet_dma_bd_init()
328 dma_addr_t addr = lp->tx_bd_p + in axienet_dma_bd_init()
329 sizeof(*lp->tx_bd_v) * in axienet_dma_bd_init()
330 ((i + 1) % lp->tx_bd_num); in axienet_dma_bd_init()
332 lp->tx_bd_v[i].next = lower_32_bits(addr); in axienet_dma_bd_init()
333 if (lp->features & XAE_FEATURE_DMA_64BIT) in axienet_dma_bd_init()
334 lp->tx_bd_v[i].next_msb = upper_32_bits(addr); in axienet_dma_bd_init()
337 for (i = 0; i < lp->rx_bd_num; i++) { in axienet_dma_bd_init()
340 addr = lp->rx_bd_p + sizeof(*lp->rx_bd_v) * in axienet_dma_bd_init()
341 ((i + 1) % lp->rx_bd_num); in axienet_dma_bd_init()
342 lp->rx_bd_v[i].next = lower_32_bits(addr); in axienet_dma_bd_init()
343 if (lp->features & XAE_FEATURE_DMA_64BIT) in axienet_dma_bd_init()
344 lp->rx_bd_v[i].next_msb = upper_32_bits(addr); in axienet_dma_bd_init()
346 skb = netdev_alloc_skb_ip_align(ndev, lp->max_frm_size); in axienet_dma_bd_init()
350 lp->rx_bd_v[i].skb = skb; in axienet_dma_bd_init()
351 addr = dma_map_single(lp->dev, skb->data, in axienet_dma_bd_init()
352 lp->max_frm_size, DMA_FROM_DEVICE); in axienet_dma_bd_init()
353 if (dma_mapping_error(lp->dev, addr)) { in axienet_dma_bd_init()
357 desc_set_phys_addr(lp, addr, &lp->rx_bd_v[i]); in axienet_dma_bd_init()
359 lp->rx_bd_v[i].cntrl = lp->max_frm_size; in axienet_dma_bd_init()
362 axienet_dma_start(lp); in axienet_dma_bd_init()
381 struct axienet_local *lp = netdev_priv(ndev); in axienet_set_mac_address() local
389 axienet_iow(lp, XAE_UAW0_OFFSET, in axienet_set_mac_address()
394 axienet_iow(lp, XAE_UAW1_OFFSET, in axienet_set_mac_address()
395 (((axienet_ior(lp, XAE_UAW1_OFFSET)) & in axienet_set_mac_address()
435 struct axienet_local *lp = netdev_priv(ndev); in axienet_set_multicast_list() local
437 reg = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_set_multicast_list()
443 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
448 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
449 axienet_iow(lp, XAE_AF0_OFFSET, 1); /* Multicast bit */ in axienet_set_multicast_list()
450 axienet_iow(lp, XAE_AF1_OFFSET, 0); in axienet_set_multicast_list()
451 axienet_iow(lp, XAE_AM0_OFFSET, 1); /* ditto */ in axienet_set_multicast_list()
452 axienet_iow(lp, XAE_AM1_OFFSET, 0); in axienet_set_multicast_list()
453 axienet_iow(lp, XAE_FFE_OFFSET, 1); in axienet_set_multicast_list()
473 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
474 axienet_iow(lp, XAE_AF0_OFFSET, af0reg); in axienet_set_multicast_list()
475 axienet_iow(lp, XAE_AF1_OFFSET, af1reg); in axienet_set_multicast_list()
476 axienet_iow(lp, XAE_AM0_OFFSET, 0xffffffff); in axienet_set_multicast_list()
477 axienet_iow(lp, XAE_AM1_OFFSET, 0x0000ffff); in axienet_set_multicast_list()
478 axienet_iow(lp, XAE_FFE_OFFSET, 1); in axienet_set_multicast_list()
486 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
487 axienet_iow(lp, XAE_FFE_OFFSET, 0); in axienet_set_multicast_list()
505 struct axienet_local *lp = netdev_priv(ndev); in axienet_setoptions() local
509 reg = ((axienet_ior(lp, tp->reg)) & ~(tp->m_or)); in axienet_setoptions()
512 axienet_iow(lp, tp->reg, reg); in axienet_setoptions()
516 lp->options |= options; in axienet_setoptions()
519 static u64 axienet_stat(struct axienet_local *lp, enum temac_stat stat) in axienet_stat() argument
523 if (lp->reset_in_progress) in axienet_stat()
524 return lp->hw_stat_base[stat]; in axienet_stat()
526 counter = axienet_ior(lp, XAE_STATS_OFFSET + stat * 8); in axienet_stat()
527 return lp->hw_stat_base[stat] + (counter - lp->hw_last_counter[stat]); in axienet_stat()
530 static void axienet_stats_update(struct axienet_local *lp, bool reset) in axienet_stats_update() argument
534 write_seqcount_begin(&lp->hw_stats_seqcount); in axienet_stats_update()
535 lp->reset_in_progress = reset; in axienet_stats_update()
537 u32 counter = axienet_ior(lp, XAE_STATS_OFFSET + stat * 8); in axienet_stats_update()
539 lp->hw_stat_base[stat] += counter - lp->hw_last_counter[stat]; in axienet_stats_update()
540 lp->hw_last_counter[stat] = counter; in axienet_stats_update()
542 write_seqcount_end(&lp->hw_stats_seqcount); in axienet_stats_update()
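axienet_stat() and axienet_stats_update() together implement wrap-safe accumulation: the hardware counters are only 32 bits, so the driver keeps a u64 base plus the last raw sample and lets unsigned subtraction absorb the wrap. The 13-second refresh at line 555 below exists precisely so no counter can wrap twice between samples. A distilled sketch of the idea (hypothetical struct, same arithmetic):

    struct stat64 {
            u64 base;               /* accumulated value, never wraps */
            u32 last;               /* raw hardware reading at last fold */
    };

    /* (raw - s->last) is correct modulo 2^32, so a single counter wrap
     * between samples is harmless. */
    static u64 stat_read(const struct stat64 *s, u32 raw)
    {
            return s->base + (u32)(raw - s->last);
    }

    static void stat_fold(struct stat64 *s, u32 raw)
    {
            s->base += (u32)(raw - s->last);
            s->last = raw;
    }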
547 struct axienet_local *lp = container_of(work, struct axienet_local, in axienet_refresh_stats() local
550 mutex_lock(&lp->stats_lock); in axienet_refresh_stats()
551 axienet_stats_update(lp, false); in axienet_refresh_stats()
552 mutex_unlock(&lp->stats_lock); in axienet_refresh_stats()
555 schedule_delayed_work(&lp->stats_work, 13 * HZ); in axienet_refresh_stats()
558 static int __axienet_device_reset(struct axienet_local *lp) in __axienet_device_reset() argument
564 mutex_lock(&lp->stats_lock); in __axienet_device_reset()
565 if (lp->features & XAE_FEATURE_STATS) in __axienet_device_reset()
566 axienet_stats_update(lp, true); in __axienet_device_reset()
575 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, XAXIDMA_CR_RESET_MASK); in __axienet_device_reset()
578 DELAY_OF_ONE_MILLISEC, 50000, false, lp, in __axienet_device_reset()
581 dev_err(lp->dev, "%s: DMA reset timeout!\n", __func__); in __axienet_device_reset()
588 DELAY_OF_ONE_MILLISEC, 50000, false, lp, in __axienet_device_reset()
591 dev_err(lp->dev, "%s: timeout waiting for PhyRstCmplt\n", __func__); in __axienet_device_reset()
596 if (lp->features & XAE_FEATURE_STATS) { in __axienet_device_reset()
599 write_seqcount_begin(&lp->hw_stats_seqcount); in __axienet_device_reset()
600 lp->reset_in_progress = false; in __axienet_device_reset()
603 axienet_ior(lp, XAE_STATS_OFFSET + stat * 8); in __axienet_device_reset()
605 lp->hw_stat_base[stat] += in __axienet_device_reset()
606 lp->hw_last_counter[stat] - counter; in __axienet_device_reset()
607 lp->hw_last_counter[stat] = counter; in __axienet_device_reset()
609 write_seqcount_end(&lp->hw_stats_seqcount); in __axienet_device_reset()
613 mutex_unlock(&lp->stats_lock); in __axienet_device_reset()
619 * @lp: Pointer to the axienet_local structure
621 static void axienet_dma_stop(struct axienet_local *lp) in axienet_dma_stop() argument
626 cr = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_dma_stop()
628 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr); in axienet_dma_stop()
629 synchronize_irq(lp->rx_irq); in axienet_dma_stop()
631 cr = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_dma_stop()
633 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr); in axienet_dma_stop()
634 synchronize_irq(lp->tx_irq); in axienet_dma_stop()
637 sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_dma_stop()
640 sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_dma_stop()
643 sr = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_dma_stop()
646 sr = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_dma_stop()
650 axienet_lock_mii(lp); in axienet_dma_stop()
651 __axienet_device_reset(lp); in axienet_dma_stop()
652 axienet_unlock_mii(lp); in axienet_dma_stop()
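axienet_dma_stop() shows the teardown order: clear RUN/STOP and the IRQ-enable bits in each control register, synchronize_irq() so an in-flight handler cannot re-arm anything, poll the status registers until the channels report halted, then reset the whole core. A condensed sketch of the polling half; the halt-bit name is an assumption based on the driver's header:

    u32 sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET);
    int tries;

    /* Give the channel a bounded grace period to reach the halted state. */
    for (tries = 0; !(sr & XAXIDMA_SR_HALT_MASK) && tries < 5; tries++) {
            msleep(20);
            sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET);
    }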
670 struct axienet_local *lp = netdev_priv(ndev); in axienet_device_reset() local
673 lp->max_frm_size = XAE_MAX_VLAN_FRAME_SIZE; in axienet_device_reset()
674 lp->options |= XAE_OPTION_VLAN; in axienet_device_reset()
675 lp->options &= (~XAE_OPTION_JUMBO); in axienet_device_reset()
678 lp->max_frm_size = ndev->mtu + VLAN_ETH_HLEN + in axienet_device_reset()
681 if (lp->max_frm_size <= lp->rxmem) in axienet_device_reset()
682 lp->options |= XAE_OPTION_JUMBO; in axienet_device_reset()
685 if (!lp->use_dmaengine) { in axienet_device_reset()
686 ret = __axienet_device_reset(lp); in axienet_device_reset()
698 axienet_status = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_device_reset()
700 axienet_iow(lp, XAE_RCW1_OFFSET, axienet_status); in axienet_device_reset()
702 axienet_status = axienet_ior(lp, XAE_IP_OFFSET); in axienet_device_reset()
704 axienet_iow(lp, XAE_IS_OFFSET, XAE_INT_RXRJECT_MASK); in axienet_device_reset()
705 axienet_iow(lp, XAE_IE_OFFSET, lp->eth_irq > 0 ? in axienet_device_reset()
708 axienet_iow(lp, XAE_FCC_OFFSET, XAE_FCC_FCRX_MASK); in axienet_device_reset()
713 axienet_setoptions(ndev, lp->options & in axienet_device_reset()
717 axienet_setoptions(ndev, lp->options); in axienet_device_reset()
726 * @lp: Pointer to the axienet_local structure
738 static int axienet_free_tx_chain(struct axienet_local *lp, u32 first_bd, in axienet_free_tx_chain() argument
747 cur_p = &lp->tx_bd_v[(first_bd + i) % lp->tx_bd_num]; in axienet_free_tx_chain()
758 phys = desc_get_phys_addr(lp, cur_p); in axienet_free_tx_chain()
759 dma_unmap_single(lp->dev, phys, in axienet_free_tx_chain()
783 lp->tx_bd_ci += i; in axienet_free_tx_chain()
784 if (lp->tx_bd_ci >= lp->tx_bd_num) in axienet_free_tx_chain()
785 lp->tx_bd_ci %= lp->tx_bd_num; in axienet_free_tx_chain()
793 * @lp: Pointer to the axienet_local structure
804 static inline int axienet_check_tx_bd_space(struct axienet_local *lp, in axienet_check_tx_bd_space() argument
811 cur_p = &lp->tx_bd_v[(READ_ONCE(lp->tx_bd_tail) + num_frag) % in axienet_check_tx_bd_space()
812 lp->tx_bd_num]; in axienet_check_tx_bd_space()
828 struct axienet_local *lp = data; in axienet_dma_tx_cb() local
832 skbuf_dma = axienet_get_tx_desc(lp, lp->tx_ring_tail++); in axienet_dma_tx_cb()
834 txq = skb_get_tx_queue(lp->ndev, skbuf_dma->skb); in axienet_dma_tx_cb()
835 u64_stats_update_begin(&lp->tx_stat_sync); in axienet_dma_tx_cb()
836 u64_stats_add(&lp->tx_bytes, len); in axienet_dma_tx_cb()
837 u64_stats_add(&lp->tx_packets, 1); in axienet_dma_tx_cb()
838 u64_stats_update_end(&lp->tx_stat_sync); in axienet_dma_tx_cb()
839 dma_unmap_sg(lp->dev, skbuf_dma->sgl, skbuf_dma->sg_len, DMA_TO_DEVICE); in axienet_dma_tx_cb()
842 CIRC_SPACE(lp->tx_ring_head, lp->tx_ring_tail, TX_BD_NUM_MAX), in axienet_dma_tx_cb()
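The dmaengine TX path at lines 828-842 tracks ring occupancy with two free-running counters, tx_ring_head (producer) and tx_ring_tail (consumer), and CIRC_SPACE() from <linux/circ_buf.h>, which again relies on TX_BD_NUM_MAX being a power of two. A minimal sketch of the check (the helper name is mine):

    #include <linux/circ_buf.h>

    /* CIRC_SPACE(head, tail, size) is how many more entries the producer
     * may claim before it would overrun the consumer. */
    static bool tx_ring_can_take(unsigned int head, unsigned int tail,
                                 unsigned int nents)
    {
            return CIRC_SPACE(head, tail, TX_BD_NUM_MAX) > nents;
    }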
865 struct axienet_local *lp = netdev_priv(ndev); in axienet_start_xmit_dmaengine() local
875 dma_dev = lp->tx_chan->device; in axienet_start_xmit_dmaengine()
877 if (CIRC_SPACE(lp->tx_ring_head, lp->tx_ring_tail, TX_BD_NUM_MAX) <= sg_len) { in axienet_start_xmit_dmaengine()
884 skbuf_dma = axienet_get_tx_desc(lp, lp->tx_ring_head); in axienet_start_xmit_dmaengine()
888 lp->tx_ring_head++; in axienet_start_xmit_dmaengine()
894 ret = dma_map_sg(lp->dev, skbuf_dma->sgl, sg_len, DMA_TO_DEVICE); in axienet_start_xmit_dmaengine()
900 if (lp->features & XAE_FEATURE_FULL_TX_CSUM) { in axienet_start_xmit_dmaengine()
903 } else if (lp->features & XAE_FEATURE_PARTIAL_TX_CSUM) { in axienet_start_xmit_dmaengine()
914 dma_tx_desc = dma_dev->device_prep_slave_sg(lp->tx_chan, skbuf_dma->sgl, in axienet_start_xmit_dmaengine()
922 dma_tx_desc->callback_param = lp; in axienet_start_xmit_dmaengine()
924 txq = skb_get_tx_queue(lp->ndev, skb); in axienet_start_xmit_dmaengine()
926 netif_txq_maybe_stop(txq, CIRC_SPACE(lp->tx_ring_head, lp->tx_ring_tail, TX_BD_NUM_MAX), in axienet_start_xmit_dmaengine()
930 dma_async_issue_pending(lp->tx_chan); in axienet_start_xmit_dmaengine()
934 dma_unmap_sg(lp->dev, skbuf_dma->sgl, sg_len, DMA_TO_DEVICE); in axienet_start_xmit_dmaengine()
956 struct axienet_local *lp = container_of(napi, struct axienet_local, napi_tx); in axienet_tx_poll() local
957 struct net_device *ndev = lp->ndev; in axienet_tx_poll()
961 packets = axienet_free_tx_chain(lp, lp->tx_bd_ci, lp->tx_bd_num, false, in axienet_tx_poll()
965 u64_stats_update_begin(&lp->tx_stat_sync); in axienet_tx_poll()
966 u64_stats_add(&lp->tx_packets, packets); in axienet_tx_poll()
967 u64_stats_add(&lp->tx_bytes, size); in axienet_tx_poll()
968 u64_stats_update_end(&lp->tx_stat_sync); in axienet_tx_poll()
973 if (!axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) in axienet_tx_poll()
982 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, lp->tx_dma_cr); in axienet_tx_poll()
1010 struct axienet_local *lp = netdev_priv(ndev); in axienet_start_xmit() local
1013 orig_tail_ptr = lp->tx_bd_tail; in axienet_start_xmit()
1017 cur_p = &lp->tx_bd_v[orig_tail_ptr]; in axienet_start_xmit()
1019 if (axienet_check_tx_bd_space(lp, num_frag + 1)) { in axienet_start_xmit()
1031 if (lp->features & XAE_FEATURE_FULL_TX_CSUM) { in axienet_start_xmit()
1034 } else if (lp->features & XAE_FEATURE_PARTIAL_TX_CSUM) { in axienet_start_xmit()
1045 phys = dma_map_single(lp->dev, skb->data, in axienet_start_xmit()
1047 if (unlikely(dma_mapping_error(lp->dev, phys))) { in axienet_start_xmit()
1054 desc_set_phys_addr(lp, phys, cur_p); in axienet_start_xmit()
1058 if (++new_tail_ptr >= lp->tx_bd_num) in axienet_start_xmit()
1060 cur_p = &lp->tx_bd_v[new_tail_ptr]; in axienet_start_xmit()
1062 phys = dma_map_single(lp->dev, in axienet_start_xmit()
1066 if (unlikely(dma_mapping_error(lp->dev, phys))) { in axienet_start_xmit()
1070 axienet_free_tx_chain(lp, orig_tail_ptr, ii + 1, in axienet_start_xmit()
1075 desc_set_phys_addr(lp, phys, cur_p); in axienet_start_xmit()
1082 tail_p = lp->tx_bd_p + sizeof(*lp->tx_bd_v) * new_tail_ptr; in axienet_start_xmit()
1083 if (++new_tail_ptr >= lp->tx_bd_num) in axienet_start_xmit()
1085 WRITE_ONCE(lp->tx_bd_tail, new_tail_ptr); in axienet_start_xmit()
1088 axienet_dma_out_addr(lp, XAXIDMA_TX_TDESC_OFFSET, tail_p); in axienet_start_xmit()
1091 if (axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) { in axienet_start_xmit()
1098 if (!axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) in axienet_start_xmit()
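The back-to-back space checks at lines 1091 and 1098 are the classic lock-free stop/recheck idiom: stop the queue, issue a full barrier that pairs with one in the TX completion path (axienet_tx_poll), then test again in case completion freed descriptors in the window; without the recheck the queue could stall forever. Sketched here with a hypothetical ring_has_room() helper:

    if (!ring_has_room(lp, MAX_SKB_FRAGS + 1)) {
            netif_stop_queue(ndev);
            smp_mb();       /* pairs with the barrier on the completion side */
            if (ring_has_room(lp, MAX_SKB_FRAGS + 1))
                    netif_wake_queue(ndev);  /* completion raced us; undo */
    }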
1116 struct axienet_local *lp = data; in axienet_dma_rx_cb() local
1120 skbuf_dma = axienet_get_rx_desc(lp, lp->rx_ring_tail++); in axienet_dma_rx_cb()
1124 dma_unmap_single(lp->dev, skbuf_dma->dma_address, lp->max_frm_size, in axienet_dma_rx_cb()
1129 skb->protocol = eth_type_trans(skb, lp->ndev); in axienet_dma_rx_cb()
1133 u64_stats_update_begin(&lp->rx_stat_sync); in axienet_dma_rx_cb()
1134 u64_stats_add(&lp->rx_packets, 1); in axienet_dma_rx_cb()
1135 u64_stats_add(&lp->rx_bytes, rx_len); in axienet_dma_rx_cb()
1136 u64_stats_update_end(&lp->rx_stat_sync); in axienet_dma_rx_cb()
1137 axienet_rx_submit_desc(lp->ndev); in axienet_dma_rx_cb()
1138 dma_async_issue_pending(lp->rx_chan); in axienet_dma_rx_cb()
1157 struct axienet_local *lp = container_of(napi, struct axienet_local, napi_rx); in axienet_rx_poll() local
1159 cur_p = &lp->rx_bd_v[lp->rx_bd_ci]; in axienet_rx_poll()
1178 phys = desc_get_phys_addr(lp, cur_p); in axienet_rx_poll()
1179 dma_unmap_single(lp->dev, phys, lp->max_frm_size, in axienet_rx_poll()
1183 skb->protocol = eth_type_trans(skb, lp->ndev); in axienet_rx_poll()
1188 if (lp->features & XAE_FEATURE_FULL_RX_CSUM) { in axienet_rx_poll()
1195 } else if (lp->features & XAE_FEATURE_PARTIAL_RX_CSUM) { in axienet_rx_poll()
1206 new_skb = napi_alloc_skb(napi, lp->max_frm_size); in axienet_rx_poll()
1210 phys = dma_map_single(lp->dev, new_skb->data, in axienet_rx_poll()
1211 lp->max_frm_size, in axienet_rx_poll()
1213 if (unlikely(dma_mapping_error(lp->dev, phys))) { in axienet_rx_poll()
1215 netdev_err(lp->ndev, "RX DMA mapping error\n"); in axienet_rx_poll()
1219 desc_set_phys_addr(lp, phys, cur_p); in axienet_rx_poll()
1221 cur_p->cntrl = lp->max_frm_size; in axienet_rx_poll()
1228 tail_p = lp->rx_bd_p + sizeof(*lp->rx_bd_v) * lp->rx_bd_ci; in axienet_rx_poll()
1230 if (++lp->rx_bd_ci >= lp->rx_bd_num) in axienet_rx_poll()
1231 lp->rx_bd_ci = 0; in axienet_rx_poll()
1232 cur_p = &lp->rx_bd_v[lp->rx_bd_ci]; in axienet_rx_poll()
1235 u64_stats_update_begin(&lp->rx_stat_sync); in axienet_rx_poll()
1236 u64_stats_add(&lp->rx_packets, packets); in axienet_rx_poll()
1237 u64_stats_add(&lp->rx_bytes, size); in axienet_rx_poll()
1238 u64_stats_update_end(&lp->rx_stat_sync); in axienet_rx_poll()
1241 axienet_dma_out_addr(lp, XAXIDMA_RX_TDESC_OFFSET, tail_p); in axienet_rx_poll()
1248 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr); in axienet_rx_poll()
1267 struct axienet_local *lp = netdev_priv(ndev); in axienet_tx_irq() local
1269 status = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_tx_irq()
1274 axienet_dma_out32(lp, XAXIDMA_TX_SR_OFFSET, status); in axienet_tx_irq()
1279 (lp->tx_bd_v[lp->tx_bd_ci]).phys_msb, in axienet_tx_irq()
1280 (lp->tx_bd_v[lp->tx_bd_ci]).phys); in axienet_tx_irq()
1281 schedule_work(&lp->dma_err_task); in axienet_tx_irq()
1286 u32 cr = lp->tx_dma_cr; in axienet_tx_irq()
1289 if (napi_schedule_prep(&lp->napi_tx)) { in axienet_tx_irq()
1290 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr); in axienet_tx_irq()
1291 __napi_schedule(&lp->napi_tx); in axienet_tx_irq()
1312 struct axienet_local *lp = netdev_priv(ndev); in axienet_rx_irq() local
1314 status = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_rx_irq()
1319 axienet_dma_out32(lp, XAXIDMA_RX_SR_OFFSET, status); in axienet_rx_irq()
1324 (lp->rx_bd_v[lp->rx_bd_ci]).phys_msb, in axienet_rx_irq()
1325 (lp->rx_bd_v[lp->rx_bd_ci]).phys); in axienet_rx_irq()
1326 schedule_work(&lp->dma_err_task); in axienet_rx_irq()
1331 u32 cr = lp->rx_dma_cr; in axienet_rx_irq()
1334 if (napi_schedule_prep(&lp->napi_rx)) { in axienet_rx_irq()
1335 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr); in axienet_rx_irq()
1336 __napi_schedule(&lp->napi_rx); in axienet_rx_irq()
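Both DMA interrupt handlers follow the standard NAPI hand-off: acknowledge the status bits, mask the completion/delay interrupt sources in the control register, and schedule the poll loop, which re-enables them when it finishes (line 1248 for RX). A condensed sketch of the RX side, reconstructed from the lines above; the mask names are taken from the driver's header and are an assumption here:

    static irqreturn_t rx_irq_sketch(int irq, void *dev_id)
    {
            struct net_device *ndev = dev_id;
            struct axienet_local *lp = netdev_priv(ndev);
            u32 status = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET);

            axienet_dma_out32(lp, XAXIDMA_RX_SR_OFFSET, status);    /* ack */

            if (napi_schedule_prep(&lp->napi_rx)) {
                    /* mask IOC/delay interrupts until the poll completes */
                    u32 cr = lp->rx_dma_cr &
                             ~(XAXIDMA_IRQ_IOC_MASK | XAXIDMA_IRQ_DELAY_MASK);

                    axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr);
                    __napi_schedule(&lp->napi_rx);
            }
            return IRQ_HANDLED;
    }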
1355 struct axienet_local *lp = netdev_priv(ndev); in axienet_eth_irq() local
1358 pending = axienet_ior(lp, XAE_IP_OFFSET); in axienet_eth_irq()
1368 axienet_iow(lp, XAE_IS_OFFSET, pending); in axienet_eth_irq()
1385 struct axienet_local *lp = netdev_priv(ndev); in axienet_rx_submit_desc() local
1390 skbuf_dma = axienet_get_rx_desc(lp, lp->rx_ring_head); in axienet_rx_submit_desc()
1394 lp->rx_ring_head++; in axienet_rx_submit_desc()
1395 skb = netdev_alloc_skb(ndev, lp->max_frm_size); in axienet_rx_submit_desc()
1400 addr = dma_map_single(lp->dev, skb->data, lp->max_frm_size, DMA_FROM_DEVICE); in axienet_rx_submit_desc()
1401 if (unlikely(dma_mapping_error(lp->dev, addr))) { in axienet_rx_submit_desc()
1407 sg_dma_len(skbuf_dma->sgl) = lp->max_frm_size; in axienet_rx_submit_desc()
1408 dma_rx_desc = dmaengine_prep_slave_sg(lp->rx_chan, skbuf_dma->sgl, in axienet_rx_submit_desc()
1417 dma_rx_desc->callback_param = lp; in axienet_rx_submit_desc()
1424 dma_unmap_single(lp->dev, addr, lp->max_frm_size, DMA_FROM_DEVICE); in axienet_rx_submit_desc()
1440 struct axienet_local *lp = netdev_priv(ndev); in axienet_init_dmaengine() local
1444 lp->tx_chan = dma_request_chan(lp->dev, "tx_chan0"); in axienet_init_dmaengine()
1445 if (IS_ERR(lp->tx_chan)) { in axienet_init_dmaengine()
1446 dev_err(lp->dev, "No Ethernet DMA (TX) channel found\n"); in axienet_init_dmaengine()
1447 return PTR_ERR(lp->tx_chan); in axienet_init_dmaengine()
1450 lp->rx_chan = dma_request_chan(lp->dev, "rx_chan0"); in axienet_init_dmaengine()
1451 if (IS_ERR(lp->rx_chan)) { in axienet_init_dmaengine()
1452 ret = PTR_ERR(lp->rx_chan); in axienet_init_dmaengine()
1453 dev_err(lp->dev, "No Ethernet DMA (RX) channel found\n"); in axienet_init_dmaengine()
1457 lp->tx_ring_tail = 0; in axienet_init_dmaengine()
1458 lp->tx_ring_head = 0; in axienet_init_dmaengine()
1459 lp->rx_ring_tail = 0; in axienet_init_dmaengine()
1460 lp->rx_ring_head = 0; in axienet_init_dmaengine()
1461 lp->tx_skb_ring = kcalloc(TX_BD_NUM_MAX, sizeof(*lp->tx_skb_ring), in axienet_init_dmaengine()
1463 if (!lp->tx_skb_ring) { in axienet_init_dmaengine()
1473 lp->tx_skb_ring[i] = skbuf_dma; in axienet_init_dmaengine()
1476 lp->rx_skb_ring = kcalloc(RX_BUF_NUM_DEFAULT, sizeof(*lp->rx_skb_ring), in axienet_init_dmaengine()
1478 if (!lp->rx_skb_ring) { in axienet_init_dmaengine()
1488 lp->rx_skb_ring[i] = skbuf_dma; in axienet_init_dmaengine()
1493 dma_async_issue_pending(lp->rx_chan); in axienet_init_dmaengine()
1499 kfree(lp->rx_skb_ring[i]); in axienet_init_dmaengine()
1500 kfree(lp->rx_skb_ring); in axienet_init_dmaengine()
1503 kfree(lp->tx_skb_ring[i]); in axienet_init_dmaengine()
1504 kfree(lp->tx_skb_ring); in axienet_init_dmaengine()
1506 dma_release_channel(lp->rx_chan); in axienet_init_dmaengine()
1508 dma_release_channel(lp->tx_chan); in axienet_init_dmaengine()
1526 struct axienet_local *lp = netdev_priv(ndev); in axienet_init_legacy_dma() local
1529 lp->stopping = false; in axienet_init_legacy_dma()
1530 INIT_WORK(&lp->dma_err_task, axienet_dma_err_handler); in axienet_init_legacy_dma()
1532 napi_enable(&lp->napi_rx); in axienet_init_legacy_dma()
1533 napi_enable(&lp->napi_tx); in axienet_init_legacy_dma()
1536 ret = request_irq(lp->tx_irq, axienet_tx_irq, IRQF_SHARED, in axienet_init_legacy_dma()
1541 ret = request_irq(lp->rx_irq, axienet_rx_irq, IRQF_SHARED, in axienet_init_legacy_dma()
1546 if (lp->eth_irq > 0) { in axienet_init_legacy_dma()
1547 ret = request_irq(lp->eth_irq, axienet_eth_irq, IRQF_SHARED, in axienet_init_legacy_dma()
1556 free_irq(lp->rx_irq, ndev); in axienet_init_legacy_dma()
1558 free_irq(lp->tx_irq, ndev); in axienet_init_legacy_dma()
1560 napi_disable(&lp->napi_tx); in axienet_init_legacy_dma()
1561 napi_disable(&lp->napi_rx); in axienet_init_legacy_dma()
1562 cancel_work_sync(&lp->dma_err_task); in axienet_init_legacy_dma()
1563 dev_err(lp->dev, "request_irq() failed\n"); in axienet_init_legacy_dma()
1583 struct axienet_local *lp = netdev_priv(ndev); in axienet_open() local
1589 axienet_lock_mii(lp); in axienet_open()
1591 axienet_unlock_mii(lp); in axienet_open()
1593 ret = phylink_of_phy_connect(lp->phylink, lp->dev->of_node, 0); in axienet_open()
1595 dev_err(lp->dev, "phylink_of_phy_connect() failed: %d\n", ret); in axienet_open()
1599 phylink_start(lp->phylink); in axienet_open()
1602 schedule_delayed_work(&lp->stats_work, 0); in axienet_open()
1604 if (lp->use_dmaengine) { in axienet_open()
1606 if (lp->eth_irq > 0) { in axienet_open()
1607 ret = request_irq(lp->eth_irq, axienet_eth_irq, IRQF_SHARED, in axienet_open()
1625 if (lp->eth_irq > 0) in axienet_open()
1626 free_irq(lp->eth_irq, ndev); in axienet_open()
1628 cancel_delayed_work_sync(&lp->stats_work); in axienet_open()
1629 phylink_stop(lp->phylink); in axienet_open()
1630 phylink_disconnect_phy(lp->phylink); in axienet_open()
1646 struct axienet_local *lp = netdev_priv(ndev); in axienet_stop() local
1649 if (!lp->use_dmaengine) { in axienet_stop()
1650 WRITE_ONCE(lp->stopping, true); in axienet_stop()
1651 flush_work(&lp->dma_err_task); in axienet_stop()
1653 napi_disable(&lp->napi_tx); in axienet_stop()
1654 napi_disable(&lp->napi_rx); in axienet_stop()
1657 cancel_delayed_work_sync(&lp->stats_work); in axienet_stop()
1659 phylink_stop(lp->phylink); in axienet_stop()
1660 phylink_disconnect_phy(lp->phylink); in axienet_stop()
1662 axienet_setoptions(ndev, lp->options & in axienet_stop()
1665 if (!lp->use_dmaengine) { in axienet_stop()
1666 axienet_dma_stop(lp); in axienet_stop()
1667 cancel_work_sync(&lp->dma_err_task); in axienet_stop()
1668 free_irq(lp->tx_irq, ndev); in axienet_stop()
1669 free_irq(lp->rx_irq, ndev); in axienet_stop()
1672 dmaengine_terminate_sync(lp->tx_chan); in axienet_stop()
1673 dmaengine_synchronize(lp->tx_chan); in axienet_stop()
1674 dmaengine_terminate_sync(lp->rx_chan); in axienet_stop()
1675 dmaengine_synchronize(lp->rx_chan); in axienet_stop()
1678 kfree(lp->tx_skb_ring[i]); in axienet_stop()
1679 kfree(lp->tx_skb_ring); in axienet_stop()
1681 kfree(lp->rx_skb_ring[i]); in axienet_stop()
1682 kfree(lp->rx_skb_ring); in axienet_stop()
1684 dma_release_channel(lp->rx_chan); in axienet_stop()
1685 dma_release_channel(lp->tx_chan); in axienet_stop()
1688 axienet_iow(lp, XAE_IE_OFFSET, 0); in axienet_stop()
1690 if (lp->eth_irq > 0) in axienet_stop()
1691 free_irq(lp->eth_irq, ndev); in axienet_stop()
1708 struct axienet_local *lp = netdev_priv(ndev); in axienet_change_mtu() local
1714 XAE_TRL_SIZE) > lp->rxmem) in axienet_change_mtu()
1732 struct axienet_local *lp = netdev_priv(ndev); in axienet_poll_controller() local
1734 disable_irq(lp->tx_irq); in axienet_poll_controller()
1735 disable_irq(lp->rx_irq); in axienet_poll_controller()
1736 axienet_rx_irq(lp->tx_irq, ndev); in axienet_poll_controller()
1737 axienet_tx_irq(lp->rx_irq, ndev); in axienet_poll_controller()
1738 enable_irq(lp->tx_irq); in axienet_poll_controller()
1739 enable_irq(lp->rx_irq); in axienet_poll_controller()
1745 struct axienet_local *lp = netdev_priv(dev); in axienet_ioctl() local
1750 return phylink_mii_ioctl(lp->phylink, rq, cmd); in axienet_ioctl()
1756 struct axienet_local *lp = netdev_priv(dev); in axienet_get_stats64() local
1762 start = u64_stats_fetch_begin(&lp->rx_stat_sync); in axienet_get_stats64()
1763 stats->rx_packets = u64_stats_read(&lp->rx_packets); in axienet_get_stats64()
1764 stats->rx_bytes = u64_stats_read(&lp->rx_bytes); in axienet_get_stats64()
1765 } while (u64_stats_fetch_retry(&lp->rx_stat_sync, start)); in axienet_get_stats64()
1768 start = u64_stats_fetch_begin(&lp->tx_stat_sync); in axienet_get_stats64()
1769 stats->tx_packets = u64_stats_read(&lp->tx_packets); in axienet_get_stats64()
1770 stats->tx_bytes = u64_stats_read(&lp->tx_bytes); in axienet_get_stats64()
1771 } while (u64_stats_fetch_retry(&lp->tx_stat_sync, start)); in axienet_get_stats64()
1773 if (!(lp->features & XAE_FEATURE_STATS)) in axienet_get_stats64()
1777 start = read_seqcount_begin(&lp->hw_stats_seqcount); in axienet_get_stats64()
1779 axienet_stat(lp, STAT_RX_LENGTH_ERRORS); in axienet_get_stats64()
1780 stats->rx_crc_errors = axienet_stat(lp, STAT_RX_FCS_ERRORS); in axienet_get_stats64()
1782 axienet_stat(lp, STAT_RX_ALIGNMENT_ERRORS); in axienet_get_stats64()
1783 stats->rx_errors = axienet_stat(lp, STAT_UNDERSIZE_FRAMES) + in axienet_get_stats64()
1784 axienet_stat(lp, STAT_FRAGMENT_FRAMES) + in axienet_get_stats64()
1788 stats->multicast = axienet_stat(lp, STAT_RX_MULTICAST_FRAMES); in axienet_get_stats64()
1791 axienet_stat(lp, STAT_TX_EXCESS_COLLISIONS); in axienet_get_stats64()
1793 axienet_stat(lp, STAT_TX_UNDERRUN_ERRORS); in axienet_get_stats64()
1795 axienet_stat(lp, STAT_TX_LATE_COLLISIONS); in axienet_get_stats64()
1796 stats->tx_errors = axienet_stat(lp, STAT_TX_EXCESS_DEFERRAL) + in axienet_get_stats64()
1800 } while (read_seqcount_retry(&lp->hw_stats_seqcount, start)); in axienet_get_stats64()
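Every hardware-stat reader above uses the same snapshot idiom: lp->hw_stats_seqcount is a seqcount_mutex_t (initialized at line 2614 below), so the refresh worker updates under lp->stats_lock while readers spin on the sequence number instead of sleeping on the mutex. The two halves, side by side as a sketch:

    /* writer: serialized by the mutex, seqcount brackets the update */
    mutex_lock(&lp->stats_lock);
    write_seqcount_begin(&lp->hw_stats_seqcount);
    /* ... fold hardware counters into lp->hw_stat_base[] ... */
    write_seqcount_end(&lp->hw_stats_seqcount);
    mutex_unlock(&lp->stats_lock);

    /* reader: lockless; retry if a writer interleaved */
    do {
            start = read_seqcount_begin(&lp->hw_stats_seqcount);
            crc_errors = axienet_stat(lp, STAT_RX_FCS_ERRORS);
    } while (read_seqcount_retry(&lp->hw_stats_seqcount, start));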
1875 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_regs() local
1881 data[0] = axienet_ior(lp, XAE_RAF_OFFSET); in axienet_ethtools_get_regs()
1882 data[1] = axienet_ior(lp, XAE_TPF_OFFSET); in axienet_ethtools_get_regs()
1883 data[2] = axienet_ior(lp, XAE_IFGP_OFFSET); in axienet_ethtools_get_regs()
1884 data[3] = axienet_ior(lp, XAE_IS_OFFSET); in axienet_ethtools_get_regs()
1885 data[4] = axienet_ior(lp, XAE_IP_OFFSET); in axienet_ethtools_get_regs()
1886 data[5] = axienet_ior(lp, XAE_IE_OFFSET); in axienet_ethtools_get_regs()
1887 data[6] = axienet_ior(lp, XAE_TTAG_OFFSET); in axienet_ethtools_get_regs()
1888 data[7] = axienet_ior(lp, XAE_RTAG_OFFSET); in axienet_ethtools_get_regs()
1889 data[8] = axienet_ior(lp, XAE_UAWL_OFFSET); in axienet_ethtools_get_regs()
1890 data[9] = axienet_ior(lp, XAE_UAWU_OFFSET); in axienet_ethtools_get_regs()
1891 data[10] = axienet_ior(lp, XAE_TPID0_OFFSET); in axienet_ethtools_get_regs()
1892 data[11] = axienet_ior(lp, XAE_TPID1_OFFSET); in axienet_ethtools_get_regs()
1893 data[12] = axienet_ior(lp, XAE_PPST_OFFSET); in axienet_ethtools_get_regs()
1894 data[13] = axienet_ior(lp, XAE_RCW0_OFFSET); in axienet_ethtools_get_regs()
1895 data[14] = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_ethtools_get_regs()
1896 data[15] = axienet_ior(lp, XAE_TC_OFFSET); in axienet_ethtools_get_regs()
1897 data[16] = axienet_ior(lp, XAE_FCC_OFFSET); in axienet_ethtools_get_regs()
1898 data[17] = axienet_ior(lp, XAE_EMMC_OFFSET); in axienet_ethtools_get_regs()
1899 data[18] = axienet_ior(lp, XAE_PHYC_OFFSET); in axienet_ethtools_get_regs()
1900 data[19] = axienet_ior(lp, XAE_MDIO_MC_OFFSET); in axienet_ethtools_get_regs()
1901 data[20] = axienet_ior(lp, XAE_MDIO_MCR_OFFSET); in axienet_ethtools_get_regs()
1902 data[21] = axienet_ior(lp, XAE_MDIO_MWD_OFFSET); in axienet_ethtools_get_regs()
1903 data[22] = axienet_ior(lp, XAE_MDIO_MRD_OFFSET); in axienet_ethtools_get_regs()
1904 data[27] = axienet_ior(lp, XAE_UAW0_OFFSET); in axienet_ethtools_get_regs()
1905 data[28] = axienet_ior(lp, XAE_UAW1_OFFSET); in axienet_ethtools_get_regs()
1906 data[29] = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_ethtools_get_regs()
1907 data[30] = axienet_ior(lp, XAE_AF0_OFFSET); in axienet_ethtools_get_regs()
1908 data[31] = axienet_ior(lp, XAE_AF1_OFFSET); in axienet_ethtools_get_regs()
1909 if (!lp->use_dmaengine) { in axienet_ethtools_get_regs()
1910 data[32] = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_ethtools_get_regs()
1911 data[33] = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_ethtools_get_regs()
1912 data[34] = axienet_dma_in32(lp, XAXIDMA_TX_CDESC_OFFSET); in axienet_ethtools_get_regs()
1913 data[35] = axienet_dma_in32(lp, XAXIDMA_TX_TDESC_OFFSET); in axienet_ethtools_get_regs()
1914 data[36] = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_ethtools_get_regs()
1915 data[37] = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_ethtools_get_regs()
1916 data[38] = axienet_dma_in32(lp, XAXIDMA_RX_CDESC_OFFSET); in axienet_ethtools_get_regs()
1917 data[39] = axienet_dma_in32(lp, XAXIDMA_RX_TDESC_OFFSET); in axienet_ethtools_get_regs()
1927 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_ringparam() local
1933 ering->rx_pending = lp->rx_bd_num; in axienet_ethtools_get_ringparam()
1936 ering->tx_pending = lp->tx_bd_num; in axienet_ethtools_get_ringparam()
1945 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_ringparam() local
1957 lp->rx_bd_num = ering->rx_pending; in axienet_ethtools_set_ringparam()
1958 lp->tx_bd_num = ering->tx_pending; in axienet_ethtools_set_ringparam()
1975 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_pauseparam() local
1977 phylink_ethtool_get_pauseparam(lp->phylink, epauseparm); in axienet_ethtools_get_pauseparam()
1996 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_pauseparam() local
1998 return phylink_ethtool_set_pauseparam(lp->phylink, epauseparm); in axienet_ethtools_set_pauseparam()
2020 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_coalesce() local
2022 ecoalesce->rx_max_coalesced_frames = lp->coalesce_count_rx; in axienet_ethtools_get_coalesce()
2023 ecoalesce->rx_coalesce_usecs = lp->coalesce_usec_rx; in axienet_ethtools_get_coalesce()
2024 ecoalesce->tx_max_coalesced_frames = lp->coalesce_count_tx; in axienet_ethtools_get_coalesce()
2025 ecoalesce->tx_coalesce_usecs = lp->coalesce_usec_tx; in axienet_ethtools_get_coalesce()
2048 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_coalesce() local
2077 lp->coalesce_count_rx = ecoalesce->rx_max_coalesced_frames; in axienet_ethtools_set_coalesce()
2078 lp->coalesce_usec_rx = ecoalesce->rx_coalesce_usecs; in axienet_ethtools_set_coalesce()
2079 lp->coalesce_count_tx = ecoalesce->tx_max_coalesced_frames; in axienet_ethtools_set_coalesce()
2080 lp->coalesce_usec_tx = ecoalesce->tx_coalesce_usecs; in axienet_ethtools_set_coalesce()
2089 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_link_ksettings() local
2091 return phylink_ethtool_ksettings_get(lp->phylink, cmd); in axienet_ethtools_get_link_ksettings()
2098 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_link_ksettings() local
2100 return phylink_ethtool_ksettings_set(lp->phylink, cmd); in axienet_ethtools_set_link_ksettings()
2105 struct axienet_local *lp = netdev_priv(dev); in axienet_ethtools_nway_reset() local
2107 return phylink_ethtool_nway_reset(lp->phylink); in axienet_ethtools_nway_reset()
2114 struct axienet_local *lp = netdev_priv(dev); in axienet_ethtools_get_ethtool_stats() local
2118 start = read_seqcount_begin(&lp->hw_stats_seqcount); in axienet_ethtools_get_ethtool_stats()
2119 data[0] = axienet_stat(lp, STAT_RX_BYTES); in axienet_ethtools_get_ethtool_stats()
2120 data[1] = axienet_stat(lp, STAT_TX_BYTES); in axienet_ethtools_get_ethtool_stats()
2121 data[2] = axienet_stat(lp, STAT_RX_VLAN_FRAMES); in axienet_ethtools_get_ethtool_stats()
2122 data[3] = axienet_stat(lp, STAT_TX_VLAN_FRAMES); in axienet_ethtools_get_ethtool_stats()
2123 data[6] = axienet_stat(lp, STAT_TX_PFC_FRAMES); in axienet_ethtools_get_ethtool_stats()
2124 data[7] = axienet_stat(lp, STAT_RX_PFC_FRAMES); in axienet_ethtools_get_ethtool_stats()
2125 data[8] = axienet_stat(lp, STAT_USER_DEFINED0); in axienet_ethtools_get_ethtool_stats()
2126 data[9] = axienet_stat(lp, STAT_USER_DEFINED1); in axienet_ethtools_get_ethtool_stats()
2127 data[10] = axienet_stat(lp, STAT_USER_DEFINED2); in axienet_ethtools_get_ethtool_stats()
2128 } while (read_seqcount_retry(&lp->hw_stats_seqcount, start)); in axienet_ethtools_get_ethtool_stats()
2155 struct axienet_local *lp = netdev_priv(dev); in axienet_ethtools_get_sset_count() local
2159 if (lp->features & XAE_FEATURE_STATS) in axienet_ethtools_get_sset_count()
2171 struct axienet_local *lp = netdev_priv(dev); in axienet_ethtools_get_pause_stats() local
2174 if (!(lp->features & XAE_FEATURE_STATS)) in axienet_ethtools_get_pause_stats()
2178 start = read_seqcount_begin(&lp->hw_stats_seqcount); in axienet_ethtools_get_pause_stats()
2180 axienet_stat(lp, STAT_TX_PAUSE_FRAMES); in axienet_ethtools_get_pause_stats()
2182 axienet_stat(lp, STAT_RX_PAUSE_FRAMES); in axienet_ethtools_get_pause_stats()
2183 } while (read_seqcount_retry(&lp->hw_stats_seqcount, start)); in axienet_ethtools_get_pause_stats()
2190 struct axienet_local *lp = netdev_priv(dev); in axienet_ethtool_get_eth_mac_stats() local
2193 if (!(lp->features & XAE_FEATURE_STATS)) in axienet_ethtool_get_eth_mac_stats()
2197 start = read_seqcount_begin(&lp->hw_stats_seqcount); in axienet_ethtool_get_eth_mac_stats()
2199 axienet_stat(lp, STAT_TX_GOOD_FRAMES); in axienet_ethtool_get_eth_mac_stats()
2201 axienet_stat(lp, STAT_TX_SINGLE_COLLISION_FRAMES); in axienet_ethtool_get_eth_mac_stats()
2203 axienet_stat(lp, STAT_TX_MULTIPLE_COLLISION_FRAMES); in axienet_ethtool_get_eth_mac_stats()
2205 axienet_stat(lp, STAT_RX_GOOD_FRAMES); in axienet_ethtool_get_eth_mac_stats()
2207 axienet_stat(lp, STAT_RX_FCS_ERRORS); in axienet_ethtool_get_eth_mac_stats()
2209 axienet_stat(lp, STAT_RX_ALIGNMENT_ERRORS); in axienet_ethtool_get_eth_mac_stats()
2211 axienet_stat(lp, STAT_TX_DEFERRED_FRAMES); in axienet_ethtool_get_eth_mac_stats()
2213 axienet_stat(lp, STAT_TX_LATE_COLLISIONS); in axienet_ethtool_get_eth_mac_stats()
2215 axienet_stat(lp, STAT_TX_EXCESS_COLLISIONS); in axienet_ethtool_get_eth_mac_stats()
2217 axienet_stat(lp, STAT_TX_MULTICAST_FRAMES); in axienet_ethtool_get_eth_mac_stats()
2219 axienet_stat(lp, STAT_TX_BROADCAST_FRAMES); in axienet_ethtool_get_eth_mac_stats()
2221 axienet_stat(lp, STAT_TX_EXCESS_DEFERRAL); in axienet_ethtool_get_eth_mac_stats()
2223 axienet_stat(lp, STAT_RX_MULTICAST_FRAMES); in axienet_ethtool_get_eth_mac_stats()
2225 axienet_stat(lp, STAT_RX_BROADCAST_FRAMES); in axienet_ethtool_get_eth_mac_stats()
2227 axienet_stat(lp, STAT_RX_LENGTH_ERRORS); in axienet_ethtool_get_eth_mac_stats()
2228 } while (read_seqcount_retry(&lp->hw_stats_seqcount, start)); in axienet_ethtool_get_eth_mac_stats()
2235 struct axienet_local *lp = netdev_priv(dev); in axienet_ethtool_get_eth_ctrl_stats() local
2238 if (!(lp->features & XAE_FEATURE_STATS)) in axienet_ethtool_get_eth_ctrl_stats()
2242 start = read_seqcount_begin(&lp->hw_stats_seqcount); in axienet_ethtool_get_eth_ctrl_stats()
2244 axienet_stat(lp, STAT_TX_CONTROL_FRAMES); in axienet_ethtool_get_eth_ctrl_stats()
2246 axienet_stat(lp, STAT_RX_CONTROL_FRAMES); in axienet_ethtool_get_eth_ctrl_stats()
2248 axienet_stat(lp, STAT_RX_CONTROL_OPCODE_ERRORS); in axienet_ethtool_get_eth_ctrl_stats()
2249 } while (read_seqcount_retry(&lp->hw_stats_seqcount, start)); in axienet_ethtool_get_eth_ctrl_stats()
2268 struct axienet_local *lp = netdev_priv(dev); in axienet_ethtool_get_rmon_stats() local
2271 if (!(lp->features & XAE_FEATURE_STATS)) in axienet_ethtool_get_rmon_stats()
2275 start = read_seqcount_begin(&lp->hw_stats_seqcount); in axienet_ethtool_get_rmon_stats()
2277 axienet_stat(lp, STAT_UNDERSIZE_FRAMES); in axienet_ethtool_get_rmon_stats()
2279 axienet_stat(lp, STAT_RX_OVERSIZE_FRAMES); in axienet_ethtool_get_rmon_stats()
2281 axienet_stat(lp, STAT_FRAGMENT_FRAMES); in axienet_ethtool_get_rmon_stats()
2284 axienet_stat(lp, STAT_RX_64_BYTE_FRAMES); in axienet_ethtool_get_rmon_stats()
2286 axienet_stat(lp, STAT_RX_65_127_BYTE_FRAMES); in axienet_ethtool_get_rmon_stats()
2288 axienet_stat(lp, STAT_RX_128_255_BYTE_FRAMES); in axienet_ethtool_get_rmon_stats()
2290 axienet_stat(lp, STAT_RX_256_511_BYTE_FRAMES); in axienet_ethtool_get_rmon_stats()
2292 axienet_stat(lp, STAT_RX_512_1023_BYTE_FRAMES); in axienet_ethtool_get_rmon_stats()
2294 axienet_stat(lp, STAT_RX_1024_MAX_BYTE_FRAMES); in axienet_ethtool_get_rmon_stats()
2299 axienet_stat(lp, STAT_TX_64_BYTE_FRAMES); in axienet_ethtool_get_rmon_stats()
2301 axienet_stat(lp, STAT_TX_65_127_BYTE_FRAMES); in axienet_ethtool_get_rmon_stats()
2303 axienet_stat(lp, STAT_TX_128_255_BYTE_FRAMES); in axienet_ethtool_get_rmon_stats()
2305 axienet_stat(lp, STAT_TX_256_511_BYTE_FRAMES); in axienet_ethtool_get_rmon_stats()
2307 axienet_stat(lp, STAT_TX_512_1023_BYTE_FRAMES); in axienet_ethtool_get_rmon_stats()
2309 axienet_stat(lp, STAT_TX_1024_MAX_BYTE_FRAMES); in axienet_ethtool_get_rmon_stats()
2311 axienet_stat(lp, STAT_TX_OVERSIZE_FRAMES); in axienet_ethtool_get_rmon_stats()
2312 } while (read_seqcount_retry(&lp->hw_stats_seqcount, start)); in axienet_ethtool_get_rmon_stats()
2370 struct axienet_local *lp = netdev_priv(ndev); in axienet_pcs_config() local
2373 if (lp->switch_x_sgmii) { in axienet_pcs_config()
2403 struct axienet_local *lp = netdev_priv(ndev); in axienet_mac_select_pcs() local
2407 return &lp->pcs; in axienet_mac_select_pcs()
2432 struct axienet_local *lp = netdev_priv(ndev); in axienet_mac_link_up() local
2435 emmc_reg = axienet_ior(lp, XAE_EMMC_OFFSET); in axienet_mac_link_up()
2454 axienet_iow(lp, XAE_EMMC_OFFSET, emmc_reg); in axienet_mac_link_up()
2456 fcc_reg = axienet_ior(lp, XAE_FCC_OFFSET); in axienet_mac_link_up()
2465 axienet_iow(lp, XAE_FCC_OFFSET, fcc_reg); in axienet_mac_link_up()
2487 struct axienet_local *lp = container_of(work, struct axienet_local, in axienet_dma_err_handler() local
2489 struct net_device *ndev = lp->ndev; in axienet_dma_err_handler()
2492 if (READ_ONCE(lp->stopping)) in axienet_dma_err_handler()
2495 napi_disable(&lp->napi_tx); in axienet_dma_err_handler()
2496 napi_disable(&lp->napi_rx); in axienet_dma_err_handler()
2498 axienet_setoptions(ndev, lp->options & in axienet_dma_err_handler()
2501 axienet_dma_stop(lp); in axienet_dma_err_handler()
2503 for (i = 0; i < lp->tx_bd_num; i++) { in axienet_dma_err_handler()
2504 cur_p = &lp->tx_bd_v[i]; in axienet_dma_err_handler()
2506 dma_addr_t addr = desc_get_phys_addr(lp, cur_p); in axienet_dma_err_handler()
2508 dma_unmap_single(lp->dev, addr, in axienet_dma_err_handler()
2527 for (i = 0; i < lp->rx_bd_num; i++) { in axienet_dma_err_handler()
2528 cur_p = &lp->rx_bd_v[i]; in axienet_dma_err_handler()
2537 lp->tx_bd_ci = 0; in axienet_dma_err_handler()
2538 lp->tx_bd_tail = 0; in axienet_dma_err_handler()
2539 lp->rx_bd_ci = 0; in axienet_dma_err_handler()
2541 axienet_dma_start(lp); in axienet_dma_err_handler()
2543 axienet_status = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_dma_err_handler()
2545 axienet_iow(lp, XAE_RCW1_OFFSET, axienet_status); in axienet_dma_err_handler()
2547 axienet_status = axienet_ior(lp, XAE_IP_OFFSET); in axienet_dma_err_handler()
2549 axienet_iow(lp, XAE_IS_OFFSET, XAE_INT_RXRJECT_MASK); in axienet_dma_err_handler()
2550 axienet_iow(lp, XAE_IE_OFFSET, lp->eth_irq > 0 ? in axienet_dma_err_handler()
2552 axienet_iow(lp, XAE_FCC_OFFSET, XAE_FCC_FCRX_MASK); in axienet_dma_err_handler()
2557 axienet_setoptions(ndev, lp->options & in axienet_dma_err_handler()
2561 napi_enable(&lp->napi_rx); in axienet_dma_err_handler()
2562 napi_enable(&lp->napi_tx); in axienet_dma_err_handler()
2563 axienet_setoptions(ndev, lp->options); in axienet_dma_err_handler()
2582 struct axienet_local *lp; in axienet_probe() local
2589 ndev = alloc_etherdev(sizeof(*lp)); in axienet_probe()
2603 lp = netdev_priv(ndev); in axienet_probe()
2604 lp->ndev = ndev; in axienet_probe()
2605 lp->dev = &pdev->dev; in axienet_probe()
2606 lp->options = XAE_OPTION_DEFAULTS; in axienet_probe()
2607 lp->rx_bd_num = RX_BD_NUM_DEFAULT; in axienet_probe()
2608 lp->tx_bd_num = TX_BD_NUM_DEFAULT; in axienet_probe()
2610 u64_stats_init(&lp->rx_stat_sync); in axienet_probe()
2611 u64_stats_init(&lp->tx_stat_sync); in axienet_probe()
2613 mutex_init(&lp->stats_lock); in axienet_probe()
2614 seqcount_mutex_init(&lp->hw_stats_seqcount, &lp->stats_lock); in axienet_probe()
2615 INIT_DEFERRABLE_WORK(&lp->stats_work, axienet_refresh_stats); in axienet_probe()
2617 lp->axi_clk = devm_clk_get_optional(&pdev->dev, "s_axi_lite_clk"); in axienet_probe()
2618 if (!lp->axi_clk) { in axienet_probe()
2622 lp->axi_clk = devm_clk_get_optional(&pdev->dev, NULL); in axienet_probe()
2624 if (IS_ERR(lp->axi_clk)) { in axienet_probe()
2625 ret = PTR_ERR(lp->axi_clk); in axienet_probe()
2628 ret = clk_prepare_enable(lp->axi_clk); in axienet_probe()
2634 lp->misc_clks[0].id = "axis_clk"; in axienet_probe()
2635 lp->misc_clks[1].id = "ref_clk"; in axienet_probe()
2636 lp->misc_clks[2].id = "mgt_clk"; in axienet_probe()
2638 ret = devm_clk_bulk_get_optional(&pdev->dev, XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_probe()
2642 ret = clk_bulk_prepare_enable(XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_probe()
2647 lp->regs = devm_platform_get_and_ioremap_resource(pdev, 0, ðres); in axienet_probe()
2648 if (IS_ERR(lp->regs)) { in axienet_probe()
2649 ret = PTR_ERR(lp->regs); in axienet_probe()
2652 lp->regs_start = ethres->start; in axienet_probe()
2655 lp->features = 0; in axienet_probe()
2657 if (axienet_ior(lp, XAE_ABILITY_OFFSET) & XAE_ABILITY_STATS) in axienet_probe()
2658 lp->features |= XAE_FEATURE_STATS; in axienet_probe()
2664 lp->features |= XAE_FEATURE_PARTIAL_TX_CSUM; in axienet_probe()
2669 lp->features |= XAE_FEATURE_FULL_TX_CSUM; in axienet_probe()
2679 lp->features |= XAE_FEATURE_PARTIAL_RX_CSUM; in axienet_probe()
2683 lp->features |= XAE_FEATURE_FULL_RX_CSUM; in axienet_probe()
2694 of_property_read_u32(pdev->dev.of_node, "xlnx,rxmem", &lp->rxmem); in axienet_probe()
2696 lp->switch_x_sgmii = of_property_read_bool(pdev->dev.of_node, in axienet_probe()
2705 lp->phy_mode = PHY_INTERFACE_MODE_MII; in axienet_probe()
2708 lp->phy_mode = PHY_INTERFACE_MODE_GMII; in axienet_probe()
2711 lp->phy_mode = PHY_INTERFACE_MODE_RGMII_ID; in axienet_probe()
2714 lp->phy_mode = PHY_INTERFACE_MODE_SGMII; in axienet_probe()
2717 lp->phy_mode = PHY_INTERFACE_MODE_1000BASEX; in axienet_probe()
2724 ret = of_get_phy_mode(pdev->dev.of_node, &lp->phy_mode); in axienet_probe()
2728 if (lp->switch_x_sgmii && lp->phy_mode != PHY_INTERFACE_MODE_SGMII && in axienet_probe()
2729 lp->phy_mode != PHY_INTERFACE_MODE_1000BASEX) { in axienet_probe()
2749 lp->dma_regs = devm_ioremap_resource(&pdev->dev, in axienet_probe()
2751 lp->rx_irq = irq_of_parse_and_map(np, 1); in axienet_probe()
2752 lp->tx_irq = irq_of_parse_and_map(np, 0); in axienet_probe()
2754 lp->eth_irq = platform_get_irq_optional(pdev, 0); in axienet_probe()
2757 lp->dma_regs = devm_platform_get_and_ioremap_resource(pdev, 1, NULL); in axienet_probe()
2758 lp->rx_irq = platform_get_irq(pdev, 1); in axienet_probe()
2759 lp->tx_irq = platform_get_irq(pdev, 0); in axienet_probe()
2760 lp->eth_irq = platform_get_irq_optional(pdev, 2); in axienet_probe()
2762 if (IS_ERR(lp->dma_regs)) { in axienet_probe()
2764 ret = PTR_ERR(lp->dma_regs); in axienet_probe()
2767 if (lp->rx_irq <= 0 || lp->tx_irq <= 0) { in axienet_probe()
2774 ret = __axienet_device_reset(lp); in axienet_probe()
2786 if ((axienet_ior(lp, XAE_ID_OFFSET) >> 24) >= 0x9) { in axienet_probe()
2787 void __iomem *desc = lp->dma_regs + XAXIDMA_TX_CDESC_OFFSET + 4; in axienet_probe()
2793 lp->features |= XAE_FEATURE_DMA_64BIT; in axienet_probe()
2801 if (!IS_ENABLED(CONFIG_64BIT) && lp->features & XAE_FEATURE_DMA_64BIT) { in axienet_probe()
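Lines 2786-2801 autodetect 64-bit descriptor support: on IP revision 9 or later the probe pokes the upper half of the TX current-descriptor register; a 32-bit core reads the write back as zero, while a 64-bit core latches it. A hedged reconstruction of that probe:

    void __iomem *msb = lp->dma_regs + XAXIDMA_TX_CDESC_OFFSET + 4;

    iowrite32(0x0, msb);
    if (ioread32(msb) == 0) {               /* register exists and clears */
            iowrite32(0xffffffff, msb);
            if (ioread32(msb) != 0)         /* upper word is real */
                    lp->features |= XAE_FEATURE_DMA_64BIT;
            iowrite32(0x0, msb);            /* leave it clean */
    }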
2812 netif_napi_add(ndev, &lp->napi_rx, axienet_rx_poll); in axienet_probe()
2813 netif_napi_add(ndev, &lp->napi_tx, axienet_tx_poll); in axienet_probe()
2818 lp->eth_irq = platform_get_irq_optional(pdev, 0); in axienet_probe()
2819 if (lp->eth_irq < 0 && lp->eth_irq != -ENXIO) { in axienet_probe()
2820 ret = lp->eth_irq; in axienet_probe()
2823 tx_chan = dma_request_chan(lp->dev, "tx_chan0"); in axienet_probe()
2826 dev_err_probe(lp->dev, ret, "No Ethernet DMA (TX) channel found\n"); in axienet_probe()
2840 lp->use_dmaengine = 1; in axienet_probe()
2843 if (lp->use_dmaengine) in axienet_probe()
2848 if (lp->eth_irq <= 0) in axienet_probe()
2861 lp->coalesce_count_rx = XAXIDMA_DFT_RX_THRESHOLD; in axienet_probe()
2862 lp->coalesce_count_tx = XAXIDMA_DFT_TX_THRESHOLD; in axienet_probe()
2863 lp->coalesce_usec_rx = XAXIDMA_DFT_RX_USEC; in axienet_probe()
2864 lp->coalesce_usec_tx = XAXIDMA_DFT_TX_USEC; in axienet_probe()
2866 ret = axienet_mdio_setup(lp); in axienet_probe()
2871 if (lp->phy_mode == PHY_INTERFACE_MODE_SGMII || in axienet_probe()
2872 lp->phy_mode == PHY_INTERFACE_MODE_1000BASEX) { in axienet_probe()
2886 lp->pcs_phy = of_mdio_find_device(np); in axienet_probe()
2887 if (!lp->pcs_phy) { in axienet_probe()
2893 lp->pcs.ops = &axienet_pcs_ops; in axienet_probe()
2894 lp->pcs.neg_mode = true; in axienet_probe()
2895 lp->pcs.poll = true; in axienet_probe()
2898 lp->phylink_config.dev = &ndev->dev; in axienet_probe()
2899 lp->phylink_config.type = PHYLINK_NETDEV; in axienet_probe()
2900 lp->phylink_config.mac_managed_pm = true; in axienet_probe()
2901 lp->phylink_config.mac_capabilities = MAC_SYM_PAUSE | MAC_ASYM_PAUSE | in axienet_probe()
2904 __set_bit(lp->phy_mode, lp->phylink_config.supported_interfaces); in axienet_probe()
2905 if (lp->switch_x_sgmii) { in axienet_probe()
2907 lp->phylink_config.supported_interfaces); in axienet_probe()
2909 lp->phylink_config.supported_interfaces); in axienet_probe()
2912 lp->phylink = phylink_create(&lp->phylink_config, pdev->dev.fwnode, in axienet_probe()
2913 lp->phy_mode, in axienet_probe()
2915 if (IS_ERR(lp->phylink)) { in axienet_probe()
2916 ret = PTR_ERR(lp->phylink); in axienet_probe()
2921 ret = register_netdev(lp->ndev); in axienet_probe()
2923 dev_err(lp->dev, "register_netdev() error (%i)\n", ret); in axienet_probe()
2930 phylink_destroy(lp->phylink); in axienet_probe()
2933 if (lp->pcs_phy) in axienet_probe()
2934 put_device(&lp->pcs_phy->dev); in axienet_probe()
2935 if (lp->mii_bus) in axienet_probe()
2936 axienet_mdio_teardown(lp); in axienet_probe()
2938 clk_bulk_disable_unprepare(XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_probe()
2939 clk_disable_unprepare(lp->axi_clk); in axienet_probe()
2950 struct axienet_local *lp = netdev_priv(ndev); in axienet_remove() local
2954 if (lp->phylink) in axienet_remove()
2955 phylink_destroy(lp->phylink); in axienet_remove()
2957 if (lp->pcs_phy) in axienet_remove()
2958 put_device(&lp->pcs_phy->dev); in axienet_remove()
2960 axienet_mdio_teardown(lp); in axienet_remove()
2962 clk_bulk_disable_unprepare(XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_remove()
2963 clk_disable_unprepare(lp->axi_clk); in axienet_remove()