Lines matching "fdma" (full-identifier match), grouped by containing function:

sparx5_fdma_tx_dataptr_cb():
   24  static int sparx5_fdma_tx_dataptr_cb(struct fdma *fdma, int dcb, int db,   (argument)
   27          *dataptr = fdma->dma + (sizeof(struct fdma_dcb) * fdma->n_dcbs) +
   28                     ((dcb * fdma->n_dbs + db) * fdma->db_size);
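The arithmetic above implies one contiguous DMA region: the DCB array first,
then a flat [dcb][db] grid of data blocks of db_size bytes each. A minimal
standalone sketch of that offset calculation (the struct below is a reduced
stand-in for struct fdma, not the kernel definition):

#include <stddef.h>
#include <stdint.h>

struct layout {                 /* reduced stand-in for struct fdma */
        uint64_t dma;           /* DMA base of the coherent region */
        size_t dcb_size;        /* stands in for sizeof(struct fdma_dcb) */
        int n_dcbs, n_dbs;
        size_t db_size;
};

/* Mirrors lines 27-28: skip the DCB array, then index block (dcb, db). */
static uint64_t tx_dataptr(const struct layout *l, int dcb, int db)
{
        return l->dma + l->dcb_size * l->n_dcbs +
               (uint64_t)(dcb * l->n_dbs + db) * l->db_size;
}

With purely hypothetical values (dcb_size = 64, n_dcbs = 64, n_dbs = 1,
db_size = 4096), block (1, 0) lands at dma + 4096 + 4096 = dma + 8192.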
sparx5_fdma_rx_dataptr_cb():
   33  static int sparx5_fdma_rx_dataptr_cb(struct fdma *fdma, int dcb, int db,   (argument)
   36          struct sparx5 *sparx5 = fdma->priv;
   40          skb = __netdev_alloc_skb(rx->ndev, fdma->db_size, GFP_ATOMIC);
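Only fragments of the RX callback are visible, but they suggest the dataptr
callback contract: allocate backing storage of db_size bytes for slot
(dcb, db), remember it (rx->skb[dcb][db] is looked up again on line 151),
and report its DMA-visible address through *dataptr. A toy model of that
contract, with malloc() and a hypothetical buf_dma_addr() standing in for
__netdev_alloc_skb() and the real DMA mapping:

#include <stdint.h>
#include <stdlib.h>

#define N_DCBS  4
#define N_DBS   1
#define DB_SIZE 4096

static void *slot[N_DCBS][N_DBS];       /* plays the role of rx->skb[][] */

static uint64_t buf_dma_addr(void *buf) /* hypothetical mapping helper */
{
        return (uint64_t)(uintptr_t)buf;
}

/* Toy equivalent of an RX dataptr callback: allocate, remember, report. */
static int rx_dataptr_cb(int dcb, int db, uint64_t *dataptr)
{
        void *buf = malloc(DB_SIZE);

        if (!buf)
                return -1;              /* the driver would return -ENOMEM */
        slot[dcb][db] = buf;
        *dataptr = buf_dma_addr(buf);
        return 0;
}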
sparx5_fdma_rx_activate():
   53          struct fdma *fdma = &rx->fdma;   (local)
   56          spx5_wr(((u64)fdma->dma) & GENMASK(31, 0), sparx5,
   57                  FDMA_DCB_LLP(fdma->channel_id));
   58          spx5_wr(((u64)fdma->dma) >> 32, sparx5,
   59                  FDMA_DCB_LLP1(fdma->channel_id));
   62          spx5_wr(FDMA_CH_CFG_CH_DCB_DB_CNT_SET(fdma->n_dbs) |
   65                  sparx5, FDMA_CH_CFG(fdma->channel_id));
   72          /* Start RX fdma */
   77          spx5_rmw(BIT(fdma->channel_id),
   78                   BIT(fdma->channel_id) & FDMA_INTR_DB_ENA_INTR_DB_ENA,
   82          spx5_wr(BIT(fdma->channel_id), sparx5, FDMA_CH_ACTIVATE);
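The visible bring-up order is: program the DCB list pointer (lines 56-59),
configure the channel's DB count (lines 62-65), enable the DB interrupt for
the channel (lines 77-78), and finally activate the channel (line 82). Lines
56-59 split the 64-bit list pointer across two 32-bit registers: the low
word goes to FDMA_DCB_LLP, the high word to FDMA_DCB_LLP1. A small
standalone illustration of that split (not kernel code):

#include <stdint.h>
#include <stdio.h>

static void split_llp(uint64_t dma, uint32_t *llp, uint32_t *llp1)
{
        *llp  = (uint32_t)(dma & 0xffffffffull);        /* GENMASK(31, 0) */
        *llp1 = (uint32_t)(dma >> 32);
}

int main(void)
{
        uint32_t lo, hi;

        split_llp(0x0000000123456000ull, &lo, &hi);
        printf("LLP=0x%08x LLP1=0x%08x\n",              /* 0x23456000, 0x00000001 */
               (unsigned)lo, (unsigned)hi);
        return 0;
}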
sparx5_fdma_rx_deactivate():
   87          struct fdma *fdma = &rx->fdma;   (local)
   90          spx5_rmw(0, BIT(fdma->channel_id) & FDMA_CH_ACTIVATE_CH_ACTIVATE,
   94          spx5_rmw(0, BIT(fdma->channel_id) & FDMA_INTR_DB_ENA_INTR_DB_ENA,
   97          /* Stop RX fdma */
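Both deactivate calls pass a value of 0 with the channel bit as the mask,
i.e. a read-modify-write that clears only that channel's bit. Assuming the
usual value-then-mask semantics (consistent with how spx5_rmw() is called on
lines 77-78 and 90-94), the register update reduces to:

#include <stdint.h>

/* Generic read-modify-write: only bits selected by mask take the new value. */
static uint32_t rmw(uint32_t old, uint32_t value, uint32_t mask)
{
        return (old & ~mask) | (value & mask);
}

/* Clearing one channel, as on lines 90/94: rmw(reg, 0, 1u << channel_id)   */
/* leaves every other channel's bit untouched.                              */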
sparx5_fdma_tx_activate():
  104          struct fdma *fdma = &tx->fdma;   (local)
  107          spx5_wr(((u64)fdma->dma) & GENMASK(31, 0), sparx5,
  108                  FDMA_DCB_LLP(fdma->channel_id));
  109          spx5_wr(((u64)fdma->dma) >> 32, sparx5,
  110                  FDMA_DCB_LLP1(fdma->channel_id));
  113          spx5_wr(FDMA_CH_CFG_CH_DCB_DB_CNT_SET(fdma->n_dbs) |
  116                  sparx5, FDMA_CH_CFG(fdma->channel_id));
  118          /* Start TX fdma */
  123          spx5_wr(BIT(fdma->channel_id), sparx5, FDMA_CH_ACTIVATE);
sparx5_fdma_tx_deactivate():
  129          spx5_rmw(0, BIT(tx->fdma.channel_id) & FDMA_CH_ACTIVATE_CH_ACTIVATE,
sparx5_fdma_reload():
  133  void sparx5_fdma_reload(struct sparx5 *sparx5, struct fdma *fdma)   (argument)
  136          spx5_wr(BIT(fdma->channel_id), sparx5, FDMA_CH_RELOAD);
sparx5_fdma_rx_get_frame():
  141          struct fdma *fdma = &rx->fdma;   (local)
  148          db_hw = fdma_db_next_get(fdma);
  151          skb = rx->skb[fdma->dcb_index][fdma->db_index];
sparx5_fdma_napi_callback():
  187          struct fdma *fdma = &rx->fdma;   (local)
  191          fdma_db_advance(fdma);
  194          if (fdma_dcb_is_reusable(fdma))
  196          fdma_dcb_add(fdma, fdma->dcb_index,
  197                       FDMA_DCB_INFO_DATAL(fdma->db_size),
  199          fdma_db_reset(fdma);
  200          fdma_dcb_advance(fdma);
  204          spx5_rmw(BIT(fdma->channel_id),
  205                   BIT(fdma->channel_id) & FDMA_INTR_DB_ENA_INTR_DB_ENA,
  209          sparx5_fdma_reload(sparx5, fdma);
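The poll-loop fragments sketch the RX recycle pattern: consume one data
block at a time; once a DCB's blocks are exhausted, re-arm that DCB with the
data-length info word, reset the block index, and step to the next DCB;
afterwards re-enable the DB interrupt (lines 204-205) and reload the channel
(line 209). A toy, index-only model of that bookkeeping, under the
assumption that a DCB becomes reusable once its last block has been
consumed:

struct ring {
        int dcb_index, db_index;
        int n_dcbs, n_dbs;
};

/* Toy model of the advance/recycle steps on lines 191-200. */
static int consume_one_block(struct ring *r)
{
        r->db_index++;                          /* fdma_db_advance()       */
        if (r->db_index < r->n_dbs)
                return 0;                       /* DCB not finished yet    */
        /* Here the driver re-adds the finished DCB (fdma_dcb_add()) so    */
        /* hardware can fill it again.                                     */
        r->db_index = 0;                        /* fdma_db_reset()         */
        r->dcb_index = (r->dcb_index + 1) % r->n_dcbs; /* fdma_dcb_advance() */
        return 1;                               /* one DCB handed back     */
}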
sparx5_fdma_xmit():
  217          struct fdma *fdma = &tx->fdma;   (local)
  220          fdma_dcb_advance(fdma);
  221          if (!fdma_db_is_done(fdma_db_get(fdma, fdma->dcb_index, 0)))
  225          virt_addr = ((u8 *)fdma->dcbs +
  226                       (sizeof(struct fdma_dcb) * fdma->n_dcbs) +
  227                       ((fdma->dcb_index * fdma->n_dbs) * fdma->db_size));
  232          fdma_dcb_add(fdma, fdma->dcb_index, 0,
  238          sparx5_fdma_reload(sparx5, fdma);
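Lines 225-227 compute the CPU-virtual address of the selected DCB's data
block inside the same coherent allocation: it is the virtual-side mirror of
the DMA-side arithmetic in sparx5_fdma_tx_dataptr_cb() (lines 27-28), with
fdma->dcbs as the base instead of fdma->dma and the data block fixed to 0.
A self-contained sketch of that calculation:

#include <stddef.h>
#include <stdint.h>

/* Mirrors lines 225-227: skip the DCB array, then take block 0 of DCB
 * dcb_index. dcb_size stands in for sizeof(struct fdma_dcb). */
static uint8_t *tx_buf_virt(uint8_t *dcbs, size_t dcb_size,
                            int n_dcbs, int n_dbs, size_t db_size,
                            int dcb_index)
{
        return dcbs + dcb_size * (size_t)n_dcbs +
               (size_t)dcb_index * (size_t)n_dbs * db_size;
}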
sparx5_fdma_rx_alloc():
  246          struct fdma *fdma = &rx->fdma;   (local)
  249          err = fdma_alloc_phys(fdma);
  253          fdma_dcbs_init(fdma, FDMA_DCB_INFO_DATAL(fdma->db_size),
sparx5_fdma_tx_alloc():
  262          struct fdma *fdma = &tx->fdma;   (local)
  265          err = fdma_alloc_phys(fdma);
  269          fdma_dcbs_init(fdma, FDMA_DCB_INFO_DATAL(fdma->db_size),
sparx5_fdma_rx_init():
  278          struct fdma *fdma = &rx->fdma;   (local)
  281          fdma->channel_id = channel;
  282          fdma->n_dcbs = FDMA_DCB_MAX;
  283          fdma->n_dbs = FDMA_RX_DCB_MAX_DBS;
  284          fdma->priv = sparx5;
  285          fdma->db_size = ALIGN(FDMA_XTR_BUFFER_SIZE, PAGE_SIZE);
  286          fdma->size = fdma_get_size(&sparx5->rx.fdma);
  287          fdma->ops.dataptr_cb = &sparx5_fdma_rx_dataptr_cb;
  288          fdma->ops.nextptr_cb = &fdma_nextptr_cb;
sparx5_fdma_tx_init():
  303          struct fdma *fdma = &tx->fdma;   (local)
  305          fdma->channel_id = channel;
  306          fdma->n_dcbs = FDMA_DCB_MAX;
  307          fdma->n_dbs = FDMA_TX_DCB_MAX_DBS;
  308          fdma->priv = sparx5;
  309          fdma->db_size = ALIGN(FDMA_XTR_BUFFER_SIZE, PAGE_SIZE);
  310          fdma->size = fdma_get_size_contiguous(&sparx5->tx.fdma);
  311          fdma->ops.dataptr_cb = &sparx5_fdma_tx_dataptr_cb;
  312          fdma->ops.nextptr_cb = &fdma_nextptr_cb;
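The RX and TX configurations differ mainly in the size helper: RX uses
fdma_get_size() (line 286) because its data lives in per-block skbs
allocated by the dataptr callback, while TX uses fdma_get_size_contiguous()
(line 310) because its data blocks must share the allocation with the DCBs,
as the TX dataptr arithmetic on lines 27-28 requires. A hedged sketch of
what the two sizes presumably have to cover (the page-size rounding is an
assumption):

#include <stddef.h>

#define PAGE_SZ 4096u   /* stand-in for PAGE_SIZE */

static size_t align_up(size_t v, size_t a)
{
        return (v + a - 1) / a * a;
}

/* DCB array only: enough for RX, whose data blocks are separate skbs. */
static size_t size_dcbs_only(size_t dcb_size, int n_dcbs)
{
        return align_up(dcb_size * (size_t)n_dcbs, PAGE_SZ);
}

/* DCB array plus every data block: what the TX layout needs. */
static size_t size_contiguous(size_t dcb_size, int n_dcbs,
                              int n_dbs, size_t db_size)
{
        return align_up(dcb_size * (size_t)n_dcbs +
                        db_size * (size_t)n_dcbs * (size_t)n_dbs, PAGE_SZ);
}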
sparx5_fdma_injection_mode():
  346          /* Change mode to fdma extraction and injection */
sparx5_fdma_init():
  406          /* Reset FDMA state */
sparx5_fdma_deinit():
  438          fdma_free_phys(&sparx5->rx.fdma);
  439          fdma_free_phys(&sparx5->tx.fdma);
sparx5_fdma_stop():
  476          /* Stop the fdma and channel interrupts */