Lines Matching +full:tf +full:-a — drivers/thunderbolt/dma_test.c

1 // SPDX-License-Identifier: GPL-2.0
70 * struct dma_test - DMA test device driver private data
114 /* DMA test property directory UUID: 3188cd10-6523-4a5a-a682-fdca07a248d8 */
In dma_test_free_rings():

	124	if (dt->rx_ring) {
	125		tb_xdomain_release_in_hopid(dt->xd, dt->rx_hopid);
	126		tb_ring_free(dt->rx_ring);
	127		dt->rx_ring = NULL;
	129	if (dt->tx_ring) {
	130		tb_xdomain_release_out_hopid(dt->xd, dt->tx_hopid);
	131		tb_ring_free(dt->tx_ring);
	132		dt->tx_ring = NULL;
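The teardown above follows a common idiom worth calling out: release a resource only if it was set up, then clear the pointer so a repeated call (or an unwind after a partial setup) is a harmless no-op. A minimal sketch, with hypothetical my_ctx/my_res names rather than the driver's types:

struct my_res;

struct my_ctx {
	struct my_res *res;
};

void my_res_free(struct my_res *res);	/* assumed counterpart of the alloc */

static void my_free_res(struct my_ctx *ctx)
{
	if (ctx->res) {
		my_res_free(ctx->res);
		ctx->res = NULL;	/* a second my_free_res() is now a no-op */
	}
}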
In dma_test_start_rings():

	139	struct tb_xdomain *xd = dt->xd;
	144	 * If we are both sender and receiver (traffic goes over a
	148	if (dt->packets_to_send && dt->packets_to_receive)
	151	if (dt->packets_to_send) {
	152		ring = tb_ring_alloc_tx(xd->tb->nhi, -1, DMA_TEST_TX_RING_SIZE,
	155			return -ENOMEM;
	157		dt->tx_ring = ring;
	158		e2e_tx_hop = ring->hop;
	160		ret = tb_xdomain_alloc_out_hopid(xd, -1);
	166		dt->tx_hopid = ret;
	169	if (dt->packets_to_receive) {
	175		ring = tb_ring_alloc_rx(xd->tb->nhi, -1, DMA_TEST_RX_RING_SIZE,
	180			return -ENOMEM;
	183		dt->rx_ring = ring;
	185		ret = tb_xdomain_alloc_in_hopid(xd, -1);
	191		dt->rx_hopid = ret;
	194	ret = tb_xdomain_enable_paths(dt->xd, dt->tx_hopid,
	195				      dt->tx_ring ? dt->tx_ring->hop : -1,
	196				      dt->rx_hopid,
	197				      dt->rx_ring ? dt->rx_ring->hop : -1);
	203	if (dt->tx_ring)
	204		tb_ring_start(dt->tx_ring);
	205	if (dt->rx_ring)
	206		tb_ring_start(dt->rx_ring);
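Note the ordering the fragment implies: rings and hop IDs are allocated first, the DMA paths are enabled with tb_xdomain_enable_paths(), and only then are the rings started. The e2e_tx_hop assignment suggests that when the test both sends and receives, the TX ring's hop is handed to the RX side so the two ends can run end-to-end flow control; the truncated comment at line 144 appears to introduce exactly that case.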
In dma_test_stop_rings():

	215	if (dt->rx_ring)
	216		tb_ring_stop(dt->rx_ring);
	217	if (dt->tx_ring)
	218		tb_ring_stop(dt->tx_ring);
	220	ret = tb_xdomain_disable_paths(dt->xd, dt->tx_hopid,
	221				       dt->tx_ring ? dt->tx_ring->hop : -1,
	222				       dt->rx_hopid,
	223				       dt->rx_ring ? dt->rx_ring->hop : -1);
	225		dev_warn(&dt->svc->dev, "failed to disable DMA paths\n");
In dma_test_rx_callback():

	233	struct dma_test_frame *tf = container_of(frame, typeof(*tf), frame);
	234	struct dma_test *dt = tf->dma_test;
	235	struct device *dma_dev = tb_ring_dma_device(dt->rx_ring);
	237	dma_unmap_single(dma_dev, tf->frame.buffer_phy, DMA_TEST_FRAME_SIZE,
	239	kfree(tf->data);
	242		kfree(tf);
	246	dt->packets_received++;
	247	dev_dbg(&dt->svc->dev, "packet %u/%u received\n", dt->packets_received,
	248		dt->packets_to_receive);
	250	if (tf->frame.flags & RING_DESC_CRC_ERROR)
	251		dt->crc_errors++;
	252	if (tf->frame.flags & RING_DESC_BUFFER_OVERRUN)
	253		dt->buffer_overflow_errors++;
	255	kfree(tf);
	257	if (dt->packets_received == dt->packets_to_receive)
	258		complete(&dt->complete);
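Both callbacks recover the driver's per-frame wrapper from the embedded ring frame with container_of(). A self-contained sketch of that recovery, using made-up types rather than the driver's:

#include <linux/kernel.h>

struct ring_frame_ex {
	unsigned int flags;
};

struct test_frame_ex {
	void *data;
	struct ring_frame_ex frame;	/* member handed to the ring layer */
};

/* the ring layer calls back with &wrapper->frame; get the wrapper back */
static struct test_frame_ex *to_test_frame(struct ring_frame_ex *frame)
{
	return container_of(frame, struct test_frame_ex, frame);
}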
In dma_test_submit_rx():

	263	struct device *dma_dev = tb_ring_dma_device(dt->rx_ring);
	267		struct dma_test_frame *tf;
	270		tf = kzalloc(sizeof(*tf), GFP_KERNEL);
	271		if (!tf)
	272			return -ENOMEM;
	274		tf->data = kzalloc(DMA_TEST_FRAME_SIZE, GFP_KERNEL);
	275		if (!tf->data) {
	276			kfree(tf);
	277			return -ENOMEM;
	280		dma_addr = dma_map_single(dma_dev, tf->data, DMA_TEST_FRAME_SIZE,
	283			kfree(tf->data);
	284			kfree(tf);
	285			return -ENOMEM;
	288		tf->frame.buffer_phy = dma_addr;
	289		tf->frame.callback = dma_test_rx_callback;
	290		tf->dma_test = dt;
	291		INIT_LIST_HEAD(&tf->frame.list);
	293		tb_ring_rx(dt->rx_ring, &tf->frame);
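Each receive buffer goes through the standard DMA-API sequence: allocate, map, and verify the mapping with dma_mapping_error(), unwinding the earlier steps on failure. A minimal sketch of that sequence with hypothetical names:

#include <linux/dma-mapping.h>
#include <linux/slab.h>

static int map_one_buffer(struct device *dma_dev, size_t size,
			  void **buf, dma_addr_t *addr)
{
	*buf = kzalloc(size, GFP_KERNEL);
	if (!*buf)
		return -ENOMEM;

	*addr = dma_map_single(dma_dev, *buf, size, DMA_FROM_DEVICE);
	if (dma_mapping_error(dma_dev, *addr)) {
		kfree(*buf);	/* unwind the allocation before failing */
		return -ENOMEM;
	}
	return 0;
}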
In dma_test_tx_callback():

	302	struct dma_test_frame *tf = container_of(frame, typeof(*tf), frame);
	303	struct dma_test *dt = tf->dma_test;
	304	struct device *dma_dev = tb_ring_dma_device(dt->tx_ring);
	306	dma_unmap_single(dma_dev, tf->frame.buffer_phy, DMA_TEST_FRAME_SIZE,
	308	kfree(tf->data);
	309	kfree(tf);
In dma_test_submit_tx():

	314	struct device *dma_dev = tb_ring_dma_device(dt->tx_ring);
	318		struct dma_test_frame *tf;
	321		tf = kzalloc(sizeof(*tf), GFP_KERNEL);
	322		if (!tf)
	323			return -ENOMEM;
	325		tf->frame.size = 0; /* means 4096 */
	326		tf->dma_test = dt;
	328		tf->data = kmemdup(dma_test_pattern, DMA_TEST_FRAME_SIZE, GFP_KERNEL);
	329		if (!tf->data) {
	330			kfree(tf);
	331			return -ENOMEM;
	334		dma_addr = dma_map_single(dma_dev, tf->data, DMA_TEST_FRAME_SIZE,
	337			kfree(tf->data);
	338			kfree(tf);
	339			return -ENOMEM;
	342		tf->frame.buffer_phy = dma_addr;
	343		tf->frame.callback = dma_test_tx_callback;
	344		tf->frame.sof = DMA_TEST_PDF_FRAME_START;
	345		tf->frame.eof = DMA_TEST_PDF_FRAME_END;
	346		INIT_LIST_HEAD(&tf->frame.list);
	348		dt->packets_sent++;
	349		dev_dbg(&dt->svc->dev, "packet %u/%u sent\n", dt->packets_sent,
	350			dt->packets_to_send);
	352		tb_ring_tx(dt->tx_ring, &tf->frame);
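The transmit path mirrors the receive path, with three differences visible above: the payload is a kmemdup() copy of a known test pattern, frame.size is left at 0 to mean a full 4096-byte frame, and the frame is stamped with start/end-of-frame markers (DMA_TEST_PDF_FRAME_START/END) before being queued with tb_ring_tx().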
In the debugfs attribute macro (locked getter and setter bodies):

	365	ret = mutex_lock_interruptible(&dt->lock);	\
	369	mutex_unlock(&dt->lock);			\
	381	ret = mutex_lock_interruptible(&dt->lock);	\
	385	mutex_unlock(&dt->lock);			\
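These locked bodies are the typical shape of a debugfs attribute pair. A hedged sketch of how such a getter/setter is usually wired up with DEFINE_DEBUGFS_ATTRIBUTE(); all names below are illustrative, not the driver's:

#include <linux/debugfs.h>
#include <linux/mutex.h>

static DEFINE_MUTEX(example_lock);
static u64 example_value;

static int example_get(void *data, u64 *val)
{
	int ret = mutex_lock_interruptible(&example_lock);

	if (ret)
		return ret;	/* interrupted by a signal */
	*val = example_value;
	mutex_unlock(&example_lock);
	return 0;
}

static int example_set(void *data, u64 val)
{
	int ret = mutex_lock_interruptible(&example_lock);

	if (ret)
		return ret;
	example_value = val;
	mutex_unlock(&example_lock);
	return 0;
}

DEFINE_DEBUGFS_ATTRIBUTE(example_fops, example_get, example_set, "%llu\n");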
393 *val = dt->link_width; in lanes_get()
398 return val > 2 ? -EINVAL : 0; in lanes_validate()
403 dt->link_width = val; in lanes_set()
409 *val = dt->link_speed; in speed_get()
421 return -EINVAL; in speed_validate()
427 dt->link_speed = val; in speed_set()
433 *val = dt->packets_to_receive; in packets_to_receive_get()
438 return val > DMA_TEST_MAX_PACKETS ? -EINVAL : 0; in packets_to_receive_validate()
443 dt->packets_to_receive = val; in packets_to_receive_set()
450 *val = dt->packets_to_send; in packets_to_send_get()
455 return val > DMA_TEST_MAX_PACKETS ? -EINVAL : 0; in packets_to_send_validate()
460 dt->packets_to_send = val; in packets_to_send_set()
In dma_test_set_bonding():

	467	switch (dt->link_width) {
	469		return tb_xdomain_lane_bonding_enable(dt->xd);
	471		tb_xdomain_lane_bonding_disable(dt->xd);
In dma_test_validate_config():

	480	if (!dt->packets_to_send && !dt->packets_to_receive)
	482	if (dt->packets_to_send && dt->packets_to_receive &&
	483	    dt->packets_to_send != dt->packets_to_receive)
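Read together, the two conditions say that a valid configuration must move traffic in at least one direction, and that a configuration running both directions at once must use matching send and receive packet counts.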
In dma_test_check_errors():

	490	if (!dt->error_code) {
	491		if (dt->link_speed && dt->xd->link_speed != dt->link_speed) {
	492			dt->error_code = DMA_TEST_SPEED_ERROR;
	493		} else if (dt->link_width && dt->link_width != dt->xd->link_width) {
	494			dt->error_code = DMA_TEST_WIDTH_ERROR;
	495		} else if (dt->packets_to_send != dt->packets_sent ||
	496			   dt->packets_to_receive != dt->packets_received ||
	497			   dt->crc_errors || dt->buffer_overflow_errors) {
	498			dt->error_code = DMA_TEST_PACKET_ERROR;
	504	dt->result = DMA_TEST_FAIL;
In test_store():

	514		return -EINVAL;
	516	ret = mutex_lock_interruptible(&dt->lock);
	520	dt->packets_sent = 0;
	521	dt->packets_received = 0;
	522	dt->crc_errors = 0;
	523	dt->buffer_overflow_errors = 0;
	524	dt->result = DMA_TEST_SUCCESS;
	525	dt->error_code = DMA_TEST_NO_ERROR;
	527	dev_dbg(&svc->dev, "DMA test starting\n");
	528	if (dt->link_speed)
	529		dev_dbg(&svc->dev, "link_speed: %u Gb/s\n", dt->link_speed);
	530	if (dt->link_width)
	531		dev_dbg(&svc->dev, "link_width: %u\n", dt->link_width);
	532	dev_dbg(&svc->dev, "packets_to_send: %u\n", dt->packets_to_send);
	533	dev_dbg(&svc->dev, "packets_to_receive: %u\n", dt->packets_to_receive);
	536		dev_err(&svc->dev, "invalid test configuration\n");
	537		dt->error_code = DMA_TEST_CONFIG_ERROR;
	543		dev_err(&svc->dev, "failed to set lanes\n");
	544		dt->error_code = DMA_TEST_BONDING_ERROR;
	550		dev_err(&svc->dev, "failed to enable DMA rings\n");
	551		dt->error_code = DMA_TEST_DMA_ERROR;
	555	if (dt->packets_to_receive) {
	556		reinit_completion(&dt->complete);
	557		ret = dma_test_submit_rx(dt, dt->packets_to_receive);
	559			dev_err(&svc->dev, "failed to submit receive buffers\n");
	560			dt->error_code = DMA_TEST_BUFFER_ERROR;
	565	if (dt->packets_to_send) {
	566		ret = dma_test_submit_tx(dt, dt->packets_to_send);
	568			dev_err(&svc->dev, "failed to submit transmit buffers\n");
	569			dt->error_code = DMA_TEST_BUFFER_ERROR;
	574	if (dt->packets_to_receive) {
	575		ret = wait_for_completion_interruptible(&dt->complete);
	577			dt->error_code = DMA_TEST_INTERRUPTED;
	586	mutex_unlock(&dt->lock);
	588	dev_dbg(&svc->dev, "DMA test %s\n", dma_test_result_names[dt->result]);
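The run sequence leans on the completion initialized in probe: it is re-armed before receive buffers are submitted, the RX callback complete()s it once the last packet is counted (line 258), and the writer sleeps interruptibly until then. A sketch of that handshake with hypothetical names:

#include <linux/completion.h>

static DECLARE_COMPLETION(rx_done);	/* statically initialized */

static int run_rx_side(void)
{
	reinit_completion(&rx_done);	/* re-arm before submitting buffers */
	/* ... submit receive buffers here ... */
	if (wait_for_completion_interruptible(&rx_done))
		return -ERESTARTSYS;	/* a signal ended the wait early */
	return 0;
}

/* called from the receive callback after the final packet is counted */
static void rx_saw_last_packet(void)
{
	complete(&rx_done);
}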
In status_show():

	595	struct tb_service *svc = s->private;
	599	ret = mutex_lock_interruptible(&dt->lock);
	603	seq_printf(s, "result: %s\n", dma_test_result_names[dt->result]);
	604	if (dt->result == DMA_TEST_NOT_RUN)
	607	seq_printf(s, "packets received: %u\n", dt->packets_received);
	608	seq_printf(s, "packets sent: %u\n", dt->packets_sent);
	609	seq_printf(s, "CRC errors: %u\n", dt->crc_errors);
	611		   dt->buffer_overflow_errors);
	612	seq_printf(s, "error: %s\n", dma_test_error_names[dt->error_code]);
	615	mutex_unlock(&dt->lock);
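status_show() is a plain seq_file show routine. A hedged sketch of the usual wrapping that produces a *_fops from it (DEFINE_SHOW_ATTRIBUTE() hands the open() argument through to s->private, matching line 595); the names are illustrative:

#include <linux/seq_file.h>

static int example_show(struct seq_file *s, void *data)
{
	seq_printf(s, "result: %s\n", "not run");
	return 0;
}

DEFINE_SHOW_ATTRIBUTE(example);	/* generates example_fops */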
In dma_test_debugfs_init():

	624	dt->debugfs_dir = debugfs_create_dir("dma_test", svc->debugfs_dir);
	626	debugfs_create_file("lanes", 0600, dt->debugfs_dir, svc, &lanes_fops);
	627	debugfs_create_file("speed", 0600, dt->debugfs_dir, svc, &speed_fops);
	628	debugfs_create_file("packets_to_receive", 0600, dt->debugfs_dir, svc,
	630	debugfs_create_file("packets_to_send", 0600, dt->debugfs_dir, svc,
	632	debugfs_create_file("status", 0400, dt->debugfs_dir, svc, &status_fops);
	633	debugfs_create_file("test", 0200, dt->debugfs_dir, svc, &test_fops);
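The octal modes give each file the narrowest useful permissions: the tunables (lanes, speed, packets_to_*) are 0600 (owner read/write), status is 0400 (read-only), and test is 0200 (a write-only trigger).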
In dma_test_probe():

	641	dt = devm_kzalloc(&svc->dev, sizeof(*dt), GFP_KERNEL);
	643		return -ENOMEM;
	645	dt->svc = svc;
	646	dt->xd = xd;
	647	mutex_init(&dt->lock);
	648	init_completion(&dt->complete);
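Probe uses a device-managed allocation, which is why the remove fragment below shows no kfree(): devm_kzalloc() ties the private data to the device and the core releases it on unbind. A small sketch of the pattern with hypothetical names:

#include <linux/completion.h>
#include <linux/device.h>
#include <linux/mutex.h>
#include <linux/slab.h>

struct my_priv {
	struct mutex lock;
	struct completion complete;
};

static int my_probe(struct device *dev)
{
	struct my_priv *p = devm_kzalloc(dev, sizeof(*p), GFP_KERNEL);

	if (!p)
		return -ENOMEM;
	mutex_init(&p->lock);
	init_completion(&p->complete);
	dev_set_drvdata(dev, p);	/* fetched later with dev_get_drvdata() */
	return 0;
}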
In dma_test_remove():

	660	mutex_lock(&dt->lock);
	661	debugfs_remove_recursive(dt->debugfs_dir);
	662	mutex_unlock(&dt->lock);
In dma_test_suspend():

	670	 * wait_for_completion_interruptible() with -ERESTARTSYS and the
	673	 * re-running the test.
In dma_test_init():

	711	return -ENOMEM;
	718		ret = -ENOMEM;