Lines matching +full:rmii +full:- +full:refclk +full:- +full:out (the tokenized devicetree property "nxp,rmii-refclk-out") in the NXP C45 TJA11xx PHY driver, nxp-c45-tja11xx.c
1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright 2021-2023 NXP
4 * Author: Radu Pirea <radu-[email protected]>
20 #include "nxp-c45-tja11xx.h"
195 #define NXP_C45_SKB_CB(skb) ((struct nxp_c45_skb_cb *)(skb)->cb)
300 return phydev->drv->driver_data; in nxp_c45_get_data()
308 return phy_data->regmap; in nxp_c45_get_regmap()
317 if (reg_field->size == 0) { in nxp_c45_read_reg_field()
319 return -EINVAL; in nxp_c45_read_reg_field()
322 ret = phy_read_mmd(phydev, reg_field->devad, reg_field->reg); in nxp_c45_read_reg_field()
326 mask = reg_field->size == 1 ? BIT(reg_field->offset) : in nxp_c45_read_reg_field()
327 GENMASK(reg_field->offset + reg_field->size - 1, in nxp_c45_read_reg_field()
328 reg_field->offset); in nxp_c45_read_reg_field()
330 ret >>= reg_field->offset; in nxp_c45_read_reg_field()
342 if (reg_field->size == 0) { in nxp_c45_write_reg_field()
344 return -EINVAL; in nxp_c45_write_reg_field()
347 mask = reg_field->size == 1 ? BIT(reg_field->offset) : in nxp_c45_write_reg_field()
348 GENMASK(reg_field->offset + reg_field->size - 1, in nxp_c45_write_reg_field()
349 reg_field->offset); in nxp_c45_write_reg_field()
350 set = val << reg_field->offset; in nxp_c45_write_reg_field()
352 return phy_modify_mmd_changed(phydev, reg_field->devad, in nxp_c45_write_reg_field()
353 reg_field->reg, mask, set); in nxp_c45_write_reg_field()
359 if (reg_field->size != 1) { in nxp_c45_set_reg_field()
361 return -EINVAL; in nxp_c45_set_reg_field()
370 if (reg_field->size != 1) { in nxp_c45_clear_reg_field()
372 return -EINVAL; in nxp_c45_clear_reg_field()
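The four reg_field helpers above share one descriptor type. A minimal usage sketch follows, assuming the struct layout implied by the member accesses above (reg, devad, offset, size; the real definition lives in nxp-c45-tja11xx.h and the exact field types here are guesses), with a made-up register field purely for illustration.

/* Assumed layout, reconstructed from the accesses above. */
struct nxp_c45_reg_field {
	u16 reg;	/* register address inside the MMD */
	u8  devad;	/* MMD device address, e.g. MDIO_MMD_VEND1 */
	u8  offset;	/* least significant bit of the field */
	u8  size;	/* width in bits; set/clear require size == 1 */
};

/* Hypothetical 1-bit field at bit 3 of a vendor register (not a real one). */
static const struct nxp_c45_reg_field demo_field = {
	.devad = MDIO_MMD_VEND1, .reg = 0x1234, .offset = 3, .size = 1,
};

static int demo_toggle(struct phy_device *phydev, bool on)
{
	return on ? nxp_c45_set_reg_field(phydev, &demo_field) :
		    nxp_c45_clear_reg_field(phydev, &demo_field);
}

Multi-bit fields go through nxp_c45_read_reg_field()/nxp_c45_write_reg_field(), which build the mask as BIT(offset) for single-bit fields or GENMASK(offset + size - 1, offset) otherwise and shift the value into place, as the lines above show.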
380 return phydev->irq <= 0; in nxp_c45_poll_txts()
388 const struct nxp_c45_regmap *regmap = nxp_c45_get_regmap(priv->phydev); in _nxp_c45_ptp_gettimex64()
390 nxp_c45_set_reg_field(priv->phydev, &regmap->ltc_read); in _nxp_c45_ptp_gettimex64()
391 ts->tv_nsec = phy_read_mmd(priv->phydev, MDIO_MMD_VEND1, in _nxp_c45_ptp_gettimex64()
392 regmap->vend1_ltc_rd_nsec_0); in _nxp_c45_ptp_gettimex64()
393 ts->tv_nsec |= phy_read_mmd(priv->phydev, MDIO_MMD_VEND1, in _nxp_c45_ptp_gettimex64()
394 regmap->vend1_ltc_rd_nsec_1) << 16; in _nxp_c45_ptp_gettimex64()
395 ts->tv_sec = phy_read_mmd(priv->phydev, MDIO_MMD_VEND1, in _nxp_c45_ptp_gettimex64()
396 regmap->vend1_ltc_rd_sec_0); in _nxp_c45_ptp_gettimex64()
397 ts->tv_sec |= phy_read_mmd(priv->phydev, MDIO_MMD_VEND1, in _nxp_c45_ptp_gettimex64()
398 regmap->vend1_ltc_rd_sec_1) << 16; in _nxp_c45_ptp_gettimex64()
409 mutex_lock(&priv->ptp_lock); in nxp_c45_ptp_gettimex64()
411 mutex_unlock(&priv->ptp_lock); in nxp_c45_ptp_gettimex64()
420 const struct nxp_c45_regmap *regmap = nxp_c45_get_regmap(priv->phydev); in _nxp_c45_ptp_settime64()
422 phy_write_mmd(priv->phydev, MDIO_MMD_VEND1, regmap->vend1_ltc_wr_nsec_0, in _nxp_c45_ptp_settime64()
423 ts->tv_nsec); in _nxp_c45_ptp_settime64()
424 phy_write_mmd(priv->phydev, MDIO_MMD_VEND1, regmap->vend1_ltc_wr_nsec_1, in _nxp_c45_ptp_settime64()
425 ts->tv_nsec >> 16); in _nxp_c45_ptp_settime64()
426 phy_write_mmd(priv->phydev, MDIO_MMD_VEND1, regmap->vend1_ltc_wr_sec_0, in _nxp_c45_ptp_settime64()
427 ts->tv_sec); in _nxp_c45_ptp_settime64()
428 phy_write_mmd(priv->phydev, MDIO_MMD_VEND1, regmap->vend1_ltc_wr_sec_1, in _nxp_c45_ptp_settime64()
429 ts->tv_sec >> 16); in _nxp_c45_ptp_settime64()
430 nxp_c45_set_reg_field(priv->phydev, &regmap->ltc_write); in _nxp_c45_ptp_settime64()
440 mutex_lock(&priv->ptp_lock); in nxp_c45_ptp_settime64()
442 mutex_unlock(&priv->ptp_lock); in nxp_c45_ptp_settime64()
450 const struct nxp_c45_phy_data *data = nxp_c45_get_data(priv->phydev); in nxp_c45_ptp_adjfine()
451 const struct nxp_c45_regmap *regmap = data->regmap; in nxp_c45_ptp_adjfine()
456 mutex_lock(&priv->ptp_lock); in nxp_c45_ptp_adjfine()
460 subns_inc_val = PPM_TO_SUBNS_INC(ppb, data->ptp_clk_period); in nxp_c45_ptp_adjfine()
462 phy_write_mmd(priv->phydev, MDIO_MMD_VEND1, in nxp_c45_ptp_adjfine()
463 regmap->vend1_rate_adj_subns_0, in nxp_c45_ptp_adjfine()
470 phy_write_mmd(priv->phydev, MDIO_MMD_VEND1, in nxp_c45_ptp_adjfine()
471 regmap->vend1_rate_adj_subns_1, in nxp_c45_ptp_adjfine()
473 mutex_unlock(&priv->ptp_lock); in nxp_c45_ptp_adjfine()
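For context on the .adjfine callback above: its scaled_ppm argument is expressed in units of 2^-16 ppm, so a request of +1 ppm arrives as 65536. The sketch below only illustrates that arithmetic; the kernel already provides scaled_ppm_to_ppb() for the real conversion, and PPM_TO_SUBNS_INC() (used above) then scales the ppb value by the PTP clock period into the sub-nanosecond rate-adjust registers.

/* 1000/65536 == 125/8192: scaled_ppm (units of 2^-16 ppm) expressed in ppb. */
static long long example_scaled_ppm_to_ppb(long scaled_ppm)
{
	return ((long long)scaled_ppm * 1000) >> 16;	/* 65536 -> 1000 ppb */
}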
483 mutex_lock(&priv->ptp_lock); in nxp_c45_ptp_adjtime()
488 mutex_unlock(&priv->ptp_lock); in nxp_c45_ptp_adjtime()
496 ts->tv_nsec = hwts->nsec; in nxp_c45_reconstruct_ts()
497 if ((ts->tv_sec & TS_SEC_MASK) < (hwts->sec & TS_SEC_MASK)) in nxp_c45_reconstruct_ts()
498 ts->tv_sec -= TS_SEC_MASK + 1; in nxp_c45_reconstruct_ts()
499 ts->tv_sec &= ~TS_SEC_MASK; in nxp_c45_reconstruct_ts()
500 ts->tv_sec |= hwts->sec & TS_SEC_MASK; in nxp_c45_reconstruct_ts()
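A worked example of the reconstruction above: hardware only latches the low bits of the seconds counter (TS_SEC_MASK), and the full value is rebuilt from a fresh LTC read, subtracting one wrap period if the LTC's low bits have already wrapped past the captured ones. The mask value 0x3 below is an assumption chosen purely for illustration.

/* Assume TS_SEC_MASK == 0x3 (hardware keeps the 2 low bits of seconds).
 *
 *   fresh LTC read:   ts->tv_sec = 1000   (1000 & 0x3 == 0)
 *   captured by HW:   hwts->sec  = 3      (3 & 0x3 == 3)
 *
 *   0 < 3, so the truncated counter wrapped after the capture:
 *     tv_sec -= 0x3 + 1   ->  996
 *     tv_sec &= ~0x3      ->  996
 *     tv_sec |= 3         ->  999
 *
 * The packet was therefore stamped at second 999, just before "now".
 */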
507 return ntohs(header->sequence_id) == hwts->sequence_id && in nxp_c45_match_ts()
508 ptp_get_msgtype(header, type) == hwts->msg_type && in nxp_c45_match_ts()
509 header->domain_number == hwts->domain_number; in nxp_c45_match_ts()
515 const struct nxp_c45_regmap *regmap = nxp_c45_get_regmap(priv->phydev); in nxp_c45_get_extts()
517 extts->tv_nsec = phy_read_mmd(priv->phydev, MDIO_MMD_VEND1, in nxp_c45_get_extts()
518 regmap->vend1_ext_trg_data_0); in nxp_c45_get_extts()
519 extts->tv_nsec |= phy_read_mmd(priv->phydev, MDIO_MMD_VEND1, in nxp_c45_get_extts()
520 regmap->vend1_ext_trg_data_1) << 16; in nxp_c45_get_extts()
521 extts->tv_sec = phy_read_mmd(priv->phydev, MDIO_MMD_VEND1, in nxp_c45_get_extts()
522 regmap->vend1_ext_trg_data_2); in nxp_c45_get_extts()
523 extts->tv_sec |= phy_read_mmd(priv->phydev, MDIO_MMD_VEND1, in nxp_c45_get_extts()
524 regmap->vend1_ext_trg_data_3) << 16; in nxp_c45_get_extts()
525 phy_write_mmd(priv->phydev, MDIO_MMD_VEND1, in nxp_c45_get_extts()
526 regmap->vend1_ext_trg_ctrl, RING_DONE); in nxp_c45_get_extts()
546 const struct nxp_c45_regmap *regmap = nxp_c45_get_regmap(priv->phydev); in tja1120_get_extts()
547 struct phy_device *phydev = priv->phydev; in tja1120_get_extts()
553 regmap->vend1_ext_trg_ctrl); in tja1120_get_extts()
565 regmap->vend1_ext_trg_ctrl, RING_DONE); in tja1120_get_extts()
579 const struct nxp_c45_regmap *regmap = nxp_c45_get_regmap(priv->phydev); in nxp_c45_read_egress_ts()
580 struct phy_device *phydev = priv->phydev; in nxp_c45_read_egress_ts()
582 hwts->domain_number = in nxp_c45_read_egress_ts()
583 nxp_c45_read_reg_field(phydev, &regmap->domain_number); in nxp_c45_read_egress_ts()
584 hwts->msg_type = in nxp_c45_read_egress_ts()
585 nxp_c45_read_reg_field(phydev, &regmap->msg_type); in nxp_c45_read_egress_ts()
586 hwts->sequence_id = in nxp_c45_read_egress_ts()
587 nxp_c45_read_reg_field(phydev, &regmap->sequence_id); in nxp_c45_read_egress_ts()
588 hwts->nsec = in nxp_c45_read_egress_ts()
589 nxp_c45_read_reg_field(phydev, &regmap->nsec_15_0); in nxp_c45_read_egress_ts()
590 hwts->nsec |= in nxp_c45_read_egress_ts()
591 nxp_c45_read_reg_field(phydev, &regmap->nsec_29_16) << 16; in nxp_c45_read_egress_ts()
592 hwts->sec = nxp_c45_read_reg_field(phydev, &regmap->sec_1_0); in nxp_c45_read_egress_ts()
593 hwts->sec |= nxp_c45_read_reg_field(phydev, &regmap->sec_4_2) << 2; in nxp_c45_read_egress_ts()
602 mutex_lock(&priv->ptp_lock); in nxp_c45_get_hwtxts()
603 phy_write_mmd(priv->phydev, MDIO_MMD_VEND1, VEND1_EGR_RING_CTRL, in nxp_c45_get_hwtxts()
605 reg = phy_read_mmd(priv->phydev, MDIO_MMD_VEND1, VEND1_EGR_RING_DATA_0); in nxp_c45_get_hwtxts()
612 mutex_unlock(&priv->ptp_lock); in nxp_c45_get_hwtxts()
630 struct phy_device *phydev = priv->phydev; in tja1120_get_hwtxts()
635 mutex_lock(&priv->ptp_lock); in tja1120_get_hwtxts()
656 mutex_unlock(&priv->ptp_lock); in tja1120_get_hwtxts()
670 spin_lock_irqsave(&priv->tx_queue.lock, flags); in nxp_c45_process_txts()
671 skb_queue_walk_safe(&priv->tx_queue, skb, tmp) { in nxp_c45_process_txts()
672 ts_match = nxp_c45_match_ts(NXP_C45_SKB_CB(skb)->header, txts, in nxp_c45_process_txts()
673 NXP_C45_SKB_CB(skb)->type); in nxp_c45_process_txts()
677 __skb_unlink(skb, &priv->tx_queue); in nxp_c45_process_txts()
680 spin_unlock_irqrestore(&priv->tx_queue.lock, flags); in nxp_c45_process_txts()
683 nxp_c45_ptp_gettimex64(&priv->caps, &ts, NULL); in nxp_c45_process_txts()
690 phydev_warn(priv->phydev, in nxp_c45_process_txts()
698 const struct nxp_c45_phy_data *data = nxp_c45_get_data(priv->phydev); in nxp_c45_do_aux_work()
699 bool poll_txts = nxp_c45_poll_txts(priv->phydev); in nxp_c45_do_aux_work()
709 while (!skb_queue_empty_lockless(&priv->tx_queue) && poll_txts) { in nxp_c45_do_aux_work()
710 ts_valid = data->get_egressts(priv, &hwts); in nxp_c45_do_aux_work()
720 while ((skb = skb_dequeue(&priv->rx_queue)) != NULL) { in nxp_c45_do_aux_work()
721 nxp_c45_ptp_gettimex64(&priv->caps, &ts, NULL); in nxp_c45_do_aux_work()
722 ts_raw = __be32_to_cpu(NXP_C45_SKB_CB(skb)->header->reserved2); in nxp_c45_do_aux_work()
727 shhwtstamps_rx->hwtstamp = ns_to_ktime(timespec64_to_ns(&ts)); in nxp_c45_do_aux_work()
728 NXP_C45_SKB_CB(skb)->header->reserved2 = 0; in nxp_c45_do_aux_work()
732 if (priv->extts) { in nxp_c45_do_aux_work()
733 ts_valid = data->get_extts(priv, &ts); in nxp_c45_do_aux_work()
734 if (ts_valid && timespec64_compare(&ts, &priv->extts_ts) != 0) { in nxp_c45_do_aux_work()
735 priv->extts_ts = ts; in nxp_c45_do_aux_work()
736 event.index = priv->extts_index; in nxp_c45_do_aux_work()
739 ptp_clock_event(priv->ptp_clock, &event); in nxp_c45_do_aux_work()
744 return reschedule ? 1 : -1; in nxp_c45_do_aux_work()
750 struct phy_device *phydev = priv->phydev; in nxp_c45_gpio_config()
759 const struct nxp_c45_regmap *regmap = nxp_c45_get_regmap(priv->phydev); in nxp_c45_perout_enable()
760 struct phy_device *phydev = priv->phydev; in nxp_c45_perout_enable()
763 if (perout->flags & ~PTP_PEROUT_PHASE) in nxp_c45_perout_enable()
764 return -EOPNOTSUPP; in nxp_c45_perout_enable()
766 pin = ptp_find_pin(priv->ptp_clock, PTP_PF_PEROUT, perout->index); in nxp_c45_perout_enable()
771 nxp_c45_clear_reg_field(priv->phydev, in nxp_c45_perout_enable()
772 &regmap->pps_enable); in nxp_c45_perout_enable()
773 nxp_c45_clear_reg_field(priv->phydev, in nxp_c45_perout_enable()
774 &regmap->pps_polarity); in nxp_c45_perout_enable()
785 if (perout->period.sec != 1 || perout->period.nsec != 0) { in nxp_c45_perout_enable()
787 return -EINVAL; in nxp_c45_perout_enable()
790 if (!(perout->flags & PTP_PEROUT_PHASE)) { in nxp_c45_perout_enable()
791 if (perout->start.sec != 0 || perout->start.nsec != 0) { in nxp_c45_perout_enable()
793 return -EINVAL; in nxp_c45_perout_enable()
796 if (perout->phase.nsec != 0 && in nxp_c45_perout_enable()
797 perout->phase.nsec != (NSEC_PER_SEC >> 1)) { in nxp_c45_perout_enable()
799 return -EINVAL; in nxp_c45_perout_enable()
802 if (perout->phase.nsec == 0) in nxp_c45_perout_enable()
803 nxp_c45_clear_reg_field(priv->phydev, in nxp_c45_perout_enable()
804 &regmap->pps_polarity); in nxp_c45_perout_enable()
806 nxp_c45_set_reg_field(priv->phydev, in nxp_c45_perout_enable()
807 &regmap->pps_polarity); in nxp_c45_perout_enable()
812 nxp_c45_set_reg_field(priv->phydev, &regmap->pps_enable); in nxp_c45_perout_enable()
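The validation above means this PHY can only generate a 1 Hz output, optionally shifted by half a second. A userspace sketch of requesting exactly that through the PTP character device follows; the device path, channel index, and helper name are placeholders, the uapi header is assumed to be new enough to have the PTP_PEROUT_PHASE union member, and the chosen pin must already be assigned the PTP_PF_PEROUT function (otherwise the ptp_find_pin() lookup above fails).

#include <fcntl.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/ptp_clock.h>

/* Request the only supported waveform: period 1 s, phase 0 or 500 ms. */
static int request_pps(const char *dev, unsigned int chan, int half_sec_phase)
{
	struct ptp_perout_request req;
	int fd, ret;

	fd = open(dev, O_RDWR);
	if (fd < 0)
		return -1;

	memset(&req, 0, sizeof(req));
	req.index = chan;
	req.period.sec = 1;	/* anything else is rejected with -EINVAL */
	req.period.nsec = 0;
	req.flags = PTP_PEROUT_PHASE;
	req.phase.sec = 0;
	req.phase.nsec = half_sec_phase ? 500000000 : 0;

	ret = ioctl(fd, PTP_PEROUT_REQUEST2, &req);
	close(fd);
	return ret;
}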
820 if (extts->flags & PTP_RISING_EDGE) in nxp_c45_set_rising_or_falling()
824 if (extts->flags & PTP_FALLING_EDGE) in nxp_c45_set_rising_or_falling()
835 if (extts->flags & PTP_RISING_EDGE || in nxp_c45_set_rising_and_falling()
836 extts->flags == PTP_ENABLE_FEATURE) in nxp_c45_set_rising_and_falling()
845 if (extts->flags & PTP_FALLING_EDGE) in nxp_c45_set_rising_and_falling()
858 const struct nxp_c45_phy_data *data = nxp_c45_get_data(priv->phydev); in nxp_c45_extts_enable()
861 if (extts->flags & ~(PTP_ENABLE_FEATURE | in nxp_c45_extts_enable()
865 return -EOPNOTSUPP; in nxp_c45_extts_enable()
868 if ((extts->flags & PTP_RISING_EDGE) && in nxp_c45_extts_enable()
869 (extts->flags & PTP_FALLING_EDGE) && in nxp_c45_extts_enable()
870 !data->ext_ts_both_edges) in nxp_c45_extts_enable()
871 return -EOPNOTSUPP; in nxp_c45_extts_enable()
873 pin = ptp_find_pin(priv->ptp_clock, PTP_PF_EXTTS, extts->index); in nxp_c45_extts_enable()
879 priv->extts = false; in nxp_c45_extts_enable()
884 if (data->ext_ts_both_edges) in nxp_c45_extts_enable()
885 nxp_c45_set_rising_and_falling(priv->phydev, extts); in nxp_c45_extts_enable()
887 nxp_c45_set_rising_or_falling(priv->phydev, extts); in nxp_c45_extts_enable()
890 priv->extts = true; in nxp_c45_extts_enable()
891 priv->extts_index = extts->index; in nxp_c45_extts_enable()
892 ptp_schedule_worker(priv->ptp_clock, 0); in nxp_c45_extts_enable()
902 switch (req->type) { in nxp_c45_ptp_enable()
904 return nxp_c45_extts_enable(priv, &req->extts, on); in nxp_c45_ptp_enable()
906 return nxp_c45_perout_enable(priv, &req->perout, on); in nxp_c45_ptp_enable()
908 return -EOPNOTSUPP; in nxp_c45_ptp_enable()
931 return -EINVAL; in nxp_c45_ptp_verify_pin()
939 return -EOPNOTSUPP; in nxp_c45_ptp_verify_pin()
947 priv->caps = (struct ptp_clock_info) { in nxp_c45_init_ptp_clock()
964 priv->ptp_clock = ptp_clock_register(&priv->caps, in nxp_c45_init_ptp_clock()
965 &priv->phydev->mdio.dev); in nxp_c45_init_ptp_clock()
967 if (IS_ERR(priv->ptp_clock)) in nxp_c45_init_ptp_clock()
968 return PTR_ERR(priv->ptp_clock); in nxp_c45_init_ptp_clock()
970 if (!priv->ptp_clock) in nxp_c45_init_ptp_clock()
971 return -ENOMEM; in nxp_c45_init_ptp_clock()
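Why the registration result is checked twice:

/* ptp_clock_register() returns an ERR_PTR() on failure, but the inline stub
 * compiled in when PTP clock support is not reachable in the kernel config
 * returns NULL instead, hence the separate -ENOMEM path above.
 */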
982 switch (priv->hwts_tx) { in nxp_c45_txtstamp()
984 NXP_C45_SKB_CB(skb)->type = type; in nxp_c45_txtstamp()
985 NXP_C45_SKB_CB(skb)->header = ptp_parse_header(skb, type); in nxp_c45_txtstamp()
986 skb_shinfo(skb)->tx_flags |= SKBTX_IN_PROGRESS; in nxp_c45_txtstamp()
987 skb_queue_tail(&priv->tx_queue, skb); in nxp_c45_txtstamp()
988 if (nxp_c45_poll_txts(priv->phydev)) in nxp_c45_txtstamp()
989 ptp_schedule_worker(priv->ptp_clock, 0); in nxp_c45_txtstamp()
1008 if (!priv->hwts_rx) in nxp_c45_rxtstamp()
1011 NXP_C45_SKB_CB(skb)->header = header; in nxp_c45_rxtstamp()
1012 skb_queue_tail(&priv->rx_queue, skb); in nxp_c45_rxtstamp()
1013 ptp_schedule_worker(priv->ptp_clock, 0); in nxp_c45_rxtstamp()
1024 struct phy_device *phydev = priv->phydev; in nxp_c45_hwtstamp()
1027 if (cfg->tx_type < 0 || cfg->tx_type > HWTSTAMP_TX_ON) in nxp_c45_hwtstamp()
1028 return -ERANGE; in nxp_c45_hwtstamp()
1031 priv->hwts_tx = cfg->tx_type; in nxp_c45_hwtstamp()
1033 switch (cfg->rx_filter) { in nxp_c45_hwtstamp()
1035 priv->hwts_rx = 0; in nxp_c45_hwtstamp()
1040 priv->hwts_rx = 1; in nxp_c45_hwtstamp()
1041 cfg->rx_filter = HWTSTAMP_FILTER_PTP_V2_L2_EVENT; in nxp_c45_hwtstamp()
1044 return -ERANGE; in nxp_c45_hwtstamp()
1047 if (priv->hwts_rx || priv->hwts_tx) { in nxp_c45_hwtstamp()
1049 data->regmap->vend1_event_msg_filt, in nxp_c45_hwtstamp()
1051 data->ptp_enable(phydev, true); in nxp_c45_hwtstamp()
1054 data->regmap->vend1_event_msg_filt, in nxp_c45_hwtstamp()
1056 data->ptp_enable(phydev, false); in nxp_c45_hwtstamp()
1059 if (nxp_c45_poll_txts(priv->phydev)) in nxp_c45_hwtstamp()
1062 if (priv->hwts_tx) in nxp_c45_hwtstamp()
1063 nxp_c45_set_reg_field(phydev, &data->regmap->irq_egr_ts_en); in nxp_c45_hwtstamp()
1065 nxp_c45_clear_reg_field(phydev, &data->regmap->irq_egr_ts_en); in nxp_c45_hwtstamp()
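From userspace these settings are reached through the standard SIOCSHWTSTAMP ioctl. A minimal sketch follows, assuming sock is any open socket (for example an AF_INET datagram socket) and the interface name is a placeholder; it requests the one RX filter the handler above reports back, HWTSTAMP_FILTER_PTP_V2_L2_EVENT.

#include <string.h>
#include <sys/ioctl.h>
#include <sys/socket.h>
#include <net/if.h>
#include <linux/net_tstamp.h>
#include <linux/sockios.h>

/* Enable TX timestamping and PTPv2-over-L2 RX filtering on ifname. */
static int enable_phy_timestamping(int sock, const char *ifname)
{
	struct hwtstamp_config cfg = {
		.tx_type   = HWTSTAMP_TX_ON,
		.rx_filter = HWTSTAMP_FILTER_PTP_V2_L2_EVENT,
	};
	struct ifreq ifr;

	memset(&ifr, 0, sizeof(ifr));
	strncpy(ifr.ifr_name, ifname, IFNAMSIZ - 1);
	ifr.ifr_data = (char *)&cfg;

	return ioctl(sock, SIOCSHWTSTAMP, &ifr);
}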
1077 ts_info->so_timestamping = SOF_TIMESTAMPING_TX_HARDWARE | in nxp_c45_ts_info()
1080 ts_info->phc_index = ptp_clock_index(priv->ptp_clock); in nxp_c45_ts_info()
1081 ts_info->tx_types = (1 << HWTSTAMP_TX_OFF) | (1 << HWTSTAMP_TX_ON); in nxp_c45_ts_info()
1082 ts_info->rx_filters = (1 << HWTSTAMP_FILTER_NONE) | in nxp_c45_ts_info()
1139 return ARRAY_SIZE(common_hw_stats) + (phy_data ? phy_data->n_stats : 0); in nxp_c45_get_sset_count()
1154 idx = i - ARRAY_SIZE(common_hw_stats); in nxp_c45_get_strings()
1155 ethtool_puts(&data, phy_data->stats[idx].name); in nxp_c45_get_strings()
1173 idx = i - ARRAY_SIZE(common_hw_stats); in nxp_c45_get_stats()
1174 reg_field = &phy_data->stats[idx].counter; in nxp_c45_get_stats()
1212 if (phydev->interrupts == PHY_INTERRUPT_ENABLED) { in nxp_c45_config_intr()
1248 if (phydev->interrupts == PHY_INTERRUPT_ENABLED) in tja1120_config_intr()
1265 struct nxp_c45_phy *priv = phydev->priv; in nxp_c45_handle_interrupt()
1278 irq = nxp_c45_read_reg_field(phydev, &data->regmap->irq_egr_ts_status); in nxp_c45_handle_interrupt()
1280 /* If ack_ptp_irq is false, the IRQ bit is self-clear and will in nxp_c45_handle_interrupt()
1284 if (data->ack_ptp_irq) in nxp_c45_handle_interrupt()
1287 while (data->get_egressts(priv, &hwts)) in nxp_c45_handle_interrupt()
1293 data->nmi_handler(phydev, &ret); in nxp_c45_handle_interrupt()
1322 return phy_set_bits_mmd(phydev, MDIO_MMD_VEND1, regmap->cable_test, in nxp_c45_cable_test_start()
1333 ret = nxp_c45_read_reg_field(phydev, &regmap->cable_test_valid); in nxp_c45_cable_test_get_status()
1341 &regmap->cable_test_result); in nxp_c45_cable_test_get_status()
1361 phy_clear_bits_mmd(phydev, MDIO_MMD_VEND1, regmap->cable_test, in nxp_c45_cable_test_get_status()
1375 return -EINVAL; in nxp_c45_get_sqi()
1387 if (phydev->state == PHY_NOLINK) { in tja1120_link_change_notify()
1404 return -EINVAL; in nxp_c45_check_delay()
1409 return -EINVAL; in nxp_c45_check_delay()
1422 data->counters_enable(phydev); in nxp_c45_counters_enable()
1430 data->regmap->vend1_ptp_clk_period, in nxp_c45_ptp_init()
1431 data->ptp_clk_period); in nxp_c45_ptp_init()
1432 nxp_c45_clear_reg_field(phydev, &data->regmap->ltc_lock_ctrl); in nxp_c45_ptp_init()
1434 data->ptp_init(phydev); in nxp_c45_ptp_init()
1444 phase_offset_raw -= 738; in nxp_c45_get_phase_shift()
1456 struct nxp_c45_phy *priv = phydev->priv; in nxp_c45_set_delays()
1457 u64 tx_delay = priv->tx_delay; in nxp_c45_set_delays()
1458 u64 rx_delay = priv->rx_delay; in nxp_c45_set_delays()
1461 if (phydev->interface == PHY_INTERFACE_MODE_RGMII_ID || in nxp_c45_set_delays()
1462 phydev->interface == PHY_INTERFACE_MODE_RGMII_TXID) { in nxp_c45_set_delays()
1471 if (phydev->interface == PHY_INTERFACE_MODE_RGMII_ID || in nxp_c45_set_delays()
1472 phydev->interface == PHY_INTERFACE_MODE_RGMII_RXID) { in nxp_c45_set_delays()
1484 struct nxp_c45_phy *priv = phydev->priv; in nxp_c45_get_delays()
1487 if (phydev->interface == PHY_INTERFACE_MODE_RGMII_ID || in nxp_c45_get_delays()
1488 phydev->interface == PHY_INTERFACE_MODE_RGMII_TXID) { in nxp_c45_get_delays()
1489 ret = device_property_read_u32(&phydev->mdio.dev, in nxp_c45_get_delays()
1490 "tx-internal-delay-ps", in nxp_c45_get_delays()
1491 &priv->tx_delay); in nxp_c45_get_delays()
1493 priv->tx_delay = DEFAULT_ID_PS; in nxp_c45_get_delays()
1495 ret = nxp_c45_check_delay(phydev, priv->tx_delay); in nxp_c45_get_delays()
1498 "tx-internal-delay-ps invalid value\n"); in nxp_c45_get_delays()
1503 if (phydev->interface == PHY_INTERFACE_MODE_RGMII_ID || in nxp_c45_get_delays()
1504 phydev->interface == PHY_INTERFACE_MODE_RGMII_RXID) { in nxp_c45_get_delays()
1505 ret = device_property_read_u32(&phydev->mdio.dev, in nxp_c45_get_delays()
1506 "rx-internal-delay-ps", in nxp_c45_get_delays()
1507 &priv->rx_delay); in nxp_c45_get_delays()
1509 priv->rx_delay = DEFAULT_ID_PS; in nxp_c45_get_delays()
1511 ret = nxp_c45_check_delay(phydev, priv->rx_delay); in nxp_c45_get_delays()
1514 "rx-internal-delay-ps invalid value\n"); in nxp_c45_get_delays()
1524 struct nxp_c45_phy *priv = phydev->priv; in nxp_c45_set_phy_mode()
1531 switch (phydev->interface) { in nxp_c45_set_phy_mode()
1535 return -EINVAL; in nxp_c45_set_phy_mode()
1545 phydev_err(phydev, "rgmii-id, rgmii-txid, rgmii-rxid modes are not supported\n"); in nxp_c45_set_phy_mode()
1546 return -EINVAL; in nxp_c45_set_phy_mode()
1559 return -EINVAL; in nxp_c45_set_phy_mode()
1566 phydev_err(phydev, "rev-mii mode not supported\n"); in nxp_c45_set_phy_mode()
1567 return -EINVAL; in nxp_c45_set_phy_mode()
1574 phydev_err(phydev, "rmii mode not supported\n"); in nxp_c45_set_phy_mode()
1575 return -EINVAL; in nxp_c45_set_phy_mode()
1581 if (priv->flags & TJA11XX_REVERSE_MODE) in nxp_c45_set_phy_mode()
1590 return -EINVAL; in nxp_c45_set_phy_mode()
1598 return -EINVAL; in nxp_c45_set_phy_mode()
1677 if (phy_id_compare(phydev->phy_id, PHY_ID_TJA_1120, GENMASK(31, 4))) in nxp_c45_config_init()
1687 phydev->autoneg = AUTONEG_DISABLE; in nxp_c45_config_init()
1700 linkmode_set_bit(ETHTOOL_LINK_MODE_TP_BIT, phydev->supported); in nxp_c45_get_features()
1701 linkmode_set_bit(ETHTOOL_LINK_MODE_MII_BIT, phydev->supported); in nxp_c45_get_features()
1708 struct device_node *node = phydev->mdio.dev.of_node; in nxp_c45_parse_dt()
1709 struct nxp_c45_phy *priv = phydev->priv; in nxp_c45_parse_dt()
1714 if (of_property_read_bool(node, "nxp,rmii-refclk-out")) in nxp_c45_parse_dt()
1715 priv->flags |= TJA11XX_REVERSE_MODE; in nxp_c45_parse_dt()
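A short note on the devicetree property handled here, which is also the term this listing was searched for:

/* "nxp,rmii-refclk-out" is a boolean property; when present the driver sets
 * TJA11XX_REVERSE_MODE, and the RMII handling in nxp_c45_set_phy_mode()
 * above uses that flag to select the reversed RMII configuration, i.e. the
 * PHY drives the RMII reference clock instead of consuming it. This reading
 * is inferred from the property name and the flag name; the tja11xx
 * devicetree binding is the authoritative description.
 */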
1728 priv = devm_kzalloc(&phydev->mdio.dev, sizeof(*priv), GFP_KERNEL); in nxp_c45_probe()
1730 return -ENOMEM; in nxp_c45_probe()
1732 skb_queue_head_init(&priv->tx_queue); in nxp_c45_probe()
1733 skb_queue_head_init(&priv->rx_queue); in nxp_c45_probe()
1735 priv->phydev = phydev; in nxp_c45_probe()
1737 phydev->priv = priv; in nxp_c45_probe()
1741 mutex_init(&priv->ptp_lock); in nxp_c45_probe()
1753 priv->mii_ts.rxtstamp = nxp_c45_rxtstamp; in nxp_c45_probe()
1754 priv->mii_ts.txtstamp = nxp_c45_txtstamp; in nxp_c45_probe()
1755 priv->mii_ts.hwtstamp = nxp_c45_hwtstamp; in nxp_c45_probe()
1756 priv->mii_ts.ts_info = nxp_c45_ts_info; in nxp_c45_probe()
1757 phydev->mii_ts = &priv->mii_ts; in nxp_c45_probe()
1761 phydev->default_timestamp = true; in nxp_c45_probe()
1787 struct nxp_c45_phy *priv = phydev->priv; in nxp_c45_remove()
1789 if (priv->ptp_clock) in nxp_c45_remove()
1790 ptp_clock_unregister(priv->ptp_clock); in nxp_c45_remove()
1792 skb_queue_purge(&priv->tx_queue); in nxp_c45_remove()
1793 skb_queue_purge(&priv->rx_queue); in nxp_c45_remove()
2089 MODULE_AUTHOR("Radu Pirea <radu-[email protected]>");