Lines matching "fw-cfg" (tokenized as +full:fw +full:- +full:cfg) in drivers/iommu/arm/arm-smmu/arm-smmu.h
1 /* SPDX-License-Identifier: GPL-2.0-only */
18 #include <linux/io-64-nonatomic-hi-lo.h>
19 #include <linux/io-pgtable.h>
369 	struct arm_smmu_cfg		*cfg;
383 	struct arm_smmu_cfg		cfg;
395 static inline u32 arm_smmu_lpae_tcr(const struct io_pgtable_cfg *cfg)
397 	u32 tcr = FIELD_PREP(ARM_SMMU_TCR_TG0, cfg->arm_lpae_s1_cfg.tcr.tg) |
398 		  FIELD_PREP(ARM_SMMU_TCR_SH0, cfg->arm_lpae_s1_cfg.tcr.sh) |
399 		  FIELD_PREP(ARM_SMMU_TCR_ORGN0, cfg->arm_lpae_s1_cfg.tcr.orgn) |
400 		  FIELD_PREP(ARM_SMMU_TCR_IRGN0, cfg->arm_lpae_s1_cfg.tcr.irgn) |
401 		  FIELD_PREP(ARM_SMMU_TCR_T0SZ, cfg->arm_lpae_s1_cfg.tcr.tsz);
407 	if (cfg->quirks & IO_PGTABLE_QUIRK_ARM_TTBR1) {
416 static inline u32 arm_smmu_lpae_tcr2(const struct io_pgtable_cfg *cfg)
418 	return FIELD_PREP(ARM_SMMU_TCR2_PASIZE, cfg->arm_lpae_s1_cfg.tcr.ips) |
422 static inline u32 arm_smmu_lpae_vtcr(const struct io_pgtable_cfg *cfg)
425 	       FIELD_PREP(ARM_SMMU_VTCR_PS, cfg->arm_lpae_s2_cfg.vtcr.ps) |
426 	       FIELD_PREP(ARM_SMMU_VTCR_TG0, cfg->arm_lpae_s2_cfg.vtcr.tg) |
427 	       FIELD_PREP(ARM_SMMU_VTCR_SH0, cfg->arm_lpae_s2_cfg.vtcr.sh) |
428 	       FIELD_PREP(ARM_SMMU_VTCR_ORGN0, cfg->arm_lpae_s2_cfg.vtcr.orgn) |
429 	       FIELD_PREP(ARM_SMMU_VTCR_IRGN0, cfg->arm_lpae_s2_cfg.vtcr.irgn) |
430 	       FIELD_PREP(ARM_SMMU_VTCR_SL0, cfg->arm_lpae_s2_cfg.vtcr.sl) |
431 	       FIELD_PREP(ARM_SMMU_VTCR_T0SZ, cfg->arm_lpae_s2_cfg.vtcr.tsz);
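These three helpers only pack io-pgtable fields into register layouts; the caller stores the results in the context-bank state. A minimal sketch of that usage follows — example_init_cb_tcr and the stage1 flag are illustrative assumptions, and struct arm_smmu_cb's tcr[] member is part of this header but not among the matched lines.

static void example_init_cb_tcr(struct arm_smmu_cb *cb,
				const struct io_pgtable_cfg *pgtbl_cfg,
				bool stage1)
{
	if (stage1) {
		cb->tcr[0] = arm_smmu_lpae_tcr(pgtbl_cfg);	/* TG0/SH0/ORGN0/IRGN0/T0SZ */
		cb->tcr[1] = arm_smmu_lpae_tcr2(pgtbl_cfg);	/* PASIZE (output address size) */
	} else {
		cb->tcr[0] = arm_smmu_lpae_vtcr(pgtbl_cfg);	/* stage-2 VTCR */
	}
}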
445 struct io_pgtable_cfg *cfg, struct device *dev);
460 #define INVALID_SMENDX -1
461 #define cfg_smendx(cfg, fw, i) \
462 	(i >= fw->num_ids ? INVALID_SMENDX : cfg->smendx[i])
463 #define for_each_cfg_sme(cfg, fw, i, idx) \
464 	for (i = 0; idx = cfg_smendx(cfg, fw, i), i < fw->num_ids; ++i)
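A hedged usage sketch for the iterator: the comma expression assigns idx on every pass, and the i < fw->num_ids test both bounds the walk and keeps cfg_smendx() from indexing past the fwspec's ID list. The walker below is illustrative; struct arm_smmu_master_cfg (which carries the smendx[] array) lives elsewhere in this header.

static bool example_all_smes_valid(struct arm_smmu_master_cfg *cfg,
				   struct iommu_fwspec *fwspec)
{
	int i, idx;

	for_each_cfg_sme(cfg, fwspec, i, idx) {
		if (idx == INVALID_SMENDX)	/* no SMR/S2CR slot allocated yet */
			return false;
	}
	return true;
}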
473 		return -ENOSPC;
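Only the failure return of __arm_smmu_alloc_bitmap() matched the query. For context, a sketch of the usual pattern around it, reconstructed under the assumption that it is the standard find-and-set bitmap allocator (find_next_zero_bit() and test_and_set_bit() are the stock <linux/bitops.h> primitives):

static int example_alloc_bitmap(unsigned long *map, int start, int end)
{
	int idx;

	do {
		idx = find_next_zero_bit(map, end, start);
		if (idx == end)
			return -ENOSPC;		/* every slot already taken */
	} while (test_and_set_bit(idx, map));	/* raced with another CPU: retry */

	return idx;
}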
481 	return smmu->base + (n << smmu->pgshift);
486 	if (smmu->impl && unlikely(smmu->impl->read_reg))
487 		return smmu->impl->read_reg(smmu, page, offset);
494 	if (smmu->impl && unlikely(smmu->impl->write_reg))
495 		smmu->impl->write_reg(smmu, page, offset, val);
502 	if (smmu->impl && unlikely(smmu->impl->read_reg64))
503 		return smmu->impl->read_reg64(smmu, page, offset);
510 	if (smmu->impl && unlikely(smmu->impl->write_reg64))
511 		smmu->impl->write_reg64(smmu, page, offset, val);
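The unlikely(smmu->impl->...) tests give implementation quirks first refusal on every MMIO access; when no hook is installed, the accessors fall through to plain readl_relaxed()/writel_relaxed() on arm_smmu_page(smmu, page) + offset (those fallback lines did not match the query). A sketch of an override, assuming the read_reg hook signature declared by struct arm_smmu_impl in this header; the names below are illustrative.

static u32 example_read_reg(struct arm_smmu_device *smmu, int page, int offset)
{
	/* e.g. apply an erratum workaround before the real MMIO read */
	return readl_relaxed(arm_smmu_page(smmu, page) + offset);
}

static const struct arm_smmu_impl example_impl = {
	.read_reg = example_read_reg,	/* leave the other hooks NULL */
};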
518 #define ARM_SMMU_CB(s, n) ((s)->numpage + (n))
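ARM_SMMU_CB maps context bank n to a register page: the global register space occupies pages 0..numpage-1 and the context banks follow it, so bank n sits at page numpage + n. A hedged pairing with the accessors above — ARM_SMMU_CB_TCR is the TCR offset defined elsewhere in this header, while the helper name is an illustrative assumption:

static void example_write_cb_tcr(struct arm_smmu_device *smmu, int idx, u32 tcr)
{
	/* context bank idx lives in the upper half of the register space */
	arm_smmu_writel(smmu, ARM_SMMU_CB(smmu, idx), ARM_SMMU_CB_TCR, tcr);
}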