Lines matching full:pmu
51 struct pmu pmu; member
88 return container_of(event->pmu, struct amd_uncore_pmu, pmu); in event_to_amd_uncore_pmu()
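Lines 51 and 88 show the embedding pattern behind every other match: struct amd_uncore_pmu carries the generic struct pmu as a member, and event_to_amd_uncore_pmu() walks back from event->pmu to the containing driver structure with container_of(). A minimal, self-contained sketch of that round trip (plain C with the kernel's container_of() written out; the extra field on the outer struct is illustrative):

#include <stddef.h>
#include <stdio.h>

/*
 * container_of(): recover the address of the outer structure from a pointer
 * to one of its members, which is exactly what the kernel macro does.
 */
#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct pmu {                            /* stand-in for the generic perf struct pmu */
        const char *name;
};

struct amd_uncore_pmu {                 /* driver structure embedding struct pmu (line 51) */
        int num_counters;               /* illustrative field */
        struct pmu pmu;
};

/* Mirrors event_to_amd_uncore_pmu() at line 88. */
static struct amd_uncore_pmu *to_uncore_pmu(struct pmu *p)
{
        return container_of(p, struct amd_uncore_pmu, pmu);
}

int main(void)
{
        struct amd_uncore_pmu u = {
                .num_counters = 4,
                .pmu = { .name = "amd_df" },
        };
        struct pmu *generic = &u.pmu;   /* what event->pmu would hold */

        printf("%s has %d counters\n", generic->name,
               to_uncore_pmu(generic)->num_counters);
        return 0;
}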
139 event->pmu->read(event); in amd_uncore_stop()
147 struct amd_uncore_pmu *pmu = event_to_amd_uncore_pmu(event); in amd_uncore_add() local
148 struct amd_uncore_ctx *ctx = *per_cpu_ptr(pmu->ctx, event->cpu); in amd_uncore_add()
155 for (i = 0; i < pmu->num_counters; i++) { in amd_uncore_add()
164 for (i = 0; i < pmu->num_counters; i++) { in amd_uncore_add()
177 hwc->config_base = pmu->msr_base + (2 * hwc->idx); in amd_uncore_add()
178 hwc->event_base = pmu->msr_base + 1 + (2 * hwc->idx); in amd_uncore_add()
179 hwc->event_base_rdpmc = pmu->rdpmc_base + hwc->idx; in amd_uncore_add()
182 if (pmu->rdpmc_base < 0) in amd_uncore_add()
186 event->pmu->start(event, PERF_EF_RELOAD); in amd_uncore_add()
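The amd_uncore_add() matches at 177-182 spell out the register layout implied by msr_base and rdpmc_base: control and count MSRs are interleaved, so counter idx programs msr_base + 2*idx and reads msr_base + 2*idx + 1, while its RDPMC index is a flat rdpmc_base + idx, and a negative rdpmc_base marks a counter that cannot be read with RDPMC at all. A small sketch of that arithmetic (the EXAMPLE_* values are placeholders, not the driver's MSR constants):

#include <stdio.h>

/* Placeholder bases; the driver uses MSR_F15H_NB_PERF_CTL, MSR_F16H_L2I_PERF_CTL
 * or MSR_F19H_UMC_PERF_CTL and RDPMC_BASE_NB / RDPMC_BASE_LLC. */
#define EXAMPLE_MSR_BASE        0xc0010240u
#define EXAMPLE_RDPMC_BASE      6

int main(void)
{
        for (int idx = 0; idx < 4; idx++) {
                unsigned int ctl = EXAMPLE_MSR_BASE + 2 * idx;          /* hwc->config_base      */
                unsigned int ctr = EXAMPLE_MSR_BASE + 1 + 2 * idx;      /* hwc->event_base       */
                int rdpmc        = EXAMPLE_RDPMC_BASE + idx;            /* hwc->event_base_rdpmc */

                printf("counter %d: ctl=%#x ctr=%#x rdpmc=%d\n",
                       idx, ctl, ctr, rdpmc);
        }
        return 0;
}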
194 struct amd_uncore_pmu *pmu = event_to_amd_uncore_pmu(event); in amd_uncore_del() local
195 struct amd_uncore_ctx *ctx = *per_cpu_ptr(pmu->ctx, event->cpu); in amd_uncore_del()
198 event->pmu->stop(event, PERF_EF_UPDATE); in amd_uncore_del()
200 for (i = 0; i < pmu->num_counters; i++) { in amd_uncore_del()
212 struct amd_uncore_pmu *pmu; in amd_uncore_event_init() local
216 if (event->attr.type != event->pmu->type) in amd_uncore_event_init()
222 pmu = event_to_amd_uncore_pmu(event); in amd_uncore_event_init()
223 ctx = *per_cpu_ptr(pmu->ctx, event->cpu); in amd_uncore_event_init()
264 struct pmu *ptr = dev_get_drvdata(dev); in amd_uncore_attr_show_cpumask()
265 struct amd_uncore_pmu *pmu = container_of(ptr, struct amd_uncore_pmu, pmu); in amd_uncore_attr_show_cpumask() local
267 return cpumap_print_to_pagebuf(true, buf, &pmu->active_mask); in amd_uncore_attr_show_cpumask()
417 struct amd_uncore_pmu *pmu; in amd_uncore_ctx_free() local
425 pmu = &uncore->pmus[i]; in amd_uncore_ctx_free()
426 ctx = *per_cpu_ptr(pmu->ctx, cpu); in amd_uncore_ctx_free()
431 cpumask_clear_cpu(cpu, &pmu->active_mask); in amd_uncore_ctx_free()
438 *per_cpu_ptr(pmu->ctx, cpu) = NULL; in amd_uncore_ctx_free()
445 struct amd_uncore_pmu *pmu; in amd_uncore_ctx_init() local
455 pmu = &uncore->pmus[i]; in amd_uncore_ctx_init()
456 *per_cpu_ptr(pmu->ctx, cpu) = NULL; in amd_uncore_ctx_init()
460 if (gid != pmu->group) in amd_uncore_ctx_init()
468 prev = *per_cpu_ptr(pmu->ctx, j); in amd_uncore_ctx_init()
487 pmu->num_counters, in amd_uncore_ctx_init()
494 cpumask_set_cpu(cpu, &pmu->active_mask); in amd_uncore_ctx_init()
498 *per_cpu_ptr(pmu->ctx, cpu) = curr; in amd_uncore_ctx_init()
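The amd_uncore_ctx_init() matches at 445-498 trace a find-or-allocate scheme: every CPU gets a per-CPU context pointer, but CPUs in the same uncore group (same gid) share one context, so a CPU first looks for a sibling that has already set one up before allocating its own. A toy userspace model of that sharing (the refcnt field and the static topology table are assumptions made for illustration, not fields shown in the matches):

#include <stdlib.h>
#include <stdio.h>

#define NR_CPUS 8

/* Toy stand-in for struct amd_uncore_ctx: one context per uncore group,
 * shared by every CPU in that group.  refcnt is assumed for illustration. */
struct toy_ctx {
        int refcnt;
        int group;
};

static struct toy_ctx *cpu_ctx[NR_CPUS];                          /* models *per_cpu_ptr(pmu->ctx, cpu) */
static const int cpu_group[NR_CPUS] = { 0, 0, 0, 0, 1, 1, 1, 1 }; /* assumed topology */

/* Mirrors the shape of amd_uncore_ctx_init(): reuse a sibling CPU's context
 * for the same group if one exists, otherwise allocate a fresh one. */
static struct toy_ctx *ctx_init_cpu(int cpu)
{
        struct toy_ctx *curr;

        for (int j = 0; j < NR_CPUS; j++) {
                struct toy_ctx *prev = cpu_ctx[j];

                if (prev && prev->group == cpu_group[cpu]) {
                        prev->refcnt++;
                        return prev;
                }
        }

        curr = calloc(1, sizeof(*curr));
        if (!curr)
                return NULL;
        curr->refcnt = 1;
        curr->group = cpu_group[cpu];
        return curr;
}

int main(void)
{
        for (int cpu = 0; cpu < NR_CPUS; cpu++)
                cpu_ctx[cpu] = ctx_init_cpu(cpu);

        printf("CPUs 0 and 3 share a context: %s (refcnt %d)\n",
               cpu_ctx[0] == cpu_ctx[3] ? "yes" : "no", cpu_ctx[0]->refcnt);
        return 0;
}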
512 struct amd_uncore_pmu *pmu; in amd_uncore_ctx_move() local
519 pmu = &uncore->pmus[i]; in amd_uncore_ctx_move()
520 curr = *per_cpu_ptr(pmu->ctx, cpu); in amd_uncore_ctx_move()
526 next = *per_cpu_ptr(pmu->ctx, j); in amd_uncore_ctx_move()
531 perf_pmu_migrate_context(&pmu->pmu, cpu, j); in amd_uncore_ctx_move()
532 cpumask_clear_cpu(cpu, &pmu->active_mask); in amd_uncore_ctx_move()
533 cpumask_set_cpu(j, &pmu->active_mask); in amd_uncore_ctx_move()
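amd_uncore_ctx_move() (512-533) is the hotplug hand-off: when the CPU currently carrying a group's events goes offline, the events migrate to another online CPU of the same group and active_mask is updated so the cpumask attribute at 264-267 keeps naming a live CPU. A compressed kernel-context sketch of just that hand-off (find_group_sibling() is a hypothetical helper; perf_pmu_migrate_context() and the cpumask helpers are the real interfaces):

/*
 * Kernel-context sketch: hand a PMU's events from an offlining CPU to a
 * sibling in the same uncore group.  find_group_sibling() is hypothetical.
 */
static void uncore_move_events(struct amd_uncore_pmu *pmu, unsigned int cpu)
{
        unsigned int target = find_group_sibling(pmu, cpu);     /* hypothetical lookup */

        if (target >= nr_cpu_ids)                               /* no online sibling left */
                return;

        perf_pmu_migrate_context(&pmu->pmu, cpu, target);
        cpumask_clear_cpu(cpu, &pmu->active_mask);
        cpumask_set_cpu(target, &pmu->active_mask);
}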
660 struct amd_uncore_pmu *pmu; in amd_uncore_df_ctx_init() local
681 pmu = &uncore->pmus[0]; in amd_uncore_df_ctx_init()
682 strscpy(pmu->name, boot_cpu_data.x86 >= 0x17 ? "amd_df" : "amd_nb", in amd_uncore_df_ctx_init()
683 sizeof(pmu->name)); in amd_uncore_df_ctx_init()
684 pmu->num_counters = num_counters; in amd_uncore_df_ctx_init()
685 pmu->msr_base = MSR_F15H_NB_PERF_CTL; in amd_uncore_df_ctx_init()
686 pmu->rdpmc_base = RDPMC_BASE_NB; in amd_uncore_df_ctx_init()
687 pmu->group = amd_uncore_ctx_gid(uncore, cpu); in amd_uncore_df_ctx_init()
696 pmu->ctx = alloc_percpu(struct amd_uncore_ctx *); in amd_uncore_df_ctx_init()
697 if (!pmu->ctx) in amd_uncore_df_ctx_init()
700 pmu->pmu = (struct pmu) { in amd_uncore_df_ctx_init()
703 .name = pmu->name, in amd_uncore_df_ctx_init()
714 if (perf_pmu_register(&pmu->pmu, pmu->pmu.name, -1)) { in amd_uncore_df_ctx_init()
715 free_percpu(pmu->ctx); in amd_uncore_df_ctx_init()
716 pmu->ctx = NULL; in amd_uncore_df_ctx_init()
720 pr_info("%d %s%s counters detected\n", pmu->num_counters, in amd_uncore_df_ctx_init()
722 pmu->pmu.name); in amd_uncore_df_ctx_init()
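The three *_ctx_init() blocks (DF/NB above, L3/L2 at 794-858, UMC at 918-983) repeat one registration pattern: describe the counters, allocate the per-CPU context pointer, fill the embedded struct pmu, and register it with type -1 so perf assigns a dynamic PMU type. A trimmed kernel-context sketch of that pattern (the error codes and the omitted callbacks are illustrative; strscpy(), alloc_percpu(), perf_pmu_register() and free_percpu() are the real interfaces):

/*
 * Kernel-context sketch of the per-PMU registration pattern traced above.
 */
static int uncore_pmu_register(struct amd_uncore_pmu *pmu, const char *name,
                               int num_counters, u32 msr_base, int rdpmc_base)
{
        strscpy(pmu->name, name, sizeof(pmu->name));
        pmu->num_counters = num_counters;
        pmu->msr_base     = msr_base;
        pmu->rdpmc_base   = rdpmc_base;         /* negative disables RDPMC, see line 182 */

        pmu->ctx = alloc_percpu(struct amd_uncore_ctx *);
        if (!pmu->ctx)
                return -ENOMEM;

        pmu->pmu = (struct pmu) {
                .task_ctx_nr    = perf_invalid_context,
                .name           = pmu->name,
                /* .event_init/.add/.del/.start/.stop/.read would point at the
                 * amd_uncore_* callbacks matched earlier. */
        };

        /* type -1: let perf allocate a dynamic PMU type id. */
        if (perf_pmu_register(&pmu->pmu, pmu->pmu.name, -1)) {
                free_percpu(pmu->ctx);
                pmu->ctx = NULL;
                return -ENODEV;
        }

        return 0;
}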
794 struct amd_uncore_pmu *pmu; in amd_uncore_l3_ctx_init() local
815 pmu = &uncore->pmus[0]; in amd_uncore_l3_ctx_init()
816 strscpy(pmu->name, boot_cpu_data.x86 >= 0x17 ? "amd_l3" : "amd_l2", in amd_uncore_l3_ctx_init()
817 sizeof(pmu->name)); in amd_uncore_l3_ctx_init()
818 pmu->num_counters = num_counters; in amd_uncore_l3_ctx_init()
819 pmu->msr_base = MSR_F16H_L2I_PERF_CTL; in amd_uncore_l3_ctx_init()
820 pmu->rdpmc_base = RDPMC_BASE_LLC; in amd_uncore_l3_ctx_init()
821 pmu->group = amd_uncore_ctx_gid(uncore, cpu); in amd_uncore_l3_ctx_init()
831 pmu->ctx = alloc_percpu(struct amd_uncore_ctx *); in amd_uncore_l3_ctx_init()
832 if (!pmu->ctx) in amd_uncore_l3_ctx_init()
835 pmu->pmu = (struct pmu) { in amd_uncore_l3_ctx_init()
839 .name = pmu->name, in amd_uncore_l3_ctx_init()
850 if (perf_pmu_register(&pmu->pmu, pmu->pmu.name, -1)) { in amd_uncore_l3_ctx_init()
851 free_percpu(pmu->ctx); in amd_uncore_l3_ctx_init()
852 pmu->ctx = NULL; in amd_uncore_l3_ctx_init()
856 pr_info("%d %s%s counters detected\n", pmu->num_counters, in amd_uncore_l3_ctx_init()
858 pmu->pmu.name); in amd_uncore_l3_ctx_init()
918 struct amd_uncore_pmu *pmu; in amd_uncore_umc_ctx_init() local
951 pmu = &uncore->pmus[index]; in amd_uncore_umc_ctx_init()
952 snprintf(pmu->name, sizeof(pmu->name), "amd_umc_%hu", index); in amd_uncore_umc_ctx_init()
953 pmu->num_counters = group_num_pmcs[gid] / group_num_pmus[gid]; in amd_uncore_umc_ctx_init()
954 pmu->msr_base = MSR_F19H_UMC_PERF_CTL + i * pmu->num_counters * 2; in amd_uncore_umc_ctx_init()
955 pmu->rdpmc_base = -1; in amd_uncore_umc_ctx_init()
956 pmu->group = gid; in amd_uncore_umc_ctx_init()
958 pmu->ctx = alloc_percpu(struct amd_uncore_ctx *); in amd_uncore_umc_ctx_init()
959 if (!pmu->ctx) in amd_uncore_umc_ctx_init()
962 pmu->pmu = (struct pmu) { in amd_uncore_umc_ctx_init()
965 .name = pmu->name, in amd_uncore_umc_ctx_init()
976 if (perf_pmu_register(&pmu->pmu, pmu->pmu.name, -1)) { in amd_uncore_umc_ctx_init()
977 free_percpu(pmu->ctx); in amd_uncore_umc_ctx_init()
978 pmu->ctx = NULL; in amd_uncore_umc_ctx_init()
982 pr_info("%d %s counters detected\n", pmu->num_counters, in amd_uncore_umc_ctx_init()
983 pmu->pmu.name); in amd_uncore_umc_ctx_init()
1094 struct amd_uncore_pmu *pmu; in amd_uncore_exit() local
1110 pmu = &uncore->pmus[j]; in amd_uncore_exit()
1111 if (!pmu->ctx) in amd_uncore_exit()
1114 perf_pmu_unregister(&pmu->pmu); in amd_uncore_exit()
1115 free_percpu(pmu->ctx); in amd_uncore_exit()
1116 pmu->ctx = NULL; in amd_uncore_exit()
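Teardown in amd_uncore_exit() (1094-1116) mirrors registration: a NULL ctx marks a PMU that never registered or already failed, so it is skipped; otherwise the PMU is unregistered from perf before its per-CPU context pointer is freed. A minimal kernel-context sketch of that per-PMU cleanup, matching the registration sketch above:

/* Kernel-context sketch: undo the registration above for one PMU. */
static void uncore_pmu_unregister(struct amd_uncore_pmu *pmu)
{
        if (!pmu->ctx)                          /* never registered, nothing to undo */
                return;

        perf_pmu_unregister(&pmu->pmu);         /* detach from perf first ...        */
        free_percpu(pmu->ctx);                  /* ... then release the per-CPU slot */
        pmu->ctx = NULL;
}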