/*
 * Copyright (c) 2006-2018, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 */
#ifndef __PMU_H__
#define __PMU_H__

#include "board.h"

/* Number of counters */
#define ARM_PMU_CNTER_NR 4

enum rt_hw_pmu_event_type {
    ARM_PMU_EVENT_PMNC_SW_INCR     = 0x00,
    ARM_PMU_EVENT_L1_ICACHE_REFILL = 0x01,
    ARM_PMU_EVENT_ITLB_REFILL      = 0x02,
    ARM_PMU_EVENT_L1_DCACHE_REFILL = 0x03,
    ARM_PMU_EVENT_L1_DCACHE_ACCESS = 0x04,
    ARM_PMU_EVENT_DTLB_REFILL      = 0x05,
    ARM_PMU_EVENT_MEM_READ         = 0x06,
    ARM_PMU_EVENT_MEM_WRITE        = 0x07,
    ARM_PMU_EVENT_INSTR_EXECUTED   = 0x08,
    ARM_PMU_EVENT_EXC_TAKEN        = 0x09,
    ARM_PMU_EVENT_EXC_EXECUTED     = 0x0A,
    ARM_PMU_EVENT_CID_WRITE        = 0x0B,
};

/* Enable bit */
#define ARM_PMU_PMCR_E (0x01 << 0)
/* Event counter reset */
#define ARM_PMU_PMCR_P (0x01 << 1)
/* Cycle counter reset */
#define ARM_PMU_PMCR_C (0x01 << 2)
/* Cycle counter divider */
#define ARM_PMU_PMCR_D (0x01 << 3)

#ifdef __GNUC__
rt_inline void rt_hw_pmu_enable_cnt(int divide64)
{
    unsigned long pmcr;
    unsigned long pmcntenset;

    /* Read-modify-write PMCR: enable the PMU and reset all counters. */
    asm volatile ("mrc p15, 0, %0, c9, c12, 0" : "=r"(pmcr));
    pmcr |= ARM_PMU_PMCR_E | ARM_PMU_PMCR_P | ARM_PMU_PMCR_C;
    if (divide64)
        pmcr |= ARM_PMU_PMCR_D;
    else
        pmcr &= ~ARM_PMU_PMCR_D;
    asm volatile ("mcr p15, 0, %0, c9, c12, 0" :: "r"(pmcr));

    /* Enable all the counters via PMCNTENSET. */
    pmcntenset = ~0;
    asm volatile ("mcr p15, 0, %0, c9, c12, 1" :: "r"(pmcntenset));
    /* Clear any pending overflow flags in PMOVSR (just in case). */
    asm volatile ("mcr p15, 0, %0, c9, c12, 3" :: "r"(pmcntenset));
}

rt_inline unsigned long rt_hw_pmu_get_control(void)
{
    unsigned long pmcr;
    asm ("mrc p15, 0, %0, c9, c12, 0" : "=r"(pmcr));
    return pmcr;
}

rt_inline unsigned long rt_hw_pmu_get_ceid(void)
{
    unsigned long reg;
    /* Only PMCEID0 is read; PMCEID1 is RAZ. */
    asm ("mrc p15, 0, %0, c9, c12, 6" : "=r"(reg));
    return reg;
}

rt_inline unsigned long rt_hw_pmu_get_cnten(void)
{
    unsigned long pmcnt;
    asm ("mrc p15, 0, %0, c9, c12, 1" : "=r"(pmcnt));
    return pmcnt;
}

rt_inline void rt_hw_pmu_reset_cycle(void)
{
    unsigned long pmcr;

    asm volatile ("mrc p15, 0, %0, c9, c12, 0" : "=r"(pmcr));
    pmcr |= ARM_PMU_PMCR_C;
    asm volatile ("mcr p15, 0, %0, c9, c12, 0" :: "r"(pmcr));
    asm volatile ("isb");
}

rt_inline void rt_hw_pmu_reset_event(void)
{
    unsigned long pmcr;

    asm volatile ("mrc p15, 0, %0, c9, c12, 0" : "=r"(pmcr));
    pmcr |= ARM_PMU_PMCR_P;
    asm volatile ("mcr p15, 0, %0, c9, c12, 0" :: "r"(pmcr));
    asm volatile ("isb");
}

rt_inline unsigned long rt_hw_pmu_get_cycle(void)
{
    unsigned long cyc;
    asm volatile ("isb");
    /* Read the cycle counter (PMCCNTR). */
    asm volatile ("mrc p15, 0, %0, c9, c13, 0" : "=r"(cyc));
    return cyc;
}

rt_inline void rt_hw_pmu_select_counter(int idx)
{
    RT_ASSERT(idx < ARM_PMU_CNTER_NR);

    asm volatile ("mcr p15, 0, %0, c9, c12, 5" : : "r"(idx));
    /* Linux adds an isb here, presumably so that later accesses use the
     * newly selected counter; keep it. */
    asm volatile ("isb");
}

rt_inline void rt_hw_pmu_select_event(int idx,
                                      enum rt_hw_pmu_event_type eve)
{
    RT_ASSERT(idx < ARM_PMU_CNTER_NR);

    rt_hw_pmu_select_counter(idx);
    /* Write the event type for the selected counter (PMXEVTYPER). */
    asm volatile ("mcr p15, 0, %0, c9, c13, 1" : : "r"(eve));
}

rt_inline unsigned long rt_hw_pmu_read_counter(int idx)
{
    unsigned long reg;

    rt_hw_pmu_select_counter(idx);
    asm volatile ("isb");
    /* Read the selected event counter (PMXEVCNTR). */
    asm volatile ("mrc p15, 0, %0, c9, c13, 2" : "=r"(reg));
    return reg;
}

rt_inline unsigned long rt_hw_pmu_get_ovsr(void)
{
    unsigned long reg;
    asm volatile ("isb");
    asm ("mrc p15, 0, %0, c9, c12, 3" : "=r"(reg));
    return reg;
}

rt_inline void rt_hw_pmu_clear_ovsr(unsigned long reg)
{
    /* Write 1s to PMOVSR to clear the corresponding overflow flags. */
    asm ("mcr p15, 0, %0, c9, c12, 3" : : "r"(reg));
    asm volatile ("isb");
}
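
/*
 * Illustrative helper (a sketch added for documentation, not part of the
 * original API): count a single PMU event on counter 0 around a
 * caller-supplied workload, using only the helpers defined above. The
 * helper name and the callback parameter are assumptions made here.
 */
rt_inline unsigned long rt_hw_pmu_example_count(enum rt_hw_pmu_event_type eve,
                                                void (*workload)(void))
{
    rt_hw_pmu_enable_cnt(0);            /* enable PMU, no /64 cycle divider */
    rt_hw_pmu_select_event(0, eve);     /* bind the event to counter 0 */
    rt_hw_pmu_reset_event();            /* start event counters from zero */
    workload();                         /* run the code under measurement */
    return rt_hw_pmu_read_counter(0);   /* events observed by counter 0 */
}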

#endif /* __GNUC__ */

void rt_hw_pmu_dump_feature(void);

#endif /* end of include guard: __PMU_H__ */