/* SPDX-License-Identifier: GPL-2.0 */
#include <asm-generic/vmlinux.lds.h>

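/*
 * Code and data that is only needed to bring CPUs up or take them down
 * must be kept when CPU hotplug is enabled; otherwise it may be
 * discarded.
 */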
#ifdef CONFIG_HOTPLUG_CPU
#define ARM_CPU_DISCARD(x)
#define ARM_CPU_KEEP(x)		x
#else
#define ARM_CPU_DISCARD(x)	x
#define ARM_CPU_KEEP(x)
#endif

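/*
 * EXIT_TEXT and EXIT_DATA can normally be discarded at link time, but
 * SMP_ON_UP fixups, the generic bug table and jump labels emit table
 * entries that may point into exit code, so in those configurations
 * the exit sections have to be kept in the image.
 */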
#if (defined(CONFIG_SMP_ON_UP) && !defined(CONFIG_DEBUG_SPINLOCK)) || \
	defined(CONFIG_GENERIC_BUG) || defined(CONFIG_JUMP_LABEL)
#define ARM_EXIT_KEEP(x)	x
#define ARM_EXIT_DISCARD(x)
#else
#define ARM_EXIT_KEEP(x)
#define ARM_EXIT_DISCARD(x)	x
#endif

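/*
 * The fault fixup code (.text.fixup) and the __ex_table entries that
 * point into it are only used by the MMU fault handling paths, so
 * !MMU builds discard them.
 */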
#ifdef CONFIG_MMU
#define ARM_MMU_KEEP(x)		x
#define ARM_MMU_DISCARD(x)
#else
#define ARM_MMU_KEEP(x)
#define ARM_MMU_DISCARD(x)	x
#endif

/*
 * ld.lld does not support NOCROSSREFS:
 * https://github.com/ClangBuiltLinux/linux/issues/1609
 */
#ifdef CONFIG_LD_IS_LLD
#define NOCROSSREFS
#endif

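/*
 * Only use KEEP() inside an OVERLAY description when the linker is
 * known to support it; older linkers do not accept it there.
 */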
#ifdef CONFIG_LD_CAN_USE_KEEP_IN_OVERLAY
#define OVERLAY_KEEP(x)		KEEP(x)
#else
#define OVERLAY_KEEP(x)		x
#endif

/* Set start/end symbol names to the LMA for the section */
#define ARM_LMA(sym, section)						\
	sym##_start = LOADADDR(section);				\
	sym##_end = LOADADDR(section) + SIZEOF(section)

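/*
 * Processor type records from .proc.info.init, bracketed by
 * __proc_info_begin/__proc_info_end for the boot-time processor
 * type lookup.
 */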
#define PROC_INFO							\
		. = ALIGN(4);						\
		__proc_info_begin = .;					\
		KEEP(*(.proc.info.init))				\
		__proc_info_end = .;

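/*
 * Code that has to run from the identity mapping, e.g. while the MMU
 * is being enabled or disabled, delimited by __idmap_text_start/_end.
 */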
#define IDMAP_TEXT							\
		ALIGN_FUNCTION();					\
		__idmap_text_start = .;					\
		*(.idmap.text)						\
		__idmap_text_end = .;

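/*
 * Input sections that are unconditionally dropped at link time, plus
 * the exit, CPU-exit unwind and MMU fixup sections when the
 * configuration above allows them to be discarded.
 */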
#define ARM_DISCARD							\
		*(.ARM.exidx.exit.text)					\
		*(.ARM.extab.exit.text)					\
		*(.ARM.exidx.text.exit)					\
		*(.ARM.extab.text.exit)					\
		ARM_CPU_DISCARD(*(.ARM.exidx.cpuexit.text))		\
		ARM_CPU_DISCARD(*(.ARM.extab.cpuexit.text))		\
		ARM_EXIT_DISCARD(EXIT_TEXT)				\
		ARM_EXIT_DISCARD(EXIT_DATA)				\
		EXIT_CALL						\
		ARM_MMU_DISCARD(*(.text.fixup))				\
		ARM_MMU_DISCARD(*(__ex_table))				\
		COMMON_DISCARDS

/*
 * Sections that should stay zero sized, which it is safer to check
 * explicitly than to discard blindly.
 */
#define ARM_ASSERTS							\
	.plt : {							\
		*(.iplt) *(.rel.iplt) *(.iplt) *(.igot.plt)		\
	}								\
	ASSERT(SIZEOF(.plt) == 0,					\
	       "Unexpected run-time procedure linkages detected!")

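/* Non-allocated ELF metadata plus the ARM build attributes section */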
#define ARM_DETAILS							\
		ELF_DETAILS						\
		.ARM.attributes 0 : { *(.ARM.attributes) }

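/*
 * Linker-generated glue and veneer sections (ARM/Thumb interworking
 * glue, VFP11 erratum and ARMv4 bx veneers) that must end up in .text.
 */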
#define ARM_STUBS_TEXT							\
		*(.gnu.warning)						\
		*(.glue_7)						\
		*(.glue_7t)						\
		*(.vfp11_veneer)					\
		*(.v4_bx)

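/*
 * Layout of the kernel text: identity-mapped and entry code first,
 * then the main text, the linker glue, the GOT and, with CPU hotplug,
 * the processor info records.
 */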
#define ARM_TEXT							\
		IDMAP_TEXT						\
		__entry_text_start = .;					\
		*(.entry.text)						\
		__entry_text_end = .;					\
		IRQENTRY_TEXT						\
		SOFTIRQENTRY_TEXT					\
		TEXT_TEXT						\
		SCHED_TEXT						\
		LOCK_TEXT						\
		KPROBES_TEXT						\
		ARM_STUBS_TEXT						\
		. = ALIGN(4);						\
		*(.got)			/* Global offset table */	\
		ARM_CPU_KEEP(PROC_INFO)

/* Stack unwinding tables */
#define ARM_UNWIND_SECTIONS						\
	. = ALIGN(8);							\
	.ARM.unwind_idx : {						\
		__start_unwind_idx = .;					\
		*(.ARM.exidx*)						\
		__stop_unwind_idx = .;					\
	}								\
	.ARM.unwind_tab : {						\
		__start_unwind_tab = .;					\
		*(.ARM.extab*)						\
		__stop_unwind_tab = .;					\
	}

/*
 * The vectors and stubs are relocatable code; all that matters is
 * their relative offsets.
 */
#define ARM_VECTORS							\
	__vectors_lma = .;						\
	OVERLAY 0xffff0000 : NOCROSSREFS AT(__vectors_lma) {		\
		.vectors {						\
			OVERLAY_KEEP(*(.vectors))			\
		}							\
		.vectors.bhb.loop8 {					\
			OVERLAY_KEEP(*(.vectors.bhb.loop8))		\
		}							\
		.vectors.bhb.bpiall {					\
			OVERLAY_KEEP(*(.vectors.bhb.bpiall))		\
		}							\
	}								\
	ARM_LMA(__vectors, .vectors);					\
	ARM_LMA(__vectors_bhb_loop8, .vectors.bhb.loop8);		\
	ARM_LMA(__vectors_bhb_bpiall, .vectors.bhb.bpiall);		\
	. = __vectors_lma + SIZEOF(.vectors) +				\
		SIZEOF(.vectors.bhb.loop8) +				\
		SIZEOF(.vectors.bhb.bpiall);				\
									\
	__stubs_lma = .;						\
	.stubs ADDR(.vectors) + 0x1000 : AT(__stubs_lma) {		\
		*(.stubs)						\
	}								\
	ARM_LMA(__stubs, .stubs);					\
	. = __stubs_lma + SIZEOF(.stubs);				\
									\
	PROVIDE(vector_fiq_offset = vector_fiq - ADDR(.vectors));

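/*
 * Tightly Coupled Memory: the ITCM/DTCM sections are linked to run at
 * ITCM_OFFSET/DTCM_OFFSET but are loaded within the kernel image, from
 * where they are copied into the TCMs at boot.
 */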
#define ARM_TCM								\
	__itcm_start = ALIGN(4);					\
	.text_itcm ITCM_OFFSET : AT(__itcm_start - LOAD_OFFSET) {	\
		__sitcm_text = .;					\
		*(.tcm.text)						\
		*(.tcm.rodata)						\
		. = ALIGN(4);						\
		__eitcm_text = .;					\
	}								\
	. = __itcm_start + SIZEOF(.text_itcm);				\
									\
	__dtcm_start = .;						\
	.data_dtcm DTCM_OFFSET : AT(__dtcm_start - LOAD_OFFSET) {	\
		__sdtcm_data = .;					\
		*(.tcm.data)						\
		. = ALIGN(4);						\
		__edtcm_data = .;					\
	}								\
	. = __dtcm_start + SIZEOF(.data_dtcm);