From e3d8676f5722b7622685581e06e8f53e6138e3ab Mon Sep 17 00:00:00 2001
From: Felix Fietkau <nbd@nbd.name>
Date: Sat, 15 Jul 2017 23:42:36 +0200
Subject: use -ffunction-sections, -fdata-sections and --gc-sections

In combination with kernel symbol export stripping, this significantly reduces
the kernel image size. Used on both ARM and MIPS architectures.

Signed-off-by: Felix Fietkau <nbd@nbd.name>
Signed-off-by: Jonas Gorski <jogo@openwrt.org>
Signed-off-by: Gabor Juhos <juhosg@openwrt.org>
---
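-ffunction-sections and -fdata-sections place each function and each data
object in its own input section, which lets the linker's --gc-sections drop
any section that ends up unreferenced. Tables that the kernel reaches only
through linker-generated start/stop symbols carry no relocations of their own,
so the linker scripts have to wrap them in KEEP() or they would be garbage
collected. A minimal sketch of the pattern applied throughout this patch (the
section and symbol names below are illustrative, not taken from the kernel
sources):

    .init.example.table : {
            __example_table_begin = .;
            KEEP(*(.example.table))    /* retained even with --gc-sections */
            __example_table_end = .;
    }
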
 Makefile                          | 10 +++----
 arch/arm/Kconfig                  |  1 +
 arch/arm/boot/compressed/Makefile |  1 +
 arch/arm/kernel/vmlinux.lds.S     | 26 ++++++++--------
 arch/mips/Kconfig                 |  1 +
 arch/mips/kernel/vmlinux.lds.S    |  4 +--
 include/asm-generic/vmlinux.lds.h | 63 ++++++++++++++++++++-------------------
 7 files changed, 55 insertions(+), 51 deletions(-)

--- a/Makefile
+++ b/Makefile
@@ -272,6 +272,11 @@ else
 scripts/Kbuild.include: ;
 include scripts/Kbuild.include

+ifdef CONFIG_LD_DEAD_CODE_DATA_ELIMINATION
+KBUILD_CFLAGS_KERNEL   += $(call cc-option,-ffunction-sections,)
+KBUILD_CFLAGS_KERNEL   += $(call cc-option,-fdata-sections,)
+endif
+
 # Read KERNELRELEASE from include/config/kernel.release (if it exists)
 KERNELRELEASE = $(shell cat include/config/kernel.release 2> /dev/null)
 KERNELVERSION = $(VERSION)$(if $(PATCHLEVEL),.$(PATCHLEVEL)$(if $(SUBLEVEL),.$(SUBLEVEL)))$(EXTRAVERSION)
@@ -779,11 +784,6 @@ ifdef CONFIG_DEBUG_SECTION_MISMATCH
 KBUILD_CFLAGS += $(call cc-option, -fno-inline-functions-called-once)
 endif

-ifdef CONFIG_LD_DEAD_CODE_DATA_ELIMINATION
-KBUILD_CFLAGS  += $(call cc-option,-ffunction-sections,)
-KBUILD_CFLAGS  += $(call cc-option,-fdata-sections,)
-endif
-
 # arch Makefile may override CC so keep this after arch Makefile is included
 NOSTDINC_FLAGS += -nostdinc -isystem $(shell $(CC) -print-file-name=include)
 CHECKFLAGS     += $(NOSTDINC_FLAGS)
--- a/arch/arm/Kconfig
+++ b/arch/arm/Kconfig
@@ -91,6 +91,7 @@ config ARM
        select HAVE_UID16
        select HAVE_VIRT_CPU_ACCOUNTING_GEN
        select IRQ_FORCED_THREADING
+       select LD_DEAD_CODE_DATA_ELIMINATION
        select MODULES_USE_ELF_REL
        select NO_BOOTMEM
        select OF_EARLY_FLATTREE if OF
--- a/arch/arm/boot/compressed/Makefile
+++ b/arch/arm/boot/compressed/Makefile
@@ -103,6 +103,7 @@ ifeq ($(CONFIG_FUNCTION_TRACER),y)
 ORIG_CFLAGS := $(KBUILD_CFLAGS)
 KBUILD_CFLAGS = $(subst -pg, , $(ORIG_CFLAGS))
 endif
+KBUILD_CFLAGS_KERNEL := $(patsubst -f%-sections,,$(KBUILD_CFLAGS_KERNEL))

 # -fstack-protector-strong triggers protection checks in this code,
 # but it is being used too early to link to meaningful stack_chk logic.
--- a/arch/arm/kernel/vmlinux.lds.S
+++ b/arch/arm/kernel/vmlinux.lds.S
@@ -18,7 +18,7 @@
 #define PROC_INFO                                                      \
        . = ALIGN(4);                                                   \
        VMLINUX_SYMBOL(__proc_info_begin) = .;                          \
-       *(.proc.info.init)                                              \
+       KEEP(*(.proc.info.init))                                        \
        VMLINUX_SYMBOL(__proc_info_end) = .;

 #define HYPERVISOR_TEXT                                                        \
@@ -29,11 +29,11 @@
 #define IDMAP_TEXT                                                     \
        ALIGN_FUNCTION();                                               \
        VMLINUX_SYMBOL(__idmap_text_start) = .;                         \
-       *(.idmap.text)                                                  \
+       KEEP(*(.idmap.text))                                            \
        VMLINUX_SYMBOL(__idmap_text_end) = .;                           \
        . = ALIGN(PAGE_SIZE);                                           \
        VMLINUX_SYMBOL(__hyp_idmap_text_start) = .;                     \
-       *(.hyp.idmap.text)                                              \
+       KEEP(*(.hyp.idmap.text))                                        \
        VMLINUX_SYMBOL(__hyp_idmap_text_end) = .;

 #ifdef CONFIG_HOTPLUG_CPU
@@ -106,7 +106,7 @@ SECTIONS
                _stext = .;             /* Text and read-only data      */
                        IDMAP_TEXT
                        __exception_text_start = .;
-                       *(.exception.text)
+                       KEEP(*(.exception.text))
                        __exception_text_end = .;
                        IRQENTRY_TEXT
                        SOFTIRQENTRY_TEXT
@@ -135,7 +135,7 @@ SECTIONS
        __ex_table : AT(ADDR(__ex_table) - LOAD_OFFSET) {
                __start___ex_table = .;
 #ifdef CONFIG_MMU
-               *(__ex_table)
+               KEEP(*(__ex_table))
 #endif
                __stop___ex_table = .;
        }
@@ -147,12 +147,12 @@ SECTIONS
        . = ALIGN(8);
        .ARM.unwind_idx : {
                __start_unwind_idx = .;
-               *(.ARM.exidx*)
+               KEEP(*(.ARM.exidx*))
                __stop_unwind_idx = .;
        }
        .ARM.unwind_tab : {
                __start_unwind_tab = .;
-               *(.ARM.extab*)
+               KEEP(*(.ARM.extab*))
                __stop_unwind_tab = .;
        }
 #endif
@@ -172,14 +172,14 @@ SECTIONS
         */
        __vectors_start = .;
        .vectors 0xffff0000 : AT(__vectors_start) {
-               *(.vectors)
+               KEEP(*(.vectors))
        }
        . = __vectors_start + SIZEOF(.vectors);
        __vectors_end = .;

        __stubs_start = .;
        .stubs ADDR(.vectors) + 0x1000 : AT(__stubs_start) {
-               *(.stubs)
+               KEEP(*(.stubs))
        }
        . = __stubs_start + SIZEOF(.stubs);
        __stubs_end = .;
@@ -195,24 +195,24 @@ SECTIONS
        }
        .init.arch.info : {
                __arch_info_begin = .;
-               *(.arch.info.init)
+               KEEP(*(.arch.info.init))
                __arch_info_end = .;
        }
        .init.tagtable : {
                __tagtable_begin = .;
-               *(.taglist.init)
+               KEEP(*(.taglist.init))
                __tagtable_end = .;
        }
 #ifdef CONFIG_SMP_ON_UP
        .init.smpalt : {
                __smpalt_begin = .;
-               *(.alt.smp.init)
+               KEEP(*(.alt.smp.init))
                __smpalt_end = .;
        }
 #endif
        .init.pv_table : {
                __pv_table_begin = .;
-               *(.pv_table)
+               KEEP(*(.pv_table))
                __pv_table_end = .;
        }
        .init.data : {
--- a/arch/mips/Kconfig
+++ b/arch/mips/Kconfig
@@ -40,6 +40,7 @@ config MIPS
        select HAVE_CBPF_JIT if (!64BIT && !CPU_MICROMIPS)
        select HAVE_EBPF_JIT if (64BIT && !CPU_MICROMIPS)
        select HAVE_CC_STACKPROTECTOR
+       select LD_DEAD_CODE_DATA_ELIMINATION
        select HAVE_CONTEXT_TRACKING
        select HAVE_COPY_THREAD_TLS
        select HAVE_C_RECORDMCOUNT
--- a/arch/mips/kernel/vmlinux.lds.S
+++ b/arch/mips/kernel/vmlinux.lds.S
@@ -72,7 +72,7 @@ SECTIONS
        /* Exception table for data bus errors */
        __dbe_table : {
                __start___dbe_table = .;
-               *(__dbe_table)
+               KEEP(*(__dbe_table))
                __stop___dbe_table = .;
        }

@@ -123,7 +123,7 @@ SECTIONS
        . = ALIGN(4);
        .mips.machines.init : AT(ADDR(.mips.machines.init) - LOAD_OFFSET) {
                __mips_machines_start = .;
-               *(.mips.machines.init)
+               KEEP(*(.mips.machines.init))
                __mips_machines_end = .;
        }

--- a/include/asm-generic/vmlinux.lds.h
+++ b/include/asm-generic/vmlinux.lds.h
@@ -105,7 +105,7 @@
 #ifdef CONFIG_FTRACE_MCOUNT_RECORD
 #define MCOUNT_REC()   . = ALIGN(8);                           \
                        VMLINUX_SYMBOL(__start_mcount_loc) = .; \
-                       *(__mcount_loc)                         \
+                       KEEP(*(__mcount_loc))                   \
                        VMLINUX_SYMBOL(__stop_mcount_loc) = .;
 #else
 #define MCOUNT_REC()
@@ -113,7 +113,7 @@

 #ifdef CONFIG_TRACE_BRANCH_PROFILING
 #define LIKELY_PROFILE()       VMLINUX_SYMBOL(__start_annotated_branch_profile) = .; \
-                               *(_ftrace_annotated_branch)                           \
+                               KEEP(*(_ftrace_annotated_branch))                     \
                                VMLINUX_SYMBOL(__stop_annotated_branch_profile) = .;
 #else
 #define LIKELY_PROFILE()
@@ -121,7 +121,7 @@

 #ifdef CONFIG_PROFILE_ALL_BRANCHES
 #define BRANCH_PROFILE()       VMLINUX_SYMBOL(__start_branch_profile) = .;   \
-                               *(_ftrace_branch)                             \
+                               KEEP(*(_ftrace_branch))                       \
                                VMLINUX_SYMBOL(__stop_branch_profile) = .;
 #else
 #define BRANCH_PROFILE()
@@ -237,7 +237,8 @@
        LIKELY_PROFILE()                                                \
        BRANCH_PROFILE()                                                \
        TRACE_PRINTKS()                                                 \
-       TRACEPOINT_STR()
+       TRACEPOINT_STR()                                                \
+       *(.data.[a-zA-Z_]*)

 /*
  * Data section helpers
@@ -497,7 +498,7 @@
 #define ENTRY_TEXT                                                     \
                ALIGN_FUNCTION();                                       \
                VMLINUX_SYMBOL(__entry_text_start) = .;                 \
-               *(.entry.text)                                          \
+               KEEP(*(.entry.text))                                    \
                VMLINUX_SYMBOL(__entry_text_end) = .;

 #define IRQENTRY_TEXT                                                  \
@@ -604,7 +605,7 @@
        . = ALIGN(sbss_align);                                          \
        .sbss : AT(ADDR(.sbss) - LOAD_OFFSET) {                         \
                *(.dynsbss)                                             \
-               *(.sbss)                                                \
+               *(.sbss .sbss.*)                                        \
                *(.scommon)                                             \
        }
