/* Identity expansion: emits the symbol name unchanged into the linker
 * script. NOTE(review): presumably exists so architectures that prefix
 * C symbols (e.g. with an underscore) can override it — confirm against
 * the arch headers; here it is a plain pass-through. */
6 #define VMLINUX_SYMBOL(_sym_) _sym_
9 /* Align the location counter (.) to an 8-byte boundary, equal to the maximum function alignment. */
10 #define ALIGN_FUNCTION() . = ALIGN(8)
13 .rodata : AT(ADDR(.rodata) - LOAD_OFFSET) { \
14 *(.rodata) *(.rodata.*) \
15 *(__vermagic) /* Kernel version magic */ \
18 .rodata1 : AT(ADDR(.rodata1) - LOAD_OFFSET) { \
23 .pci_fixup : AT(ADDR(.pci_fixup) - LOAD_OFFSET) { \
24 VMLINUX_SYMBOL(__start_pci_fixups_early) = .; \
26 VMLINUX_SYMBOL(__end_pci_fixups_early) = .; \
27 VMLINUX_SYMBOL(__start_pci_fixups_header) = .; \
28 *(.pci_fixup_header) \
29 VMLINUX_SYMBOL(__end_pci_fixups_header) = .; \
30 VMLINUX_SYMBOL(__start_pci_fixups_final) = .; \
32 VMLINUX_SYMBOL(__end_pci_fixups_final) = .; \
33 VMLINUX_SYMBOL(__start_pci_fixups_enable) = .; \
34 *(.pci_fixup_enable) \
35 VMLINUX_SYMBOL(__end_pci_fixups_enable) = .; \
38 /* Kernel symbol table: Normal symbols */ \
39 __ksymtab : AT(ADDR(__ksymtab) - LOAD_OFFSET) { \
40 VMLINUX_SYMBOL(__start___ksymtab) = .; \
42 VMLINUX_SYMBOL(__stop___ksymtab) = .; \
45 /* Kernel symbol table: GPL-only symbols */ \
46 __ksymtab_gpl : AT(ADDR(__ksymtab_gpl) - LOAD_OFFSET) { \
47 VMLINUX_SYMBOL(__start___ksymtab_gpl) = .; \
49 VMLINUX_SYMBOL(__stop___ksymtab_gpl) = .; \
52 /* Kernel symbol table: Normal symbols */ \
53 __kcrctab : AT(ADDR(__kcrctab) - LOAD_OFFSET) { \
54 VMLINUX_SYMBOL(__start___kcrctab) = .; \
56 VMLINUX_SYMBOL(__stop___kcrctab) = .; \
59 /* Kernel symbol table: GPL-only symbols */ \
60 __kcrctab_gpl : AT(ADDR(__kcrctab_gpl) - LOAD_OFFSET) { \
61 VMLINUX_SYMBOL(__start___kcrctab_gpl) = .; \
63 VMLINUX_SYMBOL(__stop___kcrctab_gpl) = .; \
66 /* Kernel symbol table: strings */ \
67 __ksymtab_strings : AT(ADDR(__ksymtab_strings) - LOAD_OFFSET) { \
68 *(__ksymtab_strings) \
71 /* Built-in module parameters. */ \
72 __param : AT(ADDR(__param) - LOAD_OFFSET) { \
73 VMLINUX_SYMBOL(__start___param) = .; \
75 VMLINUX_SYMBOL(__stop___param) = .; \
78 #define SECURITY_INIT \
79 .security_initcall.init : AT(ADDR(.security_initcall.init) - LOAD_OFFSET) { \
80 VMLINUX_SYMBOL(__security_initcall_start) = .; \
81 *(.security_initcall.init) \
82 VMLINUX_SYMBOL(__security_initcall_end) = .; \
85 /* sched.text is aligned to the function alignment to ensure we get the same
86  * addresses even on the second ld pass when generating System.map */
89 VMLINUX_SYMBOL(__sched_text_start) = .; \
91 VMLINUX_SYMBOL(__sched_text_end) = .;
93 /* spinlock.text is aligned to the function alignment to ensure we get the same
94  * addresses even on the second ld pass when generating System.map */
97 VMLINUX_SYMBOL(__lock_text_start) = .; \
99 VMLINUX_SYMBOL(__lock_text_end) = .;
101 #define KPROBES_TEXT \
103 VMLINUX_SYMBOL(__kprobes_text_start) = .; \
105 VMLINUX_SYMBOL(__kprobes_text_end) = .;