Skip to content

Commit 784d569

Browse files
author
Al Viro
committed
x86: move exports to actual definitions
Signed-off-by: Al Viro <[email protected]>
1 parent 22823ab commit 784d569

27 files changed

+68
-150
lines changed

arch/x86/entry/entry_32.S

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@
 #include <asm/alternative-asm.h>
 #include <asm/asm.h>
 #include <asm/smap.h>
+#include <asm/export.h>

 .section .entry.text, "ax"

@@ -955,6 +956,7 @@ trace:
	jmp ftrace_stub
 END(mcount)
 #endif /* CONFIG_DYNAMIC_FTRACE */
+EXPORT_SYMBOL(mcount)
 #endif /* CONFIG_FUNCTION_TRACER */

 #ifdef CONFIG_FUNCTION_GRAPH_TRACER

arch/x86/entry/entry_64.S

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@
 #include <asm/asm.h>
 #include <asm/smap.h>
 #include <asm/pgtable_types.h>
+#include <asm/export.h>
 #include <linux/err.h>

 /* Avoid __ASSEMBLER__'ifying <linux/audit.h> just for this. */
@@ -785,6 +786,7 @@ ENTRY(native_load_gs_index)
	popfq
	ret
 END(native_load_gs_index)
+EXPORT_SYMBOL(native_load_gs_index)

	_ASM_EXTABLE(.Lgs_change, bad_gs)
	.section .fixup, "ax"

arch/x86/entry/thunk_32.S

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
  */
 #include <linux/linkage.h>
 #include <asm/asm.h>
+#include <asm/export.h>

 /* put return address in eax (arg1) */
 .macro THUNK name, func, put_ret_addr_in_eax=0
@@ -36,5 +37,7 @@
 #ifdef CONFIG_PREEMPT
	THUNK ___preempt_schedule, preempt_schedule
	THUNK ___preempt_schedule_notrace, preempt_schedule_notrace
+	EXPORT_SYMBOL(___preempt_schedule)
+	EXPORT_SYMBOL(___preempt_schedule_notrace)
 #endif


arch/x86/entry/thunk_64.S

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
 #include <linux/linkage.h>
 #include "calling.h"
 #include <asm/asm.h>
+#include <asm/export.h>

 /* rdi: arg1 ... normal C conventions. rax is saved/restored. */
 .macro THUNK name, func, put_ret_addr_in_rdi=0
@@ -49,6 +50,8 @@
 #ifdef CONFIG_PREEMPT
	THUNK ___preempt_schedule, preempt_schedule
	THUNK ___preempt_schedule_notrace, preempt_schedule_notrace
+	EXPORT_SYMBOL(___preempt_schedule)
+	EXPORT_SYMBOL(___preempt_schedule_notrace)
 #endif

 #if defined(CONFIG_TRACE_IRQFLAGS) \

arch/x86/include/asm/export.h

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
+#ifdef CONFIG_64BIT
+#define KSYM_ALIGN 16
+#endif
+#include <asm-generic/export.h>

arch/x86/kernel/Makefile

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -46,9 +46,7 @@ obj-$(CONFIG_MODIFY_LDT_SYSCALL)	+= ldt.o
 obj-y			+= setup.o x86_init.o i8259.o irqinit.o jump_label.o
 obj-$(CONFIG_IRQ_WORK)	+= irq_work.o
 obj-y			+= probe_roms.o
-obj-$(CONFIG_X86_32)	+= i386_ksyms_32.o
-obj-$(CONFIG_X86_64)	+= sys_x86_64.o x8664_ksyms_64.o
-obj-$(CONFIG_X86_64)	+= mcount_64.o
+obj-$(CONFIG_X86_64)	+= sys_x86_64.o mcount_64.o
 obj-$(CONFIG_X86_ESPFIX64)	+= espfix_64.o
 obj-$(CONFIG_SYSFS)	+= ksysfs.o
 obj-y			+= bootflag.o e820.o

arch/x86/kernel/head_32.S

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
 #include <asm/percpu.h>
 #include <asm/nops.h>
 #include <asm/bootparam.h>
+#include <asm/export.h>

 /* Physical address */
 #define pa(X) ((X) - __PAGE_OFFSET)
@@ -673,6 +674,7 @@ ENTRY(empty_zero_page)
	.fill 4096,1,0
 ENTRY(swapper_pg_dir)
	.fill 1024,4,0
+EXPORT_SYMBOL(empty_zero_page)

 /*
  * This starts the data section.

arch/x86/kernel/head_64.S

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@
 #include <asm/percpu.h>
 #include <asm/nops.h>
 #include "../entry/calling.h"
+#include <asm/export.h>

 #ifdef CONFIG_PARAVIRT
 #include <asm/asm-offsets.h>
@@ -488,10 +489,12 @@ early_gdt_descr_base:
 ENTRY(phys_base)
	/* This must match the first entry in level2_kernel_pgt */
	.quad 0x0000000000000000
+EXPORT_SYMBOL(phys_base)

 #include "../../x86/xen/xen-head.S"

 __PAGE_ALIGNED_BSS
 NEXT_PAGE(empty_zero_page)
	.skip PAGE_SIZE
+EXPORT_SYMBOL(empty_zero_page)

arch/x86/kernel/i386_ksyms_32.c

Lines changed: 0 additions & 47 deletions
This file was deleted.

arch/x86/kernel/mcount_64.S

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
 #include <linux/linkage.h>
 #include <asm/ptrace.h>
 #include <asm/ftrace.h>
+#include <asm/export.h>


 .code64
@@ -294,6 +295,7 @@ trace:
	jmp fgraph_trace
 END(function_hook)
 #endif /* CONFIG_DYNAMIC_FTRACE */
+EXPORT_SYMBOL(function_hook)
 #endif /* CONFIG_FUNCTION_TRACER */

 #ifdef CONFIG_FUNCTION_GRAPH_TRACER

arch/x86/kernel/x8664_ksyms_64.c

Lines changed: 0 additions & 85 deletions
This file was deleted.

arch/x86/lib/checksum_32.S

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@
 #include <linux/linkage.h>
 #include <asm/errno.h>
 #include <asm/asm.h>
+#include <asm/export.h>

 /*
  * computes a partial checksum, e.g. for TCP/UDP fragments
@@ -251,6 +252,7 @@ ENTRY(csum_partial)
 ENDPROC(csum_partial)

 #endif
+EXPORT_SYMBOL(csum_partial)

 /*
 unsigned int csum_partial_copy_generic (const char *src, char *dst,
@@ -490,3 +492,4 @@ ENDPROC(csum_partial_copy_generic)
 #undef ROUND1

 #endif
+EXPORT_SYMBOL(csum_partial_copy_generic)

arch/x86/lib/clear_page_64.S

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
 #include <linux/linkage.h>
 #include <asm/cpufeatures.h>
 #include <asm/alternative-asm.h>
+#include <asm/export.h>

 /*
  * Most CPUs support enhanced REP MOVSB/STOSB instructions. It is
@@ -23,6 +24,7 @@ ENTRY(clear_page)
	rep stosq
	ret
 ENDPROC(clear_page)
+EXPORT_SYMBOL(clear_page)

 ENTRY(clear_page_orig)

arch/x86/lib/cmpxchg8b_emu.S

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
  */

 #include <linux/linkage.h>
+#include <asm/export.h>

 .text

@@ -48,3 +49,4 @@ ENTRY(cmpxchg8b_emu)
	ret

 ENDPROC(cmpxchg8b_emu)
+EXPORT_SYMBOL(cmpxchg8b_emu)

arch/x86/lib/copy_page_64.S

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
 #include <linux/linkage.h>
 #include <asm/cpufeatures.h>
 #include <asm/alternative-asm.h>
+#include <asm/export.h>

 /*
  * Some CPUs run faster using the string copy instructions (sane microcode).
@@ -17,6 +18,7 @@ ENTRY(copy_page)
	rep movsq
	ret
 ENDPROC(copy_page)
+EXPORT_SYMBOL(copy_page)

 ENTRY(copy_page_regs)
	subq $2*8, %rsp

arch/x86/lib/copy_user_64.S

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
 #include <asm/alternative-asm.h>
 #include <asm/asm.h>
 #include <asm/smap.h>
+#include <asm/export.h>

 /* Standard copy_to_user with segment limit checking */
 ENTRY(_copy_to_user)
@@ -29,6 +30,7 @@ ENTRY(_copy_to_user)
		      "jmp copy_user_enhanced_fast_string",	\
		      X86_FEATURE_ERMS
 ENDPROC(_copy_to_user)
+EXPORT_SYMBOL(_copy_to_user)

 /* Standard copy_from_user with segment limit checking */
 ENTRY(_copy_from_user)
@@ -44,6 +46,8 @@ ENTRY(_copy_from_user)
		      "jmp copy_user_enhanced_fast_string",	\
		      X86_FEATURE_ERMS
 ENDPROC(_copy_from_user)
+EXPORT_SYMBOL(_copy_from_user)
+

	.section .fixup,"ax"
	/* must zero dest */
@@ -155,6 +159,7 @@ ENTRY(copy_user_generic_unrolled)
	_ASM_EXTABLE(21b,50b)
	_ASM_EXTABLE(22b,50b)
 ENDPROC(copy_user_generic_unrolled)
+EXPORT_SYMBOL(copy_user_generic_unrolled)

 /* Some CPUs run faster using the string copy instructions.
  * This is also a lot simpler. Use them when possible.
@@ -200,6 +205,7 @@ ENTRY(copy_user_generic_string)
	_ASM_EXTABLE(1b,11b)
	_ASM_EXTABLE(3b,12b)
 ENDPROC(copy_user_generic_string)
+EXPORT_SYMBOL(copy_user_generic_string)

 /*
  * Some CPUs are adding enhanced REP MOVSB/STOSB instructions.
@@ -229,6 +235,7 @@ ENTRY(copy_user_enhanced_fast_string)

	_ASM_EXTABLE(1b,12b)
 ENDPROC(copy_user_enhanced_fast_string)
+EXPORT_SYMBOL(copy_user_enhanced_fast_string)

 /*
  * copy_user_nocache - Uncached memory copy with exception handling
@@ -379,3 +386,4 @@ ENTRY(__copy_user_nocache)
	_ASM_EXTABLE(40b,.L_fixup_1b_copy)
	_ASM_EXTABLE(41b,.L_fixup_1b_copy)
 ENDPROC(__copy_user_nocache)
+EXPORT_SYMBOL(__copy_user_nocache)

arch/x86/lib/csum-partial_64.c

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -135,6 +135,7 @@ __wsum csum_partial(const void *buff, int len, __wsum sum)
	return (__force __wsum)add32_with_carry(do_csum(buff, len),
						(__force u32)sum);
 }
+EXPORT_SYMBOL(csum_partial);

 /*
  * this routine is used for miscellaneous IP-like checksums, mainly

0 commit comments

Comments
 (0)