Commit eb039161 authored by Tobin C. Harding, committed by Michael Ellerman

powerpc/asm: Convert .llong directives to .8byte

.llong is an undocumented PPC specific directive. The generic
equivalent is .quad, but even better (because it's self describing) is
.8byte.

Convert all .llong directives to .8byte.
Signed-off-by: Tobin C. Harding <me@tobin.cc>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
parent 5b593949
...@@ -26,17 +26,17 @@ _zimage_start_opd: ...@@ -26,17 +26,17 @@ _zimage_start_opd:
#ifdef __powerpc64__ #ifdef __powerpc64__
.balign 8 .balign 8
p_start: .llong _start p_start: .8byte _start
p_etext: .llong _etext p_etext: .8byte _etext
p_bss_start: .llong __bss_start p_bss_start: .8byte __bss_start
p_end: .llong _end p_end: .8byte _end
p_toc: .llong __toc_start + 0x8000 - p_base p_toc: .8byte __toc_start + 0x8000 - p_base
p_dyn: .llong __dynamic_start - p_base p_dyn: .8byte __dynamic_start - p_base
p_rela: .llong __rela_dyn_start - p_base p_rela: .8byte __rela_dyn_start - p_base
p_prom: .llong 0 p_prom: .8byte 0
.weak _platform_stack_top .weak _platform_stack_top
p_pstack: .llong _platform_stack_top p_pstack: .8byte _platform_stack_top
#else #else
p_start: .long _start p_start: .long _start
p_etext: .long _etext p_etext: .long _etext
......
...@@ -25,7 +25,7 @@ ...@@ -25,7 +25,7 @@
#define PPC_LCMPI stringify_in_c(cmpdi) #define PPC_LCMPI stringify_in_c(cmpdi)
#define PPC_LCMPLI stringify_in_c(cmpldi) #define PPC_LCMPLI stringify_in_c(cmpldi)
#define PPC_LCMP stringify_in_c(cmpd) #define PPC_LCMP stringify_in_c(cmpd)
#define PPC_LONG stringify_in_c(.llong) #define PPC_LONG stringify_in_c(.8byte)
#define PPC_LONG_ALIGN stringify_in_c(.balign 8) #define PPC_LONG_ALIGN stringify_in_c(.balign 8)
#define PPC_TLNEI stringify_in_c(tdnei) #define PPC_TLNEI stringify_in_c(tdnei)
#define PPC_LLARX(t, a, b, eh) PPC_LDARX(t, a, b, eh) #define PPC_LLARX(t, a, b, eh) PPC_LDARX(t, a, b, eh)
......
...@@ -19,11 +19,11 @@ ...@@ -19,11 +19,11 @@
*/ */
#if defined(CONFIG_PPC64) && !defined(__powerpc64__) #if defined(CONFIG_PPC64) && !defined(__powerpc64__)
/* 64 bits kernel, 32 bits code (ie. vdso32) */ /* 64 bits kernel, 32 bits code (ie. vdso32) */
#define FTR_ENTRY_LONG .llong #define FTR_ENTRY_LONG .8byte
#define FTR_ENTRY_OFFSET .long 0xffffffff; .long #define FTR_ENTRY_OFFSET .long 0xffffffff; .long
#elif defined(CONFIG_PPC64) #elif defined(CONFIG_PPC64)
#define FTR_ENTRY_LONG .llong #define FTR_ENTRY_LONG .8byte
#define FTR_ENTRY_OFFSET .llong #define FTR_ENTRY_OFFSET .8byte
#else #else
#define FTR_ENTRY_LONG .long #define FTR_ENTRY_LONG .long
#define FTR_ENTRY_OFFSET .long #define FTR_ENTRY_OFFSET .long
......
...@@ -1344,12 +1344,12 @@ static inline void msr_check_and_clear(unsigned long bits) ...@@ -1344,12 +1344,12 @@ static inline void msr_check_and_clear(unsigned long bits)
".section __ftr_fixup,\"a\"\n" \ ".section __ftr_fixup,\"a\"\n" \
".align 3\n" \ ".align 3\n" \
"98:\n" \ "98:\n" \
" .llong %1\n" \ " .8byte %1\n" \
" .llong %1\n" \ " .8byte %1\n" \
" .llong 97b-98b\n" \ " .8byte 97b-98b\n" \
" .llong 99b-98b\n" \ " .8byte 99b-98b\n" \
" .llong 0\n" \ " .8byte 0\n" \
" .llong 0\n" \ " .8byte 0\n" \
".previous" \ ".previous" \
: "=r" (rval) \ : "=r" (rval) \
: "i" (CPU_FTR_CELL_TB_BUG), "i" (SPRN_TBRL) : "cr0"); \ : "i" (CPU_FTR_CELL_TB_BUG), "i" (SPRN_TBRL) : "cr0"); \
......
...@@ -1104,7 +1104,7 @@ _ASM_NOKPROBE_SYMBOL(__enter_rtas) ...@@ -1104,7 +1104,7 @@ _ASM_NOKPROBE_SYMBOL(__enter_rtas)
_ASM_NOKPROBE_SYMBOL(rtas_return_loc) _ASM_NOKPROBE_SYMBOL(rtas_return_loc)
.align 3 .align 3
1: .llong rtas_restore_regs 1: .8byte rtas_restore_regs
rtas_restore_regs: rtas_restore_regs:
/* relocation is on at this point */ /* relocation is on at this point */
......
...@@ -92,13 +92,13 @@ END_FTR_SECTION(0, 1) ...@@ -92,13 +92,13 @@ END_FTR_SECTION(0, 1)
.balign 8 .balign 8
.globl __secondary_hold_spinloop .globl __secondary_hold_spinloop
__secondary_hold_spinloop: __secondary_hold_spinloop:
.llong 0x0 .8byte 0x0
/* Secondary processors write this value with their cpu # */ /* Secondary processors write this value with their cpu # */
/* after they enter the spin loop immediately below. */ /* after they enter the spin loop immediately below. */
.globl __secondary_hold_acknowledge .globl __secondary_hold_acknowledge
__secondary_hold_acknowledge: __secondary_hold_acknowledge:
.llong 0x0 .8byte 0x0
#ifdef CONFIG_RELOCATABLE #ifdef CONFIG_RELOCATABLE
/* This flag is set to 1 by a loader if the kernel should run /* This flag is set to 1 by a loader if the kernel should run
...@@ -650,7 +650,7 @@ __after_prom_start: ...@@ -650,7 +650,7 @@ __after_prom_start:
bctr bctr
.balign 8 .balign 8
p_end: .llong _end - copy_to_here p_end: .8byte _end - copy_to_here
4: 4:
/* /*
...@@ -892,7 +892,7 @@ _GLOBAL(relative_toc) ...@@ -892,7 +892,7 @@ _GLOBAL(relative_toc)
blr blr
.balign 8 .balign 8
p_toc: .llong __toc_start + 0x8000 - 0b p_toc: .8byte __toc_start + 0x8000 - 0b
/* /*
* This is where the main kernel code starts. * This is where the main kernel code starts.
......
...@@ -82,7 +82,7 @@ _GLOBAL(relocate) ...@@ -82,7 +82,7 @@ _GLOBAL(relocate)
6: blr 6: blr
.balign 8 .balign 8
p_dyn: .llong __dynamic_start - 0b p_dyn: .8byte __dynamic_start - 0b
p_rela: .llong __rela_dyn_start - 0b p_rela: .8byte __rela_dyn_start - 0b
p_st: .llong _stext - 0b p_st: .8byte _stext - 0b
...@@ -17,13 +17,13 @@ ...@@ -17,13 +17,13 @@
#include <asm/ppc_asm.h> #include <asm/ppc_asm.h>
#ifdef CONFIG_PPC64 #ifdef CONFIG_PPC64
#define SYSCALL(func) .llong DOTSYM(sys_##func),DOTSYM(sys_##func) #define SYSCALL(func) .8byte DOTSYM(sys_##func),DOTSYM(sys_##func)
#define COMPAT_SYS(func) .llong DOTSYM(sys_##func),DOTSYM(compat_sys_##func) #define COMPAT_SYS(func) .8byte DOTSYM(sys_##func),DOTSYM(compat_sys_##func)
#define PPC_SYS(func) .llong DOTSYM(ppc_##func),DOTSYM(ppc_##func) #define PPC_SYS(func) .8byte DOTSYM(ppc_##func),DOTSYM(ppc_##func)
#define OLDSYS(func) .llong DOTSYM(sys_ni_syscall),DOTSYM(sys_ni_syscall) #define OLDSYS(func) .8byte DOTSYM(sys_ni_syscall),DOTSYM(sys_ni_syscall)
#define SYS32ONLY(func) .llong DOTSYM(sys_ni_syscall),DOTSYM(compat_sys_##func) #define SYS32ONLY(func) .8byte DOTSYM(sys_ni_syscall),DOTSYM(compat_sys_##func)
#define PPC64ONLY(func) .llong DOTSYM(ppc_##func),DOTSYM(sys_ni_syscall) #define PPC64ONLY(func) .8byte DOTSYM(ppc_##func),DOTSYM(sys_ni_syscall)
#define SYSX(f, f3264, f32) .llong DOTSYM(f),DOTSYM(f3264) #define SYSX(f, f3264, f32) .8byte DOTSYM(f),DOTSYM(f3264)
#else #else
#define SYSCALL(func) .long sys_##func #define SYSCALL(func) .long sys_##func
#define COMPAT_SYS(func) .long sys_##func #define COMPAT_SYS(func) .long sys_##func
......
...@@ -27,7 +27,7 @@ ...@@ -27,7 +27,7 @@
.globl opal_tracepoint_refcount .globl opal_tracepoint_refcount
opal_tracepoint_refcount: opal_tracepoint_refcount:
.llong 0 .8byte 0
.section ".text" .section ".text"
......
...@@ -23,7 +23,7 @@ ...@@ -23,7 +23,7 @@
.globl hcall_tracepoint_refcount .globl hcall_tracepoint_refcount
hcall_tracepoint_refcount: hcall_tracepoint_refcount:
.llong 0 .8byte 0
.section ".text" .section ".text"
#endif #endif
......
...@@ -104,13 +104,13 @@ master: ...@@ -104,13 +104,13 @@ master:
.balign 8 .balign 8
.globl kernel .globl kernel
kernel: kernel:
.llong 0x0 .8byte 0x0
.size kernel, . - kernel .size kernel, . - kernel
.balign 8 .balign 8
.globl dt_offset .globl dt_offset
dt_offset: dt_offset:
.llong 0x0 .8byte 0x0
.size dt_offset, . - dt_offset .size dt_offset, . - dt_offset
......
...@@ -8,7 +8,7 @@ message: ...@@ -8,7 +8,7 @@ message:
.section ".toc" .section ".toc"
.balign 8 .balign 8
pattern: pattern:
.llong 0x5555AAAA5555AAAA .8byte 0x5555AAAA5555AAAA
.text .text
FUNC_START(_start) FUNC_START(_start)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment