Commit e64b9562 authored by Linus Torvalds

Merge branch 'x86/urgent' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull x86 fixes from Thomas Gleixner:
 "A small set of fixes for x86:

   - Add missing instruction suffixes to assembly code so it can be
     compiled by newer GAS versions without warnings.

   - Switch refcount WARN exceptions to UD2 as we did in general

   - Make the reboot on Intel Edison platforms work

   - A small documentation update so text and sample command match"

* 'x86/urgent' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  Documentation, x86, resctrl: Make text and sample command match
  x86/platform/intel-mid: Handle Intel Edison reboot correctly
  x86/asm: Add instruction suffixes to bitops
  x86/entry/64: Add instruction suffix
  x86/refcounts: Switch to UD2 for exceptions
parents 7225a442 30009746
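
A note on the instruction-suffix changes in the diffs below: newer GAS versions warn when bt/bts/btr/btc is given a memory operand and a register bit number without an explicit operand-size suffix, because the size cannot be inferred. The stand-alone sketch below is illustrative only (set_bit_explicit is not a kernel function; it assumes an x86-64 compiler with GNU inline asm) and shows the same idea of spelling out the size that the bitops.h hunks apply via __ASM_SIZE() and the entry/percpu hunks apply with an explicit "btl":

  /* Explicit "q" suffix: the assembler no longer guesses the operand size. */
  #include <stdio.h>

  static inline void set_bit_explicit(long nr, volatile unsigned long *addr)
  {
          asm volatile("btsq %1,%0" : "+m" (*addr) : "Ir" (nr) : "memory");
  }

  int main(void)
  {
          unsigned long word = 0;

          set_bit_explicit(5, &word);
          printf("word = %#lx\n", word);  /* prints word = 0x20 */
          return 0;
  }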
@@ -671,7 +671,7 @@ occupancy of the real time threads on these cores.
 # mkdir p1
 Move the cpus 4-7 over to p1
-# echo f0 > p0/cpus
+# echo f0 > p1/cpus
 View the llc occupancy snapshot
...
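
Incidentally, the mask in the sample is why the fix above is the right one: 0xf0 is binary 11110000, i.e. exactly CPUs 4-7, so the command belongs in p1/cpus once those CPUs move to partition p1. A trivial stand-alone check (illustrative only, not part of the documentation):

  #include <stdio.h>

  int main(void)
  {
          unsigned int mask = 0;

          for (int cpu = 4; cpu <= 7; cpu++)      /* CPUs 4-7 */
                  mask |= 1u << cpu;

          printf("%x\n", mask);                   /* prints f0 */
          return 0;
  }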
@@ -55,7 +55,7 @@ END(native_usergs_sysret64)
 .macro TRACE_IRQS_FLAGS flags:req
 #ifdef CONFIG_TRACE_IRQFLAGS
-	bt	$9, \flags		/* interrupts off? */
+	btl	$9, \flags		/* interrupts off? */
 	jnc	1f
 	TRACE_IRQS_ON
 1:
...
@@ -78,7 +78,7 @@ set_bit(long nr, volatile unsigned long *addr)
 		: "iq" ((u8)CONST_MASK(nr))
 		: "memory");
 	} else {
-		asm volatile(LOCK_PREFIX "bts %1,%0"
+		asm volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"
			: BITOP_ADDR(addr) : "Ir" (nr) : "memory");
 	}
 }
@@ -94,7 +94,7 @@ set_bit(long nr, volatile unsigned long *addr)
  */
 static __always_inline void __set_bit(long nr, volatile unsigned long *addr)
 {
-	asm volatile("bts %1,%0" : ADDR : "Ir" (nr) : "memory");
+	asm volatile(__ASM_SIZE(bts) " %1,%0" : ADDR : "Ir" (nr) : "memory");
 }

 /**
@@ -115,7 +115,7 @@ clear_bit(long nr, volatile unsigned long *addr)
 		: CONST_MASK_ADDR(nr, addr)
 		: "iq" ((u8)~CONST_MASK(nr)));
 	} else {
-		asm volatile(LOCK_PREFIX "btr %1,%0"
+		asm volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"
			: BITOP_ADDR(addr)
			: "Ir" (nr));
 	}
@@ -137,7 +137,7 @@ static __always_inline void clear_bit_unlock(long nr, volatile unsigned long *addr)
 static __always_inline void __clear_bit(long nr, volatile unsigned long *addr)
 {
-	asm volatile("btr %1,%0" : ADDR : "Ir" (nr));
+	asm volatile(__ASM_SIZE(btr) " %1,%0" : ADDR : "Ir" (nr));
 }

 static __always_inline bool clear_bit_unlock_is_negative_byte(long nr, volatile unsigned long *addr)
@@ -182,7 +182,7 @@ static __always_inline void __clear_bit_unlock(long nr, volatile unsigned long *addr)
  */
 static __always_inline void __change_bit(long nr, volatile unsigned long *addr)
 {
-	asm volatile("btc %1,%0" : ADDR : "Ir" (nr));
+	asm volatile(__ASM_SIZE(btc) " %1,%0" : ADDR : "Ir" (nr));
 }

 /**
@@ -201,7 +201,7 @@ static __always_inline void change_bit(long nr, volatile unsigned long *addr)
 		: CONST_MASK_ADDR(nr, addr)
 		: "iq" ((u8)CONST_MASK(nr)));
 	} else {
-		asm volatile(LOCK_PREFIX "btc %1,%0"
+		asm volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"
			: BITOP_ADDR(addr)
			: "Ir" (nr));
 	}
@@ -217,7 +217,8 @@ static __always_inline void change_bit(long nr, volatile unsigned long *addr)
  */
 static __always_inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, "Ir", nr, "%0", c);
+	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts),
+			 *addr, "Ir", nr, "%0", c);
 }

 /**
...@@ -246,7 +247,7 @@ static __always_inline bool __test_and_set_bit(long nr, volatile unsigned long * ...@@ -246,7 +247,7 @@ static __always_inline bool __test_and_set_bit(long nr, volatile unsigned long *
{ {
bool oldbit; bool oldbit;
asm("bts %2,%1" asm(__ASM_SIZE(bts) " %2,%1"
CC_SET(c) CC_SET(c)
: CC_OUT(c) (oldbit), ADDR : CC_OUT(c) (oldbit), ADDR
: "Ir" (nr)); : "Ir" (nr));
@@ -263,7 +264,8 @@ static __always_inline bool __test_and_set_bit(long nr, volatile unsigned long *addr)
  */
 static __always_inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, "Ir", nr, "%0", c);
+	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr),
+			 *addr, "Ir", nr, "%0", c);
 }

 /**
@@ -286,7 +288,7 @@ static __always_inline bool __test_and_clear_bit(long nr, volatile unsigned long *addr)
 {
 	bool oldbit;

-	asm volatile("btr %2,%1"
+	asm volatile(__ASM_SIZE(btr) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit), ADDR
		     : "Ir" (nr));
@@ -298,7 +300,7 @@ static __always_inline bool __test_and_change_bit(long nr, volatile unsigned long *addr)
 {
 	bool oldbit;

-	asm volatile("btc %2,%1"
+	asm volatile(__ASM_SIZE(btc) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit), ADDR
		     : "Ir" (nr) : "memory");
@@ -316,7 +318,8 @@ static __always_inline bool __test_and_change_bit(long nr, volatile unsigned long *addr)
  */
 static __always_inline bool test_and_change_bit(long nr, volatile unsigned long *addr)
 {
-	GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, "Ir", nr, "%0", c);
+	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc),
+			 *addr, "Ir", nr, "%0", c);
 }

 static __always_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)
@@ -329,7 +332,7 @@ static __always_inline bool variable_test_bit(long nr, volatile const unsigned long *addr)
 {
 	bool oldbit;

-	asm volatile("bt %2,%1"
+	asm volatile(__ASM_SIZE(bt) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit)
		     : "m" (*(unsigned long *)addr), "Ir" (nr));
...
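
For context on the bitops.h hunks above: __ASM_SIZE() expands the mnemonic to its "l"- or "q"-suffixed form depending on whether the kernel is built 32-bit or 64-bit, so the assembler never has to infer the operand size of bt/bts/btr/btc. A simplified, stand-alone rendering of the idea (ASM_SIZE_SKETCH and test_bit_sketch are illustrative names, not the kernel's; the real macro lives in the x86 asm headers and also handles additional operands):

  #ifdef __x86_64__
  # define ASM_SIZE_SKETCH(inst)  #inst "q"
  #else
  # define ASM_SIZE_SKETCH(inst)  #inst "l"
  #endif

  static inline int test_bit_sketch(long nr, const unsigned long *addr)
  {
          unsigned char bit;

          /* Emits "btq %2,%1" on 64-bit ("btl" on 32-bit), then copies CF. */
          asm volatile(ASM_SIZE_SKETCH(bt) " %2,%1\n\t"
                       "setc %0"
                       : "=q" (bit)
                       : "m" (*addr), "Ir" (nr));
          return bit;
  }

Built 64-bit, the asm string becomes "btq %2,%1"; built 32-bit, "btl %2,%1". In both cases the operand size is explicit.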
@@ -526,7 +526,7 @@ static inline bool x86_this_cpu_variable_test_bit(int nr,
 {
 	bool oldbit;

-	asm volatile("bt "__percpu_arg(2)",%1"
+	asm volatile("btl "__percpu_arg(2)",%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit)
		     : "m" (*(unsigned long __percpu *)addr), "Ir" (nr));
...
@@ -17,7 +17,7 @@
 #define _REFCOUNT_EXCEPTION				\
	".pushsection .text..refcount\n"		\
	"111:\tlea %[counter], %%" _ASM_CX "\n"		\
-	"112:\t" ASM_UD0 "\n"				\
+	"112:\t" ASM_UD2 "\n"				\
	ASM_UNREACHABLE					\
	".popsection\n"					\
	"113:\n"					\
...
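
On the refcount.h change above: ud2 is the architecturally defined invalid opcode with a fixed two-byte encoding (0f 0b), so assemblers, disassemblers and the exception fixup all agree on where the next instruction starts, whereas ud0's decoded length has varied across binutils versions. A stand-alone illustration of the trap itself (not the kernel's exception path; in user space the #UD fault simply surfaces as SIGILL):

  #include <signal.h>
  #include <unistd.h>

  static void on_sigill(int sig)
  {
          static const char msg[] = "caught SIGILL from ud2\n";

          (void)sig;
          if (write(STDOUT_FILENO, msg, sizeof(msg) - 1) < 0)
                  _exit(1);
          _exit(0);
  }

  int main(void)
  {
          signal(SIGILL, on_sigill);
          asm volatile("ud2");    /* two bytes: 0f 0b */
          return 1;               /* never reached */
  }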
@@ -79,7 +79,7 @@ static void intel_mid_power_off(void)
 static void intel_mid_reboot(void)
 {
-	intel_scu_ipc_simple_command(IPCMSG_COLD_BOOT, 0);
+	intel_scu_ipc_simple_command(IPCMSG_COLD_RESET, 0);
 }

 static unsigned long __init intel_mid_calibrate_tsc(void)
...