Commit 208c6772 authored by Linus Torvalds

Merge tag 'x86_alternatives_for_v6.11_rc1' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull x86 alternatives updates from Borislav Petkov:
 "This is basically PeterZ's idea to nest the alternative macros to
  avoid the need to "spell out" the number of alternatives in an
  ALTERNATIVE_n() macro, and thus the ever-increasing complexity of
  those definitions.

  For ease of bisection, the old macros are converted to the new, nested
  variants step by step, so that if an issue is encountered during
  testing, the offending change can be pinpointed more easily.

  Because debugging alternatives is a serious pain"

* tag 'x86_alternatives_for_v6.11_rc1' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  x86/alternatives, kvm: Fix a couple of CALLs without a frame pointer
  x86/alternative: Replace the old macros
  x86/alternative: Convert the asm ALTERNATIVE_3() macro
  x86/alternative: Convert the asm ALTERNATIVE_2() macro
  x86/alternative: Convert the asm ALTERNATIVE() macro
  x86/alternative: Convert ALTERNATIVE_3()
  x86/alternative: Convert ALTERNATIVE_TERNARY()
  x86/alternative: Convert alternative_call_2()
  x86/alternative: Convert alternative_call()
  x86/alternative: Convert alternative_io()
  x86/alternative: Convert alternative_input()
  x86/alternative: Convert alternative_2()
  x86/alternative: Convert alternative()
  x86/alternatives: Add nested alternatives macros
  x86/alternative: Zap alternative_ternary()
parents 1467b498 0d3db1f1
@@ -156,102 +156,50 @@ static inline int alternatives_text_reserved(void *start, void *end)
#define ALT_CALL_INSTR "call BUG_func"
#define b_replacement(num) "664"#num
#define e_replacement(num) "665"#num
#define alt_slen "772b-771b"
#define alt_total_slen "773b-771b"
#define alt_rlen "775f-774f"
#define alt_end_marker "663"
#define alt_slen "662b-661b"
#define alt_total_slen alt_end_marker"b-661b"
#define alt_rlen(num) e_replacement(num)"f-"b_replacement(num)"f"
#define OLDINSTR(oldinstr, num) \
"# ALT: oldnstr\n" \
"661:\n\t" oldinstr "\n662:\n" \
#define OLDINSTR(oldinstr) \
"# ALT: oldinstr\n" \
"771:\n\t" oldinstr "\n772:\n" \
"# ALT: padding\n" \
".skip -(((" alt_rlen(num) ")-(" alt_slen ")) > 0) * " \
"((" alt_rlen(num) ")-(" alt_slen ")),0x90\n" \
alt_end_marker ":\n"
/*
* gas compatible max based on the idea from:
* http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax
*
* The additional "-" is needed because gas uses a "true" value of -1.
*/
#define alt_max_short(a, b) "((" a ") ^ (((" a ") ^ (" b ")) & -(-((" a ") < (" b ")))))"
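A quick standalone check of the alt_max_short() idea above, rewritten as plain C where a true comparison yields 1 rather than gas's -1 (hence the extra negation in the macro); this is only an illustration of the branchless max, not kernel code.
#include <assert.h>
/* Branchless max: if a < b the mask is all ones and selects b, else a. */
static int branchless_max(int a, int b)
{
	return a ^ ((a ^ b) & -(a < b));
}
int main(void)
{
	assert(branchless_max(3, 7) == 7);
	assert(branchless_max(7, 3) == 7);
	assert(branchless_max(5, 5) == 5);
	return 0;
}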
/*
* Pad the second replacement alternative with additional NOPs if it is
* additionally longer than the first replacement alternative.
*/
#define OLDINSTR_2(oldinstr, num1, num2) \
"# ALT: oldinstr2\n" \
"661:\n\t" oldinstr "\n662:\n" \
"# ALT: padding2\n" \
".skip -((" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")) > 0) * " \
"(" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")), 0x90\n" \
alt_end_marker ":\n"
#define OLDINSTR_3(oldinsn, n1, n2, n3) \
"# ALT: oldinstr3\n" \
"661:\n\t" oldinsn "\n662:\n" \
"# ALT: padding3\n" \
".skip -((" alt_max_short(alt_max_short(alt_rlen(n1), alt_rlen(n2)), alt_rlen(n3)) \
" - (" alt_slen ")) > 0) * " \
"(" alt_max_short(alt_max_short(alt_rlen(n1), alt_rlen(n2)), alt_rlen(n3)) \
" - (" alt_slen ")), 0x90\n" \
alt_end_marker ":\n"
#define ALTINSTR_ENTRY(ft_flags, num) \
" .long 661b - .\n" /* label */ \
" .long " b_replacement(num)"f - .\n" /* new instruction */ \
".skip -(((" alt_rlen ")-(" alt_slen ")) > 0) * " \
"((" alt_rlen ")-(" alt_slen ")),0x90\n" \
"773:\n"
#define ALTINSTR_ENTRY(ft_flags) \
".pushsection .altinstructions,\"a\"\n" \
" .long 771b - .\n" /* label */ \
" .long 774f - .\n" /* new instruction */ \
" .4byte " __stringify(ft_flags) "\n" /* feature + flags */ \
" .byte " alt_total_slen "\n" /* source len */ \
" .byte " alt_rlen(num) "\n" /* replacement len */
" .byte " alt_rlen "\n" /* replacement len */ \
".popsection\n"
#define ALTINSTR_REPLACEMENT(newinstr, num) /* replacement */ \
"# ALT: replacement " #num "\n" \
b_replacement(num)":\n\t" newinstr "\n" e_replacement(num) ":\n"
#define ALTINSTR_REPLACEMENT(newinstr) /* replacement */ \
".pushsection .altinstr_replacement, \"ax\"\n" \
"# ALT: replacement\n" \
"774:\n\t" newinstr "\n775:\n" \
".popsection\n"
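For orientation, each ALTINSTR_ENTRY() above emits one .altinstructions record; a sketch of its layout, with field names following struct alt_instr as used by apply_alternatives() further down in this diff and sizes taken from the directives:
#include <stdint.h>
/* Sketch only; the authoritative definition is struct alt_instr in this header. */
struct alt_instr_sketch {
	int32_t  instr_offset;	 /* .long 771b - .  : self-relative patch site  */
	int32_t  repl_offset;	 /* .long 774f - .  : self-relative replacement */
	uint32_t ft_flags;	 /* .4byte          : feature bit + flags       */
	uint8_t  instrlen;	 /* .byte 773b-771b : padded source length      */
	uint8_t  replacementlen; /* .byte 775f-774f : replacement length        */
} __attribute__((packed));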
/* alternative assembly primitive: */
#define ALTERNATIVE(oldinstr, newinstr, ft_flags) \
OLDINSTR(oldinstr, 1) \
".pushsection .altinstructions,\"a\"\n" \
ALTINSTR_ENTRY(ft_flags, 1) \
".popsection\n" \
".pushsection .altinstr_replacement, \"ax\"\n" \
ALTINSTR_REPLACEMENT(newinstr, 1) \
".popsection\n"
OLDINSTR(oldinstr) \
ALTINSTR_ENTRY(ft_flags) \
ALTINSTR_REPLACEMENT(newinstr)
#define ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
OLDINSTR_2(oldinstr, 1, 2) \
".pushsection .altinstructions,\"a\"\n" \
ALTINSTR_ENTRY(ft_flags1, 1) \
ALTINSTR_ENTRY(ft_flags2, 2) \
".popsection\n" \
".pushsection .altinstr_replacement, \"ax\"\n" \
ALTINSTR_REPLACEMENT(newinstr1, 1) \
ALTINSTR_REPLACEMENT(newinstr2, 2) \
".popsection\n"
ALTERNATIVE(ALTERNATIVE(oldinstr, newinstr1, ft_flags1), newinstr2, ft_flags2)
/* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
#define ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, \
newinstr_yes, ft_flags)
#define ALTERNATIVE_3(oldinsn, newinsn1, ft_flags1, newinsn2, ft_flags2, \
newinsn3, ft_flags3) \
OLDINSTR_3(oldinsn, 1, 2, 3) \
".pushsection .altinstructions,\"a\"\n" \
ALTINSTR_ENTRY(ft_flags1, 1) \
ALTINSTR_ENTRY(ft_flags2, 2) \
ALTINSTR_ENTRY(ft_flags3, 3) \
".popsection\n" \
".pushsection .altinstr_replacement, \"ax\"\n" \
ALTINSTR_REPLACEMENT(newinsn1, 1) \
ALTINSTR_REPLACEMENT(newinsn2, 2) \
ALTINSTR_REPLACEMENT(newinsn3, 3) \
".popsection\n"
ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, newinstr_yes, ft_flags)
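A hedged usage sketch of ALTERNATIVE_TERNARY(); the function name and instruction choice are hypothetical, only the macro and the feature flag are real. Because the "no" branch is backed by X86_FEATURE_ALWAYS, oldinstr never survives patching:
static __always_inline u64 hypothetical_read_tsc(void)
{
	u32 lo, hi;

	/* RDTSCP if available, otherwise a serialized RDTSC. */
	asm volatile(ALTERNATIVE_TERNARY("rdtsc", X86_FEATURE_RDTSCP,
					 "rdtscp", "lfence; rdtsc")
		     : "=a" (lo), "=d" (hi) : : "ecx", "memory");
	return ((u64)hi << 32) | lo;
}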
#define ALTERNATIVE_3(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, \
newinstr3, ft_flags3) \
ALTERNATIVE(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2), \
newinstr3, ft_flags3)
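How the nesting composes, shown with a deliberately simplified stand-in macro (not the kernel macro, just an illustration of why no ALTERNATIVE_n() with a spelled-out n is needed anymore): the inner expansion becomes the oldinstr string of the outer invocation.
#include <stdio.h>
#define ALT(old, new, feat)		"{" old " | " new " if " feat "}"
#define ALT_2(old, n1, f1, n2, f2)	ALT(ALT(old, n1, f1), n2, f2)
int main(void)
{
	/* Prints: {{oldinsn | newinsn1 if FEAT1} | newinsn2 if FEAT2} */
	puts(ALT_2("oldinsn", "newinsn1", "FEAT1", "newinsn2", "FEAT2"));
	return 0;
}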
/*
* Alternative instructions for different CPU types or capabilities.
@@ -266,14 +214,11 @@ static inline int alternatives_text_reserved(void *start, void *end)
* without volatile and memory clobber.
*/
#define alternative(oldinstr, newinstr, ft_flags) \
asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")
asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")
#define alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")
#define alternative_ternary(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
asm_inline volatile(ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) ::: "memory")
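A usage sketch of the alternative() wrapper, modeled on the SMAP helpers in arch/x86/include/asm/smap.h (slightly renamed here): nothing is emitted on CPUs without the feature, and the instruction is patched in on CPUs that have it.
static __always_inline void stac_example(void)
{
	/* Note: a barrier is implicit in alternative(). */
	alternative("", "stac", X86_FEATURE_SMAP);
}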
/*
* Alternative inline assembly with input.
*
@@ -283,18 +228,28 @@ static inline int alternatives_text_reserved(void *start, void *end)
* Leaving an unused argument 0 to keep API compatibility.
*/
#define alternative_input(oldinstr, newinstr, ft_flags, input...) \
asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) \
asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) \
: : "i" (0), ## input)
/* Like alternative_input, but with a single output argument */
#define alternative_io(oldinstr, newinstr, ft_flags, output, input...) \
asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) \
asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) \
: output : "i" (0), ## input)
/* Like alternative_io, but for replacing a direct call with another one. */
#define alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
asm_inline volatile (ALTERNATIVE("call %c[old]", "call %c[new]", ft_flags) \
: output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)
/*
* Like alternative_io, but for replacing a direct call with another one.
*
* Use the %c operand modifier which is the generic way to print a bare
* constant expression with all syntax-specific punctuation omitted. %P
* is the x86-specific variant which can handle constants too, for
* historical reasons, but it should be used primarily for PIC
* references: i.e., if used for a function, it would add the PLT
* suffix.
*/
#define alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
asm_inline volatile(ALTERNATIVE("call %c[old]", "call %c[new]", ft_flags) \
: ALT_OUTPUT_SP(output) \
: [old] "i" (oldfunc), [new] "i" (newfunc), ## input)
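A standalone sketch of the %c operand modifier discussed above, outside the kernel and assuming a non-PIC build: %c prints the bare symbol, so the asm becomes "call target" rather than the invalid "call $target".
void target(void) { }

static inline void call_target(void)
{
	/* target() is empty here; a real call would also declare the
	 * registers it clobbers, and a kernel-side use would list
	 * ASM_CALL_CONSTRAINT as an output; see ALT_OUTPUT_SP() below. */
	asm volatile("call %c[fn]" : : [fn] "i" (target) : "memory");
}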
/*
* Like alternative_call, but there are two features and respective functions.
@@ -302,12 +257,12 @@ static inline int alternatives_text_reserved(void *start, void *end)
* Otherwise, if CPU has feature1, function1 is used.
* Otherwise, old function is used.
*/
#define alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2, \
output, input...) \
asm_inline volatile (ALTERNATIVE_2("call %c[old]", "call %c[new1]", ft_flags1, \
"call %c[new2]", ft_flags2) \
: output, ASM_CALL_CONSTRAINT \
: [old] "i" (oldfunc), [new1] "i" (newfunc1), \
#define alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2, \
output, input...) \
asm_inline volatile(ALTERNATIVE_2("call %c[old]", "call %c[new1]", ft_flags1, \
"call %c[new2]", ft_flags2) \
: ALT_OUTPUT_SP(output) \
: [old] "i" (oldfunc), [new1] "i" (newfunc1), \
[new2] "i" (newfunc2), ## input)
/*
@@ -322,6 +277,8 @@ static inline int alternatives_text_reserved(void *start, void *end)
*/
#define ASM_NO_INPUT_CLOBBER(clbr...) "i" (0) : clbr
#define ALT_OUTPUT_SP(...) ASM_CALL_CONSTRAINT, ## __VA_ARGS__
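A minimal out-of-kernel sketch of what ALT_OUTPUT_SP() prepends. In the kernel, arch/x86/include/asm/asm.h defines ASM_CALL_CONSTRAINT as a "+r" operand on the stack pointer, which keeps the compiler from emitting an asm that contains a CALL before the frame is set up (otherwise objtool prints the "call without frame pointer save/setup" warning quoted later in this series).
register unsigned long current_stack_pointer asm("rsp");
#define ASM_CALL_CONSTRAINT "+r" (current_stack_pointer)

extern void helper(void);

static inline void call_helper(void)
{
	/* Clobber handling elided; this only illustrates the constraint. */
	asm volatile("call helper" : ASM_CALL_CONSTRAINT : : "memory");
}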
/* Macro for creating assembler functions avoiding any C magic. */
#define DEFINE_ASM_FUNC(func, instr, sec) \
asm (".pushsection " #sec ", \"ax\"\n" \
@@ -388,22 +345,23 @@ void nop_func(void);
* @newinstr. ".skip" directive takes care of proper instruction padding
* in case @newinstr is longer than @oldinstr.
*/
.macro ALTERNATIVE oldinstr, newinstr, ft_flags
140:
\oldinstr
141:
.skip -(((144f-143f)-(141b-140b)) > 0) * ((144f-143f)-(141b-140b)),0x90
142:
.pushsection .altinstructions,"a"
altinstr_entry 140b,143f,\ft_flags,142b-140b,144f-143f
.popsection
#define __ALTERNATIVE(oldinst, newinst, flag) \
740: \
oldinst ; \
741: \
.skip -(((744f-743f)-(741b-740b)) > 0) * ((744f-743f)-(741b-740b)),0x90 ;\
742: \
.pushsection .altinstructions,"a" ; \
altinstr_entry 740b,743f,flag,742b-740b,744f-743f ; \
.popsection ; \
.pushsection .altinstr_replacement,"ax" ; \
743: \
newinst ; \
744: \
.popsection ;
.pushsection .altinstr_replacement,"ax"
143:
\newinstr
144:
.popsection
.macro ALTERNATIVE oldinstr, newinstr, ft_flags
__ALTERNATIVE(\oldinstr, \newinstr, \ft_flags)
.endm
#define old_len 141b-140b
@@ -411,66 +369,19 @@ void nop_func(void);
#define new_len2 145f-144f
#define new_len3 146f-145f
/*
* gas compatible max based on the idea from:
* http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax
*
* The additional "-" is needed because gas uses a "true" value of -1.
*/
#define alt_max_2(a, b) ((a) ^ (((a) ^ (b)) & -(-((a) < (b)))))
#define alt_max_3(a, b, c) (alt_max_2(alt_max_2(a, b), c))
/*
* Same as ALTERNATIVE macro above but for two alternatives. If CPU
* has @feature1, it replaces @oldinstr with @newinstr1. If CPU has
 * @feature2, it replaces @oldinstr with @newinstr2.
*/
.macro ALTERNATIVE_2 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2
140:
\oldinstr
141:
.skip -((alt_max_2(new_len1, new_len2) - (old_len)) > 0) * \
(alt_max_2(new_len1, new_len2) - (old_len)),0x90
142:
.pushsection .altinstructions,"a"
altinstr_entry 140b,143f,\ft_flags1,142b-140b,144f-143f
altinstr_entry 140b,144f,\ft_flags2,142b-140b,145f-144f
.popsection
.pushsection .altinstr_replacement,"ax"
143:
\newinstr1
144:
\newinstr2
145:
.popsection
__ALTERNATIVE(__ALTERNATIVE(\oldinstr, \newinstr1, \ft_flags1),
\newinstr2, \ft_flags2)
.endm
.macro ALTERNATIVE_3 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3
140:
\oldinstr
141:
.skip -((alt_max_3(new_len1, new_len2, new_len3) - (old_len)) > 0) * \
(alt_max_3(new_len1, new_len2, new_len3) - (old_len)),0x90
142:
.pushsection .altinstructions,"a"
altinstr_entry 140b,143f,\ft_flags1,142b-140b,144f-143f
altinstr_entry 140b,144f,\ft_flags2,142b-140b,145f-144f
altinstr_entry 140b,145f,\ft_flags3,142b-140b,146f-145f
.popsection
.pushsection .altinstr_replacement,"ax"
143:
\newinstr1
144:
\newinstr2
145:
\newinstr3
146:
.popsection
__ALTERNATIVE(ALTERNATIVE_2(\oldinstr, \newinstr1, \ft_flags1, \newinstr2, \ft_flags2),
\newinstr3, \ft_flags3)
.endm
/* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
@@ -54,7 +54,7 @@ static inline void clear_page(void *page)
clear_page_rep, X86_FEATURE_REP_GOOD,
clear_page_erms, X86_FEATURE_ERMS,
"=D" (page),
"0" (page)
"D" (page)
: "cc", "memory", "rax", "rcx");
}
@@ -432,6 +432,11 @@ static int alt_replace_call(u8 *instr, u8 *insn_buff, struct alt_instr *a)
return 5;
}
static inline u8 * instr_va(struct alt_instr *i)
{
return (u8 *)&i->instr_offset + i->instr_offset;
}
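The self-relative encoding that instr_va() decodes is the ".long 771b - ." emitted by ALTINSTR_ENTRY(): the table stores offsets relative to the field itself, so it stays position independent. A generic sketch of the decoding step:
#include <stdint.h>
/* Add the stored offset to the address of the field holding it. */
static inline void *resolve_self_relative(const int32_t *field)
{
	return (char *)field + *field;
}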
/*
* Replace instructions with better alternatives for this CPU type. This runs
* before SMP is initialized to avoid SMP problems with self modifying code.
@@ -447,7 +452,7 @@ void __init_or_module noinline apply_alternatives(struct alt_instr *start,
{
u8 insn_buff[MAX_PATCH_LEN];
u8 *instr, *replacement;
struct alt_instr *a;
struct alt_instr *a, *b;
DPRINTK(ALT, "alt table %px, -> %px", start, end);
@@ -473,7 +478,18 @@ void __init_or_module noinline apply_alternatives(struct alt_instr *start,
for (a = start; a < end; a++) {
int insn_buff_sz = 0;
instr = (u8 *)&a->instr_offset + a->instr_offset;
/*
* In case of nested ALTERNATIVE()s the outer alternative might
* add more padding. To ensure consistent patching find the max
* padding for all alt_instr entries for this site (nested
* alternatives result in consecutive entries).
*/
for (b = a+1; b < end && instr_va(b) == instr_va(a); b++) {
u8 len = max(a->instrlen, b->instrlen);
a->instrlen = b->instrlen = len;
}
instr = instr_va(a);
replacement = (u8 *)&a->repl_offset + a->repl_offset;
BUG_ON(a->instrlen > sizeof(insn_buff));
BUG_ON(a->cpuid >= (NCAPINTS + NBUGINTS) * 32);
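A standalone sketch of the padding-consolidation loop above, with simplified types: for a run of consecutive entries targeting the same site, every entry ends up carrying the maximum instruction length of the run.
#include <stdio.h>

struct entry { int site; unsigned char len; };

static void consolidate(struct entry *start, struct entry *end)
{
	for (struct entry *a = start; a < end; a++)
		for (struct entry *b = a + 1; b < end && b->site == a->site; b++) {
			unsigned char len = a->len > b->len ? a->len : b->len;

			a->len = b->len = len;
		}
}

int main(void)
{
	struct entry e[] = { { 1, 3 }, { 1, 5 }, { 1, 4 }, { 2, 2 } };

	consolidate(e, e + 4);
	for (int i = 0; i < 4; i++)
		printf("site %d len %u\n", e[i].site, e[i].len);	/* 5 5 5 2 */
	return 0;
}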
@@ -1641,7 +1657,7 @@ static noinline void __init alt_reloc_selftest(void)
*/
asm_inline volatile (
ALTERNATIVE("", "lea %[mem], %%" _ASM_ARG1 "; call __alt_reloc_selftest;", X86_FEATURE_ALWAYS)
: /* output */
: ASM_CALL_CONSTRAINT
: [mem] "m" (__alt_reloc_selftest_addr)
: _ASM_ARG1
);
@@ -106,21 +106,17 @@ static inline u64 xfeatures_mask_independent(void)
* Otherwise, if XSAVEOPT is enabled, XSAVEOPT replaces XSAVE because XSAVEOPT
* supports modified optimization which is not supported by XSAVE.
*
* We use XSAVE as a fallback.
*
* The 661 label is defined in the ALTERNATIVE* macros as the address of the
* original instruction which gets replaced. We need to use it here as the
* address of the instruction where we might get an exception at.
* Use XSAVE as a fallback.
*/
#define XSTATE_XSAVE(st, lmask, hmask, err) \
asm volatile(ALTERNATIVE_3(XSAVE, \
asm volatile("1: " ALTERNATIVE_3(XSAVE, \
XSAVEOPT, X86_FEATURE_XSAVEOPT, \
XSAVEC, X86_FEATURE_XSAVEC, \
XSAVES, X86_FEATURE_XSAVES) \
"\n" \
"xor %[err], %[err]\n" \
"3:\n" \
_ASM_EXTABLE_TYPE_REG(661b, 3b, EX_TYPE_EFAULT_REG, %[err]) \
_ASM_EXTABLE_TYPE_REG(1b, 3b, EX_TYPE_EFAULT_REG, %[err]) \
: [err] "=r" (err) \
: "D" (st), "m" (*st), "a" (lmask), "d" (hmask) \
: "memory")
@@ -130,11 +126,11 @@ static inline u64 xfeatures_mask_independent(void)
* XSAVE area format.
*/
#define XSTATE_XRESTORE(st, lmask, hmask) \
asm volatile(ALTERNATIVE(XRSTOR, \
asm volatile("1: " ALTERNATIVE(XRSTOR, \
XRSTORS, X86_FEATURE_XSAVES) \
"\n" \
"3:\n" \
_ASM_EXTABLE_TYPE(661b, 3b, EX_TYPE_FPU_RESTORE) \
_ASM_EXTABLE_TYPE(1b, 3b, EX_TYPE_FPU_RESTORE) \
: \
: "D" (st), "m" (*st), "a" (lmask), "d" (hmask) \
: "memory")
@@ -1069,7 +1069,7 @@ static __always_inline u8 test_cc(unsigned int condition, unsigned long flags)
flags = (flags & EFLAGS_MASK) | X86_EFLAGS_IF;
asm("push %[flags]; popf; " CALL_NOSPEC
: "=a"(rc) : [thunk_target]"r"(fop), [flags]"r"(flags));
: "=a"(rc), ASM_CALL_CONSTRAINT : [thunk_target]"r"(fop), [flags]"r"(flags));
return rc;
}
@@ -284,6 +284,25 @@ the objtool maintainers.
Otherwise the stack frame may not get created before the call.
objtool can help with pinpointing the exact function where it happens:
$ OBJTOOL_ARGS="--verbose" make arch/x86/kvm/
arch/x86/kvm/kvm.o: warning: objtool: .altinstr_replacement+0xc5: call without frame pointer save/setup
arch/x86/kvm/kvm.o: warning: objtool: em_loop.part.0+0x29: (alt)
arch/x86/kvm/kvm.o: warning: objtool: em_loop.part.0+0x0: <=== (sym)
LD [M] arch/x86/kvm/kvm-intel.o
0000 0000000000028220 <em_loop.part.0>:
0000 28220: 0f b6 47 61 movzbl 0x61(%rdi),%eax
0004 28224: 3c e2 cmp $0xe2,%al
0006 28226: 74 2c je 28254 <em_loop.part.0+0x34>
0008 28228: 48 8b 57 10 mov 0x10(%rdi),%rdx
000c 2822c: 83 f0 05 xor $0x5,%eax
000f 2822f: 48 c1 e0 04 shl $0x4,%rax
0013 28233: 25 f0 00 00 00 and $0xf0,%eax
0018 28238: 81 e2 d5 08 00 00 and $0x8d5,%edx
001e 2823e: 80 ce 02 or $0x2,%dh
...
2. file.o: warning: objtool: .text+0x53: unreachable instruction
@@ -9,6 +9,29 @@
void arch_handle_alternative(unsigned short feature, struct special_alt *alt)
{
static struct special_alt *group, *prev;
/*
* Recompute orig_len for nested ALTERNATIVE()s.
*/
if (group && group->orig_sec == alt->orig_sec &&
group->orig_off == alt->orig_off) {
struct special_alt *iter = group;
for (;;) {
unsigned int len = max(iter->orig_len, alt->orig_len);
iter->orig_len = alt->orig_len = len;
if (iter == prev)
break;
iter = list_next_entry(iter, list);
}
} else group = alt;
prev = alt;
switch (feature) {
case X86_FEATURE_SMAP:
/*
@@ -84,6 +84,14 @@ static int get_alt_entry(struct elf *elf, const struct special_entry *entry,
entry->new_len);
}
orig_reloc = find_reloc_by_dest(elf, sec, offset + entry->orig);
if (!orig_reloc) {
WARN_FUNC("can't find orig reloc", sec, offset + entry->orig);
return -1;
}
reloc_to_sec_off(orig_reloc, &alt->orig_sec, &alt->orig_off);
if (entry->feature) {
unsigned short feature;
@@ -94,14 +102,6 @@ static int get_alt_entry(struct elf *elf, const struct special_entry *entry,
arch_handle_alternative(feature, alt);
}
orig_reloc = find_reloc_by_dest(elf, sec, offset + entry->orig);
if (!orig_reloc) {
WARN_FUNC("can't find orig reloc", sec, offset + entry->orig);
return -1;
}
reloc_to_sec_off(orig_reloc, &alt->orig_sec, &alt->orig_off);
if (!entry->group || alt->new_len) {
new_reloc = find_reloc_by_dest(elf, sec, offset + entry->new);
if (!new_reloc) {