Date: Mon, 18 May 2020 11:20:59 +0900
From: Masami Hiramatsu <mhiramat@...nel.org>
To: Kees Cook <keescook@...omium.org>
Cc: Andrew Jeffery <andrew@...id.au>, linux-arm-kernel@...ts.infradead.org,
	linux@...linux.org.uk, mhiramat@...nel.org, labbott@...hat.com,
	mathieu.desnoyers@...icios.com, linux-kernel@...r.kernel.org
Subject: Re: [PATCH] ARM: kprobes: Avoid fortify_panic() when copying optprobe template

On Sun, 17 May 2020 14:48:52 -0700
Kees Cook <keescook@...omium.org> wrote:

> On Mon, May 18, 2020 at 01:09:59AM +0930, Andrew Jeffery wrote:
> > As mentioned, a couple of attempts have been made to address the issue
> > by casting a pointer to optprobe_template_entry before providing it to
> > memcpy(), however gccs such as Ubuntu 20.04's arm-linux-gnueabi-gcc
> > 9.3.0 (Ubuntu 9.3.0-10ubuntu1) see through these efforts.
> 
> Ah, dang. :P
> 
> How about converting them all to unsized arrays, which would also allow
> the code to drop the "&" everywhere, I think. This is untested:
> 

This looks good to me since it uses the same technique as in sections.h.

Acked-by: Masami Hiramatsu <mhiramat@...nel.org>

Thank you!

> diff --git a/arch/arm/include/asm/kprobes.h b/arch/arm/include/asm/kprobes.h
> index 213607a1f45c..e26a278d301a 100644
> --- a/arch/arm/include/asm/kprobes.h
> +++ b/arch/arm/include/asm/kprobes.h
> @@ -44,20 +44,20 @@ int kprobe_exceptions_notify(struct notifier_block *self,
>  			     unsigned long val, void *data);
> 
>  /* optinsn template addresses */
> -extern __visible kprobe_opcode_t optprobe_template_entry;
> -extern __visible kprobe_opcode_t optprobe_template_val;
> -extern __visible kprobe_opcode_t optprobe_template_call;
> -extern __visible kprobe_opcode_t optprobe_template_end;
> -extern __visible kprobe_opcode_t optprobe_template_sub_sp;
> -extern __visible kprobe_opcode_t optprobe_template_add_sp;
> -extern __visible kprobe_opcode_t optprobe_template_restore_begin;
> -extern __visible kprobe_opcode_t optprobe_template_restore_orig_insn;
> -extern __visible kprobe_opcode_t optprobe_template_restore_end;
> +extern __visible kprobe_opcode_t optprobe_template_entry[];
> +extern __visible kprobe_opcode_t optprobe_template_val[];
> +extern __visible kprobe_opcode_t optprobe_template_call[];
> +extern __visible kprobe_opcode_t optprobe_template_end[];
> +extern __visible kprobe_opcode_t optprobe_template_sub_sp[];
> +extern __visible kprobe_opcode_t optprobe_template_add_sp[];
> +extern __visible kprobe_opcode_t optprobe_template_restore_begin[];
> +extern __visible kprobe_opcode_t optprobe_template_restore_orig_insn[];
> +extern __visible kprobe_opcode_t optprobe_template_restore_end[];
> 
>  #define MAX_OPTIMIZED_LENGTH	4
>  #define MAX_OPTINSN_SIZE				\
> -	((unsigned long)&optprobe_template_end -	\
> -	 (unsigned long)&optprobe_template_entry)
> +	((unsigned long)optprobe_template_end -	\
> +	 (unsigned long)optprobe_template_entry)
>  #define RELATIVEJUMP_SIZE	4
> 
>  struct arch_optimized_insn {
> diff --git a/arch/arm/probes/kprobes/opt-arm.c b/arch/arm/probes/kprobes/opt-arm.c
> index 7a449df0b359..c78180172120 100644
> --- a/arch/arm/probes/kprobes/opt-arm.c
> +++ b/arch/arm/probes/kprobes/opt-arm.c
> @@ -85,21 +85,21 @@ asm (
>  			"optprobe_template_end:\n");
> 
>  #define TMPL_VAL_IDX \
> -	((unsigned long *)&optprobe_template_val - (unsigned long *)&optprobe_template_entry)
> +	((unsigned long *)optprobe_template_val - (unsigned long *)optprobe_template_entry)
>  #define TMPL_CALL_IDX \
> -	((unsigned long *)&optprobe_template_call - (unsigned long *)&optprobe_template_entry)
> +	((unsigned long *)optprobe_template_call - (unsigned long *)optprobe_template_entry)
>  #define TMPL_END_IDX \
> -	((unsigned long *)&optprobe_template_end - (unsigned long *)&optprobe_template_entry)
> +	((unsigned long *)optprobe_template_end - (unsigned long *)optprobe_template_entry)
>  #define TMPL_ADD_SP \
> -	((unsigned long *)&optprobe_template_add_sp - (unsigned long *)&optprobe_template_entry)
> +	((unsigned long *)optprobe_template_add_sp - (unsigned long *)optprobe_template_entry)
>  #define TMPL_SUB_SP \
> -	((unsigned long *)&optprobe_template_sub_sp - (unsigned long *)&optprobe_template_entry)
> +	((unsigned long *)optprobe_template_sub_sp - (unsigned long *)optprobe_template_entry)
>  #define TMPL_RESTORE_BEGIN \
> -	((unsigned long *)&optprobe_template_restore_begin - (unsigned long *)&optprobe_template_entry)
> +	((unsigned long *)optprobe_template_restore_begin - (unsigned long *)optprobe_template_entry)
>  #define TMPL_RESTORE_ORIGN_INSN \
> -	((unsigned long *)&optprobe_template_restore_orig_insn - (unsigned long *)&optprobe_template_entry)
> +	((unsigned long *)optprobe_template_restore_orig_insn - (unsigned long *)optprobe_template_entry)
>  #define TMPL_RESTORE_END \
> -	((unsigned long *)&optprobe_template_restore_end - (unsigned long *)&optprobe_template_entry)
> +	((unsigned long *)optprobe_template_restore_end - (unsigned long *)optprobe_template_entry)
> 
>  /*
>   * ARM can always optimize an instruction when using ARM ISA, except
> @@ -234,7 +234,7 @@ int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *or
>  	}
> 
>  	/* Copy arch-dep-instance from template. */
> -	memcpy(code, (unsigned long *)&optprobe_template_entry,
> +	memcpy(code, (unsigned long *)optprobe_template_entry,
>  	       TMPL_END_IDX * sizeof(kprobe_opcode_t));
> 
>  	/* Adjust buffer according to instruction. */
> 
> -- 
> Kees Cook

-- 
Masami Hiramatsu <mhiramat@...nel.org>
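For context, a minimal user-space sketch of the object-size tracking that FORTIFY_SOURCE builds on; this is an illustration only, using hypothetical symbols blob_sized and blob_unsized rather than the kernel's optprobe_template_* labels. It shows why the earlier cast-based workarounds could not help (the compiler still knows the declared object is one kprobe_opcode_t wide) and why declaring the labels as unsized arrays does: the size becomes unknown, so the fortified memcpy() check has nothing to compare against.

/*
 * sketch.c - illustrative only, not kernel code. blob_sized and
 * blob_unsized are hypothetical stand-ins for the optprobe template
 * labels; build with e.g. "gcc -O2 sketch.c" on a GNU/Linux toolchain.
 */
#include <stdio.h>

typedef unsigned int opcode_t;

/* Define two labels in file-scope asm, much as opt-arm.c defines the
 * optprobe template labels, and cover them with C declarations below. */
asm (".globl blob_sized\n"
     "blob_sized:\n"
     ".globl blob_unsized\n"
     "blob_unsized:\n"
     ".long 0, 0, 0, 0\n");

/* Old style: declared as a single opcode_t.  The compiler takes the
 * object size to be sizeof(opcode_t), and casting the pointer does not
 * change that, so a fortified memcpy() of anything larger is rejected. */
extern opcode_t blob_sized;

/* New style: an unsized array.  Its extent is unknown at compile time,
 * so the object-size check cannot fire. */
extern opcode_t blob_unsized[];

int main(void)
{
	printf("single object:       %zu\n",
	       __builtin_object_size(&blob_sized, 0));                  /* 4 */
	printf("single object, cast: %zu\n",
	       __builtin_object_size((unsigned long *)&blob_sized, 0)); /* still 4 */
	printf("unsized array:       %zu\n",
	       __builtin_object_size(blob_unsized, 0));   /* (size_t)-1: unknown */
	return 0;
}

With optimization enabled, the first two results are 4 while the last is (size_t)-1, which is why dropping the single-object declarations, rather than adding more casts, is what stops the template copy from reaching fortify_panic().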