[<prev] [next>] [<thread-prev] [thread-next>] [day] [month] [year] [list]
Message-Id: <20240810-nolibc-lto-v1-3-a86e514c7fc1@weissschuh.net>
Date: Sat, 10 Aug 2024 12:54:46 +0200
From: Thomas Weißschuh <linux@...ssschuh.net>
To: Willy Tarreau <w@....eu>
Cc: Ammar Faizi <ammarfaizi2@...weeb.org>, linux-kernel@...r.kernel.org,
Thomas Weißschuh <linux@...ssschuh.net>
Subject: [PATCH 3/3] tools/nolibc: x86_64: wrap asm functions in functions
The bare top-level asm usage does not work well with LTO.
It leads to errors such as:
/tmp/ccIHTYT6.s: Assembler messages:
/tmp/ccIHTYT6.s:36: Error: symbol `memmove' is already defined
/tmp/ccIHTYT6.s:37: Error: symbol `memcpy' is already defined
/tmp/ccIHTYT6.s:46: Error: symbol `.Lbackward_copy' is already defined
/tmp/ccIHTYT6.s:54: Error: symbol `memset' is already defined
Wrap the asm in naked functions, which leads to the same object code but
avoids the errors.
Signed-off-by: Thomas Weißschuh <linux@...ssschuh.net>
---
tools/include/nolibc/arch-x86_64.h | 80 +++++++++++++++++++++-----------------
1 file changed, 44 insertions(+), 36 deletions(-)
diff --git a/tools/include/nolibc/arch-x86_64.h b/tools/include/nolibc/arch-x86_64.h
index 3c3b703d9b0c..efbea173fb74 100644
--- a/tools/include/nolibc/arch-x86_64.h
+++ b/tools/include/nolibc/arch-x86_64.h
@@ -174,45 +174,53 @@ void __attribute__((weak, noreturn)) __nolibc_naked __no_stack_protector _start(
}
#define NOLIBC_ARCH_HAS_MEMMOVE
-void *memmove(void *dst, const void *src, size_t len);
+__attribute__((weak,unused,section(".text.nolibc_memmove")))
+__nolibc_naked __no_stack_protector
+void *memmove(void *dst __attribute__((unused)),
+ const void *src __attribute__((unused)),
+ size_t len __attribute__((unused)))
+{
+ __asm__ volatile (
+ "movq %rdx, %rcx\n\t"
+ "movq %rdi, %rax\n\t"
+ "movq %rdi, %rdx\n\t"
+ "subq %rsi, %rdx\n\t"
+ "cmpq %rcx, %rdx\n\t"
+ "jb .Lbackward_copy\n\t"
+ "rep movsb\n\t"
+ "retq\n"
+ ".Lbackward_copy:"
+ "leaq -1(%rdi, %rcx, 1), %rdi\n\t"
+ "leaq -1(%rsi, %rcx, 1), %rsi\n\t"
+ "std\n\t"
+ "rep movsb\n\t"
+ "cld\n\t"
+ "retq\n"
+ );
+ __nolibc_naked_epilogue();
+}
#define NOLIBC_ARCH_HAS_MEMCPY
-void *memcpy(void *dst, const void *src, size_t len);
+static __inline__ void *memcpy(void *dst, const void *src, size_t len)
+{
+ return memmove(dst, src, len);
+}
#define NOLIBC_ARCH_HAS_MEMSET
-void *memset(void *dst, int c, size_t len);
-
-__asm__ (
-".section .text.nolibc_memmove_memcpy\n"
-".weak memmove\n"
-".weak memcpy\n"
-"memmove:\n"
-"memcpy:\n"
- "movq %rdx, %rcx\n\t"
- "movq %rdi, %rax\n\t"
- "movq %rdi, %rdx\n\t"
- "subq %rsi, %rdx\n\t"
- "cmpq %rcx, %rdx\n\t"
- "jb .Lbackward_copy\n\t"
- "rep movsb\n\t"
- "retq\n"
-".Lbackward_copy:"
- "leaq -1(%rdi, %rcx, 1), %rdi\n\t"
- "leaq -1(%rsi, %rcx, 1), %rsi\n\t"
- "std\n\t"
- "rep movsb\n\t"
- "cld\n\t"
- "retq\n"
-
-".section .text.nolibc_memset\n"
-".weak memset\n"
-"memset:\n"
- "xchgl %eax, %esi\n\t"
- "movq %rdx, %rcx\n\t"
- "pushq %rdi\n\t"
- "rep stosb\n\t"
- "popq %rax\n\t"
- "retq\n"
-);
+__attribute__((weak,unused,section(".text.nolibc_memset")))
+__nolibc_naked __no_stack_protector
+void *memset(void *dst __attribute__((unused)), int c __attribute__((unused)),
+ size_t len __attribute__((unused)))
+{
+ __asm__ volatile (
+ "xchgl %eax, %esi\n\t"
+ "movq %rdx, %rcx\n\t"
+ "pushq %rdi\n\t"
+ "rep stosb\n\t"
+ "popq %rax\n\t"
+ "retq\n"
+ );
+ __nolibc_naked_epilogue();
+}
#endif /* _NOLIBC_ARCH_X86_64_H */
--
2.46.0
Powered by blists - more mailing lists