Message-Id: <20190823224153.15223-10-namit@vmware.com>
Date: Fri, 23 Aug 2019 15:41:53 -0700
From: Nadav Amit <namit@...are.com>
To: Andy Lutomirski <luto@...nel.org>,
Dave Hansen <dave.hansen@...ux.intel.com>
Cc: x86@...nel.org, linux-kernel@...r.kernel.org,
Peter Zijlstra <peterz@...radead.org>,
Thomas Gleixner <tglx@...utronix.de>,
Ingo Molnar <mingo@...hat.com>, Nadav Amit <namit@...are.com>
Subject: [PATCH v4 9/9] x86/mm/tlb: Remove unnecessary uses of the inline keyword

The compiler is smart enough without these hints.

Cc: Andy Lutomirski <luto@...nel.org>
Cc: Peter Zijlstra <peterz@...radead.org>
Suggested-by: Dave Hansen <dave.hansen@...ux.intel.com>
Reviewed-by: Dave Hansen <dave.hansen@...ux.intel.com>
Signed-off-by: Nadav Amit <namit@...are.com>
---
arch/x86/mm/tlb.c | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
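
[ Note for context, not part of the patch: a minimal stand-alone sketch of
  why the hint is redundant here. The names mangle_flags() and flush_flags()
  below are made up for illustration and are not kernel functions. At -O2,
  GCC and Clang will normally inline a small static helper like this on
  their own; a bare `inline` is only a hint anyway, and the kernel uses
  __always_inline where inlining must be guaranteed. ]

/* sketch.c -- compile with: gcc -O2 -S sketch.c */

/*
 * A small static helper in the same translation unit, deliberately
 * not marked `inline`.
 */
static unsigned long mangle_flags(unsigned long flags)
{
	return (flags >> 3) & 0x1UL;
}

unsigned long flush_flags(unsigned long flags)
{
	/*
	 * At -O2 the compiler is expected to fold this call into
	 * straight-line code; the generated assembly typically contains
	 * no separate copy of mangle_flags().
	 */
	return mangle_flags(flags) | 0x2UL;
}

[ Inspecting the resulting sketch.s would typically show flush_flags()
  compiled to a few shift/and/or instructions with no call, which is
  exactly the behavior the removed keyword was meant to suggest. ]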

diff --git a/arch/x86/mm/tlb.c b/arch/x86/mm/tlb.c
index 3dca146edcf1..0addc6e84126 100644
--- a/arch/x86/mm/tlb.c
+++ b/arch/x86/mm/tlb.c
@@ -189,7 +189,7 @@ static void sync_current_stack_to_mm(struct mm_struct *mm)
}
}

-static inline unsigned long mm_mangle_tif_spec_ib(struct task_struct *next)
+static unsigned long mm_mangle_tif_spec_ib(struct task_struct *next)
{
unsigned long next_tif = task_thread_info(next)->flags;
unsigned long ibpb = (next_tif >> TIF_SPEC_IB) & LAST_USER_MM_IBPB;
@@ -741,7 +741,7 @@ static DEFINE_PER_CPU_SHARED_ALIGNED(struct flush_tlb_info, flush_tlb_info);
static DEFINE_PER_CPU(unsigned int, flush_tlb_info_idx);
#endif

-static inline struct flush_tlb_info *get_flush_tlb_info(struct mm_struct *mm,
+static struct flush_tlb_info *get_flush_tlb_info(struct mm_struct *mm,
unsigned long start, unsigned long end,
unsigned int stride_shift, bool freed_tables,
u64 new_tlb_gen)
@@ -768,7 +768,7 @@ static inline struct flush_tlb_info *get_flush_tlb_info(struct mm_struct *mm,
return info;
}

-static inline void put_flush_tlb_info(void)
+static void put_flush_tlb_info(void)
{
#ifdef CONFIG_DEBUG_VM
/* Complete reentrency prevention checks */
--
2.17.1