lists.openwall.net   lists  /  announce  owl-users  owl-dev  john-users  john-dev  passwdqc-users  yescrypt  popa3d-users  /  oss-security  kernel-hardening  musl  sabotage  tlsify  passwords  /  crypt-dev  xvendor  /  Bugtraq  Full-Disclosure  linux-kernel  linux-netdev  linux-ext4  linux-hardening  linux-cve-announce  PHC 
Open Source and information security mailing list archives
 
Hash Suite: Windows password security audit tool. GUI, reports in PDF.
[<prev] [next>] [<thread-prev] [thread-next>] [day] [month] [year] [list]
Message-ID: <99f5bcb9b5a3604264d43e7305950dcf7f6cfcc7.camel@perches.com>
Date:   Tue, 17 Jul 2018 10:08:45 -0700
From:   Joe Perches <joe@...ches.com>
To:     David Lebrun <dav.lebrun@...il.com>,
        Colin King <colin.king@...onical.com>,
        "David S . Miller" <davem@...emloft.net>,
        Alexey Kuznetsov <kuznet@....inr.ac.ru>,
        Hideaki YOSHIFUJI <yoshfuji@...ux-ipv6.org>,
        netdev@...r.kernel.org
Cc:     kernel-janitors@...r.kernel.org, linux-kernel@...r.kernel.org
Subject: Re: [PATCH] ipv6: sr: fix useless rol32 call on hash

On Tue, 2018-07-17 at 17:03 +0100, David Lebrun wrote:
> On 07/17/2018 04:52 PM, Colin King wrote:
> > From: Colin Ian King<colin.king@...onical.com>
> > 
> > The rol32 call is currently rotating hash but the rol'd value is
> > being discarded. I believe the current code is incorrect and hash
> > should be assigned the rotated value returned from rol32.
> > 
> > Detected by CoverityScan, CID#1468411 ("Useless call")
> > 
> > Fixes: b5facfdba14c ("ipv6: sr: Compute flowlabel for outer IPv6 header of seg6 encap mode")
> > Signed-off-by: Colin Ian King<colin.king@...onical.com>
> 
> Acked-by: dlebrun@...gle.com
> 
> Good catch, thanks !
> 
> In that case, the same issue is present in 
> include/net/ipv6.h:ip6_make_flowlabel().

Perhaps all of the ror and rol definitions should add
__must_check

Something like the below and perhaps many more of the
functions that return some value should have __must_check
added as well.
---
 include/linux/bitops.h | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/include/linux/bitops.h b/include/linux/bitops.h
index af419012d77d..3cddde65c8bb 100644
--- a/include/linux/bitops.h
+++ b/include/linux/bitops.h
@@ -67,7 +67,7 @@ static inline __u64 rol64(__u64 word, unsigned int shift)
  * @word: value to rotate
  * @shift: bits to roll
  */
-static inline __u64 ror64(__u64 word, unsigned int shift)
+static inline __must_check __u64 ror64(__u64 word, unsigned int shift)
 {
 	return (word >> shift) | (word << (64 - shift));
 }
@@ -77,7 +77,7 @@ static inline __u64 ror64(__u64 word, unsigned int shift)
  * @word: value to rotate
  * @shift: bits to roll
  */
-static inline __u32 rol32(__u32 word, unsigned int shift)
+static inline __must_check __u32 rol32(__u32 word, unsigned int shift)
 {
 	return (word << shift) | (word >> ((-shift) & 31));
 }
@@ -87,7 +87,7 @@ static inline __u32 rol32(__u32 word, unsigned int shift)
  * @word: value to rotate
  * @shift: bits to roll
  */
-static inline __u32 ror32(__u32 word, unsigned int shift)
+static inline __must_check __u32 ror32(__u32 word, unsigned int shift)
 {
 	return (word >> shift) | (word << (32 - shift));
 }
@@ -97,7 +97,7 @@ static inline __u32 ror32(__u32 word, unsigned int shift)
  * @word: value to rotate
  * @shift: bits to roll
  */
-static inline __u16 rol16(__u16 word, unsigned int shift)
+static inline __must_check __u16 rol16(__u16 word, unsigned int shift)
 {
 	return (word << shift) | (word >> (16 - shift));
 }
@@ -107,7 +107,7 @@ static inline __u16 rol16(__u16 word, unsigned int shift)
  * @word: value to rotate
  * @shift: bits to roll
  */
-static inline __u16 ror16(__u16 word, unsigned int shift)
+static inline __must_check __u16 ror16(__u16 word, unsigned int shift)
 {
 	return (word >> shift) | (word << (16 - shift));
 }
@@ -117,7 +117,7 @@ static inline __u16 ror16(__u16 word, unsigned int shift)
  * @word: value to rotate
  * @shift: bits to roll
  */
-static inline __u8 rol8(__u8 word, unsigned int shift)
+static inline __must_check __u8 rol8(__u8 word, unsigned int shift)
 {
 	return (word << shift) | (word >> (8 - shift));
 }
@@ -127,7 +127,7 @@ static inline __u8 rol8(__u8 word, unsigned int shift)
  * @word: value to rotate
  * @shift: bits to roll
  */
-static inline __u8 ror8(__u8 word, unsigned int shift)
+static inline __must_check __u8 ror8(__u8 word, unsigned int shift)
 {
 	return (word >> shift) | (word << (8 - shift));
 }
@@ -139,7 +139,7 @@ static inline __u8 ror8(__u8 word, unsigned int shift)
  *
  * This is safe to use for 16- and 8-bit types as well.
  */
-static inline __s32 sign_extend32(__u32 value, int index)
+static inline __must_check __s32 sign_extend32(__u32 value, int index)
 {
 	__u8 shift = 31 - index;
 	return (__s32)(value << shift) >> shift;
@@ -150,7 +150,7 @@ static inline __s32 sign_extend32(__u32 value, int index)
  * @value: value to sign extend
  * @index: 0 based bit index (0<=index<64) to sign bit
  */
-static inline __s64 sign_extend64(__u64 value, int index)
+static inline __must_check __s64 sign_extend64(__u64 value, int index)
 {
 	__u8 shift = 63 - index;
 	return (__s64)(value << shift) >> shift;


Powered by blists - more mailing lists

Powered by Openwall GNU/*/Linux Powered by OpenVZ