Date:   Fri, 3 Nov 2017 19:55:43 +0900
From:   Tetsuo Handa <penguin-kernel@...ove.SAKURA.ne.jp>
To:     wei.w.wang@...el.com, virtio-dev@...ts.oasis-open.org,
        linux-kernel@...r.kernel.org, qemu-devel@...gnu.org,
        virtualization@...ts.linux-foundation.org, kvm@...r.kernel.org,
        linux-mm@...ck.org, mst@...hat.com, mhocko@...nel.org,
        akpm@...ux-foundation.org, mawilcox@...rosoft.com
Cc:     david@...hat.com, cornelia.huck@...ibm.com,
        mgorman@...hsingularity.net, aarcange@...hat.com,
        amit.shah@...hat.com, pbonzini@...hat.com, willy@...radead.org,
        liliang.opensource@...il.com, yang.zhang.wz@...il.com,
        quan.xu@...yun.com
Subject: Re: [PATCH v17 1/6] lib/xbitmap: Introduce xbitmap

I'm commenting without fully understanding the logic.

Wei Wang wrote:
> +
> +bool xb_preload(gfp_t gfp);
> +

Want a __must_check annotation here, since __radix_tree_preload() is
marked with __must_check. Failing to check the result of xb_preload()
by mistake will lead to preemption being kept disabled unexpectedly.
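
Something like the following, for example (just a sketch; assuming the
prototype otherwise stays as is):

	bool __must_check xb_preload(gfp_t gfp);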



> +int xb_set_bit(struct xb *xb, unsigned long bit)
> +{
> +	int err;
> +	unsigned long index = bit / IDA_BITMAP_BITS;
> +	struct radix_tree_root *root = &xb->xbrt;
> +	struct radix_tree_node *node;
> +	void **slot;
> +	struct ida_bitmap *bitmap;
> +	unsigned long ebit;
> +
> +	bit %= IDA_BITMAP_BITS;
> +	ebit = bit + 2;
> +
> +	err = __radix_tree_create(root, index, 0, &node, &slot);
> +	if (err)
> +		return err;
> +	bitmap = rcu_dereference_raw(*slot);
> +	if (radix_tree_exception(bitmap)) {
> +		unsigned long tmp = (unsigned long)bitmap;
> +
> +		if (ebit < BITS_PER_LONG) {
> +			tmp |= 1UL << ebit;
> +			rcu_assign_pointer(*slot, (void *)tmp);
> +			return 0;
> +		}
> +		bitmap = this_cpu_xchg(ida_bitmap, NULL);
> +		if (!bitmap)

Please document the locking rules, in order to explain how the memory
allocated by __radix_tree_create() will not be leaked (see the sketch
after the quoted function below).

> +			return -EAGAIN;
> +		memset(bitmap, 0, sizeof(*bitmap));
> +		bitmap->bitmap[0] = tmp >> RADIX_TREE_EXCEPTIONAL_SHIFT;
> +		rcu_assign_pointer(*slot, bitmap);
> +	}
> +
> +	if (!bitmap) {
> +		if (ebit < BITS_PER_LONG) {
> +			bitmap = (void *)((1UL << ebit) |
> +					RADIX_TREE_EXCEPTIONAL_ENTRY);
> +			__radix_tree_replace(root, node, slot, bitmap, NULL,
> +						NULL);
> +			return 0;
> +		}
> +		bitmap = this_cpu_xchg(ida_bitmap, NULL);
> +		if (!bitmap)

Same here.

> +			return -EAGAIN;
> +		memset(bitmap, 0, sizeof(*bitmap));
> +		__radix_tree_replace(root, node, slot, bitmap, NULL, NULL);
> +	}
> +
> +	__set_bit(bit, bitmap->bitmap);
> +	return 0;
> +}
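
To make the question concrete, the caller pattern I'm imagining is
something like below (only a sketch; xb_preload_end() and "lock" are
hypothetical names used for illustration):

	/*
	 * Sketch of assumed usage: check the preload result, and serialize
	 * modifications on the same xb with a caller-provided lock.
	 */
	static int example_set(struct xb *xb, spinlock_t *lock, unsigned long bit)
	{
		int err;

		if (!xb_preload(GFP_KERNEL))
			return -ENOMEM;	/* what is the preemption state here? */
		spin_lock(lock);	/* callers serialize xb_set_bit()/xb_clear_bit() */
		err = xb_set_bit(xb, bit);
		spin_unlock(lock);
		xb_preload_end();	/* hypothetical: re-enable preemption */
		return err;
	}

If xb_set_bit() fails with -EAGAIN in such a caller, what reclaims the
node allocated by __radix_tree_create()? That is what the documented
rules should answer.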



> +void xb_clear_bit(struct xb *xb, unsigned long bit)
> +{
> +	unsigned long index = bit / IDA_BITMAP_BITS;
> +	struct radix_tree_root *root = &xb->xbrt;
> +	struct radix_tree_node *node;
> +	void **slot;
> +	struct ida_bitmap *bitmap;
> +	unsigned long ebit;
> +
> +	bit %= IDA_BITMAP_BITS;
> +	ebit = bit + 2;
> +
> +	bitmap = __radix_tree_lookup(root, index, &node, &slot);
> +	if (radix_tree_exception(bitmap)) {
> +		unsigned long tmp = (unsigned long)bitmap;
> +
> +		if (ebit >= BITS_PER_LONG)
> +			return;
> +		tmp &= ~(1UL << ebit);
> +		if (tmp == RADIX_TREE_EXCEPTIONAL_ENTRY)
> +			__radix_tree_delete(root, node, slot);
> +		else
> +			rcu_assign_pointer(*slot, (void *)tmp);
> +		return;
> +	}
> +
> +	if (!bitmap)
> +		return;
> +
> +	__clear_bit(bit, bitmap->bitmap);
> +	if (bitmap_empty(bitmap->bitmap, IDA_BITMAP_BITS)) {

Please document the locking rules, in order to explain how a double
kfree() and/or use-after-free can be avoided (an example of what I mean
follows the quoted function).

> +		kfree(bitmap);
> +		__radix_tree_delete(root, node, slot);
> +	}
> +}
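
What I mean by "write locking rules" is something like the comment
below (just an example; the actual rule is up to you):

	/*
	 * Example only: document who serializes modifications.
	 *
	 * Locking: callers must serialize xb_set_bit(), xb_clear_bit() and
	 * xb_clear_bit_range() on the same struct xb (e.g. with a caller-side
	 * lock), because these helpers kfree() the ida_bitmap without taking
	 * any lock; otherwise two concurrent clearers could free the same
	 * bitmap twice or use it after it has been freed.
	 */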



> +void xb_clear_bit_range(struct xb *xb, unsigned long start, unsigned long end)
> +{
> +	struct radix_tree_root *root = &xb->xbrt;
> +	struct radix_tree_node *node;
> +	void **slot;
> +	struct ida_bitmap *bitmap;
> +	unsigned int nbits;
> +
> +	for (; start < end; start = (start | (IDA_BITMAP_BITS - 1)) + 1) {
> +		unsigned long index = start / IDA_BITMAP_BITS;
> +		unsigned long bit = start % IDA_BITMAP_BITS;
> +
> +		bitmap = __radix_tree_lookup(root, index, &node, &slot);
> +		if (radix_tree_exception(bitmap)) {
> +			unsigned long ebit = bit + 2;
> +			unsigned long tmp = (unsigned long)bitmap;
> +
> +			nbits = min(end - start + 1, BITS_PER_LONG - ebit);
> +
> +			if (ebit >= BITS_PER_LONG)
> +				continue;
> +			bitmap_clear(&tmp, ebit, nbits);
> +			if (tmp == RADIX_TREE_EXCEPTIONAL_ENTRY)
> +				__radix_tree_delete(root, node, slot);
> +			else
> +				rcu_assign_pointer(*slot, (void *)tmp);
> +		} else if (bitmap) {
> +			nbits = min(end - start + 1, IDA_BITMAP_BITS - bit);
> +
> +			if (nbits != IDA_BITMAP_BITS)
> +				bitmap_clear(bitmap->bitmap, bit, nbits);
> +
> +			if (nbits == IDA_BITMAP_BITS ||
> +				bitmap_empty(bitmap->bitmap, IDA_BITMAP_BITS)) {

Same here.

> +				kfree(bitmap);
> +				__radix_tree_delete(root, node, slot);
> +			}
> +		}
> +	}
> +}



> +bool xb_test_bit(struct xb *xb, unsigned long bit)
> +{
> +	unsigned long index = bit / IDA_BITMAP_BITS;
> +	const struct radix_tree_root *root = &xb->xbrt;
> +	struct ida_bitmap *bitmap = radix_tree_lookup(root, index);
> +
> +	bit %= IDA_BITMAP_BITS;
> +
> +	if (!bitmap)
> +		return false;
> +	if (radix_tree_exception(bitmap)) {
> +		bit += RADIX_TREE_EXCEPTIONAL_SHIFT;
> +		if (bit > BITS_PER_LONG)

Why not bit >= BITS_PER_LONG here? With bit == BITS_PER_LONG, the shift
below is by the full word width, which is undefined behaviour.

> +			return false;
> +		return (unsigned long)bitmap & (1UL << bit);
> +	}
> +
> +	return test_bit(bit, bitmap->bitmap);
> +}
