diff --git a/croaring/src/bitmap64/imp.rs b/croaring/src/bitmap64/imp.rs
index d9b139d..aba3fdf 100644
--- a/croaring/src/bitmap64/imp.rs
+++ b/croaring/src/bitmap64/imp.rs
@@ -696,6 +696,7 @@ impl Bitmap64 {
     /// assert_eq!(first_over_50, ControlFlow::Break(100));
     /// ```
     // TODO: If we can do external iteration, this function is unnecessary
+    // TODO: Panic safety?!
     #[inline]
     pub fn for_each<F, O>(&self, f: F) -> ControlFlow<O>
     where
diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml
index 8532d7a..56e9e7e 100644
--- a/fuzz/Cargo.toml
+++ b/fuzz/Cargo.toml
@@ -27,6 +27,12 @@ path = "fuzz_targets/fuzz_ops.rs"
 test = false
 doc = false
 
+[[bin]]
+name = "fuzz_ops64"
+path = "fuzz_targets/fuzz_ops64.rs"
+test = false
+doc = false
+
 [[bin]]
 name = "against_bitvec"
 path = "fuzz_targets/against_bitvec.rs"
diff --git a/fuzz/fuzz_targets/arbitrary_ops64/mod.rs b/fuzz/fuzz_targets/arbitrary_ops64/mod.rs
new file mode 100644
index 0000000..0809a93
--- /dev/null
+++ b/fuzz/fuzz_targets/arbitrary_ops64/mod.rs
@@ -0,0 +1,292 @@
+use croaring::{Bitmap64, Treemap};
+use libfuzzer_sys::arbitrary::{self, Arbitrary, Unstructured};
+use std::mem;
+use std::ops::RangeInclusive;
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
+#[repr(transparent)]
+pub struct Num(pub u64);
+
+pub const MAX_NUM: u64 = 0x1_0000 * 260;
+
+impl<'a> Arbitrary<'a> for Num {
+    fn arbitrary(u: &mut Unstructured<'a>) -> arbitrary::Result<Self> {
+        Ok(Self(u.int_in_range(0..=(MAX_NUM - 1))?))
+    }
+}
+
+#[derive(Arbitrary, Debug)]
+pub enum MutableBitmapOperation {
+    Add(Num),
+    AddChecked(Num),
+    AddMany(Vec<Num>),
+    AddRange(RangeInclusive<Num>),
+    RemoveRange(RangeInclusive<Num>),
+    Copy,
+    Clear,
+    Remove(Num),
+    RemoveChecked(Num),
+    RunOptimize,
+    RemoveRunCompression,
+    // Add to the max key (or with 0xFFFFFFFF_FFFF0000)
+    AddToMax(u16),
+}
+
+#[derive(Arbitrary, Debug)]
+pub enum ReadBitmapOp {
+    ContainsRange(RangeInclusive<Num>),
+    Contains(Num),
+    RangeCardinality(RangeInclusive<Num>),
+    Cardinality,
+    Flip(RangeInclusive<Num>),
+    ToVec,
+    GetPortableSerializedSizeInBytes,
+    GetNativeSerializedSizeInBytes,
+    GetFrozenSerializedSizeInBytes,
+    IsEmpty,
+    AddOffset(i64),
+    IntersectWithRange(RangeInclusive<Num>),
+    Minimum,
+    Maximum,
+    Rank(Num),
+    Index(Num),
+    Select(Num),
+    Statistics,
+    Clone,
+    Debug,
+    WithIter(Vec<IterOperation>),
+}
+
+#[derive(Arbitrary, Debug)]
+pub enum BitmapCompOperation {
+    Eq,
+    IsSubset,
+    IsStrictSubset,
+    Intersect,
+    JacardIndex,
+    And,
+    Or,
+    Xor,
+    AndNot,
+}
+
+#[derive(Arbitrary, Debug)]
+pub enum IterOperation {
+    ResetAtOrAfter(u64),
+    ReadNext,
+    NextMany(u16),
+}
+
+impl MutableBitmapOperation {
+    pub fn on_treemap(&self, b: &mut Treemap) {
+        match *self {
+            MutableBitmapOperation::Add(Num(i)) => {
+                b.add(i);
+            }
+            MutableBitmapOperation::AddChecked(Num(i)) => {
+                let expected = !b.contains(i);
+                let result = b.add_checked(i);
+                assert_eq!(expected, result);
+            }
+            MutableBitmapOperation::AddMany(ref items) => {
+                for &Num(item) in items {
+                    b.add(item)
+                }
+            }
+            MutableBitmapOperation::AddRange(ref r) => {
+                b.add_range(r.start().0..=r.end().0);
+            }
+            MutableBitmapOperation::RemoveRange(ref r) => {
+                b.remove_range(r.start().0..=r.end().0);
+            }
+            MutableBitmapOperation::Clear => {
+                b.clear();
+            }
+            MutableBitmapOperation::Remove(Num(i)) => {
+                b.remove(i);
+            }
+            MutableBitmapOperation::RemoveChecked(Num(i)) => {
+                let expected = b.contains(i);
+                let result = b.remove_checked(i);
+                assert_eq!(expected, result);
+            }
+            MutableBitmapOperation::RunOptimize => {
+                b.run_optimize();
+            }
+            MutableBitmapOperation::RemoveRunCompression => {
+                b.remove_run_compression();
+            }
+            MutableBitmapOperation::Copy => {
+                *b = b.clone();
+            }
+            MutableBitmapOperation::AddToMax(low_bits) => {
+                const UPPER_BITS: u64 = 0xFFFF_FFFF_FFFF_0000;
+                b.add(UPPER_BITS | u64::from(low_bits));
+            }
+        }
+    }
+
+    pub fn on_bitmap64(&self, b: &mut Bitmap64) {
+        match *self {
+            MutableBitmapOperation::Add(Num(i)) => {
+                b.add(i);
+            }
+            MutableBitmapOperation::AddChecked(Num(i)) => {
+                let expected = !b.contains(i);
+                let result = b.add_checked(i);
+                assert_eq!(expected, result);
+            }
+            MutableBitmapOperation::AddMany(ref items) => {
+                let items: &[u64] = unsafe { mem::transmute(&items[..]) };
+                b.add_many(items);
+            }
+            MutableBitmapOperation::AddRange(ref range) => {
+                b.add_range(range.start().0..=range.end().0);
+            }
+            MutableBitmapOperation::RemoveRange(ref range) => {
+                b.remove_range(range.start().0..=range.end().0);
+            }
+            MutableBitmapOperation::Copy => {
+                *b = b.clone();
+            }
+            MutableBitmapOperation::Clear => {
+                if !b.is_empty() {
+                    b.remove_range(b.minimum().unwrap()..=b.maximum().unwrap())
+                }
+            },
+            MutableBitmapOperation::Remove(Num(i)) => {
+                b.remove(i);
+            }
+            MutableBitmapOperation::RemoveChecked(Num(i)) => {
+                let expected = b.contains(i);
+                let result = b.remove_checked(i);
+                assert_eq!(expected, result);
+            }
+            MutableBitmapOperation::RunOptimize => {
+                b.run_optimize();
+            }
+            MutableBitmapOperation::RemoveRunCompression => {
+                // Unsupported
+            }
+            MutableBitmapOperation::AddToMax(low_bits) => {
+                const UPPER_BITS: u64 = 0xFFFF_FFFF_FFFF_0000;
+                b.add(UPPER_BITS | u64::from(low_bits));
+            }
+        }
+    }
+}
+
+impl BitmapCompOperation {
+    pub fn on_treemap(&self, lhs: &mut Treemap, rhs: &Treemap) {
+        match *self {
+            BitmapCompOperation::Eq => {
+                _ = lhs == rhs;
+                assert_eq!(lhs, lhs);
+            }
+            BitmapCompOperation::IsSubset => {
+                _ = lhs.is_subset(rhs);
+                assert!(lhs.is_subset(lhs));
+            }
+            BitmapCompOperation::IsStrictSubset => {
+                lhs.is_strict_subset(rhs);
+                assert!(!lhs.is_strict_subset(lhs));
+            }
+            BitmapCompOperation::Intersect => {
+                // Unimplemented
+            }
+            BitmapCompOperation::JacardIndex => {
+                // Unimplemented
+            }
+            BitmapCompOperation::And => {
+                assert_eq!(lhs.and(lhs), *lhs);
+
+                let res = lhs.and(rhs);
+                lhs.and_inplace(rhs);
+                assert_eq!(*lhs, res);
+            }
+            BitmapCompOperation::Or => {
+                assert_eq!(lhs.or(lhs), *lhs);
+
+                let res = lhs.or(rhs);
+
+                lhs.or_inplace(rhs);
+                assert_eq!(*lhs, res);
+            }
+            BitmapCompOperation::Xor => {
+                assert!(lhs.xor(lhs).is_empty());
+
+                let res = lhs.xor(rhs);
+
+                lhs.xor_inplace(rhs);
+                assert_eq!(*lhs, res);
+            }
+            BitmapCompOperation::AndNot => {
+                assert!(lhs.andnot(lhs).is_empty());
+
+                let res = lhs.andnot(rhs);
+
+                lhs.andnot_inplace(rhs);
+                assert_eq!(*lhs, res);
+            }
+        }
+    }
+    pub fn on_roaring64(&self, lhs: &mut Bitmap64, rhs: &Bitmap64) {
+        match *self {
+            BitmapCompOperation::Eq => {
+                _ = lhs == rhs;
+                assert_eq!(lhs, lhs);
+            }
+            BitmapCompOperation::IsSubset => {
+                _ = lhs.is_subset(rhs);
+                assert!(lhs.is_subset(lhs));
+            }
+            BitmapCompOperation::IsStrictSubset => {
+                lhs.is_strict_subset(rhs);
+                assert!(!lhs.is_strict_subset(lhs));
+            }
+            BitmapCompOperation::Intersect => {
+                _ = lhs.intersect(rhs);
+                assert!(lhs.is_empty() || lhs.intersect(lhs));
+            }
+            BitmapCompOperation::JacardIndex => {
+                _ = lhs.jaccard_index(rhs);
+                _ = lhs.jaccard_index(lhs);
+            }
+            BitmapCompOperation::And => {
+                assert_eq!(lhs.and(lhs), *lhs);
+
+                let res = lhs.and(rhs);
+                assert_eq!(res.cardinality(), lhs.and_cardinality(rhs));
+                lhs.and_inplace(rhs);
+                assert_eq!(*lhs, res);
+            }
+            BitmapCompOperation::Or => {
+                assert_eq!(lhs.or(lhs), *lhs);
+
+                let res = lhs.or(rhs);
+                assert_eq!(res.cardinality(), lhs.or_cardinality(rhs));
+
+                lhs.or_inplace(rhs);
+                assert_eq!(*lhs, res);
+            }
+            BitmapCompOperation::Xor => {
+                assert!(lhs.xor(lhs).is_empty());
+
+                let res = lhs.xor(rhs);
+                assert_eq!(res.cardinality(), lhs.xor_cardinality(rhs));
+
+                lhs.xor_inplace(rhs);
+                assert_eq!(*lhs, res);
+            }
+            BitmapCompOperation::AndNot => {
+                assert!(lhs.andnot(lhs).is_empty());
+
+                let res = lhs.andnot(rhs);
+                assert_eq!(res.cardinality(), lhs.andnot_cardinality(rhs));
+
+                lhs.andnot_inplace(rhs);
+                assert_eq!(*lhs, res);
+            }
+        }
+    }
+}
diff --git a/fuzz/fuzz_targets/fuzz_ops64.rs b/fuzz/fuzz_targets/fuzz_ops64.rs
new file mode 100644
index 0000000..2007afe
--- /dev/null
+++ b/fuzz/fuzz_targets/fuzz_ops64.rs
@@ -0,0 +1,52 @@
+#![no_main]
+
+use crate::arbitrary_ops64::*;
+use croaring::{Bitmap64, Treemap};
+use libfuzzer_sys::arbitrary;
+use libfuzzer_sys::arbitrary::Arbitrary;
+use libfuzzer_sys::fuzz_target;
+use std::ops::ControlFlow;
+
+mod arbitrary_ops64;
+
+fuzz_target!(|input: FuzzInput| {
+    let mut lhs64 = Bitmap64::new();
+    let mut rhs64 = Bitmap64::new();
+    let mut lhs_tree = Treemap::new();
+    let mut rhs_tree = Treemap::new();
+
+    for op in &input.lhs_ops {
+        op.on_bitmap64(&mut lhs64);
+        op.on_treemap(&mut lhs_tree);
+    }
+    for op in &input.rhs_ops {
+        op.on_bitmap64(&mut rhs64);
+        op.on_treemap(&mut rhs_tree);
+    }
+
+    for op in &input.comp_ops {
+        op.on_roaring64(&mut lhs64, &rhs64);
+        op.on_treemap(&mut lhs_tree, &rhs_tree);
+    }
+
+    assert_64_eq(&lhs64, &lhs_tree);
+    assert_64_eq(&rhs64, &rhs_tree);
+});
+
+#[derive(Arbitrary, Debug)]
+struct FuzzInput {
+    lhs_ops: Vec<MutableBitmapOperation>,
+    rhs_ops: Vec<MutableBitmapOperation>,
+    comp_ops: Vec<BitmapCompOperation>,
+    // view_ops: Vec<ReadBitmapOp>,
+}
+
+fn assert_64_eq(lhs: &Bitmap64, rhs: &Treemap) {
+    assert_eq!(lhs.cardinality(), rhs.cardinality());
+    let mut rhs_iter = rhs.iter();
+    let res = lhs.for_each(|v| -> ControlFlow<()> {
+        assert_eq!(rhs_iter.next(), Some(v));
+        ControlFlow::Continue(())
+    });
+    assert_eq!(res, ControlFlow::Continue(()));
+}