use crate::{
	devel as dvl,
	mutability::Mutability,
	order::BitOrder,
	ptr::BitRef,
	slice::{
		BitSlice,
		Iter,
	},
	store::BitStore,
	vec::BitVec,
};
use alloc::vec::Vec;
use core::{
	fmt::{
		self,
		Debug,
		Formatter,
	},
	iter::{
		FromIterator,
		FusedIterator,
	},
	mem::{
		self,
		ManuallyDrop,
	},
	ops::{
		Range,
		RangeBounds,
	},
	ptr::NonNull,
};
use tap::{
	pipe::Pipe,
	tap::{
		Tap,
		TapOptional,
	},
};
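/// Extension of a bit-vector from a `bool` iterator.
///
/// The implementation reserves space according to the iterator's size hint,
/// writes directly into the reserved span, and falls back to `push` for any
/// bits beyond the hint. A minimal usage sketch, assuming the crate's default
/// `Lsb0`/`usize` type parameters:
///
/// ```rust
/// use bitvec::prelude::*;
///
/// let mut bv: BitVec = BitVec::new();
/// bv.extend([true, false, true].iter().copied());
/// assert_eq!(bv.len(), 3);
/// assert!(bv[0]);
/// ```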
impl<O, T> Extend<bool> for BitVec<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn extend<I>(&mut self, iter: I)
	where I: IntoIterator<Item = bool> {
		let mut iter = iter.into_iter();
		match iter.size_hint() {
			(n, None) | (_, Some(n)) => {
				// Reserve room for the hinted number of bits, then write them
				// directly into the newly reserved span.
				self.reserve(n);
				let len = self.len();
				let new_len = len + n;
				let new = unsafe { self.get_unchecked_mut(len .. new_len) };
				let mut pulled = 0;
				for (slot, bit) in
					unsafe { new.iter_mut().remove_alias() }.zip(iter.by_ref())
				{
					slot.set(bit);
					pulled += 1;
				}
				unsafe {
					self.set_len(len + pulled);
				}
			},
		}
		// The size hint may have undercounted; push any remaining bits one at
		// a time, growing the vector as needed.
		iter.for_each(|bit| self.push(bit));
	}
}
impl<'a, O, T> Extend<&'a bool> for BitVec<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn extend<I>(&mut self, iter: I)
	where I: IntoIterator<Item = &'a bool> {
		self.extend(iter.into_iter().copied());
	}
}
impl<'a, M, O1, O2, T1, T2> Extend<BitRef<'a, M, O2, T2>> for BitVec<O1, T1>
where
	M: Mutability,
	O1: BitOrder,
	O2: BitOrder,
	T1: BitStore,
	T2: BitStore,
{
	fn extend<I>(&mut self, iter: I)
	where I: IntoIterator<Item = BitRef<'a, M, O2, T2>> {
		self.extend(iter.into_iter().map(|bit| *bit));
	}
}
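/// Extension of a bit-vector from a sequence of whole storage elements.
///
/// Each yielded element contributes all of its bits, in `O`-order, to the
/// vector. A sketch, using `Lsb0` ordering over `u8` storage for
/// illustration:
///
/// ```rust
/// use bitvec::prelude::*;
///
/// let mut bv: BitVec<Lsb0, u8> = BitVec::new();
/// bv.extend([0x0Fu8, 0xF0u8].iter().copied());
/// assert_eq!(bv.len(), 16);
/// assert_eq!(bv.count_ones(), 8);
/// ```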
impl<O, T> Extend<T> for BitVec<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn extend<I>(&mut self, iter: I)
	where I: IntoIterator<Item = T> {
		for elem in iter.into_iter() {
			self.extend(BitSlice::<O, T>::from_element(&elem));
		}
	}
}
impl<'a, O, T> Extend<&'a T> for BitVec<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn extend<I>(&mut self, iter: I)
	where I: IntoIterator<Item = &'a T> {
		for elem in iter.into_iter() {
			self.extend(BitSlice::<O, T>::from_element(elem));
		}
	}
}
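/// Collection of a `bool` iterator into a new bit-vector.
///
/// This constructs an empty vector and extends it. A sketch, assuming the
/// crate's default type parameters:
///
/// ```rust
/// use bitvec::prelude::*;
///
/// let bv = [false, true, true].iter().copied().collect::<BitVec>();
/// assert_eq!(bv.len(), 3);
/// assert_eq!(bv.count_ones(), 2);
/// ```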
impl<O, T> FromIterator<bool> for BitVec<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn from_iter<I>(iter: I) -> Self
	where I: IntoIterator<Item = bool> {
		Self::new().tap_mut(|bv| bv.extend(iter.into_iter()))
	}
}
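/// Collection of proxy bit-references into a new bit-vector. A sketch,
/// assuming that bit-slice iterators yield the `BitRef` proxies this
/// implementation expects, and that the `bitvec!` constructor macro is
/// available:
///
/// ```rust
/// use bitvec::prelude::*;
///
/// let bv = bitvec![0, 1, 1, 0];
/// let copy = bv.iter().collect::<BitVec>();
/// assert_eq!(copy, bv);
/// ```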
impl<'a, M, O1, O2, T1, T2> FromIterator<BitRef<'a, M, O2, T2>>
	for BitVec<O1, T1>
where
	M: Mutability,
	O1: BitOrder,
	O2: BitOrder,
	T1: BitStore,
	T2: BitStore,
{
	fn from_iter<I>(iter: I) -> Self
	where I: IntoIterator<Item = BitRef<'a, M, O2, T2>> {
		Self::new().tap_mut(|bv| bv.extend(iter.into_iter()))
	}
}
impl<'a, O, T> FromIterator<&'a bool> for BitVec<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn from_iter<I>(iter: I) -> Self
	where I: IntoIterator<Item = &'a bool> {
		iter.into_iter().copied().pipe(Self::from_iter)
	}
}
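/// Collection of whole storage elements into a bit-vector, by gathering them
/// into a `Vec<T>` and then taking ownership of its buffer. A sketch, using
/// `u8` storage with `Lsb0` ordering for illustration:
///
/// ```rust
/// use bitvec::prelude::*;
///
/// let bv = vec![1u8, 2, 3].into_iter().collect::<BitVec<Lsb0, u8>>();
/// assert_eq!(bv.len(), 24);
/// ```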
impl<O, T> FromIterator<T> for BitVec<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn from_iter<I>(iter: I) -> Self
	where I: IntoIterator<Item = T> {
		iter.into_iter().collect::<Vec<_>>().pipe(Self::from_vec)
	}
}
impl<'a, O, T> FromIterator<&'a T> for BitVec<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn from_iter<I>(iter: I) -> Self
	where I: IntoIterator<Item = &'a T> {
		let mut vec = iter
			.into_iter()
			.map(BitStore::load_value)
			.collect::<Vec<T::Mem>>()
			.pipe(ManuallyDrop::new);
		let (ptr, len, capa) = (vec.as_mut_ptr(), vec.len(), vec.capacity());
		// `T` and `T::Mem` share a memory layout, so the collected buffer of
		// raw memory values can be reinterpreted as a buffer of `T` elements.
		unsafe { Vec::from_raw_parts(ptr as *mut T, len, capa) }
			.pipe(Self::from_vec)
	}
}
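/// Iteration over a bit-vector by value, yielding each bit as a `bool` and
/// releasing the allocation when the iterator is dropped. A sketch, assuming
/// the `bitvec!` constructor macro:
///
/// ```rust
/// use bitvec::prelude::*;
///
/// let bv = bitvec![0, 1, 0, 1];
/// let ones = bv.into_iter().filter(|bit| *bit).count();
/// assert_eq!(ones, 2);
/// ```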
impl<O, T> IntoIterator for BitVec<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	type IntoIter = IntoIter<O, T>;
	type Item = bool;
	fn into_iter(self) -> Self::IntoIter {
		IntoIter::new(self)
	}
}
impl<'a, O, T> IntoIterator for &'a BitVec<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	type IntoIter = <&'a BitSlice<O, T> as IntoIterator>::IntoIter;
	type Item = <&'a BitSlice<O, T> as IntoIterator>::Item;
	fn into_iter(self) -> Self::IntoIter {
		self.as_bitslice().into_iter()
	}
}
impl<'a, O, T> IntoIterator for &'a mut BitVec<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	type IntoIter = <&'a mut BitSlice<O, T> as IntoIterator>::IntoIter;
	type Item = <&'a mut BitSlice<O, T> as IntoIterator>::Item;
	fn into_iter(self) -> Self::IntoIter {
		self.as_mut_bitslice().into_iter()
	}
}
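/// An iterator that consumes a `BitVec`, yielding each bit as a `bool`.
///
/// This is produced by the owning `IntoIterator` implementation above, and it
/// keeps the vector's allocation alive until it is dropped. A sketch of how
/// it is typically reached, assuming the `bitvec!` constructor macro:
///
/// ```rust
/// use bitvec::prelude::*;
///
/// let bv = bitvec![0, 1, 1];
/// let mut iter = bv.into_iter();
/// assert_eq!(iter.next(), Some(false));
/// assert_eq!(iter.next_back(), Some(true));
/// assert_eq!(iter.len(), 1);
/// ```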
pub struct IntoIter<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	/// The base address of the allocation, retained so that the buffer can be
	/// freed on drop.
	base: NonNull<T>,
	/// The capacity of the allocation, in `T` elements.
	capa: usize,
	/// A bit-slice iterator over the vector's contents. The `'static`
	/// lifetime is a lie; it is bounded by the allocation owned by `base`.
	iter: Iter<'static, O, T>,
}
impl<O, T> IntoIter<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	/// Converts a bit-vector into its bit iterator, taking ownership of the
	/// underlying allocation.
	fn new(bv: BitVec<O, T>) -> Self {
		let capa = bv.capacity;
		// Erase the lifetime: the produced iterator borrows a buffer that is
		// kept alive until `Drop` releases it.
		let iter = bv.bitspan.to_bitslice_ref().iter();
		// Remember the allocation's base address so that `Drop` can free it.
		let base = bv.bitspan.address().to_nonnull();
		// The source vector must not run its own destructor; the iterator
		// now owns the buffer.
		mem::forget(bv);
		Self { base, capa, iter }
	}

	/// Views the bits not yet yielded by the iterator as a bit-slice.
	pub fn as_bitslice(&self) -> &BitSlice<O, T> {
		self.iter.as_bitslice()
	}
	#[doc(hidden)]
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "Use `as_bitslice` to view the underlying slice"]
	pub fn as_slice(&self) -> &BitSlice<O, T> {
		self.as_bitslice()
	}

	/// Views the bits not yet yielded by the iterator as a mutable bit-slice.
	pub fn as_mut_bitslice(&mut self) -> &mut BitSlice<O, T> {
		let span = self.iter.as_bitslice().as_bitspan();
		// Asserting mutability is sound: the iterator exclusively owns the
		// buffer, and `&mut self` prevents any aliasing view.
		let span_mut = unsafe { span.assert_mut() };
		span_mut.to_bitslice_mut()
	}
	#[doc(hidden)]
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "Use `as_mut_bitslice` to view the underlying slice"]
	pub fn as_mut_slice(&mut self) -> &mut BitSlice<O, T> {
		self.as_mut_bitslice()
	}
}
#[cfg(not(tarpaulin_include))]
impl<O, T> Debug for IntoIter<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		fmt.debug_tuple("IntoIter")
			.field(&self.as_bitslice())
			.finish()
	}
}
impl<O, T> Iterator for IntoIter<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	type Item = bool;
	fn next(&mut self) -> Option<Self::Item> {
		self.iter.next().as_deref().copied()
	}
	fn size_hint(&self) -> (usize, Option<usize>) {
		self.iter.size_hint()
	}
	fn count(self) -> usize {
		self.len()
	}
	fn nth(&mut self, n: usize) -> Option<Self::Item> {
		self.iter.nth(n).as_deref().copied()
	}
	fn last(mut self) -> Option<Self::Item> {
		self.next_back()
	}
}
impl<O, T> DoubleEndedIterator for IntoIter<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn next_back(&mut self) -> Option<Self::Item> {
		self.iter.next_back().as_deref().copied()
	}
	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		self.iter.nth_back(n).as_deref().copied()
	}
}
impl<O, T> ExactSizeIterator for IntoIter<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn len(&self) -> usize {
		self.iter.len()
	}
}
impl<O, T> FusedIterator for IntoIter<O, T>
where
	O: BitOrder,
	T: BitStore,
{
}
impl<O, T> Drop for IntoIter<O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn drop(&mut self) {
		// Rebuild an empty `Vec` over the original allocation so that it is
		// freed without running any element destructors.
		drop(unsafe { Vec::from_raw_parts(self.base.as_ptr(), 0, self.capa) });
	}
}
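/// A draining iterator over a region of a bit-vector.
///
/// While the drain is alive, the source vector is truncated to the bits
/// before the drained range; when the drain is dropped, the bits after the
/// range are moved down to close the gap. A sketch, assuming this type is
/// reached through a `BitVec::drain` method and that the `bitvec!` macro is
/// available:
///
/// ```rust
/// use bitvec::prelude::*;
///
/// let mut bv = bitvec![0, 1, 1, 0, 1];
/// let drained = bv.drain(1 .. 3).collect::<BitVec>();
/// assert_eq!(drained, bitvec![1, 1]);
/// assert_eq!(bv, bitvec![0, 0, 1]);
/// ```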
pub struct Drain<'a, O, T>
where
	O: BitOrder,
	T: BitStore,
{
	/// A pointer back to the source bit-vector, used to restore it when the
	/// drain ends.
	source: NonNull<BitVec<O, T>>,
	/// An iterator over the bits being removed.
	drain: Iter<'a, O, T>,
	/// The range of bits, in the original vector, that follow the drained
	/// region and must be reattached when the drain is dropped.
	tail: Range<usize>,
}
impl<'a, O, T> Drain<'a, O, T>
where
	O: BitOrder,
	T: BitStore,
{
	/// Sets up a draining iterator over a range of `source`, truncating the
	/// vector for the duration of the drain.
	pub(super) fn new<R>(source: &'a mut BitVec<O, T>, range: R) -> Self
	where R: RangeBounds<usize> {
		let len = source.len();
		// Normalize the caller's range and assert that it lies within the
		// vector.
		let drain = dvl::normalize_range(range, len);
		dvl::assert_range(drain.clone(), len);
		// The bits after the drained range are preserved and reattached when
		// the drain is dropped.
		let tail = drain.end .. len;
		let drain = unsafe {
			// Truncate the vector to the bits before the drain, so that a
			// leaked `Drain` merely forgets the drained and tail regions.
			source.set_len(drain.start);
			// Build an iterator over the drained bits, detached from the
			// borrow of `source`.
			source
				.as_bitslice()
				.get_unchecked(drain)
				.as_bitspan()
				.to_bitslice_ref()
				.iter()
		};
		let source = source.into();
		Self {
			source,
			drain,
			tail,
		}
	}

	/// Views the bits that remain to be drained as a bit-slice.
	pub fn as_bitslice(&self) -> &'a BitSlice<O, T> {
		self.drain.as_bitslice()
	}
	#[doc(hidden)]
	#[inline(always)]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "Use `as_bitslice` to view the underlying slice"]
	pub fn as_slice(&self) -> &BitSlice<O, T> {
		self.as_bitslice()
	}

	/// Refills the drained gap with bits pulled from a replacement iterator.
	///
	/// This is used by `Splice` to write its replacement sequence into the
	/// space vacated by the drain. The return value reports whether the gap
	/// was completely filled or the iterator ran out of bits first.
	fn fill<I>(&mut self, iter: &mut I) -> FillStatus
	where I: Iterator<Item = bool> {
		let bitvec = unsafe { self.source.as_mut() };
		// The vector's current length marks the start of the gap left by the
		// drain; the gap ends where the preserved tail begins.
		let mut len = bitvec.len();
		let span = unsafe { bitvec.get_unchecked_mut(len .. self.tail.start) };
		// Assume the whole gap will be filled until the iterator proves
		// otherwise.
		let mut out = FillStatus::FullSpan;
		for slot in span {
			// Write each replacement bit into the gap, extending the live
			// region of the vector as it goes.
			if let Some(bit) = iter.next() {
				slot.set(bit);
				len += 1;
			}
			// If the iterator runs out before the gap is full, report it.
			else {
				out = FillStatus::EmptyInput;
				break;
			}
		}
		// Commit the bits written so far.
		unsafe {
			bitvec.set_len(len);
		}
		out
	}

	/// Moves the preserved tail region farther back in the buffer, making
	/// room for `additional` more bits between the drained gap and the tail.
	/// The capacity is grown as needed and the `tail` range is updated to the
	/// new location.
	unsafe fn move_tail(&mut self, additional: usize) {
		if additional == 0 {
			return;
		}
		let bitvec = self.source.as_mut();
		let tail_len = self.tail.end - self.tail.start;
		// Ensure the buffer can hold the additional bits as well as the
		// relocated tail.
		let full_len = additional + tail_len;
		bitvec.reserve(full_len);
		// Record the tail's new position, keeping the old range so the bits
		// can be copied out of it.
		let new_tail_start = additional + self.tail.start;
		let orig_tail = mem::replace(
			&mut self.tail,
			new_tail_start .. new_tail_start + tail_len,
		);
		// Temporarily alter the vector's length so the relocation can be
		// performed, then restore it before returning.
		let len = bitvec.len();
		bitvec.set_len(full_len);
		bitvec.copy_within_unchecked(orig_tail, new_tail_start);
		bitvec.set_len(len);
	}
}
impl<O, T> AsRef<BitSlice<O, T>> for Drain<'_, O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn as_ref(&self) -> &BitSlice<O, T> {
		self.as_bitslice()
	}
}
#[cfg(not(tarpaulin_include))]
impl<'a, O, T> Debug for Drain<'a, O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		fmt.debug_tuple("Drain")
			.field(&self.drain.as_bitslice())
			.finish()
	}
}
impl<O, T> Iterator for Drain<'_, O, T>
where
	O: BitOrder,
	T: BitStore,
{
	type Item = bool;
	fn next(&mut self) -> Option<Self::Item> {
		self.drain.next().as_deref().copied()
	}
	fn size_hint(&self) -> (usize, Option<usize>) {
		self.drain.size_hint()
	}
	fn count(self) -> usize {
		self.len()
	}
	fn nth(&mut self, n: usize) -> Option<Self::Item> {
		self.drain.nth(n).as_deref().copied()
	}
	fn last(mut self) -> Option<Self::Item> {
		self.next_back()
	}
}
impl<O, T> DoubleEndedIterator for Drain<'_, O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn next_back(&mut self) -> Option<Self::Item> {
		self.drain.next_back().as_deref().copied()
	}
	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		self.drain.nth_back(n).as_deref().copied()
	}
}
impl<O, T> ExactSizeIterator for Drain<'_, O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn len(&self) -> usize {
		self.drain.len()
	}
}
impl<O, T> FusedIterator for Drain<'_, O, T>
where
	O: BitOrder,
	T: BitStore,
{
}
unsafe impl<O, T> Send for Drain<'_, O, T>
where
	O: BitOrder,
	T: BitStore,
{
}
unsafe impl<O, T> Sync for Drain<'_, O, T>
where
	O: BitOrder,
	T: BitStore,
{
}
impl<O, T> Drop for Drain<'_, O, T>
where
	O: BitOrder,
	T: BitStore,
{
	fn drop(&mut self) {
		let tail = self.tail.clone();
		let tail_len = tail.end - tail.start;
		// If there is no preserved tail, the truncated vector is already in
		// its final state.
		if tail_len == 0 {
			return;
		}
		let bitvec = unsafe { self.source.as_mut() };
		let old_len = bitvec.len();
		let new_len = old_len + tail_len;
		unsafe {
			// Grow the vector to cover the reattached tail, then move the
			// tail bits down so they immediately follow the retained head.
			bitvec.set_len(new_len);
			bitvec.copy_within_unchecked(tail, old_len);
		}
	}
}
/// Reports whether a `fill` call completely refilled the drained gap.
#[repr(u8)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
enum FillStatus {
	/// The drained gap was completely refilled.
	FullSpan   = 0,
	/// The replacement iterator ran out of bits before the gap was full.
	EmptyInput = 1,
}
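/// A splicing iterator: it drains a range of a bit-vector while writing a
/// replacement sequence into the vacated space.
///
/// A sketch, assuming this type is reached through a `BitVec::splice` method
/// and that the `bitvec!` macro is available:
///
/// ```rust
/// use bitvec::prelude::*;
///
/// let mut bv = bitvec![0, 1, 0, 1];
/// let removed = bv
/// 	.splice(1 .. 3, [true, true, true].iter().copied())
/// 	.collect::<BitVec>();
/// assert_eq!(removed, bitvec![1, 0]);
/// assert_eq!(bv, bitvec![0, 1, 1, 1, 1]);
/// ```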
#[derive(Debug)]
pub struct Splice<'a, O, T, I>
where
	O: BitOrder,
	T: BitStore,
	I: Iterator<Item = bool>,
{
	/// The region of the bit-vector being drained and replaced.
	drain: Drain<'a, O, T>,
	/// The iterator of replacement bits.
	splice: I,
}
impl<'a, O, T, I> Splice<'a, O, T, I>
where
	O: BitOrder,
	T: BitStore,
	I: Iterator<Item = bool>,
{
	/// Constructs a splice out of a drain and a replacement bit source.
	pub(super) fn new<II>(drain: Drain<'a, O, T>, splice: II) -> Self
	where II: IntoIterator<IntoIter = I, Item = bool> {
		let splice = splice.into_iter();
		Self { drain, splice }
	}
}
impl<O, T, I> Iterator for Splice<'_, O, T, I>
where
	O: BitOrder,
	T: BitStore,
	I: Iterator<Item = bool>,
{
	type Item = bool;
	fn next(&mut self) -> Option<Self::Item> {
		self.drain.next().tap_some(|_| {
			// Each drained bit vacates one slot; write one replacement bit
			// into it and extend the vector's live region to cover it.
			if let Some(bit) = self.splice.next() {
				unsafe {
					let bv = self.drain.source.as_mut();
					let len = bv.len();
					bv.set_len_unchecked(len + 1);
					bv.set_unchecked(len, bit);
				}
			}
		})
	}
	fn size_hint(&self) -> (usize, Option<usize>) {
		self.drain.size_hint()
	}
	fn count(self) -> usize {
		self.len()
	}
	fn last(mut self) -> Option<Self::Item> {
		self.next_back()
	}
}
impl<O, T, I> DoubleEndedIterator for Splice<'_, O, T, I>
where
	O: BitOrder,
	T: BitStore,
	I: Iterator<Item = bool>,
{
	fn next_back(&mut self) -> Option<Self::Item> {
		self.drain.next_back()
	}
	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		self.drain.nth_back(n)
	}
}
impl<O, T, I> ExactSizeIterator for Splice<'_, O, T, I>
where
	O: BitOrder,
	T: BitStore,
	I: Iterator<Item = bool>,
{
	fn len(&self) -> usize {
		self.drain.len()
	}
}
impl<O, T, I> FusedIterator for Splice<'_, O, T, I>
where
	O: BitOrder,
	T: BitStore,
	I: Iterator<Item = bool>,
{
}
impl<O, T, I> Drop for Splice<'_, O, T, I>
where
	O: BitOrder,
	T: BitStore,
	I: Iterator<Item = bool>,
{
	fn drop(&mut self) {
		let tail = self.drain.tail.clone();
		let tail_len = tail.end - tail.start;
		let bitvec = unsafe { self.drain.source.as_mut() };
		// If there is no tail to preserve, the remaining replacement bits can
		// simply be appended to the vector.
		if tail_len == 0 {
			bitvec.extend(self.splice.by_ref());
			return;
		}
		// Try to fill the drained gap from the replacement iterator. If it is
		// exhausted, the drain's own destructor will reattach the tail.
		if let FillStatus::EmptyInput = self.drain.fill(&mut self.splice) {
			return;
		}
		// The gap is full but the replacement has more bits. Use its size
		// hint to shift the tail back, making room, then fill again.
		let len = match self.splice.size_hint() {
			(n, None) | (_, Some(n)) => n,
		};
		unsafe {
			self.drain.move_tail(len);
		}
		if let FillStatus::EmptyInput = self.drain.fill(&mut self.splice) {
			return;
		}
		// The size hint under-reported. Collect whatever remains, make room
		// for exactly that many bits, and write them in.
		let mut collected = self.splice.by_ref().collect::<BitVec>().into_iter();
		let len = collected.len();
		if len > 0 {
			unsafe {
				self.drain.move_tail(len);
			}
			let filled = self.drain.fill(&mut collected);
			debug_assert_eq!(filled, FillStatus::EmptyInput);
			debug_assert_eq!(collected.len(), 0);
		}
	}
}