//! token-dict 1.0.2
//!
//! Basic dictionary-based tokenization.
//! See the crate documentation for usage details.
impl<I:IntoIterator> From<I> for UTF8CharIter<I::IntoIter> where I::Item:Val<u8>{
	fn from(value:I)->Self{
		Self{inner:value.into_iter()}
	}
}
impl<I:Iterator> Iterator for UTF8CharIter<I> where I::Item:Val<u8>{
	type Item=Result<char,[u8;4]>;
	/// decodes the next UTF-8 scalar value from the underlying byte stream.
	/// returns `Ok(char)` for a well-formed sequence, or `Err(bytes)` carrying
	/// the offending bytes (zero padded to 4) for an invalid one. every byte
	/// read while attempting the decode is consumed, so iteration simply
	/// resumes at the following byte.
	fn next(&mut self)->Option<Result<char,[u8;4]>>{
		let inner=&mut self.inner;

		let firstbyte=inner.next()?.val();
		let mut bytes=[firstbyte,0,0,0];
		// classify the lead byte: sequence length and its payload bits
		let (charlen,value)=if firstbyte&0b10000000==0b00000000{(1,firstbyte)}
		else if firstbyte&0b11000000==0b10000000{return Some(Err(bytes))}// stray continuation byte
		else if firstbyte&0b11100000==0b11000000{(2,firstbyte&0b00011111)}
		else if firstbyte&0b11110000==0b11100000{(3,firstbyte&0b00001111)}
		else if firstbyte&0b11111000==0b11110000{(4,firstbyte&0b00000111)}
		else                                    {return Some(Err(bytes))};// 0b11111xxx is never a valid lead byte
		let mut value=value as u32;

		for n in 1..charlen{
			let nextbyte=if let Some(b)=inner.next(){b.val()}else{return Some(Err(bytes))};// truncated sequence
			bytes[n]=nextbyte;
			if nextbyte&0b11000000==0b10000000{value=(value<<6)+((nextbyte&0b00111111) as u32)}else{return Some(Err(bytes))}
		}
		// reject overlong encodings: each scalar value must use the minimum
		// possible sequence length, otherwise the bytes are not valid UTF-8
		let minvalue=match charlen{2=>0x80,3=>0x800,4=>0x10000,_=>0};
		if value<minvalue{return Some(Err(bytes))}
		// from_u32 rejects surrogates and values above char::MAX
		char::from_u32(value).map(Ok).or(Some(Err(bytes)))
	}
	/// lower bound: every item consumes at most 4 bytes (ceiling division).
	/// upper bound: every item consumes at least 1 byte, so the byte count
	/// bounds the item count directly.
	fn size_hint(&self)->(usize,Option<usize>){
		let (lowerbytes,upperbytes)=self.inner.size_hint();

		((lowerbytes+3)/4,upperbytes)
	}
}
impl<I:Iterator> UTF8CharIter<I> where I::Item:Val<u8>{
	/// consumes the wrapper and hands back the underlying byte iterator
	pub fn into_inner(self)->I{
		let Self{inner}=self;
		inner
	}
}
impl<T:Copy> Val<T> for &T{
	/// copies the referenced value out (requires `T:Copy`)
	fn val(self)->T{
		let &value=self;
		value
	}
}
impl<T> Val<T> for T{
	/// already an owned value: returned unchanged
	fn val(self)->T{
		self
	}
}
#[cfg(test)]
mod tests{
	use super::*;

	#[test]
	fn utf8_test_1(){
		// a fully valid ascii string round-trips unchanged
		let iter=UTF8CharIter::from("correct string".as_bytes());
		let c:String=iter.filter_map(|c|c.ok()).collect();
		assert_eq!(c,"correct string");
		// invalid bytes in the middle are dropped by filtering out the errors
		let iter=UTF8CharIter::from("incorrect string".bytes().chain([255,255]).chain(" incorrectness removed".bytes()));
		let c:String=iter.filter_map(|c|c.ok()).collect();
		assert_eq!(c,"incorrect string incorrectness removed");

		// multi byte characters survive while the invalid run is stripped;
		// the embedded 0x20 is a valid space and is kept, hence two spaces
		let iter=UTF8CharIter::from("正しくない文字列".bytes().chain([128,255,0x20,255,0b10000000]).chain(" 不正確さは削除された".bytes()));
		let c:String=iter.filter_map(|c|c.ok()).collect();
		assert_eq!(c,"正しくない文字列  不正確さは削除された");
	}
	#[test]
	fn test_utf8_char_iter_valid() {
		// Unicode character '€' (euro sign), encoded as three bytes
		let bytes = vec![0xE2u8, 0x82u8, 0xACu8];
		let mut iter: UTF8CharIter<_> = bytes.into_iter().into();
		match iter.next() {
			Some(Ok(c)) => assert_eq!(c, '€'),
			other => panic!("Expected Ok('€'), got {:?}", other),
		}
		// No more characters
		assert!(iter.next().is_none());
	}
	#[test]
	fn test_utf8_char_iter_error() {
		// Invalid UTF-8 start byte
		let bytes = vec![0xFFu8];
		let mut iter: UTF8CharIter<_> = bytes.into_iter().into();
		match iter.next() {
			Some(Err(buf)) => assert_eq!(buf[0], 0xFF),
			other => panic!("Expected Err with first byte 0xFF, got {:?}", other),
		}
	}
}
/// module for token dictionary
pub mod dict;
/// module for Token type
pub mod token;
/// iterator for live converting utf8 to chars, returning errors for all bytes that aren't part of a valid character. useful for lazily detokenizing into a string
// NOTE: the `I::Item:Val<u8>` bound lives on the impl blocks, not here —
// trait bounds on struct definitions only restrict where the type can be
// named, without adding any capability (idiomatic Rust keeps them off).
#[derive(Clone,Debug)]
pub struct UTF8CharIter<I:Iterator>{inner:I}
/// trait for unifying primitives and their references
///
/// implemented blanket-style for any `T` (identity) and for `&T` where
/// `T:Copy` (copies out of the reference), so generic code can consume
/// owned bytes and borrowed bytes uniformly.
pub trait Val<T>{
	/// gets the value, consuming `self` (a plain copy for the `&T` impl)
	fn val(self)->T;
}
pub use {dict::TokenDict,token::Token};