use std::io::{ Read, BufReader };
use std::str;
use std::borrow::Cow;
use std::convert::TryInto;
use serde::de::{
Deserialize, DeserializeSeed, Deserializer, IntoDeserializer,
SeqAccess, MapAccess, EnumAccess, VariantAccess,
Visitor, IgnoredAny, Error as DeError,
Expected, Unexpected,
};
use unicode_segmentation::UnicodeSegmentation;
use unicode_xid::UnicodeXID;
use ordered_float::NotNan;
use crate::error::{ Error, ResultExt };
use crate::span::Location;
/// Deserializes a value of type `T` from a string slice.
///
/// After the value is read, the deserializer is finalized so that any
/// trailing non-whitespace input is reported as an error.
pub fn from_str<'a, T: Deserialize<'a>>(s: &'a str) -> Result<T, Error> {
    let mut deserializer = TextDeserializer::new(s);
    let value = T::deserialize(&mut deserializer)?;
    deserializer.finalize().map(|()| value)
}
/// Deserializes a value of type `T` from raw bytes.
///
/// The bytes must be valid UTF-8; they are validated and then handed to
/// [`from_str`].
pub fn from_bytes<'a, T: Deserialize<'a>>(b: &'a [u8]) -> Result<T, Error> {
    // NOTE(review): `then_conv_err` comes from `crate::error::ResultExt`;
    // presumably it converts the `Utf8Error` into `Error` and otherwise
    // chains into `from_str` — confirm against the `error` module.
    str::from_utf8(b).then_conv_err(from_str)
}
/// Deserializes a value of type `T` by reading the entire input from `reader`.
///
/// The whole stream is buffered into a `String` first (the text format needs
/// the complete input for borrowing and error locations), then parsed via
/// [`from_str`]. I/O and UTF-8 errors surface as `Error`.
pub fn from_reader<R, T>(mut reader: R) -> Result<T, Error>
where
    R: Read,
    T: for<'a> Deserialize<'a>,
{
    // Preallocate a reasonable chunk to avoid repeated growth for small inputs.
    let mut buf = String::with_capacity(4096);
    reader.read_to_string(&mut buf)?;
    from_str(buf.as_str())
}
/// Like [`from_reader`], but wraps the reader in a `BufReader` first.
///
/// Useful when the underlying reader issues a syscall per read
/// (e.g. `File` or `TcpStream`).
pub fn from_reader_buffered<R, T>(reader: R) -> Result<T, Error>
where
    R: Read,
    T: for<'a> Deserialize<'a>,
{
    from_reader(BufReader::new(reader))
}
/// Forwards a `Cow<str>` to the visitor, preserving zero-copy borrows:
/// borrowed data uses `visit_borrowed_str` (keeps the `'de` lifetime),
/// owned data uses `visit_string` (transfers ownership).
fn visit_cow_str<'de, V: Visitor<'de>>(cow: Cow<'de, str>, visitor: V) -> Result<V::Value, Error> {
    if let Cow::Borrowed(s) = cow {
        visitor.visit_borrowed_str(s)
    } else {
        visitor.visit_string(cow.into_owned())
    }
}
/// Returns `true` if `index` falls on a Unicode word boundary of `string`.
///
/// The end of the string counts as a boundary; any index past the end does not.
#[must_use]
fn is_word_boundary(string: &str, index: usize) -> bool {
    use std::cmp::Ordering;
    match index.cmp(&string.len()) {
        Ordering::Equal => true,
        Ordering::Greater => false,
        // Boundary indices are strictly increasing, so a simple membership
        // test over the segmentation is equivalent to the first-match search.
        Ordering::Less => string
            .split_word_bound_indices()
            .any(|(i, _)| i == index),
    }
}
#[must_use]
fn is_word_boundary_or_punct(string: &str, index: usize) -> bool {
if is_word_boundary(string, index) {
true
} else if string.is_char_boundary(index) {
let (head, tail) = string.split_at(index);
(
head.chars()
.rev()
.next()
.map_or(true, |ch| ch.is_ascii_punctuation())
) || (
tail.chars()
.next()
.map_or(true, |ch| ch.is_ascii_punctuation())
)
} else {
false }
}
/// A single lexed token: its byte length in the source plus its parsed value.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Token<'de> {
    // Number of bytes the token occupies in the input; used to advance the cursor.
    len: usize,
    // The token's semantic value, possibly borrowing from the input string.
    parsed: TokenValue<'de>,
}
/// The semantic value of a lexed token.
///
/// `NotNan<f64>` keeps the type `Eq + Hash`; string tokens borrow from the
/// input when no escape sequences forced an owned copy.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum TokenValue<'de> {
    Null,              // the literal `null`
    QuestionMark,      // `?`, marks an optional ("some") value
    Bool(bool),        // `true` / `false`
    Int(i64),          // explicitly-signed integer literal
    Uint(u64),         // unsigned integer literal
    Float(NotNan<f64>),
    String(Cow<'de, str>),
    Blob(Vec<u8>),     // `#`-delimited hex byte string
    LeftBracket,       // `[`
    RightBracket,      // `]`
    LeftBrace,         // `{`
    RightBrace,        // `}`
    Comma,
    Colon,
}
impl<'de> From<&'de TokenValue<'de>> for Unexpected<'de> {
fn from(token: &'de TokenValue<'de>) -> Self {
use TokenValue::*;
match *token {
Null => Unexpected::Unit,
QuestionMark => Unexpected::Option,
Bool(b) => Unexpected::Bool(b),
Int(i) => Unexpected::Signed(i),
Uint(u) => Unexpected::Unsigned(u),
Float(f) => Unexpected::Float(f.into()),
String(ref s) => Unexpected::Str(s),
Blob(ref bytes) => Unexpected::Bytes(bytes),
LeftBracket => Unexpected::Seq,
LeftBrace => Unexpected::Map,
RightBracket => Unexpected::Char(']'),
RightBrace => Unexpected::Char('}'),
Comma => Unexpected::Char(','),
Colon => Unexpected::Char(':'),
}
}
}
/// Streaming deserializer for the textual format.
///
/// `Copy` is derived deliberately: sequence/map access code snapshots the
/// whole (pointer-sized) state to peek one token, then rewinds by assigning
/// the snapshot back.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TextDeserializer<'de> {
    // The complete input; retained for computing error `Location`s.
    string: &'de str,
    // The not-yet-consumed suffix of `string`.
    cursor: &'de str,
    // Byte offset (into `string`) where the most recent token started.
    prev_byte_pos: usize,
    // Byte offset (into `string`) of the start of `cursor`.
    next_byte_pos: usize,
}
impl<'de> TextDeserializer<'de> {
    /// Creates a deserializer positioned at the beginning of `s`.
    #[must_use]
    pub const fn new(s: &'de str) -> Self {
        TextDeserializer {
            string: s,
            cursor: s,
            prev_byte_pos: 0,
            next_byte_pos: 0,
        }
    }
pub fn finalize(mut self) -> Result<(), Error> {
if self.next().is_some() {
Err(self.error_at_prev(
Unexpected::Other("garbage"), &"end of input"
))
} else {
Ok(())
}
}
    /// Computes the line/column `Location` corresponding to a byte offset,
    /// by replaying the input up to that offset.
    fn location_at(&self, byte_pos: usize) -> Location {
        Location::default().advanced_by(&self.string[..byte_pos])
    }
    /// Builds an "unexpected X, expected Y" error annotated with the
    /// `Location` of `byte_pos`.
    fn error_at<'a, U>(
        &self,
        byte_pos: usize,
        unexpected: U,
        expected: &dyn Expected,
    ) -> Error
    where
        U: Into<Unexpected<'a>>,
    {
        Error::custom(
            format_args!(
                "unexpected {}, expected {}", unexpected.into(), expected,
            )
        ).with_context::<Location>(
            self.location_at(byte_pos)
        )
    }
    /// Error located at the start of the most recently consumed token.
    fn error_at_prev<'a, U>(&self, unexpected: U, expected: &dyn Expected) -> Error
    where
        U: Into<Unexpected<'a>>,
    {
        self.error_at(self.prev_byte_pos, unexpected, expected)
    }
    /// Error located at the current cursor position.
    fn error_at_next<'a, U>(&self, unexpected: U, expected: &dyn Expected) -> Error
    where
        U: Into<Unexpected<'a>>,
    {
        self.error_at(self.next_byte_pos, unexpected, expected)
    }
    /// Attaches the current cursor `Location` to an already-built error.
    fn with_next(&self, error: Error) -> Error {
        error.with_context::<Location>(self.location_at(self.next_byte_pos))
    }
    /// Consumes `index` bytes: `index` is relative to the current `cursor`,
    /// while `next_byte_pos` tracks the absolute offset into `string`.
    fn advance_to_byte_index(&mut self, index: usize) {
        self.next_byte_pos += index;
        self.cursor = &self.cursor[index..];
    }
    /// Consumes everything that remains (used when lexing hits end of input).
    fn advance_to_end_of_input(&mut self) {
        self.advance_to_byte_index(self.cursor.len());
    }
fn skip_whitespace(&mut self) {
let new_cursor = self.cursor.trim_start();
let ws_len = self.cursor.len() - new_cursor.len();
self.advance_to_byte_index(ws_len);
self.prev_byte_pos = self.next_byte_pos;
}
    /// Lexes a double-quoted string literal starting at the cursor.
    ///
    /// The result stays `Cow::Borrowed` (zero-copy) as long as no escape
    /// sequence is encountered; the first escape switches it to an owned
    /// buffer. `Token::len` includes both quotes.
    fn lex_string(&mut self) -> Result<Token<'de>, Error> {
        // Skip the opening quote: it is exactly the item `(0, '"')`.
        let mut iter = self.cursor
            .char_indices()
            .skip_while(|&it| it == (0, '"'));
        // Byte index of the first content character (right after the quote).
        let start = match iter.clone().next() {
            None => {
                self.advance_to_end_of_input();
                return Err(self.error_at_next(
                    Unexpected::Other("end of input in string"),
                    &"characters or '\"'",
                ));
            }
            Some((i, _)) => i
        };
        // Begin with an empty borrowed slice anchored at `start`.
        let mut string = Cow::Borrowed(&self.cursor[start..start]);
        let len = loop {
            match iter.next() {
                Some((i, ch)) => {
                    match ch {
                        // Closing quote: total token length includes it.
                        '"' => break i + ch.len_utf8(),
                        '\\' => {
                            // Escapes force an owned copy of what we have so far.
                            let unescaped = self.unescape(&mut iter)?;
                            string.to_mut().push(unescaped);
                        }
                        _ => {
                            match string {
                                // Still zero-copy: just widen the borrowed slice.
                                Cow::Borrowed(_) => string = Cow::Borrowed(
                                    &self.cursor[start..i + ch.len_utf8()]
                                ),
                                // Already owned (an escape occurred): append.
                                Cow::Owned(ref mut buf) => buf.push(ch),
                            }
                        }
                    }
                }
                None => {
                    // Unterminated string literal.
                    self.advance_to_end_of_input();
                    return Err(self.error_at_next(
                        Unexpected::Other("end of input in string"),
                        &"characters or '\"'"
                    ));
                }
            }
        };
        let parsed = TokenValue::String(string);
        Ok(Token { len, parsed })
    }
    /// Decodes one escape sequence; `iter` is positioned just after the `\`.
    ///
    /// Supports `\n`, `\r`, `\t`, `\\`, `\'`, `\"` and `\u{…}`. On error the
    /// cursor is advanced to the offending character so the reported
    /// location is accurate.
    fn unescape<T>(&mut self, iter: &mut T) -> Result<char, Error>
    where
        T: Iterator<Item = (usize, char)>,
    {
        let (i, ch) = match iter.next() {
            Some(it) => it,
            None => {
                self.advance_to_end_of_input();
                return Err(self.error_at_next(
                    Unexpected::Other("end of input in escape sequence"),
                    &"escape sequence"
                ));
            }
        };
        match ch {
            'n' => Ok('\n'),
            'r' => Ok('\r'),
            't' => Ok('\t'),
            '\\' | '\'' | '"' => Ok(ch),
            'u' => self.unescape_unicode(iter),
            _ => {
                self.advance_to_byte_index(i);
                Err(self.error_at_next(
                    Unexpected::Char(ch), &"one of `nrtu\'\"\\`"
                ))
            }
        }
    }
    /// Decodes a `\u{XXXX}` escape; `iter` is positioned just after the `u`.
    ///
    /// The indices yielded by `iter` are offsets into `self.cursor`, so the
    /// hex digits can be re-sliced out of the cursor once the braces are found.
    fn unescape_unicode<T>(
        &mut self,
        iter: &mut T,
    ) -> Result<char, Error>
    where
        T: Iterator<Item = (usize, char)>,
    {
        // Expect `{`; `start` is the offset of the first hex digit.
        let start = match iter.next() {
            Some((i, ch)) => if ch == '{' {
                i + ch.len_utf8()
            } else {
                self.advance_to_byte_index(i);
                return Err(self.error_at_next(
                    Unexpected::Char(ch), &"'{' in Unicode escape"
                ));
            }
            None => {
                self.advance_to_end_of_input();
                return Err(self.error_at_next(
                    Unexpected::Other("end of input in Unicode escape"), &"'{'"
                ));
            }
        };
        // Scan hex digits until the closing `}`; `end` is its offset.
        let end = loop {
            let (i, ch) = match iter.next() {
                Some(it) => it,
                None => {
                    self.advance_to_end_of_input();
                    return Err(self.error_at_next(
                        Unexpected::Other("end of input in Unicode escape"),
                        &"'}'"
                    ));
                }
            };
            match ch {
                '}' => break i,
                '0'..='9' | 'a'..='f' | 'A'..='F' => {},
                _ => {
                    self.advance_to_byte_index(i);
                    return Err(self.error_at_next(
                        Unexpected::Char(ch), &"hex digits"
                    ));
                }
            }
        };
        let hex_str = &self.cursor[start..end];
        // Catches overflow and the empty-digits case (`\u{}`).
        let code_point = u32::from_str_radix(hex_str, 16).map_err(|cause| {
            self.advance_to_byte_index(start);
            self.with_next(Error::with_cause("invalid Unicode escape", cause))
        })?;
        // Rejects surrogates and out-of-range values.
        std::char::from_u32(code_point).ok_or_else(|| {
            self.advance_to_byte_index(start);
            self.error_at_next(
                Unexpected::Other(&format!(
                    "invalid Unicode code point: U+{:04X}", code_point
                )),
                &"Unicode code point in the valid range"
            )
        })
    }
    /// Lexes a blob literal: `#` followed by pairs of hex digits (whitespace
    /// allowed between bytes), terminated by another `#`.
    fn lex_blob(&mut self) -> Result<Token<'de>, Error> {
        let mut buf = Vec::with_capacity(4096);
        let mut iter = self.cursor.char_indices();
        // Consume the mandatory opening `#`.
        match iter.next() {
            Some((_, '#')) => {}
            Some((_, ch)) => return Err(self.error_at_next(
                Unexpected::Char(ch), &"'#' at beginning of blob"
            )),
            None => return Err(self.error_at_next(
                Unexpected::Other("end of input"), &"'#' at beginning of blob"
            )),
        }
        // Read byte-by-byte until the closing `#` yields the token length.
        let len = loop {
            let res = self.expect_hex_pair_or_pound(
                iter.by_ref().skip_while(|&(_, ch)| ch.is_whitespace())
            )?;
            match res {
                Ok(byte) => buf.push(byte),
                Err(len) => break len,
            }
        };
        // The initial 4096-byte guess may overshoot; return the excess.
        buf.shrink_to_fit();
        let parsed = TokenValue::Blob(buf);
        Ok(Token { len, parsed })
    }
    /// Reads either one byte (two hex digits) or the terminating `#`.
    ///
    /// Returns `Ok(Ok(byte))` for a decoded byte, `Ok(Err(len))` when the
    /// closing `#` was found (`len` is the blob token's total byte length),
    /// or `Err(_)` on malformed input.
    fn expect_hex_pair_or_pound<T>(
        &mut self,
        mut iter: T,
    ) -> Result<Result<u8, usize>, Error>
    where
        T: Iterator<Item = (usize, char)>,
    {
        let byte_from_hex = |hi, lo| {
            // Truncation is safe: values are at most 0xF.
            #[allow(clippy::cast_possible_truncation)]
            let nibble_from_hex = |ch| match ch {
                '0'..='9' => (ch as u32 - '0' as u32) as u8,
                'a'..='f' => (ch as u32 - 'a' as u32) as u8 + 10,
                'A'..='F' => (ch as u32 - 'A' as u32) as u8 + 10,
                // Callers only pass characters already matched as hex digits.
                _ => unreachable!("invalid hex character: '{}'", ch)
            };
            nibble_from_hex(hi) << 4 | nibble_from_hex(lo)
        };
        let (i, hi) = match iter.next() {
            Some(it) => it,
            None => {
                self.advance_to_end_of_input();
                return Err(self.error_at_next(
                    Unexpected::Other("end of input"), &"'#' at end of blob"
                ));
            }
        };
        match hi {
            // Closing delimiter: report the length including the `#` itself.
            '#' => Ok(Err(i + hi.len_utf8())),
            '0'..='9' | 'a'..='f' | 'A'..='F' => {
                let (j, lo) = match iter.next() {
                    Some(it) => it,
                    None => {
                        self.advance_to_end_of_input();
                        return Err(self.error_at_next(
                            Unexpected::Other("end of input"),
                            &"hex digit"
                        ));
                    }
                };
                match lo {
                    '0'..='9' | 'a'..='f' | 'A'..='F' => {
                        Ok(Ok(byte_from_hex(hi, lo)))
                    }
                    // `#` right after a lone nibble: specific diagnostic.
                    '#' => {
                        self.advance_to_byte_index(i);
                        Err(self.error_at_next(
                            Unexpected::Other("odd number of hex digits in blob"),
                            &"even number of hex digits",
                        ))
                    }
                    _ => {
                        self.advance_to_byte_index(j);
                        Err(self.error_at_next(
                            Unexpected::Char(lo), &"hex digits"
                        ))
                    }
                }
            }
            _ => {
                self.advance_to_byte_index(i);
                Err(self.error_at_next(
                    Unexpected::Char(hi), &"hex digits or '#'"
                ))
            }
        }
    }
    /// Lexes a numeric literal: optional sign(s), then either `inf`, an
    /// integer, or a decimal fraction.
    ///
    /// Signed integers become `Int`, unsigned ones `Uint`, anything with a
    /// `.` (and `inf`) becomes `Float`. The literal must end on a word
    /// boundary or at punctuation.
    fn lex_number(&mut self) -> Result<Token<'de>, Error> {
        // Skip any leading sign characters; indices stay cursor-relative.
        let iter = self.cursor
            .char_indices()
            .skip_while(|&(_, ch)| ch == '+' || ch == '-');
        match iter.clone().next() {
            None => {
                self.advance_to_end_of_input();
                Err(self.error_at_next(
                    Unexpected::Other("end of input"),
                    &"decimal digits or 'inf'",
                ))
            }
            Some((i, ch)) => {
                let inf = "inf";
                // `inf` (possibly signed), provided it isn't a prefix of a
                // longer word such as `infinity`.
                if
                    self.cursor[i..].starts_with(inf)
                    && is_word_boundary_or_punct(&self.cursor[i..], inf.len())
                {
                    let len = i + inf.len();
                    let number = self.cursor[..len]
                        .parse()
                        .conv_err()
                        .map_err(|e| self.with_next(e))?;
                    let parsed = TokenValue::Float(number);
                    Ok(Token { len, parsed })
                } else if ch.is_numeric() || ch == '.' {
                    // Integral digits, then optionally `.` and fractional digits.
                    let mut iter = iter.skip_while(|&(_, ch)| ch.is_numeric());
                    let len = match iter.next() {
                        Some((j, ch)) => if ch == '.' {
                            let mut iter = iter.skip_while(
                                |&(_, ch)| ch.is_numeric()
                            );
                            match iter.next() {
                                Some((k, _)) => k,
                                None => self.cursor.len(),
                            }
                        } else {
                            j
                        }
                        None => self.cursor.len(),
                    };
                    if is_word_boundary_or_punct(self.cursor, len) {
                        let num_str = &self.cursor[..len];
                        // A decimal point selects Float; a sign selects Int;
                        // otherwise Uint. Parse target types are inferred
                        // from the TokenValue variant.
                        let parsed = if num_str.contains('.') {
                            num_str
                                .parse()
                                .conv_err()
                                .map(TokenValue::Float)
                                .map_err(|e| self.with_next(e))?
                        } else if num_str.starts_with(['+', '-'].as_ref()) {
                            num_str
                                .parse()
                                .conv_err()
                                .map(TokenValue::Int)
                                .map_err(|e| self.with_next(e))?
                        } else {
                            num_str
                                .parse()
                                .conv_err()
                                .map(TokenValue::Uint)
                                .map_err(|e| self.with_next(e))?
                        };
                        Ok(Token { len, parsed })
                    } else {
                        // Digits running straight into word characters, e.g. `12ab`.
                        self.advance_to_byte_index(len);
                        Err(self.error_at_next(
                            Unexpected::Other("characters"),
                            &"Unicode word boundary or punctuation"
                        ))
                    }
                } else {
                    self.advance_to_byte_index(i);
                    Err(self.error_at_next(
                        Unexpected::Char(ch), &"digits or decimal point"
                    ))
                }
            }
        }
    }
    /// Lexes a bare keyword: `null`, `true`, `false`, or `inf`.
    ///
    /// The word extends to the first non-identifier (`XID_Continue`) character.
    fn lex_word(&mut self) -> Result<Token<'de>, Error> {
        let len = self.cursor
            .char_indices()
            .find(|&(_, ch)| !UnicodeXID::is_xid_continue(ch))
            .map_or(self.cursor.len(), |(i, _)| i);
        let word = &self.cursor[..len];
        let parsed = match word {
            "null" => TokenValue::Null,
            "false" => TokenValue::Bool(false),
            "true" => TokenValue::Bool(true),
            // `try_into` builds the `NotNan` wrapper; infinity is not NaN,
            // so this conversion cannot fail in practice.
            "inf" => TokenValue::Float(f64::INFINITY.try_into()?),
            _ => return Err(self.error_at_next(
                Unexpected::Other(&format!("word `{}`", word)),
                &"`true`, `false`, `null`, or `inf`"
            ))
        };
        Ok(Token { len, parsed })
    }
fn lex_punctuation(&mut self, ch: char) -> Result<Token<'de>, Error> {
let payload = match ch {
'?' => TokenValue::QuestionMark,
'[' => TokenValue::LeftBracket,
']' => TokenValue::RightBracket,
'{' => TokenValue::LeftBrace,
'}' => TokenValue::RightBrace,
',' => TokenValue::Comma,
':' => TokenValue::Colon,
_ => return Err(self.error_at_next(
Unexpected::Char(ch), &"one of `?[]{},:`"
)),
};
Ok(Token {
len: ch.len_utf8(),
parsed: payload,
})
}
fn expect_token(&mut self) -> Result<TokenValue<'de>, Error> {
self.next().unwrap_or_else(|| Err(
self.error_at_next(Unexpected::Other("end of input"), &"any token")
))
}
fn deserialize_number<V: Visitor<'de>>(&mut self, visitor: V) -> Result<V::Value, Error> {
match self.expect_token()? {
TokenValue::Int(i) => visitor.visit_i64(i),
TokenValue::Uint(u) => visitor.visit_u64(u),
TokenValue::Float(f) => visitor.visit_f64(f.into()),
token @ _ => Err(self.error_at_prev(&token, &visitor)),
}
}
fn visit_and_exhaust_seq<V: Visitor<'de>>(
&mut self,
visitor: V,
) -> Result<V::Value, Error> {
let mut seq = SeqDeserializer::new(self);
let value = visitor.visit_seq(&mut seq)?;
seq.exhaust()?;
Ok(value)
}
fn visit_and_exhaust_map<V: Visitor<'de>>(
&mut self,
visitor: V,
) -> Result<V::Value, Error> {
let mut map = MapDeserializer::new(self);
let value = visitor.visit_map(&mut map)?;
map.exhaust()?;
Ok(value)
}
    /// Lexes the next token, or returns `None` at end of input.
    ///
    /// Dispatch is on the first non-whitespace character; on success the
    /// cursor is advanced past the token.
    fn next(&mut self) -> Option<Result<TokenValue<'de>, Error>> {
        self.skip_whitespace();
        let ch = self.cursor.chars().next()?;
        let result = match ch {
            '"' => self.lex_string(),
            '#' => self.lex_blob(),
            // Signs and a leading decimal point start numbers; this arm must
            // precede the generic punctuation arm below.
            '+' | '-' | '.' => self.lex_number(),
            _ if ch.is_numeric() => self.lex_number(),
            _ if ch.is_ascii_punctuation() => self.lex_punctuation(ch),
            _ if UnicodeXID::is_xid_start(ch) => self.lex_word(),
            _ => Err(self.error_at_next(
                Unexpected::Char(ch), &"a Neodyn Exchange value"
            ))
        };
        Some(
            result.map(|Token { len, parsed }| {
                self.advance_to_byte_index(len);
                parsed
            })
        )
    }
}
impl<'de> Deserializer<'de> for &mut TextDeserializer<'de> {
type Error = Error;
    // This is the textual format, so serde should pick human-readable
    // representations (e.g. for timestamps).
    fn is_human_readable(&self) -> bool {
        true
    }
fn deserialize_any<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
use TokenValue::*;
match self.expect_token()? {
Null => visitor.visit_unit(),
QuestionMark => visitor.visit_some(self),
Bool(b) => visitor.visit_bool(b),
Int(i) => visitor.visit_i64(i),
Uint(u) => visitor.visit_u64(u),
Float(f) => visitor.visit_f64(f.into()),
String(s) => visit_cow_str(s, visitor),
Blob(bytes) => visitor.visit_byte_buf(bytes),
LeftBracket => self.visit_and_exhaust_seq(visitor),
LeftBrace => self.visit_and_exhaust_map(visitor),
token @ _ => Err(self.error_at_prev(&token, &visitor)),
}
}
fn deserialize_bool<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
match self.expect_token()? {
TokenValue::Bool(b) => visitor.visit_bool(b),
token @ _ => Err(self.error_at_prev(&token, &visitor)),
}
}
    // All integer and float widths funnel through `deserialize_number`;
    // the visitor performs any narrowing.
    fn deserialize_i8<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_number(visitor)
    }
    fn deserialize_i16<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_number(visitor)
    }
    fn deserialize_i32<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_number(visitor)
    }
    fn deserialize_i64<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_number(visitor)
    }
    fn deserialize_i128<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_number(visitor)
    }
    fn deserialize_u8<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_number(visitor)
    }
    fn deserialize_u16<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_number(visitor)
    }
    fn deserialize_u32<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_number(visitor)
    }
    fn deserialize_u64<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_number(visitor)
    }
    fn deserialize_u128<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_number(visitor)
    }
    fn deserialize_f32<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_number(visitor)
    }
    fn deserialize_f64<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_number(visitor)
    }
    // A char is represented as a (one-character) string in this format.
    fn deserialize_char<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_str(visitor)
    }
    fn deserialize_str<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_string(visitor)
    }
fn deserialize_string<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
match self.expect_token()? {
TokenValue::String(s) => visit_cow_str(s, visitor),
TokenValue::Blob(bytes) => {
let s = String::from_utf8(bytes)?;
visitor.visit_string(s)
},
token @ _ => Err(self.error_at_prev(&token, &visitor)),
}
}
fn deserialize_bytes<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
self.deserialize_byte_buf(visitor)
}
fn deserialize_byte_buf<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
match self.expect_token()? {
TokenValue::Blob(bytes) => visitor.visit_byte_buf(bytes),
TokenValue::String(s) => visit_cow_str(s, visitor),
TokenValue::LeftBracket => self.visit_and_exhaust_seq(visitor), token @ _ => Err(self.error_at_prev(&token, &visitor)),
}
}
fn deserialize_option<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
match self.expect_token()? {
TokenValue::Null => visitor.visit_none(),
TokenValue::QuestionMark => visitor.visit_some(self),
token @ _ => Err(self.error_at_prev(&token, &visitor)),
}
}
fn deserialize_unit<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
match self.expect_token()? {
TokenValue::Null => visitor.visit_unit(),
token @ _ => Err(self.error_at_prev(&token, &visitor)),
}
}
    // A unit struct is represented the same way as the unit value (`null`).
    fn deserialize_unit_struct<V: Visitor<'de>>(
        self,
        _name: &'static str,
        visitor: V,
    ) -> Result<V::Value, Self::Error> {
        self.deserialize_unit(visitor)
    }
    // Newtype structs are transparent: deserialize the inner value directly.
    fn deserialize_newtype_struct<V: Visitor<'de>>(
        self,
        _name: &'static str,
        visitor: V,
    ) -> Result<V::Value, Self::Error> {
        visitor.visit_newtype_struct(self)
    }
fn deserialize_seq<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
match self.expect_token()? {
TokenValue::LeftBracket => self.visit_and_exhaust_seq(visitor),
token @ _ => Err(self.error_at_prev(&token, &visitor)),
}
}
    // Tuples are arrays; the statically-known length is not needed for lexing.
    fn deserialize_tuple<V: Visitor<'de>>(
        self,
        _len: usize,
        visitor: V,
    ) -> Result<V::Value, Self::Error> {
        self.deserialize_seq(visitor)
    }
    fn deserialize_tuple_struct<V: Visitor<'de>>(
        self,
        _name: &'static str,
        len: usize,
        visitor: V,
    ) -> Result<V::Value, Self::Error> {
        self.deserialize_tuple(len, visitor)
    }
    // A map must open with `{`.
    fn deserialize_map<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        match self.expect_token()? {
            TokenValue::LeftBrace => self.visit_and_exhaust_map(visitor),
            token @ _ => Err(self.error_at_prev(&token, &visitor)),
        }
    }
    // Structs are maps keyed by field name; the field list is not needed.
    fn deserialize_struct<V: Visitor<'de>>(
        self,
        _name: &'static str,
        _fields: &'static [&'static str],
        visitor: V,
    ) -> Result<V::Value, Self::Error> {
        self.deserialize_map(visitor)
    }
    /// Enums are either a bare string (unit variant) or a single-entry map
    /// `{ "variant": payload }`, optionally with a trailing comma before `}`.
    fn deserialize_enum<V: Visitor<'de>>(
        self,
        _type_name: &'static str,
        _variants: &'static [&'static str],
        visitor: V,
    ) -> Result<V::Value, Self::Error> {
        match self.expect_token()? {
            TokenValue::String(s) => {
                // Unit variant: the string itself names the variant.
                visitor.visit_enum(s.into_deserializer())
            }
            TokenValue::LeftBrace => {
                // `self` acts as the EnumAccess: it reads key, ':' and payload.
                let value = visitor.visit_enum(&mut *self)?;
                // After the payload, allow `}` or `,}` — nothing else.
                match self.expect_token()? {
                    TokenValue::RightBrace => Ok(value),
                    TokenValue::Comma => {
                        match self.expect_token()? {
                            TokenValue::RightBrace => Ok(value),
                            token @ _ => Err(self.error_at_prev(
                                &token, &"enum as single-key map"
                            )),
                        }
                    },
                    token @ _ => Err(self.error_at_prev(
                        &token, &"enum as single-key map"
                    )),
                }
            }
            token @ _ => Err(self.error_at_prev(
                &token, &"enum as string or single-key map"
            ))
        }
    }
    // Identifiers (struct field names, variant names) are plain strings.
    fn deserialize_identifier<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_str(visitor)
    }
    // Skip one complete value of any shape, then hand the visitor a unit.
    fn deserialize_ignored_any<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Self::Error> {
        self.deserialize_any(IgnoredAny).and_then(|_| visitor.visit_unit())
    }
}
/// `SeqAccess` implementation over a borrowed `TextDeserializer`.
#[derive(Debug)]
struct SeqDeserializer<'a, 'de: 'a> {
    deserializer: &'a mut TextDeserializer<'de>,
    // Set once `]` has been consumed, so further element requests yield `None`.
    seen_closing_bracket: bool,
}
impl<'a, 'de> SeqDeserializer<'a, 'de> {
fn new(de: &'a mut TextDeserializer<'de>) -> Self {
SeqDeserializer {
deserializer: de,
seen_closing_bracket: false,
}
}
fn exhaust(&mut self) -> Result<(), Error> {
while let Some(IgnoredAny) = self.next_element()? {}
Ok(())
}
}
impl<'a, 'de> SeqAccess<'de> for SeqDeserializer<'a, 'de> {
    type Error = Error;

    /// Deserializes the next element, or returns `Ok(None)` once `]` is seen.
    ///
    /// Peeking works by snapshotting the (Copy) deserializer state, lexing
    /// one token, and rewinding if it was not the closing bracket. After each
    /// element, either `,` (more elements may follow) or `]` (done) must appear.
    fn next_element_seed<T: DeserializeSeed<'de>>(
        &mut self,
        seed: T,
    ) -> Result<Option<T::Value>, Self::Error> {
        if self.seen_closing_bracket {
            return Ok(None);
        }
        // Snapshot so we can rewind after peeking one token.
        let saved_state = *self.deserializer;
        #[allow(clippy::wildcard_enum_match_arm)]
        match self.deserializer.expect_token()? {
            TokenValue::RightBracket => {
                self.seen_closing_bracket = true;
                Ok(None)
            }
            _ => {
                // Not `]`: rewind and let the seed parse the element itself.
                *self.deserializer = saved_state;
                let value = seed.deserialize(&mut *self.deserializer)?;
                match self.deserializer.expect_token()? {
                    TokenValue::Comma => {}
                    TokenValue::RightBracket => {
                        self.seen_closing_bracket = true;
                    }
                    // Fixed: the expected-message string used to end with a
                    // stray trailing comma (`"... in array,"`).
                    token => return Err(self.deserializer.error_at_prev(
                        &token, &"',' or ']' after value in array"
                    ))
                }
                Ok(Some(value))
            }
        }
    }
}
/// `MapAccess` implementation over a borrowed `TextDeserializer`.
#[derive(Debug)]
struct MapDeserializer<'a, 'de: 'a> {
    deserializer: &'a mut TextDeserializer<'de>,
    // Set once `}` has been consumed, so further entry requests yield `None`.
    seen_closing_brace: bool,
}
impl<'a, 'de> MapDeserializer<'a, 'de> {
fn new(de: &'a mut TextDeserializer<'de>) -> Self {
MapDeserializer {
deserializer: de,
seen_closing_brace: false,
}
}
fn exhaust(&mut self) -> Result<(), Error> {
while let Some((IgnoredAny, IgnoredAny)) = self.next_entry()? {}
Ok(())
}
}
impl<'a, 'de> MapAccess<'de> for MapDeserializer<'a, 'de> {
    type Error = Error;
    /// Deserializes the next key, or returns `Ok(None)` once `}` is seen.
    ///
    /// Peeks one token via a state snapshot/rewind (the deserializer is
    /// `Copy`); a non-`}` token is re-parsed by the seed as the key, which
    /// must be followed by `:`.
    fn next_key_seed<K: DeserializeSeed<'de>>(
        &mut self,
        seed: K,
    ) -> Result<Option<K::Value>, Self::Error> {
        if self.seen_closing_brace {
            return Ok(None);
        }
        // Snapshot so we can rewind after peeking one token.
        let saved_state = *self.deserializer;
        #[allow(clippy::wildcard_enum_match_arm)]
        match self.deserializer.expect_token()? {
            TokenValue::RightBrace => {
                self.seen_closing_brace = true;
                Ok(None)
            }
            _ => {
                *self.deserializer = saved_state;
                let key = seed.deserialize(&mut *self.deserializer)?;
                match self.deserializer.expect_token()? {
                    TokenValue::Colon => Ok(Some(key)),
                    token @ _ => Err(self.deserializer.error_at_prev(
                        &token, &"':' after key in map"
                    ))
                }
            }
        }
    }
    /// Deserializes the value, then consumes the entry terminator:
    /// `,` (more entries may follow) or `}` (map finished).
    fn next_value_seed<V: DeserializeSeed<'de>>(
        &mut self,
        seed: V,
    ) -> Result<V::Value, Self::Error> {
        let value = seed.deserialize(&mut *self.deserializer)?;
        match self.deserializer.expect_token()? {
            TokenValue::Comma => {},
            TokenValue::RightBrace => {
                self.seen_closing_brace = true;
            }
            token @ _ => return Err(self.deserializer.error_at_prev(
                &token, &"',' or '}' after value in map"
            )),
        }
        Ok(value)
    }
}
impl<'de> EnumAccess<'de> for &mut TextDeserializer<'de> {
    type Error = Error;
    type Variant = Self;
    /// Reads the variant name (the single map key) and the `:` separator;
    /// the same deserializer then serves as the `VariantAccess` for the payload.
    fn variant_seed<V: DeserializeSeed<'de>>(
        self,
        seed: V
    ) -> Result<(V::Value, Self::Variant), Self::Error> {
        let value = seed.deserialize(&mut *self)?;
        match self.expect_token()? {
            TokenValue::Colon => Ok((value, self)),
            token @ _ => Err(self.error_at_prev(&token, &"':' after key in map")),
        }
    }
}
impl<'de> VariantAccess<'de> for &mut TextDeserializer<'de> {
    type Error = <Self as EnumAccess<'de>>::Error;
    // A unit variant's payload is `null`; `Deserialize for ()` checks that.
    fn unit_variant(self) -> Result<(), Self::Error> {
        Deserialize::deserialize(self)
    }
    // Newtype variant: the payload is deserialized directly.
    fn newtype_variant_seed<T: DeserializeSeed<'de>>(
        self,
        seed: T,
    ) -> Result<T::Value, Self::Error> {
        seed.deserialize(self)
    }
    // Tuple variant: the payload is an array.
    fn tuple_variant<V: Visitor<'de>>(
        self,
        len: usize,
        visitor: V,
    ) -> Result<V::Value, Self::Error> {
        self.deserialize_tuple(len, visitor)
    }
    // Struct variant: the payload is a map keyed by field name.
    fn struct_variant<V: Visitor<'de>>(
        self,
        _fields: &'static [&'static str],
        visitor: V,
    ) -> Result<V::Value, Self::Error> {
        self.deserialize_map(visitor)
    }
}
#[cfg(test)]
mod tests {
    use super::{ is_word_boundary, is_word_boundary_or_punct };
    // End of string is a boundary; past-the-end and mid-word indices are not.
    #[test]
    fn is_word_boundary_works() {
        assert!(is_word_boundary("", 0));
        assert!(!is_word_boundary("", 1));
        assert!(is_word_boundary("foo", 0));
        assert!(is_word_boundary("foo", 3));
        assert!(!is_word_boundary("foo", 1));
        assert!(!is_word_boundary("foo", 4));
        assert!(is_word_boundary("bar qux", 0));
        assert!(is_word_boundary("bar qux", 3));
        assert!(is_word_boundary("bar qux", 4));
        assert!(is_word_boundary("bar qux", 7));
        assert!(!is_word_boundary("bar qux", 2));
        assert!(!is_word_boundary("bar qux", 5));
        assert!(!is_word_boundary("bar qux", 6));
        assert!(!is_word_boundary("bar qux", 8));
        // `,` and `.` between digits do not split words per UAX #29.
        assert!(!is_word_boundary("1,2,3", 1));
        assert!(!is_word_boundary("1,2,3", 2));
        assert!(!is_word_boundary("9.7", 1));
        assert!(!is_word_boundary("9.7", 2));
    }
    // Same cases, plus: indices adjacent to ASCII punctuation are accepted.
    #[test]
    fn is_word_boundary_or_punct_works() {
        assert!(is_word_boundary_or_punct("", 0));
        assert!(!is_word_boundary_or_punct("", 1));
        assert!(is_word_boundary_or_punct("foo", 0));
        assert!(is_word_boundary_or_punct("foo", 3));
        assert!(!is_word_boundary_or_punct("foo", 1));
        assert!(!is_word_boundary_or_punct("foo", 4));
        assert!(is_word_boundary_or_punct("bar qux", 0));
        assert!(is_word_boundary_or_punct("bar qux", 3));
        assert!(is_word_boundary_or_punct("bar qux", 4));
        assert!(is_word_boundary_or_punct("bar qux", 7));
        assert!(!is_word_boundary_or_punct("bar qux", 2));
        assert!(!is_word_boundary_or_punct("bar qux", 5));
        assert!(!is_word_boundary_or_punct("bar qux", 6));
        assert!(!is_word_boundary_or_punct("bar qux", 8));
        assert!(is_word_boundary_or_punct("1,2,345", 1));
        assert!(is_word_boundary_or_punct("1,2,345", 2));
        assert!(!is_word_boundary_or_punct("1,2,345", 5));
        assert!(is_word_boundary_or_punct("9.753", 1));
        assert!(is_word_boundary_or_punct("9.753", 2));
        assert!(!is_word_boundary_or_punct("9.753", 3));
    }
}
}