use cow_rc_str::CowRcStr;
use std::ops::Range;
use std::ascii::AsciiExt;
use std::ops::BitOr;
use tokenizer::{self, Token, Tokenizer, SourceLocation};
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
/// A snapshot of the parser's state, obtained from `Parser::position`.
///
/// Pass it to `Parser::reset` to rewind the parser to this point.
pub struct SourcePosition {
// Byte offset into the input, as tracked by the tokenizer.
position: tokenizer::SourcePosition,
// If the token just consumed at this position opened a block
// (function, `(`, `[`, or `{`) whose contents were not consumed yet,
// the type of that block; `None` otherwise.
at_start_of: Option<BlockType>,
}
#[derive(Clone, Debug, PartialEq)]
/// The fundamental parsing errors that can be triggered by built-in parsing routines.
pub enum BasicParseError<'a> {
/// An unexpected token was encountered.
UnexpectedToken(Token<'a>),
/// The end of the input (or of the delimited section being parsed) was encountered.
EndOfInput,
/// An `@` rule was encountered that was invalid; carries the at-rule's name.
AtRuleInvalid(CowRcStr<'a>),
/// The body of an `@` rule was invalid.
AtRuleBodyInvalid,
/// A qualified rule was encountered that was invalid.
QualifiedRuleInvalid,
}
impl<'a, T> From<BasicParseError<'a>> for ParseError<'a, T> {
fn from(this: BasicParseError<'a>) -> ParseError<'a, T> {
ParseError::Basic(this)
}
}
#[derive(Clone, Debug, PartialEq)]
pub enum ParseError<'a, T: 'a> {
Basic(BasicParseError<'a>),
Custom(T),
}
impl<'a, T> ParseError<'a, T> {
pub fn basic(self) -> BasicParseError<'a> {
match self {
ParseError::Basic(e) => e,
ParseError::Custom(_) => panic!("Not a basic parse error"),
}
}
}
/// The owned input for a parser: the tokenizer over the source string,
/// plus a one-entry token cache used to avoid re-tokenizing after a rewind.
pub struct ParserInput<'i> {
// The underlying tokenizer over the original source text.
tokenizer: Tokenizer<'i>,
// The most recently produced token, memoized with its byte range so that
// re-parsing from the same position can skip tokenization.
cached_token: Option<CachedToken<'i>>,
}
/// A memoized token together with the byte range it was tokenized from.
struct CachedToken<'i> {
// The token itself.
token: Token<'i>,
// Tokenizer position just before this token was read.
start_position: tokenizer::SourcePosition,
// Tokenizer position just after this token was read.
end_position: tokenizer::SourcePosition,
}
impl<'i> ParserInput<'i> {
    /// Create a new input for a parser, tokenizing the given source text.
    pub fn new(input: &'i str) -> ParserInput<'i> {
        let tokenizer = Tokenizer::new(input);
        ParserInput {
            tokenizer: tokenizer,
            cached_token: None,
        }
    }

    /// Borrow the cached token. Panics (via `unwrap`) if nothing is cached;
    /// callers must only invoke this right after filling the cache.
    #[inline]
    fn cached_token_ref(&self) -> &Token<'i> {
        let cached = self.cached_token.as_ref().unwrap();
        &cached.token
    }
}
/// A CSS parser that borrows its input, with a cursor and a set of
/// delimiters it must stop before.
pub struct Parser<'i: 't, 't> {
// Shared, mutable access to the tokenizer and token cache.
input: &'t mut ParserInput<'i>,
// If the last consumed token opened a block whose contents have not been
// consumed yet, the type of that block. The block is skipped or entered
// before the next token is returned.
at_start_of: Option<BlockType>,
// Delimiters at which `next()` reports `EndOfInput` without consuming,
// used by `parse_until_*` and nested-block parsing.
stop_before: Delimiters,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum BlockType {
Parenthesis,
SquareBracket,
CurlyBracket,
}
impl BlockType {
/// If `token` opens a block (a function token or an open bracket),
/// return the type of that block; `None` otherwise.
fn opening(token: &Token) -> Option<BlockType> {
match *token {
// A function token (`name(`) opens a parenthesis block, same as `(`.
Token::Function(_) |
Token::ParenthesisBlock => Some(BlockType::Parenthesis),
Token::SquareBracketBlock => Some(BlockType::SquareBracket),
Token::CurlyBracketBlock => Some(BlockType::CurlyBracket),
_ => None
}
}
/// If `token` closes a block (`)`, `]`, or `}`), return the type of the
/// block it closes; `None` otherwise.
fn closing(token: &Token) -> Option<BlockType> {
match *token {
Token::CloseParenthesis => Some(BlockType::Parenthesis),
Token::CloseSquareBracket => Some(BlockType::SquareBracket),
Token::CloseCurlyBracket => Some(BlockType::CurlyBracket),
_ => None
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Delimiters {
bits: u8,
}
#[allow(non_upper_case_globals, non_snake_case)]
pub mod Delimiter {
use super::Delimiters;
pub const None: Delimiters = Delimiters { bits: 0 };
pub const CurlyBracketBlock: Delimiters = Delimiters { bits: 1 << 1 };
pub const Semicolon: Delimiters = Delimiters { bits: 1 << 2 };
pub const Bang: Delimiters = Delimiters { bits: 1 << 3 };
pub const Comma: Delimiters = Delimiters { bits: 1 << 4 };
}
#[allow(non_upper_case_globals, non_snake_case)]
mod ClosingDelimiter {
use super::Delimiters;
pub const CloseCurlyBracket: Delimiters = Delimiters { bits: 1 << 5 };
pub const CloseSquareBracket: Delimiters = Delimiters { bits: 1 << 6 };
pub const CloseParenthesis: Delimiters = Delimiters { bits: 1 << 7 };
}
impl BitOr<Delimiters> for Delimiters {
    type Output = Delimiters;

    /// Union of two delimiter sets: bitwise OR of their masks.
    fn bitor(self, rhs: Delimiters) -> Delimiters {
        let combined = self.bits | rhs.bits;
        Delimiters { bits: combined }
    }
}
impl Delimiters {
    /// True when the two sets share at least one delimiter bit.
    /// Note this is set *intersection* being non-empty, not subset testing.
    fn contains(self, other: Delimiters) -> bool {
        self.bits & other.bits != 0
    }

    /// Map the next input byte to the delimiter set it represents.
    /// Any non-delimiter byte, or end of input (`None`), maps to the
    /// empty set `Delimiter::None`.
    fn from_byte(byte: Option<u8>) -> Delimiters {
        let b = match byte {
            Some(b) => b,
            None => return Delimiter::None,
        };
        match b {
            b';' => Delimiter::Semicolon,
            b'!' => Delimiter::Bang,
            b',' => Delimiter::Comma,
            b'{' => Delimiter::CurlyBracketBlock,
            b'}' => ClosingDelimiter::CloseCurlyBracket,
            b']' => ClosingDelimiter::CloseSquareBracket,
            b')' => ClosingDelimiter::CloseParenthesis,
            _ => Delimiter::None,
        }
    }
}
impl<'i: 't, 't> Parser<'i, 't> {
/// Create a new top-level parser over the given input, with no stop
/// delimiters and not positioned at the start of any block.
#[inline]
pub fn new(input: &'t mut ParserInput<'i>) -> Parser<'i, 't> {
Parser {
input: input,
at_start_of: None,
stop_before: Delimiter::None,
}
}
/// Return the line of the source text that the tokenizer is currently in.
pub fn current_line(&self) -> &'i str {
self.input.tokenizer.current_source_line()
}
/// Check whether the input is exhausted, i.e. `next()` would return
/// `Err(EndOfInput)`. Does not consume any token.
#[inline]
pub fn is_exhausted(&mut self) -> bool {
self.expect_exhausted().is_ok()
}
/// Like `is_exhausted`, but returns a `Result` so the `?` operator can be
/// used. On failure the error carries the unexpected token; in all cases
/// the parser position is restored before returning.
#[inline]
pub fn expect_exhausted(&mut self) -> Result<(), BasicParseError<'i>> {
let start_position = self.position();
let result = match self.next() {
Err(BasicParseError::EndOfInput) => Ok(()),
// `next()` only ever fails with `EndOfInput`, so any other error is a bug.
Err(e) => unreachable!("Unexpected error encountered: {:?}", e),
Ok(t) => Err(BasicParseError::UnexpectedToken(t.clone())),
};
self.reset(start_position);
result
}
/// Capture the current state (position plus pending-block flag) so it can
/// be restored later with `reset`.
#[inline]
pub fn position(&self) -> SourcePosition {
SourcePosition {
position: self.input.tokenizer.position(),
at_start_of: self.at_start_of,
}
}
/// Restore a state previously captured with `position`.
#[inline]
pub fn reset(&mut self, new_position: SourcePosition) {
self.input.tokenizer.reset(new_position.position);
self.at_start_of = new_position.at_start_of;
}
/// Ask the tokenizer to start watching for `var(` functions.
#[inline]
pub fn look_for_var_functions(&mut self) {
self.input.tokenizer.look_for_var_functions()
}
/// Whether the tokenizer has seen a `var(` function since
/// `look_for_var_functions` was called.
#[inline]
pub fn seen_var_functions(&mut self) -> bool {
self.input.tokenizer.seen_var_functions()
}
/// Ask the tokenizer to start watching for viewport-percentage units.
#[inline]
pub fn look_for_viewport_percentages(&mut self) {
self.input.tokenizer.look_for_viewport_percentages()
}
/// Whether the tokenizer has seen a viewport-percentage unit since
/// `look_for_viewport_percentages` was called.
#[inline]
pub fn seen_viewport_percentages(&mut self) -> bool {
self.input.tokenizer.seen_viewport_percentages()
}
/// Run `thing` speculatively: if it returns `Err`, rewind the parser to
/// where it was before the attempt. The result is returned either way.
#[inline]
pub fn try<F, T, E>(&mut self, thing: F) -> Result<T, E>
where F: FnOnce(&mut Parser<'i, 't>) -> Result<T, E> {
let start_position = self.position();
let result = thing(self);
if result.is_err() {
self.reset(start_position)
}
result
}
/// Return the source text between two previously captured positions.
#[inline]
pub fn slice(&self, range: Range<SourcePosition>) -> &'i str {
self.input.tokenizer.slice(range.start.position..range.end.position)
}
/// Return the source text from a previously captured position to the
/// current position.
#[inline]
pub fn slice_from(&self, start_position: SourcePosition) -> &'i str {
self.input.tokenizer.slice_from(start_position.position)
}
/// The line/column location corresponding to the current position.
#[inline]
pub fn current_source_location(&self) -> SourceLocation {
self.input.tokenizer.current_source_location()
}
/// The line/column location corresponding to a previously captured position.
#[inline]
pub fn source_location(&self, target: SourcePosition) -> SourceLocation {
self.input.tokenizer.source_location(target.position)
}
/// Return the next token, skipping whitespace and comments.
pub fn next(&mut self) -> Result<&Token<'i>, BasicParseError<'i>> {
loop {
match self.next_including_whitespace_and_comments() {
Err(e) => return Err(e),
Ok(&Token::WhiteSpace(_)) | Ok(&Token::Comment(_)) => {},
_ => break
}
}
// Re-borrow from the cache: the loop's borrow of `self` has ended.
Ok(self.input.cached_token_ref())
}
/// Return the next token, skipping comments but not whitespace.
pub fn next_including_whitespace(&mut self) -> Result<&Token<'i>, BasicParseError<'i>> {
loop {
match self.next_including_whitespace_and_comments() {
Err(e) => return Err(e),
Ok(&Token::Comment(_)) => {},
_ => break
}
}
Ok(self.input.cached_token_ref())
}
/// Return the next token verbatim, including whitespace and comments.
/// This is the core tokens-pulling routine: it skips any block the
/// previous token opened, honors `stop_before` delimiters, and uses the
/// one-entry token cache to avoid re-tokenizing after a rewind.
pub fn next_including_whitespace_and_comments(&mut self) -> Result<&Token<'i>, BasicParseError<'i>> {
// If the previous token opened a block that was never entered, skip
// over the whole block before producing the next token.
if let Some(block_type) = self.at_start_of.take() {
consume_until_end_of_block(block_type, &mut self.input.tokenizer);
}
// Peek one byte: if it is one of our stop delimiters, report end of
// input without consuming it.
let byte = self.input.tokenizer.next_byte();
if self.stop_before.contains(Delimiters::from_byte(byte)) {
return Err(BasicParseError::EndOfInput)
}
let token_start_position = self.input.tokenizer.position();
let token;
match self.input.cached_token {
// Cache hit: re-use the token and jump the tokenizer past it.
Some(ref cached_token) if cached_token.start_position == token_start_position => {
self.input.tokenizer.reset(cached_token.end_position);
// Replay tokenizer side effects that normal tokenization would
// have produced for these token kinds.
match cached_token.token {
Token::Dimension { ref unit, .. } => self.input.tokenizer.see_dimension(unit),
Token::Function(ref name) => self.input.tokenizer.see_function(name),
_ => {}
}
token = &cached_token.token
}
// Cache miss: tokenize and store the new token in the cache.
_ => {
let new_token = self.input.tokenizer.next().map_err(|()| BasicParseError::EndOfInput)?;
self.input.cached_token = Some(CachedToken {
token: new_token,
start_position: token_start_position,
end_position: self.input.tokenizer.position(),
});
token = self.input.cached_token_ref()
}
}
// Record that this token opens a block so the block is handled (skipped
// or entered) before the *next* token.
if let Some(block_type) = BlockType::opening(token) {
self.at_start_of = Some(block_type);
}
Ok(token)
}
/// Run `parse` and then require that the remaining input is exhausted.
#[inline]
pub fn parse_entirely<F, T, E>(&mut self, parse: F) -> Result<T, ParseError<'i, E>>
where F: FnOnce(&mut Parser<'i, 't>) -> Result<T, ParseError<'i, E>> {
let result = parse(self)?;
self.expect_exhausted()?;
Ok(result)
}
/// Parse a comma-separated list by calling `parse_one` for each item,
/// delimiting each item at the next comma. Stops (successfully) when the
/// input after an item is exhausted.
#[inline]
pub fn parse_comma_separated<F, T, E>(&mut self, mut parse_one: F) -> Result<Vec<T>, ParseError<'i, E>>
where F: for<'tt> FnMut(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>> {
let mut values = vec![];
loop {
values.push(self.parse_until_before(Delimiter::Comma, &mut parse_one)?);
match self.next() {
Err(_) => return Ok(values),
Ok(&Token::Comma) => continue,
// `parse_until_before(Comma, ..)` leaves us at a comma or the end.
Ok(_) => unreachable!(),
}
}
}
/// Parse the contents of the block that the just-consumed token opened.
/// See the free function `parse_nested_block` for details.
#[inline]
pub fn parse_nested_block<F, T, E>(&mut self, parse: F) -> Result <T, ParseError<'i, E>>
where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>> {
parse_nested_block(self, parse)
}
/// Run `parse` limited to the input before the next of the given
/// delimiters, leaving the delimiter itself unconsumed.
#[inline]
pub fn parse_until_before<F, T, E>(&mut self, delimiters: Delimiters, parse: F)
-> Result <T, ParseError<'i, E>>
where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>> {
parse_until_before(self, delimiters, parse)
}
/// Like `parse_until_before`, but also consumes the delimiter (and, for
/// `{`, the whole curly-bracket block it opens).
#[inline]
pub fn parse_until_after<F, T, E>(&mut self, delimiters: Delimiters, parse: F)
-> Result <T, ParseError<'i, E>>
where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>> {
parse_until_after(self, delimiters, parse)
}
/// Expect a whitespace token and return its text.
#[inline]
pub fn expect_whitespace(&mut self) -> Result<&'i str, BasicParseError<'i>> {
match *self.next_including_whitespace()? {
Token::WhiteSpace(value) => Ok(value),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect an identifier token and return a borrow of its value.
#[inline]
pub fn expect_ident(&mut self) -> Result<&CowRcStr<'i>, BasicParseError<'i>> {
match *self.next()? {
Token::Ident(ref value) => Ok(value),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect an identifier token and return an owned clone of its value.
#[inline]
pub fn expect_ident_cloned(&mut self) -> Result<CowRcStr<'i>, BasicParseError<'i>> {
self.expect_ident().map(|s| s.clone())
}
/// Expect an identifier equal to `expected_value`, ASCII case-insensitively.
#[inline]
pub fn expect_ident_matching(&mut self, expected_value: &str) -> Result<(), BasicParseError<'i>> {
match *self.next()? {
Token::Ident(ref value) if value.eq_ignore_ascii_case(expected_value) => Ok(()),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect a quoted-string token and return a borrow of its value.
#[inline]
pub fn expect_string(&mut self) -> Result<&CowRcStr<'i>, BasicParseError<'i>> {
match *self.next()? {
Token::QuotedString(ref value) => Ok(value),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect a quoted-string token and return an owned clone of its value.
#[inline]
pub fn expect_string_cloned(&mut self) -> Result<CowRcStr<'i>, BasicParseError<'i>> {
self.expect_string().map(|s| s.clone())
}
/// Expect either an identifier or a quoted string; return its value.
#[inline]
pub fn expect_ident_or_string(&mut self) -> Result<&CowRcStr<'i>, BasicParseError<'i>> {
match *self.next()? {
Token::Ident(ref value) => Ok(value),
Token::QuotedString(ref value) => Ok(value),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect an unquoted URL token, or a `url(` function containing a quoted
/// string; return the URL value.
#[inline]
pub fn expect_url(&mut self) -> Result<CowRcStr<'i>, BasicParseError<'i>> {
match *self.next()? {
Token::UnquotedUrl(ref value) => return Ok(value.clone()),
// `url(` with a quoted argument tokenizes as a function; fall
// through and parse the string inside the parentheses.
Token::Function(ref name) if name.eq_ignore_ascii_case("url") => {}
ref t => return Err(BasicParseError::UnexpectedToken(t.clone()))
}
self.parse_nested_block(|input| input.expect_string().map_err(ParseError::Basic).map(|s| s.clone()))
.map_err(ParseError::<()>::basic)
}
/// Like `expect_url`, but also accepts a bare quoted string.
#[inline]
pub fn expect_url_or_string(&mut self) -> Result<CowRcStr<'i>, BasicParseError<'i>> {
match *self.next()? {
Token::UnquotedUrl(ref value) => return Ok(value.clone()),
Token::QuotedString(ref value) => return Ok(value.clone()),
Token::Function(ref name) if name.eq_ignore_ascii_case("url") => {}
ref t => return Err(BasicParseError::UnexpectedToken(t.clone()))
}
self.parse_nested_block(|input| input.expect_string().map_err(ParseError::Basic).map(|s| s.clone()))
.map_err(ParseError::<()>::basic)
}
/// Expect a number token and return its value.
#[inline]
pub fn expect_number(&mut self) -> Result<f32, BasicParseError<'i>> {
match *self.next()? {
Token::Number { value, .. } => Ok(value),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect a number token that was written as an integer; return its value.
#[inline]
pub fn expect_integer(&mut self) -> Result<i32, BasicParseError<'i>> {
match *self.next()? {
Token::Number { int_value: Some(int_value), .. } => {
Ok(int_value)
}
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect a percentage token; return its value as a unit fraction
/// (the token's `unit_value` field).
#[inline]
pub fn expect_percentage(&mut self) -> Result<f32, BasicParseError<'i>> {
match *self.next()? {
Token::Percentage { unit_value, .. } => Ok(unit_value),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect a `:` token.
#[inline]
pub fn expect_colon(&mut self) -> Result<(), BasicParseError<'i>> {
match *self.next()? {
Token::Colon => Ok(()),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect a `;` token.
#[inline]
pub fn expect_semicolon(&mut self) -> Result<(), BasicParseError<'i>> {
match *self.next()? {
Token::Semicolon => Ok(()),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect a `,` token.
#[inline]
pub fn expect_comma(&mut self) -> Result<(), BasicParseError<'i>> {
match *self.next()? {
Token::Comma => Ok(()),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect a delim token with the given character value.
#[inline]
pub fn expect_delim(&mut self, expected_value: char) -> Result<(), BasicParseError<'i>> {
match *self.next()? {
Token::Delim(value) if value == expected_value => Ok(()),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect a `{` token; `parse_nested_block` may then be called.
#[inline]
pub fn expect_curly_bracket_block(&mut self) -> Result<(), BasicParseError<'i>> {
match *self.next()? {
Token::CurlyBracketBlock => Ok(()),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect a `[` token; `parse_nested_block` may then be called.
#[inline]
pub fn expect_square_bracket_block(&mut self) -> Result<(), BasicParseError<'i>> {
match *self.next()? {
Token::SquareBracketBlock => Ok(()),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect a `(` token; `parse_nested_block` may then be called.
#[inline]
pub fn expect_parenthesis_block(&mut self) -> Result<(), BasicParseError<'i>> {
match *self.next()? {
Token::ParenthesisBlock => Ok(()),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect a function token and return its name;
/// `parse_nested_block` may then be called.
#[inline]
pub fn expect_function(&mut self) -> Result<&CowRcStr<'i>, BasicParseError<'i>> {
match *self.next()? {
Token::Function(ref name) => Ok(name),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Expect a function token named `expected_name`, ASCII case-insensitively.
#[inline]
pub fn expect_function_matching(&mut self, expected_name: &str) -> Result<(), BasicParseError<'i>> {
match *self.next()? {
Token::Function(ref name) if name.eq_ignore_ascii_case(expected_name) => Ok(()),
ref t => Err(BasicParseError::UnexpectedToken(t.clone()))
}
}
/// Consume the rest of the input (recursing into blocks) and fail on the
/// first error-marker token (bad string/URL, unmatched closer, etc.,
/// as reported by `Token::is_parse_error`).
#[inline]
pub fn expect_no_error_token(&mut self) -> Result<(), BasicParseError<'i>> {
loop {
match self.next_including_whitespace_and_comments() {
// Block-opening tokens: fall through to recurse into the block.
Ok(&Token::Function(_)) |
Ok(&Token::ParenthesisBlock) |
Ok(&Token::SquareBracketBlock) |
Ok(&Token::CurlyBracketBlock) => {}
Ok(token) => {
if token.is_parse_error() {
return Err(BasicParseError::UnexpectedToken(token.clone()))
}
continue
}
// End of input: no error token found.
Err(_) => return Ok(())
}
// Recurse into the block that was just opened.
let result = self.parse_nested_block(|input| input.expect_no_error_token()
.map_err(|e| ParseError::Basic(e)));
result.map_err(ParseError::<()>::basic)?
}
}
}
/// Run `parse` on a child parser that stops before the first of the given
/// `delimiters` (in addition to the parent's own stop set), then skip the
/// remaining input up to — but not including — that delimiter.
pub fn parse_until_before<'i: 't, 't, F, T, E>(parser: &mut Parser<'i, 't>,
delimiters: Delimiters,
parse: F)
-> Result <T, ParseError<'i, E>>
where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>> {
// The child must also stop at anything the parent stops at.
let delimiters = parser.stop_before | delimiters;
let result;
// Limit the borrow of `parser.input` to this scope.
{
let mut delimited_parser = Parser {
input: parser.input,
// Transfer a pending just-opened block to the child parser.
at_start_of: parser.at_start_of.take(),
stop_before: delimiters,
};
result = delimited_parser.parse_entirely(parse);
// If the child stopped right after opening a block, skip that block.
if let Some(block_type) = delimited_parser.at_start_of {
consume_until_end_of_block(block_type, &mut delimited_parser.input.tokenizer);
}
}
// Regardless of success, consume leftover input up to the delimiter,
// skipping over any complete blocks encountered on the way.
loop {
if delimiters.contains(Delimiters::from_byte((parser.input.tokenizer).next_byte())) {
break
}
if let Ok(token) = (parser.input.tokenizer).next() {
if let Some(block_type) = BlockType::opening(&token) {
consume_until_end_of_block(block_type, &mut parser.input.tokenizer);
}
} else {
break
}
}
result
}
/// Like `parse_until_before`, but additionally consume the delimiter itself.
/// If the delimiter is `{`, the entire curly-bracket block it opens is
/// consumed as well.
pub fn parse_until_after<'i: 't, 't, F, T, E>(parser: &mut Parser<'i, 't>,
delimiters: Delimiters,
parse: F)
-> Result <T, ParseError<'i, E>>
where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>> {
let result = parser.parse_until_before(delimiters, parse);
let next_byte = (parser.input.tokenizer).next_byte();
// Only consume the delimiter if it is not also one of the *parent's*
// stop delimiters (which the parent still needs to see).
if next_byte.is_some() && !parser.stop_before.contains(Delimiters::from_byte(next_byte)) {
debug_assert!(delimiters.contains(Delimiters::from_byte(next_byte)));
(parser.input.tokenizer).advance(1);
if next_byte == Some(b'{') {
consume_until_end_of_block(BlockType::CurlyBracket, &mut parser.input.tokenizer);
}
}
result
}
/// Run `parse` on a child parser limited to the contents of the block that
/// the just-consumed token opened, then consume through the block's closing
/// bracket. Panics if the previous token did not open a block.
pub fn parse_nested_block<'i: 't, 't, F, T, E>(parser: &mut Parser<'i, 't>, parse: F)
-> Result <T, ParseError<'i, E>>
where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>> {
let block_type = parser.at_start_of.take().expect("\
A nested parser can only be created when a Function, \
ParenthesisBlock, SquareBracketBlock, or CurlyBracketBlock \
token was just consumed.\
");
// The child parser stops before the bracket that closes this block.
let closing_delimiter = match block_type {
BlockType::CurlyBracket => ClosingDelimiter::CloseCurlyBracket,
BlockType::SquareBracket => ClosingDelimiter::CloseSquareBracket,
BlockType::Parenthesis => ClosingDelimiter::CloseParenthesis,
};
let result;
// Limit the borrow of `parser.input` to this scope.
{
let mut nested_parser = Parser {
input: parser.input,
at_start_of: None,
stop_before: closing_delimiter,
};
result = nested_parser.parse_entirely(parse);
// If the child stopped right after opening an inner block, skip it.
if let Some(block_type) = nested_parser.at_start_of {
consume_until_end_of_block(block_type, &mut nested_parser.input.tokenizer);
}
}
// Consume any remaining content of the block, including its closing bracket.
consume_until_end_of_block(block_type, &mut parser.input.tokenizer);
result
}
/// Advance the tokenizer past the end of the block of the given type,
/// honoring nested blocks. Returns at end of input if the block is
/// never closed (unclosed blocks end implicitly, per CSS error recovery).
fn consume_until_end_of_block(block_type: BlockType, tokenizer: &mut Tokenizer) {
    // Nesting stack: the block we are inside, plus any opened along the way.
    let mut open_blocks = vec![block_type];
    loop {
        let token = match tokenizer.next() {
            Ok(token) => token,
            Err(_) => return,
        };
        if let Some(closed) = BlockType::closing(&token) {
            // Only a closer matching the innermost open block pops the stack;
            // mismatched closing brackets are ignored.
            if *open_blocks.last().unwrap() == closed {
                open_blocks.pop();
                if open_blocks.is_empty() {
                    return;
                }
            }
        }
        if let Some(opened) = BlockType::opening(&token) {
            open_blocks.push(opened);
        }
    }
}