use std::ops::Range;
use std::ascii::AsciiExt;
use std::ops::BitOr;
use std::borrow::Cow;
use std::ops;
use tokenizer::{self, Token, NumericValue, PercentageValue, Tokenizer, SourceLocation};
/// A capture of the internal state of a `Parser` (including the position within
/// the input), obtained from the `Parser::position` method.
///
/// Can be used with the `Parser::reset` method to restore that state,
/// enabling backtracking.
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub struct SourcePosition {
    // Byte-level position inside the tokenizer's input.
    position: tokenizer::SourcePosition,
    // Whether the parser was just past an unconsumed block-opening token
    // when this position was captured; restored verbatim by `reset`.
    at_start_of: Option<BlockType>,
}
/// Like `Cow<'a, T>`, except the borrowed variant is a *mutable* reference.
///
/// Lets a nested `Parser` reuse its parent's `Tokenizer` by mutable borrow,
/// while a top-level `Parser` owns its tokenizer on the heap.
enum MaybeOwned<'a, T: 'a> {
    Owned(Box<T>),
    Borrowed(&'a mut T),
}
impl<'a, T> ops::Deref for MaybeOwned<'a, T> {
    type Target = T;

    /// Shared access to the wrapped value, regardless of which variant holds it.
    fn deref<'b>(&'b self) -> &'b T {
        match self {
            &MaybeOwned::Owned(ref boxed) => &**boxed,
            &MaybeOwned::Borrowed(ref borrowed) => &**borrowed,
        }
    }
}
impl<'a, T> ops::DerefMut for MaybeOwned<'a, T> {
    /// Exclusive access to the wrapped value, regardless of which variant holds it.
    fn deref_mut<'b>(&'b mut self) -> &'b mut T {
        match self {
            &mut MaybeOwned::Owned(ref mut boxed) => &mut **boxed,
            &mut MaybeOwned::Borrowed(ref mut borrowed) => &mut **borrowed,
        }
    }
}
impl<'a, T> Clone for MaybeOwned<'a, T> where T: Clone {
    /// Cloning always yields an `Owned` value holding a deep copy of the target,
    /// since a `Borrowed(&mut T)` cannot be duplicated.
    fn clone(&self) -> MaybeOwned<'a, T> {
        let copy: T = Clone::clone(&**self);
        MaybeOwned::Owned(Box::new(copy))
    }
}
/// A CSS parser that borrows its `&str` input, yields `Token`s,
/// and keeps track of nested blocks and functions.
///
/// `'i` is the lifetime of the input string; `'t` is the lifetime of a
/// (possibly borrowed) tokenizer, with `'i: 't` since the tokenizer
/// itself borrows the input.
#[derive(Clone)]
pub struct Parser<'i: 't, 't> {
    tokenizer: MaybeOwned<'t, Tokenizer<'i>>,
    // If `Some(_)`, the last token returned was a block-opening token whose
    // block has not been consumed yet; `next*()` skips that whole block first.
    at_start_of: Option<BlockType>,
    // Delimiters at which `next*()` stops and returns `Err(())` without
    // consuming the delimiter byte.
    stop_before: Delimiters,
}
/// The kind of bracketed block (or function argument list) currently being
/// tracked, used to match opening tokens with their closing counterparts.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum BlockType {
    // `(` — also used for `function(` argument lists.
    Parenthesis,
    // `[`
    SquareBracket,
    // `{`
    CurlyBracket,
}
impl BlockType {
    /// If `token` opens a block (or a function argument list),
    /// return the corresponding block type; otherwise `None`.
    fn opening(token: &Token) -> Option<BlockType> {
        match token {
            &Token::ParenthesisBlock | &Token::Function(_) => Some(BlockType::Parenthesis),
            &Token::SquareBracketBlock => Some(BlockType::SquareBracket),
            &Token::CurlyBracketBlock => Some(BlockType::CurlyBracket),
            _ => None,
        }
    }

    /// If `token` closes a block, return the block type it closes;
    /// otherwise `None`.
    fn closing(token: &Token) -> Option<BlockType> {
        match token {
            &Token::CloseParenthesis => Some(BlockType::Parenthesis),
            &Token::CloseSquareBracket => Some(BlockType::SquareBracket),
            &Token::CloseCurlyBracket => Some(BlockType::CurlyBracket),
            _ => None,
        }
    }
}
/// A set of characters (stored as a bit set), for the `Parser::parse_until*`
/// methods. Combine values with the `|` operator.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Delimiters {
    // One bit per delimiter; see the `Delimiter` and `ClosingDelimiter` modules.
    bits: u8,
}
/// Public delimiter constants for `Parser::parse_until_before` /
/// `parse_until_after`. A module of consts is used (rather than an enum)
/// so that values can be OR-ed together into a `Delimiters` set.
#[allow(non_upper_case_globals, non_snake_case)]
pub mod Delimiter {
    use super::Delimiters;
    /// The empty set: no delimiter.
    pub const None: Delimiters = Delimiters { bits: 0 };
    /// The delimiter `{`.
    // NOTE(review): bit 0 is never assigned — presumably reserved; kept as-is.
    pub const CurlyBracketBlock: Delimiters = Delimiters { bits: 1 << 1 };
    /// The delimiter `;`.
    pub const Semicolon: Delimiters = Delimiters { bits: 1 << 2 };
    /// The delimiter `!`.
    pub const Bang: Delimiters = Delimiters { bits: 1 << 3 };
    /// The delimiter `,`.
    pub const Comma: Delimiters = Delimiters { bits: 1 << 4 };
}
/// Private closing-delimiter constants, used internally by
/// `Parser::parse_nested_block` to stop at the end of the current block.
/// Bits are disjoint from those in the public `Delimiter` module.
#[allow(non_upper_case_globals, non_snake_case)]
mod ClosingDelimiter {
    use super::Delimiters;
    /// The delimiter `}`.
    pub const CloseCurlyBracket: Delimiters = Delimiters { bits: 1 << 5 };
    /// The delimiter `]`.
    pub const CloseSquareBracket: Delimiters = Delimiters { bits: 1 << 6 };
    /// The delimiter `)`.
    pub const CloseParenthesis: Delimiters = Delimiters { bits: 1 << 7 };
}
impl BitOr<Delimiters> for Delimiters {
    type Output = Delimiters;

    /// Union of two delimiter sets.
    fn bitor(self, other: Delimiters) -> Delimiters {
        let combined = self.bits | other.bits;
        Delimiters { bits: combined }
    }
}
impl Delimiters {
    /// Whether the two sets share at least one delimiter bit.
    fn contains(self, other: Delimiters) -> bool {
        self.bits & other.bits != 0
    }

    /// Classify a peeked input byte as a (possibly empty) delimiter set.
    /// `None` (end of input) and non-delimiter bytes map to the empty set.
    fn from_byte(byte: Option<u8>) -> Delimiters {
        if let Some(b) = byte {
            match b {
                b';' => return Delimiter::Semicolon,
                b'!' => return Delimiter::Bang,
                b',' => return Delimiter::Comma,
                b'{' => return Delimiter::CurlyBracketBlock,
                b'}' => return ClosingDelimiter::CloseCurlyBracket,
                b']' => return ClosingDelimiter::CloseSquareBracket,
                b')' => return ClosingDelimiter::CloseParenthesis,
                _ => {}
            }
        }
        Delimiter::None
    }
}
impl<'i, 't> Parser<'i, 't> {
    /// Create a new parser over the given CSS `input`.
    #[inline]
    pub fn new(input: &'i str) -> Parser<'i, 'i> {
        Parser {
            tokenizer: MaybeOwned::Owned(Box::new(Tokenizer::new(input))),
            at_start_of: None,
            stop_before: Delimiter::None,
        }
    }

    /// Check whether the input is exhausted, i.e. `.next()` would return `Err(())`.
    ///
    /// This ignores whitespace and comments.
    #[inline]
    pub fn is_exhausted(&mut self) -> bool {
        self.expect_exhausted().is_ok()
    }

    /// Check whether the input is exhausted, returning `Ok(())` if it is,
    /// so that the `try!` macro can be used: `try!(input.expect_exhausted())`.
    ///
    /// This ignores whitespace and comments.
    #[inline]
    pub fn expect_exhausted(&mut self) -> Result<(), ()> {
        // Probe for a next token, then rewind so this check is side-effect free.
        let start_position = self.position();
        let result = match self.next() {
            Err(()) => Ok(()),
            Ok(_) => Err(()),
        };
        self.reset(start_position);
        result
    }

    /// Return the current internal state of the parser (including the position
    /// within the input).
    ///
    /// Can be used with the `Parser::reset` method to restore that state later.
    #[inline]
    pub fn position(&self) -> SourcePosition {
        SourcePosition {
            position: self.tokenizer.position(),
            at_start_of: self.at_start_of,
        }
    }

    /// Restore the internal state of the parser (including the position within
    /// the input) to what was previously saved by the `Parser::position` method.
    ///
    /// Should only be used with `SourcePosition` values from the same input.
    #[inline]
    pub fn reset(&mut self, new_position: SourcePosition) {
        self.tokenizer.reset(new_position.position);
        self.at_start_of = new_position.at_start_of;
    }

    /// Ask the tokenizer to start watching for `var(` functions.
    /// (See the `seen_var_functions` method.)
    #[inline]
    pub fn look_for_var_functions(&mut self) {
        self.tokenizer.look_for_var_functions()
    }

    /// Return whether the tokenizer has seen a `var(` function since
    /// `look_for_var_functions` was called.
    #[inline]
    pub fn seen_var_functions(&mut self) -> bool {
        self.tokenizer.seen_var_functions()
    }

    /// Execute the given closure, passing it the parser.
    /// If the result (which is returned unchanged) is `Err`,
    /// the internal parser state (including the position within the input)
    /// is restored to what it was before the call — i.e. backtracking.
    #[inline]
    pub fn try<F, T, E>(&mut self, thing: F) -> Result<T, E>
    where F: FnOnce(&mut Parser<'i, 't>) -> Result<T, E> {
        let start_position = self.position();
        let result = thing(self);
        if result.is_err() {
            self.reset(start_position)
        }
        result
    }

    /// Return a slice of the CSS input spanning the given range of positions.
    #[inline]
    pub fn slice(&self, range: Range<SourcePosition>) -> &'i str {
        self.tokenizer.slice(range.start.position..range.end.position)
    }

    /// Return a slice of the CSS input, from the given position to the current one.
    #[inline]
    pub fn slice_from(&self, start_position: SourcePosition) -> &'i str {
        self.tokenizer.slice_from(start_position.position)
    }

    /// Return the line and column number of the current position in the input.
    #[inline]
    pub fn current_source_location(&self) -> SourceLocation {
        self.tokenizer.current_source_location()
    }

    /// Return the line and column number of the given previously-saved position.
    #[inline]
    pub fn source_location(&self, target: SourcePosition) -> SourceLocation {
        self.tokenizer.source_location(target.position)
    }

    /// Return the next token in the input, skipping whitespace and comments.
    ///
    /// Returns `Err(())` at the end of the input, or when a `stop_before`
    /// delimiter is reached (the delimiter is not consumed).
    pub fn next(&mut self) -> Result<Token<'i>, ()> {
        loop {
            match self.next_including_whitespace_and_comments() {
                Ok(Token::WhiteSpace(_)) | Ok(Token::Comment(_)) => {},
                result => return result
            }
        }
    }

    /// Same as `Parser::next`, but does not skip whitespace tokens.
    pub fn next_including_whitespace(&mut self) -> Result<Token<'i>, ()> {
        loop {
            match self.next_including_whitespace_and_comments() {
                Ok(Token::Comment(_)) => {},
                result => return result
            }
        }
    }

    /// Lowest-level `next`: skips neither whitespace nor comments.
    ///
    /// First skips over any unconsumed block from a previously-returned
    /// block-opening token, then stops (with `Err(())`) before any
    /// `stop_before` delimiter.
    pub fn next_including_whitespace_and_comments(&mut self) -> Result<Token<'i>, ()> {
        if let Some(block_type) = self.at_start_of.take() {
            consume_until_end_of_block(block_type, &mut *self.tokenizer);
        }
        if self.stop_before.contains(Delimiters::from_byte(self.tokenizer.next_byte())) {
            return Err(())
        }
        let token = try!(self.tokenizer.next());
        if let Some(block_type) = BlockType::opening(&token) {
            // Remember the unconsumed block so the next call skips it.
            self.at_start_of = Some(block_type);
        }
        Ok(token)
    }

    /// Have the given closure parse something, then check that the input is
    /// exhausted; return `Err(())` (and the remaining input is unspecified)
    /// if any token is left over.
    #[inline]
    pub fn parse_entirely<F, T>(&mut self, parse: F) -> Result<T, ()>
    where F: FnOnce(&mut Parser) -> Result<T, ()> {
        let result = parse(self);
        try!(self.expect_exhausted());
        result
    }

    /// Parse a list of comma-separated values, all with the same syntax.
    ///
    /// The given closure is called repeatedly with a "delimited" parser
    /// (see `Parser::parse_until_before`) so that it can over-consume the
    /// input past a comma at this block/function nesting level.
    ///
    /// Successful results are accumulated in a `Vec`; any `Err(())` from the
    /// closure aborts the entire list.
    #[inline]
    pub fn parse_comma_separated<F, T>(&mut self, mut parse_one: F) -> Result<Vec<T>, ()>
    where F: FnMut(&mut Parser) -> Result<T, ()> {
        let mut values = vec![];
        loop {
            values.push(try!(self.parse_until_before(Delimiter::Comma, |parser| parse_one(parser))));
            match self.next() {
                Err(()) => return Ok(values),
                Ok(Token::Comma) => continue,
                // parse_until_before stops only at Comma or end of input here.
                Ok(_) => unreachable!(),
            }
        }
    }

    /// Parse the content of a block or function.
    ///
    /// The closure is given a new `Parser` limited to the content of the block
    /// or function (in which case the opening token is not repeated), and
    /// wrapped in `Parser::parse_entirely`.
    ///
    /// The result is overridden to `Err(())` if the closure leaves some input
    /// before the end of the block or function.
    ///
    /// # Panics
    ///
    /// Panics if the last token yielded by this parser was not a `Function`,
    /// `ParenthesisBlock`, `SquareBracketBlock`, or `CurlyBracketBlock` token.
    #[inline]
    pub fn parse_nested_block<F, T>(&mut self, parse: F) -> Result<T, ()>
    where F: FnOnce(&mut Parser) -> Result<T, ()> {
        let block_type = self.at_start_of.take().expect("\
            A nested parser can only be created when a Function, \
            ParenthesisBlock, SquareBracketBlock, or CurlyBracketBlock \
            token was just consumed.\
        ");
        let closing_delimiter = match block_type {
            BlockType::CurlyBracket => ClosingDelimiter::CloseCurlyBracket,
            BlockType::SquareBracket => ClosingDelimiter::CloseSquareBracket,
            BlockType::Parenthesis => ClosingDelimiter::CloseParenthesis,
        };
        let result;
        // Scope the nested parser so its borrow of the tokenizer ends before
        // we consume the rest of the block below.
        {
            let mut nested_parser = Parser {
                tokenizer: MaybeOwned::Borrowed(&mut *self.tokenizer),
                at_start_of: None,
                stop_before: closing_delimiter,
            };
            result = nested_parser.parse_entirely(parse);
            // If the closure left an unconsumed inner block, skip it so the
            // cleanup below starts from a consistent position.
            if let Some(block_type) = nested_parser.at_start_of {
                consume_until_end_of_block(block_type, &mut *nested_parser.tokenizer);
            }
        }
        // Skip anything the closure did not consume, up to and including the
        // block's closing token.
        consume_until_end_of_block(block_type, &mut *self.tokenizer);
        result
    }

    /// Limit parsing to until a given delimiter. (E.g. a semicolon for a
    /// property value.)
    ///
    /// The closure is given a new `Parser` that stops before the first
    /// character at this block/function nesting level that matches the given
    /// set of delimiters, and wrapped in `Parser::parse_entirely`.
    ///
    /// The result is overridden to `Err(())` if the closure leaves some input
    /// before that point.
    #[inline]
    pub fn parse_until_before<F, T>(&mut self, delimiters: Delimiters, parse: F)
                                    -> Result<T, ()>
    where F: FnOnce(&mut Parser) -> Result<T, ()> {
        let delimiters = self.stop_before | delimiters;
        let result;
        // Scope the delimited parser so its borrow of the tokenizer ends
        // before the skip-to-delimiter loop below.
        {
            let mut delimited_parser = Parser {
                tokenizer: MaybeOwned::Borrowed(&mut *self.tokenizer),
                at_start_of: self.at_start_of.take(),
                stop_before: delimiters,
            };
            result = delimited_parser.parse_entirely(parse);
            if let Some(block_type) = delimited_parser.at_start_of {
                consume_until_end_of_block(block_type, &mut *delimited_parser.tokenizer);
            }
        }
        // Skip whatever remains before the delimiter (e.g. after an Err from
        // the closure), stepping over any complete nested blocks whole so that
        // delimiters inside them are not matched.
        loop {
            if delimiters.contains(Delimiters::from_byte(self.tokenizer.next_byte())) {
                break
            }
            if let Ok(token) = self.tokenizer.next() {
                if let Some(block_type) = BlockType::opening(&token) {
                    consume_until_end_of_block(block_type, &mut *self.tokenizer);
                }
            } else {
                break
            }
        }
        result
    }

    /// Like `parse_until_before`, but also consume the delimiter token itself.
    ///
    /// This can be useful when you don't need to know which delimiter it was
    /// (e.g. if these is only one in the given set) or if it was there at all
    /// (as opposed to reaching the end of the input).
    #[inline]
    pub fn parse_until_after<F, T>(&mut self, delimiters: Delimiters, parse: F)
                                   -> Result<T, ()>
    where F: FnOnce(&mut Parser) -> Result<T, ()> {
        let result = self.parse_until_before(delimiters, parse);
        let next_byte = self.tokenizer.next_byte();
        // Only consume the byte if it is one of *our* delimiters; a byte that
        // belongs to the enclosing parser's `stop_before` set must stay put.
        if next_byte.is_some() && !self.stop_before.contains(Delimiters::from_byte(next_byte)) {
            debug_assert!(delimiters.contains(Delimiters::from_byte(next_byte)));
            self.tokenizer.advance(1);
        }
        result
    }

    /// Parse a <whitespace-token> and return its value.
    #[inline]
    pub fn expect_whitespace(&mut self) -> Result<&'i str, ()> {
        match try!(self.next_including_whitespace()) {
            Token::WhiteSpace(value) => Ok(value),
            _ => Err(())
        }
    }

    /// Parse an <ident-token> and return its value.
    #[inline]
    pub fn expect_ident(&mut self) -> Result<Cow<'i, str>, ()> {
        match try!(self.next()) {
            Token::Ident(value) => Ok(value),
            _ => Err(())
        }
    }

    /// Parse an <ident-token> whose value is an ASCII-case-insensitive match
    /// for the given value.
    // Fix: removed the unused lifetime parameter `<'a>` that was declared but
    // never referenced in the signature.
    #[inline]
    pub fn expect_ident_matching(&mut self, expected_value: &str) -> Result<(), ()> {
        match try!(self.next()) {
            Token::Ident(ref value) if value.eq_ignore_ascii_case(expected_value) => Ok(()),
            _ => Err(())
        }
    }

    /// Parse a <string-token> and return its unescaped value.
    #[inline]
    pub fn expect_string(&mut self) -> Result<Cow<'i, str>, ()> {
        match try!(self.next()) {
            Token::QuotedString(value) => Ok(value),
            _ => Err(())
        }
    }

    /// Parse either an <ident-token> or a <string-token>, and return its value.
    #[inline]
    pub fn expect_ident_or_string(&mut self) -> Result<Cow<'i, str>, ()> {
        match try!(self.next()) {
            Token::Ident(value) => Ok(value),
            Token::QuotedString(value) => Ok(value),
            _ => Err(())
        }
    }

    /// Parse a <url-token> and return its unescaped value.
    #[inline]
    pub fn expect_url(&mut self) -> Result<Cow<'i, str>, ()> {
        match try!(self.next()) {
            Token::Url(value) => Ok(value),
            _ => Err(())
        }
    }

    /// Parse either a <url-token> or a <string-token>, and return its value.
    #[inline]
    pub fn expect_url_or_string(&mut self) -> Result<Cow<'i, str>, ()> {
        match try!(self.next()) {
            Token::Url(value) => Ok(value),
            Token::QuotedString(value) => Ok(value),
            _ => Err(())
        }
    }

    /// Parse a <number-token> and return its numeric value.
    #[inline]
    pub fn expect_number(&mut self) -> Result<f32, ()> {
        match try!(self.next()) {
            Token::Number(NumericValue { value, .. }) => Ok(value),
            _ => Err(())
        }
    }

    /// Parse a <number-token> that was written as an integer, and return it.
    #[inline]
    pub fn expect_integer(&mut self) -> Result<i32, ()> {
        match try!(self.next()) {
            // `int_value` is None for non-integer numbers; propagate as Err.
            Token::Number(NumericValue { int_value, .. }) => int_value.ok_or(()),
            _ => Err(())
        }
    }

    /// Parse a <percentage-token> and return its value as a unit value
    /// (e.g. `50%` gives `0.5`).
    #[inline]
    pub fn expect_percentage(&mut self) -> Result<f32, ()> {
        match try!(self.next()) {
            Token::Percentage(PercentageValue { unit_value, .. }) => Ok(unit_value),
            _ => Err(())
        }
    }

    /// Parse a `:` <colon-token>.
    #[inline]
    pub fn expect_colon(&mut self) -> Result<(), ()> {
        match try!(self.next()) {
            Token::Colon => Ok(()),
            _ => Err(())
        }
    }

    /// Parse a `;` <semicolon-token>.
    #[inline]
    pub fn expect_semicolon(&mut self) -> Result<(), ()> {
        match try!(self.next()) {
            Token::Semicolon => Ok(()),
            _ => Err(())
        }
    }

    /// Parse a `,` <comma-token>.
    #[inline]
    pub fn expect_comma(&mut self) -> Result<(), ()> {
        match try!(self.next()) {
            Token::Comma => Ok(()),
            _ => Err(())
        }
    }

    /// Parse a <delim-token> with the given value.
    #[inline]
    pub fn expect_delim(&mut self, expected_value: char) -> Result<(), ()> {
        match try!(self.next()) {
            Token::Delim(value) if value == expected_value => Ok(()),
            _ => Err(())
        }
    }

    /// Parse a `{ /* ... */ }` curly-brackets block.
    ///
    /// If the result is `Ok`, you can then call `Parser::parse_nested_block`
    /// to parse the block's contents.
    #[inline]
    pub fn expect_curly_bracket_block(&mut self) -> Result<(), ()> {
        match try!(self.next()) {
            Token::CurlyBracketBlock => Ok(()),
            _ => Err(())
        }
    }

    /// Parse a `[ /* ... */ ]` square-brackets block.
    ///
    /// If the result is `Ok`, you can then call `Parser::parse_nested_block`
    /// to parse the block's contents.
    #[inline]
    pub fn expect_square_bracket_block(&mut self) -> Result<(), ()> {
        match try!(self.next()) {
            Token::SquareBracketBlock => Ok(()),
            _ => Err(())
        }
    }

    /// Parse a `( /* ... */ )` parenthesis block.
    ///
    /// If the result is `Ok`, you can then call `Parser::parse_nested_block`
    /// to parse the block's contents.
    #[inline]
    pub fn expect_parenthesis_block(&mut self) -> Result<(), ()> {
        match try!(self.next()) {
            Token::ParenthesisBlock => Ok(()),
            _ => Err(())
        }
    }

    /// Parse a <function-token> and return its name.
    ///
    /// If the result is `Ok`, you can then call `Parser::parse_nested_block`
    /// to parse the function's arguments.
    #[inline]
    pub fn expect_function(&mut self) -> Result<Cow<'i, str>, ()> {
        match try!(self.next()) {
            Token::Function(name) => Ok(name),
            _ => Err(())
        }
    }

    /// Parse a <function-token> whose name is an ASCII-case-insensitive match
    /// for the given value.
    ///
    /// If the result is `Ok`, you can then call `Parser::parse_nested_block`
    /// to parse the function's arguments.
    #[inline]
    pub fn expect_function_matching(&mut self, expected_name: &str) -> Result<(), ()> {
        match try!(self.next()) {
            Token::Function(ref name) if name.eq_ignore_ascii_case(expected_name) => Ok(()),
            _ => Err(())
        }
    }

    /// Parse the remainder of the input, recursing into nested blocks, and
    /// return `Err(())` if any parse-error token (e.g. bad string, bad URL,
    /// or unmatched closing token) is encountered.
    #[inline]
    pub fn expect_no_error_token(&mut self) -> Result<(), ()> {
        loop {
            match self.next_including_whitespace_and_comments() {
                Ok(Token::Function(_)) | Ok(Token::ParenthesisBlock) |
                Ok(Token::SquareBracketBlock) | Ok(Token::CurlyBracketBlock) => {
                    // Recurse into the block to check its contents too.
                    try!(self.parse_nested_block(|input| input.expect_no_error_token()))
                }
                Ok(token) => {
                    if token.is_parse_error() {
                        return Err(())
                    }
                }
                Err(()) => return Ok(())
            }
        }
    }
}
/// Advance the tokenizer until (and including) the token that closes a block
/// of the given type, recursing into any nested blocks opened along the way.
/// Returns silently at end of input if the block is unclosed.
fn consume_until_end_of_block(block_type: BlockType, tokenizer: &mut Tokenizer) {
    loop {
        let token = match tokenizer.next() {
            Ok(token) => token,
            // End of input: the block was never closed; nothing more to skip.
            Err(()) => return,
        };
        if BlockType::closing(&token) == Some(block_type) {
            return
        }
        // A nested block opens here: skip it entirely before continuing,
        // so its closing token is not mistaken for ours.
        if let Some(inner) = BlockType::opening(&token) {
            consume_until_end_of_block(inner, tokenizer);
        }
    }
}