diff --git a/crates/swc_css_parser/src/lexer/mod.rs b/crates/swc_css_parser/src/lexer/mod.rs
index 7574eed97711..36ab034c43ec 100644
--- a/crates/swc_css_parser/src/lexer/mod.rs
+++ b/crates/swc_css_parser/src/lexer/mod.rs
@@ -140,7 +140,7 @@ where
 {
     type State = LexerState;
 
-    fn start_pos(&mut self) -> swc_common::BytePos {
+    fn start_pos(&mut self) -> BytePos {
         self.input.cur_pos()
     }
 
diff --git a/crates/swc_css_parser/src/lib.rs b/crates/swc_css_parser/src/lib.rs
index 0bf15bbb0621..e6ac6451e75d 100644
--- a/crates/swc_css_parser/src/lib.rs
+++ b/crates/swc_css_parser/src/lib.rs
@@ -5,13 +5,13 @@
 #![allow(clippy::nonminimal_bool)]
 #![allow(clippy::wrong_self_convention)]
 
-use swc_common::{input::StringInput, BytePos, SourceFile};
+use swc_common::{input::StringInput, SourceFile};
 
 use crate::{
     error::Error,
     lexer::Lexer,
     parser::{
-        input::{Tokens, TokensInput},
+        input::{Input, InputType},
         PResult, Parser, ParserConfig,
     },
 };
@@ -37,28 +37,6 @@ where
     }
 }
 
-/// Parse a given string as `T`.
-///
-/// If there are syntax errors but if it was recoverable, it will be appended
-/// to `errors`.
-pub fn parse_str<'a, T>(
-    src: &'a str,
-    start_pos: BytePos,
-    end_pos: BytePos,
-    config: ParserConfig,
-    errors: &mut Vec<Error>,
-) -> PResult<T>
-where
-    Parser<Lexer<StringInput<'a>>>: Parse<T>,
-{
-    let lexer = Lexer::new(StringInput::new(src, start_pos, end_pos), config);
-    let mut parser = Parser::new(lexer, config);
-
-    let res = parser.parse();
-    errors.extend(parser.take_errors());
-    res
-}
-
 /// Parse a given file as `T`.
 ///
 /// If there are syntax errors but if it was recoverable, it will be appended
@@ -75,7 +53,9 @@ where
     let mut parser = Parser::new(lexer, config);
 
     let res = parser.parse();
+
     errors.extend(parser.take_errors());
+
     res
 }
 
@@ -83,18 +63,20 @@
 ///
 /// If there are syntax errors but if it was recoverable, it will be appended
 /// to `errors`.
-pub fn parse_tokens<'a, T>(
-    tokens: &'a Tokens,
+pub fn parse_input<'a, T>(
+    input: InputType<'a>,
     config: ParserConfig,
     errors: &mut Vec<Error>,
 ) -> PResult<T>
 where
-    Parser<TokensInput<'a>>: Parse<T>,
+    Parser<Input<'a>>: Parse<T>,
 {
-    let lexer = TokensInput::new(tokens);
+    let lexer = Input::new(input);
     let mut parser = Parser::new(lexer, config);
 
     let res = parser.parse();
+
     errors.extend(parser.take_errors());
+
     res
 }
diff --git a/crates/swc_css_parser/src/parser/input.rs b/crates/swc_css_parser/src/parser/input.rs
index 15c84e1a2f16..d338990b434a 100644
--- a/crates/swc_css_parser/src/parser/input.rs
+++ b/crates/swc_css_parser/src/parser/input.rs
@@ -50,7 +50,6 @@ where
 
     pub fn last_pos(&mut self) -> BytePos {
         self.cur();
-
         self.last_pos
     }
 
@@ -148,7 +147,6 @@ where
                 span,
             }) => {
                 self.last_pos = span.hi;
-
                 self.cur = None;
             }
             Some(..) => return,
@@ -183,117 +181,42 @@ pub(super) struct WrappedState<S> {
     inner: S,
 }
 
-#[derive(Debug)]
-pub struct TokensState {
-    idx: usize,
-}
-
 #[derive(Debug, Clone)]
-pub struct TokensInput<'a> {
-    tokens: &'a Tokens,
-    idx: usize,
-}
-
-#[derive(Debug)]
-pub struct Tokens {
-    pub span: Span,
-    pub tokens: Vec<TokenAndSpan>,
-}
-
-impl<'a> TokensInput<'a> {
-    pub fn new(tokens: &'a Tokens) -> Self {
-        TokensInput { tokens, idx: 0 }
-    }
-
-    fn cur(&mut self) -> PResult<&TokenAndSpan> {
-        let token_and_span = match self.tokens.tokens.get(self.idx) {
-            Some(v) => v,
-            None => {
-                let bp = self.tokens.span.hi;
-                let span = Span::new(bp, bp, SyntaxContext::empty());
-
-                return Err(Error::new(span, ErrorKind::Eof));
-            }
-        };
-
-        Ok(token_and_span)
-    }
-}
-
-impl<'a> ParserInput for TokensInput<'a> {
-    type State = TokensState;
-
-    fn start_pos(&mut self) -> BytePos {
-        self.tokens.span.lo
-    }
-
-    fn state(&mut self) -> Self::State {
-        TokensState { idx: self.idx }
-    }
-
-    fn reset(&mut self, state: &Self::State) {
-        self.idx = state.idx;
-    }
-
-    fn take_errors(&mut self) -> Vec<Error> {
-        vec![]
-    }
-
-    fn skip_ws(&mut self) -> Option<BytePos> {
-        let mut last_pos = None;
+#[allow(clippy::enum_variant_names)]
+enum BalanceToken {
+    /// `]`
+    RBracket,
 
-        while let Ok(TokenAndSpan {
-            token: tok!(" "),
-            span,
-        }) = self.cur()
-        {
-            last_pos = Some(span.hi);
-            self.idx += 1;
-        }
+    /// `)`
+    RParen,
 
-        last_pos
-    }
+    /// `}`
+    RBrace,
 }
 
-impl<'a> Iterator for TokensInput<'a> {
-    type Item = TokenAndSpan;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        let token_and_span = match self.cur() {
-            Ok(token_and_span) => token_and_span.clone(),
-            _ => return None,
-        };
-
-        self.idx += 1;
-
-        Some(token_and_span)
-    }
+#[derive(Debug)]
+pub struct State {
+    idx: Vec<usize>,
+    balance_stack: Option<Vec<BalanceToken>>,
 }
 
 #[derive(Debug)]
-pub struct ListOfComponentValuesState {
-    idx: Vec<usize>,
-    balance_stack: Vec<BalanceToken>,
+pub struct Tokens {
+    pub span: Span,
+    pub tokens: Vec<TokenAndSpan>,
 }
 
 #[derive(Debug, Clone)]
-pub struct ListOfComponentValuesInput<'a> {
-    list: &'a ListOfComponentValues,
+pub struct Input<'a> {
+    input: InputType<'a>,
     idx: Vec<usize>,
-    balance_stack: Vec<BalanceToken>,
+    balance_stack: Option<Vec<BalanceToken>>,
 }
 
 #[derive(Debug, Clone)]
-#[allow(clippy::enum_variant_names)]
-enum BalanceToken {
-    /// `]`
-    RBracket,
-
-    /// `)`
-    RParen,
-
-    /// `}`
-    RBrace,
+pub enum InputType<'a> {
+    Tokens(&'a Tokens),
+    ListOfComponentValues(&'a ListOfComponentValues),
 }
 
 #[derive(Debug)]
@@ -308,16 +231,27 @@ pub enum TokenOrBlock {
     RBrace(Span),
 }
 
-impl<'a> ListOfComponentValuesInput<'a> {
-    pub fn new(list: &'a ListOfComponentValues) -> Self {
-        let mut idx = Vec::with_capacity(16);
+impl<'a> Input<'a> {
+    pub fn new(input: InputType<'a>) -> Self {
+        let idx = match input {
+            InputType::Tokens(_) => vec![0],
+            InputType::ListOfComponentValues(_) => {
+                let mut idx = Vec::with_capacity(16);
+
+                idx.push(0);
 
-        idx.push(0);
+                idx
+            }
+        };
+        let balance_stack = match input {
+            InputType::Tokens(_) => None,
+            InputType::ListOfComponentValues(_) => Some(Vec::with_capacity(16)),
+        };
 
-        ListOfComponentValuesInput {
-            list,
+        Input {
+            input,
             idx,
-            balance_stack: Vec::with_capacity(16),
+            balance_stack,
         }
     }
 
@@ -407,59 +341,89 @@ impl<'a> ListOfComponentValuesInput<'a> {
     }
 
     fn cur(&mut self) -> PResult<TokenAndSpan> {
-        let token_and_span = match self.get_component_value(&self.list.children, 0) {
-            Some(token_or_block) => match token_or_block {
-                TokenOrBlock::Token(token_and_span) => token_and_span,
-                TokenOrBlock::Function(span, value, raw) => TokenAndSpan {
-                    span,
-                    token: Token::Function { value, raw },
-                },
-                TokenOrBlock::LBracket(span) => TokenAndSpan {
-                    span,
-                    token: Token::LBracket,
-                },
-                TokenOrBlock::LBrace(span) => TokenAndSpan {
-                    span,
-                    token: Token::LBrace,
-                },
-                TokenOrBlock::LParen(span) => TokenAndSpan {
-                    span,
-                    token: Token::LParen,
-                },
-                TokenOrBlock::RBracket(span) => TokenAndSpan {
-                    span,
-                    token: Token::RBracket,
-                },
-                TokenOrBlock::RBrace(span) => TokenAndSpan {
-                    span,
-                    token: Token::RBrace,
-                },
-                TokenOrBlock::RParen(span) => TokenAndSpan {
-                    span,
-                    token: Token::RParen,
-                },
-            },
-            None => {
-                let bp = self.list.span.hi;
-                let span = Span::new(bp, bp, SyntaxContext::empty());
+        match self.input {
+            InputType::Tokens(input) => {
+                let idx = match self.idx.last() {
+                    Some(idx) => idx,
+                    _ => {
+                        let bp = input.span.hi;
+                        let span = Span::new(bp, bp, SyntaxContext::empty());
 
-                return Err(Error::new(span, ErrorKind::Eof));
+                        return Err(Error::new(span, ErrorKind::Eof));
+                    }
+                };
+
+                let token_and_span = match input.tokens.get(*idx) {
+                    Some(token_and_span) => token_and_span.clone(),
+                    None => {
+                        let bp = input.span.hi;
+                        let span = Span::new(bp, bp, SyntaxContext::empty());
+
+                        return Err(Error::new(span, ErrorKind::Eof));
+                    }
+                };
+
+                Ok(token_and_span)
             }
-        };
+            InputType::ListOfComponentValues(input) => {
+                let token_and_span = match self.get_component_value(&input.children, 0) {
+                    Some(token_or_block) => match token_or_block {
+                        TokenOrBlock::Token(token_and_span) => token_and_span,
+                        TokenOrBlock::Function(span, value, raw) => TokenAndSpan {
+                            span,
+                            token: Token::Function { value, raw },
+                        },
+                        TokenOrBlock::LBracket(span) => TokenAndSpan {
+                            span,
+                            token: Token::LBracket,
+                        },
+                        TokenOrBlock::LBrace(span) => TokenAndSpan {
+                            span,
+                            token: Token::LBrace,
+                        },
+                        TokenOrBlock::LParen(span) => TokenAndSpan {
+                            span,
+                            token: Token::LParen,
+                        },
+                        TokenOrBlock::RBracket(span) => TokenAndSpan {
+                            span,
+                            token: Token::RBracket,
+                        },
+                        TokenOrBlock::RBrace(span) => TokenAndSpan {
+                            span,
+                            token: Token::RBrace,
+                        },
+                        TokenOrBlock::RParen(span) => TokenAndSpan {
+                            span,
+                            token: Token::RParen,
+                        },
+                    },
+                    None => {
+                        let bp = input.span.hi;
+                        let span = Span::new(bp, bp, SyntaxContext::empty());
+
+                        return Err(Error::new(span, ErrorKind::Eof));
+                    }
+                };
 
-        Ok(token_and_span)
+                Ok(token_and_span)
+            }
+        }
     }
 }
 
-impl<'a> ParserInput for ListOfComponentValuesInput<'a> {
-    type State = ListOfComponentValuesState;
+impl<'a> ParserInput for Input<'a> {
+    type State = State;
 
     fn start_pos(&mut self) -> BytePos {
-        self.list.span.lo
+        match self.input {
+            InputType::Tokens(input) => input.span.lo,
+            InputType::ListOfComponentValues(input) => input.span.lo,
+        }
     }
 
     fn state(&mut self) -> Self::State {
-        ListOfComponentValuesState {
+        State {
             idx: self.idx.clone(),
             balance_stack: self.balance_stack.clone(),
         }
@@ -493,7 +457,7 @@ impl<'a> ParserInput for ListOfComponentValuesInput<'a> {
     }
 }
 
-impl<'a> Iterator for ListOfComponentValuesInput<'a> {
+impl<'a> Iterator for Input<'a> {
     type Item = TokenAndSpan;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -502,46 +466,53 @@ impl<'a> Iterator for ListOfComponentValuesInput<'a> {
             _ => return None,
         };
 
-        match &token_and_span.token {
-            Token::Function { .. } | Token::LParen | Token::LBracket | Token::LBrace => {
-                self.idx.push(0);
-
-                let balance = match &token_and_span.token {
-                    Token::Function { .. } | Token::LParen => BalanceToken::RParen,
-                    Token::LBracket => BalanceToken::RBracket,
-                    Token::LBrace => BalanceToken::RBrace,
-                    _ => {
-                        unreachable!();
-                    }
-                };
-
-                self.balance_stack.push(balance);
+        match self.input {
+            InputType::Tokens(_) => {
+                if let Some(idx) = self.idx.last_mut() {
+                    *idx += 1;
+                }
             }
-            token => {
-                match token {
-                    Token::RBrace | Token::RBracket | Token::RParen => {
-                        if let Some(last) = self.balance_stack.last() {
-                            match (token, last) {
-                                (Token::RBrace, BalanceToken::RBrace)
-                                | (Token::RParen, BalanceToken::RParen)
-                                | (Token::RBracket, BalanceToken::RBracket) => {
-                                    self.balance_stack.pop();
-                                    self.idx.pop();
+            InputType::ListOfComponentValues(_) => match &token_and_span.token {
+                Token::Function { .. } | Token::LParen | Token::LBracket | Token::LBrace => {
+                    self.idx.push(0);
+
+                    let balance = match &token_and_span.token {
+                        Token::Function { .. } | Token::LParen => BalanceToken::RParen,
+                        Token::LBracket => BalanceToken::RBracket,
+                        Token::LBrace => BalanceToken::RBrace,
+                        _ => {
+                            unreachable!();
+                        }
+                    };
+
+                    self.balance_stack.as_mut().unwrap().push(balance);
+                }
+                token => {
+                    match token {
+                        Token::RBrace | Token::RBracket | Token::RParen => {
+                            if let Some(last) = self.balance_stack.as_ref().unwrap().last() {
+                                match (token, last) {
+                                    (Token::RBrace, BalanceToken::RBrace)
+                                    | (Token::RParen, BalanceToken::RParen)
+                                    | (Token::RBracket, BalanceToken::RBracket) => {
+                                        self.balance_stack.as_mut().unwrap().pop();
+                                        self.idx.pop();
+                                    }
+                                    _ => {}
                                 }
-                                _ => {}
                             }
                         }
+                        _ => {}
                     }
-                    _ => {}
-                }
 
-                let index = match self.idx.last_mut() {
-                    Some(index) => index,
-                    _ => return None,
-                };
+                    let index = match self.idx.last_mut() {
+                        Some(index) => index,
+                        _ => return None,
+                    };
 
-                *index += 1;
-            }
+                    *index += 1;
+                }
+            },
         }
 
         Some(token_and_span)
diff --git a/crates/swc_css_parser/src/parser/util.rs b/crates/swc_css_parser/src/parser/util.rs
index c852ca336fd0..df13b41702a8 100644
--- a/crates/swc_css_parser/src/parser/util.rs
+++ b/crates/swc_css_parser/src/parser/util.rs
@@ -3,8 +3,10 @@ use std::ops::{Deref, DerefMut};
 use swc_common::{Span, Spanned, SyntaxContext, DUMMY_SP};
 use swc_css_ast::*;
 
-use super::{input::ParserInput, Ctx, PResult, Parse, Parser};
-use crate::parser::input::ListOfComponentValuesInput;
+use super::{
+    input::{Input, InputType, ParserInput},
+    Ctx, PResult, Parse, Parser,
+};
 
 impl<I> Parser<I>
 where
@@ -60,9 +62,9 @@ where
     pub(super) fn parse_according_to_grammar<T>(
         &mut self,
         list_of_component_values: &ListOfComponentValues,
-        op: impl FnOnce(&mut Parser<ListOfComponentValuesInput>) -> PResult<T>,
+        op: impl FnOnce(&mut Parser<Input>) -> PResult<T>,
     ) -> PResult<T> {
-        let lexer = ListOfComponentValuesInput::new(list_of_component_values);
+        let lexer = Input::new(InputType::ListOfComponentValues(list_of_component_values));
         let mut parser = Parser::new(lexer, self.config);
         let res = op(&mut parser.with_ctx(self.ctx));
 
diff --git a/crates/swc_css_parser/tests/fixture.rs b/crates/swc_css_parser/tests/fixture.rs
index dfcf7c6b921f..31c89cb22b75 100644
--- a/crates/swc_css_parser/tests/fixture.rs
+++ b/crates/swc_css_parser/tests/fixture.rs
@@ -7,9 +7,9 @@ use swc_common::{errors::Handler, input::SourceFileInput, Span, Spanned};
 use swc_css_ast::*;
 use swc_css_parser::{
     lexer::Lexer,
-    parse_tokens,
+    parse_input,
     parser::{
-        input::{ParserInput, Tokens},
+        input::{InputType, ParserInput, Tokens},
         PResult, Parser, ParserConfig,
     },
 };
@@ -79,7 +79,8 @@ fn stylesheet_test_tokens(input: PathBuf, config: ParserConfig) {
             }
         };
 
-        let stylesheet: PResult<Stylesheet> = parse_tokens(&tokens, config, &mut errors);
+        let stylesheet: PResult<Stylesheet> =
+            parse_input(InputType::Tokens(&tokens), config, &mut errors);
 
         for err in &errors {
             err.to_diagnostics(&handler).emit();
@@ -204,7 +205,8 @@ fn stylesheet_recovery_test_tokens(input: PathBuf, config: ParserConfig) {
 
         let mut parser_errors = vec![];
 
-        let stylesheet: PResult<Stylesheet> = parse_tokens(&tokens, config, &mut parser_errors);
+        let stylesheet: PResult<Stylesheet> =
+            parse_input(InputType::Tokens(&tokens), config, &mut parser_errors);
 
         parser_errors.extend(lexer_errors);
         parser_errors.sort_by(|a, b| a.message().cmp(&b.message()));
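
For reference, a minimal usage sketch of the unified `parse_input` entry point introduced by this patch, mirroring the fixture tests above. The helper function name and its signature are illustrative assumptions, not part of the patch; only `parse_input`, `InputType`, `Tokens`, `PResult`, and `ParserConfig` come from the changed crate.

use swc_css_ast::Stylesheet;
use swc_css_parser::{
    parse_input,
    parser::{
        input::{InputType, Tokens},
        PResult, ParserConfig,
    },
};

// Hypothetical helper (not in this patch): `tokens` is assumed to have been
// produced by lexing a source file, exactly as the fixture tests do before
// calling `parse_input`.
fn parse_stylesheet_from_tokens(tokens: &Tokens, config: ParserConfig) -> PResult<Stylesheet> {
    let mut errors = vec![];

    // The unified API takes an `InputType` instead of a bare `&Tokens`;
    // `InputType::ListOfComponentValues(..)` is the other accepted variant.
    let stylesheet: PResult<Stylesheet> =
        parse_input(InputType::Tokens(tokens), config, &mut errors);

    // Recoverable errors have been appended to `errors` and can be reported
    // here, e.g. via `err.to_diagnostics(&handler).emit()` as in the fixtures.
    stylesheet
}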