Reduced boilerplate code in the parser

Razican committed Nov 6, 2022
1 parent 8e14d76 commit 4e78079
Showing 49 changed files with 351 additions and 493 deletions.
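The hunks below repeatedly swap `Result<T, ParseError>` for a `ParseResult<T>` alias and replace the old `?.ok_or(ParseError::AbruptEnd)?` chains with a single `.or_abrupt()?` call. Neither definition is included in this excerpt, so the following is only a minimal, self-contained sketch of what such helpers could look like, inferred from how they are used in the diff; the names `ParseResult` and `OrAbrupt` come from the commit, while the stripped-down `ParseError` and the blanket impl are assumptions made so the example compiles on its own.

#[derive(Debug, PartialEq)]
enum ParseError {
    // Boa's real error type has more variants; only the one used here is sketched.
    AbruptEnd,
}

// Shorthand for the return type that was spelled out at every parser call site.
type ParseResult<T> = Result<T, ParseError>;

trait OrAbrupt<T> {
    // Convert `Ok(None)` ("ran out of tokens") into `Err(ParseError::AbruptEnd)`.
    fn or_abrupt(self) -> ParseResult<T>;
}

impl<T> OrAbrupt<T> for ParseResult<Option<T>> {
    fn or_abrupt(self) -> ParseResult<T> {
        self?.ok_or(ParseError::AbruptEnd)
    }
}

fn main() {
    // A peek that ran out of input becomes an `AbruptEnd` error...
    let exhausted: ParseResult<Option<u32>> = Ok(None);
    assert_eq!(exhausted.or_abrupt(), Err(ParseError::AbruptEnd));

    // ...while a peek that found a token passes the token through.
    let found: ParseResult<Option<u32>> = Ok(Some(7));
    assert_eq!(found.or_abrupt(), Ok(7));
}

Defining `or_abrupt()` on `ParseResult<Option<T>>` rather than on `Option<T>` lets a call site collapse its two `?` steps (propagate the lexer error, then convert the missing token) into one.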
14 changes: 7 additions & 7 deletions boa_engine/src/syntax/parser/cursor/buffered_lexer/mod.rs
@@ -1,6 +1,6 @@
use crate::syntax::{
lexer::{InputElement, Lexer, Token, TokenKind},
parser::error::ParseError,
parser::{error::ParseError, ParseResult},
};
use boa_ast::Position;
use boa_interner::Interner;
@@ -83,7 +83,7 @@ where
&mut self,
start: Position,
interner: &mut Interner,
) -> Result<Token, ParseError> {
) -> ParseResult<Token> {
let _timer = Profiler::global().start_event("cursor::lex_regex()", "Parsing");
self.set_goal(InputElement::RegExp);
self.lexer
@@ -97,7 +97,7 @@ where
&mut self,
start: Position,
interner: &mut Interner,
) -> Result<Token, ParseError> {
) -> ParseResult<Token> {
self.lexer
.lex_template(start, interner)
.map_err(ParseError::from)
@@ -116,7 +116,7 @@ where
/// Fills the peeking buffer with the next token.
///
/// It will not fill two line terminators one after the other.
fn fill(&mut self, interner: &mut Interner) -> Result<(), ParseError> {
fn fill(&mut self, interner: &mut Interner) -> ParseResult<()> {
debug_assert!(
self.write_index < PEEK_BUF_SIZE,
"write index went out of bounds"
@@ -166,12 +166,12 @@ where
///
/// This follows iterator semantics in that a `peek(0, false)` followed by a `next(false)` will
/// return the same value. Note that because a `peek(n, false)` may return a line terminator a
// subsequent `next(true)` may not return the same value.
/// subsequent `next(true)` may not return the same value.
pub(super) fn next(
&mut self,
skip_line_terminators: bool,
interner: &mut Interner,
) -> Result<Option<Token>, ParseError> {
) -> ParseResult<Option<Token>> {
if self.read_index == self.write_index {
self.fill(interner)?;
}
Expand Down Expand Up @@ -217,7 +217,7 @@ where
skip_n: usize,
skip_line_terminators: bool,
interner: &mut Interner,
) -> Result<Option<&Token>, ParseError> {
) -> ParseResult<Option<&Token>> {
assert!(
skip_n <= MAX_PEEK_SKIP,
"you cannot skip more than {} elements",
73 changes: 47 additions & 26 deletions boa_engine/src/syntax/parser/cursor/mod.rs
@@ -1,7 +1,7 @@
//! Cursor implementation for the parser.
mod buffered_lexer;

use super::{statement::PrivateElement, ParseError};
use super::{statement::PrivateElement, OrAbrupt, ParseError, ParseResult};
use crate::syntax::lexer::{InputElement, Lexer, Token, TokenKind};
use boa_ast::{Position, Punctuator};
use boa_interner::{Interner, Sym};
@@ -54,7 +54,7 @@ where
&mut self,
start: Position,
interner: &mut Interner,
) -> Result<Token, ParseError> {
) -> ParseResult<Token> {
self.buffered_lexer.lex_regex(start, interner)
}

@@ -63,35 +63,55 @@ where
&mut self,
start: Position,
interner: &mut Interner,
) -> Result<Token, ParseError> {
) -> ParseResult<Token> {
self.buffered_lexer.lex_template(start, interner)
}

/// Advances the cursor and returns the next token.
#[inline]
pub(super) fn next(&mut self, interner: &mut Interner) -> Result<Option<Token>, ParseError> {
pub(super) fn next(&mut self, interner: &mut Interner) -> ParseResult<Option<Token>> {
self.buffered_lexer.next(true, interner)
}

/// Advances the cursor without returning the next token.
///
/// # Panics
///
/// This function will panic if there is no further token in the cursor.
#[inline]
#[track_caller]
#[allow(clippy::let_underscore_drop)]
pub(super) fn advance(&mut self, interner: &mut Interner) {
let _ = self
.next(interner)
.expect("tried to advance cursor, but the buffer was empty");
}

/// Peeks a future token, without consuming it or advancing the cursor.
///
/// You can skip some tokens with the `skip_n` option.
#[inline]
pub(super) fn peek(
&mut self,
skip_n: usize,
interner: &mut Interner,
) -> Result<Option<&Token>, ParseError> {
) -> ParseResult<Option<&Token>> {
self.buffered_lexer.peek(skip_n, true, interner)
}

/// Gets the current strict mode for the cursor.
#[inline]
pub(super) fn strict_mode(&self) -> bool {
self.buffered_lexer.strict_mode()
}

/// Sets the strict mode to strict or non-strict.
#[inline]
pub(super) fn set_strict_mode(&mut self, strict_mode: bool) {
self.buffered_lexer.set_strict_mode(strict_mode);
}

/// Returns if the cursor is currently in a arrow function declaration.
/// Returns if the cursor is currently in an arrow function declaration.
#[inline]
pub(super) fn arrow(&self) -> bool {
self.arrow
@@ -116,7 +136,7 @@ where
&mut self,
identifier: Sym,
position: Position,
) -> Result<(), ParseError> {
) -> ParseResult<()> {
if let Some(env) = self.private_environments_stack.last_mut() {
env.entry(identifier).or_insert(position);
Ok(())
@@ -136,7 +156,7 @@ where
pub(super) fn pop_private_environment(
&mut self,
identifiers: &FxHashMap<Sym, PrivateElement>,
) -> Result<(), ParseError> {
) -> ParseResult<()> {
let last = self
.private_environments_stack
.pop()
@@ -163,11 +183,11 @@ where
kind: K,
context: &'static str,
interner: &mut Interner,
) -> Result<Token, ParseError>
) -> ParseResult<Token>
where
K: Into<TokenKind>,
{
let next_token = self.next(interner)?.ok_or(ParseError::AbruptEnd)?;
let next_token = self.next(interner).or_abrupt()?;
let kind = kind.into();

if next_token.kind() == &kind {
@@ -191,7 +211,7 @@ where
pub(super) fn peek_semicolon(
&mut self,
interner: &mut Interner,
) -> Result<SemicolonResult<'_>, ParseError> {
) -> ParseResult<SemicolonResult<'_>> {
match self.buffered_lexer.peek(0, false, interner)? {
Some(tk) => match tk.kind() {
TokenKind::Punctuator(Punctuator::Semicolon | Punctuator::CloseBlock)
@@ -212,7 +232,7 @@ where
&mut self,
context: &'static str,
interner: &mut Interner,
) -> Result<(), ParseError> {
) -> ParseResult<()> {
match self.peek_semicolon(interner)? {
SemicolonResult::Found(Some(tk)) => match *tk.kind() {
TokenKind::Punctuator(Punctuator::Semicolon) | TokenKind::LineTerminator => {
@@ -243,19 +263,20 @@ where
skip_n: usize,
context: &'static str,
interner: &mut Interner,
) -> Result<&Token, ParseError> {
if let Some(t) = self.buffered_lexer.peek(skip_n, false, interner)? {
if t.kind() == &TokenKind::LineTerminator {
Err(ParseError::unexpected(
t.to_string(interner),
t.span(),
context,
))
} else {
Ok(t)
}
) -> ParseResult<&Token> {
let tok = self
.buffered_lexer
.peek(skip_n, false, interner)
.or_abrupt()?;

if tok.kind() == &TokenKind::LineTerminator {
Err(ParseError::unexpected(
tok.to_string(interner),
tok.span(),
context,
))
} else {
Err(ParseError::AbruptEnd)
Ok(tok)
}
}

@@ -265,7 +286,7 @@ where
&mut self,
skip_n: usize,
interner: &mut Interner,
) -> Result<Option<bool>, ParseError> {
) -> ParseResult<Option<bool>> {
if let Some(t) = self.buffered_lexer.peek(skip_n, false, interner)? {
Ok(Some(t.kind() == &TokenKind::LineTerminator))
} else {
@@ -283,7 +304,7 @@ where
&mut self,
kind: K,
interner: &mut Interner,
) -> Result<Option<Token>, ParseError>
) -> ParseResult<Option<Token>>
where
K: Into<TokenKind>,
{
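The new `advance` method above centralizes the "consume and discard" pattern: call sites that previously wrote `cursor.next(interner)?.expect("... vanished")` (see the conditional and exponentiation hunks further down) now make one call that neither returns the token nor forces the caller to unwrap it. Below is a small sketch of the idea, using hypothetical stand-in types rather than Boa's actual `Cursor`.

// Hypothetical stand-ins; Boa's real `Cursor` wraps a buffered lexer and takes
// an `Interner`, both omitted here so the sketch stands on its own.
#[derive(Debug)]
struct Token;

#[allow(dead_code)]
#[derive(Debug)]
enum ParseError {
    AbruptEnd,
}

type ParseResult<T> = Result<T, ParseError>;

struct Cursor {
    tokens: Vec<Token>,
}

impl Cursor {
    fn next(&mut self) -> ParseResult<Option<Token>> {
        Ok(self.tokens.pop())
    }

    // Advances without returning the token; if `next` ever returned an error,
    // the `expect` would panic at the caller's location thanks to `#[track_caller]`.
    #[track_caller]
    fn advance(&mut self) {
        let _ = self
            .next()
            .expect("tried to advance cursor, but the buffer was empty");
    }
}

fn main() {
    let mut cursor = Cursor { tokens: vec![Token] };
    cursor.advance(); // consume the token with nothing to unwrap at the call site
    assert!(matches!(cursor.next(), Ok(None)));
}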
4 changes: 3 additions & 1 deletion boa_engine/src/syntax/parser/error.rs
@@ -12,13 +12,15 @@ pub(crate) trait ErrorContext {
fn context(self, context: &'static str) -> Self;
}

impl<T> ErrorContext for Result<T, ParseError> {
impl<T> ErrorContext for ParseResult<T> {
#[inline]
fn context(self, context: &'static str) -> Self {
self.map_err(|e| e.context(context))
}
}

impl From<LexError> for ParseError {
#[inline]
fn from(e: LexError) -> Self {
Self::lex(e)
}
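Because `ParseResult<T>` is only an alias for `Result<T, ParseError>`, the impl above is behaviorally identical to the old one; it just reads shorter. For illustration, here is a self-contained sketch of what such a blanket impl buys callers, using a simplified, hypothetical `ParseError` (a struct with a context field) rather than Boa's real error type.

#[derive(Debug, PartialEq)]
struct ParseError {
    message: &'static str,
    context: Option<&'static str>,
}

impl ParseError {
    // Attach (or overwrite) the context label on an existing error.
    fn context(mut self, context: &'static str) -> Self {
        self.context = Some(context);
        self
    }
}

type ParseResult<T> = Result<T, ParseError>;

trait ErrorContext {
    fn context(self, context: &'static str) -> Self;
}

// One blanket impl over the alias covers every parser return type.
impl<T> ErrorContext for ParseResult<T> {
    fn context(self, context: &'static str) -> Self {
        self.map_err(|e| e.context(context))
    }
}

fn main() {
    let failed: ParseResult<u32> = Err(ParseError {
        message: "unexpected token",
        context: None,
    });
    let annotated = failed.context("arrow function");
    assert_eq!(
        annotated,
        Err(ParseError {
            message: "unexpected token",
            context: Some("arrow function"),
        })
    );
}

The remaining hunks shown here apply the same `ParseResult`, `or_abrupt()`, and `advance()` rewrites inside individual expression parsers: arrow functions, async arrow functions, conditional expressions, and exponentiation.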
(arrow function parser; file path not captured in this excerpt)
@@ -14,7 +14,8 @@ use crate::syntax::{
error::{ErrorContext, ParseError, ParseResult},
expression::BindingIdentifier,
function::{FormalParameters, FunctionBody},
name_in_lexically_declared_names, AllowAwait, AllowIn, AllowYield, Cursor, TokenParser,
name_in_lexically_declared_names, AllowAwait, AllowIn, AllowYield, Cursor, OrAbrupt,
TokenParser,
},
};
use ast::operations::{bound_names, top_level_lexically_declared_names};
@@ -78,7 +79,7 @@ where

fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
let _timer = Profiler::global().start_event("ArrowFunction", "Parsing");
let next_token = cursor.peek(0, interner)?.ok_or(ParseError::AbruptEnd)?;
let next_token = cursor.peek(0, interner).or_abrupt()?;

let (params, params_start_position) = if let TokenKind::Punctuator(Punctuator::OpenParen) =
&next_token.kind()
@@ -191,13 +192,9 @@ where
type Output = StatementList;

fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
match cursor
.peek(0, interner)?
.ok_or(ParseError::AbruptEnd)?
.kind()
{
match cursor.peek(0, interner).or_abrupt()?.kind() {
TokenKind::Punctuator(Punctuator::OpenBlock) => {
let _next = cursor.next(interner)?;
cursor.advance(interner);
let body = FunctionBody::new(false, false).parse(cursor, interner)?;
cursor.expect(Punctuator::CloseBlock, "arrow function", interner)?;
Ok(body)
(async arrow function parser; file path not captured in this excerpt)
@@ -14,7 +14,7 @@ use crate::syntax::{
error::{ErrorContext, ParseError, ParseResult},
expression::BindingIdentifier,
function::{FormalParameters, FunctionBody},
name_in_lexically_declared_names, AllowIn, AllowYield, Cursor, TokenParser,
name_in_lexically_declared_names, AllowIn, AllowYield, Cursor, OrAbrupt, TokenParser,
},
};
use ast::{
@@ -76,7 +76,7 @@ where
cursor.expect((Keyword::Async, false), "async arrow function", interner)?;
cursor.peek_expect_no_lineterminator(0, "async arrow function", interner)?;

let next_token = cursor.peek(0, interner)?.ok_or(ParseError::AbruptEnd)?;
let next_token = cursor.peek(0, interner).or_abrupt()?;
let (params, params_start_position) = if let TokenKind::Punctuator(Punctuator::OpenParen) =
&next_token.kind()
{
@@ -180,13 +180,9 @@ where
type Output = StatementList;

fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
match cursor
.peek(0, interner)?
.ok_or(ParseError::AbruptEnd)?
.kind()
{
match cursor.peek(0, interner).or_abrupt()?.kind() {
TokenKind::Punctuator(Punctuator::OpenBlock) => {
cursor.next(interner)?;
cursor.advance(interner);
let body = FunctionBody::new(false, true).parse(cursor, interner)?;
cursor.expect(Punctuator::CloseBlock, "async arrow function", interner)?;
Ok(body)
(conditional expression parser; file path not captured in this excerpt)
@@ -79,7 +79,7 @@ where

if let Some(tok) = cursor.peek(0, interner)? {
if tok.kind() == &TokenKind::Punctuator(Punctuator::Question) {
cursor.next(interner)?.expect("? character vanished"); // Consume the token.
cursor.advance(interner);
let then_clause =
AssignmentExpression::new(None, true, self.allow_yield, self.allow_await)
.parse(cursor, interner)?;
(exponentiation expression parser; file path not captured in this excerpt)
@@ -7,12 +7,11 @@
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Arithmetic_Operators#Exponentiation
//! [spec]: https://tc39.es/ecma262/#sec-exp-operator
use super::ParseError;
use crate::syntax::{
lexer::TokenKind,
parser::{
expression::{unary::UnaryExpression, update::UpdateExpression},
AllowAwait, AllowYield, Cursor, ParseResult, TokenParser,
AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
},
};
use boa_ast::{
@@ -70,7 +69,7 @@ where
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
let _timer = Profiler::global().start_event("ExponentiationExpression", "Parsing");

let next = cursor.peek(0, interner)?.ok_or(ParseError::AbruptEnd)?;
let next = cursor.peek(0, interner).or_abrupt()?;
match next.kind() {
TokenKind::Keyword((Keyword::Delete | Keyword::Void | Keyword::TypeOf, _))
| TokenKind::Punctuator(
@@ -90,7 +89,7 @@ where
.parse(cursor, interner)?;
if let Some(tok) = cursor.peek(0, interner)? {
if let TokenKind::Punctuator(Punctuator::Exp) = tok.kind() {
cursor.next(interner)?.expect("** token vanished"); // Consume the token.
cursor.advance(interner);
return Ok(Binary::new(
ArithmeticOp::Exp.into(),
lhs,