Merge pull request #102 from anweiss/parser-fixes
Parser fixes for parenthesized groups
anweiss authored Oct 11, 2021
2 parents 1eb7387 + 63b81d9 commit 8654f6b
Showing 2 changed files with 77 additions and 50 deletions.
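
The parser changes below mostly replace calls to the `cur_token_is` helper with direct pattern matches on `&self.cur_token` (`if let` / `while let`, using `|` to fold several token checks into one pattern), and they widen the lookahead in `parse_grpent` so that `?`, `*`, and `//` inside parentheses also count as evidence of group entries. A minimal sketch of the two checking styles, using a hypothetical, stripped-down `Token` enum rather than the crate's real one:

```rust
// Hypothetical, simplified Token enum for illustration only; the real
// cddl::token::Token has many more variants, several carrying owned data.
#[derive(PartialEq)]
#[allow(dead_code)]
enum Token {
    Colon,
    ArrowMap,
    Ident(String), // a data-carrying variant, like the crate's identifiers
}

struct Parser {
    cur_token: Token,
}

impl Parser {
    // Equality-based helper: every check must construct a Token to compare
    // against, which gets awkward for variants that hold data.
    fn cur_token_is(&self, t: Token) -> bool {
        self.cur_token == t
    }

    fn is_memberkey_start_old(&self) -> bool {
        self.cur_token_is(Token::Colon) || self.cur_token_is(Token::ArrowMap)
    }

    // Pattern-match style adopted throughout the diff, written there as
    // `if let Token::COLON | Token::ARROWMAP = &self.cur_token { ... }`;
    // no token is constructed and several variants share one pattern.
    fn is_memberkey_start_new(&self) -> bool {
        matches!(&self.cur_token, Token::Colon | Token::ArrowMap)
    }
}

fn main() {
    let p = Parser { cur_token: Token::ArrowMap };
    // Both styles agree; the pattern-match form simply avoids building tokens.
    assert_eq!(p.is_memberkey_start_old(), p.is_memberkey_start_new());
}
```
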
95 changes: 45 additions & 50 deletions src/parser.rs
@@ -512,9 +512,9 @@ where
let mut is_type_choice_alternate = false;
let mut is_group_choice_alternate = false;

if self.cur_token_is(Token::TCHOICEALT) {
if let Token::TCHOICEALT = &self.cur_token {
is_type_choice_alternate = true;
} else if self.cur_token_is(Token::GCHOICEALT) {
} else if let Token::GCHOICEALT = &self.cur_token {
is_group_choice_alternate = true;
}

@@ -766,10 +766,7 @@ where
#[cfg(not(feature = "ast-comments"))]
self.advance_newline()?;

if self.cur_token_is(Token::ASSIGN)
|| self.cur_token_is(Token::TCHOICEALT)
|| self.cur_token_is(Token::GCHOICEALT)
{
if let Token::ASSIGN | Token::TCHOICEALT | Token::GCHOICEALT = &self.cur_token {
self.errors.push(Error::PARSER {
#[cfg(feature = "ast-span")]
position: Position {
@@ -818,7 +815,7 @@ where
#[cfg(feature = "ast-span")]
let begin_range = self.lexer_position.range.0;

if self.cur_token_is(Token::LANGLEBRACKET) {
if let Token::LANGLEBRACKET = &self.cur_token {
self.next_token()?;
}

@@ -924,7 +921,7 @@ where
let begin_generic_arg_line = self.lexer_position.line;

// Required for type2 mutual recursion
if self.cur_token_is(Token::LANGLEBRACKET) {
if let Token::LANGLEBRACKET = &self.cur_token {
self.next_token()?;
}

@@ -955,7 +952,7 @@ where
self.next_token()?;
}

if self.cur_token_is(Token::EOF) {
if let Token::EOF = &self.cur_token {
self.errors.push(Error::PARSER {
#[cfg(feature = "ast-span")]
position: self.parser_position,
@@ -966,7 +963,7 @@ where
}
}

if self.cur_token_is(Token::RANGLEBRACKET) {
if let Token::RANGLEBRACKET = &self.cur_token {
#[cfg(feature = "ast-span")]
{
self.parser_position.range.1 = self.lexer_position.range.1;
@@ -1030,7 +1027,7 @@ where

t.type_choices.push(tc);

while self.cur_token_is(Token::TCHOICE) {
while let Token::TCHOICE = &self.cur_token {
self.next_token()?;

#[cfg(feature = "ast-comments")]
@@ -1723,7 +1720,7 @@ where
self.parser_position.range = self.lexer_position.range;
}

if self.cur_token_is(Token::COLON) || self.cur_token_is(Token::ARROWMAP) {
if let Token::COLON | Token::ARROWMAP = &self.cur_token {
self.errors.push(Error::PARSER {
#[cfg(feature = "ast-span")]
position: self.parser_position,
@@ -1733,10 +1730,7 @@ where
return Err(Error::INCREMENTAL);
}

if self.cur_token_is(Token::RBRACE)
|| self.cur_token_is(Token::RBRACKET)
|| self.cur_token_is(Token::RPAREN)
{
if let Token::RBRACE | Token::RBRACKET | Token::RPAREN = &self.cur_token {
self.errors.push(Error::PARSER {
#[cfg(feature = "ast-span")]
position: self.parser_position,
@@ -1771,15 +1765,12 @@ where
#[allow(missing_docs)]
pub fn parse_group(&mut self) -> Result<Group<'a>> {
#[cfg(feature = "ast-span")]
let begin_group_range = if self.cur_token_is(Token::LBRACE)
|| self.cur_token_is(Token::LPAREN)
|| self.cur_token_is(Token::LBRACKET)
|| self.cur_token_is(Token::GCHOICE)
{
self.peek_lexer_position.range.0
} else {
self.lexer_position.range.0
};
let begin_group_range =
if let Token::LBRACE | Token::LPAREN | Token::LBRACKET | Token::GCHOICE = &self.cur_token {
self.peek_lexer_position.range.0
} else {
self.lexer_position.range.0
};

let closing_delimiter = token::closing_delimiter(&self.cur_token);

@@ -1791,7 +1782,7 @@ where

group.group_choices.push(self.parse_grpchoice()?);

while self.cur_token_is(Token::GCHOICE) {
while let Token::GCHOICE = &self.cur_token {
group.group_choices.push(self.parse_grpchoice()?);
}

@@ -1825,7 +1816,7 @@ where
span: (self.lexer_position.range.0, 0, self.lexer_position.line),
};

if self.cur_token_is(Token::GCHOICE) {
if let Token::GCHOICE = &self.cur_token {
self.next_token()?;

#[cfg(feature = "ast-comments")]
@@ -1839,7 +1830,7 @@ where
{
grpchoice.span.0 = self.lexer_position.range.0;
}
} else if self.cur_token_is(Token::LBRACE) || self.cur_token_is(Token::LBRACKET) {
} else if let Token::LBRACE | Token::LBRACKET = &self.cur_token {
self.next_token()?;

#[cfg(feature = "ast-span")]
@@ -1865,7 +1856,7 @@ where
{
let ge = self.parse_grpent(false)?;

if self.cur_token_is(Token::GCHOICE) {
if let Token::GCHOICE = &self.cur_token {
grpchoice.group_entries.push((
ge,
OptionalComma {
@@ -1910,7 +1901,7 @@ where

let mut optional_comma = false;

if self.cur_token_is(Token::COMMA) {
if let Token::COMMA = &self.cur_token {
optional_comma = true;

#[cfg(feature = "ast-span")]
@@ -2034,7 +2025,7 @@ where
comments_after_type_or_group,
}) => {
#[cfg(feature = "ast-span")]
if self.cur_token_is(Token::COMMA) {
if let Token::COMMA = &self.cur_token {
span.1 = self.lexer_position.range.1;
}

@@ -2170,7 +2161,7 @@ where
comments_after_type_or_group,
}) => {
#[cfg(feature = "ast-span")]
if self.cur_token_is(Token::COMMA) {
if let Token::COMMA = &self.cur_token {
span.1 = self.lexer_position.range.1;
}

@@ -2200,7 +2191,7 @@ where
}

#[cfg(feature = "ast-span")]
if self.cur_token_is(Token::COMMA) {
if let Token::COMMA = &self.cur_token {
span.1 = self.lexer_position.range.1;
}

@@ -2273,7 +2264,7 @@ where
self.advance_newline()?;

#[cfg(feature = "ast-span")]
if self.cur_token_is(Token::COMMA) {
if let Token::COMMA = &self.cur_token {
span.1 = self.lexer_position.range.1;
}

@@ -2445,7 +2436,7 @@ where
#[cfg(not(feature = "ast-comments"))]
self.advance_newline()?;

let mk = if self.cur_token_is(Token::CUT) {
let mk = if let Token::CUT = &self.cur_token {
self.next_token()?;

#[cfg(feature = "ast-comments")]
@@ -2509,12 +2500,12 @@ where
self.next_token()?;

Some(t1)
} else if self.cur_token_is(Token::ARROWMAP) {
} else if let Token::ARROWMAP = &self.cur_token {
#[cfg(feature = "ast-span")]
let end_memberkey_range = self.lexer_position.range.1;

#[cfg(feature = "ast-comments")]
let comments_after_arrowmap = if let Token::COMMENT(_) = self.peek_token {
let comments_after_arrowmap = if let Token::COMMENT(_) = &self.peek_token {
self.next_token()?;

self.collect_comments()?
@@ -2558,7 +2549,7 @@ where

Some(t1)
} else {
if self.cur_token_is(Token::COLON) {
if let Token::COLON = &self.cur_token {
self.next_token()?;
}

@@ -2639,7 +2630,7 @@ where
#[cfg(not(feature = "ast-comments"))]
self.advance_newline()?;

let mk = if self.cur_token_is(Token::CUT) {
let mk = if let Token::CUT = &self.cur_token {
self.next_token()?;

#[cfg(feature = "ast-comments")]
@@ -2724,7 +2715,7 @@ where
})
};

if self.cur_token_is(Token::COLON) {
if let Token::COLON = &self.cur_token {
self.next_token()?;
}

@@ -2757,16 +2748,21 @@ where
#[cfg(feature = "ast-span")]
let mut closing_parend_index = 0;
while !closing_parend {
if self.cur_token_is(Token::ARROWMAP) || self.cur_token_is(Token::COLON) {
if let Token::ARROWMAP
| Token::COLON
| Token::OPTIONAL
| Token::ASTERISK
| Token::GCHOICE = &self.cur_token
{
has_group_entries = true;
}

// TODO: parse nested comments
if self.cur_token_is(Token::LPAREN) {
if let Token::LPAREN = &self.cur_token {
nested_parend_count += 1;
}

if self.cur_token_is(Token::RPAREN) {
if let Token::RPAREN = &self.cur_token {
match nested_parend_count.cmp(&0) {
Ordering::Greater => nested_parend_count -= 1,
Ordering::Equal | Ordering::Less => {
@@ -2779,8 +2775,7 @@ where
}
}

let t = self.cur_token.clone();
tokens.push(Ok((self.lexer_position, t)));
tokens.push(Ok((self.lexer_position, self.cur_token.clone())));

#[cfg(feature = "ast-span")]
{
@@ -2796,7 +2791,7 @@ where
#[cfg(not(feature = "ast-comments"))]
self.advance_newline()?;

if self.cur_token_is(Token::EOF) {
if let Token::EOF = &self.cur_token {
self.errors.push(Error::PARSER {
#[cfg(feature = "ast-span")]
position: self.lexer_position,
@@ -2850,7 +2845,7 @@ where
#[cfg(not(feature = "ast-comments"))]
self.advance_newline()?;

if self.cur_token_is(Token::CUT) {
if let Token::CUT = &self.cur_token {
self.next_token()?;

#[cfg(feature = "ast-comments")]
@@ -2913,7 +2908,7 @@ where
return Ok(t1);
}

let t1 = if self.cur_token_is(Token::ARROWMAP) {
let t1 = if let Token::ARROWMAP = &self.cur_token {
self.next_token()?;

#[cfg(feature = "ast-span")]
@@ -2993,7 +2988,7 @@ where
#[cfg(not(feature = "ast-comments"))]
self.advance_newline()?;

if self.cur_token_is(Token::CUT) {
if let Token::CUT = &self.cur_token {
self.next_token()?;

#[cfg(feature = "ast-comments")]
@@ -3038,7 +3033,7 @@ where
}));
}

let t1 = if self.cur_token_is(Token::ARROWMAP) {
let t1 = if let Token::ARROWMAP = &self.cur_token {
self.next_token()?;

#[cfg(feature = "ast-span")]
32 changes: 32 additions & 0 deletions tests/parser.rs
@@ -5,7 +5,9 @@ use std::marker::PhantomData;

use cddl::{
ast::*,
cddl_from_str,
lexer::Lexer,
lexer_from_str,
parser::{Error, Parser, Result},
};
use indoc::indoc;
@@ -644,3 +646,33 @@ fn verify_cddl() -> Result<()> {
Err(e) => Err(e),
}
}

#[test]
fn cri_reference() -> std::result::Result<(), String> {
let cddl = indoc!(
r#"
CRI-Reference = [
(?scheme, ?((host.name // host.ip), ?port) // path.type),
*path,
*query,
?fragment
]
scheme = (0, text .regexp "[a-z][a-z0-9+.-]*")
host.name = (1, text)
host.ip = (2, bytes .size 4 / bytes .size 16)
port = (3, 0..65535)
path.type = (4, 0..127)
path = (5, text)
query = (6, text)
fragment = (7, text)
"#
);

let mut l = lexer_from_str(cddl);
let c_ast = cddl_from_str(&mut l, cddl, true)?;

println!("{}", c_ast);

Ok(())
}
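
The new `cri_reference` test exercises the case named in the commit title: in `CRI-Reference`, the entry `(?scheme, ?((host.name // host.ip), ?port) // path.type)` is a parenthesized group whose entries are signalled only by occurrence indicators (`?`, `*`) and group choices (`//`), not by `:` or `=>`. The widened lookahead in `parse_grpent` above, which now also matches `Token::OPTIONAL`, `Token::ASTERISK`, and `Token::GCHOICE` when scanning for `has_group_entries`, lets such parentheses be flagged as group entries instead of falling through to a parenthesized type.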
