Skip to content

Commit

Permalink
Added import as alias support
Browse files Browse the repository at this point in the history
  • Loading branch information
davemackintosh committed Feb 2, 2024
1 parent c557f99 commit 69e1ec3
Show file tree
Hide file tree
Showing 4 changed files with 135 additions and 42 deletions.
11 changes: 7 additions & 4 deletions packages/parser/src/ast/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ pub enum Keyword {
As,
Fn,
Var,
Const,
Import,
From,
Interface,
Expand Down Expand Up @@ -85,7 +86,8 @@ impl ToString for TokenType {
TokenType::Keyword(Keyword::Interface) => "interface".into(),
TokenType::Keyword(Keyword::Enum) => "enum".into(),
TokenType::Keyword(Keyword::Fn) => "fn".into(),
TokenType::Keyword(Keyword::Var) => "keyword".into(),
TokenType::Keyword(Keyword::Var) => "var".into(),
TokenType::Keyword(Keyword::Const) => "const".into(),
TokenType::Keyword(Keyword::Import) => "import".into(),
TokenType::Keyword(Keyword::From) => "from".into(),
TokenType::Keyword(Keyword::Object) => "object".into(),
Expand All @@ -95,8 +97,8 @@ impl ToString for TokenType {
TokenType::Keyword(Keyword::Else) => "else".into(),
TokenType::Keyword(Keyword::Nil) => "nil".into(),
TokenType::Keyword(Keyword::Empty) => "empty".into(),
TokenType::ReturnType => "return type expression".into(),
TokenType::Ident(s) => format!("Ident {}", s),
TokenType::ReturnType => "return type".into(),
TokenType::Ident(s) => format!("ident({})", s),
TokenType::Symbol(Symbol::CloseParen) => ")".into(),
TokenType::Symbol(Symbol::DoubleSpeechMark) => "\"".into(),
TokenType::Symbol(Symbol::SingleSpeechMark) => "'".into(),
Expand Down Expand Up @@ -204,9 +206,10 @@ impl Lexer {
s if s == "true" => TokenType::LiteralBoolean(true),
s if s == "false" => TokenType::LiteralBoolean(false),
s if s == "pub" => TokenType::AccessModifier(AccessModifier::Pub),
s if s == "const" => TokenType::AccessModifier(AccessModifier::Private),
s if s == "private" => TokenType::AccessModifier(AccessModifier::Private),
s if s == "fn" => TokenType::Keyword(Keyword::Fn),
s if s == "var" => TokenType::Keyword(Keyword::Var),
s if s == "const" => TokenType::Keyword(Keyword::Const),
s if s == "import" => TokenType::Keyword(Keyword::Import),
s if s == "from" => TokenType::Keyword(Keyword::From),
s if s == "interface" => TokenType::Keyword(Keyword::Interface),
Expand Down
8 changes: 7 additions & 1 deletion packages/parser/src/ast/lexer_parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,15 @@ pub struct Fn {
pub returns: Box<Type>,
}

/// One member of an `import { ... }` list, e.g. `Thing` or `Thing as Other`.
#[derive(Default, Debug, PartialEq)]
pub struct ImportStatementMember {
    /// The identifier as exported by the source module.
    pub name: Identifier,
    /// Optional rename introduced by `as`; `None` when no alias was given.
    pub alias: Option<String>,
}

#[derive(Default, Debug, PartialEq)]
pub struct ImportStatement {
pub members: Vec<Identifier>,
pub members: Vec<ImportStatementMember>,
pub source_path: String,
}

Expand Down
133 changes: 120 additions & 13 deletions packages/parser/src/ast/parsers/import.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,61 @@
use crate::ast::{
lexer::{self, TokenType},
lexer_parser::{Identifier, ImportStatement, Parser},
lexer::{self, AccessModifier, TokenType},
lexer_parser::{Identifier, ImportStatement, ImportStatementMember, Parser},
syntax_error::SyntaxError,
};

/// Parses the remainder of a single import member after its name token,
/// handling an optional `as <alias>` rename.
///
/// On entry the current token is the member's name (`import_name`); tokens
/// are consumed until either a `,` (member separator, consumed) or a `}`
/// (end of the import list, pushed back via `unconsume` so the caller can
/// see it).
///
/// # Errors
///
/// * `SyntaxError::MissingToken("ident")` when `as` is not followed by an
///   identifier before the member ends at `,` or `}`.
/// * `SyntaxError::UnexpectedToken` for an identifier that is not preceded
///   by `as`, or for any other token that cannot appear inside a member.
fn parse_import_ident(
    parser: &mut Parser,
    import_name: String,
) -> Result<ImportStatementMember, SyntaxError> {
    let mut member = ImportStatementMember {
        alias: None,
        name: Identifier {
            immutable: true,
            access_modifier: AccessModifier::Pub,
            name: import_name,
        },
    };

    // True between seeing `as` and receiving the alias identifier.
    let mut dangling_alias = false;

    while parser.consume().is_some() {
        if let Some(token) = &parser.current_token {
            match &token.token_type {
                TokenType::Symbol(lexer::Symbol::Comma) => {
                    if dangling_alias {
                        return Err(SyntaxError::MissingToken("ident"));
                    }
                    break;
                }
                TokenType::Keyword(lexer::Keyword::As) => {
                    dangling_alias = true;
                    continue;
                }
                TokenType::Whitespace(_) => continue,
                TokenType::Symbol(lexer::Symbol::CloseBlock) => {
                    if dangling_alias {
                        return Err(SyntaxError::MissingToken("ident"));
                    }
                    // The `}` terminates the whole import list; hand it
                    // back so the caller's loop can process it.
                    parser.unconsume();
                    break;
                }
                TokenType::Ident(value) => {
                    // An identifier is only valid here as the alias that
                    // follows `as`; two bare identifiers in a row (e.g.
                    // `import { Thing Other }`) are a syntax error rather
                    // than a silent alias, which the original allowed.
                    if !dangling_alias {
                        return Err(SyntaxError::UnexpectedToken(
                            parser.current_token.clone().unwrap(),
                        ));
                    }
                    member.alias = Some(value.clone());
                    dangling_alias = false;
                }
                _ => {
                    return Err(SyntaxError::UnexpectedToken(
                        parser.current_token.clone().unwrap(),
                    ));
                }
            }
        }
    }

    Ok(member)
}

pub fn parse_import(parser: &mut Parser) -> Result<ImportStatement, SyntaxError> {
match parser.current_token.clone() {
None => Err(SyntaxError::MissingToken("import")),
Expand All @@ -24,7 +76,7 @@ pub fn parse_import(parser: &mut Parser) -> Result<ImportStatement, SyntaxError>
// We skip most of these characters.
let mut found_opening_brace = false;
let mut found_closing_brace = false;
let mut imports: Vec<lexer::Token> = vec![];
let mut imports: Vec<ImportStatementMember> = vec![];
while parser.consume().is_some() {
if let Some(token) = &parser.current_token {
match &token.token_type {
Expand Down Expand Up @@ -60,11 +112,16 @@ pub fn parse_import(parser: &mut Parser) -> Result<ImportStatement, SyntaxError>
}
lexer::TokenType::Symbol(..) => continue,
lexer::TokenType::Whitespace(..) => continue,
lexer::TokenType::Ident(..) => {
lexer::TokenType::Ident(value) => {
if found_closing_brace {
import_statement.source_path = token.value.clone();
} else {
imports.push(token.clone());
match parse_import_ident(parser, value.clone()) {
Ok(member) => imports.push(member),
Err(err) => {
return Err(err);
}
}
}
}
lexer::TokenType::EOF => break,
Expand All @@ -81,17 +138,67 @@ pub fn parse_import(parser: &mut Parser) -> Result<ImportStatement, SyntaxError>
return Err(SyntaxError::MissingToken("}"));
}

import_statement.members = imports
.iter()
.map(|token| Identifier {
name: token.value.clone(),
immutable: true,
access_modifier: lexer::AccessModifier::Pub,
})
.collect();
import_statement.members = imports;

Ok(import_statement)
}
}
}
}

#[cfg(test)]
mod tests {
    use tests::lexer::Lexer;

    use crate::ast::testing::Test;

    use self::lexer::AccessModifier;

    use super::*;

    /// End-to-end check of `parse_import` over freshly lexed input,
    /// covering a plain member and an `as`-aliased member.
    #[test]
    fn test_import() {
        let cases: Vec<Test<&'static str, ImportStatement>> = vec![
            Test {
                name: "basic import",
                input: "import { Thing } from \"elp\"",
                expected: ImportStatement {
                    members: vec![ImportStatementMember {
                        alias: None,
                        name: Identifier {
                            immutable: true,
                            access_modifier: AccessModifier::Pub,
                            name: "Thing".into(),
                        },
                    }],
                    source_path: "elp".into(),
                },
            },
            Test {
                name: "import as alias",
                input: "import { Thing as Other } from \"elp\"",
                expected: ImportStatement {
                    members: vec![ImportStatementMember {
                        name: Identifier {
                            immutable: true,
                            access_modifier: AccessModifier::Pub,
                            name: "Thing".into(),
                        },
                        alias: Some("Other".into()),
                    }],
                    source_path: "elp".into(),
                },
            },
        ];

        for case in cases {
            // Lex the whole input, then position the parser on the first token.
            let tokens = Lexer::new(case.input.to_string()).consume_all_tokens();
            let mut parser = Parser::new(tokens);
            parser.consume();

            println!("{}", case.name);
            assert_eq!(parse_import(&mut parser).unwrap(), case.expected);
        }
    }
}
25 changes: 1 addition & 24 deletions packages/parser/src/ast/parsers/interface.rs
Original file line number Diff line number Diff line change
Expand Up @@ -145,34 +145,11 @@ mod tests {
use crate::ast::{
lexer::{AccessModifier, Lexer},
lexer_parser::{
AstNode, Identifier, ImportStatement, InterfaceDeclaration, InterfaceProperty, Parser,
Trie, Type,
AstNode, Identifier, InterfaceDeclaration, InterfaceProperty, Parser, Trie, Type,
},
};
use pretty_assertions::assert_eq;

#[test]
fn test_parse_import() {
let input = "import { Thing } from \"elp\"".to_string();
let mut lexer = Lexer::new(input.clone());
let tokens = lexer.consume_all_tokens();
let mut parser = Parser::new(tokens);

assert_eq!(
parser.parse(),
Trie {
nodes: vec!(AstNode::Import(ImportStatement {
members: vec!(Identifier {
name: "Thing".to_string(),
immutable: true,
access_modifier: AccessModifier::Pub,
}),
source_path: "elp".to_string(),
}))
}
);
}

#[test]
fn test_parse_basic_interface() {
let input = "interface MyInterface {
Expand Down

0 comments on commit 69e1ec3

Please sign in to comment.