pub(crate) mod tree;

use std::fs;
use std::io;

use crate::ast::tree::Operator;
use crate::error::Error;
use crate::lex::token;
use crate::lex::token::Token;
use crate::lex::Lexer;
#[derive(PartialEq)]
enum Scope {
    File,
    Target,
    DepList,
    SourceList,
}

struct Parser {
    lexer: Lexer,
    scope: Vec<Scope>,
    filename: String,
}
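
/// Reads `filename` from disk and parses its contents into an AST.
///
/// The outer `io::Result` reports I/O failures (e.g. a missing file); the
/// inner `Result` carries any syntax error produced while parsing.
///
/// Hypothetical usage (the file name is made up for illustration):
/// ```ignore
/// let ast = parse(String::from("project.build"))?.expect("syntax error");
/// ```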
pub fn parse(filename: String) -> io::Result<Result<tree::Node, Error>> {
    let raw: String = fs::read_to_string(filename.clone())?;
    let mut p = Parser::new(filename, raw);
    Ok(p.parse_file())
}

impl Parser {
    pub fn new(filename: String, raw: String) -> Parser {
        let lexer = Lexer::new(filename.clone(), raw);
        Parser {
            lexer,
            scope: Vec::new(),
            filename,
        }
    }

    pub fn parse_file(&mut self) -> Result<tree::Node, Error> {
        let mut nodes = Vec::new();
        self.scope.push(Scope::File);
        while self.lexer.peek().is_some() {
            nodes.push(self.parse_stmt()?);
        }
        self.scope.pop();
        Ok(tree::Node::File {
            name: self.filename.clone(),
            content: nodes,
        })
    }

    fn parse_stmt(&mut self) -> Result<tree::Node, Error> {
        let token = self.lexer.peek_or_err()?;
        match token.kind {
            token::Kind::DependKeyword => self.parse_depend_stmt(),
            token::Kind::SetKeyword => self.parse_set_stmt(),
            token::Kind::SourceKeyword => self.parse_source_stmt(),
            token::Kind::TargetKeyword => self.parse_target_stmt(),
            token::Kind::TypeKeyword => self.parse_type_stmt(),
            token::Kind::Ident => self.parse_expr_stmt(),
            _ => self.syntax_error(format!("Unexpected token {}", token), &token),
        }
    }
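
    /// Parses a target block: the `target` keyword, a name, then `{ ... }`
    /// containing further statements.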
    fn parse_target_stmt(&mut self) -> Result<tree::Node, Error> {
        self.assert_scope(Scope::File)?;
        self.assert_scope_not(Scope::Target)?;
        self.scope.push(Scope::Target);
        self.lexer.expect_kind(token::Kind::TargetKeyword)?;
        let name_token = self.lexer.expect_kind(token::Kind::Ident)?;
        self.lexer.expect_kind(token::Kind::OBrace)?;
        let mut children = Vec::new();
        while let Some(result) = self.lexer.peek() {
            match result?.kind {
                token::Kind::CBrace => {
                    self.lexer.next();
                    break;
                }
                _ => children.push(self.parse_stmt()?),
            }
        }
        self.scope.pop();
        Ok(tree::Node::Target {
            name: Box::new(tree::Node::Ident(name_token.raw)),
            content: children,
        })
    }
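
    /// Parses a dependency statement: the `depend` keyword followed by an
    /// expression terminated by `;`. Only valid inside a target block.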
    fn parse_depend_stmt(&mut self) -> Result<tree::Node, Error> {
        self.assert_scope(Scope::Target)?;
        self.scope.push(Scope::DepList);
        self.lexer.expect_kind(token::Kind::DependKeyword)?;
        let rvalue = self.parse_expr(&[token::Kind::Semi])?;
        self.scope.pop();
        Ok(tree::Node::DepList(Box::new(rvalue)))
    }
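
    /// Parses a set statement: the `set` keyword followed by an assignment
    /// expression (`name = value`) terminated by `;`.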
    fn parse_set_stmt(&mut self) -> Result<tree::Node, Error> {
        self.assert_scope(Scope::File)?;
        self.lexer.expect_kind(token::Kind::SetKeyword)?;
        let expr = self.parse_expr(&[token::Kind::Semi])?;
        match expr {
            tree::Node::BinaryExpr { op, lhs, rhs } => {
                if op == Operator::Eq {
                    Ok(tree::Node::SetExpr {
                        name: lhs,
                        val: rhs,
                    })
                } else {
                    self.syntax_error(
                        String::from("Expected '=' in set statement"),
                        self.lexer.current().unwrap(),
                    )
                }
            }
            _ => self.syntax_error(
                String::from("Expected an assignment after 'set'"),
                self.lexer.current().unwrap(),
            ),
        }
    }
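
    /// Parses a type statement: the `type` keyword followed by an expression
    /// terminated by `;`. Only valid inside a target block.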
    fn parse_type_stmt(&mut self) -> Result<tree::Node, Error> {
        self.assert_scope(Scope::Target)?;
        self.lexer.expect_kind(token::Kind::TypeKeyword)?;
        let expr = self.parse_expr(&[token::Kind::Semi])?;
        Ok(tree::Node::TypeExpr(Box::new(expr)))
    }
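
    /// Parses a source statement: the `source` keyword followed by an
    /// expression (for example an array literal) terminated by `;`. Only
    /// valid inside a target block.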
    fn parse_source_stmt(&mut self) -> Result<tree::Node, Error> {
        self.assert_scope(Scope::Target)?;
        self.lexer.expect_kind(token::Kind::SourceKeyword)?;
        self.scope.push(Scope::SourceList);
        let source = self.parse_expr(&[token::Kind::Semi])?;
        self.scope.pop();
        Ok(tree::Node::SourceList(Box::new(source)))
    }

    fn parse_expr_stmt(&mut self) -> Result<tree::Node, Error> {
        self.parse_expr(&[token::Kind::Semi])
    }

    fn parse_expr(&mut self, terminators: &[token::Kind]) -> Result<tree::Node, Error> {
        self.assert_scope(Scope::File)?;
        if let Some(result) = self.lexer.peek() {
            let token = result?;
            if !token.kind.is_start_of_expr() {
                self.syntax_error(String::from("Expected an expression"), &token)
            } else {
                self.parse_assignment_expr_or_higher(terminators)
            }
        } else {
            self.syntax_error(
                String::from("Unexpected EOF"),
                self.lexer.current().unwrap(),
            )
        }
    }

    fn parse_assignment_expr_or_higher(
        &mut self,
        terminators: &[token::Kind],
    ) -> Result<tree::Node, Error> {
        let lhs = self.parse_primary_expr()?;
        if let Some(Ok(token)) = self.lexer.peek() {
            if token.kind.is_assignment_op() {
                let op_token = self.lexer.require_next()?;
                let op = Operator::from_token(&op_token)?;
                let rhs = self.parse_binary_expr_or_higher(terminators)?;
                return Ok(tree::Node::BinaryExpr {
                    op,
                    lhs: Box::new(lhs),
                    rhs: Box::new(rhs),
                });
            } else {
                // Not an assignment: the expression must end with one of the terminators.
                self.lexer.expect_kinds(terminators)?;
            }
        }
        Ok(lhs)
    }

    /// Binary expressions are generally left associative.
    /// However, parsing gets a little trickier once the nine different
    /// precedence levels are taken into account.
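    ///
    /// For example, `a + b * c - d` is grouped as `(a + (b * c)) - d`:
    /// `*` binds tighter than `+` and `-`, while operators of equal
    /// precedence associate to the left.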
    fn parse_binary_expr_or_higher(
        &mut self,
        terminators: &[token::Kind],
    ) -> Result<tree::Node, Error> {
        let mut expr = self.parse_unary_expr_or_higher()?;
        while let Some(Ok(token)) = self.lexer.peek() {
            if terminators.contains(&token.kind) {
                self.lexer.next();
                break;
            }
            let op = Operator::from_token(&token)?;
            self.lexer.next();
            let precedence = token.kind.binary_op_precedence().unwrap();
            expr = tree::Node::BinaryExpr {
                op,
                lhs: Box::new(expr),
                rhs: Box::new(self.parse_partial_binary_expr(precedence, terminators)?),
            };
        }
        Ok(expr)
    }

    fn parse_partial_binary_expr(
        &mut self,
        precedence: u32,
        terminators: &[token::Kind],
    ) -> Result<tree::Node, Error> {
        let mut lhs = self.parse_unary_expr_or_higher()?;
        while let Some(Ok(token)) = self.lexer.peek() {
            if let Some(new_precedence) = token.kind.binary_op_precedence() {
                if new_precedence > precedence {
                    let op = Operator::from_token(&token)?;
                    self.lexer.next();
                    lhs = tree::Node::BinaryExpr {
                        op,
                        lhs: Box::new(lhs),
                        rhs: Box::new(self.parse_partial_binary_expr(new_precedence, terminators)?),
                    };
                } else {
                    break;
                }
            } else {
                break;
            }
        }
        Ok(lhs)
    }

    fn parse_unary_expr_or_higher(&mut self) -> Result<tree::Node, Error> {
        if let Some(result) = self.lexer.peek() {
            let token = result?;
            if token.kind == token::Kind::Bang || token.kind == token::Kind::Minus {
                self.lexer.next(); // consume unary operator token
                let op = Operator::from_token(&token)?;
                let expr = self.parse_primary_expr()?;
                return Ok(tree::Node::UnaryExpr {
                    op,
                    node: Box::new(expr),
                });
            }
        }
        self.parse_primary_expr()
    }
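
    /// Parses a primary expression: an identifier (possibly followed by a
    /// call or index), an integer literal (decimal, or prefixed with `0x`,
    /// `0o`, or `0b`), a string literal, or an array literal.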
    fn parse_primary_expr(&mut self) -> Result<tree::Node, Error> {
        let token = self.lexer.require_next()?;
        match token.kind {
            token::Kind::Ident => {
                let ident = tree::Node::Ident(token.raw);
                self.parse_primary_expr_rest(ident)
            }
            token::Kind::IntLiteral => {
                // The second character selects the radix (0x, 0o, 0b); anything else
                // is treated as decimal. The unwraps assume the lexer has already
                // validated the literal and will panic otherwise.
                let raw = token.raw;
                let num = match raw.chars().nth(1) {
                    Some('x') => i128::from_str_radix(&raw[2..], 16).unwrap(),
                    Some('o') => i128::from_str_radix(&raw[2..], 8).unwrap(),
                    Some('b') => i128::from_str_radix(&raw[2..], 2).unwrap(),
                    _ => raw.parse().unwrap(),
                };
                Ok(tree::Node::Int(num))
            }
            token::Kind::StringLiteral => Ok(tree::Node::String(token.raw)),
            token::Kind::OBracket => self.parse_array(),
            _ => self.syntax_error(format!("Unexpected token {}", token.kind), &token),
        }
    }
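
    /// Continues a primary expression that began with `start`: a `(` starts a
    /// call expression and a `[` starts an array index; anything else leaves
    /// `start` unchanged.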
    fn parse_primary_expr_rest(&mut self, start: tree::Node) -> Result<tree::Node, Error> {
        if let Some(Ok(token)) = self.lexer.peek() {
            match token.kind {
                token::Kind::OParen => {
                    // function call
                    self.lexer.next();
                    let params = self.parse_param_list()?;
                    Ok(tree::Node::CallExpr {
                        func: Box::new(start),
                        params,
                    })
                }
                token::Kind::OBracket => {
                    // array index
                    self.lexer.next();
                    let index = self.parse_expr(&[token::Kind::CBracket])?;
                    Ok(tree::Node::ArrayExpr {
                        array: Box::new(start),
                        index: Box::new(index),
                    })
                }
                _ => Ok(start),
            }
        } else {
            Ok(start)
        }
    }
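
    /// Parses the elements of an array literal; the opening `[` has already
    /// been consumed by the caller.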
    fn parse_array(&mut self) -> Result<tree::Node, Error> {
        let mut elements = Vec::new();
        while let Some(result) = self.lexer.peek() {
            if result?.kind == token::Kind::CBracket {
                self.lexer.next();
                break;
            } else {
                elements.push(self.parse_expr(&[token::Kind::Comma, token::Kind::CBracket])?);
            }
        }
        Ok(tree::Node::Array(elements))
    }
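
    /// Parses a comma-separated parameter list for a call expression; the
    /// opening `(` has already been consumed by the caller.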
    fn parse_param_list(&mut self) -> Result<Vec<tree::Node>, Error> {
        let mut params = Vec::new();
        while let Some(result) = self.lexer.peek() {
            match result?.kind {
                token::Kind::CParen => {
                    self.lexer.next();
                    break;
                }
                _ => {
                    params.push(self.parse_expr(&[token::Kind::Comma, token::Kind::CParen])?);
                    // parse_expr consumed the terminator; stop if it was the closing ')'.
                    if self.lexer.current().unwrap().kind == token::Kind::CParen {
                        break;
                    }
                }
            }
        }
        Ok(params)
    }
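
    /// Returns an error unless `scope` appears somewhere on the current scope stack.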
    fn assert_scope(&self, scope: Scope) -> Result<(), Error> {
        if self.scope.contains(&scope) {
            Ok(())
        } else {
            let token = self.lexer.current().unwrap();
            self.syntax_error(
                format!("Token {} cannot be used in this context", token),
                token,
            )
        }
    }

    fn assert_scope_not(&self, scope: Scope) -> Result<(), Error> {
        if self.scope.contains(&scope) {
            let token = self.lexer.current().unwrap();
            self.syntax_error(
                format!("Token {} cannot be used in this context", token),
                token,
            )
        } else {
            Ok(())
        }
    }

    fn syntax_error<T>(&self, msg: String, token: &Token) -> Result<T, Error> {
        Err(Error::syntax_error(token.pos.clone(), msg))
    }
}