improve readability, run rustfmt, misc cleanup

anna 2021-04-18 17:55:25 +02:00
parent 01e9bd2a48
commit 60076379eb
Signed by: fef
GPG key ID: EC22E476DC2D3D84
5 changed files with 80 additions and 84 deletions

View file

@@ -13,10 +13,12 @@ fn main() {
     let s = fs::read_to_string(&argv[1]).expect("Cannot open file");
     match Config::parse(s.chars()) {
-        Ok(conf) => for k in conf.keys() {
-            println!("{:?}", conf.get_node(k));
-        },
-        Err(e) => panic!("{:?}", e.msg)
+        Ok(conf) => {
+            for k in conf.keys() {
+                println!("{:?}", conf.get_node(k));
+            }
+        }
+        Err(e) => panic!("{:?}", e.msg),
     }
 }
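
For reference, a minimal standalone sketch (not code from this repository) of the two arm styles the hunk above converts between: a block-bodied arm ends at its closing brace and needs no comma, while an expression-bodied arm ends with a comma, matching the trailing comma the new Err arm gains.

fn describe(n: Result<u32, String>) {
    match n {
        Ok(v) => {
            // block-bodied arm: the closing brace ends the arm, no comma needed
            for i in 0..v {
                println!("{}", i);
            }
        }
        Err(e) => println!("error: {}", e), // expression-bodied arm ends with a comma
    }
}

fn main() {
    describe(Ok(3));
    describe(Err(String::from("oops")));
}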

View file

@@ -32,7 +32,7 @@ impl<'a> Cursor<'a> {
     pub fn advance_by(&mut self, n: usize) -> Result<(), usize> {
         for i in 0..n {
             if self.next() == None {
-                return Err(i)
+                return Err(i);
             }
         }

@@ -42,15 +42,12 @@ impl<'a> Cursor<'a> {
     pub fn next_until(&mut self, test: fn(c: char) -> bool) -> String {
         let mut s = String::new();
-        for _ in 0.. {
-            match self.peek() {
-                Some(c) => if test(c) {
-                    break;
-                } else {
-                    s.push(c);
-                    self.next();
-                },
-                None => break,
+        while let Some(c) = self.peek() {
+            if test(c) {
+                break;
+            } else {
+                s.push(c);
+                self.next();
             }
         }
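
For reference, a minimal sketch of the `while let` rewrite in the second hunk, using std::iter::Peekable as a stand-in for the crate's own Cursor (whose peek apparently yields char by value): peek at the next character, stop once the test matches, otherwise consume and collect it.

use std::iter::Peekable;
use std::str::Chars;

// Collect characters until `test` matches, leaving the delimiter unconsumed.
fn take_until(it: &mut Peekable<Chars>, test: fn(char) -> bool) -> String {
    let mut s = String::new();
    while let Some(&c) = it.peek() {
        if test(c) {
            break;
        } else {
            s.push(c);
            it.next();
        }
    }
    s
}

fn main() {
    let mut chars = "hello world".chars().peekable();
    assert_eq!(take_until(&mut chars, |c| c == ' '), "hello");
    assert_eq!(chars.next(), Some(' '));
}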

View file

@@ -1,10 +1,11 @@
 // See the end of this file for copyright and license terms.
+use std::any::Any;
 use std::str::Chars;
+mod cursor;
 use cursor::Cursor;
-use std::any::Any;
-mod cursor;
 pub struct Lexer<'a> {
     cursor: Cursor<'a>,

@@ -61,7 +62,7 @@ impl Iterator for Lexer<'_> {
             '"' => self.read_string(),
-            c @ '0'..='9' => Ok(self.read_int_rest(c)),
+            c @ '1'..='9' => Ok(self.read_int_rest(c)),
             c @ 'a'..='z' | c @ 'A'..='Z' => Ok(self.read_keyword_or_ident(c)),

@@ -97,17 +98,21 @@ impl <'a> Lexer<'a> {
     fn read_keyword_or_ident(&mut self, first: char) -> Token {
         let kind = match first {
-            't' => if self.skip_if_match(String::from("rue")) {
-                TokenKind::TrueKeyword
-            } else {
-                TokenKind::Ident(self.read_ident_rest(first))
-            },
-            'f' => if self.skip_if_match(String::from("alse")) {
-                TokenKind::FalseKeyword
-            } else {
-                TokenKind::Ident(self.read_ident_rest(first))
-            },
+            't' => {
+                if self.skip_if_match(String::from("rue")) {
+                    TokenKind::TrueKeyword
+                } else {
+                    TokenKind::Ident(self.read_ident_rest(first))
+                }
+            }
+            'f' => {
+                if self.skip_if_match(String::from("alse")) {
+                    TokenKind::FalseKeyword
+                } else {
+                    TokenKind::Ident(self.read_ident_rest(first))
+                }
+            }
             _ => TokenKind::Ident(self.read_ident_rest(first)),
         };

@@ -118,10 +123,10 @@ impl <'a> Lexer<'a> {
     fn read_ident_rest(&mut self, first: char) -> String {
         let mut s = String::from(first);
-        s += self.cursor.next_until(|c| match c {
-            'A'..='Z' | 'a'..='z' | '0'..='9' | '_' => false,
-            _ => true,
-        }).as_str();
+        s += self
+            .cursor
+            .next_until(|c| !matches!(c, 'A'..='Z' | 'a'..='z' | '0'..='9' | '_'))
+            .as_str();
         s
     }

@@ -158,26 +163,17 @@ impl <'a> Lexer<'a> {
     fn read_int_rest(&mut self, first: char) -> Token {
         let mut s = String::from(first);
-        loop {
-            match self.cursor.peek() {
-                Some(c @ '0'..='9') => {
-                    s.push(c);
-                    self.cursor.next();
-                },
-                _ => break,
-            }
-        }
+        s.push_str(&self.cursor.next_until(|c| !matches!(c, '0'..='9')));
         self.make_token(TokenKind::IntLiteral(
-            s.parse().expect("this shouldn't be possible lol")
+            s.parse().expect("this shouldn't be possible lol"),
        ))
     }

     fn read_whitespace(&mut self) -> Option<Token> {
         let ws = self.cursor.next_until(|c| !is_whitespace(c));
-        if ws.len() > 0 {
+        if !ws.is_empty() {
             Some(self.make_token(TokenKind::Whitespace(ws)))
         } else {
             None

@@ -193,13 +189,15 @@ impl <'a> Lexer<'a> {
             }
         }
-        self.cursor.advance_by(s.len()).expect("this shouldn't be possible lol");
+        self.cursor
+            .advance_by(s.len())
+            .expect("this shouldn't be possible lol");
         true
     }

     fn make_token(&mut self, kind: TokenKind) -> Token {
         let t = Token {
-            kind: kind,
+            kind,
             line: self.token_line,
             col: self.token_col,
         };

@@ -212,10 +210,7 @@ impl <'a> Lexer<'a> {
 }

 fn is_whitespace(c: char) -> bool {
-    match c {
-        ' ' | '\n' | '\t' => true,
-        _ => false,
-    }
+    matches!(c, ' ' | '\n' | '\t')
 }

 pub struct SyntaxError {
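
Several of the hunks above replace hand-written match-to-bool code with the `matches!` macro, which expands to exactly such a match. A standalone sketch (is_ident_char is an illustrative name, not one from this crate):

// `matches!` returns true when the value fits the pattern, so the explicit
// true/false arms collapse into a single expression.
fn is_whitespace(c: char) -> bool {
    matches!(c, ' ' | '\n' | '\t')
}

// Illustrative helper; the lexer inlines the same pattern as a closure.
fn is_ident_char(c: char) -> bool {
    matches!(c, 'A'..='Z' | 'a'..='z' | '0'..='9' | '_')
}

fn main() {
    assert!(is_whitespace('\n'));
    assert!(!is_whitespace('x'));
    // next_until takes the negation as its stop test, e.g. |c| !is_ident_char(c).
    assert!("abc_123".chars().all(is_ident_char));
}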

View file

@@ -1,12 +1,13 @@
 // See the end of this file for copyright and license terms.
-use std::str::Chars;
 use std::collections::HashMap;
+use std::str::Chars;
 pub mod lex;
 use crate::lex::{Lexer, SyntaxError};
 pub mod parser;
 use parser::Parser;
 use std::collections::hash_map::Keys;

@@ -25,14 +26,12 @@ impl Config {
                 Ok(node) => {
                     let name = node.name.clone();
                     nodes.insert(name, node);
-                },
+                }
                 Err(err) => return Err(err),
             }
         }
-        Ok(Config {
-            nodes
-        })
+        Ok(Config { nodes })
     }

     pub fn keys(&self) -> Keys<String, Node> {
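
The `Ok(Config { nodes })` line relies on struct field init shorthand: when a local variable shares a field's name, `field: field` collapses to just `field`. A minimal sketch with stand-in types (the real Config maps names to Node values):

use std::collections::HashMap;

struct Config {
    nodes: HashMap<String, i64>,
}

fn build() -> Config {
    let mut nodes = HashMap::new();
    nodes.insert(String::from("answer"), 42);
    // Field init shorthand: same as `Config { nodes: nodes }`.
    Config { nodes }
}

fn main() {
    println!("{}", build().nodes["answer"]);
}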

View file

@@ -1,63 +1,66 @@
 // See the end of this file for copyright and license terms.
-use crate::lex::{Token, TokenKind, Lexer, SyntaxError};
-use crate::{Node, NodeVal};
 use std::any::Any;
+use crate::lex::{Lexer, SyntaxError, Token, TokenKind};
+use crate::{Node, NodeVal};

 pub struct Parser<'a> {
     lexer: Lexer<'a>,
 }

 impl<'a> Parser<'a> {
     pub const fn new(lexer: Lexer<'a>) -> Parser<'a> {
-        Parser {
-            lexer
-        }
+        Parser { lexer }
     }

     fn parse_node(&mut self) -> Option<Result<Node, SyntaxError>> {
-        Some(self.lexer.next()?.and_then(|token| match token.kind {
-            TokenKind::Ident(name) => {
-                self.require_kind(TokenKind::Eq ).and_then(|_| {
-                    self.require_next()
-                }).and_then(|token| match token.kind {
-                    TokenKind::TrueKeyword => self.node_result(name, NodeVal::Bool(true)),
-                    TokenKind::FalseKeyword => self.node_result(name, NodeVal::Bool(false)),
-                    TokenKind::IntLiteral(i) => self.node_result(name, NodeVal::Int(i)),
-                    TokenKind::StringLiteral(s) => self.node_result(name, NodeVal::String(s)),
-                    _ => self.lexer.syntax_error(String::from("Expected a value")),
-                }).and_then(|node| {
-                    self.require_kind(TokenKind::Semi).and_then(|_| Ok(node))
-                })
-            },
-            _ => self.lexer.syntax_error(String::from("Expected identifier")),
-        }))
+        Some(self.lexer.next()?.and_then(|token| {
+            match token.kind {
+                TokenKind::Ident(name) => self
+                    .require_kind(TokenKind::Eq)
+                    .and_then(|_| self.require_next())
+                    .and_then(|token| match token.kind {
+                        TokenKind::TrueKeyword => self.node_result(name, NodeVal::Bool(true)),
+                        TokenKind::FalseKeyword => self.node_result(name, NodeVal::Bool(false)),
+                        TokenKind::IntLiteral(i) => self.node_result(name, NodeVal::Int(i)),
+                        TokenKind::StringLiteral(s) => self.node_result(name, NodeVal::String(s)),
+                        _ => self.lexer.syntax_error(String::from("Expected a value")),
+                    })
+                    .and_then(|node| self.require_kind(TokenKind::Semi).and_then(|_| Ok(node))),
+                _ => self.lexer.syntax_error(String::from("Expected identifier")),
+            }
+        }))
     }

     fn require_kind(&mut self, kind: TokenKind) -> Result<Token, SyntaxError> {
-        self.require_next().and_then(|token|
+        self.require_next().and_then(|token| {
             if token.kind.type_id() == kind.type_id() {
                 Ok(token)
             } else {
-                self.lexer.syntax_error(format!("Expected {:?}, got {:?}", kind, token.kind))
+                self.lexer
+                    .syntax_error(format!("Expected {:?}, got {:?}", kind, token.kind))
             }
-        )
+        })
     }

     fn require_next(&mut self) -> Result<Token, SyntaxError> {
         match self.lexer.next() {
             Some(result) => result,
-            None => self.lexer.syntax_error(String::from("Unexpected end of file")),
+            None => self
+                .lexer
+                .syntax_error(String::from("Unexpected end of file")),
         }
     }

+    #[allow(clippy::unnecessary_wraps)] // Convenience wrapper
     fn node_result<T>(&self, name: String, val: NodeVal) -> Result<Node, T> {
         Ok(self.make_node(name, val))
     }

     fn make_node(&self, name: String, val: NodeVal) -> Node {
         Node {
-            name: String::from(name),
+            name,
             val,
         }
     }
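
The reformatted parse_node keeps its Result::and_then pipeline, where each step runs only if the previous one returned Ok and the first Err short-circuits out; the new #[allow(clippy::unnecessary_wraps)] on node_result presumably exists because the Ok-wrapping is what lets it slot into that chain. A self-contained sketch of the pattern, with simplified stand-in types rather than the crate's own tokens:

#[derive(Debug)]
struct SyntaxError(String);

fn require_eq(tok: &str) -> Result<(), SyntaxError> {
    if tok == "=" {
        Ok(())
    } else {
        Err(SyntaxError(format!("Expected =, got {:?}", tok)))
    }
}

fn parse_value(tok: &str) -> Result<i64, SyntaxError> {
    tok.parse()
        .map_err(|_| SyntaxError(String::from("Expected a value")))
}

// Each step runs only if the previous one returned Ok; the first Err
// falls straight through to the caller.
fn parse_assignment(eq: &str, val: &str) -> Result<i64, SyntaxError> {
    require_eq(eq).and_then(|_| parse_value(val))
}

fn main() {
    println!("{:?}", parse_assignment("=", "42")); // Ok(42)
    println!("{:?}", parse_assignment(";", "42")); // Err(...)
}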