From 60076379ebbc411e6eaa667ad6d470883b52e9c2 Mon Sep 17 00:00:00 2001
From: Felix
Date: Sun, 18 Apr 2021 17:55:25 +0200
Subject: [PATCH] improve readability and apply rustfmt

---
 src/bin/ckconf.rs | 10 ++++---
 src/lex/cursor.rs | 21 +++++++--------
 src/lex/mod.rs    | 67 ++++++++++++++++++++++-------------------
 src/lib.rs        |  9 +++----
 src/parser.rs     | 57 +++++++++++++++++++++-------------------
 5 files changed, 80 insertions(+), 84 deletions(-)

diff --git a/src/bin/ckconf.rs b/src/bin/ckconf.rs
index 6a7d4f6..e095f49 100644
--- a/src/bin/ckconf.rs
+++ b/src/bin/ckconf.rs
@@ -13,10 +13,12 @@ fn main() {
     let s = fs::read_to_string(&argv[1]).expect("Cannot open file");
 
     match Config::parse(s.chars()) {
-        Ok(conf) => for k in conf.keys() {
-            println!("{:?}", conf.get_node(k));
-        },
-        Err(e) => panic!("{:?}", e.msg)
+        Ok(conf) => {
+            for k in conf.keys() {
+                println!("{:?}", conf.get_node(k));
+            }
+        }
+        Err(e) => panic!("{:?}", e.msg),
     }
 }
 
diff --git a/src/lex/cursor.rs b/src/lex/cursor.rs
index ea89db6..7fb5c5f 100644
--- a/src/lex/cursor.rs
+++ b/src/lex/cursor.rs
@@ -32,25 +32,22 @@ impl<'a> Cursor<'a> {
     pub fn advance_by(&mut self, n: usize) -> Result<(), usize> {
         for i in 0..n {
             if self.next() == None {
-                return Err(i)
+                return Err(i);
             }
         }
 
         Ok(())
     }
 
-    pub fn next_until(&mut self, test: fn (c: char) -> bool) -> String {
+    pub fn next_until(&mut self, test: fn(c: char) -> bool) -> String {
         let mut s = String::new();
-
-        for _ in 0.. {
-            match self.peek() {
-                Some(c) => if test(c) {
-                    break;
-                } else {
-                    s.push(c);
-                    self.next();
-                },
-                None => break,
+
+        while let Some(c) = self.peek() {
+            if test(c) {
+                break;
+            } else {
+                s.push(c);
+                self.next();
             }
         }
 
diff --git a/src/lex/mod.rs b/src/lex/mod.rs
index 5cc503e..5401ee4 100644
--- a/src/lex/mod.rs
+++ b/src/lex/mod.rs
@@ -1,10 +1,11 @@
 // See the end of this file for copyright and license terms.
 
+use std::any::Any;
 use std::str::Chars;
 
-mod cursor;
 use cursor::Cursor;
-use std::any::Any;
+
+mod cursor;
 
 pub struct Lexer<'a> {
     cursor: Cursor<'a>,
@@ -61,7 +62,7 @@ impl Iterator for Lexer<'_> {
 
             '"' => self.read_string(),
 
-            c @ '0'..='9' => Ok(self.read_int_rest(c)),
+            c @ '0'..='9' => Ok(self.read_int_rest(c)),
 
             c @ 'a'..='z' | c @ 'A'..='Z' => Ok(self.read_keyword_or_ident(c)),
 
@@ -70,7 +71,7 @@
     }
 }
 
-impl <'a> Lexer<'a> {
+impl<'a> Lexer<'a> {
     pub const fn new(stream: Chars<'a>) -> Lexer<'a> {
         Lexer {
             cursor: Cursor::new(stream),
@@ -97,17 +98,21 @@ impl <'a> Lexer<'a> {
 
     fn read_keyword_or_ident(&mut self, first: char) -> Token {
         let kind = match first {
-            't' => if self.skip_if_match(String::from("rue")) {
-                TokenKind::TrueKeyword
-            } else {
-                TokenKind::Ident(self.read_ident_rest(first))
-            },
+            't' => {
+                if self.skip_if_match(String::from("rue")) {
+                    TokenKind::TrueKeyword
+                } else {
+                    TokenKind::Ident(self.read_ident_rest(first))
+                }
+            }
 
-            'f' => if self.skip_if_match(String::from("alse")) {
-                TokenKind::FalseKeyword
-            } else {
-                TokenKind::Ident(self.read_ident_rest(first))
-            },
+            'f' => {
+                if self.skip_if_match(String::from("alse")) {
+                    TokenKind::FalseKeyword
+                } else {
+                    TokenKind::Ident(self.read_ident_rest(first))
+                }
+            }
 
             _ => TokenKind::Ident(self.read_ident_rest(first)),
         };
@@ -118,10 +123,10 @@ impl <'a> Lexer<'a> {
     fn read_ident_rest(&mut self, first: char) -> String {
         let mut s = String::from(first);
 
-        s += self.cursor.next_until(|c| match c {
-            'A'..='Z' | 'a'..='z' | '0'..='9' | '_' => false,
-            _ => true,
-        }).as_str();
+        s += self
+            .cursor
+            .next_until(|c| !matches!(c, 'A'..='Z' | 'a'..='z' | '0'..='9' | '_'))
+            .as_str();
 
         s
     }
@@ -158,26 +163,17 @@ impl <'a> Lexer<'a> {
 
     fn read_int_rest(&mut self, first: char) -> Token {
         let mut s = String::from(first);
-
-        loop {
-            match self.cursor.peek() {
-                Some(c @ '0'..='9') => {
-                    s.push(c);
-                    self.cursor.next();
-                },
-                _ => break,
-            }
-        }
+        s.push_str(&self.cursor.next_until(|c| !matches!(c, '0'..='9')));
 
         self.make_token(TokenKind::IntLiteral(
-            s.parse().expect("this shouldn't be possible lol")
+            s.parse().expect("this shouldn't be possible lol"),
        ))
     }
 
     fn read_whitespace(&mut self) -> Option<Token> {
         let ws = self.cursor.next_until(|c| !is_whitespace(c));
 
-        if ws.len() > 0 {
+        if !ws.is_empty() {
             Some(self.make_token(TokenKind::Whitespace(ws)))
         } else {
             None
@@ -193,13 +189,15 @@ impl <'a> Lexer<'a> {
             }
         }
 
-        self.cursor.advance_by(s.len()).expect("this shouldn't be possible lol");
+        self.cursor
+            .advance_by(s.len())
+            .expect("this shouldn't be possible lol");
         true
     }
 
     fn make_token(&mut self, kind: TokenKind) -> Token {
         let t = Token {
-            kind: kind,
+            kind,
             line: self.token_line,
             col: self.token_col,
         };
@@ -212,10 +210,7 @@
 }
 
 fn is_whitespace(c: char) -> bool {
-    match c {
-        ' ' | '\n' | '\t' => true,
-        _ => false,
-    }
+    matches!(c, ' ' | '\n' | '\t')
 }
 
 pub struct SyntaxError {
diff --git a/src/lib.rs b/src/lib.rs
index 57eb334..ed1b87c 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,12 +1,13 @@
 // See the end of this file for copyright and license terms.
 
-use std::str::Chars;
 use std::collections::HashMap;
+use std::str::Chars;
 
 pub mod lex;
 use crate::lex::{Lexer, SyntaxError};
 
 pub mod parser;
+
 use parser::Parser;
 
 use std::collections::hash_map::Keys;
@@ -25,14 +26,12 @@ impl Config {
                 Ok(node) => {
                     let name = node.name.clone();
                     nodes.insert(name, node);
-                },
+                }
                 Err(err) => return Err(err),
             }
         }
 
-        Ok(Config {
-            nodes
-        })
+        Ok(Config { nodes })
     }
 
     pub fn keys(&self) -> Keys<String, Node> {
diff --git a/src/parser.rs b/src/parser.rs
index dd402fb..0de5672 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -1,69 +1,72 @@
 // See the end of this file for copyright and license terms.
 
-use crate::lex::{Token, TokenKind, Lexer, SyntaxError};
-use crate::{Node, NodeVal};
 use std::any::Any;
 
+use crate::lex::{Lexer, SyntaxError, Token, TokenKind};
+use crate::{Node, NodeVal};
+
 pub struct Parser<'a> {
     lexer: Lexer<'a>,
 }
 
-impl <'a> Parser<'a> {
+impl<'a> Parser<'a> {
     pub const fn new(lexer: Lexer<'a>) -> Parser<'a> {
-        Parser {
-            lexer
-        }
+        Parser { lexer }
     }
 
     fn parse_node(&mut self) -> Option<Result<Node, SyntaxError>> {
-        Some(self.lexer.next()?.and_then(|token| match token.kind {
-            TokenKind::Ident(name) => {
-                self.require_kind(TokenKind::Eq ).and_then(|_| {
-                    self.require_next()
-                }).and_then(|token| match token.kind {
-                    TokenKind::TrueKeyword => self.node_result(name, NodeVal::Bool(true)),
-                    TokenKind::FalseKeyword => self.node_result(name, NodeVal::Bool(false)),
-                    TokenKind::IntLiteral(i) => self.node_result(name, NodeVal::Int(i)),
-                    TokenKind::StringLiteral(s) => self.node_result(name, NodeVal::String(s)),
-                    _ => self.lexer.syntax_error(String::from("Expected a value")),
-                }).and_then(|node| {
-                    self.require_kind(TokenKind::Semi).and_then(|_| Ok(node))
-                })
-            },
-            _ => self.lexer.syntax_error(String::from("Expected identifier")),
+        Some(self.lexer.next()?.and_then(|token| {
+            match token.kind {
+                TokenKind::Ident(name) => self
+                    .require_kind(TokenKind::Eq)
+                    .and_then(|_| self.require_next())
+                    .and_then(|token| match token.kind {
+                        TokenKind::TrueKeyword => self.node_result(name, NodeVal::Bool(true)),
+                        TokenKind::FalseKeyword => self.node_result(name, NodeVal::Bool(false)),
+                        TokenKind::IntLiteral(i) => self.node_result(name, NodeVal::Int(i)),
+                        TokenKind::StringLiteral(s) => self.node_result(name, NodeVal::String(s)),
+                        _ => self.lexer.syntax_error(String::from("Expected a value")),
+                    })
+                    .and_then(|node| self.require_kind(TokenKind::Semi).and_then(|_| Ok(node))),
+                _ => self.lexer.syntax_error(String::from("Expected identifier")),
+            }
         }))
     }
 
     fn require_kind(&mut self, kind: TokenKind) -> Result<Token, SyntaxError> {
-        self.require_next().and_then(|token|
+        self.require_next().and_then(|token| {
             if token.kind.type_id() == kind.type_id() {
                 Ok(token)
             } else {
-                self.lexer.syntax_error(format!("Expected {:?}, got {:?}", kind, token.kind))
+                self.lexer
+                    .syntax_error(format!("Expected {:?}, got {:?}", kind, token.kind))
             }
-        )
+        })
     }
 
     fn require_next(&mut self) -> Result<Token, SyntaxError> {
         match self.lexer.next() {
             Some(result) => result,
-            None => self.lexer.syntax_error(String::from("Unexpected end of file")),
+            None => self
+                .lexer
+                .syntax_error(String::from("Unexpected end of file")),
         }
     }
 
+    #[allow(clippy::unnecessary_wraps)] // Convenience wrapper
     fn node_result(&self, name: String, val: NodeVal) -> Result<Node, SyntaxError> {
         Ok(self.make_node(name, val))
     }
 
     fn make_node(&self, name: String, val: NodeVal) -> Node {
         Node {
-            name: String::from(name),
+            name,
             val,
         }
     }
 }
 
-impl <'a> Iterator for Parser<'a> {
+impl<'a> Iterator for Parser<'a> {
     type Item = Result<Node, SyntaxError>;
 
     fn next(&mut self) -> Option<Result<Node, SyntaxError>> {