From e0e1bc83400fcde45c7b3d20bd10f9e3a73362da Mon Sep 17 00:00:00 2001
From: Yandrik
Date: Mon, 29 Nov 2021 23:43:52 +0100
Subject: [PATCH] refactor: auto re-formatted the code

---
 src/errors.rs        |  7 -------
 src/lexer/mod.rs     |  5 +++--
 src/lexer/tokens.rs  | 11 +++++------
 src/lib.rs           |  6 +++---
 src/main.rs          |  2 +-
 src/parser/errors.rs |  1 +
 src/parser/mod.rs    |  8 ++++----
 7 files changed, 17 insertions(+), 23 deletions(-)
 delete mode 100644 src/errors.rs

diff --git a/src/errors.rs b/src/errors.rs
deleted file mode 100644
index 39dd0e2..0000000
--- a/src/errors.rs
+++ /dev/null
@@ -1,7 +0,0 @@
-use thiserror::Error;
-
-#[derive(Error, Debug)]
-pub enum CalculatorErrors {
-    #[error("IOError")]
-    IOError,
-}
diff --git a/src/lexer/mod.rs b/src/lexer/mod.rs
index b54a144..90b5e81 100644
--- a/src/lexer/mod.rs
+++ b/src/lexer/mod.rs
@@ -52,7 +52,8 @@ impl Lexer {
                     .iter()
                     .take(5)
                     .collect(),
-                pos: self.pos })
+                pos: self.pos,
+            })
         } else {
             // If at the end of the input
             Ok(None)
@@ -136,4 +137,4 @@ mod test {
         assert!(matches!(lexer.next(), Ok(Some(Token::CBR(TokenMeta { pos: 36 })))));
         assert!(matches!(lexer.next(), Ok(None)));
     }
-    }
+}
diff --git a/src/lexer/tokens.rs b/src/lexer/tokens.rs
index f20370c..a24fd23 100644
--- a/src/lexer/tokens.rs
+++ b/src/lexer/tokens.rs
@@ -1,4 +1,3 @@
-
 /// # Token Metadata
 /// Data contained is:
 /// * File that the token was parsed in
@@ -27,7 +26,7 @@ impl OpType {
             '+' => Some(OpType::ADD),
             '-' => Some(OpType::SUB),
             '^' => Some(OpType::POW),
-            _ => None,
+            _ => None,
         }
     }
 }
@@ -47,8 +46,8 @@ pub enum BrType {
 /// 1. `OP`: An operation. Containing an [Operation Type](OpType).
 #[derive(Debug, Clone)]
 pub enum Token {
-    ID (TokenMeta, f64),
-    OBR (TokenMeta),
-    CBR (TokenMeta),
-    OP (TokenMeta, OpType),
+    ID(TokenMeta, f64),
+    OBR(TokenMeta),
+    CBR(TokenMeta),
+    OP(TokenMeta, OpType),
 }
diff --git a/src/lib.rs b/src/lib.rs
index e87a07f..b83216d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,9 +1,9 @@
-mod lexer;
-mod parser;
-
 pub use lexer::errors as lexer_errors;
 pub use parser::errors as parser_errors;
 
+mod lexer;
+mod parser;
+
 pub fn calculate(expression: &str) -> Result {
     Ok(parser::parse(lexer::Lexer::new(expression))?)
 }
diff --git a/src/main.rs b/src/main.rs
index 6047c9a..d9a03de 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,5 +1,6 @@
 use std::io;
 use std::io::Write;
+
 use s5_cb_calculator::calculate;
 
 fn main() {
@@ -17,5 +18,4 @@
             Err(err) => println!("Expression couldn't be parsed: {:?}", err)
         }
     }
-
 }
diff --git a/src/parser/errors.rs b/src/parser/errors.rs
index 342643d..2c7d815 100644
--- a/src/parser/errors.rs
+++ b/src/parser/errors.rs
@@ -1,4 +1,5 @@
 use thiserror::Error;
+
 use crate::lexer::errors::LexerErrors;
 use crate::lexer::tokens::Token;
 
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
index 98ff27d..8a4a17c 100644
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -65,7 +65,7 @@ fn a_proc(tbox: &mut TokenBox) -> Result {
         Some(token) => {
             tbox.regress();
             Ok(result)
-        },
+        }
     }
 }
 
@@ -79,7 +79,7 @@ fn m_proc(tbox: &mut TokenBox) -> Result {
         Some(token) => {
             tbox.regress();
             Ok(result)
-        },
+        }
     }
 }
 
@@ -92,7 +92,7 @@ fn g_proc(tbox: &mut TokenBox) -> Result {
         Some(token) => {
             tbox.regress();
             Ok(result)
-        },
+        }
     }
 }
 
@@ -105,7 +105,7 @@ fn p_proc(tbox: &mut TokenBox) -> Result {
                 Token::CBR(_) => Ok(result),
                 token => Err(UnexpectedTokenError(token.clone(), String::from("P"))),
             }
-        },
+        }
         Token::ID(_, val) => Ok(*val),
         token => Err(ParserErrors::UnexpectedTokenError(token.clone(), String::from("P"))),
     }
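
For reviewers trying the patch locally, a minimal usage sketch of the refactored public API follows. It mirrors the call pattern already present in src/main.rs; the crate name s5_cb_calculator, the calculate signature, and the operators used in the expression are taken from the diff, while the exact Result type parameters of calculate are not visible in this patch and are therefore only assumed here.

```rust
// Hedged usage sketch, not part of the patch: consuming the refactored
// `calculate` entry point from src/lib.rs the same way src/main.rs does.
// The concrete Ok/Err types of `calculate` are not shown in the diff, so
// this only assumes they implement Debug (as the existing main.rs implies).
use s5_cb_calculator::calculate;

fn main() {
    // Only tokens visible in the diff are used: numbers, '+', '-', '^', brackets.
    let expression = "(1 + 2) ^ 2 - 3";
    match calculate(expression) {
        Ok(result) => println!("{} = {:?}", expression, result),
        Err(err) => println!("Expression couldn't be parsed: {:?}", err),
    }
}
```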