Compare commits

...

5 Commits

7 changed files with 20 additions and 28 deletions

Cargo.lock (generated)

@@ -3,8 +3,8 @@
 version = 3
 
 [[package]]
-name = "OverComplicatedCalculator"
-version = "0.1.0"
+name = "over_complicated_calculator"
+version = "1.0.0"
 dependencies = [
  "thiserror",
 ]

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
-name = "OverComplicatedCalculator"
-version = "0.1.0"
+name = "over_complicated_calculator"
+version = "1.0.0"
 edition = "2018"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
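
Note: Cargo package names are conventionally snake_case; with the old CamelCase name, rustc prints a "crate should have a snake case name" warning on every build. The rename also changes the path downstream code imports from, which is why src/main.rs below updates its use declaration. A minimal sketch of the consumer side; calculate's exact signature is not shown in this diff, so the Result-returning form here is an assumption:

    // After the rename, the library is imported under its snake_case name.
    use over_complicated_calculator::calculate;

    fn main() {
        // Assumption: calculate takes the expression text and returns a
        // Result whose error type implements Debug.
        match calculate("2+5/3*2") {
            Ok(value) => println!("= {}", value),
            Err(err) => eprintln!("error: {:?}", err),
        }
    }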

src/lexer/fsm.rs

@@ -2,6 +2,7 @@ use std::str::FromStr;
 use crate::lexer::tokens::{OpType, Token, TokenMeta};
 
+#[allow(dead_code)]
 const STATES: [i32; 6] = [1, 2, 3, 4, 5, 6];
 const FINAL_STATES: [i32; 4] = [2, 3, 4, 5];
 const ERROR_STATE: i32 = 6;
@@ -31,6 +32,7 @@ pub struct FSM {
     last: i32,
 }
 
+#[allow(dead_code)]
 impl FSM {
     #[inline]
     pub fn new() -> FSM {
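
Note: #[allow(dead_code)] silences the dead-code lint for exactly the item it annotates; on an impl block it also covers the methods inside, which is why a single attribute suffices for each block above. A minimal sketch of that scoping, with hypothetical names:

    // The attribute covers the item it sits on, nothing else.
    #[allow(dead_code)]
    const UNUSED_TABLE: [i32; 3] = [1, 2, 3]; // no warning

    #[allow(dead_code)]
    struct Machine;

    #[allow(dead_code)]
    impl Machine {
        fn never_called(&self) {} // covered by the impl-level allow
    }

    fn main() {}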

src/lexer/mod.rs

@@ -1,14 +1,8 @@
 use std::collections::VecDeque;
-use std::fs::File;
-use std::path::Path;
-use std::str::{Chars, FromStr};
-use tokens::Token;
 use crate::lexer::errors::LexerErrors;
-use crate::lexer::errors::LexerErrors::EmptyTextSequenceError;
-use crate::lexer::fsm::{FSM, get_token};
-use crate::lexer::tokens::{OpType, TokenMeta};
+use crate::lexer::fsm::FSM;
+use crate::lexer::tokens::TokenMeta;
 
 pub mod errors;
 pub mod tokens;
@@ -90,6 +84,8 @@ impl Lexer {
 #[warn(dead_code)]
 #[cfg(test)]
 mod test {
+    use tokens::OpType;
+    use super::*;
 
     #[test]
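
Note: imports that only the tests use belong inside the #[cfg(test)] module: the whole module, imports included, is compiled only for cargo test, so regular builds can't raise unused_imports warnings for them. A minimal sketch of the pattern, with hypothetical names:

    pub fn double(x: i32) -> i32 {
        x * 2
    }

    #[cfg(test)]
    mod test {
        // Compiled only under `cargo test`, so this import can never
        // trip unused_imports in a normal build.
        use super::*;

        #[test]
        fn doubles() {
            assert_eq!(double(2), 4);
        }
    }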

src/lexer/tokens.rs

@@ -31,13 +31,6 @@ impl OpType {
     }
 }
 
-/// Bracket types, either OPEN or CLOSE.
-#[derive(Debug)]
-pub enum BrType {
-    OPEN,
-    CLOSE,
-}
-
 /// # Tokens
 /// The tokens all contain [metadata](TokenMeta).
 /// 1. `ID`: A number, parsed into 64 bit floating-point.
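
Note: deleting the unused BrType enum by hand is necessary because the dead_code lint treats pub items of a library crate as reachable from outside and stays quiet about them. A minimal lib.rs sketch of that asymmetry, with hypothetical names:

    // Exported from a library crate: dead_code does not warn, even if
    // nothing in the crate uses it.
    pub enum BrKind { Open, Close }

    // Private and never constructed: rustc would warn without the allow.
    #[allow(dead_code)]
    enum Internal { A }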

src/main.rs

@@ -1,7 +1,7 @@
 use std::io;
 use std::io::Write;
-use OverComplicatedCalculator::calculate;
+use over_complicated_calculator::calculate;
 
 fn main() {
     println!("Hi there! Welcome to OverComplicatedCalculator v1.0!");

src/parser.rs

@@ -41,14 +41,14 @@ fn expect_token(maybe_token: &Option<Token>) -> Result<&Token> {
     }
 }
 
-pub fn parse(mut lexer: lexer::Lexer) -> Result<f64> {
+pub fn parse(lexer: lexer::Lexer) -> Result<f64> {
     let mut tbox = TokenBox { lexer, cur_token: None, regress: false };
     s_proc(&mut tbox)
 }
 
 pub fn s_proc(tbox: &mut TokenBox) -> Result<f64> {
     println!("Proc S");
-    let mut result = a_proc(tbox)?;
+    let result = a_proc(tbox)?;
     match tbox.read_token()? {
         None => Ok(result),
         Some(token) => Err(ParserErrors::UnexpectedTokenError(token.clone(), String::from("S"))),
@@ -57,12 +57,12 @@ pub fn s_proc(tbox: &mut TokenBox) -> Result<f64> {
 
 fn a_proc(tbox: &mut TokenBox) -> Result<f64> {
     println!("Proc A");
-    let mut result = m_proc(tbox)?;
+    let result = m_proc(tbox)?;
     match &tbox.read_token()? {
         Some(Token::OP(_, OpType::ADD)) => Ok(result + a_proc(tbox)?),
         Some(Token::OP(_, OpType::SUB)) => Ok(result - a_proc(tbox)?),
         None => Ok(result),
-        Some(token) => {
+        Some(_) => {
             tbox.regress();
             Ok(result)
         }
@@ -71,12 +71,12 @@ fn a_proc(tbox: &mut TokenBox) -> Result<f64> {
 
 fn m_proc(tbox: &mut TokenBox) -> Result<f64> {
     println!("Proc M");
-    let mut result = g_proc(tbox)?;
+    let result = g_proc(tbox)?;
     match &tbox.read_token()? {
         Some(Token::OP(_, OpType::MUL)) => Ok(result * m_proc(tbox)?),
         Some(Token::OP(_, OpType::DIV)) => Ok(result / m_proc(tbox)?),
         None => Ok(result),
-        Some(token) => {
+        Some(_) => {
             tbox.regress();
             Ok(result)
         }
@@ -85,11 +85,11 @@ fn m_proc(tbox: &mut TokenBox) -> Result<f64> {
 
 fn g_proc(tbox: &mut TokenBox) -> Result<f64> {
     println!("Proc G");
-    let mut result = p_proc(tbox)?;
+    let result = p_proc(tbox)?;
     match tbox.read_token()? {
         Some(Token::OP(_, OpType::POW)) => Ok(result.powf(g_proc(tbox)?)),
         None => Ok(result),
-        Some(token) => {
+        Some(_) => {
             tbox.regress();
             Ok(result)
         }
@@ -123,6 +123,7 @@ mod test {
         Ok(())
     }
 
+    //noinspection ALL
    #[test]
    fn skript_formula_1() -> Result<()> {
        let res = parse(lexer::Lexer::new("2+5/3*2"))?;
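
Note: the let mut result -> let result changes fix unused_mut warnings: each proc binds its subresult once and combines it with a recursive call inside the match, so the binding is never reassigned; likewise Some(_) replaces the unused token binding in the fall-through arms. For context, the procs form a recursive-descent chain (s_proc -> a_proc for + and -, m_proc for * and /, g_proc for ^), and the tail recursion makes each operator group right-associative. A minimal sketch of the same layering over a simplified character input; every name here is hypothetical, not this repository's API:

    struct P<'a> { s: &'a [u8], i: usize }

    impl<'a> P<'a> {
        fn peek(&self) -> Option<u8> { self.s.get(self.i).copied() }

        // A ::= M (('+' | '-') A)?  -- lowest precedence, like a_proc
        fn a(&mut self) -> f64 {
            let left = self.m(); // bound once: no `mut` needed
            match self.peek() {
                Some(b'+') => { self.i += 1; left + self.a() }
                Some(b'-') => { self.i += 1; left - self.a() }
                _ => left, // unrecognized or absent token: consume nothing, like regress()
            }
        }

        // M ::= G (('*' | '/') M)?  -- like m_proc
        fn m(&mut self) -> f64 {
            let left = self.g();
            match self.peek() {
                Some(b'*') => { self.i += 1; left * self.m() }
                Some(b'/') => { self.i += 1; left / self.m() }
                _ => left,
            }
        }

        // G ::= digit  -- stands in for the g_proc/p_proc levels
        fn g(&mut self) -> f64 {
            let c = self.peek().expect("expected a digit");
            self.i += 1;
            f64::from(c - b'0')
        }
    }

    fn main() {
        let mut p = P { s: b"2+5/3*2", i: 0 };
        // Right-recursive, so 5/3*2 groups as 5/(3*2) here.
        println!("{}", p.a());
    }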