feat(*): lexer/parser WIP backup
parent a0c4282d71
commit 7d0528264d
.gitignore
@@ -3,10 +3,5 @@
# will have compiled files and executables
/target/

# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock

# These are backup files generated by rustfmt
**/*.rs.bk

Cargo.lock
@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "computorv1"
version = "0.1.0"

Cargo.toml
@@ -0,0 +1,8 @@
[package]
name = "computorv1"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]

src/lib.rs
@@ -0,0 +1,874 @@
use std::error::Error;

#[derive(Debug, PartialEq, Clone)]
enum Token {
    Number(String),
    Variable(String),
    ImaginaryUnit(),
    Addition(),
    Multiplication(),
    Substraction(),
    Division(),
    Modulo(),
    Exponentiation(),
    Equal(),
    OpenParenthesis(),
    CloseParenthesis(),
}

#[derive(Debug, PartialEq)]
enum TokenType {
    Number(),
    Variable(),
    ImaginaryUnit(),
    Operator(),
    Substraction(),
    OpenParenthesis(),
    CloseParenthesis(),
    Equal(),
}

struct Rational {
    numerator: i128,
    denominator: i128,
}

struct GaussianRational {
    real: Rational,
    imaginary: Rational,
}
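
// Editor's illustrative sketch, not part of this commit: numbers are lexed as raw
// strings and presumably only converted later into exact values (Rational and
// GaussianRational are declared here but not used yet). The tokenize_number_variable
// test further down pins the expected shape, e.g. for the input "23i":
//
//     let expected = vec![Token::Number(String::from("23")), Token::ImaginaryUnit()];
//     assert_eq!(tokenize("23i").unwrap(), expected);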

/*
fn tokenize(query: &str) -> Result<Vec<Token>, Box<dyn Error>> {
    let mut tokens: Vec<Token> = vec![];

    for token in query.split(" ") {
        tokens.push(match token {
            "*" => Token::Multiply(),
            "+" => Token::Add(),
            "-" => Token::Substract(),
            "=" => Token::Equal(),
            _ if token.starts_with("X^") => Token::Exponent(token[2..].parse()?),
            _ => Token::Number(token.parse()?),
        });
    }

    Ok(tokens)
}
*/

fn check_number(my_string: String, i: usize) -> Result<Token, String> {
    println!("Checking number at index {i}");
    if my_string.as_str() == "." {
        return Err(format!("unexpected token: `.` at position {i}"));
    }
    let find = my_string.find(".");
    if let Some(j) = find {
        let find = my_string[j+1..].find(".");
        if let Some(_) = find {
            return Err(format!("unexpected token: `{my_string}` at position {i}"))
        }
    }
    Ok(Token::Number(my_string))
}
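
// Editor's note, not part of this commit: check_number accepts any numeric string
// with at most one dot, so "123", "123.456", ".123456" and "123456." all pass (see
// the tokenize_number_* tests below), while "." alone and anything with two dots
// such as "12.34.56" are rejected:
//
//     assert!(check_number(String::from("123.456"), 0).is_ok());
//     assert!(check_number(String::from("12.34.56"), 0).is_err());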

fn tokenize(query: &str) -> Result<Vec<Token>, String> {
    let mut tokens: Vec<Token> = vec![];
    let mut my_string = String::new();
    let mut is_last_number = false;
    let mut is_last_variable = false;

    for (i, token) in query.chars().enumerate() {
        let mut is_still_number = false;
        let mut is_still_variable = false;
        let mut is_pop_needed = true;
        match token {
            '+' => tokens.push(Token::Addition()),
            '-' => tokens.push(Token::Substraction()),
            '*' => tokens.push(Token::Multiplication()),
            '/' => tokens.push(Token::Division()),
            '%' => tokens.push(Token::Modulo()),
            '^' => tokens.push(Token::Exponentiation()),
            '=' => tokens.push(Token::Equal()),
            '(' => tokens.push(Token::OpenParenthesis()),
            ')' => tokens.push(Token::CloseParenthesis()),
            'a'..='z' | 'A'..='Z' => {
                if is_last_number == true {
                    let mut tmp_token = None;
                    if is_pop_needed {
                        tmp_token = tokens.pop();
                    }
                    tokens.push(check_number(my_string, i)?);
                    if let Some(tok) = tmp_token {
                        tokens.push(tok);
                    }
                    is_last_number = false;
                    my_string = String::new();
                }
                if is_last_variable == false {
                    is_last_variable = true;
                }
                is_still_variable = true;
                my_string += &token.to_string();
            },
            '0'..='9' | '.' => {
                if is_last_variable == true {
                    let mut tmp_token = None;
                    if is_pop_needed {
                        tmp_token = tokens.pop();
                    }
                    match my_string.as_str() {
                        "i" => tokens.push(Token::ImaginaryUnit()),
                        _ => tokens.push(Token::Variable(my_string)),
                    }
                    if let Some(tok) = tmp_token {
                        tokens.push(tok);
                    }
                    is_last_variable = false;
                    my_string = String::new();
                }
                if is_last_number == false {
                    is_last_number = true;
                }
                is_still_number = true;
                my_string += &token.to_string();
            },
            ' ' => is_pop_needed = false,
            _ => return Err(format!("unexpected token: `{token}` at position {i}")),
        };
        if is_last_variable && !is_still_variable {
            let mut tmp_token = None;
            if is_pop_needed {
                tmp_token = tokens.pop();
            }
            match my_string.as_str() {
                "i" => tokens.push(Token::ImaginaryUnit()),
                _ => tokens.push(Token::Variable(my_string)),
            }
            if let Some(tok) = tmp_token {
                tokens.push(tok);
            }
            is_last_variable = false;
            my_string = String::new();
        }
        else if is_last_number && !is_still_number {
            let mut tmp_token = None;
            if is_pop_needed {
                tmp_token = tokens.pop();
            }
            tokens.push(check_number(my_string, i)?);
            if let Some(tok) = tmp_token {
                tokens.push(tok);
            }
            is_last_number = false;
            my_string = String::new();
        }
    }
    if is_last_variable {
        match my_string.as_str() {
            "i" => tokens.push(Token::ImaginaryUnit()),
            _ => tokens.push(Token::Variable(my_string)),
        }
    }
    else if is_last_number {
        tokens.push(check_number(my_string, query.len())?);
    }

    Ok(tokens)
}
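
// Editor's illustrative trace, not part of this commit: single-character operators
// are pushed immediately, while digits and letters accumulate in my_string; the
// pop/re-push dance then moves an operator back behind the Number/Variable token it
// interrupted. For "X^2" this should yield, if I read that logic correctly:
//
//     assert_eq!(
//         tokenize("X^2").unwrap(),
//         vec![Token::Variable(String::from("X")), Token::Exponentiation(), Token::Number(String::from("2"))],
//     );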

fn get_token_type(token: &Token) -> TokenType {
    match token {
        Token::Number(_) => TokenType::Number(),
        Token::Variable(_) => TokenType::Variable(),
        Token::ImaginaryUnit() => TokenType::ImaginaryUnit(),
        Token::OpenParenthesis() => TokenType::OpenParenthesis(),
        Token::CloseParenthesis() => TokenType::CloseParenthesis(),
        Token::Substraction() => TokenType::Substraction(),
        Token::Equal() => TokenType::Equal(),
        _ => TokenType::Operator(),
    }
}

fn sanitize_tokens(tokens: Vec<Token>) -> Result<Vec<Token>, String> {
    let mut sanitized_tokens: Vec<Token> = vec![];
    let mut last_token_type = None;
    let mut open_close_equal_count = (0, 0, 0);

    for token in tokens {
        let token_type = get_token_type(&token);
        match token_type {
            TokenType::OpenParenthesis() => open_close_equal_count.0 += 1,
            TokenType::CloseParenthesis() => open_close_equal_count.1 += 1,
            TokenType::Equal() => open_close_equal_count.2 += 1,
            _ => (),
        }
        if let Some(last_token_type) = last_token_type {
            match (&last_token_type, &token_type) {
                (TokenType::Number(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
                (TokenType::Number(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
                (TokenType::Number(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
                (TokenType::Variable(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
                (TokenType::ImaginaryUnit(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
                (TokenType::CloseParenthesis(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
                (TokenType::CloseParenthesis(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
                (TokenType::CloseParenthesis(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
                (TokenType::CloseParenthesis(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
                (TokenType::OpenParenthesis(), TokenType::Operator()) => return Err(format!("Error at token {:?}, operator forbidden directly after opening parenthesis", token)),
                (TokenType::Operator(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, all operators are forbidden directly before closing parenthesis", token)),
                (TokenType::Substraction(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, all operators are forbidden directly before closing parenthesis", token)),
                (TokenType::OpenParenthesis(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, empty parentheses", token)),
                (TokenType::OpenParenthesis(), TokenType::OpenParenthesis()) => (),
                (TokenType::CloseParenthesis(), TokenType::CloseParenthesis()) => (),
                _ => {
                    if token_type == last_token_type {
                        return Err(format!("Error at token {:?}, two incompatible token types in a row", token));
                    }
                },
            }
        } else if token_type == TokenType::Operator() {
            return Err(format!("Error at token {:?}, query can't start with an operator", token));
        }
        sanitized_tokens.push(token);

        last_token_type = Some(token_type);
    }
    Ok(sanitized_tokens)
}
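
// Editor's illustrative sketch, not part of this commit: besides rejecting malformed
// sequences, sanitize_tokens inserts the implicit multiplications; the
// open/close/equal counters are tallied but do not appear to be checked yet in this
// WIP state. The insert_multiplication test below expects, for example:
//
//     let tokens = vec![Token::Number(String::from("123")), Token::Variable(String::from("x"))];
//     let sanitized = sanitize_tokens(tokens).unwrap();
//     // sanitized == [Number("123"), Multiplication(), Variable("x")]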

/*
pub fn parse(query: &str) -> Result<Vec<f64>, Box<dyn Error>> {
    let tokens = tokenize(query)?;

    let mut degree: Option<usize> = None;
    for token in tokens {
        match token {
            Token::Exponent(n) => {
                if degree == None || degree < Some(n) {
                    degree = Some(n);
                }
            }
            _ => (),
        }
    }

    let mut results: Vec<f64> = vec![0.; degree.unwrap() + 1];
    let tokens = tokenize(query)?;
    let mut sign = 1.;
    let mut left = 1.;
    let mut constant = 0.;
    for token in tokens {
        match token {
            Token::Add() => sign = 1.,
            Token::Substract() => sign = -1.,
            Token::Multiply() => (),
            Token::Equal() => {
                left = -1.;
                sign = 1.
            },
            Token::Number(n) => constant = n,
            Token::Exponent(e) => results[e] += sign * left * constant,
        }
    }

    Ok(results)
}
*/

pub fn parse(query: &str) -> Result<Vec<f64>, Box<dyn Error>> {
    let tokens = tokenize(query)?;
    let sanitized_tokens = sanitize_tokens(tokens)?;
    //let ast = build_ast(tokens);

    println!("{:?}", tokenize(query));

    Ok(vec![])
}

fn print_reduced_form(equation: &Vec<f64>) {
    let mut string = String::from("Reduced form: ");

    for (i, n) in equation.iter().enumerate() {
        let mut n = *n;
        if n < 0. {
            string += " - ";
            n *= -1.;
        } else if i != 0 {
            string += " + ";
        }
        string.push_str(&n.to_string());
        string.push_str(" * X^");
        string.push_str(&i.to_string());
    }
    string += " = 0";

    println!("{string}");
}
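
// Editor's illustrative note, not part of this commit: with the coefficient vector
// ordered by degree (index 0 = constant term), an input such as vec![2., 3.] should
// print, if I read the formatting right:
//
//     Reduced form: 2 * X^0 + 3 * X^1 = 0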

fn sqrt(n: f64) -> f64 {
    let mut z = 1.;

    for _ in 0..10 {
        z -= (z * z - n) / (2. * z);
    }

    z
}
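
// Editor's note, not part of this commit: this is Newton's method on f(z) = z^2 - n,
// i.e. z_{k+1} = z_k - (z_k^2 - n) / (2 * z_k), run for a fixed 10 iterations from
// z = 1. For moderate inputs that is plenty; for n = 2 the iterates go
// 1 -> 1.5 -> 1.41666... -> 1.4142157 -> ..., so something like
//
//     assert!((sqrt(2.) - 1.4142135623730951).abs() < 1e-9);
//
// should hold, while very large n would need more iterations or a better starting guess.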

fn solve_degree_0(equation: Vec<f64>) {
    if equation[0] == 0. {
        println!("Each real number is a solution.");
    } else {
        println!("There are no solutions to this equation");
    }
}

fn solve_degree_1(equation: Vec<f64>) {
    println!("The solution is:");
    println!("{}", -1. * equation[0] / equation[1]);
}

fn solve_degree_2(equation: Vec<f64>) {
    let delta = equation[1] * equation[1] - 4. * equation[2] * equation[0];

    if delta > 0. {
        let sqrt_delta = sqrt(delta);
        let x1 = (-equation[1] - sqrt_delta) / (2. * equation[2]);
        let x2 = (-equation[1] + sqrt_delta) / (2. * equation[2]);
        println!("Discriminant is strictly positive, the two solutions are:");
        println!("{x1}");
        println!("{x2}");
        //(-b +- sqrt(delta)) / 2a
    } else if delta < 0. {
        // delta is negative in this branch, so the imaginary part uses sqrt(-delta)
        let sqrt_delta = sqrt(-delta);
        let a = -equation[1] / (2. * equation[2]);
        let b = sqrt_delta / (2. * equation[2]);
        println!("Discriminant is strictly negative, the two complex solutions are:");
        println!("{a} + {b}i");
        println!("{a} - {b}i");
        //(-b +- i sqrt(-delta)) / 2a
    } else {
        let x = -equation[1] / (2. * equation[2]);
        println!("Discriminant is zero, the solution is:");
        println!("{x}");
        //-b / 2a
    }
}
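
// Editor's worked example, not part of this commit: the coefficient vector is
// [c, b, a] by degree, so x^2 - 3x + 2 = 0 is solve_degree_2(vec![2., -3., 1.]):
// delta = 9 - 8 = 1 > 0, giving x1 = (3 - 1) / 2 = 1 and x2 = (3 + 1) / 2 = 2.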

pub fn solve(equation: Vec<f64>) {
    let degree = equation.len() - 1;

    print_reduced_form(&equation);
    println!("Polynomial degree: {degree}");
    match degree {
        0 => solve_degree_0(equation),
        1 => solve_degree_1(equation),
        2 => solve_degree_2(equation),
        _ if degree > 2 => println!("The polynomial degree is strictly greater than 2, I can't solve."),
        _ => unreachable!(),
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tokenize_addition() {
        let query = "+";
        let result: Vec<Token> = vec![Token::Addition()];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_substraction() {
        let query = "-";
        let result: Vec<Token> = vec![Token::Substraction()];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_multiplication() {
        let query = "*";
        let result: Vec<Token> = vec![Token::Multiplication()];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_division() {
        let query = "/";
        let result: Vec<Token> = vec![Token::Division()];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_modulo() {
        let query = "%";
        let result: Vec<Token> = vec![Token::Modulo()];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_exponentiation() {
        let query = "^";
        let result: Vec<Token> = vec![Token::Exponentiation()];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_equal() {
        let query = "=";
        let result: Vec<Token> = vec![Token::Equal()];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_open_parenthesis() {
        let query = "(";
        let result: Vec<Token> = vec![Token::OpenParenthesis()];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_close_parenthesis() {
        let query = ")";
        let result: Vec<Token> = vec![Token::CloseParenthesis()];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_imaginary_unit() {
        let query = "i";
        let result: Vec<Token> = vec![Token::ImaginaryUnit()];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_variable() {
        let query = "variable";
        let result: Vec<Token> = vec![Token::Variable(String::from("variable"))];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_variable_double_i() {
        let query = "ii";
        let result: Vec<Token> = vec![Token::Variable(String::from("ii"))];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_number_natural() {
        let query = "123456";
        let result: Vec<Token> = vec![Token::Number(String::from("123456"))];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_number_rational() {
        let query = "123.456";
        let result: Vec<Token> = vec![Token::Number(String::from("123.456"))];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_number_point_something() {
        let query = ".123456";
        let result: Vec<Token> = vec![Token::Number(String::from(".123456"))];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_number_trailing_point() {
        let query = "123456.";
        let result: Vec<Token> = vec![Token::Number(String::from("123456."))];

        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    fn tokenize_number_variable() {
        let query = "23x";
        let result: Vec<Token> = vec![Token::Number(String::from("23")), Token::Variable(String::from("x"))];
        assert_eq!(tokenize(query).unwrap(), result);

        let query = "23i";
        let result: Vec<Token> = vec![Token::Number(String::from("23")), Token::ImaginaryUnit()];
        assert_eq!(tokenize(query).unwrap(), result);

        let query = "x23";
        let result: Vec<Token> = vec![Token::Variable(String::from("x")), Token::Number(String::from("23"))];
        assert_eq!(tokenize(query).unwrap(), result);

        let query = "i23";
        let result: Vec<Token> = vec![Token::ImaginaryUnit(), Token::Number(String::from("23"))];
        assert_eq!(tokenize(query).unwrap(), result);
    }

    #[test]
    #[should_panic]
    fn tokenize_number_double_point() {
        let query = "12.34.56";
        tokenize(query).unwrap();
    }

    #[test]
    #[should_panic]
    fn tokenize_number_double_point_in_a_row() {
        let query = "123..456";
        tokenize(query).unwrap();
    }

    #[test]
    #[should_panic]
    fn tokenize_number_only_point() {
        let query = ".";
        tokenize(query).unwrap();
    }

    #[test]
    #[should_panic]
    fn tokenize_invalid_token() {
        let query = "324*43224+243_+234=234";
        tokenize(query).unwrap();
    }

    mod sanitize_tokens {
        use super::*;

        fn plus() -> Token {
            Token::Addition()
        }

        fn minus() -> Token {
            Token::Substraction()
        }

        fn times() -> Token {
            Token::Multiplication()
        }

        fn divided_by() -> Token {
            Token::Division()
        }

        fn modulo() -> Token {
            Token::Modulo()
        }

        fn equals() -> Token {
            Token::Equal()
        }

        fn x() -> Token {
            Token::Variable(String::from("x"))
        }

        fn i() -> Token {
            Token::ImaginaryUnit()
        }

        fn number() -> Token {
            Token::Number(String::from("123"))
        }

        fn open() -> Token {
            Token::OpenParenthesis()
        }

        fn close() -> Token {
            Token::CloseParenthesis()
        }

        #[test]
        #[should_panic]
        fn double_number() {
            let tokens = vec![number(), number()];
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn double_operator() {
            let tokens = vec![number(), plus(), divided_by()];
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn double_variable() {
            let tokens = vec![x(), x()];
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn double_imaginary_unit() {
            let tokens = vec![i(), i()];
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn sanitize_tokens_starts_with_operator() {
            let tokens = vec![plus(), number(), divided_by(), x()];
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        fn starts_with_negative_number() {
            let tokens = vec![minus(), number(), divided_by(), x()];
            let results = tokens.clone();
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);
        }

        #[test]
        fn number_operator_variable() {
            let tokens = vec![number(), divided_by(), x()];
            let results = tokens.clone();
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);
        }

        #[test]
        fn insert_multiplication() {
            let tokens = vec![number(), x()];
            let results = vec![number(), times(), x()];
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);

            let tokens = vec![number(), i()];
            let results = vec![number(), times(), i()];
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);
        }

        #[test]
        fn insert_multiplication_parenthesis() {
            let tokens = vec![open(), number(), modulo(), number(), close(), open(), i(), times(), x(), close()];
            let results = vec![open(), number(), modulo(), number(), close(), times(), open(), i(), times(), x(), close()];
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);

            let tokens = vec![number(), open(), number(), modulo(), number(), close()];
            let results = vec![number(), times(), open(), number(), modulo(), number(), close()];
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);

            let tokens = vec![i(), open(), number(), modulo(), number(), close()];
            let results = vec![i(), times(), open(), number(), modulo(), number(), close()];
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);

            let tokens = vec![x(), open(), number(), modulo(), number(), close()];
            let results = vec![x(), times(), open(), number(), modulo(), number(), close()];
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);

            let tokens = vec![open(), number(), modulo(), number(), close(), number()];
            let results = vec![open(), number(), modulo(), number(), close(), times(), number()];
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);

            let tokens = vec![open(), number(), modulo(), number(), close(), i()];
            let results = vec![open(), number(), modulo(), number(), close(), times(), i()];
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);

            let tokens = vec![open(), number(), modulo(), number(), close(), x()];
            let results = vec![open(), number(), modulo(), number(), close(), times(), x()];
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);

            let tokens = vec![number(), open(), number(), modulo(), number(), close(), x()];
            let results = vec![number(), times(), open(), number(), modulo(), number(), close(), times(), x()];
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);
        }

        #[test]
        #[should_panic]
        fn parenthesis_operator() {
            let tokens = vec!(open(), plus(), number(), close());
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn operator_parenthesis() {
            let tokens = vec!(open(), number(), modulo(), close());
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn minus_parenthesis() {
            let tokens = vec!(open(), number(), minus(), close());
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn empty_parentheses() {
            let tokens = vec!(open(), close());
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        fn parenthesis_minus() {
            let tokens = vec!(open(), minus(), number(), close());
            let results = tokens.clone();
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);
        }

        #[test]
        fn double_parentheses() {
            let tokens = vec!(open(), open(), number(), plus(), number(), close(), plus(), number(), close());
            let results = tokens.clone();
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);

            let tokens = vec!(open(), number(), plus(), open(), number(), plus(), number(), close(), close());
            let results = tokens.clone();
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);

            let tokens = vec!(open(), open(), number(), plus(), number(), close(), number(), close());
            let results = vec!(open(), open(), number(), plus(), number(), close(), times(), number(), close());
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);

            let tokens = vec!(open(), number(), open(), number(), plus(), number(), close(), close());
            let results = vec!(open(), number(), times(), open(), number(), plus(), number(), close(), close());
            assert_eq!(sanitize_tokens(tokens).unwrap(), results);
        }

        #[test]
        #[should_panic]
        fn wrong_parentheses_count() {
            let tokens = vec![open(), number(), close(), close()];
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn wrong_parentheses_count_2() {
            let tokens = vec![open(), open(), number(), close()];
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn wrong_parentheses_count_hard() {
            let tokens = vec![open(), open(), number(), close(), equals(), x(), close()];
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn wrong_equal_count() {
            let tokens = vec![number(), equals(), number(), equals(), number()];
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn starts_with_equal() {
            let tokens = vec![equals(), number(), plus(), number()];
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn ends_with_equal() {
            let tokens = vec![number(), plus(), number(), equals()];
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn ends_with_minus() {
            let tokens = vec![number(), plus(), number(), minus()];
            sanitize_tokens(tokens).unwrap();
        }

        #[test]
        #[should_panic]
        fn ends_with_operator() {
            let tokens = vec![number(), plus(), number(), divided_by()];
            sanitize_tokens(tokens).unwrap();
        }
    }

    /*
    #[test]
    fn tokenize_exponent() {
        let query = "X^3";
        let result: Vec<Token> = vec![Token::Exponent(3)];

        assert_eq!(tokenize(query).unwrap(), result);
    }
    */

    /*
    #[test]
    fn tokenize_constant() {
        let query = "6.964";
        let result: Vec<Token> = vec![Token::Number(6.964)];

        assert_eq!(tokenize(query).unwrap(), result);
    }
    */

    /*
    #[test]
    fn tokenize_complex() {
        let query = "8 * X^0 - 6 * X^1 + 0 * X^2 - 5.6 * X^3 = 3 * X^0";
        let result: Vec<Token> = vec![
            Token::Number(8.), Token::Multiplication(), Token::Exponentiation(),
            Token::Substract(),
            Token::Number(6.), Token::Multiply(), Token::Exponentiation(),
            Token::Add(),
            Token::Number(0.), Token::Multiply(), Token::Exponent(2),
            Token::Substract(),
            Token::Number(5.6), Token::Multiply(), Token::Exponent(3),
            Token::Equal(),
            Token::Number(3.), Token::Multiply(), Token::Exponent(0)];

        assert_eq!(tokenize(query).unwrap(), result);
    }
    */

    #[test]
    fn parse_degree_0() {
        let query = "5 * X^0 = 3 * X^0";
        let result: Vec<f64> = vec![2.];

        assert_eq!(parse(query).unwrap(), result);
    }

    #[test]
    fn parse_degree_1() {
        let query = "5 * X^0 + 3 * X^1 = 3 * X^0";
        let result: Vec<f64> = vec![2., 3.];

        assert_eq!(parse(query).unwrap(), result);
    }

    #[test]
    fn parse_degree_2() {
        let query = "5 * X^0 + 6 * X^1 + 8 * X^2 = 3 * X^0 - 2 * X^2";
        let result = vec![2., 6., 10.];

        assert_eq!(parse(query).unwrap(), result);
    }

    #[test]
    fn parse_random_order() {
        let query = "9.3 * X^3 + 4.3 * X^0 + 3.4 * X^2 - 1.5 * X^3 - 13.12 * X^1 = 1.4 * X^2 - 5.1 * X^3 + 1.4 * X^1 -6.3 * X^0";
        let result = vec![10.6, -14.52, 2., 12.9];

        assert_eq!(parse(query).unwrap(), result);
    }
}

src/main.rs
@@ -0,0 +1,11 @@
use std::env;
use std::process;

fn main() {
    let args: Vec<String> = env::args().collect();
    let equation = computorv1::parse(&args[1]).unwrap_or_else(|e| {
        println!("Error during parsing: {e}");
        process::exit(1);
    });
    computorv1::solve(equation);
}
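
Illustrative usage, not part of the commit: the binary expects the whole equation as a single quoted argument, in the same format the parse tests use, e.g.

    cargo run -- "5 * X^0 + 3 * X^1 = 3 * X^0"

Note that parse() is still a stub at this point (it returns an empty vector), so the follow-up call to solve() is not yet expected to produce a meaningful result.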