clean(parser::*): organize parser in modules

parent 7c13270089
commit 17dbc79624

src/lib.rs | 794
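The reorganized layout implied by the hunks below is roughly the following. This is an editorial sketch inferred from the new `use crate::parser::...` imports and the new-file hunks; the extracted diff does not show the new file names, so the exact paths are an assumption.

    src/lib.rs                -> pub mod parser; public parse() entry point
    src/parser/mod.rs         -> pub enum Token + submodule declarations
    src/parser/tokenizer.rs   -> pub fn tokenize(&str) -> Result<Vec<Token>, String>
    src/parser/sanitizer.rs   -> pub fn sanitize_tokens(Vec<Token>) -> Result<Vec<Token>, String>
    src/parser/ast_builder.rs -> pub fn build_ast(Vec<Token>) -> Node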
@@ -1,32 +1,9 @@
use std::error::Error;
use crate::parser::tokenizer::tokenize;
use crate::parser::sanitizer::sanitize_tokens;
use crate::parser::ast_builder::build_ast;

#[derive(Debug, PartialEq, Clone)]
enum Token {
    Number(String),
    Variable(String),
    ImaginaryUnit(),
    Addition(),
    Multiplication(),
    Substraction(),
    Division(),
    Modulo(),
    Exponentiation(),
    Equal(),
    OpenParenthesis(),
    CloseParenthesis(),
}

#[derive(Debug, PartialEq)]
enum TokenType {
    Number(),
    Variable(),
    ImaginaryUnit(),
    Operator(),
    Substraction(),
    OpenParenthesis(),
    CloseParenthesis(),
    Equal(),
}
pub mod parser;

struct _Rational {
    numerator: i128,
|
@@ -38,232 +15,13 @@ struct _GaussianRational {
|
|||
imaginary: _Rational,
|
||||
}
|
||||
|
||||
fn check_number(my_string: String, i: usize) -> Result<Token, String> {
|
||||
println!("Checking number at index {i}");
|
||||
if my_string.as_str() == "." {
|
||||
return Err(format!("unexpected token: `.` at position {i}"));
|
||||
}
|
||||
let find = my_string.find(".");
|
||||
if let Some(j) = find {
|
||||
let find = my_string[j+1..].find(".");
|
||||
if let Some(_) = find {
|
||||
return Err(format!("unexpected token: `{my_string}` at position {i}"))
|
||||
}
|
||||
}
|
||||
Ok(Token::Number(my_string))
|
||||
}
|
||||
|
||||
fn tokenize(query: &str) -> Result<Vec<Token>, String> {
|
||||
let mut tokens: Vec<Token> = vec![];
|
||||
let mut my_string = String::new();
|
||||
let mut is_last_number = false;
|
||||
let mut is_last_variable = false;
|
||||
|
||||
for (i, token) in query.chars().enumerate() {
|
||||
let mut is_still_number = false;
|
||||
let mut is_still_variable = false;
|
||||
let mut is_pop_needed = true;
|
||||
match token {
|
||||
'+' => tokens.push(Token::Addition()),
|
||||
'-' => tokens.push(Token::Substraction()),
|
||||
'*' => tokens.push(Token::Multiplication()),
|
||||
'/' => tokens.push(Token::Division()),
|
||||
'%' => tokens.push(Token::Modulo()),
|
||||
'^' => tokens.push(Token::Exponentiation()),
|
||||
'=' => tokens.push(Token::Equal()),
|
||||
'(' => tokens.push(Token::OpenParenthesis()),
|
||||
')' => tokens.push(Token::CloseParenthesis()),
|
||||
'a'..='z' | 'A'..='Z' => {
|
||||
if is_last_number == true {
|
||||
let mut tmp_token = None;
|
||||
if is_pop_needed {
|
||||
tmp_token = tokens.pop();
|
||||
}
|
||||
tokens.push(check_number(my_string, i)?);
|
||||
if let Some(tok) = tmp_token {
|
||||
tokens.push(tok);
|
||||
}
|
||||
is_last_number = false;
|
||||
my_string = String::new();
|
||||
}
|
||||
if is_last_variable == false {
|
||||
is_last_variable = true;
|
||||
}
|
||||
is_still_variable = true;
|
||||
my_string += &token.to_string();
|
||||
},
|
||||
'0'..='9' | '.' => {
|
||||
if is_last_variable == true {
|
||||
let mut tmp_token = None;
|
||||
if is_pop_needed {
|
||||
tmp_token = tokens.pop();
|
||||
}
|
||||
match my_string.as_str() {
|
||||
"i" => tokens.push(Token::ImaginaryUnit()),
|
||||
_ => tokens.push(Token::Variable(my_string)),
|
||||
}
|
||||
if let Some(tok) = tmp_token {
|
||||
tokens.push(tok);
|
||||
}
|
||||
is_last_variable = false;
|
||||
my_string = String::new();
|
||||
}
|
||||
if is_last_number == false {
|
||||
is_last_number = true;
|
||||
}
|
||||
is_still_number = true;
|
||||
my_string += &token.to_string();
|
||||
},
|
||||
' ' => is_pop_needed = false,
|
||||
_ => return Err(format!("unexpected token: `{token}` at position {i}")),
|
||||
};
|
||||
if is_last_variable && !is_still_variable {
|
||||
let mut tmp_token = None;
|
||||
if is_pop_needed {
|
||||
tmp_token = tokens.pop();
|
||||
}
|
||||
match my_string.as_str() {
|
||||
"i" => tokens.push(Token::ImaginaryUnit()),
|
||||
_ => tokens.push(Token::Variable(my_string)),
|
||||
}
|
||||
if let Some(tok) = tmp_token {
|
||||
tokens.push(tok);
|
||||
}
|
||||
is_last_variable = false;
|
||||
my_string = String::new();
|
||||
}
|
||||
else if is_last_number && !is_still_number {
|
||||
let mut tmp_token = None;
|
||||
if is_pop_needed {
|
||||
tmp_token = tokens.pop();
|
||||
}
|
||||
tokens.push(check_number(my_string, i)?);
|
||||
if let Some(tok) = tmp_token {
|
||||
tokens.push(tok);
|
||||
}
|
||||
is_last_number = false;
|
||||
my_string = String::new();
|
||||
}
|
||||
}
|
||||
if is_last_variable {
|
||||
match my_string.as_str() {
|
||||
"i" => tokens.push(Token::ImaginaryUnit()),
|
||||
_ => tokens.push(Token::Variable(my_string)),
|
||||
}
|
||||
}
|
||||
else if is_last_number {
|
||||
tokens.push(check_number(my_string, query.len())?);
|
||||
}
|
||||
|
||||
Ok(tokens)
|
||||
}
|
||||
|
||||
fn get_token_type(token: &Token) -> TokenType {
|
||||
match token {
|
||||
Token::Number(_) => TokenType::Number(),
|
||||
Token::Variable(_) => TokenType::Variable(),
|
||||
Token::ImaginaryUnit() => TokenType::ImaginaryUnit(),
|
||||
Token::OpenParenthesis() => TokenType::OpenParenthesis(),
|
||||
Token::CloseParenthesis() => TokenType::CloseParenthesis(),
|
||||
Token::Substraction() => TokenType::Substraction(),
|
||||
Token::Equal() => TokenType::Equal(),
|
||||
_ => TokenType::Operator(),
|
||||
}
|
||||
}
|
||||
|
||||
fn sanitize_tokens(tokens: Vec<Token>) -> Result<Vec<Token>, String> {
|
||||
let mut sanitized_tokens: Vec<Token> = vec![];
|
||||
let mut last_token_type = None;
|
||||
let mut open_close_equal_count = (0, 0, 0);
|
||||
|
||||
for token in tokens {
|
||||
let token_type = get_token_type(&token);
|
||||
match token_type {
|
||||
TokenType::OpenParenthesis() => open_close_equal_count.0 += 1,
|
||||
TokenType::CloseParenthesis() => {
|
||||
open_close_equal_count.1 += 1;
|
||||
if open_close_equal_count.1 > open_close_equal_count.0 {
|
||||
return Err(format!("Error at token {:?}, closing parenthesis that was never opened", token));
|
||||
}
|
||||
},
|
||||
TokenType::Equal() => {
|
||||
open_close_equal_count.2 += 1;
|
||||
if open_close_equal_count.0 != open_close_equal_count.1 {
|
||||
return Err(format!("Error, query has a different number of opening and closing parentheses"));
|
||||
}
|
||||
open_close_equal_count.0 = 0;
|
||||
open_close_equal_count.1 = 0;
|
||||
},
|
||||
_ => (),
|
||||
}
|
||||
if let Some(last_token_type) = last_token_type {
|
||||
match (&last_token_type, &token_type) {
|
||||
(TokenType::Number(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::Number(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::Number(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::Variable(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::Variable(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::Variable(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::ImaginaryUnit(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::ImaginaryUnit(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::ImaginaryUnit(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::CloseParenthesis(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::CloseParenthesis(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::CloseParenthesis(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::CloseParenthesis(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::OpenParenthesis(), TokenType::Operator()) => return Err(format!("Error at token {:?}, operator forbidden directly after opening parenthesis", token)),
|
||||
(TokenType::Operator(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, all operators are forbidden directly before closing parenthesis", token)),
|
||||
(TokenType::Substraction(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, all operators are forbidden directly before closing parenthesis", token)),
|
||||
(TokenType::OpenParenthesis(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, empty parentheses", token)),
|
||||
(TokenType::Substraction(), TokenType::Equal()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
|
||||
(TokenType::Substraction(), TokenType::Operator()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
|
||||
(TokenType::Operator(), TokenType::Equal()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
|
||||
(TokenType::Operator(), TokenType::Substraction()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
|
||||
(TokenType::Equal(), TokenType::Operator()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
|
||||
(TokenType::OpenParenthesis(), TokenType::OpenParenthesis()) => (),
|
||||
(TokenType::CloseParenthesis(), TokenType::CloseParenthesis()) => (),
|
||||
_ => {
|
||||
if token_type == last_token_type {
|
||||
return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token));
|
||||
}
|
||||
},
|
||||
}
|
||||
} else if token_type == TokenType::Operator() || token_type == TokenType::Equal() {
|
||||
return Err(format!("Error at token {:?}, query can't start with this token", token));
|
||||
}
|
||||
sanitized_tokens.push(token);
|
||||
|
||||
last_token_type = Some(token_type);
|
||||
}
|
||||
if open_close_equal_count.0 != open_close_equal_count.1 {
|
||||
return Err(format!("Error, query has a different number of opening and closing parentheses"));
|
||||
}
|
||||
if open_close_equal_count.2 > 1 {
|
||||
return Err(format!("Error, query shouldn't have more than 1 equals sign"));
|
||||
}
|
||||
let last_token = sanitized_tokens.last().unwrap();
|
||||
match get_token_type(&last_token) {
|
||||
TokenType::Equal() | TokenType::Operator() | TokenType::Substraction() => return Err(format!("Error at token {:?}, query can't end with this token", last_token)),
|
||||
_ => (),
|
||||
}
|
||||
Ok(sanitized_tokens)
|
||||
}
|
||||
|
||||
struct Node {
|
||||
_lhs: Option<Box<Node>>,
|
||||
_rhs: Option<Box<Node>>,
|
||||
}
|
||||
|
||||
fn build_ast(_tokens: Vec<Token>) -> Node {
|
||||
Node { _lhs: None, _rhs: None }
|
||||
}
|
||||
|
||||
pub fn parse(query: &str) -> Result<Vec<f64>, Box<dyn Error>> {
|
||||
let tokens = tokenize(query)?;
|
||||
println!("{:?}", tokens);
|
||||
let sanitized_tokens = sanitize_tokens(tokens)?;
|
||||
println!("{:?}", sanitized_tokens);
|
||||
let _ast = build_ast(sanitized_tokens);
|
||||
|
||||
println!("{:?}", tokenize(query));
|
||||
|
||||
Ok(vec![])
|
||||
}
|
||||
|
@@ -356,539 +114,7 @@ pub fn solve(equation: Vec<f64>) {
|
|||
mod tests {
|
||||
use super::*;
|
||||
|
||||
mod tokenize {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn addition() {
|
||||
let query = "+";
|
||||
let result: Vec<Token> = vec![Token::Addition()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn substraction() {
|
||||
let query = "-";
|
||||
let result: Vec<Token> = vec![Token::Substraction()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiplication() {
|
||||
let query = "*";
|
||||
let result: Vec<Token> = vec![Token::Multiplication()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn division() {
|
||||
let query = "/";
|
||||
let result: Vec<Token> = vec![Token::Division()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn modulo() {
|
||||
let query = "%";
|
||||
let result: Vec<Token> = vec![Token::Modulo()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn exponentiation() {
|
||||
let query = "^";
|
||||
let result: Vec<Token> = vec![Token::Exponentiation()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn equal() {
|
||||
let query = "=";
|
||||
let result: Vec<Token> = vec![Token::Equal()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn open_parenthesis() {
|
||||
let query = "(";
|
||||
let result: Vec<Token> = vec![Token::OpenParenthesis()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn close_parenthesis() {
|
||||
let query = ")";
|
||||
let result: Vec<Token> = vec![Token::CloseParenthesis()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn imaginary_unit() {
|
||||
let query = "i";
|
||||
let result: Vec<Token> = vec![Token::ImaginaryUnit()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn variable() {
|
||||
let query = "variable";
|
||||
let result: Vec<Token> = vec![Token::Variable(String::from("variable"))];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn variable_double_i() {
|
||||
let query = "ii";
|
||||
let result: Vec<Token> = vec![Token::Variable(String::from("ii"))];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_natural() {
|
||||
let query = "123456";
|
||||
let result: Vec<Token> = vec![Token::Number(String::from("123456"))];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_rational() {
|
||||
let query = "123.456";
|
||||
let result: Vec<Token> = vec![Token::Number(String::from("123.456"))];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_point_something() {
|
||||
let query = ".123456";
|
||||
let result: Vec<Token> = vec![Token::Number(String::from(".123456"))];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_trailing_point() {
|
||||
let query = "123456.";
|
||||
let result: Vec<Token> = vec![Token::Number(String::from("123456."))];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_variable() {
|
||||
let query = "23x";
|
||||
let result: Vec<Token> = vec![Token::Number(String::from("23")), Token::Variable(String::from("x"))];
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
|
||||
let query = "23i";
|
||||
let result: Vec<Token> = vec![Token::Number(String::from("23")), Token::ImaginaryUnit()];
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
|
||||
let query = "x23";
|
||||
let result: Vec<Token> = vec![Token::Variable(String::from("x")), Token::Number(String::from("23"))];
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
|
||||
let query = "i23";
|
||||
let result: Vec<Token> = vec![Token::ImaginaryUnit(), Token::Number(String::from("23"))];
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn number_double_point() {
|
||||
let query = "12.34.56";
|
||||
tokenize(query).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn number_double_point_in_a_row() {
|
||||
let query = "123..456";
|
||||
tokenize(query).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn number_only_point() {
|
||||
let query = ".";
|
||||
tokenize(query).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn invalid_token() {
|
||||
let query = "324*43224+243_+234=234";
|
||||
tokenize(query).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
mod sanitize_tokens {
|
||||
use super::*;
|
||||
|
||||
fn plus() -> Token {
|
||||
Token::Addition()
|
||||
}
|
||||
|
||||
fn minus() -> Token {
|
||||
Token::Substraction()
|
||||
}
|
||||
|
||||
fn times() -> Token {
|
||||
Token::Multiplication()
|
||||
}
|
||||
|
||||
fn divided_by() -> Token {
|
||||
Token::Division()
|
||||
}
|
||||
|
||||
fn modulo() -> Token {
|
||||
Token::Modulo()
|
||||
}
|
||||
|
||||
fn equals() -> Token {
|
||||
Token::Equal()
|
||||
}
|
||||
|
||||
fn x() -> Token {
|
||||
Token::Variable(String::from("x"))
|
||||
}
|
||||
|
||||
fn i() -> Token {
|
||||
Token::ImaginaryUnit()
|
||||
}
|
||||
|
||||
fn number() -> Token {
|
||||
Token::Number(String::from("123"))
|
||||
}
|
||||
|
||||
fn open() -> Token {
|
||||
Token::OpenParenthesis()
|
||||
}
|
||||
|
||||
fn close() -> Token {
|
||||
Token::CloseParenthesis()
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn double_number() {
|
||||
let tokens = vec![number(), number()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn double_operator() {
|
||||
let tokens = vec![number(), plus(), divided_by()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn double_variable() {
|
||||
let tokens = vec![x(), x()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn double_imaginary_unit() {
|
||||
let tokens = vec![i(), i()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn sanitize_tokens_starts_with_operator() {
|
||||
let tokens = vec![plus(), number(), divided_by(), x()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn starts_with_negative_number() {
|
||||
let tokens = vec![minus(), number(), divided_by(), x()];
|
||||
let results = tokens.clone();
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_operator_variable() {
|
||||
let tokens = vec![number(), divided_by(), x()];
|
||||
let results = tokens.clone();
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_multiplication() {
|
||||
let tokens = vec![number(), x()];
|
||||
let results = vec![number(), times(), x()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![x(), number()];
|
||||
let results = vec![x(), times(), number()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![number(), i()];
|
||||
let results = vec![number(), times(), i()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![i(), number()];
|
||||
let results = vec![i(), times(), number()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![i(), x()];
|
||||
let results = vec![i(), times(), x()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![x(), i()];
|
||||
let results = vec![x(), times(), i()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_multiplication_parenthesis() {
|
||||
let tokens = vec![open(), number(), modulo(), number(), close(), open(), i(), times(), x(), close()];
|
||||
let results = vec![open(), number(), modulo(), number(), close(), times(), open(), i(), times(), x(), close()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![number(), open(), number(), modulo(), number(), close()];
|
||||
let results= vec![number(), times(), open(), number(), modulo(), number(), close()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![i(), open(), number(), modulo(), number(), close()];
|
||||
let results= vec![i(), times(), open(), number(), modulo(), number(), close()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![x(), open(), number(), modulo(), number(), close()];
|
||||
let results= vec![x(), times(), open(), number(), modulo(), number(), close()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![open(), number(), modulo(), number(), close(), number()];
|
||||
let results= vec![open(), number(), modulo(), number(), close(), times(), number()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![open(), number(), modulo(), number(), close(), i()];
|
||||
let results= vec![open(), number(), modulo(), number(), close(), times(), i()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![open(), number(), modulo(), number(), close(), x()];
|
||||
let results= vec![open(), number(), modulo(), number(), close(), times(), x()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![number(), open(), number(), modulo(), number(), close(), x()];
|
||||
let results= vec![number(), times(), open(), number(), modulo(), number(), close(), times(), x()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn parenthesis_operator() {
|
||||
let tokens = vec!(open(), plus(), number(), close());
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn operator_parenthesis() {
|
||||
let tokens = vec!(open(), number(), modulo(), close());
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn minus_parenthesis() {
|
||||
let tokens = vec!(open(), number(), minus(), close());
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn empty_parentheses() {
|
||||
let tokens = vec!(open(), close());
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parenthesis_minus() {
|
||||
let tokens = vec!(open(), minus(), number(), close());
|
||||
let results = tokens.clone();
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn double_parentheses() {
|
||||
let tokens = vec!(open(), open(), number(), plus(), number(), close(), plus(), number(), close());
|
||||
let results = tokens.clone();
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec!(open(), number(), plus(), open(), number(), plus(), number(), close(), close());
|
||||
let results = tokens.clone();
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec!(open(), open(), number(), plus(), number(), close(), number(), close());
|
||||
let results = vec!(open(), open(), number(), plus(), number(), close(), times(), number(), close());
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec!(open(), number(), open(), number(), plus(), number(), close(), close());
|
||||
let results= vec!(open(), number(), times(), open(), number(), plus(), number(), close(), close());
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn wrong_parentheses_count() {
|
||||
let tokens = vec![open(), number(), close(), close()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn wrong_parentheses_count_2() {
|
||||
let tokens = vec![open(), open(), number(), close()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn wrong_parentheses_count_hard() {
|
||||
let tokens = vec![open(), open(), number(), close(), equals(), x(), close()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn wrong_equal_count() {
|
||||
let tokens = vec![number(), equals(), number(), equals(), number()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn starts_with_equal() {
|
||||
let tokens = vec![equals(), number(), plus(), number()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn ends_with_equal() {
|
||||
let tokens = vec![number(), plus(), number(), equals()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn ends_with_minus() {
|
||||
let tokens = vec![number(), plus(), number(), minus()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn ends_with_operator() {
|
||||
let tokens = vec![number(), plus(), number(), divided_by()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn ends_with_open_brackets() {
|
||||
let tokens = vec![number(), plus(), number(), open(), number(), close(), open()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn starts_with_close_brackets() {
|
||||
let tokens = vec![close(), number(), plus(), number(), open(), number()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn wrong_brackets_order() {
|
||||
let tokens = vec![number(), close(), number(), plus(), number(), open(), number()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn minus_operator() {
|
||||
let tokens = vec![number(), minus(), plus(), i()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn operator_minus() {
|
||||
let tokens = vec![number(), plus(), minus(), i()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn minus_equals() {
|
||||
let tokens = vec![number(), minus(), equals(), i()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn operator_equals() {
|
||||
let tokens = vec![number(), modulo(), equals(), i()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn equals_operator() {
|
||||
let tokens = vec![number(), equals(), times(), i()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn equals_minus() {
|
||||
let tokens = vec![number(), equals(), minus(), x()];
|
||||
let results = tokens.clone();
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
mod get_token_type {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn exhaustive() {
|
||||
assert_eq!(get_token_type(&Token::Number(String::from("123"))), TokenType::Number());
|
||||
assert_eq!(get_token_type(&Token::Variable(String::from("var"))), TokenType::Variable());
|
||||
assert_eq!(get_token_type(&Token::ImaginaryUnit()), TokenType::ImaginaryUnit());
|
||||
assert_eq!(get_token_type(&Token::OpenParenthesis()), TokenType::OpenParenthesis());
|
||||
assert_eq!(get_token_type(&Token::CloseParenthesis()), TokenType::CloseParenthesis());
|
||||
assert_eq!(get_token_type(&Token::Substraction()), TokenType::Substraction());
|
||||
assert_eq!(get_token_type(&Token::Equal()), TokenType::Equal());
|
||||
assert_eq!(get_token_type(&Token::Addition()), TokenType::Operator());
|
||||
assert_eq!(get_token_type(&Token::Multiplication()), TokenType::Operator());
|
||||
assert_eq!(get_token_type(&Token::Division()), TokenType::Operator());
|
||||
assert_eq!(get_token_type(&Token::Modulo()), TokenType::Operator());
|
||||
assert_eq!(get_token_type(&Token::Exponentiation()), TokenType::Operator());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_degree_0() {
|
||||
let query = "5 * X^0 = 3 * X^0";
|
||||
let result: Vec<f64> = vec![2.];
|
||||
|
@@ -896,7 +122,7 @@ mod tests {
        assert_eq!(parse(query).unwrap(), result);
    }

    #[test]
    fn parse_degree_1() {
        let query = "5 * X^0 + 3 * X^1 = 3 * X^0";
        let result: Vec<f64> = vec![2., 3.];
@@ -904,7 +130,7 @@ mod tests {
        assert_eq!(parse(query).unwrap(), result);
    }

    #[test]
    fn parse_degree_2() {
        let query = "5 * X^0 + 6 * X^1 + 8 * X^2 = 3 * X^0 - 2 * X^2";
        let result = vec![2., 6., 10.];
@@ -912,7 +138,7 @@ mod tests {
        assert_eq!(parse(query).unwrap(), result);
    }

    #[test]
    fn parse_random_order() {
        let query = "9.3 * X^3 + 4.3 * X^0 + 3.4 * X^2 - 1.5 * X^3 - 13.12 * X^1 = 1.4 * X^2 - 5.1 * X^3 + 1.4 * X^1 -6.3 * X^0";
        let result = vec![10.6, -14.52, 2., 12.9];
@@ -0,0 +1,19 @@
pub mod tokenizer;
pub mod sanitizer;
pub mod ast_builder;

#[derive(Debug, PartialEq, Clone)]
pub enum Token {
    Number(String),
    Variable(String),
    ImaginaryUnit(),
    Addition(),
    Multiplication(),
    Substraction(),
    Division(),
    Modulo(),
    Exponentiation(),
    Equal(),
    OpenParenthesis(),
    CloseParenthesis(),
}
@@ -0,0 +1,10 @@
use super::Token;

pub struct Node {
    _lhs: Option<Box<Node>>,
    _rhs: Option<Box<Node>>,
}

pub fn build_ast(_tokens: Vec<Token>) -> Node {
    Node { _lhs: None, _rhs: None }
}
@@ -0,0 +1,457 @@
|
|||
use super::Token;
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
enum TokenType {
|
||||
Number(),
|
||||
Variable(),
|
||||
ImaginaryUnit(),
|
||||
Operator(),
|
||||
Substraction(),
|
||||
OpenParenthesis(),
|
||||
CloseParenthesis(),
|
||||
Equal(),
|
||||
}
|
||||
|
||||
fn get_token_type(token: &Token) -> TokenType {
|
||||
match token {
|
||||
Token::Number(_) => TokenType::Number(),
|
||||
Token::Variable(_) => TokenType::Variable(),
|
||||
Token::ImaginaryUnit() => TokenType::ImaginaryUnit(),
|
||||
Token::OpenParenthesis() => TokenType::OpenParenthesis(),
|
||||
Token::CloseParenthesis() => TokenType::CloseParenthesis(),
|
||||
Token::Substraction() => TokenType::Substraction(),
|
||||
Token::Equal() => TokenType::Equal(),
|
||||
_ => TokenType::Operator(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn sanitize_tokens(tokens: Vec<Token>) -> Result<Vec<Token>, String> {
|
||||
let mut sanitized_tokens: Vec<Token> = vec![];
|
||||
let mut last_token_type = None;
|
||||
let mut open_close_equal_count = (0, 0, 0);
|
||||
|
||||
for token in tokens {
|
||||
let token_type = get_token_type(&token);
|
||||
match token_type {
|
||||
TokenType::OpenParenthesis() => open_close_equal_count.0 += 1,
|
||||
TokenType::CloseParenthesis() => {
|
||||
open_close_equal_count.1 += 1;
|
||||
if open_close_equal_count.1 > open_close_equal_count.0 {
|
||||
return Err(format!("Error at token {:?}, closing parenthesis that was never opened", token));
|
||||
}
|
||||
},
|
||||
TokenType::Equal() => {
|
||||
open_close_equal_count.2 += 1;
|
||||
if open_close_equal_count.0 != open_close_equal_count.1 {
|
||||
return Err(format!("Error, query has a different number of opening and closing parentheses"));
|
||||
}
|
||||
open_close_equal_count.0 = 0;
|
||||
open_close_equal_count.1 = 0;
|
||||
},
|
||||
_ => (),
|
||||
}
|
||||
if let Some(last_token_type) = last_token_type {
|
||||
match (&last_token_type, &token_type) {
|
||||
(TokenType::Number(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::Number(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::Number(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::Variable(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::Variable(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::Variable(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::ImaginaryUnit(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::ImaginaryUnit(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::ImaginaryUnit(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::CloseParenthesis(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::CloseParenthesis(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::CloseParenthesis(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::CloseParenthesis(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
|
||||
(TokenType::OpenParenthesis(), TokenType::Operator()) => return Err(format!("Error at token {:?}, operator forbidden directly after opening parenthesis", token)),
|
||||
(TokenType::Operator(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, all operators are forbidden directly before closing parenthesis", token)),
|
||||
(TokenType::Substraction(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, all operators are forbidden directly before closing parenthesis", token)),
|
||||
(TokenType::OpenParenthesis(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, empty parentheses", token)),
|
||||
(TokenType::Substraction(), TokenType::Equal()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
|
||||
(TokenType::Substraction(), TokenType::Operator()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
|
||||
(TokenType::Operator(), TokenType::Equal()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
|
||||
(TokenType::Operator(), TokenType::Substraction()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
|
||||
(TokenType::Equal(), TokenType::Operator()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
|
||||
(TokenType::OpenParenthesis(), TokenType::OpenParenthesis()) => (),
|
||||
(TokenType::CloseParenthesis(), TokenType::CloseParenthesis()) => (),
|
||||
_ => {
|
||||
if token_type == last_token_type {
|
||||
return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token));
|
||||
}
|
||||
},
|
||||
}
|
||||
} else if token_type == TokenType::Operator() || token_type == TokenType::Equal() {
|
||||
return Err(format!("Error at token {:?}, query can't start with this token", token));
|
||||
}
|
||||
sanitized_tokens.push(token);
|
||||
|
||||
last_token_type = Some(token_type);
|
||||
}
|
||||
if open_close_equal_count.0 != open_close_equal_count.1 {
|
||||
return Err(format!("Error, query has a different number of opening and closing parentheses"));
|
||||
}
|
||||
if open_close_equal_count.2 > 1 {
|
||||
return Err(format!("Error, query shouldn't have more than 1 equals sign"));
|
||||
}
|
||||
let last_token = sanitized_tokens.last().unwrap();
|
||||
match get_token_type(&last_token) {
|
||||
TokenType::Equal() | TokenType::Operator() | TokenType::Substraction() => return Err(format!("Error at token {:?}, query can't end with this token", last_token)),
|
||||
_ => (),
|
||||
}
|
||||
Ok(sanitized_tokens)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn plus() -> Token {
|
||||
Token::Addition()
|
||||
}
|
||||
|
||||
fn minus() -> Token {
|
||||
Token::Substraction()
|
||||
}
|
||||
|
||||
fn times() -> Token {
|
||||
Token::Multiplication()
|
||||
}
|
||||
|
||||
fn divided_by() -> Token {
|
||||
Token::Division()
|
||||
}
|
||||
|
||||
fn modulo() -> Token {
|
||||
Token::Modulo()
|
||||
}
|
||||
|
||||
fn equals() -> Token {
|
||||
Token::Equal()
|
||||
}
|
||||
|
||||
fn x() -> Token {
|
||||
Token::Variable(String::from("x"))
|
||||
}
|
||||
|
||||
fn i() -> Token {
|
||||
Token::ImaginaryUnit()
|
||||
}
|
||||
|
||||
fn number() -> Token {
|
||||
Token::Number(String::from("123"))
|
||||
}
|
||||
|
||||
fn open() -> Token {
|
||||
Token::OpenParenthesis()
|
||||
}
|
||||
|
||||
fn close() -> Token {
|
||||
Token::CloseParenthesis()
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn double_number() {
|
||||
let tokens = vec![number(), number()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn double_operator() {
|
||||
let tokens = vec![number(), plus(), divided_by()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn double_variable() {
|
||||
let tokens = vec![x(), x()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn double_imaginary_unit() {
|
||||
let tokens = vec![i(), i()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn sanitize_tokens_starts_with_operator() {
|
||||
let tokens = vec![plus(), number(), divided_by(), x()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn starts_with_negative_number() {
|
||||
let tokens = vec![minus(), number(), divided_by(), x()];
|
||||
let results = tokens.clone();
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_operator_variable() {
|
||||
let tokens = vec![number(), divided_by(), x()];
|
||||
let results = tokens.clone();
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_multiplication() {
|
||||
let tokens = vec![number(), x()];
|
||||
let results = vec![number(), times(), x()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![x(), number()];
|
||||
let results = vec![x(), times(), number()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![number(), i()];
|
||||
let results = vec![number(), times(), i()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![i(), number()];
|
||||
let results = vec![i(), times(), number()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![i(), x()];
|
||||
let results = vec![i(), times(), x()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![x(), i()];
|
||||
let results = vec![x(), times(), i()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn insert_multiplication_parenthesis() {
|
||||
let tokens = vec![open(), number(), modulo(), number(), close(), open(), i(), times(), x(), close()];
|
||||
let results = vec![open(), number(), modulo(), number(), close(), times(), open(), i(), times(), x(), close()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![number(), open(), number(), modulo(), number(), close()];
|
||||
let results= vec![number(), times(), open(), number(), modulo(), number(), close()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![i(), open(), number(), modulo(), number(), close()];
|
||||
let results= vec![i(), times(), open(), number(), modulo(), number(), close()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![x(), open(), number(), modulo(), number(), close()];
|
||||
let results= vec![x(), times(), open(), number(), modulo(), number(), close()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![open(), number(), modulo(), number(), close(), number()];
|
||||
let results= vec![open(), number(), modulo(), number(), close(), times(), number()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![open(), number(), modulo(), number(), close(), i()];
|
||||
let results= vec![open(), number(), modulo(), number(), close(), times(), i()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![open(), number(), modulo(), number(), close(), x()];
|
||||
let results= vec![open(), number(), modulo(), number(), close(), times(), x()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec![number(), open(), number(), modulo(), number(), close(), x()];
|
||||
let results= vec![number(), times(), open(), number(), modulo(), number(), close(), times(), x()];
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn parenthesis_operator() {
|
||||
let tokens = vec!(open(), plus(), number(), close());
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn operator_parenthesis() {
|
||||
let tokens = vec!(open(), number(), modulo(), close());
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn minus_parenthesis() {
|
||||
let tokens = vec!(open(), number(), minus(), close());
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn empty_parentheses() {
|
||||
let tokens = vec!(open(), close());
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parenthesis_minus() {
|
||||
let tokens = vec!(open(), minus(), number(), close());
|
||||
let results = tokens.clone();
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn double_parentheses() {
|
||||
let tokens = vec!(open(), open(), number(), plus(), number(), close(), plus(), number(), close());
|
||||
let results = tokens.clone();
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec!(open(), number(), plus(), open(), number(), plus(), number(), close(), close());
|
||||
let results = tokens.clone();
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec!(open(), open(), number(), plus(), number(), close(), number(), close());
|
||||
let results = vec!(open(), open(), number(), plus(), number(), close(), times(), number(), close());
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
|
||||
let tokens = vec!(open(), number(), open(), number(), plus(), number(), close(), close());
|
||||
let results= vec!(open(), number(), times(), open(), number(), plus(), number(), close(), close());
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn wrong_parentheses_count() {
|
||||
let tokens = vec![open(), number(), close(), close()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn wrong_parentheses_count_2() {
|
||||
let tokens = vec![open(), open(), number(), close()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn wrong_parentheses_count_hard() {
|
||||
let tokens = vec![open(), open(), number(), close(), equals(), x(), close()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn wrong_equal_count() {
|
||||
let tokens = vec![number(), equals(), number(), equals(), number()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn starts_with_equal() {
|
||||
let tokens = vec![equals(), number(), plus(), number()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn ends_with_equal() {
|
||||
let tokens = vec![number(), plus(), number(), equals()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn ends_with_minus() {
|
||||
let tokens = vec![number(), plus(), number(), minus()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn ends_with_operator() {
|
||||
let tokens = vec![number(), plus(), number(), divided_by()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn ends_with_open_brackets() {
|
||||
let tokens = vec![number(), plus(), number(), open(), number(), close(), open()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn starts_with_close_brackets() {
|
||||
let tokens = vec![close(), number(), plus(), number(), open(), number()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn wrong_brackets_order() {
|
||||
let tokens = vec![number(), close(), number(), plus(), number(), open(), number()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn minus_operator() {
|
||||
let tokens = vec![number(), minus(), plus(), i()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn operator_minus() {
|
||||
let tokens = vec![number(), plus(), minus(), i()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn minus_equals() {
|
||||
let tokens = vec![number(), minus(), equals(), i()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn operator_equals() {
|
||||
let tokens = vec![number(), modulo(), equals(), i()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn equals_operator() {
|
||||
let tokens = vec![number(), equals(), times(), i()];
|
||||
sanitize_tokens(tokens).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn equals_minus() {
|
||||
let tokens = vec![number(), equals(), minus(), x()];
|
||||
let results = tokens.clone();
|
||||
assert_eq!(sanitize_tokens(tokens).unwrap(), results);
|
||||
}
|
||||
|
||||
mod get_token_type {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn exhaustive() {
|
||||
assert_eq!(get_token_type(&Token::Number(String::from("123"))), TokenType::Number());
|
||||
assert_eq!(get_token_type(&Token::Variable(String::from("var"))), TokenType::Variable());
|
||||
assert_eq!(get_token_type(&Token::ImaginaryUnit()), TokenType::ImaginaryUnit());
|
||||
assert_eq!(get_token_type(&Token::OpenParenthesis()), TokenType::OpenParenthesis());
|
||||
assert_eq!(get_token_type(&Token::CloseParenthesis()), TokenType::CloseParenthesis());
|
||||
assert_eq!(get_token_type(&Token::Substraction()), TokenType::Substraction());
|
||||
assert_eq!(get_token_type(&Token::Equal()), TokenType::Equal());
|
||||
assert_eq!(get_token_type(&Token::Addition()), TokenType::Operator());
|
||||
assert_eq!(get_token_type(&Token::Multiplication()), TokenType::Operator());
|
||||
assert_eq!(get_token_type(&Token::Division()), TokenType::Operator());
|
||||
assert_eq!(get_token_type(&Token::Modulo()), TokenType::Operator());
|
||||
assert_eq!(get_token_type(&Token::Exponentiation()), TokenType::Operator());
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,301 @@
|
|||
use super::Token;
|
||||
|
||||
fn check_number(my_string: String, i: usize) -> Result<Token, String> {
|
||||
println!("Checking number at index {i}");
|
||||
if my_string.as_str() == "." {
|
||||
return Err(format!("unexpected token: `.` at position {i}"));
|
||||
}
|
||||
let find = my_string.find(".");
|
||||
if let Some(j) = find {
|
||||
let find = my_string[j+1..].find(".");
|
||||
if let Some(_) = find {
|
||||
return Err(format!("unexpected token: `{my_string}` at position {i}"))
|
||||
}
|
||||
}
|
||||
Ok(Token::Number(my_string))
|
||||
}
|
||||
|
||||
pub fn tokenize(query: &str) -> Result<Vec<Token>, String> {
|
||||
let mut tokens: Vec<Token> = vec![];
|
||||
let mut my_string = String::new();
|
||||
let mut is_last_number = false;
|
||||
let mut is_last_variable = false;
|
||||
|
||||
for (i, token) in query.chars().enumerate() {
|
||||
let mut is_still_number = false;
|
||||
let mut is_still_variable = false;
|
||||
let mut is_pop_needed = true;
|
||||
match token {
|
||||
'+' => tokens.push(Token::Addition()),
|
||||
'-' => tokens.push(Token::Substraction()),
|
||||
'*' => tokens.push(Token::Multiplication()),
|
||||
'/' => tokens.push(Token::Division()),
|
||||
'%' => tokens.push(Token::Modulo()),
|
||||
'^' => tokens.push(Token::Exponentiation()),
|
||||
'=' => tokens.push(Token::Equal()),
|
||||
'(' => tokens.push(Token::OpenParenthesis()),
|
||||
')' => tokens.push(Token::CloseParenthesis()),
|
||||
'a'..='z' | 'A'..='Z' => {
|
||||
if is_last_number == true {
|
||||
let mut tmp_token = None;
|
||||
if is_pop_needed {
|
||||
tmp_token = tokens.pop();
|
||||
}
|
||||
tokens.push(check_number(my_string, i)?);
|
||||
if let Some(tok) = tmp_token {
|
||||
tokens.push(tok);
|
||||
}
|
||||
is_last_number = false;
|
||||
my_string = String::new();
|
||||
}
|
||||
if is_last_variable == false {
|
||||
is_last_variable = true;
|
||||
}
|
||||
is_still_variable = true;
|
||||
my_string += &token.to_string();
|
||||
},
|
||||
'0'..='9' | '.' => {
|
||||
if is_last_variable == true {
|
||||
let mut tmp_token = None;
|
||||
if is_pop_needed {
|
||||
tmp_token = tokens.pop();
|
||||
}
|
||||
match my_string.as_str() {
|
||||
"i" => tokens.push(Token::ImaginaryUnit()),
|
||||
_ => tokens.push(Token::Variable(my_string)),
|
||||
}
|
||||
if let Some(tok) = tmp_token {
|
||||
tokens.push(tok);
|
||||
}
|
||||
is_last_variable = false;
|
||||
my_string = String::new();
|
||||
}
|
||||
if is_last_number == false {
|
||||
is_last_number = true;
|
||||
}
|
||||
is_still_number = true;
|
||||
my_string += &token.to_string();
|
||||
},
|
||||
' ' => is_pop_needed = false,
|
||||
_ => return Err(format!("unexpected token: `{token}` at position {i}")),
|
||||
};
|
||||
if is_last_variable && !is_still_variable {
|
||||
let mut tmp_token = None;
|
||||
if is_pop_needed {
|
||||
tmp_token = tokens.pop();
|
||||
}
|
||||
match my_string.as_str() {
|
||||
"i" => tokens.push(Token::ImaginaryUnit()),
|
||||
_ => tokens.push(Token::Variable(my_string)),
|
||||
}
|
||||
if let Some(tok) = tmp_token {
|
||||
tokens.push(tok);
|
||||
}
|
||||
is_last_variable = false;
|
||||
my_string = String::new();
|
||||
}
|
||||
else if is_last_number && !is_still_number {
|
||||
let mut tmp_token = None;
|
||||
if is_pop_needed {
|
||||
tmp_token = tokens.pop();
|
||||
}
|
||||
tokens.push(check_number(my_string, i)?);
|
||||
if let Some(tok) = tmp_token {
|
||||
tokens.push(tok);
|
||||
}
|
||||
is_last_number = false;
|
||||
my_string = String::new();
|
||||
}
|
||||
}
|
||||
if is_last_variable {
|
||||
match my_string.as_str() {
|
||||
"i" => tokens.push(Token::ImaginaryUnit()),
|
||||
_ => tokens.push(Token::Variable(my_string)),
|
||||
}
|
||||
}
|
||||
else if is_last_number {
|
||||
tokens.push(check_number(my_string, query.len())?);
|
||||
}
|
||||
|
||||
Ok(tokens)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn addition() {
|
||||
let query = "+";
|
||||
let result: Vec<Token> = vec![Token::Addition()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn substraction() {
|
||||
let query = "-";
|
||||
let result: Vec<Token> = vec![Token::Substraction()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiplication() {
|
||||
let query = "*";
|
||||
let result: Vec<Token> = vec![Token::Multiplication()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn division() {
|
||||
let query = "/";
|
||||
let result: Vec<Token> = vec![Token::Division()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn modulo() {
|
||||
let query = "%";
|
||||
let result: Vec<Token> = vec![Token::Modulo()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn exponentiation() {
|
||||
let query = "^";
|
||||
let result: Vec<Token> = vec![Token::Exponentiation()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn equal() {
|
||||
let query = "=";
|
||||
let result: Vec<Token> = vec![Token::Equal()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn open_parenthesis() {
|
||||
let query = "(";
|
||||
let result: Vec<Token> = vec![Token::OpenParenthesis()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn close_parenthesis() {
|
||||
let query = ")";
|
||||
let result: Vec<Token> = vec![Token::CloseParenthesis()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn imaginary_unit() {
|
||||
let query = "i";
|
||||
let result: Vec<Token> = vec![Token::ImaginaryUnit()];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn variable() {
|
||||
let query = "variable";
|
||||
let result: Vec<Token> = vec![Token::Variable(String::from("variable"))];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn variable_double_i() {
|
||||
let query = "ii";
|
||||
let result: Vec<Token> = vec![Token::Variable(String::from("ii"))];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_natural() {
|
||||
let query = "123456";
|
||||
let result: Vec<Token> = vec![Token::Number(String::from("123456"))];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_rational() {
|
||||
let query = "123.456";
|
||||
let result: Vec<Token> = vec![Token::Number(String::from("123.456"))];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_point_something() {
|
||||
let query = ".123456";
|
||||
let result: Vec<Token> = vec![Token::Number(String::from(".123456"))];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_trailing_point() {
|
||||
let query = "123456.";
|
||||
let result: Vec<Token> = vec![Token::Number(String::from("123456."))];
|
||||
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn number_variable() {
|
||||
let query = "23x";
|
||||
let result: Vec<Token> = vec![Token::Number(String::from("23")), Token::Variable(String::from("x"))];
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
|
||||
let query = "23i";
|
||||
let result: Vec<Token> = vec![Token::Number(String::from("23")), Token::ImaginaryUnit()];
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
|
||||
let query = "x23";
|
||||
let result: Vec<Token> = vec![Token::Variable(String::from("x")), Token::Number(String::from("23"))];
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
|
||||
let query = "i23";
|
||||
let result: Vec<Token> = vec![Token::ImaginaryUnit(), Token::Number(String::from("23"))];
|
||||
assert_eq!(tokenize(query).unwrap(), result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn number_double_point() {
|
||||
let query = "12.34.56";
|
||||
tokenize(query).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn number_double_point_in_a_row() {
|
||||
let query = "123..456";
|
||||
tokenize(query).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn number_only_point() {
|
||||
let query = ".";
|
||||
tokenize(query).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn invalid_token() {
|
||||
let query = "324*43224+243_+234=234";
|
||||
tokenize(query).unwrap();
|
||||
}
|
||||
}
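As a usage illustration (not part of this commit's diff): the public entry point is still `parse` in `src/lib.rs`, which chains `tokenize`, `sanitize_tokens`, and `build_ast`. A minimal caller might look like the sketch below; the crate name `computor` and the `main` harness are assumptions, and `parse` currently returns an empty Vec because `build_ast` is still a stub.

    // Hypothetical caller; the crate name `computor` is an assumption.
    use computor::parse;

    fn main() {
        match parse("5 * X^0 + 3 * X^1 = 3 * X^0") {
            // With the current stub ast_builder this prints `[]`.
            Ok(coefficients) => println!("{:?}", coefficients),
            Err(e) => eprintln!("parse error: {e}"),
        }
    }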