From d2b73945a7a051f1dd8482c8d778401478c7fa6a Mon Sep 17 00:00:00 2001
From: gbrochar
Date: Tue, 1 Aug 2023 18:08:50 +0200
Subject: [PATCH] feat(parser): sanitizer passes first test round

---
 src/lib.rs | 420 +++++++++++++++++++++++++++++++++++------------------
 1 file changed, 278 insertions(+), 142 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index 62f810c..691daea 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -199,14 +199,30 @@ fn sanitize_tokens(tokens: Vec<Token>) -> Result<Vec<Token>, String> {
         let token_type = get_token_type(&token);
         match token_type {
             TokenType::OpenParenthesis() => open_close_equal_count.0 += 1,
-            TokenType::CloseParenthesis() => open_close_equal_count.1 += 1,
-            TokenType::Equal() => open_close_equal_count.2 += 1,
+            TokenType::CloseParenthesis() => {
+                open_close_equal_count.1 += 1;
+                if open_close_equal_count.1 > open_close_equal_count.0 {
+                    return Err(format!("Error at token {:?}, closing parenthesis that was never opened", token));
+                }
+            },
+            TokenType::Equal() => {
+                open_close_equal_count.2 += 1;
+                if open_close_equal_count.0 != open_close_equal_count.1 {
+                    return Err(format!("Error, query has a different number of opening and closing parentheses"));
+                }
+                open_close_equal_count.0 = 0;
+                open_close_equal_count.1 = 0;
+            },
             _ => (),
         }
         if let Some(last_token_type) = last_token_type {
             match (&last_token_type, &token_type) {
                 (TokenType::Number(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
+                (TokenType::Variable(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::Number(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
+                (TokenType::ImaginaryUnit(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
+                (TokenType::ImaginaryUnit(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
+                (TokenType::Variable(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::Number(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::Variable(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::ImaginaryUnit(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
@@ -218,6 +234,11 @@ fn sanitize_tokens(tokens: Vec<Token>) -> Result<Vec<Token>, String> {
                 (TokenType::Operator(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, all operators are forbidden directly before closing parenthesis", token)),
                 (TokenType::Substraction(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, all operators are forbidden directly before closing parenthesis", token)),
                 (TokenType::OpenParenthesis(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, empty parentheses", token)),
+                (TokenType::Substraction(), TokenType::Equal()) => return Err(format!("Error at token {:?}, two incompatible token types in a row", token)),
+                (TokenType::Substraction(), TokenType::Operator()) => return Err(format!("Error at token {:?}, two incompatible token types in a row", token)),
+                (TokenType::Operator(), TokenType::Equal()) => return Err(format!("Error at token {:?}, two incompatible token types in a row", token)),
+                (TokenType::Operator(), TokenType::Substraction()) => return Err(format!("Error at token {:?}, two incompatible token types in a row", token)),
+                (TokenType::Equal(), TokenType::Operator()) => return Err(format!("Error at token {:?}, two incompatible token types in a row", token)),
                 (TokenType::OpenParenthesis(), TokenType::OpenParenthesis()) => (),
                 (TokenType::CloseParenthesis(), TokenType::CloseParenthesis()) => (),
                 _ => {
@@ -226,13 +247,24 @@ fn sanitize_tokens(tokens: Vec<Token>) -> Result<Vec<Token>, String> {
                     }
                 },
             }
-        } else if token_type == TokenType::Operator() {
-            return Err(format!("Error at token {:?}, query can't start with an operator", token));
+        } else if token_type == TokenType::Operator() || token_type == TokenType::Equal() {
+            return Err(format!("Error at token {:?}, query can't start with this token", token));
         }
         sanitized_tokens.push(token);
         last_token_type = Some(token_type);
     }
+    if open_close_equal_count.0 != open_close_equal_count.1 {
+        return Err(format!("Error, query has a different number of opening and closing parentheses"));
+    }
+    if open_close_equal_count.2 > 1 {
+        return Err(format!("Error, query shouldn't have more than 1 equals sign"));
+    }
+    let last_token = sanitized_tokens.last().unwrap();
+    match get_token_type(&last_token) {
+        TokenType::Equal() | TokenType::Operator() | TokenType::Substraction() => return Err(format!("Error at token {:?}, query can't end with this token", last_token)),
+        _ => (),
+    }
     Ok(sanitized_tokens)
 }
 
@@ -373,179 +405,183 @@ pub fn solve(equation: Vec<f64>) {
 mod tests {
     use super::*;
 
-    #[test]
-    fn tokenize_addition() {
-        let query = "+";
-        let result: Vec<Token> = vec![Token::Addition()];
+    mod tokenize {
+        use super::*;
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn addition() {
+            let query = "+";
+            let result: Vec<Token> = vec![Token::Addition()];
 
-    #[test]
-    fn tokenize_substraction() {
-        let query = "-";
-        let result: Vec<Token> = vec![Token::Substraction()];
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn substraction() {
+            let query = "-";
+            let result: Vec<Token> = vec![Token::Substraction()];
 
-    #[test]
-    fn tokenize_multiplication() {
-        let query = "*";
-        let result: Vec<Token> = vec![Token::Multiplication()];
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn multiplication() {
+            let query = "*";
+            let result: Vec<Token> = vec![Token::Multiplication()];
 
-    #[test]
-    fn tokenize_division() {
-        let query = "/";
-        let result: Vec<Token> = vec![Token::Division()];
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn division() {
+            let query = "/";
+            let result: Vec<Token> = vec![Token::Division()];
 
-    #[test]
-    fn tokenize_modulo() {
-        let query = "%";
-        let result: Vec<Token> = vec![Token::Modulo()];
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn modulo() {
+            let query = "%";
+            let result: Vec<Token> = vec![Token::Modulo()];
 
-    #[test]
-    fn tokenize_exponentiation() {
-        let query = "^";
-        let result: Vec<Token> = vec![Token::Exponentiation()];
+            assert_eq!(tokenize(query).unwrap(), result);
        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn exponentiation() {
+            let query = "^";
+            let result: Vec<Token> = vec![Token::Exponentiation()];
 
-    #[test]
-    fn tokenize_equal() {
-        let query = "=";
-        let result: Vec<Token> = vec![Token::Equal()];
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn equal() {
+            let query = "=";
+            let result: Vec<Token> = vec![Token::Equal()];
 
-    #[test]
-    fn tokenize_open_parenthesis() {
-        let query = "(";
-        let result: Vec<Token> = vec![Token::OpenParenthesis()];
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn open_parenthesis() {
+            let query = "(";
+            let result: Vec<Token> = vec![Token::OpenParenthesis()];
 
-    #[test]
-    fn tokenize_close_parenthesis() {
-        let query = ")";
-        let result: Vec<Token> = vec![Token::CloseParenthesis()];
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn close_parenthesis() {
+            let query = ")";
+            let result: Vec<Token> = vec![Token::CloseParenthesis()];
 
-    #[test]
-    fn tokenize_imaginary_unit() {
-        let query = "i";
-        let result: Vec<Token> = vec![Token::ImaginaryUnit()];
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn imaginary_unit() {
+            let query = "i";
+            let result: Vec<Token> = vec![Token::ImaginaryUnit()];
 
-    #[test]
-    fn tokenize_variable() {
-        let query = "variable";
-        let result: Vec<Token> = vec![Token::Variable(String::from("variable"))];
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn variable() {
+            let query = "variable";
+            let result: Vec<Token> = vec![Token::Variable(String::from("variable"))];
 
-    #[test]
-    fn tokenize_variable_double_i() {
-        let query = "ii";
-        let result: Vec<Token> = vec![Token::Variable(String::from("ii"))];
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn variable_double_i() {
+            let query = "ii";
+            let result: Vec<Token> = vec![Token::Variable(String::from("ii"))];
 
-    #[test]
-    fn tokenize_number_natural() {
-        let query = "123456";
-        let result: Vec<Token> = vec![Token::Number(String::from("123456"))];
+            assert_eq!(tokenize(query).unwrap(), result);
        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn number_natural() {
+            let query = "123456";
+            let result: Vec<Token> = vec![Token::Number(String::from("123456"))];
 
-    #[test]
-    fn tokenize_number_rational() {
-        let query = "123.456";
-        let result: Vec<Token> = vec![Token::Number(String::from("123.456"))];
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn number_rational() {
+            let query = "123.456";
+            let result: Vec<Token> = vec![Token::Number(String::from("123.456"))];
 
-    #[test]
-    fn tokenize_number_point_something() {
-        let query = ".123456";
-        let result: Vec<Token> = vec![Token::Number(String::from(".123456"))];
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn number_point_something() {
+            let query = ".123456";
+            let result: Vec<Token> = vec![Token::Number(String::from(".123456"))];
 
-    #[test]
-    fn tokenize_number_trailing_point() {
-        let query = "123456.";
-        let result: Vec<Token> = vec![Token::Number(String::from("123456."))];
+            assert_eq!(tokenize(query).unwrap(), result);
        }
 
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+        #[test]
+        fn number_trailing_point() {
+            let query = "123456.";
+            let result: Vec<Token> = vec![Token::Number(String::from("123456."))];
 
-    #[test]
-    fn tokenize_number_variable() {
-        let query = "23x";
-        let result: Vec<Token> = vec![Token::Number(String::from("23")), Token::Variable(String::from("x"))];
-        assert_eq!(tokenize(query).unwrap(), result);
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-        let query = "23i";
-        let result: Vec<Token> = vec![Token::Number(String::from("23")), Token::ImaginaryUnit()];
-        assert_eq!(tokenize(query).unwrap(), result);
+        #[test]
+        fn number_variable() {
+            let query = "23x";
+            let result: Vec<Token> = vec![Token::Number(String::from("23")), Token::Variable(String::from("x"))];
+            assert_eq!(tokenize(query).unwrap(), result);
 
-        let query = "x23";
-        let result: Vec<Token> = vec![Token::Variable(String::from("x")), Token::Number(String::from("23"))];
-        assert_eq!(tokenize(query).unwrap(), result);
+            let query = "23i";
+            let result: Vec<Token> = vec![Token::Number(String::from("23")), Token::ImaginaryUnit()];
+            assert_eq!(tokenize(query).unwrap(), result);
 
-        let query = "i23";
-        let result: Vec<Token> = vec![Token::ImaginaryUnit(), Token::Number(String::from("23"))];
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
+            let query = "x23";
+            let result: Vec<Token> = vec![Token::Variable(String::from("x")), Token::Number(String::from("23"))];
+            assert_eq!(tokenize(query).unwrap(), result);
 
-    #[test]
-    #[should_panic]
-    fn tokenize_number_double_point() {
-        let query = "12.34.56";
-        tokenize(query).unwrap();
-    }
+            let query = "i23";
+            let result: Vec<Token> = vec![Token::ImaginaryUnit(), Token::Number(String::from("23"))];
+            assert_eq!(tokenize(query).unwrap(), result);
+        }
 
-    #[test]
-    #[should_panic]
-    fn tokenize_number_double_point_in_a_row() {
-        let query = "123..456";
-        tokenize(query).unwrap();
-    }
+        #[test]
+        #[should_panic]
+        fn number_double_point() {
+            let query = "12.34.56";
+            tokenize(query).unwrap();
+        }
 
-    #[test]
-    #[should_panic]
-    fn tokenize_number_only_point() {
-        let query = ".";
-        tokenize(query).unwrap();
-    }
+        #[test]
+        #[should_panic]
+        fn number_double_point_in_a_row() {
+            let query = "123..456";
+            tokenize(query).unwrap();
+        }
 
-    #[test]
-    #[should_panic]
-    fn tokenize_invalid_token() {
-        let query = "324*43224+243_+234=234";
-        tokenize(query).unwrap();
+        #[test]
+        #[should_panic]
+        fn number_only_point() {
+            let query = ".";
+            tokenize(query).unwrap();
+        }
+
+        #[test]
+        #[should_panic]
+        fn invalid_token() {
+            let query = "324*43224+243_+234=234";
+            tokenize(query).unwrap();
+        }
     }
 
     mod sanitize_tokens {
         use super::*;
@@ -650,9 +686,25 @@ mod tests {
         let results = vec![number(), times(), x()];
         assert_eq!(sanitize_tokens(tokens).unwrap(), results);
 
+        let tokens = vec![x(), number()];
+        let results = vec![x(), times(), number()];
+        assert_eq!(sanitize_tokens(tokens).unwrap(), results);
+
         let tokens = vec![number(), i()];
         let results = vec![number(), times(), i()];
         assert_eq!(sanitize_tokens(tokens).unwrap(), results);
+
+        let tokens = vec![i(), number()];
+        let results = vec![i(), times(), number()];
+        assert_eq!(sanitize_tokens(tokens).unwrap(), results);
+
+        let tokens = vec![i(), x()];
+        let results = vec![i(), times(), x()];
+        assert_eq!(sanitize_tokens(tokens).unwrap(), results);
+
+        let tokens = vec![x(), i()];
+        let results = vec![x(), times(), i()];
+        assert_eq!(sanitize_tokens(tokens).unwrap(), results);
     }
 
     #[test]
@@ -799,6 +851,90 @@ mod tests {
         let tokens = vec![number(), plus(), number(), divided_by()];
         sanitize_tokens(tokens).unwrap();
     }
+
+    #[test]
+    #[should_panic]
+    fn ends_with_open_brackets() {
+        let tokens = vec![number(), plus(), number(), open(), number(), close(), open()];
+        sanitize_tokens(tokens).unwrap();
+    }
+
+    #[test]
+    #[should_panic]
+    fn starts_with_close_brackets() {
+        let tokens = vec![close(), number(), plus(), number(), open(), number()];
+        sanitize_tokens(tokens).unwrap();
+    }
+
+    #[test]
+    #[should_panic]
+    fn wrong_brackets_order() {
+        let tokens = vec![number(), close(), number(), plus(), number(), open(), number()];
+        sanitize_tokens(tokens).unwrap();
+    }
+
+    #[test]
+    #[should_panic]
+    fn minus_operator() {
+        let tokens = vec![number(), minus(), plus(), i()];
+        sanitize_tokens(tokens).unwrap();
+    }
+
+    #[test]
+    #[should_panic]
+    fn operator_minus() {
+        let tokens = vec![number(), plus(), minus(), i()];
+        sanitize_tokens(tokens).unwrap();
+    }
+
+    #[test]
+    #[should_panic]
+    fn minus_equals() {
+        let tokens = vec![number(), minus(), equals(), i()];
+        sanitize_tokens(tokens).unwrap();
+    }
+
+    #[test]
+    #[should_panic]
+    fn operator_equals() {
+        let tokens = vec![number(), modulo(), equals(), i()];
+        sanitize_tokens(tokens).unwrap();
+    }
+
+    #[test]
+    #[should_panic]
+    fn equals_operator() {
+        let tokens = vec![number(), equals(), times(), i()];
+        sanitize_tokens(tokens).unwrap();
+    }
+
+    #[test]
+    fn equals_minus() {
+        let tokens = vec![number(), equals(), minus(), x()];
+        let results = tokens.clone();
+        assert_eq!(sanitize_tokens(tokens).unwrap(), results);
+    }
+
+    mod get_token_type {
+        use super::*;
+
+        #[test]
+        fn exhaustive() {
+            assert_eq!(get_token_type(&Token::Number(String::from("123"))), TokenType::Number());
+            assert_eq!(get_token_type(&Token::Variable(String::from("var"))), TokenType::Variable());
+            assert_eq!(get_token_type(&Token::ImaginaryUnit()), TokenType::ImaginaryUnit());
+            assert_eq!(get_token_type(&Token::OpenParenthesis()), TokenType::OpenParenthesis());
+            assert_eq!(get_token_type(&Token::CloseParenthesis()), TokenType::CloseParenthesis());
+            assert_eq!(get_token_type(&Token::Substraction()), TokenType::Substraction());
+            assert_eq!(get_token_type(&Token::Equal()), TokenType::Equal());
+            assert_eq!(get_token_type(&Token::Addition()), TokenType::Operator());
+            assert_eq!(get_token_type(&Token::Multiplication()), TokenType::Operator());
+            assert_eq!(get_token_type(&Token::Division()), TokenType::Operator());
+            assert_eq!(get_token_type(&Token::Modulo()), TokenType::Operator());
+            assert_eq!(get_token_type(&Token::Exponentiation()), TokenType::Operator());
+        }
+    }
+}
 
 /*
@@ -840,7 +976,7 @@ mod tests {
     }
 */
 
-    #[test]
+#[test]
     fn parse_degree_0() {
         let query = "5 * X^0 = 3 * X^0";
         let result: Vec<f64> = vec![2.];
@@ -848,7 +984,7 @@ mod tests {
         assert_eq!(parse(query).unwrap(), result);
     }
 
-    #[test]
+#[test]
     fn parse_degree_1() {
         let query = "5 * X^0 + 3 * X^1 = 3 * X^0";
         let result: Vec<f64> = vec![2., 3.];
@@ -856,7 +992,7 @@ mod tests {
         assert_eq!(parse(query).unwrap(), result);
     }
 
-    #[test]
+#[test]
     fn parse_degree_2() {
        let query = "5 * X^0 + 6 * X^1 + 8 * X^2 = 3 * X^0 - 2 * X^2";
        let result = vec![2., 6., 10.];
@@ -864,7 +1000,7 @@ mod tests {
         assert_eq!(parse(query).unwrap(), result);
     }
 
-    #[test]
+#[test]
     fn parse_random_order() {
         let query = "9.3 * X^3 + 4.3 * X^0 + 3.4 * X^2 - 1.5 * X^3 - 13.12 * X^1 = 1.4 * X^2 - 5.1 * X^3 + 1.4 * X^1 -6.3 * X^0";
         let result = vec![10.6, -14.52, 2., 12.9];