feat(parser): sanitizer passes first test round
parent 7d0528264d
commit d2b73945a7

src/lib.rs (194 changed lines)
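In short (a reader's summary of the diff below, not the author's wording): `sanitize_tokens` now inserts implicit multiplication in both directions between numbers, variables, and the imaginary unit; rejects a closing parenthesis that was never opened; requires each side of `=` to be balanced and allows at most one `=`; and refuses queries that start with an operator or `=`, or end with an operator, `-`, or `=`. A minimal usage sketch, assuming the `Token` variants and the `sanitize_tokens` signature visible in the diff (hypothetical call site, not code from this commit):

    // "2x" is sanitized to "2 * x": an implicit multiplication is inserted
    let tokens = vec![Token::Number(String::from("2")), Token::Variable(String::from("x"))];
    let sanitized = sanitize_tokens(tokens).unwrap();
    assert_eq!(
        sanitized,
        vec![
            Token::Number(String::from("2")),
            Token::Multiplication(),
            Token::Variable(String::from("x")),
        ]
    );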
@@ -199,14 +199,30 @@ fn sanitize_tokens(tokens: Vec<Token>) -> Result<Vec<Token>, String> {
         let token_type = get_token_type(&token);
         match token_type {
             TokenType::OpenParenthesis() => open_close_equal_count.0 += 1,
-            TokenType::CloseParenthesis() => open_close_equal_count.1 += 1,
-            TokenType::Equal() => open_close_equal_count.2 += 1,
+            TokenType::CloseParenthesis() => {
+                open_close_equal_count.1 += 1;
+                if open_close_equal_count.1 > open_close_equal_count.0 {
+                    return Err(format!("Error at token {:?}, closing parenthesis that was never opened", token));
+                }
+            },
+            TokenType::Equal() => {
+                open_close_equal_count.2 += 1;
+                if open_close_equal_count.0 != open_close_equal_count.1 {
+                    return Err(format!("Error, query has a different number of opening and closing parentheses"));
+                }
+                open_close_equal_count.0 = 0;
+                open_close_equal_count.1 = 0;
+            },
             _ => (),
         }
         if let Some(last_token_type) = last_token_type {
             match (&last_token_type, &token_type) {
                 (TokenType::Number(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
+                (TokenType::Variable(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::Number(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
+                (TokenType::ImaginaryUnit(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
+                (TokenType::ImaginaryUnit(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
+                (TokenType::Variable(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::Number(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::Variable(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::ImaginaryUnit(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
@@ -218,6 +234,11 @@ fn sanitize_tokens(tokens: Vec<Token>) -> Result<Vec<Token>, String> {
                 (TokenType::Operator(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, all operators are forbidden directly before closing parenthesis", token)),
                 (TokenType::Substraction(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, all operators are forbidden directly before closing parenthesis", token)),
                 (TokenType::OpenParenthesis(), TokenType::CloseParenthesis()) => return Err(format!("Error at token {:?}, empty parentheses", token)),
+                (TokenType::Substraction(), TokenType::Equal()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
+                (TokenType::Substraction(), TokenType::Operator()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
+                (TokenType::Operator(), TokenType::Equal()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
+                (TokenType::Operator(), TokenType::Substraction()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
+                (TokenType::Equal(), TokenType::Operator()) => return Err(format!("Error at token {:?}, two incompatible tokens type in a row", token)),
                 (TokenType::OpenParenthesis(), TokenType::OpenParenthesis()) => (),
                 (TokenType::CloseParenthesis(), TokenType::CloseParenthesis()) => (),
                 _ => {
@@ -226,13 +247,24 @@ fn sanitize_tokens(tokens: Vec<Token>) -> Result<Vec<Token>, String> {
                     }
                 },
             }
-        } else if token_type == TokenType::Operator() {
-            return Err(format!("Error at token {:?}, query can't start with an operator", token));
+        } else if token_type == TokenType::Operator() || token_type == TokenType::Equal() {
+            return Err(format!("Error at token {:?}, query can't start with this token", token));
         }
         sanitized_tokens.push(token);

         last_token_type = Some(token_type);
     }
+    if open_close_equal_count.0 != open_close_equal_count.1 {
+        return Err(format!("Error, query has a different number of opening and closing parentheses"));
+    }
+    if open_close_equal_count.2 > 1 {
+        return Err(format!("Error, query shouldn't have more than 1 equals sign"));
+    }
+    let last_token = sanitized_tokens.last().unwrap();
+    match get_token_type(&last_token) {
+        TokenType::Equal() | TokenType::Operator() | TokenType::Substraction() => return Err(format!("Error at token {:?}, query can't end with this token", last_token)),
+        _ => (),
+    }
     Ok(sanitized_tokens)
 }

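For reference, the parenthesis/equals bookkeeping added above can be read on its own: count opening parentheses, closing parentheses, and `=` signs, fail as soon as the `)` count outruns the `(` count, and require balance on each side of `=`. The following is a standalone sketch of that rule (an illustration, not code from this commit), written over plain characters instead of the crate's `Token` type so it compiles by itself:

    fn check_balance(query: &str) -> Result<(), String> {
        // mirrors the three counters in `open_close_equal_count`
        let (mut open, mut close, mut equals) = (0u32, 0u32, 0u32);
        for c in query.chars() {
            match c {
                '(' => open += 1,
                ')' => {
                    close += 1;
                    // a ')' with no matching '(' so far is rejected immediately
                    if close > open {
                        return Err(String::from("closing parenthesis that was never opened"));
                    }
                }
                '=' => {
                    equals += 1;
                    // each side of '=' must be balanced on its own
                    if open != close {
                        return Err(String::from("different number of opening and closing parentheses"));
                    }
                    open = 0;
                    close = 0;
                }
                _ => (),
            }
        }
        if open != close {
            return Err(String::from("different number of opening and closing parentheses"));
        }
        if equals > 1 {
            return Err(String::from("query shouldn't have more than 1 equals sign"));
        }
        Ok(())
    }

With this rule, check_balance("(1))") fails at the extra ')', and "1=2=3" is rejected for carrying more than one '='.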
@@ -373,8 +405,11 @@ pub fn solve(equation: Vec<f64>) {
 mod tests {
     use super::*;

+    mod tokenize {
+        use super::*;
+
         #[test]
-        fn tokenize_addition() {
+        fn addition() {
             let query = "+";
             let result: Vec<Token> = vec![Token::Addition()];

@@ -382,7 +417,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_substraction() {
+        fn substraction() {
             let query = "-";
             let result: Vec<Token> = vec![Token::Substraction()];

@@ -390,7 +425,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_multiplication() {
+        fn multiplication() {
             let query = "*";
             let result: Vec<Token> = vec![Token::Multiplication()];

@@ -398,7 +433,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_division() {
+        fn division() {
             let query = "/";
             let result: Vec<Token> = vec![Token::Division()];

@@ -406,7 +441,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_modulo() {
+        fn modulo() {
             let query = "%";
             let result: Vec<Token> = vec![Token::Modulo()];

@@ -414,7 +449,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_exponentiation() {
+        fn exponentiation() {
             let query = "^";
             let result: Vec<Token> = vec![Token::Exponentiation()];

@@ -422,7 +457,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_equal() {
+        fn equal() {
             let query = "=";
             let result: Vec<Token> = vec![Token::Equal()];

@@ -430,7 +465,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_open_parenthesis() {
+        fn open_parenthesis() {
             let query = "(";
             let result: Vec<Token> = vec![Token::OpenParenthesis()];

@@ -438,7 +473,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_close_parenthesis() {
+        fn close_parenthesis() {
             let query = ")";
             let result: Vec<Token> = vec![Token::CloseParenthesis()];

@@ -446,7 +481,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_imaginary_unit() {
+        fn imaginary_unit() {
             let query = "i";
             let result: Vec<Token> = vec![Token::ImaginaryUnit()];

@@ -454,7 +489,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_variable() {
+        fn variable() {
             let query = "variable";
             let result: Vec<Token> = vec![Token::Variable(String::from("variable"))];

@@ -462,7 +497,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_variable_double_i() {
+        fn variable_double_i() {
             let query = "ii";
             let result: Vec<Token> = vec![Token::Variable(String::from("ii"))];

@@ -470,7 +505,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_number_natural() {
+        fn number_natural() {
             let query = "123456";
             let result: Vec<Token> = vec![Token::Number(String::from("123456"))];

@@ -478,7 +513,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_number_rational() {
+        fn number_rational() {
             let query = "123.456";
             let result: Vec<Token> = vec![Token::Number(String::from("123.456"))];

@@ -486,7 +521,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_number_point_something() {
+        fn number_point_something() {
             let query = ".123456";
             let result: Vec<Token> = vec![Token::Number(String::from(".123456"))];

@@ -494,7 +529,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_number_trailing_point() {
+        fn number_trailing_point() {
             let query = "123456.";
             let result: Vec<Token> = vec![Token::Number(String::from("123456."))];

@@ -502,7 +537,7 @@ mod tests {
         }

         #[test]
-        fn tokenize_number_variable() {
+        fn number_variable() {
             let query = "23x";
             let result: Vec<Token> = vec![Token::Number(String::from("23")), Token::Variable(String::from("x"))];
             assert_eq!(tokenize(query).unwrap(), result);
@@ -522,31 +557,32 @@ mod tests {

         #[test]
         #[should_panic]
-        fn tokenize_number_double_point() {
+        fn number_double_point() {
             let query = "12.34.56";
             tokenize(query).unwrap();
         }

         #[test]
         #[should_panic]
-        fn tokenize_number_double_point_in_a_row() {
+        fn number_double_point_in_a_row() {
             let query = "123..456";
             tokenize(query).unwrap();
         }

         #[test]
         #[should_panic]
-        fn tokenize_number_only_point() {
+        fn number_only_point() {
             let query = ".";
             tokenize(query).unwrap();
         }

         #[test]
         #[should_panic]
-        fn tokenize_invalid_token() {
+        fn invalid_token() {
             let query = "324*43224+243_+234=234";
             tokenize(query).unwrap();
         }
+    }

     mod sanitize_tokens {
         use super::*;
@@ -650,9 +686,25 @@ mod tests {
             let results = vec![number(), times(), x()];
             assert_eq!(sanitize_tokens(tokens).unwrap(), results);

+            let tokens = vec![x(), number()];
+            let results = vec![x(), times(), number()];
+            assert_eq!(sanitize_tokens(tokens).unwrap(), results);
+
             let tokens = vec![number(), i()];
             let results = vec![number(), times(), i()];
             assert_eq!(sanitize_tokens(tokens).unwrap(), results);
+
+            let tokens = vec![i(), number()];
+            let results = vec![i(), times(), number()];
+            assert_eq!(sanitize_tokens(tokens).unwrap(), results);
+
+            let tokens = vec![i(), x()];
+            let results = vec![i(), times(), x()];
+            assert_eq!(sanitize_tokens(tokens).unwrap(), results);
+
+            let tokens = vec![x(), i()];
+            let results = vec![x(), times(), i()];
+            assert_eq!(sanitize_tokens(tokens).unwrap(), results);
         }

         #[test]
@@ -799,6 +851,90 @@ mod tests {
             let tokens = vec![number(), plus(), number(), divided_by()];
             sanitize_tokens(tokens).unwrap();
         }
+
+        #[test]
+        #[should_panic]
+        fn ends_with_open_brackets() {
+            let tokens = vec![number(), plus(), number(), open(), number(), close(), open()];
+            sanitize_tokens(tokens).unwrap();
+        }
+
+        #[test]
+        #[should_panic]
+        fn starts_with_close_brackets() {
+            let tokens = vec![close(), number(), plus(), number(), open(), number()];
+            sanitize_tokens(tokens).unwrap();
+        }
+
+        #[test]
+        #[should_panic]
+        fn wrong_brackets_order() {
+            let tokens = vec![number(), close(), number(), plus(), number(), open(), number()];
+            sanitize_tokens(tokens).unwrap();
+        }
+
+        #[test]
+        #[should_panic]
+        fn minus_operator() {
+            let tokens = vec![number(), minus(), plus(), i()];
+            sanitize_tokens(tokens).unwrap();
+        }
+
+        #[test]
+        #[should_panic]
+        fn operator_minus() {
+            let tokens = vec![number(), plus(), minus(), i()];
+            sanitize_tokens(tokens).unwrap();
+        }
+
+        #[test]
+        #[should_panic]
+        fn minus_equals() {
+            let tokens = vec![number(), minus(), equals(), i()];
+            sanitize_tokens(tokens).unwrap();
+        }
+
+        #[test]
+        #[should_panic]
+        fn operator_equals() {
+            let tokens = vec![number(), modulo(), equals(), i()];
+            sanitize_tokens(tokens).unwrap();
+        }
+
+        #[test]
+        #[should_panic]
+        fn equals_operator() {
+            let tokens = vec![number(), equals(), times(), i()];
+            sanitize_tokens(tokens).unwrap();
+        }
+
+        #[test]
+        fn equals_minus() {
+            let tokens = vec![number(), equals(), minus(), x()];
+            let results = tokens.clone();
+            assert_eq!(sanitize_tokens(tokens).unwrap(), results);
+        }
+
+        mod get_token_type {
+            use super::*;
+
+            #[test]
+            fn exhaustive() {
+                assert_eq!(get_token_type(&Token::Number(String::from("123"))), TokenType::Number());
+                assert_eq!(get_token_type(&Token::Variable(String::from("var"))), TokenType::Variable());
+                assert_eq!(get_token_type(&Token::ImaginaryUnit()), TokenType::ImaginaryUnit());
+                assert_eq!(get_token_type(&Token::OpenParenthesis()), TokenType::OpenParenthesis());
+                assert_eq!(get_token_type(&Token::CloseParenthesis()), TokenType::CloseParenthesis());
+                assert_eq!(get_token_type(&Token::Substraction()), TokenType::Substraction());
+                assert_eq!(get_token_type(&Token::Equal()), TokenType::Equal());
+                assert_eq!(get_token_type(&Token::Addition()), TokenType::Operator());
+                assert_eq!(get_token_type(&Token::Multiplication()), TokenType::Operator());
+                assert_eq!(get_token_type(&Token::Division()), TokenType::Operator());
+                assert_eq!(get_token_type(&Token::Modulo()), TokenType::Operator());
+                assert_eq!(get_token_type(&Token::Exponentiation()), TokenType::Operator());
+            }
+        }
+
     }

     /*
@@ -840,7 +976,7 @@ mod tests {
     }
     */

     #[test]
     fn parse_degree_0() {
         let query = "5 * X^0 = 3 * X^0";
         let result: Vec<f64> = vec![2.];
@@ -848,7 +984,7 @@ mod tests {
         assert_eq!(parse(query).unwrap(), result);
     }

     #[test]
     fn parse_degree_1() {
         let query = "5 * X^0 + 3 * X^1 = 3 * X^0";
         let result: Vec<f64> = vec![2., 3.];
@@ -856,7 +992,7 @@ mod tests {
         assert_eq!(parse(query).unwrap(), result);
     }

     #[test]
     fn parse_degree_2() {
         let query = "5 * X^0 + 6 * X^1 + 8 * X^2 = 3 * X^0 - 2 * X^2";
         let result = vec![2., 6., 10.];
@@ -864,7 +1000,7 @@ mod tests {
         assert_eq!(parse(query).unwrap(), result);
     }

     #[test]
     fn parse_random_order() {
         let query = "9.3 * X^3 + 4.3 * X^0 + 3.4 * X^2 - 1.5 * X^3 - 13.12 * X^1 = 1.4 * X^2 - 5.1 * X^3 + 1.4 * X^1 -6.3 * X^0";
         let result = vec![10.6, -14.52, 2., 12.9];