clean(parser): remove old code and warnings

parent d2b73945a7
commit 7c13270089

src/lib.rs
@@ -28,35 +28,16 @@ enum TokenType {
     Equal(),
 }
 
-struct Rational {
+struct _Rational {
     numerator: i128,
     denominator: i128,
 }
 
-struct GaussianRational {
-    real: Rational,
-    imaginary: Rational,
+struct _GaussianRational {
+    real: _Rational,
+    imaginary: _Rational,
 }
 
-/*
-fn tokenize(query: &str) -> Result<Vec<Token>, Box<dyn Error>> {
-    let mut tokens: Vec<Token> = vec![];
-
-    for token in query.split(" ") {
-        tokens.push(match token {
-            "*" => Token::Multiply(),
-            "+" => Token::Add(),
-            "-" => Token::Substract(),
-            "=" => Token::Equal(),
-            _ if token.starts_with("X^") => Token::Exponent(token[2..].parse()?),
-            _ => Token::Number(token.parse()?),
-        });
-    }
-
-    Ok(tokens)
-}
-*/
-
 fn check_number(my_string: String, i: usize) -> Result<Token, String> {
     println!("Checking number at index {i}");
     if my_string.as_str() == "." {
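(Aside, not part of the diff: the leading underscore on _Rational and _GaussianRational is the standard Rust convention for "intentionally unused" — rustc's dead_code and unused_variables lints skip underscore-prefixed names, which appears to be how this commit gets rid of the warnings. A minimal, self-contained sketch of the convention:

// Kept-for-later type: the leading underscore marks the dead code as
// intentional, so `cargo build` stays warning-free without #[allow(dead_code)].
struct _Rational {
    numerator: i128,
    denominator: i128,
}

fn main() {
    let _unused = 42; // the same convention silences unused_variables for bindings
}

The explicit alternative would be an #[allow(dead_code)] attribute on each item that is only kept around for later use.)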
@@ -218,13 +199,13 @@ fn sanitize_tokens(tokens: Vec<Token>) -> Result<Vec<Token>, String> {
         if let Some(last_token_type) = last_token_type {
             match (&last_token_type, &token_type) {
                 (TokenType::Number(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
-                (TokenType::Variable(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::Number(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
+                (TokenType::Number(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
+                (TokenType::Variable(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
+                (TokenType::Variable(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
+                (TokenType::Variable(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::ImaginaryUnit(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::ImaginaryUnit(), TokenType::Variable()) => sanitized_tokens.push(Token::Multiplication()),
-                (TokenType::Variable(), TokenType::ImaginaryUnit()) => sanitized_tokens.push(Token::Multiplication()),
-                (TokenType::Number(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
-                (TokenType::Variable(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::ImaginaryUnit(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::CloseParenthesis(), TokenType::OpenParenthesis()) => sanitized_tokens.push(Token::Multiplication()),
                 (TokenType::CloseParenthesis(), TokenType::Number()) => sanitized_tokens.push(Token::Multiplication()),
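(Aside, not part of the diff: these match arms splice an explicit multiplication token between two adjacent operand-like tokens, so input such as "2X(i)" is treated as 2 * X * (i). A minimal, self-contained sketch of that idea — Tok is a simplified stand-in, not the crate's Token/TokenType, and the two predicates slightly generalize the exact pair list above:

#[derive(Debug)]
enum Tok {
    Number(f64),
    Variable,
    ImaginaryUnit,
    Open,
    Close,
    Mul,
}

// A token that can end a value: 2, X, i, or a closing parenthesis.
fn ends_value(t: &Tok) -> bool {
    matches!(t, Tok::Number(_) | Tok::Variable | Tok::ImaginaryUnit | Tok::Close)
}

// A token that can start a value: 2, X, i, or an opening parenthesis.
fn starts_value(t: &Tok) -> bool {
    matches!(t, Tok::Number(_) | Tok::Variable | Tok::ImaginaryUnit | Tok::Open)
}

// Splice an explicit multiplication between adjacent value-like tokens.
fn insert_implicit_mul(tokens: Vec<Tok>) -> Vec<Tok> {
    let mut out: Vec<Tok> = Vec::with_capacity(tokens.len());
    for token in tokens {
        let needs_mul = out
            .last()
            .map_or(false, |last| ends_value(last) && starts_value(&token));
        if needs_mul {
            out.push(Tok::Mul);
        }
        out.push(token);
    }
    out
}

fn main() {
    // "2X(i)" -> 2 * X * ( i )
    let toks = vec![Tok::Number(2.0), Tok::Variable, Tok::Open, Tok::ImaginaryUnit, Tok::Close];
    println!("{:?}", insert_implicit_mul(toks));
}

)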
@@ -268,49 +249,19 @@ fn sanitize_tokens(tokens: Vec<Token>) -> Result<Vec<Token>, String> {
     Ok(sanitized_tokens)
 }
 
-/*
-pub fn parse(query: &str) -> Result<Vec<f64>, Box<dyn Error>> {
-    let tokens = tokenize(query)?;
-    struct Node {
-        _lhs: Option<Box<Node>>,
-        _rhs: Option<Box<Node>>,
-    }
-
-    let mut degree: Option<usize> = None;
-    for token in tokens {
-        match token {
-            Token::Exponent(n) => {
-                if degree == None || degree < Some(n) {
-                    degree = Some(n);
-                }
-            }
-            _ => (),
-        }
-    }
-
-    let mut results: Vec<f64> = vec![0.; degree.unwrap() + 1];
-    let tokens = tokenize(query)?;
-    let mut sign = 1.;
-    let mut left = 1.;
-    let mut constant = 0.;
-    for token in tokens {
-        match token {
-            Token::Add() => sign = 1.,
-            Token::Substract() => sign = -1.,
-            Token::Multiply() => (),
-            Token::Equal() => {
-                left = -1.;
-                sign = 1.
-            },
-            Token::Number(n) => constant = n,
-            Token::Exponent(e) => results[e] += sign * left * constant,
-        }
-    }
-
-    Ok(results)
-}
-*/
 fn build_ast(_tokens: Vec<Token>) -> Node {
     Node { _lhs: None, _rhs: None }
 }
 
 pub fn parse(query: &str) -> Result<Vec<f64>, Box<dyn Error>> {
     let tokens = tokenize(query)?;
     let sanitized_tokens = sanitize_tokens(tokens)?;
-    //let ast = build_ast(tokens);
     let _ast = build_ast(sanitized_tokens);
 
     println!("{:?}", tokenize(query));
 
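(Aside, not part of the diff: the commented-out parse() deleted above filled a coefficient vector directly from the token stream — sign flips on '-', left flips to -1 after '=' so right-hand-side terms are subtracted, and each X^e adds sign * left * constant into slot e. A runnable restatement of that removed logic, using a simplified stand-in token enum rather than the crate's Token:

enum SimpleToken {
    Add,
    Sub,
    Mul,
    Equal,
    Number(f64),
    Exponent(usize),
}

// Accumulate polynomial coefficients in one pass, as the old parse() did:
// coefficients[e] collects every `c * X^e` term, with right-hand-side terms negated.
fn accumulate(tokens: &[SimpleToken], degree: usize) -> Vec<f64> {
    let mut coefficients = vec![0.0; degree + 1];
    let (mut sign, mut left, mut constant) = (1.0, 1.0, 0.0);
    for token in tokens {
        match token {
            SimpleToken::Add => sign = 1.0,
            SimpleToken::Sub => sign = -1.0,
            SimpleToken::Mul => (),
            SimpleToken::Equal => {
                left = -1.0; // everything after '=' moves to the left-hand side
                sign = 1.0;
            }
            SimpleToken::Number(n) => constant = *n,
            SimpleToken::Exponent(e) => coefficients[*e] += sign * left * constant,
        }
    }
    coefficients
}

fn main() {
    // 8 * X^0 - 6 * X^1 + 2 * X^1 = 3 * X^0  ->  [5.0, -4.0]
    let tokens = [
        SimpleToken::Number(8.0), SimpleToken::Mul, SimpleToken::Exponent(0),
        SimpleToken::Sub,
        SimpleToken::Number(6.0), SimpleToken::Mul, SimpleToken::Exponent(1),
        SimpleToken::Add,
        SimpleToken::Number(2.0), SimpleToken::Mul, SimpleToken::Exponent(1),
        SimpleToken::Equal,
        SimpleToken::Number(3.0), SimpleToken::Mul, SimpleToken::Exponent(0),
    ];
    println!("{:?}", accumulate(&tokens, 1));
}

The live code now goes through sanitize_tokens and the still-stubbed build_ast path instead of this single pass.)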
@@ -937,45 +888,6 @@ mod tests {
     }
 
-    /*
-    #[test]
-    fn tokenize_exponent() {
-        let query = "X^3";
-        let result: Vec<Token> = vec![Token::Exponent(3)];
-
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
-    */
-
-    /*
-    #[test]
-    fn tokenize_constant() {
-        let query = "6.964";
-        let result: Vec<Token> = vec![Token::Number(6.964)];
-
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
-    */
-
-    /*
-    #[test]
-    fn tokenize_complex() {
-        let query = "8 * X^0 - 6 * X^1 + 0 * X^2 - 5.6 * X^3 = 3 * X^0";
-        let result: Vec<Token> = vec![
-            Token::Number(8.), Token::Multiplication(), Token::Exponentiation(),
-            Token::Substract(),
-            Token::Number(6.), Token::Multiply(), Token::Exponentiation(),
-            Token::Add(),
-            Token::Number(0.), Token::Multiply(), Token::Exponent(2),
-            Token::Substract(),
-            Token::Number(5.6), Token::Multiply(), Token::Exponent(3),
-            Token::Equal(),
-            Token::Number(3.), Token::Multiply(), Token::Exponent(0)];
-
-        assert_eq!(tokenize(query).unwrap(), result);
-    }
-    */
-
     #[test]
     fn parse_degree_0() {
         let query = "5 * X^0 = 3 * X^0";