handle errors

2024-04-08 12:52:40 +02:00
parent 017450ce7e
commit e59d23d26d


@@ -5,7 +5,13 @@ enum TokenizeError {
NumberParseError(String),
}
-#[derive(Debug)]
+#[derive(Debug, PartialEq)]
+enum InfixToPostfixError {
+ExpectedOperator,
+ExpectedLeftParenthesis,
+}
+#[derive(Debug, PartialEq)]
enum CalculateError {
PostfixExpectedNumbers,
EmptyStack,
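
The new PartialEq derives are what allow the tests further down to compare whole Option and Result values with assert_eq!. A minimal standalone sketch of that requirement, using a hypothetical ParseError rather than this crate's types:

#[derive(Debug, PartialEq)]
enum ParseError {
    NoDigit,
}

fn first_digit(s: &str) -> Result<u32, ParseError> {
    s.chars().find_map(|c| c.to_digit(10)).ok_or(ParseError::NoDigit)
}

fn main() {
    // assert_eq! needs Debug (for the failure message) and PartialEq (for the
    // comparison) on both sides, hence #[derive(Debug, PartialEq)] on the error enum.
    assert_eq!(first_digit("a7b"), Ok(7));
    assert_eq!(first_digit("abc"), Err(ParseError::NoDigit));
}
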
@@ -45,7 +51,7 @@ fn main() {
if std::env::args().len() == 1 {
loop {
print!("> ");
-io::stdout().flush().unwrap();
+io::stdout().flush().expect("failed to flush stdout");
let mut input = String::new();
io::stdin()
@@ -101,10 +107,21 @@ fn compute(input: &str) -> f64 {
}
};
println!("tokens before: {:?}", tokens);
-let tokens = implicit_operations(tokens);
-println!("tokens after: {:?}", tokens);
-let tokens = infix_to_postfix(tokens);
+let tokens = match implicit_operations(tokens) {
+Some(v) => v,
+None => {
+eprintln!("expected at least two tokens");
+std::process::exit(1);
+}
+};
+let tokens = match infix_to_postfix(tokens) {
+Ok(v) => v,
+Err(e) => {
+eprintln!("failed to convert infix to postfix: {:?}", e);
+std::process::exit(1);
+}
+};
match calculate(tokens) {
Ok(v) => v,
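
The match blocks above all follow the same shape: take the success value, or print the error to stderr and exit non-zero. As a sketch only (the commit keeps the explicit match expressions), that shape could be factored into a hypothetical helper, shown here for the Result case:

use std::fmt::Debug;
use std::process;

// Hypothetical helper, not part of the commit: unwrap a Result or report the
// error on stderr and exit with a non-zero status, mirroring the match blocks above.
fn ok_or_exit<T, E: Debug>(result: Result<T, E>, context: &str) -> T {
    match result {
        Ok(v) => v,
        Err(e) => {
            eprintln!("{}: {:?}", context, e);
            process::exit(1);
        }
    }
}

fn main() {
    let parsed: Result<f64, _> = "2.5".parse();
    let value = ok_or_exit(parsed, "failed to parse number");
    println!("{}", value);
}
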
@@ -168,47 +185,37 @@ fn tokenize(input: &str) -> Result<Vec<Token>, TokenizeError> {
'e' => Token::Number(std::f64::consts::E),
'τ' => Token::Number(std::f64::consts::TAU),
'0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | 'E' | '.' => {
-if !fun_buf.is_empty() {
-tokens.push(parse_buffer(&fun_buf)?);
-fun_buf.clear();
-}
+push_buf(&mut tokens, &mut fun_buf)?;
num_buf.push(c);
prev = Some(c);
continue;
}
_ => {
-if !num_buf.is_empty() {
-tokens.push(parse_buffer(&num_buf)?);
-num_buf.clear();
-}
+push_buf(&mut tokens, &mut num_buf)?;
fun_buf.push(c);
prev = Some(c);
continue;
}
};
-if !num_buf.is_empty() {
-tokens.push(parse_buffer(&num_buf)?);
-num_buf.clear();
-}
-if !fun_buf.is_empty() {
-tokens.push(parse_buffer(&fun_buf)?);
-fun_buf.clear();
-}
+push_buf(&mut tokens, &mut num_buf)?;
+push_buf(&mut tokens, &mut fun_buf)?;
tokens.push(token);
prev = Some(c);
}
-if !num_buf.is_empty() {
-tokens.push(parse_buffer(&num_buf)?);
-num_buf.clear();
-}
-if !fun_buf.is_empty() {
-tokens.push(parse_buffer(&fun_buf)?);
-fun_buf.clear();
-}
+push_buf(&mut tokens, &mut num_buf)?;
+push_buf(&mut tokens, &mut fun_buf)?;
Ok(tokens)
}
+fn push_buf(tokens: &mut Vec<Token>, buf: &mut String) -> Result<(), TokenizeError> {
+if !buf.is_empty() {
+tokens.push(parse_buffer(buf)?);
+buf.clear();
+}
+Ok(())
+}
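
The repeated "flush the pending buffer if it is non-empty" blocks collapse into the new push_buf helper, with ? propagating any TokenizeError. A simplified, self-contained sketch of the same shape, using f64 and ParseFloatError in place of the crate's Token and TokenizeError:

use std::num::ParseFloatError;

// Same shape as push_buf above: parse and flush a pending buffer, propagating errors with `?`.
fn flush_buf(tokens: &mut Vec<f64>, buf: &mut String) -> Result<(), ParseFloatError> {
    if !buf.is_empty() {
        tokens.push(buf.parse()?);
        buf.clear();
    }
    Ok(())
}

fn main() -> Result<(), ParseFloatError> {
    let mut tokens = Vec::new();
    let mut buf = String::from("3.14");
    flush_buf(&mut tokens, &mut buf)?; // buffer parsed and cleared
    flush_buf(&mut tokens, &mut buf)?; // empty buffer: no-op
    assert_eq!(tokens, vec![3.14]);
    Ok(())
}
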
fn parse_buffer(buf: &str) -> Result<Token, TokenizeError> {
match buf {
"sin" => Ok(Token::Function(FunctionType::Sine)),
@@ -226,9 +233,9 @@ fn parse_buffer(buf: &str) -> Result<Token, TokenizeError> {
}
}
-fn implicit_operations(tokens: Vec<Token>) -> Vec<Token> {
+fn implicit_operations(tokens: Vec<Token>) -> Option<Vec<Token>> {
if tokens.len() < 2 {
-return tokens;
+return Some(tokens);
}
let mut new_tokens = vec![tokens[0].clone()];
let mut prev = tokens[0].clone();
@@ -263,7 +270,7 @@ fn implicit_operations(tokens: Vec<Token>) -> Vec<Token> {
}
let mut tokens = new_tokens.iter();
let mut new_tokens = Vec::new();
-let (mut a, mut b) = (tokens.next().unwrap(), tokens.next().unwrap());
+let (mut a, mut b) = (tokens.next()?, tokens.next()?);
new_tokens.push(a.clone());
new_tokens.push(b.clone());
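
Since implicit_operations now returns Option<Vec<Token>>, the first two tokens can be taken with the ? operator: if the iterator runs dry, the function returns None instead of panicking. The same idea in an isolated sketch:

// Sketch of `?` on Option: returns None early when fewer than two items are present.
fn first_two(values: &[i32]) -> Option<(i32, i32)> {
    let mut iter = values.iter();
    let (a, b) = (iter.next()?, iter.next()?);
    Some((*a, *b))
}

fn main() {
    assert_eq!(first_two(&[3, 2, 7]), Some((3, 2)));
    assert_eq!(first_two(&[3]), None);
}
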
@@ -312,18 +319,18 @@ fn implicit_operations(tokens: Vec<Token>) -> Vec<Token> {
a = b;
b = c;
}
-new_tokens
+Some(new_tokens)
}
-fn associativity(token: &Token) -> Associativity {
+fn associativity(token: &Token) -> Option<Associativity> {
match token {
-Token::Power => Associativity::Right,
-Token::Number(_) => unreachable!(),
-_ => Associativity::Left,
+Token::Power => Some(Associativity::Right),
+Token::Number(_) => None,
+_ => Some(Associativity::Left),
}
}
-fn infix_to_postfix(tokens: Vec<Token>) -> Vec<Token> {
+fn infix_to_postfix(tokens: Vec<Token>) -> Result<Vec<Token>, InfixToPostfixError> {
let mut output = Vec::new();
let mut op_stack: Vec<Token> = Vec::new();
for token in tokens.iter() {
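
associativity now reports "no associativity" as None rather than panicking with unreachable!(), and infix_to_postfix turns that None into an InfixToPostfixError at the call site. A minimal sketch of the same move on a hypothetical char-based operator:

#[derive(Debug, PartialEq)]
enum Assoc {
    Left,
    Right,
}

// Instead of panicking on non-operators, report "no associativity" as None
// and let the caller decide how to surface the error.
fn assoc(op: char) -> Option<Assoc> {
    match op {
        '^' => Some(Assoc::Right),
        '+' | '-' | '*' | '/' => Some(Assoc::Left),
        _ => None,
    }
}

fn main() {
    assert_eq!(assoc('^'), Some(Assoc::Right));
    assert_eq!(assoc('3'), None); // previously this case would have hit unreachable!()
}
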
@@ -331,16 +338,24 @@ fn infix_to_postfix(tokens: Vec<Token>) -> Vec<Token> {
Token::Number(n) => output.push(Token::Number(*n)),
Token::LeftParenthesis => op_stack.push(Token::LeftParenthesis),
Token::RightParenthesis => {
-while op_stack.last().unwrap() != &Token::LeftParenthesis {
-output.push(op_stack.pop().unwrap());
+while op_stack.last() != Some(&Token::LeftParenthesis) {
+output.push(
+op_stack
+.pop()
+.ok_or(InfixToPostfixError::ExpectedLeftParenthesis)?,
+);
}
-assert!(op_stack.last().unwrap() == &Token::LeftParenthesis);
+assert!(op_stack.last() == Some(&Token::LeftParenthesis));
op_stack.pop();
}
Token::Function(_) => op_stack.push(token.clone()),
Token::Separator => {
-while op_stack.last().unwrap() != &Token::LeftParenthesis {
-output.push(op_stack.pop().unwrap());
+while op_stack.last() != Some(&Token::LeftParenthesis) {
+output.push(
+op_stack
+.pop()
+.ok_or(InfixToPostfixError::ExpectedLeftParenthesis)?,
+);
}
}
op => {
@@ -348,9 +363,15 @@ fn infix_to_postfix(tokens: Vec<Token>) -> Vec<Token> {
if op2 != &Token::LeftParenthesis
&& (precedence(op2) > precedence(op)
|| (precedence(op) == precedence(op2)
-&& associativity(op) == Associativity::Left))
+&& associativity(op)
+.ok_or(InfixToPostfixError::ExpectedOperator)?
+== Associativity::Left))
{
-output.push(op_stack.pop().unwrap());
+output.push(
+op_stack
+.pop()
+.ok_or(InfixToPostfixError::ExpectedLeftParenthesis)?,
+);
} else {
break;
}
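
Each op_stack.pop().unwrap() becomes pop().ok_or(...)?, so an unbalanced expression now surfaces as an Err instead of a panic. The Option-to-Result conversion in isolation, with a hypothetical error type:

#[derive(Debug, PartialEq)]
enum StackError {
    ExpectedLeftParenthesis,
}

// ok_or turns Option<T> into Result<T, E>, and `?` propagates the Err instead of panicking.
fn pop_checked(stack: &mut Vec<char>) -> Result<char, StackError> {
    stack.pop().ok_or(StackError::ExpectedLeftParenthesis)
}

fn main() -> Result<(), StackError> {
    let mut stack = vec!['('];
    assert_eq!(pop_checked(&mut stack)?, '(');
    assert_eq!(
        pop_checked(&mut stack),
        Err(StackError::ExpectedLeftParenthesis)
    );
    Ok(())
}
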
@@ -359,11 +380,10 @@ fn infix_to_postfix(tokens: Vec<Token>) -> Vec<Token> {
}
}
}
-while !op_stack.is_empty() {
-assert!(op_stack.last().unwrap() != &Token::LeftParenthesis);
-output.push(op_stack.pop().unwrap());
+while let Some(item) = op_stack.pop() {
+output.push(item);
}
-output
+Ok(output)
}
fn precedence(token: &Token) -> u8 {
@@ -387,8 +407,7 @@ fn calculate(tokens: Vec<Token>) -> Result<f64, CalculateError> {
None => return Err(CalculateError::EmptyStack),
}
}
-while tokens.len() != 0 {
-let mut token = tokens.next().unwrap();
+while let Some(mut token) = tokens.next() {
while let Token::Number(_) = token {
stack.push(token.clone());
if let Some(t) = tokens.next() {
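
while let Some(...) both tests for exhaustion and moves the value out, replacing the len() != 0 / unwrap() pair in calculate and the is_empty() / pop().unwrap() pair used to drain the operator stack. In an isolated sketch:

fn main() {
    let mut op_stack = vec!['+', '*', '^'];
    let mut output = Vec::new();

    // Drain the stack without an explicit is_empty()/unwrap() pair:
    // the loop ends exactly when pop() returns None.
    while let Some(op) = op_stack.pop() {
        output.push(op);
    }

    assert_eq!(output, vec!['^', '*', '+']);
    assert!(op_stack.is_empty());
}
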
@@ -526,13 +545,13 @@ mod tests {
Token::RightParenthesis,
Token::Number(2.),
]),
-vec![
+Some(vec![
Token::LeftParenthesis,
Token::Number(3.),
Token::RightParenthesis,
Token::Multiply,
Token::Number(2.),
-]
+])
);
// 3(2) == 3*(2)
@@ -543,13 +562,13 @@ mod tests {
Token::Number(2.),
Token::RightParenthesis,
]),
-vec![
+Some(vec![
Token::Number(3.),
Token::Multiply,
Token::LeftParenthesis,
Token::Number(2.),
Token::RightParenthesis,
-]
+])
);
// (3)(2) = (3)*(2)
@@ -562,7 +581,7 @@ mod tests {
Token::Number(2.),
Token::RightParenthesis
]),
-vec![
+Some(vec![
Token::LeftParenthesis,
Token::Number(3.),
Token::RightParenthesis,
@@ -570,7 +589,7 @@ mod tests {
Token::LeftParenthesis,
Token::Number(2.),
Token::RightParenthesis
-]
+])
);
// 3 -- 2 == 3 + 2
@@ -581,12 +600,12 @@ mod tests {
Token::Subtract,
Token::Number(2.),
]),
-vec![Token::Number(3.), Token::Add, Token::Number(2.),]
+Some(vec![Token::Number(3.), Token::Add, Token::Number(2.),])
);
assert_eq!(
implicit_operations(vec![Token::Subtract, Token::Number(3.),]),
-vec![Token::Number(-3.)]
+Some(vec![Token::Number(-3.)])
);
assert_eq!(
@@ -596,13 +615,13 @@ mod tests {
Token::Number(3.),
Token::RightParenthesis,
]),
-vec![
+Some(vec![
Token::Number(-1.),
Token::Multiply,
Token::LeftParenthesis,
Token::Number(3.),
Token::RightParenthesis,
-]
+])
);
assert_eq!(
@@ -614,7 +633,7 @@ mod tests {
Token::RightParenthesis,
Token::RightParenthesis,
]),
-vec![
+Some(vec![
Token::LeftParenthesis,
Token::Number(-1.),
Token::Multiply,
@@ -622,7 +641,7 @@ mod tests {
Token::Number(3.),
Token::RightParenthesis,
Token::RightParenthesis,
-]
+])
);
assert_eq!(
@@ -635,7 +654,7 @@ mod tests {
Token::RightParenthesis,
Token::RightParenthesis,
]),
-vec![
+Some(vec![
Token::Number(-1.),
Token::Multiply,
Token::LeftParenthesis,
@@ -645,18 +664,18 @@ mod tests {
Token::Number(3.),
Token::RightParenthesis,
Token::RightParenthesis,
-]
+])
);
}
#[test]
fn infix_to_postfix_test() {
assert_eq!(
infix_to_postfix(vec![Token::Number(3.), Token::Add, Token::Number(2.)]),
-vec![Token::Number(3.), Token::Number(2.), Token::Add,]
+Ok(vec![Token::Number(3.), Token::Number(2.), Token::Add,])
);
assert_eq!(
infix_to_postfix(vec![Token::Number(3.), Token::Add, Token::Number(2.)]),
-vec![Token::Number(3.), Token::Number(2.), Token::Add,]
+Ok(vec![Token::Number(3.), Token::Number(2.), Token::Add,])
);
assert_eq!(
// 3 + 4 × 2 ÷ ( 1 5 ) ^ 2 ^ 3
@@ -678,7 +697,7 @@ mod tests {
Token::Number(3.),
]),
// 3 4 2 × 1 5 2 3 ^ ^ ÷ +
-vec![
+Ok(vec![
Token::Number(3.),
Token::Number(4.),
Token::Number(2.),
@@ -692,15 +711,15 @@ mod tests {
Token::Power,
Token::Divide,
Token::Add,
-]
+])
);
}
#[test]
fn test_calculate() {
assert_eq!(
-calculate(vec![Token::Number(3.), Token::Number(2.), Token::Multiply]).unwrap(),
-6.
+calculate(vec![Token::Number(3.), Token::Number(2.), Token::Multiply]),
+Ok(6.)
);
}
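
Comparing the whole Result in assert_eq! (instead of calling .unwrap()) keeps a failing test from panicking and makes error cases just as easy to assert. A hypothetical extra test in the same style, standalone and not part of this commit:

#[derive(Debug, PartialEq)]
enum DivError {
    DivisionByZero,
}

fn checked_div(a: f64, b: f64) -> Result<f64, DivError> {
    if b == 0.0 {
        return Err(DivError::DivisionByZero);
    }
    Ok(a / b)
}

#[test]
fn test_checked_div() {
    // Asserting on the whole Result covers the Ok case and the Err case
    // with the same pattern, with no .unwrap() panic on failure.
    assert_eq!(checked_div(6.0, 2.0), Ok(3.0));
    assert_eq!(checked_div(6.0, 0.0), Err(DivError::DivisionByZero));
}
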