//! Integration tests for the rune lexer (`rune/tests/lexer.rs`).

use rune::*;
use std::fs::{self, File};
use std::io::Write;
use std::path::{Path, PathBuf};
/// One entry in the table-driven lexer test: a labelled input string and
/// the exact token sequence the lexer is expected to produce for it.
struct TestCase<'a>
{
    /// Case label; also used to build the temp-file name and failure messages.
    name: &'a str,
    /// Raw text handed to the lexer.
    input: &'a str,
    /// Expected (token type, lexeme) pairs, in order.
    expected: Vec<(TokenType, &'a str)>
}
/// Identity transform used where the lexer API requires a post-processing
/// hook: returns an unmodified copy of the input stream, so tests observe
/// the raw token sequence the scanner produced.
fn dummy_transform(tokens: &TokenStream<TokenType>) -> TokenStream<TokenType>
{
    tokens.clone()
}
/// Writes `content` to `rune_test_<name>.txt` in the OS temp directory and
/// returns the file's path.
///
/// # Panics
/// Panics on any I/O failure — acceptable in a test helper, where a failed
/// setup should abort the test immediately.
fn write_temp_file(name: &str, content: &str) -> PathBuf
{
    let mut path = std::env::temp_dir();
    path.push(format!("rune_test_{}.txt", name));
    // fs::write creates (or truncates) and writes in a single call, replacing
    // the manual File::create + write! pair.
    fs::write(&path, content).expect("Failed to write test content");
    path
}
/// Best-effort removal of a temp file created by `write_temp_file`.
///
/// Takes `&Path` rather than `&PathBuf` (deref coercion keeps existing
/// `&PathBuf` call sites working). Errors — including "file not found" —
/// are deliberately ignored so cleanup never masks the real test outcome.
fn cleanup_temp_file(path: &Path)
{
    // No exists() pre-check: remove_file already handles a missing file,
    // and checking first would only add a time-of-check/time-of-use race.
    let _ = fs::remove_file(path);
}
#[test]
fn basic_lexing()
{
    // Two words separated by a single space should lex to exactly
    // Text / Whitespace / Text.
    let stream = Lexer::scan_text("magic runes", dummy_transform)
        .expect("Lexer should succeed");
    let actual = stream.into_iter()
                       .map(|token| (*token.variant, String::from(token.lexeme)))
                       .collect::<Vec<_>>();
    assert_eq!(actual,
               vec![(TokenType::Text, "magic".to_string()),
                    (TokenType::Whitespace, " ".to_string()),
                    (TokenType::Text, "runes".to_string())]);
}
#[test]
fn symbols_and_numbers()
{
    // Numbers and an operator symbol, separated by whitespace.
    let expected: Vec<(TokenType, String)> =
        vec![(TokenType::Numeric, "13".into()),
             (TokenType::Whitespace, " ".into()),
             (TokenType::Symbol, "+".into()),
             (TokenType::Whitespace, " ".into()),
             (TokenType::Numeric, "37".into())];
    let actual: Vec<(TokenType, String)> =
        Lexer::scan_text("13 + 37", dummy_transform)
            .expect("Lexer should succeed")
            .into_iter()
            .map(|token| (*token.variant, String::from(token.lexeme)))
            .collect();
    assert_eq!(actual, expected);
}
#[test]
fn lexer_with_cases()
{
    // Table-driven check: each case is written to a temp file, lexed via
    // scan_file, and compared against its expected token sequence.
    // NOTE(review): unlike scan_text above, these expectations include a
    // trailing Newline token — presumably appended by file-based scanning.
    let cases = vec![TestCase { name: "simple_words",
                                input: "magic rune",
                                expected: vec![(TokenType::Text, "magic"),
                                               (TokenType::Whitespace, " "),
                                               (TokenType::Text, "rune"),
                                               (TokenType::Newline, "\n")] },
                     TestCase { name: "symbols_and_digits",
                                input: "12 + 7",
                                expected: vec![(TokenType::Numeric, "12"),
                                               (TokenType::Whitespace, " "),
                                               (TokenType::Symbol, "+"),
                                               (TokenType::Whitespace, " "),
                                               (TokenType::Numeric, "7"),
                                               (TokenType::Newline, "\n")] },
                     TestCase { name: "only_symbols",
                                input: "###",
                                expected: vec![(TokenType::Symbol, "#"),
                                               (TokenType::Symbol, "#"),
                                               (TokenType::Symbol, "#"),
                                               (TokenType::Newline, "\n")] },
                     TestCase { name: "whitespace_and_text",
                                input: "   spell",
                                expected: vec![(TokenType::Whitespace, " "),
                                               (TokenType::Whitespace, " "),
                                               (TokenType::Whitespace, " "),
                                               (TokenType::Text, "spell"),
                                               (TokenType::Newline, "\n")] }];
    for case in cases
    {
        let path = write_temp_file(case.name, case.input);
        // unwrap_or_else avoids building the panic message eagerly on the
        // success path (clippy::expect_fun_call).
        let result = Lexer::scan_file(&path, dummy_transform)
            .unwrap_or_else(|e| {
                panic!("Lexer failed on case '{}': {:?}", case.name, e)
            });
        let actual = result.into_iter()
                           .map(|t| (*t.variant, String::from(t.lexeme)))
                           .collect::<Vec<_>>();
        let expected = case.expected
                           .iter()
                           .map(|(ty, s)| (*ty, s.to_string()))
                           .collect::<Vec<_>>();
        // Clean up before asserting so a mismatch doesn't leak the temp file.
        cleanup_temp_file(&path);
        assert_eq!(actual, expected, "Mismatch in test case '{}'", case.name);
    }
}