2025-04-15 21:17:28 -04:00
|
|
|
use std::fs::{self, File};
use std::io::Write;
use std::path::{Path, PathBuf};

use rune::*;
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/// One table-driven lexer scenario: a labelled input string together with
/// the exact (token type, lexeme) sequence the lexer must produce for it.
struct TestCase<'a>
{
    // Human-readable label; used for the temp-file name and failure messages.
    name: &'a str,
    // Raw text fed to the lexer.
    input: &'a str,
    // Expected (variant, lexeme) pairs, in scan order.
    expected: Vec<(TokenType, &'a str)>
}
|
|
|
|
|
|
|
|
|
|
|
|
|
2025-04-16 01:54:22 -04:00
|
|
|
fn dummy_transform(tokens: &TokenStream<TokenType>) -> TokenStream<TokenType>
|
2025-04-15 21:17:28 -04:00
|
|
|
{
|
2025-04-16 01:54:22 -04:00
|
|
|
/*
|
|
|
|
let mut stream: TokenStream<(TokenType, String)> = TokenStream::default();
|
2025-04-15 21:17:28 -04:00
|
|
|
|
2025-04-16 01:54:22 -04:00
|
|
|
stream.lexemes = tokens.lexemes.clone();
|
|
|
|
stream.locations = tokens.locations.clone();
|
|
|
|
|
|
|
|
for 0..tokens.lexemes.len()
|
2025-04-15 21:17:28 -04:00
|
|
|
{
|
2025-04-16 01:54:22 -04:00
|
|
|
stream.variants
|
2025-04-15 21:17:28 -04:00
|
|
|
}
|
|
|
|
|
2025-04-16 01:54:22 -04:00
|
|
|
stream
|
|
|
|
*/
|
|
|
|
tokens.clone()
|
2025-04-15 21:17:28 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Creates a file in the system temp directory containing `content` and
/// returns its path.
///
/// The file name embeds the current process id alongside `name`, so two
/// concurrent test runs (e.g. `cargo test` started twice) cannot clobber
/// each other's fixtures.
///
/// # Panics
/// Panics if the file cannot be created or written — both are test-setup
/// failures, not conditions the tests should recover from.
fn write_temp_file(name: &str, content: &str) -> PathBuf
{
    let mut path = std::env::temp_dir();
    path.push(format!("rune_test_{}_{}.txt", std::process::id(), name));

    let mut file = File::create(&path).expect("Failed to create temp file");
    write!(file, "{}", content).expect("Failed to write test content");

    path
}
|
|
|
|
|
|
|
|
/// Best-effort removal of a test fixture: deletes the file at `path` if it
/// exists, silently ignoring removal errors (cleanup must never fail a test).
///
/// Takes `&Path` rather than `&PathBuf` so any path-like borrow works;
/// existing `cleanup_temp_file(&path)` call sites where `path: PathBuf`
/// still compile unchanged via deref coercion.
fn cleanup_temp_file(path: &Path)
{
    if path.exists()
    {
        let _ = fs::remove_file(path);
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/// A simple two-word input must lex into text, whitespace, and text tokens,
/// followed by the implicit trailing newline.
#[test]
fn test_basic_lexing()
{
    let scanned = Lexer::scan_text("magic runes", dummy_transform)
        .expect("Lexer should succeed");

    let mut actual = Vec::new();
    for token in scanned.into_iter()
    {
        actual.push((*token.variant, String::from(token.lexeme)));
    }

    let expected = vec![
        (TokenType::Text, String::from("magic")),
        (TokenType::Whitespace, String::from(" ")),
        (TokenType::Text, String::from("runes")),
        (TokenType::Newline, String::from("\n")),
    ];

    assert_eq!(actual, expected);
}
|
|
|
|
|
|
|
|
|
|
|
|
/// Digits and a symbol separated by spaces must lex into numeric,
/// whitespace, and symbol tokens, followed by the implicit trailing newline.
#[test]
fn test_symbols_and_numbers()
{
    let scanned = Lexer::scan_text("13 + 37", dummy_transform)
        .expect("Lexer should succeed");

    let actual: Vec<(TokenType, String)> = scanned
        .into_iter()
        .map(|token| (*token.variant, String::from(token.lexeme)))
        .collect();

    let expected: Vec<(TokenType, String)> = vec![
        (TokenType::Numeric, "13".into()),
        (TokenType::Whitespace, " ".into()),
        (TokenType::Symbol, "+".into()),
        (TokenType::Whitespace, " ".into()),
        (TokenType::Numeric, "37".into()),
        (TokenType::Newline, "\n".into()),
    ];

    assert_eq!(actual, expected);
}
|
|
|
|
|
|
|
|
|
|
|
|
/// Table-driven lexer test over the file-based entry point: each `TestCase`
/// is written to a temp file, scanned with `Lexer::scan_file`, and the
/// resulting (variant, lexeme) sequence is compared against the expectation.
#[test]
fn test_lexer_with_cases()
{
    let cases = vec![
        TestCase {
            name: "simple_words",
            input: "magic rune",
            expected: vec![
                (TokenType::Text, "magic"),
                (TokenType::Whitespace, " "),
                (TokenType::Text, "rune"),
                (TokenType::Newline, "\n"),
            ],
        },
        TestCase {
            name: "symbols_and_digits",
            input: "12 + 7",
            expected: vec![
                (TokenType::Numeric, "12"),
                (TokenType::Whitespace, " "),
                (TokenType::Symbol, "+"),
                (TokenType::Whitespace, " "),
                (TokenType::Numeric, "7"),
                (TokenType::Newline, "\n"),
            ],
        },
        TestCase {
            name: "only_symbols",
            input: "###",
            expected: vec![
                (TokenType::Symbol, "#"),
                (TokenType::Symbol, "#"),
                (TokenType::Symbol, "#"),
                (TokenType::Newline, "\n"),
            ],
        },
        TestCase {
            name: "whitespace_and_text",
            input: "   spell",
            expected: vec![
                (TokenType::Whitespace, " "),
                (TokenType::Whitespace, " "),
                (TokenType::Whitespace, " "),
                (TokenType::Text, "spell"),
                (TokenType::Newline, "\n"),
            ],
        },
    ];

    for case in cases
    {
        let path = write_temp_file(case.name, case.input);

        // `unwrap_or_else` only builds the panic message on failure,
        // unlike `expect(&format!(..))`, which allocates it on every call.
        let result = Lexer::scan_file(&path, dummy_transform)
            .unwrap_or_else(|_| panic!("Lexer failed on case '{}'", case.name));

        let result: Vec<(TokenType, String)> = result
            .into_iter()
            .map(|t| (*t.variant, String::from(t.lexeme)))
            .collect();

        let expected: Vec<(TokenType, String)> = case.expected
            .iter()
            .map(|(ty, s)| (*ty, s.to_string()))
            .collect();

        // Remove the fixture *before* asserting so a failing case does not
        // leak its temp file (assert_eq! panics and would skip cleanup).
        cleanup_temp_file(&path);

        assert_eq!(result, expected,
                   "Mismatch in test case '{}'",
                   case.name);
    }
}
|