Added the new LexerResult type to use as an error result.

The code now returns a Result containing the TokenStream. For now
it returns any error (a boxed dyn Error), but a dedicated lexical
analysis error type should be introduced later.
This commit is contained in:
This commit is contained in:
2025-10-21 11:43:11 -04:00
parent c950b70c9b
commit 6984455c12
4 changed files with 108 additions and 26 deletions

View File

@ -15,7 +15,8 @@ struct TestCase<'a>
fn dummy_transform(tokens: &TokenStream<TokenType>) -> TokenStream<TokenType>
fn dummy_transform(tokens: &TokenStream<TokenType>)
-> Result<TokenStream<TokenType>, Box<dyn std::error::Error + Send + Sync>>
{
/*
let mut stream: TokenStream<(TokenType, String)> = TokenStream::default();
@ -30,7 +31,7 @@ fn dummy_transform(tokens: &TokenStream<TokenType>) -> TokenStream<TokenType>
stream
*/
tokens.clone()
Ok(tokens.clone())
}
fn write_temp_file(name: &str, content: &str) -> PathBuf
@ -56,8 +57,9 @@ fn cleanup_temp_file(path: &PathBuf)
fn basic_lexing()
{
let tokens =
Lexer::scan_text("magic runes", dummy_transform).expect("Lexer should \
succeed");
Lexer::scan_text::<_, TokenType, LexerError>("magic runes",
dummy_transform).expect("Lexer should \
succeed");
let tokens = tokens.into_iter()
.map(|t| (*t.variant, String::from(t.lexeme)))
@ -75,7 +77,7 @@ fn basic_lexing()
fn symbols_and_numbers()
{
let tokens =
Lexer::scan_text("13 + 37", dummy_transform).expect("Lexer should \
Lexer::scan_text::<_, TokenType, LexerError>("13 + 37", dummy_transform).expect("Lexer should \
succeed");
let tokens = tokens.into_iter()