[#3] TokenStream now holds generic variants.

This makes the TokenStream and all of its associated Token types
generic over the token's variant type.
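
For illustration, here is a minimal sketch of the new call shape,
mirroring the updated doc example in this diff. The MathToken enum is
only a placeholder for the kind of domain-specific variant a caller
could plug in once they build a TokenStream<MathToken> inside their
transform; everything else comes straight from the example below.

```rust
use rune::{Lexer, TokenStream, TokenType};

// Placeholder for a caller-defined, domain-specific variant type.
#[allow(dead_code)]
enum MathToken { Number, Operator, Identifier }

// The transform now receives the lexer's TokenStream<TokenType> and may
// return a TokenStream over any variant type, not just a Vec of tuples.
fn transform(tokens: &TokenStream<TokenType>) -> TokenStream<TokenType>
{
    tokens.clone()
}

fn main()
{
    let tokens = Lexer::scan_text("Runes += 42", transform).unwrap();

    for token in &tokens
    {
        println!("{}", token);
    }
}
```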

Span was also given the ability to merge with another Span. This
makes it easier to track a combined span as users group TokenTypes
together into their own domain-specific types.
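
The merge itself is not shown in this diff, so the snippet below is
only a self-contained sketch of the usual semantics (cover both spans:
earliest start to latest end). The standalone Span struct, its
start/end fields, and the merge name are assumptions for illustration,
not the crate's actual definition.

```rust
// Illustrative stand-in for span merging; not rune's actual Span type.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span
{
    start: usize,
    end: usize,
}

impl Span
{
    // Merge by covering both spans: earliest start to latest end.
    fn merge(&self, other: &Span) -> Span
    {
        Span { start: self.start.min(other.start), end: self.end.max(other.end) }
    }
}

fn main()
{
    // Two adjacent token spans grouped into one domain-specific token.
    let ident = Span { start: 0, end: 5 };
    let operator = Span { start: 6, end: 8 };

    assert_eq!(ident.merge(&operator), Span { start: 0, end: 8 });
}
```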

All tests and examples were updated for this change.

The version was incremented to 0.2.0.
2025-04-16 01:54:22 -04:00
parent f924811c47
commit 7c564d18a2
8 changed files with 201 additions and 64 deletions

@@ -31,24 +31,17 @@ const BUFFER_SIZE: usize = 1024 * 1024;
 /// ```rust
 /// use rune::{Lexer, TokenStream, TokenType};
 ///
-/// fn transform(tokens: &TokenStream) -> Vec<(TokenType, String)>
+/// fn transform(tokens: &TokenStream<TokenType>) -> TokenStream<TokenType>
 /// {
-///     let mut new_tokens = Vec::new();
-///
-///     for token in tokens
-///     {
-///         new_tokens.push((*token.variant, token.lexeme.to_string()));
-///     }
-///
-///     new_tokens
+///     tokens.clone()
 /// }
 ///
 /// let tokens = Lexer::scan_text("Runes += 42", transform).unwrap();
 ///
-/// // The tuple here is from the transform functions return type.
-/// for (ty, lexeme) in tokens
+/// for token in &tokens
 /// {
-///     println!("{:?}: {:?}", ty, lexeme);
+///     println!("{}", token);
 /// }
 /// ```
 ///
@@ -72,9 +65,9 @@ impl Lexer
 {
     /// Scans a file and produces a vector of transformed tokens.
     pub fn scan_file<P, F, T>(path: P, transform: F)
-        -> Result<Vec<T>, LexerError>
+        -> Result<TokenStream<T>, LexerError>
         where P: AsRef<std::path::Path>,
-              F: FnOnce(&TokenStream) -> Vec<T>
+              F: FnOnce(&TokenStream<TokenType>) -> TokenStream<T>
     {
         let mut cursor = Position::default();
         let mut stream = TokenStream::new();
@@ -123,8 +116,8 @@ impl Lexer

     /// Scans a full in-memory string and returns transformed tokens.
     pub fn scan_text<F, T>(text: &str, transform: F)
-        -> Result<Vec<T>, LexerError>
-        where F: FnOnce(&TokenStream) -> Vec<T>
+        -> Result<TokenStream<T>, LexerError>
+        where F: FnOnce(&TokenStream<TokenType>) -> TokenStream<T>
     {
         let mut cursor = Position::default();
         let mut stream = TokenStream::new();
@@ -145,7 +138,7 @@ impl Lexer
     }

     /// Internal: scans a single line of text into tokens.
-    fn scan(line: &str, stream: &mut TokenStream, cursor: &mut Position)
+    fn scan(line: &str, stream: &mut TokenStream<TokenType>, cursor: &mut Position)
     {
         for c in line.chars()
         {