//! Tokenizer for URL components (scheme, authority, port, path, query,
//! fragment) built on the `logos` lexer-generator crate.
use logos::{Lexer, Logos};
|
||
|
|
||
|
/// Token set for a simple URL lexer generated by `logos`.
///
/// Whitespace between tokens is skipped. Where several regexes match the
/// same span, `priority` decides the winner: `Authority` (10) beats
/// `Path` (1).
#[derive(Logos, Debug, PartialEq)]
#[logos(skip r"[ \t\n\f]+")] // Ignore this regex pattern between tokens
pub enum Token {
    // Tokens can be literal strings, of any length.
    /// Scheme such as `http://` or `mailto:`. The `scheme_parse` callback
    /// keeps the whole matched text, trailing `:` / `//` included.
    #[regex("[a-zA-Z0-9]+:/?/?", scheme_parse)]
    Scheme(String),

    /// Authority / host part, e.g. `example.com`. High priority so it wins
    /// over `Path` for plain alphanumeric-and-dot spans.
    #[regex(r#"[a-zA-Z0-9.]+"#, |lex| lex.slice().parse().ok(), priority = 10)]
    Authority(String),

    /// Port with its leading `:`; the `port` callback strips the colon and
    /// parses the digits into a `u16`.
    #[regex(":[0-9]+", port)]
    Port(u16),

    /// Path segment (also admits `=`, `_`, and the `!`-`+` character range).
    /// Low priority so `Authority` is preferred when both regexes match.
    #[regex(r#"[a-zA-Z0-9./\-=_!-+]+"#, |lex| lex.slice().parse().ok(), priority = 1)]
    Path(String),

    /// Query string beginning with `?`; `fragment` drops the leading marker.
    #[regex(r#"\?[a-zA-Z0-9./\-!-+=]+"#, fragment)]
    Query(String),

    /// Fragment beginning with `#`; `fragment` drops the leading marker.
    #[regex("#[a-zA-Z0-9./]+", fragment)]
    Fragment(String),
}
|
||
|
|
||
|
/// Callback for [`Token::Port`]: strips the leading `:` from the matched
/// slice (e.g. `":8080"`) and parses the remaining digits as a `u16`.
///
/// Returns `None` when parsing fails (e.g. the number overflows `u16`),
/// which makes `logos` reject the match instead of panicking.
fn port(lex: &mut Lexer<Token>) -> Option<u16> {
    // The regex `:[0-9]+` guarantees a leading ':'; `strip_prefix` removes
    // it without the panic-prone manual slicing of `slice[1..slice.len()]`.
    lex.slice().strip_prefix(':')?.parse().ok()
}
|
||
|
|
||
|
/// Callback for [`Token::Scheme`]: returns the matched text verbatim,
/// including the trailing `:` / `://` (e.g. `"http://"`, `"mailto:"`).
///
/// Always succeeds; the `Option` return is only what the `logos` callback
/// signature requires.
pub fn scheme_parse(lex: &mut Lexer<Token>) -> Option<String> {
    Some(lex.slice().to_string())
}
|
||
|
|
||
|
/// Shared callback for [`Token::Query`] and [`Token::Fragment`]: drops the
/// single-byte leading marker (`?` or `#`) and returns the remainder as an
/// owned `String`.
pub fn fragment(lex: &mut Lexer<Token>) -> Option<String> {
    // Both regexes start with an ASCII marker, so byte index 1 is always a
    // valid char boundary; slicing before allocating avoids the O(n) shift
    // of `String::remove(0)`, and `get` keeps the function panic-free.
    lex.slice().get(1..).map(str::to_string)
}
|