elf_lojban/src/lib.rs

//! # elf_lojban
//!
//! elf_lojban: lex your lojban. Parser coming soon.
//!
//! ## Basic Usage
//!
//! ```
//! use elf_lojban::lex;
//!
//! // Lex the input; whitespace is kept as its own tokens.
//! let tokens = lex("mi prami do");
//!
//! assert_eq![
//!     tokens.iter().map(|t| t.s).collect::<Vec<&str>>(),
//!     ["mi", " ", "prami", " ", "do"]
//! ];
//! ```
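//!
//! Each [`Token`] also records its [`TokenKind`], so whitespace can be
//! filtered out after lexing:
//!
//! ```
//! use elf_lojban::{lex, TokenKind};
//!
//! // Keep only the non-whitespace slices.
//! let words: Vec<&str> = lex("mi prami do")
//!     .iter()
//!     .filter(|t| t.kind != TokenKind::Whitespace)
//!     .map(|t| t.s)
//!     .collect();
//!
//! assert_eq![words, ["mi", "prami", "do"]];
//! ```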
mod lex;
pub mod lojbanic;
mod strange;
pub use lex::*;
/// A lexed slice of the source buffer, tagged with its [`TokenKind`].
#[derive(Debug, Eq, PartialEq)]
pub struct Token<'src_buf> {
    /// The slice of source text this token covers.
    pub s: &'src_buf str,
    /// The lexical class assigned to this slice.
    pub kind: TokenKind,
}
macro_rules! gen_token_fns {
    ($($nym:ident, $v:ident),*) => {$(
        pub fn $nym(s: &'src_buf str) -> Self {
            Self {
                s,
                kind: TokenKind::$v,
            }
        }
    )*};
}
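// For a single pair, e.g. `gen_token_fns![cmavo, Cmavo]`, the macro expands
// to roughly:
//
//     pub fn cmavo(s: &'src_buf str) -> Self {
//         Self { s, kind: TokenKind::Cmavo }
//     }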
impl<'src_buf> Token<'src_buf> {
    gen_token_fns![
        brivla, Brivla, cmavo, Cmavo, cmevla, Cmevla, number, Number, unknown, Unknown,
        whitespace, Whitespace
    ];
}
/// The lexical class of a [`Token`].
#[derive(Debug, Eq, PartialEq)]
pub enum TokenKind {
    /// Content word (predicate), e.g. `prami`.
    Brivla,
    /// Structure word (particle), e.g. `mi`, `do`.
    Cmavo,
    /// Name word.
    Cmevla,
    /// Numeric token.
    Number,
    /// Input the lexer could not classify.
    Unknown,
    /// A run of whitespace.
    Whitespace,
}
#[test]
fn lexes() {
    assert_eq![
        lex("mi prami do"),
        [
            Token::cmavo("mi"),
            Token::whitespace(" "),
            Token::brivla("prami"),
            Token::whitespace(" "),
            Token::cmavo("do")
        ]
    ];
    assert_eq![lex("garbage"), [Token::brivla("garbage")]];
    assert_eq![lex("loprami"), [Token::brivla("loprami")]];
}
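// Illustrative extra check: constructors generated by `gen_token_fns!` should
// tag each token with the matching kind.
#[test]
fn constructors_set_kind() {
    assert_eq![Token::brivla("prami").kind, TokenKind::Brivla];
    assert_eq![Token::whitespace(" ").kind, TokenKind::Whitespace];
}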