I don't remember writing half of this :)

main
able 2022-07-02 02:25:43 -05:00
commit 436ebea1a7
9 changed files with 1942 additions and 0 deletions

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/target

1666
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

10
Cargo.toml Normal file
View File

@ -0,0 +1,10 @@
[package]
name = "web-lisp"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# NOTE(review): "*" wildcard versions make builds non-reproducible and
# will silently pull in breaking releases — e.g. logos 0.13 removed the
# #[error] attribute that src/lexer.rs relies on. Pin these (e.g.
# logos = "0.12", and whatever eframe version this was built against).
logos = "*"
eframe = "*"

127
src/lexer.rs Normal file
View File

@ -0,0 +1,127 @@
/// Numeric identifier type used across the crate.
/// Deliberately `u16` rather than a wider integer to prevent excessive
/// per-item bloat.
/// NOTE(review): nothing in this file uses `ID` yet — confirm intended use.
pub type ID = u16;
use std::{
error::Error,
hash::Hash,
num::{ParseFloatError, ParseIntError},
};
/// Document-level metadata.
/// NOTE(review): presumably populated from a `(metadata ...)` form like
/// the one in the sample .wisp source; nothing in this file fills it in
/// yet — confirm against the parser once metadata handling exists.
pub struct Metadata {
    /// Page title, if one was declared.
    pub title: Option<String>,
}
use logos::{Lexer, Logos};
/// Lexical tokens for .wisp source, produced by the `logos` derive.
/// Each `#[regex]`/`#[token]` attribute defines the pattern; callbacks
/// (e.g. `tag_parser`) post-process the matched slice into the variant's
/// payload.
#[derive(Logos, Debug, PartialEq)]
pub enum Token {
    // Tokens can be literal strings, of any length.
    /// An opening tag such as `(text`; the callback strips the leading '('.
    #[regex(r#"\([a-zA-Z-]+"#, tag_parser)]
    Tag(String),
    /// Lisp quote marker `'`.
    #[token("'")]
    Quote,
    /// A bare '(' that did not start a tag.
    #[token("(")]
    StartParen,
    /// Closing ')'.
    #[token(")")]
    EndParen,
    /// Double-quoted string literal; surrounding quotes are stripped by `strg`.
    #[regex("\"[a-zA-Z ,.!]+\"", strg)]
    Strg(String),
    /// Keyword argument such as `:id abc` (split by `kwarg_parse`).
    #[regex(":[a-zA-Z ,.!]+", kwarg_parse)]
    Kwarg(Kwarg),
    /// Signed decimal integer literal.
    #[regex("[+-]?[0-9]+", num)]
    Num(i64),
    /// Float literal; explicit priority disambiguates the overlap with `Num`.
    /// NOTE(review): this pattern also matches plain integers — confirm
    /// that `priority = 2` still lets bare integers lex as `Num`.
    #[regex("[+-]?[0-9]*[.]?[0-9]+(?:[eE][+-]?[0-9]+)?", priority = 2, callback = float_parse)]
    Float(f64),
    /// Hex color of the exact form `#RRGGBB` (uppercase hex digits only).
    #[regex("#[0-9A-F][0-9A-F][0-9A-F][0-9A-F][0-9A-F][0-9A-F]", hexa)]
    HexaDec(Rgb),
    /// Catch-all error variant (logos 0.12-style `#[error]`); whitespace
    /// is skipped instead of producing tokens.
    #[error]
    #[regex(r"[ \t\n\f]+", logos::skip)]
    Error,
}
/// Tokenizes an entire source string, returning every token (including
/// any `Token::Error` entries) in order of appearance.
pub fn lex_string(strn: String) -> Vec<Token> {
    Token::lexer(&strn).collect()
}
/// Callback for `Token::Tag`: drops the leading '(' from the matched
/// slice (e.g. `"(text"` becomes `"text"`).
pub fn tag_parser(lex: &mut Lexer<Token>) -> Option<String> {
    // '(' is a single ASCII byte, so byte-slicing past it is safe.
    Some(lex.slice()[1..].to_string())
}
/// Callback for `Token::Strg`: strips the surrounding double quotes from
/// the matched slice.
pub fn strg(lex: &mut Lexer<Token>) -> Option<String> {
    let quoted = lex.slice();
    // The regex guarantees a quote at each end and only ASCII inside,
    // so byte-index slicing is safe.
    Some(quoted[1..quoted.len() - 1].to_string())
}
/// Callback for `Token::Float`: converts the matched text to `f64`.
/// The regex only admits valid float syntax, so parsing cannot fail.
pub fn float_parse(lex: &mut Lexer<Token>) -> f64 {
    lex.slice().parse::<f64>().unwrap()
}
/// Callback for `Token::Num`: parses the matched decimal literal.
///
/// The regex guarantees the slice is an optional sign plus digits, but
/// the value can still overflow `i64` (e.g. a 20-digit literal), so the
/// old `unreachable!` on `ParseIntError` was in fact reachable and would
/// panic the lexer. Returning `None` makes logos emit `Token::Error`
/// for such slices instead.
pub fn num(lex: &mut Lexer<Token>) -> Option<i64> {
    lex.slice().parse::<i64>().ok()
}
/// Callback for `Token::Kwarg`: splits a `:name value` slice into a
/// `Kwarg { name, value }` pair.
///
/// The name is the first space-separated word after the ':', the value
/// the second. The old code called `unwrap()` on the second word, which
/// panicked when the matched slice contained no space (e.g. `:id`
/// immediately followed by a character outside the kwarg regex); such
/// kwargs now get an empty value instead.
pub fn kwarg_parse(lex: &mut Lexer<Token>) -> Kwarg {
    // ':' is a single ASCII byte, so byte-slicing past it is safe.
    let body = &lex.slice()[1..];
    let mut words = body.split(' ');
    let name = words.next().unwrap_or_default().to_string();
    let value = words.next().unwrap_or_default().to_string();
    Kwarg { name, value }
}
/// An RGB color decoded from a `#RRGGBB` literal (see `hexa`).
#[derive(Debug, PartialEq)]
pub struct Rgb {
    // Channel intensities, 0-255.
    pub red: u8,
    pub green: u8,
    pub blue: u8,
}
/// A keyword argument (`:name value`) attached to a tag.
/// Fields are intentionally private; construction happens via `kwarg_parse`.
#[derive(Debug, PartialEq)]
pub struct Kwarg {
    // Argument name, without the leading ':'.
    name: String,
    // Raw argument value as matched by the lexer.
    value: String,
}
/// Callback for `Token::HexaDec`: decodes a `#RRGGBB` slice into `Rgb`.
///
/// Fix: the previous implementation read raw ASCII bytes of the slice
/// (`slice[0..=1].as_bytes()[0]` is always b'#'), so every channel held
/// a character code rather than the decoded hex value. Each two-digit
/// pair is now parsed as base-16.
pub fn hexa(lex: &mut Lexer<Token>) -> Rgb {
    let slice = lex.slice();
    // Layout: '#' at byte 0, RR at 1..3, GG at 3..5, BB at 5..7;
    // the regex guarantees exactly this shape with uppercase hex digits.
    let channel = |range: std::ops::Range<usize>| {
        u8::from_str_radix(&slice[range], 16)
            .expect("regex guarantees two hex digits")
    };
    Rgb {
        red: channel(1..3),
        green: channel(3..5),
        blue: channel(5..7),
    }
}

3
src/lib.rs Normal file
View File

@ -0,0 +1,3 @@
/// `logos`-based tokenizer for .wisp source.
pub mod lexer;
/// Parse-tree node type.
pub mod node;
/// Token-stream to tree parser (work in progress).
pub mod parser;

45
src/main.rs Normal file
View File

@ -0,0 +1,45 @@
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
// hide console window on Windows in release
use web_lisp::{
lexer::{self, Token},
parser,
};
/// Entry point: lexes the bundled sample document, runs the parser pass
/// (which currently just prints the tree it builds), then opens an
/// eframe window rendering the token stream.
fn main() {
    let tokens = lexer::lex_string(include_str!("../web_lisp_src/hello_world.wisp").to_string());
    // `parse_vec` returns (); the old code bound that unit value to a
    // junk-named variable, which only produced an unused-variable warning.
    parser::parse_vec(tokens.as_slice());
    eframe::run_native(
        "Web Lisp Browser",
        eframe::NativeOptions::default(),
        Box::new(|_cc| Box::new(MyApp::new(tokens))),
    );
}
use eframe::egui;
/// Application state for the egui window: the lexed token stream to render.
struct MyApp {
    tokens: Vec<Token>,
}
impl MyApp {
    /// Builds the app from an already-lexed token stream.
    fn new(tokens: Vec<Token>) -> Self {
        Self { tokens }
    }
}
impl eframe::App for MyApp {
    /// Redraws the UI every frame: each `Tag` token is rendered as a
    /// heading in the central panel; all other token kinds are ignored.
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        egui::CentralPanel::default().show(ctx, |ui| {
            for token in &self.tokens {
                if let Token::Tag(name) = token {
                    ui.heading(name);
                }
            }
        });
    }
}

14
src/node.rs Normal file
View File

@ -0,0 +1,14 @@
use std::fmt::Display;
/// A node in the parsed document tree: optional text plus child nodes.
#[derive(Debug)]
pub struct Node<'a> {
    pub text: Option<&'a str>,
    pub children: Vec<Node<'a>>,
}

/// `Display` delegates to the pretty-printed `Debug` representation,
/// so `println!("{}", node)` dumps the whole subtree.
impl Display for Node<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{self:#?}")
    }
}

67
src/parser.rs Normal file
View File

@ -0,0 +1,67 @@
use crate::{
lexer::{Kwarg, Token},
node::Node,
};
pub enum ParserError {}
/// Walks a token stream — skipping the first and last tokens, which are
/// assumed to be the outermost parens — and builds a flat, one-level
/// tree under a synthetic "Root" node, printing it when done.
///
/// Fix: `1..tokens.len() - 1` underflowed on an empty slice (panicking
/// in debug, indexing out of bounds in release); short inputs now just
/// print the empty root. Also drops the unused `mut` on the parameter.
///
/// NOTE(review): only `"text"` tags and string literals are handled so
/// far; every closing paren flushes the most recent string into a child
/// node, and nesting is not yet modeled.
pub fn parse_vec(tokens: &[Token]) {
    // Declared before `nodes` so string refs stored in the tree
    // (initially borrowed from this placeholder) outlive it.
    let empty = String::new();
    let mut nodes: Node = Node {
        text: Some("Root"),
        children: vec![],
    };
    // NOTE(review): `current_token` and `kwargs` are written but never
    // read yet — kept for the in-progress implementation.
    let mut current_token: Option<Token> = None;
    let mut kwargs: Vec<&Kwarg> = vec![];
    let mut strin: &String = &empty;
    let mut expr_finished = false;

    if tokens.len() >= 2 {
        for token in &tokens[1..tokens.len() - 1] {
            match token {
                Token::Tag(tag) => match tag.as_str() {
                    "text" => {
                        current_token = Some(Token::Tag(tag.to_string()));
                    }
                    "title" => {}
                    _ => {}
                },
                Token::Kwarg(kwarg) => kwargs.push(kwarg),
                Token::Strg(strg) => strin = strg,
                Token::EndParen => expr_finished = true,
                _ => {}
            }
            if expr_finished {
                println!("text finished");
                expr_finished = false;
                nodes.children.push(Node {
                    text: Some(strin.as_str()),
                    children: vec![],
                });
            }
        }
    }
    println!("{}", nodes);
}
/// Coarse classification of tags the parser understands.
/// NOTE(review): nothing in this file uses it yet — `parse_vec` matches
/// on the raw tag string instead.
pub enum TagTypes {
    Text,
    Unknown,
}

View File

@ -0,0 +1,9 @@
(wisp
(metadata (title "Web Lisp"))
(onload (code '()))
(on-update (code '()))
(document
(style :id "abc" :style '())
(text :style "abc" "Smol paragraph!!")
(button (lambda '()))
(image :id 1)))