Mirror of https://github.com/azur1s/bobbylisp.git
fix: updated makefile to be in parent dir
Commit e55fee86cb (parent 5b6352074d)
Makefile (new file, 11 lines added)
@@ -0,0 +1,11 @@
+all: build-blspc build-blvm
+
+build-blspc:
+	cd ./blspc; cargo build
+	rm ~/bin/blspc -f
+	mv ./target/debug/blspc ~/bin/blspc
+
+build-blvm:
+	cd ./blvm; cargo build
+	rm ~/bin/blvm -f
+	mv ./target/debug/blvm ~/bin/blvm
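With this Makefile at the repository root, running make (the default all target) builds both crates with cargo, force-removes any blspc and blvm binaries already in ~/bin, and moves the freshly built debug binaries there in their place.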
@@ -1,13 +1,19 @@
-1: STORE r1 $true
-2: JUMP_IF_FALSE r1 9
-3: STORE r2 $34
-4: STORE r3 $35
-5: IADD r2 r3 r4
-6: STORE r5 $1
-7: CALL r5 r4
+; ------------------------------
+; (if true (print (+ 34 35)) (print "False"))
+; ------- Condition block ------
+1: STORE r1 $true ; r1 = True
+2: JUMP_IF_FALSE r1 9 ; if
+; --------- True block ---------
+3: STORE r2 $34 ; r2 = 34
+4: STORE r3 $35 ; r3 = 35
+5: IADD r2 r3 r4 ; r2 + r3 -> r4
+6: STORE r5 $1 ; r5 = 1 (function calling)
+7: CALL r5 r4 ; call 1 (print) 34 (r4)
 8: JUMP 12
-9: STORE r6 $"False"
-10: STORE r7 $1
-11: CALL r7 r6
-12: STORE r8 $0
-13: RETURN r8
+; -------- False block ---------
+9: STORE r6 $"False" ; r6 = "False"
+10: STORE r7 $1 ; r7 = 1 (function calling)
+11: CALL r7 r6 ; call 1 (print) "False" (r6)
+12: STORE r8 $0 ; r8 = 0 (return value)
+13: RETURN r8 ; return 0 (exit code)
+; ------------ End -------------
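Read with the new comments: JUMP_IF_FALSE r1 9 falls through into the true block when r1 holds true and jumps to label 9 (the false block) otherwise, while the JUMP 12 that closes the true block skips the false branch and lands on labels 12-13, so both paths exit through RETURN r8 with exit code 0.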
@@ -5,10 +5,13 @@ mod args;
use args::Args;

mod parser;
use parser::tokenize;

fn main() {
    let args = Args::from_args();

    let src = read_to_string(&args.file).unwrap();
    let _file_name = Path::new(&args.file).file_stem().unwrap().to_str().unwrap();

    let tokens = tokenize(&src);
}
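In main, the new parser module is wired in: the source file named on the command line is read, passed to tokenize, and the resulting tokens are bound but not yet consumed by any later stage.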
@@ -1 +1,17 @@
use middle::*;

pub fn tokenize(src: &str) -> Vec<Instr> {
    let mut result = Vec::new();

    for line in src.lines() {
        // <label>: <instr> <arg>+
        let mut parts = line.split_whitespace();

        let label = parts.next();
        let instr = parts.next();
        let args = parts.collect::<Vec<_>>();
        println!("{:?} {:?} {:?}", label, instr, args);
    }

    result
}
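The tokenize above only splits each line and prints the pieces; result is still returned empty. A minimal sketch of the next step, written against a hypothetical Instr and Value (the real definitions live in the repository's middle crate and are likely shaped differently), shows how the label/instr/args split could be mapped onto instructions such as the STORE and CALL ops used in the bytecode example above:

// Hypothetical stand-ins for the real types in the `middle` crate;
// the actual Instr/Value definitions in the repository may differ.
#[derive(Debug)]
enum Value {
    Bool(bool),
    Int(i64),
    Str(String),
}

#[derive(Debug)]
enum Instr {
    Store { dst: usize, val: Value },
    Call { func: usize, arg: usize },
}

// "r4" -> 4
fn parse_reg(s: &str) -> Option<usize> {
    s.strip_prefix('r')?.parse().ok()
}

// "$true" / "$34" / "$\"False\"" -> Value
fn parse_value(s: &str) -> Option<Value> {
    let s = s.strip_prefix('$')?;
    if s == "true" || s == "false" {
        Some(Value::Bool(s == "true"))
    } else if let Ok(n) = s.parse::<i64>() {
        Some(Value::Int(n))
    } else {
        Some(Value::Str(s.trim_matches('"').to_string()))
    }
}

// One "<label>: <instr> <arg>+" line -> one instruction.
fn parse_line(line: &str) -> Option<Instr> {
    let mut parts = line.split_whitespace();
    let _label = parts.next()?; // e.g. "1:" -- not used in this sketch
    let instr = parts.next()?;
    let args: Vec<&str> = parts.collect();

    match (instr, args.as_slice()) {
        ("STORE", [dst, val]) => Some(Instr::Store {
            dst: parse_reg(dst)?,
            val: parse_value(val)?,
        }),
        ("CALL", [func, arg]) => Some(Instr::Call {
            func: parse_reg(func)?,
            arg: parse_reg(arg)?,
        }),
        _ => None, // IADD, JUMP, JUMP_IF_FALSE, RETURN, ... omitted here
    }
}

fn main() {
    let src = "1: STORE r1 $true\n7: CALL r5 r4";
    let instrs: Vec<Instr> = src.lines().filter_map(parse_line).collect();
    println!("{:?}", instrs);
}

The label prefix is ignored in this sketch even though the example above uses it as a jump target; handling JUMP and the remaining opcodes would follow the same match pattern.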