forked from AbleOS/holey-bytes
Compare commits
424 commits
soft-float
...
trunk
Author | SHA1 | Date | |
---|---|---|---|
65e9f272a8 | |||
d2052cd2a3 | |||
29367d8f8b | |||
a299bad75b | |||
7d48d3beb1 | |||
68c0248189 | |||
0ef74d89cb | |||
1b2b9f899d | |||
455f70db6e | |||
0374848b28 | |||
513d2c7127 | |||
9d2f419140 | |||
f535ea7b0a | |||
be6d0d3f18 | |||
2718ef8523 | |||
3ee78f3a31 | |||
2bac7c1fb3 | |||
mlokis | 79a3f1ab2b | ||
koniifer | b15e66b2af | ||
koniifer | d2ba7cc101 | ||
koniifer | d3ee72306e | ||
87cb77a553 | |||
276d1bb0cf | |||
5cce904135 | |||
3338d50672 | |||
2e36f32ae0 | |||
e8f1d2af8c | |||
999b25df8b | |||
61250c906a | |||
44fc9c3e2e | |||
798000c756 | |||
9de631234d | |||
843fbddf3b | |||
38a00cbaa0 | |||
4664240e08 | |||
728d563cea | |||
56984f08ff | |||
3f9f99ff65 | |||
9ed3c7ab9e | |||
acacd10ee9 | |||
f6f661cee3 | |||
4bfb5f192e | |||
ea628c1278 | |||
7448339605 | |||
da7cd5926c | |||
9cf7933251 | |||
24b9f9e78b | |||
80558ea7e6 | |||
348d9014e3 | |||
30bd6103a6 | |||
97eb985a02 | |||
7ef1adf7e2 | |||
be828b8c54 | |||
b4b3bae104 | |||
33d78fbc52 | |||
be2d38a6d2 | |||
bbd7e12af4 | |||
37db783699 | |||
948710dc27 | |||
f0a588fcff | |||
9c32f260a1 | |||
047e1ed15c | |||
2c2f0c048b | |||
3c12c0e288 | |||
ca8497550a | |||
849e842336 | |||
5c82623db9 | |||
e8a8fa3eb1 | |||
5926f69e6c | |||
83d3fb4919 | |||
b429534d23 | |||
b187af64a8 | |||
ce7bb001da | |||
9c90adbfe8 | |||
db62434736 | |||
3d721812f0 | |||
5b23a0661b | |||
7c919cd453 | |||
bb61526d3e | |||
45e1c6743a | |||
39588579a8 | |||
9095af6d84 | |||
b62413046d | |||
af4d965b8c | |||
855da58e06 | |||
2fc24f0f58 | |||
8016b1fad5 | |||
46f9903562 | |||
517850f283 | |||
faa8dd2e6f | |||
d23d010917 | |||
b1da36ecde | |||
e62aab9b4b | |||
423361a80e | |||
62a7c61cdc | |||
2bab16d3ce | |||
c88daa4800 | |||
6988d8893f | |||
64e228450f | |||
897e121eeb | |||
648bd24d0d | |||
aefa7e6405 | |||
026f6141e6 | |||
cb88edea1f | |||
127e8dcb38 | |||
9c43dafcf5 | |||
e65dbcfcbe | |||
e0d4955bd5 | |||
78ebc3292c | |||
0c2db878f0 | |||
cb9d7f7d1e | |||
41b70bec43 | |||
f013e90936 | |||
6977cb218c | |||
3f30735eaa | |||
58f4837ae0 | |||
b95bddac7b | |||
7d53706e71 | |||
4d699fcbf1 | |||
5aa6150c70 | |||
b0a85f44c9 | |||
2aa5ba9abc | |||
35d34dca54 | |||
bc817c4ea2 | |||
0298b32e38 | |||
73c9ccef6a | |||
ad4aed9c98 | |||
8528bef8cf | |||
11c8755b18 | |||
d5c90b95a7 | |||
1da900461c | |||
3aff6fc006 | |||
ccfde6c237 | |||
44c4b71bb3 | |||
c3a6e62bf2 | |||
00949c4ea8 | |||
15e4762d4a | |||
959bfd7f76 | |||
6ad0b41759 | |||
89cc611f7a | |||
cf74fdd99c | |||
58578dd4b2 | |||
4a7b4e4ead | |||
c900f4ef5c | |||
3a494147ec | |||
4336fec653 | |||
11f6537a09 | |||
da58a5926d | |||
f5ef62c6bb | |||
f386c332e5 | |||
23b90b3dd7 | |||
ea736d8824 | |||
dc2e0cc5b3 | |||
c9b85f9004 | |||
af147b3cb6 | |||
0f8a720fe8 | |||
2ab6f6c914 | |||
54d93608aa | |||
19a6cdd764 | |||
2660d976fe | |||
659ccbd637 | |||
3a2367f24f | |||
0f4ff918d2 | |||
6d7e726066 | |||
9e65f3949d | |||
bf00dc85b2 | |||
69b58c2b36 | |||
5364b66629 | |||
c4826d3bfd | |||
07638caff0 | |||
5ef1ec4811 | |||
f0ae65606d | |||
a538c0ddb0 | |||
c31d1dcb9c | |||
54a7f85978 | |||
e200c2fc98 | |||
1626734c1a | |||
13f63c7700 | |||
c7dbe1c43d | |||
4c15f61cb7 | |||
f1ea01ef0c | |||
2361e166cd | |||
4d913462cb | |||
bdc2c43773 | |||
b2254e9820 | |||
d293e02f62 | |||
1ee8d464c6 | |||
2a4d27d8e6 | |||
1f5846afaa | |||
006bc80f12 | |||
802e8b5d55 | |||
6b7572f089 | |||
1d04287532 | |||
8b6d9b5de3 | |||
136bba1631 | |||
c1b00b6d6b | |||
a51b23187d | |||
c3f9e535d3 | |||
6d805dc2ec | |||
4291ebc25e | |||
02c74a181d | |||
c0d4464097 | |||
602249a48a | |||
338e3f1519 | |||
0e9f4402cb | |||
6057e88034 | |||
2a3d077476 | |||
8e62bd747b | |||
b8ff503c14 | |||
9e69e53e24 | |||
4d163a2313 | |||
e4e7f8d5b5 | |||
4849807353 | |||
6e30968c54 | |||
6fc0eb3498 | |||
98dfd6b09c | |||
ece9bb8bf2 | |||
09fcbbc03b | |||
a7fda408ef | |||
5d77ae93b4 | |||
4a9b9de87f | |||
bba3570788 | |||
6852452f1a | |||
254d5ed962 | |||
faf068885a | |||
a2e864360e | |||
79e4cead2d | |||
6968e7d769 | |||
c133c2dbe7 | |||
2bc7a5c13f | |||
16e2c32521 | |||
da85d91a09 | |||
e2a8373c42 | |||
fbdabd8314 | |||
39c4526797 | |||
2e3fbfa966 | |||
eebabc5070 | |||
b177cbe7c7 | |||
641d344d2d | |||
dc418bd5e0 | |||
8bbc40b9b1 | |||
8083bcb0e8 | |||
8928888481 | |||
d64fa7e1f9 | |||
b51f964cae | |||
67b8ffe2f2 | |||
32bed04914 | |||
6cb9489e9a | |||
73727c2383 | |||
e8a5027cab | |||
50f3350418 | |||
bb41da484f | |||
ee30069195 | |||
58c1c29293 | |||
49387dbe16 | |||
803095c0c5 | |||
514c2fe630 | |||
b4f64656fe | |||
73e13bd93c | |||
b404e5b86d | |||
4bcab25231 | |||
414a07b99a | |||
fdf4cccde0 | |||
1a3b0c2eec | |||
955e4a5c7a | |||
d9aab2191b | |||
9dd09b2122 | |||
937c107dec | |||
ed1b9459fc | |||
f063d0a4fd | |||
a21dee61e7 | |||
3807276a55 | |||
894f73ca35 | |||
00ad474881 | |||
9e0e0242aa | |||
a31e02449c | |||
b956cc78bb | |||
7279ed88e9 | |||
9500db8764 | |||
9404eb32a2 | |||
f172c33247 | |||
75dca64648 | |||
97c62e424a | |||
a2c08b6ef6 | |||
a78d2bc3e9 | |||
ad3fc1190c | |||
641be15703 | |||
cbe6f98dff | |||
9bdacfffb2 | |||
f13f500d6e | |||
mlokis | 4e9d6094bd | ||
28e33d11c9 | |||
koniifer | 581c4d531c | ||
9012f976c5 | |||
koniifer | 27462d9a33 | ||
koniifer | 781c40ede0 | ||
9af7bf559f | |||
5a6474f066 | |||
33a4bf7d01 | |||
cac99cd34d | |||
5555b9865a | |||
f964520641 | |||
a88d3a5c9d | |||
416f646957 | |||
12b39c5b3f | |||
4dcaae8362 | |||
ab903fa4ea | |||
c48a2d2799 | |||
fb01407465 | |||
71359d82aa | |||
29d5774c47 | |||
434acfbc7b | |||
6a03f125a5 | |||
03aedb5d3f | |||
a1179f3320 | |||
ba73a89171 | |||
523ca6d103 | |||
654b7eb7af | |||
4c3b63df25 | |||
9a8a56fe97 | |||
aeb3a37f7d | |||
3c01a40ef2 | |||
4f9d4f2e71 | |||
25bbe247e9 | |||
ab41d49a3d | |||
11cb875882 | |||
8984dce0e7 | |||
fd64968f3a | |||
e00f2f08c8 | |||
880cd66c66 | |||
fa41c56cb3 | |||
efa7271a59 | |||
bd7384123c | |||
e9589ebcae | |||
22f925b3f5 | |||
3807fe22da | |||
12c7467be2 | |||
cdc8cb35f7 | |||
36bd1a796b | |||
59705c062d | |||
9fe734c68c | |||
dc0562553d | |||
91907a90ff | |||
e147358fce | |||
f9e46b4641 | |||
93deeee6b9 | |||
876690319f | |||
c835317287 | |||
8442b55aa6 | |||
e07265c88b | |||
6a69042cb7 | |||
c85437e4e8 | |||
76b3f9ff4b | |||
66c3f7b0d4 | |||
b04d9e517e | |||
b46c64db4f | |||
6de8496aa5 | |||
499fe34f1d | |||
36d978d798 | |||
bd2a49d29a | |||
1c8645bf11 | |||
1624559e7b | |||
1ca5d89644 | |||
61ecbbd304 | |||
002a7df509 | |||
20903ef294 | |||
aafcb2fbbd | |||
98862edd58 | |||
b9de362ba2 | |||
e494785f93 | |||
aef9951bc5 | |||
b922dbd232 | |||
71c4d3632a | |||
8cb9f2eaac | |||
aae217dd00 | |||
4502a64514 | |||
ca1d471646 | |||
2dff9f7244 | |||
3127d04e41 | |||
589a30c8a3 | |||
8b81cfef37 | |||
6b74640c3f | |||
87ba7aa203 | |||
78f9eb6acc | |||
3c09a5f23e | |||
70955c1792 | |||
d8a922df26 | |||
9aa5da82c9 | |||
fb481a0600 | |||
d90f386bd2 | |||
c14e6c352d | |||
9ccf91d072 | |||
7cca9a3683 | |||
b28baa86f7 | |||
2226a47aaa | |||
0aec47e985 | |||
5c38115119 | |||
c3cbd054f7 | |||
06e30529bf | |||
4ec635dc56 | |||
a08856a464 | |||
d5a5c932e7 | |||
bc59886428 | |||
f87959aacb | |||
80b05779ea | |||
4bb5ec1953 | |||
2aa315a863 | |||
86013a50a4 | |||
465b185452 | |||
b794fa7c3c | |||
able | ebefc85566 | ||
a3c4b878b2 | |||
7f32e7775c | |||
1d74f27b0e | |||
7435218999 | |||
cf99091a45 | |||
81952cfc40 | |||
68d53544fd | |||
aa77a2f822 | |||
b80528bfd7 | |||
1c08148dc9 | |||
774735b515 | |||
870c1f4718 | |||
326adf47ce |
|
@ -1,2 +1,4 @@
|
|||
[alias]
|
||||
xtask = "r -p xtask --"
|
||||
wasm-build = "b --target wasm32-unknown-unknown --profile=small -Zbuild-std=core,alloc -Zbuild-std-features=optimize_for_size,panic_immediate_abort -p"
|
||||
wasm-build-debug = "b --target wasm32-unknown-unknown --profile=small-dev -Zbuild-std=core,alloc -Zbuild-std-features=optimize_for_size -p"
|
||||
|
|
12
.gitignore
vendored
12
.gitignore
vendored
|
@ -1 +1,13 @@
|
|||
# garbage
|
||||
/target
|
||||
rustc-ice-*
|
||||
|
||||
# sqlite
|
||||
db.sqlite
|
||||
db.sqlite-journal
|
||||
|
||||
# assets
|
||||
/depell/src/*.gz
|
||||
/depell/src/*.wasm
|
||||
#**/*-sv.rs
|
||||
/bytecode/src/instrs.rs
|
||||
|
|
1603
Cargo.lock
generated
1603
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
49
Cargo.toml
49
Cargo.toml
|
@ -1,3 +1,50 @@
|
|||
cargo-features = ["profile-rustflags"]
|
||||
|
||||
[workspace]
|
||||
resolver = "2"
|
||||
members = ["hbasm", "hbbytecode", "hbvm", "hbxrt", "xtask"]
|
||||
members = [
|
||||
"bytecode",
|
||||
"vm",
|
||||
"xrt",
|
||||
"xtask",
|
||||
"lang",
|
||||
"depell",
|
||||
"depell/wasm-fmt",
|
||||
"depell/wasm-hbc",
|
||||
"depell/wasm-rt",
|
||||
]
|
||||
|
||||
[workspace.dependencies]
|
||||
hbbytecode = { path = "bytecode", default-features = false }
|
||||
hbvm = { path = "vm", default-features = false }
|
||||
hbxrt = { path = "xrt" }
|
||||
hblang = { path = "lang", default-features = false }
|
||||
hbjit = { path = "jit" }
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
#debug = true
|
||||
strip = true
|
||||
codegen-units = 1
|
||||
panic = "abort"
|
||||
|
||||
[profile.small]
|
||||
rustflags = ["-Zfmt-debug=none", "-Zlocation-detail=none"]
|
||||
inherits = "release"
|
||||
opt-level = "z"
|
||||
strip = "debuginfo"
|
||||
lto = true
|
||||
codegen-units = 1
|
||||
panic = "abort"
|
||||
|
||||
[profile.small-dev]
|
||||
inherits = "dev"
|
||||
opt-level = "z"
|
||||
strip = "debuginfo"
|
||||
panic = "abort"
|
||||
|
||||
[profile.fuzz]
|
||||
inherits = "dev"
|
||||
debug = true
|
||||
opt-level = 3
|
||||
panic = "abort"
|
||||
|
|
|
@ -3,6 +3,8 @@ name = "hbbytecode"
|
|||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
paste = "1.0.14"
|
||||
with_builtin_macros = "0.0.3"
|
||||
[features]
|
||||
default = ["disasm"]
|
||||
std = []
|
||||
disasm = ["std"]
|
||||
|
204
bytecode/build.rs
Normal file
204
bytecode/build.rs
Normal file
|
@ -0,0 +1,204 @@
|
|||
#![feature(iter_next_chunk)]
|
||||
|
||||
use std::{collections::HashSet, fmt::Write};
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
println!("cargo:rerun-if-changed=instructions.in");
|
||||
|
||||
let mut generated = String::new();
|
||||
gen_instrs(&mut generated)?;
|
||||
std::fs::write("src/instrs.rs", generated)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn gen_instrs(generated: &mut String) -> Result<(), Box<dyn std::error::Error>> {
|
||||
writeln!(generated, "#![expect(dead_code)]")?;
|
||||
writeln!(generated, "use crate::*;")?;
|
||||
|
||||
'_opcode_structs: {
|
||||
let mut seen = HashSet::new();
|
||||
for [.., args, _] in instructions() {
|
||||
if !seen.insert(args) {
|
||||
continue;
|
||||
}
|
||||
|
||||
writeln!(generated, "#[derive(Clone, Copy, Debug)]")?;
|
||||
writeln!(generated, "#[repr(packed)]")?;
|
||||
write!(generated, "pub struct Ops{args}(")?;
|
||||
let mut first = true;
|
||||
for ch in args.chars().filter(|&ch| ch != 'N') {
|
||||
if !std::mem::take(&mut first) {
|
||||
write!(generated, ",")?;
|
||||
}
|
||||
write!(generated, "pub Op{ch}")?;
|
||||
}
|
||||
writeln!(generated, ");")?;
|
||||
writeln!(generated, "unsafe impl BytecodeItem for Ops{args} {{}}")?;
|
||||
}
|
||||
}
|
||||
|
||||
'_max_size: {
|
||||
let max = instructions()
|
||||
.map(
|
||||
|[_, _, ty, _]| {
|
||||
if ty == "N" {
|
||||
1
|
||||
} else {
|
||||
iter_args(ty).map(arg_to_width).sum::<usize>() + 1
|
||||
}
|
||||
},
|
||||
)
|
||||
.max()
|
||||
.unwrap();
|
||||
|
||||
writeln!(generated, "pub const MAX_SIZE: usize = {max};")?;
|
||||
}
|
||||
|
||||
'_encoders: {
|
||||
for [op, name, ty, doc] in instructions() {
|
||||
writeln!(generated, "/// {}", doc.trim_matches('"'))?;
|
||||
let name = name.to_lowercase();
|
||||
let args = comma_sep(
|
||||
iter_args(ty)
|
||||
.enumerate()
|
||||
.map(|(i, c)| format!("{}{i}: {}", arg_to_name(c), arg_to_type(c))),
|
||||
);
|
||||
writeln!(generated, "pub fn {name}({args}) -> (usize, [u8; MAX_SIZE]) {{")?;
|
||||
let arg_names =
|
||||
comma_sep(iter_args(ty).enumerate().map(|(i, c)| format!("{}{i}", arg_to_name(c))));
|
||||
writeln!(generated, " unsafe {{ crate::encode({ty}({op}, {arg_names})) }}")?;
|
||||
writeln!(generated, "}}")?;
|
||||
}
|
||||
}
|
||||
|
||||
'_structs: {
|
||||
let mut seen = std::collections::HashSet::new();
|
||||
for [_, _, ty, _] in instructions() {
|
||||
if !seen.insert(ty) {
|
||||
continue;
|
||||
}
|
||||
let types = comma_sep(iter_args(ty).map(arg_to_type).map(|s| s.to_string()));
|
||||
writeln!(generated, "#[repr(packed)] pub struct {ty}(u8, {types});")?;
|
||||
}
|
||||
}
|
||||
|
||||
'_name_list: {
|
||||
writeln!(generated, "pub const COUNT: u8 = {};", instructions().count())?;
|
||||
}
|
||||
|
||||
let instr = "Instr";
|
||||
let oper = "Oper";
|
||||
|
||||
'_instr_enum: {
|
||||
writeln!(generated, "#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[repr(u8)]")?;
|
||||
writeln!(generated, "pub enum {instr} {{")?;
|
||||
for [id, name, ..] in instructions() {
|
||||
writeln!(generated, " {name} = {id},")?;
|
||||
}
|
||||
writeln!(generated, "}}")?;
|
||||
}
|
||||
|
||||
'_arg_kind: {
|
||||
writeln!(generated, "#[derive(Debug, Clone, Copy, PartialEq, Eq)]")?;
|
||||
writeln!(generated, "pub enum {oper} {{")?;
|
||||
let mut seen = HashSet::new();
|
||||
for ty in instructions().flat_map(|[.., ty, _]| iter_args(ty)) {
|
||||
if !seen.insert(ty) {
|
||||
continue;
|
||||
}
|
||||
writeln!(generated, " {ty}({}),", arg_to_type(ty))?;
|
||||
}
|
||||
writeln!(generated, "}}")?;
|
||||
}
|
||||
|
||||
'_parse_opers: {
|
||||
writeln!(
|
||||
generated,
|
||||
"/// This assumes the instruction byte is still at the beginning of the buffer"
|
||||
)?;
|
||||
writeln!(generated, "#[cfg(feature = \"disasm\")]")?;
|
||||
writeln!(generated, "pub fn parse_args(bytes: &mut &[u8], kind: {instr}, buf: &mut alloc::vec::Vec<{oper}>) -> Option<()> {{")?;
|
||||
writeln!(generated, " match kind {{")?;
|
||||
let mut instrs = instructions().collect::<Vec<_>>();
|
||||
instrs.sort_unstable_by_key(|&[.., ty, _]| ty);
|
||||
for group in instrs.chunk_by(|[.., a, _], [.., b, _]| a == b) {
|
||||
let ty = group[0][2];
|
||||
for &[_, name, ..] in group {
|
||||
writeln!(generated, " | {instr}::{name}")?;
|
||||
}
|
||||
generated.pop();
|
||||
writeln!(generated, " => {{")?;
|
||||
if iter_args(ty).count() != 0 {
|
||||
writeln!(generated, " let data = crate::decode::<{ty}>(bytes)?;")?;
|
||||
writeln!(
|
||||
generated,
|
||||
" buf.extend([{}]);",
|
||||
comma_sep(
|
||||
iter_args(ty).zip(1u32..).map(|(t, i)| format!("{oper}::{t}(data.{i})"))
|
||||
)
|
||||
)?;
|
||||
} else {
|
||||
writeln!(generated, " crate::decode::<{ty}>(bytes)?;")?;
|
||||
}
|
||||
|
||||
writeln!(generated, " }}")?;
|
||||
}
|
||||
writeln!(generated, " }}")?;
|
||||
writeln!(generated, " Some(())")?;
|
||||
writeln!(generated, "}}")?;
|
||||
}
|
||||
|
||||
std::fs::write("src/instrs.rs", generated)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn comma_sep(items: impl Iterator<Item = String>) -> String {
|
||||
items.map(|item| item.to_string()).collect::<Vec<_>>().join(", ")
|
||||
}
|
||||
|
||||
fn instructions() -> impl Iterator<Item = [&'static str; 4]> {
|
||||
include_str!("instructions.in")
|
||||
.lines()
|
||||
.filter_map(|line| line.strip_suffix(';'))
|
||||
.map(|line| line.splitn(4, ',').map(str::trim).next_chunk().unwrap())
|
||||
}
|
||||
|
||||
fn arg_to_type(arg: char) -> &'static str {
|
||||
match arg {
|
||||
'R' | 'B' => "u8",
|
||||
'H' => "u16",
|
||||
'W' => "u32",
|
||||
'D' | 'A' => "u64",
|
||||
'P' => "i16",
|
||||
'O' => "i32",
|
||||
_ => panic!("unknown type: {}", arg),
|
||||
}
|
||||
}
|
||||
|
||||
fn arg_to_width(arg: char) -> usize {
|
||||
match arg {
|
||||
'R' | 'B' => 1,
|
||||
'H' => 2,
|
||||
'W' => 4,
|
||||
'D' | 'A' => 8,
|
||||
'P' => 2,
|
||||
'O' => 4,
|
||||
_ => panic!("unknown type: {}", arg),
|
||||
}
|
||||
}
|
||||
|
||||
fn arg_to_name(arg: char) -> &'static str {
|
||||
match arg {
|
||||
'R' => "reg",
|
||||
'B' | 'H' | 'W' | 'D' => "imm",
|
||||
'P' | 'O' => "offset",
|
||||
'A' => "addr",
|
||||
_ => panic!("unknown type: {}", arg),
|
||||
}
|
||||
}
|
||||
|
||||
fn iter_args(ty: &'static str) -> impl Iterator<Item = char> {
|
||||
ty.chars().filter(|c| *c != 'N')
|
||||
}
|
284
bytecode/src/lib.rs
Normal file
284
bytecode/src/lib.rs
Normal file
|
@ -0,0 +1,284 @@
|
|||
#![no_std]
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
extern crate alloc;
|
||||
|
||||
pub use crate::instrs::*;
|
||||
use core::convert::TryFrom;
|
||||
|
||||
mod instrs;
|
||||
|
||||
type OpR = u8;
|
||||
|
||||
type OpA = u64;
|
||||
type OpO = i32;
|
||||
type OpP = i16;
|
||||
|
||||
type OpB = u8;
|
||||
type OpH = u16;
|
||||
type OpW = u32;
|
||||
type OpD = u64;
|
||||
|
||||
/// # Safety
|
||||
/// Has to be valid to be decoded from bytecode.
|
||||
pub unsafe trait BytecodeItem {}
|
||||
unsafe impl BytecodeItem for u8 {}
|
||||
|
||||
impl TryFrom<u8> for Instr {
|
||||
type Error = u8;
|
||||
|
||||
#[inline]
|
||||
fn try_from(value: u8) -> Result<Self, Self::Error> {
|
||||
#[cold]
|
||||
fn failed(value: u8) -> Result<Instr, u8> {
|
||||
Err(value)
|
||||
}
|
||||
|
||||
if value < COUNT {
|
||||
unsafe { Ok(core::mem::transmute::<u8, Instr>(value)) }
|
||||
} else {
|
||||
failed(value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn encode<T>(instr: T) -> (usize, [u8; instrs::MAX_SIZE]) {
|
||||
let mut buf = [0; instrs::MAX_SIZE];
|
||||
core::ptr::write(buf.as_mut_ptr() as *mut T, instr);
|
||||
(core::mem::size_of::<T>(), buf)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg(feature = "disasm")]
|
||||
fn decode<T>(binary: &mut &[u8]) -> Option<T> {
|
||||
let (front, rest) = core::mem::take(binary).split_at_checked(core::mem::size_of::<T>())?;
|
||||
*binary = rest;
|
||||
unsafe { Some(core::ptr::read(front.as_ptr() as *const T)) }
|
||||
}
|
||||
|
||||
/// Rounding mode
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
#[repr(u8)]
|
||||
pub enum RoundingMode {
|
||||
NearestEven = 0,
|
||||
Truncate = 1,
|
||||
Up = 2,
|
||||
Down = 3,
|
||||
}
|
||||
|
||||
impl TryFrom<u8> for RoundingMode {
|
||||
type Error = ();
|
||||
|
||||
fn try_from(value: u8) -> Result<Self, Self::Error> {
|
||||
(value <= 3).then(|| unsafe { core::mem::transmute(value) }).ok_or(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
#[derive(Clone, Copy)]
|
||||
pub enum DisasmItem {
|
||||
Func,
|
||||
Global,
|
||||
}
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
#[derive(Debug)]
|
||||
pub enum DisasmError<'a> {
|
||||
InvalidInstruction(u8),
|
||||
InstructionOutOfBounds(&'a str),
|
||||
FmtFailed(core::fmt::Error),
|
||||
HasOutOfBoundsJumps,
|
||||
}
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
impl From<core::fmt::Error> for DisasmError<'_> {
|
||||
fn from(value: core::fmt::Error) -> Self {
|
||||
Self::FmtFailed(value)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
impl core::fmt::Display for DisasmError<'_> {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
match *self {
|
||||
DisasmError::InvalidInstruction(b) => write!(f, "invalid instruction opcode: {b}"),
|
||||
DisasmError::InstructionOutOfBounds(name) => {
|
||||
write!(f, "instruction would go out of bounds of {name} symbol")
|
||||
}
|
||||
DisasmError::FmtFailed(error) => write!(f, "fmt failed: {error}"),
|
||||
DisasmError::HasOutOfBoundsJumps => write!(
|
||||
f,
|
||||
"the code contained jumps that dont got neither to a \
|
||||
valid symbol or local insturction"
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
impl core::error::Error for DisasmError<'_> {}
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
pub fn disasm<'a>(
|
||||
binary: &mut &[u8],
|
||||
functions: &alloc::collections::BTreeMap<u32, (&'a str, u32, DisasmItem)>,
|
||||
out: &mut alloc::string::String,
|
||||
mut eca_handler: impl FnMut(&mut &[u8]),
|
||||
) -> Result<(), DisasmError<'a>> {
|
||||
use {
|
||||
self::instrs::Instr,
|
||||
alloc::{
|
||||
collections::btree_map::{BTreeMap, Entry},
|
||||
vec::Vec,
|
||||
},
|
||||
core::{convert::TryInto, fmt::Write},
|
||||
};
|
||||
|
||||
fn instr_from_byte(b: u8) -> Result<Instr, DisasmError<'static>> {
|
||||
b.try_into().map_err(DisasmError::InvalidInstruction)
|
||||
}
|
||||
|
||||
let mut labels = BTreeMap::<u32, u32>::default();
|
||||
let mut buf = Vec::<instrs::Oper>::new();
|
||||
let mut has_oob = false;
|
||||
|
||||
'_offset_pass: for (&off, &(name, len, kind)) in functions.iter() {
|
||||
if matches!(kind, DisasmItem::Global) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let prev = *binary;
|
||||
|
||||
*binary = &binary[off as usize..];
|
||||
|
||||
let mut label_count = 0;
|
||||
while let Some(&byte) = binary.first() {
|
||||
let offset: i32 = (prev.len() - binary.len()).try_into().unwrap();
|
||||
if offset as u32 == off + len {
|
||||
break;
|
||||
}
|
||||
let Ok(inst) = instr_from_byte(byte) else { break };
|
||||
instrs::parse_args(binary, inst, &mut buf)
|
||||
.ok_or(DisasmError::InstructionOutOfBounds(name))?;
|
||||
|
||||
for op in buf.drain(..) {
|
||||
let rel = match op {
|
||||
instrs::Oper::O(rel) => rel,
|
||||
instrs::Oper::P(rel) => rel.into(),
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
let global_offset: u32 = (offset + rel).try_into().unwrap();
|
||||
if functions.get(&global_offset).is_some() {
|
||||
continue;
|
||||
}
|
||||
label_count += match labels.entry(global_offset) {
|
||||
Entry::Occupied(_) => 0,
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(label_count);
|
||||
1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if matches!(inst, Instr::ECA) {
|
||||
eca_handler(binary);
|
||||
}
|
||||
}
|
||||
|
||||
*binary = prev;
|
||||
}
|
||||
|
||||
let mut ordered = functions.iter().collect::<Vec<_>>();
|
||||
ordered.sort_unstable_by_key(|(_, (name, _, _))| name);
|
||||
|
||||
'_dump: for (&off, &(name, len, kind)) in ordered {
|
||||
if matches!(kind, DisasmItem::Global) {
|
||||
continue;
|
||||
}
|
||||
let prev = *binary;
|
||||
|
||||
writeln!(out, "{name}:")?;
|
||||
|
||||
*binary = &binary[off as usize..];
|
||||
while let Some(&byte) = binary.first() {
|
||||
let offset: i32 = (prev.len() - binary.len()).try_into().unwrap();
|
||||
if offset as u32 == off + len {
|
||||
break;
|
||||
}
|
||||
let Ok(inst) = instr_from_byte(byte) else {
|
||||
writeln!(out, "invalid instr {byte}")?;
|
||||
break;
|
||||
};
|
||||
instrs::parse_args(binary, inst, &mut buf).unwrap();
|
||||
|
||||
if let Some(label) = labels.get(&offset.try_into().unwrap()) {
|
||||
write!(out, "{:>2}: ", label)?;
|
||||
} else {
|
||||
write!(out, " ")?;
|
||||
}
|
||||
|
||||
write!(out, "{inst:<8?} ")?;
|
||||
|
||||
'a: for (i, op) in buf.drain(..).enumerate() {
|
||||
if i != 0 {
|
||||
write!(out, ", ")?;
|
||||
}
|
||||
|
||||
let rel = 'b: {
|
||||
match op {
|
||||
instrs::Oper::O(rel) => break 'b rel,
|
||||
instrs::Oper::P(rel) => break 'b rel.into(),
|
||||
instrs::Oper::R(r) => write!(out, "r{r}")?,
|
||||
instrs::Oper::B(b) => write!(out, "{b}b")?,
|
||||
instrs::Oper::H(h) => write!(out, "{h}h")?,
|
||||
instrs::Oper::W(w) => write!(out, "{w}w")?,
|
||||
instrs::Oper::D(d) if (d as i64) < 0 => write!(out, "{}d", d as i64)?,
|
||||
instrs::Oper::D(d) => write!(out, "{d}d")?,
|
||||
instrs::Oper::A(a) => write!(out, "{a}a")?,
|
||||
}
|
||||
|
||||
continue 'a;
|
||||
};
|
||||
|
||||
let global_offset: u32 = (offset + rel).try_into().unwrap();
|
||||
if let Some(&(name, ..)) = functions.get(&global_offset) {
|
||||
if name.contains('\0') {
|
||||
write!(out, ":{name:?}")?;
|
||||
} else {
|
||||
write!(out, ":{name}")?;
|
||||
}
|
||||
} else {
|
||||
let local_has_oob = global_offset < off
|
||||
|| global_offset > off + len
|
||||
|| prev
|
||||
.get(global_offset as usize)
|
||||
.map_or(true, |&b| instr_from_byte(b).is_err())
|
||||
|| prev[global_offset as usize] == 0;
|
||||
has_oob |= local_has_oob;
|
||||
let label = labels.get(&global_offset).unwrap();
|
||||
if local_has_oob {
|
||||
write!(out, "!!!!!!!!!{rel}")?;
|
||||
} else {
|
||||
write!(out, ":{label}")?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writeln!(out)?;
|
||||
|
||||
if matches!(inst, Instr::ECA) {
|
||||
eca_handler(binary);
|
||||
}
|
||||
}
|
||||
|
||||
*binary = prev;
|
||||
}
|
||||
|
||||
if has_oob {
|
||||
return Err(DisasmError::HasOutOfBoundsJumps);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
23
depell/Cargo.toml
Normal file
23
depell/Cargo.toml
Normal file
|
@ -0,0 +1,23 @@
|
|||
[package]
|
||||
name = "depell"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
argon2 = "0.5.3"
|
||||
axum = "0.7.7"
|
||||
axum-server = { version = "0.7.1", optional = true, features = ["rustls", "tls-rustls"] }
|
||||
const_format = "0.2.33"
|
||||
getrandom = "0.2.15"
|
||||
hblang.workspace = true
|
||||
htmlm = "0.5.0"
|
||||
log = "0.4.22"
|
||||
rand_core = { version = "0.6.4", features = ["getrandom"] }
|
||||
rusqlite = { version = "0.32.1", features = ["bundled"] }
|
||||
serde = { version = "1.0.210", features = ["derive"] }
|
||||
time = "0.3.36"
|
||||
tokio = { version = "1.40.0", features = ["rt"] }
|
||||
|
||||
[features]
|
||||
#default = ["tls"]
|
||||
tls = ["dep:axum-server"]
|
14
depell/README.md
Normal file
14
depell/README.md
Normal file
|
@ -0,0 +1,14 @@
|
|||
# Depell
|
||||
|
||||
Depell is a website that allows users to import/post/run hblang code and create huge dependency graphs. Its currently hosted at https://depell.mlokis.tech.
|
||||
|
||||
## Local Development
|
||||
|
||||
Prerequirements:
|
||||
- rust nigthly toolchain: install rust from [here](https://www.rust-lang.org/tools/install)
|
||||
|
||||
```bash
|
||||
rustup default nightly
|
||||
cargo xtask watch-depell-debug
|
||||
# browser http://localhost:8080
|
||||
```
|
143
depell/src/index.css
Normal file
143
depell/src/index.css
Normal file
|
@ -0,0 +1,143 @@
|
|||
* {
|
||||
font-family: var(--font);
|
||||
}
|
||||
|
||||
body {
|
||||
--primary: white;
|
||||
--secondary: #EFEFEF;
|
||||
--timestamp: #777777;
|
||||
--error: #ff3333;
|
||||
--placeholder: #333333;
|
||||
}
|
||||
|
||||
|
||||
body {
|
||||
--small-gap: 5px;
|
||||
--font: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
|
||||
--monospace: 'Courier New', Courier, monospace;
|
||||
|
||||
nav {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
|
||||
section:last-child {
|
||||
display: flex;
|
||||
gap: var(--small-gap);
|
||||
}
|
||||
}
|
||||
|
||||
main {
|
||||
margin-top: var(--small-gap);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--small-gap);
|
||||
}
|
||||
}
|
||||
|
||||
div.preview {
|
||||
div.info {
|
||||
display: flex;
|
||||
gap: var(--small-gap);
|
||||
|
||||
span[apply=timestamp] {
|
||||
color: var(--timestamp);
|
||||
}
|
||||
}
|
||||
|
||||
div.stats {
|
||||
display: flex;
|
||||
gap: var(--small-gap);
|
||||
}
|
||||
}
|
||||
|
||||
form {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--small-gap);
|
||||
|
||||
|
||||
.error {
|
||||
color: var(--error);
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
textarea {
|
||||
outline: none;
|
||||
border: none;
|
||||
background: var(--secondary);
|
||||
padding: var(--small-gap);
|
||||
padding-top: calc(var(--small-gap) * 1.5);
|
||||
font-family: var(--monospace);
|
||||
resize: none;
|
||||
tab-size: 4;
|
||||
}
|
||||
|
||||
pre {
|
||||
background: var(--secondary);
|
||||
padding: var(--small-gap);
|
||||
padding-top: calc(var(--small-gap) * 1.5);
|
||||
margin: 0px;
|
||||
font-family: var(--monospace);
|
||||
tab-size: 4;
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
input {
|
||||
font-size: inherit;
|
||||
outline: none;
|
||||
border: none;
|
||||
background: var(--secondary);
|
||||
padding: var(--small-gap);
|
||||
}
|
||||
|
||||
input:is(:hover, :focus) {
|
||||
background: var(--primary);
|
||||
}
|
||||
|
||||
button {
|
||||
border: none;
|
||||
outline: none;
|
||||
font-size: inherit;
|
||||
background: var(--secondary);
|
||||
}
|
||||
|
||||
button:hover:not(:active) {
|
||||
background: var(--primary);
|
||||
}
|
||||
|
||||
div#code-editor {
|
||||
display: flex;
|
||||
position: relative;
|
||||
|
||||
textarea {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
span#code-size {
|
||||
position: absolute;
|
||||
right: 2px;
|
||||
font-size: 12px;
|
||||
}
|
||||
}
|
||||
|
||||
div#dep-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
gap: var(--small-gap);
|
||||
|
||||
section {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
text-align: center;
|
||||
gap: var(--small-gap);
|
||||
|
||||
div {
|
||||
text-align: left;
|
||||
}
|
||||
}
|
||||
}
|
427
depell/src/index.js
Normal file
427
depell/src/index.js
Normal file
|
@ -0,0 +1,427 @@
|
|||
/// @ts-check
|
||||
|
||||
/** @return {never} */
|
||||
function never() { throw new Error() }
|
||||
|
||||
/**@type{WebAssembly.Instance}*/ let hbcInstance;
|
||||
/**@type{Promise<WebAssembly.WebAssemblyInstantiatedSource>}*/ let hbcInstaceFuture;
|
||||
async function getHbcInstance() {
|
||||
hbcInstaceFuture ??= WebAssembly.instantiateStreaming(fetch("/hbc.wasm"), {});
|
||||
return hbcInstance ??= (await hbcInstaceFuture).instance;
|
||||
}
|
||||
|
||||
const stack_pointer_offset = 1 << 20;
|
||||
|
||||
/** @param {WebAssembly.Instance} instance @param {Post[]} packages @param {number} fuel
|
||||
* @returns {string} */
|
||||
function compileCode(instance, packages, fuel) {
|
||||
let {
|
||||
INPUT, INPUT_LEN,
|
||||
LOG_MESSAGES, LOG_MESSAGES_LEN,
|
||||
memory, compile_and_run,
|
||||
} = instance.exports;
|
||||
|
||||
if (!(true
|
||||
&& memory instanceof WebAssembly.Memory
|
||||
&& INPUT instanceof WebAssembly.Global
|
||||
&& INPUT_LEN instanceof WebAssembly.Global
|
||||
&& LOG_MESSAGES instanceof WebAssembly.Global
|
||||
&& LOG_MESSAGES_LEN instanceof WebAssembly.Global
|
||||
&& typeof compile_and_run === "function"
|
||||
)) never();
|
||||
|
||||
const codeLength = packPosts(packages, new DataView(memory.buffer, INPUT.value));
|
||||
new DataView(memory.buffer).setUint32(INPUT_LEN.value, codeLength, true);
|
||||
|
||||
runWasmFunction(instance, compile_and_run, fuel);
|
||||
return bufToString(memory, LOG_MESSAGES, LOG_MESSAGES_LEN);
|
||||
}
|
||||
|
||||
/**@type{WebAssembly.Instance}*/ let fmtInstance;
|
||||
/**@type{Promise<WebAssembly.WebAssemblyInstantiatedSource>}*/ let fmtInstaceFuture;
|
||||
async function getFmtInstance() {
|
||||
fmtInstaceFuture ??= WebAssembly.instantiateStreaming(fetch("/hbfmt.wasm"), {});
|
||||
return fmtInstance ??= (await fmtInstaceFuture).instance;
|
||||
}
|
||||
|
||||
/** @param {WebAssembly.Instance} instance @param {string} code @param {"fmt" | "minify"} action
|
||||
* @returns {string | undefined} */
|
||||
function modifyCode(instance, code, action) {
|
||||
let {
|
||||
INPUT, INPUT_LEN,
|
||||
OUTPUT, OUTPUT_LEN,
|
||||
memory, fmt, minify
|
||||
} = instance.exports;
|
||||
|
||||
if (!(true
|
||||
&& memory instanceof WebAssembly.Memory
|
||||
&& INPUT instanceof WebAssembly.Global
|
||||
&& INPUT_LEN instanceof WebAssembly.Global
|
||||
&& OUTPUT instanceof WebAssembly.Global
|
||||
&& OUTPUT_LEN instanceof WebAssembly.Global
|
||||
&& typeof fmt === "function"
|
||||
&& typeof minify === "function"
|
||||
)) never();
|
||||
|
||||
if (action !== "fmt") {
|
||||
INPUT = OUTPUT;
|
||||
INPUT_LEN = OUTPUT_LEN;
|
||||
}
|
||||
|
||||
let dw = new DataView(memory.buffer);
|
||||
dw.setUint32(INPUT_LEN.value, code.length, true);
|
||||
new Uint8Array(memory.buffer, INPUT.value).set(new TextEncoder().encode(code));
|
||||
|
||||
return runWasmFunction(instance, action === "fmt" ? fmt : minify) ?
|
||||
bufToString(memory, OUTPUT, OUTPUT_LEN) : undefined;
|
||||
}
|
||||
|
||||
|
||||
/** @param {WebAssembly.Instance} instance @param {CallableFunction} func @param {any[]} args
|
||||
* @returns {boolean} */
|
||||
function runWasmFunction(instance, func, ...args) {
|
||||
const { PANIC_MESSAGE, PANIC_MESSAGE_LEN, memory, stack_pointer } = instance.exports;
|
||||
if (!(true
|
||||
&& memory instanceof WebAssembly.Memory
|
||||
&& stack_pointer instanceof WebAssembly.Global
|
||||
)) never();
|
||||
const ptr = stack_pointer.value;
|
||||
try {
|
||||
func(...args);
|
||||
return true;
|
||||
} catch (error) {
|
||||
if (error instanceof WebAssembly.RuntimeError
|
||||
&& error.message == "unreachable"
|
||||
&& PANIC_MESSAGE instanceof WebAssembly.Global
|
||||
&& PANIC_MESSAGE_LEN instanceof WebAssembly.Global) {
|
||||
console.error(bufToString(memory, PANIC_MESSAGE, PANIC_MESSAGE_LEN), error);
|
||||
} else {
|
||||
console.error(error);
|
||||
}
|
||||
stack_pointer.value = ptr;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/** @typedef {Object} Post
|
||||
* @property {string} path
|
||||
* @property {string} code */
|
||||
|
||||
/** @param {Post[]} posts @param {DataView} view @returns {number} */
|
||||
function packPosts(posts, view) {
|
||||
const enc = new TextEncoder(), buf = new Uint8Array(view.buffer, view.byteOffset);
|
||||
let len = 0; for (const post of posts) {
|
||||
view.setUint16(len, post.path.length, true); len += 2;
|
||||
buf.set(enc.encode(post.path), len); len += post.path.length;
|
||||
view.setUint16(len, post.code.length, true); len += 2;
|
||||
buf.set(enc.encode(post.code), len); len += post.code.length;
|
||||
}
|
||||
return len;
|
||||
}
|
||||
|
||||
/** @param {WebAssembly.Memory} mem
|
||||
* @param {WebAssembly.Global} ptr
|
||||
* @param {WebAssembly.Global} len
|
||||
* @return {string} */
|
||||
function bufToString(mem, ptr, len) {
|
||||
const res = new TextDecoder()
|
||||
.decode(new Uint8Array(mem.buffer, ptr.value,
|
||||
new DataView(mem.buffer).getUint32(len.value, true)));
|
||||
new DataView(mem.buffer).setUint32(len.value, 0, true);
|
||||
return res;
|
||||
}
|
||||
|
||||
/** @param {HTMLElement} target */
|
||||
function wireUp(target) {
|
||||
execApply(target);
|
||||
cacheInputs(target);
|
||||
bindCodeEdit(target);
|
||||
bindTextareaAutoResize(target);
|
||||
}
|
||||
|
||||
const importRe = /@use\s*\(\s*"(([^"]|\\")+)"\s*\)/g;
|
||||
|
||||
/** @param {string} code
|
||||
* @param {string[]} roots
|
||||
* @param {Post[]} buf
|
||||
* @param {Set<string>} prevRoots
|
||||
* @returns {void} */
|
||||
function loadCachedPackages(code, roots, buf, prevRoots) {
|
||||
buf[0].code = code;
|
||||
|
||||
roots.length = 0;
|
||||
let changed = false;
|
||||
for (const match of code.matchAll(importRe)) {
|
||||
changed ||= !prevRoots.has(match[1]);
|
||||
roots.push(match[1]);
|
||||
}
|
||||
|
||||
if (!changed) return;
|
||||
buf.length = 1;
|
||||
prevRoots.clear();
|
||||
|
||||
for (let imp = roots.pop(); imp !== undefined; imp = roots.pop()) {
|
||||
if (prevRoots.has(imp)) continue; prevRoots.add(imp);
|
||||
buf.push({ path: imp, code: localStorage.getItem("package-" + imp) ?? never() });
|
||||
for (const match of buf[buf.length - 1].code.matchAll(importRe)) {
|
||||
roots.push(match[1]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**@type{Set<string>}*/ const prevRoots = new Set();
|
||||
|
||||
/** @param {HTMLElement} target */
|
||||
async function bindCodeEdit(target) {
|
||||
const edit = target.querySelector("#code-edit");
|
||||
if (!(edit instanceof HTMLTextAreaElement)) return;
|
||||
|
||||
const codeSize = target.querySelector("#code-size");
|
||||
const errors = target.querySelector("#compiler-output");
|
||||
if (!(true
|
||||
&& codeSize instanceof HTMLSpanElement
|
||||
&& errors instanceof HTMLPreElement
|
||||
)) never();
|
||||
|
||||
const MAX_CODE_SIZE = parseInt(codeSize.innerHTML);
|
||||
if (Number.isNaN(MAX_CODE_SIZE)) never();
|
||||
|
||||
const hbc = await getHbcInstance(), fmt = await getFmtInstance();
|
||||
let importDiff = new Set();
|
||||
const keyBuf = [];
|
||||
/**@type{Post[]}*/
|
||||
const packages = [{ path: "local.hb", code: "" }];
|
||||
const debounce = 100;
|
||||
/**@type{AbortController|undefined}*/
|
||||
let cancelation = undefined;
|
||||
let timeout = 0;
|
||||
|
||||
prevRoots.clear();
|
||||
|
||||
const onInput = () => {
|
||||
importDiff.clear();
|
||||
for (const match of edit.value.matchAll(importRe)) {
|
||||
if (localStorage["package-" + match[1]]) continue;
|
||||
importDiff.add(match[1]);
|
||||
}
|
||||
|
||||
if (importDiff.size !== 0) {
|
||||
if (cancelation) cancelation.abort();
|
||||
cancelation = new AbortController();
|
||||
|
||||
keyBuf.length = 0;
|
||||
keyBuf.push(...importDiff.keys());
|
||||
|
||||
errors.textContent = "fetching: " + keyBuf.join(", ");
|
||||
|
||||
fetch(`/code`, {
|
||||
method: "POST",
|
||||
signal: cancelation.signal,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify(keyBuf),
|
||||
}).then(async e => {
|
||||
try {
|
||||
const json = await e.json();
|
||||
if (e.status == 200) {
|
||||
for (const [key, value] of Object.entries(json)) {
|
||||
localStorage["package-" + key] = value;
|
||||
}
|
||||
const missing = keyBuf.filter(i => json[i] === undefined);
|
||||
if (missing.length !== 0) {
|
||||
errors.textContent = "deps not found: " + missing.join(", ");
|
||||
} else {
|
||||
cancelation = undefined;
|
||||
edit.dispatchEvent(new InputEvent("input"));
|
||||
}
|
||||
}
|
||||
} catch (er) {
|
||||
errors.textContent = "completely failed to fetch ("
|
||||
+ e.status + "): " + keyBuf.join(", ");
|
||||
console.error(e, er);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (cancelation && importDiff.size !== 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
loadCachedPackages(edit.value, keyBuf, packages, prevRoots);
|
||||
|
||||
errors.textContent = compileCode(hbc, packages, 1);
|
||||
const minified_size = modifyCode(fmt, edit.value, "minify")?.length;
|
||||
if (minified_size) {
|
||||
codeSize.textContent = (MAX_CODE_SIZE - minified_size) + "";
|
||||
const perc = Math.min(100, Math.floor(100 * (minified_size / MAX_CODE_SIZE)));
|
||||
codeSize.style.color = `color-mix(in srgb, white, var(--error) ${perc}%)`;
|
||||
}
|
||||
timeout = 0;
|
||||
};
|
||||
|
||||
edit.addEventListener("input", () => {
|
||||
if (timeout) clearTimeout(timeout);
|
||||
timeout = setTimeout(onInput, debounce)
|
||||
});
|
||||
edit.dispatchEvent(new InputEvent("input"));
|
||||
}
|
||||
|
||||
/** @type {{ [key: string]: (content: string) => Promise<string> | string }} */
|
||||
const applyFns = {
|
||||
timestamp: (content) => new Date(parseInt(content) * 1000).toLocaleString(),
|
||||
fmt: (content) => getFmtInstance().then(i => modifyCode(i, content, "fmt") ?? "invalid code"),
|
||||
};
|
||||
/** @param {HTMLElement} target */
|
||||
function execApply(target) {
|
||||
for (const elem of target.querySelectorAll('[apply]')) {
|
||||
if (!(elem instanceof HTMLElement)) continue;
|
||||
const funcname = elem.getAttribute('apply') ?? never();
|
||||
let res = applyFns[funcname](elem.textContent ?? "");
|
||||
if (res instanceof Promise) res.then(c => elem.textContent = c);
|
||||
else elem.textContent = res;
|
||||
}
|
||||
}
|
||||
|
||||
/** @param {HTMLElement} target */
|
||||
function bindTextareaAutoResize(target) {
|
||||
for (const textarea of target.querySelectorAll("textarea")) {
|
||||
if (!(textarea instanceof HTMLTextAreaElement)) never();
|
||||
|
||||
const taCssMap = window.getComputedStyle(textarea);
|
||||
const padding = parseInt(taCssMap.getPropertyValue('padding-top') ?? "0")
|
||||
+ parseInt(taCssMap.getPropertyValue('padding-top') ?? "0");
|
||||
textarea.style.height = "auto";
|
||||
textarea.style.height = (textarea.scrollHeight - padding) + "px";
|
||||
textarea.style.overflowY = "hidden";
|
||||
textarea.addEventListener("input", function() {
|
||||
let top = window.scrollY;
|
||||
textarea.style.height = "auto";
|
||||
textarea.style.height = (textarea.scrollHeight - padding) + "px";
|
||||
window.scrollTo({ top });
|
||||
});
|
||||
|
||||
textarea.onkeydown = (ev) => {
|
||||
if (ev.key === "Tab") {
|
||||
ev.preventDefault();
|
||||
document.execCommand('insertText', false, "\t");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** @param {HTMLElement} target */
|
||||
function cacheInputs(target) {
|
||||
/**@type {HTMLFormElement}*/ let form;
|
||||
for (form of target.querySelectorAll('form')) {
|
||||
const path = form.getAttribute('hx-post') || form.getAttribute('hx-delete');
|
||||
if (!path) {
|
||||
console.warn('form does not have a hx-post or hx-delete attribute', form);
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const input of form.elements) {
|
||||
if (input instanceof HTMLInputElement || input instanceof HTMLTextAreaElement) {
|
||||
if ('password submit button'.includes(input.type)) continue;
|
||||
const key = path + input.name;
|
||||
input.value = localStorage.getItem(key) ?? '';
|
||||
input.addEventListener("input", () => localStorage.setItem(key, input.value));
|
||||
} else {
|
||||
console.warn("unhandled form element: ", input);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** @param {string} [path] */
|
||||
function updaetTab(path) {
|
||||
for (const elem of document.querySelectorAll("button[hx-push-url]")) {
|
||||
if (elem instanceof HTMLButtonElement)
|
||||
elem.disabled = elem.getAttribute("hx-push-url") === (path ?? window.location.pathname);
|
||||
}
|
||||
}
|
||||
|
||||
if (window.location.hostname === 'localhost') {
|
||||
let id; setInterval(async () => {
|
||||
let new_id = await fetch('/hot-reload').then(reps => reps.text());
|
||||
id ??= new_id;
|
||||
if (id !== new_id) window.location.reload();
|
||||
}, 300);
|
||||
|
||||
(async function test() {
|
||||
{
|
||||
const code = "main:=fn():void{return}";
|
||||
const inst = await getFmtInstance()
|
||||
const fmtd = modifyCode(inst, code, "fmt") ?? never();
|
||||
const prev = modifyCode(inst, fmtd, "minify") ?? never();
|
||||
if (code != prev) console.error(code, prev);
|
||||
}
|
||||
{
|
||||
const posts = [{
|
||||
path: "foo.hb",
|
||||
code: "main:=fn():int{return 42}",
|
||||
}];
|
||||
const res = compileCode(await getHbcInstance(), posts, 1) ?? never();
|
||||
const expected = "exit code: 42\n";
|
||||
if (expected != res) console.error(expected, res);
|
||||
}
|
||||
})()
|
||||
}
|
||||
|
||||
document.body.addEventListener('htmx:afterSwap', (ev) => {
|
||||
if (!(ev.target instanceof HTMLElement)) never();
|
||||
wireUp(ev.target);
|
||||
if (ev.target.tagName == "MAIN" || ev.target.tagName == "BODY")
|
||||
updaetTab(ev['detail'].pathInfo.finalRequestPath);
|
||||
console.log(ev);
|
||||
});
|
||||
|
||||
getFmtInstance().then(inst => {
|
||||
document.body.addEventListener('htmx:configRequest', (ev) => {
|
||||
const details = ev['detail'];
|
||||
if (details.path === "/post" && details.verb === "post") {
|
||||
details.parameters['code'] = modifyCode(inst, details.parameters['code'], "minify");
|
||||
}
|
||||
});
|
||||
|
||||
/** @param {string} query @param {string} target @returns {number} */
|
||||
function fuzzyCost(query, target) {
|
||||
let qi = 0, bi = 0, cost = 0, matched = false;
|
||||
while (qi < query.length) {
|
||||
if (query.charAt(qi) === target.charAt(bi++)) {
|
||||
matched = true;
|
||||
qi++;
|
||||
} else {
|
||||
cost++;
|
||||
}
|
||||
if (bi === target.length) (bi = 0, qi++);
|
||||
}
|
||||
return cost + (matched ? 0 : 100 * target.length);
|
||||
}
|
||||
|
||||
let deps = undefined;
|
||||
/** @param {HTMLInputElement} input @returns {void} */
|
||||
function filterCodeDeps(input) {
|
||||
deps ??= document.getElementById("deps");
|
||||
if (!(deps instanceof HTMLElement)) never();
|
||||
if (input.value === "") {
|
||||
deps.textContent = "results show here...";
|
||||
return;
|
||||
}
|
||||
deps.innerHTML = "";
|
||||
for (const root of [...prevRoots.keys()]
|
||||
.sort((a, b) => fuzzyCost(input.value, a) - fuzzyCost(input.value, b))) {
|
||||
const pane = document.createElement("div");
|
||||
const code = modifyCode(inst, localStorage["package-" + root], "fmt");
|
||||
pane.innerHTML = `<div>${root}</div><pre>${code}</pre>`;
|
||||
deps.appendChild(pane);
|
||||
}
|
||||
if (deps.innerHTML === "") {
|
||||
deps.textContent = "no results";
|
||||
}
|
||||
}
|
||||
|
||||
Object.assign(window, { filterCodeDeps });
|
||||
});
|
||||
|
||||
updaetTab();
|
||||
wireUp(document.body);
|
||||
|
813
depell/src/main.rs
Normal file
813
depell/src/main.rs
Normal file
|
@ -0,0 +1,813 @@
|
|||
#![feature(iter_collect_into)]
|
||||
use {
|
||||
argon2::{password_hash::SaltString, PasswordVerifier},
|
||||
axum::{
|
||||
body::Bytes,
|
||||
extract::Path,
|
||||
http::{header::COOKIE, request::Parts},
|
||||
response::{AppendHeaders, Html},
|
||||
},
|
||||
const_format::formatcp,
|
||||
core::fmt,
|
||||
htmlm::{html, write_html},
|
||||
rand_core::OsRng,
|
||||
serde::{Deserialize, Serialize},
|
||||
std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fmt::{Display, Write},
|
||||
net::Ipv4Addr,
|
||||
},
|
||||
};
|
||||
|
||||
const MAX_NAME_LENGTH: usize = 32;
|
||||
const MAX_POSTNAME_LENGTH: usize = 64;
|
||||
const MAX_CODE_LENGTH: usize = 1024 * 4;
|
||||
const SESSION_DURATION_SECS: u64 = 60 * 60;
|
||||
const MAX_FEED_SIZE: usize = 8 * 1024;
|
||||
|
||||
type Redirect<const COUNT: usize = 1> = AppendHeaders<[(&'static str, &'static str); COUNT]>;
|
||||
|
||||
macro_rules! static_asset {
|
||||
($mime:literal, $body:literal) => {
|
||||
get(|| async {
|
||||
axum::http::Response::builder()
|
||||
.header("content-type", $mime)
|
||||
.header("content-encoding", "gzip")
|
||||
.body(axum::body::Body::from(Bytes::from_static(include_bytes!(concat!(
|
||||
$body, ".gz"
|
||||
)))))
|
||||
.unwrap()
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
async fn amain() {
|
||||
use axum::routing::{delete, get, post};
|
||||
|
||||
let debug = cfg!(debug_assertions);
|
||||
|
||||
log::set_logger(&Logger).unwrap();
|
||||
log::set_max_level(if debug { log::LevelFilter::Warn } else { log::LevelFilter::Error });
|
||||
|
||||
db::init();
|
||||
|
||||
let router = axum::Router::new()
|
||||
.route("/", get(Index::page))
|
||||
.route("/index.css", static_asset!("text/css", "index.css"))
|
||||
.route("/index.js", static_asset!("text/javascript", "index.js"))
|
||||
.route("/hbfmt.wasm", static_asset!("application/wasm", "hbfmt.wasm"))
|
||||
.route("/hbc.wasm", static_asset!("application/wasm", "hbc.wasm"))
|
||||
.route("/index-view", get(Index::get))
|
||||
.route("/feed", get(Feed::page))
|
||||
.route("/feed-view", get(Feed::get))
|
||||
.route("/feed-more", post(Feed::more))
|
||||
.route("/profile", get(Profile::page))
|
||||
.route("/profile-view", get(Profile::get))
|
||||
.route("/profile/:name", get(Profile::get_other_page))
|
||||
.route("/profile-view/:name", get(Profile::get_other))
|
||||
.route("/post", get(Post::page))
|
||||
.route("/post-view", get(Post::get))
|
||||
.route("/post", post(Post::post))
|
||||
.route("/code", post(fetch_code))
|
||||
.route("/login", get(Login::page))
|
||||
.route("/login-view", get(Login::get))
|
||||
.route("/login", post(Login::post))
|
||||
.route("/login", delete(Login::delete))
|
||||
.route("/signup", get(Signup::page))
|
||||
.route("/signup-view", get(Signup::get))
|
||||
.route("/signup", post(Signup::post))
|
||||
.route(
|
||||
"/hot-reload",
|
||||
get({
|
||||
let id = std::time::SystemTime::now()
|
||||
.duration_since(std::time::SystemTime::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_millis();
|
||||
move || async move { id.to_string() }
|
||||
}),
|
||||
);
|
||||
|
||||
#[cfg(feature = "tls")]
|
||||
{
|
||||
let addr =
|
||||
(Ipv4Addr::UNSPECIFIED, std::env::var("DEPELL_PORT").unwrap().parse::<u16>().unwrap());
|
||||
let config = axum_server::tls_rustls::RustlsConfig::from_pem_file(
|
||||
std::env::var("DEPELL_CERT_PATH").unwrap(),
|
||||
std::env::var("DEPELL_KEY_PATH").unwrap(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
axum_server::bind_rustls(addr.into(), config)
|
||||
.serve(router.into_make_service())
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
#[cfg(not(feature = "tls"))]
|
||||
{
|
||||
let addr = (Ipv4Addr::UNSPECIFIED, 8080);
|
||||
let socket = tokio::net::TcpListener::bind(addr).await.unwrap();
|
||||
axum::serve(socket, router).await.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
async fn fetch_code(
|
||||
axum::Json(paths): axum::Json<Vec<String>>,
|
||||
) -> axum::Json<HashMap<String, String>> {
|
||||
let mut deps = HashMap::<String, String>::new();
|
||||
db::with(|db| {
|
||||
for path in &paths {
|
||||
let Some((author, name)) = path.split_once('/') else { continue };
|
||||
db.fetch_deps
|
||||
.query_map((name, author), |r| {
|
||||
Ok((
|
||||
r.get::<_, String>(1)? + "/" + r.get_ref(0)?.as_str()?,
|
||||
r.get::<_, String>(2)?,
|
||||
))
|
||||
})
|
||||
.log("fetch deps query")
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.filter_map(|r| r.log("deps row"))
|
||||
.collect_into(&mut deps);
|
||||
}
|
||||
});
|
||||
axum::Json(deps)
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum Feed {
|
||||
Before { before_timestamp: u64 },
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Before {
|
||||
before_timestamp: u64,
|
||||
}
|
||||
|
||||
impl Feed {
|
||||
async fn more(session: Session, axum::Form(data): axum::Form<Before>) -> Html<String> {
|
||||
Self::Before { before_timestamp: data.before_timestamp }.render(&session)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Feed {
|
||||
fn default() -> Self {
|
||||
Self::Before { before_timestamp: now() + 3600 }
|
||||
}
|
||||
}
|
||||
|
||||
impl Page for Feed {
|
||||
fn render_to_buf(self, _: &Session, buf: &mut String) {
|
||||
db::with(|db| {
|
||||
let cursor = match self {
|
||||
Feed::Before { before_timestamp } => db
|
||||
.get_pots_before
|
||||
.query_map((before_timestamp,), Post::from_row)
|
||||
.log("fetch before posts query")
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.filter_map(|r| r.log("fetch before posts row")),
|
||||
};
|
||||
|
||||
let base_len = buf.len();
|
||||
let mut last_timestamp = None;
|
||||
for post in cursor {
|
||||
write!(buf, "{}", post).unwrap();
|
||||
if buf.len() - base_len > MAX_FEED_SIZE {
|
||||
last_timestamp = Some(post.timestamp);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
write_html!((*buf)
|
||||
if let Some(last_timestamp) = last_timestamp {
|
||||
<div "hx-post"="/feed-more"
|
||||
"hx-trigger"="intersect once"
|
||||
"hx-swap"="outerHTML"
|
||||
"hx-vals"={format_args!("{{\"before_timestamp\":{last_timestamp}}}")}
|
||||
>"there might be more"</div>
|
||||
} else {
|
||||
"no more stuff"
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Index;
|
||||
|
||||
impl PublicPage for Index {
|
||||
fn render_to_buf(self, buf: &mut String) {
|
||||
buf.push_str(include_str!("welcome-page.html"));
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Default)]
|
||||
struct Post {
|
||||
author: String,
|
||||
name: String,
|
||||
#[serde(skip)]
|
||||
timestamp: u64,
|
||||
#[serde(skip)]
|
||||
imports: usize,
|
||||
#[serde(skip)]
|
||||
runs: usize,
|
||||
#[serde(skip)]
|
||||
dependencies: usize,
|
||||
code: String,
|
||||
#[serde(skip)]
|
||||
error: Option<&'static str>,
|
||||
}
|
||||
|
||||
impl Page for Post {
|
||||
fn render_to_buf(self, session: &Session, buf: &mut String) {
|
||||
let Self { name, code, error, .. } = self;
|
||||
write_html! { (buf)
|
||||
<form id="postForm" "hx-post"="/post" "hx-swap"="outerHTML">
|
||||
if let Some(e) = error { <div class="error">e</div> }
|
||||
<input name="author" type="text" value={session.name} hidden>
|
||||
<input name="name" type="text" placeholder="name" value=name
|
||||
required maxlength=MAX_POSTNAME_LENGTH>
|
||||
<div id="code-editor">
|
||||
<textarea id="code-edit" name="code" placeholder="code" rows=1
|
||||
required>code</textarea>
|
||||
<span id="code-size">MAX_CODE_LENGTH</span>
|
||||
</div>
|
||||
<input type="submit" value="submit">
|
||||
<pre id="compiler-output"></pre>
|
||||
</form>
|
||||
!{include_str!("post-page.html")}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Post {
|
||||
pub fn from_row(r: &rusqlite::Row) -> rusqlite::Result<Self> {
|
||||
Ok(Post {
|
||||
author: r.get(0)?,
|
||||
name: r.get(1)?,
|
||||
timestamp: r.get(2)?,
|
||||
code: r.get(3)?,
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
|
||||
async fn post(
|
||||
session: Session,
|
||||
axum::Form(mut data): axum::Form<Self>,
|
||||
) -> Result<Redirect, Html<String>> {
|
||||
if data.name.len() > MAX_POSTNAME_LENGTH {
|
||||
data.error = Some(formatcp!("name too long, max length is {MAX_POSTNAME_LENGTH}"));
|
||||
return Err(data.render(&session));
|
||||
}
|
||||
|
||||
if data.code.len() > MAX_CODE_LENGTH {
|
||||
data.error = Some(formatcp!("code too long, max length is {MAX_CODE_LENGTH}"));
|
||||
return Err(data.render(&session));
|
||||
}
|
||||
|
||||
db::with(|db| {
|
||||
if let Err(e) = db.create_post.insert((&data.name, &session.name, now(), &data.code)) {
|
||||
if let rusqlite::Error::SqliteFailure(e, _) = e {
|
||||
if e.code == rusqlite::ErrorCode::ConstraintViolation {
|
||||
data.error = Some("this name is already used");
|
||||
}
|
||||
}
|
||||
data.error = data.error.or_else(|| {
|
||||
log::error!("create post error: {e}");
|
||||
Some("internal server error")
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
for (author, name) in hblang::lexer::Lexer::uses(&data.code)
|
||||
.filter_map(|v| v.split_once('/'))
|
||||
.collect::<HashSet<_>>()
|
||||
{
|
||||
if db
|
||||
.create_import
|
||||
.insert((author, name, &session.name, &data.name))
|
||||
.log("create import query")
|
||||
.is_none()
|
||||
{
|
||||
data.error = Some("internal server error");
|
||||
return;
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
if data.error.is_some() {
|
||||
Err(data.render(&session))
|
||||
} else {
|
||||
Ok(redirect("/profile"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Post {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let Self { author, name, timestamp, imports, runs, dependencies, code, .. } = self;
|
||||
write_html! { f <div class="preview">
|
||||
<div class="info">
|
||||
<span>
|
||||
<a "hx-get"={format_args!("/profile-view/{author}")} href="" "hx-target"="main"
|
||||
"hx-push-url"={format_args!("/profile/{author}")}
|
||||
"hx-swam"="innerHTML">author</a>
|
||||
"/"
|
||||
name
|
||||
</span>
|
||||
<span apply="timestamp">timestamp</span>
|
||||
</div>
|
||||
<div class="stats">
|
||||
for (name, count) in "inps runs deps".split(' ')
|
||||
.zip([imports, runs, dependencies])
|
||||
.filter(|(_, &c)| c != 0)
|
||||
{
|
||||
name ": "<span>count</span>
|
||||
}
|
||||
</div>
|
||||
<pre apply="fmt">code</pre>
|
||||
if *timestamp == 0 {
|
||||
<button "hx-get"="/post" "hx-swap"="outerHTML"
|
||||
"hx-target"="[preview]">"edit"</button>
|
||||
}
|
||||
</div> }
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Profile {
|
||||
other: Option<String>,
|
||||
}
|
||||
|
||||
impl Profile {
|
||||
async fn get_other(session: Session, Path(name): Path<String>) -> Html<String> {
|
||||
Profile { other: Some(name) }.render(&session)
|
||||
}
|
||||
|
||||
async fn get_other_page(session: Session, Path(name): Path<String>) -> Html<String> {
|
||||
base(|b| Profile { other: Some(name) }.render_to_buf(&session, b), Some(&session))
|
||||
}
|
||||
}
|
||||
|
||||
impl Page for Profile {
|
||||
fn render_to_buf(self, session: &Session, buf: &mut String) {
|
||||
db::with(|db| {
|
||||
let iter = db
|
||||
.get_user_posts
|
||||
.query_map((self.other.as_ref().unwrap_or(&session.name),), Post::from_row)
|
||||
.log("get user posts query")
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.filter_map(|p| p.log("user post row"));
|
||||
write_html! { (buf)
|
||||
for post in iter {
|
||||
!{post}
|
||||
} else {
|
||||
"no posts"
|
||||
}
|
||||
!{include_str!("profile-page.html")}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn hash_password(password: &str) -> String {
|
||||
use argon2::PasswordHasher;
|
||||
argon2::Argon2::default()
|
||||
.hash_password(password.as_bytes(), &SaltString::generate(&mut OsRng))
|
||||
.unwrap()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
fn verify_password(hash: &str, password: &str) -> Result<(), argon2::password_hash::Error> {
|
||||
argon2::Argon2::default()
|
||||
.verify_password(password.as_bytes(), &argon2::PasswordHash::new(hash)?)
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default, Debug)]
|
||||
struct Login {
|
||||
name: String,
|
||||
password: String,
|
||||
#[serde(skip)]
|
||||
error: Option<&'static str>,
|
||||
}
|
||||
|
||||
impl PublicPage for Login {
|
||||
fn render_to_buf(self, buf: &mut String) {
|
||||
let Login { name, password, error } = self;
|
||||
write_html! { (buf)
|
||||
<form "hx-post"="/login" "hx-swap"="outerHTML">
|
||||
if let Some(e) = error { <div class="error">e</div> }
|
||||
<input name="name" type="text" autocomplete="name" placeholder="name" value=name
|
||||
required maxlength=MAX_NAME_LENGTH>
|
||||
<input name="password" type="password" autocomplete="current-password" placeholder="password"
|
||||
value=password>
|
||||
<input type="submit" value="submit">
|
||||
</form>
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Login {
|
||||
async fn post(
|
||||
axum::Form(mut data): axum::Form<Self>,
|
||||
) -> Result<AppendHeaders<[(&'static str, String); 2]>, Html<String>> {
|
||||
|
||||
let mut id = [0u8; 32];
|
||||
db::with(|db| match db.authenticate.query_row((&data.name,), |r| r.get::<_, String>(1)) {
|
||||
Ok(hash) => {
|
||||
if verify_password(&hash, &data.password).is_err() {
|
||||
data.error = Some("invalid credentials");
|
||||
} else {
|
||||
getrandom::getrandom(&mut id).unwrap();
|
||||
if db
|
||||
.login
|
||||
.insert((id, &data.name, now() + SESSION_DURATION_SECS))
|
||||
.log("create session query")
|
||||
.is_none()
|
||||
{
|
||||
data.error = Some("internal server error");
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(rusqlite::Error::QueryReturnedNoRows) => {
|
||||
data.error = Some("invalid credentials");
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("foo {e}");
|
||||
data.error = Some("internal server error");
|
||||
}
|
||||
});
|
||||
|
||||
if data.error.is_some() {
|
||||
log::error!("what {:?}", data);
|
||||
Err(data.render())
|
||||
} else {
|
||||
Ok(AppendHeaders([
|
||||
("hx-location", "/feed".into()),
|
||||
(
|
||||
"set-cookie",
|
||||
format!(
|
||||
"id={}; SameSite=Strict; Secure; Max-Age={SESSION_DURATION_SECS}",
|
||||
to_hex(&id)
|
||||
),
|
||||
),
|
||||
]))
|
||||
}
|
||||
}
|
||||
|
||||
async fn delete(session: Session) -> Redirect {
|
||||
_ = db::with(|q| q.logout.execute((session.id,)).log("delete session query"));
|
||||
redirect("/login")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default)]
|
||||
struct Signup {
|
||||
name: String,
|
||||
new_password: String,
|
||||
confirm_password: String,
|
||||
#[serde(default)]
|
||||
confirm_no_password: bool,
|
||||
#[serde(skip)]
|
||||
error: Option<&'static str>,
|
||||
}
|
||||
|
||||
impl PublicPage for Signup {
|
||||
fn render_to_buf(self, buf: &mut String) {
|
||||
let Signup { name, new_password, confirm_password, confirm_no_password, error } = self;
|
||||
let vals = if confirm_no_password { "{\"confirm_no_password\":true}" } else { "{}" };
|
||||
write_html! { (buf)
|
||||
<form "hx-post"="/signup" "hx-swap"="outerHTML" "hx-vals"=vals>
|
||||
if let Some(e) = error { <div class="error">e</div> }
|
||||
<input name="name" type="text" autocomplete="name" placeholder="name" value=name
|
||||
maxlength=MAX_NAME_LENGTH required>
|
||||
<input name="new_password" type="password" autocomplete="new-password" placeholder="new password"
|
||||
value=new_password>
|
||||
<input name="confirm_password" type="password" autocomplete="confirm-password"
|
||||
placeholder="confirm password" value=confirm_password>
|
||||
<input type="submit" value="submit">
|
||||
</form>
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Signup {
|
||||
async fn post(axum::Form(mut data): axum::Form<Self>) -> Result<Redirect, Html<String>> {
|
||||
if data.name.len() > MAX_NAME_LENGTH {
|
||||
data.error = Some(formatcp!("name too long, max length is {MAX_NAME_LENGTH}"));
|
||||
return Err(data.render());
|
||||
}
|
||||
|
||||
if !data.confirm_no_password && data.new_password.is_empty() {
|
||||
data.confirm_no_password = true;
|
||||
data.error = Some("Are you sure you don't want to use a password? (then submit again)");
|
||||
return Err(data.render());
|
||||
}
|
||||
|
||||
db::with(|db| {
|
||||
|
||||
match db.register.insert((&data.name, hash_password(&data.new_password))) {
|
||||
Ok(_) => {}
|
||||
Err(rusqlite::Error::SqliteFailure(e, _))
|
||||
if e.code == rusqlite::ErrorCode::ConstraintViolation =>
|
||||
{
|
||||
data.error = Some("username already taken");
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("create user query: {e}");
|
||||
data.error = Some("internal server error");
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
if data.error.is_some() {
|
||||
Err(data.render())
|
||||
} else {
|
||||
Ok(redirect("/login"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn base(body: impl FnOnce(&mut String), session: Option<&Session>) -> Html<String> {
|
||||
let username = session.map(|s| &s.name);
|
||||
|
||||
let nav_button = |f: &mut String, name: &str| {
|
||||
write_html! {(f)
|
||||
<button "hx-push-url"={format_args!("/{name}")}
|
||||
"hx-get"={format_args!("/{name}-view")}
|
||||
"hx-target"="main"
|
||||
"hx-swap"="innerHTML">name</button>
|
||||
}
|
||||
};
|
||||
|
||||
Html(html! {
|
||||
"<!DOCTYPE html>"
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta name="charset" content="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<link rel="stylesheet" href="/index.css">
|
||||
</head>
|
||||
<body>
|
||||
<nav>
|
||||
<button "hx-push-url"="/" "hx-get"="/index-view" "hx-target"="main" "hx-swap"="innerHTML">"depell"</button>
|
||||
<section>
|
||||
if let Some(username) = username {
|
||||
<button "hx-push-url"="/profile" "hx-get"="/profile-view" "hx-target"="main"
|
||||
"hx-swap"="innerHTML">username</button>
|
||||
|f|{nav_button(f, "feed"); nav_button(f, "post")}
|
||||
<button "hx-delete"="/login">"logout"</button>
|
||||
} else {
|
||||
|f|{nav_button(f, "login"); nav_button(f, "signup")}
|
||||
}
|
||||
</section>
|
||||
</nav>
|
||||
<section id="post-form"></section>
|
||||
<main>|f|{body(f)}</main>
|
||||
</body>
|
||||
<script src="https://unpkg.com/htmx.org@2.0.3/dist/htmx.min.js" integrity="sha384-0895/pl2MU10Hqc6jd4RvrthNlDiE9U1tWmX7WRESftEDRosgxNsQG/Ze9YMRzHq" crossorigin="anonymous"></script>
|
||||
<script type="module" src="/index.js"></script>
|
||||
</html>
|
||||
})
|
||||
}
|
||||
|
||||
struct Session {
|
||||
name: String,
|
||||
id: [u8; 32],
|
||||
}
|
||||
|
||||
#[axum::async_trait]
|
||||
impl<S> axum::extract::FromRequestParts<S> for Session {
|
||||
/// If the extractor fails it'll use this "rejection" type. A rejection is
|
||||
/// a kind of error that can be converted into a response.
|
||||
type Rejection = Redirect;
|
||||
|
||||
/// Perform the extraction.
|
||||
async fn from_request_parts(parts: &mut Parts, _: &S) -> Result<Self, Self::Rejection> {
|
||||
let err = redirect("/login");
|
||||
|
||||
let value = parts
|
||||
.headers
|
||||
.get_all(COOKIE)
|
||||
.into_iter()
|
||||
.find_map(|c| c.to_str().ok()?.trim().strip_prefix("id="))
|
||||
.map(|c| c.split_once(';').unwrap_or((c, "")).0)
|
||||
.ok_or(err)?;
|
||||
let mut id = [0u8; 32];
|
||||
parse_hex(value, &mut id).ok_or(err)?;
|
||||
|
||||
let (name, expiration) = db::with(|db| {
|
||||
db.get_session
|
||||
.query_row((id,), |r| Ok((r.get::<_, String>(0)?, r.get::<_, u64>(1)?)))
|
||||
.log("fetching session")
|
||||
.ok_or(err)
|
||||
})?;
|
||||
|
||||
if expiration < now() {
|
||||
return Err(err);
|
||||
}
|
||||
|
||||
Ok(Self { name, id })
|
||||
}
|
||||
}
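// Sketch of a handler (hypothetical, not in this patch) relying on the extractor above: taking
// `Session` as an argument is enough to gate the route, since a missing or expired cookie
// short-circuits into the `Redirect` rejection.
async fn whoami(session: Session) -> String {
    format!("logged in as {}", session.name)
}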
|
||||
|
||||
fn now() -> u64 {
|
||||
std::time::SystemTime::now()
|
||||
.duration_since(std::time::SystemTime::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs()
|
||||
}
|
||||
|
||||
fn parse_hex(hex: &str, dst: &mut [u8]) -> Option<()> {
|
||||
fn hex_to_nibble(b: u8) -> Option<u8> {
|
||||
Some(match b {
|
||||
b'a'..=b'f' => b - b'a' + 10,
|
||||
b'A'..=b'F' => b - b'A' + 10,
|
||||
b'0'..=b'9' => b - b'0',
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
|
||||
if hex.len() != dst.len() * 2 {
|
||||
return None;
|
||||
}
|
||||
|
||||
for (d, p) in dst.iter_mut().zip(hex.as_bytes().chunks_exact(2)) {
|
||||
*d = (hex_to_nibble(p[0])? << 4) | hex_to_nibble(p[1])?;
|
||||
}
|
||||
|
||||
Some(())
|
||||
}
|
||||
|
||||
fn to_hex(src: &[u8]) -> String {
|
||||
use std::fmt::Write;
|
||||
let mut buf = String::new();
|
||||
for &b in src {
|
||||
write!(buf, "{b:02x}").unwrap()
|
||||
}
|
||||
buf
|
||||
}
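// A small sketch (not part of this diff) showing that the two hex helpers above round-trip the
// 32-byte session id losslessly.
#[cfg(test)]
mod hex_tests {
    use super::{parse_hex, to_hex};

    #[test]
    fn hex_round_trip() {
        let id = [0xABu8; 32];
        let encoded = to_hex(&id); // 64 lowercase hex characters
        let mut decoded = [0u8; 32];
        parse_hex(&encoded, &mut decoded).expect("well-formed hex of the right length");
        assert_eq!(id, decoded);
    }
}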
|
||||
|
||||
fn main() {
|
||||
tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(amain());
|
||||
}
|
||||
|
||||
mod db {
|
||||
use std::cell::RefCell;
|
||||
|
||||
macro_rules! gen_queries {
|
||||
($vis:vis struct $name:ident {
|
||||
$($qname:ident: $code:expr,)*
|
||||
}) => {
|
||||
$vis struct $name<'a> {
|
||||
$($vis $qname: rusqlite::Statement<'a>,)*
|
||||
}
|
||||
|
||||
impl<'a> $name<'a> {
|
||||
fn new(db: &'a rusqlite::Connection) -> Self {
|
||||
Self {
|
||||
$($qname: db.prepare($code).unwrap(),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
gen_queries! {
|
||||
pub struct Queries {
|
||||
register: "INSERT INTO user (name, password_hash) VALUES(?, ?)",
|
||||
authenticate: "SELECT name, password_hash FROM user WHERE name = ?",
|
||||
login: "INSERT OR REPLACE INTO session (id, username, expiration) VALUES(?, ?, ?)",
|
||||
logout: "DELETE FROM session WHERE id = ?",
|
||||
get_session: "SELECT username, expiration FROM session WHERE id = ?",
|
||||
get_user_posts: "SELECT author, name, timestamp, code FROM post WHERE author = ?
|
||||
ORDER BY timestamp DESC",
|
||||
get_pots_before: "SELECT author, name, timestamp, code FROM post WHERE timestamp < ?",
|
||||
create_post: "INSERT INTO post (name, author, timestamp, code) VALUES(?, ?, ?, ?)",
|
||||
fetch_deps: "
|
||||
WITH RECURSIVE roots(name, author, code) AS (
|
||||
SELECT name, author, code FROM post WHERE name = ? AND author = ?
|
||||
UNION
|
||||
SELECT post.name, post.author, post.code FROM
|
||||
post JOIN import ON post.name = import.to_name
|
||||
AND post.author = import.to_author
|
||||
JOIN roots ON import.from_name = roots.name
|
||||
AND import.from_author = roots.author
|
||||
) SELECT * FROM roots;
|
||||
",
|
||||
create_import: "INSERT INTO import(to_author, to_name, from_author, from_name)
|
||||
VALUES(?, ?, ?, ?)",
|
||||
}
|
||||
}
|
||||
|
||||
struct Db {
|
||||
queries: Queries<'static>,
|
||||
_db: Box<rusqlite::Connection>,
|
||||
}
|
||||
|
||||
impl Db {
|
||||
fn new() -> Self {
|
||||
let db = Box::new(rusqlite::Connection::open("db.sqlite").unwrap());
|
||||
Self {
|
||||
// SAFETY: the 'static lifetime is only nominal; the boxed connection lives in `_db`
// right next to the prepared statements, and field order drops the statements first.
queries: Queries::new(unsafe {
|
||||
std::mem::transmute::<&rusqlite::Connection, &rusqlite::Connection>(&db)
|
||||
}),
|
||||
_db: db,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with<T>(with: impl FnOnce(&mut Queries) -> T) -> T {
|
||||
thread_local! { static DB_CONN: RefCell<Db> = RefCell::new(Db::new()); }
|
||||
DB_CONN.with_borrow_mut(|q| with(&mut q.queries))
|
||||
}
|
||||
|
||||
pub fn init() {
|
||||
let db = rusqlite::Connection::open("db.sqlite").unwrap();
|
||||
db.execute_batch(include_str!("schema.sql")).unwrap();
|
||||
Queries::new(&db);
|
||||
}
|
||||
}
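// Illustrative helper (hypothetical, not in this patch; the username argument is a placeholder):
// handlers borrow the thread-local prepared statements through `db::with` and log-and-discard
// errors via `ResultExt::log`.
fn lookup_password_hash(name: &str) -> Option<String> {
    db::with(|db| db.authenticate.query_row((name,), |r| r.get(1)).log("authenticate query"))
}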
|
||||
|
||||
fn redirect(to: &'static str) -> Redirect {
|
||||
AppendHeaders([("hx-location", to)])
|
||||
}
|
||||
|
||||
trait PublicPage: Default {
|
||||
fn render_to_buf(self, buf: &mut String);
|
||||
|
||||
fn render(self) -> Html<String> {
|
||||
let mut str = String::new();
|
||||
self.render_to_buf(&mut str);
|
||||
Html(str)
|
||||
}
|
||||
|
||||
async fn get() -> Html<String> {
|
||||
Self::default().render()
|
||||
}
|
||||
|
||||
async fn page(session: Option<Session>) -> Html<String> {
|
||||
base(|s| Self::default().render_to_buf(s), session.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
trait Page: Default {
|
||||
fn render_to_buf(self, session: &Session, buf: &mut String);
|
||||
|
||||
fn render(self, session: &Session) -> Html<String> {
|
||||
let mut str = String::new();
|
||||
self.render_to_buf(session, &mut str);
|
||||
Html(str)
|
||||
}
|
||||
|
||||
async fn get(session: Session) -> Html<String> {
|
||||
Self::default().render(&session)
|
||||
}
|
||||
|
||||
async fn page(session: Option<Session>) -> Result<Html<String>, axum::response::Redirect> {
|
||||
match session {
|
||||
Some(session) => {
|
||||
Ok(base(|f| Self::default().render_to_buf(&session, f), Some(&session)))
|
||||
}
|
||||
None => Err(axum::response::Redirect::permanent("/login")),
|
||||
}
|
||||
}
|
||||
}
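// Hypothetical wiring (the real router lives outside this hunk): the two traits above exist so
// pages can be mounted uniformly, roughly like
//
//     Router::new()
//         .route("/login", get(Login::page).post(Login::post))
//         .route("/profile", get(Profile::page))
//
// where `get`/`post` are `axum::routing::{get, post}`.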
|
||||
|
||||
trait ResultExt<O, E> {
|
||||
fn log(self, prefix: impl Display) -> Option<O>;
|
||||
}
|
||||
|
||||
impl<O, E: Display> ResultExt<O, E> for Result<O, E> {
|
||||
fn log(self, prefix: impl Display) -> Option<O> {
|
||||
match self {
|
||||
Ok(v) => Some(v),
|
||||
Err(e) => {
|
||||
log::error!("{prefix}: {e}");
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct Logger;
|
||||
|
||||
impl log::Log for Logger {
|
||||
fn enabled(&self, _: &log::Metadata) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn log(&self, record: &log::Record) {
|
||||
if self.enabled(record.metadata()) {
|
||||
eprintln!("{} - {}", record.module_path().unwrap_or("=="), record.args());
|
||||
}
|
||||
}
|
||||
|
||||
fn flush(&self) {}
|
||||
}
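// Sketch (hypothetical; the real call site is elsewhere) of how the logger above would be
// installed once at startup:
fn init_logging() {
    static LOGGER: Logger = Logger;
    log::set_logger(&LOGGER).expect("logger is installed only once");
    log::set_max_level(log::LevelFilter::Info);
}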
|
21
depell/src/post-page.html
Normal file
|
@ -0,0 +1,21 @@
|
|||
<div id="dep-list">
|
||||
<input placeholder="search impoted deps.." oninput="filterCodeDeps(this, event)">
|
||||
<section id="deps">
|
||||
results show here...
|
||||
</section>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
|
||||
<h3>About posting code</h3>
|
||||
<p>
|
||||
If you are unfamiliar with <a href="https://git.ablecorp.us/AbleOS/holey-bytes">hblang</a>, refer to the
|
||||
<strong>hblang/README.md</strong> or
|
||||
visit <a href="/profile/mlokis">mlokis' posts</a>. Preferably don't edit the code here.
|
||||
</p>
|
||||
|
||||
<h3>Extra textarea features</h3>
|
||||
<ul>
|
||||
<li>proper tab behaviour</li>
|
||||
<li>snap to previous tab boundary on "empty" lines</li>
|
||||
</ul>
|
55
depell/src/schema.sql
Normal file
|
@ -0,0 +1,55 @@
|
|||
PRAGMA foreign_keys = ON;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS user(
|
||||
name TEXT NOT NULL,
|
||||
password_hash TEXT NOT NULL,
|
||||
PRIMARY KEY (name)
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS session(
|
||||
id BLOB NOT NULL,
|
||||
username TEXT NOT NULL,
|
||||
expiration INTEGER NOT NULL,
|
||||
FOREIGN KEY (username) REFERENCES user (name),
|
||||
PRIMARY KEY (username)
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS
|
||||
session_id ON session (id);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS post(
|
||||
name TEXT NOT NULL,
|
||||
author TEXT,
|
||||
timestamp INTEGER,
|
||||
code TEXT NOT NULL,
|
||||
FOREIGN KEY (author) REFERENCES user(name) ON DELETE SET NULL,
|
||||
PRIMARY KEY (author, name)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS
|
||||
post_timestamp ON post(timestamp DESC);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS import(
|
||||
from_name TEXT NOT NULL,
|
||||
from_author TEXT,
|
||||
to_name TEXT NOT NULL,
|
||||
to_author TEXT,
|
||||
FOREIGN KEY (from_name, from_author) REFERENCES post(name, author),
|
||||
FOREIGN KEY (to_name, to_author) REFERENCES post(name, author)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS
|
||||
dependencies ON import(from_name, from_author);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS
|
||||
dependants ON import(to_name, to_author);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS run(
|
||||
code_name TEXT NOT NULL,
|
||||
code_author TEXT NOT NULL,
|
||||
runner TEXT NOT NULL,
|
||||
FOREIGN KEY (code_name, code_author) REFERENCES post(name, author),
|
||||
FOREIGN KEY (runner) REFERENCES user(name),
|
||||
PRIMARY KEY (code_name, code_author, runner)
|
||||
);
|
||||
|
17
depell/src/welcome-page.html
Normal file
|
@ -0,0 +1,17 @@
|
|||
<h1>Welcome to depell</h1>
|
||||
<p>
|
||||
Depell (dependency hell) is a simple "social" media site best compared to twitter, except that all you can post is
|
||||
<a href="https://git.ablecorp.us/AbleOS/holey-bytes">hblang</a> code with no comments allowed. Instead of likes you
|
||||
run the program, and instead of retweets you import the program as a dependency. Runs are counted even when the program is run indirectly.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
The backend only serves the code; the frontend compiles and runs it locally. All posts are immutable.
|
||||
</p>
|
||||
|
||||
<h2>Security?</h2>
|
||||
<p>
|
||||
All code runs in WASM (inside a holey-bytes VM until hblang compiles to wasm) and is controlled by JavaScript. WASM
|
||||
can't do any form of IO without going through JavaScript, so as long as the JS import object does not allow WASM to execute
|
||||
arbitrary JS code, WASM can act as a container inside the JS.
|
||||
</p>
|
11
depell/wasm-fmt/Cargo.toml
Normal file
|
@ -0,0 +1,11 @@
|
|||
[package]
|
||||
name = "wasm-hbfmt"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib"]
|
||||
|
||||
[dependencies]
|
||||
hblang = { workspace = true, features = ["no_log"] }
|
||||
wasm-rt = { version = "0.1.0", path = "../wasm-rt" }
|
34
depell/wasm-fmt/src/lib.rs
Normal file
|
@ -0,0 +1,34 @@
|
|||
#![no_std]
|
||||
#![feature(str_from_raw_parts)]
|
||||
#![feature(alloc_error_handler)]
|
||||
|
||||
use hblang::{fmt, parser};
|
||||
|
||||
wasm_rt::decl_runtime!(128 * 1024, 1024 * 4);
|
||||
|
||||
const MAX_OUTPUT_SIZE: usize = 1024 * 10;
|
||||
wasm_rt::decl_buffer!(MAX_OUTPUT_SIZE, MAX_OUTPUT, OUTPUT, OUTPUT_LEN);
|
||||
|
||||
const MAX_INPUT_SIZE: usize = 1024 * 4;
|
||||
wasm_rt::decl_buffer!(MAX_INPUT_SIZE, MAX_INPUT, INPUT, INPUT_LEN);
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn fmt() {
|
||||
ALLOCATOR.reset();
|
||||
|
||||
let code = core::str::from_raw_parts(core::ptr::addr_of!(INPUT).cast(), INPUT_LEN);
|
||||
|
||||
let arena = parser::Arena::with_capacity(code.len() * parser::SOURCE_TO_AST_FACTOR);
|
||||
let mut ctx = parser::Ctx::default();
|
||||
let exprs = parser::Parser::parse(&mut ctx, code, "source.hb", &mut parser::no_loader, &arena);
|
||||
|
||||
let mut f = wasm_rt::Write(&mut OUTPUT[..]);
|
||||
fmt::fmt_file(exprs, code, &mut f).unwrap();
|
||||
OUTPUT_LEN = MAX_OUTPUT_SIZE - f.0.len();
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn minify() {
|
||||
let code = core::str::from_raw_parts_mut(core::ptr::addr_of_mut!(OUTPUT).cast(), OUTPUT_LEN);
|
||||
OUTPUT_LEN = fmt::minify(code);
|
||||
}
|
14
depell/wasm-hbc/Cargo.toml
Normal file
|
@ -0,0 +1,14 @@
|
|||
[package]
|
||||
name = "wasm-hbc"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib"]
|
||||
|
||||
[dependencies]
|
||||
hblang = { workspace = true, features = [] }
|
||||
hbvm.workspace = true
|
||||
log = { version = "0.4.22", features = ["release_max_level_error"] }
|
||||
wasm-rt = { version = "0.1.0", path = "../wasm-rt", features = ["log"] }
|
||||
|
119
depell/wasm-hbc/src/lib.rs
Normal file
|
@ -0,0 +1,119 @@
|
|||
#![feature(alloc_error_handler)]
|
||||
#![feature(slice_take)]
|
||||
#![no_std]
|
||||
|
||||
use {
|
||||
alloc::{string::String, vec::Vec},
|
||||
hblang::{
|
||||
son::{hbvm::HbvmBackend, Codegen, CodegenCtx},
|
||||
ty::Module,
|
||||
Ent,
|
||||
},
|
||||
};
|
||||
|
||||
extern crate alloc;
|
||||
|
||||
const ARENA_CAP: usize = 128 * 16 * 1024;
|
||||
wasm_rt::decl_runtime!(ARENA_CAP, 1024 * 4);
|
||||
|
||||
const MAX_INPUT_SIZE: usize = 32 * 4 * 1024;
|
||||
wasm_rt::decl_buffer!(MAX_INPUT_SIZE, MAX_INPUT, INPUT, INPUT_LEN);
|
||||
|
||||
#[no_mangle]
|
||||
unsafe fn compile_and_run(mut fuel: usize) {
|
||||
ALLOCATOR.reset();
|
||||
|
||||
_ = log::set_logger(&wasm_rt::Logger);
|
||||
log::set_max_level(log::LevelFilter::Error);
|
||||
|
||||
struct File<'a> {
|
||||
path: &'a str,
|
||||
code: &'a mut str,
|
||||
}
|
||||
|
||||
let mut root = 0;
|
||||
|
||||
let files = {
|
||||
let mut input_bytes =
|
||||
core::slice::from_raw_parts_mut(core::ptr::addr_of_mut!(INPUT).cast::<u8>(), INPUT_LEN);
|
||||
|
||||
let mut files = Vec::with_capacity(32);
|
||||
while let Some((&mut path_len, rest)) = input_bytes.split_first_chunk_mut() {
|
||||
let (path, rest) = rest.split_at_mut(u16::from_le_bytes(path_len) as usize);
|
||||
let (&mut code_len, rest) = rest.split_first_chunk_mut().unwrap();
|
||||
let (code, rest) = rest.split_at_mut(u16::from_le_bytes(code_len) as usize);
|
||||
files.push(File {
|
||||
path: core::str::from_utf8_unchecked(path),
|
||||
code: core::str::from_utf8_unchecked_mut(code),
|
||||
});
|
||||
input_bytes = rest;
|
||||
}
|
||||
|
||||
let root_path = files[root].path;
|
||||
hblang::quad_sort(&mut files, |a, b| a.path.cmp(b.path));
|
||||
root = files.binary_search_by_key(&root_path, |p| p.path).unwrap();
|
||||
|
||||
files
|
||||
};
|
||||
|
||||
let mut ctx = CodegenCtx::default();
|
||||
|
||||
let files = {
|
||||
let paths = files.iter().map(|f| f.path).collect::<Vec<_>>();
|
||||
let mut loader = |path: &str, _: &str, kind| match kind {
|
||||
hblang::parser::FileKind::Module => Ok(paths.binary_search(&path).unwrap()),
|
||||
hblang::parser::FileKind::Embed => Err("embeds are not supported".into()),
|
||||
};
|
||||
files
|
||||
.into_iter()
|
||||
.map(|f| {
|
||||
hblang::parser::Ast::new(
|
||||
f.path,
|
||||
// since 'free' does nothing this is fine
|
||||
String::from_raw_parts(f.code.as_mut_ptr(), f.code.len(), f.code.len()),
|
||||
&mut ctx.parser,
|
||||
&mut loader,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
let mut ct = {
|
||||
let mut backend = HbvmBackend::default();
|
||||
Codegen::new(&mut backend, &files, &mut ctx).generate(Module::new(root));
|
||||
|
||||
if !ctx.parser.errors.borrow().is_empty() {
|
||||
log::error!("{}", ctx.parser.errors.borrow());
|
||||
return;
|
||||
}
|
||||
|
||||
let mut c = Codegen::new(&mut backend, &files, &mut ctx);
|
||||
c.assemble_comptime()
|
||||
};
|
||||
|
||||
while fuel != 0 {
|
||||
match ct.vm.run() {
|
||||
Ok(hbvm::VmRunOk::End) => {
|
||||
log::error!("exit code: {}", ct.vm.read_reg(1).0 as i64);
|
||||
break;
|
||||
}
|
||||
Ok(hbvm::VmRunOk::Ecall) => {
|
||||
let unknown = ct.vm.read_reg(2).0;
|
||||
log::error!("unknown ecall: {unknown}")
|
||||
}
|
||||
Ok(hbvm::VmRunOk::Timer) => {
|
||||
fuel -= 1;
|
||||
if fuel == 0 {
|
||||
log::error!("program timed out");
|
||||
}
|
||||
}
|
||||
Ok(hbvm::VmRunOk::Breakpoint) => todo!(),
|
||||
Err(e) => {
|
||||
log::error!("vm error: {e}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//log::error!("memory consumption: {}b / {}b", ALLOCATOR.used(), ARENA_CAP);
|
||||
}
|
7
depell/wasm-rt/Cargo.toml
Normal file
|
@ -0,0 +1,7 @@
|
|||
[package]
|
||||
name = "wasm-rt"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
log = { version = "0.4.22", optional = true }
|
162
depell/wasm-rt/src/lib.rs
Normal file
|
@ -0,0 +1,162 @@
|
|||
#![feature(alloc_error_handler)]
|
||||
#![feature(pointer_is_aligned_to)]
|
||||
#![feature(slice_take)]
|
||||
#![no_std]
|
||||
|
||||
use core::{
|
||||
alloc::{GlobalAlloc, Layout},
|
||||
cell::UnsafeCell,
|
||||
};
|
||||
|
||||
extern crate alloc;
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! decl_buffer {
|
||||
($cap:expr, $export_cap:ident, $export_base:ident, $export_len:ident) => {
|
||||
#[no_mangle]
|
||||
static $export_cap: usize = $cap;
|
||||
#[no_mangle]
|
||||
static mut $export_base: [u8; $cap] = [0; $cap];
|
||||
#[no_mangle]
|
||||
static mut $export_len: usize = 0;
|
||||
};
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! decl_runtime {
|
||||
($memory_size:expr, $max_panic_size:expr) => {
|
||||
#[cfg(debug_assertions)]
|
||||
#[no_mangle]
|
||||
static mut PANIC_MESSAGE: [u8; $max_panic_size] = [0; $max_panic_size];
|
||||
#[cfg(debug_assertions)]
|
||||
#[no_mangle]
|
||||
static mut PANIC_MESSAGE_LEN: usize = 0;
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[panic_handler]
|
||||
pub fn handle_panic(_info: &core::panic::PanicInfo) -> ! {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
unsafe {
|
||||
use core::fmt::Write;
|
||||
let mut f = $crate::Write(&mut PANIC_MESSAGE[..]);
|
||||
_ = writeln!(f, "{}", _info);
|
||||
PANIC_MESSAGE_LEN = $max_panic_size - f.0.len();
|
||||
}
|
||||
}
|
||||
|
||||
core::arch::wasm32::unreachable();
|
||||
}
|
||||
|
||||
#[global_allocator]
|
||||
static ALLOCATOR: $crate::ArenaAllocator<{ $memory_size }> = $crate::ArenaAllocator::new();
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[alloc_error_handler]
|
||||
fn alloc_error(_: core::alloc::Layout) -> ! {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
unsafe {
|
||||
use core::fmt::Write;
|
||||
let mut f = $crate::Write(&mut PANIC_MESSAGE[..]);
|
||||
_ = writeln!(f, "out of memory");
|
||||
PANIC_MESSAGE_LEN = $max_panic_size - f.0.len();
|
||||
}
|
||||
}
|
||||
|
||||
core::arch::wasm32::unreachable()
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(feature = "log")]
|
||||
pub struct Logger;
|
||||
|
||||
#[cfg(feature = "log")]
|
||||
impl log::Log for Logger {
|
||||
fn enabled(&self, _: &log::Metadata) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn log(&self, record: &log::Record) {
|
||||
if self.enabled(record.metadata()) {
|
||||
const MAX_LOG_MESSAGE: usize = 1024 * 8;
|
||||
#[no_mangle]
|
||||
static mut LOG_MESSAGES: [u8; MAX_LOG_MESSAGE] = [0; MAX_LOG_MESSAGE];
|
||||
#[no_mangle]
|
||||
static mut LOG_MESSAGES_LEN: usize = 0;
|
||||
|
||||
unsafe {
|
||||
use core::fmt::Write;
|
||||
let mut f = Write(&mut LOG_MESSAGES[LOG_MESSAGES_LEN..]);
|
||||
_ = writeln!(f, "{}", record.args());
|
||||
LOG_MESSAGES_LEN = MAX_LOG_MESSAGE - f.0.len();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn flush(&self) {}
|
||||
}
|
||||
|
||||
pub struct ArenaAllocator<const SIZE: usize> {
|
||||
arena: UnsafeCell<[u8; SIZE]>,
|
||||
head: UnsafeCell<*mut u8>,
|
||||
}
|
||||
|
||||
impl<const SIZE: usize> ArenaAllocator<SIZE> {
|
||||
#[expect(clippy::new_without_default)]
|
||||
pub const fn new() -> Self {
|
||||
ArenaAllocator {
|
||||
arena: UnsafeCell::new([0; SIZE]),
|
||||
head: UnsafeCell::new(core::ptr::null_mut()),
|
||||
}
|
||||
}
|
||||
|
||||
#[expect(clippy::missing_safety_doc)]
|
||||
pub unsafe fn reset(&self) {
|
||||
(*self.head.get()) = self.arena.get().cast::<u8>().add(SIZE);
|
||||
}
|
||||
|
||||
pub fn used(&self) -> usize {
|
||||
unsafe { self.arena.get() as usize + SIZE - (*self.head.get()) as usize }
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl<const SIZE: usize> Sync for ArenaAllocator<SIZE> {}
|
||||
|
||||
unsafe impl<const SIZE: usize> GlobalAlloc for ArenaAllocator<SIZE> {
|
||||
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
|
||||
let size = layout.size();
|
||||
let align = layout.align();
|
||||
|
||||
let until = self.arena.get() as *mut u8;
|
||||
|
||||
let new_head = (*self.head.get()).sub(size);
|
||||
let aligned_head = (new_head as usize & !(align - 1)) as *mut u8;
|
||||
debug_assert!(aligned_head.is_aligned_to(align));
|
||||
|
||||
if until > aligned_head {
|
||||
return core::ptr::null_mut();
|
||||
}
|
||||
|
||||
*self.head.get() = aligned_head;
|
||||
aligned_head
|
||||
}
|
||||
|
||||
unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
|
||||
/* arena allocator: individual frees are a no-op, memory is reclaimed wholesale by `reset` */
|
||||
}
|
||||
}
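// The allocator above bumps the head downwards, so alignment is a single mask: rounding the
// already-decremented head *down* to a power-of-two `align` moves it toward the arena start and
// never back over the requested size. A standalone sketch of the same arithmetic:
fn align_down(addr: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    addr & !(align - 1)
}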
|
||||
|
||||
pub struct Write<'a>(pub &'a mut [u8]);
|
||||
|
||||
impl core::fmt::Write for Write<'_> {
|
||||
fn write_str(&mut self, s: &str) -> core::fmt::Result {
|
||||
if let Some(m) = self.0.take_mut(..s.len()) {
|
||||
m.copy_from_slice(s.as_bytes());
|
||||
Ok(())
|
||||
} else {
|
||||
Err(core::fmt::Error)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,9 +0,0 @@
|
|||
[package]
|
||||
name = "hbasm"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
paste = "1.0"
|
||||
rhai = "1.16"
|
||||
with_builtin_macros = "0.0.3"
|
|
@ -1,13 +0,0 @@
|
|||
import "hbasm/examples/ableos/std" as std;
|
||||
|
||||
fn main(){
|
||||
std::Error(":+)");
|
||||
std::Warn("Your mom fell in a well!");
|
||||
std::Info("Hello, world!");
|
||||
std::Debug("ABC");
|
||||
std::Trace("Trace Deez");
|
||||
|
||||
tx();
|
||||
}
|
||||
|
||||
main();
|
|
@ -1,24 +0,0 @@
|
|||
fn ipc_send(buffer_id, mem_addr, length){
|
||||
// set the ecall
|
||||
li8(r1, 3);
|
||||
// Set the buffer ID to be the BufferID
|
||||
li64(r2, buffer_id);
|
||||
lra(r3, r0, mem_addr);
|
||||
// set the length
|
||||
li64(r4, length);
|
||||
// ecall
|
||||
eca();
|
||||
}
|
||||
|
||||
private fn log(log_level, string){
|
||||
let str = data::str(string);
|
||||
ipc_send(1, str, str.len);
|
||||
}
|
||||
|
||||
fn Error(string) {log(0, string);}
|
||||
fn Warn(string) {log(1, string);}
|
||||
fn Info(string) {log(2, string);}
|
||||
// Due to Rhai limitations this function cannot be named `debug`;
|
||||
// because of that, all of the log level functions are capitalized.
|
||||
fn Debug(string) {log(3, string);}
|
||||
fn Trace(string) {log(4, string);}
|
|
@ -1,9 +0,0 @@
|
|||
let hello = data::str("Hello, world!");
|
||||
|
||||
li8 (r1, 1); // Write syscall
|
||||
li8 (r2, 1); // Stdout FD
|
||||
lra16 (r3, r0, hello); // String buffer
|
||||
li8 (r4, hello.len); // String length
|
||||
eca (); // System call
|
||||
|
||||
tx (); // End program
|
|
@ -1,33 +0,0 @@
|
|||
li8(r1, 69);
|
||||
li8(r2, 0);
|
||||
|
||||
if_eq(r1, r2,
|
||||
|| puts("Equals!"),
|
||||
|| puts("Not equals!"),
|
||||
);
|
||||
|
||||
|
||||
tx(); // END OF MAIN
|
||||
|
||||
/// Inline function – write text to stdout
|
||||
fn puts(string) {
|
||||
let d = data::str(string);
|
||||
li8 (r1, 1); // Write syscall
|
||||
li8 (r2, 1); // Stdout handle
|
||||
lra16 (r3, r0, d);
|
||||
li64 (r4, d.len);
|
||||
eca ();
|
||||
}
|
||||
|
||||
fn if_eq(a, b, thenblk, elseblk) {
|
||||
let elselbl = declabel();
|
||||
let endlbl = declabel();
|
||||
|
||||
jne(a, b, elselbl);
|
||||
thenblk.call();
|
||||
jmp16(endlbl);
|
||||
|
||||
elselbl.here();
|
||||
elseblk.call();
|
||||
endlbl.here();
|
||||
}
|
|
@ -1,101 +0,0 @@
|
|||
//! Data section inserts
|
||||
|
||||
use {
|
||||
crate::{object::SymbolRef, SharedObject},
|
||||
rhai::{CustomType, Engine, FuncRegistration, ImmutableString, Module},
|
||||
};
|
||||
|
||||
/// Generate insertions for data types
|
||||
///
|
||||
/// `gen_data_instructions!($module, $obj, [$type, …]);`
|
||||
/// - `$module`: Rhai module
|
||||
/// - `$obj`: Code object
|
||||
/// - `$type`: Type of single array item
|
||||
macro_rules! gen_data_insertions {
|
||||
($module:expr, $obj:expr, [$($ty:ident),* $(,)?] $(,)?) => {{
|
||||
let (module, obj) = ($module, $obj);
|
||||
$({
|
||||
// Clone object to each function
|
||||
let obj = ::std::rc::Rc::clone(obj);
|
||||
|
||||
FuncRegistration::new(stringify!($ty))
|
||||
.with_namespace(rhai::FnNamespace::Global)
|
||||
.set_into_module::<_, 1, false, _, true, _>(module, move |arr: ::rhai::Array| {
|
||||
let obj = &mut *obj.borrow_mut();
|
||||
let symbol = obj.symbol($crate::object::Section::Data);
|
||||
|
||||
// Reserve space for object so we don't resize it
|
||||
// all the time
|
||||
obj.sections
|
||||
.data
|
||||
.reserve(arr.len() * ::std::mem::size_of::<$ty>());
|
||||
|
||||
// For every item…
|
||||
for item in arr {
|
||||
// … try to convert from i64 to the desired type
|
||||
// and insert it.
|
||||
obj.sections.data.extend(
|
||||
match item.as_int() {
|
||||
Ok(num) => $ty::try_from(num).map_err(|_| "i64".to_owned()),
|
||||
Err(ty) => Err(ty.to_owned()),
|
||||
}
|
||||
.map_err(|err| {
|
||||
|
||||
::rhai::EvalAltResult::ErrorMismatchDataType(
|
||||
stringify!($ty).to_owned(),
|
||||
err,
|
||||
::rhai::Position::NONE,
|
||||
)
|
||||
})?
|
||||
.to_le_bytes(),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(DataRef {
|
||||
symbol,
|
||||
len: obj.sections.data.len() - symbol.0,
|
||||
})
|
||||
});
|
||||
})*
|
||||
}};
|
||||
}
|
||||
|
||||
/// Reference to entry in data section
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct DataRef {
|
||||
pub symbol: SymbolRef,
|
||||
pub len: usize,
|
||||
}
|
||||
|
||||
impl CustomType for DataRef {
|
||||
fn build(mut builder: rhai::TypeBuilder<Self>) {
|
||||
builder
|
||||
.with_name("DataRef")
|
||||
.with_get("symbol", |this: &mut Self| this.symbol)
|
||||
.with_get("len", |this: &mut Self| this.len as u64 as i64);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn module(engine: &mut Engine, obj: SharedObject) -> Module {
|
||||
let mut module = Module::new();
|
||||
|
||||
gen_data_insertions!(&mut module, &obj, [i8, i16, i32, i64]);
|
||||
|
||||
// Specialisation for strings, they should be
|
||||
// inserted as plain UTF-8 arrays
|
||||
FuncRegistration::new("str")
|
||||
.with_namespace(rhai::FnNamespace::Global)
|
||||
.set_into_module::<_, 1, false, _, true, _>(&mut module, move |s: ImmutableString| {
|
||||
let obj = &mut *obj.borrow_mut();
|
||||
let symbol = obj.symbol(crate::object::Section::Data);
|
||||
|
||||
obj.sections.data.extend(s.as_bytes());
|
||||
Ok(DataRef {
|
||||
symbol,
|
||||
len: s.len(),
|
||||
})
|
||||
});
|
||||
|
||||
engine.build_type::<DataRef>();
|
||||
module
|
||||
}
|
321
hbasm/src/ins.rs
|
@ -1,321 +0,0 @@
|
|||
//! Functions for inserting instructions
|
||||
//!
|
||||
//! Most of the code you see is just metaprogramming stuff.
|
||||
//! This ensures that adding new instructions won't need any
|
||||
//! specific changes and consistent behaviour.
|
||||
//!
|
||||
//! > I tried to comment stuff here, but I have meanwhile forgotten how it works.
|
||||
//!
|
||||
//! — Erin
|
||||
|
||||
use {
|
||||
crate::object::Object,
|
||||
rhai::{FuncRegistration, Module},
|
||||
std::{cell::RefCell, rc::Rc},
|
||||
};
|
||||
|
||||
/// Operand types and their insertions
|
||||
pub mod optypes {
|
||||
use {
|
||||
crate::{
|
||||
label::UnboundLabel,
|
||||
object::{Object, RelocKey, RelocType, SymbolRef},
|
||||
},
|
||||
rhai::{Dynamic, EvalAltResult, ImmutableString, Position},
|
||||
};
|
||||
|
||||
// These types represent operand types to be inserted
|
||||
pub type R = u8;
|
||||
pub type B = i8;
|
||||
pub type H = i16;
|
||||
pub type W = i32;
|
||||
pub type D = i64;
|
||||
|
||||
pub type A = Dynamic;
|
||||
pub type O = Dynamic;
|
||||
pub type P = Dynamic;
|
||||
|
||||
/// Insert relocation into code
|
||||
///
|
||||
/// - If integer, just write it to the code
|
||||
/// - Otherwise insert entry into relocation table
|
||||
/// and fill zeroes
|
||||
pub fn insert_reloc(
|
||||
obj: &mut Object,
|
||||
ty: RelocType,
|
||||
val: &Dynamic,
|
||||
) -> Result<(), EvalAltResult> {
|
||||
match () {
|
||||
// Direct references – insert directly to table
|
||||
_ if val.is::<SymbolRef>() => {
|
||||
obj.relocation(RelocKey::Symbol(val.clone_cast::<SymbolRef>().0), ty)
|
||||
}
|
||||
_ if val.is::<UnboundLabel>() => {
|
||||
obj.relocation(RelocKey::Symbol(val.clone_cast::<UnboundLabel>().0), ty)
|
||||
}
|
||||
_ if val.is::<DataRef>() => {
|
||||
obj.relocation(RelocKey::Symbol(val.clone_cast::<DataRef>().symbol.0), ty)
|
||||
}
|
||||
|
||||
// String (indirect) reference
|
||||
_ if val.is_string() => {
|
||||
obj.relocation(RelocKey::Label(val.clone_cast::<ImmutableString>()), ty)
|
||||
}
|
||||
|
||||
// Manual offset
|
||||
_ if val.is_int() => {
|
||||
let int = val.clone_cast::<i64>();
|
||||
match ty {
|
||||
RelocType::Rel32 => obj.sections.text.extend((int as i32).to_le_bytes()),
|
||||
RelocType::Rel16 => obj.sections.text.extend((int as i16).to_le_bytes()),
|
||||
RelocType::Abs64 => obj.sections.text.extend(int.to_le_bytes()),
|
||||
}
|
||||
}
|
||||
|
||||
_ => {
|
||||
return Err(EvalAltResult::ErrorMismatchDataType(
|
||||
"SymbolRef, UnboundLabel, String or Int".to_owned(),
|
||||
val.type_name().to_owned(),
|
||||
Position::NONE,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Generate macro for inserting item into the output object
|
||||
///
|
||||
/// Pre-defines inserts for absolute address and relative offsets.
|
||||
/// These are inserted with function [`insert_reloc`]
|
||||
/// # le_bytes
|
||||
/// `gen_insert!(le_bytes: [B, …]);`
|
||||
///
|
||||
/// Takes sequence of operand types which should be inserted
|
||||
/// by invoking `to_le_bytes` method on it.
|
||||
macro_rules! gen_insert {
|
||||
(le_bytes: [$($lety:ident),* $(,)?]) => {
|
||||
/// `insert!($thing, $obj, $type)` where
|
||||
/// - `$thing`: Value you want to insert
|
||||
/// - `$obj`: Code object
|
||||
/// - `$type`: Type of inserted value
|
||||
///
|
||||
/// Eg. `insert!(69_u8, obj, B);`
|
||||
macro_rules! insert {
|
||||
$(($thing:expr, $obj: expr, $lety) => {
|
||||
$obj.sections.text.extend($thing.to_le_bytes());
|
||||
};)*
|
||||
|
||||
($thing:expr, $obj:expr, A) => {
|
||||
$crate::ins::optypes::insert_reloc(
|
||||
$obj,
|
||||
$crate::object::RelocType::Abs64,
|
||||
$thing
|
||||
)?
|
||||
};
|
||||
($thing:expr, $obj:expr, O) => {
|
||||
$crate::ins::optypes::insert_reloc(
|
||||
$obj,
|
||||
$crate::object::RelocType::Rel32,
|
||||
$thing
|
||||
)?
|
||||
};
|
||||
($thing:expr, $obj:expr, P) => {
|
||||
$crate::ins::optypes::insert_reloc(
|
||||
$obj,
|
||||
$crate::object::RelocType::Rel16,
|
||||
$thing
|
||||
)?
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
gen_insert!(le_bytes: [R, B, H, W, D]);
|
||||
|
||||
#[allow(clippy::single_component_path_imports)]
|
||||
pub(super) use insert;
|
||||
|
||||
use crate::data::DataRef;
|
||||
}
|
||||
|
||||
/// Rhai Types (types for function parameters as Rhai uses only 64bit signed integers)
|
||||
pub mod rity {
|
||||
pub use super::optypes::{A, O, P, R};
|
||||
pub type B = i64;
|
||||
pub type H = i64;
|
||||
pub type W = i64;
|
||||
pub type D = i64;
|
||||
}
|
||||
|
||||
/// Generic instruction (instruction of certain operands type) inserts
|
||||
pub mod generic {
|
||||
use {crate::object::Object, rhai::EvalAltResult};
|
||||
|
||||
pub(super) fn convert_op<A, B>(from: A) -> Result<B, EvalAltResult>
|
||||
where
|
||||
B: TryFrom<A>,
|
||||
<B as TryFrom<A>>::Error: std::error::Error + Sync + Send + 'static,
|
||||
{
|
||||
B::try_from(from).map_err(|e| {
|
||||
EvalAltResult::ErrorSystem("Data conversion error".to_owned(), Box::new(e))
|
||||
})
|
||||
}
|
||||
|
||||
/// Generate opcode-generic instruction insert macro
|
||||
macro_rules! gen_ins {
|
||||
($($($name:ident : $ty:ty),*;)*) => {
|
||||
paste::paste! {
|
||||
$(
|
||||
/// Instruction-generic opcode insertion function
|
||||
/// - `obj`: Code object
|
||||
/// - `opcode`: opcode, not checked if valid for instruction type
|
||||
/// - … for operands
|
||||
#[inline]
|
||||
pub fn [<$($ty:lower)*>](
|
||||
obj: &mut Object,
|
||||
opcode: u8,
|
||||
$($name: $crate::ins::optypes::$ty),*,
|
||||
) -> Result<(), EvalAltResult> {
|
||||
// Push opcode
|
||||
obj.sections.text.push(opcode);
|
||||
|
||||
// Insert based on type
|
||||
$($crate::ins::optypes::insert!(&$name, obj, $ty);)*
|
||||
Ok(())
|
||||
}
|
||||
)*
|
||||
|
||||
/// Generate Rhai opcode-specific instruction insertion functions
|
||||
///
|
||||
/// `gen_ins_fn!($obj, $opcode, $optype);` where:
|
||||
/// - `$obj`: Code object
|
||||
/// - `$opcode`: Opcode value
|
||||
macro_rules! gen_ins_fn {
|
||||
$(
|
||||
($obj:expr, $opcode:expr, [<$($ty)*>]) => {
|
||||
// Opcode-specific insertion function
|
||||
// - Parameters = operands
|
||||
move |$($name: $crate::ins::rity::$ty),*| {
|
||||
// Invoke generic function
|
||||
$crate::ins::generic::[<$($ty:lower)*>](
|
||||
&mut *$obj.borrow_mut(),
|
||||
$opcode,
|
||||
$(
|
||||
// Convert to desired type (from Rhai-provided values)
|
||||
$crate::ins::generic::convert_op::<
|
||||
_,
|
||||
$crate::ins::optypes::$ty
|
||||
>($name)?
|
||||
),*
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
};
|
||||
|
||||
// Internal-use: count args
|
||||
(@arg_count [<$($ty)*>]) => {
|
||||
{ ["", $(stringify!($ty)),*].len() - 1 }
|
||||
};
|
||||
)*
|
||||
|
||||
// Specialisation for no-operand instructions
|
||||
($obj:expr, $opcode:expr, N) => {
|
||||
move || {
|
||||
$crate::ins::generic::n(&mut *$obj.borrow_mut(), $opcode);
|
||||
Ok(())
|
||||
}
|
||||
};
|
||||
|
||||
// Internal-use specialisation: no-operand instructions
|
||||
(@arg_count N) => {
|
||||
{ 0 }
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Specialisation for no-operand instructions – simply just push opcode
|
||||
#[inline]
|
||||
pub fn n(obj: &mut Object, opcode: u8) {
|
||||
obj.sections.text.push(opcode);
|
||||
}
|
||||
|
||||
// Generate opcode-generic instruction inserters
|
||||
// (operand identifiers are arbitrary)
|
||||
//
|
||||
// New instruction types have to be added manually here
|
||||
gen_ins! {
|
||||
o0: R, o1: R;
|
||||
o0: R, o1: R, o2: R;
|
||||
o0: R, o1: R, o2: R, o3: R;
|
||||
o0: R, o1: R, o2: B;
|
||||
o0: R, o1: R, o2: H;
|
||||
o0: R, o1: R, o2: W;
|
||||
o0: R, o1: R, o2: D;
|
||||
o0: R, o1: B;
|
||||
o0: R, o1: H;
|
||||
o0: R, o1: W;
|
||||
o0: R, o1: D;
|
||||
o0: R, o1: R, o2: A;
|
||||
o0: R, o1: R, o2: A, o3: H;
|
||||
o0: R, o1: R, o2: O, o3: H;
|
||||
o0: R, o1: R, o2: P, o3: H;
|
||||
o0: R, o1: R, o2: O;
|
||||
o0: R, o1: R, o2: P;
|
||||
o0: O;
|
||||
o0: P;
|
||||
}
|
||||
|
||||
#[allow(clippy::single_component_path_imports)]
|
||||
pub(super) use gen_ins_fn;
|
||||
}
|
||||
|
||||
/// Generate instructions from instruction table
|
||||
///
|
||||
/// ```ignore
|
||||
/// instructions!(($module, $obj) {
|
||||
/// // Data from instruction table
|
||||
/// $opcode, $mnemonic, $opty, $doc;
|
||||
/// …
|
||||
/// });
|
||||
/// ```
|
||||
/// - `$module`: Rhai module
|
||||
/// - `$obj`: Code object
|
||||
macro_rules! instructions {
|
||||
(
|
||||
($module:expr, $obj:expr $(,)?)
|
||||
{ $($opcode:expr, $mnemonic:ident, $ops:tt, $doc:literal;)* }
|
||||
) => {{
|
||||
paste::paste! {
|
||||
let (module, obj) = ($module, $obj);
|
||||
$({
|
||||
// Object is shared across all functions
|
||||
let obj = Rc::clone(&obj);
|
||||
|
||||
// Register newly generated function for each instruction
|
||||
FuncRegistration::new(stringify!([<$mnemonic:lower>]))
|
||||
.with_namespace(rhai::FnNamespace::Global)
|
||||
.set_into_module::<_, { generic::gen_ins_fn!(@arg_count $ops) }, false, _, true, _>(
|
||||
module,
|
||||
generic::gen_ins_fn!(
|
||||
obj,
|
||||
$opcode,
|
||||
$ops
|
||||
)
|
||||
);
|
||||
})*
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
/// Set up instruction inserters
|
||||
pub fn setup(module: &mut Module, obj: Rc<RefCell<Object>>) {
|
||||
// Import instructions table and use it for generation
|
||||
with_builtin_macros::with_builtin! {
|
||||
let $spec = include_from_root!("../hbbytecode/instructions.in") in {
|
||||
instructions!((module, obj) { $spec });
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,112 +0,0 @@
|
|||
//! Stuff related to labels
|
||||
|
||||
use {
|
||||
crate::SharedObject,
|
||||
rhai::{Engine, FuncRegistration, ImmutableString, Module},
|
||||
};
|
||||
|
||||
/// Macro for creating functions for Rhai which
|
||||
/// is a bit more friendly
|
||||
///
|
||||
/// ```ignore
|
||||
/// shdm_fns!{
|
||||
/// module: $module;
|
||||
/// shared: $shared => $shname;
|
||||
///
|
||||
/// $vis fn $name($param_name: $param_ty, …) -> $ret { … }
|
||||
/// …
|
||||
/// }
|
||||
/// ```
|
||||
/// - `$module`: Rhai module
|
||||
/// - `$shared`: Data to be shared across the functions
|
||||
/// - `$shname`: The binding name inside functions
|
||||
/// - `$vis`: Function visibility for Rhai
|
||||
/// - Lowercased [`rhai::FnNamespace`] variants
|
||||
/// - `$name`: Function name
|
||||
/// - `$param_name`: Parameter name
|
||||
/// - `$param_ty`: Rust parameter type
|
||||
/// - `$ret`: Optional return type (otherwise infer)
|
||||
macro_rules! shdm_fns {
|
||||
(
|
||||
module: $module:expr;
|
||||
shared: $shared:expr => $shname:ident;
|
||||
|
||||
$(
|
||||
$vis:ident fn $name:ident($($param_name:ident: $param_ty:ty),*) $(-> $ret:ty)? $blk:block
|
||||
)*
|
||||
) => {{
|
||||
let module = $module;
|
||||
let shared = $shared;
|
||||
paste::paste! {
|
||||
$({
|
||||
|
||||
let $shname = SharedObject::clone(&shared);
|
||||
|
||||
FuncRegistration::new(stringify!($name))
|
||||
.with_namespace(rhai::FnNamespace::[<$vis:camel>])
|
||||
.set_into_module::<_, { ["", $(stringify!($param_name)),*].len() - 1 }, false, _, true, _>(
|
||||
module,
|
||||
move |$($param_name: $param_ty),*| $(-> $ret)? {
|
||||
let mut $shname = $shname.borrow_mut();
|
||||
$blk
|
||||
}
|
||||
);
|
||||
})*
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
/// Label without any place bound
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct UnboundLabel(pub usize);
|
||||
|
||||
pub fn setup(engine: &mut Engine, module: &mut Module, object: SharedObject) {
|
||||
shdm_fns! {
|
||||
module: module;
|
||||
shared: object => obj;
|
||||
|
||||
// Insert unnamed label
|
||||
global fn label() {
|
||||
let symbol = obj.symbol(crate::object::Section::Text);
|
||||
Ok(symbol)
|
||||
}
|
||||
|
||||
// Insert string-labeled label
|
||||
global fn label(label: ImmutableString) {
|
||||
let symbol = obj.symbol(crate::object::Section::Text);
|
||||
obj.labels.insert(label, symbol.0);
|
||||
|
||||
Ok(symbol)
|
||||
}
|
||||
|
||||
// Declare unbound label (to be bound later)
|
||||
global fn declabel() {
|
||||
let index = obj.symbols.len();
|
||||
obj.symbols.push(None);
|
||||
|
||||
Ok(UnboundLabel(index))
|
||||
}
|
||||
|
||||
// Declare unbound label (to be bound later)
|
||||
// with string label
|
||||
global fn declabel(label: ImmutableString) {
|
||||
let index = obj.symbols.len();
|
||||
obj.symbols.push(None);
|
||||
obj.labels.insert(label, index);
|
||||
|
||||
Ok(UnboundLabel(index))
|
||||
}
|
||||
|
||||
// Set location for unbound label
|
||||
global fn here(label: UnboundLabel) {
|
||||
obj.symbols[label.0] = Some(crate::object::SymbolEntry {
|
||||
location: crate::object::Section::Text,
|
||||
offset: obj.sections.text.len(),
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
engine.register_type_with_name::<UnboundLabel>("UnboundLabel");
|
||||
}
|
|
@ -1,45 +0,0 @@
|
|||
pub mod data;
|
||||
pub mod ins;
|
||||
pub mod label;
|
||||
pub mod linker;
|
||||
pub mod object;
|
||||
|
||||
use {
|
||||
object::Object,
|
||||
rhai::{Engine, Module},
|
||||
std::{cell::RefCell, rc::Rc},
|
||||
};
|
||||
|
||||
type SharedObject = Rc<RefCell<Object>>;
|
||||
|
||||
pub fn assembler(
|
||||
linkout: &mut impl std::io::Write,
|
||||
loader: impl FnOnce(&mut Engine) -> Result<(), Box<rhai::EvalAltResult>>,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let mut engine = Engine::new();
|
||||
let mut module = Module::new();
|
||||
let obj = Rc::new(RefCell::new(Object::default()));
|
||||
ins::setup(&mut module, Rc::clone(&obj));
|
||||
label::setup(&mut engine, &mut module, Rc::clone(&obj));
|
||||
|
||||
// Registers
|
||||
for n in 0_u8..=255 {
|
||||
module.set_var(format!("r{n}"), n);
|
||||
}
|
||||
|
||||
module.set_native_fn("reg", |n: i64| {
|
||||
Ok(u8::try_from(n).map_err(|_| {
|
||||
rhai::EvalAltResult::ErrorRuntime("Invalid register value".into(), rhai::Position::NONE)
|
||||
})?)
|
||||
});
|
||||
|
||||
module.set_native_fn("as_i64", |n: u8| Ok(n as i64));
|
||||
|
||||
let datamod = Rc::new(data::module(&mut engine, SharedObject::clone(&obj)));
|
||||
engine.register_global_module(Rc::new(module));
|
||||
engine.register_static_module("data", datamod);
|
||||
engine.register_type_with_name::<object::SymbolRef>("SymbolRef");
|
||||
loader(&mut engine)?;
|
||||
linker::link(obj, linkout)?;
|
||||
Ok(())
|
||||
}
|
|
@ -1,47 +0,0 @@
|
|||
//! Simple flat-bytecode linker
|
||||
|
||||
use {
|
||||
crate::{
|
||||
object::{RelocKey, RelocType, Section},
|
||||
SharedObject,
|
||||
},
|
||||
std::io::Write,
|
||||
};
|
||||
|
||||
pub fn link(object: SharedObject, out: &mut impl Write) -> std::io::Result<()> {
|
||||
let obj = &mut *object.borrow_mut();
|
||||
|
||||
// Walk relocation table entries
|
||||
for (&loc, entry) in &obj.relocs {
|
||||
let value = match &entry.key {
|
||||
// Symbol – direct reference
|
||||
RelocKey::Symbol(sym) => obj.symbols[*sym],
|
||||
|
||||
// Label – indirect label reference
|
||||
RelocKey::Label(label) => obj.symbols[obj.labels[label]],
|
||||
}
|
||||
.ok_or_else(|| std::io::Error::other("Invalid symbol"))?;
|
||||
|
||||
let offset = match value.location {
|
||||
// Text section is on the beginning
|
||||
Section::Text => value.offset,
|
||||
|
||||
// Data section follows text section immediately
|
||||
Section::Data => value.offset + obj.sections.text.len(),
|
||||
};
|
||||
|
||||
// Insert address or calculate relative offset
|
||||
match entry.ty {
|
||||
RelocType::Rel32 => obj.sections.text[loc..loc + 4]
|
||||
.copy_from_slice(&((offset as isize - loc as isize) as i32).to_le_bytes()),
|
||||
RelocType::Rel16 => obj.sections.text[loc..loc + 2]
|
||||
.copy_from_slice(&((offset as isize - loc as isize) as i16).to_le_bytes()),
|
||||
RelocType::Abs64 => obj.sections.text[loc..loc + 8]
|
||||
.copy_from_slice(&(offset as isize - loc as isize).to_le_bytes()),
|
||||
}
|
||||
}
|
||||
|
||||
// Write to output
|
||||
out.write_all(&obj.sections.text)?;
|
||||
out.write_all(&obj.sections.data)
|
||||
}
|
|
@ -1,8 +0,0 @@
|
|||
use std::{io::stdout, path::PathBuf};
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let path = PathBuf::from(std::env::args().nth(1).ok_or("Missing path")?);
|
||||
hbasm::assembler(&mut stdout(), |engine| engine.run_file(path))?;
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -1,94 +0,0 @@
|
|||
//! Code object
|
||||
|
||||
use {rhai::ImmutableString, std::collections::HashMap};
|
||||
|
||||
/// Section table
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum Section {
|
||||
Text,
|
||||
Data,
|
||||
}
|
||||
|
||||
/// Symbol entry (in what section, where)
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct SymbolEntry {
|
||||
pub location: Section,
|
||||
pub offset: usize,
|
||||
}
|
||||
|
||||
/// Relocation table key
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum RelocKey {
|
||||
/// Direct reference
|
||||
Symbol(usize),
|
||||
/// Indirect reference
|
||||
Label(ImmutableString),
|
||||
}
|
||||
|
||||
/// Relocation type
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub enum RelocType {
|
||||
Rel32,
|
||||
Rel16,
|
||||
Abs64,
|
||||
}
|
||||
|
||||
/// Relocation table entry
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct RelocEntry {
|
||||
pub key: RelocKey,
|
||||
pub ty: RelocType,
|
||||
}
|
||||
|
||||
/// Object code
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct Sections {
|
||||
pub text: Vec<u8>,
|
||||
pub data: Vec<u8>,
|
||||
}
|
||||
|
||||
/// Object
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct Object {
|
||||
/// Vectors with sections
|
||||
pub sections: Sections,
|
||||
/// Symbol table
|
||||
pub symbols: Vec<Option<SymbolEntry>>,
|
||||
/// Labels to symbols table
|
||||
pub labels: HashMap<ImmutableString, usize>,
|
||||
/// Relocation table
|
||||
pub relocs: HashMap<usize, RelocEntry>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
#[repr(transparent)]
|
||||
pub struct SymbolRef(pub usize);
|
||||
|
||||
impl Object {
|
||||
/// Insert symbol at current location in specified section
|
||||
pub fn symbol(&mut self, section: Section) -> SymbolRef {
|
||||
let section_buf = match section {
|
||||
Section::Text => &mut self.sections.text,
|
||||
Section::Data => &mut self.sections.data,
|
||||
};
|
||||
|
||||
self.symbols.push(Some(SymbolEntry {
|
||||
location: section,
|
||||
offset: section_buf.len(),
|
||||
}));
|
||||
|
||||
SymbolRef(self.symbols.len() - 1)
|
||||
}
|
||||
|
||||
/// Insert to relocation table and write zeroes to code
|
||||
pub fn relocation(&mut self, key: RelocKey, ty: RelocType) {
|
||||
self.relocs
|
||||
.insert(self.sections.text.len(), RelocEntry { key, ty });
|
||||
|
||||
self.sections.text.extend(match ty {
|
||||
RelocType::Rel32 => &[0_u8; 4] as &[u8],
|
||||
RelocType::Rel16 => &[0; 2],
|
||||
RelocType::Abs64 => &[0; 8],
|
||||
});
|
||||
}
|
||||
}
|
|
@ -1,185 +0,0 @@
|
|||
#![no_std]
|
||||
|
||||
use core::convert::TryFrom;
|
||||
|
||||
type OpR = u8;
|
||||
|
||||
type OpA = u64;
|
||||
type OpO = i32;
|
||||
type OpP = i16;
|
||||
|
||||
type OpB = u8;
|
||||
type OpH = u16;
|
||||
type OpW = u32;
|
||||
type OpD = u64;
|
||||
|
||||
/// # Safety
|
||||
/// Has to be valid to be decoded from bytecode.
|
||||
pub unsafe trait BytecodeItem {}
|
||||
macro_rules! define_items {
|
||||
($($name:ident ($($nm:ident: $item:ident),* $(,)?)),* $(,)?) => {
|
||||
$(
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
#[repr(packed)]
|
||||
pub struct $name($(pub $item),*);
|
||||
unsafe impl BytecodeItem for $name {}
|
||||
|
||||
impl Encodable for $name {
|
||||
fn encode(self, _buffer: &mut impl Buffer) {
|
||||
let Self($($nm),*) = self;
|
||||
$(
|
||||
for byte in $nm.to_le_bytes() {
|
||||
unsafe { _buffer.write(byte) };
|
||||
}
|
||||
)*
|
||||
}
|
||||
|
||||
fn encode_len(self) -> usize {
|
||||
core::mem::size_of::<Self>()
|
||||
}
|
||||
}
|
||||
)*
|
||||
};
|
||||
}
|
||||
|
||||
define_items! {
|
||||
OpsRR (a: OpR, b: OpR ),
|
||||
OpsRRR (a: OpR, b: OpR, c: OpR ),
|
||||
OpsRRRR (a: OpR, b: OpR, c: OpR, d: OpR),
|
||||
OpsRRB (a: OpR, b: OpR, c: OpB ),
|
||||
OpsRRH (a: OpR, b: OpR, c: OpH ),
|
||||
OpsRRW (a: OpR, b: OpR, c: OpW ),
|
||||
OpsRRD (a: OpR, b: OpR, c: OpD ),
|
||||
OpsRB (a: OpR, b: OpB ),
|
||||
OpsRH (a: OpR, b: OpH ),
|
||||
OpsRW (a: OpR, b: OpW ),
|
||||
OpsRD (a: OpR, b: OpD ),
|
||||
OpsRRA (a: OpR, b: OpR, c: OpA ),
|
||||
OpsRRAH (a: OpR, b: OpR, c: OpA, d: OpH),
|
||||
OpsRROH (a: OpR, b: OpR, c: OpO, d: OpH),
|
||||
OpsRRPH (a: OpR, b: OpR, c: OpP, d: OpH),
|
||||
OpsRRO (a: OpR, b: OpR, c: OpO ),
|
||||
OpsRRP (a: OpR, b: OpR, c: OpP ),
|
||||
OpsO (a: OpO, ),
|
||||
OpsP (a: OpP, ),
|
||||
OpsN ( ),
|
||||
}
|
||||
|
||||
unsafe impl BytecodeItem for u8 {}
|
||||
|
||||
::with_builtin_macros::with_builtin! {
|
||||
let $spec = include_from_root!("instructions.in") in {
|
||||
/// Invoke macro with bytecode definition
|
||||
///
|
||||
/// # Format
|
||||
/// ```text
|
||||
/// Opcode, Mnemonic, Type, Docstring;
|
||||
/// ```
|
||||
///
|
||||
/// # Type
|
||||
/// ```text
|
||||
/// Types consist of letters meaning a single field
|
||||
/// | Type | Size (B) | Meaning |
|
||||
/// |:-----|:---------|:------------------------|
|
||||
/// | N | 0 | Empty |
|
||||
/// | R | 1 | Register |
|
||||
/// | A | 8 | Absolute address |
|
||||
/// | O | 4 | Relative address offset |
|
||||
/// | P | 2 | Relative address offset |
|
||||
/// | B | 1 | Immediate |
|
||||
/// | H | 2 | Immediate |
|
||||
/// | W | 4 | Immediate |
|
||||
/// | D | 8 | Immediate |
|
||||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! invoke_with_def {
|
||||
($($macro:tt)*) => {
|
||||
$($macro)*! { $spec }
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait Buffer {
|
||||
fn reserve(&mut self, bytes: usize);
|
||||
/// # Safety
|
||||
/// Reserve needs to be called before this function, and only reserved amount can be written.
|
||||
unsafe fn write(&mut self, byte: u8);
|
||||
}
|
||||
|
||||
pub trait Encodable {
|
||||
fn encode(self, buffer: &mut impl Buffer);
|
||||
fn encode_len(self) -> usize;
|
||||
}
|
||||
|
||||
macro_rules! gen_opcodes {
|
||||
($($opcode:expr, $mnemonic:ident, $ty:ident, $doc:literal;)*) => {
|
||||
pub mod opcode {
|
||||
$(
|
||||
#[doc = $doc]
|
||||
pub const $mnemonic: u8 = $opcode;
|
||||
)*
|
||||
|
||||
paste::paste! {
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
#[repr(u8)]
|
||||
pub enum Op { $(
|
||||
[< $mnemonic:lower:camel >](super::[<Ops $ty>]),
|
||||
)* }
|
||||
|
||||
impl Op {
|
||||
pub fn size(&self) -> usize {
|
||||
(match self {
|
||||
$(Self::[<$mnemonic:lower:camel>] { .. } => core::mem::size_of::<super::[<Ops $ty>]>(),)*
|
||||
}) + 1
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::Encodable for Op {
|
||||
fn encode(self, buffer: &mut impl crate::Buffer) {
|
||||
match self {
|
||||
$(
|
||||
Self::[< $mnemonic:lower:camel >](op) => {
|
||||
unsafe { buffer.write($opcode) };
|
||||
op.encode(buffer);
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
fn encode_len(self) -> usize {
|
||||
match self {
|
||||
$(
|
||||
Self::[< $mnemonic:lower:camel >](op) => {
|
||||
1 + crate::Encodable::encode_len(op)
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Rounding mode
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
#[repr(u8)]
|
||||
pub enum RoundingMode {
|
||||
NearestEven = 0,
|
||||
Truncate = 1,
|
||||
Up = 2,
|
||||
Down = 3,
|
||||
}
|
||||
|
||||
impl TryFrom<u8> for RoundingMode {
|
||||
type Error = ();
|
||||
|
||||
fn try_from(value: u8) -> Result<Self, Self::Error> {
|
||||
(value <= 3)
|
||||
.then(|| unsafe { core::mem::transmute(value) })
|
||||
.ok_or(())
|
||||
}
|
||||
}
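// A small sanity check (added as an illustration, not present in the diff) of
// the `TryFrom` conversion above: discriminants 0..=3 map to the four modes
// and anything larger is rejected.
#[cfg(test)]
mod rounding_mode_example {
    use super::RoundingMode;

    #[test]
    fn try_from_covers_all_modes() {
        assert_eq!(RoundingMode::try_from(0), Ok(RoundingMode::NearestEven));
        assert_eq!(RoundingMode::try_from(3), Ok(RoundingMode::Down));
        assert_eq!(RoundingMode::try_from(4), Err(()));
    }
}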
|
||||
|
||||
invoke_with_def!(gen_opcodes);
|
|
@ -1,47 +0,0 @@
|
|||
use std::alloc::Layout;
|
||||
|
||||
use hbvm::mem::{Address, LoadError, Memory, StoreError};
|
||||
|
||||
pub struct HostMemory;
|
||||
impl Memory for HostMemory {
|
||||
#[inline]
|
||||
unsafe fn load(
|
||||
&mut self,
|
||||
addr: Address,
|
||||
target: *mut u8,
|
||||
count: usize,
|
||||
) -> Result<(), LoadError> {
|
||||
unsafe { core::ptr::copy(addr.get() as *const u8, target, count) }
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn store(
|
||||
&mut self,
|
||||
addr: Address,
|
||||
source: *const u8,
|
||||
count: usize,
|
||||
) -> Result<(), StoreError> {
|
||||
unsafe { core::ptr::copy(source, addr.get() as *mut u8, count) }
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn prog_read<T: Copy>(&mut self, addr: Address) -> T {
|
||||
unsafe { core::ptr::read(addr.get() as *const T) }
|
||||
}
|
||||
}
|
||||
|
||||
const STACK_SIZE: usize = 2; // MiB
|
||||
type Stack = [u8; 1024 * 1024 * STACK_SIZE];
|
||||
|
||||
/// Allocate stack of size [`STACK_SIZE`] MiB
|
||||
pub unsafe fn alloc_stack() -> Box<Stack> {
|
||||
let layout = Layout::new::<Stack>();
|
||||
let ptr = unsafe { std::alloc::alloc(layout) };
|
||||
if ptr.is_null() {
|
||||
std::alloc::handle_alloc_error(layout);
|
||||
}
|
||||
|
||||
unsafe { Box::from_raw(ptr.cast()) }
|
||||
}
|
28
lang/Cargo.toml
Normal file
@ -0,0 +1,28 @@
[package]
name = "hblang"
version = "0.1.0"
edition = "2021"

[[bin]]
name = "hbc"
path = "src/main.rs"

[[bin]]
name = "fuzz"
path = "src/fuzz_main.rs"

[dependencies]
hashbrown = { version = "0.15.0", default-features = false, features = ["raw-entry", "allocator-api2"] }
hbbytecode = { workspace = true, features = ["disasm"] }
hbvm = { workspace = true, features = ["nightly"] }
log = "0.4.22"

[dependencies.regalloc2]
git = "https://github.com/jakubDoka/regalloc2"
branch = "reuse-allocations"
default-features = false

[features]
default = ["std", "regalloc2/trace-log"]
std = []
no_log = ["log/max_level_off"]
1510
lang/README.md
Normal file
File diff suppressed because one or more lines are too long
4
lang/command-help.txt
Normal file
@ -0,0 +1,4 @@
--fmt - format all imported source files
--fmt-stdout - don't write the formatted files, print them to stdout instead
--dump-asm - output assembly instead of raw code (the assembly is mostly useful for debugging the compiler)
--threads <1...> - total number of threads the compiler can use [default: 1]
526
lang/src/fmt.rs
Normal file
|
@ -0,0 +1,526 @@
|
|||
use {
|
||||
crate::{
|
||||
lexer::{self, Lexer, TokenKind},
|
||||
parser::{self, CommentOr, CtorField, Expr, Poser, Radix, StructField},
|
||||
},
|
||||
core::fmt::{self},
|
||||
};
|
||||
|
||||
pub fn display_radix(radix: Radix, mut value: u64, buf: &mut [u8; 64]) -> &str {
|
||||
fn conv_radix(d: u8) -> u8 {
|
||||
match d {
|
||||
0..=9 => d + b'0',
|
||||
_ => d - 10 + b'A',
|
||||
}
|
||||
}
|
||||
|
||||
for (i, b) in buf.iter_mut().enumerate().rev() {
|
||||
let d = (value % radix as u64) as u8;
|
||||
value /= radix as u64;
|
||||
*b = conv_radix(d);
|
||||
if value == 0 {
|
||||
return unsafe { core::str::from_utf8_unchecked(&buf[i..]) };
|
||||
}
|
||||
}
|
||||
|
||||
unreachable!()
|
||||
}
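// Example of the helper above (illustrative, not part of the original file):
// the buffer is filled from the back, and the returned slice is its tail.
//
//     let mut buf = [0u8; 64];
//     assert_eq!(display_radix(Radix::Hex, 255, &mut buf), "FF");
//     assert_eq!(display_radix(Radix::Binary, 5, &mut buf), "101");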
|
||||
|
||||
pub fn minify(source: &mut str) -> usize {
|
||||
fn needs_space(c: u8) -> bool {
|
||||
matches!(c, b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | 127..)
|
||||
}
|
||||
|
||||
let mut writer = source.as_mut_ptr();
|
||||
let mut reader = &source[..];
|
||||
let mut prev_needs_whitecpace = false;
|
||||
let mut prev_needs_newline = false;
|
||||
loop {
|
||||
let mut token = lexer::Lexer::new(reader).eat();
|
||||
match token.kind {
|
||||
TokenKind::Eof => break,
|
||||
TokenKind::CtIdent | TokenKind::Directive => token.start -= 1,
|
||||
_ => {}
|
||||
}
|
||||
|
||||
let cpy_len = token.range().len();
|
||||
|
||||
let mut prefix = 0;
|
||||
if prev_needs_whitecpace && needs_space(reader.as_bytes()[token.start as usize]) {
|
||||
prefix = b' ';
|
||||
debug_assert!(token.start != 0, "{reader}");
|
||||
}
|
||||
prev_needs_whitecpace = needs_space(reader.as_bytes()[token.end as usize - 1]);
|
||||
|
||||
let inbetween_new_lines =
|
||||
reader[..token.start as usize].bytes().filter(|&b| b == b'\n').count()
|
||||
+ token.kind.precedence().is_some() as usize;
|
||||
let extra_prefix_new_lines = if inbetween_new_lines > 1 {
|
||||
1 + token.kind.precedence().is_none() as usize
|
||||
} else {
|
||||
prev_needs_newline as usize
|
||||
};
|
||||
|
||||
if token.kind == TokenKind::Comment && reader.as_bytes()[token.end as usize - 1] != b'/' {
|
||||
prev_needs_newline = true;
|
||||
prev_needs_whitecpace = false;
|
||||
} else {
|
||||
prev_needs_newline = false;
|
||||
}
|
||||
|
||||
let sstr = reader[token.start as usize..].as_ptr();
|
||||
reader = &reader[token.end as usize..];
|
||||
unsafe {
|
||||
if extra_prefix_new_lines != 0 {
|
||||
for _ in 0..extra_prefix_new_lines {
|
||||
writer.write(b'\n');
|
||||
writer = writer.add(1);
|
||||
}
|
||||
} else if prefix != 0 {
|
||||
writer.write(prefix);
|
||||
writer = writer.add(1);
|
||||
}
|
||||
writer.copy_from(sstr, cpy_len);
|
||||
writer = writer.add(cpy_len);
|
||||
}
|
||||
}
|
||||
|
||||
unsafe { writer.sub_ptr(source.as_mut_ptr()) }
|
||||
}
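// Usage sketch (an illustration; it mirrors how the formatter tests later in
// this file call it): `minify` rewrites `source` in place and returns the new
// length, so callers truncate afterwards.
//
//     let mut src = String::from("main := fn(): void {\n\treturn\n}");
//     let len = minify(&mut src);
//     src.truncate(len);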
|
||||
|
||||
pub struct Formatter<'a> {
|
||||
source: &'a str,
|
||||
depth: usize,
|
||||
}
|
||||
|
||||
// we exclusively use `write_str` to reduce bloat
|
||||
impl<'a> Formatter<'a> {
|
||||
pub fn new(source: &'a str) -> Self {
|
||||
Self { source, depth: 0 }
|
||||
}
|
||||
|
||||
fn fmt_list<T: Poser, F: core::fmt::Write>(
|
||||
&mut self,
|
||||
f: &mut F,
|
||||
trailing: bool,
|
||||
end: &str,
|
||||
sep: &str,
|
||||
list: &[T],
|
||||
fmt: impl Fn(&mut Self, &T, &mut F) -> fmt::Result,
|
||||
) -> fmt::Result {
|
||||
self.fmt_list_low(f, trailing, end, sep, list, |s, v, f| {
|
||||
fmt(s, v, f)?;
|
||||
Ok(true)
|
||||
})
|
||||
}
|
||||
|
||||
fn fmt_list_low<T: Poser, F: core::fmt::Write>(
|
||||
&mut self,
|
||||
f: &mut F,
|
||||
trailing: bool,
|
||||
end: &str,
|
||||
sep: &str,
|
||||
list: &[T],
|
||||
fmt: impl Fn(&mut Self, &T, &mut F) -> Result<bool, fmt::Error>,
|
||||
) -> fmt::Result {
|
||||
if !trailing {
|
||||
let mut first = true;
|
||||
for expr in list {
|
||||
if !core::mem::take(&mut first) {
|
||||
f.write_str(sep)?;
|
||||
f.write_str(" ")?;
|
||||
}
|
||||
first = !fmt(self, expr, f)?;
|
||||
}
|
||||
return f.write_str(end);
|
||||
}
|
||||
|
||||
writeln!(f)?;
|
||||
self.depth += 1;
|
||||
let res = (|| {
|
||||
for (i, stmt) in list.iter().enumerate() {
|
||||
for _ in 0..self.depth {
|
||||
f.write_str("\t")?;
|
||||
}
|
||||
let add_sep = fmt(self, stmt, f)?;
|
||||
if add_sep {
|
||||
f.write_str(sep)?;
|
||||
}
|
||||
if let Some(expr) = list.get(i + 1)
|
||||
&& let Some(rest) = self.source.get(expr.posi() as usize..)
|
||||
{
|
||||
if insert_needed_semicolon(rest) {
|
||||
f.write_str(";")?;
|
||||
}
|
||||
if preserve_newlines(&self.source[..expr.posi() as usize]) > 1 {
|
||||
f.write_str("\n")?;
|
||||
}
|
||||
}
|
||||
if add_sep {
|
||||
f.write_str("\n")?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
})();
|
||||
self.depth -= 1;
|
||||
|
||||
for _ in 0..self.depth {
|
||||
f.write_str("\t")?;
|
||||
}
|
||||
f.write_str(end)?;
|
||||
res
|
||||
}
|
||||
|
||||
fn fmt_paren<F: core::fmt::Write>(
|
||||
&mut self,
|
||||
expr: &Expr,
|
||||
f: &mut F,
|
||||
cond: impl FnOnce(&Expr) -> bool,
|
||||
) -> fmt::Result {
|
||||
if cond(expr) {
|
||||
f.write_str("(")?;
|
||||
self.fmt(expr, f)?;
|
||||
f.write_str(")")
|
||||
} else {
|
||||
self.fmt(expr, f)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fmt<F: core::fmt::Write>(&mut self, expr: &Expr, f: &mut F) -> fmt::Result {
|
||||
macro_rules! impl_parenter {
|
||||
($($name:ident => $pat:pat,)*) => {
|
||||
$(
|
||||
let $name = |e: &Expr| matches!(e, $pat);
|
||||
)*
|
||||
};
|
||||
}
|
||||
|
||||
impl_parenter! {
|
||||
unary => Expr::BinOp { .. },
|
||||
postfix => Expr::UnOp { .. } | Expr::BinOp { .. },
|
||||
consecutive => Expr::UnOp { .. },
|
||||
}
|
||||
|
||||
match *expr {
|
||||
Expr::Ct { value, .. } => {
|
||||
f.write_str("$: ")?;
|
||||
self.fmt(value, f)
|
||||
}
|
||||
Expr::String { literal, .. } => f.write_str(literal),
|
||||
Expr::Comment { literal, .. } => f.write_str(literal),
|
||||
Expr::Mod { path, .. } => write!(f, "@use(\"{path}\")"),
|
||||
Expr::Embed { path, .. } => write!(f, "@embed(\"{path}\")"),
|
||||
Expr::Field { target, name: field, .. } => {
|
||||
self.fmt_paren(target, f, postfix)?;
|
||||
f.write_str(".")?;
|
||||
f.write_str(field)
|
||||
}
|
||||
Expr::Directive { name, args, .. } => {
|
||||
f.write_str("@")?;
|
||||
f.write_str(name)?;
|
||||
f.write_str("(")?;
|
||||
self.fmt_list(f, false, ")", ",", args, Self::fmt)
|
||||
}
|
||||
Expr::Struct { fields, trailing_comma, packed, .. } => {
|
||||
if packed {
|
||||
f.write_str("packed ")?;
|
||||
}
|
||||
|
||||
write!(f, "struct {{")?;
|
||||
self.fmt_list_low(f, trailing_comma, "}", ",", fields, |s, field, f| {
|
||||
match field {
|
||||
CommentOr::Or(StructField { name, ty, .. }) => {
|
||||
f.write_str(name)?;
|
||||
f.write_str(": ")?;
|
||||
s.fmt(ty, f)?
|
||||
}
|
||||
CommentOr::Comment { literal, .. } => {
|
||||
f.write_str(literal)?;
|
||||
f.write_str("\n")?;
|
||||
}
|
||||
}
|
||||
Ok(field.or().is_some())
|
||||
})
|
||||
}
|
||||
Expr::Ctor { ty, fields, trailing_comma, .. } => {
|
||||
if let Some(ty) = ty {
|
||||
self.fmt_paren(ty, f, unary)?;
|
||||
}
|
||||
f.write_str(".{")?;
|
||||
self.fmt_list(
|
||||
f,
|
||||
trailing_comma,
|
||||
"}",
|
||||
",",
|
||||
fields,
|
||||
|s: &mut Self, CtorField { name, value, .. }: &_, f| {
|
||||
f.write_str(name)?;
|
||||
if !matches!(value, &Expr::Ident { id, .. } if *name == &self.source[id.range()]) {
|
||||
f.write_str(": ")?;
|
||||
s.fmt(value, f)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
}
|
||||
Expr::Tupl {
|
||||
pos,
|
||||
ty: Some(&Expr::Slice { pos: spos, size: Some(&Expr::Number { value, .. }), item }),
|
||||
fields,
|
||||
trailing_comma,
|
||||
} if value as usize == fields.len() => self.fmt(
|
||||
&Expr::Tupl {
|
||||
pos,
|
||||
ty: Some(&Expr::Slice { pos: spos, size: None, item }),
|
||||
fields,
|
||||
trailing_comma,
|
||||
},
|
||||
f,
|
||||
),
|
||||
Expr::Tupl { ty, fields, trailing_comma, .. } => {
|
||||
if let Some(ty) = ty {
|
||||
self.fmt_paren(ty, f, unary)?;
|
||||
}
|
||||
f.write_str(".(")?;
|
||||
self.fmt_list(f, trailing_comma, ")", ",", fields, Self::fmt)
|
||||
}
|
||||
Expr::Slice { item, size, .. } => {
|
||||
f.write_str("[")?;
|
||||
self.fmt(item, f)?;
|
||||
if let Some(size) = size {
|
||||
f.write_str("; ")?;
|
||||
self.fmt(size, f)?;
|
||||
}
|
||||
f.write_str("]")
|
||||
}
|
||||
Expr::Index { base, index } => {
|
||||
self.fmt(base, f)?;
|
||||
f.write_str("[")?;
|
||||
self.fmt(index, f)?;
|
||||
f.write_str("]")
|
||||
}
|
||||
Expr::UnOp { op, val, .. } => {
|
||||
f.write_str(op.name())?;
|
||||
self.fmt_paren(val, f, unary)
|
||||
}
|
||||
Expr::Break { .. } => f.write_str("break"),
|
||||
Expr::Continue { .. } => f.write_str("continue"),
|
||||
Expr::If { cond, then, else_, .. } => {
|
||||
f.write_str("if ")?;
|
||||
self.fmt(cond, f)?;
|
||||
f.write_str(" ")?;
|
||||
self.fmt_paren(then, f, consecutive)?;
|
||||
if let Some(e) = else_ {
|
||||
f.write_str(" else ")?;
|
||||
self.fmt(e, f)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Expr::Loop { body, .. } => {
|
||||
f.write_str("loop ")?;
|
||||
self.fmt(body, f)
|
||||
}
|
||||
Expr::Closure { ret, body, args, .. } => {
|
||||
f.write_str("fn(")?;
|
||||
self.fmt_list(f, false, "", ",", args, |s, arg, f| {
|
||||
if arg.is_ct {
|
||||
f.write_str("$")?;
|
||||
}
|
||||
f.write_str(arg.name)?;
|
||||
f.write_str(": ")?;
|
||||
s.fmt(&arg.ty, f)
|
||||
})?;
|
||||
f.write_str("): ")?;
|
||||
self.fmt(ret, f)?;
|
||||
f.write_str(" ")?;
|
||||
self.fmt_paren(body, f, consecutive)?;
|
||||
Ok(())
|
||||
}
|
||||
Expr::Call { func, args, trailing_comma } => {
|
||||
self.fmt_paren(func, f, postfix)?;
|
||||
f.write_str("(")?;
|
||||
self.fmt_list(f, trailing_comma, ")", ",", args, Self::fmt)
|
||||
}
|
||||
Expr::Return { val: Some(val), .. } => {
|
||||
f.write_str("return ")?;
|
||||
self.fmt(val, f)
|
||||
}
|
||||
Expr::Return { val: None, .. } => f.write_str("return"),
|
||||
Expr::Wildcard { .. } => f.write_str("_"),
|
||||
Expr::Ident { pos, is_ct, .. } => {
|
||||
if is_ct {
|
||||
f.write_str("$")?;
|
||||
}
|
||||
f.write_str(&self.source[Lexer::restore(self.source, pos).eat().range()])
|
||||
}
|
||||
Expr::Block { stmts, .. } => {
|
||||
f.write_str("{")?;
|
||||
self.fmt_list(f, true, "}", "", stmts, Self::fmt)
|
||||
}
|
||||
Expr::Number { value, radix, .. } => {
|
||||
f.write_str(match radix {
|
||||
Radix::Decimal => "",
|
||||
Radix::Hex => "0x",
|
||||
Radix::Octal => "0o",
|
||||
Radix::Binary => "0b",
|
||||
})?;
|
||||
let mut buf = [0u8; 64];
|
||||
f.write_str(display_radix(radix, value as u64, &mut buf))
|
||||
}
|
||||
Expr::Float { pos, .. } => {
|
||||
f.write_str(&self.source[Lexer::restore(self.source, pos).eat().range()])
|
||||
}
|
||||
Expr::Bool { value, .. } => f.write_str(if value { "true" } else { "false" }),
|
||||
Expr::Idk { .. } => f.write_str("idk"),
|
||||
Expr::Die { .. } => f.write_str("die"),
|
||||
Expr::Null { .. } => f.write_str("null"),
|
||||
Expr::BinOp {
|
||||
left,
|
||||
op: TokenKind::Assign,
|
||||
right: &Expr::BinOp { left: lleft, op, right, .. },
|
||||
..
|
||||
} if left.pos() == lleft.pos() => {
|
||||
self.fmt(left, f)?;
|
||||
f.write_str(" ")?;
|
||||
f.write_str(op.name())?;
|
||||
f.write_str("= ")?;
|
||||
self.fmt(right, f)
|
||||
}
|
||||
Expr::BinOp { right, op, left, .. } => {
|
||||
let prec_miss_left = |e: &Expr| {
|
||||
matches!(
|
||||
e, Expr::BinOp { op: lop, .. } if op.precedence() > lop.precedence()
|
||||
)
|
||||
};
|
||||
let prec_miss_right = |e: &Expr| {
|
||||
matches!(
|
||||
e, Expr::BinOp { op: lop, .. }
|
||||
if (op.precedence() == lop.precedence() && !op.is_comutative())
|
||||
|| op.precedence() > lop.precedence()
|
||||
)
|
||||
};
|
||||
|
||||
self.fmt_paren(left, f, prec_miss_left)?;
|
||||
if let Some(mut prev) = self.source.get(..right.pos() as usize) {
|
||||
prev = prev.trim_end();
|
||||
let estimate_bound =
|
||||
prev.rfind(|c: char| c.is_ascii_whitespace()).map_or(prev.len(), |i| i + 1);
|
||||
let exact_bound = lexer::Lexer::new(&prev[estimate_bound..]).last().start;
|
||||
prev = &prev[..exact_bound as usize + estimate_bound];
|
||||
if preserve_newlines(prev) > 0 {
|
||||
f.write_str("\n")?;
|
||||
for _ in 0..self.depth + 1 {
|
||||
f.write_str("\t")?;
|
||||
}
|
||||
f.write_str(op.name())?;
|
||||
f.write_str(" ")?;
|
||||
} else {
|
||||
f.write_str(" ")?;
|
||||
f.write_str(op.name())?;
|
||||
f.write_str(" ")?;
|
||||
}
|
||||
} else {
|
||||
f.write_str(" ")?;
|
||||
f.write_str(op.name())?;
|
||||
f.write_str(" ")?;
|
||||
}
|
||||
self.fmt_paren(right, f, prec_miss_right)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn preserve_newlines(source: &str) -> usize {
|
||||
source[source.trim_end().len()..].bytes().filter(|&c| c == b'\n').count()
|
||||
}
|
||||
|
||||
pub fn insert_needed_semicolon(source: &str) -> bool {
|
||||
let kind = lexer::Lexer::new(source).eat().kind;
|
||||
kind.precedence().is_some() || matches!(kind, TokenKind::Ctor | TokenKind::Tupl)
|
||||
}
|
||||
|
||||
impl core::fmt::Display for parser::Ast {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt_file(self.exprs(), &self.file, f)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fmt_file(exprs: &[Expr], file: &str, f: &mut impl fmt::Write) -> fmt::Result {
|
||||
for (i, expr) in exprs.iter().enumerate() {
|
||||
Formatter::new(file).fmt(expr, f)?;
|
||||
if let Some(expr) = exprs.get(i + 1)
|
||||
&& let Some(rest) = file.get(expr.pos() as usize..)
|
||||
{
|
||||
if insert_needed_semicolon(rest) {
|
||||
write!(f, ";")?;
|
||||
}
|
||||
|
||||
if preserve_newlines(&file[..expr.pos() as usize]) > 1 {
|
||||
writeln!(f)?;
|
||||
}
|
||||
}
|
||||
|
||||
if i + 1 != exprs.len() {
|
||||
writeln!(f)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub mod test {
|
||||
use {
|
||||
crate::parser::{self, Ctx},
|
||||
alloc::borrow::ToOwned,
|
||||
std::{fmt::Write, string::String},
|
||||
};
|
||||
|
||||
pub fn format(ident: &str, input: &str) {
|
||||
let mut minned = input.to_owned();
|
||||
let len = crate::fmt::minify(&mut minned);
|
||||
minned.truncate(len);
|
||||
|
||||
let mut ctx = Ctx::default();
|
||||
let ast = parser::Ast::new(ident, minned, &mut ctx, &mut parser::no_loader);
|
||||
let mut output = String::new();
|
||||
write!(output, "{ast}").unwrap();
|
||||
|
||||
let input_path = format!("formatter_{ident}.expected");
|
||||
let output_path = format!("formatter_{ident}.actual");
|
||||
std::fs::write(&input_path, input).unwrap();
|
||||
std::fs::write(&output_path, output).unwrap();
|
||||
|
||||
let success = std::process::Command::new("diff")
|
||||
.arg("-u")
|
||||
.arg("--color")
|
||||
.arg(&input_path)
|
||||
.arg(&output_path)
|
||||
.status()
|
||||
.unwrap()
|
||||
.success();
|
||||
std::fs::remove_file(&input_path).unwrap();
|
||||
std::fs::remove_file(&output_path).unwrap();
|
||||
assert!(success, "test failed");
|
||||
}
|
||||
|
||||
macro_rules! test {
|
||||
($($name:ident => $input:expr;)*) => {$(
|
||||
#[test]
|
||||
fn $name() {
|
||||
format(stringify!($name), $input);
|
||||
}
|
||||
)*};
|
||||
}
|
||||
|
||||
test! {
|
||||
comments => "// comment\n// comment\n\n// comment\n\n\
|
||||
/* comment */\n/* comment */\n\n/* comment */";
|
||||
some_ordinary_code => "loft := fn(): int return loft(1, 2, 3)";
|
||||
some_arg_per_line_code => "loft := fn(): int return loft(\
|
||||
\n\t1,\n\t2,\n\t3,\n)";
|
||||
some_ordinary_struct => "loft := fn(): int return loft.{a: 1, b: 2}";
|
||||
some_ordinary_fild_per_lin_struct => "loft := fn(): int return loft.{\
|
||||
\n\ta: 1,\n\tb: 2,\n}";
|
||||
code_block => "loft := fn(): int {\n\tloft()\n\treturn 1\n}";
|
||||
}
|
||||
}
|
372
lang/src/fs.rs
Normal file
|
@ -0,0 +1,372 @@
|
|||
use {
|
||||
crate::{
|
||||
parser::{Ast, Ctx, FileKind},
|
||||
son::{self, hbvm::HbvmBackend},
|
||||
ty,
|
||||
},
|
||||
alloc::{string::String, vec::Vec},
|
||||
core::{fmt::Write, num::NonZeroUsize, ops::Deref},
|
||||
hashbrown::hash_map,
|
||||
std::{
|
||||
collections::VecDeque,
|
||||
eprintln,
|
||||
ffi::OsStr,
|
||||
io::{self, Write as _},
|
||||
path::{Path, PathBuf},
|
||||
string::ToString,
|
||||
sync::Mutex,
|
||||
},
|
||||
};
|
||||
|
||||
pub struct Logger;
|
||||
|
||||
impl log::Log for Logger {
|
||||
fn enabled(&self, _: &log::Metadata) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn log(&self, record: &log::Record) {
|
||||
if self.enabled(record.metadata()) {
|
||||
eprintln!("{}", record.args())
|
||||
}
|
||||
}
|
||||
|
||||
fn flush(&self) {}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Options {
|
||||
pub fmt: bool,
|
||||
pub fmt_stdout: bool,
|
||||
pub dump_asm: bool,
|
||||
pub extra_threads: usize,
|
||||
}
|
||||
|
||||
impl Options {
|
||||
pub fn from_args(args: &[&str], out: &mut Vec<u8>) -> std::io::Result<Self> {
|
||||
if args.contains(&"--help") || args.contains(&"-h") {
|
||||
writeln!(out, "Usage: hbc [OPTIONS...] <FILE>")?;
|
||||
writeln!(out, include_str!("../command-help.txt"))?;
|
||||
return Err(std::io::ErrorKind::Other.into());
|
||||
}
|
||||
|
||||
Ok(Options {
|
||||
fmt: args.contains(&"--fmt"),
|
||||
fmt_stdout: args.contains(&"--fmt-stdout"),
|
||||
dump_asm: args.contains(&"--dump-asm"),
|
||||
extra_threads: args
|
||||
.iter()
|
||||
.position(|&a| a == "--threads")
|
||||
.map(|i| {
|
||||
args[i + 1].parse::<NonZeroUsize>().map_err(|e| {
|
||||
writeln!(out, "--threads expects a non-zero integer: {e}")
|
||||
.err()
|
||||
.unwrap_or(std::io::ErrorKind::Other.into())
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.map_or(1, NonZeroUsize::get)
|
||||
- 1,
|
||||
})
|
||||
}
|
||||
}
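// Usage sketch (illustrative; the real wiring lives in `main.rs` later in this
// diff): parse the flags out of argv, then hand the remaining path on to
// `run_compiler`. The argument list below is made up.
//
//     let mut out = Vec::new();
//     let opts = Options::from_args(&["hbc", "--fmt", "main.hb"], &mut out)?;
//     assert!(opts.fmt && opts.extra_threads == 0);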
|
||||
|
||||
pub fn run_compiler(root_file: &str, options: Options, out: &mut Vec<u8>) -> std::io::Result<()> {
|
||||
let parsed = parse_from_fs(options.extra_threads, root_file)?;
|
||||
|
||||
if (options.fmt || options.fmt_stdout) && !parsed.errors.is_empty() {
|
||||
*out = parsed.errors.into_bytes();
|
||||
return Err(std::io::Error::other("fmt fialed (errors are in out)"));
|
||||
}
|
||||
|
||||
if options.fmt {
|
||||
let mut output = String::new();
|
||||
for ast in parsed.ast {
|
||||
write!(output, "{ast}").unwrap();
|
||||
if ast.file.deref().trim() != output.as_str().trim() {
|
||||
std::fs::write(&*ast.path, &output)?;
|
||||
}
|
||||
output.clear();
|
||||
}
|
||||
} else if options.fmt_stdout {
|
||||
write!(out, "{}", &parsed.ast[0])?;
|
||||
} else {
|
||||
let mut backend = HbvmBackend::default();
|
||||
let mut ctx = crate::son::CodegenCtx::default();
|
||||
*ctx.parser.errors.get_mut() = parsed.errors;
|
||||
let mut codegen = son::Codegen::new(&mut backend, &parsed.ast, &mut ctx);
|
||||
|
||||
codegen.push_embeds(parsed.embeds);
|
||||
codegen.generate(ty::Module::MAIN);
|
||||
|
||||
if !codegen.errors.borrow().is_empty() {
|
||||
drop(codegen);
|
||||
*out = ctx.parser.errors.into_inner().into_bytes();
|
||||
return Err(std::io::Error::other("compilation faoled (errors are in out)"));
|
||||
}
|
||||
|
||||
codegen.assemble(out);
|
||||
|
||||
if options.dump_asm {
|
||||
let mut disasm = String::new();
|
||||
codegen.disasm(&mut disasm, out).map_err(|e| io::Error::other(e.to_string()))?;
|
||||
*out = disasm.into_bytes();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct TaskQueue<T> {
|
||||
inner: Mutex<TaskQueueInner<T>>,
|
||||
}
|
||||
|
||||
impl<T> TaskQueue<T> {
|
||||
fn new(max_waiters: usize) -> Self {
|
||||
Self { inner: Mutex::new(TaskQueueInner::new(max_waiters)) }
|
||||
}
|
||||
|
||||
pub fn push(&self, message: T) {
|
||||
self.extend([message]);
|
||||
}
|
||||
|
||||
pub fn extend(&self, messages: impl IntoIterator<Item = T>) {
|
||||
self.inner.lock().unwrap().push(messages);
|
||||
}
|
||||
|
||||
pub fn pop(&self) -> Option<T> {
|
||||
TaskQueueInner::pop(&self.inner)
|
||||
}
|
||||
}
|
||||
|
||||
enum TaskSlot<T> {
|
||||
Waiting,
|
||||
Delivered(T),
|
||||
Closed,
|
||||
}
|
||||
|
||||
struct TaskQueueInner<T> {
|
||||
max_waiters: usize,
|
||||
messages: VecDeque<T>,
|
||||
parked: VecDeque<(*mut TaskSlot<T>, std::thread::Thread)>,
|
||||
}
|
||||
|
||||
unsafe impl<T: Send> Send for TaskQueueInner<T> {}
|
||||
unsafe impl<T: Send + Sync> Sync for TaskQueueInner<T> {}
|
||||
|
||||
impl<T> TaskQueueInner<T> {
|
||||
fn new(max_waiters: usize) -> Self {
|
||||
Self { max_waiters, messages: Default::default(), parked: Default::default() }
|
||||
}
|
||||
|
||||
fn push(&mut self, messages: impl IntoIterator<Item = T>) {
|
||||
for msg in messages {
|
||||
if let Some((dest, thread)) = self.parked.pop_front() {
|
||||
unsafe { *dest = TaskSlot::Delivered(msg) };
|
||||
thread.unpark();
|
||||
} else {
|
||||
self.messages.push_back(msg);
|
||||
}
|
||||
}
|
||||
}
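// Comment added for clarity (logic unchanged): `pop` below first drains any
// queued message; when the last worker is about to park
// (`max_waiters == parked.len() + 1`) it instead wakes every parked thread
// with `TaskSlot::Closed`, so all pending `pop` calls return `None` and the
// thread pool can wind down once the work runs out.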
|
||||
|
||||
fn pop(s: &Mutex<Self>) -> Option<T> {
|
||||
let mut res = TaskSlot::Waiting;
|
||||
{
|
||||
let mut s = s.lock().unwrap();
|
||||
if let Some(msg) = s.messages.pop_front() {
|
||||
return Some(msg);
|
||||
}
|
||||
|
||||
if s.max_waiters == s.parked.len() + 1 {
|
||||
for (dest, thread) in s.parked.drain(..) {
|
||||
unsafe { *dest = TaskSlot::Closed };
|
||||
thread.unpark();
|
||||
}
|
||||
return None;
|
||||
}
|
||||
|
||||
s.parked.push_back((&mut res, std::thread::current()));
|
||||
}
|
||||
|
||||
loop {
|
||||
std::thread::park();
|
||||
|
||||
let _s = s.lock().unwrap();
|
||||
match core::mem::replace(&mut res, TaskSlot::Waiting) {
|
||||
TaskSlot::Delivered(msg) => return Some(msg),
|
||||
TaskSlot::Closed => return None,
|
||||
TaskSlot::Waiting => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Loaded {
|
||||
ast: Vec<Ast>,
|
||||
embeds: Vec<Vec<u8>>,
|
||||
errors: String,
|
||||
}
|
||||
|
||||
pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Loaded> {
|
||||
fn resolve(path: &str, from: &str, tmp: &mut PathBuf) -> Result<PathBuf, CantLoadFile> {
|
||||
tmp.clear();
|
||||
match Path::new(from).parent() {
|
||||
Some(parent) => tmp.extend([parent, Path::new(path)]),
|
||||
None => tmp.push(path),
|
||||
};
|
||||
|
||||
tmp.canonicalize().map_err(|source| CantLoadFile { path: std::mem::take(tmp), source })
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct CantLoadFile {
|
||||
path: PathBuf,
|
||||
source: io::Error,
|
||||
}
|
||||
|
||||
impl core::fmt::Display for CantLoadFile {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
|
||||
write!(f, "can't load file: {}", display_rel_path(&self.path),)
|
||||
}
|
||||
}
|
||||
|
||||
impl core::error::Error for CantLoadFile {
|
||||
fn source(&self) -> Option<&(dyn core::error::Error + 'static)> {
|
||||
Some(&self.source)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CantLoadFile> for io::Error {
|
||||
fn from(e: CantLoadFile) -> Self {
|
||||
io::Error::new(io::ErrorKind::InvalidData, e)
|
||||
}
|
||||
}
|
||||
|
||||
type Task = (usize, PathBuf);
|
||||
|
||||
let seen_modules = Mutex::new(crate::HashMap::<PathBuf, usize>::default());
|
||||
let seen_embeds = Mutex::new(crate::HashMap::<PathBuf, usize>::default());
|
||||
let tasks = TaskQueue::<Task>::new(extra_threads + 1);
|
||||
let ast = Mutex::new(Vec::<io::Result<Ast>>::new());
|
||||
let embeds = Mutex::new(Vec::<Vec<u8>>::new());
|
||||
|
||||
let loader = |path: &str, from: &str, kind: FileKind, tmp: &mut _| {
|
||||
let mut physiscal_path = resolve(path, from, tmp)?;
|
||||
|
||||
match kind {
|
||||
FileKind::Module => {
|
||||
let id = {
|
||||
let mut seen = seen_modules.lock().unwrap();
|
||||
let len = seen.len();
|
||||
match seen.entry(physiscal_path) {
|
||||
hash_map::Entry::Occupied(entry) => {
|
||||
return Ok(*entry.get());
|
||||
}
|
||||
hash_map::Entry::Vacant(entry) => {
|
||||
physiscal_path = entry.insert_entry(len as _).key().clone();
|
||||
len
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if !physiscal_path.exists() {
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::NotFound,
|
||||
format!("can't find file: {}", display_rel_path(&physiscal_path)),
|
||||
));
|
||||
}
|
||||
|
||||
tasks.push((id, physiscal_path));
|
||||
Ok(id)
|
||||
}
|
||||
FileKind::Embed => {
|
||||
let id = {
|
||||
let mut seen = seen_embeds.lock().unwrap();
|
||||
let len = seen.len();
|
||||
match seen.entry(physiscal_path) {
|
||||
hash_map::Entry::Occupied(entry) => {
|
||||
return Ok(*entry.get());
|
||||
}
|
||||
hash_map::Entry::Vacant(entry) => {
|
||||
physiscal_path = entry.insert_entry(len as _).key().clone();
|
||||
len
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let content = std::fs::read(&physiscal_path).map_err(|e| {
|
||||
io::Error::new(
|
||||
e.kind(),
|
||||
format!(
|
||||
"can't load embed file: {}: {e}",
|
||||
display_rel_path(&physiscal_path)
|
||||
),
|
||||
)
|
||||
})?;
|
||||
let mut embeds = embeds.lock().unwrap();
|
||||
if id as usize >= embeds.len() {
|
||||
embeds.resize(id as usize + 1, Default::default());
|
||||
}
|
||||
embeds[id as usize] = content;
|
||||
Ok(id)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let execute_task = |ctx: &mut _, (_, path): Task, tmp: &mut _| {
|
||||
let path = path.to_str().ok_or_else(|| {
|
||||
io::Error::new(
|
||||
io::ErrorKind::InvalidData,
|
||||
format!("path contains invalid characters: {}", display_rel_path(&path)),
|
||||
)
|
||||
})?;
|
||||
Ok(Ast::new(path, std::fs::read_to_string(path)?, ctx, &mut |path, from, kind| {
|
||||
loader(path, from, kind, tmp).map_err(|e| e.to_string())
|
||||
}))
|
||||
};
|
||||
|
||||
let thread = || {
|
||||
let mut ctx = Ctx::default();
|
||||
let mut tmp = PathBuf::new();
|
||||
while let Some(task @ (indx, ..)) = tasks.pop() {
|
||||
let res = execute_task(&mut ctx, task, &mut tmp);
|
||||
let mut ast = ast.lock().unwrap();
|
||||
let len = ast.len().max(indx + 1);
|
||||
ast.resize_with(len, || Err(io::ErrorKind::InvalidData.into()));
|
||||
ast[indx] = res;
|
||||
}
|
||||
ctx.errors.into_inner()
|
||||
};
|
||||
|
||||
let path = Path::new(root).canonicalize().map_err(|e| {
|
||||
io::Error::new(e.kind(), format!("can't canonicalize root file path ({root})"))
|
||||
})?;
|
||||
seen_modules.lock().unwrap().insert(path.clone(), 0);
|
||||
tasks.push((0, path));
|
||||
|
||||
let errors = if extra_threads == 0 {
|
||||
thread()
|
||||
} else {
|
||||
std::thread::scope(|s| {
|
||||
(0..extra_threads + 1)
|
||||
.map(|_| s.spawn(thread))
|
||||
.collect::<Vec<_>>()
|
||||
.into_iter()
|
||||
.map(|t| t.join().unwrap())
|
||||
.collect::<String>()
|
||||
})
|
||||
};
|
||||
|
||||
Ok(Loaded {
|
||||
ast: ast.into_inner().unwrap().into_iter().collect::<io::Result<Vec<_>>>()?,
|
||||
embeds: embeds.into_inner().unwrap(),
|
||||
errors,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn display_rel_path(path: &(impl AsRef<OsStr> + ?Sized)) -> std::path::Display {
|
||||
static CWD: std::sync::LazyLock<PathBuf> =
|
||||
std::sync::LazyLock::new(|| std::env::current_dir().unwrap_or_default());
|
||||
std::path::Path::new(path).strip_prefix(&*CWD).unwrap_or(std::path::Path::new(path)).display()
|
||||
}
|
141
lang/src/fuzz.rs
Normal file
|
@ -0,0 +1,141 @@
|
|||
use {
|
||||
crate::{
|
||||
lexer::TokenKind,
|
||||
parser,
|
||||
son::{hbvm::HbvmBackend, Codegen, CodegenCtx},
|
||||
ty::Module,
|
||||
},
|
||||
alloc::string::String,
|
||||
core::{fmt::Write, hash::BuildHasher, ops::Range},
|
||||
};
|
||||
|
||||
#[derive(Default)]
|
||||
struct Rand(pub u64);
|
||||
|
||||
impl Rand {
|
||||
pub fn next(&mut self) -> u64 {
|
||||
self.0 = crate::FnvBuildHasher::default().hash_one(self.0);
|
||||
self.0
|
||||
}
|
||||
|
||||
pub fn range(&mut self, min: u64, max: u64) -> u64 {
|
||||
self.next() % (max - min) + min
|
||||
}
|
||||
|
||||
fn bool(&mut self) -> bool {
|
||||
self.next() % 2 == 0
|
||||
}
|
||||
}
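// Quick illustration (not part of the original file): the generator is a pure
// function of its state, so equal seeds always replay the same fuzz case.
//
//     let (mut a, mut b) = (Rand(42), Rand(42));
//     assert_eq!(a.next(), b.next());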
|
||||
|
||||
#[derive(Default)]
|
||||
struct FuncGen {
|
||||
rand: Rand,
|
||||
buf: String,
|
||||
vars: u64,
|
||||
}
|
||||
|
||||
impl FuncGen {
|
||||
fn gen(&mut self, seed: u64) -> &str {
|
||||
self.rand = Rand(seed);
|
||||
self.buf.clear();
|
||||
self.buf.push_str("main := fn(): void ");
|
||||
self.block().unwrap();
|
||||
&self.buf
|
||||
}
|
||||
|
||||
fn block(&mut self) -> core::fmt::Result {
|
||||
let prev_vars = self.vars;
|
||||
self.buf.push('{');
|
||||
for _ in 0..self.rand.range(1, 10) {
|
||||
self.stmt()?;
|
||||
}
|
||||
self.buf.push('}');
|
||||
self.vars = prev_vars;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn stmt(&mut self) -> core::fmt::Result {
|
||||
match self.rand.range(0, 100) {
|
||||
0..4 => _ = self.block(),
|
||||
4..10 => {
|
||||
write!(self.buf, "var{} := ", self.vars)?;
|
||||
self.expr()?;
|
||||
self.vars += 1;
|
||||
}
|
||||
|
||||
10..20 if self.vars != 0 => {
|
||||
write!(self.buf, "var{} = ", self.rand.range(0, self.vars))?;
|
||||
self.expr()?;
|
||||
}
|
||||
20..23 => {
|
||||
self.buf.push_str("if ");
|
||||
self.expr()?;
|
||||
self.block()?;
|
||||
if self.rand.bool() {
|
||||
self.buf.push_str(" else ");
|
||||
self.block()?;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
self.buf.push_str("return ");
|
||||
self.expr()?;
|
||||
}
|
||||
}
|
||||
|
||||
self.buf.push(';');
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn expr(&mut self) -> core::fmt::Result {
|
||||
match self.rand.range(0, 100) {
|
||||
0..80 => {
|
||||
write!(self.buf, "{}", self.rand.next())
|
||||
}
|
||||
80..90 if self.vars != 0 => {
|
||||
write!(self.buf, "var{}", self.rand.range(0, self.vars))
|
||||
}
|
||||
80..100 => {
|
||||
self.expr()?;
|
||||
let ops = [
|
||||
TokenKind::Add,
|
||||
TokenKind::Sub,
|
||||
TokenKind::Mul,
|
||||
TokenKind::Div,
|
||||
TokenKind::Shl,
|
||||
TokenKind::Eq,
|
||||
TokenKind::Ne,
|
||||
TokenKind::Lt,
|
||||
TokenKind::Gt,
|
||||
TokenKind::Le,
|
||||
TokenKind::Ge,
|
||||
TokenKind::Band,
|
||||
TokenKind::Bor,
|
||||
TokenKind::Xor,
|
||||
TokenKind::Mod,
|
||||
TokenKind::Shr,
|
||||
];
|
||||
let op = ops[self.rand.range(0, ops.len() as u64) as usize];
|
||||
write!(self.buf, " {op} ")?;
|
||||
self.expr()
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fuzz(seed_range: Range<u64>) {
|
||||
let mut gen = FuncGen::default();
|
||||
let mut ctx = CodegenCtx::default();
|
||||
for i in seed_range {
|
||||
ctx.clear();
|
||||
let src = gen.gen(i);
|
||||
let parsed = parser::Ast::new("fuzz", src, &mut ctx.parser, &mut parser::no_loader);
|
||||
|
||||
assert!(ctx.parser.errors.get_mut().is_empty());
|
||||
|
||||
let mut backend = HbvmBackend::default();
|
||||
let mut cdg = Codegen::new(&mut backend, core::slice::from_ref(&parsed), &mut ctx);
|
||||
cdg.generate(Module::MAIN);
|
||||
}
|
||||
}
|
3
lang/src/fuzz_main.rs
Normal file
@ -0,0 +1,3 @@
fn main() {
    hblang::fuzz::fuzz(0..1000000);
}
556
lang/src/lexer.rs
Normal file
|
@ -0,0 +1,556 @@
|
|||
const fn ascii_mask(chars: &[u8]) -> u128 {
|
||||
let mut eq = 0;
|
||||
let mut i = 0;
|
||||
while i < chars.len() {
|
||||
let b = chars[i];
|
||||
eq |= 1 << b;
|
||||
i += 1;
|
||||
}
|
||||
eq
|
||||
}
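// For example (illustrative, not in the original file), a mask built from two
// bytes turns membership testing into a single AND against a 128-bit constant:
//
//     const PLUS_MINUS: u128 = ascii_mask(b"+-");
//     assert_ne!(PLUS_MINUS & (1 << b'+'), 0);
//     assert_eq!(PLUS_MINUS & (1 << b'x'), 0);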
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub struct Token {
|
||||
pub kind: TokenKind,
|
||||
pub start: u32,
|
||||
pub end: u32,
|
||||
}
|
||||
|
||||
impl Token {
|
||||
pub fn range(&self) -> core::ops::Range<usize> {
|
||||
self.start as usize..self.end as usize
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! gen_token_kind {
|
||||
($(
|
||||
#[$atts:meta])*
|
||||
$vis:vis enum $name:ident {
|
||||
#[patterns] $(
|
||||
$pattern:ident,
|
||||
)*
|
||||
#[keywords] $(
|
||||
$keyword:ident = $keyword_lit:literal,
|
||||
)*
|
||||
#[punkt] $(
|
||||
$punkt:ident = $punkt_lit:literal,
|
||||
)*
|
||||
#[ops] $(
|
||||
#[$prec:ident] $(
|
||||
$op:ident = $op_lit:literal $(=> $assign:ident)?,
|
||||
)*
|
||||
)*
|
||||
}
|
||||
) => {
|
||||
impl core::fmt::Display for $name {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
|
||||
f.write_str(self.name())
|
||||
}
|
||||
}
|
||||
|
||||
impl $name {
|
||||
pub const OPS: &[Self] = &[$($(Self::$op),*),*];
|
||||
|
||||
pub fn name(&self) -> &str {
|
||||
let sf = unsafe { &*(self as *const _ as *const u8) } ;
|
||||
match *self {
|
||||
$( Self::$pattern => concat!('<', stringify!($pattern), '>'), )*
|
||||
$( Self::$keyword => stringify!($keyword_lit), )*
|
||||
$( Self::$punkt => stringify!($punkt_lit), )*
|
||||
$($( Self::$op => $op_lit,
|
||||
$(Self::$assign => concat!($op_lit, "="),)?)*)*
|
||||
_ => unsafe { core::str::from_utf8_unchecked(core::slice::from_ref(&sf)) },
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn precedence(&self) -> Option<u8> {
|
||||
Some(match self {
|
||||
$($(Self::$op => ${ignore($prec)} ${index(1)},
|
||||
$(Self::$assign => 0,)?)*)*
|
||||
_ => return None,
|
||||
} + 1)
|
||||
}
|
||||
|
||||
fn from_ident(ident: &[u8]) -> Self {
|
||||
match ident {
|
||||
$($keyword_lit => Self::$keyword,)*
|
||||
_ => Self::Ident,
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone, Copy, Hash, PartialOrd, Ord)]
|
||||
#[repr(u8)]
|
||||
pub enum TokenKind {
|
||||
Not = b'!',
|
||||
DQuote = b'"',
|
||||
Pound = b'#',
|
||||
CtIdent = b'$',
|
||||
Mod = b'%',
|
||||
Band = b'&',
|
||||
Quote = b'\'',
|
||||
LParen = b'(',
|
||||
RParen = b')',
|
||||
Mul = b'*',
|
||||
Add = b'+',
|
||||
Comma = b',',
|
||||
Sub = b'-',
|
||||
Dot = b'.',
|
||||
Div = b'/',
|
||||
// Unused = 2-6
|
||||
Shl = b'<' - 5,
|
||||
// Unused = 8
|
||||
Shr = b'>' - 5,
|
||||
Colon = b':',
|
||||
Semi = b';',
|
||||
Lt = b'<',
|
||||
Assign = b'=',
|
||||
Gt = b'>',
|
||||
Que = b'?',
|
||||
Directive = b'@',
|
||||
|
||||
Comment,
|
||||
|
||||
Ident,
|
||||
Number,
|
||||
Float,
|
||||
Eof,
|
||||
|
||||
Ct,
|
||||
|
||||
Return,
|
||||
If,
|
||||
Else,
|
||||
Loop,
|
||||
Break,
|
||||
Continue,
|
||||
Fn,
|
||||
Struct,
|
||||
Packed,
|
||||
True,
|
||||
False,
|
||||
Null,
|
||||
Idk,
|
||||
Die,
|
||||
|
||||
Ctor,
|
||||
Tupl,
|
||||
|
||||
Or,
|
||||
And,
|
||||
|
||||
// Unused = R-Z
|
||||
LBrack = b'[',
|
||||
BSlash = b'\\',
|
||||
RBrack = b']',
|
||||
Xor = b'^',
|
||||
Tick = b'`',
|
||||
Under = b'_',
|
||||
// Unused = a-z
|
||||
LBrace = b'{',
|
||||
Bor = b'|',
|
||||
RBrace = b'}',
|
||||
Tilde = b'~',
|
||||
|
||||
Decl = b':' + 128,
|
||||
Eq = b'=' + 128,
|
||||
Ne = b'!' + 128,
|
||||
Le = b'<' + 128,
|
||||
Ge = b'>' + 128,
|
||||
|
||||
BorAss = b'|' + 128,
|
||||
AddAss = b'+' + 128,
|
||||
SubAss = b'-' + 128,
|
||||
MulAss = b'*' + 128,
|
||||
DivAss = b'/' + 128,
|
||||
ModAss = b'%' + 128,
|
||||
XorAss = b'^' + 128,
|
||||
BandAss = b'&' + 128,
|
||||
ShrAss = b'>' - 5 + 128,
|
||||
ShlAss = b'<' - 5 + 128,
|
||||
}
|
||||
|
||||
impl core::fmt::Debug for TokenKind {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
core::fmt::Display::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl TokenKind {
|
||||
pub fn ass_op(self) -> Option<Self> {
|
||||
let id = (self as u8).saturating_sub(128);
|
||||
if ascii_mask(b"|+-*/%^&79") & (1u128 << id) == 0 {
|
||||
return None;
|
||||
}
|
||||
Some(unsafe { core::mem::transmute::<u8, Self>(id) })
|
||||
}
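// For instance (not from the original file): compound-assignment kinds are
// encoded as the base operator plus 128, so `ass_op` just strips the high bit:
//
//     assert_eq!(TokenKind::AddAss.ass_op(), Some(TokenKind::Add));
//     assert_eq!(TokenKind::Add.ass_op(), None);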
|
||||
|
||||
pub fn is_comutative(self) -> bool {
|
||||
use TokenKind as S;
|
||||
matches!(self, S::Eq | S::Ne | S::Bor | S::Xor | S::Band | S::Add | S::Mul)
|
||||
}
|
||||
|
||||
pub fn is_supported_float_op(self) -> bool {
|
||||
matches!(
|
||||
self,
|
||||
Self::Add
|
||||
| Self::Sub
|
||||
| Self::Mul
|
||||
| Self::Div
|
||||
| Self::Eq
|
||||
| Self::Ne
|
||||
| Self::Le
|
||||
| Self::Ge
|
||||
| Self::Lt
|
||||
| Self::Gt
|
||||
)
|
||||
}
|
||||
|
||||
pub fn apply_binop(self, a: i64, b: i64, float: bool) -> i64 {
|
||||
if float {
|
||||
debug_assert!(self.is_supported_float_op());
|
||||
let [a, b] = [f64::from_bits(a as _), f64::from_bits(b as _)];
|
||||
let res = match self {
|
||||
Self::Add => a + b,
|
||||
Self::Sub => a - b,
|
||||
Self::Mul => a * b,
|
||||
Self::Div => a / b,
|
||||
Self::Eq => return (a == b) as i64,
|
||||
Self::Ne => return (a != b) as i64,
|
||||
Self::Lt => return (a < b) as i64,
|
||||
Self::Gt => return (a > b) as i64,
|
||||
Self::Le => return (a >= b) as i64,
|
||||
Self::Ge => return (a <= b) as i64,
|
||||
_ => todo!("floating point op: {self}"),
|
||||
};
|
||||
|
||||
return res.to_bits() as _;
|
||||
}
|
||||
|
||||
match self {
|
||||
Self::Add => a.wrapping_add(b),
|
||||
Self::Sub => a.wrapping_sub(b),
|
||||
Self::Mul => a.wrapping_mul(b),
|
||||
Self::Div if b == 0 => 0,
|
||||
Self::Div => a.wrapping_div(b),
|
||||
Self::Shl => a.wrapping_shl(b as _),
|
||||
Self::Eq => (a == b) as i64,
|
||||
Self::Ne => (a != b) as i64,
|
||||
Self::Lt => (a < b) as i64,
|
||||
Self::Gt => (a > b) as i64,
|
||||
Self::Le => (a >= b) as i64,
|
||||
Self::Ge => (a <= b) as i64,
|
||||
Self::Band => a & b,
|
||||
Self::Bor => a | b,
|
||||
Self::Xor => a ^ b,
|
||||
Self::Mod if b == 0 => 0,
|
||||
Self::Mod => a.wrapping_rem(b),
|
||||
Self::Shr => a.wrapping_shr(b as _),
|
||||
s => todo!("{s}"),
|
||||
}
|
||||
}
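// Worked example (illustrative): integer folding uses wrapping arithmetic and
// treats division by zero as 0 instead of trapping, per the arms above.
//
//     assert_eq!(TokenKind::Div.apply_binop(7, 0, false), 0);
//     assert_eq!(TokenKind::Add.apply_binop(i64::MAX, 1, false), i64::MIN);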
|
||||
|
||||
pub fn is_homogenous(&self) -> bool {
|
||||
self.precedence() != Self::Eq.precedence()
|
||||
&& self.precedence() != Self::Gt.precedence()
|
||||
&& self.precedence() != Self::Eof.precedence()
|
||||
}
|
||||
|
||||
pub fn apply_unop(&self, value: i64, float: bool) -> i64 {
|
||||
match self {
|
||||
Self::Sub if float => (-f64::from_bits(value as _)).to_bits() as _,
|
||||
Self::Sub => value.wrapping_neg(),
|
||||
Self::Float if float => value,
|
||||
Self::Float => (value as f64).to_bits() as _,
|
||||
Self::Number => {
|
||||
debug_assert!(float);
|
||||
f64::from_bits(value as _).to_bits() as _
|
||||
}
|
||||
s => todo!("{s}"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn closing(&self) -> Option<TokenKind> {
|
||||
Some(match self {
|
||||
Self::Ctor => Self::RBrace,
|
||||
Self::Tupl => Self::RParen,
|
||||
Self::LParen => Self::RParen,
|
||||
Self::LBrack => Self::RBrack,
|
||||
Self::LBrace => Self::RBrace,
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
gen_token_kind! {
|
||||
pub enum TokenKind {
|
||||
#[patterns]
|
||||
CtIdent,
|
||||
Ident,
|
||||
Number,
|
||||
Float,
|
||||
Eof,
|
||||
Directive,
|
||||
#[keywords]
|
||||
Return = b"return",
|
||||
If = b"if",
|
||||
Else = b"else",
|
||||
Loop = b"loop",
|
||||
Break = b"break",
|
||||
Continue = b"continue",
|
||||
Fn = b"fn",
|
||||
Struct = b"struct",
|
||||
Packed = b"packed",
|
||||
True = b"true",
|
||||
False = b"false",
|
||||
Null = b"null",
|
||||
Idk = b"idk",
|
||||
Die = b"die",
|
||||
Under = b"_",
|
||||
#[punkt]
|
||||
Ctor = ".{",
|
||||
Tupl = ".(",
|
||||
// #define OP: each `#[prec]` delimits a level of precedence, from lowest to highest
|
||||
#[ops]
|
||||
#[prec]
|
||||
// this also includes all `<op>=` tokens
|
||||
Decl = ":=",
|
||||
Assign = "=",
|
||||
#[prec]
|
||||
Or = "||",
|
||||
#[prec]
|
||||
And = "&&",
|
||||
#[prec]
|
||||
Bor = "|" => BorAss,
|
||||
#[prec]
|
||||
Xor = "^" => XorAss,
|
||||
#[prec]
|
||||
Band = "&" => BandAss,
|
||||
#[prec]
|
||||
Eq = "==",
|
||||
Ne = "!=",
|
||||
#[prec]
|
||||
Le = "<=",
|
||||
Ge = ">=",
|
||||
Lt = "<",
|
||||
Gt = ">",
|
||||
#[prec]
|
||||
Shl = "<<" => ShlAss,
|
||||
Shr = ">>" => ShrAss,
|
||||
#[prec]
|
||||
Add = "+" => AddAss,
|
||||
Sub = "-" => SubAss,
|
||||
#[prec]
|
||||
Mul = "*" => MulAss,
|
||||
Div = "/" => DivAss,
|
||||
Mod = "%" => ModAss,
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Lexer<'a> {
|
||||
pos: u32,
|
||||
source: &'a [u8],
|
||||
}
|
||||
|
||||
impl<'a> Lexer<'a> {
|
||||
pub fn new(input: &'a str) -> Self {
|
||||
Self::restore(input, 0)
|
||||
}
|
||||
|
||||
pub fn uses(input: &'a str) -> impl Iterator<Item = &'a str> {
|
||||
let mut s = Self::new(input);
|
||||
core::iter::from_fn(move || loop {
|
||||
let t = s.eat();
|
||||
if t.kind == TokenKind::Eof {
|
||||
return None;
|
||||
}
|
||||
if t.kind == TokenKind::Directive
|
||||
&& s.slice(t.range()) == "use"
|
||||
&& s.eat().kind == TokenKind::LParen
|
||||
{
|
||||
let t = s.eat();
|
||||
if t.kind == TokenKind::DQuote {
|
||||
return Some(&s.slice(t.range())[1..t.range().len() - 1]);
|
||||
}
|
||||
}
|
||||
})
|
||||
}
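// Sketch of what the iterator above yields (the input string is made up):
//
//     let mut imports = Lexer::uses("foo := @use(\"dir/foo.hb\")");
//     assert_eq!(imports.next(), Some("dir/foo.hb"));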
|
||||
|
||||
pub fn restore(input: &'a str, pos: u32) -> Self {
|
||||
Self { pos, source: input.as_bytes() }
|
||||
}
|
||||
|
||||
pub fn source(&self) -> &'a str {
|
||||
unsafe { core::str::from_utf8_unchecked(self.source) }
|
||||
}
|
||||
|
||||
pub fn slice(&self, tok: core::ops::Range<usize>) -> &'a str {
|
||||
unsafe { core::str::from_utf8_unchecked(&self.source[tok]) }
|
||||
}
|
||||
|
||||
fn peek(&self) -> Option<u8> {
|
||||
if core::intrinsics::unlikely(self.pos >= self.source.len() as u32) {
|
||||
None
|
||||
} else {
|
||||
Some(unsafe { *self.source.get_unchecked(self.pos as usize) })
|
||||
}
|
||||
}
|
||||
|
||||
fn advance(&mut self) -> Option<u8> {
|
||||
let c = self.peek()?;
|
||||
self.pos += 1;
|
||||
Some(c)
|
||||
}
|
||||
|
||||
pub fn last(&mut self) -> Token {
|
||||
let mut token = self.eat();
|
||||
loop {
|
||||
let next = self.eat();
|
||||
if next.kind == TokenKind::Eof {
|
||||
break;
|
||||
}
|
||||
token = next;
|
||||
}
|
||||
token
|
||||
}
|
||||
|
||||
pub fn eat(&mut self) -> Token {
|
||||
use TokenKind as T;
|
||||
loop {
|
||||
let mut start = self.pos;
|
||||
|
||||
let Some(c) = self.advance() else {
|
||||
return Token { kind: T::Eof, start, end: self.pos };
|
||||
};
|
||||
|
||||
let advance_ident = |s: &mut Self| {
|
||||
while let Some(b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_' | 127..) = s.peek() {
|
||||
s.advance();
|
||||
}
|
||||
};
|
||||
|
||||
let identity = |s: u8| unsafe { core::mem::transmute::<u8, T>(s) };
|
||||
|
||||
let kind = match c {
|
||||
..=b' ' => continue,
|
||||
b'0' if self.advance_if(b'x') => {
|
||||
while let Some(b'0'..=b'9' | b'A'..=b'F' | b'a'..=b'f') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
T::Number
|
||||
}
|
||||
b'0' if self.advance_if(b'b') => {
|
||||
while let Some(b'0' | b'1') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
T::Number
|
||||
}
|
||||
b'0' if self.advance_if(b'o') => {
|
||||
while let Some(b'0'..=b'7') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
T::Number
|
||||
}
|
||||
b'0'..=b'9' => {
|
||||
while let Some(b'0'..=b'9') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
|
||||
if self.advance_if(b'.') {
|
||||
while let Some(b'0'..=b'9') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
T::Float
|
||||
} else {
|
||||
T::Number
|
||||
}
|
||||
}
|
||||
b'a'..=b'z' | b'A'..=b'Z' | b'_' | 127.. => {
|
||||
advance_ident(self);
|
||||
let ident = &self.source[start as usize..self.pos as usize];
|
||||
T::from_ident(ident)
|
||||
}
|
||||
b'"' | b'\'' => loop {
|
||||
match self.advance() {
|
||||
Some(b'\\') => _ = self.advance(),
|
||||
Some(nc) if nc == c => break identity(c),
|
||||
Some(_) => {}
|
||||
None => break T::Eof,
|
||||
}
|
||||
},
|
||||
b'/' if self.advance_if(b'/') => {
|
||||
while let Some(l) = self.peek()
|
||||
&& l != b'\n'
|
||||
{
|
||||
self.pos += 1;
|
||||
}
|
||||
|
||||
let end = self.source[..self.pos as usize]
|
||||
.iter()
|
||||
.rposition(|&b| !b.is_ascii_whitespace())
|
||||
.map_or(self.pos, |i| i as u32 + 1);
|
||||
|
||||
return Token { kind: T::Comment, start, end };
|
||||
}
|
||||
b'/' if self.advance_if(b'*') => {
|
||||
let mut depth = 1;
|
||||
while let Some(l) = self.advance() {
|
||||
match l {
|
||||
b'/' if self.advance_if(b'*') => depth += 1,
|
||||
b'*' if self.advance_if(b'/') => match depth {
|
||||
1 => break,
|
||||
_ => depth -= 1,
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
T::Comment
|
||||
}
|
||||
b'.' if self.advance_if(b'{') => T::Ctor,
|
||||
b'.' if self.advance_if(b'(') => T::Tupl,
|
||||
b'&' if self.advance_if(b'&') => T::And,
|
||||
b'|' if self.advance_if(b'|') => T::Or,
|
||||
b'$' if self.advance_if(b':') => T::Ct,
|
||||
b'@' | b'$' => {
|
||||
start += 1;
|
||||
advance_ident(self);
|
||||
identity(c)
|
||||
}
|
||||
b'<' | b'>' if self.advance_if(c) => {
|
||||
identity(c - 5 + 128 * self.advance_if(b'=') as u8)
|
||||
}
|
||||
b':' | b'=' | b'!' | b'<' | b'>' | b'|' | b'+' | b'-' | b'*' | b'/' | b'%'
|
||||
| b'^' | b'&'
|
||||
if self.advance_if(b'=') =>
|
||||
{
|
||||
identity(c + 128)
|
||||
}
|
||||
_ => identity(c),
|
||||
};
|
||||
|
||||
return Token { kind, start, end: self.pos };
|
||||
}
|
||||
}
|
||||
|
||||
fn advance_if(&mut self, arg: u8) -> bool {
|
||||
if self.peek() == Some(arg) {
|
||||
self.advance();
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn line_col(bytes: &[u8], pos: u32) -> (usize, usize) {
|
||||
bytes[..pos as usize]
|
||||
.split(|&b| b == b'\n')
|
||||
.map(<[u8]>::len)
|
||||
.enumerate()
|
||||
.last()
|
||||
.map(|(line, col)| (line + 1, col + 1))
|
||||
.unwrap_or((1, 1))
|
||||
}
|
1671
lang/src/lib.rs
Normal file
File diff suppressed because it is too large
26
lang/src/main.rs
Normal file
@ -0,0 +1,26 @@
#[cfg(feature = "std")]
fn main() {
    use std::io::Write;

    fn run(out: &mut Vec<u8>) -> std::io::Result<()> {
        let args = std::env::args().collect::<Vec<_>>();
        let args = args.iter().map(String::as_str).collect::<Vec<_>>();

        let opts = hblang::Options::from_args(&args, out)?;
        let file = args.iter().filter(|a| !a.starts_with('-')).nth(1).copied().unwrap_or("main.hb");

        hblang::run_compiler(file, opts, out)
    }

    log::set_logger(&hblang::fs::Logger).unwrap();
    log::set_max_level(log::LevelFilter::Error);

    let mut out = Vec::new();
    match run(&mut out) {
        Ok(_) => std::io::stdout().write_all(&out).unwrap(),
        Err(_) => {
            std::io::stderr().write_all(&out).unwrap();
            std::process::exit(1);
        }
    }
}
1529
lang/src/parser.rs
Normal file
File diff suppressed because it is too large
2
lang/src/regalloc.rs
Normal file
@ -0,0 +1,2 @@


4691
lang/src/son.rs
Normal file
File diff suppressed because it is too large
767
lang/src/son/hbvm.rs
Normal file
|
@ -0,0 +1,767 @@
|
|||
use {
|
||||
super::{AssemblySpec, Backend, Nid, Node, Nodes},
|
||||
crate::{
|
||||
lexer::TokenKind,
|
||||
parser, reg,
|
||||
son::{debug_assert_matches, write_reloc, Kind, MEM},
|
||||
ty::{self, Loc, Module},
|
||||
utils::{Ent, EntVec},
|
||||
Offset, Reloc, Size, TypedReloc, Types,
|
||||
},
|
||||
alloc::{boxed::Box, collections::BTreeMap, string::String, vec::Vec},
|
||||
core::mem,
|
||||
hbbytecode::{self as instrs, *},
|
||||
};
|
||||
|
||||
mod my_regalloc;
|
||||
mod their_regalloc;
|
||||
|
||||
struct FuncDt {
|
||||
offset: Offset,
|
||||
// TODO: change to indices into common vec
|
||||
relocs: Vec<TypedReloc>,
|
||||
code: Vec<u8>,
|
||||
}
|
||||
|
||||
impl Default for FuncDt {
|
||||
fn default() -> Self {
|
||||
Self { offset: u32::MAX, relocs: Default::default(), code: Default::default() }
|
||||
}
|
||||
}
|
||||
|
||||
struct GlobalDt {
|
||||
offset: Offset,
|
||||
}
|
||||
|
||||
impl Default for GlobalDt {
|
||||
fn default() -> Self {
|
||||
Self { offset: u32::MAX }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Assembler {
|
||||
frontier: Vec<ty::Id>,
|
||||
globals: Vec<ty::Global>,
|
||||
funcs: Vec<ty::Func>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct HbvmBackend {
|
||||
funcs: EntVec<ty::Func, FuncDt>,
|
||||
globals: EntVec<ty::Global, GlobalDt>,
|
||||
asm: Assembler,
|
||||
ralloc: their_regalloc::Regalloc,
|
||||
ralloc_my: my_regalloc::Res,
|
||||
|
||||
ret_relocs: Vec<Reloc>,
|
||||
relocs: Vec<TypedReloc>,
|
||||
jump_relocs: Vec<(Nid, Reloc)>,
|
||||
code: Vec<u8>,
|
||||
offsets: Vec<Offset>,
|
||||
}
|
||||
|
||||
impl HbvmBackend {
|
||||
fn emit(&mut self, instr: (usize, [u8; instrs::MAX_SIZE])) {
|
||||
emit(&mut self.code, instr);
|
||||
}
|
||||
}
|
||||
|
||||
impl Backend for HbvmBackend {
|
||||
fn assemble_bin(&mut self, entry: ty::Func, types: &Types, to: &mut Vec<u8>) {
|
||||
to.extend([0u8; HEADER_SIZE]);
|
||||
|
||||
binary_prelude(to);
|
||||
let AssemblySpec { code_length, data_length, entry } =
|
||||
self.assemble_reachable(entry, types, to);
|
||||
|
||||
let exe = AbleOsExecutableHeader {
|
||||
magic_number: [0x15, 0x91, 0xD2],
|
||||
executable_version: 0,
|
||||
code_length,
|
||||
data_length,
|
||||
debug_length: 0,
|
||||
config_length: 0,
|
||||
metadata_length: 0,
|
||||
};
|
||||
Reloc::new(HEADER_SIZE, 3, 4).apply_jump(to, entry, 0);
|
||||
|
||||
unsafe { *to.as_mut_ptr().cast::<AbleOsExecutableHeader>() = exe }
|
||||
}
|
||||
|
||||
fn assemble_reachable(
|
||||
&mut self,
|
||||
from: ty::Func,
|
||||
types: &Types,
|
||||
to: &mut Vec<u8>,
|
||||
) -> AssemblySpec {
|
||||
debug_assert!(self.asm.frontier.is_empty());
|
||||
debug_assert!(self.asm.funcs.is_empty());
|
||||
debug_assert!(self.asm.globals.is_empty());
|
||||
|
||||
self.globals.shadow(types.ins.globals.len());
|
||||
|
||||
self.asm.frontier.push(ty::Kind::Func(from).compress());
|
||||
while let Some(itm) = self.asm.frontier.pop() {
|
||||
match itm.expand() {
|
||||
ty::Kind::Func(func) => {
|
||||
let fuc = &mut self.funcs[func];
|
||||
debug_assert!(!fuc.code.is_empty());
|
||||
if fuc.offset != u32::MAX {
|
||||
continue;
|
||||
}
|
||||
fuc.offset = 0;
|
||||
self.asm.funcs.push(func);
|
||||
self.asm.frontier.extend(fuc.relocs.iter().map(|r| r.target));
|
||||
}
|
||||
ty::Kind::Global(glob) => {
|
||||
let glb = &mut self.globals[glob];
|
||||
if glb.offset != u32::MAX {
|
||||
continue;
|
||||
}
|
||||
glb.offset = 0;
|
||||
self.asm.globals.push(glob);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
let init_len = to.len();
|
||||
|
||||
for &func in &self.asm.funcs {
|
||||
let fuc = &mut self.funcs[func];
|
||||
fuc.offset = to.len() as _;
|
||||
debug_assert!(!fuc.code.is_empty());
|
||||
to.extend(&fuc.code);
|
||||
}
|
||||
|
||||
let code_length = to.len() - init_len;
|
||||
|
||||
for global in self.asm.globals.drain(..) {
|
||||
self.globals[global].offset = to.len() as _;
|
||||
to.extend(&types.ins.globals[global].data);
|
||||
}
|
||||
|
||||
let data_length = to.len() - code_length - init_len;
|
||||
|
||||
for func in self.asm.funcs.drain(..) {
|
||||
let fuc = &self.funcs[func];
|
||||
for rel in &fuc.relocs {
|
||||
let offset = match rel.target.expand() {
|
||||
ty::Kind::Func(fun) => self.funcs[fun].offset,
|
||||
ty::Kind::Global(glo) => self.globals[glo].offset,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
rel.reloc.apply_jump(to, offset, fuc.offset);
|
||||
}
|
||||
}
|
||||
|
||||
AssemblySpec {
|
||||
code_length: code_length as _,
|
||||
data_length: data_length as _,
|
||||
entry: self.funcs[from].offset,
|
||||
}
|
||||
}
|
||||
|
||||
fn disasm<'a>(
|
||||
&'a self,
|
||||
mut slice: &[u8],
|
||||
eca_handler: &mut dyn FnMut(&mut &[u8]),
|
||||
types: &'a Types,
|
||||
files: &'a [parser::Ast],
|
||||
output: &mut String,
|
||||
) -> Result<(), hbbytecode::DisasmError<'a>> {
|
||||
use hbbytecode::DisasmItem;
|
||||
let functions = types
|
||||
.ins
|
||||
.funcs
|
||||
.iter()
|
||||
.zip(self.funcs.iter())
|
||||
.filter(|(_, f)| f.offset != u32::MAX)
|
||||
.map(|(f, fd)| {
|
||||
let name = if f.file != Module::default() {
|
||||
let file = &files[f.file.index()];
|
||||
file.ident_str(f.name)
|
||||
} else {
|
||||
"target_fn"
|
||||
};
|
||||
(fd.offset, (name, fd.code.len() as u32, DisasmItem::Func))
|
||||
})
|
||||
.chain(
|
||||
types
|
||||
.ins
|
||||
.globals
|
||||
.iter()
|
||||
.zip(self.globals.iter())
|
||||
.filter(|(_, g)| g.offset != u32::MAX)
|
||||
.map(|(g, gd)| {
|
||||
let name = if g.file == Module::default() {
|
||||
core::str::from_utf8(&g.data).unwrap_or("invalid utf-8")
|
||||
} else {
|
||||
let file = &files[g.file.index()];
|
||||
file.ident_str(g.name)
|
||||
};
|
||||
(gd.offset, (name, g.data.len() as Size, DisasmItem::Global))
|
||||
}),
|
||||
)
|
||||
.collect::<BTreeMap<_, _>>();
|
||||
hbbytecode::disasm(&mut slice, &functions, output, eca_handler)
|
||||
}
|
||||
|
||||
fn emit_ct_body(
|
||||
&mut self,
|
||||
id: ty::Func,
|
||||
nodes: &mut Nodes,
|
||||
tys: &Types,
|
||||
files: &[parser::Ast],
|
||||
) {
|
||||
self.emit_body(id, nodes, tys, files);
|
||||
let fd = &mut self.funcs[id];
|
||||
fd.code.truncate(fd.code.len() - instrs::jala(0, 0, 0).0);
|
||||
emit(&mut fd.code, instrs::tx());
|
||||
}
|
||||
|
||||
fn emit_body(&mut self, id: ty::Func, nodes: &mut Nodes, tys: &Types, files: &[parser::Ast]) {
|
||||
let sig = tys.ins.funcs[id].sig.unwrap();
|
||||
|
||||
debug_assert!(self.code.is_empty());
|
||||
|
||||
self.offsets.clear();
|
||||
self.offsets.resize(nodes.values.len(), Offset::MAX);
|
||||
|
||||
let mut stack_size = 0;
|
||||
'_compute_stack: {
|
||||
let mems = mem::take(&mut nodes[MEM].outputs);
|
||||
for &stck in mems.iter() {
|
||||
if !matches!(nodes[stck].kind, Kind::Stck | Kind::Arg) {
|
||||
debug_assert_matches!(
|
||||
nodes[stck].kind,
|
||||
Kind::Phi | Kind::Return | Kind::Load | Kind::Call { .. } | Kind::Stre
|
||||
);
|
||||
continue;
|
||||
}
|
||||
stack_size += tys.size_of(nodes[stck].ty);
|
||||
self.offsets[stck as usize] = stack_size;
|
||||
}
|
||||
for &stck in mems.iter() {
|
||||
if !matches!(nodes[stck].kind, Kind::Stck | Kind::Arg) {
|
||||
continue;
|
||||
}
|
||||
self.offsets[stck as usize] = stack_size - self.offsets[stck as usize];
|
||||
}
|
||||
nodes[MEM].outputs = mems;
|
||||
}
|
||||
|
||||
let (saved, tail) = self.emit_body_code(nodes, sig, tys, files);
|
||||
//let (saved, tail) = self.emit_body_code_my(nodes, sig, tys, files);
|
||||
|
||||
if let Some(last_ret) = self.ret_relocs.last()
|
||||
&& last_ret.offset as usize == self.code.len() - 5
|
||||
&& self
|
||||
.jump_relocs
|
||||
.last()
|
||||
.map_or(true, |&(r, _)| self.offsets[r as usize] as usize != self.code.len())
|
||||
{
|
||||
self.code.truncate(self.code.len() - 5);
|
||||
self.ret_relocs.pop();
|
||||
}
|
||||
|
||||
// FIXME: maybe do this incrementally
|
||||
for (nd, rel) in self.jump_relocs.drain(..) {
|
||||
let offset = self.offsets[nd as usize];
|
||||
//debug_assert!(offset < self.code.len() as u32 - 1);
|
||||
rel.apply_jump(&mut self.code, offset, 0);
|
||||
}
|
||||
|
||||
let end = self.code.len();
|
||||
for ret_rel in self.ret_relocs.drain(..) {
|
||||
ret_rel.apply_jump(&mut self.code, end as _, 0);
|
||||
}
|
||||
|
||||
let mut stripped_prelude_size = 0;
|
||||
'_close_function: {
|
||||
let pushed = (saved as i64 + !tail as i64) * 8;
|
||||
let stack = stack_size as i64;
|
||||
|
||||
let add_len = instrs::addi64(0, 0, 0).0;
|
||||
let st_len = instrs::st(0, 0, 0, 0).0;
|
||||
|
||||
match (pushed, stack) {
|
||||
(0, 0) => {
|
||||
stripped_prelude_size = add_len + st_len;
|
||||
self.code.drain(0..stripped_prelude_size);
|
||||
break '_close_function;
|
||||
}
|
||||
(0, stack) => {
|
||||
write_reloc(&mut self.code, 3, -stack, 8);
|
||||
stripped_prelude_size = st_len;
|
||||
let end = add_len + st_len;
|
||||
self.code.drain(add_len..end);
|
||||
self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, stack as _));
|
||||
break '_close_function;
|
||||
}
|
||||
_ => {}
|
||||
}
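// The three patches below rewrite the placeholder prologue (an `addi64 sp, sp, 0`
// followed by `st ret_addr, sp, 0, 0` emitted at the start of the body): judging
// by the relocation widths used here, the addi64's 8-byte immediate sits at byte
// offset 3, the store's 8-byte offset at 3 + 8 + 3, and its 2-byte count at
// 3 + 8 + 3 + 8.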
|
||||
|
||||
write_reloc(&mut self.code, 3, -(pushed + stack), 8);
|
||||
write_reloc(&mut self.code, 3 + 8 + 3, stack, 8);
|
||||
write_reloc(&mut self.code, 3 + 8 + 3 + 8, pushed, 2);
|
||||
|
||||
self.emit(instrs::ld(
|
||||
reg::RET_ADDR + tail as u8,
|
||||
reg::STACK_PTR,
|
||||
stack as _,
|
||||
pushed as _,
|
||||
));
|
||||
self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, (pushed + stack) as _));
|
||||
}
|
||||
self.relocs.iter_mut().for_each(|r| r.reloc.offset -= stripped_prelude_size as u32);
|
||||
if sig.ret != ty::Id::NEVER {
|
||||
self.emit(instrs::jala(reg::ZERO, reg::RET_ADDR, 0));
|
||||
}
|
||||
|
||||
self.funcs.shadow(tys.ins.funcs.len());
|
||||
self.funcs[id].code = mem::take(&mut self.code);
|
||||
self.funcs[id].relocs = mem::take(&mut self.relocs);
|
||||
|
||||
debug_assert_eq!(self.ret_relocs.len(), 0);
|
||||
debug_assert_eq!(self.relocs.len(), 0);
|
||||
debug_assert_eq!(self.jump_relocs.len(), 0);
|
||||
debug_assert_eq!(self.code.len(), 0);
|
||||
}
|
||||
}
|
||||
|
||||
impl Nodes {
|
||||
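/// Heuristic used by the emitters: a node is "never used" when every user can
/// fold it in and it never needs its own register or instruction, e.g. a
/// constant consumed only as an immediate operand, an address `Add` folded
/// into load/store offsets, a comparison consumed only by `If`, or a
/// zero-sized stack slot.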
fn is_never_used(&self, nid: Nid, tys: &Types) -> bool {
|
||||
let node = &self[nid];
|
||||
match node.kind {
|
||||
Kind::CInt { .. } => node.outputs.iter().all(|&o| {
|
||||
matches!(self[o].kind, Kind::BinOp { op }
|
||||
if op.imm_binop(self[o].ty).is_some()
|
||||
&& self.is_const(self[o].inputs[2])
|
||||
&& op.cond_op(self[o].ty).is_none())
|
||||
}),
|
||||
Kind::BinOp { op: TokenKind::Add | TokenKind::Sub } => {
|
||||
self[node.inputs[1]].lock_rc != 0
|
||||
|| (self.is_const(node.inputs[2])
|
||||
&& node.outputs.iter().all(|&n| self[n].uses_direct_offset_of(nid, tys)))
|
||||
}
|
||||
Kind::BinOp { op } => {
|
||||
op.cond_op(node.ty).is_some()
|
||||
&& node.outputs.iter().all(|&n| self[n].kind == Kind::If)
|
||||
}
|
||||
Kind::Stck if tys.size_of(node.ty) == 0 => true,
|
||||
Kind::Stck | Kind::Arg => node.outputs.iter().all(|&n| {
|
||||
self[n].uses_direct_offset_of(nid, tys)
|
||||
|| (matches!(self[n].kind, Kind::BinOp { op: TokenKind::Add })
|
||||
&& self.is_never_used(n, tys))
|
||||
}),
|
||||
Kind::Load { .. } => node.ty.loc(tys) == Loc::Stack,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Node {
|
||||
fn uses_direct_offset_of(&self, nid: Nid, tys: &Types) -> bool {
|
||||
((self.kind == Kind::Stre && self.inputs[2] == nid)
|
||||
|| (self.kind == Kind::Load && self.inputs[1] == nid))
|
||||
&& self.ty.loc(tys) == Loc::Reg
|
||||
}
|
||||
}
|
||||
|
||||
impl TokenKind {
|
||||
fn cmp_against(self) -> Option<u64> {
|
||||
Some(match self {
|
||||
TokenKind::Le | TokenKind::Gt => 1,
|
||||
TokenKind::Ne | TokenKind::Eq => 0,
|
||||
TokenKind::Ge | TokenKind::Lt => (-1i64) as _,
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
|
||||
fn float_cmp(self, ty: ty::Id) -> Option<fn(u8, u8, u8) -> EncodedInstr> {
|
||||
if !ty.is_float() {
|
||||
return None;
|
||||
}
|
||||
let size = ty.simple_size().unwrap();
|
||||
|
||||
let ops = match self {
|
||||
TokenKind::Gt => [instrs::fcmpgt32, instrs::fcmpgt64],
|
||||
TokenKind::Lt => [instrs::fcmplt32, instrs::fcmplt64],
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
Some(ops[size.ilog2() as usize - 2])
|
||||
}
|
||||
|
||||
#[expect(clippy::type_complexity)]
|
||||
fn cond_op(self, ty: ty::Id) -> Option<(fn(u8, u8, i16) -> EncodedInstr, bool)> {
|
||||
if ty.is_float() {
|
||||
return None;
|
||||
}
|
||||
let signed = ty.is_signed();
|
||||
Some((
|
||||
match self {
|
||||
Self::Le if signed => instrs::jgts,
|
||||
Self::Le => instrs::jgtu,
|
||||
Self::Lt if signed => instrs::jlts,
|
||||
Self::Lt => instrs::jltu,
|
||||
Self::Ge if signed => instrs::jlts,
|
||||
Self::Ge => instrs::jltu,
|
||||
Self::Gt if signed => instrs::jgts,
|
||||
Self::Gt => instrs::jgtu,
|
||||
Self::Eq => instrs::jne,
|
||||
Self::Ne => instrs::jeq,
|
||||
_ => return None,
|
||||
},
|
||||
matches!(self, Self::Lt | Self::Gt),
|
||||
))
|
||||
}
|
||||
|
||||
fn binop(self, ty: ty::Id) -> Option<fn(u8, u8, u8) -> EncodedInstr> {
|
||||
let size = ty.simple_size().unwrap();
|
||||
if ty.is_integer() || ty == ty::Id::BOOL || ty.is_pointer() {
|
||||
macro_rules! div { ($($op:ident),*) => {[$(|a, b, c| $op(a, 0, b, c)),*]}; }
|
||||
macro_rules! rem { ($($op:ident),*) => {[$(|a, b, c| $op(0, a, b, c)),*]}; }
|
||||
let signed = ty.is_signed();
|
||||
|
||||
let ops = match self {
|
||||
Self::Add => [add8, add16, add32, add64],
|
||||
Self::Sub => [sub8, sub16, sub32, sub64],
|
||||
Self::Mul => [mul8, mul16, mul32, mul64],
|
||||
Self::Div if signed => div!(dirs8, dirs16, dirs32, dirs64),
|
||||
Self::Div => div!(diru8, diru16, diru32, diru64),
|
||||
Self::Mod if signed => rem!(dirs8, dirs16, dirs32, dirs64),
|
||||
Self::Mod => rem!(diru8, diru16, diru32, diru64),
|
||||
Self::Band => return Some(and),
|
||||
Self::Bor => return Some(or),
|
||||
Self::Xor => return Some(xor),
|
||||
Self::Shl => [slu8, slu16, slu32, slu64],
|
||||
Self::Shr if signed => [srs8, srs16, srs32, srs64],
|
||||
Self::Shr => [sru8, sru16, sru32, sru64],
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
Some(ops[size.ilog2() as usize])
|
||||
} else {
|
||||
debug_assert!(ty.is_float(), "{self} {ty:?}");
|
||||
let ops = match self {
|
||||
Self::Add => [fadd32, fadd64],
|
||||
Self::Sub => [fsub32, fsub64],
|
||||
Self::Mul => [fmul32, fmul64],
|
||||
Self::Div => [fdiv32, fdiv64],
|
||||
_ => return None,
|
||||
};
|
||||
Some(ops[size.ilog2() as usize - 2])
|
||||
}
|
||||
}
|
||||
|
||||
fn imm_binop(self, ty: ty::Id) -> Option<fn(u8, u8, u64) -> EncodedInstr> {
|
||||
macro_rules! def_op {
|
||||
($name:ident |$a:ident, $b:ident, $c:ident| $($tt:tt)*) => {
|
||||
macro_rules! $name {
|
||||
($$($$op:ident),*) => {
|
||||
[$$(
|
||||
|$a, $b, $c: u64| $$op($($tt)*),
|
||||
)*]
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
if ty.is_float() {
|
||||
return None;
|
||||
}
|
||||
|
||||
def_op!(basic_op | a, b, c | a, b, c as _);
|
||||
def_op!(sub_op | a, b, c | a, b, c.wrapping_neg() as _);
|
||||
|
||||
let signed = ty.is_signed();
|
||||
let ops = match self {
|
||||
Self::Add => basic_op!(addi8, addi16, addi32, addi64),
|
||||
Self::Sub => sub_op!(addi8, addi16, addi32, addi64),
|
||||
Self::Mul => basic_op!(muli8, muli16, muli32, muli64),
|
||||
Self::Band => return Some(andi),
|
||||
Self::Bor => return Some(ori),
|
||||
Self::Xor => return Some(xori),
|
||||
// NOTE: signed immediate shifts currently reuse the unsigned `srui*`
// encodings; only the register form above (`srs*`) distinguishes signedness.
Self::Shr if signed => basic_op!(srui8, srui16, srui32, srui64),
Self::Shr => basic_op!(srui8, srui16, srui32, srui64),
|
||||
Self::Shl => basic_op!(slui8, slui16, slui32, slui64),
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
let size = ty.simple_size().unwrap();
|
||||
Some(ops[size.ilog2() as usize])
|
||||
}
|
||||
|
||||
fn unop(&self, dst: ty::Id, src: ty::Id) -> Option<fn(u8, u8) -> EncodedInstr> {
|
||||
let src_idx = src.simple_size().unwrap().ilog2() as usize - 2;
|
||||
Some(match self {
|
||||
Self::Sub => instrs::neg,
|
||||
Self::Float if dst.is_float() && src.is_integer() => {
|
||||
debug_assert_eq!(dst.simple_size(), src.simple_size());
|
||||
[instrs::itf32, instrs::itf64][src_idx]
|
||||
}
|
||||
Self::Number if src.is_float() && dst.is_integer() => {
|
||||
[|a, b| instrs::fti32(a, b, 1), |a, b| instrs::fti64(a, b, 1)][src_idx]
|
||||
}
|
||||
Self::Float if dst.is_float() && src.is_float() => {
|
||||
[instrs::fc32t64, |a, b| instrs::fc64t32(a, b, 1)][src_idx]
|
||||
}
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
}
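// Illustrative sketch (not part of the original diff): the constants returned
// by `cmp_against` are what the integer comparison lowering compares the
// -1/0/1 result of `cmps`/`cmpu` against (see the `BinOp` arm in the register
// allocators).
#[cfg(test)]
#[test]
fn cmp_against_sketch() {
    assert_eq!(TokenKind::Le.cmp_against(), Some(1));
    assert_eq!(TokenKind::Eq.cmp_against(), Some(0));
    assert_eq!(TokenKind::Ge.cmp_against(), Some((-1i64) as u64));
    assert_eq!(TokenKind::Add.cmp_against(), None);
}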
|
||||
|
||||
type EncodedInstr = (usize, [u8; instrs::MAX_SIZE]);
|
||||
fn emit(out: &mut Vec<u8>, (len, instr): EncodedInstr) {
|
||||
out.extend_from_slice(&instr[..len]);
|
||||
}
|
||||
|
||||
fn binary_prelude(to: &mut Vec<u8>) {
|
||||
emit(to, instrs::jal(reg::RET_ADDR, reg::ZERO, 0));
|
||||
emit(to, instrs::tx());
|
||||
}
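// Note (inferred from this file, not stated in the diff): the prelude's `jal`
// through `RET_ADDR` is a placeholder that gets pointed at the program entry
// (see `AssemblySpec::entry`), and since `jal` leaves the return address on
// the following `tx`, returning from the entry function terminates the run
// (`VmRunOk::End`), which is what `Comptime::run` and `test_run_vm` wait for.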
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct LoggedMem {
|
||||
pub mem: hbvm::mem::HostMemory,
|
||||
op_buf: Vec<hbbytecode::Oper>,
|
||||
disp_buf: String,
|
||||
prev_instr: Option<hbbytecode::Instr>,
|
||||
}
|
||||
|
||||
impl LoggedMem {
|
||||
unsafe fn display_instr<T>(&mut self, instr: hbbytecode::Instr, addr: hbvm::mem::Address) {
|
||||
let novm: *const hbvm::Vm<Self, 0> = core::ptr::null();
|
||||
let offset = core::ptr::addr_of!((*novm).memory) as usize;
|
||||
let regs = unsafe {
|
||||
&*core::ptr::addr_of!(
|
||||
(*(((self as *mut _ as *mut u8).sub(offset)) as *const hbvm::Vm<Self, 0>))
|
||||
.registers
|
||||
)
|
||||
};
|
||||
|
||||
let mut bytes = core::slice::from_raw_parts(
|
||||
(addr.get() - 1) as *const u8,
|
||||
core::mem::size_of::<T>() + 1,
|
||||
);
|
||||
use core::fmt::Write;
|
||||
hbbytecode::parse_args(&mut bytes, instr, &mut self.op_buf).unwrap();
|
||||
debug_assert!(bytes.is_empty());
|
||||
self.disp_buf.clear();
|
||||
write!(self.disp_buf, "{:<10}", format!("{instr:?}")).unwrap();
|
||||
for (i, op) in self.op_buf.drain(..).enumerate() {
|
||||
if i != 0 {
|
||||
write!(self.disp_buf, ", ").unwrap();
|
||||
}
|
||||
write!(self.disp_buf, "{op:?}").unwrap();
|
||||
if let hbbytecode::Oper::R(r) = op {
|
||||
write!(self.disp_buf, "({})", regs[r as usize].0).unwrap()
|
||||
}
|
||||
}
|
||||
log::trace!("read-typed: {:x}: {}", addr.get(), self.disp_buf);
|
||||
}
|
||||
}
|
||||
|
||||
impl hbvm::mem::Memory for LoggedMem {
|
||||
unsafe fn load(
|
||||
&mut self,
|
||||
addr: hbvm::mem::Address,
|
||||
target: *mut u8,
|
||||
count: usize,
|
||||
) -> Result<(), hbvm::mem::LoadError> {
|
||||
log::trace!(
|
||||
"load: {:x} {}",
|
||||
addr.get(),
|
||||
AsHex(core::slice::from_raw_parts(addr.get() as *const u8, count))
|
||||
);
|
||||
self.mem.load(addr, target, count)
|
||||
}
|
||||
|
||||
unsafe fn store(
|
||||
&mut self,
|
||||
addr: hbvm::mem::Address,
|
||||
source: *const u8,
|
||||
count: usize,
|
||||
) -> Result<(), hbvm::mem::StoreError> {
|
||||
log::trace!(
|
||||
"store: {:x} {}",
|
||||
addr.get(),
|
||||
AsHex(core::slice::from_raw_parts(source, count))
|
||||
);
|
||||
self.mem.store(addr, source, count)
|
||||
}
|
||||
|
||||
unsafe fn prog_read<T: Copy + 'static>(&mut self, addr: hbvm::mem::Address) -> T {
|
||||
if log::log_enabled!(log::Level::Trace) {
|
||||
if core::any::TypeId::of::<u8>() == core::any::TypeId::of::<T>() {
|
||||
if let Some(instr) = self.prev_instr {
|
||||
self.display_instr::<()>(instr, addr);
|
||||
}
|
||||
self.prev_instr = hbbytecode::Instr::try_from(*(addr.get() as *const u8)).ok();
|
||||
} else {
|
||||
let instr = self.prev_instr.take().unwrap();
|
||||
self.display_instr::<T>(instr, addr);
|
||||
}
|
||||
}
|
||||
|
||||
self.mem.prog_read(addr)
|
||||
}
|
||||
}
|
||||
|
||||
struct AsHex<'a>(&'a [u8]);
|
||||
|
||||
impl core::fmt::Display for AsHex<'_> {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
for &b in self.0 {
|
||||
write!(f, "{b:02x}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
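// Illustrative sketch (not part of the original diff): `AsHex` just renders a
// byte slice as contiguous lowercase hex for the load/store trace logs above.
#[cfg(test)]
#[test]
fn as_hex_sketch() {
    assert_eq!(alloc::format!("{}", AsHex(&[0xde, 0xad, 0x01])), "dead01");
}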
|
||||
|
||||
const VM_STACK_SIZE: usize = 1024 * 64;
|
||||
|
||||
pub struct Comptime {
|
||||
pub vm: hbvm::Vm<LoggedMem, { 1024 * 10 }>,
|
||||
stack: Box<[u8; VM_STACK_SIZE]>,
|
||||
pub code: Vec<u8>,
|
||||
depth: usize,
|
||||
}
|
||||
|
||||
impl Comptime {
|
||||
pub fn run(&mut self, ret_loc: &mut [u8], offset: u32) -> u64 {
|
||||
self.vm.write_reg(reg::RET, ret_loc.as_mut_ptr() as u64);
|
||||
let prev_pc = self.push_pc(offset);
|
||||
loop {
|
||||
match self.vm.run().expect("TODO") {
|
||||
hbvm::VmRunOk::End => break,
|
||||
hbvm::VmRunOk::Timer => todo!(),
|
||||
hbvm::VmRunOk::Ecall => todo!(),
|
||||
hbvm::VmRunOk::Breakpoint => todo!(),
|
||||
}
|
||||
}
|
||||
self.pop_pc(prev_pc);
|
||||
|
||||
if let len @ 1..=8 = ret_loc.len() {
|
||||
ret_loc.copy_from_slice(&self.vm.read_reg(reg::RET).0.to_ne_bytes()[..len])
|
||||
}
|
||||
|
||||
self.vm.read_reg(reg::RET).0
|
||||
}
|
||||
|
||||
pub fn reset(&mut self) {
|
||||
let ptr = unsafe { self.stack.as_mut_ptr().cast::<u8>().add(VM_STACK_SIZE) as u64 };
|
||||
self.vm.registers.fill(hbvm::value::Value(0));
|
||||
self.vm.write_reg(reg::STACK_PTR, ptr);
|
||||
self.vm.pc = hbvm::mem::Address::new(self.code.as_ptr() as u64 + HEADER_SIZE as u64);
|
||||
}
|
||||
|
||||
fn push_pc(&mut self, offset: Offset) -> hbvm::mem::Address {
|
||||
let entry = &mut self.code[offset as usize] as *mut _ as _;
|
||||
core::mem::replace(&mut self.vm.pc, hbvm::mem::Address::new(entry))
|
||||
- self.code.as_ptr() as usize
|
||||
}
|
||||
|
||||
fn pop_pc(&mut self, prev_pc: hbvm::mem::Address) {
|
||||
self.vm.pc = prev_pc + self.code.as_ptr() as usize;
|
||||
}
|
||||
|
||||
pub fn clear(&mut self) {
|
||||
self.code.clear();
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn active(&self) -> bool {
|
||||
self.depth != 0
|
||||
}
|
||||
|
||||
pub fn activate(&mut self) {
|
||||
self.depth += 1;
|
||||
}
|
||||
|
||||
pub fn deactivate(&mut self) {
|
||||
self.depth -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Comptime {
|
||||
fn default() -> Self {
|
||||
let mut stack = Box::<[u8; VM_STACK_SIZE]>::new_uninit();
|
||||
let mut vm = hbvm::Vm::default();
|
||||
let ptr = unsafe { stack.as_mut_ptr().cast::<u8>().add(VM_STACK_SIZE) as u64 };
|
||||
vm.write_reg(reg::STACK_PTR, ptr);
|
||||
Self { vm, stack: unsafe { stack.assume_init() }, code: Default::default(), depth: 0 }
|
||||
}
|
||||
}
|
||||
|
||||
const HEADER_SIZE: usize = core::mem::size_of::<AbleOsExecutableHeader>();
|
||||
|
||||
#[repr(packed)]
|
||||
#[expect(dead_code)]
|
||||
pub struct AbleOsExecutableHeader {
|
||||
magic_number: [u8; 3],
|
||||
executable_version: u32,
|
||||
|
||||
code_length: u64,
|
||||
data_length: u64,
|
||||
debug_length: u64,
|
||||
config_length: u64,
|
||||
metadata_length: u64,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn test_run_vm(out: &[u8], output: &mut String) {
|
||||
use core::fmt::Write;
|
||||
|
||||
let mut stack = [0_u64; 1024 * 20];
|
||||
|
||||
let mut vm = unsafe {
|
||||
hbvm::Vm::<_, { 1024 * 100 }>::new(
|
||||
LoggedMem::default(),
|
||||
hbvm::mem::Address::new(out.as_ptr() as u64).wrapping_add(HEADER_SIZE),
|
||||
)
|
||||
};
|
||||
|
||||
vm.write_reg(reg::STACK_PTR, unsafe { stack.as_mut_ptr().add(stack.len()) } as u64);
|
||||
|
||||
let stat = loop {
|
||||
match vm.run() {
|
||||
Ok(hbvm::VmRunOk::End) => break Ok(()),
|
||||
Ok(hbvm::VmRunOk::Ecall) => match vm.read_reg(2).0 {
|
||||
1 => writeln!(output, "ev: Ecall").unwrap(), // compatibility with a test
|
||||
69 => {
|
||||
let [size, align] = [vm.read_reg(3).0 as usize, vm.read_reg(4).0 as usize];
|
||||
let layout = core::alloc::Layout::from_size_align(size, align).unwrap();
|
||||
let ptr = unsafe { alloc::alloc::alloc(layout) };
|
||||
vm.write_reg(1, ptr as u64);
|
||||
}
|
||||
96 => {
|
||||
let [ptr, size, align] = [
|
||||
vm.read_reg(3).0 as usize,
|
||||
vm.read_reg(4).0 as usize,
|
||||
vm.read_reg(5).0 as usize,
|
||||
];
|
||||
|
||||
let layout = core::alloc::Layout::from_size_align(size, align).unwrap();
|
||||
unsafe { alloc::alloc::dealloc(ptr as *mut u8, layout) };
|
||||
}
|
||||
3 => vm.write_reg(1, 42),
|
||||
8 => {}
|
||||
unknown => writeln!(output, "unknown ecall: {unknown:?}").unwrap(),
|
||||
},
|
||||
Ok(hbvm::VmRunOk::Timer) => {
|
||||
writeln!(output, "timed out").unwrap();
|
||||
break Ok(());
|
||||
}
|
||||
Ok(ev) => writeln!(output, "ev: {:?}", ev).unwrap(),
|
||||
Err(e) => break Err(e),
|
||||
}
|
||||
};
|
||||
|
||||
writeln!(output, "code size: {}", out.len() - HEADER_SIZE).unwrap();
|
||||
writeln!(output, "ret: {:?}", vm.read_reg(1).0).unwrap();
|
||||
writeln!(output, "status: {:?}", stat).unwrap();
|
||||
}
|
lang/src/son/hbvm/my_regalloc.rs (new file, 903 lines)
@@ -0,0 +1,903 @@
use {
|
||||
super::{HbvmBackend, Nid, Nodes},
|
||||
crate::{
|
||||
lexer::TokenKind,
|
||||
parser,
|
||||
reg::{self, Reg},
|
||||
son::{debug_assert_matches, Kind, ARG_START, MEM, VOID},
|
||||
ty::{self, Arg, Loc},
|
||||
utils::{BitSet, Vc},
|
||||
Offset, PLoc, Reloc, Sig, TypedReloc, Types,
|
||||
},
|
||||
alloc::{borrow::ToOwned, vec::Vec},
|
||||
core::{mem, ops::Range},
|
||||
hbbytecode::{self as instrs},
|
||||
};
|
||||
|
||||
impl HbvmBackend {
|
||||
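/// Alternative bundle-based register allocator (the call site in `emit_body`
/// is still commented out in favor of `emit_body_code`). Returns
/// `(saved, tail)`: how many callee-saved registers the body needs and whether
/// it is a leaf that only performs `Eca` calls, which `emit_body` uses to size
/// the prologue/epilogue spill area and to decide whether `RET_ADDR` must be
/// preserved.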
pub fn emit_body_code_my(
|
||||
&mut self,
|
||||
nodes: &mut Nodes,
|
||||
sig: Sig,
|
||||
tys: &Types,
|
||||
files: &[parser::Ast],
|
||||
) -> (usize, bool) {
|
||||
let mut fuc = Function::new(nodes, tys, sig);
|
||||
log::info!("{fuc:?}");
|
||||
|
||||
let mut res = mem::take(&mut self.ralloc_my);
|
||||
|
||||
Env::new(&fuc, &fuc.func, &mut res).run();
|
||||
|
||||
'_open_function: {
|
||||
self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, 0));
|
||||
self.emit(instrs::st(reg::RET_ADDR + fuc.tail as u8, reg::STACK_PTR, 0, 0));
|
||||
}
|
||||
|
||||
let reg_offset = if fuc.tail { reg::RET + 12 } else { reg::RET_ADDR + 1 };
|
||||
|
||||
res.node_to_reg.iter_mut().filter(|r| **r != 0).for_each(|r| {
|
||||
*r += reg_offset - 1;
|
||||
if fuc.tail && *r >= reg::RET_ADDR {
|
||||
*r += 1;
|
||||
}
|
||||
});
|
||||
|
||||
let atr = |allc: Nid| res.node_to_reg[allc as usize];
|
||||
|
||||
//for (id, node) in fuc.nodes.iter() {
|
||||
// if node.kind == Kind::Phi {
|
||||
// debug_assert_eq!(atr(node.inputs[1]), atr(node.inputs[2]));
|
||||
// debug_assert_eq!(atr(id), atr(node.inputs[2]));
|
||||
// }
|
||||
//}
|
||||
|
||||
let (retl, mut parama) = tys.parama(sig.ret);
|
||||
let mut typs = sig.args.args();
|
||||
let mut args = fuc.nodes[VOID].outputs[ARG_START..].iter();
|
||||
while let Some(aty) = typs.next(tys) {
|
||||
let Arg::Value(ty) = aty else { continue };
|
||||
let Some(loc) = parama.next(ty, tys) else { continue };
|
||||
let &arg = args.next().unwrap();
|
||||
let (rg, size) = match loc {
|
||||
PLoc::WideReg(rg, size) => (rg, size),
|
||||
PLoc::Reg(rg, size) if ty.loc(tys) == Loc::Stack => (rg, size),
|
||||
PLoc::Reg(r, ..) | PLoc::Ref(r, ..) => {
|
||||
self.emit(instrs::cp(atr(arg), r));
|
||||
continue;
|
||||
}
|
||||
};
|
||||
self.emit(instrs::st(rg, reg::STACK_PTR, self.offsets[arg as usize] as _, size));
|
||||
if fuc.nodes[arg].lock_rc == 0 {
|
||||
self.emit(instrs::addi64(rg, reg::STACK_PTR, self.offsets[arg as usize] as _));
|
||||
}
|
||||
self.emit(instrs::cp(atr(arg), rg));
|
||||
}
|
||||
|
||||
for (i, block) in fuc.func.blocks.iter().enumerate() {
|
||||
self.offsets[block.entry as usize] = self.code.len() as _;
|
||||
for &nid in &fuc.func.instrs[block.range.clone()] {
|
||||
if nid == VOID {
|
||||
continue;
|
||||
}
|
||||
|
||||
let node = &fuc.nodes[nid];
|
||||
|
||||
let extend = |base: ty::Id, dest: ty::Id, from: Nid, to: Nid| {
|
||||
let (bsize, dsize) = (tys.size_of(base), tys.size_of(dest));
|
||||
debug_assert!(bsize <= 8, "{}", ty::Display::new(tys, files, base));
|
||||
debug_assert!(dsize <= 8, "{}", ty::Display::new(tys, files, dest));
|
||||
if bsize == dsize {
|
||||
return Default::default();
|
||||
}
|
||||
match (base.is_signed(), dest.is_signed()) {
|
||||
(true, true) => {
|
||||
let op = [instrs::sxt8, instrs::sxt16, instrs::sxt32]
|
||||
[bsize.ilog2() as usize];
|
||||
op(atr(to), atr(from))
|
||||
}
|
||||
_ => {
|
||||
let mask = (1u64 << (bsize * 8)) - 1;
|
||||
instrs::andi(atr(to), atr(from), mask)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
match node.kind {
|
||||
Kind::If => {
|
||||
let &[_, cnd] = node.inputs.as_slice() else { unreachable!() };
|
||||
if let Kind::BinOp { op } = fuc.nodes[cnd].kind
|
||||
&& let Some((op, swapped)) =
|
||||
op.cond_op(fuc.nodes[fuc.nodes[cnd].inputs[1]].ty)
|
||||
{
|
||||
let &[_, lhs, rhs] = fuc.nodes[cnd].inputs.as_slice() else {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
self.emit(extend(fuc.nodes[lhs].ty, fuc.nodes[lhs].ty.extend(), 0, 0));
|
||||
self.emit(extend(fuc.nodes[rhs].ty, fuc.nodes[rhs].ty.extend(), 1, 1));
|
||||
|
||||
let rel = Reloc::new(self.code.len(), 3, 2);
|
||||
self.jump_relocs.push((node.outputs[!swapped as usize], rel));
|
||||
self.emit(op(atr(lhs), atr(rhs), 0));
|
||||
} else {
|
||||
self.emit(extend(fuc.nodes[cnd].ty, fuc.nodes[cnd].ty.extend(), 0, 0));
|
||||
let rel = Reloc::new(self.code.len(), 3, 2);
|
||||
self.jump_relocs.push((node.outputs[0], rel));
|
||||
self.emit(instrs::jne(atr(cnd), reg::ZERO, 0));
|
||||
}
|
||||
}
|
||||
Kind::Loop | Kind::Region => {
|
||||
if (mem::replace(&mut fuc.backrefs[nid as usize], u16::MAX) != u16::MAX)
|
||||
^ (node.kind == Kind::Loop)
|
||||
{
|
||||
let index = (node.kind == Kind::Loop) as usize + 1;
|
||||
for &out in node.outputs.iter() {
|
||||
if fuc.nodes[out].is_data_phi()
|
||||
&& atr(out) != atr(fuc.nodes[out].inputs[index])
|
||||
{
|
||||
self.emit(instrs::cp(
|
||||
atr(out),
|
||||
atr(fuc.nodes[out].inputs[index]),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let rel = Reloc::new(self.code.len(), 1, 4);
|
||||
self.jump_relocs.push((nid, rel));
|
||||
self.emit(instrs::jmp(0));
|
||||
} else {
|
||||
let index = (node.kind != Kind::Loop) as usize + 1;
|
||||
for &out in node.outputs.iter() {
|
||||
if fuc.nodes[out].is_data_phi()
|
||||
&& atr(out) != atr(fuc.nodes[out].inputs[index])
|
||||
{
|
||||
self.emit(instrs::cp(
|
||||
atr(out),
|
||||
atr(fuc.nodes[out].inputs[index]),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Kind::Return => {
|
||||
let &[_, mut ret, ..] = node.inputs.as_slice() else { unreachable!() };
|
||||
match retl {
|
||||
None => {}
|
||||
Some(PLoc::Reg(r, _)) if sig.ret.loc(tys) == Loc::Reg => {
|
||||
self.emit(instrs::cp(r, atr(ret)));
|
||||
}
|
||||
Some(PLoc::Reg(r, size)) | Some(PLoc::WideReg(r, size)) => {
|
||||
ret = match fuc.nodes[ret].kind {
|
||||
Kind::Load { .. } => fuc.nodes[ret].inputs[1],
|
||||
_ => ret,
|
||||
};
|
||||
self.emit(instrs::ld(r, atr(ret), 0, size))
|
||||
}
|
||||
Some(PLoc::Ref(_, size)) => {
|
||||
ret = match fuc.nodes[ret].kind {
|
||||
Kind::Load { .. } => fuc.nodes[ret].inputs[1],
|
||||
_ => ret,
|
||||
};
|
||||
|
||||
let [src, dst] = [atr(ret), atr(MEM)];
|
||||
if let Ok(size) = u16::try_from(size) {
|
||||
self.emit(instrs::bmc(src, dst, size));
|
||||
} else {
|
||||
for _ in 0..size / u16::MAX as u32 {
|
||||
self.emit(instrs::bmc(src, dst, u16::MAX));
|
||||
self.emit(instrs::addi64(src, src, u16::MAX as _));
|
||||
self.emit(instrs::addi64(dst, dst, u16::MAX as _));
|
||||
}
|
||||
self.emit(instrs::bmc(src, dst, size as u16));
|
||||
self.emit(instrs::addi64(src, src, size.wrapping_neg() as _));
|
||||
self.emit(instrs::addi64(dst, dst, size.wrapping_neg() as _));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if i != fuc.func.blocks.len() - 1 {
|
||||
let rel = Reloc::new(self.code.len(), 1, 4);
|
||||
self.ret_relocs.push(rel);
|
||||
self.emit(instrs::jmp(0));
|
||||
}
|
||||
}
|
||||
Kind::Die => self.emit(instrs::un()),
|
||||
Kind::CInt { value } if node.ty.is_float() => {
|
||||
self.emit(match node.ty {
|
||||
ty::Id::F32 => instrs::li32(
|
||||
atr(nid),
|
||||
(f64::from_bits(value as _) as f32).to_bits(),
|
||||
),
|
||||
ty::Id::F64 => instrs::li64(atr(nid), value as _),
|
||||
_ => unreachable!(),
|
||||
});
|
||||
}
|
||||
Kind::CInt { value } => self.emit(match tys.size_of(node.ty) {
|
||||
1 => instrs::li8(atr(nid), value as _),
|
||||
2 => instrs::li16(atr(nid), value as _),
|
||||
4 => instrs::li32(atr(nid), value as _),
|
||||
_ => instrs::li64(atr(nid), value as _),
|
||||
}),
|
||||
Kind::UnOp { op } => {
|
||||
let op = op
|
||||
.unop(node.ty, fuc.nodes[node.inputs[1]].ty)
|
||||
.expect("TODO: unary operator not supported");
|
||||
self.emit(op(atr(nid), atr(node.inputs[1])));
|
||||
}
|
||||
Kind::BinOp { .. } if node.lock_rc != 0 => {}
|
||||
Kind::BinOp { op } => {
|
||||
let &[.., lhs, rhs] = node.inputs.as_slice() else { unreachable!() };
|
||||
|
||||
if let Kind::CInt { value } = fuc.nodes[rhs].kind
|
||||
&& fuc.nodes[rhs].lock_rc != 0
|
||||
&& let Some(op) = op.imm_binop(node.ty)
|
||||
{
|
||||
self.emit(op(atr(nid), atr(lhs), value as _));
|
||||
} else if let Some(op) =
|
||||
op.binop(node.ty).or(op.float_cmp(fuc.nodes[lhs].ty))
|
||||
{
|
||||
self.emit(op(atr(nid), atr(lhs), atr(rhs)));
|
||||
} else if let Some(against) = op.cmp_against() {
|
||||
let op_ty = fuc.nodes[lhs].ty;
|
||||
|
||||
self.emit(extend(fuc.nodes[lhs].ty, fuc.nodes[lhs].ty.extend(), 0, 0));
|
||||
self.emit(extend(fuc.nodes[rhs].ty, fuc.nodes[rhs].ty.extend(), 1, 1));
|
||||
|
||||
if op_ty.is_float() && matches!(op, TokenKind::Le | TokenKind::Ge) {
|
||||
let opop = match op {
|
||||
TokenKind::Le => TokenKind::Gt,
|
||||
TokenKind::Ge => TokenKind::Lt,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
let op_fn = opop.float_cmp(op_ty).unwrap();
|
||||
self.emit(op_fn(atr(nid), atr(lhs), atr(rhs)));
|
||||
self.emit(instrs::not(atr(nid), atr(nid)));
|
||||
} else if op_ty.is_integer() {
|
||||
let op_fn =
|
||||
if op_ty.is_signed() { instrs::cmps } else { instrs::cmpu };
|
||||
self.emit(op_fn(atr(nid), atr(lhs), atr(rhs)));
|
||||
self.emit(instrs::cmpui(atr(nid), atr(nid), against));
|
||||
if matches!(op, TokenKind::Eq | TokenKind::Lt | TokenKind::Gt) {
|
||||
self.emit(instrs::not(atr(nid), atr(nid)));
|
||||
}
|
||||
} else {
|
||||
todo!("unhandled operator: {op}");
|
||||
}
|
||||
} else {
|
||||
todo!("unhandled operator: {op}");
|
||||
}
|
||||
}
|
||||
Kind::Call { args, func } => {
|
||||
let (ret, mut parama) = tys.parama(node.ty);
|
||||
let mut args = args.args();
|
||||
let mut allocs = node.inputs[1..].iter();
|
||||
while let Some(arg) = args.next(tys) {
|
||||
let Arg::Value(ty) = arg else { continue };
|
||||
let Some(loc) = parama.next(ty, tys) else { continue };
|
||||
|
||||
let mut arg = *allocs.next().unwrap();
|
||||
let (rg, size) = match loc {
|
||||
PLoc::Reg(rg, size) if ty.loc(tys) == Loc::Stack => (rg, size),
|
||||
PLoc::WideReg(rg, size) => (rg, size),
|
||||
PLoc::Ref(r, ..) => {
|
||||
arg = match fuc.nodes[arg].kind {
|
||||
Kind::Load { .. } => fuc.nodes[arg].inputs[1],
|
||||
_ => arg,
|
||||
};
|
||||
self.emit(instrs::cp(r, atr(arg)));
|
||||
continue;
|
||||
}
|
||||
PLoc::Reg(r, ..) => {
|
||||
self.emit(instrs::cp(r, atr(arg)));
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
arg = match fuc.nodes[arg].kind {
|
||||
Kind::Load { .. } => fuc.nodes[arg].inputs[1],
|
||||
_ => arg,
|
||||
};
|
||||
self.emit(instrs::ld(rg, atr(arg), 0, size));
|
||||
}
|
||||
|
||||
debug_assert!(
|
||||
!matches!(ret, Some(PLoc::Ref(..))) || allocs.next().is_some()
|
||||
);
|
||||
|
||||
if func == ty::Func::ECA {
|
||||
self.emit(instrs::eca());
|
||||
} else {
|
||||
self.relocs.push(TypedReloc {
|
||||
target: ty::Kind::Func(func).compress(),
|
||||
reloc: Reloc::new(self.code.len(), 3, 4),
|
||||
});
|
||||
self.emit(instrs::jal(reg::RET_ADDR, reg::ZERO, 0));
|
||||
}
|
||||
|
||||
match ret {
|
||||
Some(PLoc::WideReg(r, size)) => {
|
||||
debug_assert_eq!(
|
||||
fuc.nodes[*node.inputs.last().unwrap()].kind,
|
||||
Kind::Stck
|
||||
);
|
||||
let stck = self.offsets[*node.inputs.last().unwrap() as usize];
|
||||
self.emit(instrs::st(r, reg::STACK_PTR, stck as _, size));
|
||||
}
|
||||
Some(PLoc::Reg(r, size)) if node.ty.loc(tys) == Loc::Stack => {
|
||||
debug_assert_eq!(
|
||||
fuc.nodes[*node.inputs.last().unwrap()].kind,
|
||||
Kind::Stck
|
||||
);
|
||||
let stck = self.offsets[*node.inputs.last().unwrap() as usize];
|
||||
self.emit(instrs::st(r, reg::STACK_PTR, stck as _, size));
|
||||
}
|
||||
Some(PLoc::Reg(r, ..)) => self.emit(instrs::cp(atr(nid), r)),
|
||||
None | Some(PLoc::Ref(..)) => {}
|
||||
}
|
||||
}
|
||||
Kind::Global { global } => {
|
||||
let reloc = Reloc::new(self.code.len(), 3, 4);
|
||||
self.relocs.push(TypedReloc {
|
||||
target: ty::Kind::Global(global).compress(),
|
||||
reloc,
|
||||
});
|
||||
self.emit(instrs::lra(atr(nid), 0, 0));
|
||||
}
|
||||
Kind::Stck => {
|
||||
let base = reg::STACK_PTR;
|
||||
let offset = self.offsets[nid as usize];
|
||||
self.emit(instrs::addi64(atr(nid), base, offset as _));
|
||||
}
|
||||
Kind::Load => {
|
||||
let mut region = node.inputs[1];
|
||||
let mut offset = 0;
|
||||
if fuc.nodes[region].kind == (Kind::BinOp { op: TokenKind::Add })
|
||||
&& let Kind::CInt { value } =
|
||||
fuc.nodes[fuc.nodes[region].inputs[2]].kind
|
||||
{
|
||||
region = fuc.nodes[region].inputs[1];
|
||||
offset = value as Offset;
|
||||
}
|
||||
let size = tys.size_of(node.ty);
|
||||
if node.ty.loc(tys) != Loc::Stack {
|
||||
let (base, offset) = match fuc.nodes[region].kind {
|
||||
Kind::Stck => {
|
||||
(reg::STACK_PTR, self.offsets[region as usize] + offset)
|
||||
}
|
||||
_ => (atr(region), offset),
|
||||
};
|
||||
self.emit(instrs::ld(atr(nid), base, offset as _, size as _));
|
||||
}
|
||||
}
|
||||
Kind::Stre if node.inputs[1] == VOID => {}
|
||||
Kind::Stre => {
|
||||
let mut region = node.inputs[2];
|
||||
let mut offset = 0;
|
||||
let size = u16::try_from(tys.size_of(node.ty)).expect("TODO");
|
||||
if fuc.nodes[region].kind == (Kind::BinOp { op: TokenKind::Add })
|
||||
&& let Kind::CInt { value } =
|
||||
fuc.nodes[fuc.nodes[region].inputs[2]].kind
|
||||
&& node.ty.loc(tys) == Loc::Reg
|
||||
{
|
||||
region = fuc.nodes[region].inputs[1];
|
||||
offset = value as Offset;
|
||||
}
|
||||
let nd = &fuc.nodes[region];
|
||||
let value = node.inputs[1];
|
||||
let (base, offset, src) = match nd.kind {
|
||||
Kind::Stck if node.ty.loc(tys) == Loc::Reg => {
|
||||
(reg::STACK_PTR, self.offsets[region as usize] + offset, value)
|
||||
}
|
||||
_ => (atr(region), offset, match fuc.nodes[value].kind {
|
||||
Kind::Load { .. } => fuc.nodes[value].inputs[1],
|
||||
_ => value,
|
||||
}),
|
||||
};
|
||||
|
||||
match node.ty.loc(tys) {
|
||||
Loc::Reg => self.emit(instrs::st(atr(src), base, offset as _, size)),
|
||||
Loc::Stack => {
|
||||
debug_assert_eq!(offset, 0);
|
||||
self.emit(instrs::bmc(atr(src), base, size))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Kind::Mem => self.emit(instrs::cp(atr(MEM), reg::RET)),
|
||||
Kind::Arg => {}
|
||||
e @ (Kind::Start
|
||||
| Kind::Entry
|
||||
| Kind::End
|
||||
| Kind::Loops
|
||||
| Kind::Then
|
||||
| Kind::Else
|
||||
| Kind::Phi
|
||||
| Kind::Assert { .. }) => unreachable!("{e:?}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.ralloc_my = res;
|
||||
|
||||
let bundle_count = self.ralloc_my.bundles.len() + (reg_offset as usize);
|
||||
(
|
||||
if fuc.tail {
|
||||
bundle_count.saturating_sub(reg::RET_ADDR as _)
|
||||
} else {
|
||||
assert!(bundle_count < reg::STACK_PTR as usize, "TODO: spill memory");
|
||||
self.ralloc_my.bundles.len()
|
||||
},
|
||||
fuc.tail,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Function<'a> {
|
||||
sig: Sig,
|
||||
tail: bool,
|
||||
backrefs: Vec<u16>,
|
||||
nodes: &'a mut Nodes,
|
||||
tys: &'a Types,
|
||||
visited: BitSet,
|
||||
func: Func,
|
||||
}
|
||||
|
||||
impl Function<'_> {
|
||||
fn vreg_count(&self) -> usize {
|
||||
self.nodes.values.len()
|
||||
}
|
||||
|
||||
fn uses_of(&self, nid: Nid, buf: &mut Vec<Nid>) {
|
||||
if self.nodes[nid].kind.is_cfg() && !matches!(self.nodes[nid].kind, Kind::Call { .. }) {
|
||||
return;
|
||||
}
|
||||
|
||||
self.nodes[nid]
|
||||
.outputs
|
||||
.iter()
|
||||
.filter(|&&n| self.nodes.is_data_dep(nid, n))
|
||||
.collect_into(buf);
|
||||
}
|
||||
|
||||
fn phi_inputs_of(&self, nid: Nid, buf: &mut Vec<Nid>) {
|
||||
match self.nodes[nid].kind {
|
||||
Kind::Region => {
|
||||
for &inp in self.nodes[nid].outputs.as_slice() {
|
||||
if self.nodes[inp].is_data_phi() {
|
||||
buf.extend(&self.nodes[inp].inputs[1..]);
|
||||
buf.push(inp);
|
||||
}
|
||||
}
|
||||
}
|
||||
Kind::Loop => {
|
||||
for &inp in self.nodes[nid].outputs.as_slice() {
|
||||
if self.nodes[inp].is_data_phi() {
|
||||
buf.push(self.nodes[inp].inputs[1]);
|
||||
buf.push(inp);
|
||||
buf.push(self.nodes[inp].inputs[2]);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn instr_of(&self, nid: Nid) -> Option<Nid> {
|
||||
if self.nodes[nid].kind == Kind::Phi || self.nodes[nid].lock_rc != 0 {
|
||||
return None;
|
||||
}
|
||||
debug_assert_ne!(self.backrefs[nid as usize], Nid::MAX, "{:?}", self.nodes[nid]);
|
||||
Some(self.backrefs[nid as usize])
|
||||
}
|
||||
|
||||
fn block_of(&self, nid: Nid) -> Nid {
|
||||
debug_assert!(self.nodes[nid].kind.starts_basic_block());
|
||||
self.backrefs[nid as usize]
|
||||
}
|
||||
|
||||
fn idom_of(&self, mut nid: Nid) -> Nid {
|
||||
while !self.nodes[nid].kind.starts_basic_block() {
|
||||
nid = self.nodes.idom(nid);
|
||||
}
|
||||
nid
|
||||
}
|
||||
|
||||
fn use_block(&self, inst: Nid, uinst: Nid) -> Nid {
|
||||
let mut block = self.nodes.use_block(inst, uinst);
|
||||
while !self.nodes[block].kind.starts_basic_block() {
|
||||
block = self.nodes.idom(block);
|
||||
}
|
||||
block
|
||||
}
|
||||
}
|
||||
|
||||
impl core::fmt::Debug for Function<'_> {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
for block in &self.func.blocks {
|
||||
writeln!(f, "{:?}", self.nodes[block.entry].kind)?;
|
||||
for &instr in &self.func.instrs[block.range.clone()] {
|
||||
writeln!(f, "{:?}", self.nodes[instr].kind)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Function<'a> {
|
||||
fn new(nodes: &'a mut Nodes, tys: &'a Types, sig: Sig) -> Self {
|
||||
let mut s = Self {
|
||||
backrefs: vec![u16::MAX; nodes.values.len()],
|
||||
tail: true,
|
||||
nodes,
|
||||
tys,
|
||||
sig,
|
||||
visited: Default::default(),
|
||||
func: Default::default(),
|
||||
};
|
||||
s.visited.clear(s.nodes.values.len());
|
||||
s.emit_node(VOID);
|
||||
s
|
||||
}
|
||||
|
||||
fn add_block(&mut self, entry: Nid) {
|
||||
self.func
|
||||
.blocks
|
||||
.push(Block { range: self.func.instrs.len()..self.func.instrs.len(), entry });
|
||||
self.backrefs[entry as usize] = self.func.blocks.len() as u16 - 1;
|
||||
}
|
||||
|
||||
fn close_block(&mut self, exit: Nid) {
|
||||
if !matches!(self.nodes[exit].kind, Kind::Loop | Kind::Region) {
|
||||
self.add_instr(exit);
|
||||
} else {
|
||||
self.func.instrs.push(exit);
|
||||
}
|
||||
let prev = self.func.blocks.last_mut().unwrap();
|
||||
prev.range.end = self.func.instrs.len();
|
||||
}
|
||||
|
||||
fn add_instr(&mut self, nid: Nid) {
|
||||
debug_assert_ne!(self.nodes[nid].kind, Kind::Loop);
|
||||
self.backrefs[nid as usize] = self.func.instrs.len() as u16;
|
||||
self.func.instrs.push(nid);
|
||||
}
|
||||
|
||||
fn emit_node(&mut self, nid: Nid) {
|
||||
if matches!(self.nodes[nid].kind, Kind::Region | Kind::Loop) {
|
||||
match (self.nodes[nid].kind, self.visited.set(nid)) {
|
||||
(Kind::Loop, false) | (Kind::Region, true) => {
|
||||
self.close_block(nid);
|
||||
return;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
} else if !self.visited.set(nid) {
|
||||
return;
|
||||
}
|
||||
|
||||
if self.nodes.is_never_used(nid, self.tys) {
|
||||
self.nodes.lock(nid);
|
||||
return;
|
||||
}
|
||||
|
||||
let mut node = self.nodes[nid].clone();
|
||||
match node.kind {
|
||||
Kind::Start => {
|
||||
debug_assert_matches!(self.nodes[node.outputs[0]].kind, Kind::Entry);
|
||||
self.add_block(VOID);
|
||||
self.emit_node(node.outputs[0])
|
||||
}
|
||||
Kind::If => {
|
||||
let &[_, cond] = node.inputs.as_slice() else { unreachable!() };
|
||||
let &[mut then, mut else_] = node.outputs.as_slice() else { unreachable!() };
|
||||
|
||||
if let Kind::BinOp { op } = self.nodes[cond].kind
|
||||
&& let Some((_, swapped)) = op.cond_op(node.ty)
|
||||
&& swapped
|
||||
{
|
||||
mem::swap(&mut then, &mut else_);
|
||||
}
|
||||
|
||||
self.close_block(nid);
|
||||
self.emit_node(then);
|
||||
self.emit_node(else_);
|
||||
}
|
||||
Kind::Region | Kind::Loop => {
|
||||
self.close_block(nid);
|
||||
self.add_block(nid);
|
||||
self.reschedule_block(nid, &mut node.outputs);
|
||||
for o in node.outputs.into_iter().rev() {
|
||||
self.emit_node(o);
|
||||
}
|
||||
}
|
||||
Kind::Return | Kind::Die => {
|
||||
self.close_block(nid);
|
||||
self.emit_node(node.outputs[0]);
|
||||
}
|
||||
Kind::Entry => {
|
||||
let (ret, mut parama) = self.tys.parama(self.sig.ret);
|
||||
let mut typs = self.sig.args.args();
|
||||
#[expect(clippy::unnecessary_to_owned)]
|
||||
let mut args = self.nodes[VOID].outputs[ARG_START..].to_owned().into_iter();
|
||||
while let Some(ty) = typs.next_value(self.tys) {
|
||||
let arg = args.next().unwrap();
|
||||
debug_assert_eq!(self.nodes[arg].kind, Kind::Arg);
|
||||
match parama.next(ty, self.tys) {
|
||||
None => {}
|
||||
Some(_) => self.add_instr(arg),
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(PLoc::Ref(..)) = ret {
|
||||
self.add_instr(MEM);
|
||||
}
|
||||
|
||||
self.reschedule_block(nid, &mut node.outputs);
|
||||
for o in node.outputs.into_iter().rev() {
|
||||
self.emit_node(o);
|
||||
}
|
||||
}
|
||||
Kind::Then | Kind::Else => {
|
||||
self.add_block(nid);
|
||||
self.reschedule_block(nid, &mut node.outputs);
|
||||
for o in node.outputs.into_iter().rev() {
|
||||
self.emit_node(o);
|
||||
}
|
||||
}
|
||||
Kind::Call { func, .. } => {
|
||||
self.tail &= func == ty::Func::ECA;
|
||||
|
||||
self.add_instr(nid);
|
||||
|
||||
self.reschedule_block(nid, &mut node.outputs);
|
||||
for o in node.outputs.into_iter().rev() {
|
||||
if self.nodes[o].inputs[0] == nid
|
||||
|| (matches!(self.nodes[o].kind, Kind::Loop | Kind::Region)
|
||||
&& self.nodes[o].inputs[1] == nid)
|
||||
{
|
||||
self.emit_node(o);
|
||||
}
|
||||
}
|
||||
}
|
||||
Kind::CInt { .. }
|
||||
| Kind::BinOp { .. }
|
||||
| Kind::UnOp { .. }
|
||||
| Kind::Global { .. }
|
||||
| Kind::Load { .. }
|
||||
| Kind::Stre
|
||||
| Kind::Stck => self.add_instr(nid),
|
||||
Kind::End | Kind::Phi | Kind::Arg | Kind::Mem | Kind::Loops => {}
|
||||
Kind::Assert { .. } => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn reschedule_block(&mut self, from: Nid, outputs: &mut Vc) {
|
||||
let from = Some(&from);
|
||||
let mut buf = Vec::with_capacity(outputs.len());
|
||||
let mut seen = BitSet::default();
|
||||
seen.clear(self.nodes.values.len());
|
||||
|
||||
for &o in outputs.iter() {
|
||||
if !self.nodes.is_cfg(o) {
|
||||
continue;
|
||||
}
|
||||
|
||||
seen.set(o);
|
||||
|
||||
let mut cursor = buf.len();
|
||||
buf.push(o);
|
||||
while let Some(&n) = buf.get(cursor) {
|
||||
for &i in &self.nodes[n].inputs[1..] {
|
||||
if from == self.nodes[i].inputs.first()
|
||||
&& self.nodes[i]
|
||||
.outputs
|
||||
.iter()
|
||||
.all(|&o| self.nodes[o].inputs.first() != from || seen.get(o))
|
||||
&& seen.set(i)
|
||||
{
|
||||
buf.push(i);
|
||||
}
|
||||
}
|
||||
cursor += 1;
|
||||
}
|
||||
}
|
||||
|
||||
for &o in outputs.iter() {
|
||||
if !seen.set(o) {
|
||||
continue;
|
||||
}
|
||||
let mut cursor = buf.len();
|
||||
buf.push(o);
|
||||
while let Some(&n) = buf.get(cursor) {
|
||||
for &i in &self.nodes[n].inputs[1..] {
|
||||
if from == self.nodes[i].inputs.first()
|
||||
&& self.nodes[i]
|
||||
.outputs
|
||||
.iter()
|
||||
.all(|&o| self.nodes[o].inputs.first() != from || seen.get(o))
|
||||
&& seen.set(i)
|
||||
{
|
||||
buf.push(i);
|
||||
}
|
||||
}
|
||||
cursor += 1;
|
||||
}
|
||||
}
|
||||
|
||||
debug_assert!(
|
||||
outputs.len() == buf.len() || outputs.len() == buf.len() + 1,
|
||||
"{:?} {:?}",
|
||||
outputs,
|
||||
buf
|
||||
);
|
||||
|
||||
if buf.len() + 1 == outputs.len() {
|
||||
outputs.remove(outputs.len() - 1);
|
||||
}
|
||||
outputs.copy_from_slice(&buf);
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Env<'a> {
|
||||
ctx: &'a Function<'a>,
|
||||
func: &'a Func,
|
||||
res: &'a mut Res,
|
||||
}
|
||||
|
||||
impl<'a> Env<'a> {
|
||||
pub fn new(ctx: &'a Function<'a>, func: &'a Func, res: &'a mut Res) -> Self {
|
||||
Self { ctx, func, res }
|
||||
}
|
||||
|
||||
pub fn run(&mut self) {
|
||||
self.res.bundles.clear();
|
||||
self.res.node_to_reg.clear();
|
||||
self.res.node_to_reg.resize(self.ctx.vreg_count(), 0);
|
||||
|
||||
debug_assert!(self.res.dfs_buf.is_empty());
|
||||
debug_assert!(self.res.use_buf.is_empty());
|
||||
debug_assert!(self.res.phi_input_buf.is_empty());
|
||||
|
||||
let mut bundle = Bundle::new(self.func.instrs.len());
|
||||
let mut visited = BitSet::with_capacity(self.ctx.nodes.values.len());
|
||||
let mut use_buf = mem::take(&mut self.res.use_buf);
|
||||
|
||||
let mut phi_input_buf = mem::take(&mut self.res.phi_input_buf);
|
||||
for block in &self.func.blocks {
|
||||
self.ctx.phi_inputs_of(block.entry, &mut phi_input_buf);
|
||||
for param in phi_input_buf.drain(..) {
|
||||
if !visited.set(param) {
|
||||
continue;
|
||||
}
|
||||
self.append_bundle(param, &mut bundle, &mut use_buf);
|
||||
}
|
||||
}
|
||||
self.res.phi_input_buf = phi_input_buf;
|
||||
|
||||
for &inst in &self.func.instrs {
|
||||
if visited.get(inst) || inst == 0 {
|
||||
continue;
|
||||
}
|
||||
self.append_bundle(inst, &mut bundle, &mut use_buf);
|
||||
}
|
||||
|
||||
self.res.use_buf = use_buf;
|
||||
}
|
||||
|
||||
fn append_bundle(&mut self, inst: Nid, bundle: &mut Bundle, use_buf: &mut Vec<Nid>) {
|
||||
let mut dom = self.ctx.idom_of(inst);
|
||||
if self.ctx.nodes[dom].kind == Kind::Loop && self.ctx.nodes[inst].kind == Kind::Phi {
|
||||
dom = self.ctx.nodes.idom(dom);
|
||||
dom = self.ctx.idom_of(dom);
|
||||
}
|
||||
self.ctx.uses_of(inst, use_buf);
|
||||
for uinst in use_buf.drain(..) {
|
||||
let cursor = self.ctx.use_block(inst, uinst);
|
||||
self.reverse_cfg_dfs(cursor, dom, |_, n, b| {
|
||||
let mut range = b.range.clone();
|
||||
range.start =
|
||||
range.start.max(self.ctx.instr_of(inst).map_or(0, |n| n + 1) as usize);
|
||||
range.end = range.end.min(
|
||||
self.ctx
|
||||
.instr_of(uinst)
|
||||
.filter(|_| self.ctx.nodes.loop_depth(dom) == self.ctx.nodes.loop_depth(n))
|
||||
.map_or(Nid::MAX, |n| n + 1) as usize,
|
||||
);
|
||||
|
||||
bundle.add(range);
|
||||
});
|
||||
}
|
||||
|
||||
match self.res.bundles.iter_mut().enumerate().find(|(_, b)| !b.overlaps(bundle)) {
|
||||
Some((i, other)) => {
|
||||
other.merge(bundle);
|
||||
bundle.clear();
|
||||
self.res.node_to_reg[inst as usize] = i as Reg + 1;
|
||||
}
|
||||
None => {
|
||||
self.res.bundles.push(mem::replace(bundle, Bundle::new(self.func.instrs.len())));
|
||||
self.res.node_to_reg[inst as usize] = self.res.bundles.len() as Reg;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn reverse_cfg_dfs(
|
||||
&mut self,
|
||||
from: Nid,
|
||||
until: Nid,
|
||||
mut each: impl FnMut(&mut Self, Nid, &Block),
|
||||
) {
|
||||
debug_assert!(self.res.dfs_buf.is_empty());
|
||||
self.res.dfs_buf.push(from);
|
||||
self.res.dfs_seem.clear(self.ctx.nodes.values.len());
|
||||
|
||||
while let Some(nid) = self.res.dfs_buf.pop() {
|
||||
each(self, nid, &self.func.blocks[self.ctx.block_of(nid) as usize]);
|
||||
if nid == until {
|
||||
continue;
|
||||
}
|
||||
match self.ctx.nodes[nid].kind {
|
||||
Kind::Then | Kind::Else | Kind::Region | Kind::Loop => {
|
||||
for &n in self.ctx.nodes[nid].inputs.iter() {
|
||||
let d = self.ctx.idom_of(n);
|
||||
if self.res.dfs_seem.set(d) {
|
||||
self.res.dfs_buf.push(d);
|
||||
}
|
||||
}
|
||||
}
|
||||
Kind::Start => {}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Res {
|
||||
pub bundles: Vec<Bundle>,
|
||||
pub node_to_reg: Vec<Reg>,
|
||||
use_buf: Vec<Nid>,
|
||||
phi_input_buf: Vec<Nid>,
|
||||
dfs_buf: Vec<Nid>,
|
||||
dfs_seem: BitSet,
|
||||
}
|
||||
|
||||
pub struct Bundle {
|
||||
taken: Vec<bool>,
|
||||
}
|
||||
|
||||
impl Bundle {
|
||||
fn new(size: usize) -> Self {
|
||||
Self { taken: vec![false; size] }
|
||||
}
|
||||
|
||||
fn add(&mut self, range: Range<usize>) {
|
||||
self.taken[range].fill(true);
|
||||
}
|
||||
|
||||
fn overlaps(&self, other: &Self) -> bool {
|
||||
self.taken.iter().zip(other.taken.iter()).any(|(a, b)| a & b)
|
||||
}
|
||||
|
||||
fn merge(&mut self, other: &Self) {
|
||||
debug_assert!(!self.overlaps(other));
|
||||
self.taken.iter_mut().zip(other.taken.iter()).for_each(|(a, b)| *a |= *b);
|
||||
}
|
||||
|
||||
fn clear(&mut self) {
|
||||
self.taken.fill(false);
|
||||
}
|
||||
}
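// Illustrative sketch (not part of the original diff): a `Bundle` is an
// occupancy mask over instruction indexes; values may share one bundle (and
// thus one register) only while their live ranges never overlap.
#[cfg(test)]
#[test]
fn bundle_packing_sketch() {
    let mut a = Bundle::new(8);
    a.add(0..3); // live over instructions 0..3

    let mut b = Bundle::new(8);
    b.add(4..6); // disjoint from `a`, so it can be packed into the same bundle
    assert!(!a.overlaps(&b));
    a.merge(&b);

    let mut c = Bundle::new(8);
    c.add(2..5); // overlaps the packed ranges, so it needs a fresh bundle
    assert!(a.overlaps(&c));
}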
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Func {
|
||||
pub blocks: Vec<Block>,
|
||||
pub instrs: Vec<Nid>,
|
||||
}
|
||||
|
||||
pub struct Block {
|
||||
pub range: Range<usize>,
|
||||
pub entry: Nid,
|
||||
}
|
lang/src/son/hbvm/their_regalloc.rs (new file, 1007 lines)
(diff suppressed because it is too large)
lang/src/utils.rs (new file, 629 lines)
@@ -0,0 +1,629 @@
#![expect(dead_code)]
|
||||
use {
|
||||
alloc::alloc,
|
||||
core::{
|
||||
alloc::Layout,
|
||||
fmt::Debug,
|
||||
hint::unreachable_unchecked,
|
||||
marker::PhantomData,
|
||||
mem::MaybeUninit,
|
||||
ops::{Deref, DerefMut, Not},
|
||||
ptr::Unique,
|
||||
},
|
||||
};
|
||||
|
||||
type Nid = u16;
|
||||
|
||||
pub union BitSet {
|
||||
inline: usize,
|
||||
alloced: Unique<AllocedBitSet>,
|
||||
}
|
||||
|
||||
impl Debug for BitSet {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
f.debug_list().entries(self.iter()).finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for BitSet {
|
||||
fn clone(&self) -> Self {
|
||||
if self.is_inline() {
|
||||
Self { inline: unsafe { self.inline } }
|
||||
} else {
|
||||
let (data, _) = self.data_and_len();
|
||||
let (layout, _) = Self::layout(data.len());
|
||||
unsafe {
|
||||
let ptr = alloc::alloc(layout);
|
||||
ptr.copy_from_nonoverlapping(self.alloced.as_ptr() as _, layout.size());
|
||||
Self { alloced: Unique::new_unchecked(ptr as _) }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for BitSet {
|
||||
fn drop(&mut self) {
|
||||
if !self.is_inline() {
|
||||
unsafe {
|
||||
let cap = self.alloced.as_ref().cap;
|
||||
alloc::dealloc(self.alloced.as_ptr() as _, Self::layout(cap).0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for BitSet {
|
||||
fn default() -> Self {
|
||||
Self { inline: Self::FLAG }
|
||||
}
|
||||
}
|
||||
|
||||
impl BitSet {
|
||||
const FLAG: usize = 1 << (Self::UNIT - 1);
|
||||
const INLINE_ELEMS: usize = Self::UNIT - 1;
|
||||
const UNIT: usize = core::mem::size_of::<usize>() * 8;
|
||||
|
||||
pub fn with_capacity(len: usize) -> Self {
|
||||
let mut s = Self::default();
|
||||
s.reserve(len);
|
||||
s
|
||||
}
|
||||
|
||||
fn is_inline(&self) -> bool {
|
||||
unsafe { self.inline & Self::FLAG != 0 }
|
||||
}
|
||||
|
||||
fn data_and_len(&self) -> (&[usize], usize) {
|
||||
unsafe {
|
||||
if self.is_inline() {
|
||||
(core::slice::from_ref(&self.inline), Self::INLINE_ELEMS)
|
||||
} else {
|
||||
let small_vec = self.alloced.as_ref();
|
||||
(
|
||||
core::slice::from_raw_parts(
|
||||
&small_vec.data as *const _ as *const usize,
|
||||
small_vec.cap,
|
||||
),
|
||||
small_vec.cap * core::mem::size_of::<usize>() * 8,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn data_mut_and_len(&mut self) -> (&mut [usize], usize) {
|
||||
unsafe {
|
||||
if self.is_inline() {
|
||||
(core::slice::from_mut(&mut self.inline), Self::INLINE_ELEMS)
|
||||
} else {
|
||||
let small_vec = self.alloced.as_mut();
|
||||
(
|
||||
core::slice::from_raw_parts_mut(
|
||||
&mut small_vec.data as *mut _ as *mut usize,
|
||||
small_vec.cap,
|
||||
),
|
||||
small_vec.cap * Self::UNIT,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn indexes(index: usize) -> (usize, usize) {
|
||||
(index / Self::UNIT, index % Self::UNIT)
|
||||
}
|
||||
|
||||
pub fn get(&self, index: Nid) -> bool {
|
||||
let index = index as usize;
|
||||
let (data, len) = self.data_and_len();
|
||||
if index >= len {
|
||||
return false;
|
||||
}
|
||||
let (elem, bit) = Self::indexes(index);
|
||||
(unsafe { *data.get_unchecked(elem) }) & (1 << bit) != 0
|
||||
}
|
||||
|
||||
pub fn set(&mut self, index: Nid) -> bool {
|
||||
let index = index as usize;
|
||||
let (mut data, len) = self.data_mut_and_len();
|
||||
if core::intrinsics::unlikely(index >= len) {
|
||||
self.grow(index.next_power_of_two().max(4 * Self::UNIT));
|
||||
(data, _) = self.data_mut_and_len();
|
||||
}
|
||||
|
||||
let (elem, bit) = Self::indexes(index);
|
||||
let elem = unsafe { data.get_unchecked_mut(elem) };
|
||||
let prev = *elem;
|
||||
*elem |= 1 << bit;
|
||||
*elem != prev
|
||||
}
|
||||
|
||||
fn grow(&mut self, size: usize) {
|
||||
debug_assert!(size.is_power_of_two());
|
||||
let slot_count = size / Self::UNIT;
|
||||
let (layout, off) = Self::layout(slot_count);
|
||||
let (ptr, prev_len) = unsafe {
|
||||
if self.is_inline() {
|
||||
let ptr = alloc::alloc(layout);
|
||||
*ptr.add(off).cast::<usize>() = self.inline & !Self::FLAG;
|
||||
(ptr, 1)
|
||||
} else {
|
||||
let prev_len = self.alloced.as_ref().cap;
|
||||
let (prev_layout, _) = Self::layout(prev_len);
|
||||
(alloc::realloc(self.alloced.as_ptr() as _, prev_layout, layout.size()), prev_len)
|
||||
}
|
||||
};
|
||||
unsafe {
|
||||
MaybeUninit::fill(
|
||||
core::slice::from_raw_parts_mut(
|
||||
ptr.add(off).cast::<MaybeUninit<usize>>().add(prev_len),
|
||||
slot_count - prev_len,
|
||||
),
|
||||
0,
|
||||
);
|
||||
*ptr.cast::<usize>() = slot_count;
|
||||
core::ptr::write(self, Self { alloced: Unique::new_unchecked(ptr as _) });
|
||||
}
|
||||
}
|
||||
|
||||
fn layout(slot_count: usize) -> (core::alloc::Layout, usize) {
|
||||
unsafe {
|
||||
core::alloc::Layout::new::<AllocedBitSet>()
|
||||
.extend(Layout::array::<usize>(slot_count).unwrap_unchecked())
|
||||
.unwrap_unchecked()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> BitSetIter {
|
||||
if self.is_inline() {
|
||||
BitSetIter { index: 0, current: unsafe { self.inline & !Self::FLAG }, remining: &[] }
|
||||
} else {
|
||||
let &[current, ref remining @ ..] = self.data_and_len().0 else {
|
||||
unsafe { unreachable_unchecked() }
|
||||
};
|
||||
BitSetIter { index: 0, current, remining }
|
||||
}
|
||||
}
|
||||
|
||||
pub fn clear(&mut self, len: usize) {
|
||||
self.reserve(len);
|
||||
if self.is_inline() {
|
||||
unsafe { self.inline &= Self::FLAG };
|
||||
} else {
|
||||
self.data_mut_and_len().0.fill(0);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn units<'a>(&'a self, slot: &'a mut usize) -> &'a [usize] {
|
||||
if self.is_inline() {
|
||||
*slot = unsafe { self.inline } & !Self::FLAG;
|
||||
core::slice::from_ref(slot)
|
||||
} else {
|
||||
self.data_and_len().0
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reserve(&mut self, len: usize) {
|
||||
if len > self.data_and_len().1 {
|
||||
self.grow(len.next_power_of_two().max(4 * Self::UNIT));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn units_mut(&mut self) -> Result<&mut [usize], &mut InlineBitSetView> {
|
||||
if self.is_inline() {
|
||||
Err(unsafe {
|
||||
core::mem::transmute::<&mut usize, &mut InlineBitSetView>(&mut self.inline)
|
||||
})
|
||||
} else {
|
||||
Ok(self.data_mut_and_len().0)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct InlineBitSetView(usize);
|
||||
|
||||
impl InlineBitSetView {
|
||||
pub(crate) fn add_mask(&mut self, tmp: usize) {
|
||||
debug_assert!(tmp & BitSet::FLAG == 0);
|
||||
self.0 |= tmp;
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BitSetIter<'a> {
|
||||
index: usize,
|
||||
current: usize,
|
||||
remining: &'a [usize],
|
||||
}
|
||||
|
||||
impl Iterator for BitSetIter<'_> {
|
||||
type Item = usize;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
while self.current == 0 {
|
||||
self.current = *self.remining.take_first()?;
|
||||
self.index += 1;
|
||||
}
|
||||
|
||||
let sub_idx = self.current.trailing_zeros() as usize;
|
||||
self.current &= self.current - 1;
|
||||
Some(self.index * BitSet::UNIT + sub_idx)
|
||||
}
|
||||
}
|
||||
|
||||
struct AllocedBitSet {
|
||||
cap: usize,
|
||||
data: [usize; 0],
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[test]
|
||||
fn test_small_bit_set() {
|
||||
use std::vec::Vec;
|
||||
|
||||
let mut sv = BitSet::default();
|
||||
|
||||
sv.set(10);
|
||||
debug_assert!(sv.get(10));
|
||||
sv.set(100);
|
||||
debug_assert!(sv.get(100));
|
||||
sv.set(10000);
|
||||
debug_assert!(sv.get(10000));
|
||||
debug_assert_eq!(sv.iter().collect::<Vec<_>>(), &[10, 100, 10000]);
|
||||
sv.clear(10000);
|
||||
debug_assert_eq!(sv.iter().collect::<Vec<_>>(), &[]);
|
||||
}
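// Illustrative sketch (not part of the original diff): `indexes` splits a bit
// index into (word, bit-within-word), and the top bit of the inline word is
// reserved as the inline/heap discriminant, which is why an inline set holds
// only `UNIT - 1` usable bits.
#[cfg(test)]
#[test]
fn bit_set_indexing_sketch() {
    assert_eq!(BitSet::indexes(0), (0, 0));
    assert_eq!(BitSet::indexes(BitSet::UNIT + 3), (1, 3));
    assert_eq!(BitSet::INLINE_ELEMS, BitSet::UNIT - 1);
}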
|
||||
|
||||
pub union Vc {
    inline: InlineVc,
    alloced: AllocedVc,
}

impl Default for Vc {
    fn default() -> Self {
        Vc { inline: InlineVc { elems: MaybeUninit::uninit(), cap: Default::default() } }
    }
}

impl Debug for Vc {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.as_slice().fmt(f)
    }
}

impl FromIterator<Nid> for Vc {
    fn from_iter<T: IntoIterator<Item = Nid>>(iter: T) -> Self {
        let mut slf = Self::default();
        for i in iter {
            slf.push(i);
        }
        slf
    }
}

const INLINE_ELEMS: usize = VC_SIZE / 2 - 1;
const VC_SIZE: usize = 16;

impl Vc {
    fn is_inline(&self) -> bool {
        unsafe { self.inline.cap <= INLINE_ELEMS as Nid }
    }

    fn layout(&self) -> Option<core::alloc::Layout> {
        unsafe {
            self.is_inline().not().then(|| {
                core::alloc::Layout::array::<Nid>(self.alloced.cap as _).unwrap_unchecked()
            })
        }
    }

    pub fn len(&self) -> usize {
        unsafe {
            if self.is_inline() {
                self.inline.cap as _
            } else {
                self.alloced.len as _
            }
        }
    }

    fn len_mut(&mut self) -> &mut Nid {
        unsafe {
            if self.is_inline() {
                &mut self.inline.cap
            } else {
                &mut self.alloced.len
            }
        }
    }

    fn as_ptr(&self) -> *const Nid {
        unsafe {
            match self.is_inline() {
                true => self.inline.elems.as_ptr().cast(),
                false => self.alloced.base.as_ptr(),
            }
        }
    }

    fn as_mut_ptr(&mut self) -> *mut Nid {
        unsafe {
            match self.is_inline() {
                true => self.inline.elems.as_mut_ptr().cast(),
                false => self.alloced.base.as_ptr(),
            }
        }
    }

    pub fn as_slice(&self) -> &[Nid] {
        unsafe { core::slice::from_raw_parts(self.as_ptr(), self.len()) }
    }

    fn as_slice_mut(&mut self) -> &mut [Nid] {
        unsafe { core::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len()) }
    }

    pub fn push(&mut self, value: Nid) {
        if let Some(layout) = self.layout()
            && unsafe { self.alloced.len == self.alloced.cap }
        {
            unsafe {
                self.alloced.cap *= 2;
                self.alloced.base = Unique::new_unchecked(
                    alloc::realloc(
                        self.alloced.base.as_ptr().cast(),
                        layout,
                        self.alloced.cap as usize * core::mem::size_of::<Nid>(),
                    )
                    .cast(),
                );
            }
        } else if self.len() == INLINE_ELEMS {
            unsafe {
                let mut allcd =
                    Self::alloc((self.inline.cap + 1).next_power_of_two() as _, self.len());
                core::ptr::copy_nonoverlapping(self.as_ptr(), allcd.as_mut_ptr(), self.len());
                *self = allcd;
            }
        }

        unsafe {
            *self.len_mut() += 1;
            self.as_mut_ptr().add(self.len() - 1).write(value);
        }
    }

    unsafe fn alloc(cap: usize, len: usize) -> Self {
        debug_assert!(cap > INLINE_ELEMS);
        let layout = unsafe { core::alloc::Layout::array::<Nid>(cap).unwrap_unchecked() };
        let alloc = unsafe { alloc::alloc(layout) };
        unsafe {
            Vc {
                alloced: AllocedVc {
                    base: Unique::new_unchecked(alloc.cast()),
                    len: len as _,
                    cap: cap as _,
                },
            }
        }
    }

    pub fn swap_remove(&mut self, index: usize) {
        let len = self.len() - 1;
        self.as_slice_mut().swap(index, len);
        *self.len_mut() -= 1;
    }

    pub fn remove(&mut self, index: usize) {
        self.as_slice_mut().copy_within(index + 1.., index);
        *self.len_mut() -= 1;
    }
}

impl Drop for Vc {
    fn drop(&mut self) {
        if let Some(layout) = self.layout() {
            unsafe {
                alloc::dealloc(self.alloced.base.as_ptr().cast(), layout);
            }
        }
    }
}

impl Clone for Vc {
    fn clone(&self) -> Self {
        self.as_slice().into()
    }
}

impl IntoIterator for Vc {
    type IntoIter = VcIntoIter;
    type Item = Nid;

    fn into_iter(self) -> Self::IntoIter {
        VcIntoIter { start: 0, end: self.len(), vc: self }
    }
}

pub struct VcIntoIter {
    start: usize,
    end: usize,
    vc: Vc,
}

impl Iterator for VcIntoIter {
    type Item = Nid;

    fn next(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }

        let ret = unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.start)) };
        self.start += 1;
        Some(ret)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.end - self.start;
        (len, Some(len))
    }
}

impl DoubleEndedIterator for VcIntoIter {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }

        self.end -= 1;
        Some(unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.end)) })
    }
}

impl ExactSizeIterator for VcIntoIter {}

impl<const SIZE: usize> From<[Nid; SIZE]> for Vc {
    fn from(value: [Nid; SIZE]) -> Self {
        value.as_slice().into()
    }
}

impl<'a> From<&'a [Nid]> for Vc {
    fn from(value: &'a [Nid]) -> Self {
        if value.len() <= INLINE_ELEMS {
            let mut dflt = Self::default();
            unsafe {
                core::ptr::copy_nonoverlapping(value.as_ptr(), dflt.as_mut_ptr(), value.len())
            };
            dflt.inline.cap = value.len() as _;
            dflt
        } else {
            let mut allcd = unsafe { Self::alloc(value.len(), value.len()) };
            unsafe {
                core::ptr::copy_nonoverlapping(value.as_ptr(), allcd.as_mut_ptr(), value.len())
            };
            allcd
        }
    }
}

impl Deref for Vc {
    type Target = [Nid];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl DerefMut for Vc {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_slice_mut()
    }
}

#[derive(Clone, Copy)]
#[repr(C)]
struct InlineVc {
    cap: Nid,
    elems: MaybeUninit<[Nid; INLINE_ELEMS]>,
}

#[derive(Clone, Copy)]
#[repr(C)]
struct AllocedVc {
    cap: Nid,
    len: Nid,
    base: Unique<Nid>,
}

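// A minimal usage sketch (not part of the original diff), assuming `Nid` is a
// 16-bit index: `VC_SIZE / 2 - 1` leaves room for 7 inline elements next to the
// `cap` field, and because a heap-backed `Vc` always has `cap > INLINE_ELEMS`,
// that same `cap` field doubles as the inline-vs-alloced tag read by `is_inline`.
#[cfg(test)]
#[test]
fn test_vc_spills_to_heap_sketch() {
    let mut vc = Vc::default();
    for i in 0..INLINE_ELEMS as Nid {
        vc.push(i); // stays inline; the length lives in `inline.cap`
    }
    debug_assert_eq!(vc.len(), INLINE_ELEMS);
    vc.push(INLINE_ELEMS as Nid); // 8th element: contents move into an allocation
    debug_assert_eq!(vc.as_slice(), &[0, 1, 2, 3, 4, 5, 6, 7]);
}
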
pub trait Ent: Copy {
    fn new(index: usize) -> Self;
    fn index(self) -> usize;
}

pub struct EntVec<K: Ent, T> {
    data: ::alloc::vec::Vec<T>,
    k: PhantomData<fn(K)>,
}

impl<K: Ent, T> Default for EntVec<K, T> {
    fn default() -> Self {
        Self { data: Default::default(), k: PhantomData }
    }
}

impl<K: Ent, T> EntVec<K, T> {
    pub fn clear(&mut self) {
        self.data.clear();
    }

    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }

    pub fn len(&self) -> usize {
        self.data.len()
    }

    pub fn push(&mut self, value: T) -> K {
        let k = K::new(self.data.len());
        self.data.push(value);
        k
    }

    pub fn next(&self, index: K) -> Option<&T> {
        self.data.get(index.index() + 1)
    }

    pub fn shadow(&mut self, len: usize)
    where
        T: Default,
    {
        if self.data.len() < len {
            self.data.resize_with(len, Default::default);
        }
    }

    pub fn iter(&self) -> core::slice::Iter<T> {
        self.data.iter()
    }
}

impl<K: Ent, T> core::ops::Index<K> for EntVec<K, T> {
    type Output = T;

    fn index(&self, index: K) -> &Self::Output {
        &self.data[index.index()]
    }
}

impl<K: Ent, T> core::ops::IndexMut<K> for EntVec<K, T> {
    fn index_mut(&mut self, index: K) -> &mut Self::Output {
        &mut self.data[index.index()]
    }
}

macro_rules! decl_ent {
    ($(
        $vis:vis struct $name:ident($index:ty);
    )*) => {$(
        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
        $vis struct $name($index);

        impl crate::utils::Ent for $name {
            fn new(index: usize) -> Self {
                Self(index as $index)
            }

            fn index(self) -> usize {
                self.0 as _
            }
        }

        impl core::fmt::Display for $name {
            fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
                write!(f, concat!(stringify!($name), "{}"), self.0)
            }
        }
    )*};
}
pub(crate) use decl_ent;
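
// A minimal usage sketch (not part of the original diff): `decl_ent!` stamps out a
// strongly typed index, and `EntVec` is a Vec addressed by that index instead of a
// bare `usize`. The `Block` entity below is made up purely for illustration.
#[cfg(test)]
mod ent_sketch {
    use super::{decl_ent, Ent, EntVec};

    decl_ent! { struct Block(u32); }

    #[test]
    fn typed_indices() {
        let mut blocks = EntVec::<Block, &str>::default();
        let entry = blocks.push("entry"); // returns a `Block`, not a usize
        let exit = blocks.push("exit");
        debug_assert_eq!(blocks[entry], "entry");
        debug_assert_eq!(blocks.next(entry), Some(&"exit")); // element right after `entry`
        debug_assert_eq!(exit.index(), 1); // `index` comes from the generated Ent impl
    }
}
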
lang/tests/son_tests_advanced_floating_point_arithmetic.txt (new file)
@@ -0,0 +1,44 @@
main:
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
LI32 r32, 1148846080w
CP r2, r32
JAL r31, r0, :sin
FMUL32 r33, r1, r32
FTI32 r1, r33, 1b
LD r31, r254, 0a, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
sin:
LI32 r4, 1124073472w
LI32 r5, 1078530011w
FMUL32 r7, r2, r4
FDIV32 r9, r7, r5
FTI32 r11, r9, 1b
ANDI r10, r11, 255d
ITF64 r5, r11
MULI64 r4, r10, 4d
LRA r3, r0, :SIN_TABLE
LI32 r7, 1086918619w
FC64T32 r9, r5, 1b
ADDI64 r5, r11, 64d
ADD64 r8, r3, r4
LI32 r1, 1132462080w
FMUL32 r6, r9, r7
ANDI r7, r5, 255d
LI32 r5, 1056964608w
LD r4, r8, 0a, 4h
FDIV32 r8, r6, r1
MULI64 r6, r7, 4d
FMUL32 r10, r4, r5
FSUB32 r11, r2, r8
ADD64 r9, r3, r6
FMUL32 r2, r11, r10
LD r12, r9, 0a, 4h
FSUB32 r5, r12, r2
FMUL32 r7, r5, r11
FADD32 r1, r4, r7
JALA r0, r31, 0a
code size: 1303
ret: 826
status: Ok(())

lang/tests/son_tests_aliasing_overoptimization.txt (new file)
@@ -0,0 +1,6 @@
main:
LI64 r1, 0d
JALA r0, r31, 0a
code size: 29
ret: 0
status: Ok(())

lang/tests/son_tests_arithmetic.txt (new file)
@@ -0,0 +1,6 @@
main:
LI64 r1, 0d
JALA r0, r31, 0a
code size: 29
ret: 0
status: Ok(())

lang/tests/son_tests_arrays.txt (new file)
@@ -0,0 +1,27 @@
main:
ADDI64 r254, r254, -56d
ST r31, r254, 24a, 32h
LI64 r32, 1d
ADDI64 r2, r254, 0d
ST r32, r254, 0a, 8h
LI64 r33, 2d
ST r33, r254, 8a, 8h
LI64 r34, 4d
ST r34, r254, 16a, 8h
JAL r31, r0, :pass
ADD64 r1, r1, r32
LD r31, r254, 24a, 32h
ADDI64 r254, r254, 56d
JALA r0, r31, 0a
pass:
LD r4, r2, 8a, 8h
MULI64 r7, r4, 8d
LD r5, r2, 0a, 8h
ADD64 r10, r7, r2
ADD64 r9, r4, r5
LD r1, r10, 0a, 8h
ADD64 r1, r1, r9
JALA r0, r31, 0a
code size: 231
ret: 8
status: Ok(())

lang/tests/son_tests_big_array_crash.txt (new file)
@@ -0,0 +1,7 @@
main:
LRA r1, r0, :SIN_TABLE
LD r1, r1, 80a, 8h
JALA r0, r31, 0a
code size: 767
ret: 1736
status: Ok(())

lang/tests/son_tests_branch_assignments.txt (new file)
@@ -0,0 +1,13 @@
main:
LI64 r1, 1d
JNE r2, r1, :0
JMP :1
0: LI64 r7, 0d
JNE r2, r7, :2
LI64 r1, 2d
JMP :1
2: LI64 r1, 3d
1: JALA r0, r31, 0a
code size: 79
ret: 2
status: Ok(())

lang/tests/son_tests_c_strings.txt (new file)
@@ -0,0 +1,27 @@
main:
ADDI64 r254, r254, -16d
ST r31, r254, 0a, 16h
LRA r2, r0, :"abඞ\n\r\t56789\0"
JAL r31, r0, :str_len
CP r32, r1
LRA r2, r0, :"fff\0"
JAL r31, r0, :str_len
ADD64 r1, r1, r32
LD r31, r254, 0a, 16h
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
str_len:
LI8 r6, 0b
LI64 r1, 0d
2: LD r8, r2, 0a, 1h
ANDI r8, r8, 255d
ANDI r6, r6, 255d
JNE r8, r6, :0
JMP :1
0: ADDI64 r2, r2, 1d
ADDI64 r1, r1, 1d
JMP :2
1: JALA r0, r31, 0a
code size: 216
ret: 16
status: Ok(())

lang/tests/son_tests_comments.txt (new file)
@@ -0,0 +1,13 @@
foo:
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -8d
ST r31, r254, 0a, 8h
JAL r31, r0, :foo
LI64 r1, 0d
LD r31, r254, 0a, 8h
ADDI64 r254, r254, 8d
JALA r0, r31, 0a
code size: 95
ret: 0
status: Ok(())

(file name not shown in this view)
@@ -0,0 +1,7 @@
main:
LRA r1, r0, :a
LD r1, r1, 0a, 8h
JALA r0, r31, 0a
code size: 47
ret: 50
status: Ok(())

lang/tests/son_tests_comptime_min_reg_leak.txt (new file)
@@ -0,0 +1,7 @@
main:
LRA r1, r0, :a
LD r1, r1, 0a, 8h
JALA r0, r31, 0a
code size: 47
ret: 50
status: Ok(())

lang/tests/son_tests_conditional_stores.txt (new file)
@@ -0,0 +1,20 @@
cond:
LI64 r1, 0d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
JAL r31, r0, :cond
LI64 r32, 0d
CP r33, r32
JNE r1, r33, :0
CP r32, r33
CP r1, r32
JMP :1
0: LI64 r1, 2d
1: LD r31, r254, 0a, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
code size: 134
ret: 0
status: Ok(())

lang/tests/son_tests_const_folding_with_arg.txt (new file)
@@ -0,0 +1,6 @@
main:
LI64 r1, 0d
JALA r0, r31, 0a
code size: 29
ret: 0
status: Ok(())

lang/tests/son_tests_constants.txt (new file)
@@ -0,0 +1,6 @@
main:
LI64 r1, 69d
JALA r0, r31, 0a
code size: 29
ret: 69
status: Ok(())

lang/tests/son_tests_dead_code_in_loop.txt (new file)
@@ -0,0 +1,6 @@
main:
LI64 r1, 0d
JALA r0, r31, 0a
code size: 29
ret: 0
status: Ok(())

lang/tests/son_tests_die.txt (new file)
@@ -0,0 +1,5 @@
main:
UN
code size: 9
ret: 0
status: Err(Unreachable)

lang/tests/son_tests_different_types.txt (new file)
@@ -0,0 +1,30 @@
main:
ADDI64 r254, r254, -12d
LI8 r1, 255b
ST r1, r254, 0a, 1h
LI8 r4, 0b
ST r4, r254, 1a, 1h
ST r4, r254, 2a, 1h
ST r1, r254, 3a, 1h
LI32 r9, 0w
ST r9, r254, 4a, 4h
LI32 r12, 2w
ST r12, r254, 8a, 4h
LD r3, r254, 8a, 4h
ANDI r3, r3, 4294967295d
ANDI r12, r12, 4294967295d
JEQ r3, r12, :0
LI64 r1, 0d
JMP :1
0: LD r10, r254, 4a, 4h
ANDI r10, r10, 4294967295d
ANDI r9, r9, 4294967295d
JEQ r10, r9, :2
LI64 r1, 64d
JMP :1
2: LI64 r1, 512d
1: ADDI64 r254, r254, 12d
JALA r0, r31, 0a
code size: 257
ret: 512
status: Ok(())

lang/tests/son_tests_directives.txt (new file)
@@ -0,0 +1,20 @@
main:
ADDI64 r254, r254, -16d
LI64 r1, 10d
ADDI64 r4, r254, 0d
ST r1, r254, 0a, 8h
LI64 r7, 20d
ST r7, r254, 8a, 8h
LI64 r6, 6d
LI64 r5, 5d
LI64 r2, 1d
CP r3, r4
LD r3, r3, 0a, 16h
ECA
LI64 r1, 0d
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
ev: Ecall
code size: 155
ret: 0
status: Ok(())

lang/tests/son_tests_exhaustive_loop_testing.txt (new file)
@@ -0,0 +1,105 @@
continue_and_state_change:
LI64 r7, 3d
LI64 r8, 4d
LI64 r9, 2d
LI64 r10, 10d
6: JLTU r2, r10, :0
CP r1, r2
JMP :1
0: JNE r2, r9, :2
CP r2, r8
JMP :3
2: JNE r2, r7, :4
LI64 r1, 0d
1: JMP :5
4: ADDI64 r2, r2, 1d
3: JMP :6
5: JALA r0, r31, 0a
infinite_loop:
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
LI64 r32, 1d
LI64 r33, 0d
CP r1, r33
1: JNE r1, r32, :0
JMP :0
0: CP r2, r33
JAL r31, r0, :continue_and_state_change
JMP :1
LD r31, r254, 0a, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -64d
ST r31, r254, 0a, 64h
LI64 r32, 0d
CP r2, r32
JAL r31, r0, :multiple_breaks
CP r33, r1
LI64 r1, 3d
JEQ r33, r1, :0
LI64 r1, 1d
JMP :1
0: CP r34, r1
LI64 r35, 4d
CP r2, r35
JAL r31, r0, :multiple_breaks
CP r36, r35
LI64 r37, 10d
JEQ r1, r37, :2
LI64 r1, 2d
JMP :1
2: CP r2, r32
JAL r31, r0, :state_change_in_break
JEQ r1, r32, :3
CP r1, r34
JMP :1
3: CP r2, r36
JAL r31, r0, :state_change_in_break
JEQ r1, r37, :4
CP r1, r36
JMP :1
4: CP r2, r37
JAL r31, r0, :continue_and_state_change
JEQ r1, r37, :5
LI64 r1, 5d
JMP :1
5: CP r2, r34
JAL r31, r0, :continue_and_state_change
JEQ r1, r32, :6
LI64 r1, 6d
JMP :1
6: CP r38, r32
JAL r31, r0, :infinite_loop
CP r1, r38
1: LD r31, r254, 0a, 64h
ADDI64 r254, r254, 64d
JALA r0, r31, 0a
multiple_breaks:
LI64 r6, 3d
LI64 r5, 10d
4: JLTU r2, r5, :0
CP r1, r2
JMP :1
0: ADDI64 r1, r2, 1d
JNE r1, r6, :2
1: JMP :3
2: CP r2, r1
JMP :4
3: JALA r0, r31, 0a
state_change_in_break:
LI64 r5, 3d
LI64 r6, 10d
4: JLTU r2, r6, :0
CP r1, r2
JMP :1
0: JNE r2, r5, :2
LI64 r1, 0d
1: JMP :3
2: ADDI64 r2, r2, 1d
JMP :4
3: JALA r0, r31, 0a
timed out
code size: 668
ret: 10
status: Ok(())

lang/tests/son_tests_fb_driver.txt (new file)
@@ -0,0 +1,53 @@
check_platform:
ADDI64 r254, r254, -8d
ST r31, r254, 0a, 8h
JAL r31, r0, :x86_fb_ptr
LD r31, r254, 0a, 8h
ADDI64 r254, r254, 8d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -64d
ST r31, r254, 0a, 64h
JAL r31, r0, :check_platform
LI64 r32, 0d
LI64 r33, 30d
LI64 r34, 100d
CP r35, r32
CP r36, r32
CP r37, r32
5: JLTU r35, r33, :0
ADDI64 r36, r36, 1d
CP r2, r32
CP r3, r36
CP r4, r33
JAL r31, r0, :set_pixel
JEQ r1, r37, :1
CP r1, r32
JMP :2
1: CP r38, r32
JNE r36, r34, :3
CP r1, r37
JMP :2
3: CP r1, r37
CP r35, r38
JMP :4
0: CP r1, r37
CP r38, r32
ADDI64 r1, r1, 1d
ADDI64 r35, r35, 1d
4: CP r32, r38
CP r37, r1
JMP :5
2: LD r31, r254, 0a, 64h
ADDI64 r254, r254, 64d
JALA r0, r31, 0a
set_pixel:
MUL64 r7, r3, r4
ADD64 r1, r7, r2
JALA r0, r31, 0a
x86_fb_ptr:
LI64 r1, 100d
JALA r0, r31, 0a
code size: 330
ret: 3000
status: Ok(())

lang/tests/son_tests_floating_point_arithmetic.txt (new file)
@@ -0,0 +1,6 @@
main:
LI32 r1, 3212836864w
JALA r0, r31, 0a
code size: 25
ret: 3212836864
status: Ok(())

lang/tests/son_tests_functions.txt (new file)
@@ -0,0 +1,21 @@
add_one:
ADDI64 r1, r2, 1d
JALA r0, r31, 0a
add_two:
ADDI64 r1, r2, 2d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -16d
ST r31, r254, 0a, 16h
LI64 r2, 10d
JAL r31, r0, :add_one
CP r32, r1
LI64 r2, 20d
JAL r31, r0, :add_two
ADD64 r1, r1, r32
LD r31, r254, 0a, 16h
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
code size: 152
ret: 33
status: Ok(())

lang/tests/son_tests_generic_functions.txt (new file)
@@ -0,0 +1,24 @@
add:
ADD64 r1, r2, r3
JALA r0, r31, 0a
add:
ADD32 r1, r2, r3
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
LI32 r3, 2w
CP r2, r3
JAL r31, r0, :add
CP r32, r1
LI64 r3, 3d
LI64 r2, 1d
JAL r31, r0, :add
ANDI r33, r32, 4294967295d
SUB64 r1, r33, r1
LD r31, r254, 0a, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
code size: 158
ret: 0
status: Ok(())

lang/tests/son_tests_generic_types.txt (new file)
@@ -0,0 +1,127 @@
deinit:
ADDI64 r254, r254, -32d
ST r31, r254, 0a, 32h
CP r32, r2
LD r33, r2, 16a, 8h
LI64 r4, 8d
MUL64 r3, r33, r4
CP r34, r32
LD r2, r34, 0a, 8h
JAL r31, r0, :free
CP r1, r32
JAL r31, r0, :new
LD r31, r254, 0a, 32h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
free:
CP r10, r2
LRA r7, r0, :FREE_SYS_CALL
LD r2, r7, 0a, 8h
CP r5, r4
CP r4, r3
CP r3, r10
ECA
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -56d
ST r31, r254, 24a, 32h
ADDI64 r32, r254, 0d
CP r1, r32
JAL r31, r0, :new
LI64 r3, 69d
CP r2, r32
JAL r31, r0, :push
LD r33, r254, 0a, 8h
LD r34, r33, 0a, 8h
CP r2, r32
JAL r31, r0, :deinit
CP r1, r34
LD r31, r254, 24a, 32h
ADDI64 r254, r254, 56d
JALA r0, r31, 0a
malloc:
CP r9, r2
LRA r5, r0, :MALLOC_SYS_CALL
LD r2, r5, 0a, 8h
CP r4, r3
CP r3, r9
ECA
JALA r0, r31, 0a
new:
ADDI64 r254, r254, -24d
LI64 r4, 0d
ADDI64 r5, r254, 0d
ST r4, r254, 0a, 8h
ST r4, r254, 8a, 8h
ST r4, r254, 16a, 8h
BMC r5, r1, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
push:
ADDI64 r254, r254, -192d
ST r31, r254, 0a, 192h
CP r32, r3
LI64 r33, 1d
LD r34, r2, 8a, 8h
LD r35, r2, 16a, 8h
CP r36, r2
JNE r35, r34, :0
LI64 r37, 0d
JNE r35, r37, :1
CP r38, r33
JMP :2
1: MULI64 r38, r35, 2d
2: LI64 r39, 8d
MUL64 r2, r38, r39
CP r3, r39
JAL r31, r0, :malloc
CP r40, r1
LI64 r1, 0d
CP r41, r40
JNE r41, r1, :3
JMP :4
3: CP r40, r41
CP r42, r36
ST r38, r42, 16a, 8h
LD r36, r42, 8a, 8h
MULI64 r43, r36, 8d
LD r44, r42, 0a, 8h
ADD64 r45, r44, r43
CP r46, r40
9: LD r2, r42, 0a, 8h
LD r47, r42, 8a, 8h
JNE r45, r44, :5
JEQ r47, r37, :6
CP r4, r39
MUL64 r3, r47, r4
JAL r31, r0, :free
CP r1, r40
JMP :7
6: CP r1, r40
7: ST r1, r42, 0a, 8h
JMP :8
5: CP r1, r40
CP r4, r39
ADDI64 r41, r46, 8d
ADDI64 r48, r44, 8d
LD r49, r44, 0a, 8h
ST r49, r46, 0a, 8h
CP r44, r48
CP r46, r41
JMP :9
0: CP r42, r36
8: LD r50, r42, 8a, 8h
MULI64 r51, r50, 8d
LD r52, r42, 0a, 8h
ADD64 r1, r52, r51
CP r3, r32
ST r3, r1, 0a, 8h
LD r53, r42, 8a, 8h
ADD64 r54, r53, r33
ST r54, r42, 8a, 8h
4: LD r31, r254, 0a, 192h
ADDI64 r254, r254, 192d
JALA r0, r31, 0a
code size: 955
ret: 69
status: Ok(())

lang/tests/son_tests_global_aliasing_overptimization.txt (new file)
@@ -0,0 +1,19 @@
clobber:
LRA r1, r0, :var
LI64 r3, 0d
ST r3, r1, 0a, 8h
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
LRA r32, r0, :var
LI64 r33, 2d
ST r33, r32, 0a, 8h
JAL r31, r0, :clobber
LD r1, r32, 0a, 8h
LD r31, r254, 0a, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
code size: 166
ret: 0
status: Ok(())

lang/tests/son_tests_global_variables.txt (new file)
@@ -0,0 +1,9 @@
main:
LRA r2, r0, :complex_global_var
LD r3, r2, 0a, 8h
ADDI64 r1, r3, 5d
ST r1, r2, 0a, 8h
JALA r0, r31, 0a
code size: 71
ret: 55
status: Ok(())

lang/tests/son_tests_hex_octal_binary_literals.txt (new file)
@@ -0,0 +1,6 @@
main:
LI64 r1, 0d
JALA r0, r31, 0a
code size: 29
ret: 0
status: Ok(())

lang/tests/son_tests_idk.txt (new file)
@@ -0,0 +1,20 @@
main:
ADDI64 r254, r254, -128d
LI8 r5, 69b
LI64 r6, 128d
LI64 r7, 0d
ADDI64 r4, r254, 0d
2: LD r12, r254, 42a, 1h
JLTU r7, r6, :0
ANDI r1, r12, 255d
JMP :1
0: ADDI64 r3, r7, 1d
ADD64 r7, r4, r7
ST r5, r7, 0a, 1h
CP r7, r3
JMP :2
1: ADDI64 r254, r254, 128d
JALA r0, r31, 0a
code size: 145
ret: 69
status: Ok(())

lang/tests/son_tests_if_statements.txt (new file)
@@ -0,0 +1,30 @@
fib:
ADDI64 r254, r254, -40d
ST r31, r254, 0a, 40h
LI64 r1, 1d
LI64 r32, 2d
JGTU r2, r32, :0
JMP :1
0: CP r33, r2
SUB64 r2, r33, r1
CP r34, r33
JAL r31, r0, :fib
CP r2, r34
CP r35, r1
SUB64 r2, r2, r32
JAL r31, r0, :fib
ADD64 r1, r1, r35
1: LD r31, r254, 0a, 40h
ADDI64 r254, r254, 40d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -8d
ST r31, r254, 0a, 8h
LI64 r2, 10d
JAL r31, r0, :fib
LD r31, r254, 0a, 8h
ADDI64 r254, r254, 8d
JALA r0, r31, 0a
code size: 211
ret: 55
status: Ok(())

lang/tests/son_tests_infinite_loop_after_peephole.txt (new file)
@@ -0,0 +1,9 @@
main:
LI64 r2, 0d
0: ADDI64 r2, r2, 1d
JMP :0
JALA r0, r31, 0a
timed out
code size: 45
ret: 0
status: Ok(())

lang/tests/son_tests_inline.txt (new file)
@@ -0,0 +1,18 @@
main:
LI64 r7, 6d
LRA r3, r0, :gb
LI64 r6, 0d
LD r8, r3, 0a, 8h
CMPU r9, r8, r6
CMPUI r9, r9, 0d
ORI r11, r9, 0d
ANDI r11, r11, 255d
JNE r11, r0, :0
CP r4, r7
JMP :1
0: LI64 r4, 1d
1: SUB64 r1, r4, r7
JALA r0, r31, 0a
code size: 131
ret: 0
status: Ok(())

lang/tests/son_tests_inline_test.txt (new file)
@@ -0,0 +1,39 @@
main:
ADDI64 r254, r254, -32d
ST r31, r254, 0a, 32h
JAL r31, r0, :scalar_values
LI64 r32, 0d
CP r33, r32
JEQ r1, r33, :0
LI64 r1, 1d
JMP :1
0: JAL r31, r0, :structs
CP r34, r33
JEQ r1, r34, :2
JAL r31, r0, :structs
JMP :1
2: CP r1, r34
CP r33, r34
1: LD r31, r254, 0a, 32h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
scalar_values:
LI64 r1, 0d
JALA r0, r31, 0a
structs:
ADDI64 r254, r254, -32d
LI64 r1, 5d
ST r1, r254, 16a, 8h
ST r1, r254, 24a, 8h
LD r5, r254, 16a, 8h
ADDI64 r7, r5, 15d
ST r7, r254, 0a, 8h
LI64 r10, 20d
ST r10, r254, 8a, 8h
LD r1, r254, 0a, 8h
SUB64 r1, r1, r10
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
code size: 307
ret: 0
status: Ok(())

lang/tests/son_tests_inlined_generic_functions.txt (new file)
@@ -0,0 +1,6 @@
main:
LI64 r1, 10d
JALA r0, r31, 0a
code size: 29
ret: 10
status: Ok(())

lang/tests/son_tests_inlining_issues.txt (new file)
@@ -0,0 +1,109 @@
main:
ADDI64 r254, r254, -106d
ST r31, r254, 58a, 48h
ADDI64 r32, r254, 33d
ADDI64 r2, r254, 34d
ADDI64 r6, r254, 1d
LI64 r33, 0d
ADDI64 r4, r254, 17d
ST r32, r254, 34a, 8h
LI64 r34, 100d
ADDI64 r7, r254, 0d
LI8 r35, 1b
ST r33, r254, 1a, 8h
ST r33, r254, 17a, 8h
ST r34, r254, 42a, 8h
LI8 r36, 0b
ST r35, r254, 0a, 1h
ST r33, r254, 9a, 8h
ST r33, r254, 25a, 8h
ST r34, r254, 50a, 8h
ST r36, r254, 33a, 1h
CP r3, r4
CP r5, r6
LD r3, r3, 0a, 16h
LD r5, r5, 0a, 16h
LD r7, r7, 0a, 1h
JAL r31, r0, :put_filled_rect
LD r31, r254, 58a, 48h
ADDI64 r254, r254, 106d
JALA r0, r31, 0a
put_filled_rect:
ADDI64 r254, r254, -212d
ST r32, r254, 108a, 104h
ST r3, r254, 92a, 16h
ADDI64 r3, r254, 92d
ST r5, r254, 76a, 16h
ADDI64 r5, r254, 76d
ST r7, r254, 75a, 1h
ADDI64 r7, r254, 75d
LI64 r8, 25d
LI64 r32, 2d
LI64 r6, 8d
ADDI64 r33, r254, 25d
ADDI64 r34, r254, 50d
LI8 r35, 5b
ST r35, r254, 25a, 1h
LD r36, r5, 0a, 8h
ST r36, r254, 26a, 4h
LI64 r37, 1d
ST r37, r254, 30a, 4h
ST r7, r254, 34a, 8h
ST r35, r254, 50a, 1h
ST r36, r254, 51a, 4h
ST r37, r254, 55a, 4h
ST r7, r254, 59a, 8h
CP r38, r7
LD r7, r3, 8a, 8h
LD r39, r5, 8a, 8h
ADD64 r11, r39, r7
SUB64 r4, r11, r37
LD r40, r2, 8a, 8h
MUL64 r5, r40, r4
LD r9, r2, 0a, 8h
ADD64 r10, r9, r5
LD r2, r3, 0a, 8h
ADD64 r41, r2, r10
MUL64 r3, r40, r7
ADD64 r4, r9, r3
ADD64 r42, r2, r4
3: JGTU r39, r37, :0
JNE r39, r37, :1
ADDI64 r4, r254, 0d
ST r35, r254, 0a, 1h
ST r36, r254, 1a, 4h
ST r37, r254, 5a, 4h
ST r38, r254, 9a, 8h
ST r42, r254, 17a, 8h
CP r2, r6
CP r3, r32
CP r5, r8
ECA
JMP :1
1: JMP :2
0: CP r3, r32
CP r43, r6
CP r44, r8
ST r42, r254, 67a, 8h
CP r2, r43
CP r4, r34
CP r5, r44
ECA
ST r41, r254, 42a, 8h
CP r2, r43
CP r3, r32
CP r4, r33
CP r5, r44
ECA
ADD64 r42, r40, r42
SUB64 r41, r41, r40
SUB64 r39, r39, r32
CP r6, r43
CP r8, r44
JMP :3
2: LD r32, r254, 108a, 104h
ADDI64 r254, r254, 212d
JALA r0, r31, 0a
code size: 917
ret: 0
status: Ok(())

lang/tests/son_tests_inlining_loops.txt (new file, empty)

lang/tests/son_tests_intcast_store.txt (new file)
@@ -0,0 +1,20 @@
main:
ADDI64 r254, r254, -32d
ST r31, r254, 16a, 16h
ADDI64 r3, r254, 0d
ADDI64 r2, r254, 8d
LI64 r32, 0d
ST r32, r254, 0a, 8h
ST r32, r254, 8a, 8h
LI64 r4, 1024d
JAL r31, r0, :set
ANDI r1, r1, 4294967295d
LD r31, r254, 16a, 16h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
set:
CP r1, r4
JALA r0, r31, 0a
code size: 167
ret: 1024
status: Ok(())

lang/tests/son_tests_integer_inference_issues.txt (new file)
@@ -0,0 +1,29 @@
integer_range:
ADDI64 r254, r254, -16d
ST r32, r254, 0a, 16h
CP r32, r2
CP r33, r3
LI64 r3, 4d
LI64 r2, 3d
ECA
CP r2, r32
CP r3, r33
SUB64 r11, r3, r2
ADDI64 r3, r11, 1d
DIRU64 r0, r3, r1, r3
ADD64 r1, r3, r2
LD r32, r254, 0a, 16h
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -8d
ST r31, r254, 0a, 8h
LI64 r3, 1000d
LI64 r2, 0d
JAL r31, r0, :integer_range
LD r31, r254, 0a, 8h
ADDI64 r254, r254, 8d
JALA r0, r31, 0a
code size: 210
ret: 42
status: Ok(())

lang/tests/son_tests_loop_stores.txt (new file)
@@ -0,0 +1,16 @@
main:
ADDI64 r254, r254, -8d
LI64 r3, 0d
LI64 r2, 10d
ST r2, r254, 0a, 8h
2: LD r1, r254, 0a, 8h
JNE r1, r3, :0
JMP :1
0: ADDI64 r11, r1, -1d
ST r11, r254, 0a, 8h
JMP :2
1: ADDI64 r254, r254, 8d
JALA r0, r31, 0a
code size: 126
ret: 0
status: Ok(())

lang/tests/son_tests_loops.txt (new file)
@@ -0,0 +1,25 @@
fib:
LI64 r4, 1d
LI64 r5, 0d
CP r1, r5
CP r10, r4
2: JNE r2, r5, :0
JMP :1
0: ADD64 r1, r10, r1
SUB64 r2, r2, r4
CP r3, r1
CP r1, r10
CP r10, r3
JMP :2
1: JALA r0, r31, 0a
main:
ADDI64 r254, r254, -8d
ST r31, r254, 0a, 8h
LI64 r2, 10d
JAL r31, r0, :fib
LD r31, r254, 0a, 8h
ADDI64 r254, r254, 8d
JALA r0, r31, 0a
code size: 153
ret: 55
status: Ok(())

lang/tests/son_tests_main_fn.txt (new file)
@@ -0,0 +1,6 @@
main:
LI64 r1, 1d
JALA r0, r31, 0a
code size: 29
ret: 1
status: Ok(())

lang/tests/son_tests_more_if_opts.txt (new file)
@@ -0,0 +1,27 @@
main:
ADDI64 r254, r254, -32d
ST r31, r254, 0a, 32h
JAL r31, r0, :opaque
CP r32, r1
JAL r31, r0, :opaque
LI64 r33, 0d
CP r1, r32
JNE r1, r33, :0
CP r32, r1
LI64 r1, 0d
CP r34, r32
JMP :1
0: CP r34, r1
LD r1, r34, 0a, 8h
1: JEQ r34, r33, :2
LD r1, r34, 0a, 8h
JMP :2
2: LD r31, r254, 0a, 32h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
opaque:
LI64 r1, 0d
JALA r0, r31, 0a
code size: 183
ret: 0
status: Ok(())

lang/tests/son_tests_needless_unwrap.txt (new file)
@@ -0,0 +1,6 @@
test.hb:4:17: unwrap is not needed since the value is (provably) never null, remove it, or replace with '@as(<expr_ty>, <opt_expr>)'
ptr := @unwrap(always_nn)
^
test.hb:6:16: unwrap is incorrect since the value is (provably) always null, make sure your logic is correct
ptr = @unwrap(always_n)
^

lang/tests/son_tests_nonexistent_ident_import.txt (new file)
@@ -0,0 +1,7 @@
foo.hb:4:1: redeclaration of identifier: foo
foo := fn(): void {
^

foo.hb:7:23: undefined indentifier: mian
main := @use("bar.hb").mian
^

lang/tests/son_tests_null_check_in_the_loop.txt (new file)
@@ -0,0 +1,34 @@
main:
ADDI64 r254, r254, -38d
ST r31, r254, 6a, 32h
LI8 r32, 0b
ADDI64 r33, r254, 0d
2: JAL r31, r0, :return_fn
ST r1, r254, 0a, 6h
LD r34, r254, 0a, 1h
ANDI r34, r34, 255d
ANDI r32, r32, 255d
JEQ r34, r32, :0
LI64 r1, 1d
JMP :1
0: JMP :2
1: LD r31, r254, 6a, 32h
ADDI64 r254, r254, 38d
JALA r0, r31, 0a
return_fn:
ADDI64 r254, r254, -6d
LI8 r4, 1b
ADDI64 r3, r254, 0d
ST r4, r254, 0a, 1h
LI8 r4, 0b
ST r4, r254, 1a, 1h
ST r4, r254, 2a, 1h
ST r4, r254, 3a, 1h
ST r4, r254, 4a, 1h
ST r4, r254, 5a, 1h
LD r1, r3, 0a, 6h
ADDI64 r254, r254, 6d
JALA r0, r31, 0a
code size: 302
ret: 1
status: Ok(())

lang/tests/son_tests_null_check_test.txt (new file)
@@ -0,0 +1,24 @@
get_ptr:
LI64 r1, 0d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -40d
ST r31, r254, 0a, 40h
JAL r31, r0, :get_ptr
LI64 r32, 0d
JNE r1, r32, :0
LI64 r1, 0d
JMP :1
0: LI64 r33, 10d
CP r34, r1
2: LD r1, r34, 0a, 8h
JEQ r1, r33, :1
ADDI64 r35, r1, 1d
ST r35, r34, 0a, 8h
JMP :2
1: LD r31, r254, 0a, 40h
ADDI64 r254, r254, 40d
JALA r0, r31, 0a
code size: 185
ret: 0
status: Ok(())

lang/tests/son_tests_nullable_structure.txt (new file)
@@ -0,0 +1,65 @@
main:
ADDI64 r254, r254, -122d
ST r31, r254, 26a, 96h
JAL r31, r0, :returner_fn
CP r32, r1
ADDI64 r1, r254, 2d
JAL r31, r0, :returner_bn
ADDI64 r33, r254, 0d
JAL r31, r0, :returner_cn
ST r1, r254, 0a, 2h
LI8 r34, 0b
LI8 r35, 0b
LD r36, r254, 2a, 1h
CP r1, r32
ANDI r37, r37, 255d
ANDI r1, r1, 255d
CMPU r37, r1, r34
CMPUI r37, r37, 0d
ANDI r38, r38, 255d
ANDI r36, r36, 255d
CMPU r38, r36, r35
CMPUI r38, r38, 0d
LD r39, r254, 0a, 1h
AND r40, r38, r37
ANDI r41, r41, 255d
ANDI r39, r39, 255d
CMPU r41, r39, r35
CMPUI r41, r41, 0d
AND r42, r41, r40
ANDI r42, r42, 255d
JNE r42, r0, :0
LI64 r1, 0d
JMP :1
0: LI64 r1, 1d
1: LD r31, r254, 26a, 96h
ADDI64 r254, r254, 122d
JALA r0, r31, 0a
returner_bn:
ADDI64 r254, r254, -24d
LI8 r6, 1b
ADDI64 r5, r254, 0d
ST r6, r254, 0a, 1h
LI64 r6, 0d
ST r6, r254, 8a, 8h
ST r6, r254, 16a, 8h
BMC r5, r1, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
returner_cn:
ADDI64 r254, r254, -2d
LI8 r4, 1b
ADDI64 r3, r254, 0d
ST r4, r254, 0a, 1h
LI8 r4, 0b
ST r4, r254, 1a, 1h
LD r1, r3, 0a, 2h
ADDI64 r254, r254, 2d
JALA r0, r31, 0a
returner_fn:
LD r1, r254, 0a, 0h
ORI r1, r1, 128d
JALA r0, r31, 0a
code size: 546
ret: 1
status: Ok(())

Some files were not shown because too many files have changed in this diff.