Compare commits


No commits in common. "trunk" and "trunk" have entirely different histories.
trunk ... trunk

66 changed files with 1320 additions and 4376 deletions

.gitignore (vendored): 5 lines changed
View file

@@ -2,10 +2,6 @@
/target
rustc-ice-*
a.out
out.o
/examples/raylib/main
# sqlite
db.sqlite
db.sqlite-journal
@@ -16,4 +12,3 @@ db.sqlite-journal
/depell/src/static-pages/*.html
#**/*-sv.rs
/bytecode/src/instrs.rs
/lang/src/testcases.rs

Cargo.lock (generated): 366 lines changed
View file

@@ -38,72 +38,11 @@ dependencies = [
"memchr",
]
[[package]]
name = "allocator-api2"
version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "anstream"
version = "0.6.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
"is_terminal_polyfill",
"utf8parse",
]
[[package]]
name = "anstyle"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9"
[[package]]
name = "anstyle-parse"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c"
dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125"
dependencies = [
"anstyle",
"windows-sys 0.59.0",
]
[[package]]
name = "anyhow"
version = "1.0.95"
version = "1.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
[[package]]
name = "arbitrary"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223"
checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
[[package]]
name = "arc-swap"
@@ -290,7 +229,7 @@ dependencies = [
"proc-macro2",
"quote",
"regex",
"rustc-hash 1.1.0",
"rustc-hash",
"shlex",
"syn",
"which",
@@ -320,15 +259,6 @@ dependencies = [
"generic-array",
]
[[package]]
name = "bumpalo"
version = "3.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
dependencies = [
"allocator-api2",
]
[[package]]
name = "bytes"
version = "1.8.0"
@@ -372,46 +302,6 @@ dependencies = [
"libloading",
]
[[package]]
name = "clap"
version = "4.5.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84"
dependencies = [
"clap_builder",
"clap_derive",
]
[[package]]
name = "clap_builder"
version = "4.5.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838"
dependencies = [
"anstream",
"anstyle",
"clap_lex",
"strsim",
]
[[package]]
name = "clap_derive"
version = "4.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "clap_lex"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]]
name = "cmake"
version = "0.1.51"
@@ -421,12 +311,6 @@ dependencies = [
"cc",
]
[[package]]
name = "colorchoice"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
[[package]]
name = "const_format"
version = "0.2.33"
@@ -456,143 +340,6 @@ dependencies = [
"libc",
]
[[package]]
name = "cranelift-backend"
version = "0.1.0"
dependencies = [
"cranelift-codegen",
"cranelift-frontend",
"cranelift-module",
"cranelift-object",
"hblang",
"target-lexicon",
]
[[package]]
name = "cranelift-bforest"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac89549be94911dd0e839b4a7db99e9ed29c17517e1c026f61066884c168aa3c"
dependencies = [
"cranelift-entity",
]
[[package]]
name = "cranelift-bitset"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9bd49369f76c77e34e641af85d0956869237832c118964d08bf5f51f210875a"
[[package]]
name = "cranelift-codegen"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd96ce9cf8efebd7f5ab8ced5a0ce44250280bbae9f593d74a6d7effc3582a35"
dependencies = [
"bumpalo",
"cranelift-bforest",
"cranelift-bitset",
"cranelift-codegen-meta",
"cranelift-codegen-shared",
"cranelift-control",
"cranelift-entity",
"cranelift-isle",
"gimli 0.31.1",
"hashbrown 0.14.5",
"log",
"regalloc2",
"rustc-hash 2.1.0",
"serde",
"smallvec",
"target-lexicon",
]
[[package]]
name = "cranelift-codegen-meta"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a68e358827afe4bfb6239fcbf6fbd5ac56206ece8a99c8f5f9bbd518773281a"
dependencies = [
"cranelift-codegen-shared",
]
[[package]]
name = "cranelift-codegen-shared"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e184c9767afbe73d50c55ec29abcf4c32f9baf0d9d22b86d58c4d55e06dee181"
[[package]]
name = "cranelift-control"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cc7664f2a66f053e33f149e952bb5971d138e3af637f5097727ed6dc0ed95dd"
dependencies = [
"arbitrary",
]
[[package]]
name = "cranelift-entity"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "118597e3a9cf86c3556fa579a7a23b955fa18231651a52a77a2475d305a9cf84"
dependencies = [
"cranelift-bitset",
]
[[package]]
name = "cranelift-frontend"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7638ea1efb069a0aa18d8ee67401b6b0d19f6bfe5de5e9ede348bfc80bb0d8c7"
dependencies = [
"cranelift-codegen",
"log",
"smallvec",
"target-lexicon",
]
[[package]]
name = "cranelift-isle"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15c53e1152a0b01c4ed2b1e0535602b8e86458777dd9d18b28732b16325c7dc0"
[[package]]
name = "cranelift-module"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11841b3f54ac480db1e8e8d5678ba901a13b387012d315e3f8fba3e7b7a80447"
dependencies = [
"anyhow",
"cranelift-codegen",
"cranelift-control",
]
[[package]]
name = "cranelift-object"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e235ddfd19f100855ad03358c7ae0a13070c38a000701054cab46458cca6e81"
dependencies = [
"anyhow",
"cranelift-codegen",
"cranelift-control",
"cranelift-module",
"log",
"object",
"target-lexicon",
]
[[package]]
name = "crc32fast"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3"
dependencies = [
"cfg-if",
]
[[package]]
name = "crypto-common"
version = "0.1.6"
@@ -667,7 +414,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba"
dependencies = [
"libc",
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
@@ -694,12 +441,6 @@ version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "foldhash"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f"
[[package]]
name = "form_urlencoded"
version = "1.2.1"
@@ -791,11 +532,6 @@ name = "gimli"
version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
dependencies = [
"fallible-iterator 0.3.0",
"indexmap 2.6.0",
"stable_deref_trait",
]
[[package]]
name = "glob"
@@ -843,9 +579,6 @@ name = "hashbrown"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb"
dependencies = [
"foldhash",
]
[[package]]
name = "hashlink"
@@ -860,17 +593,6 @@ dependencies = [
name = "hbbytecode"
version = "0.1.0"
[[package]]
name = "hbc"
version = "0.1.0"
dependencies = [
"clap",
"cranelift-backend",
"hblang",
"log",
"target-lexicon",
]
[[package]]
name = "hblang"
version = "0.1.0"
@@ -914,7 +636,7 @@ version = "0.5.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
dependencies = [
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
@@ -1041,12 +763,6 @@ dependencies = [
"serde",
]
[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
[[package]]
name = "itertools"
version = "0.12.1"
@@ -1188,7 +904,7 @@ dependencies = [
"hermit-abi",
"libc",
"wasi",
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
@@ -1219,9 +935,6 @@ version = "0.36.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e"
dependencies = [
"crc32fast",
"hashbrown 0.15.0",
"indexmap 2.6.0",
"memchr",
]
@@ -1335,20 +1048,6 @@ dependencies = [
"getrandom",
]
[[package]]
name = "regalloc2"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "145c1c267e14f20fb0f88aa76a1c5ffec42d592c1d28b3cd9148ae35916158d3"
dependencies = [
"allocator-api2",
"bumpalo",
"hashbrown 0.15.0",
"log",
"rustc-hash 2.1.0",
"smallvec",
]
[[package]]
name = "regex"
version = "1.11.1"
@@ -1390,7 +1089,7 @@ dependencies = [
"libc",
"spin",
"untrusted",
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
@@ -1419,12 +1118,6 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustc-hash"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497"
[[package]]
name = "rustix"
version = "0.38.37"
@@ -1435,7 +1128,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
@@ -1499,18 +1192,18 @@ checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
[[package]]
name = "serde"
version = "1.0.217"
version = "1.0.210"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70"
checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.217"
version = "1.0.210"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
dependencies = [
"proc-macro2",
"quote",
@@ -1579,7 +1272,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c"
dependencies = [
"libc",
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
@@ -1594,12 +1287,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "strsim"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "subtle"
version = "2.6.1"
@@ -1608,9 +1295,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "syn"
version = "2.0.87"
version = "2.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590"
dependencies = [
"proc-macro2",
"quote",
@@ -1629,12 +1316,6 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394"
[[package]]
name = "target-lexicon"
version = "0.12.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1"
[[package]]
name = "time"
version = "0.3.36"
@@ -1667,7 +1348,7 @@ dependencies = [
"pin-project-lite",
"socket2",
"tokio-macros",
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
@@ -1798,12 +1479,6 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
[[package]]
name = "utf8parse"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "vcpkg"
version = "0.2.15"
@@ -1919,15 +1594,6 @@ dependencies = [
"windows-targets",
]
[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.52.6"

View file

@@ -12,8 +12,6 @@ members = [
"depell/wasm-fmt",
"depell/wasm-hbc",
"depell/wasm-rt",
"cranelift-backend",
"c",
]
[workspace.dependencies]

View file

@@ -5,6 +5,6 @@ edition = "2018"
[features]
default = ["disasm"]
disasm = ["alloc"]
alloc = []
std = []
disasm = ["std"]

View file

@@ -98,27 +98,6 @@ fn gen_instrs(generated: &mut String) -> Result<(), Box<dyn std::error::Error>>
writeln!(generated, " {name} = {id},")?;
}
writeln!(generated, "}}")?;
writeln!(generated, "impl {instr} {{")?;
writeln!(generated, " pub fn size(self) -> usize {{")?;
writeln!(generated, " match self {{")?;
let mut instrs = instructions().collect::<Vec<_>>();
instrs.sort_unstable_by_key(|&[.., ty, _]| iter_args(ty).map(arg_to_width).sum::<usize>());
for group in instrs.chunk_by(|[.., a, _], [.., b, _]| {
iter_args(a).map(arg_to_width).sum::<usize>()
== iter_args(b).map(arg_to_width).sum::<usize>()
}) {
let ty = group[0][2];
for &[_, name, ..] in group {
writeln!(generated, " | {instr}::{name}")?;
}
generated.pop();
let size = iter_args(ty).map(arg_to_width).sum::<usize>() + 1;
writeln!(generated, " => {size},")?;
}
writeln!(generated, " }}")?;
writeln!(generated, " }}")?;
writeln!(generated, "}}")?;
}
'_arg_kind: {

View file

@@ -254,7 +254,8 @@ pub fn disasm<'a>(
|| global_offset > off + len
|| prev
.get(global_offset as usize)
.is_none_or(|&b| instr_from_byte(b).is_err());
.map_or(true, |&b| instr_from_byte(b).is_err())
|| prev[global_offset as usize] == 0;
has_oob |= local_has_oob;
let label = labels.get(&global_offset).unwrap();
if local_has_oob {

View file

@@ -1,11 +0,0 @@
[package]
name = "hbc"
version = "0.1.0"
edition = "2024"
[dependencies]
clap = { version = "4.5.23", features = ["derive", "env"] }
cranelift-backend = { version = "0.1.0", path = "../cranelift-backend" }
hblang = { workspace = true, features = ["std"] }
log = "0.4.22"
target-lexicon = { version = "0.12", features = ["std"] }

View file

@@ -1,95 +0,0 @@
use {
clap::Parser,
std::{io, str::FromStr},
};
#[derive(Parser)]
struct Args {
/// format depends on the backend used
/// - cranelift-backend expects `<key>=<value>,...` pass `help=me` to see options
#[clap(long, env, default_value = "")]
backend_flags: String,
#[clap(long, short, env, default_value_t = target_lexicon::HOST)]
target: target_lexicon::Triple,
#[clap(long, env, value_parser = ["ableos"])]
path_resolver: Option<String>,
/// format the source code reachable form the root file
#[clap(long, env, default_value_t = false, conflicts_with_all = &["fmt_stdout", "dump_asm"])]
fmt: bool,
/// format the root file only and output the formatted file into stdout
#[clap(long, env, default_value_t = false, conflicts_with_all = &["fmt", "dump_asm"])]
fmt_stdout: bool,
#[clap(long, env, default_value_t = false, conflicts_with_all = &["fmt", "fmt_stdout"])]
dump_asm: bool,
/// extra threads to be used during compilation (currently only parser is parallelized)
#[clap(long, env, default_value_t = 0)]
extra_threads: usize,
/// path to the root file
file: String,
}
fn main() {
use std::io::Write;
fn run(out: &mut Vec<u8>, warnings: &mut String) -> std::io::Result<()> {
let Args {
backend_flags,
target,
path_resolver,
fmt,
fmt_stdout,
dump_asm,
extra_threads,
file,
} = Args::parse();
let resolvers = &[("ableos", hblang::ABLEOS_PATH_RESOLVER)];
let mut native = None;
let backend = if target
== target_lexicon::Triple::from_str(hblang::backend::hbvm::TARGET_TRIPLE).unwrap()
{
None
} else {
Some(
native.insert(
cranelift_backend::Backend::new(target, &backend_flags)
.map_err(io::Error::other)?,
) as &mut dyn hblang::backend::Backend,
)
};
let opts = hblang::Options {
fmt,
fmt_stdout,
dump_asm,
extra_threads,
resolver: resolvers
.iter()
.copied()
.find(|&(name, _)| Some(name) == path_resolver.as_deref())
.map(|(_, v)| v),
backend,
};
hblang::run_compiler(&file, opts, out, warnings)
}
log::set_logger(&hblang::fs::Logger).unwrap();
log::set_max_level(log::LevelFilter::Error);
let mut out = Vec::new();
let mut warnings = String::new();
match run(&mut out, &mut warnings) {
Ok(_) => {
std::io::stderr().write_all(warnings.as_bytes()).unwrap();
std::io::stdout().write_all(&out).unwrap()
}
Err(e) => {
std::io::stderr().write_all(warnings.as_bytes()).unwrap();
std::io::stderr().write_all(&out).unwrap();
std::eprint!("{e}");
std::process::exit(1);
}
}
}

View file

@@ -1,12 +0,0 @@
[package]
name = "cranelift-backend"
version = "0.1.0"
edition = "2024"
[dependencies]
cranelift-codegen = "0.115.0"
cranelift-frontend = "0.115.0"
cranelift-module = "0.115.0"
cranelift-object = "0.115.0"
hblang.workspace = true
target-lexicon = "0.12"

View file

@@ -1,960 +0,0 @@
#![feature(if_let_guard)]
#![feature(slice_take)]
use {
core::panic,
cranelift_codegen::{
self as cc, CodegenError, Final, FinalizedMachReloc, MachBufferFinalized,
ir::{self as cir, InstBuilder, MemFlags, TrapCode, UserExternalName, condcodes},
isa::{LookupError, TargetIsa},
settings::{Configurable, SetError},
},
cranelift_frontend::{self as cf, FunctionBuilder},
cranelift_module::{self as cm, Module, ModuleError},
hblang::{
lexer::TokenKind,
nodes::{self as hbnodes},
ty as hbty,
utils::{self as hbutils, Ent, EntVec},
},
std::{
fmt::{Display, Write},
ops::Range,
},
};
mod x86_64;
pub struct Backend {
ctx: cc::Context,
dt_ctx: cm::DataDescription,
fb_ctx: cf::FunctionBuilderContext,
module: Option<cranelift_object::ObjectModule>,
ctrl_plane: cc::control::ControlPlane,
funcs: Functions,
globals: EntVec<hbty::Global, Global>,
asm: Assembler,
}
impl Backend {
pub fn new(triple: target_lexicon::Triple, flags: &str) -> Result<Self, BackendCreationError> {
Ok(Self {
ctx: cc::Context::new(),
dt_ctx: cm::DataDescription::new(),
fb_ctx: cf::FunctionBuilderContext::default(),
ctrl_plane: cc::control::ControlPlane::default(),
module: cranelift_object::ObjectModule::new(cranelift_object::ObjectBuilder::new(
cc::isa::lookup(triple)?.finish(cc::settings::Flags::new({
let mut bl = cc::settings::builder();
for (k, v) in flags.split(',').filter_map(|s| s.split_once('=')) {
bl.set(k, v).map_err(|err| BackendCreationError::InvalidFlag {
key: k.to_owned(),
value: v.to_owned(),
err,
})?;
}
bl
}))?,
"main",
cm::default_libcall_names(),
)?)
.into(),
funcs: Default::default(),
globals: Default::default(),
asm: Default::default(),
})
}
}
impl hblang::backend::Backend for Backend {
fn assemble_reachable(
&mut self,
from: hbty::Func,
types: &hbty::Types,
files: &hbutils::EntSlice<hbty::Module, hblang::parser::Ast>,
to: &mut Vec<u8>,
) -> hblang::backend::AssemblySpec {
debug_assert!(self.asm.frontier.is_empty());
debug_assert!(self.asm.funcs.is_empty());
debug_assert!(self.asm.globals.is_empty());
let mut module = self.module.take().expect("backend can assemble only once");
fn clif_name_to_ty(name: UserExternalName) -> hbty::Id {
match name.namespace {
0 => hbty::Kind::Func(hbty::Func::new(name.index as _)),
1 => hbty::Kind::Global(hbty::Global::new(name.index as _)),
_ => unreachable!(),
}
.compress()
}
self.globals.shadow(types.ins.globals.len());
self.asm.frontier.push(from.into());
while let Some(itm) = self.asm.frontier.pop() {
match itm.expand() {
hbty::Kind::Func(func) => {
let fd = &types.ins.funcs[func];
if fd.is_import {
self.funcs.headers.shadow(func.index() + 1);
}
let fuc = &mut self.funcs.headers[func];
let file = &files[fd.file];
if fuc.module_id.is_some() {
continue;
}
self.asm.frontier.extend(
fuc.external_names.clone().map(|r| {
clif_name_to_ty(self.funcs.external_names[r as usize].clone())
}),
);
self.asm.name.clear();
if func == from {
self.asm.name.push_str("main");
} else if fd.is_import {
self.asm.name.push_str(file.ident_str(fd.name));
} else {
self.asm.name.push_str(hblang::strip_cwd(&file.path));
self.asm.name.push('.');
self.asm.name.push_str(file.ident_str(fd.name));
}
let linkage = if func == from {
cm::Linkage::Export
} else if fd.is_import {
cm::Linkage::Import
} else {
cm::Linkage::Local
};
build_signature(
module.isa().default_call_conv(),
fd.sig,
types,
&mut self.ctx.func.signature,
&mut vec![],
);
fuc.module_id = Some(
module
.declare_function(&self.asm.name, linkage, &self.ctx.func.signature)
.unwrap(),
);
if !fd.is_import {
self.asm.funcs.push(func);
}
}
hbty::Kind::Global(glob) => {
if self.globals[glob].module_id.is_some() {
continue;
}
self.asm.globals.push(glob);
self.asm.name.clear();
let mutable = if types.ins.globals[glob].file == Default::default() {
writeln!(self.asm.name, "anon{}", glob.index()).unwrap();
false
} else {
let file = &files[types.ins.globals[glob].file];
self.asm.name.push_str(hblang::strip_cwd(&file.path));
self.asm.name.push('.');
self.asm.name.push_str(file.ident_str(types.ins.globals[glob].name));
true
};
self.globals[glob].module_id = Some(
module
.declare_data(&self.asm.name, cm::Linkage::Local, mutable, false)
.unwrap(),
);
}
_ => unreachable!(),
}
}
for &func in &self.asm.funcs {
let fuc = &self.funcs.headers[func];
assert!(!types.ins.funcs[func].is_import);
debug_assert!(!fuc.code.is_empty());
let names = &mut self.funcs.external_names
[fuc.external_names.start as usize..fuc.external_names.end as usize];
self.ctx.func.clear();
names.iter().for_each(|nm| {
let mut nm = nm.clone();
if nm.namespace == 0 {
nm.index = self.funcs.headers[hbty::Func::new(nm.index as _)]
.module_id
.unwrap()
.as_u32();
} else {
nm.index =
self.globals[hbty::Global::new(nm.index as _)].module_id.unwrap().as_u32();
}
self.ctx.func.params.ensure_user_func_name(nm.clone());
});
module
.define_function_bytes(
fuc.module_id.unwrap(),
&self.ctx.func,
fuc.alignment as _,
&self.funcs.code[fuc.code.start as usize..fuc.code.end as usize],
&self.funcs.relocs[fuc.relocs.start as usize..fuc.relocs.end as usize],
)
.unwrap();
}
for global in self.asm.globals.drain(..) {
let glob = &self.globals[global];
self.dt_ctx.clear();
self.dt_ctx.define(types.ins.globals[global].data.clone().into());
module.define_data(glob.module_id.unwrap(), &self.dt_ctx).unwrap();
}
module.finish().object.write_stream(to).unwrap();
hblang::backend::AssemblySpec { code_length: 0, data_length: 0, entry: 0 }
}
fn disasm<'a>(
&'a self,
_sluce: &[u8],
_eca_handler: &mut dyn FnMut(&mut &[u8]),
_types: &'a hbty::Types,
_files: &'a hbutils::EntSlice<hbty::Module, hblang::parser::Ast>,
_output: &mut String,
) -> Result<(), std::boxed::Box<dyn core::error::Error + Send + Sync + 'a>> {
unimplemented!()
}
fn emit_body(
&mut self,
id: hbty::Func,
nodes: &hbnodes::Nodes,
tys: &hbty::Types,
files: &hbutils::EntSlice<hbty::Module, hblang::parser::Ast>,
) {
self.ctx.clear();
let isa = self.module.as_ref().unwrap().isa();
let mut lens = vec![];
let stack_ret = build_signature(
isa.default_call_conv(),
tys.ins.funcs[id].sig,
tys,
&mut self.ctx.func.signature,
&mut lens,
);
FuncBuilder {
bl: FunctionBuilder::new(&mut self.ctx.func, &mut self.fb_ctx),
isa,
nodes,
tys,
files,
values: &mut vec![None; nodes.len()],
arg_lens: &lens,
stack_ret,
}
.build(tys.ins.funcs[id].sig);
self.ctx.func.name =
cir::UserFuncName::User(cir::UserExternalName { namespace: 0, index: id.index() as _ });
//std::eprintln!("{}", self.ctx.func.display());
self.ctx.compile(isa, &mut self.ctrl_plane).unwrap();
let code = self.ctx.compiled_code().unwrap();
self.funcs.push(id, &self.ctx.func, &code.buffer);
}
}
fn build_signature(
call_conv: cc::isa::CallConv,
sig: hbty::Sig,
types: &hbty::Types,
signature: &mut cir::Signature,
arg_meta: &mut Vec<AbiMeta>,
) -> bool {
signature.clear(call_conv);
match call_conv {
cc::isa::CallConv::SystemV => {
x86_64::build_systemv_signature(sig, types, signature, arg_meta)
}
_ => todo!(),
}
}
#[derive(Clone, Copy)]
struct AbiMeta {
trough_mem: bool,
arg_count: usize,
}
struct FuncBuilder<'a, 'b> {
bl: cf::FunctionBuilder<'b>,
isa: &'a dyn TargetIsa,
nodes: &'a hbnodes::Nodes,
tys: &'a hbty::Types,
files: &'a hbutils::EntSlice<hbty::Module, hblang::parser::Ast>,
values: &'b mut [Option<Result<cir::Value, cir::Block>>],
arg_lens: &'a [AbiMeta],
stack_ret: bool,
}
impl FuncBuilder<'_, '_> {
pub fn build(mut self, sig: hbty::Sig) {
let entry = self.bl.create_block();
self.bl.append_block_params_for_function_params(entry);
self.bl.switch_to_block(entry);
let mut arg_vals = &self.bl.block_params(entry).to_vec()[..];
if self.stack_ret {
let ret_ptr = *arg_vals.take_first().unwrap();
self.values[hbnodes::MEM as usize] = Some(Ok(ret_ptr));
}
let Self { nodes, tys, .. } = self;
let mut parama_len = self.arg_lens[1..].iter();
let mut typs = sig.args.args();
let mut args = nodes[hbnodes::VOID].outputs[hbnodes::ARG_START..].iter();
while let Some(aty) = typs.next(tys) {
let hbty::Arg::Value(ty) = aty else { continue };
let abi_meta = parama_len.next().unwrap();
let &arg = args.next().unwrap();
if !abi_meta.trough_mem && ty.is_aggregate(tys) {
let slot = self.bl.create_sized_stack_slot(cir::StackSlotData {
kind: cir::StackSlotKind::ExplicitSlot,
size: self.tys.size_of(ty),
align_shift: self.tys.align_of(ty).ilog2() as _,
});
let loc = arg_vals.take(..abi_meta.arg_count).unwrap();
assert!(loc.len() <= 2, "NEED handling");
let align =
loc.iter().map(|&p| self.bl.func.dfg.value_type(p).bytes()).max().unwrap();
let mut offset = 0i32;
for &v in loc {
self.bl.ins().stack_store(v, slot, offset);
offset += align as i32;
}
self.values[arg as usize] =
Some(Ok(self.bl.ins().stack_addr(cir::types::I64, slot, 0)))
} else {
let loc = arg_vals.take(..abi_meta.arg_count).unwrap();
debug_assert_eq!(loc.len(), 1);
self.values[arg as usize] = Some(Ok(loc[0]));
}
}
self.values[hbnodes::ENTRY as usize] = Some(Err(entry));
self.emit_node(hbnodes::VOID, hbnodes::VOID);
self.bl.finalize();
}
fn value_of(&self, nid: hbnodes::Nid) -> cir::Value {
self.values[nid as usize].unwrap_or_else(|| panic!("{:?}", self.nodes[nid])).unwrap()
}
fn block_of(&self, nid: hbnodes::Nid) -> cir::Block {
self.values[nid as usize].unwrap().unwrap_err()
}
fn close_block(&mut self, nid: hbnodes::Nid) {
if matches!(self.nodes[nid].kind, hbnodes::Kind::Loop) {
return;
}
self.bl.seal_block(self.block_of(nid));
}
fn emit_node(&mut self, nid: hbnodes::Nid, block: hbnodes::Nid) {
use hbnodes::*;
let mut args = vec![];
if matches!(self.nodes[nid].kind, Kind::Region | Kind::Loop) {
let side = 1 + self.values[nid as usize].is_some() as usize;
for &o in self.nodes[nid].outputs.iter() {
if self.nodes[o].is_data_phi() {
args.push(self.value_of(self.nodes[o].inputs[side]));
}
}
match (self.nodes[nid].kind, self.values[nid as usize]) {
(Kind::Loop, Some(blck)) => {
self.bl.ins().jump(blck.unwrap_err(), &args);
self.bl.seal_block(blck.unwrap_err());
self.close_block(block);
return;
}
(Kind::Region, None) => {
let next = self.bl.create_block();
for &o in self.nodes[nid].outputs.iter() {
if self.nodes[o].is_data_phi() {
self.values[o as usize] = Some(Ok(self
.bl
.append_block_param(next, self.nodes[o].ty.to_clif(self.tys))));
}
}
self.bl.ins().jump(next, &args);
self.close_block(block);
self.values[nid as usize] = Some(Err(next));
return;
}
_ => {}
}
}
let node = &self.nodes[nid];
self.values[nid as usize] = Some(match node.kind {
Kind::Start => {
debug_assert_eq!(self.nodes[node.outputs[0]].kind, Kind::Entry);
self.emit_node(node.outputs[0], block);
return;
}
Kind::If => {
let &[_, cnd] = node.inputs.as_slice() else { unreachable!() };
let &[then, else_] = node.outputs.as_slice() else { unreachable!() };
let then_bl = self.bl.create_block();
let else_bl = self.bl.create_block();
let c = self.value_of(cnd);
self.bl.ins().brif(c, then_bl, &[], else_bl, &[]);
self.values[then as usize] = Some(Err(then_bl));
self.values[else_ as usize] = Some(Err(else_bl));
self.close_block(block);
self.bl.switch_to_block(then_bl);
self.emit_node(then, then);
self.bl.switch_to_block(else_bl);
self.emit_node(else_, else_);
Err(self.block_of(block))
}
Kind::Loop => {
let next = self.bl.create_block();
for &o in self.nodes[nid].outputs.iter() {
if self.nodes[o].is_data_phi() {
self.values[o as usize] = Some(Ok(self
.bl
.append_block_param(next, self.nodes[o].ty.to_clif(self.tys))));
}
}
self.values[nid as usize] = Some(Err(next));
self.bl.ins().jump(self.values[nid as usize].unwrap().unwrap_err(), &args);
self.close_block(block);
self.bl.switch_to_block(self.values[nid as usize].unwrap().unwrap_err());
for &o in node.outputs.iter().rev() {
self.emit_node(o, nid);
}
Err(self.block_of(block))
}
Kind::Region => {
self.bl.ins().jump(self.values[nid as usize].unwrap().unwrap_err(), &args);
self.close_block(block);
self.bl.switch_to_block(self.values[nid as usize].unwrap().unwrap_err());
for &o in node.outputs.iter().rev() {
self.emit_node(o, nid);
}
return;
}
Kind::Die => {
self.bl.ins().trap(TrapCode::unwrap_user(1));
self.close_block(block);
self.emit_node(node.outputs[0], block);
Err(self.block_of(block))
}
Kind::Return { .. } => {
let mut ir_args = vec![];
if node.inputs[1] == hbnodes::VOID {
} else {
let abi_meta = self.arg_lens[0];
let arg = node.inputs[1];
if !abi_meta.trough_mem && self.nodes[node.inputs[1]].ty.is_aggregate(self.tys)
{
let loc = self.bl.func.signature.returns.clone();
assert!(loc.len() <= 2, "NEED handling");
let align = loc.iter().map(|&p| p.value_type.bytes()).max().unwrap();
let mut offset = 0i32;
let src = self.value_of(self.nodes[arg].inputs[1]);
debug_assert!(self.nodes[arg].kind == Kind::Load);
for &v in &loc {
ir_args.push(self.bl.ins().load(
v.value_type,
MemFlags::new(),
src,
offset,
));
offset += align as i32;
}
} else if self.stack_ret {
let src = self.value_of(self.nodes[arg].inputs[1]);
let dest = self.value_of(MEM);
self.bl.emit_small_memory_copy(
self.isa.frontend_config(),
dest,
src,
self.tys.size_of(self.nodes[arg].ty) as _,
self.tys.align_of(self.nodes[arg].ty) as _,
self.tys.align_of(self.nodes[arg].ty) as _,
false,
MemFlags::new(),
);
} else {
ir_args.push(self.value_of(arg));
}
}
self.bl.ins().return_(&ir_args);
self.close_block(block);
self.emit_node(node.outputs[0], block);
Err(self.block_of(block))
}
Kind::Entry => {
for &o in node.outputs.iter().rev() {
self.emit_node(o, nid);
}
return;
}
Kind::Then | Kind::Else => {
for &o in node.outputs.iter().rev() {
self.emit_node(o, block);
}
Err(self.block_of(block))
}
Kind::Call { func, unreachable, args } => {
assert_ne!(func, hbty::Func::ECA, "@eca is not supported");
if unreachable {
todo!()
} else {
let mut arg_lens = vec![];
let mut signature = cir::Signature::new(self.isa.default_call_conv());
let stack_ret = build_signature(
self.isa.default_call_conv(),
self.tys.ins.funcs[func].sig,
self.tys,
&mut signature,
&mut arg_lens,
);
let func_ref =
'b: {
let user_name_ref = self.bl.func.declare_imported_user_function(
cir::UserExternalName { namespace: 0, index: func.index() as _ },
);
if let Some(id) = self.bl.func.dfg.ext_funcs.keys().find(|&k| {
self.bl.func.dfg.ext_funcs[k].name
== cir::ExternalName::user(user_name_ref)
}) {
break 'b id;
}
let signature = self.bl.func.import_signature(signature.clone());
self.bl.func.import_function(cir::ExtFuncData {
name: cir::ExternalName::user(user_name_ref),
signature,
// somehow, this works
colocated: true, // !self.tys.ins.funcs[func].is_import,
})
};
let mut ir_args = vec![];
if stack_ret {
ir_args.push(self.value_of(*node.inputs.last().unwrap()));
}
let mut params = signature.params.as_slice();
let mut parama_len = arg_lens[1..].iter();
let mut typs = args.args();
let mut args = node.inputs[1..].iter();
while let Some(aty) = typs.next(self.tys) {
let hbty::Arg::Value(ty) = aty else { continue };
let abi_meta = parama_len.next().unwrap();
if abi_meta.arg_count == 0 {
continue;
}
let &arg = args.next().unwrap();
if !abi_meta.trough_mem && ty.is_aggregate(self.tys) {
let loc = params.take(..abi_meta.arg_count).unwrap();
assert!(loc.len() <= 2, "NEED handling");
let align = loc.iter().map(|&p| p.value_type.bytes()).max().unwrap();
let mut offset = 0i32;
let src = self.value_of(self.nodes[arg].inputs[1]);
debug_assert!(self.nodes[arg].kind == Kind::Load);
for &v in loc {
ir_args.push(self.bl.ins().load(
v.value_type,
MemFlags::new(),
src,
offset,
));
offset += align as i32;
}
} else {
let loc = params.take(..abi_meta.arg_count).unwrap();
debug_assert_eq!(loc.len(), 1);
ir_args.push(self.value_of(arg));
}
}
let inst = self.bl.ins().call(func_ref, &ir_args);
match *self.bl.inst_results(inst) {
[] => {}
[scala] => self.values[nid as usize] = Some(Ok(scala)),
[a, b] => {
assert!(!stack_ret);
let slot = self.value_of(*node.inputs.last().unwrap());
let loc = [a, b];
assert!(loc.len() <= 2, "NEED handling");
let align = loc
.iter()
.map(|&p| self.bl.func.dfg.value_type(p).bytes())
.max()
.unwrap();
let mut offset = 0i32;
for v in loc {
self.bl.ins().store(MemFlags::new(), v, slot, offset);
offset += align as i32;
}
}
_ => unimplemented!(),
}
for &o in node.outputs.iter().rev() {
if self.nodes[o].inputs[0] == nid
|| (matches!(self.nodes[o].kind, Kind::Loop | Kind::Region)
&& self.nodes[o].inputs[1] == nid)
{
self.emit_node(o, block);
}
}
return;
}
}
Kind::CInt { value } if self.nodes[nid].ty.is_float() => {
Ok(match self.tys.size_of(self.nodes[nid].ty) {
4 => self.bl.ins().f32const(f64::from_bits(value as _) as f32),
8 => self.bl.ins().f64const(f64::from_bits(value as _)),
_ => unimplemented!(),
})
}
Kind::CInt { value } => Ok(self.bl.ins().iconst(
cir::Type::int(self.tys.size_of(node.ty) as u16 * 8).unwrap_or_else(|| {
panic!("{}", hbty::Display::new(self.tys, self.files, node.ty),)
}),
value,
)),
Kind::BinOp { op } => {
let &[_, lhs, rhs] = node.inputs.as_slice() else { unreachable!() };
let [lh, rh] = [self.value_of(lhs), self.value_of(rhs)];
let is_int_op = node.ty.is_integer()
|| node.ty.is_pointer()
|| (node.ty == hbty::Id::BOOL
&& (self.nodes[lhs].ty.is_integer()
|| node.ty.is_pointer()
|| self.nodes[lhs].ty == hbty::Id::BOOL));
let is_float_op = node.ty.is_float()
|| (node.ty == hbty::Id::BOOL && self.nodes[lhs].ty.is_float());
Ok(if is_int_op {
let signed = node.ty.is_signed();
match op {
TokenKind::Add => self.bl.ins().iadd(lh, rh),
TokenKind::Sub => self.bl.ins().isub(lh, rh),
TokenKind::Mul => self.bl.ins().imul(lh, rh),
TokenKind::Shl => self.bl.ins().ishl(lh, rh),
TokenKind::Xor => self.bl.ins().bxor(lh, rh),
TokenKind::Band => self.bl.ins().band(lh, rh),
TokenKind::Bor => self.bl.ins().bor(lh, rh),
TokenKind::Div if signed => self.bl.ins().sdiv(lh, rh),
TokenKind::Mod if signed => self.bl.ins().srem(lh, rh),
TokenKind::Shr if signed => self.bl.ins().sshr(lh, rh),
TokenKind::Div => self.bl.ins().udiv(lh, rh),
TokenKind::Mod => self.bl.ins().urem(lh, rh),
TokenKind::Shr => self.bl.ins().ushr(lh, rh),
TokenKind::Lt
| TokenKind::Gt
| TokenKind::Le
| TokenKind::Ge
| TokenKind::Eq
| TokenKind::Ne => self.bl.ins().icmp(op.to_int_cc(signed), lh, rh),
op => todo!("{op}"),
}
} else if is_float_op {
match op {
TokenKind::Add => self.bl.ins().fadd(lh, rh),
TokenKind::Sub => self.bl.ins().fsub(lh, rh),
TokenKind::Mul => self.bl.ins().fmul(lh, rh),
TokenKind::Div => self.bl.ins().fdiv(lh, rh),
TokenKind::Lt
| TokenKind::Gt
| TokenKind::Le
| TokenKind::Ge
| TokenKind::Eq
| TokenKind::Ne => self.bl.ins().fcmp(op.to_float_cc(), lh, rh),
op => todo!("{op}"),
}
} else {
todo!("{}", hbty::Display::new(self.tys, self.files, node.ty))
})
}
Kind::RetVal => Ok(self.value_of(node.inputs[0])),
Kind::UnOp { op } => {
let oper = self.value_of(node.inputs[1]);
let dst = node.ty;
let src = self
.tys
.inner_of(self.nodes[node.inputs[1]].ty)
.unwrap_or(self.nodes[node.inputs[1]].ty);
let dty = dst.to_clif(self.tys);
Ok(match op {
TokenKind::Sub => self.bl.ins().ineg(oper),
TokenKind::Not => self.bl.ins().bnot(oper),
TokenKind::Float if dst.is_float() && src.is_unsigned() => {
self.bl.ins().fcvt_from_uint(dty, oper)
}
TokenKind::Float if dst.is_float() && src.is_signed() => {
self.bl.ins().fcvt_from_sint(dty, oper)
}
TokenKind::Number if src.is_float() && dst.is_unsigned() => {
self.bl.ins().fcvt_to_uint(dty, oper)
}
TokenKind::Number
if src.is_signed() && (dst.is_integer() || dst.is_pointer()) =>
{
self.bl.ins().sextend(dty, oper)
}
TokenKind::Number
if (src.is_unsigned() || src == hbty::Id::BOOL)
&& (dst.is_integer() || dst.is_pointer()) =>
{
self.bl.ins().uextend(dty, oper)
}
TokenKind::Float if dst == hbty::Id::F64 && src.is_float() => {
self.bl.ins().fpromote(dty, oper)
}
TokenKind::Float if dst == hbty::Id::F32 && src.is_float() => {
self.bl.ins().fdemote(dty, oper)
}
_ => todo!(),
})
}
Kind::Stck => {
let slot = self.bl.create_sized_stack_slot(cir::StackSlotData {
kind: cir::StackSlotKind::ExplicitSlot,
size: self.tys.size_of(node.ty),
align_shift: self.tys.align_of(node.ty).ilog2() as _,
});
Ok(self.bl.ins().stack_addr(cir::types::I64, slot, 0))
}
Kind::Global { global } => {
let glob_ref = {
// already deduplicated by the SoN
let colocated = true;
let user_name_ref =
self.bl.func.declare_imported_user_function(cir::UserExternalName {
namespace: 1,
index: global.index() as u32,
});
self.bl.func.create_global_value(cir::GlobalValueData::Symbol {
name: cir::ExternalName::user(user_name_ref),
offset: cir::immediates::Imm64::new(0),
colocated,
tls: false,
})
};
Ok(self.bl.ins().global_value(cir::types::I64, glob_ref))
}
Kind::Load if node.ty.is_aggregate(self.tys) => return,
Kind::Load => {
let ptr = self.value_of(node.inputs[1]);
Ok(self.bl.ins().load(node.ty.to_clif(self.tys), MemFlags::new(), ptr, 0))
}
Kind::Stre if node.ty.is_aggregate(self.tys) => {
let src = self.value_of(self.nodes[node.inputs[1]].inputs[1]);
let dest = self.value_of(node.inputs[2]);
self.bl.emit_small_memory_copy(
self.isa.frontend_config(),
dest,
src,
self.tys.size_of(node.ty) as _,
self.tys.align_of(node.ty) as _,
self.tys.align_of(node.ty) as _,
false,
MemFlags::new(),
);
return;
}
Kind::Stre => {
let value = self.value_of(node.inputs[1]);
let ptr = self.value_of(node.inputs[2]);
self.bl.ins().store(MemFlags::new(), value, ptr, 0);
return;
}
Kind::End | Kind::Phi | Kind::Arg | Kind::Mem | Kind::Loops | Kind::Join => return,
Kind::Assert { .. } => unreachable!(),
});
}
}
trait ToCondcodes {
fn to_int_cc(self, signed: bool) -> condcodes::IntCC;
fn to_float_cc(self) -> condcodes::FloatCC;
}
impl ToCondcodes for TokenKind {
fn to_int_cc(self, signed: bool) -> condcodes::IntCC {
use condcodes::IntCC as ICC;
match self {
Self::Lt if signed => ICC::SignedLessThan,
Self::Gt if signed => ICC::SignedGreaterThan,
Self::Le if signed => ICC::SignedLessThanOrEqual,
Self::Ge if signed => ICC::SignedGreaterThanOrEqual,
Self::Lt => ICC::UnsignedLessThan,
Self::Gt => ICC::UnsignedGreaterThan,
Self::Le => ICC::UnsignedLessThanOrEqual,
Self::Ge => ICC::UnsignedGreaterThanOrEqual,
Self::Eq => ICC::Equal,
Self::Ne => ICC::NotEqual,
_ => unreachable!(),
}
}
fn to_float_cc(self) -> condcodes::FloatCC {
use condcodes::FloatCC as FCC;
match self {
Self::Lt => FCC::LessThan,
Self::Gt => FCC::GreaterThan,
Self::Le => FCC::LessThanOrEqual,
Self::Ge => FCC::GreaterThanOrEqual,
Self::Eq => FCC::Equal,
Self::Ne => FCC::NotEqual,
_ => unreachable!(),
}
}
}
trait ToClifTy {
fn to_clif(self, cx: &hbty::Types) -> cir::Type;
}
impl ToClifTy for hbty::Id {
fn to_clif(self, cx: &hbty::Types) -> cir::Type {
debug_assert!(!self.is_aggregate(cx));
if self.is_integer() | self.is_pointer() | self.is_optional() || self == hbty::Id::BOOL {
cir::Type::int(cx.size_of(self) as u16 * 8).unwrap()
} else if self == hbty::Id::F32 {
cir::types::F32
} else if self == hbty::Id::F64 {
cir::types::F64
} else {
unimplemented!("{:?}", self)
}
}
}
#[derive(Default)]
struct Global {
module_id: Option<cm::DataId>,
}
#[derive(Default)]
struct FuncHeaders {
module_id: Option<cm::FuncId>,
alignment: u32,
code: Range<u32>,
relocs: Range<u32>,
external_names: Range<u32>,
}
#[derive(Default)]
struct Functions {
headers: EntVec<hbty::Func, FuncHeaders>,
code: Vec<u8>,
relocs: Vec<FinalizedMachReloc>,
external_names: Vec<UserExternalName>,
}
impl Functions {
fn push(&mut self, id: hbty::Func, func: &cir::Function, code: &MachBufferFinalized<Final>) {
self.headers.shadow(id.index() + 1);
self.headers[id] = FuncHeaders {
module_id: None,
alignment: code.alignment,
code: self.code.len() as u32..self.code.len() as u32 + code.data().len() as u32,
relocs: self.relocs.len() as u32..self.relocs.len() as u32 + code.relocs().len() as u32,
external_names: self.external_names.len() as u32
..self.external_names.len() as u32 + func.params.user_named_funcs().len() as u32,
};
self.code.extend(code.data());
self.relocs.extend(code.relocs().iter().cloned());
self.external_names.extend(func.params.user_named_funcs().values().cloned());
}
}
#[derive(Default)]
struct Assembler {
name: String,
frontier: Vec<hbty::Id>,
globals: Vec<hbty::Global>,
funcs: Vec<hbty::Func>,
}
#[derive(Debug)]
pub enum BackendCreationError {
UnsupportedTriplet(LookupError),
InvalidFlags(CodegenError),
UnsupportedModuleConfig(ModuleError),
InvalidFlag { key: String, value: String, err: SetError },
}
impl Display for BackendCreationError {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
BackendCreationError::UnsupportedTriplet(err) => {
write!(f, "Unsupported triplet: {}", err)
}
BackendCreationError::InvalidFlags(err) => {
write!(f, "Invalid flags: {}", err)
}
BackendCreationError::UnsupportedModuleConfig(err) => {
write!(f, "Unsupported module configuration: {}", err)
}
BackendCreationError::InvalidFlag { key, value, err } => {
write!(
f,
"Problem setting a '{key}' to '{value}': {err}\navailable flags: {}",
cc::settings::Flags::new(cc::settings::builder())
)
}
}
}
}
impl core::error::Error for BackendCreationError {}
impl From<LookupError> for BackendCreationError {
fn from(value: LookupError) -> Self {
Self::UnsupportedTriplet(value)
}
}
impl From<CodegenError> for BackendCreationError {
fn from(value: CodegenError) -> Self {
Self::InvalidFlags(value)
}
}
impl From<ModuleError> for BackendCreationError {
fn from(value: ModuleError) -> Self {
Self::UnsupportedModuleConfig(value)
}
}

View file

@@ -1,310 +0,0 @@
// The classification code for the x86_64 ABI is taken from the clay language
// https://github.com/jckarter/clay/blob/db0bd2702ab0b6e48965cd85f8859bbd5f60e48e/compiler/externals.cpp
use {crate::AbiMeta, hblang::ty};
pub fn build_systemv_signature(
sig: hblang::ty::Sig,
types: &hblang::ty::Types,
signature: &mut cranelift_codegen::ir::Signature,
arg_lens: &mut Vec<AbiMeta>,
) -> bool {
let mut alloca = Alloca::new();
alloca.next(false, sig.ret, types, &mut signature.returns);
let stack_ret = signature.returns.len() == 1
&& signature.returns[0].purpose == cranelift_codegen::ir::ArgumentPurpose::StructReturn;
if stack_ret {
signature.params.append(&mut signature.returns);
arg_lens.push(AbiMeta { arg_count: signature.params.len(), trough_mem: true });
} else {
arg_lens.push(AbiMeta { arg_count: signature.returns.len(), trough_mem: false });
}
let mut args = sig.args.args();
while let Some(arg) = args.next_value(types) {
let prev = signature.params.len();
let trough_mem = alloca.next(true, arg, types, &mut signature.params);
arg_lens.push(AbiMeta { arg_count: signature.params.len() - prev, trough_mem });
}
stack_ret
}
/// Classification of "eightbyte" components.
// N.B., the order of the variants is from general to specific,
// such that `unify(a, b)` is the "smaller" of `a` and `b`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Class {
Int,
Sse,
SseUp,
}
#[derive(Clone, Copy, Debug)]
struct Memory;
// Currently supported vector size (AVX-512).
const LARGEST_VECTOR_SIZE: usize = 512;
const MAX_EIGHTBYTES: usize = LARGEST_VECTOR_SIZE / 64;
fn classify_arg(
cx: &hblang::ty::Types,
arg: hblang::ty::Id,
) -> Result<[Option<Class>; MAX_EIGHTBYTES], Memory> {
fn classify(
cx: &hblang::ty::Types,
layout: hblang::ty::Id,
cls: &mut [Option<Class>],
off: hblang::ty::Offset,
) -> Result<(), Memory> {
let size = cx.size_of(layout);
if off & (cx.align_of(layout) - 1) != 0 {
if size != 0 {
return Err(Memory);
}
return Ok(());
}
let mut c = match layout.expand() {
_ if size == 0 => return Ok(()),
_ if layout.is_integer() || layout.is_pointer() || layout == ty::Id::BOOL => Class::Int,
_ if layout.is_float() => Class::Sse,
hblang::ty::Kind::Struct(s) => {
for (f, foff) in hblang::ty::OffsetIter::new(s, cx).into_iter(cx) {
classify(cx, f.ty, cls, off + foff)?;
}
return Ok(());
}
hblang::ty::Kind::Tuple(tuple) => {
for (&ty, foff) in hblang::ty::OffsetIter::new(tuple, cx).into_iter(cx) {
classify(cx, ty, cls, off + foff)?;
}
return Ok(());
}
hblang::ty::Kind::Enum(_) => Class::Int,
hblang::ty::Kind::Union(union) => {
for f in cx.union_fields(union) {
classify(cx, f.ty, cls, off)?;
}
return Ok(());
}
hblang::ty::Kind::Slice(slice) if let Some(len) = cx.ins.slices[slice].len() => {
for i in 0..len as u32 {
classify(
cx,
cx.ins.slices[slice].elem,
cls,
off + i * cx.size_of(cx.ins.slices[slice].elem),
)?;
}
return Ok(());
}
hblang::ty::Kind::Slice(_) => {
classify(cx, hblang::ty::Id::UINT, cls, off)?;
classify(cx, hblang::ty::Id::UINT, cls, off + 8)?;
return Ok(());
}
hblang::ty::Kind::Opt(opt) => {
let base = cx.ins.opts[opt].base;
if cx.nieche_of(base).is_some() {
classify(cx, base, cls, off)?;
} else {
classify(cx, hblang::ty::Id::BOOL, cls, off)?;
classify(cx, base, cls, off + cx.align_of(base))?;
}
return Ok(());
}
ty => unimplemented!("{ty:?}"),
};
// Fill in `cls` for scalars (Int/Sse) and vectors (Sse).
let first = (off / 8) as usize;
let last = ((off + size - 1) / 8) as usize;
for cls in &mut cls[first..=last] {
*cls = Some(cls.map_or(c, |old| old.min(c)));
// Everything after the first Sse "eightbyte"
// component is the upper half of a register.
if c == Class::Sse {
c = Class::SseUp;
}
}
Ok(())
}
let size = cx.size_of(arg);
let n = ((size + 7) / 8) as usize;
if n > MAX_EIGHTBYTES {
return Err(Memory);
}
let mut cls = [None; MAX_EIGHTBYTES];
classify(cx, arg, &mut cls, 0)?;
if n > 2 {
if cls[0] != Some(Class::Sse) {
return Err(Memory);
}
if cls[1..n].iter().any(|&c| c != Some(Class::SseUp)) {
return Err(Memory);
}
} else {
let mut i = 0;
while i < n {
if cls[i] == Some(Class::SseUp) {
cls[i] = Some(Class::Sse);
} else if cls[i] == Some(Class::Sse) {
i += 1;
while i != n && cls[i] == Some(Class::SseUp) {
i += 1;
}
} else {
i += 1;
}
}
}
Ok(cls)
}
fn reg_component(
cls: &[Option<Class>],
i: &mut usize,
size: hblang::ty::Size,
) -> Option<cranelift_codegen::ir::Type> {
if *i >= cls.len() {
return None;
}
match cls[*i] {
None => None,
Some(Class::Int) => {
*i += 1;
Some(if size < 8 {
cranelift_codegen::ir::Type::int(size as u16 * 8).unwrap()
} else {
cranelift_codegen::ir::types::I64
})
}
Some(Class::Sse) => {
let vec_len =
1 + cls[*i + 1..].iter().take_while(|&&c| c == Some(Class::SseUp)).count();
*i += vec_len;
Some(if vec_len == 1 {
match size {
4 => cranelift_codegen::ir::types::F32,
_ => cranelift_codegen::ir::types::F64,
}
} else {
cranelift_codegen::ir::types::I64.by(vec_len as _).unwrap()
})
}
Some(c) => unreachable!("reg_component: unhandled class {:?}", c),
}
}
fn cast_target(
cls: &[Option<Class>],
size: hblang::ty::Size,
dest: &mut Vec<cranelift_codegen::ir::AbiParam>,
) {
let mut i = 0;
let lo = reg_component(cls, &mut i, size).unwrap();
let offset = 8 * (i as u32);
dest.push(cranelift_codegen::ir::AbiParam::new(lo));
if size > offset {
if let Some(hi) = reg_component(cls, &mut i, size - offset) {
dest.push(cranelift_codegen::ir::AbiParam::new(hi));
}
}
assert_eq!(reg_component(cls, &mut i, 0), None);
}
const MAX_INT_REGS: usize = 6; // RDI, RSI, RDX, RCX, R8, R9
const MAX_SSE_REGS: usize = 8; // XMM0-7
pub struct Alloca {
int_regs: usize,
sse_regs: usize,
}
impl Alloca {
pub fn new() -> Self {
Self { int_regs: MAX_INT_REGS, sse_regs: MAX_SSE_REGS }
}
pub fn next(
&mut self,
is_arg: bool,
arg: hblang::ty::Id,
cx: &hblang::ty::Types,
dest: &mut Vec<cranelift_codegen::ir::AbiParam>,
) -> bool {
if cx.size_of(arg) == 0 {
return false;
}
let mut cls_or_mem = classify_arg(cx, arg);
if is_arg {
if let Ok(cls) = cls_or_mem {
let mut needed_int = 0;
let mut needed_sse = 0;
for c in cls {
match c {
Some(Class::Int) => needed_int += 1,
Some(Class::Sse) => needed_sse += 1,
_ => {}
}
}
match (self.int_regs.checked_sub(needed_int), self.sse_regs.checked_sub(needed_sse))
{
(Some(left_int), Some(left_sse)) => {
self.int_regs = left_int;
self.sse_regs = left_sse;
}
_ => {
// Not enough registers for this argument, so it will be
// passed on the stack, but we only mark aggregates
// explicitly as indirect `byval` arguments, as LLVM will
// automatically put immediates on the stack itself.
if arg.is_aggregate(cx) {
cls_or_mem = Err(Memory);
}
}
}
}
}
match cls_or_mem {
Err(Memory) => {
if is_arg {
dest.push(cranelift_codegen::ir::AbiParam::new(
cranelift_codegen::ir::types::I64,
));
} else {
dest.push(cranelift_codegen::ir::AbiParam::special(
cranelift_codegen::ir::types::I64,
cranelift_codegen::ir::ArgumentPurpose::StructReturn,
));
}
true
}
Ok(ref cls) => {
// split into sized chunks passed individually
if arg.is_aggregate(cx) {
cast_target(cls, cx.size_of(arg), dest);
} else {
dest.push(cranelift_codegen::ir::AbiParam::new(
reg_component(cls, &mut 0, cx.size_of(arg)).unwrap(),
));
}
false
}
}
}
}

View file

@@ -1,4 +1,4 @@
# The journey to an optimizing compiler
# The journey to optimizing compiler
It's been years since I started continuously trying to make a compiler to implement the language of my dreams. The problem, though, was that I wanted something similar to Rust, whose compiler, `rustc`, if you did not know, far exceeded the one million lines of code mark some time ago, so implementing such a language would take me years if not decades, but I still tried it.
@@ -28,34 +28,4 @@ It took around 4 months to reimplement everything make make the optimal code loo
## How my understanding of optimizations changed
### Optimizations allow us to scale software
I need to admit that before writing a single-pass compiler and later upgrading it to an optimizing one, I thought optimizations only affected the quality of the final assembly emitted by the compiler. It never occurred to me that what optimizations actually do is reduce the impact of how you decide to write the code. In a single-pass compiler (with zero optimizations), the machine code reflects:
- the order of operations as written in the code
- whether a value was stored in intermediate locations
- the exact structure of the control flow and where each operation is placed within it
- how many times something is recomputed
- operations that exist only to convey intent to the reader of the source code
- and more I can't think of...
If you took some code you wrote and then modified it to obfuscate these aspects (relative to the original code), you would be doing a subset of what an optimizing compiler does. Of course, a good compiler tries hard to improve the metrics it is optimizing for; it will:
- reorder operations to allow the CPU to parallelize them
- remove needless stores, or store values directly to places you can't express in code
- pull operations out of loops and into branches (if it can)
- find all common sub-expressions and compute them only once
- fold constants as much as possible and use obscure tricks to replace slow instructions when any of the operands are constant
- and more...
In the end, compiler optimizations try to reduce the correlation between how the code happens to be written and how well it performs, which is extremely important when you want humans to read the code (the sketch below illustrates the idea).
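To make that concrete, here is a minimal Rust sketch (not part of this diff, and the function names are made up for illustration): two ways of writing the same loop. A compiler with zero optimizations emits exactly what you wrote, so the first version redoes the loop-invariant multiplication on every iteration; an optimizing compiler hoists that work and computes the common sub-expression once, so both versions should end up performing roughly the same.

```rust
// "Readable" version: recomputes `scale * scale` on every iteration
// and goes through the indexed bounds check each time.
fn readable(values: &mut [f32], scale: f32) {
    for i in 0..values.len() {
        values[i] = values[i] * (scale * scale) + 1.0;
    }
}

// Hand-optimized version: the loop-invariant factor is computed once.
// With optimizations enabled, a good compiler rewrites `readable`
// into roughly this shape on its own.
fn hand_optimized(values: &mut [f32], scale: f32) {
    let factor = scale * scale;
    for v in values.iter_mut() {
        *v = *v * factor + 1.0;
    }
}
```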
### Optimizing compilers know more than you
Optimizing code is a search problem: an optimizer searches the code for patterns that can be rewritten into something more practical for the computer, while preserving the observable behavior of the program. This means it needs enough context about the code to not make a mistake. In fact, the optimizer has so much context that it is able to determine your code is useless. But wait, didn't you write the code because you needed it to do something? Maybe your intention was to break out of the loop after you are done, but the optimizer looked at the code and said, "great, we are so lucky that this integer is always small enough to miss this check by one, DELETE", and then it goes "jackpot, since this loop is now infinite, we don't need the code after it, DELETE". Notice that the optimizer is eager to delete dead code; it did not ask you "Brah, why did you place all your code after an infinite loop?". This is just an example, and there are many more cases where modern optimizers delete all your code because they proved it does something invalid, without ever running it.
It's stupid, but it's the world we live in: optimizers are usually a black box you import and feed the code in a format they understand; they then proceed to optimize it, and if they find a glaring bug they won't tell you, god forbid, they will just molest the code in unspecified ways and spit out what's left. Before writing an optimizer, I did not know this can happen, and I did not know this is a problem I pay for with my time, spent figuring out why nothing happens when I run the program.
But wait, it's worse! Since optimizers won't ever share the fact that you are stupid, we end up with other people painstakingly writing complex linters that do a shitty job of detecting the things that matter, and instead whine about style and other bullcrap (and they suck even at that). If the people who write linters and the people who write optimizers swapped roles, I would be ranting about optimizers instead.
And so, this is the area where I want to innovate: let's report the dead code to the frontend, and let the compiler frontend filter out the noise and show the relevant information in the diagnostics. Refuse to compile the program if you write `i /= 0`. Refuse to compile if you write `arr[arr.len]`. This is the level of stupidity the optimizer sees once it normalizes your code, yet it proceeds to protect your feelings. My goal is for hblang to relay this to you as much as possible. If we can query for optimizations, we can query for bugs too.
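As a hypothetical illustration of the `arr[arr.len]` case, here is a Rust sketch (hblang is the actual target; the function name is invented for this example): the optimizer can prove the index is always out of bounds, which makes everything after it dead, yet a conventional toolchain typically compiles this without a word instead of surfacing that finding as a diagnostic.

```rust
// Sketch: the optimizer can prove the indexing always fails,
// so the rest of the function is dead code.
fn last_plus_one(arr: &[i32]) -> i32 {
    let x = arr[arr.len()]; // always out of bounds: valid indices end at len() - 1
    x + 1                   // provably unreachable, but you get no diagnostic
}
```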
I need to admit that before writing a single-pass compiler and later upgrading it to an optimizing one, I took optimizations as some magic that makes code faster, and I honestly believed they are optional and that most of the hard work is done in the process of translating readable text to machine code. That is almost true, minus the readable part. If you want the code you write to perform well with a compiler that translates your code from text to instructions exactly as written, you will be forced to do everything modern optimizers do by hand, in your code. TODO...

View file

@@ -9,7 +9,7 @@ use {
backend::hbvm::HbvmBackend,
son::{Codegen, CodegenCtx},
ty::Module,
utils::Ent,
Ent,
},
};

View file

@@ -1,47 +0,0 @@
InitWindow := fn(w: uint, h: uint, name: ^u8): uint @import()
WindowShouldClose := fn(): bool @import()
BeginDrawing := fn(): void @import()
EndDrawing := fn(): void @import()
DrawRectangleV := fn(pos: Vec2, size: Vec2, color: Color): void @import()
DrawRectangle := fn(a: uint, b: uint, c: uint, d: uint, color: Color): void @import()
ClearBackground := fn(color: Color): void @import()
SetTargetFPS := fn(target: uint): void @import()
GetFrameTime := fn(): f32 @import()
Vec2 := struct {x: f32, y: f32}
Color := struct {r: u8, g: u8, b: u8, a: u8}
$W := 800
$H := 600
main := fn(): uint {
_ = InitWindow(W, H, "whawee\0".ptr)
SetTargetFPS(60)
pos := Vec2.(100, 100)
vel := Vec2.(300, 300)
size := Vec2.(100, 100)
color := Color.(17, 255, 17, 255)
loop if WindowShouldClose() break else {
BeginDrawing()
ClearBackground(.(0, 0, 0, 255))
DrawRectangleV(pos, size, color)
pos += vel * .(GetFrameTime(), GetFrameTime())
if pos.x < 0 | pos.x + size.x > W {
vel.x *= -1
color += .(32, 11, 20, 0)
}
if pos.y < 0 | pos.y + size.y > H {
vel.y *= -1
color += .(32, 11, 20, 0)
}
EndDrawing()
}
return 0
}

View file

@ -1,4 +0,0 @@
#!/bin/bash
DIR=$(dirname $0)
cd $DIR
cargo run -p hbc main.hb > out.o && gcc -o main out.o -lraylib -lm -ldl -lpthread -lrt -lGL -lX11 && ./main

View file

@ -3,13 +3,17 @@ name = "hblang"
version = "0.1.0"
edition = "2021"
[[bin]]
name = "hbc"
path = "src/main.rs"
[[bin]]
name = "fuzz"
path = "src/fuzz_main.rs"
[dependencies]
hbbytecode = { workspace = true, features = ["disasm"] }
hbvm = { workspace = true, features = ["nightly", "alloc"] }
hbvm = { workspace = true, features = ["nightly"] }
hashbrown = { version = "0.15.0", default-features = false, features = ["raw-entry"] }
log = "0.4.22"

File diff suppressed because one or more lines are too long

View file

@ -1,35 +0,0 @@
use std::{fmt::Write, iter};
fn main() {
const TEST_FILE: &str = "src/testcases.rs";
const INPUT: &str = include_str!("./README.md");
let mut out = String::new();
for (name, code) in block_iter(INPUT) {
let name = name.replace(' ', "_");
_ = writeln!(
out,
"#[test] fn {name}() {{ run_codegen_test(\"{name}\", r##\"{code}\"##) }}"
);
}
std::fs::write(TEST_FILE, out).unwrap();
}
fn block_iter(mut input: &str) -> impl Iterator<Item = (&str, &str)> {
const CASE_PREFIX: &str = "#### ";
const CASE_SUFFIX: &str = "\n```hb";
iter::from_fn(move || loop {
let pos = input.find(CASE_PREFIX)?;
input = unsafe { input.get_unchecked(pos + CASE_PREFIX.len()..) };
let Some((test_name, rest)) = input.split_once(CASE_SUFFIX) else { continue };
if !test_name.chars().all(|c| c.is_alphanumeric() || c == '_') {
continue;
}
input = rest;
let (body, rest) = input.split_once("```").unwrap_or((input, ""));
input = rest;
break Some((test_name, body));
})
}

5
lang/command-help.txt Normal file
View file

@ -0,0 +1,5 @@
--fmt - format all imported source files
--fmt-stdout - don't write the formatted file but print it
--dump-asm - output assembly instead of raw code (the assembly is more for debugging the compiler)
--threads <1...> - number of extra threads compiler can use [default: 0]
--path-resolver <name> - choose between builtin path resolvers, options are: ableos

View file

@ -8,7 +8,7 @@ use {
utils::{EntSlice, EntVec},
},
alloc::{boxed::Box, collections::BTreeMap, string::String, vec::Vec},
core::{assert_matches::debug_assert_matches, error, mem, ops::Range},
core::{assert_matches::debug_assert_matches, mem, ops::Range},
hbbytecode::{self as instrs, *},
reg::Reg,
};
@ -106,8 +106,6 @@ pub struct HbvmBackend {
offsets: Vec<Offset>,
}
pub const TARGET_TRIPLE: &str = "unknown-virt-unknown";
impl HbvmBackend {
fn emit(&mut self, instr: (usize, [u8; instrs::MAX_SIZE])) {
emit(&mut self.code, instr);
@ -115,18 +113,12 @@ impl HbvmBackend {
}
impl Backend for HbvmBackend {
fn assemble_bin(
&mut self,
entry: ty::Func,
types: &Types,
files: &EntSlice<Module, parser::Ast>,
to: &mut Vec<u8>,
) {
fn assemble_bin(&mut self, entry: ty::Func, types: &Types, to: &mut Vec<u8>) {
to.extend([0u8; HEADER_SIZE]);
binary_prelude(to);
let AssemblySpec { code_length, data_length, entry } =
self.assemble_reachable(entry, types, files, to);
self.assemble_reachable(entry, types, to);
let exe = AbleOsExecutableHeader {
magic_number: [0x15, 0x91, 0xD2],
@ -146,7 +138,6 @@ impl Backend for HbvmBackend {
&mut self,
from: ty::Func,
types: &Types,
_files: &EntSlice<Module, parser::Ast>,
to: &mut Vec<u8>,
) -> AssemblySpec {
debug_assert!(self.asm.frontier.is_empty());
@ -224,7 +215,7 @@ impl Backend for HbvmBackend {
types: &'a Types,
files: &'a EntSlice<Module, parser::Ast>,
output: &mut String,
) -> Result<(), alloc::boxed::Box<dyn error::Error + Send + Sync + 'a>> {
) -> Result<(), hbbytecode::DisasmError<'a>> {
use hbbytecode::DisasmItem;
let functions = types
.ins
@ -259,7 +250,7 @@ impl Backend for HbvmBackend {
}),
)
.collect::<BTreeMap<_, _>>();
hbbytecode::disasm(&mut sluce, &functions, output, eca_handler).map_err(Into::into)
hbbytecode::disasm(&mut sluce, &functions, output, eca_handler)
}
fn emit_ct_body(
@ -323,7 +314,7 @@ impl Backend for HbvmBackend {
&& self
.jump_relocs
.last()
.is_none_or(|&(r, _)| self.offsets[r as usize] as usize != self.code.len())
.map_or(true, |&(r, _)| self.offsets[r as usize] as usize != self.code.len())
{
self.code.truncate(self.code.len() - 5);
self.ret_relocs.pop();
@ -615,8 +606,9 @@ impl TokenKind {
Some(ops[size.ilog2() as usize])
}
fn unop(&self, dst: ty::Id, src: ty::Id, tys: &Types) -> Option<fn(u8, u8) -> EncodedInstr> {
let src_idx = tys.size_of(src).ilog2() as usize;
fn unop(&self, dst: ty::Id, src: ty::Id) -> Option<fn(u8, u8) -> EncodedInstr> {
let src_idx =
src.simple_size().unwrap_or_else(|| panic!("{:?}", src.expand())).ilog2() as usize;
Some(match self {
Self::Sub => [
|a, b| sub8(a, reg::ZERO, b),
@ -663,14 +655,6 @@ enum PLoc {
Ref(Reg, u32),
}
impl PLoc {
fn reg(self) -> u8 {
match self {
PLoc::Reg(r, _) | PLoc::WideReg(r, _) | PLoc::Ref(r, _) => r,
}
}
}
struct ParamAlloc(Range<Reg>);
impl ParamAlloc {
@ -706,7 +690,42 @@ fn binary_prelude(to: &mut Vec<u8>) {
#[derive(Default)]
pub struct LoggedMem {
pub mem: hbvm::mem::HostMemory,
logger: hbvm::mem::InstrLogger,
op_buf: Vec<hbbytecode::Oper>,
disp_buf: String,
prev_instr: Option<hbbytecode::Instr>,
}
impl LoggedMem {
unsafe fn display_instr<T>(&mut self, instr: hbbytecode::Instr, addr: hbvm::mem::Address) {
let novm: *const hbvm::Vm<Self, 0> = core::ptr::null();
let offset = core::ptr::addr_of!((*novm).memory) as usize;
let regs = unsafe {
&*core::ptr::addr_of!(
(*(((self as *mut _ as *mut u8).sub(offset)) as *const hbvm::Vm<Self, 0>))
.registers
)
};
let mut bytes = core::slice::from_raw_parts(
(addr.get() - 1) as *const u8,
core::mem::size_of::<T>() + 1,
);
use core::fmt::Write;
hbbytecode::parse_args(&mut bytes, instr, &mut self.op_buf).unwrap();
debug_assert!(bytes.is_empty());
self.disp_buf.clear();
write!(self.disp_buf, "{:<10}", format!("{instr:?}")).unwrap();
for (i, op) in self.op_buf.drain(..).enumerate() {
if i != 0 {
write!(self.disp_buf, ", ").unwrap();
}
write!(self.disp_buf, "{op:?}").unwrap();
if let hbbytecode::Oper::R(r) = op {
write!(self.disp_buf, "({})", regs[r as usize].0).unwrap()
}
}
log::trace!("read-typed: {:x}: {}", addr.get(), self.disp_buf);
}
}
impl hbvm::mem::Memory for LoggedMem {
@ -739,13 +758,19 @@ impl hbvm::mem::Memory for LoggedMem {
}
unsafe fn prog_read<T: Copy + 'static>(&mut self, addr: hbvm::mem::Address) -> T {
self.mem.prog_read(addr)
}
if log::log_enabled!(log::Level::Trace) {
if core::any::TypeId::of::<u8>() == core::any::TypeId::of::<T>() {
if let Some(instr) = self.prev_instr {
self.display_instr::<()>(instr, addr);
}
self.prev_instr = hbbytecode::Instr::try_from(*(addr.get() as *const u8)).ok();
} else {
let instr = self.prev_instr.take().unwrap();
self.display_instr::<T>(instr, addr);
}
}
fn log_instr(&mut self, at: hbvm::mem::Address, regs: &[hbvm::value::Value]) {
log::trace!("read-typed: {:x}: {}", at.get(), unsafe {
self.logger.display_instr(at, regs)
});
self.mem.prog_read(addr)
}
}

View file

@ -1,5 +1,4 @@
use {
super::ParamAlloc,
crate::{
backend::hbvm::{
reg::{self, Reg},
@ -24,7 +23,7 @@ impl HbvmBackend {
tys: &Types,
files: &EntSlice<Module, parser::Ast>,
) -> (usize, bool) {
let tail = FunctionBuilder::build(nodes, tys, &mut self.ralloc, sig);
let tail = Function::build(nodes, tys, &mut self.ralloc, sig);
let strip_load = |value| match nodes[value].kind {
Kind::Load { .. } if nodes[value].ty.loc(tys) == Loc::Stack => nodes[value].inputs[1],
@ -33,9 +32,7 @@ impl HbvmBackend {
let mut res = mem::take(&mut self.ralloc);
let special_reg_count = 13u8;
Regalloc::run(nodes, tys, &mut res, special_reg_count as _);
Regalloc::run(nodes, tys, &mut res);
'_open_function: {
self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, 0));
@ -43,12 +40,12 @@ impl HbvmBackend {
}
if let Some(PLoc::Ref(..)) = tys.parama(sig.ret).0 {
res.node_to_reg[MEM as usize] = res.general_bundles.len() as u8 + 1;
res.general_bundles.push(Bundle::default());
res.node_to_reg[MEM as usize] = res.bundles.len() as u8 + 1;
res.bundles.push(Bundle::new(0));
}
let reg_offset = if tail { special_reg_count } else { reg::RET_ADDR + 1 };
let bundle_count = res.general_bundles.len() + (reg_offset as usize);
let reg_offset = if tail { reg::RET + 12 } else { reg::RET_ADDR + 1 };
let bundle_count = res.bundles.len() + (reg_offset as usize);
res.node_to_reg.iter_mut().filter(|r| **r != 0).for_each(|r| {
if *r == u8::MAX {
@ -328,7 +325,6 @@ impl HbvmBackend {
node.ty,
tys.inner_of(nodes[node.inputs[1]].ty)
.unwrap_or(nodes[node.inputs[1]].ty),
tys,
)
.unwrap_or_else(|| {
panic!(
@ -396,9 +392,8 @@ impl HbvmBackend {
todo!("unhandled operator: {op}");
}
}
Kind::Call { args, func, .. } => {
Kind::Call { args, func } => {
let (ret, mut parama) = tys.parama(node.ty);
debug_assert!(node.ty != ty::Id::NEVER || ret.is_none());
if let Some(PLoc::Ref(r, ..)) = ret {
self.emit(instrs::cp(r, atr(*node.inputs.last().unwrap())))
}
@ -438,15 +433,12 @@ impl HbvmBackend {
self.emit(instrs::st(r, atr(*node.inputs.last().unwrap()), 0, size));
}
//match ret {
// Some(PLoc::WideReg(..)) => {}
// Some(PLoc::Reg(..)) if node.ty.loc(tys) == Loc::Stack => {}
// Some(PLoc::Reg(r, ..)) => self.emit_cp(atr(nid), r),
// None | Some(PLoc::Ref(..)) => {}
//}
}
Kind::RetVal => {
self.emit_cp(atr(nid), reg::RET);
match ret {
Some(PLoc::WideReg(..)) => {}
Some(PLoc::Reg(..)) if node.ty.loc(tys) == Loc::Stack => {}
Some(PLoc::Reg(r, ..)) => self.emit_cp(atr(nid), r),
None | Some(PLoc::Ref(..)) => {}
}
}
Kind::Global { global } => {
let reloc = Reloc::new(self.code.len(), 3, 4);
@ -525,7 +517,7 @@ impl HbvmBackend {
if tail {
bundle_count.saturating_sub(reg::RET_ADDR as _)
} else {
self.ralloc.general_bundles.len()
self.ralloc.bundles.len()
},
tail,
)
@ -538,7 +530,7 @@ impl HbvmBackend {
}
}
struct FunctionBuilder<'a> {
struct Function<'a> {
sig: Sig,
tail: bool,
nodes: &'a Nodes,
@ -546,7 +538,7 @@ struct FunctionBuilder<'a> {
func: &'a mut Res,
}
impl core::fmt::Debug for FunctionBuilder<'_> {
impl core::fmt::Debug for Function<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
for block in &self.func.blocks {
writeln!(f, "{:?}", self.nodes[block.entry].kind)?;
@ -559,7 +551,7 @@ impl core::fmt::Debug for FunctionBuilder<'_> {
}
}
impl<'a> FunctionBuilder<'a> {
impl<'a> Function<'a> {
fn build(nodes: &'a Nodes, tys: &'a Types, func: &'a mut Res, sig: Sig) -> bool {
func.blocks.clear();
func.instrs.clear();
@ -678,21 +670,17 @@ impl<'a> FunctionBuilder<'a> {
self.emit_node(o);
}
}
Kind::Call { func, unreachable, .. } => {
Kind::Call { func, .. } => {
self.tail &= func == ty::Func::ECA;
if unreachable {
self.close_block(nid);
self.emit_node(node.outputs[0]);
} else {
self.add_instr(nid);
for &o in node.outputs.iter().rev() {
if self.nodes[o].inputs[0] == nid
|| (matches!(self.nodes[o].kind, Kind::Loop | Kind::Region)
&& self.nodes[o].inputs[1] == nid)
{
self.emit_node(o);
}
self.add_instr(nid);
for &o in node.outputs.iter().rev() {
if self.nodes[o].inputs[0] == nid
|| (matches!(self.nodes[o].kind, Kind::Loop | Kind::Region)
&& self.nodes[o].inputs[1] == nid)
{
self.emit_node(o);
}
}
}
@ -703,7 +691,6 @@ impl<'a> FunctionBuilder<'a> {
| Kind::Global { .. }
| Kind::Load { .. }
| Kind::Stre
| Kind::RetVal
| Kind::Stck => self.add_instr(nid),
Kind::End | Kind::Phi | Kind::Arg | Kind::Mem | Kind::Loops | Kind::Join => {}
Kind::Assert { .. } => unreachable!(),
@ -774,13 +761,7 @@ impl Nodes {
nid
}
fn uses_of(
&self,
nid: Nid,
types: &Types,
stack: &mut Vec<Nid>,
buf: &mut Vec<(Nid, Nid, Reg)>,
) {
fn uses_of(&self, nid: Nid, types: &Types, stack: &mut Vec<Nid>, buf: &mut Vec<(Nid, Nid)>) {
debug_assert!(stack.is_empty());
debug_assert!(buf.is_empty());
@ -796,38 +777,13 @@ impl Nodes {
continue;
}
if self.is_unlocked(o) {
buf.push((self.use_block_of(exp, o), o, self.use_reg_of(exp, o)));
buf.push((self.use_block_of(exp, o), o));
} else {
stack.push(o);
}
}
}
}
#[expect(unused)]
fn init_loc_of(&self, def: Nid, types: &Types) -> Reg {
if self[def].kind == Kind::Arg {
let mut parama = ParamAlloc(0..11);
let (_, ploc) = self[VOID]
.outputs
.iter()
.skip(ARG_START)
.map(|&n| (n, parama.next(self[n].ty, types)))
.find(|&(n, _)| n == def)
.unwrap();
return ploc.unwrap().reg();
}
255
}
#[expect(unused)]
fn use_reg_of(&self, def: Nid, usage: Nid) -> Reg {
//if matches!(self[usage].kind, Kind::Return { .. }) {}
255
}
}
struct Regalloc<'a> {
@ -850,35 +806,27 @@ impl<'a> Regalloc<'a> {
self.res.backrefs[nid as usize]
}
fn run(ctx: &'a Nodes, tys: &'a Types, res: &'a mut Res, special_count: usize) {
Self { nodes: ctx, tys, res }.run_low(special_count);
fn run(ctx: &'a Nodes, tys: &'a Types, res: &'a mut Res) {
Self { nodes: ctx, tys, res }.run_low();
}
fn run_low(&mut self, #[expect(unused)] special_count: usize) {
self.res.general_bundles.clear();
fn run_low(&mut self) {
self.res.bundles.clear();
self.res.node_to_reg.clear();
#[cfg(debug_assertions)]
self.res.marked.clear();
self.res.node_to_reg.resize(self.nodes.vreg_count(), 0);
self.res.call_set.clear();
for (i, &instr) in self.res.instrs.iter().enumerate() {
if self.nodes[instr].kind.is_call() {
self.res.call_set.add_one(i);
}
}
debug_assert!(self.res.dfs_buf.is_empty());
let mut uses_buf = Vec::new();
let mut range_buf = Vec::new();
let mut bundle = Bundle::default();
let mut bundle = Bundle::new(self.res.instrs.len());
self.res.visited.clear(self.nodes.len());
for i in (0..self.res.blocks.len()).rev() {
for [a, rest @ ..] in self.nodes.phi_inputs_of(self.res.blocks[i].entry) {
if self.res.visited.set(a) {
self.append_bundle(a, &mut bundle, None, &mut uses_buf, &mut range_buf);
self.append_bundle(a, &mut bundle, None, &mut uses_buf);
}
for r in rest {
@ -891,7 +839,6 @@ impl<'a> Regalloc<'a> {
&mut bundle,
Some(self.res.node_to_reg[a as usize] as usize - 1),
&mut uses_buf,
&mut range_buf,
);
}
}
@ -902,7 +849,7 @@ impl<'a> Regalloc<'a> {
if self.nodes[inst].has_no_value() || self.res.visited.get(inst) || inst == 0 {
continue;
}
self.append_bundle(inst, &mut bundle, None, &mut uses_buf, &mut range_buf);
self.append_bundle(inst, &mut bundle, None, &mut uses_buf);
}
self.res.instrs = instrs;
}
@ -912,16 +859,12 @@ impl<'a> Regalloc<'a> {
inst: Nid,
tmp: &mut Bundle,
prefered: Option<usize>,
uses_buf: &mut Vec<(Nid, Nid, Reg)>,
range_buf: &mut Vec<Range<usize>>,
uses_buf: &mut Vec<(Nid, Nid)>,
) {
let dom = self.nodes.idom_of(inst);
self.res.dfs_seem.clear(self.nodes.len());
self.nodes.uses_of(inst, self.tys, &mut self.res.dfs_buf, uses_buf);
let mut prefered_reg = reg::ZERO;
for (cursor, uinst, reg) in uses_buf.drain(..) {
prefered_reg = prefered_reg.min(reg);
for (cursor, uinst) in uses_buf.drain(..) {
if !self.res.dfs_seem.set(uinst) {
continue;
}
@ -951,22 +894,8 @@ impl<'a> Regalloc<'a> {
range.end = new;
debug_assert!(range.start < range.end, "{:?} {inst} {uinst}", range);
range_buf.push(range)
});
range_buf.sort_unstable_by_key(|r| r.start);
range_buf.dedup_by(|a, b| {
if b.end == a.start {
b.end = a.end;
true
} else {
false
}
});
for range in range_buf.drain(..) {
tmp.add(range);
}
});
}
if tmp.is_empty() {
@ -975,23 +904,23 @@ impl<'a> Regalloc<'a> {
}
if let Some(prefered) = prefered
&& !self.res.general_bundles[prefered].overlaps(tmp)
&& !self.res.bundles[prefered].overlaps(tmp)
{
self.res.general_bundles[prefered].merge(tmp);
self.res.bundles[prefered].merge(tmp);
tmp.clear();
self.res.node_to_reg[inst as usize] = prefered as Reg + 1;
return;
}
match self.res.general_bundles.iter_mut().enumerate().find(|(_, b)| !b.overlaps(tmp)) {
match self.res.bundles.iter_mut().enumerate().find(|(_, b)| !b.overlaps(tmp)) {
Some((i, other)) => {
other.merge(tmp);
tmp.clear();
self.res.node_to_reg[inst as usize] = i as Reg + 1;
}
None => {
self.res.general_bundles.push(tmp.take());
self.res.node_to_reg[inst as usize] = self.res.general_bundles.len() as Reg;
self.res.bundles.push(tmp.take());
self.res.node_to_reg[inst as usize] = self.res.bundles.len() as Reg;
}
}
}
@ -1042,8 +971,7 @@ pub(super) struct Res {
instrs: Vec<Nid>,
backrefs: Vec<u16>,
general_bundles: Vec<Bundle>,
call_set: Bundle,
bundles: Vec<Bundle>,
node_to_reg: Vec<Reg>,
visited: BitSet,
@ -1054,83 +982,37 @@ pub(super) struct Res {
}
struct Bundle {
start: usize,
end: usize,
usage: BitSet,
}
impl Default for Bundle {
fn default() -> Self {
Self { start: usize::MAX, end: 0, usage: Default::default() }
}
taken: Vec<bool>,
}
impl Bundle {
fn new(size: usize) -> Self {
Self { taken: vec![false; size] }
}
fn add(&mut self, range: Range<usize>) {
debug_assert!(!range.is_empty());
debug_assert!(range.start / BitSet::UNIT >= self.start || self.start == usize::MAX);
self.start = self.start.min(range.start / BitSet::UNIT);
self.end = self.end.max(range.end.div_ceil(BitSet::UNIT));
let proj_range =
range.start - self.start * BitSet::UNIT..range.end - self.start * BitSet::UNIT;
self.usage.set_range(proj_range)
self.taken[range].fill(true);
}
fn overlaps(&self, othr: &Self) -> bool {
let overlap = self.start.max(othr.start)..self.end.min(othr.end);
if overlap.start >= overlap.end {
return false;
}
let [mut sslot, mut oslot] = [0, 0];
let sunits =
&self.usage.units(&mut sslot)[overlap.start - self.start..overlap.end - self.start];
let ounits =
&othr.usage.units(&mut oslot)[overlap.start - othr.start..overlap.end - othr.start];
debug_assert_eq!(sunits.len(), ounits.len());
let res = sunits.iter().zip(ounits).any(|(a, b)| (a & b) != 0);
res
fn overlaps(&self, other: &Self) -> bool {
self.taken.iter().zip(other.taken.iter()).any(|(a, b)| a & b)
}
fn merge(&mut self, othr: &Self) {
debug_assert!(!self.overlaps(othr));
debug_assert!(self.start <= othr.start || self.start == usize::MAX);
self.usage.reserve((othr.end - self.start) * BitSet::UNIT);
self.start = self.start.min(othr.start);
self.end = self.end.max(othr.end);
let sunits =
&mut self.usage.units_mut().unwrap()[othr.start - self.start..othr.end - self.start];
let mut oslot = 0;
let ounits = othr.usage.units(&mut oslot);
sunits.iter_mut().zip(ounits).for_each(|(a, b)| *a |= *b);
fn merge(&mut self, other: &Self) {
debug_assert!(!self.overlaps(other));
self.taken.iter_mut().zip(other.taken.iter()).for_each(|(a, b)| *a |= *b);
}
fn clear(&mut self) {
self.start = usize::MAX;
self.end = 0;
self.usage.clear_as_is();
self.taken.fill(false);
}
fn is_empty(&self) -> bool {
self.end == 0
!self.taken.contains(&true)
}
fn take(&mut self) -> Self {
let mut new = Self { start: 0, ..Self::default() };
new.merge(self);
self.clear();
new
}
fn add_one(&mut self, i: usize) {
self.start = self.start.min(i / BitSet::UNIT);
self.end = self.end.max(i.div_ceil(BitSet::UNIT));
self.usage.set(i as _);
mem::replace(self, Self::new(self.taken.len()))
}
}

View file

@ -2,8 +2,7 @@ use {
crate::{
lexer::{self, Lexer, TokenKind},
parser::{
self, CommentOr, CtorField, EnumField, Expr, FieldList, ListKind, Poser, Radix,
StructField, UnionField,
self, CommentOr, CtorField, EnumField, Expr, FieldList, Poser, Radix, StructField,
},
},
core::{
@ -51,29 +50,27 @@ enum TokenGroup {
Ctor,
}
impl TokenKind {
fn to_higlight_group(self) -> TokenGroup {
use {TokenGroup as TG, TokenKind::*};
match self {
BSlash | Pound | Eof | Ct => TG::Blank,
Comment => TG::Comment,
Directive => TG::Directive,
Colon => TG::Colon,
Semi | Comma => TG::Comma,
Dot => TG::Dot,
Ctor | Arr | Tupl | TArrow | Range => TG::Ctor,
LParen | RParen => TG::Paren,
LBrace | RBrace | LBrack | RBrack => TG::Bracket,
Number | Float => TG::Number,
Under | CtIdent | Ident => TG::Identifier,
Tick | Tilde | Que | Not | Mod | Band | Bor | Xor | Mul | Add | Sub | Div | Shl
| Shr | Or | And | Lt | Gt | Eq | Le | Ge | Ne => TG::Op,
Decl | Assign | BorAss | XorAss | BandAss | AddAss | SubAss | MulAss | DivAss
| ModAss | ShrAss | ShlAss => TG::Assign,
DQuote | Quote => TG::String,
Slf | Defer | Return | If | Else | Loop | Break | Continue | Fn | Idk | Die
| Struct | Packed | True | False | Null | Match | Enum | Union | CtLoop => TG::Keyword,
}
fn token_group(kind: TokenKind) -> TokenGroup {
use {crate::lexer::TokenKind::*, TokenGroup as TG};
match kind {
BSlash | Pound | Eof | Ct => TG::Blank,
Comment => TG::Comment,
Directive => TG::Directive,
Colon => TG::Colon,
Semi | Comma => TG::Comma,
Dot => TG::Dot,
Ctor | Tupl | TArrow => TG::Ctor,
LParen | RParen => TG::Paren,
LBrace | RBrace | LBrack | RBrack => TG::Bracket,
Number | Float => TG::Number,
Under | CtIdent | Ident => TG::Identifier,
Tick | Tilde | Que | Not | Mod | Band | Bor | Xor | Mul | Add | Sub | Div | Shl | Shr
| Or | And | Lt | Gt | Eq | Le | Ge | Ne => TG::Op,
Decl | Assign | BorAss | XorAss | BandAss | AddAss | SubAss | MulAss | DivAss | ModAss
| ShrAss | ShlAss => TG::Assign,
DQuote | Quote => TG::String,
Slf | Defer | Return | If | Else | Loop | Break | Continue | Fn | Idk | Die | Struct
| Packed | True | False | Null | Match | Enum | Union | CtLoop => TG::Keyword,
}
}
@ -91,7 +88,7 @@ pub fn get_token_kinds(mut source: &mut [u8]) -> usize {
let start = token.start as usize;
let end = token.end as usize;
source[..start].fill(0);
source[start..end].fill(token.kind.to_higlight_group() as u8);
source[start..end].fill(token_group(token.kind) as u8);
source = &mut source[end..];
}
len
@ -224,12 +221,12 @@ impl<'a> Formatter<'a> {
f.write_str(sep)?;
}
if let Some(expr) = list.get(i + 1)
&& let Some(prev) = self.source.get(..expr.posi() as usize)
&& let Some(rest) = self.source.get(expr.posi() as usize..)
{
if sep.is_empty() && prev.trim_end().ends_with(';') {
if sep.is_empty() && insert_needed_semicolon(rest) {
f.write_str(";")?;
}
if count_trailing_newlines(prev) > 1 {
if preserve_newlines(&self.source[..expr.posi() as usize]) > 1 {
f.write_str("\n")?;
}
}
@ -307,6 +304,10 @@ impl<'a> Formatter<'a> {
}
match *expr {
Expr::Ct { value, .. } => {
f.write_str("$: ")?;
self.fmt(value, f)
}
Expr::Defer { value, .. } => {
f.write_str("defer ")?;
self.fmt(value, f)
@ -322,16 +323,6 @@ impl<'a> Formatter<'a> {
f.write_str(".")?;
f.write_str(field)
}
Expr::Range { start, end, .. } => {
if let Some(start) = start {
self.fmt(start, f)?;
}
f.write_str("..")?;
if let Some(end) = end {
self.fmt(end, f)?;
}
Ok(())
}
Expr::Directive { name, args, .. } => {
f.write_str("@")?;
f.write_str(name)?;
@ -348,15 +339,10 @@ impl<'a> Formatter<'a> {
"struct",
trailing_comma,
fields,
|s, StructField { name, ty, default_value, .. }, f| {
|s, StructField { name, ty, .. }, f| {
f.write_str(name)?;
f.write_str(": ")?;
s.fmt(ty, f)?;
if let Some(deva) = default_value {
f.write_str(" = ")?;
s.fmt(deva, f)?;
}
Ok(())
s.fmt(ty, f)
},
)
}
@ -365,7 +351,7 @@ impl<'a> Formatter<'a> {
"union",
trailing_comma,
fields,
|s, UnionField { name, ty, .. }, f| {
|s, StructField { name, ty, .. }, f| {
f.write_str(name)?;
f.write_str(": ")?;
s.fmt(ty, f)
@ -380,7 +366,7 @@ impl<'a> Formatter<'a> {
),
Expr::Ctor { ty, fields, trailing_comma, .. } => {
if let Some(ty) = ty {
self.fmt_paren(ty, f, postfix)?;
self.fmt_paren(ty, f, unary)?;
}
f.write_str(".{")?;
self.fmt_list(
@ -399,43 +385,38 @@ impl<'a> Formatter<'a> {
},
)
}
Expr::List {
Expr::Tupl {
pos,
kind: term,
ty: Some(&Expr::Slice { pos: spos, size: Some(&Expr::Number { value, .. }), item }),
fields,
trailing_comma,
} if value as usize == fields.len() => self.fmt(
&Expr::List {
&Expr::Tupl {
pos,
kind: term,
ty: Some(&Expr::Slice { pos: spos, size: None, item }),
fields,
trailing_comma,
},
f,
),
Expr::List { ty, kind: term, fields, trailing_comma, .. } => {
Expr::Tupl { ty, fields, trailing_comma, .. } => {
if let Some(ty) = ty {
self.fmt_paren(ty, f, postfix)?;
self.fmt_paren(ty, f, unary)?;
}
let (start, end) = match term {
ListKind::Tuple => (".(", ")"),
ListKind::Array => (".[", "]"),
};
f.write_str(start)?;
self.fmt_list(f, trailing_comma, end, ",", fields, Self::fmt)
f.write_str(".(")?;
self.fmt_list(f, trailing_comma, ")", ",", fields, Self::fmt)
}
Expr::Slice { item, size, .. } => {
f.write_str("[")?;
self.fmt(item, f)?;
if let Some(size) = size {
f.write_str("; ")?;
self.fmt(size, f)?;
}
f.write_str("]")?;
self.fmt_paren(item, f, unary)
f.write_str("]")
}
Expr::Index { base, index } => {
self.fmt_paren(base, f, postfix)?;
self.fmt(base, f)?;
f.write_str("[")?;
self.fmt(index, f)?;
f.write_str("]")
@ -558,7 +539,7 @@ impl<'a> Formatter<'a> {
prev.rfind(|c: char| c.is_ascii_whitespace()).map_or(prev.len(), |i| i + 1);
let exact_bound = lexer::Lexer::new(&prev[estimate_bound..]).last().start;
prev = &prev[..exact_bound as usize + estimate_bound];
if count_trailing_newlines(prev) > 0 {
if preserve_newlines(prev) > 0 {
f.write_str("\n")?;
for _ in 0..self.depth + 1 {
f.write_str("\t")?;
@ -566,9 +547,7 @@ impl<'a> Formatter<'a> {
f.write_str(op.name())?;
f.write_str(" ")?;
} else {
if op != TokenKind::Colon {
f.write_str(" ")?;
}
f.write_str(" ")?;
f.write_str(op.name())?;
f.write_str(" ")?;
}
@ -583,10 +562,15 @@ impl<'a> Formatter<'a> {
}
}
pub fn count_trailing_newlines(source: &str) -> usize {
pub fn preserve_newlines(source: &str) -> usize {
source[source.trim_end().len()..].bytes().filter(|&c| c == b'\n').count()
}
pub fn insert_needed_semicolon(source: &str) -> bool {
let kind = lexer::Lexer::new(source).eat().kind;
kind.precedence().is_some() || matches!(kind, TokenKind::Ctor | TokenKind::Tupl)
}
impl core::fmt::Display for parser::Ast {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt_file(self.exprs(), &self.file, f)
@ -597,14 +581,14 @@ pub fn fmt_file(exprs: &[Expr], file: &str, f: &mut impl fmt::Write) -> fmt::Res
for (i, expr) in exprs.iter().enumerate() {
Formatter::new(file).fmt(expr, f)?;
if let Some(expr) = exprs.get(i + 1)
&& let Some(prefix) = file.get(..expr.pos() as usize)
&& let Some(rest) = file.get(expr.pos() as usize..)
{
if prefix.trim_end().ends_with(';') {
f.write_str(";")?;
if insert_needed_semicolon(rest) {
write!(f, ";")?;
}
if count_trailing_newlines(prefix) > 1 {
f.write_str("\n")?;
if preserve_newlines(&file[..expr.pos() as usize]) > 1 {
writeln!(f)?;
}
}

View file

@ -1,17 +1,18 @@
use {
crate::{
backend::{hbvm::HbvmBackend, Backend},
backend::hbvm::HbvmBackend,
parser::{Ast, Ctx, FileKind},
son::{self},
ty, FnvBuildHasher,
},
alloc::{string::String, vec::Vec},
core::{fmt::Write, ops::Deref},
core::{fmt::Write, num::NonZeroUsize, ops::Deref},
hashbrown::hash_map,
std::{
borrow::ToOwned,
collections::VecDeque,
eprintln,
ffi::OsStr,
io::{self, Write as _},
path::{Path, PathBuf},
string::ToString,
@ -71,7 +72,60 @@ pub struct Options<'a> {
pub dump_asm: bool,
pub extra_threads: usize,
pub resolver: Option<PathResolver<'a>>,
pub backend: Option<&'a mut dyn Backend>,
}
impl<'a> Options<'a> {
pub fn from_args(
args: &[&str],
out: &mut Vec<u8>,
resolvers: &'a [(&str, PathResolver)],
) -> std::io::Result<Self> {
if args.contains(&"--help") || args.contains(&"-h") {
writeln!(out, "Usage: hbc [OPTIONS...] <FILE>")?;
writeln!(out, include_str!("../command-help.txt"))?;
return Err(std::io::ErrorKind::Other.into());
}
Ok(Options {
fmt: args.contains(&"--fmt"),
fmt_stdout: args.contains(&"--fmt-stdout"),
dump_asm: args.contains(&"--dump-asm"),
extra_threads: args
.iter()
.position(|&a| a == "--threads")
.map(|i| {
args[i + 1].parse::<NonZeroUsize>().map_err(|e| {
writeln!(out, "--threads expects non zero integer: {e}")
.err()
.unwrap_or(std::io::ErrorKind::Other.into())
})
})
.transpose()?
.map_or(1, NonZeroUsize::get)
- 1,
resolver: args
.iter()
.position(|&a| a == "--path-resolver")
.map(|i| {
resolvers.iter().find(|&&(n, _)| args[i + 1] == n).map(|&(_, r)| r).ok_or_else(
|| {
writeln!(
out,
"--path-resolver can only be one of: {}",
resolvers
.iter()
.map(|&(n, _)| n)
.intersperse(", ")
.collect::<String>()
)
.err()
.unwrap_or(std::io::ErrorKind::Other.into())
},
)
})
.transpose()?,
})
}
}
pub fn run_compiler(
@ -104,11 +158,10 @@ pub fn run_compiler(
write!(out, "{}", &parsed.ast[0])?;
} else {
let mut backend = HbvmBackend::default();
let backend = options.backend.unwrap_or(&mut backend);
let mut ctx = crate::son::CodegenCtx::default();
*ctx.parser.errors.get_mut() = parsed.errors;
let mut codegen = son::Codegen::new(backend, &parsed.ast, &mut ctx);
let mut codegen = son::Codegen::new(&mut backend, &parsed.ast, &mut ctx);
codegen.push_embeds(parsed.embeds);
codegen.generate(ty::Module::MAIN);
@ -124,9 +177,8 @@ pub fn run_compiler(
if options.dump_asm {
let mut disasm = String::new();
let err = codegen.disasm(&mut disasm, out).map_err(|e| io::Error::other(e.to_string()));
codegen.disasm(&mut disasm, out).map_err(|e| io::Error::other(e.to_string()))?;
*out = disasm.into_bytes();
err?
}
}
@ -246,7 +298,7 @@ pub struct CantLoadFile {
impl core::fmt::Display for CantLoadFile {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
write!(f, "can't load file: {}", crate::display_rel_path(&self.path),)
write!(f, "can't load file: {}", display_rel_path(&self.path),)
}
}
@ -297,7 +349,7 @@ pub fn parse_from_fs(
if !physiscal_path.exists() {
return Err(io::Error::new(
io::ErrorKind::NotFound,
format!("can't find file: {}", crate::display_rel_path(&physiscal_path)),
format!("can't find file: {}", display_rel_path(&physiscal_path)),
));
}
@ -324,7 +376,7 @@ pub fn parse_from_fs(
e.kind(),
format!(
"can't load embed file: {}: {e}",
crate::display_rel_path(&physiscal_path)
display_rel_path(&physiscal_path)
),
)
})?;
@ -342,7 +394,7 @@ pub fn parse_from_fs(
let path = path.to_str().ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidData,
format!("path contains invalid characters: {}", crate::display_rel_path(&path)),
format!("path contains invalid characters: {}", display_rel_path(&path)),
)
})?;
Ok(Ast::new(path, std::fs::read_to_string(path)?, ctx, &mut |path, from, kind| {
@ -388,3 +440,9 @@ pub fn parse_from_fs(
errors,
})
}
pub fn display_rel_path(path: &(impl AsRef<OsStr> + ?Sized)) -> std::path::Display {
static CWD: std::sync::LazyLock<PathBuf> =
std::sync::LazyLock::new(|| std::env::current_dir().unwrap_or_default());
std::path::Path::new(path).strip_prefix(&*CWD).unwrap_or(std::path::Path::new(path)).display()
}

View file

@ -138,9 +138,7 @@ pub enum TokenKind {
Ctor,
Tupl,
Arr,
TArrow,
Range,
Or,
And,
@ -349,9 +347,7 @@ gen_token_kind! {
#[punkt]
Ctor = ".{",
Tupl = ".(",
Arr = ".[",
TArrow = "=>",
Range = "..",
// #define OP: each `#[prec]` delimits a level of precedence from lowest to highest
#[ops]
#[prec]
@ -434,19 +430,6 @@ impl<'a> Lexer<'a> {
Lexer { pos: self.pos, source: self.source }.eat()
}
fn peek_n<const N: usize>(&self) -> Option<&[u8; N]> {
if core::intrinsics::unlikely(self.pos as usize + N > self.source.len()) {
None
} else {
Some(unsafe {
self.source
.get_unchecked(self.pos as usize..self.pos as usize + N)
.first_chunk()
.unwrap_unchecked()
})
}
}
fn peek(&self) -> Option<u8> {
if core::intrinsics::unlikely(self.pos >= self.source.len() as u32) {
None
@ -515,11 +498,7 @@ impl<'a> Lexer<'a> {
self.advance();
}
if self
.peek_n()
.map_or_else(|| self.peek() == Some(b'.'), |&[a, b]| a == b'.' && b != b'.')
{
self.pos += 1;
if self.advance_if(b'.') {
while let Some(b'0'..=b'9') = self.peek() {
self.advance();
}
@ -571,8 +550,6 @@ impl<'a> Lexer<'a> {
}
b'.' if self.advance_if(b'{') => T::Ctor,
b'.' if self.advance_if(b'(') => T::Tupl,
b'.' if self.advance_if(b'[') => T::Arr,
b'.' if self.advance_if(b'.') => T::Range,
b'=' if self.advance_if(b'>') => T::TArrow,
b'&' if self.advance_if(b'&') => T::And,
b'|' if self.advance_if(b'|') => T::Or,

View file

@ -24,8 +24,7 @@
iter_next_chunk,
pointer_is_aligned_to,
maybe_uninit_fill,
array_chunks,
array_windows
array_chunks
)]
#![warn(clippy::dbg_macro)]
#![expect(internal_features)]
@ -33,10 +32,8 @@
#[cfg(feature = "std")]
pub use fs::*;
use {
self::{ty::Builtin, utils::Ent},
alloc::vec::Vec,
};
pub use utils::Ent;
use {self::ty::Builtin, alloc::vec::Vec};
#[macro_use]
extern crate alloc;
@ -44,6 +41,9 @@ extern crate alloc;
#[cfg(any(feature = "std", test))]
extern crate std;
#[cfg(test)]
const README: &str = include_str!("../README.md");
#[cfg(test)]
#[macro_export]
macro_rules! run_tests {
@ -74,7 +74,6 @@ pub mod backend {
utils::EntSlice,
},
alloc::{string::String, vec::Vec},
core::error,
};
pub mod hbvm;
@ -90,7 +89,6 @@ pub mod backend {
&mut self,
from: ty::Func,
types: &Types,
files: &EntSlice<Module, parser::Ast>,
to: &mut Vec<u8>,
) -> AssemblySpec;
fn disasm<'a>(
@ -100,11 +98,11 @@ pub mod backend {
types: &'a Types,
files: &'a EntSlice<Module, parser::Ast>,
output: &mut String,
) -> Result<(), alloc::boxed::Box<dyn error::Error + Send + Sync + 'a>>;
) -> Result<(), hbbytecode::DisasmError<'a>>;
fn emit_body(
&mut self,
id: ty::Func,
nodes: &Nodes,
ci: &Nodes,
tys: &Types,
files: &EntSlice<Module, parser::Ast>,
);
@ -112,30 +110,22 @@ pub mod backend {
fn emit_ct_body(
&mut self,
id: ty::Func,
nodes: &Nodes,
ci: &Nodes,
tys: &Types,
files: &EntSlice<Module, parser::Ast>,
) {
self.emit_body(id, nodes, tys, files);
self.emit_body(id, ci, tys, files);
}
fn assemble_bin(
&mut self,
from: ty::Func,
types: &Types,
files: &EntSlice<Module, parser::Ast>,
to: &mut Vec<u8>,
) {
self.assemble_reachable(from, types, files, to);
fn assemble_bin(&mut self, from: ty::Func, types: &Types, to: &mut Vec<u8>) {
self.assemble_reachable(from, types, to);
}
}
}
pub mod utils;
mod utils;
mod debug {
use core::fmt::Debug;
pub fn panicking() -> bool {
#[cfg(feature = "std")]
{
@ -148,14 +138,14 @@ mod debug {
}
#[cfg(all(debug_assertions, feature = "std"))]
pub type Trace = std::rc::Rc<(std::backtrace::Backtrace, std::string::String)>;
pub type Trace = std::rc::Rc<std::backtrace::Backtrace>;
#[cfg(not(all(debug_assertions, feature = "std")))]
pub type Trace = ();
pub fn trace(_hint: impl Debug) -> Trace {
pub fn trace() -> Trace {
#[cfg(all(debug_assertions, feature = "std"))]
{
std::rc::Rc::new((std::backtrace::Backtrace::capture(), format!("{_hint:?}")))
std::rc::Rc::new(std::backtrace::Backtrace::capture())
}
#[cfg(not(all(debug_assertions, feature = "std")))]
{}
@ -408,10 +398,10 @@ impl Default for FnvHasher {
#[cfg(test)]
pub fn run_test(
name: &str,
ident: &str,
input: &str,
test: fn(&str, &str, &mut alloc::string::String),
name: &'static str,
ident: &'static str,
input: &'static str,
test: fn(&'static str, &'static str, &mut alloc::string::String),
) {
use std::{
io::Write,
@ -490,6 +480,31 @@ fn test_parse_files(
std::{borrow::ToOwned, string::ToString},
};
fn find_block<'a>(mut input: &'a str, test_name: &str) -> &'a str {
const CASE_PREFIX: &str = "#### ";
const CASE_SUFFIX: &str = "\n```hb";
loop {
let Some(pos) = input.find(CASE_PREFIX) else {
unreachable!("test {test_name} not found");
};
input = unsafe { input.get_unchecked(pos + CASE_PREFIX.len()..) };
if !input.starts_with(test_name) {
continue;
}
input = unsafe { input.get_unchecked(test_name.len()..) };
if !input.starts_with(CASE_SUFFIX) {
continue;
}
input = unsafe { input.get_unchecked(CASE_SUFFIX.len()..) };
let end = input.find("```").unwrap_or(input.len());
break unsafe { input.get_unchecked(..end) };
}
}
let input = find_block(input, ident);
let mut module_map = Vec::new();
let mut embed_map = Vec::new();
let mut last_start = 0;
@ -531,26 +546,3 @@ fn test_parse_files(
embed_map.iter().map(|&(_, content)| content.to_owned().into_bytes()).collect(),
)
}
#[cfg(feature = "std")]
static CWD: std::sync::LazyLock<std::path::PathBuf> =
std::sync::LazyLock::new(|| std::env::current_dir().unwrap_or_default());
#[cfg(feature = "std")]
pub fn strip_cwd(path: &str) -> &str {
std::path::Path::new(path)
.strip_prefix(&*CWD)
.unwrap_or(std::path::Path::new(path))
.to_str()
.unwrap()
}
#[cfg(feature = "std")]
pub fn display_rel_path(path: &(impl AsRef<std::ffi::OsStr> + ?Sized)) -> std::path::Display {
std::path::Path::new(path).strip_prefix(&*CWD).unwrap_or(std::path::Path::new(path)).display()
}
#[cfg(not(feature = "std"))]
pub fn display_rel_path(path: &str) -> &str {
path
}

31
lang/src/main.rs Normal file
View file

@ -0,0 +1,31 @@
#[cfg(feature = "std")]
fn main() {
use std::io::Write;
fn run(out: &mut Vec<u8>, warnings: &mut String) -> std::io::Result<()> {
let args = std::env::args().collect::<Vec<_>>();
let args = args.iter().map(String::as_str).collect::<Vec<_>>();
let resolvers = &[("ableos", hblang::ABLEOS_PATH_RESOLVER)];
let opts = hblang::Options::from_args(&args, out, resolvers)?;
let file = args.iter().filter(|a| !a.starts_with('-')).nth(1).copied().unwrap_or("main.hb");
hblang::run_compiler(file, opts, out, warnings)
}
log::set_logger(&hblang::fs::Logger).unwrap();
log::set_max_level(log::LevelFilter::Error);
let mut out = Vec::new();
let mut warnings = String::new();
match run(&mut out, &mut warnings) {
Ok(_) => {
std::io::stderr().write_all(warnings.as_bytes()).unwrap();
std::io::stdout().write_all(&out).unwrap()
}
Err(_) => {
std::io::stderr().write_all(warnings.as_bytes()).unwrap();
std::io::stderr().write_all(&out).unwrap();
std::process::exit(1);
}
}
}

View file

@ -9,6 +9,7 @@ use {
},
alloc::{string::String, vec::Vec},
core::{
assert_matches::debug_assert_matches,
cell::Cell,
fmt::{self, Debug, Write},
mem,
@ -96,7 +97,6 @@ impl Nodes {
debug_assert_ne!(next, 0);
if matches!(self[cursor].kind, Kind::Then | Kind::Else) {
debug_assert_eq!(self[next].kind, Kind::If);
debug_assert_eq!(self[next].ty, ty::Id::VOID);
let other = self[next].outputs[(self[next].outputs[0] == cursor) as usize];
self[other].loop_depth.set(depth - 1);
}
@ -168,8 +168,12 @@ impl Nodes {
let mut deepest = self[node].inputs[0];
for &inp in self[node].inputs[1..].iter() {
if self.idepth(inp, Some(scheds)) > self.idepth(deepest, Some(scheds)) {
debug_assert!(!self.is_cfg(inp));
deepest = self.idom(inp, Some(scheds));
if self[inp].kind.is_call() {
deepest = inp;
} else {
debug_assert!(!self.is_cfg(inp));
deepest = self.idom(inp, Some(scheds));
}
}
}
@ -194,8 +198,8 @@ impl Nodes {
for &node in rpo.iter().rev() {
self.loop_depth(node, Some(scheds));
for &i in self[node].inputs.iter() {
self.push_up_impl(i, visited, scheds);
for i in 0..self[node].inputs.len() {
self.push_up_impl(self[node].inputs[i], visited, scheds);
}
if matches!(self[node].kind, Kind::Loop | Kind::Region) {
@ -212,13 +216,13 @@ impl Nodes {
self.iter()
.map(|(n, _)| n)
.filter(|&n| !visited.get(n)
&& !matches!(self[n].kind, Kind::Arg | Kind::Mem | Kind::Loops | Kind::RetVal))
&& !matches!(self[n].kind, Kind::Arg | Kind::Mem | Kind::Loops))
.collect::<Vec<_>>(),
vec![],
"{:?}",
self.iter()
.filter(|&(n, nod)| !visited.get(n)
&& !matches!(nod.kind, Kind::Arg | Kind::Mem | Kind::Loops | Kind::RetVal))
&& !matches!(nod.kind, Kind::Arg | Kind::Mem | Kind::Loops))
.collect::<Vec<_>>()
);
@ -283,7 +287,6 @@ impl Nodes {
let cfg_idx = outputs.iter().position(|&n| self.is_cfg(n)).unwrap();
outputs.swap(cfg_idx, 0);
for &o in outputs.iter() {
if (!self.is_cfg(o)
&& self[o].outputs.iter().any(|&oi| {
@ -294,7 +297,9 @@ impl Nodes {
continue;
}
let mut cursor = buf.len();
buf.push(o);
for &o in outputs.iter().filter(|&&n| n == o) {
buf.push(o);
}
while let Some(&n) = buf.get(cursor) {
for &i in &self[n].inputs[1..] {
if fromc == self[i].inputs.first()
@ -305,17 +310,15 @@ impl Nodes {
})
&& seen.set(i)
{
buf.push(i);
for &o in outputs.iter().filter(|&&n| n == i) {
buf.push(o);
}
}
}
cursor += 1;
}
}
buf[1..].sort_by_key(|&n| {
self[n].has_no_value() || !self[n].outputs.iter().all(|&o| self[o].kind == Kind::Phi)
});
debug_assert_eq!(
outputs.iter().filter(|&&n| !seen.get(n)).copied().collect::<Vec<_>>(),
vec![],
@ -698,20 +701,19 @@ impl Nodes {
if self.free == Nid::MAX {
self.free = self.values.len() as _;
self.values.push(Err((Nid::MAX, debug::trace(""))));
self.values.push(Err((Nid::MAX, debug::trace())));
}
let free = self.free;
for &d in node.inputs.as_slice() {
debug_assert_ne!(d, free);
self.values[d as usize].as_mut().unwrap_or_else(|_| panic!("{d} ")).outputs.push(free);
self.values[d as usize].as_mut().unwrap_or_else(|_| panic!("{d}")).outputs.push(free);
}
self.free = mem::replace(&mut self.values[free as usize], Ok(node)).unwrap_err().0;
if let Some((entry, hash)) = lookup_meta {
entry.insert(crate::ctx_map::Key { value: free, hash }, ());
}
free
}
@ -775,11 +777,13 @@ impl Nodes {
}
self.remove_node_lookup(target);
let trace = debug::trace(&self.values[target as usize]);
if cfg!(debug_assertions) {
mem::replace(&mut self.values[target as usize], Err((Nid::MAX, trace))).unwrap();
mem::replace(&mut self.values[target as usize], Err((Nid::MAX, debug::trace())))
.unwrap();
} else {
mem::replace(&mut self.values[target as usize], Err((self.free, trace))).unwrap();
mem::replace(&mut self.values[target as usize], Err((self.free, debug::trace())))
.unwrap();
self.free = target;
}
@ -804,28 +808,17 @@ impl Nodes {
self.iter()
.filter_map(|(id, node)| node.kind.is_peeped().then_some(id))
.collect_into(stack);
stack.iter().for_each(|&s| {
debug_assert!(self.is_unlocked(s));
self.lock(s)
});
stack.iter().for_each(|&s| self.lock(s));
while fuel != 0
&& let Some(node) = stack.pop()
{
fuel -= 1;
if self[node].outputs.is_empty() {
self.push_adjacent_nodes(node, stack);
}
debug_assert_eq!(self[node].lock_rc.get(), 1, "{:?} {}", self[node], node);
if self.unlock_remove(node) {
continue;
}
debug_assert!(!self[node].outputs.is_empty(), "{:?} {}", self[node], node);
if let Some(new) = self.peephole(node, tys) {
self.replace(node, new);
self.push_adjacent_nodes(new, stack);
@ -841,6 +834,7 @@ impl Nodes {
}
debug_assert!(self.queued_peeps.is_empty());
stack.drain(..).for_each(|s| _ = self.unlock_remove(s));
}
@ -861,19 +855,7 @@ impl Nodes {
}
self[of].peep_triggers = Vc::default();
let mut i = 0;
stack.retain(|&n| {
if i < prev_len {
i += 1;
return true;
}
if self.is_unlocked(n) {
self.lock(n);
true
} else {
false
}
});
stack.iter().skip(prev_len).for_each(|&n| self.lock(n));
}
pub fn aclass_index(&self, region: Nid) -> (usize, Nid) {
@ -1175,15 +1157,10 @@ impl Nodes {
continue;
}
let mut broken = false;
for o in self[n].outputs.clone() {
if o != target && !matches!(self[o].kind, Kind::Return { .. }) {
self.add_trigger(o, target);
broken = true;
}
}
if broken {
new_inps.push(n);
if let Some(&load) =
self[n].outputs.iter().find(|&&n| self[n].kind == Kind::Load)
{
self.add_trigger(load, target);
continue;
}
@ -1333,9 +1310,9 @@ impl Nodes {
cursor = next_store;
}
'forward_store: {
'eliminate: {
if self[target].outputs.is_empty() {
break 'forward_store;
break 'eliminate;
}
if self[value].kind != Kind::Load
@ -1344,121 +1321,106 @@ impl Nodes {
for &ele in self[value].outputs.clone().iter().filter(|&&n| n != target) {
self.add_trigger(ele, target);
}
break 'forward_store;
break 'eliminate;
}
let &[_, stack, last_store] = self[value].inputs.as_slice() else {
unreachable!()
};
// TODO: count other loads to determine whether this transformation is worth it
// might be overly restrictive
// but for now, just check we are copying the full stack allocation
if self[stack].ty != self[value].ty || self[stack].kind != Kind::Stck {
break 'forward_store;
break 'eliminate;
}
// pessimistic
// allocation is most likely used in a loop or something so we can't get rid of it
if last_store != MEM
&& self[last_store]
.outputs
.iter()
.any(|&n| !matches!(self[n].kind, Kind::Load | Kind::Return { .. }))
{
break 'forward_store;
}
let mut unidentifed = self[stack].outputs.clone();
let load_idx = unidentifed.iter().position(|&n| n == value).unwrap();
unidentifed.swap_remove(load_idx);
let mut store_count = 0;
let [mut cursor, mut first_store] = [last_store; 2];
while cursor != MEM {
debug_assert_eq!(self[cursor].kind, Kind::Stre);
// pessimistic
// the offset must only be used for this store
if self[cursor].inputs[2] != stack
&& self[self[cursor].inputs[2]].outputs.as_slice() != [cursor]
{
break 'forward_store;
let mut saved = Vc::default();
let mut cursor = last_store;
let mut first_store = last_store;
while cursor != MEM && self[cursor].kind == Kind::Stre {
let mut contact_point = cursor;
let mut region = self[cursor].inputs[2];
if let Kind::BinOp { op } = self[region].kind {
debug_assert_matches!(op, TokenKind::Add | TokenKind::Sub);
contact_point = region;
region = self[region].inputs[1]
}
// pessimistic
// we load from the store, this might be because the load spans multiple
// stores
if self[cursor].inputs[3] != MEM
&& self[self[cursor].inputs[3]].outputs.as_slice() != [cursor]
{
break 'forward_store;
if region != stack {
break;
}
let Some(index) = unidentifed.iter().position(|&n| n == contact_point)
else {
break 'eliminate;
};
if self[self[cursor].inputs[1]].kind == Kind::Load
&& self[value].outputs.iter().any(|&n| {
self.aclass_index(self[self[cursor].inputs[1]].inputs[1]).0
== self.aclass_index(self[n].inputs[2]).0
})
{
break 'forward_store;
break 'eliminate;
}
unidentifed.remove(index);
saved.push(contact_point);
first_store = cursor;
cursor = self[cursor].inputs[3];
store_count += 1;
cursor = *self[cursor].inputs.get(3).unwrap_or(&MEM);
if unidentifed.is_empty() {
break;
}
}
if store_count + 1 != self[stack].outputs.len() {
debug_assert!(store_count + 1 < self[stack].outputs.len());
break 'forward_store;
if !unidentifed.is_empty() {
break 'eliminate;
}
// at this point we know the stack was initialized just to be moved into
// a different location, so create new stores that store directly to the
// destination and remove the final load from this stack; that should cause
// this stack allocation to be eliminated
debug_assert_matches!(
self[last_store].kind,
Kind::Stre | Kind::Mem,
"{:?}",
self[last_store]
);
debug_assert_matches!(
self[first_store].kind,
Kind::Stre | Kind::Mem,
"{:?}",
self[first_store]
);
let mut base_store = store;
if first_store != MEM {
debug_assert_ne!(last_store, MEM);
let mut cursor = first_store;
loop {
let mut inps = self[cursor].inputs.clone();
inps[2] = if inps[2] == stack {
region
} else {
let new_region = self.new_node(
self[inps[2]].ty,
self[inps[2]].kind,
[VOID, region, self[inps[2]].inputs[2]],
tys,
);
self.pass_aclass(self.aclass_index(region).1, new_region);
new_region
};
inps[3] = base_store;
base_store = self.new_node(self[cursor].ty, Kind::Stre, inps, tys);
if self.is_unlocked(base_store) {
self.lock(base_store);
self.queued_peeps.push(base_store);
}
if cursor == last_store {
break;
}
cursor = self[cursor].outputs[0];
// FIXME: when the loads and stores become parallel we will need to get saved
// differently
let mut prev_store = store;
for mut oper in saved.into_iter().rev() {
let mut region = region;
if let Kind::BinOp { op } = self[oper].kind {
debug_assert_eq!(self[oper].outputs.len(), 1);
debug_assert_eq!(self[self[oper].outputs[0]].kind, Kind::Stre);
let new_region = self.new_node(
self[oper].ty,
Kind::BinOp { op },
[VOID, region, self[oper].inputs[2]],
tys,
);
self.pass_aclass(self.aclass_index(region).1, new_region);
region = new_region;
oper = self[oper].outputs[0];
}
for o in self[last_store].outputs.clone() {
if matches!(self[o].kind, Kind::Return { .. }) && self.is_unlocked(o) {
self.queued_peeps.push(o);
}
let mut inps = self[oper].inputs.clone();
debug_assert_eq!(inps.len(), 4);
inps[2] = region;
inps[3] = prev_store;
prev_store = self.new_node_nop(self[oper].ty, Kind::Stre, inps);
if self.is_unlocked(prev_store) {
self.lock(prev_store);
self.queued_peeps.push(prev_store);
}
} else {
debug_assert_eq!(last_store, MEM);
}
return Some(base_store);
return Some(prev_store);
}
if let Some(&load) =
@ -1567,6 +1529,12 @@ impl Nodes {
self.remove(prev);
self.unlock(o);
for o in self[o].outputs.clone() {
if self.is_unlocked(o) {
self.lock(o);
self.queued_peeps.push(o);
}
}
self.replace(o, self[o].inputs[1]);
}
}
@ -1597,7 +1565,6 @@ impl Nodes {
K::Start => {}
_ if self.is_cfg(target) && self.idom(target, None) == NEVER => panic!(),
K::Entry
| K::RetVal
| K::Mem
| K::Loops
| K::End
@ -1671,7 +1638,6 @@ impl Nodes {
}
pub fn replace(&mut self, target: Nid, with: Nid) {
self.patch_aclass(target, with);
debug_assert_ne!(target, with, "{:?}", self[target]);
for out in self[target].outputs.clone() {
let index = self[out].inputs.iter().position(|&p| p == target).unwrap();
@ -1748,7 +1714,7 @@ impl Nodes {
Kind::BinOp { op } | Kind::UnOp { op } => {
write!(out, "{:>4}: ", op.name())
}
Kind::Call { func, args: _, unreachable: _ } => {
Kind::Call { func, args: _ } => {
write!(out, "call: {func} {} ", self[node].depth.get())
}
Kind::Global { global } => write!(out, "glob: {global:<5}"),
@ -1761,7 +1727,6 @@ impl Nodes {
Kind::Mem => write!(out, " mem: "),
Kind::Loops => write!(out, "loops: "),
Kind::Join => write!(out, "join: "),
Kind::RetVal => write!(out, "rval: "),
}?;
if self[node].kind != Kind::Loop && self[node].kind != Kind::Region {
@ -2016,25 +1981,6 @@ impl Nodes {
self[blocker].peep_triggers.push(target);
}
}
fn patch_aclass(&mut self, target: Nid, with: Nid) {
let (_, region) = self.aclass_index(target);
if region == 0 {
return;
}
fn patch_aclass_inner(s: &mut Nodes, root: Nid, with: Nid, matches: Nid) {
for out in s[root].outputs.clone() {
let (_, region) = s.aclass_index(out);
if region == matches {
s.pass_aclass(with, out);
patch_aclass_inner(s, out, with, matches);
}
}
}
patch_aclass_inner(self, target, with, target);
}
}
impl ops::Index<Nid> for Nodes {
@ -2104,7 +2050,8 @@ impl Node {
}
pub fn has_no_value(&self) -> bool {
self.kind.is_cfg() || matches!(self.kind, Kind::Stre)
(self.kind.is_cfg() && (!self.kind.is_call() || self.ty == ty::Id::VOID))
|| matches!(self.kind, Kind::Stre)
}
}
@ -2139,12 +2086,6 @@ pub enum Kind {
Return {
file: ty::Module,
},
// [ctrl, ...args]
Call {
unreachable: bool,
func: ty::Func,
args: ty::List,
},
// [ctrl]
Die,
// [ctrl]
@ -2166,7 +2107,11 @@ pub enum Kind {
Global {
global: ty::Global,
},
RetVal,
// [ctrl, ...args]
Call {
func: ty::Func,
args: ty::Tuple,
},
// [ctrl, cond, value]
Assert {
kind: AssertKind,
@ -2192,9 +2137,7 @@ impl Kind {
}
fn is_pinned(&self) -> bool {
self.is_cfg()
|| self.is_at_start()
|| matches!(self, Self::Phi | Self::Assert { .. } | Self::RetVal)
self.is_cfg() || self.is_at_start() || matches!(self, Self::Phi | Kind::Assert { .. })
}
fn is_at_start(&self) -> bool {
@ -2220,7 +2163,6 @@ impl Kind {
fn ends_basic_block(&self) -> bool {
matches!(self, Self::Return { .. } | Self::If | Self::End | Self::Die)
|| matches!(self, Kind::Call { unreachable: true, .. })
}
pub fn starts_basic_block(&self) -> bool {
@ -2246,7 +2188,6 @@ impl fmt::Display for Kind {
}
}
#[derive(Debug)]
pub enum CondOptRes {
Unknown,
Known { value: bool, pin: Option<Nid> },

View file

@ -31,7 +31,7 @@ pub enum FileKind {
Embed,
}
pub trait Trans {
trait Trans {
fn trans(self) -> Self;
}
@ -80,7 +80,6 @@ struct ScopeIdent {
declared: bool,
ordered: bool,
used: bool,
is_ct: bool,
flags: IdentFlags,
}
@ -197,8 +196,8 @@ impl<'a, 'b> Parser<'a, 'b> {
fn declare_rec(&mut self, expr: &Expr, top_level: bool) {
match *expr {
Expr::Ident { pos, id, is_first, is_ct, .. } => {
self.declare(pos, id, !top_level, is_first || top_level, is_ct)
Expr::Ident { pos, id, is_first, .. } => {
self.declare(pos, id, !top_level, is_first || top_level)
}
Expr::Ctor { fields, .. } => {
for CtorField { value, .. } in fields {
@ -209,7 +208,7 @@ impl<'a, 'b> Parser<'a, 'b> {
}
}
fn declare(&mut self, pos: Pos, id: Ident, ordered: bool, valid_order: bool, is_ct: bool) {
fn declare(&mut self, pos: Pos, id: Ident, ordered: bool, valid_order: bool) {
if !valid_order {
self.report(
pos,
@ -231,7 +230,7 @@ impl<'a, 'b> Parser<'a, 'b> {
);
return;
}
self.ctx.idents[index].is_ct = is_ct;
self.ctx.idents[index].ordered = ordered;
}
@ -257,11 +256,7 @@ impl<'a, 'b> Parser<'a, 'b> {
None => {
let ident = match Ident::new(token.start, name.len() as _) {
None => {
self.report(
token.start,
"identifier can at most have 63 characters, \
the code is too clean to efficiently represent in memory",
);
self.report(token.start, "identifier can at most have 64 characters");
Ident::new(token.start, 63).unwrap()
}
Some(id) => id,
@ -272,7 +267,6 @@ impl<'a, 'b> Parser<'a, 'b> {
declared: false,
used: false,
ordered: false,
is_ct: false,
flags: 0,
});
(self.ctx.idents.len() - 1, self.ctx.idents.last_mut().unwrap(), true)
@ -282,7 +276,7 @@ impl<'a, 'b> Parser<'a, 'b> {
id.flags |= idfl::COMPTIME * is_ct as u32;
if id.declared && id.ordered && self.ns_bound > i {
id.flags |= idfl::COMPTIME;
self.ctx.captured.push(CapturedIdent { id: id.ident, is_ct: id.is_ct });
self.ctx.captured.push(id.ident);
}
(id.ident, bl)
@ -293,10 +287,6 @@ impl<'a, 'b> Parser<'a, 'b> {
}
fn unit_expr(&mut self) -> Option<Expr<'a>> {
self.unit_expr_low(true)
}
fn unit_expr_low(&mut self, eat_tail: bool) -> Option<Expr<'a>> {
use {Expr as E, TokenKind as T};
if matches!(
@ -312,6 +302,7 @@ impl<'a, 'b> Parser<'a, 'b> {
let prev_captured = self.ctx.captured.len();
let mut must_trail = false;
let mut expr = match token.kind {
T::Ct => E::Ct { pos, value: self.ptr_expr()? },
T::Defer => E::Defer { pos, value: self.ptr_expr()? },
T::Slf => E::Slf { pos },
T::Directive if self.lexer.slice(token.range()) == "use" => {
@ -387,15 +378,10 @@ impl<'a, 'b> Parser<'a, 'b> {
}
let name = s.expect_advance(T::Ident)?;
s.expect_advance(T::Colon)?;
let (ty, default_value) = match s.expr()? {
Expr::BinOp { left, op: T::Assign, right, .. } => (*left, Some(*right)),
ty => (ty, None),
};
Some(Some(StructField {
pos: name.start,
name: s.tok_str(name),
ty,
default_value,
ty: s.expr()?,
}))
})?,
captured: self.collect_captures(prev_boundary, prev_captured),
@ -409,7 +395,11 @@ impl<'a, 'b> Parser<'a, 'b> {
}
let name = s.expect_advance(T::Ident)?;
s.expect_advance(T::Colon)?;
Some(Some(UnionField { pos: name.start, name: s.tok_str(name), ty: s.expr()? }))
Some(Some(StructField {
pos: name.start,
name: s.tok_str(name),
ty: s.expr()?,
}))
})?,
captured: self.collect_captures(prev_boundary, prev_captured),
trailing_comma: core::mem::take(&mut self.trailing_sep) || must_trail,
@ -477,7 +467,7 @@ impl<'a, 'b> Parser<'a, 'b> {
self.collect_list(T::Comma, T::RParen, |s| {
let name = s.advance_ident()?;
let (id, _) = s.resolve_ident(name);
s.declare(name.start, id, true, true, name.kind == T::CtIdent);
s.declare(name.start, id, true, true);
s.expect_advance(T::Colon)?;
Some(Arg {
pos: name.start,
@ -495,20 +485,14 @@ impl<'a, 'b> Parser<'a, 'b> {
body: self.ptr_expr()?,
},
T::Ctor => self.ctor(pos, None),
T::Tupl => self.tupl(pos, None, ListKind::Tuple),
T::Arr => self.tupl(pos, None, ListKind::Array),
T::Tupl => self.tupl(pos, None),
T::LBrack => E::Slice {
size: {
if self.advance_if(T::RBrack) {
None
} else {
let adv = self.ptr_expr()?;
self.expect_advance(T::RBrack)?;
Some(adv)
}
item: self.ptr_unit_expr()?,
size: self.advance_if(T::Semi).then(|| self.ptr_expr()).trans()?,
pos: {
self.expect_advance(T::RBrack)?;
pos
},
item: self.arena.alloc(self.unit_expr_low(false)?),
pos,
},
T::Band | T::Mul | T::Xor | T::Sub | T::Que | T::Not | T::Dot => E::UnOp {
pos,
@ -560,84 +544,37 @@ impl<'a, 'b> Parser<'a, 'b> {
tok => self.report(token.start, format_args!("unexpected token: {tok}"))?,
};
if eat_tail {
loop {
let token = self.token;
if matches!(
token.kind,
T::LParen | T::Ctor | T::Dot | T::Tupl | T::Arr | T::LBrack | T::Colon
) {
self.next();
}
loop {
let token = self.token;
if matches!(token.kind, T::LParen | T::Ctor | T::Dot | T::Tupl | T::LBrack) {
self.next();
}
expr = match token.kind {
T::LParen => Expr::Call {
func: self.arena.alloc(expr),
args: self.collect_list(T::Comma, T::RParen, Self::expr),
trailing_comma: core::mem::take(&mut self.trailing_sep),
expr = match token.kind {
T::LParen => Expr::Call {
func: self.arena.alloc(expr),
args: self.collect_list(T::Comma, T::RParen, Self::expr),
trailing_comma: core::mem::take(&mut self.trailing_sep),
},
T::Ctor => self.ctor(token.start, Some(expr)),
T::Tupl => self.tupl(token.start, Some(expr)),
T::LBrack => E::Index {
base: self.arena.alloc(expr),
index: {
let index = self.expr()?;
self.expect_advance(T::RBrack)?;
self.arena.alloc(index)
},
T::Ctor => self.ctor(token.start, Some(expr)),
T::Tupl => self.tupl(token.start, Some(expr), ListKind::Tuple),
T::Arr => self.tupl(token.start, Some(expr), ListKind::Array),
T::LBrack => E::Index {
base: self.arena.alloc(expr),
index: self.arena.alloc({
if self.advance_if(T::Range) {
let pos = self.token.start;
if self.advance_if(T::RBrack) {
Expr::Range { pos, start: None, end: None }
} else {
let res = Expr::Range {
pos,
start: None,
end: Some(self.ptr_expr()?),
};
self.expect_advance(T::RBrack)?;
res
}
} else {
let start = self.expr()?;
let pos = self.token.start;
if self.advance_if(T::Range) {
let start = self.arena.alloc(start);
if self.advance_if(T::RBrack) {
Expr::Range { pos, start: Some(start), end: None }
} else {
let res = Expr::Range {
pos,
start: Some(start),
end: Some(self.ptr_expr()?),
};
self.expect_advance(T::RBrack)?;
res
}
} else {
self.expect_advance(T::RBrack)?;
start
}
}
}),
},
T::Dot => E::Field {
target: self.arena.alloc(expr),
pos: token.start,
name: {
let token = self.expect_advance(T::Ident)?;
self.tok_str(token)
},
T::Colon => E::BinOp {
left: {
self.declare_rec(&expr, false);
self.arena.alloc(expr)
},
pos,
op: T::Colon,
right: self.ptr_expr()?,
},
T::Dot => E::Field {
target: self.arena.alloc(expr),
pos: token.start,
name: {
let token = self.expect_advance(T::Ident)?;
self.tok_str(token)
},
},
_ => break,
}
},
_ => break,
}
}
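// Editor's note: an illustrative standalone sketch, not part of this diff, of the
// suffix loop above: once a primary expression has been parsed, call/index/field
// suffixes keep wrapping it until the next token starts no suffix. The `Tok` and
// `Ast` names below are invented for the example.
#[derive(Clone, Copy, PartialEq)]
enum Tok { LParen, LBrack, Dot, Other }

#[derive(Debug)]
enum Ast { Prim, Call(Box<Ast>), Index(Box<Ast>), Field(Box<Ast>) }

fn fold_suffixes(mut expr: Ast, toks: &[Tok]) -> Ast {
    let mut i = 0;
    while let Some(&t) = toks.get(i) {
        expr = match t {
            Tok::LParen => Ast::Call(Box::new(expr)),
            Tok::LBrack => Ast::Index(Box::new(expr)),
            Tok::Dot => Ast::Field(Box::new(expr)),
            Tok::Other => break,
        };
        i += 1;
    }
    // e.g. Prim followed by [Dot, LParen, LBrack] yields Index(Call(Field(Prim)))
    expr
}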
@ -648,12 +585,11 @@ impl<'a, 'b> Parser<'a, 'b> {
Some(expr)
}
fn tupl(&mut self, pos: Pos, ty: Option<Expr<'a>>, kind: ListKind) -> Expr<'a> {
Expr::List {
fn tupl(&mut self, pos: Pos, ty: Option<Expr<'a>>) -> Expr<'a> {
Expr::Tupl {
pos,
kind,
ty: ty.map(|ty| self.arena.alloc(ty)),
fields: self.collect_list(TokenKind::Comma, kind.term(), Self::expr),
fields: self.collect_list(TokenKind::Comma, TokenKind::RParen, Self::expr),
trailing_comma: core::mem::take(&mut self.trailing_sep),
}
}
@ -703,11 +639,7 @@ impl<'a, 'b> Parser<'a, 'b> {
}))
}
fn collect_captures(
&mut self,
prev_captured: usize,
prev_boundary: usize,
) -> &'a [CapturedIdent] {
fn collect_captures(&mut self, prev_captured: usize, prev_boundary: usize) -> &'a [Ident] {
self.ns_bound = prev_boundary;
let captured = &mut self.ctx.captured[prev_captured..];
crate::quad_sort(captured, core::cmp::Ord::cmp);
@ -772,9 +704,7 @@ impl<'a, 'b> Parser<'a, 'b> {
) -> &'a [T] {
let mut trailing_sep = false;
let mut view = self.ctx.stack.view();
'o: while (keep_end && self.token.kind != end)
|| (!keep_end && !self.advance_if(end)) && self.token.kind != TokenKind::Eof
{
'o: while (keep_end && self.token.kind != end) || (!keep_end && !self.advance_if(end)) {
let val = match f(self) {
Some(val) => val,
None => {
@ -847,9 +777,6 @@ impl<'a, 'b> Parser<'a, 'b> {
#[track_caller]
fn report(&mut self, pos: Pos, msg: impl fmt::Display) -> Option<!> {
if log::log_enabled!(log::Level::Error) {
if self.ctx.errors.get_mut().len() > 1024 * 10 {
panic!("{}", self.ctx.errors.get_mut());
}
use core::fmt::Write;
writeln!(
self.ctx.errors.get_mut(),
@ -863,19 +790,15 @@ impl<'a, 'b> Parser<'a, 'b> {
fn flag_idents(&mut self, e: Expr<'a>, flags: IdentFlags) {
match e {
Expr::Ident { id, .. } => {
if let Some(f) = find_ident(&mut self.ctx.idents, id) {
f.flags |= flags;
}
}
Expr::Ident { id, .. } => find_ident(&mut self.ctx.idents, id).flags |= flags,
Expr::Field { target, .. } => self.flag_idents(*target, flags),
_ => {}
}
}
}
fn find_ident(idents: &mut [ScopeIdent], id: Ident) -> Option<&mut ScopeIdent> {
idents.binary_search_by_key(&id, |si| si.ident).map(|i| &mut idents[i]).ok()
fn find_ident(idents: &mut [ScopeIdent], id: Ident) -> &mut ScopeIdent {
idents.binary_search_by_key(&id, |si| si.ident).map(|i| &mut idents[i]).unwrap()
}
pub fn find_symbol(symbols: &[Symbol], id: Ident) -> &Symbol {
@ -956,6 +879,11 @@ generate_expr! {
/// `OP := grep for `#define OP:`
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Expr<'a> {
/// `'ct' Expr`
Ct {
pos: Pos,
value: &'a Self,
},
/// `'defer' Expr`
Defer {
pos: Pos,
@ -1070,22 +998,22 @@ generate_expr! {
Struct {
pos: Pos,
fields: FieldList<'a, StructField<'a>>,
captured: &'a [CapturedIdent],
captured: &'a [Ident],
trailing_comma: bool,
packed: bool,
},
/// `'union' LIST('{', ',', '}', Ident ':' Expr)`
Union {
pos: Pos,
fields: FieldList<'a, UnionField<'a>>,
captured: &'a [CapturedIdent],
fields: FieldList<'a, StructField<'a>>,
captured: &'a [Ident],
trailing_comma: bool,
},
/// `'enum' LIST('{', ',', '}', Ident)`
Enum {
pos: Pos,
variants: FieldList<'a, EnumField<'a>>,
captured: &'a [CapturedIdent],
captured: &'a [Ident],
trailing_comma: bool,
},
/// `[Expr] LIST('.{', ',', '}', Ident [':' Expr])`
@ -1096,9 +1024,8 @@ generate_expr! {
trailing_comma: bool,
},
/// `[Expr] LIST('.(', ',', ')', Ident [':' Expr])`
List {
Tupl {
pos: Pos,
kind: ListKind,
ty: Option<&'a Self>,
fields: &'a [Self],
trailing_comma: bool,
@ -1114,12 +1041,6 @@ generate_expr! {
base: &'a Self,
index: &'a Self,
},
/// `[ Expr ] .. [ Expr ]`
Range {
pos: u32,
start: Option<&'a Self>,
end: Option<&'a Self>,
},
/// `Expr '.' Ident`
Field {
target: &'a Self,
@ -1165,26 +1086,6 @@ generate_expr! {
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, PartialOrd, Ord)]
pub struct CapturedIdent {
pub id: Ident,
pub is_ct: bool,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ListKind {
Tuple,
Array,
}
impl ListKind {
fn term(self) -> TokenKind {
match self {
ListKind::Tuple => TokenKind::RParen,
ListKind::Array => TokenKind::RBrack,
}
}
}
impl Expr<'_> {
pub fn declares(&self, iden: DeclId, source: &str) -> Option<Ident> {
match *self {
@ -1257,25 +1158,11 @@ impl Poser for EnumField<'_> {
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct UnionField<'a> {
pub pos: Pos,
pub name: &'a str,
pub ty: Expr<'a>,
}
impl Poser for UnionField<'_> {
fn posi(&self) -> Pos {
self.pos
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct StructField<'a> {
pub pos: Pos,
pub name: &'a str,
pub ty: Expr<'a>,
pub default_value: Option<Expr<'a>>,
}
impl Poser for StructField<'_> {
@ -1340,9 +1227,9 @@ pub enum CommentOr<'a, T> {
Comment { literal: &'a str, pos: Pos },
}
impl<T> CommentOr<'_, T> {
pub fn or(&self) -> Option<&T> {
match self {
impl<T: Copy> CommentOr<'_, T> {
pub fn or(&self) -> Option<T> {
match *self {
CommentOr::Or(v) => Some(v),
CommentOr::Comment { .. } => None,
}
@ -1373,7 +1260,7 @@ pub struct Ctx {
symbols: Vec<Symbol>,
stack: StackAlloc,
idents: Vec<ScopeIdent>,
captured: Vec<CapturedIdent>,
captured: Vec<Ident>,
}
impl Ctx {
@ -1465,7 +1352,10 @@ impl<D: core::fmt::Display> core::fmt::Display for Report<'_, D> {
fn report_to(file: &str, path: &str, pos: Pos, msg: &dyn fmt::Display, out: &mut impl fmt::Write) {
let (line, mut col) = lexer::line_col(file.as_bytes(), pos);
let disp = crate::display_rel_path(path);
#[cfg(feature = "std")]
let disp = crate::fs::display_rel_path(path);
#[cfg(not(feature = "std"))]
let disp = path;
_ = writeln!(out, "{}:{}:{}: {}", disp, line, col, msg);
let line = &file[file[..pos as usize].rfind('\n').map_or(0, |i| i + 1)
@ -1524,7 +1414,7 @@ pub fn find_decl<'a>(
id: DeclId,
) -> Option<(&'a Expr<'a>, Ident)> {
exprs.iter().find_map(|expr| match expr {
Expr::BinOp { left, op: TokenKind::Decl | TokenKind::Colon, .. } => {
Expr::BinOp { left, op: TokenKind::Decl, .. } => {
left.declares(id, file).map(|id| (expr, id))
}
_ => None,

File diff suppressed because it is too large

View file

@ -2,7 +2,7 @@ use {
crate::{
ctx_map,
lexer::TokenKind,
parser::{self, CapturedIdent, CommentOr, Expr, ExprRef, Pos},
parser::{self, CommentOr, Expr, ExprRef, Pos},
utils::{self, Ent, EntSlice, EntVec},
Ident,
},
@ -38,9 +38,9 @@ pub type Offset = u32;
pub type Size = u32;
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Default, PartialOrd, Ord)]
pub struct List(pub u32);
pub struct Tuple(pub u32);
impl List {
impl Tuple {
const LEN_BITS: u32 = 5;
const LEN_MASK: usize = Self::MAX_LEN - 1;
const MAX_LEN: usize = 1 << Self::LEN_BITS;
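// Editor's note: a standalone sketch, not part of this diff, of the packed handle
// used by `Tuple`/`List` above. The layout is an assumption read off the constants:
// the low LEN_BITS carry the element count, the remaining bits the offset into the
// shared `args` arena. `PackedArgs` is an invented name for the example.
#[derive(Clone, Copy)]
struct PackedArgs(u32);

impl PackedArgs {
    const LEN_BITS: u32 = 5;
    const MAX_LEN: usize = 1 << Self::LEN_BITS;
    const LEN_MASK: usize = Self::MAX_LEN - 1;

    // None when the argument list is too long to encode in the handle.
    fn new(offset: usize, len: usize) -> Option<Self> {
        (len < Self::MAX_LEN).then(|| Self(((offset << Self::LEN_BITS) | len) as u32))
    }

    fn len(self) -> usize {
        self.0 as usize & Self::LEN_MASK
    }

    fn range(self) -> core::ops::Range<usize> {
        let start = self.0 as usize >> Self::LEN_BITS;
        start..start + self.len()
    }
}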
@ -83,7 +83,7 @@ pub enum Arg {
}
impl ArgIter {
pub fn next(&mut self, tys: &Types) -> Option<Arg> {
pub(crate) fn next(&mut self, tys: &Types) -> Option<Arg> {
let ty = tys.ins.args[self.0.next()?];
if ty == Id::TYPE {
return Some(Arg::Type(tys.ins.args[self.0.next().unwrap()]));
@ -91,7 +91,7 @@ impl ArgIter {
Some(Arg::Value(ty))
}
pub fn next_value(&mut self, tys: &Types) -> Option<Id> {
pub(crate) fn next_value(&mut self, tys: &Types) -> Option<Id> {
loop {
match self.next(tys)? {
Arg::Type(_) => continue,
@ -104,12 +104,6 @@ impl ArgIter {
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
pub struct Id(NonZeroU32);
impl AsRef<Id> for Id {
fn as_ref(&self) -> &Id {
self
}
}
impl From<Id> for i64 {
fn from(value: Id) -> Self {
value.0.get() as _
@ -156,7 +150,6 @@ impl crate::ctx_map::CtxEntry for Id {
SymKey::Decl(gb.file.into(), gb.name)
}
Kind::Slice(s) => SymKey::Array(&ctx.slices[s]),
Kind::Tuple(t) => SymKey::Tuple(ctx.tuples[t].fields),
Kind::Module(_) | Kind::Builtin(_) => {
SymKey::Decl(Module::default().into(), Ident::INVALID)
}
@ -191,13 +184,11 @@ impl Id {
}
pub fn is_unsigned(self) -> bool {
matches!(self.repr(), U8..=UINT)
|| self.is_never()
|| matches!(self.expand(), Kind::Enum(_))
matches!(self.repr(), U8..=UINT) || self.is_never()
}
pub fn is_integer(self) -> bool {
self.is_signed() || self.is_unsigned()
matches!(self.repr(), U8..=INT) || self.is_never()
}
pub fn is_never(self) -> bool {
@ -273,19 +264,22 @@ impl Id {
}
pub(crate) fn loc(&self, tys: &Types) -> Loc {
use Kind as K;
match self.expand() {
K::Opt(o)
Kind::Opt(o)
if let ty = tys.ins.opts[o].base
&& ty.loc(tys) == Loc::Reg
&& (ty.is_pointer() || tys.size_of(ty) < 8) =>
{
Loc::Reg
}
K::Ptr(_) | K::Enum(_) | K::Builtin(_) => Loc::Reg,
K::Struct(_) | K::Tuple(_) | K::Union(_) if tys.size_of(*self) == 0 => Loc::Reg,
K::Struct(_) | K::Tuple(_) | K::Union(_) | K::Slice(_) | K::Opt(_) => Loc::Stack,
c @ (K::Func(_) | K::Global(_) | K::Module(_) | K::Const(_) | K::Template(_)) => {
Kind::Ptr(_) | Kind::Enum(_) | Kind::Builtin(_) => Loc::Reg,
Kind::Struct(_) | Kind::Union(_) if tys.size_of(*self) == 0 => Loc::Reg,
Kind::Struct(_) | Kind::Union(_) | Kind::Slice(_) | Kind::Opt(_) => Loc::Stack,
c @ (Kind::Func(_)
| Kind::Global(_)
| Kind::Module(_)
| Kind::Const(_)
| Kind::Template(_)) => {
unreachable!("{c:?}")
}
}
@ -299,10 +293,6 @@ impl Id {
_ => false,
}
}
pub fn is_aggregate(&self, tys: &Types) -> bool {
self.loc(tys) == Loc::Stack
}
}
#[derive(PartialEq, Eq, Clone, Copy)]
@ -390,7 +380,6 @@ builtin_type! {
INT;
F32;
F64;
ANY_TYPE;
}
macro_rules! type_kind {
@ -425,12 +414,6 @@ macro_rules! type_kind {
}
}
impl Id {
pub fn kind(self) -> u8 {
(self.repr() >> $name::FLAG_OFFSET) as _
}
}
$(
impl From<$variant> for $name {
fn from(value: $variant) -> Self {
@ -458,7 +441,6 @@ type_kind! {
pub enum Kind {
Builtin,
Struct,
Tuple,
Enum,
Union,
Ptr,
@ -467,8 +449,8 @@ type_kind! {
Func,
Template,
Global,
Const,
Module,
Const,
}
}
@ -523,31 +505,31 @@ impl<'a> Display<'a> {
impl core::fmt::Display for Display<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
use Kind as K;
match K::from_ty(self.ty) {
K::Module(idx) => {
use Kind as TK;
match TK::from_ty(self.ty) {
TK::Module(idx) => {
f.write_str("@use(\"")?;
self.files[idx].path.fmt(f)?;
f.write_str(")[")?;
idx.fmt(f)?;
f.write_str("]")
}
K::Builtin(ty) => f.write_str(to_str(ty)),
K::Opt(ty) => {
TK::Builtin(ty) => f.write_str(to_str(ty)),
TK::Opt(ty) => {
f.write_str("?")?;
self.rety(self.tys.ins.opts[ty].base).fmt(f)
}
K::Ptr(ty) => {
TK::Ptr(ty) => {
f.write_str("^")?;
self.rety(self.tys.ins.ptrs[ty].base).fmt(f)
}
K::Struct(idx) => {
TK::Struct(idx) => {
let record = &self.tys.ins.structs[idx];
if record.name.is_null() {
f.write_str("[")?;
idx.fmt(f)?;
f.write_str("]{")?;
for (i, &StructField { name, ty, .. }) in
for (i, &StructField { name, ty }) in
self.tys.struct_fields(idx).iter().enumerate()
{
if i != 0 {
@ -563,25 +545,13 @@ impl core::fmt::Display for Display<'_> {
f.write_str(file.ident_str(record.name))
}
}
K::Tuple(idx) => {
f.write_str(".(")?;
for (i, &ty) in
self.tys.ins.args[self.tys.ins.tuples[idx].fields.range()].iter().enumerate()
{
if i != 0 {
f.write_str(", ")?;
}
self.rety(ty).fmt(f)?;
}
f.write_str(")")
}
K::Union(idx) => {
TK::Union(idx) => {
let record = &self.tys.ins.unions[idx];
if record.name.is_null() {
f.write_str("[")?;
idx.fmt(f)?;
f.write_str("]{")?;
for (i, &UnionField { name, ty }) in
for (i, &StructField { name, ty }) in
self.tys.union_fields(idx).iter().enumerate()
{
if i != 0 {
@ -597,36 +567,37 @@ impl core::fmt::Display for Display<'_> {
f.write_str(file.ident_str(record.name))
}
}
K::Enum(idx) => {
TK::Enum(idx) => {
let enm = &self.tys.ins.enums[idx];
debug_assert!(!enm.name.is_null());
let file = &self.files[enm.file];
f.write_str(file.ident_str(enm.name))
}
K::Func(idx) => {
TK::Func(idx) => {
f.write_str("fn")?;
idx.fmt(f)
}
K::Template(idx) => {
TK::Template(idx) => {
f.write_str("fn")?;
idx.fmt(f)
}
K::Global(idx) => {
TK::Global(idx) => {
let global = &self.tys.ins.globals[idx];
let file = &self.files[global.file];
f.write_str(file.ident_str(global.name))?;
f.write_str(" (global)")
}
K::Slice(idx) => {
TK::Slice(idx) => {
let array = self.tys.ins.slices[idx];
f.write_str("[")?;
if let Some(len) = array.len() {
len.fmt(f)?;
self.rety(array.elem).fmt(f)?;
if array.len != ArrayLen::MAX {
f.write_str("; ")?;
array.len.fmt(f)?;
}
f.write_str("]")?;
self.rety(array.elem).fmt(f)
f.write_str("]")
}
K::Const(idx) => {
TK::Const(idx) => {
let cnst = &self.tys.ins.consts[idx];
let file = &self.files[cnst.file];
f.write_str(file.ident_str(cnst.name))?;
@ -638,10 +609,9 @@ impl core::fmt::Display for Display<'_> {
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
pub enum SymKey<'a> {
Tuple(List),
Pointer(&'a PtrData),
Optional(&'a OptData),
Type(Id, Pos, List),
Type(Id, Pos, Tuple),
Decl(Id, Ident),
Array(&'a ArrayData),
Constant(&'a ConstData),
@ -649,7 +619,7 @@ pub enum SymKey<'a> {
#[derive(Clone, Copy, Default)]
pub struct Sig {
pub args: List,
pub args: Tuple,
pub ret: Id,
}
@ -671,7 +641,6 @@ pub struct FuncData {
pub sig: Sig,
pub is_inline: bool,
pub is_generic: bool,
pub is_import: bool,
pub comp_state: [PackedCompState; 2],
}
@ -744,7 +713,7 @@ pub struct TypeBase {
pub pos: Pos,
pub name: Ident,
pub field_start: u32,
pub captured: List,
pub captured: Tuple,
pub ast: ExprRef,
}
@ -755,11 +724,6 @@ pub struct EnumData {
impl_deref!(EnumData { base: TypeBase });
pub struct UnionField {
pub name: Ident,
pub ty: Id,
}
#[derive(Default)]
pub struct UnionData {
pub base: TypeBase,
@ -772,7 +736,6 @@ impl_deref!(UnionData { base: TypeBase });
pub struct StructField {
pub name: Ident,
pub ty: Id,
pub default_value: Option<Const>,
}
#[derive(Default)]
@ -786,13 +749,6 @@ pub struct StructData {
impl_deref!(StructData { base: TypeBase });
#[derive(Default)]
pub struct TupleData {
pub fields: List,
pub size: Cell<Size>,
pub align: Cell<u8>,
}
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
pub struct OptData {
pub base: Id,
@ -862,7 +818,6 @@ impl IdentInterner {
#[derive(Default)]
pub struct TypesTmp {
pub struct_fields: Vec<StructField>,
pub union_fields: Vec<UnionField>,
pub enum_fields: Vec<EnumField>,
pub args: Vec<Id>,
}
@ -871,7 +826,6 @@ pub struct TypesTmp {
pub struct TypeIns {
pub args: Vec<Id>,
pub struct_fields: Vec<StructField>,
pub union_fields: Vec<UnionField>,
pub enum_fields: Vec<EnumField>,
pub funcs: EntVec<Func, FuncData>,
pub templates: EntVec<Template, TemplateData>,
@ -883,7 +837,6 @@ pub struct TypeIns {
pub ptrs: EntVec<Ptr, PtrData>,
pub opts: EntVec<Opt, OptData>,
pub slices: EntVec<Slice, ArrayData>,
pub tuples: EntVec<Tuple, TupleData>,
}
pub struct FTask {
@ -927,9 +880,7 @@ impl Types {
| Kind::Builtin(_)
| Kind::Ptr(_)
| Kind::Slice(_)
| Kind::Tuple(_)
| Kind::Opt(_) => utils::is_pascal_case,
Kind::Func(f) if self.ins.funcs[f].is_import => |_| Ok(()),
Kind::Func(f)
if let &Expr::Closure { ret: &Expr::Ident { id, .. }, .. } =
self.ins.funcs[f].expr.get(&files[self.ins.funcs[f].file])
@ -951,23 +902,23 @@ impl Types {
}
}
pub fn pack_args(&mut self, arg_base: usize) -> Option<List> {
pub fn pack_args(&mut self, arg_base: usize) -> Option<Tuple> {
let base = self.ins.args.len();
self.ins.args.extend(self.tmp.args.drain(arg_base..));
let needle = &self.ins.args[base..];
if needle.is_empty() {
return Some(List::empty());
return Some(Tuple::empty());
}
let len = needle.len();
// FIXME: maybe later, when this becomes a bottleneck, we can use a more
// efficient search (SIMD?, indexing?)
let sp = self.ins.args.windows(needle.len()).position(|val| val == needle).unwrap();
self.ins.args.truncate((sp + needle.len()).max(base));
List::new(sp, len)
Tuple::new(sp, len)
}
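// Editor's note: a standalone sketch, not part of this diff, of the windowed
// interning trick `pack_args` uses above, assuming the goal is to reuse an existing
// occurrence of the freshly pushed suffix so identical signatures share one arena
// region. `intern_suffix` is an invented name for the example.
fn intern_suffix(arena: &mut Vec<u32>, base: usize) -> (usize, usize) {
    let needle_len = arena.len() - base;
    if needle_len == 0 {
        return (0, 0);
    }
    // The search always succeeds because, in the worst case, it finds the copy
    // that was just appended at `base`.
    let pos = arena
        .windows(needle_len)
        .position(|w| w == &arena[base..])
        .unwrap();
    // Drop the fresh copy when an earlier (possibly overlapping) occurrence exists.
    arena.truncate((pos + needle_len).max(base));
    (pos, needle_len)
}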
pub fn union_fields(&self, union: Union) -> &[UnionField] {
&self.ins.union_fields[self.union_field_range(union)]
pub fn union_fields(&self, union: Union) -> &[StructField] {
&self.ins.struct_fields[self.union_field_range(union)]
}
fn union_field_range(&self, union: Union) -> Range<usize> {
@ -976,7 +927,7 @@ impl Types {
.ins
.unions
.next(union)
.map_or(self.ins.union_fields.len(), |s| s.field_start as usize);
.map_or(self.ins.struct_fields.len(), |s| s.field_start as usize);
start..end
}
@ -984,7 +935,7 @@ impl Types {
&self.ins.struct_fields[self.struct_field_range(strct)]
}
pub fn struct_field_range(&self, strct: Struct) -> Range<usize> {
fn struct_field_range(&self, strct: Struct) -> Range<usize> {
let start = self.ins.structs[strct].field_start as usize;
let end = self
.ins
@ -1046,16 +997,6 @@ impl Types {
self.ins.structs[stru].size.set(oiter.offset);
oiter.offset
}
Kind::Tuple(tuple) => {
if self.ins.tuples[tuple].size.get() != 0 {
return self.ins.tuples[tuple].size.get();
}
let mut oiter = OffsetIter::new(tuple, self);
while oiter.next(self).is_some() {}
self.ins.tuples[tuple].size.set(oiter.offset);
oiter.offset
}
Kind::Union(union) => {
if self.ins.unions[union].size.get() != 0 {
return self.ins.unions[union].size.get();
@ -1075,12 +1016,8 @@ impl Types {
self.size_of(base) + self.align_of(base)
}
}
Kind::Ptr(_) | Kind::Builtin(_) => ty.simple_size().unwrap(),
Kind::Func(_)
| Kind::Template(_)
| Kind::Global(_)
| Kind::Const(_)
| Kind::Module(_) => unreachable!(),
_ if let Some(size) = ty.simple_size() => size,
ty => unimplemented!("size_of: {:?}", ty),
}
}
@ -1112,15 +1049,6 @@ impl Types {
self.ins.structs[stru].align.set(align.try_into().unwrap());
align
}
Kind::Tuple(tuple) => {
if self.ins.tuples[tuple].align.get() != 0 {
return self.ins.tuples[tuple].align.get() as _;
}
let align =
self.tuple_fields(tuple).iter().map(|&f| self.align_of(f)).max().unwrap_or(1);
self.ins.tuples[tuple].align.set(align.try_into().unwrap());
align
}
Kind::Slice(arr) => {
let arr = &self.ins.slices[arr];
match arr.len {
@ -1128,14 +1056,7 @@ impl Types {
_ => self.align_of(arr.elem),
}
}
Kind::Opt(opt) => self.align_of(self.ins.opts[opt].base),
Kind::Builtin(_) | Kind::Enum(_) | Kind::Ptr(_) => self.size_of(ty).max(1),
Kind::Func(_)
| Kind::Template(_)
| Kind::Global(_)
| Kind::Const(_)
| Kind::Module(_) => unreachable!(),
//_ => self.size_of(ty).max(1),
_ => self.size_of(ty).max(1),
}
}
@ -1182,7 +1103,7 @@ impl Types {
self.struct_fields(s).iter().position(|f| f.name == name)
}
pub fn find_union_field(&self, u: Union, name: &str) -> Option<(usize, &UnionField)> {
pub fn find_union_field(&self, u: Union, name: &str) -> Option<(usize, &StructField)> {
let name = self.names.project(name)?;
self.union_fields(u).iter().enumerate().find(|(_, f)| f.name == name)
}
@ -1197,14 +1118,10 @@ impl Types {
self.ins.globals.clear();
self.ins.structs.clear();
self.ins.struct_fields.clear();
self.ins.union_fields.clear();
self.ins.enum_fields.clear();
self.ins.ptrs.clear();
self.ins.slices.clear();
debug_assert_eq!(self.tmp.struct_fields.len(), 0);
debug_assert_eq!(self.tmp.union_fields.len(), 0);
debug_assert_eq!(self.tmp.enum_fields.len(), 0);
debug_assert_eq!(self.tmp.args.len(), 0);
debug_assert_eq!(self.tasks.len(), 0);
@ -1223,7 +1140,6 @@ impl Types {
| Kind::Template(_)
| Kind::Global(_)
| Kind::Module(_)
| Kind::Tuple(_)
| Kind::Const(_) => return None,
})
}
@ -1249,11 +1165,7 @@ impl Types {
self.type_base_of(ty).map(|b| b.parent)
}
pub fn captures_of<'a>(
&self,
ty: Id,
file: &'a parser::Ast,
) -> Option<(&'a [CapturedIdent], List)> {
pub fn captures_of<'a>(&self, ty: Id, file: &'a parser::Ast) -> Option<(&'a [Ident], Tuple)> {
let base = self.type_base_of(ty)?;
let (Expr::Struct { captured, .. }
@ -1269,28 +1181,10 @@ impl Types {
pub fn len_of(&self, ty: Id) -> Option<u32> {
Some(match ty.expand() {
Kind::Struct(s) => self.struct_field_range(s).len() as _,
Kind::Tuple(s) => self.ins.tuples[s].fields.len() as _,
Kind::Slice(s) => self.ins.slices[s].len()? as _,
_ => return None,
})
}
pub fn name_of(&self, ty: Id, files: &EntSlice<Module, parser::Ast>, data: &mut Vec<u8>) {
use core::fmt::Write;
let str = unsafe { core::mem::transmute::<&mut Vec<u8>, &mut String>(data) };
write!(str, "{}", Display::new(self, files, ty)).unwrap();
}
pub fn tuple_fields(&self, tuple: Tuple) -> &[Id] {
&self.ins.args[self.ins.tuples[tuple].fields.range()]
}
pub fn elem_of(&self, ty: Id) -> Option<Id> {
match ty.expand() {
Kind::Slice(s) => Some(self.ins.slices[s].elem),
_ => None,
}
}
}
pub struct OptLayout {
@ -1299,57 +1193,17 @@ pub struct OptLayout {
pub payload_offset: Offset,
}
pub trait Agregate: Copy {
type Field: AsRef<Id> + 'static;
fn fields(self, tys: &Types) -> Range<usize>;
fn field_by_idx(tys: &Types, index: usize) -> &Self::Field;
fn align_override(self, _: &Types) -> Option<u8> {
None
}
}
impl Agregate for Tuple {
type Field = Id;
fn fields(self, tys: &Types) -> Range<usize> {
tys.ins.tuples[self].fields.range()
}
fn field_by_idx(tys: &Types, index: usize) -> &Self::Field {
&tys.ins.args[index]
}
}
impl Agregate for Struct {
type Field = StructField;
fn fields(self, tys: &Types) -> Range<usize> {
tys.struct_field_range(self)
}
fn field_by_idx(tys: &Types, index: usize) -> &Self::Field {
&tys.ins.struct_fields[index]
}
fn align_override(self, tys: &Types) -> Option<u8> {
tys.ins.structs[self].explicit_alignment
}
}
impl AsRef<Id> for StructField {
fn as_ref(&self) -> &Id {
&self.ty
}
}
pub struct OffsetIter<T> {
strct: T,
pub struct OffsetIter {
strct: Struct,
offset: Offset,
fields: Range<usize>,
}
impl OffsetIter<Struct> {
impl OffsetIter {
pub fn new(strct: Struct, tys: &Types) -> Self {
Self { strct, offset: 0, fields: tys.struct_field_range(strct) }
}
pub fn offset_of(tys: &Types, idx: Struct, field: &str) -> Option<(Offset, Id)> {
let field_id = tys.names.project(field)?;
OffsetIter::new(idx, tys)
@ -1357,33 +1211,25 @@ impl OffsetIter<Struct> {
.find(|(f, _)| f.name == field_id)
.map(|(f, off)| (off, f.ty))
}
}
impl<T: Agregate> OffsetIter<T> {
pub fn new(strct: T, tys: &Types) -> Self {
Self { strct, offset: 0, fields: strct.fields(tys) }
}
fn next<'a>(&mut self, tys: &'a Types) -> Option<(&'a StructField, Offset)> {
let stru = &tys.ins.structs[self.strct];
let field = &tys.ins.struct_fields[self.fields.next()?];
fn next<'a>(&mut self, tys: &'a Types) -> Option<(&'a T::Field, Offset)> {
let field = &T::field_by_idx(tys, self.fields.next()?);
let align = self
.strct
.align_override(tys)
.map_or_else(|| tys.align_of(*field.as_ref()), |a| a as u32);
let align = stru.explicit_alignment.map_or_else(|| tys.align_of(field.ty), |a| a as u32);
self.offset = (self.offset + align - 1) & !(align - 1);
let off = self.offset;
self.offset += tys.size_of(*field.as_ref());
self.offset += tys.size_of(field.ty);
Some((field, off))
}
pub fn next_ty(&mut self, tys: &Types) -> Option<(Id, Offset)> {
let (field, off) = self.next(tys)?;
Some((*field.as_ref(), off))
Some((field.ty, off))
}
pub fn into_iter(mut self, tys: &Types) -> impl Iterator<Item = (&T::Field, Offset)> {
pub fn into_iter(mut self, tys: &Types) -> impl Iterator<Item = (&StructField, Offset)> {
core::iter::from_fn(move || self.next(tys))
}
}
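// Editor's note: a minimal sketch, not part of this diff, of the field-layout step
// used by `OffsetIter::next` above; the mask trick is only valid when `align` is a
// power of two.
fn align_up(offset: u32, align: u32) -> u32 {
    debug_assert!(align.is_power_of_two());
    (offset + align - 1) & !(align - 1)
}

// e.g. laying out (u8, u32, u16) with natural alignment:
//   u8  -> offset 0, next free byte 1
//   u32 -> align_up(1, 4) = 4, next free byte 8
//   u16 -> align_up(8, 2) = 8, next free byte 10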

View file

@ -1,3 +1,4 @@
#![expect(dead_code)]
use {
alloc::alloc,
core::{
@ -6,7 +7,7 @@ use {
hint::unreachable_unchecked,
marker::PhantomData,
mem::MaybeUninit,
ops::{Deref, DerefMut, Not, Range},
ops::{Deref, DerefMut, Not},
ptr::Unique,
},
};
@ -31,10 +32,9 @@ pub fn is_screaming_case(str: &str) -> Result<(), &'static str> {
}
type Nid = u16;
type BitSetUnit = usize;
pub union BitSet {
inline: BitSetUnit,
inline: usize,
alloced: Unique<AllocedBitSet>,
}
@ -78,9 +78,9 @@ impl Default for BitSet {
}
impl BitSet {
const FLAG: BitSetUnit = 1 << (Self::UNIT - 1);
const FLAG: usize = 1 << (Self::UNIT - 1);
const INLINE_ELEMS: usize = Self::UNIT - 1;
pub const UNIT: usize = core::mem::size_of::<BitSetUnit>() * 8;
const UNIT: usize = core::mem::size_of::<usize>() * 8;
pub fn with_capacity(len: usize) -> Self {
let mut s = Self::default();
@ -92,7 +92,7 @@ impl BitSet {
unsafe { self.inline & Self::FLAG != 0 }
}
fn data_and_len(&self) -> (&[BitSetUnit], usize) {
fn data_and_len(&self) -> (&[usize], usize) {
unsafe {
if self.is_inline() {
(core::slice::from_ref(&self.inline), Self::INLINE_ELEMS)
@ -100,16 +100,16 @@ impl BitSet {
let small_vec = self.alloced.as_ref();
(
core::slice::from_raw_parts(
&small_vec.data as *const _ as *const BitSetUnit,
&small_vec.data as *const _ as *const usize,
small_vec.cap,
),
small_vec.cap * Self::UNIT,
small_vec.cap * core::mem::size_of::<usize>() * 8,
)
}
}
}
fn data_mut_and_len(&mut self) -> (&mut [BitSetUnit], usize) {
fn data_mut_and_len(&mut self) -> (&mut [usize], usize) {
unsafe {
if self.is_inline() {
(core::slice::from_mut(&mut self.inline), INLINE_ELEMS)
@ -117,7 +117,7 @@ impl BitSet {
let small_vec = self.alloced.as_mut();
(
core::slice::from_raw_parts_mut(
&mut small_vec.data as *mut _ as *mut BitSetUnit,
&mut small_vec.data as *mut _ as *mut usize,
small_vec.cap,
),
small_vec.cap * Self::UNIT,
@ -163,7 +163,7 @@ impl BitSet {
let (ptr, prev_len) = unsafe {
if self.is_inline() {
let ptr = alloc::alloc(layout);
*ptr.add(off).cast::<BitSetUnit>() = self.inline & !Self::FLAG;
*ptr.add(off).cast::<usize>() = self.inline & !Self::FLAG;
(ptr, 1)
} else {
let prev_len = self.alloced.as_ref().cap;
@ -174,7 +174,7 @@ impl BitSet {
unsafe {
MaybeUninit::fill(
core::slice::from_raw_parts_mut(
ptr.add(off).cast::<MaybeUninit<BitSetUnit>>().add(prev_len),
ptr.add(off).cast::<MaybeUninit<usize>>().add(prev_len),
slot_count - prev_len,
),
0,
@ -187,7 +187,7 @@ impl BitSet {
fn layout(slot_count: usize) -> (core::alloc::Layout, usize) {
unsafe {
core::alloc::Layout::new::<AllocedBitSet>()
.extend(Layout::array::<BitSetUnit>(slot_count).unwrap_unchecked())
.extend(Layout::array::<usize>(slot_count).unwrap_unchecked())
.unwrap_unchecked()
}
}
@ -205,10 +205,6 @@ impl BitSet {
pub fn clear(&mut self, len: usize) {
self.reserve(len);
self.clear_as_is();
}
pub fn clear_as_is(&mut self) {
if self.is_inline() {
unsafe { self.inline &= Self::FLAG };
} else {
@ -216,11 +212,7 @@ impl BitSet {
}
}
pub fn approx_unit_cap(&self) -> usize {
self.data_and_len().0.len()
}
pub fn units<'a>(&'a self, slot: &'a mut BitSetUnit) -> &'a [BitSetUnit] {
pub fn units<'a>(&'a self, slot: &'a mut usize) -> &'a [usize] {
if self.is_inline() {
*slot = unsafe { self.inline } & !Self::FLAG;
core::slice::from_ref(slot)
@ -229,47 +221,36 @@ impl BitSet {
}
}
pub fn units_mut(&mut self) -> Option<&mut [BitSetUnit]> {
self.is_inline().not().then(|| self.data_mut_and_len().0)
}
pub fn reserve(&mut self, len: usize) {
if len > self.data_and_len().1 {
self.grow(len.next_power_of_two().max(4 * Self::UNIT));
}
}
pub fn set_range(&mut self, proj_range: Range<usize>) {
if proj_range.is_empty() {
return;
}
self.reserve(proj_range.end);
let (units, _) = self.data_mut_and_len();
if proj_range.start / Self::UNIT == (proj_range.end - 1) / Self::UNIT {
debug_assert!(proj_range.len() <= Self::UNIT);
let mask = ((1 << proj_range.len()) - 1) << (proj_range.start % Self::UNIT);
units[proj_range.start / Self::UNIT] |= mask;
pub fn units_mut(&mut self) -> Result<&mut [usize], &mut InlineBitSetView> {
if self.is_inline() {
Err(unsafe {
core::mem::transmute::<&mut usize, &mut InlineBitSetView>(&mut self.inline)
})
} else {
let fill_range = proj_range.start.div_ceil(Self::UNIT)..proj_range.end / Self::UNIT;
units[fill_range].fill(BitSetUnit::MAX);
let prefix_len = Self::UNIT - proj_range.start % Self::UNIT;
let prefix_mask = ((1 << prefix_len) - 1) << (proj_range.start % Self::UNIT);
units[proj_range.start / Self::UNIT] |= prefix_mask;
let postfix_len = proj_range.end % Self::UNIT;
let postfix_mask = (1 << postfix_len) - 1;
units[proj_range.end / Self::UNIT] |= postfix_mask;
Ok(self.data_mut_and_len().0)
}
}
}
pub struct InlineBitSetView(usize);
impl InlineBitSetView {
pub(crate) fn add_mask(&mut self, tmp: usize) {
debug_assert!(tmp & BitSet::FLAG == 0);
self.0 |= tmp;
}
}
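// Editor's note: a standalone sketch, not part of this diff, of the prefix/middle/
// suffix masking idea from the removed `set_range` above, written against plain
// 64-bit units for concreteness.
fn set_bit_range(units: &mut [u64], range: core::ops::Range<usize>) {
    const UNIT: usize = 64;
    if range.is_empty() {
        return;
    }
    let (first, last) = (range.start / UNIT, (range.end - 1) / UNIT);
    // Bits at and above `start` within the first unit.
    let head = !0u64 << (range.start % UNIT);
    // Bits strictly below `end` within the last unit.
    let tail = !0u64 >> (UNIT - 1 - (range.end - 1) % UNIT);
    if first == last {
        units[first] |= head & tail;
    } else {
        units[first] |= head;
        units[first + 1..last].iter_mut().for_each(|u| *u = !0);
        units[last] |= tail;
    }
}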
pub struct BitSetIter<'a> {
index: usize,
current: BitSetUnit,
remining: &'a [BitSetUnit],
current: usize,
remining: &'a [usize],
}
impl Iterator for BitSetIter<'_> {
@ -289,7 +270,7 @@ impl Iterator for BitSetIter<'_> {
struct AllocedBitSet {
cap: usize,
data: [BitSetUnit; 0],
data: [usize; 0],
}
#[cfg(test)]
@ -363,10 +344,6 @@ impl Vc {
}
}
pub fn is_empty(&self) -> bool {
self.len() == 0
}
fn len_mut(&mut self) -> &mut Nid {
unsafe {
if self.is_inline() {

View file

@ -5,8 +5,8 @@ main:
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
JAL r31, r0, :cond
CP r32, r0
CP r33, r1
CP r32, r0
JNE r33, r32, :0
JMP :1
0: LI64 r32, 2d

View file

@ -1,11 +1,5 @@
fun:
UN
main:
ADDI64 r254, r254, -8d
ST r31, r254, 0a, 8h
JAL r31, r0, :fun
LD r31, r254, 0a, 8h
ADDI64 r254, r254, 8d
code size: 64
UN
code size: 9
ret: 0
status: Err(Unreachable)

View file

@ -12,7 +12,6 @@ main:
0: ST r0, r32, 0a, 8h
LD r33, r32, 0a, 8h
JEQ r33, r0, :2
ST r0, r32, 8a, 8h
LI64 r32, 200d
CP r1, r32
JMP :1
@ -49,9 +48,10 @@ main:
JMP :1
6: CP r1, r0
JMP :1
5: ST r0, r32, 0a, 8h
5: ADDI64 r34, r32, 16d
ST r0, r32, 0a, 8h
ST r0, r32, 8a, 8h
ADDI64 r32, r32, 16d
CP r32, r34
JMP :7
3: JAL r31, r0, :new_stru
ST r1, r32, 0a, 16h
@ -67,6 +67,6 @@ new_stru:
LD r1, r254, 0a, 16h
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
code size: 668
code size: 658
ret: 0
status: Ok(())

View file

@ -1,10 +1,10 @@
continue_and_state_change:
CP r13, r2
CP r15, r0
LI64 r16, 3d
LI64 r14, 4d
LI64 r17, 2d
LI64 r18, 10d
CP r15, r0
LI64 r14, 4d
6: JLTU r13, r18, :0
JMP :1
0: JNE r13, r17, :2
@ -37,41 +37,41 @@ main:
ST r31, r254, 0a, 40h
CP r2, r0
JAL r31, r0, :multiple_breaks
LI64 r32, 3d
CP r33, r1
JEQ r33, r32, :0
CP r32, r1
LI64 r33, 3d
JEQ r32, r33, :0
LI64 r32, 1d
CP r1, r32
JMP :1
0: LI64 r33, 4d
CP r2, r33
0: LI64 r32, 4d
CP r2, r32
JAL r31, r0, :multiple_breaks
LI64 r34, 10d
CP r35, r1
JEQ r35, r34, :2
CP r34, r1
LI64 r35, 10d
JEQ r34, r35, :2
LI64 r32, 2d
CP r1, r32
JMP :1
2: CP r2, r0
JAL r31, r0, :state_change_in_break
CP r35, r1
JEQ r35, r0, :3
CP r1, r32
JMP :1
3: CP r2, r33
JAL r31, r0, :state_change_in_break
CP r35, r1
JEQ r35, r34, :4
CP r34, r1
JEQ r34, r0, :3
CP r1, r33
JMP :1
4: CP r2, r34
3: CP r2, r32
JAL r31, r0, :state_change_in_break
CP r34, r1
JEQ r34, r35, :4
CP r1, r32
JMP :1
4: CP r2, r35
JAL r31, r0, :continue_and_state_change
CP r33, r1
JEQ r33, r34, :5
CP r32, r1
JEQ r32, r35, :5
LI64 r32, 5d
CP r1, r32
JMP :1
5: CP r2, r32
5: CP r2, r33
JAL r31, r0, :continue_and_state_change
CP r32, r1
JEQ r32, r0, :6

View file

@ -1,21 +0,0 @@
b:
CP r13, r3
CP r1, r13
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -32d
ST r31, r254, 8a, 24h
ADDI64 r32, r254, 0d
LI64 r33, 100d
ST r33, r254, 0a, 8h
CP r2, r32
CP r3, r33
JAL r31, r0, :b
CP r32, r1
CP r1, r32
LD r31, r254, 8a, 24h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
code size: 137
ret: 100
status: Ok(())

View file

@ -42,11 +42,10 @@ free:
CP r4, r14
CP r5, r15
ECA
CP r13, r1
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -88d
ST r31, r254, 48a, 40h
ADDI64 r254, r254, -96d
ST r31, r254, 48a, 48h
ADDI64 r32, r254, 24d
CP r1, r32
JAL r31, r0, :new
@ -61,19 +60,20 @@ main:
CP r2, r33
CP r3, r34
JAL r31, r0, :push
LD r34, r254, 0a, 8h
LD r34, r34, 0a, 1h
LD r35, r254, 24a, 8h
LD r35, r35, 0a, 8h
CP r34, r1
LD r35, r254, 0a, 8h
LD r35, r35, 0a, 1h
LD r36, r254, 24a, 8h
LD r34, r36, 0a, 8h
CP r2, r33
JAL r31, r0, :deinit
CP r2, r32
JAL r31, r0, :deinit
ANDI r32, r34, 255d
ADD64 r32, r35, r32
ANDI r32, r35, 255d
ADD64 r32, r34, r32
CP r1, r32
LD r31, r254, 48a, 40h
ADDI64 r254, r254, 88d
LD r31, r254, 48a, 48h
ADDI64 r254, r254, 96d
JALA r0, r31, 0a
malloc:
CP r13, r2
@ -112,49 +112,51 @@ new:
push:
ADDI64 r254, r254, -80d
ST r31, r254, 0a, 80h
CP r36, r2
CP r37, r3
LI64 r35, 1d
LD r33, r36, 8a, 8h
LD r32, r36, 16a, 8h
CP r38, r2
CP r39, r3
LI64 r37, 1d
LD r33, r38, 8a, 8h
LD r32, r38, 16a, 8h
JNE r32, r33, :0
JNE r32, r0, :1
CP r32, r35
CP r32, r37
JMP :2
1: MULI64 r32, r32, 2d
2: CP r2, r32
CP r3, r35
CP r3, r37
JAL r31, r0, :malloc
ST r32, r36, 16a, 8h
CP r34, r1
JNE r34, r0, :3
CP r35, r1
ST r32, r38, 16a, 8h
JNE r35, r0, :3
CP r1, r0
JMP :4
3: LD r32, r36, 0a, 8h
ADD64 r38, r33, r32
CP r33, r34
7: LD r39, r36, 0a, 8h
LD r40, r36, 8a, 8h
JNE r38, r32, :5
JEQ r40, r0, :6
CP r2, r39
CP r3, r40
CP r4, r35
3: LD r32, r38, 0a, 8h
ADD64 r40, r33, r32
CP r34, r35
7: LD r33, r38, 0a, 8h
LD r36, r38, 8a, 8h
JNE r40, r32, :5
JEQ r36, r0, :6
CP r2, r33
CP r3, r36
CP r4, r37
JAL r31, r0, :free
JMP :6
6: ST r34, r36, 0a, 8h
6: ST r35, r38, 0a, 8h
JMP :0
5: LD r39, r32, 0a, 1h
ST r39, r33, 0a, 1h
ADDI64 r33, r33, 1d
ADDI64 r32, r32, 1d
5: ADDI64 r36, r34, 1d
ADDI64 r33, r32, 1d
LD r32, r32, 0a, 1h
ST r32, r34, 0a, 1h
CP r32, r33
CP r34, r36
JMP :7
0: LD r32, r36, 8a, 8h
LD r33, r36, 0a, 8h
0: LD r32, r38, 8a, 8h
LD r33, r38, 0a, 8h
ADD64 r33, r32, r33
ST r37, r33, 0a, 1h
ADD64 r32, r32, r35
ST r32, r36, 8a, 8h
ST r39, r33, 0a, 1h
ADD64 r32, r32, r37
ST r32, r38, 8a, 8h
CP r1, r33
4: LD r31, r254, 0a, 80h
ADDI64 r254, r254, 80d
@ -162,58 +164,60 @@ push:
push:
ADDI64 r254, r254, -88d
ST r31, r254, 0a, 88h
CP r36, r2
CP r37, r3
LI64 r35, 1d
LD r33, r36, 8a, 8h
LD r32, r36, 16a, 8h
CP r38, r2
CP r39, r3
LI64 r37, 1d
LD r33, r38, 8a, 8h
LD r32, r38, 16a, 8h
JNE r32, r33, :0
JNE r32, r0, :1
CP r32, r35
CP r32, r37
JMP :2
1: MULI64 r32, r32, 2d
2: LI64 r38, 8d
MUL64 r34, r32, r38
2: LI64 r40, 8d
MUL64 r34, r32, r40
CP r2, r34
CP r3, r38
CP r3, r40
JAL r31, r0, :malloc
ST r32, r36, 16a, 8h
CP r34, r1
JNE r34, r0, :3
CP r35, r1
ST r32, r38, 16a, 8h
JNE r35, r0, :3
CP r1, r0
JMP :4
3: MULI64 r33, r33, 8d
LD r32, r36, 0a, 8h
ADD64 r39, r32, r33
CP r33, r34
7: LD r40, r36, 0a, 8h
LD r41, r36, 8a, 8h
JNE r39, r32, :5
JEQ r41, r0, :6
MUL64 r32, r41, r38
CP r2, r40
LD r32, r38, 0a, 8h
ADD64 r41, r32, r33
CP r34, r35
7: LD r33, r38, 0a, 8h
LD r36, r38, 8a, 8h
JNE r41, r32, :5
JEQ r36, r0, :6
MUL64 r32, r36, r40
CP r2, r33
CP r3, r32
CP r4, r38
CP r4, r40
JAL r31, r0, :free
JMP :6
6: ST r34, r36, 0a, 8h
6: ST r35, r38, 0a, 8h
JMP :0
5: LD r40, r32, 0a, 8h
ST r40, r33, 0a, 8h
ADDI64 r33, r33, 8d
ADDI64 r32, r32, 8d
5: ADDI64 r36, r34, 8d
ADDI64 r33, r32, 8d
LD r32, r32, 0a, 8h
ST r32, r34, 0a, 8h
CP r32, r33
CP r34, r36
JMP :7
0: LD r32, r36, 8a, 8h
0: LD r32, r38, 8a, 8h
MULI64 r33, r32, 8d
LD r34, r36, 0a, 8h
LD r34, r38, 0a, 8h
ADD64 r33, r34, r33
ST r37, r33, 0a, 8h
ADD64 r32, r32, r35
ST r32, r36, 8a, 8h
ST r39, r33, 0a, 8h
ADD64 r32, r32, r37
ST r32, r38, 8a, 8h
CP r1, r33
4: LD r31, r254, 0a, 88h
ADDI64 r254, r254, 88d
JALA r0, r31, 0a
code size: 1623
code size: 1635
ret: 69
status: Ok(())

View file

@ -2,8 +2,8 @@ inb:
CP r1, r0
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
ADDI64 r254, r254, -32d
ST r31, r254, 0a, 32h
LRA r32, r0, :ports
LD r33, r32, 0a, 1h
ANDI r33, r33, 255d
@ -11,12 +11,12 @@ main:
JMP :1
0: JAL r31, r0, :inb
CP r33, r1
CMPU r33, r33, r0
CMPUI r33, r33, 0d
NOT r33, r33
ST r33, r32, 0a, 1h
1: LD r31, r254, 0a, 24h
ADDI64 r254, r254, 24d
CMPU r34, r33, r0
CMPUI r34, r34, 0d
NOT r34, r34
ST r34, r32, 0a, 1h
1: LD r31, r254, 0a, 32h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
code size: 164
ret: 0

View file

@ -1,20 +1,21 @@
main:
ADDI64 r254, r254, -128d
ADDI64 r14, r254, 0d
LI8 r15, 69b
LI64 r16, 128d
ADDI64 r15, r254, 0d
LI8 r16, 69b
LI64 r17, 128d
CP r13, r0
2: LD r17, r254, 42a, 1h
JLTU r13, r16, :0
ANDI r13, r17, 255d
2: LD r14, r254, 42a, 1h
JLTU r13, r17, :0
ANDI r13, r14, 255d
CP r1, r13
JMP :1
0: ADD64 r17, r14, r13
ST r15, r17, 0a, 1h
ADDI64 r13, r13, 1d
0: ADDI64 r14, r13, 1d
ADD64 r13, r15, r13
ST r16, r13, 0a, 1h
CP r13, r14
JMP :2
1: ADDI64 r254, r254, 128d
JALA r0, r31, 0a
code size: 138
code size: 141
ret: 69
status: Ok(())

View file

@ -59,9 +59,9 @@ put_filled_rect:
LD r14, r14, 0a, 8h
ADD64 r26, r14, r26
LD r28, r15, 0a, 8h
MUL64 r15, r27, r25
ADD64 r14, r14, r15
ADD64 r15, r28, r26
MUL64 r25, r27, r25
ADD64 r14, r14, r25
ADD64 r14, r28, r14
3: JGTU r13, r20, :0
JNE r13, r20, :1

View file

View file

@ -6,9 +6,9 @@ integer_range:
CP r2, r16
CP r3, r15
ECA
CP r15, r1
SUB64 r14, r14, r13
ADDI64 r14, r14, 1d
CP r15, r1
DIRU64 r0, r14, r15, r14
ADD64 r13, r14, r13
CP r1, r13

View file

@ -1,49 +0,0 @@
chars:
ADDI64 r254, r254, -32d
ST r3, r254, 16a, 16h
ADDI64 r3, r254, 16d
CP r13, r3
ADDI64 r14, r254, 0d
BMC r13, r14, 16h
LD r1, r14, 0a, 16h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -56d
ST r31, r254, 32a, 24h
LRA r32, r0, :Hello, World!
ST r32, r254, 16a, 8h
LI64 r32, 13d
ST r32, r254, 24a, 8h
ADDI64 r32, r254, 0d
LD r3, r254, 16a, 16h
JAL r31, r0, :chars
ST r1, r32, 0a, 16h
2: CP r2, r32
JAL r31, r0, :next
CP r33, r1
ANDI r33, r33, 65535d
JNE r33, r0, :0
JMP :1
0: JMP :2
1: LD r31, r254, 32a, 24h
ADDI64 r254, r254, 56d
JALA r0, r31, 0a
next:
CP r13, r2
LD r14, r13, 8a, 8h
JNE r14, r0, :0
CP r1, r0
JMP :1
0: LD r15, r13, 0a, 8h
ADDI64 r15, r15, 1d
ST r15, r13, 0a, 8h
ADDI64 r14, r14, -1d
LD r15, r15, 0a, 1h
ST r14, r13, 8a, 8h
ORI r13, r15, 32768d
CP r1, r13
1: JALA r0, r31, 0a
code size: 423
ret: 0
status: Ok(())

View file

@ -10,26 +10,26 @@ decide:
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -104d
ST r31, r254, 72a, 32h
ADDI64 r254, r254, -120d
ST r31, r254, 72a, 48h
ADDI64 r32, r254, 48d
CP r1, r32
CP r2, r0
JAL r31, r0, :decide
ADDI64 r33, r254, 24d
BMC r32, r33, 24h
LI64 r34, 1d
CP r1, r33
CP r2, r34
JAL r31, r0, :decide
ADDI64 r34, r254, 0d
ADDI64 r34, r254, 24d
BMC r32, r34, 24h
LI64 r35, 1d
CP r1, r34
CP r2, r35
JAL r31, r0, :decide
ADDI64 r36, r254, 0d
BMC r32, r36, 24h
LD r32, r254, 24a, 8h
LD r33, r254, 0a, 8h
ADD64 r32, r33, r32
CP r1, r32
LD r31, r254, 72a, 32h
ADDI64 r254, r254, 104d
LD r31, r254, 72a, 48h
ADDI64 r254, r254, 120d
JALA r0, r31, 0a
code size: 273
ret: 1

View file

@ -3,15 +3,15 @@ main:
ST r31, r254, 32a, 40h
LRA r32, r0, :"Goodbye, World!\0"
LRA r33, r0, :"Hello, World!\0"
ST r32, r254, 16a, 8h
ST r32, r254, 8a, 8h
ST r33, r254, 24a, 8h
LD r2, r254, 24a, 8h
LD r3, r254, 16a, 8h
LD r3, r254, 8a, 8h
JAL r31, r0, :print
ADDI64 r34, r254, 8d
ADDI64 r35, r254, 0d
ST r32, r254, 8a, 8h
ST r33, r254, 0a, 8h
ADDI64 r34, r254, 0d
ADDI64 r35, r254, 16d
ST r32, r254, 0a, 8h
ST r33, r254, 16a, 8h
CP r2, r35
CP r3, r34
JAL r31, r0, :print2

View file

@ -1,6 +1,6 @@
main:
ADDI64 r254, r254, -58d
ST r31, r254, 26a, 32h
ADDI64 r254, r254, -66d
ST r31, r254, 26a, 40h
JAL r31, r0, :returner_fn
CP r32, r1
ADDI64 r33, r254, 2d
@ -25,8 +25,8 @@ main:
JMP :1
0: LI64 r32, 1d
CP r1, r32
1: LD r31, r254, 26a, 32h
ADDI64 r254, r254, 58d
1: LD r31, r254, 26a, 40h
ADDI64 r254, r254, 66d
JALA r0, r31, 0a
returner_bn:
ADDI64 r254, r254, -24d

View file

@ -3,18 +3,20 @@ decide:
CP r1, r13
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -136d
ST r31, r254, 96a, 40h
ADDI64 r254, r254, -128d
ST r31, r254, 80a, 48h
JAL r31, r0, :decide
CP r33, r0
ADDI64 r34, r254, 88d
CP r32, r1
CP r33, r0
ADDI64 r34, r254, 72d
ANDI r32, r32, 255d
JNE r32, r0, :0
CP r32, r33
JMP :1
0: CP r32, r34
1: JNE r32, r33, :2
1: LI64 r35, 1d
ST r35, r254, 72a, 8h
JNE r32, r33, :2
LI64 r32, 9001d
CP r1, r32
JMP :3
@ -23,20 +25,20 @@ main:
ANDI r33, r33, 255d
JNE r33, r0, :4
LI8 r33, 1b
ST r33, r254, 72a, 1h
ST r33, r254, 56a, 1h
LD r32, r32, 0a, 8h
ST r32, r254, 80a, 8h
ST r32, r254, 64a, 8h
JMP :5
4: ST r0, r254, 72a, 1h
5: LD r32, r254, 72a, 1h
4: ST r0, r254, 56a, 1h
5: LD r32, r254, 56a, 1h
ANDI r32, r32, 255d
JEQ r32, r0, :6
LI64 r32, 42d
CP r1, r32
JMP :3
6: JAL r31, r0, :decide
CP r33, r0
CP r32, r1
CP r33, r0
ANDI r32, r32, 255d
JNE r32, r0, :7
CP r32, r33
@ -48,33 +50,28 @@ main:
LI64 r32, 69d
CP r1, r32
JMP :3
9: ADDI64 r33, r254, 56d
9: ADDI64 r33, r254, 40d
JAL r31, r0, :new_foo
ST r1, r33, 0a, 16h
LD r35, r254, 56a, 8h
JNE r35, r0, :10
LD r36, r254, 40a, 8h
JNE r36, r0, :10
LI64 r32, 999d
CP r1, r32
JMP :3
10: LRA r35, r0, :"foo\0"
ST r35, r254, 40a, 8h
LI64 r35, 4d
ST r35, r254, 48a, 8h
10: LRA r36, r0, :"foo\0"
LD r2, r33, 0a, 16h
LD r4, r254, 40a, 16h
CP r4, r36
JAL r31, r0, :use_foo
ADDI64 r33, r254, 0d
JAL r31, r0, :no_foo
ST r1, r33, 0a, 16h
JAL r31, r0, :decide
CP r35, r1
ANDI r35, r35, 255d
JNE r35, r0, :11
CP r36, r1
ANDI r36, r36, 255d
JNE r36, r0, :11
JMP :12
11: ST r34, r254, 0a, 8h
LI64 r35, 1d
ST r35, r254, 8a, 8h
ST r35, r254, 88a, 8h
12: LD r35, r254, 0a, 8h
JNE r35, r0, :13
LI64 r32, 34d
@ -101,8 +98,8 @@ main:
ANDI r32, r32, 65535d
SUB64 r32, r32, r33
CP r1, r32
3: LD r31, r254, 96a, 40h
ADDI64 r254, r254, 136d
3: LD r31, r254, 80a, 48h
ADDI64 r254, r254, 128d
JALA r0, r31, 0a
new_bar:
ADDI64 r254, r254, -24d
@ -132,13 +129,11 @@ no_foo:
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
use_foo:
ADDI64 r254, r254, -32d
ST r2, r254, 16a, 16h
ADDI64 r2, r254, 16d
ST r4, r254, 0a, 16h
ADDI64 r4, r254, 0d
ADDI64 r254, r254, 32d
ADDI64 r254, r254, -16d
ST r2, r254, 0a, 16h
ADDI64 r2, r254, 0d
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
code size: 1162
code size: 1092
ret: 0
status: Ok(())

View file

@ -1,6 +1,6 @@
main:
ADDI64 r254, r254, -56d
ST r31, r254, 24a, 32h
ADDI64 r254, r254, -64d
ST r31, r254, 24a, 40h
ADDI64 r32, r254, 0d
LI64 r33, 1d
ST r33, r254, 16a, 8h
@ -9,14 +9,14 @@ main:
ST r33, r254, 8a, 8h
JAL r31, r0, :opaque
ST r1, r32, 0a, 16h
LD r33, r254, 8a, 8h
LD r34, r254, 16a, 8h
ADD64 r33, r34, r33
LD r34, r254, 8a, 8h
LD r35, r254, 16a, 8h
ADD64 r34, r35, r34
LD r32, r254, 0a, 8h
SUB64 r32, r32, r33
SUB64 r32, r32, r34
CP r1, r32
LD r31, r254, 24a, 32h
ADDI64 r254, r254, 56d
LD r31, r254, 24a, 40h
ADDI64 r254, r254, 64d
JALA r0, r31, 0a
opaque:
ADDI64 r254, r254, -16d

View file

@ -1,6 +0,0 @@
main:
CP r1, r0
JALA r0, r31, 0a
code size: 22
ret: 0
status: Ok(())

View file

@ -1,23 +1,23 @@
main:
ADDI64 r254, r254, -44d
ST r31, r254, 4a, 40h
ADDI64 r254, r254, -52d
ST r31, r254, 4a, 48h
ADDI64 r32, r254, 0d
JAL r31, r0, :random_color
ST r1, r32, 0a, 4h
LD r33, r254, 0a, 1h
LD r34, r254, 1a, 1h
LD r35, r254, 2a, 1h
ANDI r33, r33, 255d
LD r34, r254, 0a, 1h
LD r35, r254, 1a, 1h
LD r36, r254, 2a, 1h
ANDI r34, r34, 255d
LD r32, r254, 3a, 1h
ANDI r35, r35, 255d
ADD64 r33, r34, r33
LD r32, r254, 3a, 1h
ANDI r33, r36, 255d
ADD64 r34, r35, r34
ANDI r32, r32, 255d
ADD64 r33, r33, r35
ADD64 r33, r34, r33
ADD64 r32, r33, r32
CP r1, r32
LD r31, r254, 4a, 40h
ADDI64 r254, r254, 44d
LD r31, r254, 4a, 48h
ADDI64 r254, r254, 52d
JALA r0, r31, 0a
random_color:
LRA r13, r0, :white

View file

@ -1,8 +0,0 @@
main:
LRA r13, r0, :a
LD r13, r13, 0a, 8h
CP r1, r13
JALA r0, r31, 0a
code size: 50
ret: 0
status: Ok(())

View file

@ -1,27 +0,0 @@
main:
ADDI64 r254, r254, -40d
ST r0, r254, 0a, 8h
LI64 r13, 1d
ST r13, r254, 8a, 8h
LI64 r13, 2d
ST r13, r254, 16a, 8h
LI64 r13, 3d
LI64 r14, 10d
ST r13, r254, 24a, 8h
ST r14, r254, 32a, 8h
LD r13, r254, 0a, 8h
LD r14, r254, 8a, 8h
ADD64 r13, r14, r13
LD r14, r254, 16a, 8h
ADD64 r13, r14, r13
LD r14, r254, 24a, 8h
ADD64 r13, r14, r13
LD r14, r254, 32a, 8h
ADDI64 r13, r13, 4d
SUB64 r13, r13, r14
CP r1, r13
ADDI64 r254, r254, 40d
JALA r0, r31, 0a
code size: 241
ret: 0
status: Ok(())

View file

@ -11,9 +11,9 @@ main:
JALA r0, r31, 0a
sqrt:
CP r14, r2
CP r17, r0
LI64 r16, 15d
LI64 r15, 32768d
CP r17, r0
CP r13, r17
3: JNE r15, r17, :0
CP r1, r13

View file

@ -5,8 +5,8 @@ do_stuff:
just_read:
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -72d
ST r31, r254, 48a, 24h
ADDI64 r254, r254, -80d
ST r31, r254, 48a, 32h
ADDI64 r32, r254, 16d
CP r1, r32
JAL r31, r0, :optionala
@ -37,8 +37,8 @@ main:
CP r33, r1
ADD64 r32, r33, r32
CP r1, r32
1: LD r31, r254, 48a, 24h
ADDI64 r254, r254, 72d
1: LD r31, r254, 48a, 32h
ADDI64 r254, r254, 80d
JALA r0, r31, 0a
optional:
ADDI64 r254, r254, -16d

View file

@ -1,9 +0,0 @@
main:
LRA r13, r0, :"abcdefshijklmnop\0"
LD r13, r13, 0a, 1h
ANDI r13, r13, 255d
CP r1, r13
JALA r0, r31, 0a
code size: 70
ret: 97
status: Ok(())

View file

@ -1,44 +1,45 @@
main:
ADDI64 r254, r254, -40d
LI64 r16, 1d
LI64 r17, 1d
LI64 r15, 4d
ADDI64 r17, r254, 0d
CP r14, r0
ADDI64 r18, r254, 0d
CP r13, r14
6: JNE r13, r15, :0
ADDI64 r18, r254, 32d
LI64 r19, 2d
ADDI64 r19, r254, 32d
LI64 r20, 2d
CP r13, r14
4: LD r15, r254, 16a, 8h
JNE r13, r16, :1
JNE r13, r17, :1
CP r1, r15
JMP :2
1: ADD64 r15, r13, r16
SUB64 r20, r19, r15
MUL64 r20, r20, r19
MUL64 r21, r13, r19
1: ADD64 r16, r13, r17
SUB64 r15, r20, r16
MUL64 r21, r15, r20
MUL64 r22, r13, r20
CP r13, r14
5: JNE r13, r19, :3
CP r13, r15
5: JNE r13, r20, :3
CP r13, r16
JMP :4
3: ADD64 r22, r21, r13
ADD64 r23, r20, r13
MULI64 r22, r22, 8d
3: ADD64 r15, r13, r17
ADD64 r23, r22, r13
ADD64 r13, r21, r13
MULI64 r23, r23, 8d
ADD64 r22, r17, r22
ADD64 r23, r17, r23
BMC r22, r18, 8h
BMC r23, r22, 8h
BMC r18, r23, 8h
ADD64 r13, r13, r16
MULI64 r13, r13, 8d
ADD64 r23, r18, r23
ADD64 r13, r18, r13
BMC r23, r19, 8h
BMC r13, r23, 8h
BMC r19, r13, 8h
CP r13, r15
JMP :5
0: MULI64 r18, r13, 8d
ADD64 r18, r17, r18
ST r13, r18, 0a, 8h
ADD64 r13, r13, r16
0: MULI64 r16, r13, 8d
ADD64 r16, r18, r16
ST r13, r16, 0a, 8h
ADD64 r13, r13, r17
JMP :6
2: ADDI64 r254, r254, 40d
JALA r0, r31, 0a
code size: 264
code size: 267
ret: 0
status: Ok(())

View file

@ -9,8 +9,8 @@ foo:
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -80d
ST r31, r254, 48a, 32h
ADDI64 r254, r254, -88d
ST r31, r254, 48a, 40h
ADDI64 r32, r254, 32d
JAL r31, r0, :foo
ST r1, r32, 0a, 16h
@ -30,8 +30,8 @@ main:
LI64 r33, 7d
SUB64 r32, r33, r32
CP r1, r32
LD r31, r254, 48a, 32h
ADDI64 r254, r254, 80d
LD r31, r254, 48a, 40h
ADDI64 r254, r254, 88d
JALA r0, r31, 0a
code size: 347
ret: 0

View file

@ -1,12 +0,0 @@
main:
LRA r13, r0, :"abcd\0"
ADDI64 r13, r13, 1d
LI64 r14, 37d
CP r2, r14
CP r3, r13
ECA
JALA r0, r31, 0a
bcd
code size: 59
ret: 0
status: Ok(())

View file

@ -1,26 +1,28 @@
main:
ADDI64 r254, r254, -10240d
LI8 r14, 64b
LI64 r15, 1024d
ADDI64 r16, r254, 0d
LI8 r15, 64b
LI64 r16, 1024d
CP r13, r0
4: JLTU r13, r15, :0
ADDI64 r14, r16, 10240d
ADDI64 r13, r16, 1024d
3: LD r15, r254, 2048a, 1h
JLTU r13, r14, :1
ANDI r13, r15, 255d
ADDI64 r17, r254, 0d
4: JLTU r13, r16, :0
ADDI64 r13, r17, 1024d
ADDI64 r15, r17, 10240d
3: LD r14, r254, 2048a, 1h
JLTU r13, r15, :1
ANDI r13, r14, 255d
CP r1, r13
JMP :2
1: BMC r16, r13, 1024h
ADDI64 r13, r13, 1024d
1: ADDI64 r14, r13, 1024d
BMC r17, r13, 1024h
CP r13, r14
JMP :3
0: ADD64 r17, r16, r13
ST r14, r17, 0a, 1h
ADDI64 r13, r13, 1d
0: ADDI64 r14, r13, 1d
ADD64 r13, r17, r13
ST r15, r13, 0a, 1h
CP r13, r14
JMP :4
2: ADDI64 r254, r254, 10240d
JALA r0, r31, 0a
code size: 186
code size: 192
ret: 64
status: Ok(())

View file

@ -1,10 +1,10 @@
main:
ADDI64 r254, r254, -64d
ST r31, r254, 0a, 64h
LI64 r37, 65536d
CP r36, r0
CP r34, r0
LI64 r37, 65536d
LI8 r35, 1b
CP r36, r0
CP r32, r36
7: JAL r31, r0, :opaque
CP r33, r1

View file

@ -1,6 +0,0 @@
main:
CP r1, r0
JALA r0, r31, 0a
code size: 22
ret: 0
status: Ok(())

View file

@ -1,46 +1,46 @@
main:
ADDI64 r254, r254, -40d
ST r31, r254, 16a, 24h
ADDI64 r254, r254, -48d
ST r31, r254, 16a, 32h
ADDI64 r32, r254, 0d
CP r3, r0
CP r4, r0
JAL r31, r0, :maina
ST r1, r32, 0a, 16h
LD r33, r254, 12a, 1h
LD r34, r254, 12a, 1h
LD r32, r254, 3a, 1h
SUB8 r32, r32, r33
SUB8 r32, r32, r34
ANDI r32, r32, 255d
CP r1, r32
LD r31, r254, 16a, 24h
ADDI64 r254, r254, 40d
LD r31, r254, 16a, 32h
ADDI64 r254, r254, 48d
JALA r0, r31, 0a
maina:
ADDI64 r254, r254, -44d
ST r31, r254, 20a, 24h
ADDI64 r254, r254, -52d
ST r31, r254, 20a, 32h
ADDI64 r32, r254, 16d
JAL r31, r0, :small_struct
ST r1, r32, 0a, 4h
ST r0, r254, 0a, 1h
ST r0, r254, 1a, 1h
ST r0, r254, 2a, 1h
LI8 r32, 3b
ST r32, r254, 3a, 1h
LI8 r33, 1b
ST r33, r254, 4a, 1h
LI8 r33, 3b
ST r33, r254, 3a, 1h
LI8 r34, 1b
ST r34, r254, 4a, 1h
ST r0, r254, 5a, 1h
ST r0, r254, 6a, 1h
ST r0, r254, 7a, 1h
ST r0, r254, 8a, 1h
ST r0, r254, 9a, 1h
ST r0, r254, 10a, 1h
ST r32, r254, 11a, 1h
ST r33, r254, 12a, 1h
ST r33, r254, 11a, 1h
ST r34, r254, 12a, 1h
ST r0, r254, 13a, 1h
ST r0, r254, 14a, 1h
ST r0, r254, 15a, 1h
LD r1, r254, 0a, 16h
LD r31, r254, 20a, 24h
ADDI64 r254, r254, 44d
LD r31, r254, 20a, 32h
ADDI64 r254, r254, 52d
JALA r0, r31, 0a
small_struct:
ADDI64 r254, r254, -4d

31
smh.hb
View file

@ -1,31 +0,0 @@
ResultInner := fn($T: type, $E: type): type return union {ok: T, err: E}
Result := fn($T: type, $E: type): type return struct {
inner: ResultInner(T, E),
is_ok: bool,
$ok := fn(k: T): Self return .(.{ok: k}, true)
$err := fn(k: E): Self return .(.{err: k}, false)
$unwrap := fn(self: Self): T return self.expect("Panic: Unwrap on Error Variant.\0".ptr)
$unwrap_unchecked := fn(self: Self): T return self.inner.ok
unwrap_or := fn(self: Self, v: T): T if self.is_ok return self.inner.ok else return v
unwrap_or_else := fn(self: Self, $F: type): T if self.is_ok return self.inner.ok else return F(self.inner.err)
expect := fn(self: Self, msg: ^u8): T if self.is_ok return self.inner.ok else {
@eca(0, msg)
die
}
}
SomeError := enum {
SkillIssue,
}
div := fn(a: uint, b: uint): Result(uint, SomeError) {
if b != 0 return Result(uint, SomeError).ok(a / b)
return Result(uint, SomeError).err(.SkillIssue)
}
main := fn(): uint {
a := div(100, 0)
return a.expect("goof\0".ptr)
}

View file

@ -3,12 +3,10 @@ name = "hbvm"
version = "0.1.0"
edition = "2021"
[dependencies]
hbbytecode = { workspace = true }
[features]
default = ["alloc"]
disasm = ["hbbytecode/disasm", "alloc"]
alloc = []
nightly = []
[dependencies]
hbbytecode = { workspace = true }

View file

@ -4,7 +4,7 @@ pub mod softpaging;
pub(crate) mod addr;
use crate::{utils::impl_display, value::Value};
use crate::utils::impl_display;
pub use addr::Address;
/// Load-store memory access
@ -36,50 +36,6 @@ pub trait Memory {
/// # Safety
/// - Data read have to be valid
unsafe fn prog_read<T: Copy + 'static>(&mut self, addr: Address) -> T;
/// Log instruction to be executed
fn log_instr(&mut self, _at: Address, _regs: &[Value]) {}
}
#[cfg(feature = "alloc")]
#[derive(Default)]
pub struct InstrLogger {
#[cfg(debug_assertions)]
op_buf: alloc::vec::Vec<hbbytecode::Oper>,
#[cfg(debug_assertions)]
disp_buf: alloc::string::String,
}
#[cfg(feature = "alloc")]
impl InstrLogger {
/// # Safety
/// - `addr` needs to point to a valid instruction
#[cfg(debug_assertions)]
pub unsafe fn display_instr(&mut self, addr: Address, regs: &[Value]) -> &str {
let instr = hbbytecode::Instr::try_from(unsafe { *(addr.get() as *const u8) }).unwrap();
let mut bytes =
unsafe { core::slice::from_raw_parts(addr.get() as *const u8, instr.size()) };
use core::fmt::Write;
hbbytecode::parse_args(&mut bytes, instr, &mut self.op_buf).unwrap();
debug_assert!(bytes.is_empty());
self.disp_buf.clear();
write!(self.disp_buf, "{:<10}", alloc::format!("{instr:?}")).unwrap();
for (i, op) in self.op_buf.drain(..).enumerate() {
if i != 0 {
write!(self.disp_buf, ", ").unwrap();
}
write!(self.disp_buf, "{op:?}").unwrap();
if let hbbytecode::Oper::R(r) = op {
write!(self.disp_buf, "({})", regs[r as usize].0).unwrap()
}
}
&self.disp_buf
}
#[cfg(not(debug_assertions))]
pub unsafe fn display_instr(&mut self, addr: Address, regs: &[Value]) -> &str {
""
}
}
/// Unhandled load access trap

View file

@ -55,7 +55,6 @@ where
// - Yes, we assume you run 64 bit CPU. Else ?conradluget a better CPU
// sorry 8 bit fans, HBVM won't run on your Speccy :(
unsafe {
self.memory.log_instr(self.pc, &self.registers);
match self
.memory
.prog_read::<u8>(self.pc as _)