Compare commits

No commits in common. "trunk" and "trunk" have entirely different histories.
trunk ... trunk

65 changed files with 1340 additions and 4500 deletions

.gitignore vendored

@@ -2,10 +2,6 @@
/target /target
rustc-ice-* rustc-ice-*
a.out
out.o
/examples/raylib/main
# sqlite # sqlite
db.sqlite db.sqlite
db.sqlite-journal db.sqlite-journal
@@ -16,4 +12,3 @@ db.sqlite-journal
/depell/src/static-pages/*.html /depell/src/static-pages/*.html
#**/*-sv.rs #**/*-sv.rs
/bytecode/src/instrs.rs /bytecode/src/instrs.rs
/lang/src/testcases.rs

Cargo.lock generated

@@ -38,72 +38,11 @@ dependencies = [
"memchr", "memchr",
] ]
[[package]]
name = "allocator-api2"
version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "anstream"
version = "0.6.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
"is_terminal_polyfill",
"utf8parse",
]
[[package]]
name = "anstyle"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9"
[[package]]
name = "anstyle-parse"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c"
dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125"
dependencies = [
"anstyle",
"windows-sys 0.59.0",
]
[[package]] [[package]]
name = "anyhow" name = "anyhow"
version = "1.0.95" version = "1.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
[[package]]
name = "arbitrary"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223"
[[package]] [[package]]
name = "arc-swap" name = "arc-swap"
@@ -290,7 +229,7 @@ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"regex", "regex",
"rustc-hash 1.1.0", "rustc-hash",
"shlex", "shlex",
"syn", "syn",
"which", "which",
@@ -320,15 +259,6 @@ dependencies = [
"generic-array", "generic-array",
] ]
[[package]]
name = "bumpalo"
version = "3.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
dependencies = [
"allocator-api2",
]
[[package]] [[package]]
name = "bytes" name = "bytes"
version = "1.8.0" version = "1.8.0"
@@ -372,46 +302,6 @@ dependencies = [
"libloading", "libloading",
] ]
[[package]]
name = "clap"
version = "4.5.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84"
dependencies = [
"clap_builder",
"clap_derive",
]
[[package]]
name = "clap_builder"
version = "4.5.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838"
dependencies = [
"anstream",
"anstyle",
"clap_lex",
"strsim",
]
[[package]]
name = "clap_derive"
version = "4.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "clap_lex"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]] [[package]]
name = "cmake" name = "cmake"
version = "0.1.51" version = "0.1.51"
@@ -421,12 +311,6 @@ dependencies = [
"cc", "cc",
] ]
[[package]]
name = "colorchoice"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
[[package]] [[package]]
name = "const_format" name = "const_format"
version = "0.2.33" version = "0.2.33"
@@ -456,143 +340,6 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "cranelift-backend"
version = "0.1.0"
dependencies = [
"cranelift-codegen",
"cranelift-frontend",
"cranelift-module",
"cranelift-object",
"hblang",
"target-lexicon",
]
[[package]]
name = "cranelift-bforest"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac89549be94911dd0e839b4a7db99e9ed29c17517e1c026f61066884c168aa3c"
dependencies = [
"cranelift-entity",
]
[[package]]
name = "cranelift-bitset"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9bd49369f76c77e34e641af85d0956869237832c118964d08bf5f51f210875a"
[[package]]
name = "cranelift-codegen"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd96ce9cf8efebd7f5ab8ced5a0ce44250280bbae9f593d74a6d7effc3582a35"
dependencies = [
"bumpalo",
"cranelift-bforest",
"cranelift-bitset",
"cranelift-codegen-meta",
"cranelift-codegen-shared",
"cranelift-control",
"cranelift-entity",
"cranelift-isle",
"gimli 0.31.1",
"hashbrown 0.14.5",
"log",
"regalloc2",
"rustc-hash 2.1.0",
"serde",
"smallvec",
"target-lexicon",
]
[[package]]
name = "cranelift-codegen-meta"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a68e358827afe4bfb6239fcbf6fbd5ac56206ece8a99c8f5f9bbd518773281a"
dependencies = [
"cranelift-codegen-shared",
]
[[package]]
name = "cranelift-codegen-shared"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e184c9767afbe73d50c55ec29abcf4c32f9baf0d9d22b86d58c4d55e06dee181"
[[package]]
name = "cranelift-control"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cc7664f2a66f053e33f149e952bb5971d138e3af637f5097727ed6dc0ed95dd"
dependencies = [
"arbitrary",
]
[[package]]
name = "cranelift-entity"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "118597e3a9cf86c3556fa579a7a23b955fa18231651a52a77a2475d305a9cf84"
dependencies = [
"cranelift-bitset",
]
[[package]]
name = "cranelift-frontend"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7638ea1efb069a0aa18d8ee67401b6b0d19f6bfe5de5e9ede348bfc80bb0d8c7"
dependencies = [
"cranelift-codegen",
"log",
"smallvec",
"target-lexicon",
]
[[package]]
name = "cranelift-isle"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15c53e1152a0b01c4ed2b1e0535602b8e86458777dd9d18b28732b16325c7dc0"
[[package]]
name = "cranelift-module"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11841b3f54ac480db1e8e8d5678ba901a13b387012d315e3f8fba3e7b7a80447"
dependencies = [
"anyhow",
"cranelift-codegen",
"cranelift-control",
]
[[package]]
name = "cranelift-object"
version = "0.115.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e235ddfd19f100855ad03358c7ae0a13070c38a000701054cab46458cca6e81"
dependencies = [
"anyhow",
"cranelift-codegen",
"cranelift-control",
"cranelift-module",
"log",
"object",
"target-lexicon",
]
[[package]]
name = "crc32fast"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3"
dependencies = [
"cfg-if",
]
[[package]] [[package]]
name = "crypto-common" name = "crypto-common"
version = "0.1.6" version = "0.1.6"
@@ -667,7 +414,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba"
dependencies = [ dependencies = [
"libc", "libc",
"windows-sys 0.52.0", "windows-sys",
] ]
[[package]] [[package]]
@@ -694,12 +441,6 @@ version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "foldhash"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f"
[[package]] [[package]]
name = "form_urlencoded" name = "form_urlencoded"
version = "1.2.1" version = "1.2.1"
@@ -791,11 +532,6 @@ name = "gimli"
version = "0.31.1" version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
dependencies = [
"fallible-iterator 0.3.0",
"indexmap 2.6.0",
"stable_deref_trait",
]
[[package]] [[package]]
name = "glob" name = "glob"
@@ -843,9 +579,6 @@ name = "hashbrown"
version = "0.15.0" version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb"
dependencies = [
"foldhash",
]
[[package]] [[package]]
name = "hashlink" name = "hashlink"
@@ -860,17 +593,6 @@ dependencies = [
name = "hbbytecode" name = "hbbytecode"
version = "0.1.0" version = "0.1.0"
[[package]]
name = "hbc"
version = "0.1.0"
dependencies = [
"clap",
"cranelift-backend",
"hblang",
"log",
"target-lexicon",
]
[[package]] [[package]]
name = "hblang" name = "hblang"
version = "0.1.0" version = "0.1.0"
@@ -914,7 +636,7 @@ version = "0.5.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
dependencies = [ dependencies = [
"windows-sys 0.52.0", "windows-sys",
] ]
[[package]] [[package]]
@@ -1041,12 +763,6 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
[[package]] [[package]]
name = "itertools" name = "itertools"
version = "0.12.1" version = "0.12.1"
@@ -1188,7 +904,7 @@ dependencies = [
"hermit-abi", "hermit-abi",
"libc", "libc",
"wasi", "wasi",
"windows-sys 0.52.0", "windows-sys",
] ]
[[package]] [[package]]
@@ -1219,9 +935,6 @@ version = "0.36.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e"
dependencies = [ dependencies = [
"crc32fast",
"hashbrown 0.15.0",
"indexmap 2.6.0",
"memchr", "memchr",
] ]
@@ -1335,20 +1048,6 @@ dependencies = [
"getrandom", "getrandom",
] ]
[[package]]
name = "regalloc2"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "145c1c267e14f20fb0f88aa76a1c5ffec42d592c1d28b3cd9148ae35916158d3"
dependencies = [
"allocator-api2",
"bumpalo",
"hashbrown 0.15.0",
"log",
"rustc-hash 2.1.0",
"smallvec",
]
[[package]] [[package]]
name = "regex" name = "regex"
version = "1.11.1" version = "1.11.1"
@@ -1390,7 +1089,7 @@ dependencies = [
"libc", "libc",
"spin", "spin",
"untrusted", "untrusted",
"windows-sys 0.52.0", "windows-sys",
] ]
[[package]] [[package]]
@@ -1419,12 +1118,6 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustc-hash"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497"
[[package]] [[package]]
name = "rustix" name = "rustix"
version = "0.38.37" version = "0.38.37"
@@ -1435,7 +1128,7 @@ dependencies = [
"errno", "errno",
"libc", "libc",
"linux-raw-sys", "linux-raw-sys",
"windows-sys 0.52.0", "windows-sys",
] ]
[[package]] [[package]]
@@ -1499,18 +1192,18 @@ checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.217" version = "1.0.210"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a"
dependencies = [ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.217" version = "1.0.210"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -1579,7 +1272,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c"
dependencies = [ dependencies = [
"libc", "libc",
"windows-sys 0.52.0", "windows-sys",
] ]
[[package]] [[package]]
@@ -1594,12 +1287,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "strsim"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]] [[package]]
name = "subtle" name = "subtle"
version = "2.6.1" version = "2.6.1"
@@ -1608,9 +1295,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.87" version = "2.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -1629,12 +1316,6 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394"
[[package]]
name = "target-lexicon"
version = "0.12.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1"
[[package]] [[package]]
name = "time" name = "time"
version = "0.3.36" version = "0.3.36"
@@ -1667,7 +1348,7 @@ dependencies = [
"pin-project-lite", "pin-project-lite",
"socket2", "socket2",
"tokio-macros", "tokio-macros",
"windows-sys 0.52.0", "windows-sys",
] ]
[[package]] [[package]]
@@ -1798,12 +1479,6 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
[[package]]
name = "utf8parse"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]] [[package]]
name = "vcpkg" name = "vcpkg"
version = "0.2.15" version = "0.2.15"
@@ -1919,15 +1594,6 @@ dependencies = [
"windows-targets", "windows-targets",
] ]
[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets",
]
[[package]] [[package]]
name = "windows-targets" name = "windows-targets"
version = "0.52.6" version = "0.52.6"

@@ -12,8 +2,6 @@ members = [
"depell/wasm-fmt", "depell/wasm-fmt",
"depell/wasm-hbc", "depell/wasm-hbc",
"depell/wasm-rt", "depell/wasm-rt",
"cranelift-backend",
"c",
] ]
[workspace.dependencies] [workspace.dependencies]

@@ -5,6 +5,6 @@ edition = "2018"
[features] [features]
default = ["disasm"] default = ["disasm"]
disasm = ["alloc"] std = []
alloc = [] disasm = ["std"]

@@ -98,27 +98,6 @@ fn gen_instrs(generated: &mut String) -> Result<(), Box<dyn std::error::Error>>
writeln!(generated, " {name} = {id},")?; writeln!(generated, " {name} = {id},")?;
} }
writeln!(generated, "}}")?; writeln!(generated, "}}")?;
writeln!(generated, "impl {instr} {{")?;
writeln!(generated, " pub fn size(self) -> usize {{")?;
writeln!(generated, " match self {{")?;
let mut instrs = instructions().collect::<Vec<_>>();
instrs.sort_unstable_by_key(|&[.., ty, _]| iter_args(ty).map(arg_to_width).sum::<usize>());
for group in instrs.chunk_by(|[.., a, _], [.., b, _]| {
iter_args(a).map(arg_to_width).sum::<usize>()
== iter_args(b).map(arg_to_width).sum::<usize>()
}) {
let ty = group[0][2];
for &[_, name, ..] in group {
writeln!(generated, " | {instr}::{name}")?;
}
generated.pop();
let size = iter_args(ty).map(arg_to_width).sum::<usize>() + 1;
writeln!(generated, " => {size},")?;
}
writeln!(generated, " }}")?;
writeln!(generated, " }}")?;
writeln!(generated, "}}")?;
} }
'_arg_kind: { '_arg_kind: {
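For context, the removed loop above generated a `size()` method on the instruction enum, with opcodes grouped by their encoded width. A rough, hedged illustration of the shape of the code it emitted (the opcode names below are made up, not the real instruction set):

```rust
// Hypothetical opcodes standing in for the generated enum; the real build
// script derived each width from `iter_args(ty).map(arg_to_width).sum()`.
#[derive(Clone, Copy)]
enum Instr {
    Nop,    // no operands
    AddRrr, // three one-byte register operands
    JmpRel, // one four-byte relative offset
}

impl Instr {
    pub fn size(self) -> usize {
        // One arm per width group: operand bytes plus 1 for the opcode itself.
        match self {
            Instr::Nop => 1,
            Instr::AddRrr => 1 + 3,
            Instr::JmpRel => 1 + 4,
        }
    }
}

fn main() {
    assert_eq!(Instr::JmpRel.size(), 5);
}
```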

@@ -254,7 +254,8 @@ pub fn disasm<'a>(
|| global_offset > off + len || global_offset > off + len
|| prev || prev
.get(global_offset as usize) .get(global_offset as usize)
.is_none_or(|&b| instr_from_byte(b).is_err()); .map_or(true, |&b| instr_from_byte(b).is_err())
|| prev[global_offset as usize] == 0;
has_oob |= local_has_oob; has_oob |= local_has_oob;
let label = labels.get(&global_offset).unwrap(); let label = labels.get(&global_offset).unwrap();
if local_has_oob { if local_has_oob {
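Aside: the hunk above trades `Option::is_none_or` for the older `map_or(true, ..)` spelling and adds an extra zero-byte check. A minimal sketch (my own, assuming `prev` is a byte slice and `instr_from_byte` returns a `Result`, as in the hunk) of why the first two forms are interchangeable:

```rust
// For any `Option<&u8>`, `is_none_or(f)` and `map_or(true, f)` yield the same
// boolean, so that part of the rewrite only swaps a newer std method for one
// available on older compilers; the `prev[offset] == 0` clause is the actual
// behavioral addition.
fn looks_out_of_bounds(byte: Option<&u8>, instr_ok: impl Fn(u8) -> bool) -> bool {
    let via_is_none_or = byte.is_none_or(|&b| !instr_ok(b));
    let via_map_or = byte.map_or(true, |&b| !instr_ok(b));
    debug_assert_eq!(via_is_none_or, via_map_or);
    via_map_or
}

fn main() {
    let prev = [0x01u8, 0x00];
    let ok = |b: u8| b != 0; // stand-in for `instr_from_byte(b).is_ok()`
    assert!(looks_out_of_bounds(prev.get(7), ok)); // out of range => treat as OOB
    assert!(!looks_out_of_bounds(prev.get(0), ok)); // decodable byte => not OOB
}
```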

@@ -1,11 +0,0 @@
[package]
name = "hbc"
version = "0.1.0"
edition = "2024"
[dependencies]
clap = { version = "4.5.23", features = ["derive", "env"] }
cranelift-backend = { version = "0.1.0", path = "../cranelift-backend" }
hblang = { workspace = true, features = ["std"] }
log = "0.4.22"
target-lexicon = { version = "0.12", features = ["std"] }

@@ -1,95 +0,0 @@
use {
clap::Parser,
std::{io, str::FromStr},
};
#[derive(Parser)]
struct Args {
/// format depends on the backend used
/// - cranelift-backend expects `<key>=<value>,...` pass `help=me` to see options
#[clap(long, env, default_value = "")]
backend_flags: String,
#[clap(long, short, env, default_value_t = target_lexicon::HOST)]
target: target_lexicon::Triple,
#[clap(long, env, value_parser = ["ableos"])]
path_resolver: Option<String>,
/// format the source code reachable form the root file
#[clap(long, env, default_value_t = false, conflicts_with_all = &["fmt_stdout", "dump_asm"])]
fmt: bool,
/// format the root file only and output the formatted file into stdout
#[clap(long, env, default_value_t = false, conflicts_with_all = &["fmt", "dump_asm"])]
fmt_stdout: bool,
#[clap(long, env, default_value_t = false, conflicts_with_all = &["fmt", "fmt_stdout"])]
dump_asm: bool,
/// extra threads to be used during compilation (currently only parser is parallelized)
#[clap(long, env, default_value_t = 0)]
extra_threads: usize,
/// path to the root file
file: String,
}
fn main() {
use std::io::Write;
fn run(out: &mut Vec<u8>, warnings: &mut String) -> std::io::Result<()> {
let Args {
backend_flags,
target,
path_resolver,
fmt,
fmt_stdout,
dump_asm,
extra_threads,
file,
} = Args::parse();
let resolvers = &[("ableos", hblang::ABLEOS_PATH_RESOLVER)];
let mut native = None;
let backend = if target
== target_lexicon::Triple::from_str(hblang::backend::hbvm::TARGET_TRIPLE).unwrap()
{
None
} else {
Some(
native.insert(
cranelift_backend::Backend::new(target, &backend_flags)
.map_err(io::Error::other)?,
) as &mut dyn hblang::backend::Backend,
)
};
let opts = hblang::Options {
fmt,
fmt_stdout,
dump_asm,
extra_threads,
resolver: resolvers
.iter()
.copied()
.find(|&(name, _)| Some(name) == path_resolver.as_deref())
.map(|(_, v)| v),
backend,
};
hblang::run_compiler(&file, opts, out, warnings)
}
log::set_logger(&hblang::fs::Logger).unwrap();
log::set_max_level(log::LevelFilter::Error);
let mut out = Vec::new();
let mut warnings = String::new();
match run(&mut out, &mut warnings) {
Ok(_) => {
std::io::stderr().write_all(warnings.as_bytes()).unwrap();
std::io::stdout().write_all(&out).unwrap()
}
Err(e) => {
std::io::stderr().write_all(warnings.as_bytes()).unwrap();
std::io::stderr().write_all(&out).unwrap();
std::eprint!("{e}");
std::process::exit(1);
}
}
}

@@ -1,12 +0,0 @@
[package]
name = "cranelift-backend"
version = "0.1.0"
edition = "2024"
[dependencies]
cranelift-codegen = "0.115.0"
cranelift-frontend = "0.115.0"
cranelift-module = "0.115.0"
cranelift-object = "0.115.0"
hblang.workspace = true
target-lexicon = "0.12"

@@ -1,994 +0,0 @@
#![feature(if_let_guard)]
#![feature(slice_take)]
use {
core::panic,
cranelift_codegen::{
self as cc, CodegenError, Final, FinalizedMachReloc, MachBufferFinalized,
ir::{self as cir, InstBuilder, MemFlags, TrapCode, UserExternalName, condcodes},
isa::{LookupError, TargetIsa},
settings::{Configurable, SetError},
},
cranelift_frontend::{self as cf, FunctionBuilder},
cranelift_module::{self as cm, Module, ModuleError},
hblang::{
lexer::TokenKind,
nodes::{self as hbnodes},
ty as hbty,
utils::{self as hbutils, Ent, EntVec},
},
std::{
collections::HashSet,
fmt::{Display, Write},
ops::Range,
},
};
mod x86_64;
pub struct Backend {
ctx: cc::Context,
dt_ctx: cm::DataDescription,
fb_ctx: cf::FunctionBuilderContext,
module: Option<cranelift_object::ObjectModule>,
ctrl_plane: cc::control::ControlPlane,
funcs: Functions,
globals: EntVec<hbty::Global, Global>,
asm: Assembler,
}
impl Backend {
pub fn new(triple: target_lexicon::Triple, flags: &str) -> Result<Self, BackendCreationError> {
Ok(Self {
ctx: cc::Context::new(),
dt_ctx: cm::DataDescription::new(),
fb_ctx: cf::FunctionBuilderContext::default(),
ctrl_plane: cc::control::ControlPlane::default(),
module: cranelift_object::ObjectModule::new(cranelift_object::ObjectBuilder::new(
cc::isa::lookup(triple)?.finish(cc::settings::Flags::new({
let mut bl = cc::settings::builder();
for (k, v) in flags.split(',').filter_map(|s| s.split_once('=')) {
bl.set(k, v).map_err(|err| BackendCreationError::InvalidFlag {
key: k.to_owned(),
value: v.to_owned(),
err,
})?;
}
bl
}))?,
"main",
cm::default_libcall_names(),
)?)
.into(),
funcs: Default::default(),
globals: Default::default(),
asm: Default::default(),
})
}
}
impl hblang::backend::Backend for Backend {
fn triple(&self) -> String {
self.module.as_ref().unwrap().isa().triple().to_string()
}
fn assemble_reachable(
&mut self,
from: hbty::Func,
types: &hbty::Types,
files: &hbutils::EntSlice<hbty::Module, hblang::parser::Ast>,
to: &mut Vec<u8>,
) -> hblang::backend::AssemblySpec {
debug_assert!(self.asm.frontier.is_empty());
debug_assert!(self.asm.funcs.is_empty());
debug_assert!(self.asm.globals.is_empty());
let mut module = self.module.take().expect("backend can assemble only once");
fn clif_name_to_ty(name: UserExternalName) -> hbty::Id {
match name.namespace {
0 => hbty::Kind::Func(hbty::Func::new(name.index as _)),
1 => hbty::Kind::Global(hbty::Global::new(name.index as _)),
_ => unreachable!(),
}
.compress()
}
self.globals.shadow(types.ins.globals.len());
let mut seen_names = HashSet::new();
self.asm.frontier.push(from.into());
while let Some(itm) = self.asm.frontier.pop() {
match itm.expand() {
hbty::Kind::Func(func) => {
let fd = &types.ins.funcs[func];
if fd.is_import {
self.funcs.headers.shadow(func.index() + 1);
}
let fuc = &mut self.funcs.headers[func];
let file = &files[fd.file];
if fuc.module_id.is_some() {
continue;
}
self.asm.frontier.extend(
fuc.external_names.clone().map(|r| {
clif_name_to_ty(self.funcs.external_names[r as usize].clone())
}),
);
self.asm.name.clear();
if func == from {
self.asm.name.push_str("main");
} else if fd.is_import {
self.asm.name.push_str(file.ident_str(fd.name));
} else {
self.asm.name.push_str(hblang::strip_cwd(&file.path));
self.asm.name.push('.');
if fd.parent != hbty::Id::from(fd.file) {
write!(
self.asm.name,
"{}",
hbty::Display::new(types, files, fd.parent)
)
.unwrap();
}
self.asm.name.push_str(file.ident_str(fd.name));
if fd.is_generic {
let mut args = fd.sig.args.args();
self.asm.name.push('(');
while let Some(arg) = args.next(types) {
if let hbty::Arg::Type(ty) = arg {
write!(
self.asm.name,
"{},",
hbty::Display::new(types, files, ty)
)
.unwrap();
}
}
self.asm.name.pop().unwrap();
self.asm.name.push(')');
}
}
let linkage = if func == from {
cm::Linkage::Export
} else if fd.is_import {
cm::Linkage::Import
} else {
cm::Linkage::Local
};
build_signature(
module.isa().default_call_conv(),
fd.sig,
types,
&mut self.ctx.func.signature,
&mut vec![],
);
debug_assert!(seen_names.insert(self.asm.name.clone()), "{}", self.asm.name);
fuc.module_id = Some(
module
.declare_function(&self.asm.name, linkage, &self.ctx.func.signature)
.unwrap(),
);
if !fd.is_import {
self.asm.funcs.push(func);
}
}
hbty::Kind::Global(glob) => {
if self.globals[glob].module_id.is_some() {
continue;
}
self.asm.globals.push(glob);
self.asm.name.clear();
let mutable = if types.ins.globals[glob].file == Default::default() {
writeln!(self.asm.name, "anon{}", glob.index()).unwrap();
false
} else {
let file = &files[types.ins.globals[glob].file];
self.asm.name.push_str(hblang::strip_cwd(&file.path));
self.asm.name.push('.');
self.asm.name.push_str(file.ident_str(types.ins.globals[glob].name));
true
};
self.globals[glob].module_id = Some(
module
.declare_data(&self.asm.name, cm::Linkage::Local, mutable, false)
.unwrap(),
);
}
_ => unreachable!(),
}
}
for &func in &self.asm.funcs {
let fuc = &self.funcs.headers[func];
assert!(!types.ins.funcs[func].is_import);
debug_assert!(!fuc.code.is_empty());
let names = &mut self.funcs.external_names
[fuc.external_names.start as usize..fuc.external_names.end as usize];
self.ctx.func.clear();
names.iter().for_each(|nm| {
let mut nm = nm.clone();
if nm.namespace == 0 {
nm.index = self.funcs.headers[hbty::Func::new(nm.index as _)]
.module_id
.unwrap()
.as_u32();
} else {
nm.index =
self.globals[hbty::Global::new(nm.index as _)].module_id.unwrap().as_u32();
}
let prev_len = self.ctx.func.params.user_named_funcs().len();
self.ctx.func.params.ensure_user_func_name(nm.clone());
debug_assert_ne!(self.ctx.func.params.user_named_funcs().len(), prev_len, "{}", nm);
});
module
.define_function_bytes(
fuc.module_id.unwrap(),
&self.ctx.func,
fuc.alignment as _,
&self.funcs.code[fuc.code.start as usize..fuc.code.end as usize],
&self.funcs.relocs[fuc.relocs.start as usize..fuc.relocs.end as usize],
)
.unwrap();
}
for global in self.asm.globals.drain(..) {
let glob = &self.globals[global];
self.dt_ctx.clear();
self.dt_ctx.define(types.ins.globals[global].data.clone().into());
module.define_data(glob.module_id.unwrap(), &self.dt_ctx).unwrap();
}
module.finish().object.write_stream(to).unwrap();
hblang::backend::AssemblySpec { code_length: 0, data_length: 0, entry: 0 }
}
fn disasm<'a>(
&'a self,
_sluce: &[u8],
_eca_handler: &mut dyn FnMut(&mut &[u8]),
_types: &'a hbty::Types,
_files: &'a hbutils::EntSlice<hbty::Module, hblang::parser::Ast>,
_output: &mut String,
) -> Result<(), std::boxed::Box<dyn core::error::Error + Send + Sync + 'a>> {
unimplemented!()
}
fn emit_body(
&mut self,
id: hbty::Func,
nodes: &hbnodes::Nodes,
tys: &hbty::Types,
files: &hbutils::EntSlice<hbty::Module, hblang::parser::Ast>,
) {
let isa = self.module.as_ref().unwrap().isa();
let mut lens = vec![];
let stack_ret = build_signature(
isa.default_call_conv(),
tys.ins.funcs[id].sig,
tys,
&mut self.ctx.func.signature,
&mut lens,
);
FuncBuilder {
bl: FunctionBuilder::new(&mut self.ctx.func, &mut self.fb_ctx),
isa,
nodes,
tys,
files,
values: &mut vec![None; nodes.len()],
arg_lens: &lens,
stack_ret,
}
.build(tys.ins.funcs[id].sig);
self.ctx.func.name =
cir::UserFuncName::User(cir::UserExternalName { namespace: 0, index: id.index() as _ });
//std::eprintln!("{}", self.ctx.func.display());
self.ctx.compile(isa, &mut self.ctrl_plane).unwrap();
let code = self.ctx.compiled_code().unwrap();
self.funcs.push(id, &self.ctx.func, &code.buffer);
self.ctx.clear();
}
}
fn build_signature(
call_conv: cc::isa::CallConv,
sig: hbty::Sig,
types: &hbty::Types,
signature: &mut cir::Signature,
arg_meta: &mut Vec<AbiMeta>,
) -> bool {
signature.clear(call_conv);
match call_conv {
cc::isa::CallConv::SystemV => {
x86_64::build_systemv_signature(sig, types, signature, arg_meta)
}
_ => todo!(),
}
}
#[derive(Clone, Copy)]
struct AbiMeta {
trough_mem: bool,
arg_count: usize,
}
struct FuncBuilder<'a, 'b> {
bl: cf::FunctionBuilder<'b>,
isa: &'a dyn TargetIsa,
nodes: &'a hbnodes::Nodes,
tys: &'a hbty::Types,
files: &'a hbutils::EntSlice<hbty::Module, hblang::parser::Ast>,
values: &'b mut [Option<Result<cir::Value, cir::Block>>],
arg_lens: &'a [AbiMeta],
stack_ret: bool,
}
impl FuncBuilder<'_, '_> {
pub fn build(mut self, sig: hbty::Sig) {
let entry = self.bl.create_block();
self.bl.append_block_params_for_function_params(entry);
self.bl.switch_to_block(entry);
let mut arg_vals = &self.bl.block_params(entry).to_vec()[..];
if self.stack_ret {
let ret_ptr = *arg_vals.take_first().unwrap();
self.values[hbnodes::MEM as usize] = Some(Ok(ret_ptr));
}
let Self { nodes, tys, .. } = self;
let mut parama_len = self.arg_lens[1..].iter();
let mut typs = sig.args.args();
let mut args = nodes[hbnodes::VOID].outputs[hbnodes::ARG_START..].iter();
while let Some(aty) = typs.next(tys) {
let hbty::Arg::Value(ty) = aty else { continue };
let abi_meta = parama_len.next().unwrap();
let &arg = args.next().unwrap();
if !abi_meta.trough_mem && ty.is_aggregate(tys) {
let slot = self.bl.create_sized_stack_slot(cir::StackSlotData {
kind: cir::StackSlotKind::ExplicitSlot,
size: self.tys.size_of(ty),
align_shift: self.tys.align_of(ty).ilog2() as _,
});
let loc = arg_vals.take(..abi_meta.arg_count).unwrap();
assert!(loc.len() <= 2, "NEED handling");
let align =
loc.iter().map(|&p| self.bl.func.dfg.value_type(p).bytes()).max().unwrap();
let mut offset = 0i32;
for &v in loc {
self.bl.ins().stack_store(v, slot, offset);
offset += align as i32;
}
self.values[arg as usize] =
Some(Ok(self.bl.ins().stack_addr(cir::types::I64, slot, 0)))
} else {
let loc = arg_vals.take(..abi_meta.arg_count).unwrap();
debug_assert_eq!(loc.len(), 1);
self.values[arg as usize] = Some(Ok(loc[0]));
}
}
self.values[hbnodes::ENTRY as usize] = Some(Err(entry));
self.emit_node(hbnodes::VOID, hbnodes::VOID);
self.bl.finalize();
}
fn value_of(&self, nid: hbnodes::Nid) -> cir::Value {
self.values[nid as usize].unwrap_or_else(|| panic!("{:?}", self.nodes[nid])).unwrap()
}
fn block_of(&self, nid: hbnodes::Nid) -> cir::Block {
self.values[nid as usize].unwrap().unwrap_err()
}
fn close_block(&mut self, nid: hbnodes::Nid) {
if matches!(self.nodes[nid].kind, hbnodes::Kind::Loop) {
return;
}
self.bl.seal_block(self.block_of(nid));
}
fn emit_node(&mut self, nid: hbnodes::Nid, block: hbnodes::Nid) {
use hbnodes::*;
let mut args = vec![];
if matches!(self.nodes[nid].kind, Kind::Region | Kind::Loop) {
let side = 1 + self.values[nid as usize].is_some() as usize;
for &o in self.nodes[nid].outputs.iter() {
if self.nodes[o].is_data_phi() {
args.push(self.value_of(self.nodes[o].inputs[side]));
}
}
match (self.nodes[nid].kind, self.values[nid as usize]) {
(Kind::Loop, Some(blck)) => {
self.bl.ins().jump(blck.unwrap_err(), &args);
self.bl.seal_block(blck.unwrap_err());
self.close_block(block);
return;
}
(Kind::Region, None) => {
let next = self.bl.create_block();
for &o in self.nodes[nid].outputs.iter() {
if self.nodes[o].is_data_phi() {
self.values[o as usize] = Some(Ok(self
.bl
.append_block_param(next, self.nodes[o].ty.to_clif(self.tys))));
}
}
self.bl.ins().jump(next, &args);
self.close_block(block);
self.values[nid as usize] = Some(Err(next));
return;
}
_ => {}
}
}
let node = &self.nodes[nid];
self.values[nid as usize] = Some(match node.kind {
Kind::Start => {
debug_assert_eq!(self.nodes[node.outputs[0]].kind, Kind::Entry);
self.emit_node(node.outputs[0], block);
return;
}
Kind::If => {
let &[_, cnd] = node.inputs.as_slice() else { unreachable!() };
let &[then, else_] = node.outputs.as_slice() else { unreachable!() };
let then_bl = self.bl.create_block();
let else_bl = self.bl.create_block();
let c = self.value_of(cnd);
self.bl.ins().brif(c, then_bl, &[], else_bl, &[]);
self.values[then as usize] = Some(Err(then_bl));
self.values[else_ as usize] = Some(Err(else_bl));
self.close_block(block);
self.bl.switch_to_block(then_bl);
self.emit_node(then, then);
self.bl.switch_to_block(else_bl);
self.emit_node(else_, else_);
Err(self.block_of(block))
}
Kind::Loop => {
let next = self.bl.create_block();
for &o in self.nodes[nid].outputs.iter() {
if self.nodes[o].is_data_phi() {
self.values[o as usize] = Some(Ok(self
.bl
.append_block_param(next, self.nodes[o].ty.to_clif(self.tys))));
}
}
self.values[nid as usize] = Some(Err(next));
self.bl.ins().jump(self.values[nid as usize].unwrap().unwrap_err(), &args);
self.close_block(block);
self.bl.switch_to_block(self.values[nid as usize].unwrap().unwrap_err());
for &o in node.outputs.iter().rev() {
self.emit_node(o, nid);
}
Err(self.block_of(block))
}
Kind::Region => {
self.bl.ins().jump(self.values[nid as usize].unwrap().unwrap_err(), &args);
self.close_block(block);
self.bl.switch_to_block(self.values[nid as usize].unwrap().unwrap_err());
for &o in node.outputs.iter().rev() {
self.emit_node(o, nid);
}
return;
}
Kind::Die => {
self.bl.ins().trap(TrapCode::unwrap_user(1));
self.close_block(block);
self.emit_node(node.outputs[0], block);
Err(self.block_of(block))
}
Kind::Return { .. } => {
let mut ir_args = vec![];
if node.inputs[1] == hbnodes::VOID {
} else {
let abi_meta = self.arg_lens[0];
let arg = node.inputs[1];
if !abi_meta.trough_mem && self.nodes[node.inputs[1]].ty.is_aggregate(self.tys)
{
let loc = self.bl.func.signature.returns.clone();
assert!(loc.len() <= 2, "NEED handling");
let align = loc.iter().map(|&p| p.value_type.bytes()).max().unwrap();
let mut offset = 0i32;
let src = self.value_of(self.nodes[arg].inputs[1]);
debug_assert!(self.nodes[arg].kind == Kind::Load);
for &v in &loc {
ir_args.push(self.bl.ins().load(
v.value_type,
MemFlags::new(),
src,
offset,
));
offset += align as i32;
}
} else if self.stack_ret {
let src = self.value_of(self.nodes[arg].inputs[1]);
let dest = self.value_of(MEM);
self.bl.emit_small_memory_copy(
self.isa.frontend_config(),
dest,
src,
self.tys.size_of(self.nodes[arg].ty) as _,
self.tys.align_of(self.nodes[arg].ty) as _,
self.tys.align_of(self.nodes[arg].ty) as _,
false,
MemFlags::new(),
);
} else {
ir_args.push(self.value_of(arg));
}
}
self.bl.ins().return_(&ir_args);
self.close_block(block);
self.emit_node(node.outputs[0], block);
Err(self.block_of(block))
}
Kind::Entry => {
for &o in node.outputs.iter().rev() {
self.emit_node(o, nid);
}
return;
}
Kind::Then | Kind::Else => {
for &o in node.outputs.iter().rev() {
self.emit_node(o, block);
}
Err(self.block_of(block))
}
Kind::Call { func, unreachable, args } => {
assert_ne!(func, hbty::Func::ECA, "@eca is not supported");
if unreachable {
todo!()
} else {
let mut arg_lens = vec![];
let mut signature = cir::Signature::new(self.isa.default_call_conv());
let stack_ret = build_signature(
self.isa.default_call_conv(),
self.tys.ins.funcs[func].sig,
self.tys,
&mut signature,
&mut arg_lens,
);
let func_ref =
'b: {
let user_name_ref = self.bl.func.declare_imported_user_function(
cir::UserExternalName { namespace: 0, index: func.index() as _ },
);
if let Some(id) = self.bl.func.dfg.ext_funcs.keys().find(|&k| {
self.bl.func.dfg.ext_funcs[k].name
== cir::ExternalName::user(user_name_ref)
}) {
break 'b id;
}
let signature = self.bl.func.import_signature(signature.clone());
self.bl.func.import_function(cir::ExtFuncData {
name: cir::ExternalName::user(user_name_ref),
signature,
// somehow, this works
colocated: true, // !self.tys.ins.funcs[func].is_import,
})
};
let mut ir_args = vec![];
if stack_ret {
ir_args.push(self.value_of(*node.inputs.last().unwrap()));
}
let mut params = signature.params.as_slice();
let mut parama_len = arg_lens[1..].iter();
let mut typs = args.args();
let mut args = node.inputs[1..].iter();
while let Some(aty) = typs.next(self.tys) {
let hbty::Arg::Value(ty) = aty else { continue };
let abi_meta = parama_len.next().unwrap();
if abi_meta.arg_count == 0 {
continue;
}
let &arg = args.next().unwrap();
if !abi_meta.trough_mem && ty.is_aggregate(self.tys) {
let loc = params.take(..abi_meta.arg_count).unwrap();
assert!(loc.len() <= 2, "NEED handling");
let align = loc.iter().map(|&p| p.value_type.bytes()).max().unwrap();
let mut offset = 0i32;
let src = self.value_of(self.nodes[arg].inputs[1]);
debug_assert!(self.nodes[arg].kind == Kind::Load);
for &v in loc {
ir_args.push(self.bl.ins().load(
v.value_type,
MemFlags::new(),
src,
offset,
));
offset += align as i32;
}
} else {
let loc = params.take(..abi_meta.arg_count).unwrap();
debug_assert_eq!(loc.len(), 1);
ir_args.push(self.value_of(arg));
}
}
let inst = self.bl.ins().call(func_ref, &ir_args);
match *self.bl.inst_results(inst) {
[] => {}
[scala] => self.values[nid as usize] = Some(Ok(scala)),
[a, b] => {
assert!(!stack_ret);
let slot = self.value_of(*node.inputs.last().unwrap());
let loc = [a, b];
assert!(loc.len() <= 2, "NEED handling");
let align = loc
.iter()
.map(|&p| self.bl.func.dfg.value_type(p).bytes())
.max()
.unwrap();
let mut offset = 0i32;
for v in loc {
self.bl.ins().store(MemFlags::new(), v, slot, offset);
offset += align as i32;
}
}
_ => unimplemented!(),
}
for &o in node.outputs.iter().rev() {
if self.nodes[o].inputs[0] == nid
|| (matches!(self.nodes[o].kind, Kind::Loop | Kind::Region)
&& self.nodes[o].inputs[1] == nid)
{
self.emit_node(o, block);
}
}
return;
}
}
Kind::CInt { value } if self.nodes[nid].ty.is_float() => {
Ok(match self.tys.size_of(self.nodes[nid].ty) {
4 => self.bl.ins().f32const(f64::from_bits(value as _) as f32),
8 => self.bl.ins().f64const(f64::from_bits(value as _)),
_ => unimplemented!(),
})
}
Kind::CInt { value } => Ok(self.bl.ins().iconst(
cir::Type::int(self.tys.size_of(node.ty) as u16 * 8).unwrap_or_else(|| {
panic!("{}", hbty::Display::new(self.tys, self.files, node.ty),)
}),
value,
)),
Kind::BinOp { op } => {
let &[_, lhs, rhs] = node.inputs.as_slice() else { unreachable!() };
let [lh, rh] = [self.value_of(lhs), self.value_of(rhs)];
let is_int_op = node.ty.is_integer()
|| node.ty.is_pointer()
|| (node.ty == hbty::Id::BOOL
&& (self.nodes[lhs].ty.is_integer()
|| node.ty.is_pointer()
|| self.nodes[lhs].ty == hbty::Id::BOOL));
let is_float_op = node.ty.is_float()
|| (node.ty == hbty::Id::BOOL && self.nodes[lhs].ty.is_float());
Ok(if is_int_op {
let signed = node.ty.is_signed();
match op {
TokenKind::Add => self.bl.ins().iadd(lh, rh),
TokenKind::Sub => self.bl.ins().isub(lh, rh),
TokenKind::Mul => self.bl.ins().imul(lh, rh),
TokenKind::Shl => self.bl.ins().ishl(lh, rh),
TokenKind::Xor => self.bl.ins().bxor(lh, rh),
TokenKind::Band => self.bl.ins().band(lh, rh),
TokenKind::Bor => self.bl.ins().bor(lh, rh),
TokenKind::Div if signed => self.bl.ins().sdiv(lh, rh),
TokenKind::Mod if signed => self.bl.ins().srem(lh, rh),
TokenKind::Shr if signed => self.bl.ins().sshr(lh, rh),
TokenKind::Div => self.bl.ins().udiv(lh, rh),
TokenKind::Mod => self.bl.ins().urem(lh, rh),
TokenKind::Shr => self.bl.ins().ushr(lh, rh),
TokenKind::Lt
| TokenKind::Gt
| TokenKind::Le
| TokenKind::Ge
| TokenKind::Eq
| TokenKind::Ne => self.bl.ins().icmp(op.to_int_cc(signed), lh, rh),
op => todo!("{op}"),
}
} else if is_float_op {
match op {
TokenKind::Add => self.bl.ins().fadd(lh, rh),
TokenKind::Sub => self.bl.ins().fsub(lh, rh),
TokenKind::Mul => self.bl.ins().fmul(lh, rh),
TokenKind::Div => self.bl.ins().fdiv(lh, rh),
TokenKind::Lt
| TokenKind::Gt
| TokenKind::Le
| TokenKind::Ge
| TokenKind::Eq
| TokenKind::Ne => self.bl.ins().fcmp(op.to_float_cc(), lh, rh),
op => todo!("{op}"),
}
} else {
todo!("{}", hbty::Display::new(self.tys, self.files, node.ty))
})
}
Kind::RetVal => Ok(self.value_of(node.inputs[0])),
Kind::UnOp { op } => {
let oper = self.value_of(node.inputs[1]);
let dst = node.ty;
let src = self
.tys
.inner_of(self.nodes[node.inputs[1]].ty)
.unwrap_or(self.nodes[node.inputs[1]].ty);
let dty = dst.to_clif(self.tys);
Ok(match op {
TokenKind::Sub => self.bl.ins().ineg(oper),
TokenKind::Not => self.bl.ins().bnot(oper),
TokenKind::Float if dst.is_float() && src.is_unsigned() => {
self.bl.ins().fcvt_from_uint(dty, oper)
}
TokenKind::Float if dst.is_float() && src.is_signed() => {
self.bl.ins().fcvt_from_sint(dty, oper)
}
TokenKind::Number if src.is_float() && dst.is_unsigned() => {
self.bl.ins().fcvt_to_uint(dty, oper)
}
TokenKind::Number
if src.is_signed() && (dst.is_integer() || dst.is_pointer()) =>
{
self.bl.ins().sextend(dty, oper)
}
TokenKind::Number
if (src.is_unsigned() || src == hbty::Id::BOOL)
&& (dst.is_integer() || dst.is_pointer()) =>
{
self.bl.ins().uextend(dty, oper)
}
TokenKind::Float if dst == hbty::Id::F64 && src.is_float() => {
self.bl.ins().fpromote(dty, oper)
}
TokenKind::Float if dst == hbty::Id::F32 && src.is_float() => {
self.bl.ins().fdemote(dty, oper)
}
_ => todo!(),
})
}
Kind::Stck => {
let slot = self.bl.create_sized_stack_slot(cir::StackSlotData {
kind: cir::StackSlotKind::ExplicitSlot,
size: self.tys.size_of(node.ty),
align_shift: self.tys.align_of(node.ty).ilog2() as _,
});
Ok(self.bl.ins().stack_addr(cir::types::I64, slot, 0))
}
Kind::Global { global } => {
let glob_ref = {
// already deduplicated by the SoN
let colocated = true;
let user_name_ref =
self.bl.func.declare_imported_user_function(cir::UserExternalName {
namespace: 1,
index: global.index() as u32,
});
self.bl.func.create_global_value(cir::GlobalValueData::Symbol {
name: cir::ExternalName::user(user_name_ref),
offset: cir::immediates::Imm64::new(0),
colocated,
tls: false,
})
};
Ok(self.bl.ins().global_value(cir::types::I64, glob_ref))
}
Kind::Load if node.ty.is_aggregate(self.tys) => return,
Kind::Load => {
let ptr = self.value_of(node.inputs[1]);
Ok(self.bl.ins().load(node.ty.to_clif(self.tys), MemFlags::new(), ptr, 0))
}
Kind::Stre if node.ty.is_aggregate(self.tys) => {
let src = self.value_of(self.nodes[node.inputs[1]].inputs[1]);
let dest = self.value_of(node.inputs[2]);
self.bl.emit_small_memory_copy(
self.isa.frontend_config(),
dest,
src,
self.tys.size_of(node.ty) as _,
self.tys.align_of(node.ty) as _,
self.tys.align_of(node.ty) as _,
false,
MemFlags::new(),
);
return;
}
Kind::Stre => {
let value = self.value_of(node.inputs[1]);
let ptr = self.value_of(node.inputs[2]);
self.bl.ins().store(MemFlags::new(), value, ptr, 0);
return;
}
Kind::End | Kind::Phi | Kind::Arg | Kind::Mem | Kind::Loops | Kind::Join => return,
Kind::Assert { .. } => unreachable!(),
});
}
}
trait ToCondcodes {
fn to_int_cc(self, signed: bool) -> condcodes::IntCC;
fn to_float_cc(self) -> condcodes::FloatCC;
}
impl ToCondcodes for TokenKind {
fn to_int_cc(self, signed: bool) -> condcodes::IntCC {
use condcodes::IntCC as ICC;
match self {
Self::Lt if signed => ICC::SignedLessThan,
Self::Gt if signed => ICC::SignedGreaterThan,
Self::Le if signed => ICC::SignedLessThanOrEqual,
Self::Ge if signed => ICC::SignedGreaterThanOrEqual,
Self::Lt => ICC::UnsignedLessThan,
Self::Gt => ICC::UnsignedGreaterThan,
Self::Le => ICC::UnsignedLessThanOrEqual,
Self::Ge => ICC::UnsignedGreaterThanOrEqual,
Self::Eq => ICC::Equal,
Self::Ne => ICC::NotEqual,
_ => unreachable!(),
}
}
fn to_float_cc(self) -> condcodes::FloatCC {
use condcodes::FloatCC as FCC;
match self {
Self::Lt => FCC::LessThan,
Self::Gt => FCC::GreaterThan,
Self::Le => FCC::LessThanOrEqual,
Self::Ge => FCC::GreaterThanOrEqual,
Self::Eq => FCC::Equal,
Self::Ne => FCC::NotEqual,
_ => unreachable!(),
}
}
}
trait ToClifTy {
fn to_clif(self, cx: &hbty::Types) -> cir::Type;
}
impl ToClifTy for hbty::Id {
fn to_clif(self, cx: &hbty::Types) -> cir::Type {
debug_assert!(!self.is_aggregate(cx));
if self.is_integer() | self.is_pointer() | self.is_optional() || self == hbty::Id::BOOL {
cir::Type::int(cx.size_of(self) as u16 * 8).unwrap()
} else if self == hbty::Id::F32 {
cir::types::F32
} else if self == hbty::Id::F64 {
cir::types::F64
} else {
unimplemented!("{:?}", self)
}
}
}
#[derive(Default)]
struct Global {
module_id: Option<cm::DataId>,
}
#[derive(Default)]
struct FuncHeaders {
module_id: Option<cm::FuncId>,
alignment: u32,
code: Range<u32>,
relocs: Range<u32>,
external_names: Range<u32>,
}
#[derive(Default)]
struct Functions {
headers: EntVec<hbty::Func, FuncHeaders>,
code: Vec<u8>,
relocs: Vec<FinalizedMachReloc>,
external_names: Vec<UserExternalName>,
}
impl Functions {
fn push(&mut self, id: hbty::Func, func: &cir::Function, code: &MachBufferFinalized<Final>) {
self.headers.shadow(id.index() + 1);
self.headers[id] = FuncHeaders {
module_id: None,
alignment: code.alignment,
code: self.code.len() as u32..self.code.len() as u32 + code.data().len() as u32,
relocs: self.relocs.len() as u32..self.relocs.len() as u32 + code.relocs().len() as u32,
external_names: self.external_names.len() as u32
..self.external_names.len() as u32 + func.params.user_named_funcs().len() as u32,
};
self.code.extend(code.data());
self.relocs.extend(code.relocs().iter().cloned());
self.external_names.extend(func.params.user_named_funcs().values().cloned());
}
}
#[derive(Default)]
struct Assembler {
name: String,
frontier: Vec<hbty::Id>,
globals: Vec<hbty::Global>,
funcs: Vec<hbty::Func>,
}
#[derive(Debug)]
pub enum BackendCreationError {
UnsupportedTriplet(LookupError),
InvalidFlags(CodegenError),
UnsupportedModuleConfig(ModuleError),
InvalidFlag { key: String, value: String, err: SetError },
}
impl Display for BackendCreationError {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
BackendCreationError::UnsupportedTriplet(err) => {
write!(f, "Unsupported triplet: {}", err)
}
BackendCreationError::InvalidFlags(err) => {
write!(f, "Invalid flags: {}", err)
}
BackendCreationError::UnsupportedModuleConfig(err) => {
write!(f, "Unsupported module configuration: {}", err)
}
BackendCreationError::InvalidFlag { key, value, err } => {
write!(
f,
"Problem setting a '{key}' to '{value}': {err}\navailable flags: {}",
cc::settings::Flags::new(cc::settings::builder())
)
}
}
}
}
impl core::error::Error for BackendCreationError {}
impl From<LookupError> for BackendCreationError {
fn from(value: LookupError) -> Self {
Self::UnsupportedTriplet(value)
}
}
impl From<CodegenError> for BackendCreationError {
fn from(value: CodegenError) -> Self {
Self::InvalidFlags(value)
}
}
impl From<ModuleError> for BackendCreationError {
fn from(value: ModuleError) -> Self {
Self::UnsupportedModuleConfig(value)
}
}

@@ -1,310 +0,0 @@
// The classification code for the x86_64 ABI is taken from the clay language
// https://github.com/jckarter/clay/blob/db0bd2702ab0b6e48965cd85f8859bbd5f60e48e/compiler/externals.cpp
use {crate::AbiMeta, hblang::ty};
pub fn build_systemv_signature(
sig: hblang::ty::Sig,
types: &hblang::ty::Types,
signature: &mut cranelift_codegen::ir::Signature,
arg_lens: &mut Vec<AbiMeta>,
) -> bool {
let mut alloca = Alloca::new();
alloca.next(false, sig.ret, types, &mut signature.returns);
let stack_ret = signature.returns.len() == 1
&& signature.returns[0].purpose == cranelift_codegen::ir::ArgumentPurpose::StructReturn;
if stack_ret {
signature.params.append(&mut signature.returns);
arg_lens.push(AbiMeta { arg_count: signature.params.len(), trough_mem: true });
} else {
arg_lens.push(AbiMeta { arg_count: signature.returns.len(), trough_mem: false });
}
let mut args = sig.args.args();
while let Some(arg) = args.next_value(types) {
let prev = signature.params.len();
let trough_mem = alloca.next(true, arg, types, &mut signature.params);
arg_lens.push(AbiMeta { arg_count: signature.params.len() - prev, trough_mem });
}
stack_ret
}
/// Classification of "eightbyte" components.
// N.B., the order of the variants is from general to specific,
// such that `unify(a, b)` is the "smaller" of `a` and `b`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Class {
Int,
Sse,
SseUp,
}
#[derive(Clone, Copy, Debug)]
struct Memory;
// Currently supported vector size (AVX-512).
const LARGEST_VECTOR_SIZE: usize = 512;
const MAX_EIGHTBYTES: usize = LARGEST_VECTOR_SIZE / 64;
fn classify_arg(
cx: &hblang::ty::Types,
arg: hblang::ty::Id,
) -> Result<[Option<Class>; MAX_EIGHTBYTES], Memory> {
fn classify(
cx: &hblang::ty::Types,
layout: hblang::ty::Id,
cls: &mut [Option<Class>],
off: hblang::ty::Offset,
) -> Result<(), Memory> {
let size = cx.size_of(layout);
if off & (cx.align_of(layout) - 1) != 0 {
if size != 0 {
return Err(Memory);
}
return Ok(());
}
let mut c = match layout.expand() {
_ if size == 0 => return Ok(()),
_ if layout.is_integer() || layout.is_pointer() || layout == ty::Id::BOOL => Class::Int,
_ if layout.is_float() => Class::Sse,
hblang::ty::Kind::Struct(s) => {
for (f, foff) in hblang::ty::OffsetIter::new(s, cx).into_iter(cx) {
classify(cx, f.ty, cls, off + foff)?;
}
return Ok(());
}
hblang::ty::Kind::Tuple(tuple) => {
for (&ty, foff) in hblang::ty::OffsetIter::new(tuple, cx).into_iter(cx) {
classify(cx, ty, cls, off + foff)?;
}
return Ok(());
}
hblang::ty::Kind::Enum(_) => Class::Int,
hblang::ty::Kind::Union(union) => {
for f in cx.union_fields(union) {
classify(cx, f.ty, cls, off)?;
}
return Ok(());
}
hblang::ty::Kind::Slice(slice) if let Some(len) = cx.ins.slices[slice].len() => {
for i in 0..len as u32 {
classify(
cx,
cx.ins.slices[slice].elem,
cls,
off + i * cx.size_of(cx.ins.slices[slice].elem),
)?;
}
return Ok(());
}
hblang::ty::Kind::Slice(_) => {
classify(cx, hblang::ty::Id::UINT, cls, off)?;
classify(cx, hblang::ty::Id::UINT, cls, off + 8)?;
return Ok(());
}
hblang::ty::Kind::Opt(opt) => {
let base = cx.ins.opts[opt].base;
if cx.nieche_of(base).is_some() {
classify(cx, base, cls, off)?;
} else {
classify(cx, hblang::ty::Id::BOOL, cls, off)?;
classify(cx, base, cls, off + cx.align_of(base))?;
}
return Ok(());
}
ty => unimplemented!("{ty:?}"),
};
// Fill in `cls` for scalars (Int/Sse) and vectors (Sse).
let first = (off / 8) as usize;
let last = ((off + size - 1) / 8) as usize;
for cls in &mut cls[first..=last] {
*cls = Some(cls.map_or(c, |old| old.min(c)));
// Everything after the first Sse "eightbyte"
// component is the upper half of a register.
if c == Class::Sse {
c = Class::SseUp;
}
}
Ok(())
}
let size = cx.size_of(arg);
let n = ((size + 7) / 8) as usize;
if n > MAX_EIGHTBYTES {
return Err(Memory);
}
let mut cls = [None; MAX_EIGHTBYTES];
classify(cx, arg, &mut cls, 0)?;
if n > 2 {
if cls[0] != Some(Class::Sse) {
return Err(Memory);
}
if cls[1..n].iter().any(|&c| c != Some(Class::SseUp)) {
return Err(Memory);
}
} else {
let mut i = 0;
while i < n {
if cls[i] == Some(Class::SseUp) {
cls[i] = Some(Class::Sse);
} else if cls[i] == Some(Class::Sse) {
i += 1;
while i != n && cls[i] == Some(Class::SseUp) {
i += 1;
}
} else {
i += 1;
}
}
}
Ok(cls)
}
fn reg_component(
cls: &[Option<Class>],
i: &mut usize,
size: hblang::ty::Size,
) -> Option<cranelift_codegen::ir::Type> {
if *i >= cls.len() {
return None;
}
match cls[*i] {
None => None,
Some(Class::Int) => {
*i += 1;
Some(if size < 8 {
cranelift_codegen::ir::Type::int(size as u16 * 8).unwrap()
} else {
cranelift_codegen::ir::types::I64
})
}
Some(Class::Sse) => {
let vec_len =
1 + cls[*i + 1..].iter().take_while(|&&c| c == Some(Class::SseUp)).count();
*i += vec_len;
Some(if vec_len == 1 {
match size {
4 => cranelift_codegen::ir::types::F32,
_ => cranelift_codegen::ir::types::F64,
}
} else {
cranelift_codegen::ir::types::I64.by(vec_len as _).unwrap()
})
}
Some(c) => unreachable!("reg_component: unhandled class {:?}", c),
}
}
fn cast_target(
cls: &[Option<Class>],
size: hblang::ty::Size,
dest: &mut Vec<cranelift_codegen::ir::AbiParam>,
) {
let mut i = 0;
let lo = reg_component(cls, &mut i, size).unwrap();
let offset = 8 * (i as u32);
dest.push(cranelift_codegen::ir::AbiParam::new(lo));
if size > offset {
if let Some(hi) = reg_component(cls, &mut i, size - offset) {
dest.push(cranelift_codegen::ir::AbiParam::new(hi));
}
}
assert_eq!(reg_component(cls, &mut i, 0), None);
}
const MAX_INT_REGS: usize = 6; // RDI, RSI, RDX, RCX, R8, R9
const MAX_SSE_REGS: usize = 8; // XMM0-7
pub struct Alloca {
int_regs: usize,
sse_regs: usize,
}
impl Alloca {
pub fn new() -> Self {
Self { int_regs: MAX_INT_REGS, sse_regs: MAX_SSE_REGS }
}
pub fn next(
&mut self,
is_arg: bool,
arg: hblang::ty::Id,
cx: &hblang::ty::Types,
dest: &mut Vec<cranelift_codegen::ir::AbiParam>,
) -> bool {
if cx.size_of(arg) == 0 {
return false;
}
let mut cls_or_mem = classify_arg(cx, arg);
if is_arg {
if let Ok(cls) = cls_or_mem {
let mut needed_int = 0;
let mut needed_sse = 0;
for c in cls {
match c {
Some(Class::Int) => needed_int += 1,
Some(Class::Sse) => needed_sse += 1,
_ => {}
}
}
match (self.int_regs.checked_sub(needed_int), self.sse_regs.checked_sub(needed_sse))
{
(Some(left_int), Some(left_sse)) => {
self.int_regs = left_int;
self.sse_regs = left_sse;
}
_ => {
// Not enough registers for this argument, so it will be
// passed on the stack, but we only mark aggregates
// explicitly as indirect `byval` arguments, as LLVM will
// automatically put immediates on the stack itself.
if arg.is_aggregate(cx) {
cls_or_mem = Err(Memory);
}
}
}
}
}
match cls_or_mem {
Err(Memory) => {
if is_arg {
dest.push(cranelift_codegen::ir::AbiParam::new(
cranelift_codegen::ir::types::I64,
));
} else {
dest.push(cranelift_codegen::ir::AbiParam::special(
cranelift_codegen::ir::types::I64,
cranelift_codegen::ir::ArgumentPurpose::StructReturn,
));
}
true
}
Ok(ref cls) => {
// split into sized chunks passed individually
if arg.is_aggregate(cx) {
cast_target(cls, cx.size_of(arg), dest);
} else {
dest.push(cranelift_codegen::ir::AbiParam::new(
reg_component(cls, &mut 0, cx.size_of(arg)).unwrap(),
));
}
false
}
}
}
}

@@ -1,4 +1,4 @@
# The journey to an optimizing compiler # The journey to optimizing compiler
It's been years since I was continuously trying to make a compiler to implement the language of my dreams. The problem was, tho, that I wanted something similar to Rust, which, if you did not know, `rustc` far exceeded the one-million-lines-of-code mark some time ago, so implementing such a language would take me years if not decades, but I still tried it.
@@ -28,34 +28,4 @@ It took around 4 months to reimplement everything make make the optimal code loo
## How my understanding of optimizations changed
### Optimizations allow us to scale software
I need to admit, before writing a single-pass compiler and later upgrading it to an optimizing one, I took optimizations as some magic that makes code faster, and I honestly believed they were optional and that most of the hard work is done in the process of translating readable text to machine code. That is almost true, minus the readable part. If you want the code you write to perform well, with a compiler that translates your code from text to instructions as it's written, you will be forced to do everything modern optimizers do by hand, in your own code. TODO...
I need to admit: before writing a single-pass compiler and later upgrading it to an optimizing one, I thought optimizations only affected the quality of the final assembly emitted by the compiler. It never occurred to me that what optimizations actually do is reduce the impact of how you happen to write the code. In a single-pass compiler (with zero optimizations), the machine code reflects (a short sketch follows this list):
- the order of operations as written in the code
- whether a value was stored in intermediate locations
- the exact structure of the control flow and the exact points at which operations are placed
- how many times something is recomputed
- operations that only exist to convey intent to the reader of the source code
- and more I can't think of...
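
To make that concrete, here is a minimal Rust-flavored illustration (the same reasoning applies to hblang): with a single-pass backend the two functions below compile to visibly different code, even though they compute the same thing.

```rust
// Both functions compute (x + y) * (x + y). A single-pass backend emits exactly
// what is written: `squared_sum_a` keeps `sum` in a stack slot and reuses it,
// while `squared_sum_b` computes `x + y` twice. An optimizer erases the difference.
fn squared_sum_a(x: u32, y: u32) -> u32 {
    let sum = x + y;
    sum * sum
}

fn squared_sum_b(x: u32, y: u32) -> u32 {
    (x + y) * (x + y) // the common sub-expression is recomputed, as written
}
```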
If you took some code you wrote and then modified it to obfuscate these aspects (relative to the original code), you would be doing a subset of what an optimizing compiler does. Of course, a good compiler tries hard to improve the metrics it is optimizing for; it would:
- reorder operations to allow the CPU to parallelize them
- remove needless stores, or store values directly to places you can't express in code
- pull operations out of loops and into branches (if it can)
- find all common sub-expressions and compute them only once
- fold constants as much as possible and use obscure tricks to replace slow instructions whenever any of the operands are constant
- and more...
In the end, compiler optimizations try to reduce the correlation between how the code happens to be written and how well it performs, which is extremely important when you want humans to read the code.
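
As a rough before/after sketch (Rust-flavored and only conceptual, not literal compiler output), the rewrites listed above turn the first function into something shaped like the second:

```rust
// Before: written for readability.
fn checksum(data: &[u32]) -> u32 {
    let mut acc = 0u32;
    for i in 0..data.len() {
        // loop-invariant constant, recomputed every iteration as written
        let mask = (1u32 << 16) - 1;
        // common sub-expression: `data[i]` is loaded twice
        acc = acc.wrapping_add(data[i] & mask).wrapping_add(data[i] >> 16);
    }
    acc
}

// After (conceptually): the constant is folded, the invariant hoisted, the load
// done once, and the bounds check disappears because the iterator already
// guarantees the index is in range.
fn checksum_opt(data: &[u32]) -> u32 {
    let mut acc = 0u32;
    for &word in data {
        acc = acc.wrapping_add(word & 0xFFFF).wrapping_add(word >> 16);
    }
    acc
}
```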
### Optimizing compilers know more than you
Optimizing code is a search problem: an optimizer searches the code for patterns that can be rewritten into something more practical for the computer, while preserving the observable behavior of the program. This means it needs enough context about the code to not make a mistake. In fact, the optimizer has so much context that it is able to determine your code is useless. But wait, didn't you write the code because you needed it to do something? Maybe your intention was to break out of the loop once you are done, but the optimizer looked at the code and said, "great, we are so lucky that this integer is always small enough to miss this check by one, DELETE", and then it goes "jackpot, since this loop is now infinite, we don't need the code after it, DELETE". Notice that the optimizer is eager to delete dead code; it did not ask you "Brah, why did you place all your code after an infinite loop?". This is just an example; there are many more cases where modern optimizers delete all your code because they proved, without ever running it, that it does something invalid.
It's stupid, but it's the world we live in: optimizers are usually a black box you import and feed the code in a format they understand; they then proceed to optimize it, and if they find a glaring bug they won't tell you, god forbid, they will just mangle the code in unspecified ways and spit out what's left. Before writing an optimizer, I did not know this can happen, and I did not know this is a problem I pay for with my time, spent figuring out why nothing happens when I run the program.
But wait, it's worse! Since optimizers won't ever share the fact that you are stupid, we end up with other people painstakingly writing complex linters that do a shitty job of detecting the things that matter and instead whine about style and other bullcrap (and they suck even at that). If the people who write linters and the people who write optimizers swapped roles, I would be ranting about optimizers instead.
And so, this is the area where I want to innovate: let's report the dead code to the frontend, and let the compiler frontend filter out the noise and show the relevant information in its diagnostics. Refuse to compile the program if you write `i /= 0`. Refuse to compile if you write `arr[arr.len]`. This is the level of stupid the optimizer sees once it normalizes your code, yet it proceeds to protect your feelings. My goal is for hblang to relay this to you as much as possible. If we can query for optimizations, we can query for bugs too.
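
A hand-wavy sketch of the kind of hook this argues for, with made-up node and diagnostic types (none of these names exist in this repository): when the folding pass can already prove a division by zero or an out-of-bounds index, report it instead of silently exploiting it.

```rust
// All names here are invented for illustration; hblang's real IR and
// diagnostic plumbing look different.
enum Node {
    Const(i64),
    Div { lhs: usize, rhs: usize },
    Index { len: usize, idx: usize },
}

enum Finding {
    DivisionByZero,
    OutOfBounds { idx: i64, len: i64 },
}

/// Runs alongside constant folding: anything the optimizer can prove invalid is
/// handed to the frontend instead of being folded into "unreachable" and deleted.
fn check_node(nodes: &[Node], node: &Node) -> Option<Finding> {
    // Resolve an input to a known constant, if folding already proved one.
    let konst = |i: usize| match nodes[i] {
        Node::Const(v) => Some(v),
        _ => None,
    };
    match node {
        Node::Div { rhs, .. } if konst(*rhs) == Some(0) => Some(Finding::DivisionByZero),
        Node::Index { len, idx } => match (konst(*len), konst(*idx)) {
            (Some(len), Some(idx)) if idx >= len => Some(Finding::OutOfBounds { idx, len }),
            _ => None,
        },
        _ => None,
    }
}
```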

View file

@ -9,7 +9,7 @@ use {
backend::hbvm::HbvmBackend, backend::hbvm::HbvmBackend,
son::{Codegen, CodegenCtx}, son::{Codegen, CodegenCtx},
ty::Module, ty::Module,
utils::Ent, Ent,
}, },
}; };

View file

@ -1,47 +0,0 @@
InitWindow := fn(w: uint, h: uint, name: ^u8): uint @import()
WindowShouldClose := fn(): bool @import()
BeginDrawing := fn(): void @import()
EndDrawing := fn(): void @import()
DrawRectangleV := fn(pos: Vec2, size: Vec2, color: Color): void @import()
DrawRectangle := fn(a: uint, b: uint, c: uint, d: uint, color: Color): void @import()
ClearBackground := fn(color: Color): void @import()
SetTargetFPS := fn(target: uint): void @import()
GetFrameTime := fn(): f32 @import()
Vec2 := struct {x: f32, y: f32}
Color := struct {r: u8, g: u8, b: u8, a: u8}
$W := 800
$H := 600
main := fn(): uint {
_ = InitWindow(W, H, "whawee\0".ptr)
SetTargetFPS(60)
pos := Vec2.(100, 100)
vel := Vec2.(300, 300)
size := Vec2.(100, 100)
color := Color.(17, 255, 17, 255)
loop if WindowShouldClose() break else {
BeginDrawing()
ClearBackground(.(0, 0, 0, 255))
DrawRectangleV(pos, size, color)
pos += vel * .(GetFrameTime(), GetFrameTime())
if pos.x < 0 | pos.x + size.x > W {
vel.x *= -1
color += .(32, 11, 20, 0)
}
if pos.y < 0 | pos.y + size.y > H {
vel.y *= -1
color += .(32, 11, 20, 0)
}
EndDrawing()
}
return 0
}

View file

@ -1,4 +0,0 @@
#!/bin/bash
DIR=$(dirname $0)
cd $DIR
cargo run -p hbc main.hb > out.o && gcc -o main out.o -lraylib -lm -ldl -lpthread -lrt -lGL -lX11 && ./main

View file

@ -3,13 +3,17 @@ name = "hblang"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2021"
[[bin]]
name = "hbc"
path = "src/main.rs"
[[bin]] [[bin]]
name = "fuzz" name = "fuzz"
path = "src/fuzz_main.rs" path = "src/fuzz_main.rs"
[dependencies] [dependencies]
hbbytecode = { workspace = true, features = ["disasm"] } hbbytecode = { workspace = true, features = ["disasm"] }
hbvm = { workspace = true, features = ["nightly", "alloc"] } hbvm = { workspace = true, features = ["nightly"] }
hashbrown = { version = "0.15.0", default-features = false, features = ["raw-entry"] } hashbrown = { version = "0.15.0", default-features = false, features = ["raw-entry"] }
log = "0.4.22" log = "0.4.22"

File diff suppressed because one or more lines are too long

View file

@ -1,35 +0,0 @@
use std::{fmt::Write, iter};
fn main() {
const TEST_FILE: &str = "src/testcases.rs";
const INPUT: &str = include_str!("./README.md");
let mut out = String::new();
for (name, code) in block_iter(INPUT) {
let name = name.replace(' ', "_");
_ = writeln!(
out,
"#[test] fn {name}() {{ run_codegen_test(\"{name}\", r##\"{code}\"##) }}"
);
}
std::fs::write(TEST_FILE, out).unwrap();
}
fn block_iter(mut input: &str) -> impl Iterator<Item = (&str, &str)> {
const CASE_PREFIX: &str = "#### ";
const CASE_SUFFIX: &str = "\n```hb";
iter::from_fn(move || loop {
let pos = input.find(CASE_PREFIX)?;
input = unsafe { input.get_unchecked(pos + CASE_PREFIX.len()..) };
let Some((test_name, rest)) = input.split_once(CASE_SUFFIX) else { continue };
if !test_name.chars().all(|c| c.is_alphanumeric() || c == '_') {
continue;
}
input = rest;
let (body, rest) = input.split_once("```").unwrap_or((input, ""));
input = rest;
break Some((test_name, body));
})
}

5
lang/command-help.txt Normal file
View file

@ -0,0 +1,5 @@
--fmt - format all imported source files
--fmt-stdout - don't write the formatted file, print it to stdout instead
--dump-asm - output assembly instead of raw code (the assembly is more for debugging the compiler)
--threads <1...> - number of extra threads compiler can use [default: 0]
--path-resolver <name> - choose between builtin path resolvers, options are: ableos
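
For context, these flags are consumed by `Options::from_args`; a condensed driver, mirroring the `lang/src/main.rs` added later in this diff, looks roughly like this:

```rust
// Condensed from lang/src/main.rs in this diff; only the flags listed in
// command-help.txt are exercised here.
fn compile(out: &mut Vec<u8>, warnings: &mut String) -> std::io::Result<()> {
    let args = std::env::args().collect::<Vec<_>>();
    let args = args.iter().map(String::as_str).collect::<Vec<_>>();
    let resolvers = &[("ableos", hblang::ABLEOS_PATH_RESOLVER)];
    // Understands --fmt, --fmt-stdout, --dump-asm, --threads <n>, --path-resolver <name>.
    let opts = hblang::Options::from_args(&args, out, resolvers)?;
    // First non-flag argument is the root file, defaulting to main.hb.
    let file = args.iter().filter(|a| !a.starts_with('-')).nth(1).copied().unwrap_or("main.hb");
    hblang::run_compiler(file, opts, out, warnings)
}
```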

View file

@ -8,10 +8,9 @@ use {
utils::{EntSlice, EntVec}, utils::{EntSlice, EntVec},
}, },
alloc::{boxed::Box, collections::BTreeMap, string::String, vec::Vec}, alloc::{boxed::Box, collections::BTreeMap, string::String, vec::Vec},
core::{assert_matches::debug_assert_matches, error, mem, ops::Range}, core::{assert_matches::debug_assert_matches, mem, ops::Range},
hbbytecode::{self as instrs, *}, hbbytecode::{self as instrs, *},
reg::Reg, reg::Reg,
std::borrow::ToOwned,
}; };
mod regalloc; mod regalloc;
@ -107,8 +106,6 @@ pub struct HbvmBackend {
offsets: Vec<Offset>, offsets: Vec<Offset>,
} }
pub const TARGET_TRIPLE: &str = "unknown-virt-unknown";
impl HbvmBackend { impl HbvmBackend {
fn emit(&mut self, instr: (usize, [u8; instrs::MAX_SIZE])) { fn emit(&mut self, instr: (usize, [u8; instrs::MAX_SIZE])) {
emit(&mut self.code, instr); emit(&mut self.code, instr);
@ -116,22 +113,12 @@ impl HbvmBackend {
} }
impl Backend for HbvmBackend { impl Backend for HbvmBackend {
fn triple(&self) -> String { fn assemble_bin(&mut self, entry: ty::Func, types: &Types, to: &mut Vec<u8>) {
TARGET_TRIPLE.to_owned()
}
fn assemble_bin(
&mut self,
entry: ty::Func,
types: &Types,
files: &EntSlice<Module, parser::Ast>,
to: &mut Vec<u8>,
) {
to.extend([0u8; HEADER_SIZE]); to.extend([0u8; HEADER_SIZE]);
binary_prelude(to); binary_prelude(to);
let AssemblySpec { code_length, data_length, entry } = let AssemblySpec { code_length, data_length, entry } =
self.assemble_reachable(entry, types, files, to); self.assemble_reachable(entry, types, to);
let exe = AbleOsExecutableHeader { let exe = AbleOsExecutableHeader {
magic_number: [0x15, 0x91, 0xD2], magic_number: [0x15, 0x91, 0xD2],
@ -151,7 +138,6 @@ impl Backend for HbvmBackend {
&mut self, &mut self,
from: ty::Func, from: ty::Func,
types: &Types, types: &Types,
_files: &EntSlice<Module, parser::Ast>,
to: &mut Vec<u8>, to: &mut Vec<u8>,
) -> AssemblySpec { ) -> AssemblySpec {
debug_assert!(self.asm.frontier.is_empty()); debug_assert!(self.asm.frontier.is_empty());
@ -229,13 +215,13 @@ impl Backend for HbvmBackend {
types: &'a Types, types: &'a Types,
files: &'a EntSlice<Module, parser::Ast>, files: &'a EntSlice<Module, parser::Ast>,
output: &mut String, output: &mut String,
) -> Result<(), alloc::boxed::Box<dyn error::Error + Send + Sync + 'a>> { ) -> Result<(), hbbytecode::DisasmError<'a>> {
use hbbytecode::DisasmItem; use hbbytecode::DisasmItem;
let functions = types let functions = types
.ins .ins
.funcs .funcs
.values() .iter()
.zip(self.funcs.values()) .zip(self.funcs.iter())
.filter(|(_, f)| f.offset != u32::MAX) .filter(|(_, f)| f.offset != u32::MAX)
.map(|(f, fd)| { .map(|(f, fd)| {
let name = if f.file != Module::default() { let name = if f.file != Module::default() {
@ -250,8 +236,8 @@ impl Backend for HbvmBackend {
types types
.ins .ins
.globals .globals
.values() .iter()
.zip(self.globals.values()) .zip(self.globals.iter())
.filter(|(_, g)| g.offset != u32::MAX) .filter(|(_, g)| g.offset != u32::MAX)
.map(|(g, gd)| { .map(|(g, gd)| {
let name = if g.file == Module::default() { let name = if g.file == Module::default() {
@ -264,7 +250,7 @@ impl Backend for HbvmBackend {
}), }),
) )
.collect::<BTreeMap<_, _>>(); .collect::<BTreeMap<_, _>>();
hbbytecode::disasm(&mut sluce, &functions, output, eca_handler).map_err(Into::into) hbbytecode::disasm(&mut sluce, &functions, output, eca_handler)
} }
fn emit_ct_body( fn emit_ct_body(
@ -328,7 +314,7 @@ impl Backend for HbvmBackend {
&& self && self
.jump_relocs .jump_relocs
.last() .last()
.is_none_or(|&(r, _)| self.offsets[r as usize] as usize != self.code.len()) .map_or(true, |&(r, _)| self.offsets[r as usize] as usize != self.code.len())
{ {
self.code.truncate(self.code.len() - 5); self.code.truncate(self.code.len() - 5);
self.ret_relocs.pop(); self.ret_relocs.pop();
@ -620,8 +606,9 @@ impl TokenKind {
Some(ops[size.ilog2() as usize]) Some(ops[size.ilog2() as usize])
} }
fn unop(&self, dst: ty::Id, src: ty::Id, tys: &Types) -> Option<fn(u8, u8) -> EncodedInstr> { fn unop(&self, dst: ty::Id, src: ty::Id) -> Option<fn(u8, u8) -> EncodedInstr> {
let src_idx = tys.size_of(src).ilog2() as usize; let src_idx =
src.simple_size().unwrap_or_else(|| panic!("{:?}", src.expand())).ilog2() as usize;
Some(match self { Some(match self {
Self::Sub => [ Self::Sub => [
|a, b| sub8(a, reg::ZERO, b), |a, b| sub8(a, reg::ZERO, b),
@ -668,14 +655,6 @@ enum PLoc {
Ref(Reg, u32), Ref(Reg, u32),
} }
impl PLoc {
fn reg(self) -> u8 {
match self {
PLoc::Reg(r, _) | PLoc::WideReg(r, _) | PLoc::Ref(r, _) => r,
}
}
}
struct ParamAlloc(Range<Reg>); struct ParamAlloc(Range<Reg>);
impl ParamAlloc { impl ParamAlloc {
@ -711,7 +690,42 @@ fn binary_prelude(to: &mut Vec<u8>) {
#[derive(Default)] #[derive(Default)]
pub struct LoggedMem { pub struct LoggedMem {
pub mem: hbvm::mem::HostMemory, pub mem: hbvm::mem::HostMemory,
logger: hbvm::mem::InstrLogger, op_buf: Vec<hbbytecode::Oper>,
disp_buf: String,
prev_instr: Option<hbbytecode::Instr>,
}
impl LoggedMem {
unsafe fn display_instr<T>(&mut self, instr: hbbytecode::Instr, addr: hbvm::mem::Address) {
let novm: *const hbvm::Vm<Self, 0> = core::ptr::null();
let offset = core::ptr::addr_of!((*novm).memory) as usize;
let regs = unsafe {
&*core::ptr::addr_of!(
(*(((self as *mut _ as *mut u8).sub(offset)) as *const hbvm::Vm<Self, 0>))
.registers
)
};
let mut bytes = core::slice::from_raw_parts(
(addr.get() - 1) as *const u8,
core::mem::size_of::<T>() + 1,
);
use core::fmt::Write;
hbbytecode::parse_args(&mut bytes, instr, &mut self.op_buf).unwrap();
debug_assert!(bytes.is_empty());
self.disp_buf.clear();
write!(self.disp_buf, "{:<10}", format!("{instr:?}")).unwrap();
for (i, op) in self.op_buf.drain(..).enumerate() {
if i != 0 {
write!(self.disp_buf, ", ").unwrap();
}
write!(self.disp_buf, "{op:?}").unwrap();
if let hbbytecode::Oper::R(r) = op {
write!(self.disp_buf, "({})", regs[r as usize].0).unwrap()
}
}
log::trace!("read-typed: {:x}: {}", addr.get(), self.disp_buf);
}
} }
impl hbvm::mem::Memory for LoggedMem { impl hbvm::mem::Memory for LoggedMem {
@ -744,13 +758,19 @@ impl hbvm::mem::Memory for LoggedMem {
} }
unsafe fn prog_read<T: Copy + 'static>(&mut self, addr: hbvm::mem::Address) -> T { unsafe fn prog_read<T: Copy + 'static>(&mut self, addr: hbvm::mem::Address) -> T {
self.mem.prog_read(addr) if log::log_enabled!(log::Level::Trace) {
} if core::any::TypeId::of::<u8>() == core::any::TypeId::of::<T>() {
if let Some(instr) = self.prev_instr {
self.display_instr::<()>(instr, addr);
}
self.prev_instr = hbbytecode::Instr::try_from(*(addr.get() as *const u8)).ok();
} else {
let instr = self.prev_instr.take().unwrap();
self.display_instr::<T>(instr, addr);
}
}
fn log_instr(&mut self, at: hbvm::mem::Address, regs: &[hbvm::value::Value]) { self.mem.prog_read(addr)
log::trace!("read-typed: {:x}: {}", at.get(), unsafe {
self.logger.display_instr(at, regs)
});
} }
} }

View file

@ -1,5 +1,4 @@
use { use {
super::ParamAlloc,
crate::{ crate::{
backend::hbvm::{ backend::hbvm::{
reg::{self, Reg}, reg::{self, Reg},
@ -24,7 +23,7 @@ impl HbvmBackend {
tys: &Types, tys: &Types,
files: &EntSlice<Module, parser::Ast>, files: &EntSlice<Module, parser::Ast>,
) -> (usize, bool) { ) -> (usize, bool) {
let tail = FunctionBuilder::build(nodes, tys, &mut self.ralloc, sig); let tail = Function::build(nodes, tys, &mut self.ralloc, sig);
let strip_load = |value| match nodes[value].kind { let strip_load = |value| match nodes[value].kind {
Kind::Load { .. } if nodes[value].ty.loc(tys) == Loc::Stack => nodes[value].inputs[1], Kind::Load { .. } if nodes[value].ty.loc(tys) == Loc::Stack => nodes[value].inputs[1],
@ -33,9 +32,7 @@ impl HbvmBackend {
let mut res = mem::take(&mut self.ralloc); let mut res = mem::take(&mut self.ralloc);
let special_reg_count = 13u8; Regalloc::run(nodes, tys, &mut res);
Regalloc::run(nodes, tys, &mut res, special_reg_count as _);
'_open_function: { '_open_function: {
self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, 0)); self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, 0));
@ -43,12 +40,12 @@ impl HbvmBackend {
} }
if let Some(PLoc::Ref(..)) = tys.parama(sig.ret).0 { if let Some(PLoc::Ref(..)) = tys.parama(sig.ret).0 {
res.node_to_reg[MEM as usize] = res.general_bundles.len() as u8 + 1; res.node_to_reg[MEM as usize] = res.bundles.len() as u8 + 1;
res.general_bundles.push(Bundle::default()); res.bundles.push(Bundle::new(0));
} }
let reg_offset = if tail { special_reg_count } else { reg::RET_ADDR + 1 }; let reg_offset = if tail { reg::RET + 12 } else { reg::RET_ADDR + 1 };
let bundle_count = res.general_bundles.len() + (reg_offset as usize); let bundle_count = res.bundles.len() + (reg_offset as usize);
res.node_to_reg.iter_mut().filter(|r| **r != 0).for_each(|r| { res.node_to_reg.iter_mut().filter(|r| **r != 0).for_each(|r| {
if *r == u8::MAX { if *r == u8::MAX {
@ -328,7 +325,6 @@ impl HbvmBackend {
node.ty, node.ty,
tys.inner_of(nodes[node.inputs[1]].ty) tys.inner_of(nodes[node.inputs[1]].ty)
.unwrap_or(nodes[node.inputs[1]].ty), .unwrap_or(nodes[node.inputs[1]].ty),
tys,
) )
.unwrap_or_else(|| { .unwrap_or_else(|| {
panic!( panic!(
@ -396,9 +392,8 @@ impl HbvmBackend {
todo!("unhandled operator: {op}"); todo!("unhandled operator: {op}");
} }
} }
Kind::Call { args, func, .. } => { Kind::Call { args, func } => {
let (ret, mut parama) = tys.parama(node.ty); let (ret, mut parama) = tys.parama(node.ty);
debug_assert!(node.ty != ty::Id::NEVER || ret.is_none());
if let Some(PLoc::Ref(r, ..)) = ret { if let Some(PLoc::Ref(r, ..)) = ret {
self.emit(instrs::cp(r, atr(*node.inputs.last().unwrap()))) self.emit(instrs::cp(r, atr(*node.inputs.last().unwrap())))
} }
@ -438,15 +433,12 @@ impl HbvmBackend {
self.emit(instrs::st(r, atr(*node.inputs.last().unwrap()), 0, size)); self.emit(instrs::st(r, atr(*node.inputs.last().unwrap()), 0, size));
} }
//match ret { match ret {
// Some(PLoc::WideReg(..)) => {} Some(PLoc::WideReg(..)) => {}
// Some(PLoc::Reg(..)) if node.ty.loc(tys) == Loc::Stack => {} Some(PLoc::Reg(..)) if node.ty.loc(tys) == Loc::Stack => {}
// Some(PLoc::Reg(r, ..)) => self.emit_cp(atr(nid), r), Some(PLoc::Reg(r, ..)) => self.emit_cp(atr(nid), r),
// None | Some(PLoc::Ref(..)) => {} None | Some(PLoc::Ref(..)) => {}
//} }
}
Kind::RetVal => {
self.emit_cp(atr(nid), reg::RET);
} }
Kind::Global { global } => { Kind::Global { global } => {
let reloc = Reloc::new(self.code.len(), 3, 4); let reloc = Reloc::new(self.code.len(), 3, 4);
@ -525,7 +517,7 @@ impl HbvmBackend {
if tail { if tail {
bundle_count.saturating_sub(reg::RET_ADDR as _) bundle_count.saturating_sub(reg::RET_ADDR as _)
} else { } else {
self.ralloc.general_bundles.len() self.ralloc.bundles.len()
}, },
tail, tail,
) )
@ -538,7 +530,7 @@ impl HbvmBackend {
} }
} }
struct FunctionBuilder<'a> { struct Function<'a> {
sig: Sig, sig: Sig,
tail: bool, tail: bool,
nodes: &'a Nodes, nodes: &'a Nodes,
@ -546,7 +538,7 @@ struct FunctionBuilder<'a> {
func: &'a mut Res, func: &'a mut Res,
} }
impl core::fmt::Debug for FunctionBuilder<'_> { impl core::fmt::Debug for Function<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
for block in &self.func.blocks { for block in &self.func.blocks {
writeln!(f, "{:?}", self.nodes[block.entry].kind)?; writeln!(f, "{:?}", self.nodes[block.entry].kind)?;
@ -559,7 +551,7 @@ impl core::fmt::Debug for FunctionBuilder<'_> {
} }
} }
impl<'a> FunctionBuilder<'a> { impl<'a> Function<'a> {
fn build(nodes: &'a Nodes, tys: &'a Types, func: &'a mut Res, sig: Sig) -> bool { fn build(nodes: &'a Nodes, tys: &'a Types, func: &'a mut Res, sig: Sig) -> bool {
func.blocks.clear(); func.blocks.clear();
func.instrs.clear(); func.instrs.clear();
@ -678,21 +670,17 @@ impl<'a> FunctionBuilder<'a> {
self.emit_node(o); self.emit_node(o);
} }
} }
Kind::Call { func, unreachable, .. } => { Kind::Call { func, .. } => {
self.tail &= func == ty::Func::ECA; self.tail &= func == ty::Func::ECA;
if unreachable { self.add_instr(nid);
self.close_block(nid);
self.emit_node(node.outputs[0]); for &o in node.outputs.iter().rev() {
} else { if self.nodes[o].inputs[0] == nid
self.add_instr(nid); || (matches!(self.nodes[o].kind, Kind::Loop | Kind::Region)
for &o in node.outputs.iter().rev() { && self.nodes[o].inputs[1] == nid)
if self.nodes[o].inputs[0] == nid {
|| (matches!(self.nodes[o].kind, Kind::Loop | Kind::Region) self.emit_node(o);
&& self.nodes[o].inputs[1] == nid)
{
self.emit_node(o);
}
} }
} }
} }
@ -703,7 +691,6 @@ impl<'a> FunctionBuilder<'a> {
| Kind::Global { .. } | Kind::Global { .. }
| Kind::Load { .. } | Kind::Load { .. }
| Kind::Stre | Kind::Stre
| Kind::RetVal
| Kind::Stck => self.add_instr(nid), | Kind::Stck => self.add_instr(nid),
Kind::End | Kind::Phi | Kind::Arg | Kind::Mem | Kind::Loops | Kind::Join => {} Kind::End | Kind::Phi | Kind::Arg | Kind::Mem | Kind::Loops | Kind::Join => {}
Kind::Assert { .. } => unreachable!(), Kind::Assert { .. } => unreachable!(),
@ -774,13 +761,7 @@ impl Nodes {
nid nid
} }
fn uses_of( fn uses_of(&self, nid: Nid, types: &Types, stack: &mut Vec<Nid>, buf: &mut Vec<(Nid, Nid)>) {
&self,
nid: Nid,
types: &Types,
stack: &mut Vec<Nid>,
buf: &mut Vec<(Nid, Nid, Reg)>,
) {
debug_assert!(stack.is_empty()); debug_assert!(stack.is_empty());
debug_assert!(buf.is_empty()); debug_assert!(buf.is_empty());
@ -796,38 +777,13 @@ impl Nodes {
continue; continue;
} }
if self.is_unlocked(o) { if self.is_unlocked(o) {
buf.push((self.use_block_of(exp, o), o, self.use_reg_of(exp, o))); buf.push((self.use_block_of(exp, o), o));
} else { } else {
stack.push(o); stack.push(o);
} }
} }
} }
} }
#[expect(unused)]
fn init_loc_of(&self, def: Nid, types: &Types) -> Reg {
if self[def].kind == Kind::Arg {
let mut parama = ParamAlloc(0..11);
let (_, ploc) = self[VOID]
.outputs
.iter()
.skip(ARG_START)
.map(|&n| (n, parama.next(self[n].ty, types)))
.find(|&(n, _)| n == def)
.unwrap();
return ploc.unwrap().reg();
}
255
}
#[expect(unused)]
fn use_reg_of(&self, def: Nid, usage: Nid) -> Reg {
//if matches!(self[usage].kind, Kind::Return { .. }) {}
255
}
} }
struct Regalloc<'a> { struct Regalloc<'a> {
@ -850,35 +806,27 @@ impl<'a> Regalloc<'a> {
self.res.backrefs[nid as usize] self.res.backrefs[nid as usize]
} }
fn run(ctx: &'a Nodes, tys: &'a Types, res: &'a mut Res, special_count: usize) { fn run(ctx: &'a Nodes, tys: &'a Types, res: &'a mut Res) {
Self { nodes: ctx, tys, res }.run_low(special_count); Self { nodes: ctx, tys, res }.run_low();
} }
fn run_low(&mut self, #[expect(unused)] special_count: usize) { fn run_low(&mut self) {
self.res.general_bundles.clear(); self.res.bundles.clear();
self.res.node_to_reg.clear(); self.res.node_to_reg.clear();
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
self.res.marked.clear(); self.res.marked.clear();
self.res.node_to_reg.resize(self.nodes.vreg_count(), 0); self.res.node_to_reg.resize(self.nodes.vreg_count(), 0);
self.res.call_set.clear();
for (i, &instr) in self.res.instrs.iter().enumerate() {
if self.nodes[instr].kind.is_call() {
self.res.call_set.add_one(i);
}
}
debug_assert!(self.res.dfs_buf.is_empty()); debug_assert!(self.res.dfs_buf.is_empty());
let mut uses_buf = Vec::new(); let mut uses_buf = Vec::new();
let mut range_buf = Vec::new(); let mut bundle = Bundle::new(self.res.instrs.len());
let mut bundle = Bundle::default();
self.res.visited.clear(self.nodes.len()); self.res.visited.clear(self.nodes.len());
for i in (0..self.res.blocks.len()).rev() { for i in (0..self.res.blocks.len()).rev() {
for [a, rest @ ..] in self.nodes.phi_inputs_of(self.res.blocks[i].entry) { for [a, rest @ ..] in self.nodes.phi_inputs_of(self.res.blocks[i].entry) {
if self.res.visited.set(a) { if self.res.visited.set(a) {
self.append_bundle(a, &mut bundle, None, &mut uses_buf, &mut range_buf); self.append_bundle(a, &mut bundle, None, &mut uses_buf);
} }
for r in rest { for r in rest {
@ -891,7 +839,6 @@ impl<'a> Regalloc<'a> {
&mut bundle, &mut bundle,
Some(self.res.node_to_reg[a as usize] as usize - 1), Some(self.res.node_to_reg[a as usize] as usize - 1),
&mut uses_buf, &mut uses_buf,
&mut range_buf,
); );
} }
} }
@ -902,7 +849,7 @@ impl<'a> Regalloc<'a> {
if self.nodes[inst].has_no_value() || self.res.visited.get(inst) || inst == 0 { if self.nodes[inst].has_no_value() || self.res.visited.get(inst) || inst == 0 {
continue; continue;
} }
self.append_bundle(inst, &mut bundle, None, &mut uses_buf, &mut range_buf); self.append_bundle(inst, &mut bundle, None, &mut uses_buf);
} }
self.res.instrs = instrs; self.res.instrs = instrs;
} }
@ -912,16 +859,12 @@ impl<'a> Regalloc<'a> {
inst: Nid, inst: Nid,
tmp: &mut Bundle, tmp: &mut Bundle,
prefered: Option<usize>, prefered: Option<usize>,
uses_buf: &mut Vec<(Nid, Nid, Reg)>, uses_buf: &mut Vec<(Nid, Nid)>,
range_buf: &mut Vec<Range<usize>>,
) { ) {
let dom = self.nodes.idom_of(inst); let dom = self.nodes.idom_of(inst);
self.res.dfs_seem.clear(self.nodes.len()); self.res.dfs_seem.clear(self.nodes.len());
self.nodes.uses_of(inst, self.tys, &mut self.res.dfs_buf, uses_buf); self.nodes.uses_of(inst, self.tys, &mut self.res.dfs_buf, uses_buf);
let mut prefered_reg = reg::ZERO; for (cursor, uinst) in uses_buf.drain(..) {
for (cursor, uinst, reg) in uses_buf.drain(..) {
prefered_reg = prefered_reg.min(reg);
if !self.res.dfs_seem.set(uinst) { if !self.res.dfs_seem.set(uinst) {
continue; continue;
} }
@ -951,22 +894,8 @@ impl<'a> Regalloc<'a> {
range.end = new; range.end = new;
debug_assert!(range.start < range.end, "{:?} {inst} {uinst}", range); debug_assert!(range.start < range.end, "{:?} {inst} {uinst}", range);
range_buf.push(range)
});
range_buf.sort_unstable_by_key(|r| r.start);
range_buf.dedup_by(|a, b| {
if b.end == a.start {
b.end = a.end;
true
} else {
false
}
});
for range in range_buf.drain(..) {
tmp.add(range); tmp.add(range);
} });
} }
if tmp.is_empty() { if tmp.is_empty() {
@ -975,23 +904,23 @@ impl<'a> Regalloc<'a> {
} }
if let Some(prefered) = prefered if let Some(prefered) = prefered
&& !self.res.general_bundles[prefered].overlaps(tmp) && !self.res.bundles[prefered].overlaps(tmp)
{ {
self.res.general_bundles[prefered].merge(tmp); self.res.bundles[prefered].merge(tmp);
tmp.clear(); tmp.clear();
self.res.node_to_reg[inst as usize] = prefered as Reg + 1; self.res.node_to_reg[inst as usize] = prefered as Reg + 1;
return; return;
} }
match self.res.general_bundles.iter_mut().enumerate().find(|(_, b)| !b.overlaps(tmp)) { match self.res.bundles.iter_mut().enumerate().find(|(_, b)| !b.overlaps(tmp)) {
Some((i, other)) => { Some((i, other)) => {
other.merge(tmp); other.merge(tmp);
tmp.clear(); tmp.clear();
self.res.node_to_reg[inst as usize] = i as Reg + 1; self.res.node_to_reg[inst as usize] = i as Reg + 1;
} }
None => { None => {
self.res.general_bundles.push(tmp.take()); self.res.bundles.push(tmp.take());
self.res.node_to_reg[inst as usize] = self.res.general_bundles.len() as Reg; self.res.node_to_reg[inst as usize] = self.res.bundles.len() as Reg;
} }
} }
} }
@ -1042,8 +971,7 @@ pub(super) struct Res {
instrs: Vec<Nid>, instrs: Vec<Nid>,
backrefs: Vec<u16>, backrefs: Vec<u16>,
general_bundles: Vec<Bundle>, bundles: Vec<Bundle>,
call_set: Bundle,
node_to_reg: Vec<Reg>, node_to_reg: Vec<Reg>,
visited: BitSet, visited: BitSet,
@ -1054,83 +982,37 @@ pub(super) struct Res {
} }
struct Bundle { struct Bundle {
start: usize, taken: Vec<bool>,
end: usize,
usage: BitSet,
}
impl Default for Bundle {
fn default() -> Self {
Self { start: usize::MAX, end: 0, usage: Default::default() }
}
} }
impl Bundle { impl Bundle {
fn new(size: usize) -> Self {
Self { taken: vec![false; size] }
}
fn add(&mut self, range: Range<usize>) { fn add(&mut self, range: Range<usize>) {
debug_assert!(!range.is_empty()); self.taken[range].fill(true);
debug_assert!(range.start / BitSet::UNIT >= self.start || self.start == usize::MAX);
self.start = self.start.min(range.start / BitSet::UNIT);
self.end = self.end.max(range.end.div_ceil(BitSet::UNIT));
let proj_range =
range.start - self.start * BitSet::UNIT..range.end - self.start * BitSet::UNIT;
self.usage.set_range(proj_range)
} }
fn overlaps(&self, othr: &Self) -> bool { fn overlaps(&self, other: &Self) -> bool {
let overlap = self.start.max(othr.start)..self.end.min(othr.end); self.taken.iter().zip(other.taken.iter()).any(|(a, b)| a & b)
if overlap.start >= overlap.end {
return false;
}
let [mut sslot, mut oslot] = [0, 0];
let sunits =
&self.usage.units(&mut sslot)[overlap.start - self.start..overlap.end - self.start];
let ounits =
&othr.usage.units(&mut oslot)[overlap.start - othr.start..overlap.end - othr.start];
debug_assert_eq!(sunits.len(), ounits.len());
let res = sunits.iter().zip(ounits).any(|(a, b)| (a & b) != 0);
res
} }
fn merge(&mut self, othr: &Self) { fn merge(&mut self, other: &Self) {
debug_assert!(!self.overlaps(othr)); debug_assert!(!self.overlaps(other));
debug_assert!(self.start <= othr.start || self.start == usize::MAX); self.taken.iter_mut().zip(other.taken.iter()).for_each(|(a, b)| *a |= *b);
self.usage.reserve((othr.end - self.start) * BitSet::UNIT);
self.start = self.start.min(othr.start);
self.end = self.end.max(othr.end);
let sunits =
&mut self.usage.units_mut().unwrap()[othr.start - self.start..othr.end - self.start];
let mut oslot = 0;
let ounits = othr.usage.units(&mut oslot);
sunits.iter_mut().zip(ounits).for_each(|(a, b)| *a |= *b);
} }
fn clear(&mut self) { fn clear(&mut self) {
self.start = usize::MAX; self.taken.fill(false);
self.end = 0;
self.usage.clear_as_is();
} }
fn is_empty(&self) -> bool { fn is_empty(&self) -> bool {
self.end == 0 !self.taken.contains(&true)
} }
fn take(&mut self) -> Self { fn take(&mut self) -> Self {
let mut new = Self { start: 0, ..Self::default() }; mem::replace(self, Self::new(self.taken.len()))
new.merge(self);
self.clear();
new
}
fn add_one(&mut self, i: usize) {
self.start = self.start.min(i / BitSet::UNIT);
self.end = self.end.max(i.div_ceil(BitSet::UNIT));
self.usage.set(i as _);
} }
} }

View file

@ -2,8 +2,7 @@ use {
crate::{ crate::{
lexer::{self, Lexer, TokenKind}, lexer::{self, Lexer, TokenKind},
parser::{ parser::{
self, CommentOr, CtorField, EnumField, Expr, FieldList, ListKind, Poser, Radix, self, CommentOr, CtorField, EnumField, Expr, FieldList, Poser, Radix, StructField,
StructField, UnionField,
}, },
}, },
core::{ core::{
@ -51,29 +50,27 @@ enum TokenGroup {
Ctor, Ctor,
} }
impl TokenKind { fn token_group(kind: TokenKind) -> TokenGroup {
fn to_higlight_group(self) -> TokenGroup { use {crate::lexer::TokenKind::*, TokenGroup as TG};
use {TokenGroup as TG, TokenKind::*}; match kind {
match self { BSlash | Pound | Eof | Ct => TG::Blank,
BSlash | Pound | Eof | Ct => TG::Blank, Comment => TG::Comment,
Comment => TG::Comment, Directive => TG::Directive,
Directive => TG::Directive, Colon => TG::Colon,
Colon => TG::Colon, Semi | Comma => TG::Comma,
Semi | Comma => TG::Comma, Dot => TG::Dot,
Dot => TG::Dot, Ctor | Tupl | TArrow => TG::Ctor,
Ctor | Arr | Tupl | TArrow | Range => TG::Ctor, LParen | RParen => TG::Paren,
LParen | RParen => TG::Paren, LBrace | RBrace | LBrack | RBrack => TG::Bracket,
LBrace | RBrace | LBrack | RBrack => TG::Bracket, Number | Float => TG::Number,
Number | Float => TG::Number, Under | CtIdent | Ident => TG::Identifier,
Under | CtIdent | Ident => TG::Identifier, Tick | Tilde | Que | Not | Mod | Band | Bor | Xor | Mul | Add | Sub | Div | Shl | Shr
Tick | Tilde | Que | Not | Mod | Band | Bor | Xor | Mul | Add | Sub | Div | Shl | Or | And | Lt | Gt | Eq | Le | Ge | Ne => TG::Op,
| Shr | Or | And | Lt | Gt | Eq | Le | Ge | Ne => TG::Op, Decl | Assign | BorAss | XorAss | BandAss | AddAss | SubAss | MulAss | DivAss | ModAss
Decl | Assign | BorAss | XorAss | BandAss | AddAss | SubAss | MulAss | DivAss | ShrAss | ShlAss => TG::Assign,
| ModAss | ShrAss | ShlAss => TG::Assign, DQuote | Quote => TG::String,
DQuote | Quote => TG::String, Slf | Defer | Return | If | Else | Loop | Break | Continue | Fn | Idk | Die | Struct
Slf | Defer | Return | If | Else | Loop | Break | Continue | Fn | Idk | Die | Packed | True | False | Null | Match | Enum | Union | CtLoop => TG::Keyword,
| Struct | Packed | True | False | Null | Match | Enum | Union | CtLoop => TG::Keyword,
}
} }
} }
@ -91,7 +88,7 @@ pub fn get_token_kinds(mut source: &mut [u8]) -> usize {
let start = token.start as usize; let start = token.start as usize;
let end = token.end as usize; let end = token.end as usize;
source[..start].fill(0); source[..start].fill(0);
source[start..end].fill(token.kind.to_higlight_group() as u8); source[start..end].fill(token_group(token.kind) as u8);
source = &mut source[end..]; source = &mut source[end..];
} }
len len
@ -224,12 +221,12 @@ impl<'a> Formatter<'a> {
f.write_str(sep)?; f.write_str(sep)?;
} }
if let Some(expr) = list.get(i + 1) if let Some(expr) = list.get(i + 1)
&& let Some(prev) = self.source.get(..expr.posi() as usize) && let Some(rest) = self.source.get(expr.posi() as usize..)
{ {
if sep.is_empty() && prev.trim_end().ends_with(';') { if sep.is_empty() && insert_needed_semicolon(rest) {
f.write_str(";")?; f.write_str(";")?;
} }
if count_trailing_newlines(prev) > 1 { if preserve_newlines(&self.source[..expr.posi() as usize]) > 1 {
f.write_str("\n")?; f.write_str("\n")?;
} }
} }
@ -307,6 +304,10 @@ impl<'a> Formatter<'a> {
} }
match *expr { match *expr {
Expr::Ct { value, .. } => {
f.write_str("$: ")?;
self.fmt(value, f)
}
Expr::Defer { value, .. } => { Expr::Defer { value, .. } => {
f.write_str("defer ")?; f.write_str("defer ")?;
self.fmt(value, f) self.fmt(value, f)
@ -322,16 +323,6 @@ impl<'a> Formatter<'a> {
f.write_str(".")?; f.write_str(".")?;
f.write_str(field) f.write_str(field)
} }
Expr::Range { start, end, .. } => {
if let Some(start) = start {
self.fmt(start, f)?;
}
f.write_str("..")?;
if let Some(end) = end {
self.fmt(end, f)?;
}
Ok(())
}
Expr::Directive { name, args, .. } => { Expr::Directive { name, args, .. } => {
f.write_str("@")?; f.write_str("@")?;
f.write_str(name)?; f.write_str(name)?;
@ -348,15 +339,10 @@ impl<'a> Formatter<'a> {
"struct", "struct",
trailing_comma, trailing_comma,
fields, fields,
|s, StructField { name, ty, default_value, .. }, f| { |s, StructField { name, ty, .. }, f| {
f.write_str(name)?; f.write_str(name)?;
f.write_str(": ")?; f.write_str(": ")?;
s.fmt(ty, f)?; s.fmt(ty, f)
if let Some(deva) = default_value {
f.write_str(" = ")?;
s.fmt(deva, f)?;
}
Ok(())
}, },
) )
} }
@ -365,7 +351,7 @@ impl<'a> Formatter<'a> {
"union", "union",
trailing_comma, trailing_comma,
fields, fields,
|s, UnionField { name, ty, .. }, f| { |s, StructField { name, ty, .. }, f| {
f.write_str(name)?; f.write_str(name)?;
f.write_str(": ")?; f.write_str(": ")?;
s.fmt(ty, f) s.fmt(ty, f)
@ -380,7 +366,7 @@ impl<'a> Formatter<'a> {
), ),
Expr::Ctor { ty, fields, trailing_comma, .. } => { Expr::Ctor { ty, fields, trailing_comma, .. } => {
if let Some(ty) = ty { if let Some(ty) = ty {
self.fmt_paren(ty, f, postfix)?; self.fmt_paren(ty, f, unary)?;
} }
f.write_str(".{")?; f.write_str(".{")?;
self.fmt_list( self.fmt_list(
@ -399,43 +385,38 @@ impl<'a> Formatter<'a> {
}, },
) )
} }
Expr::List { Expr::Tupl {
pos, pos,
kind: term,
ty: Some(&Expr::Slice { pos: spos, size: Some(&Expr::Number { value, .. }), item }), ty: Some(&Expr::Slice { pos: spos, size: Some(&Expr::Number { value, .. }), item }),
fields, fields,
trailing_comma, trailing_comma,
} if value as usize == fields.len() => self.fmt( } if value as usize == fields.len() => self.fmt(
&Expr::List { &Expr::Tupl {
pos, pos,
kind: term,
ty: Some(&Expr::Slice { pos: spos, size: None, item }), ty: Some(&Expr::Slice { pos: spos, size: None, item }),
fields, fields,
trailing_comma, trailing_comma,
}, },
f, f,
), ),
Expr::List { ty, kind: term, fields, trailing_comma, .. } => { Expr::Tupl { ty, fields, trailing_comma, .. } => {
if let Some(ty) = ty { if let Some(ty) = ty {
self.fmt_paren(ty, f, postfix)?; self.fmt_paren(ty, f, unary)?;
} }
let (start, end) = match term { f.write_str(".(")?;
ListKind::Tuple => (".(", ")"), self.fmt_list(f, trailing_comma, ")", ",", fields, Self::fmt)
ListKind::Array => (".[", "]"),
};
f.write_str(start)?;
self.fmt_list(f, trailing_comma, end, ",", fields, Self::fmt)
} }
Expr::Slice { item, size, .. } => { Expr::Slice { item, size, .. } => {
f.write_str("[")?; f.write_str("[")?;
self.fmt(item, f)?;
if let Some(size) = size { if let Some(size) = size {
f.write_str("; ")?;
self.fmt(size, f)?; self.fmt(size, f)?;
} }
f.write_str("]")?; f.write_str("]")
self.fmt_paren(item, f, unary)
} }
Expr::Index { base, index } => { Expr::Index { base, index } => {
self.fmt_paren(base, f, postfix)?; self.fmt(base, f)?;
f.write_str("[")?; f.write_str("[")?;
self.fmt(index, f)?; self.fmt(index, f)?;
f.write_str("]") f.write_str("]")
@ -558,7 +539,7 @@ impl<'a> Formatter<'a> {
prev.rfind(|c: char| c.is_ascii_whitespace()).map_or(prev.len(), |i| i + 1); prev.rfind(|c: char| c.is_ascii_whitespace()).map_or(prev.len(), |i| i + 1);
let exact_bound = lexer::Lexer::new(&prev[estimate_bound..]).last().start; let exact_bound = lexer::Lexer::new(&prev[estimate_bound..]).last().start;
prev = &prev[..exact_bound as usize + estimate_bound]; prev = &prev[..exact_bound as usize + estimate_bound];
if count_trailing_newlines(prev) > 0 { if preserve_newlines(prev) > 0 {
f.write_str("\n")?; f.write_str("\n")?;
for _ in 0..self.depth + 1 { for _ in 0..self.depth + 1 {
f.write_str("\t")?; f.write_str("\t")?;
@ -566,9 +547,7 @@ impl<'a> Formatter<'a> {
f.write_str(op.name())?; f.write_str(op.name())?;
f.write_str(" ")?; f.write_str(" ")?;
} else { } else {
if op != TokenKind::Colon { f.write_str(" ")?;
f.write_str(" ")?;
}
f.write_str(op.name())?; f.write_str(op.name())?;
f.write_str(" ")?; f.write_str(" ")?;
} }
@ -583,10 +562,15 @@ impl<'a> Formatter<'a> {
} }
} }
pub fn count_trailing_newlines(source: &str) -> usize { pub fn preserve_newlines(source: &str) -> usize {
source[source.trim_end().len()..].bytes().filter(|&c| c == b'\n').count() source[source.trim_end().len()..].bytes().filter(|&c| c == b'\n').count()
} }
pub fn insert_needed_semicolon(source: &str) -> bool {
let kind = lexer::Lexer::new(source).eat().kind;
kind.precedence().is_some() || matches!(kind, TokenKind::Ctor | TokenKind::Tupl)
}
impl core::fmt::Display for parser::Ast { impl core::fmt::Display for parser::Ast {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt_file(self.exprs(), &self.file, f) fmt_file(self.exprs(), &self.file, f)
@ -597,14 +581,14 @@ pub fn fmt_file(exprs: &[Expr], file: &str, f: &mut impl fmt::Write) -> fmt::Res
for (i, expr) in exprs.iter().enumerate() { for (i, expr) in exprs.iter().enumerate() {
Formatter::new(file).fmt(expr, f)?; Formatter::new(file).fmt(expr, f)?;
if let Some(expr) = exprs.get(i + 1) if let Some(expr) = exprs.get(i + 1)
&& let Some(prefix) = file.get(..expr.pos() as usize) && let Some(rest) = file.get(expr.pos() as usize..)
{ {
if prefix.trim_end().ends_with(';') { if insert_needed_semicolon(rest) {
f.write_str(";")?; write!(f, ";")?;
} }
if count_trailing_newlines(prefix) > 1 { if preserve_newlines(&file[..expr.pos() as usize]) > 1 {
f.write_str("\n")?; writeln!(f)?;
} }
} }

View file

@ -1,17 +1,18 @@
use { use {
crate::{ crate::{
backend::{hbvm::HbvmBackend, Backend}, backend::hbvm::HbvmBackend,
parser::{Ast, Ctx, FileKind}, parser::{Ast, Ctx, FileKind},
son::{self}, son::{self},
ty, FnvBuildHasher, ty, FnvBuildHasher,
}, },
alloc::{string::String, vec::Vec}, alloc::{string::String, vec::Vec},
core::{fmt::Write, ops::Deref}, core::{fmt::Write, num::NonZeroUsize, ops::Deref},
hashbrown::hash_map, hashbrown::hash_map,
std::{ std::{
borrow::ToOwned, borrow::ToOwned,
collections::VecDeque, collections::VecDeque,
eprintln, eprintln,
ffi::OsStr,
io::{self, Write as _}, io::{self, Write as _},
path::{Path, PathBuf}, path::{Path, PathBuf},
string::ToString, string::ToString,
@ -71,7 +72,60 @@ pub struct Options<'a> {
pub dump_asm: bool, pub dump_asm: bool,
pub extra_threads: usize, pub extra_threads: usize,
pub resolver: Option<PathResolver<'a>>, pub resolver: Option<PathResolver<'a>>,
pub backend: Option<&'a mut dyn Backend>, }
impl<'a> Options<'a> {
pub fn from_args(
args: &[&str],
out: &mut Vec<u8>,
resolvers: &'a [(&str, PathResolver)],
) -> std::io::Result<Self> {
if args.contains(&"--help") || args.contains(&"-h") {
writeln!(out, "Usage: hbc [OPTIONS...] <FILE>")?;
writeln!(out, include_str!("../command-help.txt"))?;
return Err(std::io::ErrorKind::Other.into());
}
Ok(Options {
fmt: args.contains(&"--fmt"),
fmt_stdout: args.contains(&"--fmt-stdout"),
dump_asm: args.contains(&"--dump-asm"),
extra_threads: args
.iter()
.position(|&a| a == "--threads")
.map(|i| {
args[i + 1].parse::<NonZeroUsize>().map_err(|e| {
writeln!(out, "--threads expects non zero integer: {e}")
.err()
.unwrap_or(std::io::ErrorKind::Other.into())
})
})
.transpose()?
.map_or(1, NonZeroUsize::get)
- 1,
resolver: args
.iter()
.position(|&a| a == "--path-resolver")
.map(|i| {
resolvers.iter().find(|&&(n, _)| args[i + 1] == n).map(|&(_, r)| r).ok_or_else(
|| {
writeln!(
out,
"--path-resolver can only be one of: {}",
resolvers
.iter()
.map(|&(n, _)| n)
.intersperse(", ")
.collect::<String>()
)
.err()
.unwrap_or(std::io::ErrorKind::Other.into())
},
)
})
.transpose()?,
})
}
} }
pub fn run_compiler( pub fn run_compiler(
@ -104,11 +158,10 @@ pub fn run_compiler(
write!(out, "{}", &parsed.ast[0])?; write!(out, "{}", &parsed.ast[0])?;
} else { } else {
let mut backend = HbvmBackend::default(); let mut backend = HbvmBackend::default();
let backend = options.backend.unwrap_or(&mut backend);
let mut ctx = crate::son::CodegenCtx::default(); let mut ctx = crate::son::CodegenCtx::default();
*ctx.parser.errors.get_mut() = parsed.errors; *ctx.parser.errors.get_mut() = parsed.errors;
let mut codegen = son::Codegen::new(backend, &parsed.ast, &mut ctx); let mut codegen = son::Codegen::new(&mut backend, &parsed.ast, &mut ctx);
codegen.push_embeds(parsed.embeds); codegen.push_embeds(parsed.embeds);
codegen.generate(ty::Module::MAIN); codegen.generate(ty::Module::MAIN);
@ -124,9 +177,8 @@ pub fn run_compiler(
if options.dump_asm { if options.dump_asm {
let mut disasm = String::new(); let mut disasm = String::new();
let err = codegen.disasm(&mut disasm, out).map_err(|e| io::Error::other(e.to_string())); codegen.disasm(&mut disasm, out).map_err(|e| io::Error::other(e.to_string()))?;
*out = disasm.into_bytes(); *out = disasm.into_bytes();
err?
} }
} }
@ -246,7 +298,7 @@ pub struct CantLoadFile {
impl core::fmt::Display for CantLoadFile { impl core::fmt::Display for CantLoadFile {
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
write!(f, "can't load file: {}", crate::display_rel_path(&self.path),) write!(f, "can't load file: {}", display_rel_path(&self.path),)
} }
} }
@ -297,7 +349,7 @@ pub fn parse_from_fs(
if !physiscal_path.exists() { if !physiscal_path.exists() {
return Err(io::Error::new( return Err(io::Error::new(
io::ErrorKind::NotFound, io::ErrorKind::NotFound,
format!("can't find file: {}", crate::display_rel_path(&physiscal_path)), format!("can't find file: {}", display_rel_path(&physiscal_path)),
)); ));
} }
@ -324,7 +376,7 @@ pub fn parse_from_fs(
e.kind(), e.kind(),
format!( format!(
"can't load embed file: {}: {e}", "can't load embed file: {}: {e}",
crate::display_rel_path(&physiscal_path) display_rel_path(&physiscal_path)
), ),
) )
})?; })?;
@ -342,7 +394,7 @@ pub fn parse_from_fs(
let path = path.to_str().ok_or_else(|| { let path = path.to_str().ok_or_else(|| {
io::Error::new( io::Error::new(
io::ErrorKind::InvalidData, io::ErrorKind::InvalidData,
format!("path contains invalid characters: {}", crate::display_rel_path(&path)), format!("path contains invalid characters: {}", display_rel_path(&path)),
) )
})?; })?;
Ok(Ast::new(path, std::fs::read_to_string(path)?, ctx, &mut |path, from, kind| { Ok(Ast::new(path, std::fs::read_to_string(path)?, ctx, &mut |path, from, kind| {
@ -388,3 +440,9 @@ pub fn parse_from_fs(
errors, errors,
}) })
} }
pub fn display_rel_path(path: &(impl AsRef<OsStr> + ?Sized)) -> std::path::Display {
static CWD: std::sync::LazyLock<PathBuf> =
std::sync::LazyLock::new(|| std::env::current_dir().unwrap_or_default());
std::path::Path::new(path).strip_prefix(&*CWD).unwrap_or(std::path::Path::new(path)).display()
}

View file

@ -138,9 +138,7 @@ pub enum TokenKind {
Ctor, Ctor,
Tupl, Tupl,
Arr,
TArrow, TArrow,
Range,
Or, Or,
And, And,
@ -349,9 +347,7 @@ gen_token_kind! {
#[punkt] #[punkt]
Ctor = ".{", Ctor = ".{",
Tupl = ".(", Tupl = ".(",
Arr = ".[",
TArrow = "=>", TArrow = "=>",
Range = "..",
// #define OP: each `#[prec]` delimeters a level of precedence from lowest to highest // #define OP: each `#[prec]` delimeters a level of precedence from lowest to highest
#[ops] #[ops]
#[prec] #[prec]
@ -434,19 +430,6 @@ impl<'a> Lexer<'a> {
Lexer { pos: self.pos, source: self.source }.eat() Lexer { pos: self.pos, source: self.source }.eat()
} }
fn peek_n<const N: usize>(&self) -> Option<&[u8; N]> {
if core::intrinsics::unlikely(self.pos as usize + N > self.source.len()) {
None
} else {
Some(unsafe {
self.source
.get_unchecked(self.pos as usize..self.pos as usize + N)
.first_chunk()
.unwrap_unchecked()
})
}
}
fn peek(&self) -> Option<u8> { fn peek(&self) -> Option<u8> {
if core::intrinsics::unlikely(self.pos >= self.source.len() as u32) { if core::intrinsics::unlikely(self.pos >= self.source.len() as u32) {
None None
@ -515,11 +498,7 @@ impl<'a> Lexer<'a> {
self.advance(); self.advance();
} }
if self if self.advance_if(b'.') {
.peek_n()
.map_or_else(|| self.peek() == Some(b'.'), |&[a, b]| a == b'.' && b != b'.')
{
self.pos += 1;
while let Some(b'0'..=b'9') = self.peek() { while let Some(b'0'..=b'9') = self.peek() {
self.advance(); self.advance();
} }
@ -571,8 +550,6 @@ impl<'a> Lexer<'a> {
} }
b'.' if self.advance_if(b'{') => T::Ctor, b'.' if self.advance_if(b'{') => T::Ctor,
b'.' if self.advance_if(b'(') => T::Tupl, b'.' if self.advance_if(b'(') => T::Tupl,
b'.' if self.advance_if(b'[') => T::Arr,
b'.' if self.advance_if(b'.') => T::Range,
b'=' if self.advance_if(b'>') => T::TArrow, b'=' if self.advance_if(b'>') => T::TArrow,
b'&' if self.advance_if(b'&') => T::And, b'&' if self.advance_if(b'&') => T::And,
b'|' if self.advance_if(b'|') => T::Or, b'|' if self.advance_if(b'|') => T::Or,

View file

@ -1,6 +1,5 @@
#![feature( #![feature(
iter_array_chunks, iter_array_chunks,
str_split_remainder,
assert_matches, assert_matches,
let_chains, let_chains,
if_let_guard, if_let_guard,
@ -25,8 +24,7 @@
iter_next_chunk, iter_next_chunk,
pointer_is_aligned_to, pointer_is_aligned_to,
maybe_uninit_fill, maybe_uninit_fill,
array_chunks, array_chunks
array_windows
)] )]
#![warn(clippy::dbg_macro)] #![warn(clippy::dbg_macro)]
#![expect(internal_features)] #![expect(internal_features)]
@ -34,11 +32,8 @@
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub use fs::*; pub use fs::*;
use { pub use utils::Ent;
self::{ty::Builtin, utils::Ent}, use {self::ty::Builtin, alloc::vec::Vec};
alloc::vec::Vec,
core::{fmt::Arguments, format_args as fa},
};
#[macro_use] #[macro_use]
extern crate alloc; extern crate alloc;
@ -46,6 +41,9 @@ extern crate alloc;
#[cfg(any(feature = "std", test))] #[cfg(any(feature = "std", test))]
extern crate std; extern crate std;
#[cfg(test)]
const README: &str = include_str!("../README.md");
#[cfg(test)] #[cfg(test)]
#[macro_export] #[macro_export]
macro_rules! run_tests { macro_rules! run_tests {
@ -76,51 +74,8 @@ pub mod backend {
utils::EntSlice, utils::EntSlice,
}, },
alloc::{string::String, vec::Vec}, alloc::{string::String, vec::Vec},
core::{error, mem::take},
}; };
pub fn match_triple(pattern: &str, triple: &str) -> Result<bool, &'static str> {
if pattern == "*" {
return Err("you can replace this with 'true'");
}
if pattern.ends_with("-*") {
return Err("trailing '*' is redundant");
}
let mut matcher = pattern.split('-');
let mut matchee = triple.split('-');
let mut eat_start = false;
loop {
match matcher.next() {
Some("*") if eat_start => return Err("consecutive '*' are redundant"),
Some("*") if matchee.next().is_none() => return Ok(false),
Some("*") => eat_start = true,
Some(pat) if take(&mut eat_start) => {
if matchee.by_ref().all(|v| v != pat) {
return Ok(false);
}
}
Some(pat) if matchee.next() != Some(pat) => return Ok(false),
Some(_) => {}
None => return Ok(true),
}
}
}
#[test]
fn sanity_match_triple() {
assert!(match_triple("a-b-c", "a-b-c").unwrap());
assert!(match_triple("*-b-c", "a-b-c").unwrap());
assert!(match_triple("*-c", "a-b-c").unwrap());
assert!(match_triple("a", "a-b-c").unwrap());
assert!(!match_triple("*-a", "a-b-c").unwrap());
assert!(!match_triple("*-a", "a-b-c").unwrap());
assert!(match_triple("*-*", "a-b-c").is_err());
assert!(match_triple("*-b-*", "a-b-c").is_err());
}
pub mod hbvm; pub mod hbvm;
pub struct AssemblySpec { pub struct AssemblySpec {
@ -130,12 +85,10 @@ pub mod backend {
} }
pub trait Backend { pub trait Backend {
fn triple(&self) -> String;
fn assemble_reachable( fn assemble_reachable(
&mut self, &mut self,
from: ty::Func, from: ty::Func,
types: &Types, types: &Types,
files: &EntSlice<Module, parser::Ast>,
to: &mut Vec<u8>, to: &mut Vec<u8>,
) -> AssemblySpec; ) -> AssemblySpec;
fn disasm<'a>( fn disasm<'a>(
@ -145,11 +98,11 @@ pub mod backend {
types: &'a Types, types: &'a Types,
files: &'a EntSlice<Module, parser::Ast>, files: &'a EntSlice<Module, parser::Ast>,
output: &mut String, output: &mut String,
) -> Result<(), alloc::boxed::Box<dyn error::Error + Send + Sync + 'a>>; ) -> Result<(), hbbytecode::DisasmError<'a>>;
fn emit_body( fn emit_body(
&mut self, &mut self,
id: ty::Func, id: ty::Func,
nodes: &Nodes, ci: &Nodes,
tys: &Types, tys: &Types,
files: &EntSlice<Module, parser::Ast>, files: &EntSlice<Module, parser::Ast>,
); );
@ -157,30 +110,22 @@ pub mod backend {
fn emit_ct_body( fn emit_ct_body(
&mut self, &mut self,
id: ty::Func, id: ty::Func,
nodes: &Nodes, ci: &Nodes,
tys: &Types, tys: &Types,
files: &EntSlice<Module, parser::Ast>, files: &EntSlice<Module, parser::Ast>,
) { ) {
self.emit_body(id, nodes, tys, files); self.emit_body(id, ci, tys, files);
} }
fn assemble_bin( fn assemble_bin(&mut self, from: ty::Func, types: &Types, to: &mut Vec<u8>) {
&mut self, self.assemble_reachable(from, types, to);
from: ty::Func,
types: &Types,
files: &EntSlice<Module, parser::Ast>,
to: &mut Vec<u8>,
) {
self.assemble_reachable(from, types, files, to);
} }
} }
} }
pub mod utils; mod utils;
mod debug { mod debug {
use core::fmt::Debug;
pub fn panicking() -> bool { pub fn panicking() -> bool {
#[cfg(feature = "std")] #[cfg(feature = "std")]
{ {
@ -193,14 +138,14 @@ mod debug {
} }
#[cfg(all(debug_assertions, feature = "std"))] #[cfg(all(debug_assertions, feature = "std"))]
pub type Trace = std::rc::Rc<(std::backtrace::Backtrace, std::string::String)>; pub type Trace = std::rc::Rc<std::backtrace::Backtrace>;
#[cfg(not(all(debug_assertions, feature = "std")))] #[cfg(not(all(debug_assertions, feature = "std")))]
pub type Trace = (); pub type Trace = ();
pub fn trace(_hint: impl Debug) -> Trace { pub fn trace() -> Trace {
#[cfg(all(debug_assertions, feature = "std"))] #[cfg(all(debug_assertions, feature = "std"))]
{ {
std::rc::Rc::new((std::backtrace::Backtrace::capture(), format!("{_hint:?}"))) std::rc::Rc::new(std::backtrace::Backtrace::capture())
} }
#[cfg(not(all(debug_assertions, feature = "std")))] #[cfg(not(all(debug_assertions, feature = "std")))]
{} {}
@ -361,9 +306,9 @@ impl Ident {
fn endoce_string( fn endoce_string(
literal: &str, literal: &str,
str: &mut Vec<u8>, str: &mut Vec<u8>,
report: impl Fn(&core::str::Bytes, Arguments), report: impl Fn(&core::str::Bytes, &str),
) -> Option<usize> { ) -> Option<usize> {
let report = |bytes: &core::str::Bytes, msg: Arguments| { let report = |bytes: &core::str::Bytes, msg: &_| {
report(bytes, msg); report(bytes, msg);
None::<u8> None::<u8>
}; };
@ -372,13 +317,13 @@ fn endoce_string(
while let Some(b) = bytes.next() while let Some(b) = bytes.next()
&& b != b'}' && b != b'}'
{ {
let c = bytes.next().or_else(|| report(bytes, fa!("incomplete escape sequence")))?; let c = bytes.next().or_else(|| report(bytes, "incomplete escape sequence"))?;
let decode = |b: u8| { let decode = |b: u8| {
Some(match b { Some(match b {
b'0'..=b'9' => b - b'0', b'0'..=b'9' => b - b'0',
b'a'..=b'f' => b - b'a' + 10, b'a'..=b'f' => b - b'a' + 10,
b'A'..=b'F' => b - b'A' + 10, b'A'..=b'F' => b - b'A' + 10,
_ => report(bytes, fa!("expected hex digit or '}}'"))?, _ => report(bytes, "expected hex digit or '}'")?,
}) })
}; };
str.push(decode(b)? << 4 | decode(c)?); str.push(decode(b)? << 4 | decode(c)?);
@ -395,27 +340,19 @@ fn endoce_string(
str.push(b); str.push(b);
continue; continue;
} }
let b = match bytes.next().or_else(|| report(&bytes, "incomplete escape sequence"))? {
const SPECIAL_CHARS: &str = "nrt\\'\"0"; b'n' => b'\n',
const TO_BYTES: &[u8] = b"\n\r\t\\'\"\0"; b'r' => b'\r',
const _: () = assert!(SPECIAL_CHARS.len() == TO_BYTES.len()); b't' => b'\t',
b'\\' => b'\\',
let b = match bytes.next().or_else(|| report(&bytes, fa!("incomplete escape sequence")))? { b'\'' => b'\'',
b if let Some((_, &i)) = SPECIAL_CHARS.bytes().zip(TO_BYTES).find(|&(i, _)| i == b) => { b'"' => b'"',
i b'0' => b'\0',
}
b'{' => { b'{' => {
decode_braces(str, &mut bytes); decode_braces(str, &mut bytes);
continue; continue;
} }
_ => report( _ => report(&bytes, "unknown escape sequence, expected [nrt\\\"'{0]")?,
&bytes,
format_args!(
"unknown escape sequence, \
expected one of special characters (regex /[{SPECIAL_CHARS}]/), \
or arbitrary byte sequence in hex (regex /\\{{[0-9a-f]{{2}}+\\/}})"
),
)?,
}; };
str.push(b); str.push(b);
} }
@ -461,10 +398,10 @@ impl Default for FnvHasher {
#[cfg(test)] #[cfg(test)]
pub fn run_test( pub fn run_test(
name: &str, name: &'static str,
ident: &str, ident: &'static str,
input: &str, input: &'static str,
test: fn(&str, &str, &mut alloc::string::String), test: fn(&'static str, &'static str, &mut alloc::string::String),
) { ) {
use std::{ use std::{
io::Write, io::Write,
@ -543,6 +480,31 @@ fn test_parse_files(
std::{borrow::ToOwned, string::ToString}, std::{borrow::ToOwned, string::ToString},
}; };
fn find_block<'a>(mut input: &'a str, test_name: &str) -> &'a str {
const CASE_PREFIX: &str = "#### ";
const CASE_SUFFIX: &str = "\n```hb";
loop {
let Some(pos) = input.find(CASE_PREFIX) else {
unreachable!("test {test_name} not found");
};
input = unsafe { input.get_unchecked(pos + CASE_PREFIX.len()..) };
if !input.starts_with(test_name) {
continue;
}
input = unsafe { input.get_unchecked(test_name.len()..) };
if !input.starts_with(CASE_SUFFIX) {
continue;
}
input = unsafe { input.get_unchecked(CASE_SUFFIX.len()..) };
let end = input.find("```").unwrap_or(input.len());
break unsafe { input.get_unchecked(..end) };
}
}
let input = find_block(input, ident);
let mut module_map = Vec::new(); let mut module_map = Vec::new();
let mut embed_map = Vec::new(); let mut embed_map = Vec::new();
let mut last_start = 0; let mut last_start = 0;
@ -584,26 +546,3 @@ fn test_parse_files(
embed_map.iter().map(|&(_, content)| content.to_owned().into_bytes()).collect(), embed_map.iter().map(|&(_, content)| content.to_owned().into_bytes()).collect(),
) )
} }
#[cfg(feature = "std")]
static CWD: std::sync::LazyLock<std::path::PathBuf> =
std::sync::LazyLock::new(|| std::env::current_dir().unwrap_or_default());
#[cfg(feature = "std")]
pub fn strip_cwd(path: &str) -> &str {
std::path::Path::new(path)
.strip_prefix(&*CWD)
.unwrap_or(std::path::Path::new(path))
.to_str()
.unwrap()
}
#[cfg(feature = "std")]
pub fn display_rel_path(path: &(impl AsRef<std::ffi::OsStr> + ?Sized)) -> std::path::Display {
std::path::Path::new(path).strip_prefix(&*CWD).unwrap_or(std::path::Path::new(path)).display()
}
#[cfg(not(feature = "std"))]
pub fn display_rel_path(path: &str) -> &str {
path
}

31
lang/src/main.rs Normal file
View file

@ -0,0 +1,31 @@
#[cfg(feature = "std")]
fn main() {
use std::io::Write;
fn run(out: &mut Vec<u8>, warnings: &mut String) -> std::io::Result<()> {
let args = std::env::args().collect::<Vec<_>>();
let args = args.iter().map(String::as_str).collect::<Vec<_>>();
let resolvers = &[("ableos", hblang::ABLEOS_PATH_RESOLVER)];
let opts = hblang::Options::from_args(&args, out, resolvers)?;
let file = args.iter().filter(|a| !a.starts_with('-')).nth(1).copied().unwrap_or("main.hb");
hblang::run_compiler(file, opts, out, warnings)
}
log::set_logger(&hblang::fs::Logger).unwrap();
log::set_max_level(log::LevelFilter::Error);
let mut out = Vec::new();
let mut warnings = String::new();
match run(&mut out, &mut warnings) {
Ok(_) => {
std::io::stderr().write_all(warnings.as_bytes()).unwrap();
std::io::stdout().write_all(&out).unwrap()
}
Err(_) => {
std::io::stderr().write_all(warnings.as_bytes()).unwrap();
std::io::stderr().write_all(&out).unwrap();
std::process::exit(1);
}
}
}

View file

@ -9,6 +9,7 @@ use {
}, },
alloc::{string::String, vec::Vec}, alloc::{string::String, vec::Vec},
core::{ core::{
assert_matches::debug_assert_matches,
cell::Cell, cell::Cell,
fmt::{self, Debug, Write}, fmt::{self, Debug, Write},
mem, mem,
@ -96,7 +97,6 @@ impl Nodes {
debug_assert_ne!(next, 0); debug_assert_ne!(next, 0);
if matches!(self[cursor].kind, Kind::Then | Kind::Else) { if matches!(self[cursor].kind, Kind::Then | Kind::Else) {
debug_assert_eq!(self[next].kind, Kind::If); debug_assert_eq!(self[next].kind, Kind::If);
debug_assert_eq!(self[next].ty, ty::Id::VOID);
let other = self[next].outputs[(self[next].outputs[0] == cursor) as usize]; let other = self[next].outputs[(self[next].outputs[0] == cursor) as usize];
self[other].loop_depth.set(depth - 1); self[other].loop_depth.set(depth - 1);
} }
@ -168,8 +168,12 @@ impl Nodes {
let mut deepest = self[node].inputs[0]; let mut deepest = self[node].inputs[0];
for &inp in self[node].inputs[1..].iter() { for &inp in self[node].inputs[1..].iter() {
if self.idepth(inp, Some(scheds)) > self.idepth(deepest, Some(scheds)) { if self.idepth(inp, Some(scheds)) > self.idepth(deepest, Some(scheds)) {
debug_assert!(!self.is_cfg(inp)); if self[inp].kind.is_call() {
deepest = self.idom(inp, Some(scheds)); deepest = inp;
} else {
debug_assert!(!self.is_cfg(inp));
deepest = self.idom(inp, Some(scheds));
}
} }
} }
@ -194,8 +198,8 @@ impl Nodes {
for &node in rpo.iter().rev() { for &node in rpo.iter().rev() {
self.loop_depth(node, Some(scheds)); self.loop_depth(node, Some(scheds));
for &i in self[node].inputs.iter() { for i in 0..self[node].inputs.len() {
self.push_up_impl(i, visited, scheds); self.push_up_impl(self[node].inputs[i], visited, scheds);
} }
if matches!(self[node].kind, Kind::Loop | Kind::Region) { if matches!(self[node].kind, Kind::Loop | Kind::Region) {
@ -212,13 +216,13 @@ impl Nodes {
self.iter() self.iter()
.map(|(n, _)| n) .map(|(n, _)| n)
.filter(|&n| !visited.get(n) .filter(|&n| !visited.get(n)
&& !matches!(self[n].kind, Kind::Arg | Kind::Mem | Kind::Loops | Kind::RetVal)) && !matches!(self[n].kind, Kind::Arg | Kind::Mem | Kind::Loops))
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
vec![], vec![],
"{:?}", "{:?}",
self.iter() self.iter()
.filter(|&(n, nod)| !visited.get(n) .filter(|&(n, nod)| !visited.get(n)
&& !matches!(nod.kind, Kind::Arg | Kind::Mem | Kind::Loops | Kind::RetVal)) && !matches!(nod.kind, Kind::Arg | Kind::Mem | Kind::Loops))
.collect::<Vec<_>>() .collect::<Vec<_>>()
); );
@ -283,7 +287,6 @@ impl Nodes {
let cfg_idx = outputs.iter().position(|&n| self.is_cfg(n)).unwrap(); let cfg_idx = outputs.iter().position(|&n| self.is_cfg(n)).unwrap();
outputs.swap(cfg_idx, 0); outputs.swap(cfg_idx, 0);
for &o in outputs.iter() { for &o in outputs.iter() {
if (!self.is_cfg(o) if (!self.is_cfg(o)
&& self[o].outputs.iter().any(|&oi| { && self[o].outputs.iter().any(|&oi| {
@ -294,7 +297,9 @@ impl Nodes {
continue; continue;
} }
let mut cursor = buf.len(); let mut cursor = buf.len();
buf.push(o); for &o in outputs.iter().filter(|&&n| n == o) {
buf.push(o);
}
while let Some(&n) = buf.get(cursor) { while let Some(&n) = buf.get(cursor) {
for &i in &self[n].inputs[1..] { for &i in &self[n].inputs[1..] {
if fromc == self[i].inputs.first() if fromc == self[i].inputs.first()
@ -305,17 +310,15 @@ impl Nodes {
}) })
&& seen.set(i) && seen.set(i)
{ {
buf.push(i); for &o in outputs.iter().filter(|&&n| n == i) {
buf.push(o);
}
} }
} }
cursor += 1; cursor += 1;
} }
} }
buf[1..].sort_by_key(|&n| {
self[n].has_no_value() || !self[n].outputs.iter().all(|&o| self[o].kind == Kind::Phi)
});
debug_assert_eq!( debug_assert_eq!(
outputs.iter().filter(|&&n| !seen.get(n)).copied().collect::<Vec<_>>(), outputs.iter().filter(|&&n| !seen.get(n)).copied().collect::<Vec<_>>(),
vec![], vec![],
@ -698,20 +701,19 @@ impl Nodes {
if self.free == Nid::MAX { if self.free == Nid::MAX {
self.free = self.values.len() as _; self.free = self.values.len() as _;
self.values.push(Err((Nid::MAX, debug::trace("")))); self.values.push(Err((Nid::MAX, debug::trace())));
} }
let free = self.free; let free = self.free;
for &d in node.inputs.as_slice() { for &d in node.inputs.as_slice() {
debug_assert_ne!(d, free); debug_assert_ne!(d, free);
self.values[d as usize].as_mut().unwrap_or_else(|_| panic!("{d} ")).outputs.push(free); self.values[d as usize].as_mut().unwrap_or_else(|_| panic!("{d}")).outputs.push(free);
} }
self.free = mem::replace(&mut self.values[free as usize], Ok(node)).unwrap_err().0; self.free = mem::replace(&mut self.values[free as usize], Ok(node)).unwrap_err().0;
if let Some((entry, hash)) = lookup_meta { if let Some((entry, hash)) = lookup_meta {
entry.insert(crate::ctx_map::Key { value: free, hash }, ()); entry.insert(crate::ctx_map::Key { value: free, hash }, ());
} }
free free
} }
@ -775,11 +777,13 @@ impl Nodes {
} }
self.remove_node_lookup(target); self.remove_node_lookup(target);
let trace = debug::trace(&self.values[target as usize]);
if cfg!(debug_assertions) { if cfg!(debug_assertions) {
mem::replace(&mut self.values[target as usize], Err((Nid::MAX, trace))).unwrap(); mem::replace(&mut self.values[target as usize], Err((Nid::MAX, debug::trace())))
.unwrap();
} else { } else {
mem::replace(&mut self.values[target as usize], Err((self.free, trace))).unwrap(); mem::replace(&mut self.values[target as usize], Err((self.free, debug::trace())))
.unwrap();
self.free = target; self.free = target;
} }
@ -804,28 +808,17 @@ impl Nodes {
self.iter() self.iter()
.filter_map(|(id, node)| node.kind.is_peeped().then_some(id)) .filter_map(|(id, node)| node.kind.is_peeped().then_some(id))
.collect_into(stack); .collect_into(stack);
stack.iter().for_each(|&s| { stack.iter().for_each(|&s| self.lock(s));
debug_assert!(self.is_unlocked(s));
self.lock(s)
});
while fuel != 0 while fuel != 0
&& let Some(node) = stack.pop() && let Some(node) = stack.pop()
{ {
fuel -= 1; fuel -= 1;
if self[node].outputs.is_empty() {
self.push_adjacent_nodes(node, stack);
}
debug_assert_eq!(self[node].lock_rc.get(), 1, "{:?} {}", self[node], node);
if self.unlock_remove(node) { if self.unlock_remove(node) {
continue; continue;
} }
debug_assert!(!self[node].outputs.is_empty(), "{:?} {}", self[node], node);
if let Some(new) = self.peephole(node, tys) { if let Some(new) = self.peephole(node, tys) {
self.replace(node, new); self.replace(node, new);
self.push_adjacent_nodes(new, stack); self.push_adjacent_nodes(new, stack);
@ -841,6 +834,7 @@ impl Nodes {
} }
debug_assert!(self.queued_peeps.is_empty()); debug_assert!(self.queued_peeps.is_empty());
stack.drain(..).for_each(|s| _ = self.unlock_remove(s)); stack.drain(..).for_each(|s| _ = self.unlock_remove(s));
} }
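
The peephole driver in the hunks above keeps its worklist shape: seed the stack with every peephole-worthy node, pop while fuel lasts, and when a node is rewritten, requeue its neighbours so the rewrite can cascade. A minimal self-contained sketch of that shape on a toy IR follows; the node kinds, the single constant-folding rule, and the fuel limit are invented for illustration and are not the compiler's real types:

```rust
#[derive(Clone, Debug, PartialEq)]
enum Node {
    Const(i64),
    Add(usize, usize), // operand indices into the node list
}

// One rewrite rule: fold Add(Const, Const) into a Const.
fn try_peephole(nodes: &[Node], id: usize) -> Option<Node> {
    match nodes[id] {
        Node::Add(a, b) => match (&nodes[a], &nodes[b]) {
            (Node::Const(x), Node::Const(y)) => Some(Node::Const(x + y)),
            _ => None,
        },
        _ => None,
    }
}

// Users of `id`, i.e. nodes that take it as an input.
fn users_of(nodes: &[Node], id: usize) -> Vec<usize> {
    nodes
        .iter()
        .enumerate()
        .filter(|(_, n)| matches!(n, Node::Add(a, b) if *a == id || *b == id))
        .map(|(i, _)| i)
        .collect()
}

fn run_peeps(nodes: &mut [Node]) {
    // Seed the worklist with every node, like the collect_into(stack) above.
    let mut stack: Vec<usize> = (0..nodes.len()).collect();
    let mut fuel = 1_000; // stop even if rules keep firing
    while fuel != 0 {
        let Some(id) = stack.pop() else { break };
        fuel -= 1;
        if let Some(new) = try_peephole(nodes, id) {
            // Requeue the users of the rewritten node, the analogue of
            // replace + push_adjacent_nodes.
            stack.extend(users_of(nodes, id));
            nodes[id] = new;
            stack.push(id);
        }
    }
}

fn main() {
    // (1 + 2) + 3
    let mut nodes = vec![
        Node::Const(1),
        Node::Const(2),
        Node::Add(0, 1),
        Node::Const(3),
        Node::Add(2, 3),
    ];
    run_peeps(&mut nodes);
    assert_eq!(nodes[4], Node::Const(6));
    println!("{nodes:?}");
}
```
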
@ -861,19 +855,7 @@ impl Nodes {
} }
self[of].peep_triggers = Vc::default(); self[of].peep_triggers = Vc::default();
let mut i = 0; stack.iter().skip(prev_len).for_each(|&n| self.lock(n));
stack.retain(|&n| {
if i < prev_len {
i += 1;
return true;
}
if self.is_unlocked(n) {
self.lock(n);
true
} else {
false
}
});
} }
pub fn aclass_index(&self, region: Nid) -> (usize, Nid) { pub fn aclass_index(&self, region: Nid) -> (usize, Nid) {
@ -1175,15 +1157,10 @@ impl Nodes {
continue; continue;
} }
let mut broken = false; if let Some(&load) =
for o in self[n].outputs.clone() { self[n].outputs.iter().find(|&&n| self[n].kind == Kind::Load)
if o != target && !matches!(self[o].kind, Kind::Return { .. }) { {
self.add_trigger(o, target); self.add_trigger(load, target);
broken = true;
}
}
if broken {
new_inps.push(n);
continue; continue;
} }
@ -1333,9 +1310,9 @@ impl Nodes {
cursor = next_store; cursor = next_store;
} }
'forward_store: { 'eliminate: {
if self[target].outputs.is_empty() { if self[target].outputs.is_empty() {
break 'forward_store; break 'eliminate;
} }
if self[value].kind != Kind::Load if self[value].kind != Kind::Load
@ -1344,121 +1321,106 @@ impl Nodes {
for &ele in self[value].outputs.clone().iter().filter(|&&n| n != target) { for &ele in self[value].outputs.clone().iter().filter(|&&n| n != target) {
self.add_trigger(ele, target); self.add_trigger(ele, target);
} }
break 'forward_store; break 'eliminate;
} }
let &[_, stack, last_store] = self[value].inputs.as_slice() else { let &[_, stack, last_store] = self[value].inputs.as_slice() else {
unreachable!() unreachable!()
}; };
// TODO: count other loads to determine whether this transformation is worth it
// might be overly restrictive
// but for now, just check we are copying the full stack allocation
if self[stack].ty != self[value].ty || self[stack].kind != Kind::Stck { if self[stack].ty != self[value].ty || self[stack].kind != Kind::Stck {
break 'forward_store; break 'eliminate;
} }
// pessimistic let mut unidentifed = self[stack].outputs.clone();
// allocation is most likely used in a loop or something so we can't get rid of it let load_idx = unidentifed.iter().position(|&n| n == value).unwrap();
if last_store != MEM unidentifed.swap_remove(load_idx);
&& self[last_store]
.outputs
.iter()
.any(|&n| !matches!(self[n].kind, Kind::Load | Kind::Return { .. }))
{
break 'forward_store;
}
let mut store_count = 0; let mut saved = Vc::default();
let [mut cursor, mut first_store] = [last_store; 2]; let mut cursor = last_store;
while cursor != MEM { let mut first_store = last_store;
debug_assert_eq!(self[cursor].kind, Kind::Stre); while cursor != MEM && self[cursor].kind == Kind::Stre {
let mut contact_point = cursor;
// pessimistic let mut region = self[cursor].inputs[2];
// the offset must only be used for this store if let Kind::BinOp { op } = self[region].kind {
if self[cursor].inputs[2] != stack debug_assert_matches!(op, TokenKind::Add | TokenKind::Sub);
&& self[self[cursor].inputs[2]].outputs.as_slice() != [cursor] contact_point = region;
{ region = self[region].inputs[1]
break 'forward_store;
} }
// pessimistic if region != stack {
// we load from the store, this might be because the load spans multiple break;
// stores
if self[cursor].inputs[3] != MEM
&& self[self[cursor].inputs[3]].outputs.as_slice() != [cursor]
{
break 'forward_store;
} }
let Some(index) = unidentifed.iter().position(|&n| n == contact_point)
else {
break 'eliminate;
};
if self[self[cursor].inputs[1]].kind == Kind::Load if self[self[cursor].inputs[1]].kind == Kind::Load
&& self[value].outputs.iter().any(|&n| { && self[value].outputs.iter().any(|&n| {
self.aclass_index(self[self[cursor].inputs[1]].inputs[1]).0 self.aclass_index(self[self[cursor].inputs[1]].inputs[1]).0
== self.aclass_index(self[n].inputs[2]).0 == self.aclass_index(self[n].inputs[2]).0
}) })
{ {
break 'forward_store; break 'eliminate;
} }
unidentifed.remove(index);
saved.push(contact_point);
first_store = cursor; first_store = cursor;
cursor = self[cursor].inputs[3]; cursor = *self[cursor].inputs.get(3).unwrap_or(&MEM);
store_count += 1;
if unidentifed.is_empty() {
break;
}
} }
if store_count + 1 != self[stack].outputs.len() { if !unidentifed.is_empty() {
debug_assert!(store_count + 1 < self[stack].outputs.len()); break 'eliminate;
break 'forward_store;
} }
// at this point we know the stack was initialized just to be moved into a debug_assert_matches!(
// different location so create new stores that store directly to the self[last_store].kind,
// destination and remove the final load from this stack, that should cause Kind::Stre | Kind::Mem,
// this stack allocation to be eliminated "{:?}",
self[last_store]
);
debug_assert_matches!(
self[first_store].kind,
Kind::Stre | Kind::Mem,
"{:?}",
self[first_store]
);
let mut base_store = store; // FIXME: when the loads and stores become parallel we will need to get saved
if first_store != MEM { // differently
debug_assert_ne!(last_store, MEM); let mut prev_store = store;
let mut cursor = first_store; for mut oper in saved.into_iter().rev() {
loop { let mut region = region;
let mut inps = self[cursor].inputs.clone(); if let Kind::BinOp { op } = self[oper].kind {
inps[2] = if inps[2] == stack { debug_assert_eq!(self[oper].outputs.len(), 1);
region debug_assert_eq!(self[self[oper].outputs[0]].kind, Kind::Stre);
} else { let new_region = self.new_node(
let new_region = self.new_node( self[oper].ty,
self[inps[2]].ty, Kind::BinOp { op },
self[inps[2]].kind, [VOID, region, self[oper].inputs[2]],
[VOID, region, self[inps[2]].inputs[2]], tys,
tys, );
); self.pass_aclass(self.aclass_index(region).1, new_region);
self.pass_aclass(self.aclass_index(region).1, new_region); region = new_region;
new_region oper = self[oper].outputs[0];
};
inps[3] = base_store;
base_store = self.new_node(self[cursor].ty, Kind::Stre, inps, tys);
if self.is_unlocked(base_store) {
self.lock(base_store);
self.queued_peeps.push(base_store);
}
if cursor == last_store {
break;
}
cursor = self[cursor].outputs[0];
} }
for o in self[last_store].outputs.clone() { let mut inps = self[oper].inputs.clone();
if matches!(self[o].kind, Kind::Return { .. }) && self.is_unlocked(o) { debug_assert_eq!(inps.len(), 4);
self.queued_peeps.push(o); inps[2] = region;
} inps[3] = prev_store;
prev_store = self.new_node_nop(self[oper].ty, Kind::Stre, inps);
if self.is_unlocked(prev_store) {
self.lock(prev_store);
self.queued_peeps.push(prev_store);
} }
} else {
debug_assert_eq!(last_store, MEM);
} }
return Some(base_store); return Some(prev_store);
} }
if let Some(&load) = if let Some(&load) =
@ -1567,6 +1529,12 @@ impl Nodes {
self.remove(prev); self.remove(prev);
self.unlock(o); self.unlock(o);
for o in self[o].outputs.clone() {
if self.is_unlocked(o) {
self.lock(o);
self.queued_peeps.push(o);
}
}
self.replace(o, self[o].inputs[1]); self.replace(o, self[o].inputs[1]);
} }
} }
@ -1597,7 +1565,6 @@ impl Nodes {
K::Start => {} K::Start => {}
_ if self.is_cfg(target) && self.idom(target, None) == NEVER => panic!(), _ if self.is_cfg(target) && self.idom(target, None) == NEVER => panic!(),
K::Entry K::Entry
| K::RetVal
| K::Mem | K::Mem
| K::Loops | K::Loops
| K::End | K::End
@ -1671,7 +1638,6 @@ impl Nodes {
} }
pub fn replace(&mut self, target: Nid, with: Nid) { pub fn replace(&mut self, target: Nid, with: Nid) {
self.patch_aclass(target, with);
debug_assert_ne!(target, with, "{:?}", self[target]); debug_assert_ne!(target, with, "{:?}", self[target]);
for out in self[target].outputs.clone() { for out in self[target].outputs.clone() {
let index = self[out].inputs.iter().position(|&p| p == target).unwrap(); let index = self[out].inputs.iter().position(|&p| p == target).unwrap();
@ -1748,7 +1714,7 @@ impl Nodes {
Kind::BinOp { op } | Kind::UnOp { op } => { Kind::BinOp { op } | Kind::UnOp { op } => {
write!(out, "{:>4}: ", op.name()) write!(out, "{:>4}: ", op.name())
} }
Kind::Call { func, args: _, unreachable: _ } => { Kind::Call { func, args: _ } => {
write!(out, "call: {func} {} ", self[node].depth.get()) write!(out, "call: {func} {} ", self[node].depth.get())
} }
Kind::Global { global } => write!(out, "glob: {global:<5}"), Kind::Global { global } => write!(out, "glob: {global:<5}"),
@ -1761,7 +1727,6 @@ impl Nodes {
Kind::Mem => write!(out, " mem: "), Kind::Mem => write!(out, " mem: "),
Kind::Loops => write!(out, "loops: "), Kind::Loops => write!(out, "loops: "),
Kind::Join => write!(out, "join: "), Kind::Join => write!(out, "join: "),
Kind::RetVal => write!(out, "rval: "),
}?; }?;
if self[node].kind != Kind::Loop && self[node].kind != Kind::Region { if self[node].kind != Kind::Loop && self[node].kind != Kind::Region {
@ -2016,25 +1981,6 @@ impl Nodes {
self[blocker].peep_triggers.push(target); self[blocker].peep_triggers.push(target);
} }
} }
fn patch_aclass(&mut self, target: Nid, with: Nid) {
let (_, region) = self.aclass_index(target);
if region == 0 {
return;
}
fn patch_aclass_inner(s: &mut Nodes, root: Nid, with: Nid, matches: Nid) {
for out in s[root].outputs.clone() {
let (_, region) = s.aclass_index(out);
if region == matches {
s.pass_aclass(with, out);
patch_aclass_inner(s, out, with, matches);
}
}
}
patch_aclass_inner(self, target, with, target);
}
} }
impl ops::Index<Nid> for Nodes { impl ops::Index<Nid> for Nodes {
@ -2104,7 +2050,8 @@ impl Node {
} }
pub fn has_no_value(&self) -> bool { pub fn has_no_value(&self) -> bool {
self.kind.is_cfg() || matches!(self.kind, Kind::Stre) (self.kind.is_cfg() && (!self.kind.is_call() || self.ty == ty::Id::VOID))
|| matches!(self.kind, Kind::Stre)
} }
} }
@ -2139,12 +2086,6 @@ pub enum Kind {
Return { Return {
file: ty::Module, file: ty::Module,
}, },
// [ctrl, ...args]
Call {
unreachable: bool,
func: ty::Func,
args: ty::List,
},
// [ctrl] // [ctrl]
Die, Die,
// [ctrl] // [ctrl]
@ -2166,7 +2107,11 @@ pub enum Kind {
Global { Global {
global: ty::Global, global: ty::Global,
}, },
RetVal, // [ctrl, ...args]
Call {
func: ty::Func,
args: ty::Tuple,
},
// [ctrl, cond, value] // [ctrl, cond, value]
Assert { Assert {
kind: AssertKind, kind: AssertKind,
@ -2192,9 +2137,7 @@ impl Kind {
} }
fn is_pinned(&self) -> bool { fn is_pinned(&self) -> bool {
self.is_cfg() self.is_cfg() || self.is_at_start() || matches!(self, Self::Phi | Kind::Assert { .. })
|| self.is_at_start()
|| matches!(self, Self::Phi | Self::Assert { .. } | Self::RetVal)
} }
fn is_at_start(&self) -> bool { fn is_at_start(&self) -> bool {
@ -2220,7 +2163,6 @@ impl Kind {
fn ends_basic_block(&self) -> bool { fn ends_basic_block(&self) -> bool {
matches!(self, Self::Return { .. } | Self::If | Self::End | Self::Die) matches!(self, Self::Return { .. } | Self::If | Self::End | Self::Die)
|| matches!(self, Kind::Call { unreachable: true, .. })
} }
pub fn starts_basic_block(&self) -> bool { pub fn starts_basic_block(&self) -> bool {
@ -2246,7 +2188,6 @@ impl fmt::Display for Kind {
} }
} }
#[derive(Debug)]
pub enum CondOptRes { pub enum CondOptRes {
Unknown, Unknown,
Known { value: bool, pin: Option<Nid> }, Known { value: bool, pin: Option<Nid> },

View file

@ -31,7 +31,7 @@ pub enum FileKind {
Embed, Embed,
} }
pub trait Trans { trait Trans {
fn trans(self) -> Self; fn trans(self) -> Self;
} }
@ -80,7 +80,6 @@ struct ScopeIdent {
declared: bool, declared: bool,
ordered: bool, ordered: bool,
used: bool, used: bool,
is_ct: bool,
flags: IdentFlags, flags: IdentFlags,
} }
@ -197,8 +196,8 @@ impl<'a, 'b> Parser<'a, 'b> {
fn declare_rec(&mut self, expr: &Expr, top_level: bool) { fn declare_rec(&mut self, expr: &Expr, top_level: bool) {
match *expr { match *expr {
Expr::Ident { pos, id, is_first, is_ct, .. } => { Expr::Ident { pos, id, is_first, .. } => {
self.declare(pos, id, !top_level, is_first || top_level, is_ct) self.declare(pos, id, !top_level, is_first || top_level)
} }
Expr::Ctor { fields, .. } => { Expr::Ctor { fields, .. } => {
for CtorField { value, .. } in fields { for CtorField { value, .. } in fields {
@ -209,7 +208,7 @@ impl<'a, 'b> Parser<'a, 'b> {
} }
} }
fn declare(&mut self, pos: Pos, id: Ident, ordered: bool, valid_order: bool, is_ct: bool) { fn declare(&mut self, pos: Pos, id: Ident, ordered: bool, valid_order: bool) {
if !valid_order { if !valid_order {
self.report( self.report(
pos, pos,
@ -231,7 +230,7 @@ impl<'a, 'b> Parser<'a, 'b> {
); );
return; return;
} }
self.ctx.idents[index].is_ct = is_ct;
self.ctx.idents[index].ordered = ordered; self.ctx.idents[index].ordered = ordered;
} }
@ -257,11 +256,7 @@ impl<'a, 'b> Parser<'a, 'b> {
None => { None => {
let ident = match Ident::new(token.start, name.len() as _) { let ident = match Ident::new(token.start, name.len() as _) {
None => { None => {
self.report( self.report(token.start, "identifier can at most have 64 characters");
token.start,
"identifier can at most have 63 characters, \
the code is too clean to efficiently represent in memory",
);
Ident::new(token.start, 63).unwrap() Ident::new(token.start, 63).unwrap()
} }
Some(id) => id, Some(id) => id,
@ -272,7 +267,6 @@ impl<'a, 'b> Parser<'a, 'b> {
declared: false, declared: false,
used: false, used: false,
ordered: false, ordered: false,
is_ct: false,
flags: 0, flags: 0,
}); });
(self.ctx.idents.len() - 1, self.ctx.idents.last_mut().unwrap(), true) (self.ctx.idents.len() - 1, self.ctx.idents.last_mut().unwrap(), true)
@ -282,7 +276,7 @@ impl<'a, 'b> Parser<'a, 'b> {
id.flags |= idfl::COMPTIME * is_ct as u32; id.flags |= idfl::COMPTIME * is_ct as u32;
if id.declared && id.ordered && self.ns_bound > i { if id.declared && id.ordered && self.ns_bound > i {
id.flags |= idfl::COMPTIME; id.flags |= idfl::COMPTIME;
self.ctx.captured.push(CapturedIdent { id: id.ident, is_ct: id.is_ct }); self.ctx.captured.push(id.ident);
} }
(id.ident, bl) (id.ident, bl)
@ -293,10 +287,6 @@ impl<'a, 'b> Parser<'a, 'b> {
} }
fn unit_expr(&mut self) -> Option<Expr<'a>> { fn unit_expr(&mut self) -> Option<Expr<'a>> {
self.unit_expr_low(true)
}
fn unit_expr_low(&mut self, eat_tail: bool) -> Option<Expr<'a>> {
use {Expr as E, TokenKind as T}; use {Expr as E, TokenKind as T};
if matches!( if matches!(
@ -312,6 +302,7 @@ impl<'a, 'b> Parser<'a, 'b> {
let prev_captured = self.ctx.captured.len(); let prev_captured = self.ctx.captured.len();
let mut must_trail = false; let mut must_trail = false;
let mut expr = match token.kind { let mut expr = match token.kind {
T::Ct => E::Ct { pos, value: self.ptr_expr()? },
T::Defer => E::Defer { pos, value: self.ptr_expr()? }, T::Defer => E::Defer { pos, value: self.ptr_expr()? },
T::Slf => E::Slf { pos }, T::Slf => E::Slf { pos },
T::Directive if self.lexer.slice(token.range()) == "use" => { T::Directive if self.lexer.slice(token.range()) == "use" => {
@ -387,15 +378,10 @@ impl<'a, 'b> Parser<'a, 'b> {
} }
let name = s.expect_advance(T::Ident)?; let name = s.expect_advance(T::Ident)?;
s.expect_advance(T::Colon)?; s.expect_advance(T::Colon)?;
let (ty, default_value) = match s.expr()? {
Expr::BinOp { left, op: T::Assign, right, .. } => (*left, Some(*right)),
ty => (ty, None),
};
Some(Some(StructField { Some(Some(StructField {
pos: name.start, pos: name.start,
name: s.tok_str(name), name: s.tok_str(name),
ty, ty: s.expr()?,
default_value,
})) }))
})?, })?,
captured: self.collect_captures(prev_boundary, prev_captured), captured: self.collect_captures(prev_boundary, prev_captured),
@ -409,7 +395,11 @@ impl<'a, 'b> Parser<'a, 'b> {
} }
let name = s.expect_advance(T::Ident)?; let name = s.expect_advance(T::Ident)?;
s.expect_advance(T::Colon)?; s.expect_advance(T::Colon)?;
Some(Some(UnionField { pos: name.start, name: s.tok_str(name), ty: s.expr()? })) Some(Some(StructField {
pos: name.start,
name: s.tok_str(name),
ty: s.expr()?,
}))
})?, })?,
captured: self.collect_captures(prev_boundary, prev_captured), captured: self.collect_captures(prev_boundary, prev_captured),
trailing_comma: core::mem::take(&mut self.trailing_sep) || must_trail, trailing_comma: core::mem::take(&mut self.trailing_sep) || must_trail,
@ -477,7 +467,7 @@ impl<'a, 'b> Parser<'a, 'b> {
self.collect_list(T::Comma, T::RParen, |s| { self.collect_list(T::Comma, T::RParen, |s| {
let name = s.advance_ident()?; let name = s.advance_ident()?;
let (id, _) = s.resolve_ident(name); let (id, _) = s.resolve_ident(name);
s.declare(name.start, id, true, true, name.kind == T::CtIdent); s.declare(name.start, id, true, true);
s.expect_advance(T::Colon)?; s.expect_advance(T::Colon)?;
Some(Arg { Some(Arg {
pos: name.start, pos: name.start,
@ -495,20 +485,14 @@ impl<'a, 'b> Parser<'a, 'b> {
body: self.ptr_expr()?, body: self.ptr_expr()?,
}, },
T::Ctor => self.ctor(pos, None), T::Ctor => self.ctor(pos, None),
T::Tupl => self.tupl(pos, None, ListKind::Tuple), T::Tupl => self.tupl(pos, None),
T::Arr => self.tupl(pos, None, ListKind::Array),
T::LBrack => E::Slice { T::LBrack => E::Slice {
size: { item: self.ptr_unit_expr()?,
if self.advance_if(T::RBrack) { size: self.advance_if(T::Semi).then(|| self.ptr_expr()).trans()?,
None pos: {
} else { self.expect_advance(T::RBrack)?;
let adv = self.ptr_expr()?; pos
self.expect_advance(T::RBrack)?;
Some(adv)
}
}, },
item: self.arena.alloc(self.unit_expr_low(false)?),
pos,
}, },
T::Band | T::Mul | T::Xor | T::Sub | T::Que | T::Not | T::Dot => E::UnOp { T::Band | T::Mul | T::Xor | T::Sub | T::Que | T::Not | T::Dot => E::UnOp {
pos, pos,
@ -560,84 +544,37 @@ impl<'a, 'b> Parser<'a, 'b> {
tok => self.report(token.start, format_args!("unexpected token: {tok}"))?, tok => self.report(token.start, format_args!("unexpected token: {tok}"))?,
}; };
if eat_tail { loop {
loop { let token = self.token;
let token = self.token; if matches!(token.kind, T::LParen | T::Ctor | T::Dot | T::Tupl | T::LBrack) {
if matches!( self.next();
token.kind, }
T::LParen | T::Ctor | T::Dot | T::Tupl | T::Arr | T::LBrack | T::Colon
) {
self.next();
}
expr = match token.kind { expr = match token.kind {
T::LParen => Expr::Call { T::LParen => Expr::Call {
func: self.arena.alloc(expr), func: self.arena.alloc(expr),
args: self.collect_list(T::Comma, T::RParen, Self::expr), args: self.collect_list(T::Comma, T::RParen, Self::expr),
trailing_comma: core::mem::take(&mut self.trailing_sep), trailing_comma: core::mem::take(&mut self.trailing_sep),
},
T::Ctor => self.ctor(token.start, Some(expr)),
T::Tupl => self.tupl(token.start, Some(expr)),
T::LBrack => E::Index {
base: self.arena.alloc(expr),
index: {
let index = self.expr()?;
self.expect_advance(T::RBrack)?;
self.arena.alloc(index)
}, },
T::Ctor => self.ctor(token.start, Some(expr)), },
T::Tupl => self.tupl(token.start, Some(expr), ListKind::Tuple), T::Dot => E::Field {
T::Arr => self.tupl(token.start, Some(expr), ListKind::Array), target: self.arena.alloc(expr),
T::LBrack => E::Index { pos: token.start,
base: self.arena.alloc(expr), name: {
index: self.arena.alloc({ let token = self.expect_advance(T::Ident)?;
if self.advance_if(T::Range) { self.tok_str(token)
let pos = self.token.start;
if self.advance_if(T::RBrack) {
Expr::Range { pos, start: None, end: None }
} else {
let res = Expr::Range {
pos,
start: None,
end: Some(self.ptr_expr()?),
};
self.expect_advance(T::RBrack)?;
res
}
} else {
let start = self.expr()?;
let pos = self.token.start;
if self.advance_if(T::Range) {
let start = self.arena.alloc(start);
if self.advance_if(T::RBrack) {
Expr::Range { pos, start: Some(start), end: None }
} else {
let res = Expr::Range {
pos,
start: Some(start),
end: Some(self.ptr_expr()?),
};
self.expect_advance(T::RBrack)?;
res
}
} else {
self.expect_advance(T::RBrack)?;
start
}
}
}),
}, },
T::Colon => E::BinOp { },
left: { _ => break,
self.declare_rec(&expr, false);
self.arena.alloc(expr)
},
pos,
op: T::Colon,
right: self.ptr_expr()?,
},
T::Dot => E::Field {
target: self.arena.alloc(expr),
pos: token.start,
name: {
let token = self.expect_advance(T::Ident)?;
self.tok_str(token)
},
},
_ => break,
}
} }
} }
@ -648,12 +585,11 @@ impl<'a, 'b> Parser<'a, 'b> {
Some(expr) Some(expr)
} }
fn tupl(&mut self, pos: Pos, ty: Option<Expr<'a>>, kind: ListKind) -> Expr<'a> { fn tupl(&mut self, pos: Pos, ty: Option<Expr<'a>>) -> Expr<'a> {
Expr::List { Expr::Tupl {
pos, pos,
kind,
ty: ty.map(|ty| self.arena.alloc(ty)), ty: ty.map(|ty| self.arena.alloc(ty)),
fields: self.collect_list(TokenKind::Comma, kind.term(), Self::expr), fields: self.collect_list(TokenKind::Comma, TokenKind::RParen, Self::expr),
trailing_comma: core::mem::take(&mut self.trailing_sep), trailing_comma: core::mem::take(&mut self.trailing_sep),
} }
} }
@ -703,11 +639,7 @@ impl<'a, 'b> Parser<'a, 'b> {
})) }))
} }
fn collect_captures( fn collect_captures(&mut self, prev_captured: usize, prev_boundary: usize) -> &'a [Ident] {
&mut self,
prev_captured: usize,
prev_boundary: usize,
) -> &'a [CapturedIdent] {
self.ns_bound = prev_boundary; self.ns_bound = prev_boundary;
let captured = &mut self.ctx.captured[prev_captured..]; let captured = &mut self.ctx.captured[prev_captured..];
crate::quad_sort(captured, core::cmp::Ord::cmp); crate::quad_sort(captured, core::cmp::Ord::cmp);
@ -772,9 +704,7 @@ impl<'a, 'b> Parser<'a, 'b> {
) -> &'a [T] { ) -> &'a [T] {
let mut trailing_sep = false; let mut trailing_sep = false;
let mut view = self.ctx.stack.view(); let mut view = self.ctx.stack.view();
'o: while (keep_end && self.token.kind != end) 'o: while (keep_end && self.token.kind != end) || (!keep_end && !self.advance_if(end)) {
|| (!keep_end && !self.advance_if(end)) && self.token.kind != TokenKind::Eof
{
let val = match f(self) { let val = match f(self) {
Some(val) => val, Some(val) => val,
None => { None => {
@ -847,9 +777,6 @@ impl<'a, 'b> Parser<'a, 'b> {
#[track_caller] #[track_caller]
fn report(&mut self, pos: Pos, msg: impl fmt::Display) -> Option<!> { fn report(&mut self, pos: Pos, msg: impl fmt::Display) -> Option<!> {
if log::log_enabled!(log::Level::Error) { if log::log_enabled!(log::Level::Error) {
if self.ctx.errors.get_mut().len() > 1024 * 10 {
panic!("{}", self.ctx.errors.get_mut());
}
use core::fmt::Write; use core::fmt::Write;
writeln!( writeln!(
self.ctx.errors.get_mut(), self.ctx.errors.get_mut(),
@ -863,19 +790,15 @@ impl<'a, 'b> Parser<'a, 'b> {
fn flag_idents(&mut self, e: Expr<'a>, flags: IdentFlags) { fn flag_idents(&mut self, e: Expr<'a>, flags: IdentFlags) {
match e { match e {
Expr::Ident { id, .. } => { Expr::Ident { id, .. } => find_ident(&mut self.ctx.idents, id).flags |= flags,
if let Some(f) = find_ident(&mut self.ctx.idents, id) {
f.flags |= flags;
}
}
Expr::Field { target, .. } => self.flag_idents(*target, flags), Expr::Field { target, .. } => self.flag_idents(*target, flags),
_ => {} _ => {}
} }
} }
} }
fn find_ident(idents: &mut [ScopeIdent], id: Ident) -> Option<&mut ScopeIdent> { fn find_ident(idents: &mut [ScopeIdent], id: Ident) -> &mut ScopeIdent {
idents.binary_search_by_key(&id, |si| si.ident).map(|i| &mut idents[i]).ok() idents.binary_search_by_key(&id, |si| si.ident).map(|i| &mut idents[i]).unwrap()
} }
pub fn find_symbol(symbols: &[Symbol], id: Ident) -> &Symbol { pub fn find_symbol(symbols: &[Symbol], id: Ident) -> &Symbol {
@ -956,6 +879,11 @@ generate_expr! {
/// `OP := grep for `#define OP:` /// `OP := grep for `#define OP:`
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Expr<'a> { pub enum Expr<'a> {
/// `'ct' Expr`
Ct {
pos: Pos,
value: &'a Self,
},
/// `'defer' Expr` /// `'defer' Expr`
Defer { Defer {
pos: Pos, pos: Pos,
@ -1070,22 +998,22 @@ generate_expr! {
Struct { Struct {
pos: Pos, pos: Pos,
fields: FieldList<'a, StructField<'a>>, fields: FieldList<'a, StructField<'a>>,
captured: &'a [CapturedIdent], captured: &'a [Ident],
trailing_comma: bool, trailing_comma: bool,
packed: bool, packed: bool,
}, },
/// `'union' LIST('{', ',', '}', Ident ':' Expr)` /// `'union' LIST('{', ',', '}', Ident ':' Expr)`
Union { Union {
pos: Pos, pos: Pos,
fields: FieldList<'a, UnionField<'a>>, fields: FieldList<'a, StructField<'a>>,
captured: &'a [CapturedIdent], captured: &'a [Ident],
trailing_comma: bool, trailing_comma: bool,
}, },
/// `'enum' LIST('{', ',', '}', Ident)` /// `'enum' LIST('{', ',', '}', Ident)`
Enum { Enum {
pos: Pos, pos: Pos,
variants: FieldList<'a, EnumField<'a>>, variants: FieldList<'a, EnumField<'a>>,
captured: &'a [CapturedIdent], captured: &'a [Ident],
trailing_comma: bool, trailing_comma: bool,
}, },
/// `[Expr] LIST('.{', ',', '}', Ident [':' Expr])` /// `[Expr] LIST('.{', ',', '}', Ident [':' Expr])`
@ -1096,9 +1024,8 @@ generate_expr! {
trailing_comma: bool, trailing_comma: bool,
}, },
/// `[Expr] LIST('.(', ',', ')', Ident [':' Expr])` /// `[Expr] LIST('.(', ',', ')', Ident [':' Expr])`
List { Tupl {
pos: Pos, pos: Pos,
kind: ListKind,
ty: Option<&'a Self>, ty: Option<&'a Self>,
fields: &'a [Self], fields: &'a [Self],
trailing_comma: bool, trailing_comma: bool,
@ -1114,12 +1041,6 @@ generate_expr! {
base: &'a Self, base: &'a Self,
index: &'a Self, index: &'a Self,
}, },
/// `[ Expr ] .. [ Expr ]`
Range {
pos: u32,
start: Option<&'a Self>,
end: Option<&'a Self>,
},
/// `Expr '.' Ident` /// `Expr '.' Ident`
Field { Field {
target: &'a Self, target: &'a Self,
@ -1165,26 +1086,6 @@ generate_expr! {
} }
} }
#[derive(Clone, Copy, PartialEq, Eq, Debug, PartialOrd, Ord)]
pub struct CapturedIdent {
pub id: Ident,
pub is_ct: bool,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ListKind {
Tuple,
Array,
}
impl ListKind {
fn term(self) -> TokenKind {
match self {
ListKind::Tuple => TokenKind::RParen,
ListKind::Array => TokenKind::RBrack,
}
}
}
impl Expr<'_> { impl Expr<'_> {
pub fn declares(&self, iden: DeclId, source: &str) -> Option<Ident> { pub fn declares(&self, iden: DeclId, source: &str) -> Option<Ident> {
match *self { match *self {
@ -1257,25 +1158,11 @@ impl Poser for EnumField<'_> {
} }
} }
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct UnionField<'a> {
pub pos: Pos,
pub name: &'a str,
pub ty: Expr<'a>,
}
impl Poser for UnionField<'_> {
fn posi(&self) -> Pos {
self.pos
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)] #[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct StructField<'a> { pub struct StructField<'a> {
pub pos: Pos, pub pos: Pos,
pub name: &'a str, pub name: &'a str,
pub ty: Expr<'a>, pub ty: Expr<'a>,
pub default_value: Option<Expr<'a>>,
} }
impl Poser for StructField<'_> { impl Poser for StructField<'_> {
@ -1340,9 +1227,9 @@ pub enum CommentOr<'a, T> {
Comment { literal: &'a str, pos: Pos }, Comment { literal: &'a str, pos: Pos },
} }
impl<T> CommentOr<'_, T> { impl<T: Copy> CommentOr<'_, T> {
pub fn or(&self) -> Option<&T> { pub fn or(&self) -> Option<T> {
match self { match *self {
CommentOr::Or(v) => Some(v), CommentOr::Or(v) => Some(v),
CommentOr::Comment { .. } => None, CommentOr::Comment { .. } => None,
} }
@ -1373,7 +1260,7 @@ pub struct Ctx {
symbols: Vec<Symbol>, symbols: Vec<Symbol>,
stack: StackAlloc, stack: StackAlloc,
idents: Vec<ScopeIdent>, idents: Vec<ScopeIdent>,
captured: Vec<CapturedIdent>, captured: Vec<Ident>,
} }
impl Ctx { impl Ctx {
@ -1465,7 +1352,10 @@ impl<D: core::fmt::Display> core::fmt::Display for Report<'_, D> {
fn report_to(file: &str, path: &str, pos: Pos, msg: &dyn fmt::Display, out: &mut impl fmt::Write) { fn report_to(file: &str, path: &str, pos: Pos, msg: &dyn fmt::Display, out: &mut impl fmt::Write) {
let (line, mut col) = lexer::line_col(file.as_bytes(), pos); let (line, mut col) = lexer::line_col(file.as_bytes(), pos);
let disp = crate::display_rel_path(path); #[cfg(feature = "std")]
let disp = crate::fs::display_rel_path(path);
#[cfg(not(feature = "std"))]
let disp = path;
_ = writeln!(out, "{}:{}:{}: {}", disp, line, col, msg); _ = writeln!(out, "{}:{}:{}: {}", disp, line, col, msg);
let line = &file[file[..pos as usize].rfind('\n').map_or(0, |i| i + 1) let line = &file[file[..pos as usize].rfind('\n').map_or(0, |i| i + 1)
@ -1524,7 +1414,7 @@ pub fn find_decl<'a>(
id: DeclId, id: DeclId,
) -> Option<(&'a Expr<'a>, Ident)> { ) -> Option<(&'a Expr<'a>, Ident)> {
exprs.iter().find_map(|expr| match expr { exprs.iter().find_map(|expr| match expr {
Expr::BinOp { left, op: TokenKind::Decl | TokenKind::Colon, .. } => { Expr::BinOp { left, op: TokenKind::Decl, .. } => {
left.declares(id, file).map(|id| (expr, id)) left.declares(id, file).map(|id| (expr, id))
} }
_ => None, _ => None,

File diff suppressed because it is too large

View file

@ -2,7 +2,7 @@ use {
crate::{ crate::{
ctx_map, ctx_map,
lexer::TokenKind, lexer::TokenKind,
parser::{self, CapturedIdent, CommentOr, Expr, ExprRef, Pos}, parser::{self, CommentOr, Expr, ExprRef, Pos},
utils::{self, Ent, EntSlice, EntVec}, utils::{self, Ent, EntSlice, EntVec},
Ident, Ident,
}, },
@ -38,9 +38,9 @@ pub type Offset = u32;
pub type Size = u32; pub type Size = u32;
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Default, PartialOrd, Ord)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Default, PartialOrd, Ord)]
pub struct List(pub u32); pub struct Tuple(pub u32);
impl List { impl Tuple {
const LEN_BITS: u32 = 5; const LEN_BITS: u32 = 5;
const LEN_MASK: usize = Self::MAX_LEN - 1; const LEN_MASK: usize = Self::MAX_LEN - 1;
const MAX_LEN: usize = 1 << Self::LEN_BITS; const MAX_LEN: usize = 1 << Self::LEN_BITS;
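
The List/Tuple index above packs a (start, length) pair for a slice of the shared args arena into a single u32: judging by LEN_MASK = MAX_LEN - 1, the low LEN_BITS bits presumably carry the length and the remaining bits the start. A standalone sketch under that assumption; the type and method names here are illustrative, not the compiler's exact API:

```rust
use std::ops::Range;

// Pack a (start, len) range into one u32: low LEN_BITS bits hold the
// length, the remaining bits hold the start index.
#[derive(Clone, Copy, Debug)]
struct Range32(u32);

impl Range32 {
    const LEN_BITS: u32 = 5;
    const MAX_LEN: usize = 1 << Self::LEN_BITS;
    const LEN_MASK: usize = Self::MAX_LEN - 1;

    fn new(pos: usize, len: usize) -> Option<Self> {
        if len >= Self::MAX_LEN {
            return None; // does not fit in the length field
        }
        Some(Self(((pos << Self::LEN_BITS) | len) as u32))
    }

    fn len(self) -> usize {
        self.0 as usize & Self::LEN_MASK
    }

    fn range(self) -> Range<usize> {
        let start = self.0 as usize >> Self::LEN_BITS;
        start..start + self.len()
    }
}

fn main() {
    let args = ["u8", "uint", "^u8"];
    let r = Range32::new(0, args.len()).unwrap();
    assert_eq!(r.len(), 3);
    assert_eq!(r.range(), 0..3);
    assert_eq!(&args[r.range()], &args[..]);
    println!("packed as {:#x}", r.0);
}
```
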
@ -83,7 +83,7 @@ pub enum Arg {
} }
impl ArgIter { impl ArgIter {
pub fn next(&mut self, tys: &Types) -> Option<Arg> { pub(crate) fn next(&mut self, tys: &Types) -> Option<Arg> {
let ty = tys.ins.args[self.0.next()?]; let ty = tys.ins.args[self.0.next()?];
if ty == Id::TYPE { if ty == Id::TYPE {
return Some(Arg::Type(tys.ins.args[self.0.next().unwrap()])); return Some(Arg::Type(tys.ins.args[self.0.next().unwrap()]));
@ -91,7 +91,7 @@ impl ArgIter {
Some(Arg::Value(ty)) Some(Arg::Value(ty))
} }
pub fn next_value(&mut self, tys: &Types) -> Option<Id> { pub(crate) fn next_value(&mut self, tys: &Types) -> Option<Id> {
loop { loop {
match self.next(tys)? { match self.next(tys)? {
Arg::Type(_) => continue, Arg::Type(_) => continue,
@ -104,12 +104,6 @@ impl ArgIter {
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
pub struct Id(NonZeroU32); pub struct Id(NonZeroU32);
impl AsRef<Id> for Id {
fn as_ref(&self) -> &Id {
self
}
}
impl From<Id> for i64 { impl From<Id> for i64 {
fn from(value: Id) -> Self { fn from(value: Id) -> Self {
value.0.get() as _ value.0.get() as _
@ -143,8 +137,6 @@ impl crate::ctx_map::CtxEntry for Id {
let fc = &ctx.funcs[f]; let fc = &ctx.funcs[f];
if fc.is_generic { if fc.is_generic {
SymKey::Type(fc.parent, fc.pos, fc.sig.args) SymKey::Type(fc.parent, fc.pos, fc.sig.args)
} else if fc.is_import {
SymKey::Import(fc.parent, fc.name)
} else { } else {
SymKey::Decl(fc.parent, fc.name) SymKey::Decl(fc.parent, fc.name)
} }
@ -158,7 +150,6 @@ impl crate::ctx_map::CtxEntry for Id {
SymKey::Decl(gb.file.into(), gb.name) SymKey::Decl(gb.file.into(), gb.name)
} }
Kind::Slice(s) => SymKey::Array(&ctx.slices[s]), Kind::Slice(s) => SymKey::Array(&ctx.slices[s]),
Kind::Tuple(t) => SymKey::Tuple(ctx.tuples[t].fields),
Kind::Module(_) | Kind::Builtin(_) => { Kind::Module(_) | Kind::Builtin(_) => {
SymKey::Decl(Module::default().into(), Ident::INVALID) SymKey::Decl(Module::default().into(), Ident::INVALID)
} }
@ -193,13 +184,11 @@ impl Id {
} }
pub fn is_unsigned(self) -> bool { pub fn is_unsigned(self) -> bool {
matches!(self.repr(), U8..=UINT) matches!(self.repr(), U8..=UINT) || self.is_never()
|| self.is_never()
|| matches!(self.expand(), Kind::Enum(_))
} }
pub fn is_integer(self) -> bool { pub fn is_integer(self) -> bool {
self.is_signed() || self.is_unsigned() matches!(self.repr(), U8..=INT) || self.is_never()
} }
pub fn is_never(self) -> bool { pub fn is_never(self) -> bool {
@ -275,19 +264,22 @@ impl Id {
} }
pub(crate) fn loc(&self, tys: &Types) -> Loc { pub(crate) fn loc(&self, tys: &Types) -> Loc {
use Kind as K;
match self.expand() { match self.expand() {
K::Opt(o) Kind::Opt(o)
if let ty = tys.ins.opts[o].base if let ty = tys.ins.opts[o].base
&& ty.loc(tys) == Loc::Reg && ty.loc(tys) == Loc::Reg
&& (ty.is_pointer() || tys.size_of(ty) < 8) => && (ty.is_pointer() || tys.size_of(ty) < 8) =>
{ {
Loc::Reg Loc::Reg
} }
K::Ptr(_) | K::Enum(_) | K::Builtin(_) => Loc::Reg, Kind::Ptr(_) | Kind::Enum(_) | Kind::Builtin(_) => Loc::Reg,
K::Struct(_) | K::Tuple(_) | K::Union(_) if tys.size_of(*self) == 0 => Loc::Reg, Kind::Struct(_) | Kind::Union(_) if tys.size_of(*self) == 0 => Loc::Reg,
K::Struct(_) | K::Tuple(_) | K::Union(_) | K::Slice(_) | K::Opt(_) => Loc::Stack, Kind::Struct(_) | Kind::Union(_) | Kind::Slice(_) | Kind::Opt(_) => Loc::Stack,
c @ (K::Func(_) | K::Global(_) | K::Module(_) | K::Const(_) | K::Template(_)) => { c @ (Kind::Func(_)
| Kind::Global(_)
| Kind::Module(_)
| Kind::Const(_)
| Kind::Template(_)) => {
unreachable!("{c:?}") unreachable!("{c:?}")
} }
} }
@ -301,10 +293,6 @@ impl Id {
_ => false, _ => false,
} }
} }
pub fn is_aggregate(&self, tys: &Types) -> bool {
self.loc(tys) == Loc::Stack
}
} }
#[derive(PartialEq, Eq, Clone, Copy)] #[derive(PartialEq, Eq, Clone, Copy)]
@ -392,7 +380,6 @@ builtin_type! {
INT; INT;
F32; F32;
F64; F64;
ANY_TYPE;
} }
macro_rules! type_kind { macro_rules! type_kind {
@ -427,12 +414,6 @@ macro_rules! type_kind {
} }
} }
impl Id {
pub fn kind(self) -> u8 {
(self.repr() >> $name::FLAG_OFFSET) as _
}
}
$( $(
impl From<$variant> for $name { impl From<$variant> for $name {
fn from(value: $variant) -> Self { fn from(value: $variant) -> Self {
@ -460,7 +441,6 @@ type_kind! {
pub enum Kind { pub enum Kind {
Builtin, Builtin,
Struct, Struct,
Tuple,
Enum, Enum,
Union, Union,
Ptr, Ptr,
@ -469,8 +449,8 @@ type_kind! {
Func, Func,
Template, Template,
Global, Global,
Const,
Module, Module,
Const,
} }
} }
@ -525,31 +505,31 @@ impl<'a> Display<'a> {
impl core::fmt::Display for Display<'_> { impl core::fmt::Display for Display<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
use Kind as K; use Kind as TK;
match K::from_ty(self.ty) { match TK::from_ty(self.ty) {
K::Module(idx) => { TK::Module(idx) => {
f.write_str("@use(\"")?; f.write_str("@use(\"")?;
self.files[idx].path.fmt(f)?; self.files[idx].path.fmt(f)?;
f.write_str(")[")?; f.write_str(")[")?;
idx.fmt(f)?; idx.fmt(f)?;
f.write_str("]") f.write_str("]")
} }
K::Builtin(ty) => f.write_str(to_str(ty)), TK::Builtin(ty) => f.write_str(to_str(ty)),
K::Opt(ty) => { TK::Opt(ty) => {
f.write_str("?")?; f.write_str("?")?;
self.rety(self.tys.ins.opts[ty].base).fmt(f) self.rety(self.tys.ins.opts[ty].base).fmt(f)
} }
K::Ptr(ty) => { TK::Ptr(ty) => {
f.write_str("^")?; f.write_str("^")?;
self.rety(self.tys.ins.ptrs[ty].base).fmt(f) self.rety(self.tys.ins.ptrs[ty].base).fmt(f)
} }
K::Struct(idx) => { TK::Struct(idx) => {
let record = &self.tys.ins.structs[idx]; let record = &self.tys.ins.structs[idx];
if record.name.is_null() { if record.name.is_null() {
f.write_str("[")?; f.write_str("[")?;
idx.fmt(f)?; idx.fmt(f)?;
f.write_str("]{")?; f.write_str("]{")?;
for (i, &StructField { name, ty, .. }) in for (i, &StructField { name, ty }) in
self.tys.struct_fields(idx).iter().enumerate() self.tys.struct_fields(idx).iter().enumerate()
{ {
if i != 0 { if i != 0 {
@ -565,25 +545,13 @@ impl core::fmt::Display for Display<'_> {
f.write_str(file.ident_str(record.name)) f.write_str(file.ident_str(record.name))
} }
} }
K::Tuple(idx) => { TK::Union(idx) => {
f.write_str(".(")?;
for (i, &ty) in
self.tys.ins.args[self.tys.ins.tuples[idx].fields.range()].iter().enumerate()
{
if i != 0 {
f.write_str(", ")?;
}
self.rety(ty).fmt(f)?;
}
f.write_str(")")
}
K::Union(idx) => {
let record = &self.tys.ins.unions[idx]; let record = &self.tys.ins.unions[idx];
if record.name.is_null() { if record.name.is_null() {
f.write_str("[")?; f.write_str("[")?;
idx.fmt(f)?; idx.fmt(f)?;
f.write_str("]{")?; f.write_str("]{")?;
for (i, &UnionField { name, ty }) in for (i, &StructField { name, ty }) in
self.tys.union_fields(idx).iter().enumerate() self.tys.union_fields(idx).iter().enumerate()
{ {
if i != 0 { if i != 0 {
@ -599,36 +567,37 @@ impl core::fmt::Display for Display<'_> {
f.write_str(file.ident_str(record.name)) f.write_str(file.ident_str(record.name))
} }
} }
K::Enum(idx) => { TK::Enum(idx) => {
let enm = &self.tys.ins.enums[idx]; let enm = &self.tys.ins.enums[idx];
debug_assert!(!enm.name.is_null()); debug_assert!(!enm.name.is_null());
let file = &self.files[enm.file]; let file = &self.files[enm.file];
f.write_str(file.ident_str(enm.name)) f.write_str(file.ident_str(enm.name))
} }
K::Func(idx) => { TK::Func(idx) => {
f.write_str("fn")?; f.write_str("fn")?;
idx.fmt(f) idx.fmt(f)
} }
K::Template(idx) => { TK::Template(idx) => {
f.write_str("fn")?; f.write_str("fn")?;
idx.fmt(f) idx.fmt(f)
} }
K::Global(idx) => { TK::Global(idx) => {
let global = &self.tys.ins.globals[idx]; let global = &self.tys.ins.globals[idx];
let file = &self.files[global.file]; let file = &self.files[global.file];
f.write_str(file.ident_str(global.name))?; f.write_str(file.ident_str(global.name))?;
f.write_str(" (global)") f.write_str(" (global)")
} }
K::Slice(idx) => { TK::Slice(idx) => {
let array = self.tys.ins.slices[idx]; let array = self.tys.ins.slices[idx];
f.write_str("[")?; f.write_str("[")?;
if let Some(len) = array.len() { self.rety(array.elem).fmt(f)?;
len.fmt(f)?; if array.len != ArrayLen::MAX {
f.write_str("; ")?;
array.len.fmt(f)?;
} }
f.write_str("]")?; f.write_str("]")
self.rety(array.elem).fmt(f)
} }
K::Const(idx) => { TK::Const(idx) => {
let cnst = &self.tys.ins.consts[idx]; let cnst = &self.tys.ins.consts[idx];
let file = &self.files[cnst.file]; let file = &self.files[cnst.file];
f.write_str(file.ident_str(cnst.name))?; f.write_str(file.ident_str(cnst.name))?;
@ -640,20 +609,17 @@ impl core::fmt::Display for Display<'_> {
#[derive(PartialEq, Eq, Hash, Clone, Copy)] #[derive(PartialEq, Eq, Hash, Clone, Copy)]
pub enum SymKey<'a> { pub enum SymKey<'a> {
Tuple(List),
Pointer(&'a PtrData), Pointer(&'a PtrData),
Optional(&'a OptData), Optional(&'a OptData),
Type(Id, Pos, List), Type(Id, Pos, Tuple),
Decl(Id, Ident), Decl(Id, Ident),
// separate to avoid colision with decl
Import(Id, Ident),
Array(&'a ArrayData), Array(&'a ArrayData),
Constant(&'a ConstData), Constant(&'a ConstData),
} }
#[derive(Clone, Copy, Default)] #[derive(Clone, Copy, Default)]
pub struct Sig { pub struct Sig {
pub args: List, pub args: Tuple,
pub ret: Id, pub ret: Id,
} }
@ -675,7 +641,6 @@ pub struct FuncData {
pub sig: Sig, pub sig: Sig,
pub is_inline: bool, pub is_inline: bool,
pub is_generic: bool, pub is_generic: bool,
pub is_import: bool,
pub comp_state: [PackedCompState; 2], pub comp_state: [PackedCompState; 2],
} }
@ -748,7 +713,7 @@ pub struct TypeBase {
pub pos: Pos, pub pos: Pos,
pub name: Ident, pub name: Ident,
pub field_start: u32, pub field_start: u32,
pub captured: List, pub captured: Tuple,
pub ast: ExprRef, pub ast: ExprRef,
} }
@ -759,11 +724,6 @@ pub struct EnumData {
impl_deref!(EnumData { base: TypeBase }); impl_deref!(EnumData { base: TypeBase });
pub struct UnionField {
pub name: Ident,
pub ty: Id,
}
#[derive(Default)] #[derive(Default)]
pub struct UnionData { pub struct UnionData {
pub base: TypeBase, pub base: TypeBase,
@ -776,7 +736,6 @@ impl_deref!(UnionData { base: TypeBase });
pub struct StructField { pub struct StructField {
pub name: Ident, pub name: Ident,
pub ty: Id, pub ty: Id,
pub default_value: Option<Const>,
} }
#[derive(Default)] #[derive(Default)]
@ -790,13 +749,6 @@ pub struct StructData {
impl_deref!(StructData { base: TypeBase }); impl_deref!(StructData { base: TypeBase });
#[derive(Default)]
pub struct TupleData {
pub fields: List,
pub size: Cell<Size>,
pub align: Cell<u8>,
}
#[derive(PartialEq, Eq, Hash, Clone, Copy)] #[derive(PartialEq, Eq, Hash, Clone, Copy)]
pub struct OptData { pub struct OptData {
pub base: Id, pub base: Id,
@ -866,7 +818,6 @@ impl IdentInterner {
#[derive(Default)] #[derive(Default)]
pub struct TypesTmp { pub struct TypesTmp {
pub struct_fields: Vec<StructField>, pub struct_fields: Vec<StructField>,
pub union_fields: Vec<UnionField>,
pub enum_fields: Vec<EnumField>, pub enum_fields: Vec<EnumField>,
pub args: Vec<Id>, pub args: Vec<Id>,
} }
@ -875,7 +826,6 @@ pub struct TypesTmp {
pub struct TypeIns { pub struct TypeIns {
pub args: Vec<Id>, pub args: Vec<Id>,
pub struct_fields: Vec<StructField>, pub struct_fields: Vec<StructField>,
pub union_fields: Vec<UnionField>,
pub enum_fields: Vec<EnumField>, pub enum_fields: Vec<EnumField>,
pub funcs: EntVec<Func, FuncData>, pub funcs: EntVec<Func, FuncData>,
pub templates: EntVec<Template, TemplateData>, pub templates: EntVec<Template, TemplateData>,
@ -887,7 +837,6 @@ pub struct TypeIns {
pub ptrs: EntVec<Ptr, PtrData>, pub ptrs: EntVec<Ptr, PtrData>,
pub opts: EntVec<Opt, OptData>, pub opts: EntVec<Opt, OptData>,
pub slices: EntVec<Slice, ArrayData>, pub slices: EntVec<Slice, ArrayData>,
pub tuples: EntVec<Tuple, TupleData>,
} }
pub struct FTask { pub struct FTask {
@ -931,9 +880,7 @@ impl Types {
| Kind::Builtin(_) | Kind::Builtin(_)
| Kind::Ptr(_) | Kind::Ptr(_)
| Kind::Slice(_) | Kind::Slice(_)
| Kind::Tuple(_)
| Kind::Opt(_) => utils::is_pascal_case, | Kind::Opt(_) => utils::is_pascal_case,
Kind::Func(f) if self.ins.funcs[f].is_import => |_| Ok(()),
Kind::Func(f) Kind::Func(f)
if let &Expr::Closure { ret: &Expr::Ident { id, .. }, .. } = if let &Expr::Closure { ret: &Expr::Ident { id, .. }, .. } =
self.ins.funcs[f].expr.get(&files[self.ins.funcs[f].file]) self.ins.funcs[f].expr.get(&files[self.ins.funcs[f].file])
@ -955,23 +902,23 @@ impl Types {
} }
} }
pub fn pack_args(&mut self, arg_base: usize) -> Option<List> { pub fn pack_args(&mut self, arg_base: usize) -> Option<Tuple> {
let base = self.ins.args.len(); let base = self.ins.args.len();
self.ins.args.extend(self.tmp.args.drain(arg_base..)); self.ins.args.extend(self.tmp.args.drain(arg_base..));
let needle = &self.ins.args[base..]; let needle = &self.ins.args[base..];
if needle.is_empty() { if needle.is_empty() {
return Some(List::empty()); return Some(Tuple::empty());
} }
let len = needle.len(); let len = needle.len();
// FIXME: maybe later when this becomes a bottleneck we use more // FIXME: maybe later when this becomes a bottleneck we use more
// efficient search (SIMD?, indexing?) // efficient search (SIMD?, indexing?)
let sp = self.ins.args.windows(needle.len()).position(|val| val == needle).unwrap(); let sp = self.ins.args.windows(needle.len()).position(|val| val == needle).unwrap();
self.ins.args.truncate((sp + needle.len()).max(base)); self.ins.args.truncate((sp + needle.len()).max(base));
List::new(sp, len) Tuple::new(sp, len)
} }
pub fn union_fields(&self, union: Union) -> &[UnionField] { pub fn union_fields(&self, union: Union) -> &[StructField] {
&self.ins.union_fields[self.union_field_range(union)] &self.ins.struct_fields[self.union_field_range(union)]
} }
fn union_field_range(&self, union: Union) -> Range<usize> { fn union_field_range(&self, union: Union) -> Range<usize> {
@ -980,7 +927,7 @@ impl Types {
.ins .ins
.unions .unions
.next(union) .next(union)
.map_or(self.ins.union_fields.len(), |s| s.field_start as usize); .map_or(self.ins.struct_fields.len(), |s| s.field_start as usize);
start..end start..end
} }
@ -988,7 +935,7 @@ impl Types {
&self.ins.struct_fields[self.struct_field_range(strct)] &self.ins.struct_fields[self.struct_field_range(strct)]
} }
pub fn struct_field_range(&self, strct: Struct) -> Range<usize> { fn struct_field_range(&self, strct: Struct) -> Range<usize> {
let start = self.ins.structs[strct].field_start as usize; let start = self.ins.structs[strct].field_start as usize;
let end = self let end = self
.ins .ins
@ -1050,16 +997,6 @@ impl Types {
self.ins.structs[stru].size.set(oiter.offset); self.ins.structs[stru].size.set(oiter.offset);
oiter.offset oiter.offset
} }
Kind::Tuple(tuple) => {
if self.ins.tuples[tuple].size.get() != 0 {
return self.ins.tuples[tuple].size.get();
}
let mut oiter = OffsetIter::new(tuple, self);
while oiter.next(self).is_some() {}
self.ins.tuples[tuple].size.set(oiter.offset);
oiter.offset
}
Kind::Union(union) => { Kind::Union(union) => {
if self.ins.unions[union].size.get() != 0 { if self.ins.unions[union].size.get() != 0 {
return self.ins.unions[union].size.get(); return self.ins.unions[union].size.get();
@ -1079,12 +1016,8 @@ impl Types {
self.size_of(base) + self.align_of(base) self.size_of(base) + self.align_of(base)
} }
} }
Kind::Ptr(_) | Kind::Builtin(_) => ty.simple_size().unwrap(), _ if let Some(size) = ty.simple_size() => size,
Kind::Func(_) ty => unimplemented!("size_of: {:?}", ty),
| Kind::Template(_)
| Kind::Global(_)
| Kind::Const(_)
| Kind::Module(_) => unreachable!(),
} }
} }
@ -1116,15 +1049,6 @@ impl Types {
self.ins.structs[stru].align.set(align.try_into().unwrap()); self.ins.structs[stru].align.set(align.try_into().unwrap());
align align
} }
Kind::Tuple(tuple) => {
if self.ins.tuples[tuple].align.get() != 0 {
return self.ins.tuples[tuple].align.get() as _;
}
let align =
self.tuple_fields(tuple).iter().map(|&f| self.align_of(f)).max().unwrap_or(1);
self.ins.tuples[tuple].align.set(align.try_into().unwrap());
align
}
Kind::Slice(arr) => { Kind::Slice(arr) => {
let arr = &self.ins.slices[arr]; let arr = &self.ins.slices[arr];
match arr.len { match arr.len {
@ -1132,14 +1056,7 @@ impl Types {
_ => self.align_of(arr.elem), _ => self.align_of(arr.elem),
} }
} }
Kind::Opt(opt) => self.align_of(self.ins.opts[opt].base), _ => self.size_of(ty).max(1),
Kind::Builtin(_) | Kind::Enum(_) | Kind::Ptr(_) => self.size_of(ty).max(1),
Kind::Func(_)
| Kind::Template(_)
| Kind::Global(_)
| Kind::Const(_)
| Kind::Module(_) => unreachable!(),
//_ => self.size_of(ty).max(1),
} }
} }
@ -1186,7 +1103,7 @@ impl Types {
self.struct_fields(s).iter().position(|f| f.name == name) self.struct_fields(s).iter().position(|f| f.name == name)
} }
pub fn find_union_field(&self, u: Union, name: &str) -> Option<(usize, &UnionField)> { pub fn find_union_field(&self, u: Union, name: &str) -> Option<(usize, &StructField)> {
let name = self.names.project(name)?; let name = self.names.project(name)?;
self.union_fields(u).iter().enumerate().find(|(_, f)| f.name == name) self.union_fields(u).iter().enumerate().find(|(_, f)| f.name == name)
} }
@ -1201,14 +1118,10 @@ impl Types {
self.ins.globals.clear(); self.ins.globals.clear();
self.ins.structs.clear(); self.ins.structs.clear();
self.ins.struct_fields.clear(); self.ins.struct_fields.clear();
self.ins.union_fields.clear();
self.ins.enum_fields.clear();
self.ins.ptrs.clear(); self.ins.ptrs.clear();
self.ins.slices.clear(); self.ins.slices.clear();
debug_assert_eq!(self.tmp.struct_fields.len(), 0); debug_assert_eq!(self.tmp.struct_fields.len(), 0);
debug_assert_eq!(self.tmp.union_fields.len(), 0);
debug_assert_eq!(self.tmp.enum_fields.len(), 0);
debug_assert_eq!(self.tmp.args.len(), 0); debug_assert_eq!(self.tmp.args.len(), 0);
debug_assert_eq!(self.tasks.len(), 0); debug_assert_eq!(self.tasks.len(), 0);
@ -1227,7 +1140,6 @@ impl Types {
| Kind::Template(_) | Kind::Template(_)
| Kind::Global(_) | Kind::Global(_)
| Kind::Module(_) | Kind::Module(_)
| Kind::Tuple(_)
| Kind::Const(_) => return None, | Kind::Const(_) => return None,
}) })
} }
@ -1253,11 +1165,7 @@ impl Types {
self.type_base_of(ty).map(|b| b.parent) self.type_base_of(ty).map(|b| b.parent)
} }
pub fn captures_of<'a>( pub fn captures_of<'a>(&self, ty: Id, file: &'a parser::Ast) -> Option<(&'a [Ident], Tuple)> {
&self,
ty: Id,
file: &'a parser::Ast,
) -> Option<(&'a [CapturedIdent], List)> {
let base = self.type_base_of(ty)?; let base = self.type_base_of(ty)?;
let (Expr::Struct { captured, .. } let (Expr::Struct { captured, .. }
@ -1273,28 +1181,10 @@ impl Types {
pub fn len_of(&self, ty: Id) -> Option<u32> { pub fn len_of(&self, ty: Id) -> Option<u32> {
Some(match ty.expand() { Some(match ty.expand() {
Kind::Struct(s) => self.struct_field_range(s).len() as _, Kind::Struct(s) => self.struct_field_range(s).len() as _,
Kind::Tuple(s) => self.ins.tuples[s].fields.len() as _,
Kind::Slice(s) => self.ins.slices[s].len()? as _, Kind::Slice(s) => self.ins.slices[s].len()? as _,
_ => return None, _ => return None,
}) })
} }
pub fn name_of(&self, ty: Id, files: &EntSlice<Module, parser::Ast>, data: &mut Vec<u8>) {
use core::fmt::Write;
let str = unsafe { core::mem::transmute::<&mut Vec<u8>, &mut String>(data) };
write!(str, "{}", Display::new(self, files, ty)).unwrap();
}
pub fn tuple_fields(&self, tuple: Tuple) -> &[Id] {
&self.ins.args[self.ins.tuples[tuple].fields.range()]
}
pub fn elem_of(&self, ty: Id) -> Option<Id> {
match ty.expand() {
Kind::Slice(s) => Some(self.ins.slices[s].elem),
_ => None,
}
}
} }
pub struct OptLayout { pub struct OptLayout {
@ -1303,57 +1193,17 @@ pub struct OptLayout {
pub payload_offset: Offset, pub payload_offset: Offset,
} }
pub trait Agregate: Copy { pub struct OffsetIter {
type Field: AsRef<Id> + 'static; strct: Struct,
fn fields(self, tys: &Types) -> Range<usize>;
fn field_by_idx(tys: &Types, index: usize) -> &Self::Field;
fn align_override(self, _: &Types) -> Option<u8> {
None
}
}
impl Agregate for Tuple {
type Field = Id;
fn fields(self, tys: &Types) -> Range<usize> {
tys.ins.tuples[self].fields.range()
}
fn field_by_idx(tys: &Types, index: usize) -> &Self::Field {
&tys.ins.args[index]
}
}
impl Agregate for Struct {
type Field = StructField;
fn fields(self, tys: &Types) -> Range<usize> {
tys.struct_field_range(self)
}
fn field_by_idx(tys: &Types, index: usize) -> &Self::Field {
&tys.ins.struct_fields[index]
}
fn align_override(self, tys: &Types) -> Option<u8> {
tys.ins.structs[self].explicit_alignment
}
}
impl AsRef<Id> for StructField {
fn as_ref(&self) -> &Id {
&self.ty
}
}
pub struct OffsetIter<T> {
strct: T,
offset: Offset, offset: Offset,
fields: Range<usize>, fields: Range<usize>,
} }
impl OffsetIter<Struct> { impl OffsetIter {
pub fn new(strct: Struct, tys: &Types) -> Self {
Self { strct, offset: 0, fields: tys.struct_field_range(strct) }
}
pub fn offset_of(tys: &Types, idx: Struct, field: &str) -> Option<(Offset, Id)> { pub fn offset_of(tys: &Types, idx: Struct, field: &str) -> Option<(Offset, Id)> {
let field_id = tys.names.project(field)?; let field_id = tys.names.project(field)?;
OffsetIter::new(idx, tys) OffsetIter::new(idx, tys)
@ -1361,33 +1211,25 @@ impl OffsetIter<Struct> {
.find(|(f, _)| f.name == field_id) .find(|(f, _)| f.name == field_id)
.map(|(f, off)| (off, f.ty)) .map(|(f, off)| (off, f.ty))
} }
}
impl<T: Agregate> OffsetIter<T> { fn next<'a>(&mut self, tys: &'a Types) -> Option<(&'a StructField, Offset)> {
pub fn new(strct: T, tys: &Types) -> Self { let stru = &tys.ins.structs[self.strct];
Self { strct, offset: 0, fields: strct.fields(tys) } let field = &tys.ins.struct_fields[self.fields.next()?];
}
fn next<'a>(&mut self, tys: &'a Types) -> Option<(&'a T::Field, Offset)> { let align = stru.explicit_alignment.map_or_else(|| tys.align_of(field.ty), |a| a as u32);
let field = &T::field_by_idx(tys, self.fields.next()?);
let align = self
.strct
.align_override(tys)
.map_or_else(|| tys.align_of(*field.as_ref()), |a| a as u32);
self.offset = (self.offset + align - 1) & !(align - 1); self.offset = (self.offset + align - 1) & !(align - 1);
let off = self.offset; let off = self.offset;
self.offset += tys.size_of(*field.as_ref()); self.offset += tys.size_of(field.ty);
Some((field, off)) Some((field, off))
} }
pub fn next_ty(&mut self, tys: &Types) -> Option<(Id, Offset)> { pub fn next_ty(&mut self, tys: &Types) -> Option<(Id, Offset)> {
let (field, off) = self.next(tys)?; let (field, off) = self.next(tys)?;
Some((*field.as_ref(), off)) Some((field.ty, off))
} }
pub fn into_iter(mut self, tys: &Types) -> impl Iterator<Item = (&T::Field, Offset)> { pub fn into_iter(mut self, tys: &Types) -> impl Iterator<Item = (&StructField, Offset)> {
core::iter::from_fn(move || self.next(tys)) core::iter::from_fn(move || self.next(tys))
} }
} }
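Both sides of the `OffsetIter::next` hunk above place a field by first rounding the running offset up to the field's alignment (or the struct's explicit alignment override) with `(offset + align - 1) & !(align - 1)`. A minimal standalone sketch of that layout walk, using an illustrative `(size, align)` field list rather than the compiler's actual types:

```rust
/// Round `offset` up to the next multiple of `align` (a power of two),
/// exactly the expression used in `OffsetIter::next`.
fn align_up(offset: u32, align: u32) -> u32 {
    (offset + align - 1) & !(align - 1)
}

/// Assign byte offsets to hypothetical `(size, align)` fields the same way
/// the iterator does: round up, record the offset, then advance by the size.
fn offsets(fields: &[(u32, u32)]) -> Vec<u32> {
    let mut offset = 0;
    fields
        .iter()
        .map(|&(size, align)| {
            offset = align_up(offset, align);
            let off = offset;
            offset += size;
            off
        })
        .collect()
}

fn main() {
    // u8, u32, u16 -> offsets 0, 4, 8
    assert_eq!(offsets(&[(1, 1), (4, 4), (2, 2)]), [0, 4, 8]);
}
```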

View file

@ -1,3 +1,4 @@
#![expect(dead_code)]
use { use {
alloc::alloc, alloc::alloc,
core::{ core::{
@ -6,7 +7,7 @@ use {
hint::unreachable_unchecked, hint::unreachable_unchecked,
marker::PhantomData, marker::PhantomData,
mem::MaybeUninit, mem::MaybeUninit,
ops::{Deref, DerefMut, Not, Range}, ops::{Deref, DerefMut, Not},
ptr::Unique, ptr::Unique,
}, },
}; };
@ -31,10 +32,9 @@ pub fn is_screaming_case(str: &str) -> Result<(), &'static str> {
} }
type Nid = u16; type Nid = u16;
type BitSetUnit = usize;
pub union BitSet { pub union BitSet {
inline: BitSetUnit, inline: usize,
alloced: Unique<AllocedBitSet>, alloced: Unique<AllocedBitSet>,
} }
@ -78,9 +78,9 @@ impl Default for BitSet {
} }
impl BitSet { impl BitSet {
const FLAG: BitSetUnit = 1 << (Self::UNIT - 1); const FLAG: usize = 1 << (Self::UNIT - 1);
const INLINE_ELEMS: usize = Self::UNIT - 1; const INLINE_ELEMS: usize = Self::UNIT - 1;
pub const UNIT: usize = core::mem::size_of::<BitSetUnit>() * 8; const UNIT: usize = core::mem::size_of::<usize>() * 8;
pub fn with_capacity(len: usize) -> Self { pub fn with_capacity(len: usize) -> Self {
let mut s = Self::default(); let mut s = Self::default();
@ -92,7 +92,7 @@ impl BitSet {
unsafe { self.inline & Self::FLAG != 0 } unsafe { self.inline & Self::FLAG != 0 }
} }
fn data_and_len(&self) -> (&[BitSetUnit], usize) { fn data_and_len(&self) -> (&[usize], usize) {
unsafe { unsafe {
if self.is_inline() { if self.is_inline() {
(core::slice::from_ref(&self.inline), Self::INLINE_ELEMS) (core::slice::from_ref(&self.inline), Self::INLINE_ELEMS)
@ -100,16 +100,16 @@ impl BitSet {
let small_vec = self.alloced.as_ref(); let small_vec = self.alloced.as_ref();
( (
core::slice::from_raw_parts( core::slice::from_raw_parts(
&small_vec.data as *const _ as *const BitSetUnit, &small_vec.data as *const _ as *const usize,
small_vec.cap, small_vec.cap,
), ),
small_vec.cap * Self::UNIT, small_vec.cap * core::mem::size_of::<usize>() * 8,
) )
} }
} }
} }
fn data_mut_and_len(&mut self) -> (&mut [BitSetUnit], usize) { fn data_mut_and_len(&mut self) -> (&mut [usize], usize) {
unsafe { unsafe {
if self.is_inline() { if self.is_inline() {
(core::slice::from_mut(&mut self.inline), INLINE_ELEMS) (core::slice::from_mut(&mut self.inline), INLINE_ELEMS)
@ -117,7 +117,7 @@ impl BitSet {
let small_vec = self.alloced.as_mut(); let small_vec = self.alloced.as_mut();
( (
core::slice::from_raw_parts_mut( core::slice::from_raw_parts_mut(
&mut small_vec.data as *mut _ as *mut BitSetUnit, &mut small_vec.data as *mut _ as *mut usize,
small_vec.cap, small_vec.cap,
), ),
small_vec.cap * Self::UNIT, small_vec.cap * Self::UNIT,
@ -163,7 +163,7 @@ impl BitSet {
let (ptr, prev_len) = unsafe { let (ptr, prev_len) = unsafe {
if self.is_inline() { if self.is_inline() {
let ptr = alloc::alloc(layout); let ptr = alloc::alloc(layout);
*ptr.add(off).cast::<BitSetUnit>() = self.inline & !Self::FLAG; *ptr.add(off).cast::<usize>() = self.inline & !Self::FLAG;
(ptr, 1) (ptr, 1)
} else { } else {
let prev_len = self.alloced.as_ref().cap; let prev_len = self.alloced.as_ref().cap;
@ -174,7 +174,7 @@ impl BitSet {
unsafe { unsafe {
MaybeUninit::fill( MaybeUninit::fill(
core::slice::from_raw_parts_mut( core::slice::from_raw_parts_mut(
ptr.add(off).cast::<MaybeUninit<BitSetUnit>>().add(prev_len), ptr.add(off).cast::<MaybeUninit<usize>>().add(prev_len),
slot_count - prev_len, slot_count - prev_len,
), ),
0, 0,
@ -187,7 +187,7 @@ impl BitSet {
fn layout(slot_count: usize) -> (core::alloc::Layout, usize) { fn layout(slot_count: usize) -> (core::alloc::Layout, usize) {
unsafe { unsafe {
core::alloc::Layout::new::<AllocedBitSet>() core::alloc::Layout::new::<AllocedBitSet>()
.extend(Layout::array::<BitSetUnit>(slot_count).unwrap_unchecked()) .extend(Layout::array::<usize>(slot_count).unwrap_unchecked())
.unwrap_unchecked() .unwrap_unchecked()
} }
} }
@ -205,10 +205,6 @@ impl BitSet {
pub fn clear(&mut self, len: usize) { pub fn clear(&mut self, len: usize) {
self.reserve(len); self.reserve(len);
self.clear_as_is();
}
pub fn clear_as_is(&mut self) {
if self.is_inline() { if self.is_inline() {
unsafe { self.inline &= Self::FLAG }; unsafe { self.inline &= Self::FLAG };
} else { } else {
@ -216,11 +212,7 @@ impl BitSet {
} }
} }
pub fn approx_unit_cap(&self) -> usize { pub fn units<'a>(&'a self, slot: &'a mut usize) -> &'a [usize] {
self.data_and_len().0.len()
}
pub fn units<'a>(&'a self, slot: &'a mut BitSetUnit) -> &'a [BitSetUnit] {
if self.is_inline() { if self.is_inline() {
*slot = unsafe { self.inline } & !Self::FLAG; *slot = unsafe { self.inline } & !Self::FLAG;
core::slice::from_ref(slot) core::slice::from_ref(slot)
@ -229,47 +221,36 @@ impl BitSet {
} }
} }
pub fn units_mut(&mut self) -> Option<&mut [BitSetUnit]> {
self.is_inline().not().then(|| self.data_mut_and_len().0)
}
pub fn reserve(&mut self, len: usize) { pub fn reserve(&mut self, len: usize) {
if len > self.data_and_len().1 { if len > self.data_and_len().1 {
self.grow(len.next_power_of_two().max(4 * Self::UNIT)); self.grow(len.next_power_of_two().max(4 * Self::UNIT));
} }
} }
pub fn set_range(&mut self, proj_range: Range<usize>) { pub fn units_mut(&mut self) -> Result<&mut [usize], &mut InlineBitSetView> {
if proj_range.is_empty() { if self.is_inline() {
return; Err(unsafe {
} core::mem::transmute::<&mut usize, &mut InlineBitSetView>(&mut self.inline)
})
self.reserve(proj_range.end);
let (units, _) = self.data_mut_and_len();
if proj_range.start / Self::UNIT == (proj_range.end - 1) / Self::UNIT {
debug_assert!(proj_range.len() <= Self::UNIT);
let mask = ((1 << proj_range.len()) - 1) << (proj_range.start % Self::UNIT);
units[proj_range.start / Self::UNIT] |= mask;
} else { } else {
let fill_range = proj_range.start.div_ceil(Self::UNIT)..proj_range.end / Self::UNIT; Ok(self.data_mut_and_len().0)
units[fill_range].fill(BitSetUnit::MAX);
let prefix_len = Self::UNIT - proj_range.start % Self::UNIT;
let prefix_mask = ((1 << prefix_len) - 1) << (proj_range.start % Self::UNIT);
units[proj_range.start / Self::UNIT] |= prefix_mask;
let postfix_len = proj_range.end % Self::UNIT;
let postfix_mask = (1 << postfix_len) - 1;
units[proj_range.end / Self::UNIT] |= postfix_mask;
} }
} }
} }
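`BitSet::set_range` in the hunk above works one machine word at a time: when the whole range falls inside a single unit it ORs in one mask, otherwise it fills the fully covered units and patches the ends with prefix and postfix masks. A simplified standalone version over a plain `&mut [usize]` (ignoring the inline/heap representation and capacity handling of the real `BitSet`):

```rust
use std::ops::Range;

const UNIT: usize = usize::BITS as usize;

/// Mask with `len` ones starting at bit `start`; `start + len` must fit in one unit.
fn mask(start: usize, len: usize) -> usize {
    debug_assert!(start + len <= UNIT);
    if len == UNIT { usize::MAX } else { ((1usize << len) - 1) << start }
}

/// Set every bit in `range`, assuming `units` is already large enough.
fn set_range(units: &mut [usize], range: Range<usize>) {
    if range.is_empty() {
        return;
    }
    let (first, last) = (range.start / UNIT, (range.end - 1) / UNIT);
    if first == last {
        // The whole range lives in one unit: a single mask is enough.
        units[first] |= mask(range.start % UNIT, range.len());
    } else {
        // Prefix bits in the first unit, full units in the middle, postfix bits in the last.
        units[first] |= mask(range.start % UNIT, UNIT - range.start % UNIT);
        units[first + 1..last].iter_mut().for_each(|u| *u = usize::MAX);
        units[last] |= mask(0, range.end - last * UNIT);
    }
}

fn main() {
    let mut units = [0usize; 3];
    set_range(&mut units, 3..UNIT + 5);
    assert_eq!(units[0], !0b111); // bits 3.. of the first unit
    assert_eq!(units[1], 0b11111); // the first five bits of the second unit
}
```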
pub struct InlineBitSetView(usize);
impl InlineBitSetView {
pub(crate) fn add_mask(&mut self, tmp: usize) {
debug_assert!(tmp & BitSet::FLAG == 0);
self.0 |= tmp;
}
}
pub struct BitSetIter<'a> { pub struct BitSetIter<'a> {
index: usize, index: usize,
current: BitSetUnit, current: usize,
remining: &'a [BitSetUnit], remining: &'a [usize],
} }
impl Iterator for BitSetIter<'_> { impl Iterator for BitSetIter<'_> {
@ -289,7 +270,7 @@ impl Iterator for BitSetIter<'_> {
struct AllocedBitSet { struct AllocedBitSet {
cap: usize, cap: usize,
data: [BitSetUnit; 0], data: [usize; 0],
} }
#[cfg(test)] #[cfg(test)]
@ -363,10 +344,6 @@ impl Vc {
} }
} }
pub fn is_empty(&self) -> bool {
self.len() == 0
}
fn len_mut(&mut self) -> &mut Nid { fn len_mut(&mut self) -> &mut Nid {
unsafe { unsafe {
if self.is_inline() { if self.is_inline() {
@ -646,7 +623,7 @@ impl<K: Ent, T> EntVec<K, T> {
} }
} }
pub fn values(&self) -> core::slice::Iter<T> { pub fn iter(&self) -> core::slice::Iter<T> {
self.data.iter() self.data.iter()
} }
} }

View file

@ -5,8 +5,8 @@ main:
ADDI64 r254, r254, -24d ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h ST r31, r254, 0a, 24h
JAL r31, r0, :cond JAL r31, r0, :cond
CP r32, r0
CP r33, r1 CP r33, r1
CP r32, r0
JNE r33, r32, :0 JNE r33, r32, :0
JMP :1 JMP :1
0: LI64 r32, 2d 0: LI64 r32, 2d

View file

@ -1,11 +1,5 @@
fun:
UN
main: main:
ADDI64 r254, r254, -8d UN
ST r31, r254, 0a, 8h code size: 9
JAL r31, r0, :fun
LD r31, r254, 0a, 8h
ADDI64 r254, r254, 8d
code size: 64
ret: 0 ret: 0
status: Err(Unreachable) status: Err(Unreachable)

View file

@ -12,7 +12,6 @@ main:
0: ST r0, r32, 0a, 8h 0: ST r0, r32, 0a, 8h
LD r33, r32, 0a, 8h LD r33, r32, 0a, 8h
JEQ r33, r0, :2 JEQ r33, r0, :2
ST r0, r32, 8a, 8h
LI64 r32, 200d LI64 r32, 200d
CP r1, r32 CP r1, r32
JMP :1 JMP :1
@ -49,9 +48,10 @@ main:
JMP :1 JMP :1
6: CP r1, r0 6: CP r1, r0
JMP :1 JMP :1
5: ST r0, r32, 0a, 8h 5: ADDI64 r34, r32, 16d
ST r0, r32, 0a, 8h
ST r0, r32, 8a, 8h ST r0, r32, 8a, 8h
ADDI64 r32, r32, 16d CP r32, r34
JMP :7 JMP :7
3: JAL r31, r0, :new_stru 3: JAL r31, r0, :new_stru
ST r1, r32, 0a, 16h ST r1, r32, 0a, 16h
@ -67,6 +67,6 @@ new_stru:
LD r1, r254, 0a, 16h LD r1, r254, 0a, 16h
ADDI64 r254, r254, 16d ADDI64 r254, r254, 16d
JALA r0, r31, 0a JALA r0, r31, 0a
code size: 668 code size: 658
ret: 0 ret: 0
status: Ok(()) status: Ok(())

View file

@ -1,10 +1,10 @@
continue_and_state_change: continue_and_state_change:
CP r13, r2 CP r13, r2
CP r15, r0
LI64 r16, 3d LI64 r16, 3d
LI64 r14, 4d
LI64 r17, 2d LI64 r17, 2d
LI64 r18, 10d LI64 r18, 10d
CP r15, r0
LI64 r14, 4d
6: JLTU r13, r18, :0 6: JLTU r13, r18, :0
JMP :1 JMP :1
0: JNE r13, r17, :2 0: JNE r13, r17, :2
@ -37,41 +37,41 @@ main:
ST r31, r254, 0a, 40h ST r31, r254, 0a, 40h
CP r2, r0 CP r2, r0
JAL r31, r0, :multiple_breaks JAL r31, r0, :multiple_breaks
LI64 r32, 3d CP r32, r1
CP r33, r1 LI64 r33, 3d
JEQ r33, r32, :0 JEQ r32, r33, :0
LI64 r32, 1d LI64 r32, 1d
CP r1, r32 CP r1, r32
JMP :1 JMP :1
0: LI64 r33, 4d 0: LI64 r32, 4d
CP r2, r33 CP r2, r32
JAL r31, r0, :multiple_breaks JAL r31, r0, :multiple_breaks
LI64 r34, 10d CP r34, r1
CP r35, r1 LI64 r35, 10d
JEQ r35, r34, :2 JEQ r34, r35, :2
LI64 r32, 2d LI64 r32, 2d
CP r1, r32 CP r1, r32
JMP :1 JMP :1
2: CP r2, r0 2: CP r2, r0
JAL r31, r0, :state_change_in_break JAL r31, r0, :state_change_in_break
CP r35, r1 CP r34, r1
JEQ r35, r0, :3 JEQ r34, r0, :3
CP r1, r32
JMP :1
3: CP r2, r33
JAL r31, r0, :state_change_in_break
CP r35, r1
JEQ r35, r34, :4
CP r1, r33 CP r1, r33
JMP :1 JMP :1
4: CP r2, r34 3: CP r2, r32
JAL r31, r0, :state_change_in_break
CP r34, r1
JEQ r34, r35, :4
CP r1, r32
JMP :1
4: CP r2, r35
JAL r31, r0, :continue_and_state_change JAL r31, r0, :continue_and_state_change
CP r33, r1 CP r32, r1
JEQ r33, r34, :5 JEQ r32, r35, :5
LI64 r32, 5d LI64 r32, 5d
CP r1, r32 CP r1, r32
JMP :1 JMP :1
5: CP r2, r32 5: CP r2, r33
JAL r31, r0, :continue_and_state_change JAL r31, r0, :continue_and_state_change
CP r32, r1 CP r32, r1
JEQ r32, r0, :6 JEQ r32, r0, :6

View file

@ -1,21 +0,0 @@
b:
CP r13, r3
CP r1, r13
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -32d
ST r31, r254, 8a, 24h
ADDI64 r32, r254, 0d
LI64 r33, 100d
ST r33, r254, 0a, 8h
CP r2, r32
CP r3, r33
JAL r31, r0, :b
CP r32, r1
CP r1, r32
LD r31, r254, 8a, 24h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
code size: 137
ret: 100
status: Ok(())

View file

@ -42,11 +42,10 @@ free:
CP r4, r14 CP r4, r14
CP r5, r15 CP r5, r15
ECA ECA
CP r13, r1
JALA r0, r31, 0a JALA r0, r31, 0a
main: main:
ADDI64 r254, r254, -88d ADDI64 r254, r254, -96d
ST r31, r254, 48a, 40h ST r31, r254, 48a, 48h
ADDI64 r32, r254, 24d ADDI64 r32, r254, 24d
CP r1, r32 CP r1, r32
JAL r31, r0, :new JAL r31, r0, :new
@ -61,19 +60,20 @@ main:
CP r2, r33 CP r2, r33
CP r3, r34 CP r3, r34
JAL r31, r0, :push JAL r31, r0, :push
LD r34, r254, 0a, 8h CP r34, r1
LD r34, r34, 0a, 1h LD r35, r254, 0a, 8h
LD r35, r254, 24a, 8h LD r35, r35, 0a, 1h
LD r35, r35, 0a, 8h LD r36, r254, 24a, 8h
LD r34, r36, 0a, 8h
CP r2, r33 CP r2, r33
JAL r31, r0, :deinit JAL r31, r0, :deinit
CP r2, r32 CP r2, r32
JAL r31, r0, :deinit JAL r31, r0, :deinit
ANDI r32, r34, 255d ANDI r32, r35, 255d
ADD64 r32, r35, r32 ADD64 r32, r34, r32
CP r1, r32 CP r1, r32
LD r31, r254, 48a, 40h LD r31, r254, 48a, 48h
ADDI64 r254, r254, 88d ADDI64 r254, r254, 96d
JALA r0, r31, 0a JALA r0, r31, 0a
malloc: malloc:
CP r13, r2 CP r13, r2
@ -112,49 +112,51 @@ new:
push: push:
ADDI64 r254, r254, -80d ADDI64 r254, r254, -80d
ST r31, r254, 0a, 80h ST r31, r254, 0a, 80h
CP r36, r2 CP r38, r2
CP r37, r3 CP r39, r3
LI64 r35, 1d LI64 r37, 1d
LD r33, r36, 8a, 8h LD r33, r38, 8a, 8h
LD r32, r36, 16a, 8h LD r32, r38, 16a, 8h
JNE r32, r33, :0 JNE r32, r33, :0
JNE r32, r0, :1 JNE r32, r0, :1
CP r32, r35 CP r32, r37
JMP :2 JMP :2
1: MULI64 r32, r32, 2d 1: MULI64 r32, r32, 2d
2: CP r2, r32 2: CP r2, r32
CP r3, r35 CP r3, r37
JAL r31, r0, :malloc JAL r31, r0, :malloc
ST r32, r36, 16a, 8h CP r35, r1
CP r34, r1 ST r32, r38, 16a, 8h
JNE r34, r0, :3 JNE r35, r0, :3
CP r1, r0 CP r1, r0
JMP :4 JMP :4
3: LD r32, r36, 0a, 8h 3: LD r32, r38, 0a, 8h
ADD64 r38, r33, r32 ADD64 r40, r33, r32
CP r33, r34 CP r34, r35
7: LD r39, r36, 0a, 8h 7: LD r33, r38, 0a, 8h
LD r40, r36, 8a, 8h LD r36, r38, 8a, 8h
JNE r38, r32, :5 JNE r40, r32, :5
JEQ r40, r0, :6 JEQ r36, r0, :6
CP r2, r39 CP r2, r33
CP r3, r40 CP r3, r36
CP r4, r35 CP r4, r37
JAL r31, r0, :free JAL r31, r0, :free
JMP :6 JMP :6
6: ST r34, r36, 0a, 8h 6: ST r35, r38, 0a, 8h
JMP :0 JMP :0
5: LD r39, r32, 0a, 1h 5: ADDI64 r36, r34, 1d
ST r39, r33, 0a, 1h ADDI64 r33, r32, 1d
ADDI64 r33, r33, 1d LD r32, r32, 0a, 1h
ADDI64 r32, r32, 1d ST r32, r34, 0a, 1h
CP r32, r33
CP r34, r36
JMP :7 JMP :7
0: LD r32, r36, 8a, 8h 0: LD r32, r38, 8a, 8h
LD r33, r36, 0a, 8h LD r33, r38, 0a, 8h
ADD64 r33, r32, r33 ADD64 r33, r32, r33
ST r37, r33, 0a, 1h ST r39, r33, 0a, 1h
ADD64 r32, r32, r35 ADD64 r32, r32, r37
ST r32, r36, 8a, 8h ST r32, r38, 8a, 8h
CP r1, r33 CP r1, r33
4: LD r31, r254, 0a, 80h 4: LD r31, r254, 0a, 80h
ADDI64 r254, r254, 80d ADDI64 r254, r254, 80d
@ -162,58 +164,60 @@ push:
push: push:
ADDI64 r254, r254, -88d ADDI64 r254, r254, -88d
ST r31, r254, 0a, 88h ST r31, r254, 0a, 88h
CP r36, r2 CP r38, r2
CP r37, r3 CP r39, r3
LI64 r35, 1d LI64 r37, 1d
LD r33, r36, 8a, 8h LD r33, r38, 8a, 8h
LD r32, r36, 16a, 8h LD r32, r38, 16a, 8h
JNE r32, r33, :0 JNE r32, r33, :0
JNE r32, r0, :1 JNE r32, r0, :1
CP r32, r35 CP r32, r37
JMP :2 JMP :2
1: MULI64 r32, r32, 2d 1: MULI64 r32, r32, 2d
2: LI64 r38, 8d 2: LI64 r40, 8d
MUL64 r34, r32, r38 MUL64 r34, r32, r40
CP r2, r34 CP r2, r34
CP r3, r38 CP r3, r40
JAL r31, r0, :malloc JAL r31, r0, :malloc
ST r32, r36, 16a, 8h CP r35, r1
CP r34, r1 ST r32, r38, 16a, 8h
JNE r34, r0, :3 JNE r35, r0, :3
CP r1, r0 CP r1, r0
JMP :4 JMP :4
3: MULI64 r33, r33, 8d 3: MULI64 r33, r33, 8d
LD r32, r36, 0a, 8h LD r32, r38, 0a, 8h
ADD64 r39, r32, r33 ADD64 r41, r32, r33
CP r33, r34 CP r34, r35
7: LD r40, r36, 0a, 8h 7: LD r33, r38, 0a, 8h
LD r41, r36, 8a, 8h LD r36, r38, 8a, 8h
JNE r39, r32, :5 JNE r41, r32, :5
JEQ r41, r0, :6 JEQ r36, r0, :6
MUL64 r32, r41, r38 MUL64 r32, r36, r40
CP r2, r40 CP r2, r33
CP r3, r32 CP r3, r32
CP r4, r38 CP r4, r40
JAL r31, r0, :free JAL r31, r0, :free
JMP :6 JMP :6
6: ST r34, r36, 0a, 8h 6: ST r35, r38, 0a, 8h
JMP :0 JMP :0
5: LD r40, r32, 0a, 8h 5: ADDI64 r36, r34, 8d
ST r40, r33, 0a, 8h ADDI64 r33, r32, 8d
ADDI64 r33, r33, 8d LD r32, r32, 0a, 8h
ADDI64 r32, r32, 8d ST r32, r34, 0a, 8h
CP r32, r33
CP r34, r36
JMP :7 JMP :7
0: LD r32, r36, 8a, 8h 0: LD r32, r38, 8a, 8h
MULI64 r33, r32, 8d MULI64 r33, r32, 8d
LD r34, r36, 0a, 8h LD r34, r38, 0a, 8h
ADD64 r33, r34, r33 ADD64 r33, r34, r33
ST r37, r33, 0a, 8h ST r39, r33, 0a, 8h
ADD64 r32, r32, r35 ADD64 r32, r32, r37
ST r32, r36, 8a, 8h ST r32, r38, 8a, 8h
CP r1, r33 CP r1, r33
4: LD r31, r254, 0a, 88h 4: LD r31, r254, 0a, 88h
ADDI64 r254, r254, 88d ADDI64 r254, r254, 88d
JALA r0, r31, 0a JALA r0, r31, 0a
code size: 1623 code size: 1635
ret: 69 ret: 69
status: Ok(()) status: Ok(())
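Both `push` listings above (the 1-byte and the 8-byte element variant) follow the same growth strategy: when `len == cap`, the capacity becomes 1 if it was 0 and doubles otherwise, a new buffer is allocated, the old elements are copied over one by one, the old buffer is freed, and only then is the value stored at `ptr + len`. A rough Rust rendering of that control flow; the `Buf` type is illustrative and not the test's actual source (it omits `Drop` and asserts on allocation failure instead of returning a null pointer):

```rust
use std::{alloc, ptr};

/// Bare-bones growable buffer of `u64`s mirroring the `push` listings above.
struct Buf {
    ptr: *mut u64,
    len: usize,
    cap: usize,
}

impl Buf {
    fn new() -> Self {
        Buf { ptr: ptr::null_mut(), len: 0, cap: 0 }
    }

    fn push(&mut self, value: u64) {
        if self.len == self.cap {
            // cap = 1 if it was 0, otherwise cap * 2 -- the same branch as the listing.
            let new_cap = if self.cap == 0 { 1 } else { self.cap * 2 };
            let layout = alloc::Layout::array::<u64>(new_cap).unwrap();
            let new_ptr = unsafe { alloc::alloc(layout) as *mut u64 };
            assert!(!new_ptr.is_null(), "allocation failed");
            if !self.ptr.is_null() {
                unsafe {
                    // Copy the old elements, then free the old buffer.
                    ptr::copy_nonoverlapping(self.ptr, new_ptr, self.len);
                    alloc::dealloc(
                        self.ptr as *mut u8,
                        alloc::Layout::array::<u64>(self.cap).unwrap(),
                    );
                }
            }
            self.ptr = new_ptr;
            self.cap = new_cap;
        }
        // Store the new element at ptr + len and bump the length.
        unsafe { self.ptr.add(self.len).write(value) };
        self.len += 1;
    }
}

fn main() {
    let mut buf = Buf::new();
    for i in 0..10 {
        buf.push(i);
    }
    assert_eq!(unsafe { buf.ptr.add(9).read() }, 9);
}
```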

View file

@ -2,8 +2,8 @@ inb:
CP r1, r0 CP r1, r0
JALA r0, r31, 0a JALA r0, r31, 0a
main: main:
ADDI64 r254, r254, -24d ADDI64 r254, r254, -32d
ST r31, r254, 0a, 24h ST r31, r254, 0a, 32h
LRA r32, r0, :ports LRA r32, r0, :ports
LD r33, r32, 0a, 1h LD r33, r32, 0a, 1h
ANDI r33, r33, 255d ANDI r33, r33, 255d
@ -11,12 +11,12 @@ main:
JMP :1 JMP :1
0: JAL r31, r0, :inb 0: JAL r31, r0, :inb
CP r33, r1 CP r33, r1
CMPU r33, r33, r0 CMPU r34, r33, r0
CMPUI r33, r33, 0d CMPUI r34, r34, 0d
NOT r33, r33 NOT r34, r34
ST r33, r32, 0a, 1h ST r34, r32, 0a, 1h
1: LD r31, r254, 0a, 24h 1: LD r31, r254, 0a, 32h
ADDI64 r254, r254, 24d ADDI64 r254, r254, 32d
JALA r0, r31, 0a JALA r0, r31, 0a
code size: 164 code size: 164
ret: 0 ret: 0

View file

@ -1,20 +1,21 @@
main: main:
ADDI64 r254, r254, -128d ADDI64 r254, r254, -128d
ADDI64 r14, r254, 0d ADDI64 r15, r254, 0d
LI8 r15, 69b LI8 r16, 69b
LI64 r16, 128d LI64 r17, 128d
CP r13, r0 CP r13, r0
2: LD r17, r254, 42a, 1h 2: LD r14, r254, 42a, 1h
JLTU r13, r16, :0 JLTU r13, r17, :0
ANDI r13, r17, 255d ANDI r13, r14, 255d
CP r1, r13 CP r1, r13
JMP :1 JMP :1
0: ADD64 r17, r14, r13 0: ADDI64 r14, r13, 1d
ST r15, r17, 0a, 1h ADD64 r13, r15, r13
ADDI64 r13, r13, 1d ST r16, r13, 0a, 1h
CP r13, r14
JMP :2 JMP :2
1: ADDI64 r254, r254, 128d 1: ADDI64 r254, r254, 128d
JALA r0, r31, 0a JALA r0, r31, 0a
code size: 138 code size: 141
ret: 69 ret: 69
status: Ok(()) status: Ok(())

View file

@ -59,9 +59,9 @@ put_filled_rect:
LD r14, r14, 0a, 8h LD r14, r14, 0a, 8h
ADD64 r26, r14, r26 ADD64 r26, r14, r26
LD r28, r15, 0a, 8h LD r28, r15, 0a, 8h
MUL64 r15, r27, r25
ADD64 r14, r14, r15
ADD64 r15, r28, r26 ADD64 r15, r28, r26
MUL64 r25, r27, r25
ADD64 r14, r14, r25
ADD64 r14, r28, r14 ADD64 r14, r28, r14
3: JGTU r13, r20, :0 3: JGTU r13, r20, :0
JNE r13, r20, :1 JNE r13, r20, :1

View file

View file

@ -6,9 +6,9 @@ integer_range:
CP r2, r16 CP r2, r16
CP r3, r15 CP r3, r15
ECA ECA
CP r15, r1
SUB64 r14, r14, r13 SUB64 r14, r14, r13
ADDI64 r14, r14, 1d ADDI64 r14, r14, 1d
CP r15, r1
DIRU64 r0, r14, r15, r14 DIRU64 r0, r14, r15, r14
ADD64 r13, r14, r13 ADD64 r13, r14, r13
CP r1, r13 CP r1, r13
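The only change in this hunk is where the ECA result is copied out of `r1`; the surrounding arithmetic is untouched and appears to fold the host-supplied value into the requested bounds with a remainder, i.e. `min + raw % (max - min + 1)`. As a plain function (the names are illustrative, and it assumes `max >= min` with no overflow in `max - min + 1`):

```rust
/// Illustrative version of the arithmetic in the `integer_range` listing;
/// `raw` stands for the value the host returns through the ECA trap.
fn integer_range(min: u64, max: u64, raw: u64) -> u64 {
    min + raw % (max - min + 1)
}

fn main() {
    assert!((3..=7).contains(&integer_range(3, 7, 123)));
}
```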

View file

@ -1,49 +0,0 @@
chars:
ADDI64 r254, r254, -32d
ST r3, r254, 16a, 16h
ADDI64 r3, r254, 16d
CP r13, r3
ADDI64 r14, r254, 0d
BMC r13, r14, 16h
LD r1, r14, 0a, 16h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -56d
ST r31, r254, 32a, 24h
LRA r32, r0, :Hello, World!
ST r32, r254, 16a, 8h
LI64 r32, 13d
ST r32, r254, 24a, 8h
ADDI64 r32, r254, 0d
LD r3, r254, 16a, 16h
JAL r31, r0, :chars
ST r1, r32, 0a, 16h
2: CP r2, r32
JAL r31, r0, :next
CP r33, r1
ANDI r33, r33, 65535d
JNE r33, r0, :0
JMP :1
0: JMP :2
1: LD r31, r254, 32a, 24h
ADDI64 r254, r254, 56d
JALA r0, r31, 0a
next:
CP r13, r2
LD r14, r13, 8a, 8h
JNE r14, r0, :0
CP r1, r0
JMP :1
0: LD r15, r13, 0a, 8h
ADDI64 r15, r15, 1d
ST r15, r13, 0a, 8h
ADDI64 r14, r14, -1d
LD r15, r15, 0a, 1h
ST r14, r13, 8a, 8h
ORI r13, r15, 32768d
CP r1, r13
1: JALA r0, r31, 0a
code size: 423
ret: 0
status: Ok(())

View file

@ -10,26 +10,26 @@ decide:
ADDI64 r254, r254, 24d ADDI64 r254, r254, 24d
JALA r0, r31, 0a JALA r0, r31, 0a
main: main:
ADDI64 r254, r254, -104d ADDI64 r254, r254, -120d
ST r31, r254, 72a, 32h ST r31, r254, 72a, 48h
ADDI64 r32, r254, 48d ADDI64 r32, r254, 48d
CP r1, r32 CP r1, r32
CP r2, r0 CP r2, r0
JAL r31, r0, :decide JAL r31, r0, :decide
ADDI64 r33, r254, 24d ADDI64 r34, r254, 24d
BMC r32, r33, 24h
LI64 r34, 1d
CP r1, r33
CP r2, r34
JAL r31, r0, :decide
ADDI64 r34, r254, 0d
BMC r32, r34, 24h BMC r32, r34, 24h
LI64 r35, 1d
CP r1, r34
CP r2, r35
JAL r31, r0, :decide
ADDI64 r36, r254, 0d
BMC r32, r36, 24h
LD r32, r254, 24a, 8h LD r32, r254, 24a, 8h
LD r33, r254, 0a, 8h LD r33, r254, 0a, 8h
ADD64 r32, r33, r32 ADD64 r32, r33, r32
CP r1, r32 CP r1, r32
LD r31, r254, 72a, 32h LD r31, r254, 72a, 48h
ADDI64 r254, r254, 104d ADDI64 r254, r254, 120d
JALA r0, r31, 0a JALA r0, r31, 0a
code size: 273 code size: 273
ret: 1 ret: 1

View file

@ -3,15 +3,15 @@ main:
ST r31, r254, 32a, 40h ST r31, r254, 32a, 40h
LRA r32, r0, :"Goodbye, World!\0" LRA r32, r0, :"Goodbye, World!\0"
LRA r33, r0, :"Hello, World!\0" LRA r33, r0, :"Hello, World!\0"
ST r32, r254, 16a, 8h ST r32, r254, 8a, 8h
ST r33, r254, 24a, 8h ST r33, r254, 24a, 8h
LD r2, r254, 24a, 8h LD r2, r254, 24a, 8h
LD r3, r254, 16a, 8h LD r3, r254, 8a, 8h
JAL r31, r0, :print JAL r31, r0, :print
ADDI64 r34, r254, 8d ADDI64 r34, r254, 0d
ADDI64 r35, r254, 0d ADDI64 r35, r254, 16d
ST r32, r254, 8a, 8h ST r32, r254, 0a, 8h
ST r33, r254, 0a, 8h ST r33, r254, 16a, 8h
CP r2, r35 CP r2, r35
CP r3, r34 CP r3, r34
JAL r31, r0, :print2 JAL r31, r0, :print2

View file

@ -1,6 +1,6 @@
main: main:
ADDI64 r254, r254, -58d ADDI64 r254, r254, -66d
ST r31, r254, 26a, 32h ST r31, r254, 26a, 40h
JAL r31, r0, :returner_fn JAL r31, r0, :returner_fn
CP r32, r1 CP r32, r1
ADDI64 r33, r254, 2d ADDI64 r33, r254, 2d
@ -25,8 +25,8 @@ main:
JMP :1 JMP :1
0: LI64 r32, 1d 0: LI64 r32, 1d
CP r1, r32 CP r1, r32
1: LD r31, r254, 26a, 32h 1: LD r31, r254, 26a, 40h
ADDI64 r254, r254, 58d ADDI64 r254, r254, 66d
JALA r0, r31, 0a JALA r0, r31, 0a
returner_bn: returner_bn:
ADDI64 r254, r254, -24d ADDI64 r254, r254, -24d

View file

@ -3,18 +3,20 @@ decide:
CP r1, r13 CP r1, r13
JALA r0, r31, 0a JALA r0, r31, 0a
main: main:
ADDI64 r254, r254, -136d ADDI64 r254, r254, -128d
ST r31, r254, 96a, 40h ST r31, r254, 80a, 48h
JAL r31, r0, :decide JAL r31, r0, :decide
CP r33, r0
ADDI64 r34, r254, 88d
CP r32, r1 CP r32, r1
CP r33, r0
ADDI64 r34, r254, 72d
ANDI r32, r32, 255d ANDI r32, r32, 255d
JNE r32, r0, :0 JNE r32, r0, :0
CP r32, r33 CP r32, r33
JMP :1 JMP :1
0: CP r32, r34 0: CP r32, r34
1: JNE r32, r33, :2 1: LI64 r35, 1d
ST r35, r254, 72a, 8h
JNE r32, r33, :2
LI64 r32, 9001d LI64 r32, 9001d
CP r1, r32 CP r1, r32
JMP :3 JMP :3
@ -23,20 +25,20 @@ main:
ANDI r33, r33, 255d ANDI r33, r33, 255d
JNE r33, r0, :4 JNE r33, r0, :4
LI8 r33, 1b LI8 r33, 1b
ST r33, r254, 72a, 1h ST r33, r254, 56a, 1h
LD r32, r32, 0a, 8h LD r32, r32, 0a, 8h
ST r32, r254, 80a, 8h ST r32, r254, 64a, 8h
JMP :5 JMP :5
4: ST r0, r254, 72a, 1h 4: ST r0, r254, 56a, 1h
5: LD r32, r254, 72a, 1h 5: LD r32, r254, 56a, 1h
ANDI r32, r32, 255d ANDI r32, r32, 255d
JEQ r32, r0, :6 JEQ r32, r0, :6
LI64 r32, 42d LI64 r32, 42d
CP r1, r32 CP r1, r32
JMP :3 JMP :3
6: JAL r31, r0, :decide 6: JAL r31, r0, :decide
CP r33, r0
CP r32, r1 CP r32, r1
CP r33, r0
ANDI r32, r32, 255d ANDI r32, r32, 255d
JNE r32, r0, :7 JNE r32, r0, :7
CP r32, r33 CP r32, r33
@ -48,33 +50,28 @@ main:
LI64 r32, 69d LI64 r32, 69d
CP r1, r32 CP r1, r32
JMP :3 JMP :3
9: ADDI64 r33, r254, 56d 9: ADDI64 r33, r254, 40d
JAL r31, r0, :new_foo JAL r31, r0, :new_foo
ST r1, r33, 0a, 16h ST r1, r33, 0a, 16h
LD r35, r254, 56a, 8h LD r36, r254, 40a, 8h
JNE r35, r0, :10 JNE r36, r0, :10
LI64 r32, 999d LI64 r32, 999d
CP r1, r32 CP r1, r32
JMP :3 JMP :3
10: LRA r35, r0, :"foo\0" 10: LRA r36, r0, :"foo\0"
ST r35, r254, 40a, 8h
LI64 r35, 4d
ST r35, r254, 48a, 8h
LD r2, r33, 0a, 16h LD r2, r33, 0a, 16h
LD r4, r254, 40a, 16h CP r4, r36
JAL r31, r0, :use_foo JAL r31, r0, :use_foo
ADDI64 r33, r254, 0d ADDI64 r33, r254, 0d
JAL r31, r0, :no_foo JAL r31, r0, :no_foo
ST r1, r33, 0a, 16h ST r1, r33, 0a, 16h
JAL r31, r0, :decide JAL r31, r0, :decide
CP r35, r1 CP r36, r1
ANDI r35, r35, 255d ANDI r36, r36, 255d
JNE r35, r0, :11 JNE r36, r0, :11
JMP :12 JMP :12
11: ST r34, r254, 0a, 8h 11: ST r34, r254, 0a, 8h
LI64 r35, 1d
ST r35, r254, 8a, 8h ST r35, r254, 8a, 8h
ST r35, r254, 88a, 8h
12: LD r35, r254, 0a, 8h 12: LD r35, r254, 0a, 8h
JNE r35, r0, :13 JNE r35, r0, :13
LI64 r32, 34d LI64 r32, 34d
@ -101,8 +98,8 @@ main:
ANDI r32, r32, 65535d ANDI r32, r32, 65535d
SUB64 r32, r32, r33 SUB64 r32, r32, r33
CP r1, r32 CP r1, r32
3: LD r31, r254, 96a, 40h 3: LD r31, r254, 80a, 48h
ADDI64 r254, r254, 136d ADDI64 r254, r254, 128d
JALA r0, r31, 0a JALA r0, r31, 0a
new_bar: new_bar:
ADDI64 r254, r254, -24d ADDI64 r254, r254, -24d
@ -132,13 +129,11 @@ no_foo:
ADDI64 r254, r254, 16d ADDI64 r254, r254, 16d
JALA r0, r31, 0a JALA r0, r31, 0a
use_foo: use_foo:
ADDI64 r254, r254, -32d ADDI64 r254, r254, -16d
ST r2, r254, 16a, 16h ST r2, r254, 0a, 16h
ADDI64 r2, r254, 16d ADDI64 r2, r254, 0d
ST r4, r254, 0a, 16h ADDI64 r254, r254, 16d
ADDI64 r4, r254, 0d
ADDI64 r254, r254, 32d
JALA r0, r31, 0a JALA r0, r31, 0a
code size: 1162 code size: 1092
ret: 0 ret: 0
status: Ok(()) status: Ok(())

View file

@ -1,6 +1,6 @@
main: main:
ADDI64 r254, r254, -56d ADDI64 r254, r254, -64d
ST r31, r254, 24a, 32h ST r31, r254, 24a, 40h
ADDI64 r32, r254, 0d ADDI64 r32, r254, 0d
LI64 r33, 1d LI64 r33, 1d
ST r33, r254, 16a, 8h ST r33, r254, 16a, 8h
@ -9,14 +9,14 @@ main:
ST r33, r254, 8a, 8h ST r33, r254, 8a, 8h
JAL r31, r0, :opaque JAL r31, r0, :opaque
ST r1, r32, 0a, 16h ST r1, r32, 0a, 16h
LD r33, r254, 8a, 8h LD r34, r254, 8a, 8h
LD r34, r254, 16a, 8h LD r35, r254, 16a, 8h
ADD64 r33, r34, r33 ADD64 r34, r35, r34
LD r32, r254, 0a, 8h LD r32, r254, 0a, 8h
SUB64 r32, r32, r33 SUB64 r32, r32, r34
CP r1, r32 CP r1, r32
LD r31, r254, 24a, 32h LD r31, r254, 24a, 40h
ADDI64 r254, r254, 56d ADDI64 r254, r254, 64d
JALA r0, r31, 0a JALA r0, r31, 0a
opaque: opaque:
ADDI64 r254, r254, -16d ADDI64 r254, r254, -16d

View file

@ -1,6 +0,0 @@
main:
CP r1, r0
JALA r0, r31, 0a
code size: 22
ret: 0
status: Ok(())

View file

@ -1,23 +1,23 @@
main: main:
ADDI64 r254, r254, -44d ADDI64 r254, r254, -52d
ST r31, r254, 4a, 40h ST r31, r254, 4a, 48h
ADDI64 r32, r254, 0d ADDI64 r32, r254, 0d
JAL r31, r0, :random_color JAL r31, r0, :random_color
ST r1, r32, 0a, 4h ST r1, r32, 0a, 4h
LD r33, r254, 0a, 1h LD r34, r254, 0a, 1h
LD r34, r254, 1a, 1h LD r35, r254, 1a, 1h
LD r35, r254, 2a, 1h LD r36, r254, 2a, 1h
ANDI r33, r33, 255d
ANDI r34, r34, 255d ANDI r34, r34, 255d
LD r32, r254, 3a, 1h
ANDI r35, r35, 255d ANDI r35, r35, 255d
ADD64 r33, r34, r33 LD r32, r254, 3a, 1h
ANDI r33, r36, 255d
ADD64 r34, r35, r34
ANDI r32, r32, 255d ANDI r32, r32, 255d
ADD64 r33, r33, r35 ADD64 r33, r34, r33
ADD64 r32, r33, r32 ADD64 r32, r33, r32
CP r1, r32 CP r1, r32
LD r31, r254, 4a, 40h LD r31, r254, 4a, 48h
ADDI64 r254, r254, 44d ADDI64 r254, r254, 52d
JALA r0, r31, 0a JALA r0, r31, 0a
random_color: random_color:
LRA r13, r0, :white LRA r13, r0, :white

View file

@ -1,8 +0,0 @@
main:
LRA r13, r0, :a
LD r13, r13, 0a, 8h
CP r1, r13
JALA r0, r31, 0a
code size: 50
ret: 0
status: Ok(())

View file

@ -1,27 +0,0 @@
main:
ADDI64 r254, r254, -40d
ST r0, r254, 0a, 8h
LI64 r13, 1d
ST r13, r254, 8a, 8h
LI64 r13, 2d
ST r13, r254, 16a, 8h
LI64 r13, 3d
LI64 r14, 10d
ST r13, r254, 24a, 8h
ST r14, r254, 32a, 8h
LD r13, r254, 0a, 8h
LD r14, r254, 8a, 8h
ADD64 r13, r14, r13
LD r14, r254, 16a, 8h
ADD64 r13, r14, r13
LD r14, r254, 24a, 8h
ADD64 r13, r14, r13
LD r14, r254, 32a, 8h
ADDI64 r13, r13, 4d
SUB64 r13, r13, r14
CP r1, r13
ADDI64 r254, r254, 40d
JALA r0, r31, 0a
code size: 241
ret: 0
status: Ok(())

View file

@ -11,9 +11,9 @@ main:
JALA r0, r31, 0a JALA r0, r31, 0a
sqrt: sqrt:
CP r14, r2 CP r14, r2
CP r17, r0
LI64 r16, 15d LI64 r16, 15d
LI64 r15, 32768d LI64 r15, 32768d
CP r17, r0
CP r13, r17 CP r13, r17
3: JNE r15, r17, :0 3: JNE r15, r17, :0
CP r1, r13 CP r1, r13

View file

@ -5,8 +5,8 @@ do_stuff:
just_read: just_read:
JALA r0, r31, 0a JALA r0, r31, 0a
main: main:
ADDI64 r254, r254, -72d ADDI64 r254, r254, -80d
ST r31, r254, 48a, 24h ST r31, r254, 48a, 32h
ADDI64 r32, r254, 16d ADDI64 r32, r254, 16d
CP r1, r32 CP r1, r32
JAL r31, r0, :optionala JAL r31, r0, :optionala
@ -37,8 +37,8 @@ main:
CP r33, r1 CP r33, r1
ADD64 r32, r33, r32 ADD64 r32, r33, r32
CP r1, r32 CP r1, r32
1: LD r31, r254, 48a, 24h 1: LD r31, r254, 48a, 32h
ADDI64 r254, r254, 72d ADDI64 r254, r254, 80d
JALA r0, r31, 0a JALA r0, r31, 0a
optional: optional:
ADDI64 r254, r254, -16d ADDI64 r254, r254, -16d

View file

@ -1,9 +0,0 @@
main:
LRA r13, r0, :"abcdefshijklmnop\0"
LD r13, r13, 0a, 1h
ANDI r13, r13, 255d
CP r1, r13
JALA r0, r31, 0a
code size: 70
ret: 97
status: Ok(())

View file

@ -1,44 +1,45 @@
main: main:
ADDI64 r254, r254, -40d ADDI64 r254, r254, -40d
LI64 r16, 1d LI64 r17, 1d
LI64 r15, 4d LI64 r15, 4d
ADDI64 r17, r254, 0d
CP r14, r0 CP r14, r0
ADDI64 r18, r254, 0d
CP r13, r14 CP r13, r14
6: JNE r13, r15, :0 6: JNE r13, r15, :0
ADDI64 r18, r254, 32d ADDI64 r19, r254, 32d
LI64 r19, 2d LI64 r20, 2d
CP r13, r14 CP r13, r14
4: LD r15, r254, 16a, 8h 4: LD r15, r254, 16a, 8h
JNE r13, r16, :1 JNE r13, r17, :1
CP r1, r15 CP r1, r15
JMP :2 JMP :2
1: ADD64 r15, r13, r16 1: ADD64 r16, r13, r17
SUB64 r20, r19, r15 SUB64 r15, r20, r16
MUL64 r20, r20, r19 MUL64 r21, r15, r20
MUL64 r21, r13, r19 MUL64 r22, r13, r20
CP r13, r14 CP r13, r14
5: JNE r13, r19, :3 5: JNE r13, r20, :3
CP r13, r15 CP r13, r16
JMP :4 JMP :4
3: ADD64 r22, r21, r13 3: ADD64 r15, r13, r17
ADD64 r23, r20, r13 ADD64 r23, r22, r13
MULI64 r22, r22, 8d ADD64 r13, r21, r13
MULI64 r23, r23, 8d MULI64 r23, r23, 8d
ADD64 r22, r17, r22 MULI64 r13, r13, 8d
ADD64 r23, r17, r23 ADD64 r23, r18, r23
BMC r22, r18, 8h ADD64 r13, r18, r13
BMC r23, r22, 8h BMC r23, r19, 8h
BMC r18, r23, 8h BMC r13, r23, 8h
ADD64 r13, r13, r16 BMC r19, r13, 8h
CP r13, r15
JMP :5 JMP :5
0: MULI64 r18, r13, 8d 0: MULI64 r16, r13, 8d
ADD64 r18, r17, r18 ADD64 r16, r18, r16
ST r13, r18, 0a, 8h ST r13, r16, 0a, 8h
ADD64 r13, r13, r16 ADD64 r13, r13, r17
JMP :6 JMP :6
2: ADDI64 r254, r254, 40d 2: ADDI64 r254, r254, 40d
JALA r0, r31, 0a JALA r0, r31, 0a
code size: 264 code size: 267
ret: 0 ret: 0
status: Ok(()) status: Ok(())

View file

@ -9,8 +9,8 @@ foo:
ADDI64 r254, r254, 16d ADDI64 r254, r254, 16d
JALA r0, r31, 0a JALA r0, r31, 0a
main: main:
ADDI64 r254, r254, -80d ADDI64 r254, r254, -88d
ST r31, r254, 48a, 32h ST r31, r254, 48a, 40h
ADDI64 r32, r254, 32d ADDI64 r32, r254, 32d
JAL r31, r0, :foo JAL r31, r0, :foo
ST r1, r32, 0a, 16h ST r1, r32, 0a, 16h
@ -30,8 +30,8 @@ main:
LI64 r33, 7d LI64 r33, 7d
SUB64 r32, r33, r32 SUB64 r32, r33, r32
CP r1, r32 CP r1, r32
LD r31, r254, 48a, 32h LD r31, r254, 48a, 40h
ADDI64 r254, r254, 80d ADDI64 r254, r254, 88d
JALA r0, r31, 0a JALA r0, r31, 0a
code size: 347 code size: 347
ret: 0 ret: 0

View file

@ -1,12 +0,0 @@
main:
LRA r13, r0, :"abcd\0"
ADDI64 r13, r13, 1d
LI64 r14, 37d
CP r2, r14
CP r3, r13
ECA
JALA r0, r31, 0a
bcd
code size: 59
ret: 0
status: Ok(())

View file

@ -1,26 +1,28 @@
main: main:
ADDI64 r254, r254, -10240d ADDI64 r254, r254, -10240d
LI8 r14, 64b LI8 r15, 64b
LI64 r15, 1024d LI64 r16, 1024d
ADDI64 r16, r254, 0d
CP r13, r0 CP r13, r0
4: JLTU r13, r15, :0 ADDI64 r17, r254, 0d
ADDI64 r14, r16, 10240d 4: JLTU r13, r16, :0
ADDI64 r13, r16, 1024d ADDI64 r13, r17, 1024d
3: LD r15, r254, 2048a, 1h ADDI64 r15, r17, 10240d
JLTU r13, r14, :1 3: LD r14, r254, 2048a, 1h
ANDI r13, r15, 255d JLTU r13, r15, :1
ANDI r13, r14, 255d
CP r1, r13 CP r1, r13
JMP :2 JMP :2
1: BMC r16, r13, 1024h 1: ADDI64 r14, r13, 1024d
ADDI64 r13, r13, 1024d BMC r17, r13, 1024h
CP r13, r14
JMP :3 JMP :3
0: ADD64 r17, r16, r13 0: ADDI64 r14, r13, 1d
ST r14, r17, 0a, 1h ADD64 r13, r17, r13
ADDI64 r13, r13, 1d ST r15, r13, 0a, 1h
CP r13, r14
JMP :4 JMP :4
2: ADDI64 r254, r254, 10240d 2: ADDI64 r254, r254, 10240d
JALA r0, r31, 0a JALA r0, r31, 0a
code size: 186 code size: 192
ret: 64 ret: 64
status: Ok(()) status: Ok(())
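Both versions of this listing fill the 10 KiB stack buffer the same way: the first 1024 bytes are written one at a time, and the rest of the buffer is then filled by `BMC`-copying that first chunk forward; the diff only reshuffles which registers hold the cursors. A small sketch of the fill-then-block-copy pattern over a byte slice, with the chunk size and fill value taken from the listing:

```rust
/// Fill `buf` with `value`: write the first `chunk` bytes directly, then
/// replicate that chunk with block copies, as the listing above does with BMC.
fn fill_blocked(buf: &mut [u8], value: u8, chunk: usize) {
    let head = chunk.min(buf.len());
    for b in &mut buf[..head] {
        *b = value;
    }
    let mut done = head;
    while done < buf.len() {
        let step = chunk.min(buf.len() - done);
        buf.copy_within(..step, done); // the BMC step
        done += step;
    }
}

fn main() {
    let mut buf = vec![0u8; 10 * 1024];
    fill_blocked(&mut buf, 64, 1024);
    assert_eq!(buf[2048], 64); // the listing reads back the byte at offset 2048
}
```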

View file

@ -1,10 +1,10 @@
main: main:
ADDI64 r254, r254, -64d ADDI64 r254, r254, -64d
ST r31, r254, 0a, 64h ST r31, r254, 0a, 64h
LI64 r37, 65536d
CP r36, r0
CP r34, r0 CP r34, r0
LI64 r37, 65536d
LI8 r35, 1b LI8 r35, 1b
CP r36, r0
CP r32, r36 CP r32, r36
7: JAL r31, r0, :opaque 7: JAL r31, r0, :opaque
CP r33, r1 CP r33, r1

View file

@ -1,6 +0,0 @@
main:
CP r1, r0
JALA r0, r31, 0a
code size: 22
ret: 0
status: Ok(())

View file

@ -1,46 +1,46 @@
main: main:
ADDI64 r254, r254, -40d ADDI64 r254, r254, -48d
ST r31, r254, 16a, 24h ST r31, r254, 16a, 32h
ADDI64 r32, r254, 0d ADDI64 r32, r254, 0d
CP r3, r0 CP r3, r0
CP r4, r0 CP r4, r0
JAL r31, r0, :maina JAL r31, r0, :maina
ST r1, r32, 0a, 16h ST r1, r32, 0a, 16h
LD r33, r254, 12a, 1h LD r34, r254, 12a, 1h
LD r32, r254, 3a, 1h LD r32, r254, 3a, 1h
SUB8 r32, r32, r33 SUB8 r32, r32, r34
ANDI r32, r32, 255d ANDI r32, r32, 255d
CP r1, r32 CP r1, r32
LD r31, r254, 16a, 24h LD r31, r254, 16a, 32h
ADDI64 r254, r254, 40d ADDI64 r254, r254, 48d
JALA r0, r31, 0a JALA r0, r31, 0a
maina: maina:
ADDI64 r254, r254, -44d ADDI64 r254, r254, -52d
ST r31, r254, 20a, 24h ST r31, r254, 20a, 32h
ADDI64 r32, r254, 16d ADDI64 r32, r254, 16d
JAL r31, r0, :small_struct JAL r31, r0, :small_struct
ST r1, r32, 0a, 4h ST r1, r32, 0a, 4h
ST r0, r254, 0a, 1h ST r0, r254, 0a, 1h
ST r0, r254, 1a, 1h ST r0, r254, 1a, 1h
ST r0, r254, 2a, 1h ST r0, r254, 2a, 1h
LI8 r32, 3b LI8 r33, 3b
ST r32, r254, 3a, 1h ST r33, r254, 3a, 1h
LI8 r33, 1b LI8 r34, 1b
ST r33, r254, 4a, 1h ST r34, r254, 4a, 1h
ST r0, r254, 5a, 1h ST r0, r254, 5a, 1h
ST r0, r254, 6a, 1h ST r0, r254, 6a, 1h
ST r0, r254, 7a, 1h ST r0, r254, 7a, 1h
ST r0, r254, 8a, 1h ST r0, r254, 8a, 1h
ST r0, r254, 9a, 1h ST r0, r254, 9a, 1h
ST r0, r254, 10a, 1h ST r0, r254, 10a, 1h
ST r32, r254, 11a, 1h ST r33, r254, 11a, 1h
ST r33, r254, 12a, 1h ST r34, r254, 12a, 1h
ST r0, r254, 13a, 1h ST r0, r254, 13a, 1h
ST r0, r254, 14a, 1h ST r0, r254, 14a, 1h
ST r0, r254, 15a, 1h ST r0, r254, 15a, 1h
LD r1, r254, 0a, 16h LD r1, r254, 0a, 16h
LD r31, r254, 20a, 24h LD r31, r254, 20a, 32h
ADDI64 r254, r254, 44d ADDI64 r254, r254, 52d
JALA r0, r31, 0a JALA r0, r31, 0a
small_struct: small_struct:
ADDI64 r254, r254, -4d ADDI64 r254, r254, -4d

View file

@ -3,12 +3,10 @@ name = "hbvm"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2021"
[dependencies]
hbbytecode = { workspace = true }
[features] [features]
default = ["alloc"] default = ["alloc"]
disasm = ["hbbytecode/disasm", "alloc"]
alloc = [] alloc = []
nightly = [] nightly = []
[dependencies]
hbbytecode = { workspace = true }

View file

@ -4,7 +4,7 @@ pub mod softpaging;
pub(crate) mod addr; pub(crate) mod addr;
use crate::{utils::impl_display, value::Value}; use crate::utils::impl_display;
pub use addr::Address; pub use addr::Address;
/// Load-store memory access /// Load-store memory access
@ -36,50 +36,6 @@ pub trait Memory {
/// # Safety /// # Safety
/// - Data read have to be valid /// - Data read have to be valid
unsafe fn prog_read<T: Copy + 'static>(&mut self, addr: Address) -> T; unsafe fn prog_read<T: Copy + 'static>(&mut self, addr: Address) -> T;
/// Log instruction to be executed
fn log_instr(&mut self, _at: Address, _regs: &[Value]) {}
}
#[cfg(feature = "alloc")]
#[derive(Default)]
pub struct InstrLogger {
#[cfg(debug_assertions)]
op_buf: alloc::vec::Vec<hbbytecode::Oper>,
#[cfg(debug_assertions)]
disp_buf: alloc::string::String,
}
#[cfg(feature = "alloc")]
impl InstrLogger {
/// # Safety
/// - `addr` needs to point to a valid instruction
#[cfg(debug_assertions)]
pub unsafe fn display_instr(&mut self, addr: Address, regs: &[Value]) -> &str {
let instr = hbbytecode::Instr::try_from(unsafe { *(addr.get() as *const u8) }).unwrap();
let mut bytes =
unsafe { core::slice::from_raw_parts(addr.get() as *const u8, instr.size()) };
use core::fmt::Write;
hbbytecode::parse_args(&mut bytes, instr, &mut self.op_buf).unwrap();
debug_assert!(bytes.is_empty());
self.disp_buf.clear();
write!(self.disp_buf, "{:<10}", alloc::format!("{instr:?}")).unwrap();
for (i, op) in self.op_buf.drain(..).enumerate() {
if i != 0 {
write!(self.disp_buf, ", ").unwrap();
}
write!(self.disp_buf, "{op:?}").unwrap();
if let hbbytecode::Oper::R(r) = op {
write!(self.disp_buf, "({})", regs[r as usize].0).unwrap()
}
}
&self.disp_buf
}
#[cfg(not(debug_assertions))]
pub unsafe fn display_instr(&mut self, addr: Address, regs: &[Value]) -> &str {
""
}
} }
/// Unhandled load access trap /// Unhandled load access trap

View file

@ -55,7 +55,6 @@ where
// - Yes, we assume you run 64 bit CPU. Else ?conradluget a better CPU // - Yes, we assume you run 64 bit CPU. Else ?conradluget a better CPU
// sorry 8 bit fans, HBVM won't run on your Speccy :( // sorry 8 bit fans, HBVM won't run on your Speccy :(
unsafe { unsafe {
self.memory.log_instr(self.pc, &self.registers);
match self match self
.memory .memory
.prog_read::<u8>(self.pc as _) .prog_read::<u8>(self.pc as _)