forked from AbleOS/holey-bytes
Compare commits
805 commits
Author | SHA1 | Date | |
---|---|---|---|
888b38ad4c | |||
5275a7e0fd | |||
418fd0039e | |||
d220823d78 | |||
af19f4e30d | |||
1621d93e86 | |||
4b3b6af70e | |||
f59c0c1092 | |||
8ad58ee6b6 | |||
6e8eb059f6 | |||
969ea57e3f | |||
cfd3eac0a8 | |||
a8aba7e7c2 | |||
f05c61a99e | |||
e769fa8dba | |||
b3f858f64b | |||
1584ec7563 | |||
6085177982 | |||
47014c6164 | |||
3702a99d03 | |||
248bdf003a | |||
d3f3fe98e3 | |||
14cf5efaa5 | |||
95496116b0 | |||
86f7d70747 | |||
0516ce68f4 | |||
945e5c70f6 | |||
ec9bb886f8 | |||
127fdb3cc5 | |||
e65c72e19f | |||
1ca9529302 | |||
c0d957e70c | |||
b9b8233a53 | |||
d2fa41039b | |||
9fe8d6bbff | |||
04680c8b7c | |||
a1e692eac7 | |||
8bf2d1a266 | |||
1571938e9f | |||
f7d5bccdd9 | |||
07d4fe416a | |||
b2be007ef0 | |||
bfac81c807 | |||
ef36e21475 | |||
8138d6664f | |||
9f43e3bb92 | |||
6fba7da782 | |||
7837eeb90d | |||
5a7a01ca02 | |||
f9c47f86ad | |||
48a0c8d0b9 | |||
00f6729d31 | |||
dc96c8b10a | |||
91e35b72ee | |||
5aeeedbdce | |||
fae75072f4 | |||
71ba2c2486 | |||
c5d5301b7b | |||
c553c3d9e9 | |||
9ce446b507 | |||
3b4b30b2bd | |||
cf672beb79 | |||
3f6ebdd009 | |||
19aca050ed | |||
d368ac023b | |||
8ea6c5cfcc | |||
e7cd2c0129 | |||
e44d003e7f | |||
a2ca8d98df | |||
a3355a59c0 | |||
b63b832383 | |||
116f045a5f | |||
784d552c1d | |||
24a3aed360 | |||
58ee5c0a56 | |||
9dfb2eb606 | |||
5df4fb8882 | |||
86ca959ea3 | |||
f353bd5882 | |||
cad0a828d0 | |||
fb119bc6eb | |||
aa83ed2ec9 | |||
fb11c94af4 | |||
b030b1eeb7 | |||
4856533b22 | |||
d8d039b67a | |||
b760d9ef75 | |||
e587de1778 | |||
b12579ff65 | |||
0aa355695a | |||
4a857d2317 | |||
2253ac6198 | |||
05a7bd0583 | |||
ab55ec0240 | |||
able | 8353ab58a5 | ||
f527d61c1e | |||
f3879cb013 | |||
e89511b14c | |||
1c135a3050 | |||
f83194359c | |||
becd5c4b7f | |||
37dd13cab2 | |||
bc2dd82eb7 | |||
aa2de502cc | |||
542c69fd60 | |||
95e9270fef | |||
fe5a8631f6 | |||
8892dd729a | |||
a7718e1220 | |||
e079bbd312 | |||
12b9d43754 | |||
397b2a4b1b | |||
12bb7029b4 | |||
a64383e72b | |||
2034152c83 | |||
13714eb513 | |||
4088bd18b1 | |||
e94b812b3b | |||
e5d6b35f66 | |||
e6df9b6b01 | |||
baa70d3f12 | |||
ec4499e519 | |||
085c593add | |||
867a750d8f | |||
b1b6d9eba1 | |||
12be64965f | |||
7058efe75c | |||
afc1c5aac5 | |||
83146cfd61 | |||
bb625a9e19 | |||
81cf39b602 | |||
e4da9cc927 | |||
454b0ffd1c | |||
981c17ff19 | |||
d01e31b203 | |||
9cb273a04b | |||
2e2b7612d9 | |||
f493c2776f | |||
f77bc52465 | |||
f524013c34 | |||
3c86eafe72 | |||
0d87bf8f09 | |||
e5a4561f07 | |||
b71031c146 | |||
dd51961fbb | |||
63f2a0dac0 | |||
4ec88e3397 | |||
f1e715e9bd | |||
80fd0e89b4 | |||
9949086011 | |||
c701eb7b6d | |||
f1deab11c9 | |||
f079daa42d | |||
7cac9382ad | |||
ce2f7d2059 | |||
f5f9060803 | |||
ad7fb5d0fc | |||
d99672b751 | |||
7def052749 | |||
b2eefa5b83 | |||
3c35557872 | |||
b6274f3455 | |||
c61efc3933 | |||
654005eea2 | |||
335e6ec20a | |||
1e02efc1eb | |||
8b98c2ed1b | |||
c353d28be0 | |||
7865d692a1 | |||
29a23cec0c | |||
5dce4df2a1 | |||
42a713aeae | |||
823c78bf74 | |||
c657084451 | |||
63a1c7feb4 | |||
bedffa9b32 | |||
b8032aa840 | |||
65e9f272a8 | |||
d2052cd2a3 | |||
29367d8f8b | |||
a299bad75b | |||
7d48d3beb1 | |||
68c0248189 | |||
0ef74d89cb | |||
1b2b9f899d | |||
455f70db6e | |||
0374848b28 | |||
513d2c7127 | |||
9d2f419140 | |||
f535ea7b0a | |||
be6d0d3f18 | |||
2718ef8523 | |||
3ee78f3a31 | |||
2bac7c1fb3 | |||
mlokis | 79a3f1ab2b | ||
koniifer | b15e66b2af | ||
koniifer | d2ba7cc101 | ||
koniifer | d3ee72306e | ||
87cb77a553 | |||
276d1bb0cf | |||
5cce904135 | |||
3338d50672 | |||
2e36f32ae0 | |||
e8f1d2af8c | |||
999b25df8b | |||
61250c906a | |||
44fc9c3e2e | |||
798000c756 | |||
9de631234d | |||
843fbddf3b | |||
38a00cbaa0 | |||
4664240e08 | |||
728d563cea | |||
56984f08ff | |||
3f9f99ff65 | |||
9ed3c7ab9e | |||
acacd10ee9 | |||
f6f661cee3 | |||
4bfb5f192e | |||
ea628c1278 | |||
7448339605 | |||
da7cd5926c | |||
9cf7933251 | |||
24b9f9e78b | |||
80558ea7e6 | |||
348d9014e3 | |||
30bd6103a6 | |||
97eb985a02 | |||
7ef1adf7e2 | |||
be828b8c54 | |||
b4b3bae104 | |||
33d78fbc52 | |||
be2d38a6d2 | |||
bbd7e12af4 | |||
37db783699 | |||
948710dc27 | |||
f0a588fcff | |||
9c32f260a1 | |||
047e1ed15c | |||
2c2f0c048b | |||
3c12c0e288 | |||
ca8497550a | |||
849e842336 | |||
5c82623db9 | |||
e8a8fa3eb1 | |||
5926f69e6c | |||
83d3fb4919 | |||
b429534d23 | |||
b187af64a8 | |||
ce7bb001da | |||
9c90adbfe8 | |||
db62434736 | |||
3d721812f0 | |||
5b23a0661b | |||
7c919cd453 | |||
bb61526d3e | |||
45e1c6743a | |||
39588579a8 | |||
9095af6d84 | |||
b62413046d | |||
af4d965b8c | |||
855da58e06 | |||
2fc24f0f58 | |||
8016b1fad5 | |||
46f9903562 | |||
517850f283 | |||
faa8dd2e6f | |||
d23d010917 | |||
b1da36ecde | |||
e62aab9b4b | |||
423361a80e | |||
62a7c61cdc | |||
2bab16d3ce | |||
c88daa4800 | |||
6988d8893f | |||
64e228450f | |||
897e121eeb | |||
648bd24d0d | |||
aefa7e6405 | |||
026f6141e6 | |||
cb88edea1f | |||
127e8dcb38 | |||
9c43dafcf5 | |||
e65dbcfcbe | |||
e0d4955bd5 | |||
78ebc3292c | |||
0c2db878f0 | |||
cb9d7f7d1e | |||
41b70bec43 | |||
f013e90936 | |||
6977cb218c | |||
3f30735eaa | |||
58f4837ae0 | |||
b95bddac7b | |||
7d53706e71 | |||
4d699fcbf1 | |||
5aa6150c70 | |||
b0a85f44c9 | |||
2aa5ba9abc | |||
35d34dca54 | |||
bc817c4ea2 | |||
0298b32e38 | |||
73c9ccef6a | |||
ad4aed9c98 | |||
8528bef8cf | |||
11c8755b18 | |||
d5c90b95a7 | |||
1da900461c | |||
3aff6fc006 | |||
ccfde6c237 | |||
44c4b71bb3 | |||
c3a6e62bf2 | |||
00949c4ea8 | |||
15e4762d4a | |||
959bfd7f76 | |||
6ad0b41759 | |||
89cc611f7a | |||
cf74fdd99c | |||
58578dd4b2 | |||
4a7b4e4ead | |||
c900f4ef5c | |||
3a494147ec | |||
4336fec653 | |||
11f6537a09 | |||
da58a5926d | |||
f5ef62c6bb | |||
f386c332e5 | |||
23b90b3dd7 | |||
ea736d8824 | |||
dc2e0cc5b3 | |||
c9b85f9004 | |||
af147b3cb6 | |||
0f8a720fe8 | |||
2ab6f6c914 | |||
54d93608aa | |||
19a6cdd764 | |||
2660d976fe | |||
659ccbd637 | |||
3a2367f24f | |||
0f4ff918d2 | |||
6d7e726066 | |||
9e65f3949d | |||
bf00dc85b2 | |||
69b58c2b36 | |||
5364b66629 | |||
c4826d3bfd | |||
07638caff0 | |||
5ef1ec4811 | |||
f0ae65606d | |||
a538c0ddb0 | |||
c31d1dcb9c | |||
54a7f85978 | |||
e200c2fc98 | |||
1626734c1a | |||
13f63c7700 | |||
c7dbe1c43d | |||
4c15f61cb7 | |||
f1ea01ef0c | |||
2361e166cd | |||
4d913462cb | |||
bdc2c43773 | |||
b2254e9820 | |||
d293e02f62 | |||
1ee8d464c6 | |||
2a4d27d8e6 | |||
1f5846afaa | |||
006bc80f12 | |||
802e8b5d55 | |||
6b7572f089 | |||
1d04287532 | |||
8b6d9b5de3 | |||
136bba1631 | |||
c1b00b6d6b | |||
a51b23187d | |||
c3f9e535d3 | |||
6d805dc2ec | |||
4291ebc25e | |||
02c74a181d | |||
c0d4464097 | |||
602249a48a | |||
338e3f1519 | |||
0e9f4402cb | |||
6057e88034 | |||
2a3d077476 | |||
8e62bd747b | |||
b8ff503c14 | |||
9e69e53e24 | |||
4d163a2313 | |||
e4e7f8d5b5 | |||
4849807353 | |||
6e30968c54 | |||
6fc0eb3498 | |||
98dfd6b09c | |||
ece9bb8bf2 | |||
09fcbbc03b | |||
a7fda408ef | |||
5d77ae93b4 | |||
4a9b9de87f | |||
bba3570788 | |||
6852452f1a | |||
254d5ed962 | |||
faf068885a | |||
a2e864360e | |||
79e4cead2d | |||
6968e7d769 | |||
c133c2dbe7 | |||
2bc7a5c13f | |||
16e2c32521 | |||
da85d91a09 | |||
e2a8373c42 | |||
fbdabd8314 | |||
39c4526797 | |||
2e3fbfa966 | |||
eebabc5070 | |||
b177cbe7c7 | |||
641d344d2d | |||
dc418bd5e0 | |||
8bbc40b9b1 | |||
8083bcb0e8 | |||
8928888481 | |||
d64fa7e1f9 | |||
b51f964cae | |||
67b8ffe2f2 | |||
32bed04914 | |||
6cb9489e9a | |||
73727c2383 | |||
e8a5027cab | |||
50f3350418 | |||
bb41da484f | |||
ee30069195 | |||
58c1c29293 | |||
49387dbe16 | |||
803095c0c5 | |||
514c2fe630 | |||
b4f64656fe | |||
73e13bd93c | |||
b404e5b86d | |||
4bcab25231 | |||
414a07b99a | |||
fdf4cccde0 | |||
1a3b0c2eec | |||
955e4a5c7a | |||
d9aab2191b | |||
9dd09b2122 | |||
937c107dec | |||
ed1b9459fc | |||
f063d0a4fd | |||
a21dee61e7 | |||
3807276a55 | |||
894f73ca35 | |||
00ad474881 | |||
9e0e0242aa | |||
a31e02449c | |||
b956cc78bb | |||
7279ed88e9 | |||
9500db8764 | |||
9404eb32a2 | |||
f172c33247 | |||
75dca64648 | |||
97c62e424a | |||
a2c08b6ef6 | |||
a78d2bc3e9 | |||
ad3fc1190c | |||
641be15703 | |||
cbe6f98dff | |||
9bdacfffb2 | |||
f13f500d6e | |||
mlokis | 4e9d6094bd | ||
28e33d11c9 | |||
koniifer | 581c4d531c | ||
9012f976c5 | |||
koniifer | 27462d9a33 | ||
koniifer | 781c40ede0 | ||
9af7bf559f | |||
5a6474f066 | |||
33a4bf7d01 | |||
cac99cd34d | |||
5555b9865a | |||
f964520641 | |||
a88d3a5c9d | |||
416f646957 | |||
12b39c5b3f | |||
4dcaae8362 | |||
ab903fa4ea | |||
c48a2d2799 | |||
fb01407465 | |||
71359d82aa | |||
29d5774c47 | |||
434acfbc7b | |||
6a03f125a5 | |||
03aedb5d3f | |||
a1179f3320 | |||
ba73a89171 | |||
523ca6d103 | |||
654b7eb7af | |||
4c3b63df25 | |||
9a8a56fe97 | |||
aeb3a37f7d | |||
3c01a40ef2 | |||
4f9d4f2e71 | |||
25bbe247e9 | |||
ab41d49a3d | |||
11cb875882 | |||
8984dce0e7 | |||
fd64968f3a | |||
e00f2f08c8 | |||
880cd66c66 | |||
fa41c56cb3 | |||
efa7271a59 | |||
bd7384123c | |||
e9589ebcae | |||
22f925b3f5 | |||
3807fe22da | |||
12c7467be2 | |||
cdc8cb35f7 | |||
36bd1a796b | |||
59705c062d | |||
9fe734c68c | |||
dc0562553d | |||
91907a90ff | |||
e147358fce | |||
f9e46b4641 | |||
93deeee6b9 | |||
876690319f | |||
c835317287 | |||
8442b55aa6 | |||
e07265c88b | |||
6a69042cb7 | |||
c85437e4e8 | |||
76b3f9ff4b | |||
66c3f7b0d4 | |||
b04d9e517e | |||
b46c64db4f | |||
6de8496aa5 | |||
499fe34f1d | |||
36d978d798 | |||
bd2a49d29a | |||
1c8645bf11 | |||
1624559e7b | |||
1ca5d89644 | |||
61ecbbd304 | |||
002a7df509 | |||
20903ef294 | |||
aafcb2fbbd | |||
98862edd58 | |||
b9de362ba2 | |||
e494785f93 | |||
aef9951bc5 | |||
b922dbd232 | |||
71c4d3632a | |||
8cb9f2eaac | |||
aae217dd00 | |||
4502a64514 | |||
ca1d471646 | |||
2dff9f7244 | |||
3127d04e41 | |||
589a30c8a3 | |||
8b81cfef37 | |||
6b74640c3f | |||
87ba7aa203 | |||
78f9eb6acc | |||
3c09a5f23e | |||
70955c1792 | |||
d8a922df26 | |||
9aa5da82c9 | |||
fb481a0600 | |||
d90f386bd2 | |||
c14e6c352d | |||
9ccf91d072 | |||
7cca9a3683 | |||
b28baa86f7 | |||
2226a47aaa | |||
0aec47e985 | |||
5c38115119 | |||
c3cbd054f7 | |||
06e30529bf | |||
4ec635dc56 | |||
a08856a464 | |||
d5a5c932e7 | |||
bc59886428 | |||
f87959aacb | |||
80b05779ea | |||
4bb5ec1953 | |||
2aa315a863 | |||
86013a50a4 | |||
465b185452 | |||
b794fa7c3c | |||
able | ebefc85566 | ||
a3c4b878b2 | |||
7f32e7775c | |||
1d74f27b0e | |||
7435218999 | |||
cf99091a45 | |||
81952cfc40 | |||
68d53544fd | |||
aa77a2f822 | |||
b80528bfd7 | |||
1c08148dc9 | |||
774735b515 | |||
870c1f4718 | |||
326adf47ce | |||
able | 37ff58c5e5 | ||
Erin | cbf4c6572a | ||
Erin | 0070016f74 | ||
Erin | 6c6e29479f | ||
Erin | eb46b24a10 | ||
Erin | 942839a5f8 | ||
9ddc336ecd | |||
Erin | 34d1bf415e | ||
c978b408e2 | |||
Erin | fe9a0667b8 | ||
975ce8a9fe | |||
8a3dd3001d | |||
Erin | de723980da | ||
Erin | 4aa39f3fbc | ||
Erin | 5f8864e251 | ||
bcbe47bcd6 | |||
Erin | 30ee6c84fc | ||
6e464be33d | |||
09aacff161 | |||
433f2db4d1 | |||
e335e55aa0 | |||
Erin | 8e0aeabc07 | ||
Erin | a84e93d562 | ||
Erin | 8374dfe20a | ||
Erin | e9e1242743 | ||
Erin | f604a2463d | ||
Erin | 59e38db874 | ||
Erin | 8c257e9216 | ||
Erin | 84cc1db691 | ||
Erin | 633e3adc61 | ||
Erin | 7f981fe9a0 | ||
Erin | 43c36774a5 | ||
Erin | 5dd0e22c0d | ||
Erin | b161d46a5b | ||
Erin | 42488e1e4a | ||
Erin | b84ff70014 | ||
Erin | b8432d544c | ||
Erin | 207d8d7fa6 | ||
Erin | 569f154bcc | ||
Erin | 68094ce0ae | ||
Bee | 84dcbfc6bb | ||
Bee | d26c285ca7 | ||
Erin | d255967125 | ||
Erin | c5c8d23470 | ||
Erin | aca8045a98 | ||
Erin | 398687d8bf | ||
Erin | a7c4379976 | ||
Erin | 949dd3ba61 | ||
Erin | 3771180909 | ||
Erin | 6b3a132451 | ||
Erin | b45d235312 | ||
Erin | 9ee3e9cb5f | ||
Erin | 57f30109c8 | ||
Erin | d6243fa99f | ||
Erin | 3a6d0fdd2d | ||
Erin | 9b823ef660 | ||
Erin | 84aeac0b2a | ||
Erin | fc4118938e | ||
Erin | cb557d1361 | ||
Erin | 2715bc9107 | ||
Erin | 8182abca98 | ||
Erin | 83563fff84 | ||
Erin | b4923cfb95 | ||
Erin | eab47db4d6 | ||
Erin | 4b45407a70 | ||
Erin | a944a145ed | ||
Erin | 0e701e31b5 | ||
Erin | 0cb20d5727 | ||
Erin | 889aefe87a | ||
Erin | 59be906835 | ||
Erin | 441356b6f2 | ||
Erin | 2f8612c6d2 | ||
Erin | 3e4095da6f | ||
Erin | b1bdbea991 | ||
Erin | 8c8c708279 | ||
Erin | 35f90e94a8 | ||
able | e7aa306e5d | ||
Erin | 42be580425 | ||
Erin | 9b8a4a718e | ||
Erin | 0d2949024c | ||
Erin | 26105bab70 | ||
Erin | 006dcca309 | ||
Erin | 3034469e89 | ||
Erin | 30070818ae | ||
Erin | d282b3d111 | ||
Erin | 600528434b | ||
Erin | 0deeaf3a7e | ||
Erin | 3decd01619 | ||
Erin | a071a4a7ae | ||
Erin | af1a7d3bfa | ||
Erin | 3fdf936f77 | ||
Erin | 96b749060d | ||
Erin | 770c2ebcf0 | ||
Erin | 6609bd10c5 | ||
Erin | 97eaae1c76 | ||
Erin | 1460a7a230 | ||
Erin | 529fbdaed4 | ||
Erin | 3ac80a2e3d | ||
Erin | 06d66289bc | ||
Erin | 430ccd170d | ||
Erin | e2d3f46d3f | ||
Erin | eadf9e0a1f | ||
Erin | d74b32a38d | ||
Erin | 4530ff049e | ||
Erin | 2d2978eec7 | ||
Erin | bf50bcb203 | ||
Erin | 82f23ec2e2 | ||
Erin | 5264576274 | ||
Erin | cdee99598e | ||
Erin | f130a27685 | ||
Erin | aa186b35c2 | ||
Erin | 629fc969c2 | ||
Erin | 8287b1bdc1 | ||
Erin | 73b998911c | ||
Erin | 1a5f101719 | ||
Erin | a667c36d6c | ||
Erin | 582c716445 | ||
Erin | 37a1923f1e | ||
Erin | 10f9907c09 | ||
Erin | 2480a65947 | ||
Erin | 1ed153a9a2 | ||
Erin | 19df4538d7 | ||
Erin | e07bfb2301 | ||
Erin | cfe3203ef1 | ||
Erin | c4e062e742 | ||
Erin | 83436507df | ||
Erin | bdda987da9 | ||
Erin | 6588837769 | ||
Erin | f2ec9a3034 | ||
Erin | 95c979cb83 | ||
Erin | 66f634a70f | ||
Erin | 077da50787 | ||
Erin | 5055626968 | ||
Erin | 0f5d78bb27 | ||
Erin | 668b324cc8 | ||
Erin | 759514686a | ||
Erin | 92793dc93b | ||
Erin | ac149a5319 | ||
able | f4c55ae3cc | ||
Erin | a82686ec07 | ||
Erin | b3a6c42af3 | ||
Erin | d20447dd15 | ||
Erin | 193be0bd5a | ||
Erin | fce3fa5210 | ||
Erin | 4ca4e81ac3 | ||
Erin | dcfd51999d | ||
Erin | 1532c501a6 | ||
Erin | 8eebbacb91 | ||
Erin | c621a5c71d | ||
Erin | 8d5d22eae1 | ||
Erin | 3892a719eb | ||
Erin | 47323e140c | ||
Erin | 3833beb17d | ||
Erin | afbf6dd2e4 | ||
Erin | ec7053a289 | ||
Erin | 1a53c80a62 | ||
Erin | 6fe1fd91bf | ||
able | ac7fdc7688 | ||
able | 9cf8789e9a | ||
able | 3534b26946 | ||
Erin | a21f68ffa6 | ||
Erin | 7833334713 | ||
Erin | 141c5f524f | ||
Erin | 446225bcf6 | ||
d6ea5adf49 | |||
898738fb40 | |||
Erin | beb6e23d71 | ||
5afd081c2a | |||
Erin | 81cf5c4336 | ||
63cf7ac0b0 | |||
Erin | 3cb3ee1fee | ||
Erin | 4dfbe93919 | ||
Erin | 6791b6d48e | ||
Erin | 0ed89234a7 | ||
6afec2a031 | |||
IntoTheNight | b83d1838aa | ||
e25a89d56d | |||
b72f0afe84 | |||
Erin | 9196519fae | ||
6759fbd2ab | |||
Erin | 4f53fb1c87 | ||
Erin | ad96e83f09 | ||
Erin | 5ee8a91479 | ||
Erin | bde00c13f2 | ||
Erin | b4dac1245b | ||
Erin | e700010e7f | ||
Erin | 2d34ed61d0 | ||
Erin | 3919aa8100 | ||
Erin | a548a7b08e | ||
Erin | 132fc1a6ed | ||
able | c31c9e9a54 | ||
able | c26b559898 | ||
Erin | 907dd66d5e | ||
Erin | 2416526014 | ||
Erin | bb50c09538 | ||
Erin | a7cf5e4847 | ||
able | 87ec6ded54 | ||
able | 4a840a6ef0 | ||
able | 5ec6da9fb4 | ||
able | fdca041e6b | ||
Erin | 06b1184772 | ||
Erin | fb78e0a44a | ||
Erin | 7eaa01f53c | ||
Erin | 119ce4405f |
.cargo/config.toml (new file, 4 lines)
@@ -0,0 +1,4 @@
[alias]
xtask = "r -p xtask --"
wasm-build = "b --target wasm32-unknown-unknown --profile=small -Zbuild-std=core,alloc -Zbuild-std-features=optimize_for_size,panic_immediate_abort -p"
wasm-build-debug = "b --target wasm32-unknown-unknown --profile=small-dev -Zbuild-std=core,alloc -Zbuild-std-features=optimize_for_size -p"
.gitignore (vendored, 13 lines changed)
@@ -1 +1,14 @@
# garbage
/target
rustc-ice-*

# sqlite
db.sqlite
db.sqlite-journal

# assets
/depell/src/*.gz
/depell/src/*.wasm
/depell/src/static-pages/*.html
#**/*-sv.rs
/bytecode/src/instrs.rs
Cargo.lock (generated, 1662 lines changed)
File diff suppressed because it is too large.
Cargo.toml (48 lines changed)
@@ -1,2 +1,48 @@
+cargo-features = ["profile-rustflags"]
+
 [workspace]
-members = ["hbvm", "compiler"]
+resolver = "2"
+members = [
+    "bytecode",
+    "vm",
+    "xrt",
+    "xtask",
+    "lang",
+    "depell",
+    "depell/wasm-fmt",
+    "depell/wasm-hbc",
+    "depell/wasm-rt",
+]
+
+[workspace.dependencies]
+hbbytecode = { path = "bytecode", default-features = false }
+hbvm = { path = "vm", default-features = false }
+hblang = { path = "lang", default-features = false }
+
+[profile.release]
+lto = true
+#debug = true
+strip = true
+codegen-units = 1
+panic = "abort"
+
+[profile.small]
+rustflags = ["-Zfmt-debug=none", "-Zlocation-detail=none"]
+inherits = "release"
+opt-level = "z"
+strip = "debuginfo"
+lto = true
+codegen-units = 1
+panic = "abort"
+
+[profile.small-dev]
+inherits = "dev"
+opt-level = "z"
+strip = "debuginfo"
+panic = "abort"
+
+[profile.fuzz]
+inherits = "dev"
+debug = true
+opt-level = 3
+panic = "abort"
(deleted file, 28 lines)
@@ -1,28 +0,0 @@
# Math operations
```
MATH_OP
Add
Sub
Mul
Div
Mod
```
```
MATH_TYPE
Unsigned
Signed
FloatingPoint
```

```
MATH_OP_SIDES
Register Constant
Register Register
Constant Constant
Constant Register
```
`[MATH_OP] [MATH_OP_SIDES] [MATH_TYPE] [IMM_LHS] [IMM_RHS] [REG]`
(deleted file, 4 lines)
@@ -1,4 +0,0 @@
load 0 a0        ;; 05 00 A0
load 10 a1       ;; 05 10 A1
add a0 1 a0      ;; 01 A0 01 A0
jump_neq a0 a1 0 ;; a1 A0 A1 0
(deleted file, 4 lines)
@@ -1,4 +0,0 @@
load 10 A1
load 0 A0
add A0 1
jump_less_than A0 A1 0
bytecode/Cargo.toml (new file, 10 lines)
@@ -0,0 +1,10 @@
[package]
name = "hbbytecode"
version = "0.1.0"
edition = "2018"

[features]
default = ["disasm"]
std = []
disasm = ["std"]
bytecode/build.rs (new file, 204 lines)
@@ -0,0 +1,204 @@
#![feature(iter_next_chunk)]

use std::{collections::HashSet, fmt::Write};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("cargo:rerun-if-changed=build.rs");
    println!("cargo:rerun-if-changed=instructions.in");

    let mut generated = String::new();
    gen_instrs(&mut generated)?;
    std::fs::write("src/instrs.rs", generated)?;

    Ok(())
}

fn gen_instrs(generated: &mut String) -> Result<(), Box<dyn std::error::Error>> {
    writeln!(generated, "#![expect(dead_code)]")?;
    writeln!(generated, "use crate::*;")?;

    '_opcode_structs: {
        let mut seen = HashSet::new();
        for [.., args, _] in instructions() {
            if !seen.insert(args) {
                continue;
            }

            writeln!(generated, "#[derive(Clone, Copy, Debug)]")?;
            writeln!(generated, "#[repr(packed)]")?;
            write!(generated, "pub struct Ops{args}(")?;
            let mut first = true;
            for ch in args.chars().filter(|&ch| ch != 'N') {
                if !std::mem::take(&mut first) {
                    write!(generated, ",")?;
                }
                write!(generated, "pub Op{ch}")?;
            }
            writeln!(generated, ");")?;
            writeln!(generated, "unsafe impl BytecodeItem for Ops{args} {{}}")?;
        }
    }

    '_max_size: {
        let max = instructions()
            .map(
                |[_, _, ty, _]| {
                    if ty == "N" {
                        1
                    } else {
                        iter_args(ty).map(arg_to_width).sum::<usize>() + 1
                    }
                },
            )
            .max()
            .unwrap();

        writeln!(generated, "pub const MAX_SIZE: usize = {max};")?;
    }

    '_encoders: {
        for [op, name, ty, doc] in instructions() {
            writeln!(generated, "/// {}", doc.trim_matches('"'))?;
            let name = name.to_lowercase();
            let args = comma_sep(
                iter_args(ty)
                    .enumerate()
                    .map(|(i, c)| format!("{}{i}: {}", arg_to_name(c), arg_to_type(c))),
            );
            writeln!(generated, "pub fn {name}({args}) -> (usize, [u8; MAX_SIZE]) {{")?;
            let arg_names =
                comma_sep(iter_args(ty).enumerate().map(|(i, c)| format!("{}{i}", arg_to_name(c))));
            writeln!(generated, " unsafe {{ crate::encode({ty}({op}, {arg_names})) }}")?;
            writeln!(generated, "}}")?;
        }
    }

    '_structs: {
        let mut seen = std::collections::HashSet::new();
        for [_, _, ty, _] in instructions() {
            if !seen.insert(ty) {
                continue;
            }
            let types = comma_sep(iter_args(ty).map(arg_to_type).map(|s| s.to_string()));
            writeln!(generated, "#[repr(packed)] pub struct {ty}(u8, {types});")?;
        }
    }

    '_name_list: {
        writeln!(generated, "pub const COUNT: u8 = {};", instructions().count())?;
    }

    let instr = "Instr";
    let oper = "Oper";

    '_instr_enum: {
        writeln!(generated, "#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[repr(u8)]")?;
        writeln!(generated, "pub enum {instr} {{")?;
        for [id, name, ..] in instructions() {
            writeln!(generated, " {name} = {id},")?;
        }
        writeln!(generated, "}}")?;
    }

    '_arg_kind: {
        writeln!(generated, "#[derive(Debug, Clone, Copy, PartialEq, Eq)]")?;
        writeln!(generated, "pub enum {oper} {{")?;
        let mut seen = HashSet::new();
        for ty in instructions().flat_map(|[.., ty, _]| iter_args(ty)) {
            if !seen.insert(ty) {
                continue;
            }
            writeln!(generated, " {ty}({}),", arg_to_type(ty))?;
        }
        writeln!(generated, "}}")?;
    }

    '_parse_opers: {
        writeln!(
            generated,
            "/// This assumes the instruction byte is still at the beginning of the buffer"
        )?;
        writeln!(generated, "#[cfg(feature = \"disasm\")]")?;
        writeln!(generated, "pub fn parse_args(bytes: &mut &[u8], kind: {instr}, buf: &mut alloc::vec::Vec<{oper}>) -> Option<()> {{")?;
        writeln!(generated, " match kind {{")?;
        let mut instrs = instructions().collect::<Vec<_>>();
        instrs.sort_unstable_by_key(|&[.., ty, _]| ty);
        for group in instrs.chunk_by(|[.., a, _], [.., b, _]| a == b) {
            let ty = group[0][2];
            for &[_, name, ..] in group {
                writeln!(generated, " | {instr}::{name}")?;
            }
            generated.pop();
            writeln!(generated, " => {{")?;
            if iter_args(ty).count() != 0 {
                writeln!(generated, " let data = crate::decode::<{ty}>(bytes)?;")?;
                writeln!(
                    generated,
                    " buf.extend([{}]);",
                    comma_sep(
                        iter_args(ty).zip(1u32..).map(|(t, i)| format!("{oper}::{t}(data.{i})"))
                    )
                )?;
            } else {
                writeln!(generated, " crate::decode::<{ty}>(bytes)?;")?;
            }

            writeln!(generated, " }}")?;
        }
        writeln!(generated, " }}")?;
        writeln!(generated, " Some(())")?;
        writeln!(generated, "}}")?;
    }

    std::fs::write("src/instrs.rs", generated)?;
    Ok(())
}

fn comma_sep(items: impl Iterator<Item = String>) -> String {
    items.map(|item| item.to_string()).collect::<Vec<_>>().join(", ")
}

fn instructions() -> impl Iterator<Item = [&'static str; 4]> {
    include_str!("instructions.in")
        .lines()
        .filter_map(|line| line.strip_suffix(';'))
        .map(|line| line.splitn(4, ',').map(str::trim).next_chunk().unwrap())
}

fn arg_to_type(arg: char) -> &'static str {
    match arg {
        'R' | 'B' => "u8",
        'H' => "u16",
        'W' => "u32",
        'D' | 'A' => "u64",
        'P' => "i16",
        'O' => "i32",
        _ => panic!("unknown type: {}", arg),
    }
}

fn arg_to_width(arg: char) -> usize {
    match arg {
        'R' | 'B' => 1,
        'H' => 2,
        'W' => 4,
        'D' | 'A' => 8,
        'P' => 2,
        'O' => 4,
        _ => panic!("unknown type: {}", arg),
    }
}

fn arg_to_name(arg: char) -> &'static str {
    match arg {
        'R' => "reg",
        'B' | 'H' | 'W' | 'D' => "imm",
        'P' | 'O' => "offset",
        'A' => "addr",
        _ => panic!("unknown type: {}", arg),
    }
}

fn iter_args(ty: &'static str) -> impl Iterator<Item = char> {
    ty.chars().filter(|c| *c != 'N')
}
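For orientation, here is a hedged, hand-reconstructed sketch of what one generated encoder in `src/instrs.rs` looks like, pieced together from the `writeln!` templates above for ADDI64 (format RRD, defined in `bytecode/instructions.in` below). The stand-in items (`OpR`, `OpD`, `encode`, `MAX_SIZE`) are inlined so the sketch compiles on its own; the real generated file gets them from `bytecode/src/lib.rs` and may differ in detail.

```rust
// Hand-reconstructed sketch of one generated encoder (not the actual build output).
type OpR = u8;  // register operand
type OpD = u64; // 64-bit immediate operand
const MAX_SIZE: usize = 13; // widest format: RRAH = 1 + 1 + 1 + 8 + 2

// The '_structs block emits one packed struct per format string.
#[repr(packed)]
pub struct RRD(u8, OpR, OpR, OpD);

// Same shape as `encode` in bytecode/src/lib.rs: copy the packed struct
// into a MAX_SIZE buffer and report how many bytes are meaningful.
unsafe fn encode<T>(instr: T) -> (usize, [u8; MAX_SIZE]) {
    let mut buf = [0; MAX_SIZE];
    core::ptr::write(buf.as_mut_ptr() as *mut T, instr);
    (core::mem::size_of::<T>(), buf)
}

/// Addition with immediate (64b)
pub fn addi64(reg0: OpR, reg1: OpR, imm2: OpD) -> (usize, [u8; MAX_SIZE]) {
    unsafe { encode(RRD(0x30, reg0, reg1, imm2)) }
}

fn main() {
    let (len, bytes) = addi64(1, 2, 42);
    assert_eq!(len, 11); // opcode + reg + reg + 8-byte immediate
    assert_eq!(&bytes[..3], &[0x30, 1, 2]); // the immediate follows in host endianness
}
```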
bytecode/hbbytecode.h (new file, 75 lines)
@@ -0,0 +1,75 @@
/* HoleyBytes Bytecode representation in C
 * Requires C23 compiler or better
 *
 * Uses MSVC pack pragma extension,
 * proved to work with Clang and GNU® GCC™.
 */

#pragma once
#include <assert.h>
#include <limits.h>
#include <stdint.h>

static_assert(CHAR_BIT == 8, "Cursed architectures are not supported");

enum hbbc_Opcode: uint8_t {
    hbbc_Op_UN,   hbbc_Op_TX,   hbbc_Op_NOP,   hbbc_Op_ADD,  hbbc_Op_SUB,  hbbc_Op_MUL,
    hbbc_Op_AND,  hbbc_Op_OR,   hbbc_Op_XOR,   hbbc_Op_SL,   hbbc_Op_SR,   hbbc_Op_SRS,
    hbbc_Op_CMP,  hbbc_Op_CMPU, hbbc_Op_DIR,   hbbc_Op_NEG,  hbbc_Op_NOT,  hbbc_Op_ADDI,
    hbbc_Op_MULI, hbbc_Op_ANDI, hbbc_Op_ORI,   hbbc_Op_XORI, hbbc_Op_SLI,  hbbc_Op_SRI,
    hbbc_Op_SRSI, hbbc_Op_CMPI, hbbc_Op_CMPUI, hbbc_Op_CP,   hbbc_Op_SWA,  hbbc_Op_LI,
    hhbc_Op_LRA,  hbbc_Op_LD,   hbbc_Op_ST,    hbbc_Op_LDR,  hhbc_Op_STR,  hbbc_Op_BMC,
    hbbc_Op_BRC,  hbbc_Op_JMP,  hbbc_Op_JMPR,  hbbc_Op_JAL,  hbbc_Op_JALR, hbbc_Op_JEQ,
    hbbc_Op_JNE,  hbbc_Op_JLT,  hbbc_Op_JGT,   hbbc_Op_JLTU, hbbc_Op_JGTU, hbbc_Op_ECALL,
    hbbc_Op_ADDF, hbbc_Op_SUBF, hbbc_Op_MULF,  hbbc_Op_DIRF, hbbc_Op_FMAF, hbbc_Op_NEGF,
    hbbc_Op_ITF,  hbbc_Op_FTI,  hbbc_Op_ADDFI, hbbc_Op_MULFI,
} typedef hbbc_Opcode;

static_assert(sizeof(hbbc_Opcode) == 1);

#pragma pack(push, 1)
struct hbbc_ParamBBBB
    { uint8_t _0; uint8_t _1; uint8_t _2; uint8_t _3; }
    typedef hbbc_ParamBBBB;
static_assert(sizeof(hbbc_ParamBBBB) == 32 / 8);

struct hbbc_ParamBBB
    { uint8_t _0; uint8_t _1; uint8_t _2; }
    typedef hbbc_ParamBBB;
static_assert(sizeof(hbbc_ParamBBB) == 24 / 8);

struct hbbc_ParamBBDH
    { uint8_t _0; uint8_t _1; uint64_t _2; uint16_t _3; }
    typedef hbbc_ParamBBDH;
static_assert(sizeof(hbbc_ParamBBDH) == 96 / 8);

struct hbbc_ParamBBWH
    { uint8_t _0; uint8_t _1; uint32_t _2; uint16_t _3; }
    typedef hbbc_ParamBBWH;
static_assert(sizeof(hbbc_ParamBBWH) == 64 / 8);


struct hbbc_ParamBBD
    { uint8_t _0; uint8_t _1; uint64_t _2; }
    typedef hbbc_ParamBBD;
static_assert(sizeof(hbbc_ParamBBD) == 80 / 8);

struct hbbc_ParamBBW
    { uint8_t _0; uint8_t _1; uint32_t _2; }
    typedef hbbc_ParamBBW;
static_assert(sizeof(hbbc_ParamBBW) == 48 / 8);

struct hbbc_ParamBB
    { uint8_t _0; uint8_t _1; }
    typedef hbbc_ParamBB;
static_assert(sizeof(hbbc_ParamBB) == 16 / 8);

struct hbbc_ParamBD
    { uint8_t _0; uint64_t _1; }
    typedef hbbc_ParamBD;
static_assert(sizeof(hbbc_ParamBD) == 72 / 8);

typedef uint64_t hbbc_ParamD;
static_assert(sizeof(hbbc_ParamD) == 64 / 8);

#pragma pack(pop)
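As a cross-check of the layout this header pins down, the same guarantee can be expressed on the Rust side. This is an illustrative sketch (not code from the repository), assuming the field order of `hbbc_ParamBBDH` above:

```rust
// Illustrative only: mirrors hbbc_ParamBBDH from hbbytecode.h.
// #[repr(packed)] plays the role of `#pragma pack(push, 1)`.
#[repr(packed)]
struct ParamBBDH(u8, u8, u64, u16);

// Compile-time equivalent of the C static_assert: 1 + 1 + 8 + 2 = 12 bytes.
const _: () = assert!(core::mem::size_of::<ParamBBDH>() == 96 / 8);

fn main() {
    println!("sizeof(ParamBBDH) = {}", core::mem::size_of::<ParamBBDH>());
}
```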
bytecode/instructions.in (new file, 120 lines)
@@ -0,0 +1,120 @@
0x00, UN, N, "Cause an unreachable code trap";
0x01, TX, N, "Terminate execution";
0x02, NOP, N, "Do nothing";
0x03, ADD8, RRR, "Addition (8b)";
0x04, ADD16, RRR, "Addition (16b)";
0x05, ADD32, RRR, "Addition (32b)";
0x06, ADD64, RRR, "Addition (64b)";
0x07, SUB8, RRR, "Subtraction (8b)";
0x08, SUB16, RRR, "Subtraction (16b)";
0x09, SUB32, RRR, "Subtraction (32b)";
0x0A, SUB64, RRR, "Subtraction (64b)";
0x0B, MUL8, RRR, "Multiplication (8b)";
0x0C, MUL16, RRR, "Multiplication (16b)";
0x0D, MUL32, RRR, "Multiplication (32b)";
0x0E, MUL64, RRR, "Multiplication (64b)";
0x0F, AND, RRR, "Bitand";
0x10, OR, RRR, "Bitor";
0x11, XOR, RRR, "Bitxor";
0x12, SLU8, RRR, "Unsigned left bitshift (8b)";
0x13, SLU16, RRR, "Unsigned left bitshift (16b)";
0x14, SLU32, RRR, "Unsigned left bitshift (32b)";
0x15, SLU64, RRR, "Unsigned left bitshift (64b)";
0x16, SRU8, RRR, "Unsigned right bitshift (8b)";
0x17, SRU16, RRR, "Unsigned right bitshift (16b)";
0x18, SRU32, RRR, "Unsigned right bitshift (32b)";
0x19, SRU64, RRR, "Unsigned right bitshift (64b)";
0x1A, SRS8, RRR, "Signed right bitshift (8b)";
0x1B, SRS16, RRR, "Signed right bitshift (16b)";
0x1C, SRS32, RRR, "Signed right bitshift (32b)";
0x1D, SRS64, RRR, "Signed right bitshift (64b)";
0x1E, CMPU, RRR, "Unsigned comparison";
0x1F, CMPS, RRR, "Signed comparison";
0x20, DIRU8, RRRR, "Merged divide-remainder (unsigned 8b)";
0x21, DIRU16, RRRR, "Merged divide-remainder (unsigned 16b)";
0x22, DIRU32, RRRR, "Merged divide-remainder (unsigned 32b)";
0x23, DIRU64, RRRR, "Merged divide-remainder (unsigned 64b)";
0x24, DIRS8, RRRR, "Merged divide-remainder (signed 8b)";
0x25, DIRS16, RRRR, "Merged divide-remainder (signed 16b)";
0x26, DIRS32, RRRR, "Merged divide-remainder (signed 32b)";
0x27, DIRS64, RRRR, "Merged divide-remainder (signed 64b)";
0x28, NEG, RR, "Bit negation";
0x29, NOT, RR, "Logical negation";
0x2A, SXT8, RR, "Sign extend 8b to 64b";
0x2B, SXT16, RR, "Sign extend 16b to 64b";
0x2C, SXT32, RR, "Sign extend 32b to 64b";
0x2D, ADDI8, RRB, "Addition with immediate (8b)";
0x2E, ADDI16, RRH, "Addition with immediate (16b)";
0x2F, ADDI32, RRW, "Addition with immediate (32b)";
0x30, ADDI64, RRD, "Addition with immediate (64b)";
0x31, MULI8, RRB, "Multiplication with immediate (8b)";
0x32, MULI16, RRH, "Multiplication with immediate (16b)";
0x33, MULI32, RRW, "Multiplication with immediate (32b)";
0x34, MULI64, RRD, "Multiplication with immediate (64b)";
0x35, ANDI, RRD, "Bitand with immediate";
0x36, ORI, RRD, "Bitor with immediate";
0x37, XORI, RRD, "Bitxor with immediate";
0x38, SLUI8, RRB, "Unsigned left bitshift with immediate (8b)";
0x39, SLUI16, RRB, "Unsigned left bitshift with immediate (16b)";
0x3A, SLUI32, RRB, "Unsigned left bitshift with immediate (32b)";
0x3B, SLUI64, RRB, "Unsigned left bitshift with immediate (64b)";
0x3C, SRUI8, RRB, "Unsigned right bitshift with immediate (8b)";
0x3D, SRUI16, RRB, "Unsigned right bitshift with immediate (16b)";
0x3E, SRUI32, RRB, "Unsigned right bitshift with immediate (32b)";
0x3F, SRUI64, RRB, "Unsigned right bitshift with immediate (64b)";
0x40, SRSI8, RRB, "Signed right bitshift with immediate";
0x41, SRSI16, RRB, "Signed right bitshift with immediate";
0x42, SRSI32, RRB, "Signed right bitshift with immediate";
0x43, SRSI64, RRB, "Signed right bitshift with immediate";
0x44, CMPUI, RRD, "Unsigned compare with immediate";
0x45, CMPSI, RRD, "Signed compare with immediate";
0x46, CP, RR, "Copy register";
0x47, SWA, RR, "Swap registers";
0x48, LI8, RB, "Load immediate (8b)";
0x49, LI16, RH, "Load immediate (16b)";
0x4A, LI32, RW, "Load immediate (32b)";
0x4B, LI64, RD, "Load immediate (64b)";
0x4C, LRA, RRO, "Load relative address";
0x4D, LD, RRAH, "Load from absolute address";
0x4E, ST, RRAH, "Store to absolute address";
0x4F, LDR, RROH, "Load from relative address";
0x50, STR, RROH, "Store to relative address";
0x51, BMC, RRH, "Copy block of memory";
0x52, BRC, RRB, "Copy register block";
0x53, JMP, O, "Relative jump";
0x54, JAL, RRO, "Linking relative jump";
0x55, JALA, RRA, "Linking absolute jump";
0x56, JEQ, RRP, "Branch on equal";
0x57, JNE, RRP, "Branch on nonequal";
0x58, JLTU, RRP, "Branch on lesser-than (unsigned)";
0x59, JGTU, RRP, "Branch on greater-than (unsigned)";
0x5A, JLTS, RRP, "Branch on lesser-than (signed)";
0x5B, JGTS, RRP, "Branch on greater-than (signed)";
0x5C, ECA, N, "Environment call trap";
0x5D, EBP, N, "Environment breakpoint";
0x5E, FADD32, RRR, "Floating point addition (32b)";
0x5F, FADD64, RRR, "Floating point addition (64b)";
0x60, FSUB32, RRR, "Floating point subtraction (32b)";
0x61, FSUB64, RRR, "Floating point subtraction (64b)";
0x62, FMUL32, RRR, "Floating point multiply (32b)";
0x63, FMUL64, RRR, "Floating point multiply (64b)";
0x64, FDIV32, RRR, "Floating point division (32b)";
0x65, FDIV64, RRR, "Floating point division (64b)";
0x66, FMA32, RRRR, "Float fused multiply-add (32b)";
0x67, FMA64, RRRR, "Float fused multiply-add (64b)";
0x68, FINV32, RR, "Float reciprocal (32b)";
0x69, FINV64, RR, "Float reciprocal (64b)";
0x6A, FCMPLT32, RRR, "Float compare less than (32b)";
0x6B, FCMPLT64, RRR, "Float compare less than (64b)";
0x6C, FCMPGT32, RRR, "Float compare greater than (32b)";
0x6D, FCMPGT64, RRR, "Float compare greater than (64b)";
0x6E, ITF32, RR, "Int to 32 bit float";
0x6F, ITF64, RR, "Int to 64 bit float";
0x70, FTI32, RRB, "Float 32 to int";
0x71, FTI64, RRB, "Float 64 to int";
0x72, FC32T64, RR, "Float 64 to Float 32";
0x73, FC64T32, RRB, "Float 32 to Float 64";
0x74, LRA16, RRP, "Load relative immediate (16 bit)";
0x75, LDR16, RRPH, "Load from relative address (16 bit)";
0x76, STR16, RRPH, "Store to relative address (16 bit)";
0x77, JMP16, P, "Relative jump (16 bit)";
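The third column's format string fixes each instruction's encoded size: one opcode byte plus the width of every operand letter. A small sketch of that arithmetic, using the same widths as `arg_to_width` in `bytecode/build.rs` above (illustrative, not code from the repository):

```rust
// Illustrative sketch: instruction sizes implied by the format letters,
// one opcode byte plus R/B = 1, H/P = 2, W/O = 4, D/A = 8 bytes per operand.
fn format_size(format: &str) -> usize {
    1 + format
        .chars()
        .filter(|&c| c != 'N') // 'N' marks "no operands"
        .map(|c| match c {
            'R' | 'B' => 1,
            'H' | 'P' => 2,
            'W' | 'O' => 4,
            'D' | 'A' => 8,
            _ => panic!("unknown operand letter: {c}"),
        })
        .sum::<usize>()
}

fn main() {
    assert_eq!(format_size("N"), 1);     // e.g. TX, NOP, ECA
    assert_eq!(format_size("RRR"), 4);   // e.g. ADD64
    assert_eq!(format_size("RRD"), 11);  // e.g. ADDI64, ANDI
    assert_eq!(format_size("RRAH"), 13); // e.g. LD/ST -- the MAX_SIZE case
}
```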
283
bytecode/src/lib.rs
Normal file
283
bytecode/src/lib.rs
Normal file
|
@ -0,0 +1,283 @@
|
|||
#![no_std]
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
extern crate alloc;
|
||||
|
||||
pub use crate::instrs::*;
|
||||
use core::convert::TryFrom;
|
||||
|
||||
mod instrs;
|
||||
|
||||
type OpR = u8;
|
||||
|
||||
type OpA = u64;
|
||||
type OpO = i32;
|
||||
type OpP = i16;
|
||||
|
||||
type OpB = u8;
|
||||
type OpH = u16;
|
||||
type OpW = u32;
|
||||
type OpD = u64;
|
||||
|
||||
/// # Safety
|
||||
/// Has to be valid to be decoded from bytecode.
|
||||
pub unsafe trait BytecodeItem {}
|
||||
unsafe impl BytecodeItem for u8 {}
|
||||
|
||||
impl TryFrom<u8> for Instr {
|
||||
type Error = u8;
|
||||
|
||||
#[inline]
|
||||
fn try_from(value: u8) -> Result<Self, Self::Error> {
|
||||
#[cold]
|
||||
fn failed(value: u8) -> Result<Instr, u8> {
|
||||
Err(value)
|
||||
}
|
||||
|
||||
if value < COUNT {
|
||||
unsafe { Ok(core::mem::transmute::<u8, Instr>(value)) }
|
||||
} else {
|
||||
failed(value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn encode<T>(instr: T) -> (usize, [u8; instrs::MAX_SIZE]) {
|
||||
let mut buf = [0; instrs::MAX_SIZE];
|
||||
core::ptr::write(buf.as_mut_ptr() as *mut T, instr);
|
||||
(core::mem::size_of::<T>(), buf)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg(feature = "disasm")]
|
||||
fn decode<T>(binary: &mut &[u8]) -> Option<T> {
|
||||
let (front, rest) = core::mem::take(binary).split_at_checked(core::mem::size_of::<T>())?;
|
||||
*binary = rest;
|
||||
unsafe { Some(core::ptr::read(front.as_ptr() as *const T)) }
|
||||
}
|
||||
|
||||
/// Rounding mode
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
#[repr(u8)]
|
||||
pub enum RoundingMode {
|
||||
NearestEven = 0,
|
||||
Truncate = 1,
|
||||
Up = 2,
|
||||
Down = 3,
|
||||
}
|
||||
|
||||
impl TryFrom<u8> for RoundingMode {
|
||||
type Error = ();
|
||||
|
||||
fn try_from(value: u8) -> Result<Self, Self::Error> {
|
||||
(value <= 3).then(|| unsafe { core::mem::transmute(value) }).ok_or(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
#[derive(Clone, Copy)]
|
||||
pub enum DisasmItem {
|
||||
Func,
|
||||
Global,
|
||||
}
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
#[derive(Debug)]
|
||||
pub enum DisasmError<'a> {
|
||||
InvalidInstruction(u8),
|
||||
InstructionOutOfBounds(&'a str),
|
||||
FmtFailed(core::fmt::Error),
|
||||
HasOutOfBoundsJumps,
|
||||
}
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
impl From<core::fmt::Error> for DisasmError<'_> {
|
||||
fn from(value: core::fmt::Error) -> Self {
|
||||
Self::FmtFailed(value)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
impl core::fmt::Display for DisasmError<'_> {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
match *self {
|
||||
DisasmError::InvalidInstruction(b) => write!(f, "invalid instruction opcode: {b}"),
|
||||
DisasmError::InstructionOutOfBounds(name) => {
|
||||
write!(f, "instruction would go out of bounds of {name} symbol")
|
||||
}
|
||||
DisasmError::FmtFailed(error) => write!(f, "fmt failed: {error}"),
|
||||
DisasmError::HasOutOfBoundsJumps => write!(
|
||||
f,
|
||||
"the code contained jumps that dont got neither to a \
|
||||
valid symbol or local insturction"
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
impl core::error::Error for DisasmError<'_> {}
|
||||
|
||||
#[cfg(feature = "disasm")]
|
||||
pub fn disasm<'a>(
|
||||
binary: &mut &[u8],
|
||||
functions: &alloc::collections::BTreeMap<u32, (&'a str, u32, DisasmItem)>,
|
||||
out: &mut alloc::string::String,
|
||||
mut eca_handler: impl FnMut(&mut &[u8]),
|
||||
) -> Result<(), DisasmError<'a>> {
|
||||
use {
|
||||
self::instrs::Instr,
|
||||
alloc::{
|
||||
collections::btree_map::{BTreeMap, Entry},
|
||||
vec::Vec,
|
||||
},
|
||||
core::{convert::TryInto, fmt::Write},
|
||||
};
|
||||
|
||||
fn instr_from_byte(b: u8) -> Result<Instr, DisasmError<'static>> {
|
||||
b.try_into().map_err(DisasmError::InvalidInstruction)
|
||||
}
|
||||
|
||||
let mut labels = BTreeMap::<u32, u32>::default();
|
||||
let mut buf = Vec::<instrs::Oper>::new();
|
||||
let mut has_oob = false;
|
||||
|
||||
'_offset_pass: for (&off, &(name, len, kind)) in functions.iter() {
|
||||
if matches!(kind, DisasmItem::Global) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let prev = *binary;
|
||||
|
||||
*binary = &binary[off as usize..];
|
||||
|
||||
let mut label_count = 0;
|
||||
while let Some(&byte) = binary.first() {
|
||||
let offset: i32 = (prev.len() - binary.len()).try_into().unwrap();
|
||||
if offset as u32 == off + len {
|
||||
break;
|
||||
}
|
||||
let Ok(inst) = instr_from_byte(byte) else { break };
|
||||
instrs::parse_args(binary, inst, &mut buf)
|
||||
.ok_or(DisasmError::InstructionOutOfBounds(name))?;
|
||||
|
||||
for op in buf.drain(..) {
|
||||
let rel = match op {
|
||||
instrs::Oper::O(rel) => rel,
|
||||
instrs::Oper::P(rel) => rel.into(),
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
let global_offset: u32 = (offset + rel).try_into().unwrap();
|
||||
if functions.get(&global_offset).is_some() {
|
||||
continue;
|
||||
}
|
||||
label_count += match labels.entry(global_offset) {
|
||||
Entry::Occupied(_) => 0,
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(label_count);
|
||||
1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if matches!(inst, Instr::ECA) {
|
||||
eca_handler(binary);
|
||||
}
|
||||
}
|
||||
|
||||
*binary = prev;
|
||||
}
|
||||
|
||||
let mut ordered = functions.iter().collect::<Vec<_>>();
|
||||
ordered.sort_unstable_by_key(|(_, (name, _, _))| name);
|
||||
|
||||
'_dump: for (&off, &(name, len, kind)) in ordered {
|
||||
if matches!(kind, DisasmItem::Global) {
|
||||
continue;
|
||||
}
|
||||
let prev = *binary;
|
||||
|
||||
writeln!(out, "{name}:")?;
|
||||
|
||||
*binary = &binary[off as usize..];
|
||||
while let Some(&byte) = binary.first() {
|
||||
let offset: i32 = (prev.len() - binary.len()).try_into().unwrap();
|
||||
if offset as u32 == off + len {
|
||||
break;
|
||||
}
|
||||
let Ok(inst) = instr_from_byte(byte) else {
|
||||
writeln!(out, "invalid instr {byte}")?;
|
||||
break;
|
||||
};
|
||||
instrs::parse_args(binary, inst, &mut buf).unwrap();
|
||||
|
||||
if let Some(label) = labels.get(&offset.try_into().unwrap()) {
|
||||
write!(out, "{:>2}: ", label)?;
|
||||
} else {
|
||||
write!(out, " ")?;
|
||||
}
|
||||
|
||||
write!(out, "{inst:<8?} ")?;
|
||||
|
||||
'a: for (i, op) in buf.drain(..).enumerate() {
|
||||
if i != 0 {
|
||||
write!(out, ", ")?;
|
||||
}
|
||||
|
||||
let rel = 'b: {
|
||||
match op {
|
||||
instrs::Oper::O(rel) => break 'b rel,
|
||||
instrs::Oper::P(rel) => break 'b rel.into(),
|
||||
instrs::Oper::R(r) => write!(out, "r{r}")?,
|
||||
instrs::Oper::B(b) => write!(out, "{b}b")?,
|
||||
instrs::Oper::H(h) => write!(out, "{h}h")?,
|
||||
instrs::Oper::W(w) => write!(out, "{w}w")?,
|
||||
instrs::Oper::D(d) if (d as i64) < 0 => write!(out, "{}d", d as i64)?,
|
||||
instrs::Oper::D(d) => write!(out, "{d}d")?,
|
||||
instrs::Oper::A(a) => write!(out, "{a}a")?,
|
||||
}
|
||||
|
||||
continue 'a;
|
||||
};
|
||||
|
||||
let global_offset: u32 = (offset + rel).try_into().unwrap();
|
||||
if let Some(&(name, ..)) = functions.get(&global_offset) {
|
||||
if name.contains('\0') {
|
||||
write!(out, ":{name:?}")?;
|
||||
} else {
|
||||
write!(out, ":{name}")?;
|
||||
}
|
||||
} else {
|
||||
let local_has_oob = global_offset < off
|
||||
|| global_offset > off + len
|
||||
|| prev
|
||||
.get(global_offset as usize)
|
||||
.is_none_or(|&b| instr_from_byte(b).is_err());
|
||||
has_oob |= local_has_oob;
|
||||
let label = labels.get(&global_offset).unwrap();
|
||||
if local_has_oob {
|
||||
write!(out, "!!!!!!!!!{rel}")?;
|
||||
} else {
|
||||
write!(out, ":{label}")?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writeln!(out)?;
|
||||
|
||||
if matches!(inst, Instr::ECA) {
|
||||
eca_handler(binary);
|
||||
}
|
||||
}
|
||||
|
||||
*binary = prev;
|
||||
}
|
||||
|
||||
if has_oob {
|
||||
return Err(DisasmError::HasOutOfBoundsJumps);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
|
(deleted file, 8 lines)
@@ -1,8 +0,0 @@
[package]
name = "compiler"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
(deleted file, 5 lines)
@@ -1,5 +0,0 @@
fn main() {
    let prog = "load 1, A0
jump 0";
    println!("Hello, world!");
}
depell/Cargo.toml (new file, 23 lines)
@@ -0,0 +1,23 @@
[package]
name = "depell"
version = "0.1.0"
edition = "2021"

[dependencies]
argon2 = "0.5.3"
axum = "0.7.7"
axum-server = { version = "0.7.1", optional = true, features = ["rustls", "tls-rustls"] }
const_format = "0.2.33"
getrandom = "0.2.15"
hblang.workspace = true
htmlm = "0.5.0"
log = "0.4.22"
rand_core = { version = "0.6.4", features = ["getrandom"] }
rusqlite = { version = "0.32.1", features = ["bundled"] }
serde = { version = "1.0.210", features = ["derive"] }
time = "0.3.36"
tokio = { version = "1.40.0", features = ["rt"] }

[features]
#default = ["tls"]
tls = ["dep:axum-server"]
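The manifest pulls in axum for the HTTP side and rusqlite for storage. For readers unfamiliar with that stack, here is a minimal, illustrative axum 0.7 server in the same shape; it is not depell's actual code, the route and handler are made up, and it assumes tokio's `macros` and `net` features, which is more than the manifest above enables:

```rust
// Illustrative only -- not depell's real routes or handlers.
use axum::{routing::get, Router};

async fn index() -> &'static str {
    "depell placeholder page"
}

#[tokio::main(flavor = "current_thread")]
async fn main() {
    let app = Router::new().route("/", get(index));
    let listener = tokio::net::TcpListener::bind("127.0.0.1:8080").await.unwrap();
    axum::serve(listener, app).await.unwrap();
}
```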
depell/README.md (new file, 14 lines)
@@ -0,0 +1,14 @@
# Depell

Depell is a website that lets users import, post, and run hblang code and create huge dependency graphs. It is currently hosted at https://depell.mlokis.tech.

## Local Development

Prerequisites:
- Rust nightly toolchain: install Rust from [here](https://www.rust-lang.org/tools/install)

```bash
rustup default nightly
cargo xtask watch-depell-debug
# open http://localhost:8080 in your browser
```
depell/src/icons/download.svg (new file, 279 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 -960 960 960" width="24px" fill="#e8eaed"><path d="M480-320 280-520l56-58 104 104v-326h80v326l104-104 56 58-200 200ZM240-160q-33 0-56.5-23.5T160-240v-120h80v120h480v-120h80v120q0 33-23.5 56.5T720-160H240Z"/></svg>

depell/src/icons/run.svg (new file, 190 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 -960 960 960" width="24px" fill="#e8eaed"><path d="M320-200v-560l440 280-440 280Zm80-280Zm0 134 210-134-210-134v268Z"/></svg>
213
depell/src/index.css
Normal file
213
depell/src/index.css
Normal file
|
@ -0,0 +1,213 @@
|
|||
* {
|
||||
font-family: var(--font);
|
||||
line-height: 1.3;
|
||||
}
|
||||
|
||||
body {
|
||||
--primary: light-dark(white, #181A1B);
|
||||
--secondary: light-dark(#EFEFEF, #212425);
|
||||
--timestamp: light-dark(#555555, #AAAAAA);
|
||||
--error: #ff3333;
|
||||
}
|
||||
|
||||
body {
|
||||
--small-gap: 5px;
|
||||
--font: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
|
||||
--monospace: 'Courier New', Courier, monospace;
|
||||
|
||||
nav {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
|
||||
section:last-child {
|
||||
display: flex;
|
||||
gap: var(--small-gap);
|
||||
}
|
||||
}
|
||||
|
||||
main {
|
||||
margin-top: var(--small-gap);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--small-gap);
|
||||
}
|
||||
}
|
||||
|
||||
div.preview {
|
||||
margin: var(--small-gap) 0px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--small-gap);
|
||||
|
||||
div.info {
|
||||
display: flex;
|
||||
gap: var(--small-gap);
|
||||
|
||||
span[apply=timestamp] {
|
||||
color: var(--timestamp);
|
||||
}
|
||||
}
|
||||
|
||||
div.stat {
|
||||
display: flex;
|
||||
|
||||
svg {
|
||||
height: 18px;
|
||||
}
|
||||
}
|
||||
|
||||
div.code {
|
||||
position: relative;
|
||||
|
||||
nav {
|
||||
position: absolute;
|
||||
right: 0;
|
||||
padding: var(--small-gap);
|
||||
|
||||
button {
|
||||
display: flex;
|
||||
padding: 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
svg {
|
||||
fill: black;
|
||||
}
|
||||
|
||||
form {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--small-gap);
|
||||
|
||||
|
||||
.error {
|
||||
color: var(--error);
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
textarea {
|
||||
outline: none;
|
||||
border: none;
|
||||
background: var(--secondary);
|
||||
padding: var(--small-gap);
|
||||
padding-top: calc(var(--small-gap) * 1.5);
|
||||
font-family: var(--monospace);
|
||||
resize: none;
|
||||
tab-size: 4;
|
||||
}
|
||||
|
||||
pre {
|
||||
background: var(--secondary);
|
||||
padding: var(--small-gap);
|
||||
padding-top: calc(var(--small-gap) * 1.5);
|
||||
margin: 0px;
|
||||
font-family: var(--monospace);
|
||||
tab-size: 4;
|
||||
overflow-x: auto;
|
||||
white-space: pre-wrap;
|
||||
word-wrap: break-word;
|
||||
}
|
||||
|
||||
input {
|
||||
font-size: inherit;
|
||||
outline: none;
|
||||
border: none;
|
||||
background: var(--secondary);
|
||||
padding: var(--small-gap);
|
||||
}
|
||||
|
||||
input:is(:hover, :focus) {
|
||||
background: var(--primary);
|
||||
}
|
||||
|
||||
button {
|
||||
border: none;
|
||||
outline: none;
|
||||
font-size: inherit;
|
||||
background: var(--secondary);
|
||||
}
|
||||
|
||||
button:hover:not(:active) {
|
||||
background: var(--primary);
|
||||
}
|
||||
|
||||
code {
|
||||
font-family: var(--monospace);
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
div#code-editor {
|
||||
display: flex;
|
||||
position: relative;
|
||||
|
||||
textarea {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
span#code-size {
|
||||
position: absolute;
|
||||
right: 2px;
|
||||
font-size: 12px;
|
||||
}
|
||||
}
|
||||
|
||||
div#dep-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
gap: var(--small-gap);
|
||||
|
||||
section {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
text-align: center;
|
||||
gap: var(--small-gap);
|
||||
|
||||
div {
|
||||
text-align: left;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.syn {
|
||||
font-family: var(--monospace);
|
||||
|
||||
&.Comment {
|
||||
color: #939f91;
|
||||
}
|
||||
|
||||
&.Keyword {
|
||||
color: #f85552;
|
||||
}
|
||||
|
||||
&.Identifier,
|
||||
&.Directive {
|
||||
color: #3a94c5;
|
||||
}
|
||||
|
||||
/* &.Number {} */
|
||||
|
||||
&.String {
|
||||
color: #8da101;
|
||||
}
|
||||
|
||||
&.Op,
|
||||
&.Assign {
|
||||
color: #f57d26;
|
||||
}
|
||||
|
||||
&.Paren,
|
||||
&.Bracket,
|
||||
&.Comma,
|
||||
&.Dot,
|
||||
&.Ctor,
|
||||
&.Colon {
|
||||
color: light-dark(#5c6a72, #999999);
|
||||
}
|
||||
}
|
554
depell/src/index.js
Normal file
554
depell/src/index.js
Normal file
|
@ -0,0 +1,554 @@
|
|||
/// @ts-check
|
||||
|
||||
/** @return {never} */
|
||||
function never() { throw new Error() }
|
||||
|
||||
/**@type{WebAssembly.Instance}*/ let hbcInstance;
|
||||
/**@type{Promise<WebAssembly.WebAssemblyInstantiatedSource>}*/ let hbcInstaceFuture;
|
||||
async function getHbcInstance() {
|
||||
hbcInstaceFuture ??= WebAssembly.instantiateStreaming(fetch("/hbc.wasm"), {});
|
||||
return hbcInstance ??= (await hbcInstaceFuture).instance;
|
||||
}
|
||||
|
||||
const stack_pointer_offset = 1 << 20;
|
||||
|
||||
/** @param {WebAssembly.Instance} instance @param {Post[]} packages @param {number} fuel
|
||||
* @returns {string} */
|
||||
function compileCode(instance, packages, fuel = 100) {
|
||||
let {
|
||||
INPUT, INPUT_LEN,
|
||||
LOG_MESSAGES, LOG_MESSAGES_LEN,
|
||||
memory, compile_and_run,
|
||||
} = instance.exports;
|
||||
|
||||
if (!(true
|
||||
&& memory instanceof WebAssembly.Memory
|
||||
&& INPUT instanceof WebAssembly.Global
|
||||
&& INPUT_LEN instanceof WebAssembly.Global
|
||||
&& LOG_MESSAGES instanceof WebAssembly.Global
|
||||
&& LOG_MESSAGES_LEN instanceof WebAssembly.Global
|
||||
&& typeof compile_and_run === "function"
|
||||
)) never();
|
||||
|
||||
const codeLength = packPosts(packages, new DataView(memory.buffer, INPUT.value));
|
||||
new DataView(memory.buffer).setUint32(INPUT_LEN.value, codeLength, true);
|
||||
|
||||
runWasmFunction(instance, compile_and_run, fuel);
|
||||
return bufToString(memory, LOG_MESSAGES, LOG_MESSAGES_LEN).trim();
|
||||
}
|
||||
|
||||
/**@type{WebAssembly.Instance}*/ let fmtInstance;
|
||||
/**@type{Promise<WebAssembly.WebAssemblyInstantiatedSource>}*/ let fmtInstaceFuture;
|
||||
async function getFmtInstance() {
|
||||
fmtInstaceFuture ??= WebAssembly.instantiateStreaming(fetch("/hbfmt.wasm"), {});
|
||||
return fmtInstance ??= (await fmtInstaceFuture).instance;
|
||||
}
|
||||
|
||||
/** @param {WebAssembly.Instance} instance @param {string} code @param {"tok" | "fmt" | "minify"} action
|
||||
* @returns {string | Uint8Array | undefined} */
|
||||
function modifyCode(instance, code, action) {
|
||||
let {
|
||||
INPUT, INPUT_LEN,
|
||||
OUTPUT, OUTPUT_LEN,
|
||||
memory, fmt, tok, minify
|
||||
} = instance.exports;
|
||||
|
||||
let funs = { fmt, tok, minify };
|
||||
let fun = funs[action];
|
||||
if (!(true
|
||||
&& memory instanceof WebAssembly.Memory
|
||||
&& INPUT instanceof WebAssembly.Global
|
||||
&& INPUT_LEN instanceof WebAssembly.Global
|
||||
&& OUTPUT instanceof WebAssembly.Global
|
||||
&& OUTPUT_LEN instanceof WebAssembly.Global
|
||||
&& funs.hasOwnProperty(action)
|
||||
&& typeof fun === "function"
|
||||
)) never();
|
||||
|
||||
if (action !== "fmt") {
|
||||
INPUT = OUTPUT;
|
||||
INPUT_LEN = OUTPUT_LEN;
|
||||
}
|
||||
|
||||
let dw = new DataView(memory.buffer);
|
||||
dw.setUint32(INPUT_LEN.value, code.length, true);
|
||||
new Uint8Array(memory.buffer, INPUT.value).set(new TextEncoder().encode(code));
|
||||
|
||||
if (!runWasmFunction(instance, fun)) {
|
||||
return undefined;
|
||||
}
|
||||
if (action === "tok") {
|
||||
return bufSlice(memory, OUTPUT, OUTPUT_LEN);
|
||||
} else {
|
||||
return bufToString(memory, OUTPUT, OUTPUT_LEN);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/** @param {WebAssembly.Instance} instance @param {CallableFunction} func @param {any[]} args
|
||||
* @returns {boolean} */
|
||||
function runWasmFunction(instance, func, ...args) {
|
||||
const { PANIC_MESSAGE, PANIC_MESSAGE_LEN, memory, stack_pointer } = instance.exports;
|
||||
if (!(true
|
||||
&& memory instanceof WebAssembly.Memory
|
||||
&& stack_pointer instanceof WebAssembly.Global
|
||||
)) never();
|
||||
const ptr = stack_pointer.value;
|
||||
try {
|
||||
func(...args);
|
||||
return true;
|
||||
} catch (error) {
|
||||
if (error instanceof WebAssembly.RuntimeError
|
||||
&& error.message == "unreachable"
|
||||
&& PANIC_MESSAGE instanceof WebAssembly.Global
|
||||
&& PANIC_MESSAGE_LEN instanceof WebAssembly.Global) {
|
||||
console.error(bufToString(memory, PANIC_MESSAGE, PANIC_MESSAGE_LEN), error);
|
||||
} else {
|
||||
console.error(error);
|
||||
}
|
||||
stack_pointer.value = ptr;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/** @typedef {Object} Post
|
||||
* @property {string} path
|
||||
* @property {string} code */
|
||||
|
||||
/** @param {Post[]} posts @param {DataView} view @returns {number} */
|
||||
function packPosts(posts, view) {
|
||||
const enc = new TextEncoder(), buf = new Uint8Array(view.buffer, view.byteOffset);
|
||||
let len = 0; for (const post of posts) {
|
||||
view.setUint16(len, post.path.length, true); len += 2;
|
||||
buf.set(enc.encode(post.path), len); len += post.path.length;
|
||||
view.setUint16(len, post.code.length, true); len += 2;
|
||||
buf.set(enc.encode(post.code), len); len += post.code.length;
|
||||
}
|
||||
return len;
|
||||
}
|
||||
|
||||
/** @param {WebAssembly.Memory} mem
|
||||
* @param {WebAssembly.Global} ptr
|
||||
* @param {WebAssembly.Global} len
|
||||
* @return {Uint8Array} */
|
||||
function bufSlice(mem, ptr, len) {
|
||||
return new Uint8Array(mem.buffer, ptr.value,
|
||||
new DataView(mem.buffer).getUint32(len.value, true));
|
||||
}
|
||||
|
||||
/** @param {WebAssembly.Memory} mem
|
||||
* @param {WebAssembly.Global} ptr
|
||||
* @param {WebAssembly.Global} len
|
||||
* @return {string} */
|
||||
function bufToString(mem, ptr, len) {
|
||||
const res = new TextDecoder()
|
||||
.decode(new Uint8Array(mem.buffer, ptr.value,
|
||||
new DataView(mem.buffer).getUint32(len.value, true)));
|
||||
new DataView(mem.buffer).setUint32(len.value, 0, true);
|
||||
return res;
|
||||
}
|
||||
|
||||
/** @param {HTMLElement} target */
|
||||
function wireUp(target) {
|
||||
execApply(target);
|
||||
cacheInputs(target);
|
||||
bindCodeEdit(target);
|
||||
bindTextareaAutoResize(target);
|
||||
}
|
||||
|
||||
const importRe = /@use\s*\(\s*"(([^"]|\\")+)"\s*\)/g;
|
||||
|
||||
/** @param {WebAssembly.Instance} fmt
|
||||
* @param {string} code
|
||||
* @param {string[]} roots
|
||||
* @param {Post[]} buf
|
||||
* @param {Set<string>} prevRoots
|
||||
* @returns {void} */
|
||||
function loadCachedPackages(fmt, code, roots, buf, prevRoots) {
|
||||
buf[0].code = code;
|
||||
|
||||
roots.length = 0;
|
||||
let changed = false;
|
||||
for (const match of code.matchAll(importRe)) {
|
||||
changed ||= !prevRoots.has(match[1]);
|
||||
roots.push(match[1]);
|
||||
}
|
||||
|
||||
if (!changed) return;
|
||||
buf.length = 1;
|
||||
prevRoots.clear();
|
||||
|
||||
for (let imp = roots.pop(); imp !== undefined; imp = roots.pop()) {
|
||||
if (prevRoots.has(imp)) continue; prevRoots.add(imp);
|
||||
|
||||
const fmtd = modifyCode(fmt, localStorage.getItem("package-" + imp) ?? never(), "fmt");
|
||||
if (typeof fmtd != "string") never();
|
||||
buf.push({ path: imp, code: fmtd });
|
||||
for (const match of buf[buf.length - 1].code.matchAll(importRe)) {
|
||||
roots.push(match[1]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**@type{Set<string>}*/ const prevRoots = new Set();
|
||||
/**@typedef {Object} PackageCtx
|
||||
* @property {AbortController} [cancelation]
|
||||
* @property {string[]} keyBuf
|
||||
* @property {Set<string>} prevParams
|
||||
* @property {HTMLTextAreaElement} [edit] */
|
||||
|
||||
/** @param {string} source @param {Set<string>} importDiff @param {HTMLPreElement} errors @param {PackageCtx} ctx */
|
||||
async function fetchPackages(source, importDiff, errors, ctx) {
|
||||
importDiff.clear();
|
||||
for (const match of source.matchAll(importRe)) {
|
||||
if (localStorage["package-" + match[1]]) continue;
|
||||
importDiff.add(match[1]);
|
||||
}
|
||||
|
||||
if (importDiff.size !== 0 && (ctx.prevParams.size != importDiff.size
|
||||
|| [...ctx.prevParams.keys()].every(e => importDiff.has(e)))) {
|
||||
if (ctx.cancelation) ctx.cancelation.abort();
|
||||
ctx.prevParams.clear();
|
||||
ctx.prevParams = new Set([...importDiff]);
|
||||
ctx.cancelation = new AbortController();
|
||||
|
||||
ctx.keyBuf.length = 0;
|
||||
ctx.keyBuf.push(...importDiff.keys());
|
||||
|
||||
errors.textContent = "fetching: " + ctx.keyBuf.join(", ");
|
||||
|
||||
await fetch(`/code`, {
|
||||
method: "POST",
|
||||
signal: ctx.cancelation.signal,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify(ctx.keyBuf),
|
||||
}).then(async e => {
|
||||
try {
|
||||
const json = await e.json();
|
||||
if (e.status == 200) {
|
||||
for (const [key, value] of Object.entries(json)) {
|
||||
localStorage["package-" + key] = value;
|
||||
}
|
||||
const missing = ctx.keyBuf.filter(i => json[i] === undefined);
|
||||
if (missing.length !== 0) {
|
||||
errors.textContent = "deps not found: " + missing.join(", ");
|
||||
} else {
|
||||
ctx.cancelation = undefined;
|
||||
ctx.edit?.dispatchEvent(new InputEvent("input"));
|
||||
}
|
||||
}
|
||||
} catch (er) {
|
||||
errors.textContent = "completely failed to fetch ("
|
||||
+ e.status + "): " + ctx.keyBuf.join(", ");
|
||||
console.error(e, er);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/** @param {HTMLElement} target */
|
||||
async function bindCodeEdit(target) {
|
||||
const edit = target.querySelector("#code-edit");
|
||||
if (!(edit instanceof HTMLTextAreaElement)) return;
|
||||
|
||||
const codeSize = target.querySelector("#code-size");
|
||||
const errors = target.querySelector("#compiler-output");
|
||||
if (!(true
|
||||
&& codeSize instanceof HTMLSpanElement
|
||||
&& errors instanceof HTMLPreElement
|
||||
)) never();
|
||||
|
||||
const MAX_CODE_SIZE = parseInt(codeSize.innerHTML);
|
||||
if (Number.isNaN(MAX_CODE_SIZE)) never();
|
||||
|
||||
const hbc = await getHbcInstance(), fmt = await getFmtInstance();
|
||||
let importDiff = new Set();
|
||||
/**@type{Post[]}*/
|
||||
const packages = [{ path: "local.hb", code: "" }];
|
||||
const debounce = 100;
|
||||
let timeout = 0;
|
||||
const ctx = { keyBuf: [], prevParams: new Set(), edit };
|
||||
|
||||
prevRoots.clear();
|
||||
|
||||
const onInput = () => {
|
||||
fetchPackages(edit.value, importDiff, errors, ctx);
|
||||
|
||||
if (ctx.cancelation && importDiff.size !== 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
loadCachedPackages(fmt, edit.value, ctx.keyBuf, packages, prevRoots);
|
||||
|
||||
errors.textContent = compileCode(hbc, packages);
|
||||
const minified_size = modifyCode(fmt, edit.value, "minify")?.length;
|
||||
if (minified_size) {
|
||||
codeSize.textContent = (MAX_CODE_SIZE - minified_size) + "";
|
||||
const perc = Math.min(100, Math.floor(100 * (minified_size / MAX_CODE_SIZE)));
|
||||
codeSize.style.color = `color-mix(in srgb, light-dark(black, white), var(--error) ${perc}%)`;
|
||||
}
|
||||
timeout = 0;
|
||||
};
|
||||
|
||||
edit.addEventListener("input", () => {
|
||||
if (timeout) clearTimeout(timeout);
|
||||
timeout = setTimeout(onInput, debounce)
|
||||
});
|
||||
edit.dispatchEvent(new InputEvent("input"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {Array<string>}
|
||||
* to be synched with `enum TokenGroup` in bytecode/src/fmt.rs */
|
||||
const TOK_CLASSES = [
|
||||
'Blank',
|
||||
'Comment',
|
||||
'Keyword',
|
||||
'Identifier',
|
||||
'Directive',
|
||||
'Number',
|
||||
'String',
|
||||
'Op',
|
||||
'Assign',
|
||||
'Paren',
|
||||
'Bracket',
|
||||
'Colon',
|
||||
'Comma',
|
||||
'Dot',
|
||||
'Ctor',
|
||||
];
|
||||
|
||||
/** @type {{ [key: string]: (el: HTMLElement) => void | Promise<void> }} */
|
||||
const applyFns = {
|
||||
timestamp: (el) => {
|
||||
const timestamp = el.innerText;
|
||||
const date = new Date(parseInt(timestamp) * 1000);
|
||||
el.innerText = date.toLocaleString();
|
||||
},
|
||||
fmt,
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {HTMLElement} target */
|
||||
async function fmt(target) {
|
||||
const code = target.innerText;
|
||||
const instance = await getFmtInstance();
|
||||
const decoder = new TextDecoder('utf-8');
|
||||
const fmt = modifyCode(instance, code, 'fmt');
|
||||
if (typeof fmt !== "string") return;
|
||||
const codeBytes = new TextEncoder().encode(fmt);
|
||||
const tok = modifyCode(instance, fmt, 'tok');
|
||||
if (!(tok instanceof Uint8Array)) return;
|
||||
target.innerHTML = '';
|
||||
let start = 0;
|
||||
let kind = tok[0];
|
||||
for (let ii = 1; ii <= tok.length; ii += 1) {
|
||||
// split over same tokens and buffer end
|
||||
if (tok[ii] === kind && ii < tok.length) {
|
||||
continue;
|
||||
}
|
||||
const text = decoder.decode(codeBytes.subarray(start, ii));
|
||||
const textNode = document.createTextNode(text);
|
||||
if (kind === 0) {
|
||||
target.appendChild(textNode);
|
||||
} else {
|
||||
const el = document.createElement('span');
|
||||
el.classList.add('syn');
|
||||
el.classList.add(TOK_CLASSES[kind]);
|
||||
el.appendChild(textNode);
|
||||
target.appendChild(el);
|
||||
}
|
||||
if (ii == tok.length) {
|
||||
break;
|
||||
}
|
||||
start = ii;
|
||||
kind = tok[ii];
|
||||
}
|
||||
}
|
||||
|
||||
/** @param {HTMLElement} target */
|
||||
function execApply(target) {
|
||||
const promises = [];
|
||||
for (const elem of target.querySelectorAll('[apply]')) {
|
||||
if (!(elem instanceof HTMLElement)) continue;
|
||||
const funcname = elem.getAttribute('apply') ?? never();
|
||||
const vl = applyFns[funcname](elem);
|
||||
if (vl instanceof Promise) promises.push(vl);
|
||||
}
|
||||
if (target === document.body) {
|
||||
Promise.all(promises).then(() => document.body.hidden = false);
|
||||
}
|
||||
}
|
||||
|
||||
/** @param {HTMLElement} target */
|
||||
function bindTextareaAutoResize(target) {
|
||||
for (const textarea of target.querySelectorAll("textarea")) {
|
||||
if (!(textarea instanceof HTMLTextAreaElement)) never();
|
||||
|
||||
const taCssMap = window.getComputedStyle(textarea);
|
||||
const padding = parseInt(taCssMap.getPropertyValue('padding-top') ?? "0")
|
||||
+ parseInt(taCssMap.getPropertyValue('padding-bottom') ?? "0");
|
||||
textarea.style.height = "auto";
|
||||
textarea.style.height = (textarea.scrollHeight - padding) + "px";
|
||||
textarea.style.overflowY = "hidden";
|
||||
textarea.addEventListener("input", function() {
|
||||
let top = window.scrollY;
|
||||
textarea.style.height = "auto";
|
||||
textarea.style.height = (textarea.scrollHeight - padding) + "px";
|
||||
window.scrollTo({ top });
|
||||
});
|
||||
|
||||
textarea.onkeydown = (ev) => {
|
||||
if (ev.key === "Tab") {
|
||||
ev.preventDefault();
|
||||
document.execCommand('insertText', false, "\t");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** @param {HTMLElement} target */
|
||||
function cacheInputs(target) {
|
||||
/**@type {HTMLFormElement}*/ let form;
|
||||
for (form of target.querySelectorAll('form')) {
|
||||
const path = form.getAttribute('hx-post') || form.getAttribute('hx-delete');
|
||||
if (!path) {
|
||||
console.warn('form does not have a hx-post or hx-delete attribute', form);
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const input of form.elements) {
|
||||
if (input instanceof HTMLInputElement || input instanceof HTMLTextAreaElement) {
|
||||
if ('password submit button'.includes(input.type)) continue;
|
||||
const key = path + input.name;
|
||||
input.value = localStorage.getItem(key) ?? '';
|
||||
input.addEventListener("input", () => localStorage.setItem(key, input.value));
|
||||
} else {
|
||||
console.warn("unhandled form element: ", input);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** @param {string} [path] */
|
||||
function updateTab(path) {
|
||||
console.log(path);
|
||||
for (const elem of document.querySelectorAll("button[hx-push-url]")) {
|
||||
if (elem instanceof HTMLButtonElement)
|
||||
elem.disabled =
|
||||
elem.getAttribute("hx-push-url") === path
|
||||
|| elem.getAttribute("hx-push-url") === window.location.pathname;
|
||||
}
|
||||
}
|
||||
|
||||
if (window.location.hostname === 'localhost') {
|
||||
let id; setInterval(async () => {
|
||||
let new_id = await fetch('/hot-reload').then(reps => reps.text());
|
||||
id ??= new_id;
|
||||
if (id !== new_id) window.location.reload();
|
||||
}, 300);
|
||||
|
||||
(async function test() {
|
||||
{
|
||||
const code = "main:=fn():void{return}";
|
||||
const inst = await getFmtInstance()
|
||||
const fmtd = modifyCode(inst, code, "fmt") ?? never();
|
||||
if (typeof fmtd !== "string") never();
|
||||
const prev = modifyCode(inst, fmtd, "minify") ?? never();
|
||||
if (code != prev) console.error(code, prev);
|
||||
}
|
||||
{
|
||||
const posts = [{
|
||||
path: "foo.hb",
|
||||
code: "main:=fn():int{return 42}",
|
||||
}];
|
||||
const res = compileCode(await getHbcInstance(), posts, 1) ?? never();
|
||||
const expected = "exit code: 42";
|
||||
if (expected != res) console.error(expected, res);
|
||||
}
|
||||
})()
|
||||
}
|
||||
|
||||
document.body.addEventListener('htmx:afterSwap', (ev) => {
|
||||
if (!(ev.target instanceof HTMLElement)) never();
|
||||
wireUp(ev.target);
|
||||
if (ev.target.tagName == "MAIN" || ev.target.tagName == "BODY")
|
||||
updateTab(ev['detail'].pathInfo.finalRequestPath);
|
||||
});
|
||||
|
||||
getFmtInstance().then(inst => {
|
||||
document.body.addEventListener('htmx:configRequest', (ev) => {
|
||||
const details = ev['detail'];
|
||||
if (details.path === "/post" && details.verb === "post") {
|
||||
details.parameters['code'] = modifyCode(inst, details.parameters['code'], "minify");
|
||||
}
|
||||
});
|
||||
|
||||
/** @param {string} query @param {string} target @returns {number} */
|
||||
function fuzzyCost(query, target) {
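// Walk the query against the target, wrapping around the target as needed;
// every character that fails to match adds 1 to the cost, and a query that
// never matches at all is penalized by 100 * target.length.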
|
||||
let qi = 0, bi = 0, cost = 0, matched = false;
|
||||
while (qi < query.length) {
|
||||
if (query.charAt(qi) === target.charAt(bi++)) {
|
||||
matched = true;
|
||||
qi++;
|
||||
} else {
|
||||
cost++;
|
||||
}
|
||||
if (bi === target.length) (bi = 0, qi++);
|
||||
}
|
||||
return cost + (matched ? 0 : 100 * target.length);
|
||||
}
|
||||
|
||||
let deps = undefined;
|
||||
/** @param {HTMLInputElement} input @returns {void} */
|
||||
function filterCodeDeps(input) {
|
||||
deps ??= document.getElementById("deps");
|
||||
if (!(deps instanceof HTMLElement)) never();
|
||||
if (input.value === "") {
|
||||
deps.textContent = "results show here...";
|
||||
return;
|
||||
}
|
||||
deps.innerHTML = "";
|
||||
for (const root of [...prevRoots.keys()]
|
||||
.sort((a, b) => fuzzyCost(input.value, a) - fuzzyCost(input.value, b))) {
|
||||
const pane = document.createElement("div");
|
||||
const code = modifyCode(inst, localStorage["package-" + root], "fmt");
|
||||
pane.innerHTML = `<div>${root}</div><pre>${code}</pre>`;
|
||||
deps.appendChild(pane);
|
||||
}
|
||||
if (deps.innerHTML === "") {
|
||||
deps.textContent = "no results";
|
||||
}
|
||||
}
|
||||
|
||||
Object.assign(window, { filterCodeDeps });
|
||||
});
|
||||
|
||||
/** @param {HTMLElement} target */
|
||||
function runPost(target) {
|
||||
while (!target.matches("div[class=preview]")) target = target.parentElement ?? never();
|
||||
const code = target.querySelector("pre[apply=fmt]");
|
||||
if (!(code instanceof HTMLPreElement)) never();
|
||||
const output = target.querySelector("pre[id=compiler-output]");
|
||||
if (!(output instanceof HTMLPreElement)) never();
|
||||
|
||||
Promise.all([getHbcInstance(), getFmtInstance()]).then(async ([hbc, fmt]) => {
|
||||
const ctx = { keyBuf: [], prevParams: new Set() };
|
||||
await fetchPackages(code.innerText ?? never(), new Set(), output, ctx);
|
||||
const posts = [{ path: "this", code: "" }];
|
||||
loadCachedPackages(fmt, code.innerText ?? never(), ctx.keyBuf, posts, new Set());
|
||||
output.textContent = compileCode(hbc, posts);
|
||||
output.hidden = false;
|
||||
});
|
||||
|
||||
let author = encodeURIComponent(target.dataset.author ?? never());
|
||||
let name = encodeURIComponent(target.dataset.name ?? never());
|
||||
fetch(`/post/run?author=${author}&name=${name}`, { method: "POST" })
|
||||
}
|
||||
|
||||
Object.assign(window, { runPost });
|
||||
|
||||
updateTab();
|
||||
wireUp(document.body);
|
||||
|
1040
depell/src/main.rs
Normal file
File diff suppressed because it is too large
1
depell/src/migrations/1.sql
Normal file
|
@ -0,0 +1 @@
|
|||
|
55
depell/src/schema.sql
Normal file
|
@ -0,0 +1,55 @@
|
|||
PRAGMA foreign_keys = ON;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS user(
|
||||
name TEXT NOT NULL,
|
||||
password_hash TEXT NOT NULL,
|
||||
PRIMARY KEY (name)
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS session(
|
||||
id BLOB NOT NULL,
|
||||
username TEXT NOT NULL,
|
||||
expiration INTEGER NOT NULL,
|
||||
FOREIGN KEY (username) REFERENCES user (name)
|
||||
PRIMARY KEY (username)
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS
|
||||
session_id ON session (id);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS post(
|
||||
name TEXT NOT NULL,
|
||||
author TEXT,
|
||||
timestamp INTEGER,
|
||||
code TEXT NOT NULL,
|
||||
FOREIGN KEY (author) REFERENCES user(name) ON DELETE SET NULL,
|
||||
PRIMARY KEY (author, name)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS
|
||||
post_timestamp ON post(timestamp DESC);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS import(
|
||||
from_name TEXT NOT NULL,
|
||||
from_author TEXT,
|
||||
to_name TEXT NOT NULL,
|
||||
to_author TEXT,
|
||||
FOREIGN KEY (from_name, from_author) REFERENCES post(name, author),
|
||||
FOREIGN KEY (to_name, to_author) REFERENCES post(name, author)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS
|
||||
dependencies ON import(from_name, from_author);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS
|
||||
dependants ON import(to_name, to_author);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS run(
|
||||
code_name TEXT NOT NULL,
|
||||
code_author TEXT NOT NULL,
|
||||
runner TEXT NOT NULL,
|
||||
FOREIGN KEY (code_name, code_author) REFERENCES post(name, author),
|
||||
FOREIGN KEY (runner) REFERENCES user(name),
|
||||
PRIMARY KEY (code_name, code_author, runner)
|
||||
);
|
||||
|
61
depell/src/static-pages/developing-hblang.md
Normal file
|
@ -0,0 +1,61 @@
|
|||
# The journey to an optimizing compiler
|
||||
|
||||
It's been years since I started continuously trying to make a compiler to implement the language of my dreams. The problem, tho, was that I wanted something similar to Rust, which, if you did not know, far exceeded the one million lines of code mark in `rustc` some time ago, so implementing such a language would take me years if not decades, but I still tried it.
|
||||
|
||||
Besides being extremely ambitious, the problem with my earliest attempts at making a compiler is that literally nobody, not even me, was using the language, and so in retrospect I am confident that what I implemented was a complex test-case implementation, and not a compiler. I often fell into the trap of implementing edge cases instead of an algorithm that would handle not only the very few things the tests do but also all the other stuff that users of the language would try.
|
||||
|
||||
Another part of why I was failing for all that time is that I did the hardest thing first, without understanding the core concepts involved in translating written language to IR, god forbid assembly. I wasted a lot of time like this, but at least I learned Rust well. At some point I found a job where I started developing a decentralized network, and that fully drew me away from language development.
|
||||
|
||||
## Completely new approach
|
||||
|
||||
At some point the company I was working for started having financial issues and was unable to pay me. During that period, I discovered that my love for networking was largely fueled by the monetary gains associated with it. I burned out and started looking for things to do with my free time.
|
||||
|
||||
One could say the timing was perfect, because [`ableos`](https://git.ablecorp.us/AbleOS/ableos) was desperately in need of a sane programming language that compiles to the homemade VM ISA used for all software run in `ableos`, but there was nobody crazy enough to do it. I got terribly nerd-sniped, tho I don't regret it. The process of making a language for `ableos` was completely different. Firstly, it needed to be done asap, since the lack of a good language blocked everyone from writing drivers for `ableos`; secondly, the moment the language was at least a little bit usable, people other than me would start using it; and lastly, the ISA the language compiles to is very simple to emit, understand, and run.
|
||||
|
||||
### Urgency is a bliss
|
||||
|
||||
I actually managed to make the language somewhat work in one week, mainly because my mindset changed. I no longer spent a lot of time designing syntax for elegance; I designed it so that it is incredibly easy to parse, meaning I could spend minimal effort implementing the parser and fully focus on the hard problem of translating the AST to instructions. Surprisingly, making everything an expression and not enforcing any arbitrary rules makes the code you can write incredibly flexible, and (most) people love it. One of the decisions I made to save time (or maybe it was an accident) was to make `,;` not enforced, meaning you are allowed to write delimiters in lists but, as long as it does not change the intent of the code, you can leave them out. In practice you actually don't need semicolons, unless the next line starts with something sticky like `*x`; in that case you put a semicolon on the previous line to tell the parser where the current expression ends.
|
||||
|
||||
### Only the problem I care about
|
||||
|
||||
It's good to note that writing a parser is no longer interesting to me. I wrote many parsers before, and writing one no longer feels rewarding, but more like a chore. The real problem I was excited about was translating the AST to instructions; I always ended up overcomplicating this step with edge cases for every possible scenario that can happen in code, of which there are infinitely many. But why did I succeed this time? Well, all the friction related to getting something I can execute was so low that I could iterate quickly and realize what I was doing wrong before I burned out. In a week I managed to understand what I had been failing to do for years, partly because of all the previous suffering, but mainly because it was so easy to pivot and try new things. And so I managed to make my first single-pass compiler, and people immediately started using it.
|
||||
|
||||
### Don't implement features nobody asked for
|
||||
|
||||
Immediately after someone other than me wrote something in `hb`, stuff started breaking. Over the course of a month I kept fixing bugs and adding new features just fine, and more people started to use the language. All was good and well until I looked into the code. It was incredibly cursed, full of tricks to work around the compiler not doing any optimizations. At that moment I realized the whole compiler after the parser needed to be rewritten; I had to implement optimizations, otherwise people won't be able to write readable code that runs fast. All of the features I had added up until now were technical debt: unless they all worked with optimizations, the existing code could not compile. Yes, if a feature exists, be sure as hell it will be used.
|
||||
|
||||
It took around 4 months to reimplement everything and make the optimal code look like what you are used to in other languages. I am really thankful for [sea of nodes](https://github.com/SeaOfNodes), and all the amazing work Cliff Click and others do to demystify optimizers. It would have taken much longer for me to figure all the principles out without the exhaustive [tutorial](https://github.com/SeaOfNodes/Simple?tab=readme-ov-file).
|
||||
|
||||
## How my understanding of optimizations changed
|
||||
|
||||
### Optimizations allow us to scale software
|
||||
|
||||
I need to admit, before writing a single-pass compiler and later upgrading it to an optimizing one, I thought optimizations only affect the quality of the final assembly emitted by the compiler. It never occurred to me that what the optimizations actually do is reduce the impact of how you decide to write the code. In a single-pass compiler (with zero optimizations), the machine code reflects (a sketch follows this list):
|
||||
|
||||
- order of operations as written in code
|
||||
- whether the value was stored in intermediate locations
|
||||
- exact structure of the control flow and at which point the operations are placed
|
||||
- how many times something is recomputed
|
||||
- operations that only help to convey intent for the reader of the source code
|
||||
- and more I can't think of...
|
||||
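A minimal sketch of the second and fourth points, written in Rust rather than hblang (purely illustrative, not code from this repo): both functions compute the same value, but a single-pass compiler translates each one literally, so the first emits the multiply twice while the second spends a temporary on it.

```rust
// Compiled without optimizations, the shape of the source dictates the output.

// Two occurrences of `a * b` become two multiply instructions.
fn repeated(a: i64, b: i64) -> i64 {
    a * b + a * b
}

// The multiply is emitted once, but the named temporary may cost a
// stack slot or an extra register.
fn with_temporary(a: i64, b: i64) -> i64 {
    let t = a * b;
    t + t
}

fn main() {
    assert_eq!(repeated(3, 4), with_temporary(3, 4));
}
```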
|
||||
If you took some code you wrote and then modified it to obfuscate these aspects (relative to the original code), you would be doing a subset of what an optimizing compiler does. Of course, a good compiler tries hard to improve the metrics it's optimizing for; it would (a sketch follows this list):
|
||||
|
||||
- reorder operations to allow the CPU to parallelize them
|
||||
- remove needless stores, or store values directly to places you can't express in code
|
||||
- pull operations out of the loops and into the branches (if it can)
|
||||
- find all common sub-expressions and compute them only once
|
||||
- fold constants as much as possible and use obscure tricks to replace slow instructions if any of the operands are constant
|
||||
- and more...
|
||||
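A hand-done sketch of two of these rewrites (constant folding and pulling invariant work out of a loop), again in Rust for illustration; a decent optimizer is expected to turn the first form into roughly the second on its own.

```rust
// As written: the bound and the constant factor are recomputed every iteration.
fn as_written(xs: &[i64]) -> i64 {
    let mut sum = 0;
    let mut i = 0;
    while i < xs.len() * 8 / 8 {
        sum += xs[i] * (1024 / 4);
        i += 1;
    }
    sum
}

// Roughly what the optimizer produces: `* 8 / 8` cancels, `1024 / 4` folds
// to 256, and the loop bound is computed once.
fn as_optimized(xs: &[i64]) -> i64 {
    let bound = xs.len();
    let mut sum = 0;
    let mut i = 0;
    while i < bound {
        sum += xs[i] * 256;
        i += 1;
    }
    sum
}

fn main() {
    let xs = [1, 2, 3, 4];
    assert_eq!(as_written(&xs), as_optimized(&xs));
}
```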
|
||||
In the end, compiler optimizations try to reduce the correlation between how the code happens to be written and how well it performs, which is extremely important when you want humans to read the code.
|
||||
|
||||
### Optimizing compilers know more than you
|
||||
|
||||
Optimizing code is a search problem: an optimizer searches the code for patterns that can be rewritten as something more practical for the computer, while preserving the observable behavior of the program. This means it needs enough context about the code to not make a mistake. In fact, the optimizer has so much context that it is able to determine your code is useless. But wait, didn't you write the code because you needed it to do something? Maybe your intention was to break out of the loop after you are done, but the optimizer looked at the code and said, "great, we are so lucky that this integer is always small enough to miss this check by one, DELETE", and then it goes "jackpot, since this loop is now infinite, we don't need this code after it, DELETE". Notice that the optimizer is eager to delete dead code; it did not ask you "Brah, why did you place all your code after an infinite loop?". This is just an example; there are many more cases where modern optimizers just delete all your code because they have proven it does something invalid, without ever running it.
|
||||
|
||||
It's stupid, but it's the world we live in: optimizers are usually a black box you import and feed the code in a format they understand; they then proceed to optimize it, and if they find a glaring bug they won't tell you, god forbid, they will just molest the code in unspecified ways and spit out what's left. Before writing an optimizer, I did not know this can happen, and I did not know this is a problem I pay for with my time, spent figuring out why nothing is happening when I run the program.
|
||||
|
||||
But wait, it's worse! Since optimizers won't ever share the fact you are stupid, we end up with other people painstakingly writing complex linters that do a shitty job detecting things that matter, and instead whine about style and other bullcrap (and they suck even at that). If the people who write linters and the people who write optimizers swapped roles, I would be ranting about optimizers instead.
|
||||
|
||||
And so, this is the area where I want to innovate: let's report the dead code to the frontend, and let the compiler frontend filter out the noise and show the relevant information in the diagnostics. Refuse to compile the program if you `i /= 0`. Refuse to compile if you `arr[arr.len]`. This is the level of stupid the optimizer sees once it normalizes your code, yet it proceeds to protect your feelings. My goal is for hblang to relay this to you as much as possible. If we can query for optimizations, we can query for bugs too.
|
8
depell/src/static-pages/post.md
Normal file
|
@ -0,0 +1,8 @@
|
|||
### About posting code
|
||||
|
||||
If you are unfamiliar with [hblang](https://git.ablecorp.us/AbleOS/holey-bytes), refer to the **hblang/README.md** or visit [mlokis's posts](/profile/mlokis). Preferably don't edit the code here.
|
||||
|
||||
### Extra textarea features
|
||||
|
||||
- proper tab behaviour
|
||||
- snap to previous tab boundary on "empty" lines
|
11
depell/src/static-pages/welcome.md
Normal file
|
@ -0,0 +1,11 @@
|
|||
## Welcome to depell
|
||||
|
||||
Depell (dependency hell) is a simple "social" media site, except that all you can post is [hblang](https://git.ablecorp.us/AbleOS/holey-bytes) code. Instead of likes you run the program, and instead of mentions you import the program as a dependency. Runs count even when the program is run indirectly.
|
||||
|
||||
The backend only serves the code; the frontend compiles and runs it locally. All posts are immutable.
|
||||
|
||||
## Security?
|
||||
|
||||
All code runs in WASM (inside a holey-bytes VM until hblang compiles to wasm) and is controlled by JavaScript. WASM
|
||||
can't do any form of IO without going through JavaScript, so as long as the JS imports do not allow WASM to execute
|
||||
arbitrary JS code, WASM can act as a container inside the JS.
|
11
depell/wasm-fmt/Cargo.toml
Normal file
|
@ -0,0 +1,11 @@
|
|||
[package]
|
||||
name = "wasm-hbfmt"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib"]
|
||||
|
||||
[dependencies]
|
||||
hblang = { workspace = true, features = ["no_log"] }
|
||||
wasm-rt = { version = "0.1.0", path = "../wasm-rt" }
|
42
depell/wasm-fmt/src/lib.rs
Normal file
|
@ -0,0 +1,42 @@
|
|||
#![no_std]
|
||||
#![feature(str_from_raw_parts)]
|
||||
#![feature(alloc_error_handler)]
|
||||
|
||||
use hblang::{fmt, parser};
|
||||
|
||||
wasm_rt::decl_runtime!(128 * 1024, 1024 * 4);
|
||||
|
||||
const MAX_OUTPUT_SIZE: usize = 1024 * 10;
|
||||
wasm_rt::decl_buffer!(MAX_OUTPUT_SIZE, MAX_OUTPUT, OUTPUT, OUTPUT_LEN);
|
||||
|
||||
const MAX_INPUT_SIZE: usize = 1024 * 4;
|
||||
wasm_rt::decl_buffer!(MAX_INPUT_SIZE, MAX_INPUT, INPUT, INPUT_LEN);
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn fmt() {
|
||||
ALLOCATOR.reset();
|
||||
|
||||
let code = core::str::from_raw_parts(core::ptr::addr_of!(INPUT).cast(), INPUT_LEN);
|
||||
|
||||
let arena = parser::Arena::with_capacity(code.len() * parser::SOURCE_TO_AST_FACTOR);
|
||||
let mut ctx = parser::Ctx::default();
|
||||
let exprs = parser::Parser::parse(&mut ctx, code, "source.hb", &mut parser::no_loader, &arena);
|
||||
|
||||
let mut f = wasm_rt::Write(&mut OUTPUT[..]);
|
||||
fmt::fmt_file(exprs, code, &mut f).unwrap();
|
||||
OUTPUT_LEN = MAX_OUTPUT_SIZE - f.0.len();
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn tok() {
|
||||
let code = core::slice::from_raw_parts_mut(
|
||||
core::ptr::addr_of_mut!(OUTPUT).cast(), OUTPUT_LEN);
|
||||
OUTPUT_LEN = fmt::get_token_kinds(code);
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn minify() {
|
||||
let code = core::str::from_raw_parts_mut(
|
||||
core::ptr::addr_of_mut!(OUTPUT).cast(), OUTPUT_LEN);
|
||||
OUTPUT_LEN = fmt::minify(code);
|
||||
}
|
14
depell/wasm-hbc/Cargo.toml
Normal file
|
@ -0,0 +1,14 @@
|
|||
[package]
|
||||
name = "wasm-hbc"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib"]
|
||||
|
||||
[dependencies]
|
||||
hblang = { workspace = true, features = [] }
|
||||
hbvm.workspace = true
|
||||
log = { version = "0.4.22", features = ["release_max_level_error"] }
|
||||
wasm-rt = { version = "0.1.0", path = "../wasm-rt", features = ["log"] }
|
||||
|
128
depell/wasm-hbc/src/lib.rs
Normal file
|
@ -0,0 +1,128 @@
|
|||
#![feature(alloc_error_handler)]
|
||||
#![feature(slice_take)]
|
||||
#![no_std]
|
||||
|
||||
use {
|
||||
alloc::{string::String, vec::Vec},
|
||||
core::ffi::CStr,
|
||||
hblang::{
|
||||
backend::hbvm::HbvmBackend,
|
||||
son::{Codegen, CodegenCtx},
|
||||
ty::Module,
|
||||
Ent,
|
||||
},
|
||||
};
|
||||
|
||||
extern crate alloc;
|
||||
|
||||
const ARENA_CAP: usize = 128 * 16 * 1024;
|
||||
wasm_rt::decl_runtime!(ARENA_CAP, 1024 * 4);
|
||||
|
||||
const MAX_INPUT_SIZE: usize = 32 * 4 * 1024;
|
||||
wasm_rt::decl_buffer!(MAX_INPUT_SIZE, MAX_INPUT, INPUT, INPUT_LEN);
|
||||
|
||||
#[no_mangle]
|
||||
unsafe fn compile_and_run(mut fuel: usize) {
|
||||
ALLOCATOR.reset();
|
||||
|
||||
_ = log::set_logger(&wasm_rt::Logger);
|
||||
log::set_max_level(log::LevelFilter::Error);
|
||||
|
||||
struct File<'a> {
|
||||
path: &'a str,
|
||||
code: &'a mut str,
|
||||
}
|
||||
|
||||
let mut root = 0;
|
||||
|
||||
let files = {
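// INPUT is filled by the host (see `packPosts` on the JS side): for each post,
// a u16 LE path length, the path bytes, a u16 LE code length, then the code bytes.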
|
||||
let mut input_bytes =
|
||||
core::slice::from_raw_parts_mut(core::ptr::addr_of_mut!(INPUT).cast::<u8>(), INPUT_LEN);
|
||||
|
||||
let mut files = Vec::with_capacity(32);
|
||||
while let Some((&mut path_len, rest)) = input_bytes.split_first_chunk_mut() {
|
||||
let (path, rest) = rest.split_at_mut(u16::from_le_bytes(path_len) as usize);
|
||||
let (&mut code_len, rest) = rest.split_first_chunk_mut().unwrap();
|
||||
let (code, rest) = rest.split_at_mut(u16::from_le_bytes(code_len) as usize);
|
||||
files.push(File {
|
||||
path: core::str::from_utf8_unchecked(path),
|
||||
code: core::str::from_utf8_unchecked_mut(code),
|
||||
});
|
||||
input_bytes = rest;
|
||||
}
|
||||
|
||||
let root_path = files[root].path;
|
||||
hblang::quad_sort(&mut files, |a, b| a.path.cmp(b.path));
|
||||
root = files.binary_search_by_key(&root_path, |p| p.path).unwrap();
|
||||
|
||||
files
|
||||
};
|
||||
|
||||
let mut ctx = CodegenCtx::default();
|
||||
|
||||
let files = {
|
||||
let paths = files.iter().map(|f| f.path).collect::<Vec<_>>();
|
||||
let mut loader = |path: &str, _: &str, kind| match kind {
|
||||
hblang::parser::FileKind::Module => Ok(paths.binary_search(&path).unwrap()),
|
||||
hblang::parser::FileKind::Embed => Err("embeds are not supported".into()),
|
||||
};
|
||||
files
|
||||
.into_iter()
|
||||
.map(|f| {
|
||||
hblang::parser::Ast::new(
|
||||
f.path,
|
||||
// since 'free' does nothing this is fine
|
||||
String::from_raw_parts(f.code.as_mut_ptr(), f.code.len(), f.code.len()),
|
||||
&mut ctx.parser,
|
||||
&mut loader,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
let mut ct = {
|
||||
let mut backend = HbvmBackend::default();
|
||||
Codegen::new(&mut backend, &files, &mut ctx).generate(Module::new(root));
|
||||
|
||||
if !ctx.parser.errors.borrow().is_empty() {
|
||||
log::error!("{}", ctx.parser.errors.borrow());
|
||||
return;
|
||||
}
|
||||
|
||||
let mut c = Codegen::new(&mut backend, &files, &mut ctx);
|
||||
c.assemble_comptime()
|
||||
};
|
||||
|
||||
while fuel != 0 {
|
||||
match ct.vm.run() {
|
||||
Ok(hbvm::VmRunOk::End) => {
|
||||
log::error!("exit code: {}", ct.vm.read_reg(1).0 as i64);
|
||||
break;
|
||||
}
|
||||
Ok(hbvm::VmRunOk::Ecall) => {
|
||||
let kind = ct.vm.read_reg(2).0;
|
||||
match kind {
|
||||
0 => {
|
||||
let str = ct.vm.read_reg(3).0;
|
||||
let str = unsafe { CStr::from_ptr(str as _) };
|
||||
log::error!("{}", str.to_str().unwrap());
|
||||
}
|
||||
unknown => log::error!("unknown ecall: {unknown}"),
|
||||
}
|
||||
}
|
||||
Ok(hbvm::VmRunOk::Timer) => {
|
||||
fuel -= 1;
|
||||
if fuel == 0 {
|
||||
log::error!("program timed out");
|
||||
}
|
||||
}
|
||||
Ok(hbvm::VmRunOk::Breakpoint) => todo!(),
|
||||
Err(e) => {
|
||||
log::error!("vm error: {e}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//log::error!("memory consumption: {}b / {}b", ALLOCATOR.used(), ARENA_CAP);
|
||||
}
|
7
depell/wasm-rt/Cargo.toml
Normal file
|
@ -0,0 +1,7 @@
|
|||
[package]
|
||||
name = "wasm-rt"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
log = { version = "0.4.22", optional = true }
|
162
depell/wasm-rt/src/lib.rs
Normal file
|
@ -0,0 +1,162 @@
|
|||
#![feature(alloc_error_handler)]
|
||||
#![feature(pointer_is_aligned_to)]
|
||||
#![feature(slice_take)]
|
||||
#![no_std]
|
||||
|
||||
use core::{
|
||||
alloc::{GlobalAlloc, Layout},
|
||||
cell::UnsafeCell,
|
||||
};
|
||||
|
||||
extern crate alloc;
|
||||
|
||||
#[macro_export]
|
||||
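/// Declares a fixed-size byte buffer plus `#[no_mangle]` statics for its capacity
/// and current length, so the host can locate the buffer and its length in wasm
/// memory by export name.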
macro_rules! decl_buffer {
|
||||
($cap:expr, $export_cap:ident, $export_base:ident, $export_len:ident) => {
|
||||
#[no_mangle]
|
||||
static $export_cap: usize = $cap;
|
||||
#[no_mangle]
|
||||
static mut $export_base: [u8; $cap] = [0; $cap];
|
||||
#[no_mangle]
|
||||
static mut $export_len: usize = 0;
|
||||
};
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! decl_runtime {
|
||||
($memory_size:expr, $max_panic_size:expr) => {
|
||||
#[cfg(debug_assertions)]
|
||||
#[no_mangle]
|
||||
static mut PANIC_MESSAGE: [u8; $max_panic_size] = [0; $max_panic_size];
|
||||
#[cfg(debug_assertions)]
|
||||
#[no_mangle]
|
||||
static mut PANIC_MESSAGE_LEN: usize = 0;
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[panic_handler]
|
||||
pub fn handle_panic(_info: &core::panic::PanicInfo) -> ! {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
unsafe {
|
||||
use core::fmt::Write;
|
||||
let mut f = $crate::Write(&mut PANIC_MESSAGE[..]);
|
||||
_ = writeln!(f, "{}", _info);
|
||||
PANIC_MESSAGE_LEN = $max_panic_size - f.0.len();
|
||||
}
|
||||
}
|
||||
|
||||
core::arch::wasm32::unreachable();
|
||||
}
|
||||
|
||||
#[global_allocator]
|
||||
static ALLOCATOR: $crate::ArenaAllocator<{ $memory_size }> = $crate::ArenaAllocator::new();
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[alloc_error_handler]
|
||||
fn alloc_error(_: core::alloc::Layout) -> ! {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
unsafe {
|
||||
use core::fmt::Write;
|
||||
let mut f = $crate::Write(&mut PANIC_MESSAGE[..]);
|
||||
_ = writeln!(f, "out of memory");
|
||||
PANIC_MESSAGE_LEN = $max_panic_size - f.0.len();
|
||||
}
|
||||
}
|
||||
|
||||
core::arch::wasm32::unreachable()
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(feature = "log")]
|
||||
pub struct Logger;
|
||||
|
||||
#[cfg(feature = "log")]
|
||||
impl log::Log for Logger {
|
||||
fn enabled(&self, _: &log::Metadata) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn log(&self, record: &log::Record) {
|
||||
if self.enabled(record.metadata()) {
|
||||
const MAX_LOG_MESSAGE: usize = 1024 * 8;
|
||||
#[no_mangle]
|
||||
static mut LOG_MESSAGES: [u8; MAX_LOG_MESSAGE] = [0; MAX_LOG_MESSAGE];
|
||||
#[no_mangle]
|
||||
static mut LOG_MESSAGES_LEN: usize = 0;
|
||||
|
||||
unsafe {
|
||||
use core::fmt::Write;
|
||||
let mut f = Write(&mut LOG_MESSAGES[LOG_MESSAGES_LEN..]);
|
||||
_ = writeln!(f, "{}", record.args());
|
||||
LOG_MESSAGES_LEN = MAX_LOG_MESSAGE - f.0.len();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn flush(&self) {}
|
||||
}
|
||||
|
||||
pub struct ArenaAllocator<const SIZE: usize> {
|
||||
arena: UnsafeCell<[u8; SIZE]>,
|
||||
head: UnsafeCell<*mut u8>,
|
||||
}
|
||||
|
||||
impl<const SIZE: usize> ArenaAllocator<SIZE> {
|
||||
#[expect(clippy::new_without_default)]
|
||||
pub const fn new() -> Self {
|
||||
ArenaAllocator {
|
||||
arena: UnsafeCell::new([0; SIZE]),
|
||||
head: UnsafeCell::new(core::ptr::null_mut()),
|
||||
}
|
||||
}
|
||||
|
||||
#[expect(clippy::missing_safety_doc)]
|
||||
pub unsafe fn reset(&self) {
|
||||
(*self.head.get()) = self.arena.get().cast::<u8>().add(SIZE);
|
||||
}
|
||||
|
||||
pub fn used(&self) -> usize {
|
||||
unsafe { self.arena.get() as usize + SIZE - (*self.head.get()) as usize }
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl<const SIZE: usize> Sync for ArenaAllocator<SIZE> {}
|
||||
|
||||
unsafe impl<const SIZE: usize> GlobalAlloc for ArenaAllocator<SIZE> {
|
||||
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
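// Bump downward: move the head back by `size`, round it down to `align`,
// and report out-of-memory with a null pointer once the head would cross
// the start of the arena.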
|
||||
let size = layout.size();
|
||||
let align = layout.align();
|
||||
|
||||
let until = self.arena.get() as *mut u8;
|
||||
|
||||
let new_head = (*self.head.get()).sub(size);
|
||||
let aligned_head = (new_head as usize & !(align - 1)) as *mut u8;
|
||||
debug_assert!(aligned_head.is_aligned_to(align));
|
||||
|
||||
if until > aligned_head {
|
||||
return core::ptr::null_mut();
|
||||
}
|
||||
|
||||
*self.head.get() = aligned_head;
|
||||
aligned_head
|
||||
}
|
||||
|
||||
unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
|
||||
/* lol */
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Write<'a>(pub &'a mut [u8]);
|
||||
|
||||
impl core::fmt::Write for Write<'_> {
|
||||
fn write_str(&mut self, s: &str) -> core::fmt::Result {
|
||||
if let Some(m) = self.0.take_mut(..s.len()) {
|
||||
m.copy_from_slice(s.as_bytes());
|
||||
Ok(())
|
||||
} else {
|
||||
Err(core::fmt::Error)
|
||||
}
|
||||
}
|
||||
}
|
100
examples/asm/hello-name.hba
Normal file
|
@ -0,0 +1,100 @@
|
|||
jmp entry
|
||||
|
||||
puts:
|
||||
-- Write string to console
|
||||
-- r2: [IN] *const u8 String pointer
|
||||
-- r3: [IN] usize String length
|
||||
|
||||
li8 r1, 0x1 -- Write syscall
|
||||
brc r2, r3, 2 -- Copy parameters
|
||||
li8 r2, 0x1 -- STDOUT
|
||||
eca
|
||||
|
||||
jal r0, r31, 0
|
||||
|
||||
gets:
|
||||
-- Read string until end of buffer or LF
|
||||
-- r2: [IN] *mut u8 Buffer
|
||||
-- r3: [IN] usize Buffer length
|
||||
|
||||
-- Register allocations:
|
||||
-- r33: *mut u8 Buffer end
|
||||
-- r34: u8 Immediate char
|
||||
-- r35: u8 Const [0x0A = LF]
|
||||
|
||||
li8 r35, 0x0A
|
||||
add64 r33, r2, r3
|
||||
|
||||
-- Setup syscall
|
||||
li8 r2, 0x1 -- Stdin
|
||||
cp r3, r2
|
||||
li8 r4, 0x1 -- Read one char
|
||||
|
||||
jeq r3, r33, end
|
||||
loop:
|
||||
li8 r1, 0x1 -- Read syscall
|
||||
eca
|
||||
addi64 r3, r3, 1
|
||||
ld r34, r3, 0, 1
|
||||
jeq r34, r35, end
|
||||
jne r3, r33, loop
|
||||
|
||||
end:
|
||||
-- Set copied amount
|
||||
sub64 r1, r33, r3
|
||||
addi64 r1, r1, -1
|
||||
jal r0, r31, 0
|
||||
|
||||
alloc-pages:
|
||||
-- Allocate pages
|
||||
-- r1: [OUT] *mut u8 Pointer to page
|
||||
-- r2: [IN] u16 Page count
|
||||
|
||||
muli16 r3, r2, 4096 -- page count
|
||||
li8 r1, 0x9 -- mmap syscall
|
||||
li8 r2, 0x0 -- no address set, kernel chosen
|
||||
li8 r4, 0x2 -- PROT_WRITE
|
||||
li8 r5, 0x20 -- MAP_ANONYMOUS
|
||||
li64 r6, -1 -- Doesn't map file
|
||||
li8 r7, 0x0 -- Doesn't map file
|
||||
eca
|
||||
|
||||
jal r0, r31, 0
|
||||
|
||||
entry:
|
||||
-- Program entrypoint
|
||||
|
||||
-- Register allocations:
|
||||
-- r32: *mut u8 Buffer
|
||||
-- r36: usize Read buffer length
|
||||
|
||||
-- Allocate one page (4 KiB)
|
||||
li8 r2, 1
|
||||
jal r31, 0, alloc-pages
|
||||
cp r32, r1
|
||||
|
||||
-- Print message
|
||||
lra16 r2, r0, #enter-your-name
|
||||
li8 r3, 17
|
||||
jal r31, r0, puts
|
||||
|
||||
-- Read name
|
||||
cp r2, r32
|
||||
li16 r3, 4096
|
||||
jal r31, r0, gets
|
||||
cp r36, r1
|
||||
|
||||
-- Print your name is
|
||||
lra16 r2, r0, #your-name-is
|
||||
li8 r3, 15
|
||||
jal r31, r0, puts
|
||||
|
||||
-- And now print the name
|
||||
cp r2, r32
|
||||
cp r3, r36
|
||||
jal r31, r0, puts
|
||||
|
||||
tx
|
||||
|
||||
#enter-your-name: "Enter your name: "
|
||||
#your-name-is : "\nYour name is: "
|
BIN
examples/bytecode/addition.hbf
Normal file
Binary file not shown.
BIN
examples/bytecode/linux-hello.hbf
Normal file
Binary file not shown.
|
@ -1,10 +0,0 @@
|
|||
[package]
|
||||
name = "hbvm"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
log = "*"
|
||||
hashbrown = "0.13.2"
|
Binary file not shown.
|
@ -1,2 +0,0 @@
|
|||
pub mod ops;
|
||||
pub mod types;
|
|
@ -1,68 +0,0 @@
|
|||
#[repr(u8)]
|
||||
pub enum Operations {
|
||||
NOP = 0,
|
||||
|
||||
ADD = 1,
|
||||
SUB = 2,
|
||||
MUL = 3,
|
||||
DIV = 4,
|
||||
MOD = 5,
|
||||
|
||||
AND = 6,
|
||||
OR = 7,
|
||||
XOR = 8,
|
||||
NOT = 9,
|
||||
|
||||
// LOADs a memory address/constant into a register
|
||||
LOAD = 15,
|
||||
// STOREs a register/constant into a memory address
|
||||
STORE = 16,
|
||||
|
||||
MapPage = 17,
|
||||
UnmapPage = 18,
|
||||
|
||||
// SHIFT LEFT 16 A0
|
||||
Shift = 20,
|
||||
|
||||
JUMP = 100,
|
||||
JumpCond = 101,
|
||||
RET = 103,
|
||||
|
||||
EnviromentCall = 255,
|
||||
}
|
||||
|
||||
pub enum PageMapTypes {
|
||||
// Have the host make a new VMPage
|
||||
VMPage = 0,
|
||||
// Ask the host to map a RealPage into memory
|
||||
RealPage = 1,
|
||||
}
|
||||
|
||||
pub enum MathOpSubTypes {
|
||||
Unsigned = 0,
|
||||
Signed = 1,
|
||||
FloatingPoint = 2,
|
||||
}
|
||||
|
||||
pub enum MathOpSides {
|
||||
RegisterConstant = 0,
|
||||
RegisterRegister = 1,
|
||||
ConstantConstant = 2,
|
||||
ConstantRegister = 3,
|
||||
}
|
||||
|
||||
pub enum RWSubTypes {
|
||||
AddrToReg = 0,
|
||||
RegToAddr,
|
||||
ConstToReg,
|
||||
ConstToAddr,
|
||||
}
|
||||
|
||||
pub enum JumpConditionals {
|
||||
Equal = 0,
|
||||
NotEqual = 1,
|
||||
LessThan = 2,
|
||||
LessThanOrEqualTo = 3,
|
||||
GreaterThan = 4,
|
||||
GreaterThanOrEqualTo = 5,
|
||||
}
|
|
@ -1,9 +0,0 @@
|
|||
|
||||
pub const CONST_U8: u8 = 0x00;
|
||||
pub const CONST_I8: i8 = 0x01;
|
||||
|
||||
pub const CONST_U64: u8 = 0x02;
|
||||
pub const CONST_I64: u8 = 0x03;
|
||||
pub const CONST_F64: u8 = 0x04;
|
||||
|
||||
pub const ADDRESS: u8 = 0x05;
|
|
@ -1,6 +0,0 @@
|
|||
use alloc::vec::Vec;
|
||||
|
||||
pub type CallStack = Vec<FnCall>;
|
||||
pub struct FnCall {
|
||||
pub ret: usize,
|
||||
}
|
|
@ -1,13 +0,0 @@
|
|||
pub struct EngineConfig {
|
||||
pub call_stack_depth: usize,
|
||||
pub quantum: u32,
|
||||
}
|
||||
|
||||
impl EngineConfig {
|
||||
pub fn default() -> Self {
|
||||
Self {
|
||||
call_stack_depth: 32,
|
||||
quantum: 0,
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,3 +0,0 @@
|
|||
use super::Engine;
|
||||
|
||||
pub type EnviromentCall = fn(&mut Engine) -> Result<&mut Engine, u64>;
|
|
@ -1,100 +0,0 @@
|
|||
pub mod call_stack;
|
||||
pub mod config;
|
||||
pub mod enviroment_calls;
|
||||
pub mod regs;
|
||||
#[cfg(test)]
|
||||
pub mod tests;
|
||||
|
||||
use {
|
||||
self::call_stack::CallStack,
|
||||
crate::{memory, HaltStatus, RuntimeErrors},
|
||||
alloc::vec::Vec,
|
||||
config::EngineConfig,
|
||||
log::trace,
|
||||
regs::Registers,
|
||||
};
|
||||
// pub const PAGE_SIZE: usize = 8192;
|
||||
|
||||
pub struct RealPage {
|
||||
pub ptr: *mut u8,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct VMPage {
|
||||
pub data: [u8; 8192],
|
||||
}
|
||||
impl VMPage {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
data: [0; 4096 * 2],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub enum Page {
|
||||
VMPage(VMPage),
|
||||
RealPage(RealPage),
|
||||
}
|
||||
impl Page {
|
||||
pub fn data(&self) -> [u8; 4096 * 2] {
|
||||
match self {
|
||||
Page::VMPage(vmpage) => vmpage.data,
|
||||
Page::RealPage(_) => {
|
||||
unimplemented!("Memmapped hw page not yet supported")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn empty_enviroment_call(engine: &mut Engine) -> Result<&mut Engine, u64> {
|
||||
trace!("Registers {:?}", engine.registers);
|
||||
Err(0)
|
||||
}
|
||||
|
||||
pub struct Engine {
|
||||
pub index: usize,
|
||||
pub program: Vec<u8>,
|
||||
pub registers: Registers,
|
||||
pub config: EngineConfig,
|
||||
|
||||
/// BUG: This DOES NOT account for overflowing
|
||||
pub last_timer_count: u32,
|
||||
pub timer_callback: Option<fn() -> u32>,
|
||||
pub memory: memory::Memory,
|
||||
pub enviroment_call_table: [Option<EnviromentCall>; 256],
|
||||
pub call_stack: CallStack,
|
||||
}
|
||||
use crate::engine::enviroment_calls::EnviromentCall;
|
||||
impl Engine {
|
||||
pub fn set_timer_callback(&mut self, func: fn() -> u32) {
|
||||
self.timer_callback = Some(func);
|
||||
}
|
||||
pub fn set_register(&mut self, register: u8, value: u64) {}
|
||||
}
|
||||
|
||||
impl Engine {
|
||||
pub fn new(program: Vec<u8>) -> Self {
|
||||
let mut mem = memory::Memory::new();
|
||||
for (addr, byte) in program.clone().into_iter().enumerate() {
|
||||
let _ = mem.set_addr8(addr as u64, byte);
|
||||
}
|
||||
trace!("{:?}", mem.read_addr8(0));
|
||||
let ecall_table: [Option<EnviromentCall>; 256] = [None; 256];
|
||||
Self {
|
||||
index: 0,
|
||||
program,
|
||||
registers: Registers::new(),
|
||||
config: EngineConfig::default(),
|
||||
last_timer_count: 0,
|
||||
timer_callback: None,
|
||||
enviroment_call_table: ecall_table,
|
||||
memory: mem,
|
||||
call_stack: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn dump(&self) {}
|
||||
pub fn run(&mut self) -> Result<HaltStatus, RuntimeErrors> {
|
||||
Ok(HaltStatus::Halted)
|
||||
}
|
||||
}
|
|
@ -1,32 +0,0 @@
|
|||
#[rustfmt::skip]
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct Registers {
|
||||
pub a0: u64, pub b0: u64, pub c0: u64, pub d0: u64, pub e0: u64, pub f0: u64,
|
||||
pub a1: u64, pub b1: u64, pub c1: u64, pub d1: u64, pub e1: u64, pub f1: u64,
|
||||
pub a2: u64, pub b2: u64, pub c2: u64, pub d2: u64, pub e2: u64, pub f2: u64,
|
||||
pub a3: u64, pub b3: u64, pub c3: u64, pub d3: u64, pub e3: u64, pub f3: u64,
|
||||
pub a4: u64, pub b4: u64, pub c4: u64, pub d4: u64, pub e4: u64, pub f4: u64,
|
||||
pub a5: u64, pub b5: u64, pub c5: u64, pub d5: u64, pub e5: u64, pub f5: u64,
|
||||
pub a6: u64, pub b6: u64, pub c6: u64, pub d6: u64, pub e6: u64, pub f6: u64,
|
||||
pub a7: u64, pub b7: u64, pub c7: u64, pub d7: u64, pub e7: u64, pub f7: u64,
|
||||
pub a8: u64, pub b8: u64, pub c8: u64, pub d8: u64, pub e8: u64, pub f8: u64,
|
||||
pub a9: u64, pub b9: u64, pub c9: u64, pub d9: u64, pub e9: u64, pub f9: u64,
|
||||
}
|
||||
|
||||
impl Registers {
|
||||
#[rustfmt::skip]
|
||||
pub fn new() -> Self{
|
||||
Self {
|
||||
a0: 0, b0: 0, c0: 0, d0: 0, e0: 0, f0: 0,
|
||||
a1: 0, b1: 0, c1: 0, d1: 0, e1: 0, f1: 0,
|
||||
a2: 0, b2: 0, c2: 0, d2: 0, e2: 0, f2: 0,
|
||||
a3: 0, b3: 0, c3: 0, d3: 0, e3: 0, f3: 0,
|
||||
a4: 0, b4: 0, c4: 0, d4: 0, e4: 0, f4: 0,
|
||||
a5: 0, b5: 0, c5: 0, d5: 0, e5: 0, f5: 0,
|
||||
a6: 0, b6: 0, c6: 0, d6: 0, e6: 0, f6: 0,
|
||||
a7: 0, b7: 0, c7: 0, d7: 0, e7: 0, f7: 0,
|
||||
a8: 0, b8: 0, c8: 0, d8: 0, e8: 0, f8: 0,
|
||||
a9: 0, b9: 0, c9: 0, d9: 0, e9: 0, f9: 0,
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,125 +0,0 @@
|
|||
use {
|
||||
super::Engine,
|
||||
crate::{HaltStatus, RuntimeErrors},
|
||||
alloc::vec,
|
||||
RuntimeErrors::*,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn invalid_program() {
|
||||
let prog = vec![1, 0];
|
||||
let mut eng = Engine::new(prog);
|
||||
let ret = eng.run();
|
||||
assert_eq!(ret, Err(InvalidOpcodePair(1, 0)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty_program() {
|
||||
let prog = vec![];
|
||||
let mut eng = Engine::new(prog);
|
||||
let ret = eng.run();
|
||||
assert_eq!(ret, Ok(HaltStatus::Halted));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn max_quantum_reached() {
|
||||
let prog = vec![0, 0, 0, 0];
|
||||
let mut eng = Engine::new(prog);
|
||||
eng.set_timer_callback(|| {
|
||||
return 1;
|
||||
});
|
||||
eng.config.quantum = 1;
|
||||
let ret = eng.run();
|
||||
assert_eq!(ret, Ok(HaltStatus::Running));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn jump_out_of_bounds() {
|
||||
use crate::bytecode::ops::Operations::JUMP;
|
||||
let prog = vec![JUMP as u8, 0, 0, 0, 0, 0, 0, 1, 0];
|
||||
let mut eng = Engine::new(prog);
|
||||
let ret = eng.run();
|
||||
assert_eq!(ret, Err(InvalidJumpAddress(256)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_system_call() {
|
||||
let prog = vec![255, 0];
|
||||
let mut eng = Engine::new(prog);
|
||||
let ret = eng.run();
|
||||
assert_eq!(ret, Err(InvalidSystemCall(0)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_u8() {
|
||||
use crate::bytecode::ops::{MathOpSides::ConstantConstant, Operations::ADD};
|
||||
|
||||
let prog = vec![ADD as u8, ConstantConstant as u8, 100, 98, 0xA0];
|
||||
let mut eng = Engine::new(prog);
|
||||
let _ = eng.run();
|
||||
assert_eq!(eng.registers.a0, 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn sub_u8() {
|
||||
use crate::bytecode::ops::Operations::SUB;
|
||||
|
||||
let prog = vec![SUB as u8];
|
||||
let mut eng = Engine::new(prog);
|
||||
let _ = eng.run();
|
||||
assert_eq!(eng.registers.a0, 1);
|
||||
}
|
||||
#[test]
|
||||
fn mul_u8() {
|
||||
use crate::bytecode::ops::{MathOpSides::ConstantConstant, Operations::MUL};
|
||||
|
||||
let prog = vec![MUL as u8, ConstantConstant as u8, 1, 2, 0xA0];
|
||||
let mut eng = Engine::new(prog);
|
||||
let _ = eng.run();
|
||||
assert_eq!(eng.registers.a0, 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn div_u8() {
|
||||
use crate::bytecode::ops::Operations::DIV;
|
||||
|
||||
let prog = vec![DIV as u8];
|
||||
let mut eng = Engine::new(prog);
|
||||
let _ = eng.run();
|
||||
assert_eq!(eng.registers.a0, 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn set_register() {
|
||||
let prog = alloc::vec![];
|
||||
let mut eng = Engine::new(prog);
|
||||
eng.set_register(0xA0, 1);
|
||||
assert_eq!(eng.registers.a0, 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn load_u8() {
|
||||
use crate::bytecode::ops::{Operations::LOAD, RWSubTypes::AddrToReg};
|
||||
|
||||
let prog = vec![LOAD as u8, AddrToReg as u8, 0, 0, 0, 0, 0, 0, 1, 0, 0xA0];
|
||||
let mut eng = Engine::new(prog);
|
||||
let ret = eng.memory.set_addr8(256, 1);
|
||||
assert_eq!(ret, Ok(()));
|
||||
let _ = eng.run();
|
||||
assert_eq!(eng.registers.a0, 1);
|
||||
}
|
||||
#[test]
|
||||
fn set_memory_8() {
|
||||
let prog = vec![];
|
||||
let mut eng = Engine::new(prog);
|
||||
let ret = eng.memory.set_addr8(256, 1);
|
||||
assert_eq!(ret, Ok(()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn set_memory_64() {
|
||||
let prog = vec![];
|
||||
let mut eng = Engine::new(prog);
|
||||
let ret = eng.memory.set_addr64(256, 1);
|
||||
assert_eq!(ret, Ok(()));
|
||||
}
|
|
@ -1,23 +0,0 @@
|
|||
#![no_std]
|
||||
extern crate alloc;
|
||||
|
||||
pub mod bytecode;
|
||||
pub mod engine;
|
||||
pub mod memory;
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum RuntimeErrors {
|
||||
InvalidOpcodePair(u8, u8),
|
||||
RegisterTooSmall,
|
||||
HostError(u64),
|
||||
PageNotMapped(u64),
|
||||
InvalidJumpAddress(u64),
|
||||
InvalidSystemCall(u8),
|
||||
}
|
||||
|
||||
// If you solve the halting problem feel free to remove this
|
||||
#[derive(PartialEq, Debug)]
|
||||
pub enum HaltStatus {
|
||||
Halted,
|
||||
Running,
|
||||
}
|
|
@ -1,31 +0,0 @@
|
|||
use hbvm::{
|
||||
bytecode::ops::{Operations::*, RWSubTypes::*},
|
||||
engine::Engine,
|
||||
RuntimeErrors,
|
||||
};
|
||||
|
||||
fn main() -> Result<(), RuntimeErrors> {
|
||||
// TODO: Grab program from cmdline
|
||||
#[rustfmt::skip]
|
||||
let prog: Vec<u8> = vec![
|
||||
NOP as u8,
|
||||
JUMP as u8, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
];
|
||||
|
||||
let mut eng = Engine::new(prog);
|
||||
// eng.set_timer_callback(time);
|
||||
eng.enviroment_call_table[10] = Some(print_fn);
|
||||
eng.run()?;
|
||||
eng.dump();
|
||||
println!("{:#?}", eng.registers);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn time() -> u32 {
|
||||
9
|
||||
}
|
||||
pub fn print_fn(engine: &mut Engine) -> Result<&mut Engine, u64> {
|
||||
println!("hello");
|
||||
Ok(engine)
|
||||
}
|
|
@ -1,70 +0,0 @@
|
|||
use crate::engine::VMPage;
|
||||
|
||||
use {
|
||||
crate::{engine::Page, RuntimeErrors},
|
||||
alloc::vec::Vec,
|
||||
hashbrown::HashMap,
|
||||
log::trace,
|
||||
};
|
||||
|
||||
pub struct Memory {
|
||||
inner: HashMap<u64, Page>,
|
||||
}
|
||||
|
||||
impl Memory {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
inner: HashMap::new(),
|
||||
}
|
||||
//
|
||||
}
|
||||
|
||||
pub fn map_vec(&mut self, address: u64, vec: Vec<u8>) {
|
||||
panic!("Mapping vectors into pages is not supported yet");
|
||||
}
|
||||
}
|
||||
|
||||
impl Memory {
|
||||
pub fn read_addr8(&mut self, address: u64) -> Result<u8, RuntimeErrors> {
|
||||
let (page, offset) = addr_to_page(address);
|
||||
trace!("page {} offset {}", page, offset);
|
||||
match self.inner.get(&page) {
|
||||
Some(page) => {
|
||||
let val = page.data()[offset as usize];
|
||||
trace!("Value {}", val);
|
||||
Ok(val)
|
||||
}
|
||||
None => {
|
||||
trace!("page not mapped");
|
||||
Err(RuntimeErrors::PageNotMapped(page))
|
||||
}
|
||||
}
|
||||
}
|
||||
pub fn read_addr64(&mut self, address: u64) -> u64 {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
pub fn set_addr8(&mut self, address: u64, value: u8) -> Result<(), RuntimeErrors> {
|
||||
let (page, offset) = addr_to_page(address);
|
||||
let ret: Option<(&u64, &mut Page)> = self.inner.get_key_value_mut(&page);
|
||||
match ret {
|
||||
Some((_, page)) => {
|
||||
page.data()[offset as usize] = value;
|
||||
}
|
||||
None => {
|
||||
let mut pg = VMPage::new();
|
||||
pg.data[offset as usize] = value;
|
||||
self.inner.insert(page, Page::VMPage(pg));
|
||||
trace!("Mapped page {}", page);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
pub fn set_addr64(&mut self, address: u64, value: u64) -> Result<(), RuntimeErrors> {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
|
||||
fn addr_to_page(addr: u64) -> (u64, u64) {
|
||||
(addr / 8192, addr % 8192)
|
||||
}
|
23
lang/Cargo.toml
Normal file
|
@ -0,0 +1,23 @@
|
|||
[package]
|
||||
name = "hblang"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[[bin]]
|
||||
name = "hbc"
|
||||
path = "src/main.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "fuzz"
|
||||
path = "src/fuzz_main.rs"
|
||||
|
||||
[dependencies]
|
||||
hbbytecode = { workspace = true, features = ["disasm"] }
|
||||
hbvm = { workspace = true, features = ["nightly"] }
|
||||
hashbrown = { version = "0.15.0", default-features = false, features = ["raw-entry"] }
|
||||
log = "0.4.22"
|
||||
|
||||
[features]
|
||||
default = ["std"]
|
||||
std = []
|
||||
no_log = ["log/max_level_off"]
|
2053
lang/README.md
Normal file
File diff suppressed because one or more lines are too long
5
lang/command-help.txt
Normal file
|
@ -0,0 +1,5 @@
|
|||
--fmt - format all imported source files
|
||||
--fmt-stdout - don't write the formatted file but print it
|
||||
--dump-asm - output assembly instead of raw code, (the assembly is more for debugging the compiler)
|
||||
--threads <1...> - number of extra threads compiler can use [default: 0]
|
||||
--path-resolver <name> - choose between builtin path resolvers, options are: ableos
|
943
lang/src/backend/hbvm.rs
Normal file
|
@ -0,0 +1,943 @@
|
|||
use {
    super::{AssemblySpec, Backend},
    crate::{
        lexer::TokenKind,
        nodes::{Kind, Nid, Nodes, MEM},
        parser,
        ty::{self, Loc, Module, Offset, Size, Types},
        utils::{EntSlice, EntVec},
    },
    alloc::{boxed::Box, collections::BTreeMap, string::String, vec::Vec},
    core::{assert_matches::debug_assert_matches, mem, ops::Range},
    hbbytecode::{self as instrs, *},
    reg::Reg,
};

mod regalloc;

mod reg {
    pub const STACK_PTR: Reg = 254;
    pub const ZERO: Reg = 0;
    pub const RET: Reg = 1;
    pub const RET_ADDR: Reg = 31;

    pub type Reg = u8;
}

fn write_reloc(doce: &mut [u8], offset: usize, value: i64, size: u16) {
    let value = value.to_ne_bytes();
    doce[offset..offset + size as usize].copy_from_slice(&value[..size as usize]);
}

#[derive(Clone, Copy)]
struct TypedReloc {
    target: ty::Id,
    reloc: Reloc,
}

// TODO: make into bit struct (width: u2, sub_offset: u3, offset: u27)
#[derive(Clone, Copy, Debug)]
struct Reloc {
    offset: Offset,
    sub_offset: u8,
    width: u8,
}

impl Reloc {
    fn new(offset: usize, sub_offset: u8, width: u8) -> Self {
        Self { offset: offset as u32, sub_offset, width }
    }

    fn apply_jump(mut self, code: &mut [u8], to: u32, from: u32) -> i64 {
        self.offset += from;
        let offset = to as i64 - self.offset as i64;
        self.write_offset(code, offset);
        offset
    }

    fn write_offset(&self, code: &mut [u8], offset: i64) {
        let bytes = offset.to_ne_bytes();
        let slice = &mut code[self.offset as usize + self.sub_offset as usize..];
        slice[..self.width as usize].copy_from_slice(&bytes[..self.width as usize]);
    }
}
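// A worked example of the relocation math above (values are illustrative):
// `apply_jump` first shifts `offset` by `from` (the patched function's final
// position in the binary), then writes the distance `to - (offset + from)` into
// the `width` bytes starting `sub_offset` bytes into that slot, in native byte
// order via `to_ne_bytes`. So a Reloc { offset: 0, sub_offset: 3, width: 4 }
// recorded for code placed at 16 and targeting 40 stores the distance 24.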
|
||||
|
||||
struct FuncDt {
|
||||
offset: Offset,
|
||||
// TODO: change to indices into common vec
|
||||
relocs: Vec<TypedReloc>,
|
||||
code: Vec<u8>,
|
||||
}
|
||||
|
||||
impl Default for FuncDt {
|
||||
fn default() -> Self {
|
||||
Self { offset: u32::MAX, relocs: Default::default(), code: Default::default() }
|
||||
}
|
||||
}
|
||||
|
||||
struct GlobalDt {
|
||||
offset: Offset,
|
||||
}
|
||||
|
||||
impl Default for GlobalDt {
|
||||
fn default() -> Self {
|
||||
Self { offset: u32::MAX }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Assembler {
|
||||
frontier: Vec<ty::Id>,
|
||||
globals: Vec<ty::Global>,
|
||||
funcs: Vec<ty::Func>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct HbvmBackend {
|
||||
funcs: EntVec<ty::Func, FuncDt>,
|
||||
globals: EntVec<ty::Global, GlobalDt>,
|
||||
asm: Assembler,
|
||||
ralloc: regalloc::Res,
|
||||
|
||||
ret_relocs: Vec<Reloc>,
|
||||
relocs: Vec<TypedReloc>,
|
||||
jump_relocs: Vec<(Nid, Reloc)>,
|
||||
code: Vec<u8>,
|
||||
offsets: Vec<Offset>,
|
||||
}
|
||||
|
||||
impl HbvmBackend {
|
||||
fn emit(&mut self, instr: (usize, [u8; instrs::MAX_SIZE])) {
|
||||
emit(&mut self.code, instr);
|
||||
}
|
||||
}
|
||||
|
||||
impl Backend for HbvmBackend {
|
||||
fn assemble_bin(&mut self, entry: ty::Func, types: &Types, to: &mut Vec<u8>) {
|
||||
to.extend([0u8; HEADER_SIZE]);
|
||||
|
||||
binary_prelude(to);
|
||||
let AssemblySpec { code_length, data_length, entry } =
|
||||
self.assemble_reachable(entry, types, to);
|
||||
|
||||
let exe = AbleOsExecutableHeader {
|
||||
magic_number: [0x15, 0x91, 0xD2],
|
||||
executable_version: 0,
|
||||
code_length,
|
||||
data_length,
|
||||
debug_length: 0,
|
||||
config_length: 0,
|
||||
metadata_length: 0,
|
||||
};
|
||||
Reloc::new(HEADER_SIZE, 3, 4).apply_jump(to, entry, 0);
|
||||
|
||||
unsafe { *to.as_mut_ptr().cast::<AbleOsExecutableHeader>() = exe }
|
||||
}
|
||||
|
||||
fn assemble_reachable(
|
||||
&mut self,
|
||||
from: ty::Func,
|
||||
types: &Types,
|
||||
to: &mut Vec<u8>,
|
||||
) -> AssemblySpec {
|
||||
debug_assert!(self.asm.frontier.is_empty());
|
||||
debug_assert!(self.asm.funcs.is_empty());
|
||||
debug_assert!(self.asm.globals.is_empty());
|
||||
|
||||
self.globals.shadow(types.ins.globals.len());
|
||||
|
||||
self.asm.frontier.push(from.into());
|
||||
while let Some(itm) = self.asm.frontier.pop() {
|
||||
match itm.expand() {
|
||||
ty::Kind::Func(func) => {
|
||||
let fuc = &mut self.funcs[func];
|
||||
debug_assert!(!fuc.code.is_empty());
|
||||
if fuc.offset != u32::MAX {
|
||||
continue;
|
||||
}
|
||||
fuc.offset = 0;
|
||||
self.asm.funcs.push(func);
|
||||
self.asm.frontier.extend(fuc.relocs.iter().map(|r| r.target));
|
||||
}
|
||||
ty::Kind::Global(glob) => {
|
||||
let glb = &mut self.globals[glob];
|
||||
if glb.offset != u32::MAX {
|
||||
continue;
|
||||
}
|
||||
glb.offset = 0;
|
||||
self.asm.globals.push(glob);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
let init_len = to.len();
|
||||
|
||||
for &func in &self.asm.funcs {
|
||||
let fuc = &mut self.funcs[func];
|
||||
fuc.offset = to.len() as _;
|
||||
debug_assert!(!fuc.code.is_empty());
|
||||
to.extend(&fuc.code);
|
||||
}
|
||||
|
||||
let code_length = to.len() - init_len;
|
||||
|
||||
for global in self.asm.globals.drain(..) {
|
||||
self.globals[global].offset = to.len() as _;
|
||||
to.extend(&types.ins.globals[global].data);
|
||||
}
|
||||
|
||||
let data_length = to.len() - code_length - init_len;
|
||||
|
||||
for func in self.asm.funcs.drain(..) {
|
||||
let fuc = &self.funcs[func];
|
||||
for rel in &fuc.relocs {
|
||||
let offset = match rel.target.expand() {
|
||||
ty::Kind::Func(fun) => self.funcs[fun].offset,
|
||||
ty::Kind::Global(glo) => self.globals[glo].offset,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
rel.reloc.apply_jump(to, offset, fuc.offset);
|
||||
}
|
||||
}
|
||||
|
||||
AssemblySpec {
|
||||
code_length: code_length as _,
|
||||
data_length: data_length as _,
|
||||
entry: self.funcs[from].offset,
|
||||
}
|
||||
}
|
||||
|
||||
fn disasm<'a>(
|
||||
&'a self,
|
||||
mut sluce: &[u8],
|
||||
eca_handler: &mut dyn FnMut(&mut &[u8]),
|
||||
types: &'a Types,
|
||||
files: &'a EntSlice<Module, parser::Ast>,
|
||||
output: &mut String,
|
||||
) -> Result<(), hbbytecode::DisasmError<'a>> {
|
||||
use hbbytecode::DisasmItem;
|
||||
let functions = types
|
||||
.ins
|
||||
.funcs
|
||||
.iter()
|
||||
.zip(self.funcs.iter())
|
||||
.filter(|(_, f)| f.offset != u32::MAX)
|
||||
.map(|(f, fd)| {
|
||||
let name = if f.file != Module::default() {
|
||||
let file = &files[f.file];
|
||||
file.ident_str(f.name)
|
||||
} else {
|
||||
"target_fn"
|
||||
};
|
||||
(fd.offset, (name, fd.code.len() as u32, DisasmItem::Func))
|
||||
})
|
||||
.chain(
|
||||
types
|
||||
.ins
|
||||
.globals
|
||||
.iter()
|
||||
.zip(self.globals.iter())
|
||||
.filter(|(_, g)| g.offset != u32::MAX)
|
||||
.map(|(g, gd)| {
|
||||
let name = if g.file == Module::default() {
|
||||
core::str::from_utf8(&g.data).unwrap_or("invalid utf-8")
|
||||
} else {
|
||||
let file = &files[g.file];
|
||||
file.ident_str(g.name)
|
||||
};
|
||||
(gd.offset, (name, g.data.len() as Size, DisasmItem::Global))
|
||||
}),
|
||||
)
|
||||
.collect::<BTreeMap<_, _>>();
|
||||
hbbytecode::disasm(&mut sluce, &functions, output, eca_handler)
|
||||
}
|
||||
|
||||
fn emit_ct_body(
|
||||
&mut self,
|
||||
id: ty::Func,
|
||||
nodes: &Nodes,
|
||||
tys: &Types,
|
||||
files: &EntSlice<Module, parser::Ast>,
|
||||
) {
|
||||
self.emit_body(id, nodes, tys, files);
|
||||
let fd = &mut self.funcs[id];
|
||||
fd.code.truncate(fd.code.len() - instrs::jala(0, 0, 0).0);
|
||||
emit(&mut fd.code, instrs::tx());
|
||||
}
|
||||
|
||||
fn emit_body(
|
||||
&mut self,
|
||||
id: ty::Func,
|
||||
nodes: &Nodes,
|
||||
tys: &Types,
|
||||
files: &EntSlice<Module, parser::Ast>,
|
||||
) {
|
||||
let sig = tys.ins.funcs[id].sig;
|
||||
|
||||
debug_assert!(self.code.is_empty());
|
||||
|
||||
self.offsets.clear();
|
||||
self.offsets.resize(nodes.len(), Offset::MAX);
|
||||
|
||||
let mut stack_size = 0;
|
||||
'_compute_stack: {
|
||||
let mems = &nodes[MEM].outputs;
|
||||
for &stck in mems.iter() {
|
||||
if !matches!(nodes[stck].kind, Kind::Stck | Kind::Arg) {
|
||||
debug_assert_matches!(
|
||||
nodes[stck].kind,
|
||||
Kind::Phi
|
||||
| Kind::Return { .. }
|
||||
| Kind::Load
|
||||
| Kind::Call { .. }
|
||||
| Kind::Stre
|
||||
| Kind::Join
|
||||
);
|
||||
continue;
|
||||
}
|
||||
stack_size += tys.size_of(nodes[stck].ty);
|
||||
self.offsets[stck as usize] = stack_size;
|
||||
}
|
||||
for &stck in mems.iter() {
|
||||
if !matches!(nodes[stck].kind, Kind::Stck | Kind::Arg) {
|
||||
continue;
|
||||
}
|
||||
self.offsets[stck as usize] = stack_size - self.offsets[stck as usize];
|
||||
}
|
||||
}
|
||||
|
||||
let (saved, tail) = self.emit_body_code(nodes, sig, tys, files);
|
||||
|
||||
if let Some(last_ret) = self.ret_relocs.last()
|
||||
&& last_ret.offset as usize == self.code.len() - 5
|
||||
&& self
|
||||
.jump_relocs
|
||||
.last()
|
||||
.is_none_or(|&(r, _)| self.offsets[r as usize] as usize != self.code.len())
|
||||
{
|
||||
self.code.truncate(self.code.len() - 5);
|
||||
self.ret_relocs.pop();
|
||||
}
|
||||
|
||||
for (nd, rel) in self.jump_relocs.drain(..) {
|
||||
let offset = self.offsets[nd as usize];
|
||||
rel.apply_jump(&mut self.code, offset, 0);
|
||||
}
|
||||
|
||||
let end = self.code.len();
|
||||
for ret_rel in self.ret_relocs.drain(..) {
|
||||
ret_rel.apply_jump(&mut self.code, end as _, 0);
|
||||
}
|
||||
|
||||
let mut stripped_prelude_size = 0;
|
||||
'_close_function: {
|
||||
let pushed = (saved as i64 + !tail as i64) * 8;
|
||||
let stack = stack_size as i64;
|
||||
|
||||
let add_len = instrs::addi64(0, 0, 0).0;
|
||||
let st_len = instrs::st(0, 0, 0, 0).0;
|
||||
|
||||
match (pushed, stack) {
|
||||
(0, 0) => {
|
||||
stripped_prelude_size = add_len + st_len;
|
||||
self.code.drain(0..stripped_prelude_size);
|
||||
break '_close_function;
|
||||
}
|
||||
(0, stack) => {
|
||||
write_reloc(&mut self.code, 3, -stack, 8);
|
||||
stripped_prelude_size = st_len;
|
||||
let end = add_len + st_len;
|
||||
self.code.drain(add_len..end);
|
||||
self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, stack as _));
|
||||
break '_close_function;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
write_reloc(&mut self.code, 3, -(pushed + stack), 8);
|
||||
write_reloc(&mut self.code, 3 + 8 + 3, stack, 8);
|
||||
write_reloc(&mut self.code, 3 + 8 + 3 + 8, pushed, 2);
|
||||
|
||||
self.emit(instrs::ld(
|
||||
reg::RET_ADDR + tail as u8,
|
||||
reg::STACK_PTR,
|
||||
stack as _,
|
||||
pushed as _,
|
||||
));
|
||||
self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, (pushed + stack) as _));
|
||||
}
|
||||
self.relocs.iter_mut().for_each(|r| r.reloc.offset -= stripped_prelude_size as u32);
|
||||
if sig.ret != ty::Id::NEVER {
|
||||
self.emit(instrs::jala(reg::ZERO, reg::RET_ADDR, 0));
|
||||
}
|
||||
|
||||
self.funcs.shadow(tys.ins.funcs.len());
|
||||
self.funcs[id].code = mem::take(&mut self.code);
|
||||
self.funcs[id].relocs = mem::take(&mut self.relocs);
|
||||
|
||||
debug_assert_eq!(self.ret_relocs.len(), 0);
|
||||
debug_assert_eq!(self.relocs.len(), 0);
|
||||
debug_assert_eq!(self.jump_relocs.len(), 0);
|
||||
debug_assert_eq!(self.code.len(), 0);
|
||||
}
|
||||
}
|
||||
|
||||
impl Nodes {
|
||||
fn cond_op(&self, cnd: Nid) -> CondRet {
|
||||
let Kind::BinOp { op } = self[cnd].kind else { return None };
|
||||
if self.is_unlocked(cnd) {
|
||||
return None;
|
||||
}
|
||||
op.cond_op(self[self[cnd].inputs[1]].ty)
|
||||
}
|
||||
|
||||
fn strip_offset(&self, region: Nid) -> (Nid, Offset) {
|
||||
if matches!(self[region].kind, Kind::BinOp { op: TokenKind::Add | TokenKind::Sub })
|
||||
&& self.is_locked(region)
|
||||
&& let Kind::CInt { value } = self[self[region].inputs[2]].kind
|
||||
{
|
||||
(self[region].inputs[1], value as _)
|
||||
} else {
|
||||
(region, 0)
|
||||
}
|
||||
}
|
||||
|
||||
fn is_never_used(&self, nid: Nid, tys: &Types) -> bool {
|
||||
let node = &self[nid];
|
||||
match node.kind {
|
||||
Kind::CInt { value: 0 } => false,
|
||||
Kind::CInt { value: 1.. } => node.outputs.iter().all(|&o| {
|
||||
matches!(self[o].kind, Kind::BinOp { op }
|
||||
if op.imm_binop(self[o].ty).is_some()
|
||||
&& self.is_const(self[o].inputs[2])
|
||||
&& op.cond_op(self[o].ty).is_none())
|
||||
}),
|
||||
Kind::BinOp { op: TokenKind::Mul } if node.ty.is_float() => {
|
||||
node.outputs.iter().all(|&n| {
|
||||
self[n].kind == Kind::BinOp { op: TokenKind::Add } && self[n].inputs[1] == nid
|
||||
})
|
||||
}
|
||||
Kind::BinOp { op: TokenKind::Add | TokenKind::Sub } => {
|
||||
(self.is_locked(node.inputs[1]) && !self[node.inputs[1]].ty.is_float())
|
||||
|| (self.is_const(node.inputs[2])
|
||||
&& node.outputs.iter().all(|&n| self.uses_direct_offset_of(n, nid, tys)))
|
||||
}
|
||||
Kind::BinOp { op } => {
|
||||
op.cond_op(self[node.inputs[1]].ty).is_some()
|
||||
&& node.outputs.iter().all(|&n| self[n].kind == Kind::If)
|
||||
}
|
||||
Kind::Stck if tys.size_of(node.ty) == 0 => true,
|
||||
Kind::Stck | Kind::Arg => node.outputs.iter().all(|&n| {
|
||||
self.uses_direct_offset_of(n, nid, tys)
|
||||
|| (matches!(self[n].kind, Kind::BinOp { op: TokenKind::Add })
|
||||
&& self.is_never_used(n, tys))
|
||||
}),
|
||||
Kind::Load { .. } => node.ty.loc(tys) == Loc::Stack,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn uses_direct_offset_of(&self, user: Nid, target: Nid, tys: &Types) -> bool {
|
||||
let node = &self[user];
|
||||
((node.kind == Kind::Stre && node.inputs[2] == target)
|
||||
|| (node.kind == Kind::Load && node.inputs[1] == target))
|
||||
&& (node.ty.loc(tys) == Loc::Reg
|
||||
// this means the struct is actually loaded into a register so no BMC needed
|
||||
|| (node.kind == Kind::Load
|
||||
&& !matches!(tys.parama(node.ty).0, Some(PLoc::Ref(..)))
|
||||
&& node.outputs.iter().all(|&o| matches!(self[o].kind, Kind::Call { .. } | Kind::Return { .. }))))
|
||||
}
|
||||
}
|
||||
|
||||
impl HbvmBackend {
|
||||
fn extend(
|
||||
&mut self,
|
||||
base: ty::Id,
|
||||
dest: ty::Id,
|
||||
reg: Reg,
|
||||
tys: &Types,
|
||||
files: &EntSlice<Module, parser::Ast>,
|
||||
) {
|
||||
if reg == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
let (bsize, dsize) = (tys.size_of(base), tys.size_of(dest));
|
||||
debug_assert!(bsize <= 8, "{}", ty::Display::new(tys, files, base));
|
||||
debug_assert!(dsize <= 8, "{}", ty::Display::new(tys, files, dest));
|
||||
if bsize == dsize {
|
||||
return Default::default();
|
||||
}
|
||||
self.emit(match (base.is_signed(), dest.is_signed()) {
|
||||
(true, true) => {
|
||||
let op = [instrs::sxt8, instrs::sxt16, instrs::sxt32][bsize.ilog2() as usize];
|
||||
op(reg, reg)
|
||||
}
|
||||
_ => {
|
||||
let mask = (1u64 << (bsize * 8)) - 1;
|
||||
instrs::andi(reg, reg, mask)
|
||||
}
|
||||
});
|
||||
}
|
||||
}
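// Illustrative cases for `extend` above: widening a signed 1-byte value picks
// `sxt8` (sign extension), widening an unsigned 2-byte value masks the register
// with `andi(reg, reg, 0xffff)`, and equal sizes or the zero register are a no-op.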
|
||||
|
||||
type CondRet = Option<(fn(u8, u8, i16) -> EncodedInstr, bool)>;
|
||||
|
||||
impl TokenKind {
|
||||
fn cmp_against(self) -> Option<u64> {
|
||||
Some(match self {
|
||||
Self::Le | Self::Gt => 1,
|
||||
Self::Ne | Self::Eq => 0,
|
||||
Self::Ge | Self::Lt => (-1i64) as _,
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
|
||||
fn float_cmp(self, ty: ty::Id) -> Option<fn(u8, u8, u8) -> EncodedInstr> {
|
||||
if !ty.is_float() {
|
||||
return None;
|
||||
}
|
||||
let size = ty.simple_size().unwrap();
|
||||
|
||||
let ops = match self {
|
||||
Self::Gt => [instrs::fcmpgt32, instrs::fcmpgt64],
|
||||
Self::Lt => [instrs::fcmplt32, instrs::fcmplt64],
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
Some(ops[size.ilog2() as usize - 2])
|
||||
}
|
||||
|
||||
fn cond_op(self, ty: ty::Id) -> CondRet {
|
||||
let signed = ty.is_signed();
|
||||
Some((
|
||||
match self {
|
||||
Self::Eq => instrs::jne,
|
||||
Self::Ne => instrs::jeq,
|
||||
_ if ty.is_float() => return None,
|
||||
Self::Le if signed => instrs::jgts,
|
||||
Self::Le => instrs::jgtu,
|
||||
Self::Lt if signed => instrs::jlts,
|
||||
Self::Lt => instrs::jltu,
|
||||
Self::Ge if signed => instrs::jlts,
|
||||
Self::Ge => instrs::jltu,
|
||||
Self::Gt if signed => instrs::jgts,
|
||||
Self::Gt => instrs::jgtu,
|
||||
_ => return None,
|
||||
},
|
||||
matches!(self, Self::Lt | Self::Gt),
|
||||
))
|
||||
}
|
||||
|
||||
fn binop(self, ty: ty::Id) -> Option<fn(u8, u8, u8) -> EncodedInstr> {
|
||||
let size = ty.simple_size().unwrap_or_else(|| panic!("{:?}", ty.expand()));
|
||||
if ty.is_integer() || ty == ty::Id::BOOL || ty.is_pointer() {
|
||||
macro_rules! div { ($($op:ident),*) => {[$(|a, b, c| $op(a, 0, b, c)),*]}; }
|
||||
macro_rules! rem { ($($op:ident),*) => {[$(|a, b, c| $op(0, a, b, c)),*]}; }
|
||||
let signed = ty.is_signed();
|
||||
|
||||
let ops = match self {
|
||||
Self::Add => [add8, add16, add32, add64],
|
||||
Self::Sub => [sub8, sub16, sub32, sub64],
|
||||
Self::Mul => [mul8, mul16, mul32, mul64],
|
||||
Self::Div if signed => div!(dirs8, dirs16, dirs32, dirs64),
|
||||
Self::Div => div!(diru8, diru16, diru32, diru64),
|
||||
Self::Mod if signed => rem!(dirs8, dirs16, dirs32, dirs64),
|
||||
Self::Mod => rem!(diru8, diru16, diru32, diru64),
|
||||
Self::Band => return Some(and),
|
||||
Self::Bor => return Some(or),
|
||||
Self::Xor => return Some(xor),
|
||||
Self::Shl => [slu8, slu16, slu32, slu64],
|
||||
Self::Shr if signed => [srs8, srs16, srs32, srs64],
|
||||
Self::Shr => [sru8, sru16, sru32, sru64],
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
Some(ops[size.ilog2() as usize])
|
||||
} else {
|
||||
debug_assert!(ty.is_float(), "{self} {ty:?}");
|
||||
let ops = match self {
|
||||
Self::Add => [fadd32, fadd64],
|
||||
Self::Sub => [fsub32, fsub64],
|
||||
Self::Mul => [fmul32, fmul64],
|
||||
Self::Div => [fdiv32, fdiv64],
|
||||
_ => return None,
|
||||
};
|
||||
Some(ops[size.ilog2() as usize - 2])
|
||||
}
|
||||
}
|
||||
|
||||
fn imm_binop(self, ty: ty::Id) -> Option<fn(u8, u8, u64) -> EncodedInstr> {
|
||||
macro_rules! def_op {
|
||||
($name:ident |$a:ident, $b:ident, $c:ident| $($tt:tt)*) => {
|
||||
macro_rules! $name {
|
||||
($$($$op:ident),*) => {
|
||||
[$$(
|
||||
|$a, $b, $c: u64| $$op($($tt)*),
|
||||
)*]
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
if ty.is_float() {
|
||||
return None;
|
||||
}
|
||||
|
||||
def_op!(basic_op | a, b, c | a, b, c as _);
|
||||
def_op!(sub_op | a, b, c | a, b, c.wrapping_neg() as _);
|
||||
|
||||
let signed = ty.is_signed();
|
||||
let ops = match self {
|
||||
Self::Add => basic_op!(addi8, addi16, addi32, addi64),
|
||||
Self::Sub => sub_op!(addi8, addi16, addi32, addi64),
|
||||
Self::Mul => basic_op!(muli8, muli16, muli32, muli64),
|
||||
Self::Band => return Some(andi),
|
||||
Self::Bor => return Some(ori),
|
||||
Self::Xor => return Some(xori),
|
||||
Self::Shr if signed => basic_op!(srsi8, srsi16, srsi32, srsi64),
|
||||
Self::Shr => basic_op!(srui8, srui16, srui32, srui64),
|
||||
Self::Shl => basic_op!(slui8, slui16, slui32, slui64),
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
let size = ty.simple_size().unwrap();
|
||||
Some(ops[size.ilog2() as usize])
|
||||
}
|
||||
|
||||
fn unop(&self, dst: ty::Id, src: ty::Id, tys: &Types) -> Option<fn(u8, u8) -> EncodedInstr> {
|
||||
let src_idx = tys.size_of(src).ilog2() as usize;
|
||||
Some(match self {
|
||||
Self::Sub => [
|
||||
|a, b| sub8(a, reg::ZERO, b),
|
||||
|a, b| sub16(a, reg::ZERO, b),
|
||||
|a, b| sub32(a, reg::ZERO, b),
|
||||
|a, b| sub64(a, reg::ZERO, b),
|
||||
][src_idx],
|
||||
Self::Not => instrs::not,
|
||||
Self::Float if dst.is_float() && src.is_integer() => {
|
||||
debug_assert_matches!(
|
||||
(dst.simple_size(), src.simple_size()),
|
||||
(Some(4 | 8), Some(8))
|
||||
);
|
||||
[instrs::itf32, instrs::itf64][dst.simple_size().unwrap().ilog2() as usize - 2]
|
||||
}
|
||||
Self::Number if src.is_float() && dst.is_integer() => {
|
||||
[|a, b| instrs::fti32(a, b, 1), |a, b| instrs::fti64(a, b, 1)][src_idx - 2]
|
||||
}
|
||||
Self::Number if src.is_signed() && (dst.is_integer() || dst.is_pointer()) => {
|
||||
[instrs::sxt8, instrs::sxt16, instrs::sxt32][src_idx]
|
||||
}
|
||||
Self::Number
|
||||
if (src.is_unsigned() || src == ty::Id::BOOL)
|
||||
&& (dst.is_integer() || dst.is_pointer()) =>
|
||||
{
|
||||
[
|
||||
|a, b| instrs::andi(a, b, 0xff),
|
||||
|a, b| instrs::andi(a, b, 0xffff),
|
||||
|a, b| instrs::andi(a, b, 0xffffffff),
|
||||
][src_idx]
|
||||
}
|
||||
Self::Float if dst.is_float() && src.is_float() => {
|
||||
[instrs::fc32t64, |a, b| instrs::fc64t32(a, b, 1)][src_idx - 2]
|
||||
}
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
enum PLoc {
    Reg(Reg, u16),
    WideReg(Reg, u16),
    Ref(Reg, u32),
}

impl PLoc {
    fn reg(self) -> u8 {
        match self {
            PLoc::Reg(r, _) | PLoc::WideReg(r, _) | PLoc::Ref(r, _) => r,
        }
    }
}

struct ParamAlloc(Range<Reg>);

impl ParamAlloc {
    pub fn next(&mut self, ty: ty::Id, tys: &Types) -> Option<PLoc> {
        Some(match tys.size_of(ty) {
            0 => return None,
            size @ 1..=8 => PLoc::Reg(self.0.next().unwrap(), size as _),
            size @ 9..=16 => PLoc::WideReg(self.0.next_chunk::<2>().unwrap()[0], size as _),
            size @ 17.. => PLoc::Ref(self.0.next().unwrap(), size),
        })
    }
}

impl Types {
    fn parama(&self, ret: ty::Id) -> (Option<PLoc>, ParamAlloc) {
        let mut iter = ParamAlloc(1..12);
        let ret = iter.next(ret, self);
        iter.0.start += ret.is_none() as u8;
        (ret, iter)
    }
}
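// Illustrative parameter mapping under the allocator above, assuming a small
// (at most 8-byte) or zero-sized return: the return slot is drawn from r1
// first, and arguments always start at r2. A 4-byte argument then becomes
// PLoc::Reg(2, 4), a following 16-byte struct becomes PLoc::WideReg(3, 16)
// spanning r3..r4, and a 24-byte struct is passed by reference as PLoc::Ref(5, 24).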
|
||||
|
||||
type EncodedInstr = (usize, [u8; instrs::MAX_SIZE]);
fn emit(out: &mut Vec<u8>, (len, instr): EncodedInstr) {
    out.extend_from_slice(&instr[..len]);
}

fn binary_prelude(to: &mut Vec<u8>) {
    emit(to, instrs::jal(reg::RET_ADDR, reg::ZERO, 0));
    emit(to, instrs::tx());
}
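// The two instructions above form the executable prelude: a `jal` into the
// entry function (its immediate is patched later by `assemble_bin` through
// `Reloc::new(HEADER_SIZE, 3, 4)`) followed by `tx`, which terminates the VM
// once the entry function returns.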
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct LoggedMem {
|
||||
pub mem: hbvm::mem::HostMemory,
|
||||
op_buf: Vec<hbbytecode::Oper>,
|
||||
disp_buf: String,
|
||||
prev_instr: Option<hbbytecode::Instr>,
|
||||
}
|
||||
|
||||
impl LoggedMem {
|
||||
unsafe fn display_instr<T>(&mut self, instr: hbbytecode::Instr, addr: hbvm::mem::Address) {
|
||||
let novm: *const hbvm::Vm<Self, 0> = core::ptr::null();
|
||||
let offset = core::ptr::addr_of!((*novm).memory) as usize;
|
||||
let regs = unsafe {
|
||||
&*core::ptr::addr_of!(
|
||||
(*(((self as *mut _ as *mut u8).sub(offset)) as *const hbvm::Vm<Self, 0>))
|
||||
.registers
|
||||
)
|
||||
};
|
||||
|
||||
let mut bytes = core::slice::from_raw_parts(
|
||||
(addr.get() - 1) as *const u8,
|
||||
core::mem::size_of::<T>() + 1,
|
||||
);
|
||||
use core::fmt::Write;
|
||||
hbbytecode::parse_args(&mut bytes, instr, &mut self.op_buf).unwrap();
|
||||
debug_assert!(bytes.is_empty());
|
||||
self.disp_buf.clear();
|
||||
write!(self.disp_buf, "{:<10}", format!("{instr:?}")).unwrap();
|
||||
for (i, op) in self.op_buf.drain(..).enumerate() {
|
||||
if i != 0 {
|
||||
write!(self.disp_buf, ", ").unwrap();
|
||||
}
|
||||
write!(self.disp_buf, "{op:?}").unwrap();
|
||||
if let hbbytecode::Oper::R(r) = op {
|
||||
write!(self.disp_buf, "({})", regs[r as usize].0).unwrap()
|
||||
}
|
||||
}
|
||||
log::trace!("read-typed: {:x}: {}", addr.get(), self.disp_buf);
|
||||
}
|
||||
}
|
||||
|
||||
impl hbvm::mem::Memory for LoggedMem {
|
||||
unsafe fn load(
|
||||
&mut self,
|
||||
addr: hbvm::mem::Address,
|
||||
target: *mut u8,
|
||||
count: usize,
|
||||
) -> Result<(), hbvm::mem::LoadError> {
|
||||
log::trace!(
|
||||
"load: {:x} {}",
|
||||
addr.get(),
|
||||
AsHex(core::slice::from_raw_parts(addr.get() as *const u8, count))
|
||||
);
|
||||
self.mem.load(addr, target, count)
|
||||
}
|
||||
|
||||
unsafe fn store(
|
||||
&mut self,
|
||||
addr: hbvm::mem::Address,
|
||||
source: *const u8,
|
||||
count: usize,
|
||||
) -> Result<(), hbvm::mem::StoreError> {
|
||||
log::trace!(
|
||||
"store: {:x} {}",
|
||||
addr.get(),
|
||||
AsHex(core::slice::from_raw_parts(source, count))
|
||||
);
|
||||
self.mem.store(addr, source, count)
|
||||
}
|
||||
|
||||
unsafe fn prog_read<T: Copy + 'static>(&mut self, addr: hbvm::mem::Address) -> T {
|
||||
if log::log_enabled!(log::Level::Trace) {
|
||||
if core::any::TypeId::of::<u8>() == core::any::TypeId::of::<T>() {
|
||||
if let Some(instr) = self.prev_instr {
|
||||
self.display_instr::<()>(instr, addr);
|
||||
}
|
||||
self.prev_instr = hbbytecode::Instr::try_from(*(addr.get() as *const u8)).ok();
|
||||
} else {
|
||||
let instr = self.prev_instr.take().unwrap();
|
||||
self.display_instr::<T>(instr, addr);
|
||||
}
|
||||
}
|
||||
|
||||
self.mem.prog_read(addr)
|
||||
}
|
||||
}
|
||||
|
||||
struct AsHex<'a>(&'a [u8]);
|
||||
|
||||
impl core::fmt::Display for AsHex<'_> {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
for &b in self.0 {
|
||||
write!(f, "{b:02x}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
const VM_STACK_SIZE: usize = 1024 * 64;
|
||||
|
||||
pub struct Comptime {
|
||||
pub vm: hbvm::Vm<LoggedMem, { 1024 * 10 }>,
|
||||
stack: Box<[u8; VM_STACK_SIZE]>,
|
||||
pub code: Vec<u8>,
|
||||
depth: usize,
|
||||
}
|
||||
|
||||
impl Comptime {
|
||||
pub fn run(&mut self, ret_loc: &mut [u8], offset: u32) -> u64 {
|
||||
self.vm.write_reg(reg::RET, ret_loc.as_mut_ptr() as u64);
|
||||
let prev_pc = self.push_pc(offset);
|
||||
loop {
|
||||
match self.vm.run().expect("TODO") {
|
||||
hbvm::VmRunOk::End => break,
|
||||
hbvm::VmRunOk::Timer => todo!(),
|
||||
hbvm::VmRunOk::Ecall => todo!(),
|
||||
hbvm::VmRunOk::Breakpoint => todo!(),
|
||||
}
|
||||
}
|
||||
self.pop_pc(prev_pc);
|
||||
|
||||
if let len @ 1..=8 = ret_loc.len() {
|
||||
ret_loc.copy_from_slice(&self.vm.read_reg(reg::RET).0.to_ne_bytes()[..len])
|
||||
}
|
||||
|
||||
self.vm.read_reg(reg::RET).0
|
||||
}
|
||||
|
||||
pub fn reset(&mut self) {
|
||||
let ptr = unsafe { self.stack.as_mut_ptr().cast::<u8>().add(VM_STACK_SIZE) as u64 };
|
||||
self.vm.registers.fill(hbvm::value::Value(0));
|
||||
self.vm.write_reg(reg::STACK_PTR, ptr);
|
||||
self.vm.pc = hbvm::mem::Address::new(self.code.as_ptr() as u64 + HEADER_SIZE as u64);
|
||||
}
|
||||
|
||||
fn push_pc(&mut self, offset: Offset) -> hbvm::mem::Address {
|
||||
let entry = &mut self.code[offset as usize] as *mut _ as _;
|
||||
core::mem::replace(&mut self.vm.pc, hbvm::mem::Address::new(entry))
|
||||
- self.code.as_ptr() as usize
|
||||
}
|
||||
|
||||
fn pop_pc(&mut self, prev_pc: hbvm::mem::Address) {
|
||||
self.vm.pc = prev_pc + self.code.as_ptr() as usize;
|
||||
}
|
||||
|
||||
pub fn clear(&mut self) {
|
||||
self.code.clear();
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn active(&self) -> bool {
|
||||
self.depth != 0
|
||||
}
|
||||
|
||||
pub fn activate(&mut self) {
|
||||
self.depth += 1;
|
||||
}
|
||||
|
||||
pub fn deactivate(&mut self) {
|
||||
self.depth -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Comptime {
|
||||
fn default() -> Self {
|
||||
let mut stack = Box::<[u8; VM_STACK_SIZE]>::new_uninit();
|
||||
let mut vm = hbvm::Vm::default();
|
||||
let ptr = unsafe { stack.as_mut_ptr().cast::<u8>().add(VM_STACK_SIZE) as u64 };
|
||||
vm.write_reg(reg::STACK_PTR, ptr);
|
||||
Self { vm, stack: unsafe { stack.assume_init() }, code: Default::default(), depth: 0 }
|
||||
}
|
||||
}
|
||||
|
||||
const HEADER_SIZE: usize = core::mem::size_of::<AbleOsExecutableHeader>();
|
||||
|
||||
#[repr(packed)]
|
||||
#[expect(dead_code)]
|
||||
pub struct AbleOsExecutableHeader {
|
||||
magic_number: [u8; 3],
|
||||
executable_version: u32,
|
||||
|
||||
code_length: u64,
|
||||
data_length: u64,
|
||||
debug_length: u64,
|
||||
config_length: u64,
|
||||
metadata_length: u64,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn test_run_vm(out: &[u8], output: &mut String) {
|
||||
use core::{ffi::CStr, fmt::Write};
|
||||
|
||||
let mut stack = [0_u64; 1024 * 20];
|
||||
|
||||
let mut vm = unsafe {
|
||||
hbvm::Vm::<_, { 1024 * 100 }>::new(
|
||||
LoggedMem::default(),
|
||||
hbvm::mem::Address::new(out.as_ptr() as u64).wrapping_add(HEADER_SIZE),
|
||||
)
|
||||
};
|
||||
|
||||
vm.write_reg(reg::STACK_PTR, unsafe { stack.as_mut_ptr().add(stack.len()) } as u64);
|
||||
|
||||
let stat = loop {
|
||||
match vm.run() {
|
||||
Ok(hbvm::VmRunOk::End) => break Ok(()),
|
||||
Ok(hbvm::VmRunOk::Ecall) => match vm.read_reg(2).0 {
|
||||
37 => writeln!(
|
||||
output,
|
||||
"{}",
|
||||
unsafe { CStr::from_ptr(vm.read_reg(3).0 as _) }.to_str().unwrap()
|
||||
)
|
||||
.unwrap(),
|
||||
1 => writeln!(output, "ev: Ecall").unwrap(), // compatibility with a test
|
||||
69 => {
|
||||
let [size, align] = [vm.read_reg(3).0 as usize, vm.read_reg(4).0 as usize];
|
||||
let layout = core::alloc::Layout::from_size_align(size, align).unwrap();
|
||||
let ptr = unsafe { alloc::alloc::alloc(layout) };
|
||||
vm.write_reg(1, ptr as u64);
|
||||
}
|
||||
96 => {
|
||||
let [ptr, size, align] = [
|
||||
vm.read_reg(3).0 as usize,
|
||||
vm.read_reg(4).0 as usize,
|
||||
vm.read_reg(5).0 as usize,
|
||||
];
|
||||
|
||||
let layout = core::alloc::Layout::from_size_align(size, align).unwrap();
|
||||
unsafe { alloc::alloc::dealloc(ptr as *mut u8, layout) };
|
||||
}
|
||||
3 => vm.write_reg(1, 42),
|
||||
8 => {}
|
||||
unknown => writeln!(output, "unknown ecall: {unknown:?}").unwrap(),
|
||||
},
|
||||
Ok(hbvm::VmRunOk::Timer) => {
|
||||
writeln!(output, "timed out").unwrap();
|
||||
break Ok(());
|
||||
}
|
||||
Ok(ev) => writeln!(output, "ev: {:?}", ev).unwrap(),
|
||||
Err(e) => break Err(e),
|
||||
}
|
||||
};
|
||||
|
||||
writeln!(output, "code size: {}", out.len() - HEADER_SIZE).unwrap();
|
||||
writeln!(output, "ret: {:?}", vm.read_reg(1).0).unwrap();
|
||||
writeln!(output, "status: {:?}", stat).unwrap();
|
||||
}
|
1148 lang/src/backend/hbvm/regalloc.rs Normal file
File diff suppressed because it is too large
675 lang/src/fmt.rs Normal file
@@ -0,0 +1,675 @@
use {
    crate::{
        lexer::{self, Lexer, TokenKind},
        parser::{
            self, CommentOr, CtorField, EnumField, Expr, FieldList, ListKind, Poser, Radix,
            StructField, UnionField,
        },
    },
    core::{
        fmt::{self},
        mem,
    },
};

pub fn display_radix(radix: Radix, mut value: u64, buf: &mut [u8; 64]) -> &str {
    fn conv_radix(d: u8) -> u8 {
        match d {
            0..=9 => d + b'0',
            _ => d - 10 + b'A',
        }
    }

    for (i, b) in buf.iter_mut().enumerate().rev() {
        let d = (value % radix as u64) as u8;
        value /= radix as u64;
        *b = conv_radix(d);
        if value == 0 {
            return unsafe { core::str::from_utf8_unchecked(&buf[i..]) };
        }
    }

    unreachable!()
}
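// For instance (illustrative values): with radix = Radix::Hex and value = 255
// the loop fills the buffer from the back with 'F', 'F' and returns "FF";
// value = 0 still yields "0" because a digit is written before the zero check.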
|
||||
|
||||
#[repr(u8)]
|
||||
enum TokenGroup {
|
||||
Blank,
|
||||
Comment,
|
||||
Keyword,
|
||||
Identifier,
|
||||
Directive,
|
||||
Number,
|
||||
String,
|
||||
Op,
|
||||
Assign,
|
||||
Paren,
|
||||
Bracket,
|
||||
Colon,
|
||||
Comma,
|
||||
Dot,
|
||||
Ctor,
|
||||
}
|
||||
|
||||
impl TokenKind {
|
||||
fn to_higlight_group(self) -> TokenGroup {
|
||||
use {TokenGroup as TG, TokenKind::*};
|
||||
match self {
|
||||
BSlash | Pound | Eof | Ct => TG::Blank,
|
||||
Comment => TG::Comment,
|
||||
Directive => TG::Directive,
|
||||
Colon => TG::Colon,
|
||||
Semi | Comma => TG::Comma,
|
||||
Dot => TG::Dot,
|
||||
Ctor | Arr | Tupl | TArrow | Range => TG::Ctor,
|
||||
LParen | RParen => TG::Paren,
|
||||
LBrace | RBrace | LBrack | RBrack => TG::Bracket,
|
||||
Number | Float => TG::Number,
|
||||
Under | CtIdent | Ident => TG::Identifier,
|
||||
Tick | Tilde | Que | Not | Mod | Band | Bor | Xor | Mul | Add | Sub | Div | Shl
|
||||
| Shr | Or | And | Lt | Gt | Eq | Le | Ge | Ne => TG::Op,
|
||||
Decl | Assign | BorAss | XorAss | BandAss | AddAss | SubAss | MulAss | DivAss
|
||||
| ModAss | ShrAss | ShlAss => TG::Assign,
|
||||
DQuote | Quote => TG::String,
|
||||
Slf | Defer | Return | If | Else | Loop | Break | Continue | Fn | Idk | Die
|
||||
| Struct | Packed | True | False | Null | Match | Enum | Union | CtLoop => TG::Keyword,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_token_kinds(mut source: &mut [u8]) -> usize {
|
||||
let len = source.len();
|
||||
loop {
|
||||
let src = unsafe { core::str::from_utf8_unchecked(source) };
|
||||
let mut token = lexer::Lexer::new(src).eat();
|
||||
match token.kind {
|
||||
TokenKind::Eof => break,
|
||||
// ???
|
||||
TokenKind::CtIdent | TokenKind::Directive => token.start -= 1,
|
||||
_ => {}
|
||||
}
|
||||
let start = token.start as usize;
|
||||
let end = token.end as usize;
|
||||
source[..start].fill(0);
|
||||
source[start..end].fill(token.kind.to_higlight_group() as u8);
|
||||
source = &mut source[end..];
|
||||
}
|
||||
len
|
||||
}
|
||||
|
||||
pub fn minify(source: &mut str) -> usize {
|
||||
fn needs_space(c: u8) -> bool {
|
||||
matches!(c, b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | 127..)
|
||||
}
|
||||
|
||||
let mut writer = source.as_mut_ptr();
|
||||
let mut reader = &source[..];
|
||||
let mut prev_needs_whitecpace = false;
|
||||
let mut prev_needs_newline = false;
|
||||
loop {
|
||||
let mut token = lexer::Lexer::new(reader).eat();
|
||||
match token.kind {
|
||||
TokenKind::Eof => break,
|
||||
TokenKind::CtIdent | TokenKind::CtLoop | TokenKind::Directive => token.start -= 1,
|
||||
_ => {}
|
||||
}
|
||||
|
||||
let cpy_len = token.range().len();
|
||||
|
||||
let mut prefix = 0;
|
||||
if prev_needs_whitecpace && needs_space(reader.as_bytes()[token.start as usize]) {
|
||||
prefix = b' ';
|
||||
debug_assert!(token.start != 0, "{reader}");
|
||||
}
|
||||
prev_needs_whitecpace = needs_space(reader.as_bytes()[token.end as usize - 1]);
|
||||
|
||||
let inbetween_new_lines =
|
||||
reader[..token.start as usize].bytes().filter(|&b| b == b'\n').count()
|
||||
+ token.kind.precedence().is_some() as usize;
|
||||
let extra_prefix_new_lines = if inbetween_new_lines > 1 {
|
||||
1 + token.kind.precedence().is_none() as usize
|
||||
} else {
|
||||
prev_needs_newline as usize
|
||||
};
|
||||
|
||||
if token.kind == TokenKind::Comment && reader.as_bytes()[token.end as usize - 1] != b'/' {
|
||||
prev_needs_newline = true;
|
||||
prev_needs_whitecpace = false;
|
||||
} else {
|
||||
prev_needs_newline = false;
|
||||
}
|
||||
|
||||
let sstr = reader[token.start as usize..].as_ptr();
|
||||
reader = &reader[token.end as usize..];
|
||||
unsafe {
|
||||
if extra_prefix_new_lines != 0 {
|
||||
for _ in 0..extra_prefix_new_lines {
|
||||
writer.write(b'\n');
|
||||
writer = writer.add(1);
|
||||
}
|
||||
} else if prefix != 0 {
|
||||
writer.write(prefix);
|
||||
writer = writer.add(1);
|
||||
}
|
||||
writer.copy_from(sstr, cpy_len);
|
||||
writer = writer.add(cpy_len);
|
||||
}
|
||||
}
|
||||
|
||||
unsafe { writer.sub_ptr(source.as_mut_ptr()) }
|
||||
}
|
||||
|
||||
pub struct Formatter<'a> {
|
||||
source: &'a str,
|
||||
depth: usize,
|
||||
}
|
||||
|
||||
// we exclusively use `write_str` to reduce bloat
|
||||
impl<'a> Formatter<'a> {
|
||||
pub fn new(source: &'a str) -> Self {
|
||||
Self { source, depth: 0 }
|
||||
}
|
||||
|
||||
fn fmt_list<T: Poser, F: core::fmt::Write>(
|
||||
&mut self,
|
||||
f: &mut F,
|
||||
trailing: bool,
|
||||
end: &str,
|
||||
sep: &str,
|
||||
list: &[T],
|
||||
fmt: impl Fn(&mut Self, &T, &mut F) -> fmt::Result,
|
||||
) -> fmt::Result {
|
||||
self.fmt_list_low(f, trailing, end, sep, list, |s, v, f| {
|
||||
fmt(s, v, f)?;
|
||||
Ok(true)
|
||||
})
|
||||
}
|
||||
|
||||
fn fmt_list_low<T: Poser, F: core::fmt::Write>(
|
||||
&mut self,
|
||||
f: &mut F,
|
||||
trailing: bool,
|
||||
end: &str,
|
||||
sep: &str,
|
||||
list: &[T],
|
||||
fmt: impl Fn(&mut Self, &T, &mut F) -> Result<bool, fmt::Error>,
|
||||
) -> fmt::Result {
|
||||
if !trailing {
|
||||
let mut first = true;
|
||||
for expr in list {
|
||||
if !core::mem::take(&mut first) {
|
||||
f.write_str(sep)?;
|
||||
f.write_str(" ")?;
|
||||
}
|
||||
first = !fmt(self, expr, f)?;
|
||||
}
|
||||
return f.write_str(end);
|
||||
}
|
||||
|
||||
if !end.is_empty() {
|
||||
writeln!(f)?;
|
||||
}
|
||||
|
||||
self.depth += !end.is_empty() as usize;
|
||||
let mut already_indented = end.is_empty();
|
||||
let res = (|| {
|
||||
for (i, stmt) in list.iter().enumerate() {
|
||||
if !mem::take(&mut already_indented) {
|
||||
for _ in 0..self.depth {
|
||||
f.write_str("\t")?;
|
||||
}
|
||||
}
|
||||
let add_sep = fmt(self, stmt, f)?;
|
||||
if add_sep {
|
||||
f.write_str(sep)?;
|
||||
}
|
||||
if let Some(expr) = list.get(i + 1)
|
||||
&& let Some(prev) = self.source.get(..expr.posi() as usize)
|
||||
{
|
||||
if sep.is_empty() && prev.trim_end().ends_with(';') {
|
||||
f.write_str(";")?;
|
||||
}
|
||||
if count_trailing_newlines(prev) > 1 {
|
||||
f.write_str("\n")?;
|
||||
}
|
||||
}
|
||||
if add_sep {
|
||||
f.write_str("\n")?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
})();
|
||||
self.depth -= !end.is_empty() as usize;
|
||||
|
||||
if !end.is_empty() {
|
||||
for _ in 0..self.depth {
|
||||
f.write_str("\t")?;
|
||||
}
|
||||
f.write_str(end)?;
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
fn fmt_paren<F: core::fmt::Write>(
|
||||
&mut self,
|
||||
expr: &Expr,
|
||||
f: &mut F,
|
||||
cond: impl FnOnce(&Expr) -> bool,
|
||||
) -> fmt::Result {
|
||||
if cond(expr) {
|
||||
f.write_str("(")?;
|
||||
self.fmt(expr, f)?;
|
||||
f.write_str(")")
|
||||
} else {
|
||||
self.fmt(expr, f)
|
||||
}
|
||||
}
|
||||
|
||||
fn fmt_fields<F: core::fmt::Write, T: Poser + Copy>(
|
||||
&mut self,
|
||||
f: &mut F,
|
||||
keyword: &str,
|
||||
trailing_comma: bool,
|
||||
fields: FieldList<T>,
|
||||
fmt: impl Fn(&mut Self, &T, &mut F) -> Result<(), fmt::Error>,
|
||||
) -> fmt::Result {
|
||||
f.write_str(keyword)?;
|
||||
f.write_str(" {")?;
|
||||
self.fmt_list_low(f, trailing_comma, "}", ",", fields, |s, field, f| {
|
||||
match field {
|
||||
CommentOr::Or(Ok(field)) => fmt(s, field, f)?,
|
||||
CommentOr::Or(Err(scope)) => {
|
||||
s.fmt_list(f, true, "", "", scope, Self::fmt)?;
|
||||
return Ok(false);
|
||||
}
|
||||
CommentOr::Comment { literal, .. } => {
|
||||
f.write_str(literal)?;
|
||||
f.write_str("\n")?;
|
||||
}
|
||||
}
|
||||
Ok(field.or().is_some())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn fmt<F: core::fmt::Write>(&mut self, expr: &Expr, f: &mut F) -> fmt::Result {
|
||||
macro_rules! impl_parenter {
|
||||
($($name:ident => $pat:pat,)*) => {
|
||||
$(
|
||||
let $name = |e: &Expr| matches!(e, $pat);
|
||||
)*
|
||||
};
|
||||
}
|
||||
|
||||
impl_parenter! {
|
||||
unary => Expr::BinOp { .. },
|
||||
postfix => Expr::UnOp { .. } | Expr::BinOp { .. },
|
||||
consecutive => Expr::UnOp { .. },
|
||||
}
|
||||
|
||||
match *expr {
|
||||
Expr::Defer { value, .. } => {
|
||||
f.write_str("defer ")?;
|
||||
self.fmt(value, f)
|
||||
}
|
||||
Expr::Slf { .. } => f.write_str("Self"),
|
||||
Expr::String { literal, .. } => f.write_str(literal),
|
||||
Expr::Char { literal, .. } => f.write_str(literal),
|
||||
Expr::Comment { literal, .. } => f.write_str(literal),
|
||||
Expr::Mod { path, .. } => write!(f, "@use(\"{path}\")"),
|
||||
Expr::Embed { path, .. } => write!(f, "@embed(\"{path}\")"),
|
||||
Expr::Field { target, name: field, .. } => {
|
||||
self.fmt_paren(target, f, postfix)?;
|
||||
f.write_str(".")?;
|
||||
f.write_str(field)
|
||||
}
|
||||
Expr::Range { start, end, .. } => {
|
||||
if let Some(start) = start {
|
||||
self.fmt(start, f)?;
|
||||
}
|
||||
f.write_str("..")?;
|
||||
if let Some(end) = end {
|
||||
self.fmt(end, f)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Expr::Directive { name, args, .. } => {
|
||||
f.write_str("@")?;
|
||||
f.write_str(name)?;
|
||||
f.write_str("(")?;
|
||||
self.fmt_list(f, false, ")", ",", args, Self::fmt)
|
||||
}
|
||||
Expr::Struct { fields, trailing_comma, packed, .. } => {
|
||||
if packed {
|
||||
f.write_str("packed ")?;
|
||||
}
|
||||
|
||||
self.fmt_fields(
|
||||
f,
|
||||
"struct",
|
||||
trailing_comma,
|
||||
fields,
|
||||
|s, StructField { name, ty, default_value, .. }, f| {
|
||||
f.write_str(name)?;
|
||||
f.write_str(": ")?;
|
||||
s.fmt(ty, f)?;
|
||||
if let Some(deva) = default_value {
|
||||
f.write_str(" = ")?;
|
||||
s.fmt(deva, f)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
}
|
||||
Expr::Union { fields, trailing_comma, .. } => self.fmt_fields(
|
||||
f,
|
||||
"union",
|
||||
trailing_comma,
|
||||
fields,
|
||||
|s, UnionField { name, ty, .. }, f| {
|
||||
f.write_str(name)?;
|
||||
f.write_str(": ")?;
|
||||
s.fmt(ty, f)
|
||||
},
|
||||
),
|
||||
Expr::Enum { variants, trailing_comma, .. } => self.fmt_fields(
|
||||
f,
|
||||
"enum",
|
||||
trailing_comma,
|
||||
variants,
|
||||
|_, EnumField { name, .. }, f| f.write_str(name),
|
||||
),
|
||||
Expr::Ctor { ty, fields, trailing_comma, .. } => {
|
||||
if let Some(ty) = ty {
|
||||
self.fmt_paren(ty, f, postfix)?;
|
||||
}
|
||||
f.write_str(".{")?;
|
||||
self.fmt_list(
|
||||
f,
|
||||
trailing_comma,
|
||||
"}",
|
||||
",",
|
||||
fields,
|
||||
|s: &mut Self, CtorField { name, value, .. }: &_, f| {
|
||||
f.write_str(name)?;
|
||||
if !matches!(value, &Expr::Ident { id, .. } if *name == &self.source[id.range()]) {
|
||||
f.write_str(": ")?;
|
||||
s.fmt(value, f)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
}
|
||||
Expr::List {
|
||||
pos,
|
||||
kind: term,
|
||||
ty: Some(&Expr::Slice { pos: spos, size: Some(&Expr::Number { value, .. }), item }),
|
||||
fields,
|
||||
trailing_comma,
|
||||
} if value as usize == fields.len() => self.fmt(
|
||||
&Expr::List {
|
||||
pos,
|
||||
kind: term,
|
||||
ty: Some(&Expr::Slice { pos: spos, size: None, item }),
|
||||
fields,
|
||||
trailing_comma,
|
||||
},
|
||||
f,
|
||||
),
|
||||
Expr::List { ty, kind: term, fields, trailing_comma, .. } => {
|
||||
if let Some(ty) = ty {
|
||||
self.fmt_paren(ty, f, postfix)?;
|
||||
}
|
||||
let (start, end) = match term {
|
||||
ListKind::Tuple => (".(", ")"),
|
||||
ListKind::Array => (".[", "]"),
|
||||
};
|
||||
f.write_str(start)?;
|
||||
self.fmt_list(f, trailing_comma, end, ",", fields, Self::fmt)
|
||||
}
|
||||
Expr::Slice { item, size, .. } => {
|
||||
f.write_str("[")?;
|
||||
if let Some(size) = size {
|
||||
self.fmt(size, f)?;
|
||||
}
|
||||
f.write_str("]")?;
|
||||
self.fmt_paren(item, f, unary)
|
||||
}
|
||||
Expr::Index { base, index } => {
|
||||
self.fmt_paren(base, f, postfix)?;
|
||||
f.write_str("[")?;
|
||||
self.fmt(index, f)?;
|
||||
f.write_str("]")
|
||||
}
|
||||
Expr::UnOp { op, val, .. } => {
|
||||
f.write_str(op.name())?;
|
||||
self.fmt_paren(val, f, unary)
|
||||
}
|
||||
Expr::Break { .. } => f.write_str("break"),
|
||||
Expr::Continue { .. } => f.write_str("continue"),
|
||||
Expr::If { cond, then, else_, .. } => {
|
||||
f.write_str("if ")?;
|
||||
self.fmt(cond, f)?;
|
||||
f.write_str(" ")?;
|
||||
self.fmt_paren(then, f, consecutive)?;
|
||||
if let Some(e) = else_ {
|
||||
f.write_str(" else ")?;
|
||||
self.fmt(e, f)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Expr::Match { value, branches, .. } => {
|
||||
f.write_str("match ")?;
|
||||
self.fmt(value, f)?;
|
||||
f.write_str(" {")?;
|
||||
self.fmt_list(f, true, "}", ",", branches, |s, br, f| {
|
||||
s.fmt(&br.pat, f)?;
|
||||
f.write_str(" => ")?;
|
||||
s.fmt(&br.body, f)
|
||||
})
|
||||
}
|
||||
Expr::Loop { body, unrolled, .. } => {
|
||||
f.write_str(if unrolled { "$loop " } else { "loop " })?;
|
||||
self.fmt(body, f)
|
||||
}
|
||||
Expr::Closure { ret, body, args, .. } => {
|
||||
f.write_str("fn(")?;
|
||||
self.fmt_list(f, false, "", ",", args, |s, arg, f| {
|
||||
if arg.is_ct {
|
||||
f.write_str("$")?;
|
||||
}
|
||||
f.write_str(arg.name)?;
|
||||
f.write_str(": ")?;
|
||||
s.fmt(&arg.ty, f)
|
||||
})?;
|
||||
f.write_str("): ")?;
|
||||
self.fmt(ret, f)?;
|
||||
f.write_str(" ")?;
|
||||
self.fmt_paren(body, f, consecutive)?;
|
||||
Ok(())
|
||||
}
|
||||
Expr::Call { func, args, trailing_comma } => {
|
||||
self.fmt_paren(func, f, postfix)?;
|
||||
f.write_str("(")?;
|
||||
self.fmt_list(f, trailing_comma, ")", ",", args, Self::fmt)
|
||||
}
|
||||
Expr::Return { val: Some(val), .. } => {
|
||||
f.write_str("return ")?;
|
||||
self.fmt(val, f)
|
||||
}
|
||||
Expr::Return { val: None, .. } => f.write_str("return"),
|
||||
Expr::Wildcard { .. } => f.write_str("_"),
|
||||
Expr::Ident { pos, is_ct, .. } => {
|
||||
if is_ct {
|
||||
f.write_str("$")?;
|
||||
}
|
||||
f.write_str(&self.source[Lexer::restore(self.source, pos).eat().range()])
|
||||
}
|
||||
Expr::Block { stmts, .. } => {
|
||||
f.write_str("{")?;
|
||||
self.fmt_list(f, true, "}", "", stmts, Self::fmt)
|
||||
}
|
||||
Expr::Number { value, radix, .. } => {
|
||||
f.write_str(match radix {
|
||||
Radix::Decimal => "",
|
||||
Radix::Hex => "0x",
|
||||
Radix::Octal => "0o",
|
||||
Radix::Binary => "0b",
|
||||
})?;
|
||||
let mut buf = [0u8; 64];
|
||||
f.write_str(display_radix(radix, value as u64, &mut buf))
|
||||
}
|
||||
Expr::Float { pos, .. } => {
|
||||
f.write_str(&self.source[Lexer::restore(self.source, pos).eat().range()])
|
||||
}
|
||||
Expr::Bool { value, .. } => f.write_str(if value { "true" } else { "false" }),
|
||||
Expr::Idk { .. } => f.write_str("idk"),
|
||||
Expr::Die { .. } => f.write_str("die"),
|
||||
Expr::Null { .. } => f.write_str("null"),
|
||||
Expr::BinOp {
|
||||
left,
|
||||
op: TokenKind::Assign,
|
||||
right: &Expr::BinOp { left: lleft, op, right, .. },
|
||||
..
|
||||
} if left.pos() == lleft.pos() => {
|
||||
self.fmt(left, f)?;
|
||||
f.write_str(" ")?;
|
||||
f.write_str(op.name())?;
|
||||
f.write_str("= ")?;
|
||||
self.fmt(right, f)
|
||||
}
|
||||
Expr::BinOp { right, op, left, .. } => {
|
||||
let prec_miss_left = |e: &Expr| {
|
||||
matches!(
|
||||
e, Expr::BinOp { op: lop, .. } if op.precedence() > lop.precedence()
|
||||
)
|
||||
};
|
||||
let prec_miss_right = |e: &Expr| {
|
||||
matches!(
|
||||
e, Expr::BinOp { op: lop, .. }
|
||||
if (op.precedence() == lop.precedence() && !op.is_comutative())
|
||||
|| op.precedence() > lop.precedence()
|
||||
)
|
||||
};
|
||||
|
||||
self.fmt_paren(left, f, prec_miss_left)?;
|
||||
if let Some(mut prev) = self.source.get(..right.pos() as usize) {
|
||||
prev = prev.trim_end();
|
||||
let estimate_bound =
|
||||
prev.rfind(|c: char| c.is_ascii_whitespace()).map_or(prev.len(), |i| i + 1);
|
||||
let exact_bound = lexer::Lexer::new(&prev[estimate_bound..]).last().start;
|
||||
prev = &prev[..exact_bound as usize + estimate_bound];
|
||||
if count_trailing_newlines(prev) > 0 {
|
||||
f.write_str("\n")?;
|
||||
for _ in 0..self.depth + 1 {
|
||||
f.write_str("\t")?;
|
||||
}
|
||||
f.write_str(op.name())?;
|
||||
f.write_str(" ")?;
|
||||
} else {
|
||||
if op != TokenKind::Colon {
|
||||
f.write_str(" ")?;
|
||||
}
|
||||
f.write_str(op.name())?;
|
||||
f.write_str(" ")?;
|
||||
}
|
||||
} else {
|
||||
f.write_str(" ")?;
|
||||
f.write_str(op.name())?;
|
||||
f.write_str(" ")?;
|
||||
}
|
||||
self.fmt_paren(right, f, prec_miss_right)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn count_trailing_newlines(source: &str) -> usize {
|
||||
source[source.trim_end().len()..].bytes().filter(|&c| c == b'\n').count()
|
||||
}
|
||||
|
||||
impl core::fmt::Display for parser::Ast {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt_file(self.exprs(), &self.file, f)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fmt_file(exprs: &[Expr], file: &str, f: &mut impl fmt::Write) -> fmt::Result {
|
||||
for (i, expr) in exprs.iter().enumerate() {
|
||||
Formatter::new(file).fmt(expr, f)?;
|
||||
if let Some(expr) = exprs.get(i + 1)
|
||||
&& let Some(prefix) = file.get(..expr.pos() as usize)
|
||||
{
|
||||
if prefix.trim_end().ends_with(';') {
|
||||
f.write_str(";")?;
|
||||
}
|
||||
|
||||
if count_trailing_newlines(prefix) > 1 {
|
||||
f.write_str("\n")?;
|
||||
}
|
||||
}
|
||||
|
||||
if i + 1 != exprs.len() {
|
||||
writeln!(f)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub mod test {
|
||||
use {
|
||||
crate::parser::{self, Ctx},
|
||||
alloc::borrow::ToOwned,
|
||||
std::{fmt::Write, string::String},
|
||||
};
|
||||
|
||||
pub fn format(ident: &str, input: &str) {
|
||||
let mut minned = input.to_owned();
|
||||
let len = crate::fmt::minify(&mut minned);
|
||||
minned.truncate(len);
|
||||
|
||||
let mut ctx = Ctx::default();
|
||||
let ast = parser::Ast::new(ident, minned, &mut ctx, &mut parser::no_loader);
|
||||
log::info!("{}", ctx.errors.borrow());
|
||||
let mut output = String::new();
|
||||
write!(output, "{ast}").unwrap();
|
||||
|
||||
let input_path = format!("formatter_{ident}.expected");
|
||||
let output_path = format!("formatter_{ident}.actual");
|
||||
std::fs::write(&input_path, input).unwrap();
|
||||
std::fs::write(&output_path, output).unwrap();
|
||||
|
||||
let success = std::process::Command::new("diff")
|
||||
.arg("-u")
|
||||
.arg("--color")
|
||||
.arg(&input_path)
|
||||
.arg(&output_path)
|
||||
.status()
|
||||
.unwrap()
|
||||
.success();
|
||||
std::fs::remove_file(&input_path).unwrap();
|
||||
std::fs::remove_file(&output_path).unwrap();
|
||||
assert!(success, "test failed");
|
||||
}
|
||||
|
||||
macro_rules! test {
|
||||
($($name:ident => $input:expr;)*) => {$(
|
||||
#[test]
|
||||
fn $name() {
|
||||
format(stringify!($name), $input);
|
||||
}
|
||||
)*};
|
||||
}
|
||||
|
||||
test! {
|
||||
comments => "// comment\n// comment\n\n// comment\n\n\
|
||||
/* comment */\n/* comment */\n\n/* comment */";
|
||||
some_ordinary_code => "loft := fn(): int return loft(1, 2, 3)";
|
||||
some_arg_per_line_code => "loft := fn(): int return loft(\
|
||||
\n\t1,\n\t2,\n\t3,\n)";
|
||||
some_ordinary_struct => "loft := fn(): int return loft.{a: 1, b: 2}";
|
||||
some_ordinary_fild_per_lin_struct => "loft := fn(): int return loft.{\
|
||||
\n\ta: 1,\n\tb: 2,\n}";
|
||||
code_block => "loft := fn(): int {\n\tloft()\n\treturn 1\n}";
|
||||
}
|
||||
}
|
449 lang/src/fs.rs Normal file
@@ -0,0 +1,449 @@
use {
|
||||
crate::{
|
||||
backend::hbvm::HbvmBackend,
|
||||
parser::{Ast, Ctx, FileKind},
|
||||
son::{self},
|
||||
ty, FnvBuildHasher,
|
||||
},
|
||||
alloc::{string::String, vec::Vec},
|
||||
core::{fmt::Write, num::NonZeroUsize, ops::Deref},
|
||||
hashbrown::hash_map,
|
||||
std::{
|
||||
borrow::ToOwned,
|
||||
collections::VecDeque,
|
||||
eprintln,
|
||||
ffi::OsStr,
|
||||
io::{self, Write as _},
|
||||
path::{Path, PathBuf},
|
||||
string::ToString,
|
||||
sync::Mutex,
|
||||
},
|
||||
};
|
||||
|
||||
type HashMap<K, V> = hashbrown::HashMap<K, V, FnvBuildHasher>;
|
||||
|
||||
pub struct Logger;
|
||||
|
||||
impl log::Log for Logger {
|
||||
fn enabled(&self, _: &log::Metadata) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn log(&self, record: &log::Record) {
|
||||
if self.enabled(record.metadata()) {
|
||||
eprintln!("{}", record.args())
|
||||
}
|
||||
}
|
||||
|
||||
fn flush(&self) {}
|
||||
}
|
||||
|
||||
pub const ABLEOS_PATH_RESOLVER: PathResolver =
    &|mut path: &str, mut from: &str, tmp: &mut PathBuf| {
        tmp.clear();

        path = match path {
            "stn" => {
                from = "";
                "./sysdata/libraries/stn/src/lib.hb"
            }
            _ => path,
        };

        match path.split_once(':') {
            Some(("lib", p)) => tmp.extend(["./sysdata/libraries", p, "src/lib.hb"]),
            Some(("stn", p)) => {
                tmp.extend(["./sysdata/libraries/stn/src", &(p.to_owned() + ".hb")])
            }
            Some(("sysdata", p)) => tmp.extend(["./sysdata", p]),
            None => match Path::new(from).parent() {
                Some(parent) => tmp.extend([parent, Path::new(path)]),
                None => tmp.push(path),
            },
            _ => panic!("path: '{path}' is invalid: unexpected ':'"),
        };
        tmp.canonicalize().map_err(|source| CantLoadFile { path: std::mem::take(tmp), source })
    };
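// Illustrative resolutions under the scheme above (before canonicalization):
// "lib:foo" maps to ./sysdata/libraries/foo/src/lib.hb, "stn:string" to
// ./sysdata/libraries/stn/src/string.hb, the bare "stn" to
// ./sysdata/libraries/stn/src/lib.hb, and a plain relative path is joined
// onto the importing file's parent directory.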
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Options<'a> {
|
||||
pub fmt: bool,
|
||||
pub fmt_stdout: bool,
|
||||
pub dump_asm: bool,
|
||||
pub extra_threads: usize,
|
||||
pub resolver: Option<PathResolver<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> Options<'a> {
|
||||
pub fn from_args(
|
||||
args: &[&str],
|
||||
out: &mut Vec<u8>,
|
||||
resolvers: &'a [(&str, PathResolver)],
|
||||
) -> std::io::Result<Self> {
|
||||
if args.contains(&"--help") || args.contains(&"-h") {
|
||||
writeln!(out, "Usage: hbc [OPTIONS...] <FILE>")?;
|
||||
writeln!(out, include_str!("../command-help.txt"))?;
|
||||
return Err(std::io::ErrorKind::Other.into());
|
||||
}
|
||||
|
||||
Ok(Options {
|
||||
fmt: args.contains(&"--fmt"),
|
||||
fmt_stdout: args.contains(&"--fmt-stdout"),
|
||||
dump_asm: args.contains(&"--dump-asm"),
|
||||
extra_threads: args
|
||||
.iter()
|
||||
.position(|&a| a == "--threads")
|
||||
.map(|i| {
|
||||
args[i + 1].parse::<NonZeroUsize>().map_err(|e| {
|
||||
writeln!(out, "--threads expects non zero integer: {e}")
|
||||
.err()
|
||||
.unwrap_or(std::io::ErrorKind::Other.into())
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.map_or(1, NonZeroUsize::get)
|
||||
- 1,
|
||||
resolver: args
|
||||
.iter()
|
||||
.position(|&a| a == "--path-resolver")
|
||||
.map(|i| {
|
||||
resolvers.iter().find(|&&(n, _)| args[i + 1] == n).map(|&(_, r)| r).ok_or_else(
|
||||
|| {
|
||||
writeln!(
|
||||
out,
|
||||
"--path-resolver can only be one of: {}",
|
||||
resolvers
|
||||
.iter()
|
||||
.map(|&(n, _)| n)
|
||||
.intersperse(", ")
|
||||
.collect::<String>()
|
||||
)
|
||||
.err()
|
||||
.unwrap_or(std::io::ErrorKind::Other.into())
|
||||
},
|
||||
)
|
||||
})
|
||||
.transpose()?,
|
||||
})
|
||||
}
|
||||
}
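// An illustrative call (the resolver table and flag values are made up):
// `Options::from_args(&["--dump-asm", "--threads", "2"], &mut out, &[("ableos", ABLEOS_PATH_RESOLVER)])`
// yields dump_asm = true, extra_threads = 1 (one worker besides the calling
// thread) and resolver = None, since no `--path-resolver` was passed.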
|
||||
|
||||
pub fn run_compiler(
|
||||
root_file: &str,
|
||||
options: Options,
|
||||
out: &mut Vec<u8>,
|
||||
warnings: &mut String,
|
||||
) -> std::io::Result<()> {
|
||||
let parsed = parse_from_fs(
|
||||
options.extra_threads,
|
||||
root_file,
|
||||
options.resolver.unwrap_or(&default_resolve),
|
||||
)?;
|
||||
|
||||
if (options.fmt || options.fmt_stdout) && !parsed.errors.is_empty() {
|
||||
*out = parsed.errors.into_bytes();
|
||||
return Err(std::io::Error::other("fmt fialed (errors are in out)"));
|
||||
}
|
||||
|
||||
if options.fmt {
|
||||
let mut output = String::new();
|
||||
for ast in parsed.ast {
|
||||
write!(output, "{ast}").unwrap();
|
||||
if ast.file.deref().trim() != output.as_str().trim() {
|
||||
std::fs::write(&*ast.path, &output)?;
|
||||
}
|
||||
output.clear();
|
||||
}
|
||||
} else if options.fmt_stdout {
|
||||
write!(out, "{}", &parsed.ast[0])?;
|
||||
} else {
|
||||
let mut backend = HbvmBackend::default();
|
||||
|
||||
let mut ctx = crate::son::CodegenCtx::default();
|
||||
*ctx.parser.errors.get_mut() = parsed.errors;
|
||||
let mut codegen = son::Codegen::new(&mut backend, &parsed.ast, &mut ctx);
|
||||
codegen.push_embeds(parsed.embeds);
|
||||
codegen.generate(ty::Module::MAIN);
|
||||
|
||||
*warnings = core::mem::take(&mut *codegen.warnings.borrow_mut());
|
||||
|
||||
if !codegen.errors.borrow().is_empty() {
|
||||
drop(codegen);
|
||||
*out = ctx.parser.errors.into_inner().into_bytes();
|
||||
return Err(std::io::Error::other("compilation faoled (errors are in out)"));
|
||||
}
|
||||
|
||||
codegen.assemble(out);
|
||||
|
||||
if options.dump_asm {
|
||||
let mut disasm = String::new();
|
||||
let err = codegen.disasm(&mut disasm, out).map_err(|e| io::Error::other(e.to_string()));
|
||||
*out = disasm.into_bytes();
|
||||
err?
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct TaskQueue<T> {
|
||||
inner: Mutex<TaskQueueInner<T>>,
|
||||
}
|
||||
|
||||
impl<T> TaskQueue<T> {
|
||||
fn new(max_waiters: usize) -> Self {
|
||||
Self { inner: Mutex::new(TaskQueueInner::new(max_waiters)) }
|
||||
}
|
||||
|
||||
pub fn push(&self, message: T) {
|
||||
self.extend([message]);
|
||||
}
|
||||
|
||||
pub fn extend(&self, messages: impl IntoIterator<Item = T>) {
|
||||
self.inner.lock().unwrap().push(messages);
|
||||
}
|
||||
|
||||
pub fn pop(&self) -> Option<T> {
|
||||
TaskQueueInner::pop(&self.inner)
|
||||
}
|
||||
}
|
||||
|
||||
enum TaskSlot<T> {
|
||||
Waiting,
|
||||
Delivered(T),
|
||||
Closed,
|
||||
}
|
||||
|
||||
struct TaskQueueInner<T> {
|
||||
max_waiters: usize,
|
||||
messages: VecDeque<T>,
|
||||
parked: VecDeque<(*mut TaskSlot<T>, std::thread::Thread)>,
|
||||
}
|
||||
|
||||
unsafe impl<T: Send> Send for TaskQueueInner<T> {}
|
||||
unsafe impl<T: Send + Sync> Sync for TaskQueueInner<T> {}
|
||||
|
||||
impl<T> TaskQueueInner<T> {
|
||||
fn new(max_waiters: usize) -> Self {
|
||||
Self { max_waiters, messages: Default::default(), parked: Default::default() }
|
||||
}
|
||||
|
||||
fn push(&mut self, messages: impl IntoIterator<Item = T>) {
|
||||
for msg in messages {
|
||||
if let Some((dest, thread)) = self.parked.pop_front() {
|
||||
unsafe { *dest = TaskSlot::Delivered(msg) };
|
||||
thread.unpark();
|
||||
} else {
|
||||
self.messages.push_back(msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn pop(s: &Mutex<Self>) -> Option<T> {
|
||||
let mut res = TaskSlot::Waiting;
|
||||
{
|
||||
let mut s = s.lock().unwrap();
|
||||
if let Some(msg) = s.messages.pop_front() {
|
||||
return Some(msg);
|
||||
}
|
||||
|
||||
if s.max_waiters == s.parked.len() + 1 {
|
||||
for (dest, thread) in s.parked.drain(..) {
|
||||
unsafe { *dest = TaskSlot::Closed };
|
||||
thread.unpark();
|
||||
}
|
||||
return None;
|
||||
}
|
||||
|
||||
s.parked.push_back((&mut res, std::thread::current()));
|
||||
}
|
||||
|
||||
loop {
|
||||
std::thread::park();
|
||||
|
||||
let _s = s.lock().unwrap();
|
||||
match core::mem::replace(&mut res, TaskSlot::Waiting) {
|
||||
TaskSlot::Delivered(msg) => return Some(msg),
|
||||
TaskSlot::Closed => return None,
|
||||
TaskSlot::Waiting => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Loaded {
|
||||
ast: Vec<Ast>,
|
||||
embeds: Vec<Vec<u8>>,
|
||||
errors: String,
|
||||
}
|
||||
|
||||
fn default_resolve(path: &str, from: &str, tmp: &mut PathBuf) -> Result<PathBuf, CantLoadFile> {
|
||||
tmp.clear();
|
||||
match Path::new(from).parent() {
|
||||
Some(parent) => tmp.extend([parent, Path::new(path)]),
|
||||
None => tmp.push(path),
|
||||
};
|
||||
|
||||
tmp.canonicalize().map_err(|source| CantLoadFile { path: std::mem::take(tmp), source })
|
||||
}
|
||||
|
||||
/// fn(path, from, tmp)
|
||||
pub type PathResolver<'a> =
|
||||
&'a (dyn Fn(&str, &str, &mut PathBuf) -> Result<PathBuf, CantLoadFile> + Send + Sync);
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct CantLoadFile {
|
||||
pub path: PathBuf,
|
||||
pub source: io::Error,
|
||||
}
|
||||
|
||||
impl core::fmt::Display for CantLoadFile {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
|
||||
write!(f, "can't load file: {}", display_rel_path(&self.path))
|
||||
}
|
||||
}
|
||||
|
||||
impl core::error::Error for CantLoadFile {
|
||||
fn source(&self) -> Option<&(dyn core::error::Error + 'static)> {
|
||||
Some(&self.source)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CantLoadFile> for io::Error {
|
||||
fn from(e: CantLoadFile) -> Self {
|
||||
io::Error::new(io::ErrorKind::InvalidData, e)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_from_fs(
|
||||
extra_threads: usize,
|
||||
root: &str,
|
||||
resolve: PathResolver,
|
||||
) -> io::Result<Loaded> {
|
||||
type Task = (usize, PathBuf);
|
||||
|
||||
let seen_modules = Mutex::new(HashMap::<PathBuf, usize>::default());
|
||||
let seen_embeds = Mutex::new(HashMap::<PathBuf, usize>::default());
|
||||
let tasks = TaskQueue::<Task>::new(extra_threads + 1);
|
||||
let ast = Mutex::new(Vec::<io::Result<Ast>>::new());
|
||||
let embeds = Mutex::new(Vec::<Vec<u8>>::new());
|
||||
|
||||
let loader = |path: &str, from: &str, kind: FileKind, tmp: &mut _| {
|
||||
let mut physical_path = resolve(path, from, tmp)?;
|
||||
|
||||
match kind {
|
||||
FileKind::Module => {
|
||||
let id = {
|
||||
let mut seen = seen_modules.lock().unwrap();
|
||||
let len = seen.len();
|
||||
match seen.entry(physical_path) {
|
||||
hash_map::Entry::Occupied(entry) => {
|
||||
return Ok(*entry.get());
|
||||
}
|
||||
hash_map::Entry::Vacant(entry) => {
|
||||
physical_path = entry.insert_entry(len as _).key().clone();
|
||||
len
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if !physical_path.exists() {
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::NotFound,
|
||||
format!("can't find file: {}", display_rel_path(&physiscal_path)),
|
||||
));
|
||||
}
|
||||
|
||||
tasks.push((id, physical_path));
|
||||
Ok(id)
|
||||
}
|
||||
FileKind::Embed => {
|
||||
let id = {
|
||||
let mut seen = seen_embeds.lock().unwrap();
|
||||
let len = seen.len();
|
||||
match seen.entry(physical_path) {
|
||||
hash_map::Entry::Occupied(entry) => {
|
||||
return Ok(*entry.get());
|
||||
}
|
||||
hash_map::Entry::Vacant(entry) => {
|
||||
physical_path = entry.insert_entry(len as _).key().clone();
|
||||
len
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let content = std::fs::read(&physical_path).map_err(|e| {
|
||||
io::Error::new(
|
||||
e.kind(),
|
||||
format!(
|
||||
"can't load embed file: {}: {e}",
|
||||
display_rel_path(&physical_path)
|
||||
),
|
||||
)
|
||||
})?;
|
||||
let mut embeds = embeds.lock().unwrap();
|
||||
if id >= embeds.len() {
|
||||
embeds.resize(id + 1, Default::default());
|
||||
}
|
||||
embeds[id] = content;
|
||||
Ok(id)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let execute_task = |ctx: &mut _, (_, path): Task, tmp: &mut _| {
|
||||
let path = path.to_str().ok_or_else(|| {
|
||||
io::Error::new(
|
||||
io::ErrorKind::InvalidData,
|
||||
format!("path contains invalid characters: {}", display_rel_path(&path)),
|
||||
)
|
||||
})?;
|
||||
Ok(Ast::new(path, std::fs::read_to_string(path)?, ctx, &mut |path, from, kind| {
|
||||
loader(path, from, kind, tmp).map_err(|e| e.to_string())
|
||||
}))
|
||||
};
|
||||
|
||||
let thread = || {
|
||||
let mut ctx = Ctx::default();
|
||||
let mut tmp = PathBuf::new();
|
||||
while let Some(task @ (indx, ..)) = tasks.pop() {
|
||||
let res = execute_task(&mut ctx, task, &mut tmp);
|
||||
let mut ast = ast.lock().unwrap();
|
||||
let len = ast.len().max(indx + 1);
|
||||
ast.resize_with(len, || Err(io::ErrorKind::InvalidData.into()));
|
||||
ast[indx] = res;
|
||||
}
|
||||
ctx.errors.into_inner()
|
||||
};
|
||||
|
||||
let path = Path::new(root).canonicalize().map_err(|e| {
|
||||
io::Error::new(e.kind(), format!("can't canonicalize root file path ({root})"))
|
||||
})?;
|
||||
seen_modules.lock().unwrap().insert(path.clone(), 0);
|
||||
tasks.push((0, path));
|
||||
|
||||
let errors = if extra_threads == 0 {
|
||||
thread()
|
||||
} else {
|
||||
std::thread::scope(|s| {
|
||||
(0..extra_threads + 1)
|
||||
.map(|_| s.spawn(thread))
|
||||
.collect::<Vec<_>>()
|
||||
.into_iter()
|
||||
.map(|t| t.join().unwrap())
|
||||
.collect::<String>()
|
||||
})
|
||||
};
|
||||
|
||||
Ok(Loaded {
|
||||
ast: ast.into_inner().unwrap().into_iter().collect::<io::Result<Vec<_>>>()?,
|
||||
embeds: embeds.into_inner().unwrap(),
|
||||
errors,
|
||||
})
|
||||
}
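// Hypothetical usage sketch (the function below is not in the original diff): drive
// `parse_from_fs` with the `default_resolve` resolver, surface accumulated parse
// errors as an `io::Error`, and return the module/embed counts on success.
#[allow(dead_code)]
fn parse_root_example(root: &str, extra_threads: usize) -> io::Result<(usize, usize)> {
    let loaded = parse_from_fs(extra_threads, root, &default_resolve)?;
    if !loaded.errors.is_empty() {
        return Err(io::Error::other(loaded.errors));
    }
    Ok((loaded.ast.len(), loaded.embeds.len()))
}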
|
||||
|
||||
pub fn display_rel_path(path: &(impl AsRef<OsStr> + ?Sized)) -> std::path::Display {
|
||||
static CWD: std::sync::LazyLock<PathBuf> =
|
||||
std::sync::LazyLock::new(|| std::env::current_dir().unwrap_or_default());
|
||||
std::path::Path::new(path).strip_prefix(&*CWD).unwrap_or(std::path::Path::new(path)).display()
|
||||
}
|
142  lang/src/fuzz.rs  Normal file
@@ -0,0 +1,142 @@
use {
|
||||
crate::{
|
||||
backend::hbvm::HbvmBackend,
|
||||
lexer::TokenKind,
|
||||
parser,
|
||||
son::{Codegen, CodegenCtx},
|
||||
ty::Module,
|
||||
},
|
||||
alloc::string::String,
|
||||
core::{fmt::Write, hash::BuildHasher, ops::Range},
|
||||
};
|
||||
|
||||
#[derive(Default)]
|
||||
struct Rand(pub u64);
|
||||
|
||||
impl Rand {
|
||||
pub fn next(&mut self) -> u64 {
|
||||
self.0 = crate::FnvBuildHasher::default().hash_one(self.0);
|
||||
self.0
|
||||
}
|
||||
|
||||
pub fn range(&mut self, min: u64, max: u64) -> u64 {
|
||||
self.next() % (max - min) + min
|
||||
}
|
||||
|
||||
fn bool(&mut self) -> bool {
|
||||
self.next() % 2 == 0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct FuncGen {
|
||||
rand: Rand,
|
||||
buf: String,
|
||||
vars: u64,
|
||||
}
|
||||
|
||||
impl FuncGen {
|
||||
fn gen(&mut self, seed: u64) -> &str {
|
||||
self.rand = Rand(seed);
|
||||
self.buf.clear();
|
||||
self.buf.push_str("main := fn(): void ");
|
||||
self.block().unwrap();
|
||||
&self.buf
|
||||
}
|
||||
|
||||
fn block(&mut self) -> core::fmt::Result {
|
||||
let prev_vars = self.vars;
|
||||
self.buf.push('{');
|
||||
for _ in 0..self.rand.range(1, 10) {
|
||||
self.stmt()?;
|
||||
}
|
||||
self.buf.push('}');
|
||||
self.vars = prev_vars;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn stmt(&mut self) -> core::fmt::Result {
|
||||
match self.rand.range(0, 100) {
|
||||
0..4 => _ = self.block(),
|
||||
4..10 => {
|
||||
write!(self.buf, "var{} := ", self.vars)?;
|
||||
self.expr()?;
|
||||
self.vars += 1;
|
||||
}
|
||||
|
||||
10..20 if self.vars != 0 => {
|
||||
write!(self.buf, "var{} = ", self.rand.range(0, self.vars))?;
|
||||
self.expr()?;
|
||||
}
|
||||
20..23 => {
|
||||
self.buf.push_str("if ");
|
||||
self.expr()?;
|
||||
self.block()?;
|
||||
if self.rand.bool() {
|
||||
self.buf.push_str(" else ");
|
||||
self.block()?;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
self.buf.push_str("return ");
|
||||
self.expr()?;
|
||||
}
|
||||
}
|
||||
|
||||
self.buf.push(';');
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn expr(&mut self) -> core::fmt::Result {
|
||||
match self.rand.range(0, 100) {
|
||||
0..80 => {
|
||||
write!(self.buf, "{}", self.rand.next())
|
||||
}
|
||||
80..90 if self.vars != 0 => {
|
||||
write!(self.buf, "var{}", self.rand.range(0, self.vars))
|
||||
}
|
||||
80..100 => {
|
||||
self.expr()?;
|
||||
let ops = [
|
||||
TokenKind::Add,
|
||||
TokenKind::Sub,
|
||||
TokenKind::Mul,
|
||||
TokenKind::Div,
|
||||
TokenKind::Shl,
|
||||
TokenKind::Eq,
|
||||
TokenKind::Ne,
|
||||
TokenKind::Lt,
|
||||
TokenKind::Gt,
|
||||
TokenKind::Le,
|
||||
TokenKind::Ge,
|
||||
TokenKind::Band,
|
||||
TokenKind::Bor,
|
||||
TokenKind::Xor,
|
||||
TokenKind::Mod,
|
||||
TokenKind::Shr,
|
||||
];
|
||||
let op = ops[self.rand.range(0, ops.len() as u64) as usize];
|
||||
write!(self.buf, " {op} ")?;
|
||||
self.expr()
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fuzz(seed_range: Range<u64>) {
|
||||
let mut gen = FuncGen::default();
|
||||
let mut ctx = CodegenCtx::default();
|
||||
for i in seed_range {
|
||||
ctx.clear();
|
||||
let src = gen.gen(i);
|
||||
let parsed = parser::Ast::new("fuzz", src, &mut ctx.parser, &mut parser::no_loader);
|
||||
|
||||
assert!(ctx.parser.errors.get_mut().is_empty());
|
||||
|
||||
let mut backend = HbvmBackend::default();
|
||||
let mut cdg = Codegen::new(&mut backend, core::slice::from_ref(&parsed), &mut ctx);
|
||||
cdg.generate(Module::MAIN);
|
||||
}
|
||||
}
|
3  lang/src/fuzz_main.rs  Normal file
@@ -0,0 +1,3 @@
fn main() {
|
||||
hblang::fuzz::fuzz(0..1000000);
|
||||
}
|
625  lang/src/lexer.rs  Normal file
@@ -0,0 +1,625 @@
const fn ascii_mask(chars: &[u8]) -> u128 {
|
||||
let mut eq = 0;
|
||||
let mut i = 0;
|
||||
while i < chars.len() {
|
||||
let b = chars[i];
|
||||
eq |= 1 << b;
|
||||
i += 1;
|
||||
}
|
||||
eq
|
||||
}
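// Worked example (not in the original diff): `ascii_mask` folds each byte into one
// bit of a u128, so later membership checks such as `TokenKind::ass_op` reduce to a
// shift and an AND. Evaluated entirely at compile time.
const _: () = {
    let digits = ascii_mask(b"0123456789");
    assert!(digits & (1 << b'7') != 0);
    assert!(digits & (1 << b'a') == 0);
};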
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub struct Token {
|
||||
pub kind: TokenKind,
|
||||
pub start: u32,
|
||||
pub end: u32,
|
||||
}
|
||||
|
||||
impl Token {
|
||||
pub fn range(&self) -> core::ops::Range<usize> {
|
||||
self.start as usize..self.end as usize
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! gen_token_kind {
|
||||
($(
|
||||
#[$atts:meta])*
|
||||
$vis:vis enum $name:ident {
|
||||
#[patterns] $(
|
||||
$pattern:ident,
|
||||
)*
|
||||
#[keywords] $(
|
||||
$keyword:ident = $keyword_lit:literal,
|
||||
)*
|
||||
#[const_keywords] $(
|
||||
$const_keyword:ident = $const_keyword_lit:literal,
|
||||
)*
|
||||
#[punkt] $(
|
||||
$punkt:ident = $punkt_lit:literal,
|
||||
)*
|
||||
#[ops] $(
|
||||
#[$prec:ident] $(
|
||||
$op:ident = $op_lit:literal $(=> $assign:ident)?,
|
||||
)*
|
||||
)*
|
||||
}
|
||||
) => {
|
||||
impl core::fmt::Display for $name {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
|
||||
f.write_str(self.name())
|
||||
}
|
||||
}
|
||||
|
||||
impl $name {
|
||||
pub const OPS: &[Self] = &[$($(Self::$op),*),*];
|
||||
|
||||
pub fn name(&self) -> &str {
|
||||
let sf = unsafe { &*(self as *const _ as *const u8) };
|
||||
match *self {
|
||||
$( Self::$pattern => concat!('<', stringify!($pattern), '>'), )*
|
||||
$( Self::$keyword => stringify!($keyword_lit), )*
|
||||
$( Self::$const_keyword => concat!('$', $const_keyword_lit), )*
|
||||
$( Self::$punkt => stringify!($punkt_lit), )*
|
||||
$($( Self::$op => $op_lit,
|
||||
$(Self::$assign => concat!($op_lit, "="),)?)*)*
|
||||
_ => unsafe { core::str::from_utf8_unchecked(core::slice::from_ref(sf)) },
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn precedence(&self) -> Option<u8> {
|
||||
Some(match self {
|
||||
$($(Self::$op => ${ignore($prec)} ${index(1)},
|
||||
$(Self::$assign => 0,)?)*)*
|
||||
_ => return None,
|
||||
} + 1)
|
||||
}
|
||||
|
||||
#[allow(non_upper_case_globals)]
|
||||
fn from_ident(ident: &[u8]) -> Self {
|
||||
$(const $keyword: &[u8] = $keyword_lit.as_bytes();)*
|
||||
match ident {
|
||||
$($keyword => Self::$keyword,)*
|
||||
_ => Self::Ident,
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(non_upper_case_globals)]
|
||||
fn from_ct_ident(ident: &[u8]) -> Self {
|
||||
$(const $const_keyword: &[u8] = $const_keyword_lit.as_bytes();)*
|
||||
match ident {
|
||||
$($const_keyword => Self::$const_keyword,)*
|
||||
_ => Self::CtIdent,
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone, Copy, Hash, PartialOrd, Ord)]
|
||||
#[repr(u8)]
|
||||
pub enum TokenKind {
|
||||
Not = b'!',
|
||||
DQuote = b'"',
|
||||
Pound = b'#',
|
||||
CtIdent = b'$',
|
||||
Mod = b'%',
|
||||
Band = b'&',
|
||||
Quote = b'\'',
|
||||
LParen = b'(',
|
||||
RParen = b')',
|
||||
Mul = b'*',
|
||||
Add = b'+',
|
||||
Comma = b',',
|
||||
Sub = b'-',
|
||||
Dot = b'.',
|
||||
Div = b'/',
|
||||
// Unused = 2-6
|
||||
Shl = b'<' - 5,
|
||||
// Unused = 8
|
||||
Shr = b'>' - 5,
|
||||
Colon = b':',
|
||||
Semi = b';',
|
||||
Lt = b'<',
|
||||
Assign = b'=',
|
||||
Gt = b'>',
|
||||
Que = b'?',
|
||||
Directive = b'@',
|
||||
|
||||
Comment,
|
||||
|
||||
Ident,
|
||||
Number,
|
||||
Float,
|
||||
Eof,
|
||||
|
||||
Ct,
|
||||
|
||||
Ctor,
|
||||
Tupl,
|
||||
Arr,
|
||||
TArrow,
|
||||
Range,
|
||||
|
||||
Or,
|
||||
And,
|
||||
|
||||
// Unused = R-Z
|
||||
LBrack = b'[',
|
||||
BSlash = b'\\',
|
||||
RBrack = b']',
|
||||
Xor = b'^',
|
||||
Under = b'_',
|
||||
Tick = b'`',
|
||||
|
||||
Slf,
|
||||
Return,
|
||||
If,
|
||||
Match,
|
||||
Else,
|
||||
Loop,
|
||||
Break,
|
||||
Continue,
|
||||
Fn,
|
||||
Struct,
|
||||
Packed,
|
||||
Enum,
|
||||
Union,
|
||||
True,
|
||||
False,
|
||||
Null,
|
||||
Idk,
|
||||
Die,
|
||||
Defer,
|
||||
|
||||
CtLoop,
|
||||
|
||||
// Unused = a-z
|
||||
LBrace = b'{',
|
||||
Bor = b'|',
|
||||
RBrace = b'}',
|
||||
Tilde = b'~',
|
||||
|
||||
Decl = b':' + 128,
|
||||
Eq = b'=' + 128,
|
||||
Ne = b'!' + 128,
|
||||
Le = b'<' + 128,
|
||||
Ge = b'>' + 128,
|
||||
|
||||
BorAss = b'|' + 128,
|
||||
AddAss = b'+' + 128,
|
||||
SubAss = b'-' + 128,
|
||||
MulAss = b'*' + 128,
|
||||
DivAss = b'/' + 128,
|
||||
ModAss = b'%' + 128,
|
||||
XorAss = b'^' + 128,
|
||||
BandAss = b'&' + 128,
|
||||
ShrAss = b'>' - 5 + 128,
|
||||
ShlAss = b'<' - 5 + 128,
|
||||
}
|
||||
|
||||
impl core::fmt::Debug for TokenKind {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
core::fmt::Display::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl TokenKind {
|
||||
pub fn ass_op(self) -> Option<Self> {
|
||||
let id = (self as u8).saturating_sub(128);
|
||||
if ascii_mask(b"|+-*/%^&79") & (1u128 << id) == 0 {
|
||||
return None;
|
||||
}
|
||||
Some(unsafe { core::mem::transmute::<u8, Self>(id) })
|
||||
}
|
||||
|
||||
pub fn is_comutative(self) -> bool {
|
||||
use TokenKind as S;
|
||||
matches!(self, S::Eq | S::Ne | S::Bor | S::Xor | S::Band | S::Add | S::Mul)
|
||||
}
|
||||
|
||||
pub fn is_compatison(self) -> bool {
|
||||
matches!(self, Self::Lt | Self::Gt | Self::Ge | Self::Le | Self::Ne | Self::Eq)
|
||||
}
|
||||
|
||||
pub fn is_supported_float_op(self) -> bool {
|
||||
matches!(
|
||||
self,
|
||||
Self::Add
|
||||
| Self::Sub
|
||||
| Self::Mul
|
||||
| Self::Div
|
||||
| Self::Eq
|
||||
| Self::Ne
|
||||
| Self::Le
|
||||
| Self::Ge
|
||||
| Self::Lt
|
||||
| Self::Gt
|
||||
)
|
||||
}
|
||||
|
||||
pub fn apply_binop(self, a: i64, b: i64, float: bool) -> i64 {
|
||||
if float {
|
||||
debug_assert!(self.is_supported_float_op());
|
||||
let [a, b] = [f64::from_bits(a as _), f64::from_bits(b as _)];
|
||||
let res = match self {
|
||||
Self::Add => a + b,
|
||||
Self::Sub => a - b,
|
||||
Self::Mul => a * b,
|
||||
Self::Div => a / b,
|
||||
Self::Eq => return (a == b) as i64,
|
||||
Self::Ne => return (a != b) as i64,
|
||||
Self::Lt => return (a < b) as i64,
|
||||
Self::Gt => return (a > b) as i64,
|
||||
Self::Le => return (a >= b) as i64,
|
||||
Self::Ge => return (a <= b) as i64,
|
||||
_ => todo!("floating point op: {self}"),
|
||||
};
|
||||
|
||||
return res.to_bits() as _;
|
||||
}
|
||||
|
||||
match self {
|
||||
Self::Add => a.wrapping_add(b),
|
||||
Self::Sub => a.wrapping_sub(b),
|
||||
Self::Mul => a.wrapping_mul(b),
|
||||
Self::Div if b == 0 => 0,
|
||||
Self::Div => a.wrapping_div(b),
|
||||
Self::Shl => a.wrapping_shl(b as _),
|
||||
Self::Eq => (a == b) as i64,
|
||||
Self::Ne => (a != b) as i64,
|
||||
Self::Lt => (a < b) as i64,
|
||||
Self::Gt => (a > b) as i64,
|
||||
Self::Le => (a >= b) as i64,
|
||||
Self::Ge => (a <= b) as i64,
|
||||
Self::Band => a & b,
|
||||
Self::Bor => a | b,
|
||||
Self::Xor => a ^ b,
|
||||
Self::Mod if b == 0 => 0,
|
||||
Self::Mod => a.wrapping_rem(b),
|
||||
Self::Shr => a.wrapping_shr(b as _),
|
||||
s => todo!("{s}"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_homogenous(&self) -> bool {
|
||||
self.precedence() != Self::Eq.precedence()
|
||||
&& self.precedence() != Self::Gt.precedence()
|
||||
&& self.precedence() != Self::Eof.precedence()
|
||||
}
|
||||
|
||||
pub fn apply_unop(&self, value: i64, float: bool) -> i64 {
|
||||
match self {
|
||||
Self::Sub if float => (-f64::from_bits(value as _)).to_bits() as _,
|
||||
Self::Sub => value.wrapping_neg(),
|
||||
Self::Not => (value == 0) as _,
|
||||
Self::Float if float => value,
|
||||
Self::Float => (value as f64).to_bits() as _,
|
||||
Self::Number if float => f64::from_bits(value as _) as _,
|
||||
Self::Number => value,
|
||||
s => todo!("{s}"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn closing(&self) -> Option<TokenKind> {
|
||||
Some(match self {
|
||||
Self::Ctor => Self::RBrace,
|
||||
Self::Tupl => Self::RParen,
|
||||
Self::LParen => Self::RParen,
|
||||
Self::LBrack => Self::RBrack,
|
||||
Self::LBrace => Self::RBrace,
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
}
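// Hypothetical smoke test, not in the original diff: the constant folders above use
// wrapping integer arithmetic, fold division by zero to 0, and pass floats around as
// `f64::to_bits` patterns.
#[cfg(test)]
#[test]
fn const_folding_smoke() {
    use TokenKind as T;
    assert_eq!(T::Add.apply_binop(i64::MAX, 1, false), i64::MIN);
    assert_eq!(T::Div.apply_binop(7, 0, false), 0);
    let two = T::Add.apply_binop(1.0f64.to_bits() as i64, 1.0f64.to_bits() as i64, true);
    assert_eq!(f64::from_bits(two as u64), 2.0);
}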
|
||||
|
||||
gen_token_kind! {
|
||||
pub enum TokenKind {
|
||||
#[patterns]
|
||||
CtIdent,
|
||||
Ident,
|
||||
Number,
|
||||
Float,
|
||||
Eof,
|
||||
Directive,
|
||||
#[keywords]
|
||||
Slf = "Self",
|
||||
Return = "return",
|
||||
If = "if",
|
||||
Match = "match",
|
||||
Else = "else",
|
||||
Loop = "loop",
|
||||
Break = "break",
|
||||
Continue = "continue",
|
||||
Fn = "fn",
|
||||
Struct = "struct",
|
||||
Packed = "packed",
|
||||
Enum = "enum",
|
||||
Union = "union",
|
||||
True = "true",
|
||||
False = "false",
|
||||
Null = "null",
|
||||
Idk = "idk",
|
||||
Die = "die",
|
||||
Defer = "defer",
|
||||
Under = "_",
|
||||
#[const_keywords]
|
||||
CtLoop = "loop",
|
||||
#[punkt]
|
||||
Ctor = ".{",
|
||||
Tupl = ".(",
|
||||
Arr = ".[",
|
||||
TArrow = "=>",
|
||||
Range = "..",
|
||||
// #define OP: each `#[prec]` delimits a level of precedence from lowest to highest
|
||||
#[ops]
|
||||
#[prec]
|
||||
// this also includes all `<op>=` tokens
|
||||
Decl = ":=",
|
||||
Assign = "=",
|
||||
#[prec]
|
||||
Or = "||",
|
||||
#[prec]
|
||||
And = "&&",
|
||||
#[prec]
|
||||
Bor = "|" => BorAss,
|
||||
#[prec]
|
||||
Xor = "^" => XorAss,
|
||||
#[prec]
|
||||
Band = "&" => BandAss,
|
||||
#[prec]
|
||||
Eq = "==",
|
||||
Ne = "!=",
|
||||
#[prec]
|
||||
Le = "<=",
|
||||
Ge = ">=",
|
||||
Lt = "<",
|
||||
Gt = ">",
|
||||
#[prec]
|
||||
Shl = "<<" => ShlAss,
|
||||
Shr = ">>" => ShrAss,
|
||||
#[prec]
|
||||
Add = "+" => AddAss,
|
||||
Sub = "-" => SubAss,
|
||||
#[prec]
|
||||
Mul = "*" => MulAss,
|
||||
Div = "/" => DivAss,
|
||||
Mod = "%" => ModAss,
|
||||
}
|
||||
}
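// Hypothetical test, not in the original diff: each successive `#[prec]` group in
// `gen_token_kind!` binds tighter, and `ass_op` strips the `+ 128` assignment
// encoding back to the base operator.
#[cfg(test)]
#[test]
fn precedence_and_assign_ops() {
    assert!(TokenKind::Mul.precedence() > TokenKind::Add.precedence());
    assert!(TokenKind::Add.precedence() > TokenKind::Eq.precedence());
    assert!(TokenKind::Eq.precedence() > TokenKind::Or.precedence());
    assert_eq!(TokenKind::AddAss.ass_op(), Some(TokenKind::Add));
    assert_eq!(TokenKind::ShlAss.ass_op(), Some(TokenKind::Shl));
}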
|
||||
|
||||
pub struct Lexer<'a> {
|
||||
pos: u32,
|
||||
source: &'a [u8],
|
||||
}
|
||||
|
||||
impl<'a> Lexer<'a> {
|
||||
pub fn new(input: &'a str) -> Self {
|
||||
Self::restore(input, 0)
|
||||
}
|
||||
|
||||
pub fn uses(input: &'a str) -> impl Iterator<Item = &'a str> {
|
||||
let mut s = Self::new(input);
|
||||
core::iter::from_fn(move || loop {
|
||||
let t = s.eat();
|
||||
if t.kind == TokenKind::Eof {
|
||||
return None;
|
||||
}
|
||||
if t.kind == TokenKind::Directive
|
||||
&& s.slice(t.range()) == "use"
|
||||
&& s.eat().kind == TokenKind::LParen
|
||||
{
|
||||
let t = s.eat();
|
||||
if t.kind == TokenKind::DQuote {
|
||||
return Some(&s.slice(t.range())[1..t.range().len() - 1]);
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn restore(input: &'a str, pos: u32) -> Self {
|
||||
Self { pos, source: input.as_bytes() }
|
||||
}
|
||||
|
||||
pub fn source(&self) -> &'a str {
|
||||
unsafe { core::str::from_utf8_unchecked(self.source) }
|
||||
}
|
||||
|
||||
pub fn slice(&self, tok: core::ops::Range<usize>) -> &'a str {
|
||||
unsafe { core::str::from_utf8_unchecked(&self.source[tok]) }
|
||||
}
|
||||
|
||||
pub fn taste(&self) -> Token {
|
||||
Lexer { pos: self.pos, source: self.source }.eat()
|
||||
}
|
||||
|
||||
fn peek_n<const N: usize>(&self) -> Option<&[u8; N]> {
|
||||
if core::intrinsics::unlikely(self.pos as usize + N > self.source.len()) {
|
||||
None
|
||||
} else {
|
||||
Some(unsafe {
|
||||
self.source
|
||||
.get_unchecked(self.pos as usize..self.pos as usize + N)
|
||||
.first_chunk()
|
||||
.unwrap_unchecked()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn peek(&self) -> Option<u8> {
|
||||
if core::intrinsics::unlikely(self.pos >= self.source.len() as u32) {
|
||||
None
|
||||
} else {
|
||||
Some(unsafe { *self.source.get_unchecked(self.pos as usize) })
|
||||
}
|
||||
}
|
||||
|
||||
fn advance(&mut self) -> Option<u8> {
|
||||
let c = self.peek()?;
|
||||
self.pos += 1;
|
||||
Some(c)
|
||||
}
|
||||
|
||||
pub fn last(&mut self) -> Token {
|
||||
let mut token = self.eat();
|
||||
loop {
|
||||
let next = self.eat();
|
||||
if next.kind == TokenKind::Eof {
|
||||
break;
|
||||
}
|
||||
token = next;
|
||||
}
|
||||
token
|
||||
}
|
||||
|
||||
pub fn eat(&mut self) -> Token {
|
||||
use TokenKind as T;
|
||||
loop {
|
||||
let mut start = self.pos;
|
||||
|
||||
let Some(c) = self.advance() else {
|
||||
return Token { kind: T::Eof, start, end: self.pos };
|
||||
};
|
||||
|
||||
let advance_ident = |s: &mut Self| {
|
||||
while let Some(b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_' | 127..) = s.peek() {
|
||||
s.advance();
|
||||
}
|
||||
};
|
||||
|
||||
let identity = |s: u8| unsafe { core::mem::transmute::<u8, T>(s) };
|
||||
|
||||
let kind = match c {
|
||||
..=b' ' => continue,
|
||||
b'0' if self.advance_if(b'x') => {
|
||||
while let Some(b'0'..=b'9' | b'A'..=b'F' | b'a'..=b'f') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
T::Number
|
||||
}
|
||||
b'0' if self.advance_if(b'b') => {
|
||||
while let Some(b'0' | b'1') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
T::Number
|
||||
}
|
||||
b'0' if self.advance_if(b'o') => {
|
||||
while let Some(b'0'..=b'7') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
T::Number
|
||||
}
|
||||
b'0'..=b'9' => {
|
||||
while let Some(b'0'..=b'9') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
|
||||
if self
|
||||
.peek_n()
|
||||
.map_or_else(|| self.peek() == Some(b'.'), |&[a, b]| a == b'.' && b != b'.')
|
||||
{
|
||||
self.pos += 1;
|
||||
while let Some(b'0'..=b'9') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
T::Float
|
||||
} else {
|
||||
T::Number
|
||||
}
|
||||
}
|
||||
b'a'..=b'z' | b'A'..=b'Z' | b'_' | 127.. => {
|
||||
advance_ident(self);
|
||||
let ident = &self.source[start as usize..self.pos as usize];
|
||||
T::from_ident(ident)
|
||||
}
|
||||
b'"' | b'\'' => loop {
|
||||
match self.advance() {
|
||||
Some(b'\\') => _ = self.advance(),
|
||||
Some(nc) if nc == c => break identity(c),
|
||||
Some(_) => {}
|
||||
None => break T::Eof,
|
||||
}
|
||||
},
|
||||
b'/' if self.advance_if(b'/') => {
|
||||
while let Some(l) = self.peek()
|
||||
&& l != b'\n'
|
||||
{
|
||||
self.pos += 1;
|
||||
}
|
||||
|
||||
let end = self.source[..self.pos as usize]
|
||||
.iter()
|
||||
.rposition(|&b| !b.is_ascii_whitespace())
|
||||
.map_or(self.pos, |i| i as u32 + 1);
|
||||
|
||||
return Token { kind: T::Comment, start, end };
|
||||
}
|
||||
b'/' if self.advance_if(b'*') => {
|
||||
let mut depth = 1;
|
||||
while let Some(l) = self.advance() {
|
||||
match l {
|
||||
b'/' if self.advance_if(b'*') => depth += 1,
|
||||
b'*' if self.advance_if(b'/') => match depth {
|
||||
1 => break,
|
||||
_ => depth -= 1,
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
T::Comment
|
||||
}
|
||||
b'.' if self.advance_if(b'{') => T::Ctor,
|
||||
b'.' if self.advance_if(b'(') => T::Tupl,
|
||||
b'.' if self.advance_if(b'[') => T::Arr,
|
||||
b'.' if self.advance_if(b'.') => T::Range,
|
||||
b'=' if self.advance_if(b'>') => T::TArrow,
|
||||
b'&' if self.advance_if(b'&') => T::And,
|
||||
b'|' if self.advance_if(b'|') => T::Or,
|
||||
b'$' if self.advance_if(b':') => T::Ct,
|
||||
b'@' => {
|
||||
start += 1;
|
||||
advance_ident(self);
|
||||
identity(c)
|
||||
}
|
||||
b'$' => {
|
||||
start += 1;
|
||||
advance_ident(self);
|
||||
let ident = &self.source[start as usize..self.pos as usize];
|
||||
T::from_ct_ident(ident)
|
||||
}
|
||||
b'<' | b'>' if self.advance_if(c) => {
|
||||
identity(c - 5 + 128 * self.advance_if(b'=') as u8)
|
||||
}
|
||||
b':' | b'=' | b'!' | b'<' | b'>' | b'|' | b'+' | b'-' | b'*' | b'/' | b'%'
|
||||
| b'^' | b'&'
|
||||
if self.advance_if(b'=') =>
|
||||
{
|
||||
identity(c + 128)
|
||||
}
|
||||
_ => identity(c),
|
||||
};
|
||||
|
||||
return Token { kind, start, end: self.pos };
|
||||
}
|
||||
}
|
||||
|
||||
fn advance_if(&mut self, arg: u8) -> bool {
|
||||
if self.peek() == Some(arg) {
|
||||
self.advance();
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn line_col(bytes: &[u8], pos: u32) -> (usize, usize) {
|
||||
bytes[..pos as usize]
|
||||
.split(|&b| b == b'\n')
|
||||
.map(<[u8]>::len)
|
||||
.enumerate()
|
||||
.last()
|
||||
.map(|(line, col)| (line + 1, col + 1))
|
||||
.unwrap_or((1, 1))
|
||||
}
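// Hypothetical smoke test, not in the original diff: walking a small source string
// shows multi-byte tokens (`:=`) and keywords being recognised, and `last` skipping
// ahead to the final non-EOF token.
#[cfg(test)]
#[test]
fn lexer_smoke() {
    let mut lx = Lexer::new("main := fn(): void { return 0 }");
    assert_eq!(lx.eat().kind, TokenKind::Ident);
    assert_eq!(lx.eat().kind, TokenKind::Decl);
    assert_eq!(lx.eat().kind, TokenKind::Fn);
    assert_eq!(lx.eat().kind, TokenKind::LParen);
    assert_eq!(lx.eat().kind, TokenKind::RParen);
    assert_eq!(lx.eat().kind, TokenKind::Colon);
    assert_eq!(lx.last().kind, TokenKind::RBrace);
}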
|
551  lang/src/lib.rs  Normal file
@@ -0,0 +1,551 @@
#![feature(
|
||||
iter_array_chunks,
|
||||
assert_matches,
|
||||
let_chains,
|
||||
if_let_guard,
|
||||
macro_metavar_expr,
|
||||
anonymous_lifetime_in_impl_trait,
|
||||
core_intrinsics,
|
||||
never_type,
|
||||
unwrap_infallible,
|
||||
slice_partition_dedup,
|
||||
portable_simd,
|
||||
iter_collect_into,
|
||||
ptr_metadata,
|
||||
slice_ptr_get,
|
||||
slice_take,
|
||||
map_try_insert,
|
||||
extract_if,
|
||||
ptr_internals,
|
||||
iter_intersperse,
|
||||
str_from_raw_parts,
|
||||
ptr_sub_ptr,
|
||||
slice_from_ptr_range,
|
||||
iter_next_chunk,
|
||||
pointer_is_aligned_to,
|
||||
maybe_uninit_fill,
|
||||
array_chunks,
|
||||
array_windows
|
||||
)]
|
||||
#![warn(clippy::dbg_macro)]
|
||||
#![expect(internal_features)]
|
||||
#![no_std]
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
pub use fs::*;
|
||||
pub use utils::Ent;
|
||||
use {self::ty::Builtin, alloc::vec::Vec};
|
||||
|
||||
#[macro_use]
|
||||
extern crate alloc;
|
||||
|
||||
#[cfg(any(feature = "std", test))]
|
||||
extern crate std;
|
||||
|
||||
#[cfg(test)]
|
||||
const README: &str = include_str!("../README.md");
|
||||
|
||||
#[cfg(test)]
|
||||
#[macro_export]
|
||||
macro_rules! run_tests {
|
||||
($runner:path: $($name:ident;)*) => {$(
|
||||
#[test]
|
||||
fn $name() {
|
||||
$crate::run_test(core::any::type_name_of_val(&$name), stringify!($name), $crate::README, $runner);
|
||||
}
|
||||
)*};
|
||||
}
|
||||
|
||||
pub mod fmt;
|
||||
#[cfg(any(feature = "std", test))]
|
||||
pub mod fs;
|
||||
pub mod fuzz;
|
||||
pub mod lexer;
|
||||
pub mod nodes;
|
||||
pub mod parser;
|
||||
pub mod son;
|
||||
pub mod ty;
|
||||
|
||||
pub mod backend {
|
||||
use {
|
||||
crate::{
|
||||
nodes::Nodes,
|
||||
parser,
|
||||
ty::{self, Module, Types},
|
||||
utils::EntSlice,
|
||||
},
|
||||
alloc::{string::String, vec::Vec},
|
||||
};
|
||||
|
||||
pub mod hbvm;
|
||||
|
||||
pub struct AssemblySpec {
|
||||
pub entry: u32,
|
||||
pub code_length: u64,
|
||||
pub data_length: u64,
|
||||
}
|
||||
|
||||
pub trait Backend {
|
||||
fn assemble_reachable(
|
||||
&mut self,
|
||||
from: ty::Func,
|
||||
types: &Types,
|
||||
to: &mut Vec<u8>,
|
||||
) -> AssemblySpec;
|
||||
fn disasm<'a>(
|
||||
&'a self,
|
||||
slice: &[u8],
|
||||
eca_handler: &mut dyn FnMut(&mut &[u8]),
|
||||
types: &'a Types,
|
||||
files: &'a EntSlice<Module, parser::Ast>,
|
||||
output: &mut String,
|
||||
) -> Result<(), hbbytecode::DisasmError<'a>>;
|
||||
fn emit_body(
|
||||
&mut self,
|
||||
id: ty::Func,
|
||||
ci: &Nodes,
|
||||
tys: &Types,
|
||||
files: &EntSlice<Module, parser::Ast>,
|
||||
);
|
||||
|
||||
fn emit_ct_body(
|
||||
&mut self,
|
||||
id: ty::Func,
|
||||
ci: &Nodes,
|
||||
tys: &Types,
|
||||
files: &EntSlice<Module, parser::Ast>,
|
||||
) {
|
||||
self.emit_body(id, ci, tys, files);
|
||||
}
|
||||
|
||||
fn assemble_bin(&mut self, from: ty::Func, types: &Types, to: &mut Vec<u8>) {
|
||||
self.assemble_reachable(from, types, to);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mod utils;
|
||||
|
||||
mod debug {
|
||||
use core::fmt::Debug;
|
||||
|
||||
pub fn panicking() -> bool {
|
||||
#[cfg(feature = "std")]
|
||||
{
|
||||
std::thread::panicking()
|
||||
}
|
||||
#[cfg(not(feature = "std"))]
|
||||
{
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(debug_assertions, feature = "std"))]
|
||||
pub type Trace = std::rc::Rc<(std::backtrace::Backtrace, std::string::String)>;
|
||||
#[cfg(not(all(debug_assertions, feature = "std")))]
|
||||
pub type Trace = ();
|
||||
|
||||
pub fn trace(_hint: impl Debug) -> Trace {
|
||||
#[cfg(all(debug_assertions, feature = "std"))]
|
||||
{
|
||||
std::rc::Rc::new((std::backtrace::Backtrace::capture(), format!("{_hint:?}")))
|
||||
}
|
||||
#[cfg(not(all(debug_assertions, feature = "std")))]
|
||||
{}
|
||||
}
|
||||
}
|
||||
|
||||
mod ctx_map {
|
||||
use core::hash::BuildHasher;
|
||||
|
||||
pub type Hash = u64;
|
||||
pub type HashBuilder = core::hash::BuildHasherDefault<IdentityHasher>;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct IdentityHasher(u64);
|
||||
|
||||
impl core::hash::Hasher for IdentityHasher {
|
||||
fn finish(&self) -> u64 {
|
||||
self.0
|
||||
}
|
||||
|
||||
fn write(&mut self, _: &[u8]) {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn write_u64(&mut self, i: u64) {
|
||||
self.0 = i;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Key<T> {
|
||||
pub value: T,
|
||||
pub hash: Hash,
|
||||
}
|
||||
|
||||
impl<T> core::hash::Hash for Key<T> {
|
||||
fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
|
||||
state.write_u64(self.hash);
|
||||
}
|
||||
}
|
||||
|
||||
pub trait CtxEntry {
|
||||
type Ctx: ?Sized;
|
||||
type Key<'a>: Eq + core::hash::Hash;
|
||||
|
||||
fn key<'a>(&self, ctx: &'a Self::Ctx) -> Self::Key<'a>;
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct CtxMap<T> {
|
||||
inner: hashbrown::HashMap<Key<T>, (), HashBuilder>,
|
||||
}
|
||||
|
||||
impl<T> Default for CtxMap<T> {
|
||||
fn default() -> Self {
|
||||
Self { inner: Default::default() }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: CtxEntry> CtxMap<T> {
|
||||
pub fn entry<'a, 'b>(
|
||||
&'a mut self,
|
||||
key: T::Key<'b>,
|
||||
ctx: &'b T::Ctx,
|
||||
) -> (hashbrown::hash_map::RawEntryMut<'a, Key<T>, (), HashBuilder>, Hash) {
|
||||
let hash = crate::FnvBuildHasher::default().hash_one(&key);
|
||||
(self.inner.raw_entry_mut().from_hash(hash, |k| k.value.key(ctx) == key), hash)
|
||||
}
|
||||
|
||||
pub fn get<'a>(&self, key: T::Key<'a>, ctx: &'a T::Ctx) -> Option<&T> {
|
||||
let hash = crate::FnvBuildHasher::default().hash_one(&key);
|
||||
self.inner
|
||||
.raw_entry()
|
||||
.from_hash(hash, |k| k.value.key(ctx) == key)
|
||||
.map(|(k, _)| &k.value)
|
||||
}
|
||||
|
||||
pub fn clear(&mut self) {
|
||||
self.inner.clear();
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, value: &T, ctx: &T::Ctx) -> Option<T> {
|
||||
let (entry, _) = self.entry(value.key(ctx), ctx);
|
||||
match entry {
|
||||
hashbrown::hash_map::RawEntryMut::Occupied(o) => Some(o.remove_entry().0.value),
|
||||
hashbrown::hash_map::RawEntryMut::Vacant(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert<'a>(&mut self, key: T::Key<'a>, value: T, ctx: &'a T::Ctx) {
|
||||
let (entry, hash) = self.entry(key, ctx);
|
||||
match entry {
|
||||
hashbrown::hash_map::RawEntryMut::Occupied(_) => unreachable!(),
|
||||
hashbrown::hash_map::RawEntryMut::Vacant(v) => {
|
||||
_ = v.insert(Key { hash, value }, ())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_or_insert<'a>(
|
||||
&mut self,
|
||||
key: T::Key<'a>,
|
||||
ctx: &'a mut T::Ctx,
|
||||
with: impl FnOnce(&'a mut T::Ctx) -> T,
|
||||
) -> &mut T {
|
||||
let (entry, hash) = self.entry(key, unsafe { &mut *(&mut *ctx as *mut _) });
|
||||
match entry {
|
||||
hashbrown::hash_map::RawEntryMut::Occupied(o) => &mut o.into_key_value().0.value,
|
||||
hashbrown::hash_map::RawEntryMut::Vacant(v) => {
|
||||
&mut v.insert(Key { hash, value: with(ctx) }, ()).0.value
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
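// Hypothetical example, not in the original diff: the `ctx_map` pattern stores only
// a small handle in the map and asks the surrounding context for the real key when
// hashing or comparing, which is how interned types avoid owning their keys.
#[cfg(test)]
mod ctx_map_example {
    use crate::ctx_map::{CtxEntry, CtxMap};

    struct Interned(usize);

    impl CtxEntry for Interned {
        type Ctx = [&'static str];
        type Key<'a> = &'a str;

        fn key<'a>(&self, ctx: &'a Self::Ctx) -> Self::Key<'a> {
            ctx[self.0]
        }
    }

    #[test]
    fn lookup_through_context() {
        let names: &[&'static str] = &["foo", "bar"];
        let mut map = CtxMap::<Interned>::default();
        map.insert("foo", Interned(0), names);
        map.insert("bar", Interned(1), names);
        assert!(map.get("foo", names).is_some());
        assert!(map.get("baz", names).is_none());
    }
}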
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)]
|
||||
pub struct Ident(u32);
|
||||
|
||||
impl Ident {
|
||||
pub const INVALID: Self = Self(u32::MAX);
|
||||
const LEN_BITS: u32 = 6;
|
||||
|
||||
pub fn len(self) -> u32 {
|
||||
self.0 & ((1 << Self::LEN_BITS) - 1)
|
||||
}
|
||||
|
||||
pub fn is_type(self) -> bool {
|
||||
ty::Builtin::try_from(self) == Ok(ty::Builtin::TYPE)
|
||||
}
|
||||
|
||||
pub fn is_empty(self) -> bool {
|
||||
self.len() == 0
|
||||
}
|
||||
|
||||
pub fn is_null(self) -> bool {
|
||||
(self.0 >> Self::LEN_BITS) == 0
|
||||
}
|
||||
|
||||
pub fn pos(self) -> u32 {
|
||||
(self.0 >> Self::LEN_BITS).saturating_sub(1)
|
||||
}
|
||||
|
||||
pub fn new(pos: u32, len: u32) -> Option<Self> {
|
||||
(len < (1 << Self::LEN_BITS)).then_some(((pos + 1) << Self::LEN_BITS) | len).map(Self)
|
||||
}
|
||||
|
||||
pub fn range(self) -> core::ops::Range<usize> {
|
||||
let (len, pos) = (self.len() as usize, self.pos() as usize);
|
||||
pos..pos + len
|
||||
}
|
||||
|
||||
fn builtin(builtin: Builtin) -> Ident {
|
||||
Self(builtin.index() as _)
|
||||
}
|
||||
}
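// Hypothetical test, not in the original diff: `Ident` packs the identifier length
// into the low `LEN_BITS` bits and the position (offset by one, so an all-zero value
// means "null") into the rest of the u32.
#[cfg(test)]
#[test]
fn ident_packing() {
    let id = Ident::new(10, 4).unwrap();
    assert_eq!(id.len(), 4);
    assert_eq!(id.pos(), 10);
    assert_eq!(id.range(), 10..14);
    assert!(Ident::new(0, 1 << Ident::LEN_BITS).is_none());
}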
|
||||
|
||||
fn endoce_string(
|
||||
literal: &str,
|
||||
str: &mut Vec<u8>,
|
||||
report: impl Fn(&core::str::Bytes, &str),
|
||||
) -> Option<usize> {
|
||||
let report = |bytes: &core::str::Bytes, msg: &_| {
|
||||
report(bytes, msg);
|
||||
None::<u8>
|
||||
};
|
||||
|
||||
let decode_braces = |str: &mut Vec<u8>, bytes: &mut core::str::Bytes| {
|
||||
while let Some(b) = bytes.next()
|
||||
&& b != b'}'
|
||||
{
|
||||
let c = bytes.next().or_else(|| report(bytes, "incomplete escape sequence"))?;
|
||||
let decode = |b: u8| {
|
||||
Some(match b {
|
||||
b'0'..=b'9' => b - b'0',
|
||||
b'a'..=b'f' => b - b'a' + 10,
|
||||
b'A'..=b'F' => b - b'A' + 10,
|
||||
_ => report(bytes, "expected hex digit or '}'")?,
|
||||
})
|
||||
};
|
||||
str.push(decode(b)? << 4 | decode(c)?);
|
||||
}
|
||||
|
||||
Some(())
|
||||
};
|
||||
|
||||
let mut bytes = literal.bytes();
|
||||
let mut char_len = 0;
|
||||
while let Some(b) = bytes.next() {
|
||||
char_len += 1;
|
||||
if b != b'\\' {
|
||||
str.push(b);
|
||||
continue;
|
||||
}
|
||||
let b = match bytes.next().or_else(|| report(&bytes, "incomplete escape sequence"))? {
|
||||
b'n' => b'\n',
|
||||
b'r' => b'\r',
|
||||
b't' => b'\t',
|
||||
b'\\' => b'\\',
|
||||
b'\'' => b'\'',
|
||||
b'"' => b'"',
|
||||
b'0' => b'\0',
|
||||
b'{' => {
|
||||
decode_braces(str, &mut bytes)?;
|
||||
continue;
|
||||
}
|
||||
_ => report(&bytes, "unknown escape sequence, expected [nrt\\\"'{0]")?,
|
||||
};
|
||||
str.push(b);
|
||||
}
|
||||
|
||||
Some(char_len)
|
||||
}
|
||||
|
||||
pub fn quad_sort<T>(mut slice: &mut [T], mut cmp: impl FnMut(&T, &T) -> core::cmp::Ordering) {
|
||||
while let Some(it) = slice.take_first_mut() {
|
||||
for ot in &mut *slice {
|
||||
if cmp(it, ot) == core::cmp::Ordering::Greater {
|
||||
core::mem::swap(it, ot);
|
||||
}
|
||||
}
|
||||
}
|
||||
debug_assert!(slice.is_sorted_by(|a, b| cmp(a, b) != core::cmp::Ordering::Greater));
|
||||
}
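// Illustrative use, not in the original diff: `quad_sort` is an O(n^2) selection-style
// sort that swaps the minimum of the remaining tail toward the front on every pass,
// which is good enough for the small slices it is applied to.
#[cfg(test)]
#[test]
fn quad_sort_small() {
    let mut v = [3u32, 1, 2, 2];
    quad_sort(&mut v, u32::cmp);
    assert_eq!(v, [1, 2, 2, 3]);
}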
|
||||
|
||||
type FnvBuildHasher = core::hash::BuildHasherDefault<FnvHasher>;
|
||||
|
||||
struct FnvHasher(u64);
|
||||
|
||||
impl core::hash::Hasher for FnvHasher {
|
||||
fn finish(&self) -> u64 {
|
||||
self.0
|
||||
}
|
||||
|
||||
fn write(&mut self, bytes: &[u8]) {
|
||||
self.0 = bytes.iter().fold(self.0, |hash, &byte| {
|
||||
let mut hash = hash;
|
||||
hash ^= byte as u64;
|
||||
hash = hash.wrapping_mul(0x100000001B3);
|
||||
hash
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for FnvHasher {
|
||||
fn default() -> Self {
|
||||
Self(0xCBF29CE484222325)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn run_test(
|
||||
name: &'static str,
|
||||
ident: &'static str,
|
||||
input: &'static str,
|
||||
test: fn(&'static str, &'static str, &mut alloc::string::String),
|
||||
) {
|
||||
use std::{
|
||||
io::Write,
|
||||
path::PathBuf,
|
||||
string::{String, ToString},
|
||||
};
|
||||
|
||||
let filter = std::env::var("PT_FILTER").unwrap_or_default();
|
||||
if !filter.is_empty() && !name.contains(&filter) {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut output = String::new();
|
||||
{
|
||||
struct DumpOut<'a>(&'a mut String);
|
||||
impl Drop for DumpOut<'_> {
|
||||
fn drop(&mut self) {
|
||||
if std::thread::panicking() {
|
||||
std::println!("{}", self.0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let dump = DumpOut(&mut output);
|
||||
test(ident, input, dump.0);
|
||||
}
|
||||
|
||||
let mut root = PathBuf::from(
|
||||
std::env::var("PT_TEST_ROOT")
|
||||
.unwrap_or(concat!(env!("CARGO_MANIFEST_DIR"), "/tests").to_string()),
|
||||
);
|
||||
root.push(name.replace("::", "_").replace(concat!(env!("CARGO_PKG_NAME"), "_"), ""));
|
||||
root.set_extension("txt");
|
||||
|
||||
let expected = std::fs::read_to_string(&root).unwrap_or_default();
|
||||
|
||||
if output == expected {
|
||||
return;
|
||||
}
|
||||
|
||||
if std::env::var("PT_UPDATE").is_ok() {
|
||||
std::fs::write(&root, output).unwrap();
|
||||
return;
|
||||
}
|
||||
|
||||
if !root.exists() {
|
||||
std::fs::create_dir_all(root.parent().unwrap()).unwrap();
|
||||
std::fs::write(&root, vec![]).unwrap();
|
||||
}
|
||||
|
||||
let mut proc = std::process::Command::new("diff")
|
||||
.arg("-u")
|
||||
.arg("--color")
|
||||
.arg(&root)
|
||||
.arg("-")
|
||||
.stdin(std::process::Stdio::piped())
|
||||
.stdout(std::process::Stdio::inherit())
|
||||
.spawn()
|
||||
.unwrap();
|
||||
|
||||
proc.stdin.as_mut().unwrap().write_all(output.as_bytes()).unwrap();
|
||||
|
||||
proc.wait().unwrap();
|
||||
|
||||
panic!("test failed");
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn test_parse_files(
|
||||
ident: &str,
|
||||
input: &str,
|
||||
ctx: &mut parser::Ctx,
|
||||
) -> (Vec<parser::Ast>, Vec<Vec<u8>>) {
|
||||
use {
|
||||
self::parser::FileKind,
|
||||
std::{borrow::ToOwned, string::ToString},
|
||||
};
|
||||
|
||||
fn find_block<'a>(mut input: &'a str, test_name: &str) -> &'a str {
|
||||
const CASE_PREFIX: &str = "#### ";
|
||||
const CASE_SUFFIX: &str = "\n```hb";
|
||||
loop {
|
||||
let Some(pos) = input.find(CASE_PREFIX) else {
|
||||
unreachable!("test {test_name} not found");
|
||||
};
|
||||
|
||||
input = unsafe { input.get_unchecked(pos + CASE_PREFIX.len()..) };
|
||||
if !input.starts_with(test_name) {
|
||||
continue;
|
||||
}
|
||||
input = unsafe { input.get_unchecked(test_name.len()..) };
|
||||
if !input.starts_with(CASE_SUFFIX) {
|
||||
continue;
|
||||
}
|
||||
input = unsafe { input.get_unchecked(CASE_SUFFIX.len()..) };
|
||||
|
||||
let end = input.find("```").unwrap_or(input.len());
|
||||
break unsafe { input.get_unchecked(..end) };
|
||||
}
|
||||
}
|
||||
|
||||
let input = find_block(input, ident);
|
||||
|
||||
let mut module_map = Vec::new();
|
||||
let mut embed_map = Vec::new();
|
||||
let mut last_start = 0;
|
||||
let mut last_module_name = "test.hb";
|
||||
for (i, m) in input.match_indices("// in module: ") {
|
||||
if last_module_name.ends_with(".hb") {
|
||||
fmt::test::format(ident, input[last_start..i].trim());
|
||||
module_map.push((last_module_name, &input[last_start..i]));
|
||||
} else {
|
||||
embed_map.push((last_module_name, &input[last_start..i]));
|
||||
}
|
||||
let (module_name, _) = input[i + m.len()..].split_once('\n').unwrap();
|
||||
last_module_name = module_name;
|
||||
last_start = i + m.len() + module_name.len() + 1;
|
||||
}
|
||||
if last_module_name.ends_with(".hb") {
|
||||
fmt::test::format(ident, input[last_start..].trim());
|
||||
module_map.push((last_module_name, &input[last_start..]));
|
||||
} else {
|
||||
embed_map.push((last_module_name, &input[last_start..]));
|
||||
}
|
||||
|
||||
let mut loader = |path: &str, _: &str, kind| match kind {
|
||||
FileKind::Module => module_map
|
||||
.iter()
|
||||
.position(|&(name, _)| name == path)
|
||||
.ok_or("Module Not Found".to_string()),
|
||||
FileKind::Embed => embed_map
|
||||
.iter()
|
||||
.position(|&(name, _)| name == path)
|
||||
.ok_or("Embed Not Found".to_string()),
|
||||
};
|
||||
|
||||
(
|
||||
module_map
|
||||
.iter()
|
||||
.map(|&(path, content)| parser::Ast::new(path, content.to_owned(), ctx, &mut loader))
|
||||
.collect(),
|
||||
embed_map.iter().map(|&(_, content)| content.to_owned().into_bytes()).collect(),
|
||||
)
|
||||
}
|
31  lang/src/main.rs  Normal file
@@ -0,0 +1,31 @@
#[cfg(feature = "std")]
|
||||
fn main() {
|
||||
use std::io::Write;
|
||||
|
||||
fn run(out: &mut Vec<u8>, warnings: &mut String) -> std::io::Result<()> {
|
||||
let args = std::env::args().collect::<Vec<_>>();
|
||||
let args = args.iter().map(String::as_str).collect::<Vec<_>>();
|
||||
let resolvers = &[("ableos", hblang::ABLEOS_PATH_RESOLVER)];
|
||||
let opts = hblang::Options::from_args(&args, out, resolvers)?;
|
||||
let file = args.iter().filter(|a| !a.starts_with('-')).nth(1).copied().unwrap_or("main.hb");
|
||||
|
||||
hblang::run_compiler(file, opts, out, warnings)
|
||||
}
|
||||
|
||||
log::set_logger(&hblang::fs::Logger).unwrap();
|
||||
log::set_max_level(log::LevelFilter::Error);
|
||||
|
||||
let mut out = Vec::new();
|
||||
let mut warnings = String::new();
|
||||
match run(&mut out, &mut warnings) {
|
||||
Ok(_) => {
|
||||
std::io::stderr().write_all(warnings.as_bytes()).unwrap();
|
||||
std::io::stdout().write_all(&out).unwrap()
|
||||
}
|
||||
Err(_) => {
|
||||
std::io::stderr().write_all(warnings.as_bytes()).unwrap();
|
||||
std::io::stderr().write_all(&out).unwrap();
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
2216  lang/src/nodes.rs  Normal file  (file diff suppressed because it is too large)
1841  lang/src/parser.rs  Normal file  (file diff suppressed because it is too large)
4505  lang/src/son.rs  Normal file  (file diff suppressed because it is too large)
1383  lang/src/ty.rs  Normal file  (file diff suppressed because it is too large)
689  lang/src/utils.rs  Normal file
@@ -0,0 +1,689 @@
use {
|
||||
alloc::alloc,
|
||||
core::{
|
||||
alloc::Layout,
|
||||
fmt::Debug,
|
||||
hint::unreachable_unchecked,
|
||||
marker::PhantomData,
|
||||
mem::MaybeUninit,
|
||||
ops::{Deref, DerefMut, Not, Range},
|
||||
ptr::Unique,
|
||||
},
|
||||
};
|
||||
|
||||
fn decide(b: bool, name: &'static str) -> Result<(), &'static str> {
|
||||
b.then_some(()).ok_or(name)
|
||||
}
|
||||
|
||||
pub fn is_snake_case(str: &str) -> Result<(), &'static str> {
|
||||
decide(str.bytes().all(|c| matches!(c, b'a'..=b'z' | b'0'..=b'9' | b'_')), "snake_case")
|
||||
}
|
||||
|
||||
pub fn is_pascal_case(str: &str) -> Result<(), &'static str> {
|
||||
decide(
|
||||
str.as_bytes()[0].is_ascii_uppercase() && str.bytes().all(|c| c.is_ascii_alphanumeric()),
|
||||
"PascalCase",
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_screaming_case(str: &str) -> Result<(), &'static str> {
|
||||
decide(str.bytes().all(|c| matches!(c, b'A'..=b'Z' | b'0'..=b'9' | b'_')), "SCREAMING_CASE")
|
||||
}
|
||||
|
||||
type Nid = u16;
|
||||
type BitSetUnit = usize;
|
||||
|
||||
pub union BitSet {
|
||||
inline: BitSetUnit,
|
||||
alloced: Unique<AllocedBitSet>,
|
||||
}
|
||||
|
||||
impl Debug for BitSet {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
f.debug_list().entries(self.iter()).finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for BitSet {
|
||||
fn clone(&self) -> Self {
|
||||
if self.is_inline() {
|
||||
Self { inline: unsafe { self.inline } }
|
||||
} else {
|
||||
let (data, _) = self.data_and_len();
|
||||
let (layout, _) = Self::layout(data.len());
|
||||
unsafe {
|
||||
let ptr = alloc::alloc(layout);
|
||||
ptr.copy_from_nonoverlapping(self.alloced.as_ptr() as _, layout.size());
|
||||
Self { alloced: Unique::new_unchecked(ptr as _) }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for BitSet {
|
||||
fn drop(&mut self) {
|
||||
if !self.is_inline() {
|
||||
unsafe {
|
||||
let cap = self.alloced.as_ref().cap;
|
||||
alloc::dealloc(self.alloced.as_ptr() as _, Self::layout(cap).0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for BitSet {
|
||||
fn default() -> Self {
|
||||
Self { inline: Self::FLAG }
|
||||
}
|
||||
}
|
||||
|
||||
impl BitSet {
|
||||
const FLAG: BitSetUnit = 1 << (Self::UNIT - 1);
|
||||
const INLINE_ELEMS: usize = Self::UNIT - 1;
|
||||
pub const UNIT: usize = core::mem::size_of::<BitSetUnit>() * 8;
|
||||
|
||||
pub fn with_capacity(len: usize) -> Self {
|
||||
let mut s = Self::default();
|
||||
s.reserve(len);
|
||||
s
|
||||
}
|
||||
|
||||
fn is_inline(&self) -> bool {
|
||||
unsafe { self.inline & Self::FLAG != 0 }
|
||||
}
|
||||
|
||||
fn data_and_len(&self) -> (&[BitSetUnit], usize) {
|
||||
unsafe {
|
||||
if self.is_inline() {
|
||||
(core::slice::from_ref(&self.inline), Self::INLINE_ELEMS)
|
||||
} else {
|
||||
let small_vec = self.alloced.as_ref();
|
||||
(
|
||||
core::slice::from_raw_parts(
|
||||
&small_vec.data as *const _ as *const BitSetUnit,
|
||||
small_vec.cap,
|
||||
),
|
||||
small_vec.cap * Self::UNIT,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn data_mut_and_len(&mut self) -> (&mut [BitSetUnit], usize) {
|
||||
unsafe {
|
||||
if self.is_inline() {
|
||||
(core::slice::from_mut(&mut self.inline), Self::INLINE_ELEMS)
|
||||
} else {
|
||||
let small_vec = self.alloced.as_mut();
|
||||
(
|
||||
core::slice::from_raw_parts_mut(
|
||||
&mut small_vec.data as *mut _ as *mut BitSetUnit,
|
||||
small_vec.cap,
|
||||
),
|
||||
small_vec.cap * Self::UNIT,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn indexes(index: usize) -> (usize, usize) {
|
||||
(index / Self::UNIT, index % Self::UNIT)
|
||||
}
|
||||
|
||||
pub fn get(&self, index: Nid) -> bool {
|
||||
let index = index as usize;
|
||||
let (data, len) = self.data_and_len();
|
||||
if index >= len {
|
||||
return false;
|
||||
}
|
||||
let (elem, bit) = Self::indexes(index);
|
||||
(unsafe { *data.get_unchecked(elem) }) & (1 << bit) != 0
|
||||
}
|
||||
|
||||
pub fn set(&mut self, index: Nid) -> bool {
|
||||
let index = index as usize;
|
||||
let (mut data, len) = self.data_mut_and_len();
|
||||
if core::intrinsics::unlikely(index >= len) {
|
||||
self.grow((index + 1).next_power_of_two().max(4 * Self::UNIT));
|
||||
(data, _) = self.data_mut_and_len();
|
||||
}
|
||||
|
||||
let (elem, bit) = Self::indexes(index);
|
||||
debug_assert!(elem < data.len(), "{} < {}", elem, data.len());
|
||||
let elem = unsafe { data.get_unchecked_mut(elem) };
|
||||
let prev = *elem;
|
||||
*elem |= 1 << bit;
|
||||
*elem != prev
|
||||
}
|
||||
|
||||
fn grow(&mut self, size: usize) {
|
||||
debug_assert!(size.is_power_of_two());
|
||||
let slot_count = size / Self::UNIT;
|
||||
let (layout, off) = Self::layout(slot_count);
|
||||
let (ptr, prev_len) = unsafe {
|
||||
if self.is_inline() {
|
||||
let ptr = alloc::alloc(layout);
|
||||
*ptr.add(off).cast::<BitSetUnit>() = self.inline & !Self::FLAG;
|
||||
(ptr, 1)
|
||||
} else {
|
||||
let prev_len = self.alloced.as_ref().cap;
|
||||
let (prev_layout, _) = Self::layout(prev_len);
|
||||
(alloc::realloc(self.alloced.as_ptr() as _, prev_layout, layout.size()), prev_len)
|
||||
}
|
||||
};
|
||||
unsafe {
|
||||
MaybeUninit::fill(
|
||||
core::slice::from_raw_parts_mut(
|
||||
ptr.add(off).cast::<MaybeUninit<BitSetUnit>>().add(prev_len),
|
||||
slot_count - prev_len,
|
||||
),
|
||||
0,
|
||||
);
|
||||
*ptr.cast::<usize>() = slot_count;
|
||||
core::ptr::write(self, Self { alloced: Unique::new_unchecked(ptr as _) });
|
||||
}
|
||||
}
|
||||
|
||||
fn layout(slot_count: usize) -> (core::alloc::Layout, usize) {
|
||||
unsafe {
|
||||
core::alloc::Layout::new::<AllocedBitSet>()
|
||||
.extend(Layout::array::<BitSetUnit>(slot_count).unwrap_unchecked())
|
||||
.unwrap_unchecked()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> BitSetIter {
|
||||
if self.is_inline() {
|
||||
BitSetIter { index: 0, current: unsafe { self.inline & !Self::FLAG }, remaining: &[] }
|
||||
} else {
|
||||
let &[current, ref remaining @ ..] = self.data_and_len().0 else {
|
||||
unsafe { unreachable_unchecked() }
|
||||
};
|
||||
BitSetIter { index: 0, current, remaining }
|
||||
}
|
||||
}
|
||||
|
||||
pub fn clear(&mut self, len: usize) {
|
||||
self.reserve(len);
|
||||
self.clear_as_is();
|
||||
}
|
||||
|
||||
pub fn clear_as_is(&mut self) {
|
||||
if self.is_inline() {
|
||||
unsafe { self.inline &= Self::FLAG };
|
||||
} else {
|
||||
self.data_mut_and_len().0.fill(0);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn approx_unit_cap(&self) -> usize {
|
||||
self.data_and_len().0.len()
|
||||
}
|
||||
|
||||
pub fn units<'a>(&'a self, slot: &'a mut BitSetUnit) -> &'a [BitSetUnit] {
|
||||
if self.is_inline() {
|
||||
*slot = unsafe { self.inline } & !Self::FLAG;
|
||||
core::slice::from_ref(slot)
|
||||
} else {
|
||||
self.data_and_len().0
|
||||
}
|
||||
}
|
||||
|
||||
pub fn units_mut(&mut self) -> Option<&mut [BitSetUnit]> {
|
||||
self.is_inline().not().then(|| self.data_mut_and_len().0)
|
||||
}
|
||||
|
||||
pub fn reserve(&mut self, len: usize) {
|
||||
if len > self.data_and_len().1 {
|
||||
self.grow(len.next_power_of_two().max(4 * Self::UNIT));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_range(&mut self, proj_range: Range<usize>) {
|
||||
if proj_range.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
self.reserve(proj_range.end);
|
||||
let (units, _) = self.data_mut_and_len();
|
||||
|
||||
if proj_range.start / Self::UNIT == (proj_range.end - 1) / Self::UNIT {
|
||||
debug_assert!(proj_range.len() <= Self::UNIT);
|
||||
let mask = ((1 << proj_range.len()) - 1) << (proj_range.start % Self::UNIT);
|
||||
units[proj_range.start / Self::UNIT] |= mask;
|
||||
} else {
|
||||
let fill_range = proj_range.start.div_ceil(Self::UNIT)..proj_range.end / Self::UNIT;
|
||||
units[fill_range].fill(BitSetUnit::MAX);
|
||||
|
||||
let prefix_len = Self::UNIT - proj_range.start % Self::UNIT;
|
||||
let prefix_mask = ((1 << prefix_len) - 1) << (proj_range.start % Self::UNIT);
|
||||
units[proj_range.start / Self::UNIT] |= prefix_mask;
|
||||
|
||||
let postfix_len = proj_range.end % Self::UNIT;
|
||||
let postfix_mask = (1 << postfix_len) - 1;
|
||||
units[proj_range.end / Self::UNIT] |= postfix_mask;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BitSetIter<'a> {
|
||||
index: usize,
|
||||
current: BitSetUnit,
|
||||
remaining: &'a [BitSetUnit],
|
||||
}
|
||||
|
||||
impl Iterator for BitSetIter<'_> {
|
||||
type Item = usize;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
while self.current == 0 {
|
||||
self.current = *self.remaining.take_first()?;
|
||||
self.index += 1;
|
||||
}
|
||||
|
||||
let sub_idx = self.current.trailing_zeros() as usize;
|
||||
self.current &= self.current - 1;
|
||||
Some(self.index * BitSet::UNIT + sub_idx)
|
||||
}
|
||||
}
|
||||
|
||||
struct AllocedBitSet {
|
||||
cap: usize,
|
||||
data: [BitSetUnit; 0],
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[test]
|
||||
fn test_small_bit_set() {
|
||||
use std::vec::Vec;
|
||||
|
||||
let mut sv = BitSet::default();
|
||||
|
||||
sv.set(10);
|
||||
debug_assert!(sv.get(10));
|
||||
sv.set(100);
|
||||
debug_assert!(sv.get(100));
|
||||
sv.set(10000);
|
||||
debug_assert!(sv.get(10000));
|
||||
debug_assert_eq!(sv.iter().collect::<Vec<_>>(), &[10, 100, 10000]);
|
||||
sv.clear(10000);
|
||||
debug_assert_eq!(sv.iter().collect::<Vec<_>>(), &[]);
|
||||
}
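// Hypothetical test, not in the original diff: `set_range` fills whole units in the
// middle of the range wholesale and patches the partial first and last units with
// prefix/postfix masks.
#[cfg(test)]
#[test]
fn test_bit_set_range() {
    use std::vec::Vec;

    let mut bs = BitSet::default();
    bs.set_range(3..70);
    assert!(bs.get(3) && bs.get(69));
    assert!(!bs.get(2) && !bs.get(70));
    assert_eq!(bs.iter().collect::<Vec<_>>(), (3usize..70).collect::<Vec<_>>());
}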
|
||||
|
||||
pub union Vc {
|
||||
inline: InlineVc,
|
||||
alloced: AllocedVc,
|
||||
}
|
||||
|
||||
impl Default for Vc {
|
||||
fn default() -> Self {
|
||||
Vc { inline: InlineVc { elems: MaybeUninit::uninit(), cap: Default::default() } }
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Vc {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
self.as_slice().fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<Nid> for Vc {
|
||||
fn from_iter<T: IntoIterator<Item = Nid>>(iter: T) -> Self {
|
||||
let mut slf = Self::default();
|
||||
for i in iter {
|
||||
slf.push(i);
|
||||
}
|
||||
slf
|
||||
}
|
||||
}
|
||||
|
||||
const INLINE_ELEMS: usize = VC_SIZE / 2 - 1;
|
||||
const VC_SIZE: usize = 16;
|
||||
|
||||
impl Vc {
|
||||
fn is_inline(&self) -> bool {
|
||||
unsafe { self.inline.cap <= INLINE_ELEMS as Nid }
|
||||
}
|
||||
|
||||
fn layout(&self) -> Option<core::alloc::Layout> {
|
||||
unsafe {
|
||||
self.is_inline().not().then(|| {
|
||||
core::alloc::Layout::array::<Nid>(self.alloced.cap as _).unwrap_unchecked()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
unsafe {
|
||||
if self.is_inline() {
|
||||
self.inline.cap as _
|
||||
} else {
|
||||
self.alloced.len as _
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn len_mut(&mut self) -> &mut Nid {
|
||||
unsafe {
|
||||
if self.is_inline() {
|
||||
                &mut self.inline.cap
            } else {
                &mut self.alloced.len
            }
        }
    }

    fn as_ptr(&self) -> *const Nid {
        unsafe {
            match self.is_inline() {
                true => self.inline.elems.as_ptr().cast(),
                false => self.alloced.base.as_ptr(),
            }
        }
    }

    fn as_mut_ptr(&mut self) -> *mut Nid {
        unsafe {
            match self.is_inline() {
                true => self.inline.elems.as_mut_ptr().cast(),
                false => self.alloced.base.as_ptr(),
            }
        }
    }

    pub fn as_slice(&self) -> &[Nid] {
        unsafe { core::slice::from_raw_parts(self.as_ptr(), self.len()) }
    }

    fn as_slice_mut(&mut self) -> &mut [Nid] {
        unsafe { core::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len()) }
    }

    pub fn push(&mut self, value: Nid) {
        if let Some(layout) = self.layout() {
            if unsafe { self.alloced.len == self.alloced.cap } {
                unsafe {
                    self.alloced.cap *= 2;
                    self.alloced.base = Unique::new_unchecked(
                        alloc::realloc(
                            self.alloced.base.as_ptr().cast(),
                            layout,
                            self.alloced.cap as usize * core::mem::size_of::<Nid>(),
                        )
                        .cast(),
                    );
                }
            }
        } else if self.len() == INLINE_ELEMS {
            unsafe {
                let mut allcd =
                    Self::alloc((self.inline.cap + 1).next_power_of_two() as _, self.len());
                core::ptr::copy_nonoverlapping(self.as_ptr(), allcd.as_mut_ptr(), self.len());
                debug_assert!(!allcd.is_inline());
                *self = allcd;
            }
        }

        unsafe {
            *self.len_mut() += 1;
            self.as_mut_ptr().add(self.len() - 1).write(value);
        }
    }

    unsafe fn alloc(cap: usize, len: usize) -> Self {
        debug_assert!(cap > INLINE_ELEMS);
        let layout = unsafe { core::alloc::Layout::array::<Nid>(cap).unwrap_unchecked() };
        let alloc = unsafe { alloc::alloc(layout) };
        unsafe {
            Vc {
                alloced: AllocedVc {
                    base: Unique::new_unchecked(alloc.cast()),
                    len: len as _,
                    cap: cap as _,
                },
            }
        }
    }

    pub fn swap_remove(&mut self, index: usize) {
        let len = self.len() - 1;
        self.as_slice_mut().swap(index, len);
        *self.len_mut() -= 1;
    }

    pub fn remove(&mut self, index: usize) {
        self.as_slice_mut().copy_within(index + 1.., index);
        *self.len_mut() -= 1;
    }
}

impl Drop for Vc {
    fn drop(&mut self) {
        if let Some(layout) = self.layout() {
            unsafe {
                alloc::dealloc(self.alloced.base.as_ptr().cast(), layout);
            }
        }
    }
}

impl Clone for Vc {
    fn clone(&self) -> Self {
        self.as_slice().into()
    }
}

impl IntoIterator for Vc {
    type IntoIter = VcIntoIter;
    type Item = Nid;

    fn into_iter(self) -> Self::IntoIter {
        VcIntoIter { start: 0, end: self.len(), vc: self }
    }
}

pub struct VcIntoIter {
    start: usize,
    end: usize,
    vc: Vc,
}

impl Iterator for VcIntoIter {
    type Item = Nid;

    fn next(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }

        let ret = unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.start)) };
        self.start += 1;
        Some(ret)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.end - self.start;
        (len, Some(len))
    }
}

impl DoubleEndedIterator for VcIntoIter {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }

        self.end -= 1;
        Some(unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.end)) })
    }
}

impl ExactSizeIterator for VcIntoIter {}

impl<const SIZE: usize> From<[Nid; SIZE]> for Vc {
    fn from(value: [Nid; SIZE]) -> Self {
        value.as_slice().into()
    }
}

impl<'a> From<&'a [Nid]> for Vc {
    fn from(value: &'a [Nid]) -> Self {
        if value.len() <= INLINE_ELEMS {
            let mut dflt = Self::default();
            unsafe {
                core::ptr::copy_nonoverlapping(value.as_ptr(), dflt.as_mut_ptr(), value.len())
            };
            dflt.inline.cap = value.len() as _;
            dflt
        } else {
            let mut allcd = unsafe { Self::alloc(value.len(), value.len()) };
            unsafe {
                core::ptr::copy_nonoverlapping(value.as_ptr(), allcd.as_mut_ptr(), value.len())
            };
            allcd
        }
    }
}

impl Deref for Vc {
    type Target = [Nid];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl DerefMut for Vc {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_slice_mut()
    }
}

#[derive(Clone, Copy)]
#[repr(C)]
struct InlineVc {
    cap: Nid,
    elems: MaybeUninit<[Nid; INLINE_ELEMS]>,
}

#[derive(Clone, Copy)]
#[repr(C)]
struct AllocedVc {
    cap: Nid,
    len: Nid,
    base: Unique<Nid>,
}
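
Vc is a small-vector for node ids: up to INLINE_ELEMS elements live directly in InlineVc, and anything larger spills into the heap buffer described by AllocedVc. Both variants are #[repr(C)] and begin with the same cap: Nid field, so that field can be read through either view and doubles as the discriminant; inline storage never reports a capacity above INLINE_ELEMS, while Vc::alloc asserts cap > INLINE_ELEMS. The snippet below is a minimal self-contained sketch of that layout trick, assuming the is_inline helper (defined earlier in the file, not shown in this hunk) boils down to a cap <= INLINE_ELEMS test; SmallVec, Inline, Spilled and the chosen INLINE_ELEMS value are illustrative names, not code from this diff.

use core::mem::MaybeUninit;

const INLINE_ELEMS: usize = 6; // assumed value, for illustration only

#[derive(Clone, Copy)]
#[repr(C)]
struct Inline {
    cap: u16,
    elems: MaybeUninit<[u16; INLINE_ELEMS]>,
}

#[derive(Clone, Copy)]
#[repr(C)]
struct Spilled {
    cap: u16,
    len: u16,
    base: *mut u16, // stands in for Unique<Nid>
}

#[repr(C)]
union SmallVec {
    inline: Inline,
    spilled: Spilled,
}

impl SmallVec {
    // Both variants are #[repr(C)] with `cap` first, so reading it through the
    // `inline` view is valid no matter which variant is active, and the value
    // itself tells us which one that is (assumed discriminant rule).
    fn is_inline(&self) -> bool {
        unsafe { self.inline.cap as usize <= INLINE_ELEMS }
    }
}

fn main() {
    let v = SmallVec { inline: Inline { cap: 0, elems: MaybeUninit::uninit() } };
    assert!(v.is_inline());
}

Keeping cap as the first field of both repr(C) structs is what lets the untagged union be probed without spending a separate tag byte.
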
pub trait Ent: Copy {
    fn new(index: usize) -> Self;
    fn index(self) -> usize;
}

#[repr(transparent)]
pub struct EntSlice<K: Ent, T> {
    k: PhantomData<fn(K)>,
    data: [T],
}

impl<'a, K: Ent, T> From<&'a [T]> for &'a EntSlice<K, T> {
    fn from(value: &'a [T]) -> Self {
        unsafe { core::mem::transmute(value) }
    }
}

impl<K: Ent, T> core::ops::Index<K> for EntSlice<K, T> {
    type Output = T;

    fn index(&self, index: K) -> &Self::Output {
        &self.data[index.index()]
    }
}

pub struct EntVec<K: Ent, T> {
    data: ::alloc::vec::Vec<T>,
    k: PhantomData<fn(K)>,
}

impl<K: Ent, T> Default for EntVec<K, T> {
    fn default() -> Self {
        Self { data: Default::default(), k: PhantomData }
    }
}

impl<K: Ent, T> EntVec<K, T> {
    pub fn clear(&mut self) {
        self.data.clear();
    }

    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }

    pub fn len(&self) -> usize {
        self.data.len()
    }

    pub fn push(&mut self, value: T) -> K {
        let k = K::new(self.data.len());
        self.data.push(value);
        k
    }

    pub fn next(&self, index: K) -> Option<&T> {
        self.data.get(index.index() + 1)
    }

    pub fn shadow(&mut self, len: usize)
    where
        T: Default,
    {
        if self.data.len() < len {
            self.data.resize_with(len, Default::default);
        }
    }

    pub fn iter(&self) -> core::slice::Iter<T> {
        self.data.iter()
    }
}

impl<K: Ent, T> core::ops::Index<K> for EntVec<K, T> {
    type Output = T;

    fn index(&self, index: K) -> &Self::Output {
        &self.data[index.index()]
    }
}

impl<K: Ent, T> core::ops::IndexMut<K> for EntVec<K, T> {
    fn index_mut(&mut self, index: K) -> &mut Self::Output {
        &mut self.data[index.index()]
    }
}

macro_rules! decl_ent {
    ($(
        $vis:vis struct $name:ident($index:ty);
    )*) => {$(
        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
        $vis struct $name($index);

        impl crate::utils::Ent for $name {
            fn new(index: usize) -> Self {
                Self(index as $index)
            }

            fn index(self) -> usize {
                self.0 as _
            }
        }

        impl core::fmt::Display for $name {
            fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
                write!(f, concat!(stringify!($name), "{}"), self.0)
            }
        }
    )*};
}
pub(crate) use decl_ent;
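
Ent, EntSlice, EntVec and the decl_ent! macro implement a typed-index ("entity") pattern: every arena hands out its own key newtype, so an index minted by one EntVec cannot accidentally be used to index another, and the PhantomData<fn(K)> marker ties the key type to the container without actually storing a K. The sketch below is a minimal self-contained rendition of the same idea; FuncId and the trimmed-down TypedVec container are illustrative stand-ins, not declarations taken from this diff.

use std::marker::PhantomData;

trait Ent: Copy {
    fn new(index: usize) -> Self;
    fn index(self) -> usize;
}

// Roughly what a `decl_ent! { struct FuncId(u32); }` expansion boils down to
// (the real macro also derives ordering/hashing and a Display impl).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct FuncId(u32);

impl Ent for FuncId {
    fn new(index: usize) -> Self {
        Self(index as u32)
    }
    fn index(self) -> usize {
        self.0 as usize
    }
}

// Cut-down analogue of EntVec: a Vec that only accepts its own key type.
struct TypedVec<K: Ent, T> {
    data: Vec<T>,
    k: PhantomData<fn(K)>,
}

impl<K: Ent, T> TypedVec<K, T> {
    fn new() -> Self {
        Self { data: Vec::new(), k: PhantomData }
    }
    // push returns the strongly typed key of the freshly inserted slot
    fn push(&mut self, value: T) -> K {
        let key = K::new(self.data.len());
        self.data.push(value);
        key
    }
    fn get(&self, key: K) -> &T {
        &self.data[key.index()]
    }
}

fn main() {
    let mut funcs: TypedVec<FuncId, &str> = TypedVec::new();
    let main_id = funcs.push("main");
    assert_eq!(*funcs.get(main_id), "main");
}
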
47 lang/tests/son_tests_advanced_floating_point_arithmetic.txt Normal file
@@ -0,0 +1,47 @@
main:
|
||||
ADDI64 r254, r254, -24d
|
||||
ST r31, r254, 0a, 24h
|
||||
LI32 r32, 1148846080w
|
||||
CP r2, r32
|
||||
JAL r31, r0, :sin
|
||||
CP r33, r1
|
||||
FMUL32 r32, r33, r32
|
||||
FTI32 r32, r32, 1b
|
||||
CP r1, r32
|
||||
LD r31, r254, 0a, 24h
|
||||
ADDI64 r254, r254, 24d
|
||||
JALA r0, r31, 0a
|
||||
sin:
|
||||
CP r13, r2
|
||||
LI32 r14, 1124073472w
|
||||
LI32 r15, 1078530011w
|
||||
FMUL32 r14, r13, r14
|
||||
FDIV32 r14, r14, r15
|
||||
FTI32 r14, r14, 1b
|
||||
ANDI r15, r14, 255d
|
||||
MULI64 r15, r15, 4d
|
||||
LRA r16, r0, :sin_table
|
||||
LI32 r17, 1086918619w
|
||||
ITF32 r18, r14
|
||||
ADDI64 r14, r14, 64d
|
||||
ADD64 r15, r16, r15
|
||||
LI32 r19, 1132462080w
|
||||
FMUL32 r17, r18, r17
|
||||
ANDI r14, r14, 255d
|
||||
LI32 r18, 1056964608w
|
||||
LD r15, r15, 0a, 4h
|
||||
FDIV32 r17, r17, r19
|
||||
MULI64 r14, r14, 4d
|
||||
FMUL32 r18, r15, r18
|
||||
FSUB32 r13, r13, r17
|
||||
ADD64 r14, r16, r14
|
||||
FMUL32 r16, r13, r18
|
||||
LD r14, r14, 0a, 4h
|
||||
FSUB32 r14, r14, r16
|
||||
FMUL32 r13, r14, r13
|
||||
FADD32 r13, r15, r13
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
code size: 1311
|
||||
ret: 826
|
||||
status: Ok(())
|
6 lang/tests/son_tests_aliasing_overoptimization.txt Normal file
@@ -0,0 +1,6 @@
main:
|
||||
CP r1, r0
|
||||
JALA r0, r31, 0a
|
||||
code size: 22
|
||||
ret: 0
|
||||
status: Ok(())
|
6 lang/tests/son_tests_arithmetic.txt Normal file
@@ -0,0 +1,6 @@
main:
|
||||
CP r1, r0
|
||||
JALA r0, r31, 0a
|
||||
code size: 22
|
||||
ret: 0
|
||||
status: Ok(())
|
32 lang/tests/son_tests_arrays.txt Normal file
@@ -0,0 +1,32 @@
main:
|
||||
ADDI64 r254, r254, -56d
|
||||
ST r31, r254, 24a, 32h
|
||||
LI64 r32, 1d
|
||||
ADDI64 r33, r254, 0d
|
||||
ST r32, r254, 0a, 8h
|
||||
LI64 r34, 2d
|
||||
ST r34, r254, 8a, 8h
|
||||
LI64 r34, 4d
|
||||
ST r34, r254, 16a, 8h
|
||||
CP r2, r33
|
||||
JAL r31, r0, :pass
|
||||
CP r33, r1
|
||||
ADD64 r32, r33, r32
|
||||
CP r1, r32
|
||||
LD r31, r254, 24a, 32h
|
||||
ADDI64 r254, r254, 56d
|
||||
JALA r0, r31, 0a
|
||||
pass:
|
||||
CP r13, r2
|
||||
LD r14, r13, 8a, 8h
|
||||
MULI64 r15, r14, 8d
|
||||
LD r16, r13, 0a, 8h
|
||||
ADD64 r13, r15, r13
|
||||
ADD64 r14, r14, r16
|
||||
LD r13, r13, 0a, 8h
|
||||
ADD64 r13, r13, r14
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
code size: 246
|
||||
ret: 8
|
||||
status: Ok(())
|
8 lang/tests/son_tests_big_array_crash.txt Normal file
@@ -0,0 +1,8 @@
main:
|
||||
LRA r13, r0, :sin_table
|
||||
LD r13, r13, 80a, 8h
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
code size: 770
|
||||
ret: 1736
|
||||
status: Ok(())
|
14 lang/tests/son_tests_branch_assignments.txt Normal file
@@ -0,0 +1,14 @@
main:
|
||||
CP r14, r2
|
||||
LI64 r13, 1d
|
||||
JNE r14, r13, :0
|
||||
JMP :1
|
||||
0: JNE r14, r0, :2
|
||||
LI64 r13, 2d
|
||||
JMP :1
|
||||
2: LI64 r13, 3d
|
||||
1: CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
code size: 75
|
||||
ret: 2
|
||||
status: Ok(())
|
32 lang/tests/son_tests_c_strings.txt Normal file
@@ -0,0 +1,32 @@
main:
|
||||
ADDI64 r254, r254, -24d
|
||||
ST r31, r254, 0a, 24h
|
||||
LRA r32, r0, :"abඞ\n\r\t56789\0"
|
||||
CP r2, r32
|
||||
JAL r31, r0, :str_len
|
||||
CP r32, r1
|
||||
LRA r33, r0, :"fff\0"
|
||||
CP r2, r33
|
||||
JAL r31, r0, :str_len
|
||||
CP r33, r1
|
||||
ADD64 r32, r33, r32
|
||||
CP r1, r32
|
||||
LD r31, r254, 0a, 24h
|
||||
ADDI64 r254, r254, 24d
|
||||
JALA r0, r31, 0a
|
||||
str_len:
|
||||
CP r13, r2
|
||||
CP r15, r0
|
||||
CP r14, r15
|
||||
2: LD r16, r13, 0a, 1h
|
||||
ANDI r16, r16, 255d
|
||||
JNE r16, r15, :0
|
||||
CP r1, r14
|
||||
JMP :1
|
||||
0: ADDI64 r13, r13, 1d
|
||||
ADDI64 r14, r14, 1d
|
||||
JMP :2
|
||||
1: JALA r0, r31, 0a
|
||||
code size: 216
|
||||
ret: 16
|
||||
status: Ok(())
|
13 lang/tests/son_tests_comments.txt Normal file
@@ -0,0 +1,13 @@
foo:
|
||||
JALA r0, r31, 0a
|
||||
main:
|
||||
ADDI64 r254, r254, -8d
|
||||
ST r31, r254, 0a, 8h
|
||||
JAL r31, r0, :foo
|
||||
CP r1, r0
|
||||
LD r31, r254, 0a, 8h
|
||||
ADDI64 r254, r254, 8d
|
||||
JALA r0, r31, 0a
|
||||
code size: 88
|
||||
ret: 0
|
||||
status: Ok(())
|
65 lang/tests/son_tests_comparing_floating_points.txt Normal file
@@ -0,0 +1,65 @@
box:
|
||||
CP r13, r2
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
main:
|
||||
ADDI64 r254, r254, -32d
|
||||
ST r31, r254, 0a, 32h
|
||||
LI32 r32, 1065353216w
|
||||
CP r2, r32
|
||||
JAL r31, r0, :box
|
||||
CP r33, r1
|
||||
CP r2, r0
|
||||
JAL r31, r0, :box
|
||||
CP r34, r1
|
||||
FCMPLT32 r33, r33, r34
|
||||
ANDI r33, r33, 255d
|
||||
JNE r33, r0, :0
|
||||
CP r2, r32
|
||||
JAL r31, r0, :box
|
||||
CP r33, r1
|
||||
CP r2, r0
|
||||
JAL r31, r0, :box
|
||||
CP r34, r1
|
||||
FCMPGT32 r33, r33, r34
|
||||
NOT r33, r33
|
||||
ANDI r33, r33, 255d
|
||||
JNE r33, r0, :1
|
||||
CP r2, r0
|
||||
JAL r31, r0, :box
|
||||
CP r33, r1
|
||||
CP r2, r32
|
||||
JAL r31, r0, :box
|
||||
CP r34, r1
|
||||
FCMPGT32 r33, r33, r34
|
||||
ANDI r33, r33, 255d
|
||||
JNE r33, r0, :2
|
||||
CP r2, r0
|
||||
JAL r31, r0, :box
|
||||
CP r33, r1
|
||||
CP r2, r32
|
||||
JAL r31, r0, :box
|
||||
CP r32, r1
|
||||
FCMPLT32 r32, r33, r32
|
||||
NOT r32, r32
|
||||
ANDI r32, r32, 255d
|
||||
JNE r32, r0, :3
|
||||
CP r1, r0
|
||||
JMP :4
|
||||
3: LI64 r32, 4d
|
||||
CP r1, r32
|
||||
JMP :4
|
||||
2: LI64 r32, 3d
|
||||
CP r1, r32
|
||||
JMP :4
|
||||
1: LI64 r32, 2d
|
||||
CP r1, r32
|
||||
JMP :4
|
||||
0: LI64 r32, 1d
|
||||
CP r1, r32
|
||||
4: LD r31, r254, 0a, 32h
|
||||
ADDI64 r254, r254, 32d
|
||||
JALA r0, r31, 0a
|
||||
code size: 355
|
||||
ret: 0
|
||||
status: Ok(())
|
|
@@ -0,0 +1,8 @@
main:
|
||||
LRA r13, r0, :a
|
||||
LD r13, r13, 0a, 8h
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
code size: 50
|
||||
ret: 50
|
||||
status: Ok(())
|
8 lang/tests/son_tests_comptime_min_reg_leak.txt Normal file
@@ -0,0 +1,8 @@
main:
|
||||
LRA r13, r0, :a
|
||||
LD r13, r13, 0a, 8h
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
code size: 50
|
||||
ret: 50
|
||||
status: Ok(())
|
19 lang/tests/son_tests_conditional_stores.txt Normal file
@@ -0,0 +1,19 @@
cond:
|
||||
CP r1, r0
|
||||
JALA r0, r31, 0a
|
||||
main:
|
||||
ADDI64 r254, r254, -24d
|
||||
ST r31, r254, 0a, 24h
|
||||
JAL r31, r0, :cond
|
||||
CP r32, r0
|
||||
CP r33, r1
|
||||
JNE r33, r32, :0
|
||||
JMP :1
|
||||
0: LI64 r32, 2d
|
||||
1: CP r1, r32
|
||||
LD r31, r254, 0a, 24h
|
||||
ADDI64 r254, r254, 24d
|
||||
JALA r0, r31, 0a
|
||||
code size: 117
|
||||
ret: 0
|
||||
status: Ok(())
|
6 lang/tests/son_tests_const_folding_with_arg.txt Normal file
@@ -0,0 +1,6 @@
main:
|
||||
CP r1, r0
|
||||
JALA r0, r31, 0a
|
||||
code size: 22
|
||||
ret: 0
|
||||
status: Ok(())
|
7 lang/tests/son_tests_constants.txt Normal file
@@ -0,0 +1,7 @@
main:
|
||||
LI32 r13, 69w
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
code size: 28
|
||||
ret: 69
|
||||
status: Ok(())
|
6 lang/tests/son_tests_dead_code_in_loop.txt Normal file
@@ -0,0 +1,6 @@
main:
|
||||
CP r1, r0
|
||||
JALA r0, r31, 0a
|
||||
code size: 22
|
||||
ret: 0
|
||||
status: Ok(())
|
19 lang/tests/son_tests_defer.txt Normal file
@@ -0,0 +1,19 @@
main:
|
||||
LI64 r15, 3d
|
||||
LI64 r16, 10d
|
||||
CP r14, r0
|
||||
CP r13, r14
|
||||
3: JNE r13, r16, :0
|
||||
LI64 r14, -10d
|
||||
ADD64 r14, r13, r14
|
||||
CP r1, r14
|
||||
JMP :1
|
||||
0: DIRU64 r0, r17, r13, r15
|
||||
JNE r17, r14, :2
|
||||
JMP :2
|
||||
2: ADDI64 r13, r13, 1d
|
||||
JMP :3
|
||||
1: JALA r0, r31, 0a
|
||||
code size: 103
|
||||
ret: 0
|
||||
status: Ok(())
|
11 lang/tests/son_tests_die.txt Normal file
@@ -0,0 +1,11 @@
fun:
|
||||
UN
|
||||
main:
|
||||
ADDI64 r254, r254, -8d
|
||||
ST r31, r254, 0a, 8h
|
||||
JAL r31, r0, :fun
|
||||
LD r31, r254, 0a, 8h
|
||||
ADDI64 r254, r254, 8d
|
||||
code size: 64
|
||||
ret: 0
|
||||
status: Err(Unreachable)
|
71 lang/tests/son_tests_different_function_destinations.txt Normal file
@@ -0,0 +1,71 @@
main:
|
||||
ADDI64 r254, r254, -88d
|
||||
ST r31, r254, 48a, 40h
|
||||
LRA r32, r0, :glob_stru
|
||||
JAL r31, r0, :new_stru
|
||||
ST r1, r32, 0a, 16h
|
||||
LD r33, r32, 0a, 8h
|
||||
JEQ r33, r0, :0
|
||||
LI64 r32, 300d
|
||||
CP r1, r32
|
||||
JMP :1
|
||||
0: ST r0, r32, 0a, 8h
|
||||
LD r33, r32, 0a, 8h
|
||||
JEQ r33, r0, :2
|
||||
LI64 r32, 200d
|
||||
CP r1, r32
|
||||
JMP :1
|
||||
2: LI64 r34, 1d
|
||||
ST r34, r32, 0a, 8h
|
||||
ST r34, r32, 8a, 8h
|
||||
ADDI64 r33, r254, 0d
|
||||
ST r34, r254, 0a, 8h
|
||||
ST r34, r254, 8a, 8h
|
||||
ST r34, r254, 16a, 8h
|
||||
ST r34, r254, 24a, 8h
|
||||
ST r34, r254, 32a, 8h
|
||||
ST r34, r254, 40a, 8h
|
||||
ADDI64 r35, r33, 48d
|
||||
CP r32, r33
|
||||
8: JNE r35, r32, :3
|
||||
LD r32, r254, 32a, 8h
|
||||
JEQ r32, r0, :4
|
||||
LI64 r32, 100d
|
||||
CP r1, r32
|
||||
JMP :1
|
||||
4: ST r34, r254, 0a, 8h
|
||||
ST r34, r254, 8a, 8h
|
||||
ST r34, r254, 16a, 8h
|
||||
ST r34, r254, 24a, 8h
|
||||
ST r34, r254, 32a, 8h
|
||||
ST r34, r254, 40a, 8h
|
||||
CP r32, r33
|
||||
7: LD r34, r254, 32a, 8h
|
||||
JNE r35, r32, :5
|
||||
JEQ r34, r0, :6
|
||||
LI64 r32, 10d
|
||||
CP r1, r32
|
||||
JMP :1
|
||||
6: CP r1, r0
|
||||
JMP :1
|
||||
5: ST r0, r32, 0a, 8h
|
||||
ST r0, r32, 8a, 8h
|
||||
ADDI64 r32, r32, 16d
|
||||
JMP :7
|
||||
3: JAL r31, r0, :new_stru
|
||||
ST r1, r32, 0a, 16h
|
||||
ADDI64 r32, r32, 16d
|
||||
JMP :8
|
||||
1: LD r31, r254, 48a, 40h
|
||||
ADDI64 r254, r254, 88d
|
||||
JALA r0, r31, 0a
|
||||
new_stru:
|
||||
ADDI64 r254, r254, -16d
|
||||
ST r0, r254, 0a, 8h
|
||||
ST r0, r254, 8a, 8h
|
||||
LD r1, r254, 0a, 16h
|
||||
ADDI64 r254, r254, 16d
|
||||
JALA r0, r31, 0a
|
||||
code size: 655
|
||||
ret: 0
|
||||
status: Ok(())
|
29 lang/tests/son_tests_different_types.txt Normal file
@@ -0,0 +1,29 @@
main:
|
||||
ADDI64 r254, r254, -12d
|
||||
LI8 r13, 255b
|
||||
ST r13, r254, 0a, 1h
|
||||
ST r0, r254, 1a, 1h
|
||||
ST r0, r254, 2a, 1h
|
||||
ST r13, r254, 3a, 1h
|
||||
ST r0, r254, 4a, 4h
|
||||
LD r13, r254, 4a, 4h
|
||||
LI32 r14, 2w
|
||||
ST r14, r254, 8a, 4h
|
||||
LD r14, r254, 8a, 4h
|
||||
LI64 r15, 2d
|
||||
ANDI r14, r14, 4294967295d
|
||||
JEQ r14, r15, :0
|
||||
CP r1, r0
|
||||
JMP :1
|
||||
0: ANDI r13, r13, 4294967295d
|
||||
JEQ r13, r0, :2
|
||||
LI64 r13, 64d
|
||||
CP r1, r13
|
||||
JMP :1
|
||||
2: LI64 r13, 512d
|
||||
CP r1, r13
|
||||
1: ADDI64 r254, r254, 12d
|
||||
JALA r0, r31, 0a
|
||||
code size: 235
|
||||
ret: 512
|
||||
status: Ok(())
|
21 lang/tests/son_tests_directives.txt Normal file
@@ -0,0 +1,21 @@
main:
|
||||
ADDI64 r254, r254, -16d
|
||||
LI64 r13, 10d
|
||||
ST r13, r254, 0a, 8h
|
||||
LI64 r13, 20d
|
||||
ST r13, r254, 8a, 8h
|
||||
LI64 r13, 6d
|
||||
LI64 r14, 5d
|
||||
LI64 r15, 1d
|
||||
CP r2, r15
|
||||
LD r3, r254, 0a, 16h
|
||||
CP r5, r14
|
||||
CP r6, r13
|
||||
ECA
|
||||
CP r1, r0
|
||||
ADDI64 r254, r254, 16d
|
||||
JALA r0, r31, 0a
|
||||
ev: Ecall
|
||||
code size: 143
|
||||
ret: 0
|
||||
status: Ok(())
|
|
@@ -0,0 +1,28 @@
main:
|
||||
ADDI64 r254, r254, -40d
|
||||
ST r31, r254, 24a, 16h
|
||||
LI64 r32, 1d
|
||||
ST r32, r254, 0a, 8h
|
||||
ST r0, r254, 8a, 8h
|
||||
ST r0, r254, 16a, 8h
|
||||
LD r2, r254, 8a, 16h
|
||||
JAL r31, r0, :pass
|
||||
CP r32, r1
|
||||
CP r1, r32
|
||||
LD r31, r254, 24a, 16h
|
||||
ADDI64 r254, r254, 40d
|
||||
JALA r0, r31, 0a
|
||||
pass:
|
||||
ADDI64 r254, r254, -16d
|
||||
ST r2, r254, 0a, 16h
|
||||
ADDI64 r2, r254, 0d
|
||||
CP r13, r2
|
||||
LD r14, r13, 0a, 8h
|
||||
LD r13, r13, 8a, 8h
|
||||
ADD64 r13, r13, r14
|
||||
CP r1, r13
|
||||
ADDI64 r254, r254, 16d
|
||||
JALA r0, r31, 0a
|
||||
code size: 235
|
||||
ret: 0
|
||||
status: Ok(())
|
20 lang/tests/son_tests_enums.txt Normal file
@@ -0,0 +1,20 @@
main:
|
||||
ADDI64 r254, r254, -16d
|
||||
ST r31, r254, 0a, 16h
|
||||
JAL r31, r0, :some_enum
|
||||
CP r32, r1
|
||||
ANDI r32, r32, 255d
|
||||
JNE r32, r0, :0
|
||||
CP r1, r0
|
||||
JMP :1
|
||||
0: LI64 r32, 100d
|
||||
CP r1, r32
|
||||
1: LD r31, r254, 0a, 16h
|
||||
ADDI64 r254, r254, 16d
|
||||
JALA r0, r31, 0a
|
||||
some_enum:
|
||||
CP r1, r0
|
||||
JALA r0, r31, 0a
|
||||
code size: 128
|
||||
ret: 0
|
||||
status: Ok(())
|
114 lang/tests/son_tests_exhaustive_loop_testing.txt Normal file
@@ -0,0 +1,114 @@
continue_and_state_change:
|
||||
CP r13, r2
|
||||
LI64 r16, 3d
|
||||
LI64 r17, 2d
|
||||
LI64 r18, 10d
|
||||
CP r15, r0
|
||||
LI64 r14, 4d
|
||||
6: JLTU r13, r18, :0
|
||||
JMP :1
|
||||
0: JNE r13, r17, :2
|
||||
CP r13, r14
|
||||
JMP :3
|
||||
2: JNE r13, r16, :4
|
||||
CP r13, r15
|
||||
1: CP r1, r13
|
||||
JMP :5
|
||||
4: ADDI64 r13, r13, 1d
|
||||
3: JMP :6
|
||||
5: JALA r0, r31, 0a
|
||||
infinite_loop:
|
||||
ADDI64 r254, r254, -32d
|
||||
ST r31, r254, 0a, 32h
|
||||
LI64 r34, 1d
|
||||
CP r33, r0
|
||||
CP r32, r33
|
||||
1: JNE r32, r34, :0
|
||||
JMP :0
|
||||
0: CP r2, r33
|
||||
JAL r31, r0, :continue_and_state_change
|
||||
CP r32, r1
|
||||
JMP :1
|
||||
LD r31, r254, 0a, 32h
|
||||
ADDI64 r254, r254, 32d
|
||||
JALA r0, r31, 0a
|
||||
main:
|
||||
ADDI64 r254, r254, -40d
|
||||
ST r31, r254, 0a, 40h
|
||||
CP r2, r0
|
||||
JAL r31, r0, :multiple_breaks
|
||||
LI64 r32, 3d
|
||||
CP r33, r1
|
||||
JEQ r33, r32, :0
|
||||
LI64 r32, 1d
|
||||
CP r1, r32
|
||||
JMP :1
|
||||
0: LI64 r33, 4d
|
||||
CP r2, r33
|
||||
JAL r31, r0, :multiple_breaks
|
||||
LI64 r34, 10d
|
||||
CP r35, r1
|
||||
JEQ r35, r34, :2
|
||||
LI64 r32, 2d
|
||||
CP r1, r32
|
||||
JMP :1
|
||||
2: CP r2, r0
|
||||
JAL r31, r0, :state_change_in_break
|
||||
CP r35, r1
|
||||
JEQ r35, r0, :3
|
||||
CP r1, r32
|
||||
JMP :1
|
||||
3: CP r2, r33
|
||||
JAL r31, r0, :state_change_in_break
|
||||
CP r35, r1
|
||||
JEQ r35, r34, :4
|
||||
CP r1, r33
|
||||
JMP :1
|
||||
4: CP r2, r34
|
||||
JAL r31, r0, :continue_and_state_change
|
||||
CP r33, r1
|
||||
JEQ r33, r34, :5
|
||||
LI64 r32, 5d
|
||||
CP r1, r32
|
||||
JMP :1
|
||||
5: CP r2, r32
|
||||
JAL r31, r0, :continue_and_state_change
|
||||
CP r32, r1
|
||||
JEQ r32, r0, :6
|
||||
LI64 r32, 6d
|
||||
CP r1, r32
|
||||
JMP :1
|
||||
6: JAL r31, r0, :infinite_loop
|
||||
CP r1, r0
|
||||
1: LD r31, r254, 0a, 40h
|
||||
ADDI64 r254, r254, 40d
|
||||
JALA r0, r31, 0a
|
||||
multiple_breaks:
|
||||
CP r13, r2
|
||||
LI64 r14, 3d
|
||||
LI64 r15, 10d
|
||||
4: JLTU r13, r15, :0
|
||||
JMP :1
|
||||
0: ADDI64 r13, r13, 1d
|
||||
JNE r13, r14, :2
|
||||
1: CP r1, r13
|
||||
JMP :3
|
||||
2: JMP :4
|
||||
3: JALA r0, r31, 0a
|
||||
state_change_in_break:
|
||||
CP r13, r2
|
||||
LI64 r14, 3d
|
||||
LI64 r15, 10d
|
||||
4: JLTU r13, r15, :0
|
||||
JMP :1
|
||||
0: JNE r13, r14, :2
|
||||
CP r13, r0
|
||||
1: CP r1, r13
|
||||
JMP :3
|
||||
2: ADDI64 r13, r13, 1d
|
||||
JMP :4
|
||||
3: JALA r0, r31, 0a
|
||||
timed out
|
||||
code size: 667
|
||||
ret: 10
|
||||
status: Ok(())
|
55 lang/tests/son_tests_fb_driver.txt Normal file
@@ -0,0 +1,55 @@
check_platform:
|
||||
ADDI64 r254, r254, -16d
|
||||
ST r31, r254, 0a, 16h
|
||||
JAL r31, r0, :x86_fb_ptr
|
||||
CP r32, r1
|
||||
CP r1, r32
|
||||
LD r31, r254, 0a, 16h
|
||||
ADDI64 r254, r254, 16d
|
||||
JALA r0, r31, 0a
|
||||
main:
|
||||
ADDI64 r254, r254, -56d
|
||||
ST r31, r254, 0a, 56h
|
||||
JAL r31, r0, :check_platform
|
||||
CP r35, r0
|
||||
LI64 r36, 30d
|
||||
LI64 r37, 100d
|
||||
CP r34, r35
|
||||
CP r32, r35
|
||||
CP r33, r35
|
||||
5: JLTU r34, r36, :0
|
||||
ADDI64 r32, r32, 1d
|
||||
CP r2, r35
|
||||
CP r3, r32
|
||||
CP r4, r36
|
||||
JAL r31, r0, :set_pixel
|
||||
CP r34, r1
|
||||
JEQ r34, r33, :1
|
||||
CP r1, r35
|
||||
JMP :2
|
||||
1: JNE r32, r37, :3
|
||||
CP r1, r33
|
||||
JMP :2
|
||||
3: CP r34, r35
|
||||
JMP :4
|
||||
0: ADDI64 r33, r33, 1d
|
||||
ADDI64 r34, r34, 1d
|
||||
4: JMP :5
|
||||
2: LD r31, r254, 0a, 56h
|
||||
ADDI64 r254, r254, 56d
|
||||
JALA r0, r31, 0a
|
||||
set_pixel:
|
||||
CP r13, r2
|
||||
CP r14, r3
|
||||
CP r15, r4
|
||||
MUL64 r14, r14, r15
|
||||
ADD64 r13, r14, r13
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
x86_fb_ptr:
|
||||
LI64 r13, 100d
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
code size: 329
|
||||
ret: 3000
|
||||
status: Ok(())
|
7 lang/tests/son_tests_floating_point_arithmetic.txt Normal file
@@ -0,0 +1,7 @@
main:
|
||||
LI32 r13, 3212836864w
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
code size: 28
|
||||
ret: 3212836864
|
||||
status: Ok(())
|
29 lang/tests/son_tests_functions.txt Normal file
@@ -0,0 +1,29 @@
add_one:
|
||||
CP r13, r2
|
||||
ADDI64 r13, r13, 1d
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
add_two:
|
||||
CP r13, r2
|
||||
ADDI64 r13, r13, 2d
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
main:
|
||||
ADDI64 r254, r254, -24d
|
||||
ST r31, r254, 0a, 24h
|
||||
LI64 r32, 10d
|
||||
CP r2, r32
|
||||
JAL r31, r0, :add_one
|
||||
CP r32, r1
|
||||
LI64 r33, 20d
|
||||
CP r2, r33
|
||||
JAL r31, r0, :add_two
|
||||
CP r33, r1
|
||||
ADD64 r32, r33, r32
|
||||
CP r1, r32
|
||||
LD r31, r254, 0a, 24h
|
||||
ADDI64 r254, r254, 24d
|
||||
JALA r0, r31, 0a
|
||||
code size: 176
|
||||
ret: 33
|
||||
status: Ok(())
|
38 lang/tests/son_tests_generic_functions.txt Normal file
@@ -0,0 +1,38 @@
add:
|
||||
CP r13, r2
|
||||
CP r14, r3
|
||||
ADD64 r13, r13, r14
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
add:
|
||||
CP r13, r2
|
||||
CP r14, r3
|
||||
ADD32 r13, r13, r14
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
add:
|
||||
JALA r0, r31, 0a
|
||||
main:
|
||||
ADDI64 r254, r254, -32d
|
||||
ST r31, r254, 0a, 32h
|
||||
JAL r31, r0, :add
|
||||
LI32 r32, 2w
|
||||
CP r2, r32
|
||||
CP r3, r32
|
||||
JAL r31, r0, :add
|
||||
CP r32, r1
|
||||
LI64 r33, 3d
|
||||
LI64 r34, 1d
|
||||
CP r2, r34
|
||||
CP r3, r33
|
||||
JAL r31, r0, :add
|
||||
CP r33, r1
|
||||
ANDI r32, r32, 4294967295d
|
||||
SUB64 r32, r32, r33
|
||||
CP r1, r32
|
||||
LD r31, r254, 0a, 32h
|
||||
ADDI64 r254, r254, 32d
|
||||
JALA r0, r31, 0a
|
||||
code size: 209
|
||||
ret: 0
|
||||
status: Ok(())
|
32 lang/tests/son_tests_generic_type_mishap.txt Normal file
@@ -0,0 +1,32 @@
main:
|
||||
ADDI64 r254, r254, -8d
|
||||
ST r31, r254, 0a, 8h
|
||||
JAL r31, r0, :process
|
||||
LD r31, r254, 0a, 8h
|
||||
ADDI64 r254, r254, 8d
|
||||
JALA r0, r31, 0a
|
||||
opaque:
|
||||
JALA r0, r31, 0a
|
||||
process:
|
||||
ADDI64 r254, r254, -48d
|
||||
ST r31, r254, 16a, 32h
|
||||
ADDI64 r33, r254, 0d
|
||||
ST r0, r254, 0a, 1h
|
||||
LI64 r32, 1000d
|
||||
4: JGTU r32, r0, :0
|
||||
JMP :1
|
||||
0: CP r2, r33
|
||||
JAL r31, r0, :opaque
|
||||
LD r34, r254, 0a, 1h
|
||||
ANDI r34, r34, 255d
|
||||
JEQ r34, r0, :2
|
||||
JMP :3
|
||||
2: ADDI64 r32, r32, -1d
|
||||
1: JMP :4
|
||||
3: LD r31, r254, 16a, 32h
|
||||
ADDI64 r254, r254, 48d
|
||||
JALA r0, r31, 0a
|
||||
timed out
|
||||
code size: 248
|
||||
ret: 0
|
||||
status: Ok(())
|
219 lang/tests/son_tests_generic_types.txt Normal file
@@ -0,0 +1,219 @@
deinit:
|
||||
ADDI64 r254, r254, -40d
|
||||
ST r31, r254, 0a, 40h
|
||||
CP r32, r2
|
||||
LD r33, r32, 16a, 8h
|
||||
LI64 r34, 8d
|
||||
MUL64 r33, r33, r34
|
||||
LD r35, r32, 0a, 8h
|
||||
CP r2, r35
|
||||
CP r3, r33
|
||||
CP r4, r34
|
||||
JAL r31, r0, :free
|
||||
CP r1, r32
|
||||
JAL r31, r0, :new
|
||||
LD r31, r254, 0a, 40h
|
||||
ADDI64 r254, r254, 40d
|
||||
JALA r0, r31, 0a
|
||||
deinit:
|
||||
ADDI64 r254, r254, -40d
|
||||
ST r31, r254, 0a, 40h
|
||||
CP r32, r2
|
||||
LI64 r33, 1d
|
||||
LD r34, r32, 16a, 8h
|
||||
LD r35, r32, 0a, 8h
|
||||
CP r2, r35
|
||||
CP r3, r34
|
||||
CP r4, r33
|
||||
JAL r31, r0, :free
|
||||
CP r1, r32
|
||||
JAL r31, r0, :new
|
||||
LD r31, r254, 0a, 40h
|
||||
ADDI64 r254, r254, 40d
|
||||
JALA r0, r31, 0a
|
||||
free:
|
||||
CP r13, r2
|
||||
CP r14, r3
|
||||
CP r15, r4
|
||||
LRA r16, r0, :free_sys_call
|
||||
LD r16, r16, 0a, 8h
|
||||
CP r2, r16
|
||||
CP r3, r13
|
||||
CP r4, r14
|
||||
CP r5, r15
|
||||
ECA
|
||||
CP r13, r1
|
||||
JALA r0, r31, 0a
|
||||
main:
|
||||
ADDI64 r254, r254, -88d
|
||||
ST r31, r254, 48a, 40h
|
||||
ADDI64 r32, r254, 24d
|
||||
CP r1, r32
|
||||
JAL r31, r0, :new
|
||||
LI64 r33, 35d
|
||||
CP r2, r32
|
||||
CP r3, r33
|
||||
JAL r31, r0, :push
|
||||
ADDI64 r33, r254, 0d
|
||||
CP r1, r33
|
||||
JAL r31, r0, :new
|
||||
LI8 r34, 34b
|
||||
CP r2, r33
|
||||
CP r3, r34
|
||||
JAL r31, r0, :push
|
||||
LD r34, r254, 0a, 8h
|
||||
LD r34, r34, 0a, 1h
|
||||
LD r35, r254, 24a, 8h
|
||||
LD r35, r35, 0a, 8h
|
||||
CP r2, r33
|
||||
JAL r31, r0, :deinit
|
||||
CP r2, r32
|
||||
JAL r31, r0, :deinit
|
||||
ANDI r32, r34, 255d
|
||||
ADD64 r32, r35, r32
|
||||
CP r1, r32
|
||||
LD r31, r254, 48a, 40h
|
||||
ADDI64 r254, r254, 88d
|
||||
JALA r0, r31, 0a
|
||||
malloc:
|
||||
CP r13, r2
|
||||
CP r14, r3
|
||||
LRA r15, r0, :malloc_sys_call
|
||||
LD r15, r15, 0a, 8h
|
||||
CP r2, r15
|
||||
CP r3, r13
|
||||
CP r4, r14
|
||||
ECA
|
||||
CP r13, r1
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
new:
|
||||
ADDI64 r254, r254, -24d
|
||||
CP r15, r1
|
||||
LI64 r14, 8d
|
||||
ADDI64 r13, r254, 0d
|
||||
ST r14, r254, 0a, 8h
|
||||
ST r0, r254, 8a, 8h
|
||||
ST r0, r254, 16a, 8h
|
||||
BMC r13, r15, 24h
|
||||
ADDI64 r254, r254, 24d
|
||||
JALA r0, r31, 0a
|
||||
new:
|
||||
ADDI64 r254, r254, -24d
|
||||
CP r15, r1
|
||||
LI64 r14, 1d
|
||||
ADDI64 r13, r254, 0d
|
||||
ST r14, r254, 0a, 8h
|
||||
ST r0, r254, 8a, 8h
|
||||
ST r0, r254, 16a, 8h
|
||||
BMC r13, r15, 24h
|
||||
ADDI64 r254, r254, 24d
|
||||
JALA r0, r31, 0a
|
||||
push:
|
||||
ADDI64 r254, r254, -80d
|
||||
ST r31, r254, 0a, 80h
|
||||
CP r36, r2
|
||||
CP r37, r3
|
||||
LI64 r35, 1d
|
||||
LD r33, r36, 8a, 8h
|
||||
LD r32, r36, 16a, 8h
|
||||
JNE r32, r33, :0
|
||||
JNE r32, r0, :1
|
||||
CP r32, r35
|
||||
JMP :2
|
||||
1: MULI64 r32, r32, 2d
|
||||
2: CP r2, r32
|
||||
CP r3, r35
|
||||
JAL r31, r0, :malloc
|
||||
ST r32, r36, 16a, 8h
|
||||
CP r34, r1
|
||||
JNE r34, r0, :3
|
||||
CP r1, r0
|
||||
JMP :4
|
||||
3: LD r32, r36, 0a, 8h
|
||||
ADD64 r38, r33, r32
|
||||
CP r33, r34
|
||||
7: LD r39, r36, 0a, 8h
|
||||
LD r40, r36, 8a, 8h
|
||||
JNE r38, r32, :5
|
||||
JEQ r40, r0, :6
|
||||
CP r2, r39
|
||||
CP r3, r40
|
||||
CP r4, r35
|
||||
JAL r31, r0, :free
|
||||
JMP :6
|
||||
6: ST r34, r36, 0a, 8h
|
||||
JMP :0
|
||||
5: LD r39, r32, 0a, 1h
|
||||
ST r39, r33, 0a, 1h
|
||||
ADDI64 r33, r33, 1d
|
||||
ADDI64 r32, r32, 1d
|
||||
JMP :7
|
||||
0: LD r32, r36, 8a, 8h
|
||||
LD r33, r36, 0a, 8h
|
||||
ADD64 r33, r32, r33
|
||||
ST r37, r33, 0a, 1h
|
||||
ADD64 r32, r32, r35
|
||||
ST r32, r36, 8a, 8h
|
||||
CP r1, r33
|
||||
4: LD r31, r254, 0a, 80h
|
||||
ADDI64 r254, r254, 80d
|
||||
JALA r0, r31, 0a
|
||||
push:
|
||||
ADDI64 r254, r254, -88d
|
||||
ST r31, r254, 0a, 88h
|
||||
CP r36, r2
|
||||
CP r37, r3
|
||||
LI64 r35, 1d
|
||||
LD r33, r36, 8a, 8h
|
||||
LD r32, r36, 16a, 8h
|
||||
JNE r32, r33, :0
|
||||
JNE r32, r0, :1
|
||||
CP r32, r35
|
||||
JMP :2
|
||||
1: MULI64 r32, r32, 2d
|
||||
2: LI64 r38, 8d
|
||||
MUL64 r34, r32, r38
|
||||
CP r2, r34
|
||||
CP r3, r38
|
||||
JAL r31, r0, :malloc
|
||||
ST r32, r36, 16a, 8h
|
||||
CP r34, r1
|
||||
JNE r34, r0, :3
|
||||
CP r1, r0
|
||||
JMP :4
|
||||
3: MULI64 r33, r33, 8d
|
||||
LD r32, r36, 0a, 8h
|
||||
ADD64 r39, r32, r33
|
||||
CP r33, r34
|
||||
7: LD r40, r36, 0a, 8h
|
||||
LD r41, r36, 8a, 8h
|
||||
JNE r39, r32, :5
|
||||
JEQ r41, r0, :6
|
||||
MUL64 r32, r41, r38
|
||||
CP r2, r40
|
||||
CP r3, r32
|
||||
CP r4, r38
|
||||
JAL r31, r0, :free
|
||||
JMP :6
|
||||
6: ST r34, r36, 0a, 8h
|
||||
JMP :0
|
||||
5: LD r40, r32, 0a, 8h
|
||||
ST r40, r33, 0a, 8h
|
||||
ADDI64 r33, r33, 8d
|
||||
ADDI64 r32, r32, 8d
|
||||
JMP :7
|
||||
0: LD r32, r36, 8a, 8h
|
||||
MULI64 r33, r32, 8d
|
||||
LD r34, r36, 0a, 8h
|
||||
ADD64 r33, r34, r33
|
||||
ST r37, r33, 0a, 8h
|
||||
ADD64 r32, r32, r35
|
||||
ST r32, r36, 8a, 8h
|
||||
CP r1, r33
|
||||
4: LD r31, r254, 0a, 88h
|
||||
ADDI64 r254, r254, 88d
|
||||
JALA r0, r31, 0a
|
||||
code size: 1623
|
||||
ret: 69
|
||||
status: Ok(())
|
19 lang/tests/son_tests_global_aliasing_overptimization.txt Normal file
@@ -0,0 +1,19 @@
clobber:
|
||||
LRA r13, r0, :var
|
||||
ST r0, r13, 0a, 8h
|
||||
JALA r0, r31, 0a
|
||||
main:
|
||||
ADDI64 r254, r254, -24d
|
||||
ST r31, r254, 0a, 24h
|
||||
LRA r32, r0, :var
|
||||
LI64 r33, 2d
|
||||
ST r33, r32, 0a, 8h
|
||||
JAL r31, r0, :clobber
|
||||
LD r32, r32, 0a, 8h
|
||||
CP r1, r32
|
||||
LD r31, r254, 0a, 24h
|
||||
ADDI64 r254, r254, 24d
|
||||
JALA r0, r31, 0a
|
||||
code size: 159
|
||||
ret: 0
|
||||
status: Ok(())
|
23 lang/tests/son_tests_global_variable_wiredness.txt Normal file
@@ -0,0 +1,23 @@
inb:
|
||||
CP r1, r0
|
||||
JALA r0, r31, 0a
|
||||
main:
|
||||
ADDI64 r254, r254, -24d
|
||||
ST r31, r254, 0a, 24h
|
||||
LRA r32, r0, :ports
|
||||
LD r33, r32, 0a, 1h
|
||||
ANDI r33, r33, 255d
|
||||
JNE r33, r0, :0
|
||||
JMP :1
|
||||
0: JAL r31, r0, :inb
|
||||
CP r33, r1
|
||||
CMPU r33, r33, r0
|
||||
CMPUI r33, r33, 0d
|
||||
NOT r33, r33
|
||||
ST r33, r32, 0a, 1h
|
||||
1: LD r31, r254, 0a, 24h
|
||||
ADDI64 r254, r254, 24d
|
||||
JALA r0, r31, 0a
|
||||
code size: 164
|
||||
ret: 0
|
||||
status: Ok(())
|
10 lang/tests/son_tests_global_variables.txt Normal file
@@ -0,0 +1,10 @@
main:
|
||||
LRA r13, r0, :complex_global_var
|
||||
LD r14, r13, 0a, 8h
|
||||
ADDI64 r14, r14, 5d
|
||||
ST r14, r13, 0a, 8h
|
||||
CP r1, r14
|
||||
JALA r0, r31, 0a
|
||||
code size: 74
|
||||
ret: 55
|
||||
status: Ok(())
|
6 lang/tests/son_tests_hex_octal_binary_literals.txt Normal file
@@ -0,0 +1,6 @@
main:
|
||||
CP r1, r0
|
||||
JALA r0, r31, 0a
|
||||
code size: 22
|
||||
ret: 0
|
||||
status: Ok(())
|
20 lang/tests/son_tests_idk.txt Normal file
@@ -0,0 +1,20 @@
main:
|
||||
ADDI64 r254, r254, -128d
|
||||
ADDI64 r14, r254, 0d
|
||||
LI8 r15, 69b
|
||||
LI64 r16, 128d
|
||||
CP r13, r0
|
||||
2: LD r17, r254, 42a, 1h
|
||||
JLTU r13, r16, :0
|
||||
ANDI r13, r17, 255d
|
||||
CP r1, r13
|
||||
JMP :1
|
||||
0: ADD64 r17, r14, r13
|
||||
ST r15, r17, 0a, 1h
|
||||
ADDI64 r13, r13, 1d
|
||||
JMP :2
|
||||
1: ADDI64 r254, r254, 128d
|
||||
JALA r0, r31, 0a
|
||||
code size: 138
|
||||
ret: 69
|
||||
status: Ok(())
|