forked from AbleOS/holey-bytes
Compare commits
729 commits
Author | SHA1 | Date
---|---|---
5df4fb8882 | |||
86ca959ea3 | |||
f353bd5882 | |||
cad0a828d0 | |||
fb119bc6eb | |||
aa83ed2ec9 | |||
fb11c94af4 | |||
b030b1eeb7 | |||
4856533b22 | |||
d8d039b67a | |||
b760d9ef75 | |||
e587de1778 | |||
b12579ff65 | |||
0aa355695a | |||
4a857d2317 | |||
2253ac6198 | |||
05a7bd0583 | |||
ab55ec0240 | |||
able | 8353ab58a5 | ||
f527d61c1e | |||
f3879cb013 | |||
e89511b14c | |||
1c135a3050 | |||
f83194359c | |||
becd5c4b7f | |||
37dd13cab2 | |||
bc2dd82eb7 | |||
aa2de502cc | |||
542c69fd60 | |||
95e9270fef | |||
fe5a8631f6 | |||
8892dd729a | |||
a7718e1220 | |||
e079bbd312 | |||
12b9d43754 | |||
397b2a4b1b | |||
12bb7029b4 | |||
a64383e72b | |||
2034152c83 | |||
13714eb513 | |||
4088bd18b1 | |||
e94b812b3b | |||
e5d6b35f66 | |||
e6df9b6b01 | |||
baa70d3f12 | |||
ec4499e519 | |||
085c593add | |||
867a750d8f | |||
b1b6d9eba1 | |||
12be64965f | |||
7058efe75c | |||
afc1c5aac5 | |||
83146cfd61 | |||
bb625a9e19 | |||
81cf39b602 | |||
e4da9cc927 | |||
454b0ffd1c | |||
981c17ff19 | |||
d01e31b203 | |||
9cb273a04b | |||
2e2b7612d9 | |||
f493c2776f | |||
f77bc52465 | |||
f524013c34 | |||
3c86eafe72 | |||
0d87bf8f09 | |||
e5a4561f07 | |||
b71031c146 | |||
dd51961fbb | |||
63f2a0dac0 | |||
4ec88e3397 | |||
f1e715e9bd | |||
80fd0e89b4 | |||
9949086011 | |||
c701eb7b6d | |||
f1deab11c9 | |||
f079daa42d | |||
7cac9382ad | |||
ce2f7d2059 | |||
f5f9060803 | |||
ad7fb5d0fc | |||
d99672b751 | |||
7def052749 | |||
b2eefa5b83 | |||
3c35557872 | |||
b6274f3455 | |||
c61efc3933 | |||
654005eea2 | |||
335e6ec20a | |||
1e02efc1eb | |||
8b98c2ed1b | |||
c353d28be0 | |||
7865d692a1 | |||
29a23cec0c | |||
5dce4df2a1 | |||
42a713aeae | |||
823c78bf74 | |||
c657084451 | |||
63a1c7feb4 | |||
bedffa9b32 | |||
b8032aa840 | |||
65e9f272a8 | |||
d2052cd2a3 | |||
29367d8f8b | |||
a299bad75b | |||
7d48d3beb1 | |||
68c0248189 | |||
0ef74d89cb | |||
1b2b9f899d | |||
455f70db6e | |||
0374848b28 | |||
513d2c7127 | |||
9d2f419140 | |||
f535ea7b0a | |||
be6d0d3f18 | |||
2718ef8523 | |||
3ee78f3a31 | |||
2bac7c1fb3 | |||
mlokis | 79a3f1ab2b | ||
koniifer | b15e66b2af | ||
koniifer | d2ba7cc101 | ||
koniifer | d3ee72306e | ||
87cb77a553 | |||
276d1bb0cf | |||
5cce904135 | |||
3338d50672 | |||
2e36f32ae0 | |||
e8f1d2af8c | |||
999b25df8b | |||
61250c906a | |||
44fc9c3e2e | |||
798000c756 | |||
9de631234d | |||
843fbddf3b | |||
38a00cbaa0 | |||
4664240e08 | |||
728d563cea | |||
56984f08ff | |||
3f9f99ff65 | |||
9ed3c7ab9e | |||
acacd10ee9 | |||
f6f661cee3 | |||
4bfb5f192e | |||
ea628c1278 | |||
7448339605 | |||
da7cd5926c | |||
9cf7933251 | |||
24b9f9e78b | |||
80558ea7e6 | |||
348d9014e3 | |||
30bd6103a6 | |||
97eb985a02 | |||
7ef1adf7e2 | |||
be828b8c54 | |||
b4b3bae104 | |||
33d78fbc52 | |||
be2d38a6d2 | |||
bbd7e12af4 | |||
37db783699 | |||
948710dc27 | |||
f0a588fcff | |||
9c32f260a1 | |||
047e1ed15c | |||
2c2f0c048b | |||
3c12c0e288 | |||
ca8497550a | |||
849e842336 | |||
5c82623db9 | |||
e8a8fa3eb1 | |||
5926f69e6c | |||
83d3fb4919 | |||
b429534d23 | |||
b187af64a8 | |||
ce7bb001da | |||
9c90adbfe8 | |||
db62434736 | |||
3d721812f0 | |||
5b23a0661b | |||
7c919cd453 | |||
bb61526d3e | |||
45e1c6743a | |||
39588579a8 | |||
9095af6d84 | |||
b62413046d | |||
af4d965b8c | |||
855da58e06 | |||
2fc24f0f58 | |||
8016b1fad5 | |||
46f9903562 | |||
517850f283 | |||
faa8dd2e6f | |||
d23d010917 | |||
b1da36ecde | |||
e62aab9b4b | |||
423361a80e | |||
62a7c61cdc | |||
2bab16d3ce | |||
c88daa4800 | |||
6988d8893f | |||
64e228450f | |||
897e121eeb | |||
648bd24d0d | |||
aefa7e6405 | |||
026f6141e6 | |||
cb88edea1f | |||
127e8dcb38 | |||
9c43dafcf5 | |||
e65dbcfcbe | |||
e0d4955bd5 | |||
78ebc3292c | |||
0c2db878f0 | |||
cb9d7f7d1e | |||
41b70bec43 | |||
f013e90936 | |||
6977cb218c | |||
3f30735eaa | |||
58f4837ae0 | |||
b95bddac7b | |||
7d53706e71 | |||
4d699fcbf1 | |||
5aa6150c70 | |||
b0a85f44c9 | |||
2aa5ba9abc | |||
35d34dca54 | |||
bc817c4ea2 | |||
0298b32e38 | |||
73c9ccef6a | |||
ad4aed9c98 | |||
8528bef8cf | |||
11c8755b18 | |||
d5c90b95a7 | |||
1da900461c | |||
3aff6fc006 | |||
ccfde6c237 | |||
44c4b71bb3 | |||
c3a6e62bf2 | |||
00949c4ea8 | |||
15e4762d4a | |||
959bfd7f76 | |||
6ad0b41759 | |||
89cc611f7a | |||
cf74fdd99c | |||
58578dd4b2 | |||
4a7b4e4ead | |||
c900f4ef5c | |||
3a494147ec | |||
4336fec653 | |||
11f6537a09 | |||
da58a5926d | |||
f5ef62c6bb | |||
f386c332e5 | |||
23b90b3dd7 | |||
ea736d8824 | |||
dc2e0cc5b3 | |||
c9b85f9004 | |||
af147b3cb6 | |||
0f8a720fe8 | |||
2ab6f6c914 | |||
54d93608aa | |||
19a6cdd764 | |||
2660d976fe | |||
659ccbd637 | |||
3a2367f24f | |||
0f4ff918d2 | |||
6d7e726066 | |||
9e65f3949d | |||
bf00dc85b2 | |||
69b58c2b36 | |||
5364b66629 | |||
c4826d3bfd | |||
07638caff0 | |||
5ef1ec4811 | |||
f0ae65606d | |||
a538c0ddb0 | |||
c31d1dcb9c | |||
54a7f85978 | |||
e200c2fc98 | |||
1626734c1a | |||
13f63c7700 | |||
c7dbe1c43d | |||
4c15f61cb7 | |||
f1ea01ef0c | |||
2361e166cd | |||
4d913462cb | |||
bdc2c43773 | |||
b2254e9820 | |||
d293e02f62 | |||
1ee8d464c6 | |||
2a4d27d8e6 | |||
1f5846afaa | |||
006bc80f12 | |||
802e8b5d55 | |||
6b7572f089 | |||
1d04287532 | |||
8b6d9b5de3 | |||
136bba1631 | |||
c1b00b6d6b | |||
a51b23187d | |||
c3f9e535d3 | |||
6d805dc2ec | |||
4291ebc25e | |||
02c74a181d | |||
c0d4464097 | |||
602249a48a | |||
338e3f1519 | |||
0e9f4402cb | |||
6057e88034 | |||
2a3d077476 | |||
8e62bd747b | |||
b8ff503c14 | |||
9e69e53e24 | |||
4d163a2313 | |||
e4e7f8d5b5 | |||
4849807353 | |||
6e30968c54 | |||
6fc0eb3498 | |||
98dfd6b09c | |||
ece9bb8bf2 | |||
09fcbbc03b | |||
a7fda408ef | |||
5d77ae93b4 | |||
4a9b9de87f | |||
bba3570788 | |||
6852452f1a | |||
254d5ed962 | |||
faf068885a | |||
a2e864360e | |||
79e4cead2d | |||
6968e7d769 | |||
c133c2dbe7 | |||
2bc7a5c13f | |||
16e2c32521 | |||
da85d91a09 | |||
e2a8373c42 | |||
fbdabd8314 | |||
39c4526797 | |||
2e3fbfa966 | |||
eebabc5070 | |||
b177cbe7c7 | |||
641d344d2d | |||
dc418bd5e0 | |||
8bbc40b9b1 | |||
8083bcb0e8 | |||
8928888481 | |||
d64fa7e1f9 | |||
b51f964cae | |||
67b8ffe2f2 | |||
32bed04914 | |||
6cb9489e9a | |||
73727c2383 | |||
e8a5027cab | |||
50f3350418 | |||
bb41da484f | |||
ee30069195 | |||
58c1c29293 | |||
49387dbe16 | |||
803095c0c5 | |||
514c2fe630 | |||
b4f64656fe | |||
73e13bd93c | |||
b404e5b86d | |||
4bcab25231 | |||
414a07b99a | |||
fdf4cccde0 | |||
1a3b0c2eec | |||
955e4a5c7a | |||
d9aab2191b | |||
9dd09b2122 | |||
937c107dec | |||
ed1b9459fc | |||
f063d0a4fd | |||
a21dee61e7 | |||
3807276a55 | |||
894f73ca35 | |||
00ad474881 | |||
9e0e0242aa | |||
a31e02449c | |||
b956cc78bb | |||
7279ed88e9 | |||
9500db8764 | |||
9404eb32a2 | |||
f172c33247 | |||
75dca64648 | |||
97c62e424a | |||
a2c08b6ef6 | |||
a78d2bc3e9 | |||
ad3fc1190c | |||
641be15703 | |||
cbe6f98dff | |||
9bdacfffb2 | |||
f13f500d6e | |||
mlokis | 4e9d6094bd | ||
28e33d11c9 | |||
koniifer | 581c4d531c | ||
9012f976c5 | |||
koniifer | 27462d9a33 | ||
koniifer | 781c40ede0 | ||
9af7bf559f | |||
5a6474f066 | |||
33a4bf7d01 | |||
cac99cd34d | |||
5555b9865a | |||
f964520641 | |||
a88d3a5c9d | |||
416f646957 | |||
12b39c5b3f | |||
4dcaae8362 | |||
ab903fa4ea | |||
c48a2d2799 | |||
fb01407465 | |||
71359d82aa | |||
29d5774c47 | |||
434acfbc7b | |||
6a03f125a5 | |||
03aedb5d3f | |||
a1179f3320 | |||
ba73a89171 | |||
523ca6d103 | |||
654b7eb7af | |||
4c3b63df25 | |||
9a8a56fe97 | |||
aeb3a37f7d | |||
3c01a40ef2 | |||
4f9d4f2e71 | |||
25bbe247e9 | |||
ab41d49a3d | |||
11cb875882 | |||
8984dce0e7 | |||
fd64968f3a | |||
e00f2f08c8 | |||
880cd66c66 | |||
fa41c56cb3 | |||
efa7271a59 | |||
bd7384123c | |||
e9589ebcae | |||
22f925b3f5 | |||
3807fe22da | |||
12c7467be2 | |||
cdc8cb35f7 | |||
36bd1a796b | |||
59705c062d | |||
9fe734c68c | |||
dc0562553d | |||
91907a90ff | |||
e147358fce | |||
f9e46b4641 | |||
93deeee6b9 | |||
876690319f | |||
c835317287 | |||
8442b55aa6 | |||
e07265c88b | |||
6a69042cb7 | |||
c85437e4e8 | |||
76b3f9ff4b | |||
66c3f7b0d4 | |||
b04d9e517e | |||
b46c64db4f | |||
6de8496aa5 | |||
499fe34f1d | |||
36d978d798 | |||
bd2a49d29a | |||
1c8645bf11 | |||
1624559e7b | |||
1ca5d89644 | |||
61ecbbd304 | |||
002a7df509 | |||
20903ef294 | |||
aafcb2fbbd | |||
98862edd58 | |||
b9de362ba2 | |||
e494785f93 | |||
aef9951bc5 | |||
b922dbd232 | |||
71c4d3632a | |||
8cb9f2eaac | |||
aae217dd00 | |||
4502a64514 | |||
ca1d471646 | |||
2dff9f7244 | |||
3127d04e41 | |||
589a30c8a3 | |||
8b81cfef37 | |||
6b74640c3f | |||
87ba7aa203 | |||
78f9eb6acc | |||
3c09a5f23e | |||
70955c1792 | |||
d8a922df26 | |||
9aa5da82c9 | |||
fb481a0600 | |||
d90f386bd2 | |||
c14e6c352d | |||
9ccf91d072 | |||
7cca9a3683 | |||
b28baa86f7 | |||
2226a47aaa | |||
0aec47e985 | |||
5c38115119 | |||
c3cbd054f7 | |||
06e30529bf | |||
4ec635dc56 | |||
a08856a464 | |||
d5a5c932e7 | |||
bc59886428 | |||
f87959aacb | |||
80b05779ea | |||
4bb5ec1953 | |||
2aa315a863 | |||
86013a50a4 | |||
465b185452 | |||
b794fa7c3c | |||
able | ebefc85566 | ||
a3c4b878b2 | |||
7f32e7775c | |||
1d74f27b0e | |||
7435218999 | |||
cf99091a45 | |||
81952cfc40 | |||
68d53544fd | |||
aa77a2f822 | |||
b80528bfd7 | |||
1c08148dc9 | |||
774735b515 | |||
870c1f4718 | |||
326adf47ce | |||
able | 37ff58c5e5 | ||
Erin | cbf4c6572a | ||
Erin | 0070016f74 | ||
Erin | 6c6e29479f | ||
Erin | eb46b24a10 | ||
Erin | 942839a5f8 | ||
9ddc336ecd | |||
Erin | 34d1bf415e | ||
c978b408e2 | |||
Erin | fe9a0667b8 | ||
975ce8a9fe | |||
8a3dd3001d | |||
Erin | de723980da | ||
Erin | 4aa39f3fbc | ||
Erin | 5f8864e251 | ||
bcbe47bcd6 | |||
Erin | 30ee6c84fc | ||
6e464be33d | |||
09aacff161 | |||
433f2db4d1 | |||
e335e55aa0 | |||
Erin | 8e0aeabc07 | ||
Erin | a84e93d562 | ||
Erin | 8374dfe20a | ||
Erin | e9e1242743 | ||
Erin | f604a2463d | ||
Erin | 59e38db874 | ||
Erin | 8c257e9216 | ||
Erin | 84cc1db691 | ||
Erin | 633e3adc61 | ||
Erin | 7f981fe9a0 | ||
Erin | 43c36774a5 | ||
Erin | 5dd0e22c0d | ||
Erin | b161d46a5b | ||
Erin | 42488e1e4a | ||
Erin | b84ff70014 | ||
Erin | b8432d544c | ||
Erin | 207d8d7fa6 | ||
Erin | 569f154bcc | ||
Erin | 68094ce0ae | ||
Bee | 84dcbfc6bb | ||
Bee | d26c285ca7 | ||
Erin | d255967125 | ||
Erin | c5c8d23470 | ||
Erin | aca8045a98 | ||
Erin | 398687d8bf | ||
Erin | a7c4379976 | ||
Erin | 949dd3ba61 | ||
Erin | 3771180909 | ||
Erin | 6b3a132451 | ||
Erin | b45d235312 | ||
Erin | 9ee3e9cb5f | ||
Erin | 57f30109c8 | ||
Erin | d6243fa99f | ||
Erin | 3a6d0fdd2d | ||
Erin | 9b823ef660 | ||
Erin | 84aeac0b2a | ||
Erin | fc4118938e | ||
Erin | cb557d1361 | ||
Erin | 2715bc9107 | ||
Erin | 8182abca98 | ||
Erin | 83563fff84 | ||
Erin | b4923cfb95 | ||
Erin | eab47db4d6 | ||
Erin | 4b45407a70 | ||
Erin | a944a145ed | ||
Erin | 0e701e31b5 | ||
Erin | 0cb20d5727 | ||
Erin | 889aefe87a | ||
Erin | 59be906835 | ||
Erin | 441356b6f2 | ||
Erin | 2f8612c6d2 | ||
Erin | 3e4095da6f | ||
Erin | b1bdbea991 | ||
Erin | 8c8c708279 | ||
Erin | 35f90e94a8 | ||
able | e7aa306e5d | ||
Erin | 42be580425 | ||
Erin | 9b8a4a718e | ||
Erin | 0d2949024c | ||
Erin | 26105bab70 | ||
Erin | 006dcca309 | ||
Erin | 3034469e89 | ||
Erin | 30070818ae | ||
Erin | d282b3d111 | ||
Erin | 600528434b | ||
Erin | 0deeaf3a7e | ||
Erin | 3decd01619 | ||
Erin | a071a4a7ae | ||
Erin | af1a7d3bfa | ||
Erin | 3fdf936f77 | ||
Erin | 96b749060d | ||
Erin | 770c2ebcf0 | ||
Erin | 6609bd10c5 | ||
Erin | 97eaae1c76 | ||
Erin | 1460a7a230 | ||
Erin | 529fbdaed4 | ||
Erin | 3ac80a2e3d | ||
Erin | 06d66289bc | ||
Erin | 430ccd170d | ||
Erin | e2d3f46d3f | ||
Erin | eadf9e0a1f | ||
Erin | d74b32a38d | ||
Erin | 4530ff049e | ||
Erin | 2d2978eec7 | ||
Erin | bf50bcb203 | ||
Erin | 82f23ec2e2 | ||
Erin | 5264576274 | ||
Erin | cdee99598e | ||
Erin | f130a27685 | ||
Erin | aa186b35c2 | ||
Erin | 629fc969c2 | ||
Erin | 8287b1bdc1 | ||
Erin | 73b998911c | ||
Erin | 1a5f101719 | ||
Erin | a667c36d6c | ||
Erin | 582c716445 | ||
Erin | 37a1923f1e | ||
Erin | 10f9907c09 | ||
Erin | 2480a65947 | ||
Erin | 1ed153a9a2 | ||
Erin | 19df4538d7 | ||
Erin | e07bfb2301 | ||
Erin | cfe3203ef1 | ||
Erin | c4e062e742 | ||
Erin | 83436507df | ||
Erin | bdda987da9 | ||
Erin | 6588837769 | ||
Erin | f2ec9a3034 | ||
Erin | 95c979cb83 | ||
Erin | 66f634a70f | ||
Erin | 077da50787 | ||
Erin | 5055626968 | ||
Erin | 0f5d78bb27 | ||
Erin | 668b324cc8 | ||
Erin | 759514686a | ||
Erin | 92793dc93b | ||
Erin | ac149a5319 | ||
able | f4c55ae3cc | ||
Erin | a82686ec07 | ||
Erin | b3a6c42af3 | ||
Erin | d20447dd15 | ||
Erin | 193be0bd5a | ||
Erin | fce3fa5210 | ||
Erin | 4ca4e81ac3 | ||
Erin | dcfd51999d | ||
Erin | 1532c501a6 | ||
Erin | 8eebbacb91 | ||
Erin | c621a5c71d | ||
Erin | 8d5d22eae1 | ||
Erin | 3892a719eb | ||
Erin | 47323e140c | ||
Erin | 3833beb17d | ||
Erin | afbf6dd2e4 | ||
Erin | ec7053a289 | ||
Erin | 1a53c80a62 | ||
Erin | 6fe1fd91bf | ||
able | ac7fdc7688 | ||
able | 9cf8789e9a | ||
able | 3534b26946 | ||
Erin | a21f68ffa6 | ||
Erin | 7833334713 | ||
Erin | 141c5f524f | ||
Erin | 446225bcf6 | ||
d6ea5adf49 | |||
898738fb40 | |||
Erin | beb6e23d71 | ||
5afd081c2a | |||
Erin | 81cf5c4336 | ||
63cf7ac0b0 | |||
Erin | 3cb3ee1fee | ||
Erin | 4dfbe93919 | ||
Erin | 6791b6d48e | ||
Erin | 0ed89234a7 | ||
6afec2a031 | |||
IntoTheNight | b83d1838aa | ||
e25a89d56d | |||
b72f0afe84 | |||
Erin | 9196519fae | ||
6759fbd2ab | |||
Erin | 4f53fb1c87 | ||
Erin | ad96e83f09 | ||
Erin | 5ee8a91479 | ||
Erin | bde00c13f2 | ||
Erin | b4dac1245b | ||
Erin | e700010e7f | ||
Erin | 2d34ed61d0 | ||
Erin | 3919aa8100 | ||
Erin | a548a7b08e | ||
Erin | 132fc1a6ed | ||
able | c31c9e9a54 | ||
able | c26b559898 | ||
Erin | 907dd66d5e | ||
Erin | 2416526014 | ||
Erin | bb50c09538 | ||
Erin | a7cf5e4847 | ||
able | 87ec6ded54 | ||
able | 4a840a6ef0 | ||
able | 5ec6da9fb4 | ||
able | fdca041e6b | ||
Erin | 06b1184772 | ||
Erin | fb78e0a44a | ||
Erin | 7eaa01f53c | ||
Erin | 119ce4405f |

@@ -1,2 +1,4 @@
[alias]
xtask = "r -p xtask --"
wasm-build = "b --target wasm32-unknown-unknown --profile=small -Zbuild-std=core,alloc -Zbuild-std-features=optimize_for_size,panic_immediate_abort -p"
wasm-build-debug = "b --target wasm32-unknown-unknown --profile=small-dev -Zbuild-std=core,alloc -Zbuild-std-features=optimize_for_size -p"

12  .gitignore  (vendored)
@@ -1 +1,13 @@
# garbage
/target
rustc-ice-*

# sqlite
db.sqlite
db.sqlite-journal

# assets
/depell/src/*.gz
/depell/src/*.wasm
#**/*-sv.rs
/bytecode/src/instrs.rs

1603  Cargo.lock  (generated)
File diff suppressed because it is too large.

49  Cargo.toml
@@ -1,3 +1,50 @@
cargo-features = ["profile-rustflags"]

[workspace]
resolver = "2"
members = ["hbasm", "hbbytecode", "hbvm", "hbvm_aos_on_linux", "hbxrt", "xtask"]
members = [
    "bytecode",
    "vm",
    "xrt",
    "xtask",
    "lang",
    "depell",
    "depell/wasm-fmt",
    "depell/wasm-hbc",
    "depell/wasm-rt",
]

[workspace.dependencies]
hbbytecode = { path = "bytecode", default-features = false }
hbvm = { path = "vm", default-features = false }
hbxrt = { path = "xrt" }
hblang = { path = "lang", default-features = false }
hbjit = { path = "jit" }

[profile.release]
lto = true
#debug = true
strip = true
codegen-units = 1
panic = "abort"

[profile.small]
rustflags = ["-Zfmt-debug=none", "-Zlocation-detail=none"]
inherits = "release"
opt-level = "z"
strip = "debuginfo"
lto = true
codegen-units = 1
panic = "abort"

[profile.small-dev]
inherits = "dev"
opt-level = "z"
strip = "debuginfo"
panic = "abort"

[profile.fuzz]
inherits = "dev"
debug = true
opt-level = 3
panic = "abort"

@@ -3,5 +3,8 @@ name = "hbbytecode"
version = "0.1.0"
edition = "2018"

[dependencies]
with_builtin_macros = "0.0.3"
[features]
default = ["disasm"]
std = []
disasm = ["std"]

204  bytecode/build.rs  (new file)
@@ -0,0 +1,204 @@
#![feature(iter_next_chunk)]

use std::{collections::HashSet, fmt::Write};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("cargo:rerun-if-changed=build.rs");
    println!("cargo:rerun-if-changed=instructions.in");

    let mut generated = String::new();
    gen_instrs(&mut generated)?;
    std::fs::write("src/instrs.rs", generated)?;

    Ok(())
}

fn gen_instrs(generated: &mut String) -> Result<(), Box<dyn std::error::Error>> {
    writeln!(generated, "#![expect(dead_code)]")?;
    writeln!(generated, "use crate::*;")?;

    '_opcode_structs: {
        let mut seen = HashSet::new();
        for [.., args, _] in instructions() {
            if !seen.insert(args) {
                continue;
            }

            writeln!(generated, "#[derive(Clone, Copy, Debug)]")?;
            writeln!(generated, "#[repr(packed)]")?;
            write!(generated, "pub struct Ops{args}(")?;
            let mut first = true;
            for ch in args.chars().filter(|&ch| ch != 'N') {
                if !std::mem::take(&mut first) {
                    write!(generated, ",")?;
                }
                write!(generated, "pub Op{ch}")?;
            }
            writeln!(generated, ");")?;
            writeln!(generated, "unsafe impl BytecodeItem for Ops{args} {{}}")?;
        }
    }

    '_max_size: {
        let max = instructions()
            .map(
                |[_, _, ty, _]| {
                    if ty == "N" {
                        1
                    } else {
                        iter_args(ty).map(arg_to_width).sum::<usize>() + 1
                    }
                },
            )
            .max()
            .unwrap();

        writeln!(generated, "pub const MAX_SIZE: usize = {max};")?;
    }

    '_encoders: {
        for [op, name, ty, doc] in instructions() {
            writeln!(generated, "/// {}", doc.trim_matches('"'))?;
            let name = name.to_lowercase();
            let args = comma_sep(
                iter_args(ty)
                    .enumerate()
                    .map(|(i, c)| format!("{}{i}: {}", arg_to_name(c), arg_to_type(c))),
            );
            writeln!(generated, "pub fn {name}({args}) -> (usize, [u8; MAX_SIZE]) {{")?;
            let arg_names =
                comma_sep(iter_args(ty).enumerate().map(|(i, c)| format!("{}{i}", arg_to_name(c))));
            writeln!(generated, " unsafe {{ crate::encode({ty}({op}, {arg_names})) }}")?;
            writeln!(generated, "}}")?;
        }
    }

    '_structs: {
        let mut seen = std::collections::HashSet::new();
        for [_, _, ty, _] in instructions() {
            if !seen.insert(ty) {
                continue;
            }
            let types = comma_sep(iter_args(ty).map(arg_to_type).map(|s| s.to_string()));
            writeln!(generated, "#[repr(packed)] pub struct {ty}(u8, {types});")?;
        }
    }

    '_name_list: {
        writeln!(generated, "pub const COUNT: u8 = {};", instructions().count())?;
    }

    let instr = "Instr";
    let oper = "Oper";

    '_instr_enum: {
        writeln!(generated, "#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[repr(u8)]")?;
        writeln!(generated, "pub enum {instr} {{")?;
        for [id, name, ..] in instructions() {
            writeln!(generated, " {name} = {id},")?;
        }
        writeln!(generated, "}}")?;
    }

    '_arg_kind: {
        writeln!(generated, "#[derive(Debug, Clone, Copy, PartialEq, Eq)]")?;
        writeln!(generated, "pub enum {oper} {{")?;
        let mut seen = HashSet::new();
        for ty in instructions().flat_map(|[.., ty, _]| iter_args(ty)) {
            if !seen.insert(ty) {
                continue;
            }
            writeln!(generated, " {ty}({}),", arg_to_type(ty))?;
        }
        writeln!(generated, "}}")?;
    }

    '_parse_opers: {
        writeln!(
            generated,
            "/// This assumes the instruction byte is still at the beginning of the buffer"
        )?;
        writeln!(generated, "#[cfg(feature = \"disasm\")]")?;
        writeln!(generated, "pub fn parse_args(bytes: &mut &[u8], kind: {instr}, buf: &mut alloc::vec::Vec<{oper}>) -> Option<()> {{")?;
        writeln!(generated, " match kind {{")?;
        let mut instrs = instructions().collect::<Vec<_>>();
        instrs.sort_unstable_by_key(|&[.., ty, _]| ty);
        for group in instrs.chunk_by(|[.., a, _], [.., b, _]| a == b) {
            let ty = group[0][2];
            for &[_, name, ..] in group {
                writeln!(generated, " | {instr}::{name}")?;
            }
            generated.pop();
            writeln!(generated, " => {{")?;
            if iter_args(ty).count() != 0 {
                writeln!(generated, " let data = crate::decode::<{ty}>(bytes)?;")?;
                writeln!(
                    generated,
                    " buf.extend([{}]);",
                    comma_sep(
                        iter_args(ty).zip(1u32..).map(|(t, i)| format!("{oper}::{t}(data.{i})"))
                    )
                )?;
            } else {
                writeln!(generated, " crate::decode::<{ty}>(bytes)?;")?;
            }

            writeln!(generated, " }}")?;
        }
        writeln!(generated, " }}")?;
        writeln!(generated, " Some(())")?;
        writeln!(generated, "}}")?;
    }

    std::fs::write("src/instrs.rs", generated)?;
    Ok(())
}

fn comma_sep(items: impl Iterator<Item = String>) -> String {
    items.map(|item| item.to_string()).collect::<Vec<_>>().join(", ")
}

fn instructions() -> impl Iterator<Item = [&'static str; 4]> {
    include_str!("instructions.in")
        .lines()
        .filter_map(|line| line.strip_suffix(';'))
        .map(|line| line.splitn(4, ',').map(str::trim).next_chunk().unwrap())
}

fn arg_to_type(arg: char) -> &'static str {
    match arg {
        'R' | 'B' => "u8",
        'H' => "u16",
        'W' => "u32",
        'D' | 'A' => "u64",
        'P' => "i16",
        'O' => "i32",
        _ => panic!("unknown type: {}", arg),
    }
}

fn arg_to_width(arg: char) -> usize {
    match arg {
        'R' | 'B' => 1,
        'H' => 2,
        'W' => 4,
        'D' | 'A' => 8,
        'P' => 2,
        'O' => 4,
        _ => panic!("unknown type: {}", arg),
    }
}

fn arg_to_name(arg: char) -> &'static str {
    match arg {
        'R' => "reg",
        'B' | 'H' | 'W' | 'D' => "imm",
        'P' | 'O' => "offset",
        'A' => "addr",
        _ => panic!("unknown type: {}", arg),
    }
}

fn iter_args(ty: &'static str) -> impl Iterator<Item = char> {
    ty.chars().filter(|c| *c != 'N')
}

@@ -47,7 +47,7 @@
 0x2E, ADDI16, RRH, "Addition with immediate (16b)" ;
 0x2F, ADDI32, RRW, "Addition with immediate (32b)" ;
 0x30, ADDI64, RRD, "Addition with immediate (64b)" ;
-0x31, MULI8, RRW, "Multiplication with immediate (8b)" ;
+0x31, MULI8, RRB, "Multiplication with immediate (8b)" ;
 0x32, MULI16, RRH, "Multiplication with immediate (16b)" ;
 0x33, MULI32, RRW, "Multiplication with immediate (32b)" ;
 0x34, MULI64, RRD, "Multiplication with immediate (64b)" ;

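For orientation, this is roughly the code the build script above emits for one row of the instruction table. The struct and function shapes follow its `'_structs` and `'_encoders` blocks; the exact text is an illustration, not copied from the generated `bytecode/src/instrs.rs`.

```rust
// Sketch of the generated output for the row:
//   0x30, ADDI64, RRD, "Addition with immediate (64b)" ;
// (illustrative only; the real code lands in bytecode/src/instrs.rs)
#[repr(packed)] pub struct RRD(u8, u8, u8, u64);

/// Addition with immediate (64b)
pub fn addi64(reg0: u8, reg1: u8, imm2: u64) -> (usize, [u8; MAX_SIZE]) {
    unsafe { crate::encode(RRD(0x30, reg0, reg1, imm2)) }
}
```
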
284  bytecode/src/lib.rs  (new file)
@@ -0,0 +1,284 @@
#![no_std]

#[cfg(feature = "disasm")]
extern crate alloc;

pub use crate::instrs::*;
use core::convert::TryFrom;

mod instrs;

type OpR = u8;

type OpA = u64;
type OpO = i32;
type OpP = i16;

type OpB = u8;
type OpH = u16;
type OpW = u32;
type OpD = u64;

/// # Safety
/// Has to be valid to be decoded from bytecode.
pub unsafe trait BytecodeItem {}
unsafe impl BytecodeItem for u8 {}

impl TryFrom<u8> for Instr {
    type Error = u8;

    #[inline]
    fn try_from(value: u8) -> Result<Self, Self::Error> {
        #[cold]
        fn failed(value: u8) -> Result<Instr, u8> {
            Err(value)
        }

        if value < COUNT {
            unsafe { Ok(core::mem::transmute::<u8, Instr>(value)) }
        } else {
            failed(value)
        }
    }
}

#[inline]
unsafe fn encode<T>(instr: T) -> (usize, [u8; instrs::MAX_SIZE]) {
    let mut buf = [0; instrs::MAX_SIZE];
    core::ptr::write(buf.as_mut_ptr() as *mut T, instr);
    (core::mem::size_of::<T>(), buf)
}

#[inline]
#[cfg(feature = "disasm")]
fn decode<T>(binary: &mut &[u8]) -> Option<T> {
    let (front, rest) = core::mem::take(binary).split_at_checked(core::mem::size_of::<T>())?;
    *binary = rest;
    unsafe { Some(core::ptr::read(front.as_ptr() as *const T)) }
}

/// Rounding mode
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum RoundingMode {
    NearestEven = 0,
    Truncate = 1,
    Up = 2,
    Down = 3,
}

impl TryFrom<u8> for RoundingMode {
    type Error = ();

    fn try_from(value: u8) -> Result<Self, Self::Error> {
        (value <= 3).then(|| unsafe { core::mem::transmute(value) }).ok_or(())
    }
}

#[cfg(feature = "disasm")]
#[derive(Clone, Copy)]
pub enum DisasmItem {
    Func,
    Global,
}

#[cfg(feature = "disasm")]
#[derive(Debug)]
pub enum DisasmError<'a> {
    InvalidInstruction(u8),
    InstructionOutOfBounds(&'a str),
    FmtFailed(core::fmt::Error),
    HasOutOfBoundsJumps,
}

#[cfg(feature = "disasm")]
impl From<core::fmt::Error> for DisasmError<'_> {
    fn from(value: core::fmt::Error) -> Self {
        Self::FmtFailed(value)
    }
}

#[cfg(feature = "disasm")]
impl core::fmt::Display for DisasmError<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match *self {
            DisasmError::InvalidInstruction(b) => write!(f, "invalid instruction opcode: {b}"),
            DisasmError::InstructionOutOfBounds(name) => {
                write!(f, "instruction would go out of bounds of {name} symbol")
            }
            DisasmError::FmtFailed(error) => write!(f, "fmt failed: {error}"),
            DisasmError::HasOutOfBoundsJumps => write!(
                f,
                "the code contained jumps that dont got neither to a \
                valid symbol or local insturction"
            ),
        }
    }
}

#[cfg(feature = "disasm")]
impl core::error::Error for DisasmError<'_> {}

#[cfg(feature = "disasm")]
pub fn disasm<'a>(
    binary: &mut &[u8],
    functions: &alloc::collections::BTreeMap<u32, (&'a str, u32, DisasmItem)>,
    out: &mut alloc::string::String,
    mut eca_handler: impl FnMut(&mut &[u8]),
) -> Result<(), DisasmError<'a>> {
    use {
        self::instrs::Instr,
        alloc::{
            collections::btree_map::{BTreeMap, Entry},
            vec::Vec,
        },
        core::{convert::TryInto, fmt::Write},
    };

    fn instr_from_byte(b: u8) -> Result<Instr, DisasmError<'static>> {
        b.try_into().map_err(DisasmError::InvalidInstruction)
    }

    let mut labels = BTreeMap::<u32, u32>::default();
    let mut buf = Vec::<instrs::Oper>::new();
    let mut has_oob = false;

    '_offset_pass: for (&off, &(name, len, kind)) in functions.iter() {
        if matches!(kind, DisasmItem::Global) {
            continue;
        }

        let prev = *binary;

        *binary = &binary[off as usize..];

        let mut label_count = 0;
        while let Some(&byte) = binary.first() {
            let offset: i32 = (prev.len() - binary.len()).try_into().unwrap();
            if offset as u32 == off + len {
                break;
            }
            let Ok(inst) = instr_from_byte(byte) else { break };
            instrs::parse_args(binary, inst, &mut buf)
                .ok_or(DisasmError::InstructionOutOfBounds(name))?;

            for op in buf.drain(..) {
                let rel = match op {
                    instrs::Oper::O(rel) => rel,
                    instrs::Oper::P(rel) => rel.into(),
                    _ => continue,
                };

                let global_offset: u32 = (offset + rel).try_into().unwrap();
                if functions.get(&global_offset).is_some() {
                    continue;
                }
                label_count += match labels.entry(global_offset) {
                    Entry::Occupied(_) => 0,
                    Entry::Vacant(entry) => {
                        entry.insert(label_count);
                        1
                    }
                }
            }

            if matches!(inst, Instr::ECA) {
                eca_handler(binary);
            }
        }

        *binary = prev;
    }

    let mut ordered = functions.iter().collect::<Vec<_>>();
    ordered.sort_unstable_by_key(|(_, (name, _, _))| name);

    '_dump: for (&off, &(name, len, kind)) in ordered {
        if matches!(kind, DisasmItem::Global) {
            continue;
        }
        let prev = *binary;

        writeln!(out, "{name}:")?;

        *binary = &binary[off as usize..];
        while let Some(&byte) = binary.first() {
            let offset: i32 = (prev.len() - binary.len()).try_into().unwrap();
            if offset as u32 == off + len {
                break;
            }
            let Ok(inst) = instr_from_byte(byte) else {
                writeln!(out, "invalid instr {byte}")?;
                break;
            };
            instrs::parse_args(binary, inst, &mut buf).unwrap();

            if let Some(label) = labels.get(&offset.try_into().unwrap()) {
                write!(out, "{:>2}: ", label)?;
            } else {
                write!(out, " ")?;
            }

            write!(out, "{inst:<8?} ")?;

            'a: for (i, op) in buf.drain(..).enumerate() {
                if i != 0 {
                    write!(out, ", ")?;
                }

                let rel = 'b: {
                    match op {
                        instrs::Oper::O(rel) => break 'b rel,
                        instrs::Oper::P(rel) => break 'b rel.into(),
                        instrs::Oper::R(r) => write!(out, "r{r}")?,
                        instrs::Oper::B(b) => write!(out, "{b}b")?,
                        instrs::Oper::H(h) => write!(out, "{h}h")?,
                        instrs::Oper::W(w) => write!(out, "{w}w")?,
                        instrs::Oper::D(d) if (d as i64) < 0 => write!(out, "{}d", d as i64)?,
                        instrs::Oper::D(d) => write!(out, "{d}d")?,
                        instrs::Oper::A(a) => write!(out, "{a}a")?,
                    }

                    continue 'a;
                };

                let global_offset: u32 = (offset + rel).try_into().unwrap();
                if let Some(&(name, ..)) = functions.get(&global_offset) {
                    if name.contains('\0') {
                        write!(out, ":{name:?}")?;
                    } else {
                        write!(out, ":{name}")?;
                    }
                } else {
                    let local_has_oob = global_offset < off
                        || global_offset > off + len
                        || prev
                            .get(global_offset as usize)
                            .map_or(true, |&b| instr_from_byte(b).is_err())
                        || prev[global_offset as usize] == 0;
                    has_oob |= local_has_oob;
                    let label = labels.get(&global_offset).unwrap();
                    if local_has_oob {
                        write!(out, "!!!!!!!!!{rel}")?;
                    } else {
                        write!(out, ":{label}")?;
                    }
                }
            }

            writeln!(out)?;

            if matches!(inst, Instr::ECA) {
                eca_handler(binary);
            }
        }

        *binary = prev;
    }

    if has_oob {
        return Err(DisasmError::HasOutOfBoundsJumps);
    }

    Ok(())
}

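A minimal sketch of how the `disasm` entry point above could be driven (requires the default `disasm` feature). The symbol-table entry and the empty ECA handler are placeholders for illustration, not code from this repository.

```rust
use std::collections::BTreeMap;

use hbbytecode::{disasm, DisasmError, DisasmItem};

// Disassemble a blob assumed to contain a single function starting at offset 0.
fn dump(code: &[u8]) -> Result<String, DisasmError<'static>> {
    // offset -> (symbol name, symbol length, kind); the "main" entry is made up.
    let mut functions = BTreeMap::new();
    functions.insert(0u32, ("main", code.len() as u32, DisasmItem::Func));

    let mut out = String::new();
    let mut cursor = code;
    disasm(&mut cursor, &functions, &mut out, |_eca_payload| {})?;
    Ok(out)
}
```
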
29  c-abi.md
@@ -1,29 +0,0 @@
# C ABI (proposal)

## C datatypes
| C Type | Description | Size (B) |
|:------------|:-------------------------|-------------:|
| char | Character / byte | 8 |
| short | Short integer | 16 |
| int | Integer | 32 |
| long | Long integer | 64 |
| long long | Long long integer | 64 |
| T* | Pointer | 64 |
| float | Single-precision float | 32 |
| double | Double-precision float | 64 |
| long double | Extended-precision float | **Bikeshed** |

## Registers
| Register | ABI Name | Description | Saver |
|:---------|:---------|:---------------|:-------|
| `r0` | — | Zero register | N/A |
| `r1` | `ra` | Return address | Caller |
| `r2` | `sp` | Stack pointer | Callee |
| `r3` | `tp` | Thread pointer | N/A |

**TODO:** Parameters

**TODO:** Saved

**TODO:** Temp

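The register table in the removed proposal translates directly into named constants; a small sketch, assuming registers are addressed as plain `u8` indices (the constant names are illustrative, not taken from the repository):

```rust
// Register names from the (now deleted) c-abi.md proposal; names are illustrative.
pub const ZERO: u8 = 0; // r0: zero register
pub const RA: u8 = 1;   // r1: return address, caller-saved
pub const SP: u8 = 2;   // r2: stack pointer, callee-saved
pub const TP: u8 = 3;   // r3: thread pointer
```
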
23  depell/Cargo.toml  (new file)
@@ -0,0 +1,23 @@
[package]
name = "depell"
version = "0.1.0"
edition = "2021"

[dependencies]
argon2 = "0.5.3"
axum = "0.7.7"
axum-server = { version = "0.7.1", optional = true, features = ["rustls", "tls-rustls"] }
const_format = "0.2.33"
getrandom = "0.2.15"
hblang.workspace = true
htmlm = "0.5.0"
log = "0.4.22"
rand_core = { version = "0.6.4", features = ["getrandom"] }
rusqlite = { version = "0.32.1", features = ["bundled"] }
serde = { version = "1.0.210", features = ["derive"] }
time = "0.3.36"
tokio = { version = "1.40.0", features = ["rt"] }

[features]
#default = ["tls"]
tls = ["dep:axum-server"]

14  depell/README.md  (new file)
@@ -0,0 +1,14 @@
# Depell

Depell is a website that allows users to import/post/run hblang code and create huge dependency graphs. It is currently hosted at https://depell.mlokis.tech.

## Local Development

Prerequisites:
- Rust nightly toolchain: install Rust from [here](https://www.rust-lang.org/tools/install)

```bash
rustup default nightly
cargo xtask watch-depell-debug
# open http://localhost:8080 in the browser
```

1  depell/src/icons/download.svg  (new file, 279 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 -960 960 960" width="24px" fill="#e8eaed"><path d="M480-320 280-520l56-58 104 104v-326h80v326l104-104 56 58-200 200ZM240-160q-33 0-56.5-23.5T160-240v-120h80v120h480v-120h80v120q0 33-23.5 56.5T720-160H240Z"/></svg>

1  depell/src/icons/run.svg  (new file, 190 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 -960 960 960" width="24px" fill="#e8eaed"><path d="M320-200v-560l440 280-440 280Zm80-280Zm0 134 210-134-210-134v268Z"/></svg>

207  depell/src/index.css  (new file)
@@ -0,0 +1,207 @@
* {
    font-family: var(--font);
}

body {
    --primary: light-dark(white, #181A1B);
    --secondary: light-dark(#EFEFEF, #212425);
    --timestamp: light-dark(#555555, #AAAAAA);
    --error: #ff3333;
}

body {
    --small-gap: 5px;
    --font: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
    --monospace: 'Courier New', Courier, monospace;

    nav {
        display: flex;
        justify-content: space-between;
        align-items: center;

        section:last-child {
            display: flex;
            gap: var(--small-gap);
        }
    }

    main {
        margin-top: var(--small-gap);
        display: flex;
        flex-direction: column;
        gap: var(--small-gap);
    }
}

div.preview {
    margin: var(--small-gap) 0px;
    display: flex;
    flex-direction: column;
    gap: var(--small-gap);

    div.info {
        display: flex;
        gap: var(--small-gap);

        span[apply=timestamp] {
            color: var(--timestamp);
        }
    }

    div.stat {
        display: flex;

        svg {
            height: 18px;
        }
    }

    div.code {
        position: relative;

        nav {
            position: absolute;
            right: 0;
            padding: var(--small-gap);

            button {
                display: flex;
                padding: 0;
            }
        }
    }
}

svg {
    fill: black;
}

form {
    display: flex;
    flex-direction: column;
    gap: var(--small-gap);

    .error {
        color: var(--error);
        text-align: center;
    }
}

textarea {
    outline: none;
    border: none;
    background: var(--secondary);
    padding: var(--small-gap);
    padding-top: calc(var(--small-gap) * 1.5);
    font-family: var(--monospace);
    resize: none;
    tab-size: 4;
}

pre {
    background: var(--secondary);
    padding: var(--small-gap);
    padding-top: calc(var(--small-gap) * 1.5);
    margin: 0px;
    font-family: var(--monospace);
    tab-size: 4;
    overflow-x: auto;
    white-space: pre-wrap;
    word-wrap: break-word;
}

input {
    font-size: inherit;
    outline: none;
    border: none;
    background: var(--secondary);
    padding: var(--small-gap);
}

input:is(:hover, :focus) {
    background: var(--primary);
}

button {
    border: none;
    outline: none;
    font-size: inherit;
    background: var(--secondary);
}

button:hover:not(:active) {
    background: var(--primary);
}

div#code-editor {
    display: flex;
    position: relative;

    textarea {
        flex: 1;
    }

    span#code-size {
        position: absolute;
        right: 2px;
        font-size: 12px;
    }
}

div#dep-list {
    display: flex;
    flex-direction: column;
    align-items: center;
    gap: var(--small-gap);

    section {
        width: 100%;
        display: flex;
        flex-direction: column;
        text-align: center;
        gap: var(--small-gap);

        div {
            text-align: left;
        }
    }
}

.syn {
    font-family: var(--monospace);

    &.Comment {
        color: #939f91;
    }

    &.Keyword {
        color: #f85552;
    }

    &.Identifier,
    &.Directive {
        color: #3a94c5;
    }

    /* &.Number {} */

    &.String {
        color: #8da101;
    }

    &.Op,
    &.Assign {
        color: #f57d26;
    }

    &.Paren,
    &.Bracket,
    &.Comma,
    &.Dot,
    &.Ctor,
    &.Colon {
        color: light-dark(#5c6a72, #999999);
    }
}

551  depell/src/index.js  (new file)
@@ -0,0 +1,551 @@
/// @ts-check

/** @return {never} */
function never() { throw new Error() }

/**@type{WebAssembly.Instance}*/ let hbcInstance;
/**@type{Promise<WebAssembly.WebAssemblyInstantiatedSource>}*/ let hbcInstaceFuture;
async function getHbcInstance() {
    hbcInstaceFuture ??= WebAssembly.instantiateStreaming(fetch("/hbc.wasm"), {});
    return hbcInstance ??= (await hbcInstaceFuture).instance;
}

const stack_pointer_offset = 1 << 20;

/** @param {WebAssembly.Instance} instance @param {Post[]} packages @param {number} fuel
 * @returns {string} */
function compileCode(instance, packages, fuel = 100) {
    let {
        INPUT, INPUT_LEN,
        LOG_MESSAGES, LOG_MESSAGES_LEN,
        memory, compile_and_run,
    } = instance.exports;

    if (!(true
        && memory instanceof WebAssembly.Memory
        && INPUT instanceof WebAssembly.Global
        && INPUT_LEN instanceof WebAssembly.Global
        && LOG_MESSAGES instanceof WebAssembly.Global
        && LOG_MESSAGES_LEN instanceof WebAssembly.Global
        && typeof compile_and_run === "function"
    )) never();

    const codeLength = packPosts(packages, new DataView(memory.buffer, INPUT.value));
    new DataView(memory.buffer).setUint32(INPUT_LEN.value, codeLength, true);

    runWasmFunction(instance, compile_and_run, fuel);
    return bufToString(memory, LOG_MESSAGES, LOG_MESSAGES_LEN).trim();
}

/**@type{WebAssembly.Instance}*/ let fmtInstance;
/**@type{Promise<WebAssembly.WebAssemblyInstantiatedSource>}*/ let fmtInstaceFuture;
async function getFmtInstance() {
    fmtInstaceFuture ??= WebAssembly.instantiateStreaming(fetch("/hbfmt.wasm"), {});
    return fmtInstance ??= (await fmtInstaceFuture).instance;
}

/** @param {WebAssembly.Instance} instance @param {string} code @param {"tok" | "fmt" | "minify"} action
 * @returns {string | Uint8Array | undefined} */
function modifyCode(instance, code, action) {
    let {
        INPUT, INPUT_LEN,
        OUTPUT, OUTPUT_LEN,
        memory, fmt, tok, minify
    } = instance.exports;

    let funs = { fmt, tok, minify };
    let fun = funs[action];
    if (!(true
        && memory instanceof WebAssembly.Memory
        && INPUT instanceof WebAssembly.Global
        && INPUT_LEN instanceof WebAssembly.Global
        && OUTPUT instanceof WebAssembly.Global
        && OUTPUT_LEN instanceof WebAssembly.Global
        && funs.hasOwnProperty(action)
        && typeof fun === "function"
    )) never();

    if (action !== "fmt") {
        INPUT = OUTPUT;
        INPUT_LEN = OUTPUT_LEN;
    }

    let dw = new DataView(memory.buffer);
    dw.setUint32(INPUT_LEN.value, code.length, true);
    new Uint8Array(memory.buffer, INPUT.value).set(new TextEncoder().encode(code));

    if (!runWasmFunction(instance, fun)) {
        return undefined;
    }
    if (action === "tok") {
        return bufSlice(memory, OUTPUT, OUTPUT_LEN);
    } else {
        return bufToString(memory, OUTPUT, OUTPUT_LEN);
    }
}

/** @param {WebAssembly.Instance} instance @param {CallableFunction} func @param {any[]} args
 * @returns {boolean} */
function runWasmFunction(instance, func, ...args) {
    const { PANIC_MESSAGE, PANIC_MESSAGE_LEN, memory, stack_pointer } = instance.exports;
    if (!(true
        && memory instanceof WebAssembly.Memory
        && stack_pointer instanceof WebAssembly.Global
    )) never();
    const ptr = stack_pointer.value;
    try {
        func(...args);
        return true;
    } catch (error) {
        if (error instanceof WebAssembly.RuntimeError
            && error.message == "unreachable"
            && PANIC_MESSAGE instanceof WebAssembly.Global
            && PANIC_MESSAGE_LEN instanceof WebAssembly.Global) {
            console.error(bufToString(memory, PANIC_MESSAGE, PANIC_MESSAGE_LEN), error);
        } else {
            console.error(error);
        }
        stack_pointer.value = ptr;
        return false;
    }
}

/** @typedef {Object} Post
 * @property {string} path
 * @property {string} code */

/** @param {Post[]} posts @param {DataView} view @returns {number} */
function packPosts(posts, view) {
    const enc = new TextEncoder(), buf = new Uint8Array(view.buffer, view.byteOffset);
    let len = 0; for (const post of posts) {
        view.setUint16(len, post.path.length, true); len += 2;
        buf.set(enc.encode(post.path), len); len += post.path.length;
        view.setUint16(len, post.code.length, true); len += 2;
        buf.set(enc.encode(post.code), len); len += post.code.length;
    }
    return len;
}

/** @param {WebAssembly.Memory} mem
 * @param {WebAssembly.Global} ptr
 * @param {WebAssembly.Global} len
 * @return {Uint8Array} */
function bufSlice(mem, ptr, len) {
    return new Uint8Array(mem.buffer, ptr.value,
        new DataView(mem.buffer).getUint32(len.value, true));
}

/** @param {WebAssembly.Memory} mem
 * @param {WebAssembly.Global} ptr
 * @param {WebAssembly.Global} len
 * @return {string} */
function bufToString(mem, ptr, len) {
    const res = new TextDecoder()
        .decode(new Uint8Array(mem.buffer, ptr.value,
            new DataView(mem.buffer).getUint32(len.value, true)));
    new DataView(mem.buffer).setUint32(len.value, 0, true);
    return res;
}

/** @param {HTMLElement} target */
function wireUp(target) {
    execApply(target);
    cacheInputs(target);
    bindCodeEdit(target);
    bindTextareaAutoResize(target);
}

const importRe = /@use\s*\(\s*"(([^"]|\\")+)"\s*\)/g;

/** @param {WebAssembly.Instance} fmt
 * @param {string} code
 * @param {string[]} roots
 * @param {Post[]} buf
 * @param {Set<string>} prevRoots
 * @returns {void} */
function loadCachedPackages(fmt, code, roots, buf, prevRoots) {
    buf[0].code = code;

    roots.length = 0;
    let changed = false;
    for (const match of code.matchAll(importRe)) {
        changed ||= !prevRoots.has(match[1]);
        roots.push(match[1]);
    }

    if (!changed) return;
    buf.length = 1;
    prevRoots.clear();

    for (let imp = roots.pop(); imp !== undefined; imp = roots.pop()) {
        if (prevRoots.has(imp)) continue; prevRoots.add(imp);

        const fmtd = modifyCode(fmt, localStorage.getItem("package-" + imp) ?? never(), "fmt");
        if (typeof fmtd != "string") never();
        buf.push({ path: imp, code: fmtd });
        for (const match of buf[buf.length - 1].code.matchAll(importRe)) {
            roots.push(match[1]);
        }
    }
}

/**@type{Set<string>}*/ const prevRoots = new Set();
/**@typedef {Object} PackageCtx
 * @property {AbortController} [cancelation]
 * @property {string[]} keyBuf
 * @property {Set<string>} prevParams
 * @property {HTMLTextAreaElement} [edit] */

/** @param {string} source @param {Set<string>} importDiff @param {HTMLPreElement} errors @param {PackageCtx} ctx */
async function fetchPackages(source, importDiff, errors, ctx) {
    importDiff.clear();
    for (const match of source.matchAll(importRe)) {
        if (localStorage["package-" + match[1]]) continue;
        importDiff.add(match[1]);
    }

    if (importDiff.size !== 0 && (ctx.prevParams.size != importDiff.size
        || [...ctx.prevParams.keys()].every(e => importDiff.has(e)))) {
        if (ctx.cancelation) ctx.cancelation.abort();
        ctx.prevParams.clear();
        ctx.prevParams = new Set([...importDiff]);
        ctx.cancelation = new AbortController();

        ctx.keyBuf.length = 0;
        ctx.keyBuf.push(...importDiff.keys());

        errors.textContent = "fetching: " + ctx.keyBuf.join(", ");

        await fetch(`/code`, {
            method: "POST",
            signal: ctx.cancelation.signal,
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify(ctx.keyBuf),
        }).then(async e => {
            try {
                const json = await e.json();
                if (e.status == 200) {
                    for (const [key, value] of Object.entries(json)) {
                        localStorage["package-" + key] = value;
                    }
                    const missing = ctx.keyBuf.filter(i => json[i] === undefined);
                    if (missing.length !== 0) {
                        errors.textContent = "deps not found: " + missing.join(", ");
                    } else {
                        ctx.cancelation = undefined;
                        ctx.edit?.dispatchEvent(new InputEvent("input"));
                    }
                }
            } catch (er) {
                errors.textContent = "completely failed to fetch ("
                    + e.status + "): " + ctx.keyBuf.join(", ");
                console.error(e, er);
            }
        });
    }
}

/** @param {HTMLElement} target */
async function bindCodeEdit(target) {
    const edit = target.querySelector("#code-edit");
    if (!(edit instanceof HTMLTextAreaElement)) return;

    const codeSize = target.querySelector("#code-size");
    const errors = target.querySelector("#compiler-output");
    if (!(true
        && codeSize instanceof HTMLSpanElement
        && errors instanceof HTMLPreElement
    )) never();

    const MAX_CODE_SIZE = parseInt(codeSize.innerHTML);
    if (Number.isNaN(MAX_CODE_SIZE)) never();

    const hbc = await getHbcInstance(), fmt = await getFmtInstance();
    let importDiff = new Set();
    /**@type{Post[]}*/
    const packages = [{ path: "local.hb", code: "" }];
    const debounce = 100;
    let timeout = 0;
    const ctx = { keyBuf: [], prevParams: new Set(), edit };

    prevRoots.clear();

    const onInput = () => {
        fetchPackages(edit.value, importDiff, errors, ctx);

        if (ctx.cancelation && importDiff.size !== 0) {
            return;
        }

        loadCachedPackages(fmt, edit.value, ctx.keyBuf, packages, prevRoots);

        errors.textContent = compileCode(hbc, packages);
        const minified_size = modifyCode(fmt, edit.value, "minify")?.length;
        if (minified_size) {
            codeSize.textContent = (MAX_CODE_SIZE - minified_size) + "";
            const perc = Math.min(100, Math.floor(100 * (minified_size / MAX_CODE_SIZE)));
            codeSize.style.color = `color-mix(in srgb, light-dark(black, white), var(--error) ${perc}%)`;
        }
        timeout = 0;
    };

    edit.addEventListener("input", () => {
        if (timeout) clearTimeout(timeout);
        timeout = setTimeout(onInput, debounce)
    });
    edit.dispatchEvent(new InputEvent("input"));
}

/**
 * @type {Array<string>}
 * to be synched with `enum TokenGroup` in bytecode/src/fmt.rs */
const TOK_CLASSES = [
    'Blank',
    'Comment',
    'Keyword',
    'Identifier',
    'Directive',
    'Number',
    'String',
    'Op',
    'Assign',
    'Paren',
    'Bracket',
    'Colon',
    'Comma',
    'Dot',
    'Ctor',
];

/** @type {{ [key: string]: (el: HTMLElement) => void | Promise<void> }} */
const applyFns = {
    timestamp: (el) => {
        const timestamp = el.innerText;
        const date = new Date(parseInt(timestamp) * 1000);
        el.innerText = date.toLocaleString();
    },
    fmt,
};

/**
 * @param {HTMLElement} target */
async function fmt(target) {
    const code = target.innerText;
    const instance = await getFmtInstance();
    const decoder = new TextDecoder('utf-8');
    const fmt = modifyCode(instance, code, 'fmt');
    if (typeof fmt !== "string") never()
    const codeBytes = new TextEncoder().encode(fmt);
    const tok = modifyCode(instance, fmt, 'tok');
    if (!(tok instanceof Uint8Array)) never();
    target.innerHTML = '';
    let start = 0;
    let kind = tok[0];
    for (let ii = 1; ii <= tok.length; ii += 1) {
        // split over same tokens and buffer end
        if (tok[ii] === kind && ii < tok.length) {
            continue;
        }
        const text = decoder.decode(codeBytes.subarray(start, ii));
        const textNode = document.createTextNode(text);
        if (kind === 0) {
            target.appendChild(textNode);
        } else {
            const el = document.createElement('span');
            el.classList.add('syn');
            el.classList.add(TOK_CLASSES[kind]);
            el.appendChild(textNode);
            target.appendChild(el);
        }
        if (ii == tok.length) {
            break;
        }
        start = ii;
        kind = tok[ii];
    }
}

/** @param {HTMLElement} target */
function execApply(target) {
    const proises = [];
    for (const elem of target.querySelectorAll('[apply]')) {
        if (!(elem instanceof HTMLElement)) continue;
        const funcname = elem.getAttribute('apply') ?? never();
        const vl = applyFns[funcname](elem);
        if (vl instanceof Promise) proises.push(vl);
    }
    if (target === document.body) {
        Promise.all(proises).then(() => document.body.hidden = false);
    }
}

/** @param {HTMLElement} target */
function bindTextareaAutoResize(target) {
    for (const textarea of target.querySelectorAll("textarea")) {
        if (!(textarea instanceof HTMLTextAreaElement)) never();

        const taCssMap = window.getComputedStyle(textarea);
        const padding = parseInt(taCssMap.getPropertyValue('padding-top') ?? "0")
            + parseInt(taCssMap.getPropertyValue('padding-top') ?? "0");
        textarea.style.height = "auto";
        textarea.style.height = (textarea.scrollHeight - padding) + "px";
        textarea.style.overflowY = "hidden";
        textarea.addEventListener("input", function() {
            let top = window.scrollY;
            textarea.style.height = "auto";
            textarea.style.height = (textarea.scrollHeight - padding) + "px";
            window.scrollTo({ top });
        });

        textarea.onkeydown = (ev) => {
            if (ev.key === "Tab") {
                ev.preventDefault();
                document.execCommand('insertText', false, "\t");
            }
        }
    }
}

/** @param {HTMLElement} target */
function cacheInputs(target) {
    /**@type {HTMLFormElement}*/ let form;
    for (form of target.querySelectorAll('form')) {
        const path = form.getAttribute('hx-post') || form.getAttribute('hx-delete');
        if (!path) {
            console.warn('form does not have a hx-post or hx-delete attribute', form);
            continue;
        }

        for (const input of form.elements) {
            if (input instanceof HTMLInputElement || input instanceof HTMLTextAreaElement) {
                if ('password submit button'.includes(input.type)) continue;
                const key = path + input.name;
                input.value = localStorage.getItem(key) ?? '';
                input.addEventListener("input", () => localStorage.setItem(key, input.value));
            } else {
                console.warn("unhandled form element: ", input);
            }
        }
    }
}

/** @param {string} [path] */
function updateTab(path) {
    for (const elem of document.querySelectorAll("button[hx-push-url]")) {
        if (elem instanceof HTMLButtonElement)
            elem.disabled = elem.getAttribute("hx-push-url") === (path ?? window.location.pathname);
    }
}

if (window.location.hostname === 'localhost') {
    let id; setInterval(async () => {
        let new_id = await fetch('/hot-reload').then(reps => reps.text());
        id ??= new_id;
        if (id !== new_id) window.location.reload();
    }, 300);

    (async function test() {
        {
            const code = "main:=fn():void{return}";
            const inst = await getFmtInstance()
            const fmtd = modifyCode(inst, code, "fmt") ?? never();
            if (typeof fmtd !== "string") never();
            const prev = modifyCode(inst, fmtd, "minify") ?? never();
            if (code != prev) console.error(code, prev);
        }
        {
            const posts = [{
                path: "foo.hb",
                code: "main:=fn():int{return 42}",
            }];
            const res = compileCode(await getHbcInstance(), posts, 1) ?? never();
            const expected = "exit code: 42";
            if (expected != res) console.error(expected, res);
        }
    })()
}

document.body.addEventListener('htmx:afterSwap', (ev) => {
    if (!(ev.target instanceof HTMLElement)) never();
    wireUp(ev.target);
    if (ev.target.tagName == "MAIN" || ev.target.tagName == "BODY")
        updateTab(ev['detail'].pathInfo.finalRequestPath);
});

getFmtInstance().then(inst => {
    document.body.addEventListener('htmx:configRequest', (ev) => {
        const details = ev['detail'];
        if (details.path === "/post" && details.verb === "post") {
            details.parameters['code'] = modifyCode(inst, details.parameters['code'], "minify");
        }
    });

    /** @param {string} query @param {string} target @returns {number} */
    function fuzzyCost(query, target) {
        let qi = 0, bi = 0, cost = 0, matched = false;
        while (qi < query.length) {
            if (query.charAt(qi) === target.charAt(bi++)) {
                matched = true;
                qi++;
            } else {
                cost++;
            }
            if (bi === target.length) (bi = 0, qi++);
        }
        return cost + (matched ? 0 : 100 * target.length);
    }

    let deps = undefined;
    /** @param {HTMLInputElement} input @returns {void} */
    function filterCodeDeps(input) {
        deps ??= document.getElementById("deps");
        if (!(deps instanceof HTMLElement)) never();
        if (input.value === "") {
            deps.textContent = "results show here...";
            return;
        }
        deps.innerHTML = "";
        for (const root of [...prevRoots.keys()]
            .sort((a, b) => fuzzyCost(input.value, a) - fuzzyCost(input.value, b))) {
            const pane = document.createElement("div");
            const code = modifyCode(inst, localStorage["package-" + root], "fmt");
            pane.innerHTML = `<div>${root}</div><pre>${code}</pre>`;
            deps.appendChild(pane);
        }
        if (deps.innerHTML === "") {
            deps.textContent = "no results";
        }
    }

    Object.assign(window, { filterCodeDeps });
});

/** @param {HTMLElement} target */
function runPost(target) {
    while (!target.matches("div[class=preview]")) target = target.parentElement ?? never();
    const code = target.querySelector("pre[apply=fmt]");
    if (!(code instanceof HTMLPreElement)) never();
    const output = target.querySelector("pre[id=compiler-output]");
    if (!(output instanceof HTMLPreElement)) never();

    Promise.all([getHbcInstance(), getFmtInstance()]).then(async ([hbc, fmt]) => {
        const ctx = { keyBuf: [], prevParams: new Set() };
        await fetchPackages(code.innerText ?? never(), new Set(), output, ctx);
        const posts = [{ path: "this", code: "" }];
        loadCachedPackages(fmt, code.innerText ?? never(), ctx.keyBuf, posts, new Set());
        output.textContent = compileCode(hbc, posts);
        output.hidden = false;
    });

    let author = encodeURIComponent(target.dataset.author ?? never());
    let name = encodeURIComponent(target.dataset.name ?? never());
    fetch(`/post/run?author=${author}&name=${name}`, { method: "POST" })
}

Object.assign(window, { runPost });

updateTab();
wireUp(document.body);

972
depell/src/main.rs
Normal file
|
@ -0,0 +1,972 @@
|
|||
#![feature(iter_collect_into)]
|
||||
use {
|
||||
argon2::{password_hash::SaltString, PasswordVerifier},
|
||||
axum::{
|
||||
body::Bytes,
|
||||
extract::{DefaultBodyLimit, Path},
|
||||
http::{header::COOKIE, request::Parts, StatusCode},
|
||||
response::{AppendHeaders, Html},
|
||||
},
|
||||
const_format::formatcp,
|
||||
core::fmt,
|
||||
htmlm::{html, write_html},
|
||||
rand_core::OsRng,
|
||||
serde::{Deserialize, Serialize},
|
||||
std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fmt::{Display, Write},
|
||||
net::Ipv4Addr,
|
||||
},
|
||||
};
|
||||
|
||||
const MAX_NAME_LENGTH: usize = 32;
|
||||
const MAX_POSTNAME_LENGTH: usize = 64;
|
||||
const MAX_CODE_LENGTH: usize = 1024 * 4;
|
||||
const SESSION_DURATION_SECS: u64 = 60 * 60;
|
||||
const MAX_FEED_SIZE: usize = 8 * 1024;
|
||||
|
||||
type Redirect<const COUNT: usize = 1> = AppendHeaders<[(&'static str, &'static str); COUNT]>;
|
||||
|
||||
macro_rules! static_asset {
|
||||
($mime:literal, $body:literal) => {
|
||||
get(|| async {
|
||||
axum::http::Response::builder()
|
||||
.header("content-type", $mime)
|
||||
.header("content-encoding", "gzip")
|
||||
.body(axum::body::Body::from(Bytes::from_static(include_bytes!(concat!(
|
||||
$body, ".gz"
|
||||
)))))
|
||||
.unwrap()
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
async fn amain() {
|
||||
use axum::routing::{delete, get, post};
|
||||
|
||||
let debug = cfg!(debug_assertions);
|
||||
|
||||
log::set_logger(&Logger).unwrap();
|
||||
log::set_max_level(if debug { log::LevelFilter::Warn } else { log::LevelFilter::Error });
|
||||
|
||||
db::init();
|
||||
|
||||
let router = axum::Router::new()
|
||||
.route("/", get(Index::page))
|
||||
.route("/index.css", static_asset!("text/css", "index.css"))
|
||||
.route("/index.js", static_asset!("text/javascript", "index.js"))
|
||||
.route("/hbfmt.wasm", static_asset!("application/wasm", "hbfmt.wasm"))
|
||||
.route("/hbc.wasm", static_asset!("application/wasm", "hbc.wasm"))
|
||||
.route("/index-view", get(Index::get))
|
||||
.route("/feed", get(Feed::page))
|
||||
.route("/feed-view", get(Feed::get))
|
||||
.route("/feed-more", post(Feed::more))
|
||||
.route("/profile", get(Profile::page))
|
||||
.route("/profile-view", get(Profile::get))
|
||||
.route("/profile/:name", get(Profile::get_other_page))
|
||||
.route("/profile/password", post(PasswordChange::post))
|
||||
.route("/profile-view/:name", get(Profile::get_other))
|
||||
.route("/post", get(Post::page))
|
||||
.route("/post-view", get(Post::get))
|
||||
.route("/post", post(Post::post))
|
||||
.route("/post/run", post(Post::run))
|
||||
.route("/code", post(fetch_code))
|
||||
.route("/login", get(Login::page))
|
||||
.route("/login-view", get(Login::get))
|
||||
.route("/login", post(Login::post))
|
||||
.route("/login", delete(Login::delete))
|
||||
.route("/signup", get(Signup::page))
|
||||
.route("/signup-view", get(Signup::get))
|
||||
.route("/signup", post(Signup::post))
|
||||
.route(
|
||||
"/hot-reload",
|
||||
get({
|
||||
let id = std::time::SystemTime::now()
|
||||
.duration_since(std::time::SystemTime::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_millis();
|
||||
move || async move { id.to_string() }
|
||||
}),
|
||||
)
|
||||
.layer(DefaultBodyLimit::max(16 * 1024));
|
||||
|
||||
#[cfg(feature = "tls")]
|
||||
{
|
||||
let addr =
|
||||
(Ipv4Addr::UNSPECIFIED, std::env::var("DEPELL_PORT").unwrap().parse::<u16>().unwrap());
|
||||
let config = axum_server::tls_rustls::RustlsConfig::from_pem_file(
|
||||
std::env::var("DEPELL_CERT_PATH").unwrap(),
|
||||
std::env::var("DEPELL_KEY_PATH").unwrap(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
axum_server::bind_rustls(addr.into(), config)
|
||||
.serve(router.into_make_service())
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
#[cfg(not(feature = "tls"))]
|
||||
{
|
||||
let addr = (Ipv4Addr::UNSPECIFIED, 8080);
|
||||
let socket = tokio::net::TcpListener::bind(addr).await.unwrap();
|
||||
axum::serve(socket, router).await.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
async fn fetch_code(
|
||||
axum::Json(paths): axum::Json<Vec<String>>,
|
||||
) -> axum::Json<HashMap<String, String>> {
|
||||
let mut deps = HashMap::<String, String>::new();
|
||||
db::with(|db| {
|
||||
for path in &paths {
|
||||
let Some((author, name)) = path.split_once('/') else { continue };
|
||||
db.fetch_deps
|
||||
.query_map((name, author), |r| {
|
||||
Ok((
|
||||
r.get::<_, String>(1)? + "/" + r.get_ref(0)?.as_str()?,
|
||||
r.get::<_, String>(2)?,
|
||||
))
|
||||
})
|
||||
.log("fetch deps query")
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.filter_map(|r| r.log("deps row"))
|
||||
.collect_into(&mut deps);
|
||||
}
|
||||
});
|
||||
axum::Json(deps)
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum Feed {
|
||||
Before { before_timestamp: u64 },
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Before {
|
||||
before_timestamp: u64,
|
||||
}
|
||||
|
||||
impl Feed {
|
||||
async fn more(session: Session, axum::Form(data): axum::Form<Before>) -> Html<String> {
|
||||
Self::Before { before_timestamp: data.before_timestamp }.render(&session)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Feed {
|
||||
fn default() -> Self {
|
||||
Self::Before { before_timestamp: now() + 3600 }
|
||||
}
|
||||
}
|
||||
|
||||
impl Page for Feed {
|
||||
fn render_to_buf(self, _: &Session, buf: &mut String) {
|
||||
db::with(|db| {
|
||||
let cursor = match self {
|
||||
Feed::Before { before_timestamp } => db
|
||||
.get_pots_before
|
||||
.query_map((before_timestamp,), Post::from_row)
|
||||
.log("fetch before posts query")
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.filter_map(|r| r.log("fetch before posts row")),
|
||||
};
|
||||
|
||||
let base_len = buf.len();
|
||||
let mut last_timestamp = None;
|
||||
for post in cursor {
|
||||
write!(buf, "{}", post).unwrap();
|
||||
if buf.len() - base_len > MAX_FEED_SIZE {
|
||||
last_timestamp = Some(post.timestamp);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
write_html!((*buf)
|
||||
if let Some(last_timestamp) = last_timestamp {
|
||||
<div "hx-post"="/feed-more"
|
||||
"hx-trigger"="intersect once"
|
||||
"hx-swap"="outerHTML"
|
||||
"hx-vals"={format_args!("{{\"before_timestamp\":{last_timestamp}}}")}
|
||||
>"there might be more"</div>
|
||||
} else {
|
||||
"no more stuff"
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Index;
|
||||
|
||||
impl PublicPage for Index {
|
||||
fn render_to_buf(self, buf: &mut String) {
|
||||
buf.push_str(include_str!("welcome-page.html"));
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Default)]
|
||||
struct Post {
|
||||
author: String,
|
||||
name: String,
|
||||
#[serde(skip)]
|
||||
timestamp: u64,
|
||||
#[serde(skip)]
|
||||
imports: usize,
|
||||
#[serde(skip)]
|
||||
runs: usize,
|
||||
#[serde(skip)]
|
||||
dependencies: usize,
|
||||
code: String,
|
||||
#[serde(skip)]
|
||||
error: Option<&'static str>,
|
||||
}
|
||||
|
||||
impl Page for Post {
|
||||
fn render_to_buf(self, session: &Session, buf: &mut String) {
|
||||
let Self { name, code, error, .. } = self;
|
||||
write_html! { (buf)
|
||||
<form id="postForm" "hx-post"="/post" "hx-swap"="outerHTML">
|
||||
if let Some(e) = error { <div class="error">e</div> }
|
||||
<input name="author" type="text" value={session.name} hidden>
|
||||
<input name="name" type="text" placeholder="name" value=name
|
||||
required maxlength=MAX_POSTNAME_LENGTH>
|
||||
<div id="code-editor">
|
||||
<textarea id="code-edit" name="code" placeholder="code" rows=1
|
||||
required>code</textarea>
|
||||
<span id="code-size">MAX_CODE_LENGTH</span>
|
||||
</div>
|
||||
<input type="submit" value="submit">
|
||||
<pre id="compiler-output"></pre>
|
||||
</form>
|
||||
!{include_str!("post-page.html")}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Run {
|
||||
author: String,
|
||||
name: String,
|
||||
}
|
||||
|
||||
impl Post {
|
||||
pub fn from_row(r: &rusqlite::Row) -> rusqlite::Result<Self> {
|
||||
Ok(Post {
|
||||
author: r.get(0)?,
|
||||
name: r.get(1)?,
|
||||
timestamp: r.get(2)?,
|
||||
code: r.get(3)?,
|
||||
imports: r.get(4)?,
|
||||
runs: r.get(5)?,
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
|
||||
async fn run(
|
||||
session: Session,
|
||||
axum::extract::Query(run): axum::extract::Query<Run>,
|
||||
) -> StatusCode {
|
||||
match db::with(|qes| qes.creata_run.insert((run.name, run.author, session.name))) {
|
||||
Ok(_) => StatusCode::OK,
|
||||
Err(e) => {
|
||||
log::error!("creating run record failed: {e}");
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn post(
|
||||
session: Session,
|
||||
axum::Form(mut data): axum::Form<Self>,
|
||||
) -> Result<Redirect, Html<String>> {
|
||||
if data.name.len() > MAX_POSTNAME_LENGTH {
|
||||
data.error = Some(formatcp!("name too long, max length is {MAX_POSTNAME_LENGTH}"));
|
||||
return Err(data.render(&session));
|
||||
}
|
||||
|
||||
if data.code.len() > MAX_CODE_LENGTH {
|
||||
data.error = Some(formatcp!("code too long, max length is {MAX_CODE_LENGTH}"));
|
||||
return Err(data.render(&session));
|
||||
}
|
||||
|
||||
db::with(|db| {
|
||||
if let Err(e) = db.create_post.insert((&data.name, &session.name, now(), &data.code)) {
|
||||
if let rusqlite::Error::SqliteFailure(e, _) = e {
|
||||
if e.code == rusqlite::ErrorCode::ConstraintViolation {
|
||||
data.error = Some("this name is already used");
|
||||
}
|
||||
}
|
||||
data.error = data.error.or_else(|| {
|
||||
log::error!("create post error: {e}");
|
||||
Some("internal server error")
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
for (author, name) in hblang::lexer::Lexer::uses(&data.code)
|
||||
.filter_map(|v| v.split_once('/'))
|
||||
.collect::<HashSet<_>>()
|
||||
{
|
||||
if db
|
||||
.create_import
|
||||
.insert((author, name, &session.name, &data.name))
|
||||
.log("create import query")
|
||||
.is_none()
|
||||
{
|
||||
data.error = Some("internal server error");
|
||||
return;
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
if data.error.is_some() {
|
||||
Err(data.render(&session))
|
||||
} else {
|
||||
Ok(redirect("/profile"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Post {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let Self { author, name, timestamp, imports, runs, dependencies, code, .. } = self;
|
||||
write_html! { f <div class="preview" "data-author"=author "data-name"=name>
|
||||
<div class="info">
|
||||
<span>
|
||||
<a "hx-get"={format_args!("/profile-view/{author}")} href="" "hx-target"="main"
|
||||
"hx-push-url"={format_args!("/profile/{author}")}
|
||||
"hx-swam"="innerHTML">author</a>
|
||||
"/"
|
||||
name
|
||||
</span>
|
||||
<span apply="timestamp">timestamp</span>
|
||||
for (name, count) in [include_str!("icons/download.svg"), include_str!("icons/run.svg"), "deps"]
|
||||
.iter()
|
||||
.zip([imports, runs, dependencies])
|
||||
.filter(|(_, &c)| c != 0)
|
||||
{
|
||||
<div class="stat">!name count</div>
|
||||
}
|
||||
</div>
|
||||
<div class="code">
|
||||
<nav>
|
||||
<button onmousedown="runPost(this)">!{include_str!("icons/run.svg")}</button>
|
||||
</nav>
|
||||
<pre apply="fmt">code</pre>
|
||||
</div>
|
||||
<pre hidden id="compiler-output"></pre>
|
||||
if *timestamp == 0 {
|
||||
<button "hx-get"="/post" "hx-swap"="outerHTML"
|
||||
"hx-target"="[preview]">"edit"</button>
|
||||
}
|
||||
</div> }
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Default)]
|
||||
struct PasswordChange {
|
||||
old_password: String,
|
||||
new_password: String,
|
||||
#[serde(skip)]
|
||||
error: Option<&'static str>,
|
||||
}
|
||||
|
||||
impl PasswordChange {
|
||||
async fn post(
|
||||
session: Session,
|
||||
axum::Form(mut change): axum::Form<PasswordChange>,
|
||||
) -> Html<String> {
|
||||
db::with(|que| {
|
||||
match que.authenticate.query_row((&session.name,), |r| r.get::<_, String>(1)) {
|
||||
Ok(hash) if verify_password(&hash, &change.old_password).is_err() => {
|
||||
change.error = Some("invalid credentials");
|
||||
}
|
||||
Ok(_) => {
|
||||
let new_hashed = hash_password(&change.new_password);
|
||||
match que
|
||||
.change_passowrd
|
||||
.execute((new_hashed, &session.name))
|
||||
.log("execute update")
|
||||
{
|
||||
None => change.error = Some("intenal server error"),
|
||||
Some(0) => change.error = Some("password is incorrect"),
|
||||
Some(_) => {}
|
||||
}
|
||||
}
|
||||
Err(rusqlite::Error::QueryReturnedNoRows) => {
|
||||
change.error = Some("invalid credentials");
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("login queri failed: {e}");
|
||||
change.error = Some("internal server error");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if change.error.is_some() {
|
||||
change.render(&session)
|
||||
} else {
|
||||
PasswordChange::default().render(&session)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Page for PasswordChange {
|
||||
fn render_to_buf(self, _: &Session, buf: &mut String) {
|
||||
let Self { old_password, new_password, error } = self;
|
||||
write_html! { (buf)
|
||||
<form "hx-post"="/profile/password" "hx-swap"="outerHTML">
|
||||
if let Some(e) = error { <div class="error">e</div> }
|
||||
<input name="old_password" type="password" autocomplete="old-password"
|
||||
placeholder="old password" value=old_password>
|
||||
<input name="new_password" type="password" autocomplete="new-password" placeholder="new password"
|
||||
value=new_password>
|
||||
<input type="submit" value="submit">
|
||||
</form>
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Profile {
|
||||
other: Option<String>,
|
||||
}
|
||||
|
||||
impl Profile {
|
||||
async fn get_other(session: Session, Path(name): Path<String>) -> Html<String> {
|
||||
Profile { other: Some(name) }.render(&session)
|
||||
}
|
||||
|
||||
async fn get_other_page(session: Session, Path(name): Path<String>) -> Html<String> {
|
||||
base(|b| Profile { other: Some(name) }.render_to_buf(&session, b), Some(&session))
|
||||
}
|
||||
}
|
||||
|
||||
impl Page for Profile {
|
||||
fn render_to_buf(self, session: &Session, buf: &mut String) {
|
||||
db::with(|db| {
|
||||
let name = self.other.as_ref().unwrap_or(&session.name);
|
||||
let iter = db
|
||||
.get_user_posts
|
||||
.query_map((name,), Post::from_row)
|
||||
.log("get user posts query")
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.filter_map(|p| p.log("user post row"));
|
||||
write_html! { (*buf)
|
||||
if name == &session.name {
|
||||
|b|{PasswordChange::default().render_to_buf(session, b)}
|
||||
}
|
||||
|
||||
for post in iter {
|
||||
!{post}
|
||||
} else {
|
||||
"no posts"
|
||||
}
|
||||
!{include_str!("profile-page.html")}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn hash_password(password: &str) -> String {
|
||||
use argon2::PasswordHasher;
|
||||
argon2::Argon2::default()
|
||||
.hash_password(password.as_bytes(), &SaltString::generate(&mut OsRng))
|
||||
.unwrap()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
fn verify_password(hash: &str, password: &str) -> Result<(), argon2::password_hash::Error> {
|
||||
argon2::Argon2::default()
|
||||
.verify_password(password.as_bytes(), &argon2::PasswordHash::new(hash)?)
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default, Debug)]
|
||||
struct Login {
|
||||
name: String,
|
||||
password: String,
|
||||
#[serde(skip)]
|
||||
error: Option<&'static str>,
|
||||
}
|
||||
|
||||
impl PublicPage for Login {
|
||||
fn render_to_buf(self, buf: &mut String) {
|
||||
let Self { name, password, error } = self;
|
||||
write_html! { (buf)
|
||||
<form "hx-post"="/login" "hx-swap"="outerHTML">
|
||||
if let Some(e) = error { <div class="error">e</div> }
|
||||
<input name="name" type="text" autocomplete="name" placeholder="name" value=name
|
||||
required maxlength=MAX_NAME_LENGTH>
|
||||
<input name="password" type="password" autocomplete="current-password" placeholder="password"
|
||||
value=password>
|
||||
<input type="submit" value="submit">
|
||||
</form>
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Login {
|
||||
async fn post(
|
||||
axum::Form(mut data): axum::Form<Self>,
|
||||
) -> Result<AppendHeaders<[(&'static str, String); 2]>, Html<String>> {
|
||||
// TODO: hash password
|
||||
let mut id = [0u8; 32];
|
||||
db::with(|db| match db.authenticate.query_row((&data.name,), |r| r.get::<_, String>(1)) {
|
||||
Ok(hash) => {
|
||||
if verify_password(&hash, &data.password).is_err() {
|
||||
data.error = Some("invalid credentials");
|
||||
} else {
|
||||
getrandom::getrandom(&mut id).unwrap();
|
||||
if db
|
||||
.login
|
||||
.insert((id, &data.name, now() + SESSION_DURATION_SECS))
|
||||
.log("create session query")
|
||||
.is_none()
|
||||
{
|
||||
data.error = Some("internal server error");
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(rusqlite::Error::QueryReturnedNoRows) => {
|
||||
data.error = Some("invalid credentials");
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("login queri failed: {e}");
|
||||
data.error = Some("internal server error");
|
||||
}
|
||||
});
|
||||
|
||||
if data.error.is_some() {
|
||||
Err(data.render())
|
||||
} else {
|
||||
Ok(AppendHeaders([
|
||||
("hx-location", "/feed".into()),
|
||||
(
|
||||
"set-cookie",
|
||||
format!(
|
||||
"id={}; SameSite=Strict; Secure; Max-Age={SESSION_DURATION_SECS}",
|
||||
to_hex(&id)
|
||||
),
|
||||
),
|
||||
]))
|
||||
}
|
||||
}
|
||||
|
||||
async fn delete(session: Session) -> Redirect {
|
||||
_ = db::with(|q| q.logout.execute((session.id,)).log("delete session query"));
|
||||
redirect("/login")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default)]
|
||||
struct Signup {
|
||||
name: String,
|
||||
new_password: String,
|
||||
confirm_password: String,
|
||||
#[serde(default)]
|
||||
confirm_no_password: bool,
|
||||
#[serde(skip)]
|
||||
error: Option<&'static str>,
|
||||
}
|
||||
|
||||
impl PublicPage for Signup {
|
||||
fn render_to_buf(self, buf: &mut String) {
|
||||
let Signup { name, new_password, confirm_password, confirm_no_password, error } = self;
|
||||
let vals = if confirm_no_password { "{\"confirm_no_password\":true}" } else { "{}" };
|
||||
write_html! { (buf)
|
||||
<form "hx-post"="/signup" "hx-swap"="outerHTML" "hx-vals"=vals>
|
||||
if let Some(e) = error { <div class="error">e</div> }
|
||||
<input name="name" type="text" autocomplete="name" placeholder="name" value=name
|
||||
maxlength=MAX_NAME_LENGTH required>
|
||||
<input name="new_password" type="password" autocomplete="new-password" placeholder="new password"
|
||||
value=new_password>
|
||||
<input name="confirm_password" type="password" autocomplete="confirm-password"
|
||||
placeholder="confirm password" value=confirm_password>
|
||||
<input type="submit" value="submit">
|
||||
</form>
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Signup {
|
||||
async fn post(axum::Form(mut data): axum::Form<Self>) -> Result<Redirect, Html<String>> {
|
||||
if data.name.len() > MAX_NAME_LENGTH {
|
||||
data.error = Some(formatcp!("name too long, max length is {MAX_NAME_LENGTH}"));
|
||||
return Err(data.render());
|
||||
}
|
||||
|
||||
if !data.confirm_no_password && data.new_password.is_empty() {
|
||||
data.confirm_no_password = true;
|
||||
data.error = Some("Are you sure you don't want to use a password? (then submit again)");
|
||||
return Err(data.render());
|
||||
}
|
||||
|
||||
db::with(|db| {
|
||||
// TODO: hash passwords
|
||||
match db.register.insert((&data.name, hash_password(&data.new_password))) {
|
||||
Ok(_) => {}
|
||||
Err(rusqlite::Error::SqliteFailure(e, _))
|
||||
if e.code == rusqlite::ErrorCode::ConstraintViolation =>
|
||||
{
|
||||
data.error = Some("username already taken");
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("create user query: {e}");
|
||||
data.error = Some("internal server error");
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
if data.error.is_some() {
|
||||
Err(data.render())
|
||||
} else {
|
||||
Ok(redirect("/login"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn base(body: impl FnOnce(&mut String), session: Option<&Session>) -> Html<String> {
|
||||
let username = session.map(|s| &s.name);
|
||||
|
||||
let nav_button = |f: &mut String, name: &str| {
|
||||
write_html! {(f)
|
||||
<button "hx-push-url"={format_args!("/{name}")}
|
||||
"hx-get"={format_args!("/{name}-view")}
|
||||
"hx-target"="main"
|
||||
"hx-swap"="innerHTML">name</button>
|
||||
}
|
||||
};
|
||||
|
||||
Html(html! {
|
||||
"<!DOCTYPE html>"
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta name="charset" content="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta name="description" content="code dependency hell socila media hblang">
|
||||
<link rel="stylesheet" href="/index.css">
|
||||
<title>"depell"</title>
|
||||
</head>
|
||||
<body hidden>
|
||||
<nav>
|
||||
<button "hx-push-url"="/" "hx-get"="/index-view" "hx-target"="main" "hx-swap"="innerHTML">"depell"</button>
|
||||
<section>
|
||||
if let Some(username) = username {
|
||||
<button "hx-push-url"={format_args!("/profile/{username}")} "hx-get"="/profile-view" "hx-target"="main"
|
||||
"hx-swap"="innerHTML">username</button>
|
||||
|f|{nav_button(f, "feed"); nav_button(f, "post")}
|
||||
<button "hx-delete"="/login">"logout"</button>
|
||||
} else {
|
||||
|f|{nav_button(f, "login"); nav_button(f, "signup")}
|
||||
}
|
||||
</section>
|
||||
</nav>
|
||||
<section id="post-form"></section>
|
||||
<main>|f|{body(f)}</main>
|
||||
</body>
|
||||
<script src="https://unpkg.com/htmx.org@2.0.3/dist/htmx.min.js" integrity="sha384-0895/pl2MU10Hqc6jd4RvrthNlDiE9U1tWmX7WRESftEDRosgxNsQG/Ze9YMRzHq" crossorigin="anonymous"></script>
|
||||
<script type="module" src="/index.js"></script>
|
||||
</html>
|
||||
})
|
||||
}
|
||||
|
||||
struct Session {
|
||||
name: String,
|
||||
id: [u8; 32],
|
||||
}
|
||||
|
||||
#[axum::async_trait]
|
||||
impl<S> axum::extract::FromRequestParts<S> for Session {
|
||||
/// If the extractor fails it'll use this "rejection" type. A rejection is
|
||||
/// a kind of error that can be converted into a response.
|
||||
type Rejection = Redirect;
|
||||
|
||||
/// Perform the extraction.
|
||||
async fn from_request_parts(parts: &mut Parts, _: &S) -> Result<Self, Self::Rejection> {
|
||||
let err = redirect("/login");
|
||||
|
||||
let value = parts
|
||||
.headers
|
||||
.get_all(COOKIE)
|
||||
.into_iter()
|
||||
.find_map(|c| c.to_str().ok()?.trim().strip_prefix("id="))
|
||||
.map(|c| c.split_once(';').unwrap_or((c, "")).0)
|
||||
.ok_or(err)?;
|
||||
let mut id = [0u8; 32];
|
||||
parse_hex(value, &mut id).ok_or(err)?;
|
||||
|
||||
let (name, expiration) = db::with(|db| {
|
||||
db.get_session
|
||||
.query_row((id,), |r| Ok((r.get::<_, String>(0)?, r.get::<_, u64>(1)?)))
|
||||
.log("fetching session")
|
||||
.ok_or(err)
|
||||
})?;
|
||||
|
||||
if expiration < now() {
|
||||
return Err(err);
|
||||
}
|
||||
|
||||
Ok(Self { name, id })
|
||||
}
|
||||
}
|
||||
|
||||
fn now() -> u64 {
|
||||
std::time::SystemTime::now()
|
||||
.duration_since(std::time::SystemTime::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs()
|
||||
}
|
||||
|
||||
fn parse_hex(hex: &str, dst: &mut [u8]) -> Option<()> {
|
||||
fn hex_to_nibble(b: u8) -> Option<u8> {
|
||||
Some(match b {
|
||||
b'a'..=b'f' => b - b'a' + 10,
|
||||
b'A'..=b'F' => b - b'A' + 10,
|
||||
b'0'..=b'9' => b - b'0',
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
|
||||
if hex.len() != dst.len() * 2 {
|
||||
return None;
|
||||
}
|
||||
|
||||
for (d, p) in dst.iter_mut().zip(hex.as_bytes().chunks_exact(2)) {
|
||||
*d = (hex_to_nibble(p[0])? << 4) | hex_to_nibble(p[1])?;
|
||||
}
|
||||
|
||||
Some(())
|
||||
}
|
||||
|
||||
fn to_hex(src: &[u8]) -> String {
|
||||
use std::fmt::Write;
|
||||
let mut buf = String::new();
|
||||
for &b in src {
|
||||
write!(buf, "{b:02x}").unwrap()
|
||||
}
|
||||
buf
|
||||
}
|
||||
|
||||
fn main() {
|
||||
tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(amain());
|
||||
}
|
||||
|
||||
mod db {
|
||||
use std::cell::RefCell;
|
||||
|
||||
macro_rules! gen_queries {
|
||||
($vis:vis struct $name:ident {
|
||||
$($qname:ident: $code:expr,)*
|
||||
}) => {
|
||||
$vis struct $name<'a> {
|
||||
$($vis $qname: rusqlite::Statement<'a>,)*
|
||||
}
|
||||
|
||||
impl<'a> $name<'a> {
|
||||
fn new(db: &'a rusqlite::Connection) -> Self {
|
||||
Self {
|
||||
$($qname: db.prepare($code).unwrap(),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
gen_queries! {
|
||||
pub struct Queries {
|
||||
register: "INSERT INTO user (name, password_hash) VALUES(?, ?)",
|
||||
change_passowrd: "UPDATE user SET password_hash = ? WHERE name = ?",
|
||||
authenticate: "SELECT name, password_hash FROM user WHERE name = ?",
|
||||
login: "INSERT OR REPLACE INTO session (id, username, expiration) VALUES(?, ?, ?)",
|
||||
logout: "DELETE FROM session WHERE id = ?",
|
||||
get_session: "SELECT username, expiration FROM session WHERE id = ?",
|
||||
get_user_posts: "SELECT author, name, timestamp, code, (
|
||||
WITH RECURSIVE roots(name, author, code) AS (
|
||||
SELECT name, author, code FROM post WHERE name = outher.name AND author = outher.author
|
||||
UNION
|
||||
SELECT post.name, post.author, post.code FROM
|
||||
post JOIN import ON post.name = import.from_name
|
||||
AND post.author = import.from_author
|
||||
JOIN roots ON import.to_name = roots.name
|
||||
AND import.to_author = roots.author
|
||||
) SELECT (count(*) - 1) FROM roots
|
||||
) AS imports, (
|
||||
WITH RECURSIVE roots(name, author, code) AS (
|
||||
SELECT name, author, code FROM post WHERE name = outher.name AND author = outher.author
|
||||
UNION
|
||||
SELECT post.name, post.author, post.code FROM post
|
||||
JOIN import ON post.name = import.from_name
|
||||
AND post.author = import.from_author
|
||||
JOIN roots ON import.to_name = roots.name
|
||||
AND import.to_author = roots.author
|
||||
) SELECT count(*) FROM roots
|
||||
JOIN run ON roots.name = run.code_name
|
||||
AND roots.author = run.code_author
|
||||
) AS runs FROM post as outher WHERE author = ? ORDER BY timestamp DESC",
|
||||
// TODO: we might want to cache the recursive queries
|
||||
get_pots_before: "SELECT author, name, timestamp, code, (
|
||||
WITH RECURSIVE roots(name, author, code) AS (
|
||||
SELECT name, author, code FROM post WHERE name = outher.name AND author = outher.author
|
||||
UNION
|
||||
SELECT post.name, post.author, post.code FROM
|
||||
post JOIN import ON post.name = import.from_name
|
||||
AND post.author = import.from_author
|
||||
JOIN roots ON import.to_name = roots.name
|
||||
AND import.to_author = roots.author
|
||||
) SELECT (count(*) - 1) FROM roots
|
||||
) AS imports, (
|
||||
WITH RECURSIVE roots(name, author, code) AS (
|
||||
SELECT name, author, code FROM post WHERE name = outher.name AND author = outher.author
|
||||
UNION
|
||||
SELECT post.name, post.author, post.code FROM post
|
||||
JOIN import ON post.name = import.from_name
|
||||
AND post.author = import.from_author
|
||||
JOIN roots ON import.to_name = roots.name
|
||||
AND import.to_author = roots.author
|
||||
) SELECT count(*) FROM roots
|
||||
JOIN run ON roots.name = run.code_name
|
||||
AND roots.author = run.code_author
|
||||
) as runs FROM post AS outher WHERE timestamp < ?",
|
||||
create_post: "INSERT INTO post (name, author, timestamp, code) VALUES(?, ?, ?, ?)",
|
||||
fetch_deps: "
|
||||
WITH RECURSIVE roots(name, author, code) AS (
|
||||
SELECT name, author, code FROM post WHERE name = ? AND author = ?
|
||||
UNION
|
||||
SELECT post.name, post.author, post.code FROM
|
||||
post JOIN import ON post.name = import.to_name
|
||||
AND post.author = import.to_author
|
||||
JOIN roots ON import.from_name = roots.name
|
||||
AND import.from_author = roots.author
|
||||
) SELECT * FROM roots;
|
||||
",
|
||||
create_import: "INSERT INTO import(to_author, to_name, from_author, from_name)
|
||||
VALUES(?, ?, ?, ?)",
|
||||
creata_run: "INSERT OR IGNORE INTO run(code_name, code_author, runner) VALUES(?, ?, ?)",
|
||||
}
|
||||
}
|
||||
|
||||
struct Db {
|
||||
queries: Queries<'static>,
|
||||
_db: Box<rusqlite::Connection>,
|
||||
}
|
||||
|
||||
impl Db {
|
||||
fn new() -> Self {
|
||||
let db = Box::new(rusqlite::Connection::open("db.sqlite").unwrap());
|
||||
Self {
|
||||
queries: Queries::new(unsafe {
|
||||
std::mem::transmute::<&rusqlite::Connection, &rusqlite::Connection>(&db)
|
||||
}),
|
||||
_db: db,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with<T>(with: impl FnOnce(&mut Queries) -> T) -> T {
|
||||
thread_local! { static DB_CONN: RefCell<Db> = RefCell::new(Db::new()); }
|
||||
DB_CONN.with_borrow_mut(|q| with(&mut q.queries))
|
||||
}
|
||||
|
||||
pub fn init() {
|
||||
const SCHEMA_VERSION: usize = 0;
|
||||
const MIGRATIONS: &[&str] = &[include_str!("migrations/1.sql")];
|
||||
|
||||
let db = rusqlite::Connection::open("db.sqlite").unwrap();
|
||||
db.execute_batch(include_str!("schema.sql")).unwrap();
|
||||
|
||||
let schema_version =
|
||||
db.pragma_query_value(None, "user_version", |v| v.get::<_, usize>(0)).unwrap();
|
||||
|
||||
if schema_version != SCHEMA_VERSION {
|
||||
for &mig in &MIGRATIONS[schema_version..] {
|
||||
db.execute_batch(mig).expect(mig);
|
||||
}
|
||||
db.pragma_update(None, "user_version", SCHEMA_VERSION).unwrap();
|
||||
}
|
||||
|
||||
Queries::new(&db);
|
||||
}
|
||||
}
|
||||
|
||||
fn redirect(to: &'static str) -> Redirect {
|
||||
AppendHeaders([("hx-location", to)])
|
||||
}
|
||||
|
||||
trait PublicPage: Default {
|
||||
fn render_to_buf(self, buf: &mut String);
|
||||
|
||||
fn render(self) -> Html<String> {
|
||||
let mut str = String::new();
|
||||
self.render_to_buf(&mut str);
|
||||
Html(str)
|
||||
}
|
||||
|
||||
async fn get() -> Html<String> {
|
||||
Self::default().render()
|
||||
}
|
||||
|
||||
async fn page(session: Option<Session>) -> Html<String> {
|
||||
base(|s| Self::default().render_to_buf(s), session.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
trait Page: Default {
|
||||
fn render_to_buf(self, session: &Session, buf: &mut String);
|
||||
|
||||
fn render(self, session: &Session) -> Html<String> {
|
||||
let mut str = String::new();
|
||||
self.render_to_buf(session, &mut str);
|
||||
Html(str)
|
||||
}
|
||||
|
||||
async fn get(session: Session) -> Html<String> {
|
||||
Self::default().render(&session)
|
||||
}
|
||||
|
||||
async fn page(session: Option<Session>) -> Result<Html<String>, axum::response::Redirect> {
|
||||
match session {
|
||||
Some(session) => {
|
||||
Ok(base(|f| Self::default().render_to_buf(&session, f), Some(&session)))
|
||||
}
|
||||
None => Err(axum::response::Redirect::permanent("/login")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trait ResultExt<O, E> {
|
||||
fn log(self, prefix: impl Display) -> Option<O>;
|
||||
}
|
||||
|
||||
impl<O, E: Display> ResultExt<O, E> for Result<O, E> {
|
||||
fn log(self, prefix: impl Display) -> Option<O> {
|
||||
match self {
|
||||
Ok(v) => Some(v),
|
||||
Err(e) => {
|
||||
log::error!("{prefix}: {e}");
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct Logger;
|
||||
|
||||
impl log::Log for Logger {
|
||||
fn enabled(&self, _: &log::Metadata) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn log(&self, record: &log::Record) {
|
||||
if self.enabled(record.metadata()) {
|
||||
eprintln!("{} - {}", record.module_path().unwrap_or("=="), record.args());
|
||||
}
|
||||
}
|
||||
|
||||
fn flush(&self) {}
|
||||
}
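For reference, the /code route above (handled by fetch_code) accepts a JSON array of "author/name" paths and replies with a map from path to source, including the transitive dependencies gathered by the recursive fetch_deps query. A minimal client-side sketch of calling it, with an invented helper name and example path; this is not necessarily how the project's own index.js does it:

// Hypothetical helper (not from this diff): fetch post sources by "author/name" path.
async function fetchCode(paths) {
    const resp = await fetch('/code', {
        method: 'POST',
        headers: { 'content-type': 'application/json' },
        body: JSON.stringify(paths), // e.g. ["mlokis/example"]
    });
    return await resp.json(); // { "author/name": "code", ... }
}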
|
1
depell/src/migrations/1.sql
Normal file
|
@ -0,0 +1 @@
|
|||
|
21
depell/src/post-page.html
Normal file
|
@ -0,0 +1,21 @@
|
|||
<div id="dep-list">
|
||||
<input placeholder="search impoted deps.." oninput="filterCodeDeps(this, event)">
|
||||
<section id="deps">
|
||||
results show here...
|
||||
</section>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
|
||||
<h3>About posting code</h3>
|
||||
<p>
|
||||
If you are unfamiliar with <a href="https://git.ablecorp.us/AbleOS/holey-bytes">hblang</a>, refer to the
|
||||
<strong>hblang/README.md</strong> or
|
||||
visit <a href="/profile/mlokis">mlokis' posts</a>. Preferably, don't edit the code here.
|
||||
</p>
|
||||
|
||||
<h3>Extra textarea features</h3>
|
||||
<ul>
|
||||
<li>proper tab behaviour</li>
|
||||
<li>snap to previous tab boundary on "empty" lines</li>
|
||||
</ul>
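The textarea features listed above are implemented in index.js rather than in this page, and that part of index.js is not included in this diff. As a rough sketch of the first bullet only, assuming the code-edit element id from the post form and a plain tab character:

// Hypothetical sketch: keep Tab inside the editor instead of moving focus.
const codeEdit = document.getElementById("code-edit");
codeEdit.addEventListener("keydown", (ev) => {
    if (ev.key !== "Tab") return;
    ev.preventDefault();
    const start = codeEdit.selectionStart, end = codeEdit.selectionEnd;
    codeEdit.value = codeEdit.value.slice(0, start) + "\t" + codeEdit.value.slice(end);
    codeEdit.selectionStart = codeEdit.selectionEnd = start + 1;
});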
|
55
depell/src/schema.sql
Normal file
|
@ -0,0 +1,55 @@
|
|||
PRAGMA foreign_keys = ON;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS user(
|
||||
name TEXT NOT NULL,
|
||||
password_hash TEXT NOT NULL,
|
||||
PRIMARY KEY (name)
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS session(
|
||||
id BLOB NOT NULL,
|
||||
username TEXT NOT NULL,
|
||||
expiration INTEGER NOT NULL,
|
||||
FOREIGN KEY (username) REFERENCES user (name)
|
||||
PRIMARY KEY (username)
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS
|
||||
session_id ON session (id);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS post(
|
||||
name TEXT NOT NULL,
|
||||
author TEXT,
|
||||
timestamp INTEGER,
|
||||
code TEXT NOT NULL,
|
||||
FOREIGN KEY (author) REFERENCES user(name) ON DELETE SET NULL,
|
||||
PRIMARY KEY (author, name)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS
|
||||
post_timestamp ON post(timestamp DESC);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS import(
|
||||
from_name TEXT NOT NULL,
|
||||
from_author TEXT,
|
||||
to_name TEXT NOT NULL,
|
||||
to_author TEXT,
|
||||
FOREIGN KEY (from_name, from_author) REFERENCES post(name, author),
|
||||
FOREIGN KEY (to_name, to_author) REFERENCES post(name, author)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS
|
||||
dependencies ON import(from_name, from_author);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS
|
||||
dependants ON import(to_name, to_author);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS run(
|
||||
code_name TEXT NOT NULL,
|
||||
code_author TEXT NOT NULL,
|
||||
runner TEXT NOT NULL,
|
||||
FOREIGN KEY (code_name, code_author) REFERENCES post(name, author),
|
||||
FOREIGN KEY (runner) REFERENCES user(name),
|
||||
PRIMARY KEY (code_name, code_author, runner)
|
||||
);
|
||||
|
17
depell/src/welcome-page.html
Normal file
|
@ -0,0 +1,17 @@
|
|||
<h1>Welcome to depell</h1>
|
||||
<p>
|
||||
Depell (dependency hell) is a simple "social" media site best compared to Twitter, except that all you can post is
|
||||
<a href="https://git.ablecorp.us/AbleOS/holey-bytes">hblang</a> code with no comments allowed. Instead of likes you
|
||||
run the program, and instead of retweets you import the program as a dependency. Runs are counted even when a program is run indirectly.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
The backend only serves the code; the frontend compiles and runs it locally. All posts are immutable.
|
||||
</p>
|
||||
|
||||
<h2>Security?</h2>
|
||||
<p>
|
||||
All code runs in WASM (inside a holey-bytes VM until hblang compiles to wasm) and is controlled by JavaScript. WASM
|
||||
can't do any form of IO without going through JavaScript, so as long as the JS import object does not allow WASM to execute
|
||||
arbitrary JS code, WASM can act as a container inside the JS.
|
||||
</p>
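The containment argument above comes down to the import object handed to the WASM modules: give a module nothing, and all it can do is compute inside its own linear memory while JavaScript decides what goes in and what is read back out. A rough sketch of that idea, not the project's actual loader (which is not part of this diff):

// Hypothetical sketch: instantiate the compiler module with an empty import object,
// so it cannot call back into JS, touch the DOM, or perform any IO on its own.
const { instance } = await WebAssembly.instantiateStreaming(fetch("/hbc.wasm"), {});
// JS copies source code into the module's memory and reads the results back out.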
|
11
depell/wasm-fmt/Cargo.toml
Normal file
|
@ -0,0 +1,11 @@
|
|||
[package]
|
||||
name = "wasm-hbfmt"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib"]
|
||||
|
||||
[dependencies]
|
||||
hblang = { workspace = true, features = ["no_log"] }
|
||||
wasm-rt = { version = "0.1.0", path = "../wasm-rt" }
|
42
depell/wasm-fmt/src/lib.rs
Normal file
|
@ -0,0 +1,42 @@
|
|||
#![no_std]
|
||||
#![feature(str_from_raw_parts)]
|
||||
#![feature(alloc_error_handler)]
|
||||
|
||||
use hblang::{fmt, parser};
|
||||
|
||||
wasm_rt::decl_runtime!(128 * 1024, 1024 * 4);
|
||||
|
||||
const MAX_OUTPUT_SIZE: usize = 1024 * 10;
|
||||
wasm_rt::decl_buffer!(MAX_OUTPUT_SIZE, MAX_OUTPUT, OUTPUT, OUTPUT_LEN);
|
||||
|
||||
const MAX_INPUT_SIZE: usize = 1024 * 4;
|
||||
wasm_rt::decl_buffer!(MAX_INPUT_SIZE, MAX_INPUT, INPUT, INPUT_LEN);
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn fmt() {
|
||||
ALLOCATOR.reset();
|
||||
|
||||
let code = core::str::from_raw_parts(core::ptr::addr_of!(INPUT).cast(), INPUT_LEN);
|
||||
|
||||
let arena = parser::Arena::with_capacity(code.len() * parser::SOURCE_TO_AST_FACTOR);
|
||||
let mut ctx = parser::Ctx::default();
|
||||
let exprs = parser::Parser::parse(&mut ctx, code, "source.hb", &mut parser::no_loader, &arena);
|
||||
|
||||
let mut f = wasm_rt::Write(&mut OUTPUT[..]);
|
||||
fmt::fmt_file(exprs, code, &mut f).unwrap();
|
||||
OUTPUT_LEN = MAX_OUTPUT_SIZE - f.0.len();
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn tok() {
|
||||
let code = core::slice::from_raw_parts_mut(
|
||||
core::ptr::addr_of_mut!(OUTPUT).cast(), OUTPUT_LEN);
|
||||
OUTPUT_LEN = fmt::get_token_kinds(code);
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn minify() {
|
||||
let code = core::str::from_raw_parts_mut(
|
||||
core::ptr::addr_of_mut!(OUTPUT).cast(), OUTPUT_LEN);
|
||||
OUTPUT_LEN = fmt::minify(code);
|
||||
}
|
14
depell/wasm-hbc/Cargo.toml
Normal file
|
@ -0,0 +1,14 @@
|
|||
[package]
|
||||
name = "wasm-hbc"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib"]
|
||||
|
||||
[dependencies]
|
||||
hblang = { workspace = true, features = [] }
|
||||
hbvm.workspace = true
|
||||
log = { version = "0.4.22", features = ["release_max_level_error"] }
|
||||
wasm-rt = { version = "0.1.0", path = "../wasm-rt", features = ["log"] }
|
||||
|
127
depell/wasm-hbc/src/lib.rs
Normal file
|
@ -0,0 +1,127 @@
|
|||
#![feature(alloc_error_handler)]
|
||||
#![feature(slice_take)]
|
||||
#![no_std]
|
||||
|
||||
use {
|
||||
alloc::{string::String, vec::Vec},
|
||||
core::ffi::CStr,
|
||||
hblang::{
|
||||
son::{hbvm::HbvmBackend, Codegen, CodegenCtx},
|
||||
ty::Module,
|
||||
Ent,
|
||||
},
|
||||
};
|
||||
|
||||
extern crate alloc;
|
||||
|
||||
const ARENA_CAP: usize = 128 * 16 * 1024;
|
||||
wasm_rt::decl_runtime!(ARENA_CAP, 1024 * 4);
|
||||
|
||||
const MAX_INPUT_SIZE: usize = 32 * 4 * 1024;
|
||||
wasm_rt::decl_buffer!(MAX_INPUT_SIZE, MAX_INPUT, INPUT, INPUT_LEN);
|
||||
|
||||
#[no_mangle]
|
||||
unsafe fn compile_and_run(mut fuel: usize) {
|
||||
ALLOCATOR.reset();
|
||||
|
||||
_ = log::set_logger(&wasm_rt::Logger);
|
||||
log::set_max_level(log::LevelFilter::Error);
|
||||
|
||||
struct File<'a> {
|
||||
path: &'a str,
|
||||
code: &'a mut str,
|
||||
}
|
||||
|
||||
let mut root = 0;
|
||||
|
||||
let files = {
|
||||
let mut input_bytes =
|
||||
core::slice::from_raw_parts_mut(core::ptr::addr_of_mut!(INPUT).cast::<u8>(), INPUT_LEN);
|
||||
|
||||
let mut files = Vec::with_capacity(32);
|
||||
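// INPUT holds one record per file: [u16 LE path length][path bytes][u16 LE code length][code bytes].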
while let Some((&mut path_len, rest)) = input_bytes.split_first_chunk_mut() {
|
||||
let (path, rest) = rest.split_at_mut(u16::from_le_bytes(path_len) as usize);
|
||||
let (&mut code_len, rest) = rest.split_first_chunk_mut().unwrap();
|
||||
let (code, rest) = rest.split_at_mut(u16::from_le_bytes(code_len) as usize);
|
||||
files.push(File {
|
||||
path: core::str::from_utf8_unchecked(path),
|
||||
code: core::str::from_utf8_unchecked_mut(code),
|
||||
});
|
||||
input_bytes = rest;
|
||||
}
|
||||
|
||||
let root_path = files[root].path;
|
||||
hblang::quad_sort(&mut files, |a, b| a.path.cmp(b.path));
|
||||
root = files.binary_search_by_key(&root_path, |p| p.path).unwrap();
|
||||
|
||||
files
|
||||
};
|
||||
|
||||
let mut ctx = CodegenCtx::default();
|
||||
|
||||
let files = {
|
||||
let paths = files.iter().map(|f| f.path).collect::<Vec<_>>();
|
||||
let mut loader = |path: &str, _: &str, kind| match kind {
|
||||
hblang::parser::FileKind::Module => Ok(paths.binary_search(&path).unwrap()),
|
||||
hblang::parser::FileKind::Embed => Err("embeds are not supported".into()),
|
||||
};
|
||||
files
|
||||
.into_iter()
|
||||
.map(|f| {
|
||||
hblang::parser::Ast::new(
|
||||
f.path,
|
||||
// since 'free' does nothing this is fine
|
||||
String::from_raw_parts(f.code.as_mut_ptr(), f.code.len(), f.code.len()),
|
||||
&mut ctx.parser,
|
||||
&mut loader,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
let mut ct = {
|
||||
let mut backend = HbvmBackend::default();
|
||||
Codegen::new(&mut backend, &files, &mut ctx).generate(Module::new(root));
|
||||
|
||||
if !ctx.parser.errors.borrow().is_empty() {
|
||||
log::error!("{}", ctx.parser.errors.borrow());
|
||||
return;
|
||||
}
|
||||
|
||||
let mut c = Codegen::new(&mut backend, &files, &mut ctx);
|
||||
c.assemble_comptime()
|
||||
};
|
||||
|
||||
while fuel != 0 {
|
||||
match ct.vm.run() {
|
||||
Ok(hbvm::VmRunOk::End) => {
|
||||
log::error!("exit code: {}", ct.vm.read_reg(1).0 as i64);
|
||||
break;
|
||||
}
|
||||
Ok(hbvm::VmRunOk::Ecall) => {
|
||||
let kind = ct.vm.read_reg(2).0;
|
||||
match kind {
|
||||
0 => {
|
||||
let str = ct.vm.read_reg(3).0;
|
||||
let str = unsafe { CStr::from_ptr(str as _) };
|
||||
log::error!("{}", str.to_str().unwrap());
|
||||
}
|
||||
unknown => log::error!("unknown ecall: {unknown}"),
|
||||
}
|
||||
}
|
||||
Ok(hbvm::VmRunOk::Timer) => {
|
||||
fuel -= 1;
|
||||
if fuel == 0 {
|
||||
log::error!("program timed out");
|
||||
}
|
||||
}
|
||||
Ok(hbvm::VmRunOk::Breakpoint) => todo!(),
|
||||
Err(e) => {
|
||||
log::error!("vm error: {e}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//log::error!("memory consumption: {}b / {}b", ALLOCATOR.used(), ARENA_CAP);
|
||||
}
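compile_and_run above reads INPUT as a sequence of length-prefixed records, one per file: a little-endian u16 path length, the path bytes, a little-endian u16 code length, then the code bytes. A sketch of how a caller could build that payload; the function name is invented and the project's real packing code in index.js is not reproduced here:

// Hypothetical helper: serialize [{ path, code }, ...] into the framing parsed above.
function packFiles(files) {
    const enc = new TextEncoder();
    const chunks = [];
    for (const { path, code } of files) {
        for (const bytes of [enc.encode(path), enc.encode(code)]) {
            const len = new Uint8Array(2);
            new DataView(len.buffer).setUint16(0, bytes.length, true); // little-endian u16
            chunks.push(len, bytes);
        }
    }
    const out = new Uint8Array(chunks.reduce((n, c) => n + c.length, 0));
    let off = 0;
    for (const c of chunks) { out.set(c, off); off += c.length; }
    return out; // copy into the module's INPUT buffer and set INPUT_LEN to out.length
}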
|
7
depell/wasm-rt/Cargo.toml
Normal file
|
@ -0,0 +1,7 @@
|
|||
[package]
|
||||
name = "wasm-rt"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
log = { version = "0.4.22", optional = true }
|
162
depell/wasm-rt/src/lib.rs
Normal file
|
@ -0,0 +1,162 @@
|
|||
#![feature(alloc_error_handler)]
|
||||
#![feature(pointer_is_aligned_to)]
|
||||
#![feature(slice_take)]
|
||||
#![no_std]
|
||||
|
||||
use core::{
|
||||
alloc::{GlobalAlloc, Layout},
|
||||
cell::UnsafeCell,
|
||||
};
|
||||
|
||||
extern crate alloc;
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! decl_buffer {
|
||||
($cap:expr, $export_cap:ident, $export_base:ident, $export_len:ident) => {
|
||||
#[no_mangle]
|
||||
static $export_cap: usize = $cap;
|
||||
#[no_mangle]
|
||||
static mut $export_base: [u8; $cap] = [0; $cap];
|
||||
#[no_mangle]
|
||||
static mut $export_len: usize = 0;
|
||||
};
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! decl_runtime {
|
||||
($memory_size:expr, $max_panic_size:expr) => {
|
||||
#[cfg(debug_assertions)]
|
||||
#[no_mangle]
|
||||
static mut PANIC_MESSAGE: [u8; $max_panic_size] = [0; $max_panic_size];
|
||||
#[cfg(debug_assertions)]
|
||||
#[no_mangle]
|
||||
static mut PANIC_MESSAGE_LEN: usize = 0;
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[panic_handler]
|
||||
pub fn handle_panic(_info: &core::panic::PanicInfo) -> ! {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
unsafe {
|
||||
use core::fmt::Write;
|
||||
let mut f = $crate::Write(&mut PANIC_MESSAGE[..]);
|
||||
_ = writeln!(f, "{}", _info);
|
||||
PANIC_MESSAGE_LEN = $max_panic_size - f.0.len();
|
||||
}
|
||||
}
|
||||
|
||||
core::arch::wasm32::unreachable();
|
||||
}
|
||||
|
||||
#[global_allocator]
|
||||
static ALLOCATOR: $crate::ArenaAllocator<{ $memory_size }> = $crate::ArenaAllocator::new();
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[alloc_error_handler]
|
||||
fn alloc_error(_: core::alloc::Layout) -> ! {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
unsafe {
|
||||
use core::fmt::Write;
|
||||
let mut f = $crate::Write(&mut PANIC_MESSAGE[..]);
|
||||
_ = writeln!(f, "out of memory");
|
||||
PANIC_MESSAGE_LEN = $max_panic_size - f.0.len();
|
||||
}
|
||||
}
|
||||
|
||||
core::arch::wasm32::unreachable()
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(feature = "log")]
|
||||
pub struct Logger;
|
||||
|
||||
#[cfg(feature = "log")]
|
||||
impl log::Log for Logger {
|
||||
fn enabled(&self, _: &log::Metadata) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn log(&self, record: &log::Record) {
|
||||
if self.enabled(record.metadata()) {
|
||||
const MAX_LOG_MESSAGE: usize = 1024 * 8;
|
||||
#[no_mangle]
|
||||
static mut LOG_MESSAGES: [u8; MAX_LOG_MESSAGE] = [0; MAX_LOG_MESSAGE];
|
||||
#[no_mangle]
|
||||
static mut LOG_MESSAGES_LEN: usize = 0;
|
||||
|
||||
unsafe {
|
||||
use core::fmt::Write;
|
||||
let mut f = Write(&mut LOG_MESSAGES[LOG_MESSAGES_LEN..]);
|
||||
_ = writeln!(f, "{}", record.args());
|
||||
LOG_MESSAGES_LEN = MAX_LOG_MESSAGE - f.0.len();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn flush(&self) {}
|
||||
}
|
||||
|
||||
pub struct ArenaAllocator<const SIZE: usize> {
|
||||
arena: UnsafeCell<[u8; SIZE]>,
|
||||
head: UnsafeCell<*mut u8>,
|
||||
}
|
||||
|
||||
impl<const SIZE: usize> ArenaAllocator<SIZE> {
|
||||
#[expect(clippy::new_without_default)]
|
||||
pub const fn new() -> Self {
|
||||
ArenaAllocator {
|
||||
arena: UnsafeCell::new([0; SIZE]),
|
||||
head: UnsafeCell::new(core::ptr::null_mut()),
|
||||
}
|
||||
}
|
||||
|
||||
#[expect(clippy::missing_safety_doc)]
|
||||
pub unsafe fn reset(&self) {
|
||||
(*self.head.get()) = self.arena.get().cast::<u8>().add(SIZE);
|
||||
}
|
||||
|
||||
pub fn used(&self) -> usize {
|
||||
unsafe { self.arena.get() as usize + SIZE - (*self.head.get()) as usize }
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl<const SIZE: usize> Sync for ArenaAllocator<SIZE> {}
|
||||
|
||||
unsafe impl<const SIZE: usize> GlobalAlloc for ArenaAllocator<SIZE> {
|
||||
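// Bump-down allocation: move the head toward the arena base, align it down, and return null once the request would cross the base; memory is only reclaimed by reset().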
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
|
||||
let size = layout.size();
|
||||
let align = layout.align();
|
||||
|
||||
let until = self.arena.get() as *mut u8;
|
||||
|
||||
let new_head = (*self.head.get()).sub(size);
|
||||
let aligned_head = (new_head as usize & !(align - 1)) as *mut u8;
|
||||
debug_assert!(aligned_head.is_aligned_to(align));
|
||||
|
||||
if until > aligned_head {
|
||||
return core::ptr::null_mut();
|
||||
}
|
||||
|
||||
*self.head.get() = aligned_head;
|
||||
aligned_head
|
||||
}
|
||||
|
||||
unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
|
||||
/* lol */
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Write<'a>(pub &'a mut [u8]);
|
||||
|
||||
impl core::fmt::Write for Write<'_> {
|
||||
fn write_str(&mut self, s: &str) -> core::fmt::Result {
|
||||
if let Some(m) = self.0.take_mut(..s.len()) {
|
||||
m.copy_from_slice(s.as_bytes());
|
||||
Ok(())
|
||||
} else {
|
||||
Err(core::fmt::Error)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,9 +0,0 @@
|
|||
[package]
|
||||
name = "hbasm"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
paste = "1.0"
|
||||
rhai = "1.16"
|
||||
with_builtin_macros = "0.0.3"
|
|
@ -1,13 +0,0 @@
|
|||
import "hbasm/examples/ableos/std" as std;
|
||||
|
||||
fn main(){
|
||||
std::Error(":+)");
|
||||
std::Warn("Your mom fell in a well!");
|
||||
std::Info("Hello, world!");
|
||||
std::Debug("ABC");
|
||||
std::Trace("Trace Deez");
|
||||
|
||||
tx();
|
||||
}
|
||||
|
||||
main();
|
|
@ -1,24 +0,0 @@
|
|||
fn ipc_send(buffer_id, mem_addr, length){
|
||||
// set the ecall
|
||||
li8(r1, 3);
|
||||
// Set the buffer ID to be the BufferID
|
||||
li64(r2, buffer_id);
|
||||
lra(r3, r0, mem_addr);
|
||||
// set the length
|
||||
li64(r4, length);
|
||||
// ecall
|
||||
eca();
|
||||
}
|
||||
|
||||
private fn log(log_level, string){
|
||||
let str = data::str(string);
|
||||
ipc_send(1, str, str.len);
|
||||
}
|
||||
|
||||
fn Error(string) {log(0, string);}
|
||||
fn Warn(string) {log(1, string);}
|
||||
fn Info(string) {log(2, string);}
|
||||
// Due to rhai limitations this cannot be debug
|
||||
// because of this all of the log levels are upper case
|
||||
fn Debug(string) {log(3, string);}
|
||||
fn Trace(string) {log(4, string);}
|
|
@ -1,9 +0,0 @@
|
|||
let hello = data::str("Hello, world!");
|
||||
|
||||
li8 (r1, 1); // Write syscall
|
||||
li8 (r2, 1); // Stdout FD
|
||||
lra16 (r3, r0, hello); // String buffer
|
||||
li8 (r4, hello.len); // String length
|
||||
eca (); // System call
|
||||
|
||||
tx (); // End program
|
|
@ -1,33 +0,0 @@
|
|||
li8(r1, 69);
|
||||
li8(r2, 0);
|
||||
|
||||
if_eq(r1, r2,
|
||||
|| puts("Equals!"),
|
||||
|| puts("Not equals!"),
|
||||
);
|
||||
|
||||
|
||||
tx(); // END OF MAIN
|
||||
|
||||
/// Inline function – write text to stdout
|
||||
fn puts(string) {
|
||||
let d = data::str(string);
|
||||
li8 (r1, 1); // Write syscall
|
||||
li8 (r2, 1); // Stdout handle
|
||||
lra16 (r3, r0, d);
|
||||
li64 (r4, d.len);
|
||||
eca ();
|
||||
}
|
||||
|
||||
fn if_eq(a, b, thenblk, elseblk) {
|
||||
let elselbl = declabel();
|
||||
let endlbl = declabel();
|
||||
|
||||
jne(a, b, elselbl);
|
||||
thenblk.call();
|
||||
jmp16(endlbl);
|
||||
|
||||
elselbl.here();
|
||||
elseblk.call();
|
||||
endlbl.here();
|
||||
}
|
|
@ -1,85 +0,0 @@
|
|||
use rhai::{CustomType, Engine, ImmutableString};
|
||||
|
||||
use {
|
||||
crate::{object::SymbolRef, SharedObject},
|
||||
rhai::Module,
|
||||
};
|
||||
|
||||
macro_rules! gen_data_insertions {
|
||||
($module:expr, $obj:expr, [$($ty:ident),* $(,)?] $(,)?) => {{
|
||||
let (module, obj) = ($module, $obj);
|
||||
$({
|
||||
let obj = ::std::rc::Rc::clone(obj);
|
||||
let hash = module.set_native_fn(stringify!($ty), move |arr: ::rhai::Array| {
|
||||
let obj = &mut *obj.borrow_mut();
|
||||
let symbol = obj.symbol($crate::object::Section::Data);
|
||||
|
||||
obj.sections
|
||||
.data
|
||||
.reserve(arr.len() * ::std::mem::size_of::<$ty>());
|
||||
|
||||
for item in arr {
|
||||
obj.sections.data.extend(
|
||||
match item.as_int() {
|
||||
Ok(num) => $ty::try_from(num).map_err(|_| "i64".to_owned()),
|
||||
Err(ty) => Err(ty.to_owned()),
|
||||
}
|
||||
.map_err(|err| {
|
||||
::rhai::EvalAltResult::ErrorMismatchDataType(
|
||||
stringify!($ty).to_owned(),
|
||||
err,
|
||||
::rhai::Position::NONE,
|
||||
)
|
||||
})?
|
||||
.to_le_bytes(),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(DataRef {
|
||||
symbol,
|
||||
len: obj.sections.data.len() - symbol.0,
|
||||
})
|
||||
});
|
||||
|
||||
module.update_fn_namespace(hash, ::rhai::FnNamespace::Global);
|
||||
})*
|
||||
}};
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct DataRef {
|
||||
pub symbol: SymbolRef,
|
||||
pub len: usize,
|
||||
}
|
||||
|
||||
impl CustomType for DataRef {
|
||||
fn build(mut builder: rhai::TypeBuilder<Self>) {
|
||||
builder
|
||||
.with_name("DataRef")
|
||||
.with_get("symbol", |this: &mut Self| this.symbol)
|
||||
.with_get("len", |this: &mut Self| this.len as u64 as i64);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn module(engine: &mut Engine, obj: SharedObject) -> Module {
|
||||
let mut module = Module::new();
|
||||
gen_data_insertions!(&mut module, &obj, [i8, i16, i32, i64]);
|
||||
|
||||
{
|
||||
let hash = module.set_native_fn("str", move |s: ImmutableString| {
|
||||
let obj = &mut *obj.borrow_mut();
|
||||
let symbol = obj.symbol(crate::object::Section::Data);
|
||||
|
||||
obj.sections.data.extend(s.as_bytes());
|
||||
Ok(DataRef {
|
||||
symbol,
|
||||
len: s.len(),
|
||||
})
|
||||
});
|
||||
|
||||
module.update_fn_namespace(hash, rhai::FnNamespace::Global);
|
||||
}
|
||||
|
||||
engine.build_type::<DataRef>();
|
||||
module
|
||||
}
|
226
hbasm/src/ins.rs
|
@ -1,226 +0,0 @@
|
|||
use {
|
||||
crate::object::Object,
|
||||
rhai::{FnNamespace, Module},
|
||||
std::{cell::RefCell, rc::Rc},
|
||||
};
|
||||
|
||||
mod optypes {
|
||||
use {
|
||||
crate::{
|
||||
label::UnboundLabel,
|
||||
object::{Object, RelocKey, RelocType, SymbolRef},
|
||||
},
|
||||
rhai::{Dynamic, EvalAltResult, ImmutableString, Position},
|
||||
};
|
||||
|
||||
pub type R = u8;
|
||||
pub type B = i8;
|
||||
pub type H = i16;
|
||||
pub type W = i32;
|
||||
pub type D = i64;
|
||||
|
||||
pub type A = Dynamic;
|
||||
pub type O = Dynamic;
|
||||
pub type P = Dynamic;
|
||||
|
||||
pub fn insert_reloc(
|
||||
obj: &mut Object,
|
||||
ty: RelocType,
|
||||
val: &Dynamic,
|
||||
) -> Result<(), EvalAltResult> {
|
||||
match () {
|
||||
_ if val.is::<SymbolRef>() => {
|
||||
obj.relocation(RelocKey::Symbol(val.clone_cast::<SymbolRef>().0), ty)
|
||||
}
|
||||
_ if val.is::<UnboundLabel>() => {
|
||||
obj.relocation(RelocKey::Symbol(val.clone_cast::<UnboundLabel>().0), ty)
|
||||
}
|
||||
_ if val.is::<DataRef>() => {
|
||||
obj.relocation(RelocKey::Symbol(val.clone_cast::<DataRef>().symbol.0), ty)
|
||||
}
|
||||
_ if val.is_string() => {
|
||||
obj.relocation(RelocKey::Label(val.clone_cast::<ImmutableString>()), ty)
|
||||
}
|
||||
_ if val.is_int() => {
|
||||
let int = val.clone_cast::<i64>();
|
||||
match ty {
|
||||
RelocType::Rel32 => obj.sections.text.extend((int as i32).to_le_bytes()),
|
||||
RelocType::Rel16 => obj.sections.text.extend((int as i16).to_le_bytes()),
|
||||
RelocType::Abs64 => obj.sections.text.extend(int.to_le_bytes()),
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(EvalAltResult::ErrorMismatchDataType(
|
||||
"SybolRef, UnboundLabel, String or Int".to_owned(),
|
||||
val.type_name().to_owned(),
|
||||
Position::NONE,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
macro_rules! gen_insert {
|
||||
(le_bytes: [$($lety:ident),* $(,)?]) => {
|
||||
macro_rules! insert {
|
||||
$(($thing:expr, $obj: expr, $lety) => {
|
||||
$obj.sections.text.extend($thing.to_le_bytes());
|
||||
};)*
|
||||
|
||||
($thing:expr, $obj:expr, A) => {
|
||||
$crate::ins::optypes::insert_reloc(
|
||||
$obj,
|
||||
$crate::object::RelocType::Abs64,
|
||||
$thing
|
||||
)?
|
||||
};
|
||||
($thing:expr, $obj:expr, O) => {
|
||||
$crate::ins::optypes::insert_reloc(
|
||||
$obj,
|
||||
$crate::object::RelocType::Rel32,
|
||||
$thing
|
||||
)?
|
||||
};
|
||||
($thing:expr, $obj:expr, P) => {
|
||||
$crate::ins::optypes::insert_reloc(
|
||||
$obj,
|
||||
$crate::object::RelocType::Rel16,
|
||||
$thing
|
||||
)?
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
gen_insert!(le_bytes: [R, B, H, W, D]);
|
||||
|
||||
#[allow(clippy::single_component_path_imports)]
|
||||
pub(super) use insert;
|
||||
|
||||
use crate::data::DataRef;
|
||||
}
|
||||
|
||||
mod rity {
|
||||
pub use super::optypes::{A, O, P, R};
|
||||
pub type B = i64;
|
||||
pub type H = i64;
|
||||
pub type W = i64;
|
||||
pub type D = i64;
|
||||
}
|
||||
|
||||
mod generic {
|
||||
use {crate::object::Object, rhai::EvalAltResult};
|
||||
|
||||
pub(super) fn convert_op<A, B>(from: A) -> Result<B, EvalAltResult>
|
||||
where
|
||||
B: TryFrom<A>,
|
||||
<B as TryFrom<A>>::Error: std::error::Error + Sync + Send + 'static,
|
||||
{
|
||||
B::try_from(from).map_err(|e| {
|
||||
EvalAltResult::ErrorSystem("Data conversion error".to_owned(), Box::new(e))
|
||||
})
|
||||
}
|
||||
|
||||
macro_rules! gen_ins {
|
||||
($($($name:ident : $ty:ty),*;)*) => {
|
||||
paste::paste! {
|
||||
$(#[inline]
|
||||
pub fn [<$($ty:lower)*>](
|
||||
obj: &mut Object,
|
||||
opcode: u8,
|
||||
$($name: $crate::ins::optypes::$ty),*,
|
||||
) -> Result<(), EvalAltResult> {
|
||||
obj.sections.text.push(opcode);
|
||||
$($crate::ins::optypes::insert!(&$name, obj, $ty);)*
|
||||
Ok(())
|
||||
})*
|
||||
|
||||
macro_rules! gen_ins_fn {
|
||||
$(($obj:expr, $opcode:expr, [<$($ty)*>]) => {
|
||||
move |$($name: $crate::ins::rity::$ty),*| {
|
||||
$crate::ins::generic::[<$($ty:lower)*>](
|
||||
&mut *$obj.borrow_mut(),
|
||||
$opcode,
|
||||
$(
|
||||
$crate::ins::generic::convert_op::<
|
||||
_,
|
||||
$crate::ins::optypes::$ty
|
||||
>($name)?
|
||||
),*
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
};)*
|
||||
|
||||
($obj:expr, $opcode:expr, N) => {
|
||||
move || {
|
||||
$crate::ins::generic::n(&mut *$obj.borrow_mut(), $opcode);
|
||||
Ok(())
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn n(obj: &mut Object, opcode: u8) {
|
||||
obj.sections.text.push(opcode);
|
||||
}
|
||||
|
||||
gen_ins! {
|
||||
o0: R, o1: R;
|
||||
o0: R, o1: R, o2: R;
|
||||
o0: R, o1: R, o2: R, o3: R;
|
||||
o0: R, o1: R, o2: B;
|
||||
o0: R, o1: R, o2: H;
|
||||
o0: R, o1: R, o2: W;
|
||||
o0: R, o1: R, o2: D;
|
||||
o0: R, o1: B;
|
||||
o0: R, o1: H;
|
||||
o0: R, o1: W;
|
||||
o0: R, o1: D;
|
||||
o0: R, o1: R, o2: A;
|
||||
o0: R, o1: R, o2: A, o3: H;
|
||||
o0: R, o1: R, o2: O, o3: H;
|
||||
o0: R, o1: R, o2: P, o3: H;
|
||||
o0: R, o1: R, o2: O;
|
||||
o0: R, o1: R, o2: P;
|
||||
o0: O;
|
||||
o0: P;
|
||||
}
|
||||
|
||||
#[allow(clippy::single_component_path_imports)]
|
||||
pub(super) use gen_ins_fn;
|
||||
}
|
||||
|
||||
macro_rules! instructions {
|
||||
(
|
||||
($module:expr, $obj:expr $(,)?)
|
||||
{ $($opcode:expr, $mnemonic:ident, $ops:ident, $doc:literal;)* }
|
||||
) => {{
|
||||
let (module, obj) = ($module, $obj);
|
||||
$({
|
||||
let obj = Rc::clone(&obj);
|
||||
let hash = module.set_native_fn(
|
||||
paste::paste!(stringify!([<$mnemonic:lower>])),
|
||||
generic::gen_ins_fn!(
|
||||
obj,
|
||||
$opcode,
|
||||
$ops
|
||||
)
|
||||
);
|
||||
|
||||
module.update_fn_namespace(hash, FnNamespace::Global);
|
||||
})*
|
||||
}};
|
||||
}
|
||||
|
||||
pub fn setup(module: &mut Module, obj: Rc<RefCell<Object>>) {
|
||||
with_builtin_macros::with_builtin! {
|
||||
let $spec = include_from_root!("../hbbytecode/instructions.in") in {
|
||||
instructions!((module, obj) { $spec });
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,76 +0,0 @@
|
|||
use {
|
||||
crate::SharedObject,
|
||||
rhai::{Engine, ImmutableString, Module},
|
||||
};
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct UnboundLabel(pub usize);
|
||||
|
||||
pub fn setup(engine: &mut Engine, module: &mut Module, object: SharedObject) {
|
||||
{
|
||||
let object = SharedObject::clone(&object);
|
||||
let hash = module.set_native_fn("label", move || {
|
||||
let mut obj = object.borrow_mut();
|
||||
let symbol = obj.symbol(crate::object::Section::Text);
|
||||
Ok(symbol)
|
||||
});
|
||||
|
||||
module.update_fn_namespace(hash, rhai::FnNamespace::Global);
|
||||
}
|
||||
|
||||
{
|
||||
let object = SharedObject::clone(&object);
|
||||
let hash = module.set_native_fn("label", move |label: ImmutableString| {
|
||||
let mut obj = object.borrow_mut();
|
||||
let symbol = obj.symbol(crate::object::Section::Text);
|
||||
obj.labels.insert(label, symbol.0);
|
||||
|
||||
Ok(symbol)
|
||||
});
|
||||
|
||||
module.update_fn_namespace(hash, rhai::FnNamespace::Global);
|
||||
}
|
||||
|
||||
{
|
||||
let object = SharedObject::clone(&object);
|
||||
let hash = module.set_native_fn("declabel", move || {
|
||||
let mut obj = object.borrow_mut();
|
||||
|
||||
let index = obj.symbols.len();
|
||||
obj.symbols.push(None);
|
||||
|
||||
Ok(UnboundLabel(index))
|
||||
});
|
||||
|
||||
module.update_fn_namespace(hash, rhai::FnNamespace::Global);
|
||||
}
|
||||
|
||||
{
|
||||
let object = SharedObject::clone(&object);
|
||||
let hash = module.set_native_fn("declabel", move |label: ImmutableString| {
|
||||
let mut obj = object.borrow_mut();
|
||||
|
||||
let index = obj.symbols.len();
|
||||
obj.symbols.push(None);
|
||||
obj.labels.insert(label, index);
|
||||
|
||||
Ok(UnboundLabel(index))
|
||||
});
|
||||
|
||||
module.update_fn_namespace(hash, rhai::FnNamespace::Global);
|
||||
}
|
||||
|
||||
{
|
||||
module.set_native_fn("here", move |label: UnboundLabel| {
|
||||
let mut obj = object.borrow_mut();
|
||||
obj.symbols[label.0] = Some(crate::object::SymbolEntry {
|
||||
location: crate::object::Section::Text,
|
||||
offset: obj.sections.text.len(),
|
||||
});
|
||||
|
||||
Ok(())
|
||||
});
|
||||
}
|
||||
|
||||
engine.register_type_with_name::<UnboundLabel>("UnboundLabel");
|
||||
}
|
|
@ -1,45 +0,0 @@
|
|||
mod data;
|
||||
mod ins;
|
||||
mod label;
|
||||
mod linker;
|
||||
mod object;
|
||||
|
||||
use {
|
||||
object::Object,
|
||||
rhai::{Engine, Module},
|
||||
std::{cell::RefCell, rc::Rc},
|
||||
};
|
||||
|
||||
type SharedObject = Rc<RefCell<Object>>;
|
||||
|
||||
pub fn assembler(
|
||||
linkout: &mut impl std::io::Write,
|
||||
loader: impl FnOnce(&mut Engine) -> Result<(), Box<rhai::EvalAltResult>>,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let mut engine = Engine::new();
|
||||
let mut module = Module::new();
|
||||
let obj = Rc::new(RefCell::new(Object::default()));
|
||||
ins::setup(&mut module, Rc::clone(&obj));
|
||||
label::setup(&mut engine, &mut module, Rc::clone(&obj));
|
||||
|
||||
// Registers
|
||||
for n in 0_u8..=255 {
|
||||
module.set_var(format!("r{n}"), n);
|
||||
}
|
||||
|
||||
module.set_native_fn("reg", |n: i64| {
|
||||
Ok(u8::try_from(n).map_err(|_| {
|
||||
rhai::EvalAltResult::ErrorRuntime("Invalid register value".into(), rhai::Position::NONE)
|
||||
})?)
|
||||
});
|
||||
|
||||
module.set_native_fn("as_i64", |n: u8| Ok(n as i64));
|
||||
|
||||
let datamod = Rc::new(data::module(&mut engine, SharedObject::clone(&obj)));
|
||||
engine.register_global_module(Rc::new(module));
|
||||
engine.register_static_module("data", datamod);
|
||||
engine.register_type_with_name::<object::SymbolRef>("SymbolRef");
|
||||
loader(&mut engine)?;
|
||||
linker::link(obj, linkout)?;
|
||||
Ok(())
|
||||
}
|
|
@ -1,35 +0,0 @@
|
|||
use {
|
||||
crate::{
|
||||
object::{RelocKey, RelocType, Section},
|
||||
SharedObject,
|
||||
},
|
||||
std::io::Write,
|
||||
};
|
||||
|
||||
pub fn link(object: SharedObject, out: &mut impl Write) -> std::io::Result<()> {
|
||||
let obj = &mut *object.borrow_mut();
|
||||
for (&loc, entry) in &obj.relocs {
|
||||
let value = match &entry.key {
|
||||
RelocKey::Symbol(sym) => obj.symbols[*sym],
|
||||
RelocKey::Label(label) => obj.symbols[obj.labels[label]],
|
||||
}
|
||||
.ok_or_else(|| std::io::Error::other("Invalid symbol"))?;
|
||||
|
||||
let offset = match value.location {
|
||||
Section::Text => value.offset,
|
||||
Section::Data => value.offset + obj.sections.text.len(),
|
||||
};
|
||||
|
||||
match entry.ty {
|
||||
RelocType::Rel32 => obj.sections.text[loc..loc + 4]
|
||||
.copy_from_slice(&((offset as isize - loc as isize) as i32).to_le_bytes()),
|
||||
RelocType::Rel16 => obj.sections.text[loc..loc + 2]
|
||||
.copy_from_slice(&((offset as isize - loc as isize) as i16).to_le_bytes()),
|
||||
RelocType::Abs64 => obj.sections.text[loc..loc + 8]
|
||||
.copy_from_slice(&(offset as isize - loc as isize).to_le_bytes()),
|
||||
}
|
||||
}
|
||||
|
||||
out.write_all(&obj.sections.text)?;
|
||||
out.write_all(&obj.sections.data)
|
||||
}
|
|
@ -1,8 +0,0 @@
use std::{io::stdout, path::PathBuf};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let path = PathBuf::from(std::env::args().nth(1).ok_or("Missing path")?);
    hbasm::assembler(&mut stdout(), |engine| engine.run_file(path))?;

    Ok(())
}
@ -1,77 +0,0 @@
|
|||
use {rhai::ImmutableString, std::collections::HashMap};
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum Section {
|
||||
Text,
|
||||
Data,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct SymbolEntry {
|
||||
pub location: Section,
|
||||
pub offset: usize,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum RelocKey {
|
||||
Symbol(usize),
|
||||
Label(ImmutableString),
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub enum RelocType {
|
||||
Rel32,
|
||||
Rel16,
|
||||
Abs64,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct RelocEntry {
|
||||
pub key: RelocKey,
|
||||
pub ty: RelocType,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct Sections {
|
||||
pub text: Vec<u8>,
|
||||
pub data: Vec<u8>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct Object {
|
||||
pub sections: Sections,
|
||||
pub symbols: Vec<Option<SymbolEntry>>,
|
||||
pub labels: HashMap<ImmutableString, usize>,
|
||||
pub relocs: HashMap<usize, RelocEntry>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
#[repr(transparent)]
|
||||
pub struct SymbolRef(pub usize);
|
||||
|
||||
impl Object {
|
||||
pub fn symbol(&mut self, section: Section) -> SymbolRef {
|
||||
let section_buf = match section {
|
||||
Section::Text => &mut self.sections.text,
|
||||
Section::Data => &mut self.sections.data,
|
||||
};
|
||||
|
||||
self.symbols.push(Some(SymbolEntry {
|
||||
location: section,
|
||||
offset: section_buf.len(),
|
||||
}));
|
||||
|
||||
SymbolRef(self.symbols.len() - 1)
|
||||
}
|
||||
|
||||
pub fn relocation(&mut self, key: RelocKey, ty: RelocType) {
|
||||
self.relocs
|
||||
.insert(self.sections.text.len(), RelocEntry { key, ty });
|
||||
|
||||
self.sections.text.extend(match ty {
|
||||
RelocType::Rel32 => &[0_u8; 4] as &[u8],
|
||||
RelocType::Rel16 => &[0; 2],
|
||||
RelocType::Abs64 => &[0; 8],
|
||||
});
|
||||
}
|
||||
}
|
|
@ -1,119 +0,0 @@
|
|||
#![no_std]
|
||||
|
||||
use core::convert::TryFrom;
|
||||
|
||||
type OpR = u8;
|
||||
|
||||
type OpA = u64;
|
||||
type OpO = i32;
|
||||
type OpP = i16;
|
||||
|
||||
type OpB = u8;
|
||||
type OpH = u16;
|
||||
type OpW = u32;
|
||||
type OpD = u64;
|
||||
|
||||
/// # Safety
|
||||
/// Has to be valid to be decoded from bytecode.
|
||||
pub unsafe trait BytecodeItem {}
|
||||
macro_rules! define_items {
|
||||
($($name:ident ($($item:ident),* $(,)?)),* $(,)?) => {
|
||||
$(
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
#[repr(packed)]
|
||||
pub struct $name($(pub $item),*);
|
||||
unsafe impl BytecodeItem for $name {}
|
||||
)*
|
||||
};
|
||||
}
|
||||
|
||||
define_items! {
|
||||
OpsRR (OpR, OpR ),
|
||||
OpsRRR (OpR, OpR, OpR ),
|
||||
OpsRRRR (OpR, OpR, OpR, OpR),
|
||||
OpsRRB (OpR, OpR, OpB ),
|
||||
OpsRRH (OpR, OpR, OpH ),
|
||||
OpsRRW (OpR, OpR, OpW ),
|
||||
OpsRRD (OpR, OpR, OpD ),
|
||||
OpsRB (OpR, OpB ),
|
||||
OpsRH (OpR, OpH ),
|
||||
OpsRW (OpR, OpW ),
|
||||
OpsRD (OpR, OpD ),
|
||||
OpsRRA (OpR, OpR, OpA ),
|
||||
OpsRRAH (OpR, OpR, OpA, OpH),
|
||||
OpsRROH (OpR, OpR, OpO, OpH),
|
||||
OpsRRPH (OpR, OpR, OpP, OpH),
|
||||
OpsRRO (OpR, OpR, OpO ),
|
||||
OpsRRP (OpR, OpR, OpP ),
|
||||
OpsO (OpO, ),
|
||||
OpsP (OpP, ),
|
||||
OpsN ( ),
|
||||
}
|
||||
|
||||
unsafe impl BytecodeItem for u8 {}
|
||||
|
||||
::with_builtin_macros::with_builtin! {
|
||||
let $spec = include_from_root!("instructions.in") in {
|
||||
/// Invoke macro with bytecode definition
|
||||
///
|
||||
/// # Format
|
||||
/// ```text
|
||||
/// Opcode, Mnemonic, Type, Docstring;
|
||||
/// ```
|
||||
///
|
||||
/// # Type
|
||||
/// ```text
|
||||
/// Types consist of letters meaning a single field
|
||||
/// | Type | Size (B) | Meaning |
|
||||
/// |:-----|:---------|:------------------------|
|
||||
/// | N | 0 | Empty |
|
||||
/// | R | 1 | Register |
|
||||
/// | A | 8 | Absolute address |
|
||||
/// | O | 4 | Relative address offset |
|
||||
/// | P | 2 | Relative address offset |
|
||||
/// | B | 1 | Immediate |
|
||||
/// | H | 2 | Immediate |
|
||||
/// | W | 4 | Immediate |
|
||||
/// | D | 8 | Immediate |
|
||||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! invoke_with_def {
|
||||
($macro:path) => {
|
||||
$macro! { $spec }
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! gen_opcodes {
|
||||
($($opcode:expr, $mnemonic:ident, $_ty:ident, $doc:literal;)*) => {
|
||||
pub mod opcode {
|
||||
$(
|
||||
#[doc = $doc]
|
||||
pub const $mnemonic: u8 = $opcode;
|
||||
)*
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Rounding mode
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
#[repr(u8)]
|
||||
pub enum RoundingMode {
|
||||
NearestEven = 0,
|
||||
Truncate = 1,
|
||||
Up = 2,
|
||||
Down = 3,
|
||||
}
|
||||
|
||||
impl TryFrom<u8> for RoundingMode {
|
||||
type Error = ();
|
||||
|
||||
fn try_from(value: u8) -> Result<Self, Self::Error> {
|
||||
(value <= 3)
|
||||
.then(|| unsafe { core::mem::transmute(value) })
|
||||
.ok_or(())
|
||||
}
|
||||
}
|
||||
|
||||
invoke_with_def!(gen_opcodes);
|
|
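To make the documented entry format concrete: a minimal sketch, using a hypothetical instructions.in line (the real opcode table is included from instructions.in and is not shown in this diff), of roughly what gen_opcodes! expands it into.

    // Hypothetical entry, in the documented `Opcode, Mnemonic, Type, Docstring;` form:
    //     0x01, ADD, RRR, "Register addition";
    //
    // For that entry, `gen_opcodes!` produces a documented `u8` constant:
    pub mod opcode {
        /// Register addition
        pub const ADD: u8 = 0x01;
    }
    // `invoke_with_def!(gen_opcodes)` passes the whole included specification to
    // the macro, so every listed mnemonic becomes such a constant.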
@ -1,17 +0,0 @@
macro_rules! arch_specific {
    {
        $({$($cfg:tt)*} : $mod:ident;)*
    } => {$(
        #[cfg($($cfg)*)]
        mod $mod;

        #[cfg($($cfg)*)]
        pub use $mod::*;
    )*};
}

arch_specific! {
    {target_arch = "x86_64" }: x86_64;
    {target_arch = "riscv64"}: riscv64;
    {target_arch = "aarch64"}: aarch64;
}
@ -1,9 +0,0 @@
[package]
name = "hbvm_aos_on_linux"
version = "0.1.0"
edition = "2021"
default-run = "hbvm_aos_on_linux"

[dependencies]
hbvm.path = "../hbvm"
nix = { version = "0.27", features = ["mman", "signal"] }
@ -1,3 +0,0 @@
As close to the AbleOS runtime as possible

useful for me to spec out things on my laptop
@ -1,96 +0,0 @@
|
|||
//! Holey Bytes Experimental Runtime
|
||||
mod mem;
|
||||
|
||||
use {
|
||||
hbvm::{mem::Address, Vm, VmRunOk},
|
||||
nix::sys::mman::{mmap, MapFlags, ProtFlags},
|
||||
std::{env::args, fs::File, num::NonZeroUsize, process::exit},
|
||||
};
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
eprintln!("== HB×RT (Holey Bytes Linux Runtime) v0.1 ==");
|
||||
eprintln!("[W] Currently supporting only flat images");
|
||||
|
||||
let Some(image_path) = args().nth(1) else {
|
||||
eprintln!("[E] Missing image path");
|
||||
exit(1);
|
||||
};
|
||||
|
||||
// Load program
|
||||
eprintln!("[I] Loading image from \"{image_path}\"");
|
||||
let file = File::open(image_path)?;
|
||||
let ptr = unsafe {
|
||||
mmap(
|
||||
None,
|
||||
NonZeroUsize::new(file.metadata()?.len() as usize).ok_or("File is empty")?,
|
||||
ProtFlags::PROT_READ,
|
||||
MapFlags::MAP_PRIVATE,
|
||||
Some(&file),
|
||||
0,
|
||||
)?
|
||||
};
|
||||
|
||||
eprintln!("[I] Image loaded at {ptr:p}");
|
||||
|
||||
// Execute program
|
||||
let mut vm = unsafe { Vm::<_, 0>::new(mem::HostMemory, Address::new(ptr as u64)) };
|
||||
|
||||
// Memory access fault handling
|
||||
unsafe {
|
||||
use nix::sys::signal;
|
||||
|
||||
extern "C" fn action(
|
||||
_: std::ffi::c_int,
|
||||
info: *mut nix::libc::siginfo_t,
|
||||
_: *mut std::ffi::c_void,
|
||||
) {
|
||||
unsafe {
|
||||
eprintln!("[E] Memory access fault at {:p}", (*info).si_addr());
|
||||
}
|
||||
}
|
||||
|
||||
signal::sigaction(
|
||||
signal::Signal::SIGSEGV,
|
||||
&nix::sys::signal::SigAction::new(
|
||||
signal::SigHandler::SigAction(action),
|
||||
signal::SaFlags::SA_NODEFER,
|
||||
nix::sys::signalfd::SigSet::empty(),
|
||||
),
|
||||
)?;
|
||||
}
|
||||
|
||||
let stat = loop {
|
||||
match vm.run() {
|
||||
Ok(VmRunOk::Breakpoint) => eprintln!(
|
||||
"[I] Hit breakpoint\nIP: {}\n== Registers ==\n{:?}",
|
||||
vm.pc, vm.registers
|
||||
),
|
||||
Ok(VmRunOk::Timer) => (),
|
||||
Ok(VmRunOk::Ecall) => {
|
||||
|
||||
// unsafe {
|
||||
// std::arch::asm!(
|
||||
// "syscall",
|
||||
// inlateout("rax") vm.registers[1].0,
|
||||
// in("rdi") vm.registers[2].0,
|
||||
// in("rsi") vm.registers[3].0,
|
||||
// in("rdx") vm.registers[4].0,
|
||||
// in("r10") vm.registers[5].0,
|
||||
// in("r8") vm.registers[6].0,
|
||||
// in("r9") vm.registers[7].0,
|
||||
// )
|
||||
// }
|
||||
}
|
||||
Ok(VmRunOk::End) => break Ok(()),
|
||||
Err(e) => break Err(e),
|
||||
}
|
||||
};
|
||||
|
||||
eprintln!("\n== Registers ==\n{:?}", vm.registers);
|
||||
if let Err(e) = stat {
|
||||
eprintln!("\n[E] Runtime error: {e:?}");
|
||||
exit(2);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -1,31 +0,0 @@
|
|||
use hbvm::mem::{Address, LoadError, Memory, StoreError};
|
||||
|
||||
pub struct HostMemory;
|
||||
impl Memory for HostMemory {
|
||||
#[inline]
|
||||
unsafe fn load(
|
||||
&mut self,
|
||||
addr: Address,
|
||||
target: *mut u8,
|
||||
count: usize,
|
||||
) -> Result<(), LoadError> {
|
||||
unsafe { core::ptr::copy(addr.get() as *const u8, target, count) }
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn store(
|
||||
&mut self,
|
||||
addr: Address,
|
||||
source: *const u8,
|
||||
count: usize,
|
||||
) -> Result<(), StoreError> {
|
||||
unsafe { core::ptr::copy(source, addr.get() as *mut u8, count) }
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn prog_read<T: Copy>(&mut self, addr: Address) -> T {
|
||||
core::ptr::read(addr.get() as *const T)
|
||||
}
|
||||
}
|
|
@ -1,94 +0,0 @@
|
|||
//! Holey Bytes Experimental Runtime
|
||||
mod mem;
|
||||
|
||||
use {
|
||||
hbvm::{mem::Address, Vm, VmRunOk},
|
||||
nix::sys::mman::{mmap, MapFlags, ProtFlags},
|
||||
std::{env::args, fs::File, num::NonZeroUsize, process::exit},
|
||||
};
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
eprintln!("== HB×RT (Holey Bytes Linux Runtime) v0.1 ==");
|
||||
eprintln!("[W] Currently supporting only flat images");
|
||||
|
||||
let Some(image_path) = args().nth(1) else {
|
||||
eprintln!("[E] Missing image path");
|
||||
exit(1);
|
||||
};
|
||||
|
||||
// Load program
|
||||
eprintln!("[I] Loading image from \"{image_path}\"");
|
||||
let file = File::open(image_path)?;
|
||||
let ptr = unsafe {
|
||||
mmap(
|
||||
None,
|
||||
NonZeroUsize::new(file.metadata()?.len() as usize).ok_or("File is empty")?,
|
||||
ProtFlags::PROT_READ,
|
||||
MapFlags::MAP_PRIVATE,
|
||||
Some(&file),
|
||||
0,
|
||||
)?
|
||||
};
|
||||
|
||||
eprintln!("[I] Image loaded at {ptr:p}");
|
||||
|
||||
// Execute program
|
||||
let mut vm = unsafe { Vm::<_, 0>::new(mem::HostMemory, Address::new(ptr as u64)) };
|
||||
|
||||
// Memory access fault handling
|
||||
unsafe {
|
||||
use nix::sys::signal;
|
||||
|
||||
extern "C" fn action(
|
||||
_: std::ffi::c_int,
|
||||
info: *mut nix::libc::siginfo_t,
|
||||
_: *mut std::ffi::c_void,
|
||||
) {
|
||||
unsafe {
|
||||
eprintln!("[E] Memory access fault at {:p}", (*info).si_addr());
|
||||
exit(2);
|
||||
}
|
||||
}
|
||||
|
||||
signal::sigaction(
|
||||
signal::Signal::SIGSEGV,
|
||||
&nix::sys::signal::SigAction::new(
|
||||
signal::SigHandler::SigAction(action),
|
||||
signal::SaFlags::SA_NODEFER,
|
||||
nix::sys::signalfd::SigSet::empty(),
|
||||
),
|
||||
)?;
|
||||
}
|
||||
|
||||
let stat = loop {
|
||||
match vm.run() {
|
||||
Ok(VmRunOk::Breakpoint) => eprintln!(
|
||||
"[I] Hit breakpoint\nIP: {}\n== Registers ==\n{:?}",
|
||||
vm.pc, vm.registers
|
||||
),
|
||||
Ok(VmRunOk::Timer) => (),
|
||||
Ok(VmRunOk::Ecall) => unsafe {
|
||||
std::arch::asm!(
|
||||
"syscall",
|
||||
inlateout("rax") vm.registers[1].0,
|
||||
in("rdi") vm.registers[2].0,
|
||||
in("rsi") vm.registers[3].0,
|
||||
in("rdx") vm.registers[4].0,
|
||||
in("r10") vm.registers[5].0,
|
||||
in("r8") vm.registers[6].0,
|
||||
in("r9") vm.registers[7].0,
|
||||
)
|
||||
},
|
||||
Ok(VmRunOk::End) => break Ok(()),
|
||||
Err(e) => break Err(e),
|
||||
}
|
||||
};
|
||||
|
||||
eprintln!("\n== Registers ==\n{:?}", vm.registers);
|
||||
if let Err(e) = stat {
|
||||
eprintln!("\n[E] Runtime error: {e:?}");
|
||||
exit(2);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -1,31 +0,0 @@
|
|||
use hbvm::mem::{Address, LoadError, Memory, StoreError};
|
||||
|
||||
pub struct HostMemory;
|
||||
impl Memory for HostMemory {
|
||||
#[inline]
|
||||
unsafe fn load(
|
||||
&mut self,
|
||||
addr: Address,
|
||||
target: *mut u8,
|
||||
count: usize,
|
||||
) -> Result<(), LoadError> {
|
||||
unsafe { core::ptr::copy(addr.get() as *const u8, target, count) }
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn store(
|
||||
&mut self,
|
||||
addr: Address,
|
||||
source: *const u8,
|
||||
count: usize,
|
||||
) -> Result<(), StoreError> {
|
||||
unsafe { core::ptr::copy(source, addr.get() as *mut u8, count) }
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn prog_read<T: Copy>(&mut self, addr: Address) -> T {
|
||||
core::ptr::read(addr.get() as *const T)
|
||||
}
|
||||
}
|
lang/Cargo.toml (new file)
@ -0,0 +1,23 @@
[package]
name = "hblang"
version = "0.1.0"
edition = "2021"

[[bin]]
name = "hbc"
path = "src/main.rs"

[[bin]]
name = "fuzz"
path = "src/fuzz_main.rs"

[dependencies]
hbbytecode = { workspace = true, features = ["disasm"] }
hbvm = { workspace = true, features = ["nightly"] }
hashbrown = { version = "0.15.0", default-features = false, features = ["raw-entry"] }
log = "0.4.22"

[features]
default = ["std"]
std = []
no_log = ["log/max_level_off"]
lang/README.md (new file, 1810 lines)
File diff suppressed because one or more lines are too long
lang/command-help.txt (new file)
@ -0,0 +1,4 @@
--fmt - format all imported source files
--fmt-stdout - don't write the formatted file but print it
--dump-asm - output assembly instead of raw code (the assembly is more for debugging the compiler)
--threads <1...> - number of extra threads the compiler can use [default: 0]
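These flags map onto the Options struct added in lang/src/fs.rs below; a minimal sketch of invoking the compiler programmatically, assuming Options and run_compiler are re-exported from the hblang crate root and that main.hb is a placeholder input file.

    // Rough equivalent of `hbc --dump-asm --threads 2 main.hb`.
    fn compile_example() -> std::io::Result<()> {
        let mut out = Vec::new();
        let mut warnings = String::new();
        let options = hblang::Options {
            dump_asm: true,
            // `--threads 2` requests one extra worker on top of the main thread.
            extra_threads: 1,
            ..Default::default()
        };
        hblang::run_compiler("main.hb", options, &mut out, &mut warnings)?;
        std::io::Write::write_all(&mut std::io::stdout(), &out)
    }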
lang/src/fmt.rs (new file)
@ -0,0 +1,614 @@
|
|||
use {
|
||||
crate::{
|
||||
lexer::{self, Lexer, TokenKind},
|
||||
parser::{self, CommentOr, CtorField, EnumField, Expr, Poser, Radix, StructField},
|
||||
},
|
||||
core::fmt::{self},
|
||||
};
|
||||
|
||||
pub fn display_radix(radix: Radix, mut value: u64, buf: &mut [u8; 64]) -> &str {
|
||||
fn conv_radix(d: u8) -> u8 {
|
||||
match d {
|
||||
0..=9 => d + b'0',
|
||||
_ => d - 10 + b'A',
|
||||
}
|
||||
}
|
||||
|
||||
for (i, b) in buf.iter_mut().enumerate().rev() {
|
||||
let d = (value % radix as u64) as u8;
|
||||
value /= radix as u64;
|
||||
*b = conv_radix(d);
|
||||
if value == 0 {
|
||||
return unsafe { core::str::from_utf8_unchecked(&buf[i..]) };
|
||||
}
|
||||
}
|
||||
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
#[repr(u8)]
|
||||
enum TokenGroup {
|
||||
Blank,
|
||||
Comment,
|
||||
Keyword,
|
||||
Identifier,
|
||||
Directive,
|
||||
Number,
|
||||
String,
|
||||
Op,
|
||||
Assign,
|
||||
Paren,
|
||||
Bracket,
|
||||
Colon,
|
||||
Comma,
|
||||
Dot,
|
||||
Ctor,
|
||||
}
|
||||
|
||||
fn token_group(kind: TokenKind) -> TokenGroup {
|
||||
use {crate::lexer::TokenKind::*, TokenGroup as TG};
|
||||
match kind {
|
||||
BSlash | Pound | Eof | Ct => TG::Blank,
|
||||
Comment => TG::Comment,
|
||||
Directive => TG::Directive,
|
||||
Colon => TG::Colon,
|
||||
Semi | Comma => TG::Comma,
|
||||
Dot => TG::Dot,
|
||||
Ctor | Tupl | TArrow => TG::Ctor,
|
||||
LParen | RParen => TG::Paren,
|
||||
LBrace | RBrace | LBrack | RBrack => TG::Bracket,
|
||||
Number | Float => TG::Number,
|
||||
Under | CtIdent | Ident => TG::Identifier,
|
||||
Tick | Tilde | Que | Not | Mod | Band | Bor | Xor | Mul | Add | Sub | Div | Shl | Shr
|
||||
| Or | And | Lt | Gt | Eq | Le | Ge | Ne => TG::Op,
|
||||
Decl | Assign | BorAss | XorAss | BandAss | AddAss | SubAss | MulAss | DivAss | ModAss
|
||||
| ShrAss | ShlAss => TG::Assign,
|
||||
DQuote | Quote => TG::String,
|
||||
Return | If | Else | Loop | Break | Continue | Fn | Idk | Die | Struct | Packed | True
|
||||
| False | Null | Match | Enum => TG::Keyword,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_token_kinds(mut source: &mut [u8]) -> usize {
|
||||
let len = source.len();
|
||||
loop {
|
||||
let src = unsafe { core::str::from_utf8_unchecked(source) };
|
||||
let mut token = lexer::Lexer::new(src).eat();
|
||||
match token.kind {
|
||||
TokenKind::Eof => break,
|
||||
// CtIdent/Directive tokens start after their `$`/`@` sigil; widen the range to include it
|
||||
TokenKind::CtIdent | TokenKind::Directive => token.start -= 1,
|
||||
_ => {}
|
||||
}
|
||||
let start = token.start as usize;
|
||||
let end = token.end as usize;
|
||||
source[..start].fill(0);
|
||||
source[start..end].fill(token_group(token.kind) as u8);
|
||||
source = &mut source[end..];
|
||||
}
|
||||
len
|
||||
}
|
||||
|
||||
pub fn minify(source: &mut str) -> usize {
|
||||
fn needs_space(c: u8) -> bool {
|
||||
matches!(c, b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | 127..)
|
||||
}
|
||||
|
||||
let mut writer = source.as_mut_ptr();
|
||||
let mut reader = &source[..];
|
||||
let mut prev_needs_whitespace = false;
|
||||
let mut prev_needs_newline = false;
|
||||
loop {
|
||||
let mut token = lexer::Lexer::new(reader).eat();
|
||||
match token.kind {
|
||||
TokenKind::Eof => break,
|
||||
TokenKind::CtIdent | TokenKind::Directive => token.start -= 1,
|
||||
_ => {}
|
||||
}
|
||||
|
||||
let cpy_len = token.range().len();
|
||||
|
||||
let mut prefix = 0;
|
||||
if prev_needs_whitespace && needs_space(reader.as_bytes()[token.start as usize]) {
|
||||
prefix = b' ';
|
||||
debug_assert!(token.start != 0, "{reader}");
|
||||
}
|
||||
prev_needs_whitespace = needs_space(reader.as_bytes()[token.end as usize - 1]);
|
||||
|
||||
let inbetween_new_lines =
|
||||
reader[..token.start as usize].bytes().filter(|&b| b == b'\n').count()
|
||||
+ token.kind.precedence().is_some() as usize;
|
||||
let extra_prefix_new_lines = if inbetween_new_lines > 1 {
|
||||
1 + token.kind.precedence().is_none() as usize
|
||||
} else {
|
||||
prev_needs_newline as usize
|
||||
};
|
||||
|
||||
if token.kind == TokenKind::Comment && reader.as_bytes()[token.end as usize - 1] != b'/' {
|
||||
prev_needs_newline = true;
|
||||
prev_needs_whitespace = false;
|
||||
} else {
|
||||
prev_needs_newline = false;
|
||||
}
|
||||
|
||||
let sstr = reader[token.start as usize..].as_ptr();
|
||||
reader = &reader[token.end as usize..];
|
||||
unsafe {
|
||||
if extra_prefix_new_lines != 0 {
|
||||
for _ in 0..extra_prefix_new_lines {
|
||||
writer.write(b'\n');
|
||||
writer = writer.add(1);
|
||||
}
|
||||
} else if prefix != 0 {
|
||||
writer.write(prefix);
|
||||
writer = writer.add(1);
|
||||
}
|
||||
writer.copy_from(sstr, cpy_len);
|
||||
writer = writer.add(cpy_len);
|
||||
}
|
||||
}
|
||||
|
||||
unsafe { writer.sub_ptr(source.as_mut_ptr()) }
|
||||
}
|
||||
|
||||
pub struct Formatter<'a> {
|
||||
source: &'a str,
|
||||
depth: usize,
|
||||
}
|
||||
|
||||
// we exclusively use `write_str` to reduce bloat
|
||||
impl<'a> Formatter<'a> {
|
||||
pub fn new(source: &'a str) -> Self {
|
||||
Self { source, depth: 0 }
|
||||
}
|
||||
|
||||
fn fmt_list<T: Poser, F: core::fmt::Write>(
|
||||
&mut self,
|
||||
f: &mut F,
|
||||
trailing: bool,
|
||||
end: &str,
|
||||
sep: &str,
|
||||
list: &[T],
|
||||
fmt: impl Fn(&mut Self, &T, &mut F) -> fmt::Result,
|
||||
) -> fmt::Result {
|
||||
self.fmt_list_low(f, trailing, end, sep, list, |s, v, f| {
|
||||
fmt(s, v, f)?;
|
||||
Ok(true)
|
||||
})
|
||||
}
|
||||
|
||||
fn fmt_list_low<T: Poser, F: core::fmt::Write>(
|
||||
&mut self,
|
||||
f: &mut F,
|
||||
trailing: bool,
|
||||
end: &str,
|
||||
sep: &str,
|
||||
list: &[T],
|
||||
fmt: impl Fn(&mut Self, &T, &mut F) -> Result<bool, fmt::Error>,
|
||||
) -> fmt::Result {
|
||||
if !trailing {
|
||||
let mut first = true;
|
||||
for expr in list {
|
||||
if !core::mem::take(&mut first) {
|
||||
f.write_str(sep)?;
|
||||
f.write_str(" ")?;
|
||||
}
|
||||
first = !fmt(self, expr, f)?;
|
||||
}
|
||||
return f.write_str(end);
|
||||
}
|
||||
|
||||
writeln!(f)?;
|
||||
self.depth += 1;
|
||||
let res = (|| {
|
||||
for (i, stmt) in list.iter().enumerate() {
|
||||
for _ in 0..self.depth {
|
||||
f.write_str("\t")?;
|
||||
}
|
||||
let add_sep = fmt(self, stmt, f)?;
|
||||
if add_sep {
|
||||
f.write_str(sep)?;
|
||||
}
|
||||
if let Some(expr) = list.get(i + 1)
|
||||
&& let Some(rest) = self.source.get(expr.posi() as usize..)
|
||||
{
|
||||
if sep.is_empty() && insert_needed_semicolon(rest) {
|
||||
f.write_str(";")?;
|
||||
}
|
||||
if preserve_newlines(&self.source[..expr.posi() as usize]) > 1 {
|
||||
f.write_str("\n")?;
|
||||
}
|
||||
}
|
||||
if add_sep {
|
||||
f.write_str("\n")?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
})();
|
||||
self.depth -= 1;
|
||||
|
||||
for _ in 0..self.depth {
|
||||
f.write_str("\t")?;
|
||||
}
|
||||
f.write_str(end)?;
|
||||
res
|
||||
}
|
||||
|
||||
fn fmt_paren<F: core::fmt::Write>(
|
||||
&mut self,
|
||||
expr: &Expr,
|
||||
f: &mut F,
|
||||
cond: impl FnOnce(&Expr) -> bool,
|
||||
) -> fmt::Result {
|
||||
if cond(expr) {
|
||||
f.write_str("(")?;
|
||||
self.fmt(expr, f)?;
|
||||
f.write_str(")")
|
||||
} else {
|
||||
self.fmt(expr, f)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fmt<F: core::fmt::Write>(&mut self, expr: &Expr, f: &mut F) -> fmt::Result {
|
||||
macro_rules! impl_parenter {
|
||||
($($name:ident => $pat:pat,)*) => {
|
||||
$(
|
||||
let $name = |e: &Expr| matches!(e, $pat);
|
||||
)*
|
||||
};
|
||||
}
|
||||
|
||||
impl_parenter! {
|
||||
unary => Expr::BinOp { .. },
|
||||
postfix => Expr::UnOp { .. } | Expr::BinOp { .. },
|
||||
consecutive => Expr::UnOp { .. },
|
||||
}
|
||||
|
||||
match *expr {
|
||||
Expr::Ct { value, .. } => {
|
||||
f.write_str("$: ")?;
|
||||
self.fmt(value, f)
|
||||
}
|
||||
Expr::String { literal, .. } => f.write_str(literal),
|
||||
Expr::Comment { literal, .. } => f.write_str(literal),
|
||||
Expr::Mod { path, .. } => write!(f, "@use(\"{path}\")"),
|
||||
Expr::Embed { path, .. } => write!(f, "@embed(\"{path}\")"),
|
||||
Expr::Field { target, name: field, .. } => {
|
||||
self.fmt_paren(target, f, postfix)?;
|
||||
f.write_str(".")?;
|
||||
f.write_str(field)
|
||||
}
|
||||
Expr::Directive { name, args, .. } => {
|
||||
f.write_str("@")?;
|
||||
f.write_str(name)?;
|
||||
f.write_str("(")?;
|
||||
self.fmt_list(f, false, ")", ",", args, Self::fmt)
|
||||
}
|
||||
Expr::Struct { fields, trailing_comma, packed, .. } => {
|
||||
if packed {
|
||||
f.write_str("packed ")?;
|
||||
}
|
||||
|
||||
f.write_str("struct {")?;
|
||||
self.fmt_list_low(f, trailing_comma, "}", ",", fields, |s, field, f| {
|
||||
match field {
|
||||
CommentOr::Or(StructField { name, ty, .. }) => {
|
||||
f.write_str(name)?;
|
||||
f.write_str(": ")?;
|
||||
s.fmt(ty, f)?
|
||||
}
|
||||
CommentOr::Comment { literal, .. } => {
|
||||
f.write_str(literal)?;
|
||||
f.write_str("\n")?;
|
||||
}
|
||||
}
|
||||
Ok(field.or().is_some())
|
||||
})
|
||||
}
|
||||
Expr::Enum { variants, trailing_comma, .. } => {
|
||||
f.write_str("enum {")?;
|
||||
self.fmt_list_low(f, trailing_comma, "}", ",", variants, |_, var, f| {
|
||||
match var {
|
||||
CommentOr::Or(EnumField { name, .. }) => {
|
||||
f.write_str(name)?;
|
||||
}
|
||||
CommentOr::Comment { literal, .. } => {
|
||||
f.write_str(literal)?;
|
||||
f.write_str("\n")?;
|
||||
}
|
||||
}
|
||||
Ok(var.or().is_some())
|
||||
})
|
||||
}
|
||||
Expr::Ctor { ty, fields, trailing_comma, .. } => {
|
||||
if let Some(ty) = ty {
|
||||
self.fmt_paren(ty, f, unary)?;
|
||||
}
|
||||
f.write_str(".{")?;
|
||||
self.fmt_list(
|
||||
f,
|
||||
trailing_comma,
|
||||
"}",
|
||||
",",
|
||||
fields,
|
||||
|s: &mut Self, CtorField { name, value, .. }: &_, f| {
|
||||
f.write_str(name)?;
|
||||
if !matches!(value, &Expr::Ident { id, .. } if *name == &self.source[id.range()]) {
|
||||
f.write_str(": ")?;
|
||||
s.fmt(value, f)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
}
|
||||
Expr::Tupl {
|
||||
pos,
|
||||
ty: Some(&Expr::Slice { pos: spos, size: Some(&Expr::Number { value, .. }), item }),
|
||||
fields,
|
||||
trailing_comma,
|
||||
} if value as usize == fields.len() => self.fmt(
|
||||
&Expr::Tupl {
|
||||
pos,
|
||||
ty: Some(&Expr::Slice { pos: spos, size: None, item }),
|
||||
fields,
|
||||
trailing_comma,
|
||||
},
|
||||
f,
|
||||
),
|
||||
Expr::Tupl { ty, fields, trailing_comma, .. } => {
|
||||
if let Some(ty) = ty {
|
||||
self.fmt_paren(ty, f, unary)?;
|
||||
}
|
||||
f.write_str(".(")?;
|
||||
self.fmt_list(f, trailing_comma, ")", ",", fields, Self::fmt)
|
||||
}
|
||||
Expr::Slice { item, size, .. } => {
|
||||
f.write_str("[")?;
|
||||
self.fmt(item, f)?;
|
||||
if let Some(size) = size {
|
||||
f.write_str("; ")?;
|
||||
self.fmt(size, f)?;
|
||||
}
|
||||
f.write_str("]")
|
||||
}
|
||||
Expr::Index { base, index } => {
|
||||
self.fmt(base, f)?;
|
||||
f.write_str("[")?;
|
||||
self.fmt(index, f)?;
|
||||
f.write_str("]")
|
||||
}
|
||||
Expr::UnOp { op, val, .. } => {
|
||||
f.write_str(op.name())?;
|
||||
self.fmt_paren(val, f, unary)
|
||||
}
|
||||
Expr::Break { .. } => f.write_str("break"),
|
||||
Expr::Continue { .. } => f.write_str("continue"),
|
||||
Expr::If { cond, then, else_, .. } => {
|
||||
f.write_str("if ")?;
|
||||
self.fmt(cond, f)?;
|
||||
f.write_str(" ")?;
|
||||
self.fmt_paren(then, f, consecutive)?;
|
||||
if let Some(e) = else_ {
|
||||
f.write_str(" else ")?;
|
||||
self.fmt(e, f)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Expr::Match { value, branches, .. } => {
|
||||
f.write_str("match ")?;
|
||||
self.fmt(value, f)?;
|
||||
f.write_str(" {")?;
|
||||
self.fmt_list(f, true, "}", ",", branches, |s, br, f| {
|
||||
s.fmt(&br.pat, f)?;
|
||||
f.write_str(" => ")?;
|
||||
s.fmt(&br.body, f)
|
||||
})
|
||||
}
|
||||
Expr::Loop { body, .. } => {
|
||||
f.write_str("loop ")?;
|
||||
self.fmt(body, f)
|
||||
}
|
||||
Expr::Closure { ret, body, args, .. } => {
|
||||
f.write_str("fn(")?;
|
||||
self.fmt_list(f, false, "", ",", args, |s, arg, f| {
|
||||
if arg.is_ct {
|
||||
f.write_str("$")?;
|
||||
}
|
||||
f.write_str(arg.name)?;
|
||||
f.write_str(": ")?;
|
||||
s.fmt(&arg.ty, f)
|
||||
})?;
|
||||
f.write_str("): ")?;
|
||||
self.fmt(ret, f)?;
|
||||
f.write_str(" ")?;
|
||||
self.fmt_paren(body, f, consecutive)?;
|
||||
Ok(())
|
||||
}
|
||||
Expr::Call { func, args, trailing_comma } => {
|
||||
self.fmt_paren(func, f, postfix)?;
|
||||
f.write_str("(")?;
|
||||
self.fmt_list(f, trailing_comma, ")", ",", args, Self::fmt)
|
||||
}
|
||||
Expr::Return { val: Some(val), .. } => {
|
||||
f.write_str("return ")?;
|
||||
self.fmt(val, f)
|
||||
}
|
||||
Expr::Return { val: None, .. } => f.write_str("return"),
|
||||
Expr::Wildcard { .. } => f.write_str("_"),
|
||||
Expr::Ident { pos, is_ct, .. } => {
|
||||
if is_ct {
|
||||
f.write_str("$")?;
|
||||
}
|
||||
f.write_str(&self.source[Lexer::restore(self.source, pos).eat().range()])
|
||||
}
|
||||
Expr::Block { stmts, .. } => {
|
||||
f.write_str("{")?;
|
||||
self.fmt_list(f, true, "}", "", stmts, Self::fmt)
|
||||
}
|
||||
Expr::Number { value, radix, .. } => {
|
||||
f.write_str(match radix {
|
||||
Radix::Decimal => "",
|
||||
Radix::Hex => "0x",
|
||||
Radix::Octal => "0o",
|
||||
Radix::Binary => "0b",
|
||||
})?;
|
||||
let mut buf = [0u8; 64];
|
||||
f.write_str(display_radix(radix, value as u64, &mut buf))
|
||||
}
|
||||
Expr::Float { pos, .. } => {
|
||||
f.write_str(&self.source[Lexer::restore(self.source, pos).eat().range()])
|
||||
}
|
||||
Expr::Bool { value, .. } => f.write_str(if value { "true" } else { "false" }),
|
||||
Expr::Idk { .. } => f.write_str("idk"),
|
||||
Expr::Die { .. } => f.write_str("die"),
|
||||
Expr::Null { .. } => f.write_str("null"),
|
||||
Expr::BinOp {
|
||||
left,
|
||||
op: TokenKind::Assign,
|
||||
right: &Expr::BinOp { left: lleft, op, right, .. },
|
||||
..
|
||||
} if left.pos() == lleft.pos() => {
|
||||
self.fmt(left, f)?;
|
||||
f.write_str(" ")?;
|
||||
f.write_str(op.name())?;
|
||||
f.write_str("= ")?;
|
||||
self.fmt(right, f)
|
||||
}
|
||||
Expr::BinOp { right, op, left, .. } => {
|
||||
let prec_miss_left = |e: &Expr| {
|
||||
matches!(
|
||||
e, Expr::BinOp { op: lop, .. } if op.precedence() > lop.precedence()
|
||||
)
|
||||
};
|
||||
let prec_miss_right = |e: &Expr| {
|
||||
matches!(
|
||||
e, Expr::BinOp { op: lop, .. }
|
||||
if (op.precedence() == lop.precedence() && !op.is_comutative())
|
||||
|| op.precedence() > lop.precedence()
|
||||
)
|
||||
};
|
||||
|
||||
self.fmt_paren(left, f, prec_miss_left)?;
|
||||
if let Some(mut prev) = self.source.get(..right.pos() as usize) {
|
||||
prev = prev.trim_end();
|
||||
let estimate_bound =
|
||||
prev.rfind(|c: char| c.is_ascii_whitespace()).map_or(prev.len(), |i| i + 1);
|
||||
let exact_bound = lexer::Lexer::new(&prev[estimate_bound..]).last().start;
|
||||
prev = &prev[..exact_bound as usize + estimate_bound];
|
||||
if preserve_newlines(prev) > 0 {
|
||||
f.write_str("\n")?;
|
||||
for _ in 0..self.depth + 1 {
|
||||
f.write_str("\t")?;
|
||||
}
|
||||
f.write_str(op.name())?;
|
||||
f.write_str(" ")?;
|
||||
} else {
|
||||
f.write_str(" ")?;
|
||||
f.write_str(op.name())?;
|
||||
f.write_str(" ")?;
|
||||
}
|
||||
} else {
|
||||
f.write_str(" ")?;
|
||||
f.write_str(op.name())?;
|
||||
f.write_str(" ")?;
|
||||
}
|
||||
self.fmt_paren(right, f, prec_miss_right)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn preserve_newlines(source: &str) -> usize {
|
||||
source[source.trim_end().len()..].bytes().filter(|&c| c == b'\n').count()
|
||||
}
|
||||
|
||||
pub fn insert_needed_semicolon(source: &str) -> bool {
|
||||
let kind = lexer::Lexer::new(source).eat().kind;
|
||||
kind.precedence().is_some() || matches!(kind, TokenKind::Ctor | TokenKind::Tupl)
|
||||
}
|
||||
|
||||
impl core::fmt::Display for parser::Ast {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt_file(self.exprs(), &self.file, f)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fmt_file(exprs: &[Expr], file: &str, f: &mut impl fmt::Write) -> fmt::Result {
|
||||
for (i, expr) in exprs.iter().enumerate() {
|
||||
Formatter::new(file).fmt(expr, f)?;
|
||||
if let Some(expr) = exprs.get(i + 1)
|
||||
&& let Some(rest) = file.get(expr.pos() as usize..)
|
||||
{
|
||||
if insert_needed_semicolon(rest) {
|
||||
write!(f, ";")?;
|
||||
}
|
||||
|
||||
if preserve_newlines(&file[..expr.pos() as usize]) > 1 {
|
||||
writeln!(f)?;
|
||||
}
|
||||
}
|
||||
|
||||
if i + 1 != exprs.len() {
|
||||
writeln!(f)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub mod test {
|
||||
use {
|
||||
crate::parser::{self, Ctx},
|
||||
alloc::borrow::ToOwned,
|
||||
std::{fmt::Write, string::String},
|
||||
};
|
||||
|
||||
pub fn format(ident: &str, input: &str) {
|
||||
let mut minned = input.to_owned();
|
||||
let len = crate::fmt::minify(&mut minned);
|
||||
minned.truncate(len);
|
||||
|
||||
let mut ctx = Ctx::default();
|
||||
let ast = parser::Ast::new(ident, minned, &mut ctx, &mut parser::no_loader);
|
||||
let mut output = String::new();
|
||||
write!(output, "{ast}").unwrap();
|
||||
|
||||
let input_path = format!("formatter_{ident}.expected");
|
||||
let output_path = format!("formatter_{ident}.actual");
|
||||
std::fs::write(&input_path, input).unwrap();
|
||||
std::fs::write(&output_path, output).unwrap();
|
||||
|
||||
let success = std::process::Command::new("diff")
|
||||
.arg("-u")
|
||||
.arg("--color")
|
||||
.arg(&input_path)
|
||||
.arg(&output_path)
|
||||
.status()
|
||||
.unwrap()
|
||||
.success();
|
||||
std::fs::remove_file(&input_path).unwrap();
|
||||
std::fs::remove_file(&output_path).unwrap();
|
||||
assert!(success, "test failed");
|
||||
}
|
||||
|
||||
macro_rules! test {
|
||||
($($name:ident => $input:expr;)*) => {$(
|
||||
#[test]
|
||||
fn $name() {
|
||||
format(stringify!($name), $input);
|
||||
}
|
||||
)*};
|
||||
}
|
||||
|
||||
test! {
|
||||
comments => "// comment\n// comment\n\n// comment\n\n\
|
||||
/* comment */\n/* comment */\n\n/* comment */";
|
||||
some_ordinary_code => "loft := fn(): int return loft(1, 2, 3)";
|
||||
some_arg_per_line_code => "loft := fn(): int return loft(\
|
||||
\n\t1,\n\t2,\n\t3,\n)";
|
||||
some_ordinary_struct => "loft := fn(): int return loft.{a: 1, b: 2}";
|
||||
some_ordinary_fild_per_lin_struct => "loft := fn(): int return loft.{\
|
||||
\n\ta: 1,\n\tb: 2,\n}";
|
||||
code_block => "loft := fn(): int {\n\tloft()\n\treturn 1\n}";
|
||||
}
|
||||
}
|
lang/src/fs.rs (new file)
@ -0,0 +1,384 @@
|
|||
use {
|
||||
crate::{
|
||||
parser::{Ast, Ctx, FileKind},
|
||||
son::{self, hbvm::HbvmBackend},
|
||||
ty, FnvBuildHasher,
|
||||
},
|
||||
alloc::{string::String, vec::Vec},
|
||||
core::{fmt::Write, num::NonZeroUsize, ops::Deref},
|
||||
hashbrown::hash_map,
|
||||
std::{
|
||||
collections::VecDeque,
|
||||
eprintln,
|
||||
ffi::OsStr,
|
||||
io::{self, Write as _},
|
||||
path::{Path, PathBuf},
|
||||
string::ToString,
|
||||
sync::Mutex,
|
||||
},
|
||||
};
|
||||
|
||||
type HashMap<K, V> = hashbrown::HashMap<K, V, FnvBuildHasher>;
|
||||
|
||||
pub struct Logger;
|
||||
|
||||
impl log::Log for Logger {
|
||||
fn enabled(&self, _: &log::Metadata) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn log(&self, record: &log::Record) {
|
||||
if self.enabled(record.metadata()) {
|
||||
eprintln!("{}", record.args())
|
||||
}
|
||||
}
|
||||
|
||||
fn flush(&self) {}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Options {
|
||||
pub fmt: bool,
|
||||
pub fmt_stdout: bool,
|
||||
pub dump_asm: bool,
|
||||
pub in_house_regalloc: bool,
|
||||
pub extra_threads: usize,
|
||||
}
|
||||
|
||||
impl Options {
|
||||
pub fn from_args(args: &[&str], out: &mut Vec<u8>) -> std::io::Result<Self> {
|
||||
if args.contains(&"--help") || args.contains(&"-h") {
|
||||
writeln!(out, "Usage: hbc [OPTIONS...] <FILE>")?;
|
||||
writeln!(out, include_str!("../command-help.txt"))?;
|
||||
return Err(std::io::ErrorKind::Other.into());
|
||||
}
|
||||
|
||||
Ok(Options {
|
||||
fmt: args.contains(&"--fmt"),
|
||||
fmt_stdout: args.contains(&"--fmt-stdout"),
|
||||
dump_asm: args.contains(&"--dump-asm"),
|
||||
in_house_regalloc: args.contains(&"--in-house-regalloc"),
|
||||
extra_threads: args
|
||||
.iter()
|
||||
.position(|&a| a == "--threads")
|
||||
.map(|i| {
|
||||
args[i + 1].parse::<NonZeroUsize>().map_err(|e| {
|
||||
writeln!(out, "--threads expects non zero integer: {e}")
|
||||
.err()
|
||||
.unwrap_or(std::io::ErrorKind::Other.into())
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.map_or(1, NonZeroUsize::get)
|
||||
- 1,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_compiler(
|
||||
root_file: &str,
|
||||
options: Options,
|
||||
out: &mut Vec<u8>,
|
||||
warnings: &mut String,
|
||||
) -> std::io::Result<()> {
|
||||
let parsed = parse_from_fs(options.extra_threads, root_file)?;
|
||||
|
||||
if (options.fmt || options.fmt_stdout) && !parsed.errors.is_empty() {
|
||||
*out = parsed.errors.into_bytes();
|
||||
return Err(std::io::Error::other("fmt fialed (errors are in out)"));
|
||||
}
|
||||
|
||||
if options.fmt {
|
||||
let mut output = String::new();
|
||||
for ast in parsed.ast {
|
||||
write!(output, "{ast}").unwrap();
|
||||
if ast.file.deref().trim() != output.as_str().trim() {
|
||||
std::fs::write(&*ast.path, &output)?;
|
||||
}
|
||||
output.clear();
|
||||
}
|
||||
} else if options.fmt_stdout {
|
||||
write!(out, "{}", &parsed.ast[0])?;
|
||||
} else {
|
||||
let mut backend = HbvmBackend::default();
|
||||
backend.use_in_house_regalloc = options.in_house_regalloc;
|
||||
|
||||
let mut ctx = crate::son::CodegenCtx::default();
|
||||
*ctx.parser.errors.get_mut() = parsed.errors;
|
||||
let mut codegen = son::Codegen::new(&mut backend, &parsed.ast, &mut ctx);
|
||||
codegen.push_embeds(parsed.embeds);
|
||||
codegen.generate(ty::Module::MAIN);
|
||||
|
||||
*warnings = core::mem::take(&mut *codegen.warnings.borrow_mut());
|
||||
|
||||
if !codegen.errors.borrow().is_empty() {
|
||||
drop(codegen);
|
||||
*out = ctx.parser.errors.into_inner().into_bytes();
|
||||
return Err(std::io::Error::other("compilation faoled (errors are in out)"));
|
||||
}
|
||||
|
||||
codegen.assemble(out);
|
||||
|
||||
if options.dump_asm {
|
||||
let mut disasm = String::new();
|
||||
codegen.disasm(&mut disasm, out).map_err(|e| io::Error::other(e.to_string()))?;
|
||||
*out = disasm.into_bytes();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct TaskQueue<T> {
|
||||
inner: Mutex<TaskQueueInner<T>>,
|
||||
}
|
||||
|
||||
impl<T> TaskQueue<T> {
|
||||
fn new(max_waiters: usize) -> Self {
|
||||
Self { inner: Mutex::new(TaskQueueInner::new(max_waiters)) }
|
||||
}
|
||||
|
||||
pub fn push(&self, message: T) {
|
||||
self.extend([message]);
|
||||
}
|
||||
|
||||
pub fn extend(&self, messages: impl IntoIterator<Item = T>) {
|
||||
self.inner.lock().unwrap().push(messages);
|
||||
}
|
||||
|
||||
pub fn pop(&self) -> Option<T> {
|
||||
TaskQueueInner::pop(&self.inner)
|
||||
}
|
||||
}
|
||||
|
||||
enum TaskSlot<T> {
|
||||
Waiting,
|
||||
Delivered(T),
|
||||
Closed,
|
||||
}
|
||||
|
||||
struct TaskQueueInner<T> {
|
||||
max_waiters: usize,
|
||||
messages: VecDeque<T>,
|
||||
parked: VecDeque<(*mut TaskSlot<T>, std::thread::Thread)>,
|
||||
}
|
||||
|
||||
unsafe impl<T: Send> Send for TaskQueueInner<T> {}
|
||||
unsafe impl<T: Send + Sync> Sync for TaskQueueInner<T> {}
|
||||
|
||||
impl<T> TaskQueueInner<T> {
|
||||
fn new(max_waiters: usize) -> Self {
|
||||
Self { max_waiters, messages: Default::default(), parked: Default::default() }
|
||||
}
|
||||
|
||||
fn push(&mut self, messages: impl IntoIterator<Item = T>) {
|
||||
for msg in messages {
|
||||
if let Some((dest, thread)) = self.parked.pop_front() {
|
||||
unsafe { *dest = TaskSlot::Delivered(msg) };
|
||||
thread.unpark();
|
||||
} else {
|
||||
self.messages.push_back(msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn pop(s: &Mutex<Self>) -> Option<T> {
|
||||
let mut res = TaskSlot::Waiting;
|
||||
{
|
||||
let mut s = s.lock().unwrap();
|
||||
if let Some(msg) = s.messages.pop_front() {
|
||||
return Some(msg);
|
||||
}
|
||||
|
||||
if s.max_waiters == s.parked.len() + 1 {
|
||||
for (dest, thread) in s.parked.drain(..) {
|
||||
unsafe { *dest = TaskSlot::Closed };
|
||||
thread.unpark();
|
||||
}
|
||||
return None;
|
||||
}
|
||||
|
||||
s.parked.push_back((&mut res, std::thread::current()));
|
||||
}
|
||||
|
||||
loop {
|
||||
std::thread::park();
|
||||
|
||||
let _s = s.lock().unwrap();
|
||||
match core::mem::replace(&mut res, TaskSlot::Waiting) {
|
||||
TaskSlot::Delivered(msg) => return Some(msg),
|
||||
TaskSlot::Closed => return None,
|
||||
TaskSlot::Waiting => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Loaded {
|
||||
ast: Vec<Ast>,
|
||||
embeds: Vec<Vec<u8>>,
|
||||
errors: String,
|
||||
}
|
||||
|
||||
pub fn parse_from_fs(extra_threads: usize, root: &str) -> io::Result<Loaded> {
|
||||
fn resolve(path: &str, from: &str, tmp: &mut PathBuf) -> Result<PathBuf, CantLoadFile> {
|
||||
tmp.clear();
|
||||
match Path::new(from).parent() {
|
||||
Some(parent) => tmp.extend([parent, Path::new(path)]),
|
||||
None => tmp.push(path),
|
||||
};
|
||||
|
||||
tmp.canonicalize().map_err(|source| CantLoadFile { path: std::mem::take(tmp), source })
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct CantLoadFile {
|
||||
path: PathBuf,
|
||||
source: io::Error,
|
||||
}
|
||||
|
||||
impl core::fmt::Display for CantLoadFile {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
|
||||
write!(f, "can't load file: {}", display_rel_path(&self.path),)
|
||||
}
|
||||
}
|
||||
|
||||
impl core::error::Error for CantLoadFile {
|
||||
fn source(&self) -> Option<&(dyn core::error::Error + 'static)> {
|
||||
Some(&self.source)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CantLoadFile> for io::Error {
|
||||
fn from(e: CantLoadFile) -> Self {
|
||||
io::Error::new(io::ErrorKind::InvalidData, e)
|
||||
}
|
||||
}
|
||||
|
||||
type Task = (usize, PathBuf);
|
||||
|
||||
let seen_modules = Mutex::new(HashMap::<PathBuf, usize>::default());
|
||||
let seen_embeds = Mutex::new(HashMap::<PathBuf, usize>::default());
|
||||
let tasks = TaskQueue::<Task>::new(extra_threads + 1);
|
||||
let ast = Mutex::new(Vec::<io::Result<Ast>>::new());
|
||||
let embeds = Mutex::new(Vec::<Vec<u8>>::new());
|
||||
|
||||
let loader = |path: &str, from: &str, kind: FileKind, tmp: &mut _| {
|
||||
let mut physiscal_path = resolve(path, from, tmp)?;
|
||||
|
||||
match kind {
|
||||
FileKind::Module => {
|
||||
let id = {
|
||||
let mut seen = seen_modules.lock().unwrap();
|
||||
let len = seen.len();
|
||||
match seen.entry(physiscal_path) {
|
||||
hash_map::Entry::Occupied(entry) => {
|
||||
return Ok(*entry.get());
|
||||
}
|
||||
hash_map::Entry::Vacant(entry) => {
|
||||
physiscal_path = entry.insert_entry(len as _).key().clone();
|
||||
len
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if !physiscal_path.exists() {
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::NotFound,
|
||||
format!("can't find file: {}", display_rel_path(&physiscal_path)),
|
||||
));
|
||||
}
|
||||
|
||||
tasks.push((id, physiscal_path));
|
||||
Ok(id)
|
||||
}
|
||||
FileKind::Embed => {
|
||||
let id = {
|
||||
let mut seen = seen_embeds.lock().unwrap();
|
||||
let len = seen.len();
|
||||
match seen.entry(physiscal_path) {
|
||||
hash_map::Entry::Occupied(entry) => {
|
||||
return Ok(*entry.get());
|
||||
}
|
||||
hash_map::Entry::Vacant(entry) => {
|
||||
physiscal_path = entry.insert_entry(len as _).key().clone();
|
||||
len
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let content = std::fs::read(&physiscal_path).map_err(|e| {
|
||||
io::Error::new(
|
||||
e.kind(),
|
||||
format!(
|
||||
"can't load embed file: {}: {e}",
|
||||
display_rel_path(&physiscal_path)
|
||||
),
|
||||
)
|
||||
})?;
|
||||
let mut embeds = embeds.lock().unwrap();
|
||||
if id as usize >= embeds.len() {
|
||||
embeds.resize(id as usize + 1, Default::default());
|
||||
}
|
||||
embeds[id as usize] = content;
|
||||
Ok(id)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let execute_task = |ctx: &mut _, (_, path): Task, tmp: &mut _| {
|
||||
let path = path.to_str().ok_or_else(|| {
|
||||
io::Error::new(
|
||||
io::ErrorKind::InvalidData,
|
||||
format!("path contains invalid characters: {}", display_rel_path(&path)),
|
||||
)
|
||||
})?;
|
||||
Ok(Ast::new(path, std::fs::read_to_string(path)?, ctx, &mut |path, from, kind| {
|
||||
loader(path, from, kind, tmp).map_err(|e| e.to_string())
|
||||
}))
|
||||
};
|
||||
|
||||
let thread = || {
|
||||
let mut ctx = Ctx::default();
|
||||
let mut tmp = PathBuf::new();
|
||||
while let Some(task @ (indx, ..)) = tasks.pop() {
|
||||
let res = execute_task(&mut ctx, task, &mut tmp);
|
||||
let mut ast = ast.lock().unwrap();
|
||||
let len = ast.len().max(indx + 1);
|
||||
ast.resize_with(len, || Err(io::ErrorKind::InvalidData.into()));
|
||||
ast[indx] = res;
|
||||
}
|
||||
ctx.errors.into_inner()
|
||||
};
|
||||
|
||||
let path = Path::new(root).canonicalize().map_err(|e| {
|
||||
io::Error::new(e.kind(), format!("can't canonicalize root file path ({root})"))
|
||||
})?;
|
||||
seen_modules.lock().unwrap().insert(path.clone(), 0);
|
||||
tasks.push((0, path));
|
||||
|
||||
let errors = if extra_threads == 0 {
|
||||
thread()
|
||||
} else {
|
||||
std::thread::scope(|s| {
|
||||
(0..extra_threads + 1)
|
||||
.map(|_| s.spawn(thread))
|
||||
.collect::<Vec<_>>()
|
||||
.into_iter()
|
||||
.map(|t| t.join().unwrap())
|
||||
.collect::<String>()
|
||||
})
|
||||
};
|
||||
|
||||
Ok(Loaded {
|
||||
ast: ast.into_inner().unwrap().into_iter().collect::<io::Result<Vec<_>>>()?,
|
||||
embeds: embeds.into_inner().unwrap(),
|
||||
errors,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn display_rel_path(path: &(impl AsRef<OsStr> + ?Sized)) -> std::path::Display {
|
||||
static CWD: std::sync::LazyLock<PathBuf> =
|
||||
std::sync::LazyLock::new(|| std::env::current_dir().unwrap_or_default());
|
||||
std::path::Path::new(path).strip_prefix(&*CWD).unwrap_or(std::path::Path::new(path)).display()
|
||||
}
|
lang/src/fuzz.rs (new file)
@ -0,0 +1,141 @@
|
|||
use {
|
||||
crate::{
|
||||
lexer::TokenKind,
|
||||
parser,
|
||||
son::{hbvm::HbvmBackend, Codegen, CodegenCtx},
|
||||
ty::Module,
|
||||
},
|
||||
alloc::string::String,
|
||||
core::{fmt::Write, hash::BuildHasher, ops::Range},
|
||||
};
|
||||
|
||||
#[derive(Default)]
|
||||
struct Rand(pub u64);
|
||||
|
||||
impl Rand {
|
||||
pub fn next(&mut self) -> u64 {
|
||||
self.0 = crate::FnvBuildHasher::default().hash_one(self.0);
|
||||
self.0
|
||||
}
|
||||
|
||||
pub fn range(&mut self, min: u64, max: u64) -> u64 {
|
||||
self.next() % (max - min) + min
|
||||
}
|
||||
|
||||
fn bool(&mut self) -> bool {
|
||||
self.next() % 2 == 0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct FuncGen {
|
||||
rand: Rand,
|
||||
buf: String,
|
||||
vars: u64,
|
||||
}
|
||||
|
||||
impl FuncGen {
|
||||
fn gen(&mut self, seed: u64) -> &str {
|
||||
self.rand = Rand(seed);
|
||||
self.buf.clear();
|
||||
self.buf.push_str("main := fn(): void ");
|
||||
self.block().unwrap();
|
||||
&self.buf
|
||||
}
|
||||
|
||||
fn block(&mut self) -> core::fmt::Result {
|
||||
let prev_vars = self.vars;
|
||||
self.buf.push('{');
|
||||
for _ in 0..self.rand.range(1, 10) {
|
||||
self.stmt()?;
|
||||
}
|
||||
self.buf.push('}');
|
||||
self.vars = prev_vars;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn stmt(&mut self) -> core::fmt::Result {
|
||||
match self.rand.range(0, 100) {
|
||||
0..4 => _ = self.block(),
|
||||
4..10 => {
|
||||
write!(self.buf, "var{} := ", self.vars)?;
|
||||
self.expr()?;
|
||||
self.vars += 1;
|
||||
}
|
||||
|
||||
10..20 if self.vars != 0 => {
|
||||
write!(self.buf, "var{} = ", self.rand.range(0, self.vars))?;
|
||||
self.expr()?;
|
||||
}
|
||||
20..23 => {
|
||||
self.buf.push_str("if ");
|
||||
self.expr()?;
|
||||
self.block()?;
|
||||
if self.rand.bool() {
|
||||
self.buf.push_str(" else ");
|
||||
self.block()?;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
self.buf.push_str("return ");
|
||||
self.expr()?;
|
||||
}
|
||||
}
|
||||
|
||||
self.buf.push(';');
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn expr(&mut self) -> core::fmt::Result {
|
||||
match self.rand.range(0, 100) {
|
||||
0..80 => {
|
||||
write!(self.buf, "{}", self.rand.next())
|
||||
}
|
||||
80..90 if self.vars != 0 => {
|
||||
write!(self.buf, "var{}", self.rand.range(0, self.vars))
|
||||
}
|
||||
80..100 => {
|
||||
self.expr()?;
|
||||
let ops = [
|
||||
TokenKind::Add,
|
||||
TokenKind::Sub,
|
||||
TokenKind::Mul,
|
||||
TokenKind::Div,
|
||||
TokenKind::Shl,
|
||||
TokenKind::Eq,
|
||||
TokenKind::Ne,
|
||||
TokenKind::Lt,
|
||||
TokenKind::Gt,
|
||||
TokenKind::Le,
|
||||
TokenKind::Ge,
|
||||
TokenKind::Band,
|
||||
TokenKind::Bor,
|
||||
TokenKind::Xor,
|
||||
TokenKind::Mod,
|
||||
TokenKind::Shr,
|
||||
];
|
||||
let op = ops[self.rand.range(0, ops.len() as u64) as usize];
|
||||
write!(self.buf, " {op} ")?;
|
||||
self.expr()
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fuzz(seed_range: Range<u64>) {
|
||||
let mut gen = FuncGen::default();
|
||||
let mut ctx = CodegenCtx::default();
|
||||
for i in seed_range {
|
||||
ctx.clear();
|
||||
let src = gen.gen(i);
|
||||
let parsed = parser::Ast::new("fuzz", src, &mut ctx.parser, &mut parser::no_loader);
|
||||
|
||||
assert!(ctx.parser.errors.get_mut().is_empty());
|
||||
|
||||
let mut backend = HbvmBackend::default();
|
||||
let mut cdg = Codegen::new(&mut backend, core::slice::from_ref(&parsed), &mut ctx);
|
||||
cdg.generate(Module::MAIN);
|
||||
}
|
||||
}
|
lang/src/fuzz_main.rs (new file)
@ -0,0 +1,3 @@
fn main() {
    hblang::fuzz::fuzz(0..1000000);
}
lang/src/lexer.rs (new file)
@ -0,0 +1,567 @@
|
|||
const fn ascii_mask(chars: &[u8]) -> u128 {
|
||||
let mut eq = 0;
|
||||
let mut i = 0;
|
||||
while i < chars.len() {
|
||||
let b = chars[i];
|
||||
eq |= 1 << b;
|
||||
i += 1;
|
||||
}
|
||||
eq
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub struct Token {
|
||||
pub kind: TokenKind,
|
||||
pub start: u32,
|
||||
pub end: u32,
|
||||
}
|
||||
|
||||
impl Token {
|
||||
pub fn range(&self) -> core::ops::Range<usize> {
|
||||
self.start as usize..self.end as usize
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! gen_token_kind {
|
||||
($(
|
||||
#[$atts:meta])*
|
||||
$vis:vis enum $name:ident {
|
||||
#[patterns] $(
|
||||
$pattern:ident,
|
||||
)*
|
||||
#[keywords] $(
|
||||
$keyword:ident = $keyword_lit:literal,
|
||||
)*
|
||||
#[punkt] $(
|
||||
$punkt:ident = $punkt_lit:literal,
|
||||
)*
|
||||
#[ops] $(
|
||||
#[$prec:ident] $(
|
||||
$op:ident = $op_lit:literal $(=> $assign:ident)?,
|
||||
)*
|
||||
)*
|
||||
}
|
||||
) => {
|
||||
impl core::fmt::Display for $name {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
|
||||
f.write_str(self.name())
|
||||
}
|
||||
}
|
||||
|
||||
impl $name {
|
||||
pub const OPS: &[Self] = &[$($(Self::$op),*),*];
|
||||
|
||||
pub fn name(&self) -> &str {
|
||||
let sf = unsafe { &*(self as *const _ as *const u8) };
|
||||
match *self {
|
||||
$( Self::$pattern => concat!('<', stringify!($pattern), '>'), )*
|
||||
$( Self::$keyword => stringify!($keyword_lit), )*
|
||||
$( Self::$punkt => stringify!($punkt_lit), )*
|
||||
$($( Self::$op => $op_lit,
|
||||
$(Self::$assign => concat!($op_lit, "="),)?)*)*
|
||||
_ => unsafe { core::str::from_utf8_unchecked(core::slice::from_ref(&sf)) },
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn precedence(&self) -> Option<u8> {
|
||||
Some(match self {
|
||||
$($(Self::$op => ${ignore($prec)} ${index(1)},
|
||||
$(Self::$assign => 0,)?)*)*
|
||||
_ => return None,
|
||||
} + 1)
|
||||
}
|
||||
|
||||
fn from_ident(ident: &[u8]) -> Self {
|
||||
match ident {
|
||||
$($keyword_lit => Self::$keyword,)*
|
||||
_ => Self::Ident,
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone, Copy, Hash, PartialOrd, Ord)]
|
||||
#[repr(u8)]
|
||||
pub enum TokenKind {
|
||||
Not = b'!',
|
||||
DQuote = b'"',
|
||||
Pound = b'#',
|
||||
CtIdent = b'$',
|
||||
Mod = b'%',
|
||||
Band = b'&',
|
||||
Quote = b'\'',
|
||||
LParen = b'(',
|
||||
RParen = b')',
|
||||
Mul = b'*',
|
||||
Add = b'+',
|
||||
Comma = b',',
|
||||
Sub = b'-',
|
||||
Dot = b'.',
|
||||
Div = b'/',
|
||||
// Unused = 2-6
|
||||
Shl = b'<' - 5,
|
||||
// Unused = 8
|
||||
Shr = b'>' - 5,
|
||||
Colon = b':',
|
||||
Semi = b';',
|
||||
Lt = b'<',
|
||||
Assign = b'=',
|
||||
Gt = b'>',
|
||||
Que = b'?',
|
||||
Directive = b'@',
|
||||
|
||||
Comment,
|
||||
|
||||
Ident,
|
||||
Number,
|
||||
Float,
|
||||
Eof,
|
||||
|
||||
Ct,
|
||||
|
||||
Ctor,
|
||||
Tupl,
|
||||
TArrow,
|
||||
|
||||
Or,
|
||||
And,
|
||||
|
||||
// Unused = R-Z
|
||||
LBrack = b'[',
|
||||
BSlash = b'\\',
|
||||
RBrack = b']',
|
||||
Xor = b'^',
|
||||
Under = b'_',
|
||||
Tick = b'`',
|
||||
|
||||
Return,
|
||||
If,
|
||||
Match,
|
||||
Else,
|
||||
Loop,
|
||||
Break,
|
||||
Continue,
|
||||
Fn,
|
||||
Struct,
|
||||
Packed,
|
||||
Enum,
|
||||
True,
|
||||
False,
|
||||
Null,
|
||||
Idk,
|
||||
Die,
|
||||
|
||||
// Unused = a-z
|
||||
LBrace = b'{',
|
||||
Bor = b'|',
|
||||
RBrace = b'}',
|
||||
Tilde = b'~',
|
||||
|
||||
Decl = b':' + 128,
|
||||
Eq = b'=' + 128,
|
||||
Ne = b'!' + 128,
|
||||
Le = b'<' + 128,
|
||||
Ge = b'>' + 128,
|
||||
|
||||
BorAss = b'|' + 128,
|
||||
AddAss = b'+' + 128,
|
||||
SubAss = b'-' + 128,
|
||||
MulAss = b'*' + 128,
|
||||
DivAss = b'/' + 128,
|
||||
ModAss = b'%' + 128,
|
||||
XorAss = b'^' + 128,
|
||||
BandAss = b'&' + 128,
|
||||
ShrAss = b'>' - 5 + 128,
|
||||
ShlAss = b'<' - 5 + 128,
|
||||
}
|
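The discriminant layout above is deliberate: every compound-assignment operator is its base operator plus 128, and `Shl`/`Shr` are squeezed into otherwise unused ASCII slots as `b'<' - 5` / `b'>' - 5`. That is what lets `ass_op` below recover the base operator with a `saturating_sub(128)` and a transmute. A hypothetical sanity check of the invariant (not part of the commit):

    #[cfg(test)]
    #[test]
    fn assign_op_encoding() {
        assert_eq!(TokenKind::AddAss as u8, TokenKind::Add as u8 + 128);
        assert_eq!(TokenKind::AddAss.ass_op(), Some(TokenKind::Add));
        assert_eq!(TokenKind::ShlAss.ass_op(), Some(TokenKind::Shl));
        assert_eq!(TokenKind::Lt.ass_op(), None); // comparisons have no `<op>=` form
    }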
||||
|
||||
impl core::fmt::Debug for TokenKind {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
core::fmt::Display::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl TokenKind {
|
||||
pub fn ass_op(self) -> Option<Self> {
|
||||
let id = (self as u8).saturating_sub(128);
|
||||
if ascii_mask(b"|+-*/%^&79") & (1u128 << id) == 0 {
|
||||
return None;
|
||||
}
|
||||
Some(unsafe { core::mem::transmute::<u8, Self>(id) })
|
||||
}
|
||||
|
||||
pub fn is_comutative(self) -> bool {
|
||||
use TokenKind as S;
|
||||
matches!(self, S::Eq | S::Ne | S::Bor | S::Xor | S::Band | S::Add | S::Mul)
|
||||
}
|
||||
|
||||
pub fn is_compatison(self) -> bool {
|
||||
matches!(self, Self::Lt | Self::Gt | Self::Ge | Self::Le | Self::Ne | Self::Eq)
|
||||
}
|
||||
|
||||
pub fn is_supported_float_op(self) -> bool {
|
||||
matches!(
|
||||
self,
|
||||
Self::Add
|
||||
| Self::Sub
|
||||
| Self::Mul
|
||||
| Self::Div
|
||||
| Self::Eq
|
||||
| Self::Ne
|
||||
| Self::Le
|
||||
| Self::Ge
|
||||
| Self::Lt
|
||||
| Self::Gt
|
||||
)
|
||||
}
|
||||
|
||||
pub fn apply_binop(self, a: i64, b: i64, float: bool) -> i64 {
|
||||
if float {
|
||||
debug_assert!(self.is_supported_float_op());
|
||||
let [a, b] = [f64::from_bits(a as _), f64::from_bits(b as _)];
|
||||
let res = match self {
|
||||
Self::Add => a + b,
|
||||
Self::Sub => a - b,
|
||||
Self::Mul => a * b,
|
||||
Self::Div => a / b,
|
||||
Self::Eq => return (a == b) as i64,
|
||||
Self::Ne => return (a != b) as i64,
|
||||
Self::Lt => return (a < b) as i64,
|
||||
Self::Gt => return (a > b) as i64,
|
||||
Self::Le => return (a >= b) as i64,
|
||||
Self::Ge => return (a <= b) as i64,
|
||||
_ => todo!("floating point op: {self}"),
|
||||
};
|
||||
|
||||
return res.to_bits() as _;
|
||||
}
|
||||
|
||||
match self {
|
||||
Self::Add => a.wrapping_add(b),
|
||||
Self::Sub => a.wrapping_sub(b),
|
||||
Self::Mul => a.wrapping_mul(b),
|
||||
Self::Div if b == 0 => 0,
|
||||
Self::Div => a.wrapping_div(b),
|
||||
Self::Shl => a.wrapping_shl(b as _),
|
||||
Self::Eq => (a == b) as i64,
|
||||
Self::Ne => (a != b) as i64,
|
||||
Self::Lt => (a < b) as i64,
|
||||
Self::Gt => (a > b) as i64,
|
||||
Self::Le => (a >= b) as i64,
|
||||
Self::Ge => (a <= b) as i64,
|
||||
Self::Band => a & b,
|
||||
Self::Bor => a | b,
|
||||
Self::Xor => a ^ b,
|
||||
Self::Mod if b == 0 => 0,
|
||||
Self::Mod => a.wrapping_rem(b),
|
||||
Self::Shr => a.wrapping_shr(b as _),
|
||||
s => todo!("{s}"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_homogenous(&self) -> bool {
|
||||
self.precedence() != Self::Eq.precedence()
|
||||
&& self.precedence() != Self::Gt.precedence()
|
||||
&& self.precedence() != Self::Eof.precedence()
|
||||
}
|
||||
|
||||
pub fn apply_unop(&self, value: i64, float: bool) -> i64 {
|
||||
match self {
|
||||
Self::Sub if float => (-f64::from_bits(value as _)).to_bits() as _,
|
||||
Self::Sub => value.wrapping_neg(),
|
||||
Self::Not => (value == 0) as _,
|
||||
Self::Float if float => value,
|
||||
Self::Float => (value as f64).to_bits() as _,
|
||||
Self::Number if float => f64::from_bits(value as _) as _,
|
||||
Self::Number => value,
|
||||
s => todo!("{s}"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn closing(&self) -> Option<TokenKind> {
|
||||
Some(match self {
|
||||
Self::Ctor => Self::RBrace,
|
||||
Self::Tupl => Self::RParen,
|
||||
Self::LParen => Self::RParen,
|
||||
Self::LBrack => Self::RBrack,
|
||||
Self::LBrace => Self::RBrace,
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
}
|
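`apply_binop` above is the compile-time constant folder: integer arithmetic wraps, division and modulo by zero fold to 0 instead of trapping, and float operands travel as `f64` bit patterns inside an `i64`. A few illustrative assertions (hypothetical, not from the test suite):

    #[cfg(test)]
    #[test]
    fn constant_folding_rules() {
        assert_eq!(TokenKind::Add.apply_binop(i64::MAX, 1, false), i64::MIN); // wrapping add
        assert_eq!(TokenKind::Div.apply_binop(7, 0, false), 0); // division by zero folds to 0
        let two = 2.0f64.to_bits() as i64;
        let four = TokenKind::Mul.apply_binop(two, two, true);
        assert_eq!(f64::from_bits(four as u64), 4.0);
    }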
||||
|
||||
gen_token_kind! {
|
||||
pub enum TokenKind {
|
||||
#[patterns]
|
||||
CtIdent,
|
||||
Ident,
|
||||
Number,
|
||||
Float,
|
||||
Eof,
|
||||
Directive,
|
||||
#[keywords]
|
||||
Return = b"return",
|
||||
If = b"if",
|
||||
Match = b"match",
|
||||
Else = b"else",
|
||||
Loop = b"loop",
|
||||
Break = b"break",
|
||||
Continue = b"continue",
|
||||
Fn = b"fn",
|
||||
Struct = b"struct",
|
||||
Packed = b"packed",
|
||||
Enum = b"enum",
|
||||
True = b"true",
|
||||
False = b"false",
|
||||
Null = b"null",
|
||||
Idk = b"idk",
|
||||
Die = b"die",
|
||||
Under = b"_",
|
||||
#[punkt]
|
||||
Ctor = ".{",
|
||||
Tupl = ".(",
|
||||
TArrow = "=>",
|
||||
// #define OP: each `#[prec]` delimits a level of precedence from lowest to highest
|
||||
#[ops]
|
||||
#[prec]
|
||||
// this also includes all `<op>=` tokens
|
||||
Decl = ":=",
|
||||
Assign = "=",
|
||||
#[prec]
|
||||
Or = "||",
|
||||
#[prec]
|
||||
And = "&&",
|
||||
#[prec]
|
||||
Bor = "|" => BorAss,
|
||||
#[prec]
|
||||
Xor = "^" => XorAss,
|
||||
#[prec]
|
||||
Band = "&" => BandAss,
|
||||
#[prec]
|
||||
Eq = "==",
|
||||
Ne = "!=",
|
||||
#[prec]
|
||||
Le = "<=",
|
||||
Ge = ">=",
|
||||
Lt = "<",
|
||||
Gt = ">",
|
||||
#[prec]
|
||||
Shl = "<<" => ShlAss,
|
||||
Shr = ">>" => ShrAss,
|
||||
#[prec]
|
||||
Add = "+" => AddAss,
|
||||
Sub = "-" => SubAss,
|
||||
#[prec]
|
||||
Mul = "*" => MulAss,
|
||||
Div = "/" => DivAss,
|
||||
Mod = "%" => ModAss,
|
||||
}
|
||||
}
|
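Given the expansion above, `precedence()` is the 1-based index of the operator's `#[prec]` group, every `<op>=` form collapses to the lowest (assignment) level, and non-operators return `None`. Illustrative values, assuming the group order as written (hypothetical test, not part of the commit):

    #[cfg(test)]
    #[test]
    fn precedence_levels() {
        assert_eq!(TokenKind::Decl.precedence(), Some(1));
        assert_eq!(TokenKind::AddAss.precedence(), Some(1)); // compound assignments bind loosest
        assert_eq!(TokenKind::Or.precedence(), Some(2));
        assert_eq!(TokenKind::Add.precedence(), Some(10));
        assert_eq!(TokenKind::Mul.precedence(), Some(11));
        assert_eq!(TokenKind::LParen.precedence(), None);
    }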
||||
|
||||
pub struct Lexer<'a> {
|
||||
pos: u32,
|
||||
source: &'a [u8],
|
||||
}
|
||||
|
||||
impl<'a> Lexer<'a> {
|
||||
pub fn new(input: &'a str) -> Self {
|
||||
Self::restore(input, 0)
|
||||
}
|
||||
|
||||
pub fn uses(input: &'a str) -> impl Iterator<Item = &'a str> {
|
||||
let mut s = Self::new(input);
|
||||
core::iter::from_fn(move || loop {
|
||||
let t = s.eat();
|
||||
if t.kind == TokenKind::Eof {
|
||||
return None;
|
||||
}
|
||||
if t.kind == TokenKind::Directive
|
||||
&& s.slice(t.range()) == "use"
|
||||
&& s.eat().kind == TokenKind::LParen
|
||||
{
|
||||
let t = s.eat();
|
||||
if t.kind == TokenKind::DQuote {
|
||||
return Some(&s.slice(t.range())[1..t.range().len() - 1]);
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn restore(input: &'a str, pos: u32) -> Self {
|
||||
Self { pos, source: input.as_bytes() }
|
||||
}
|
||||
|
||||
pub fn source(&self) -> &'a str {
|
||||
unsafe { core::str::from_utf8_unchecked(self.source) }
|
||||
}
|
||||
|
||||
pub fn slice(&self, tok: core::ops::Range<usize>) -> &'a str {
|
||||
unsafe { core::str::from_utf8_unchecked(&self.source[tok]) }
|
||||
}
|
||||
|
||||
fn peek(&self) -> Option<u8> {
|
||||
if core::intrinsics::unlikely(self.pos >= self.source.len() as u32) {
|
||||
None
|
||||
} else {
|
||||
Some(unsafe { *self.source.get_unchecked(self.pos as usize) })
|
||||
}
|
||||
}
|
||||
|
||||
fn advance(&mut self) -> Option<u8> {
|
||||
let c = self.peek()?;
|
||||
self.pos += 1;
|
||||
Some(c)
|
||||
}
|
||||
|
||||
pub fn last(&mut self) -> Token {
|
||||
let mut token = self.eat();
|
||||
loop {
|
||||
let next = self.eat();
|
||||
if next.kind == TokenKind::Eof {
|
||||
break;
|
||||
}
|
||||
token = next;
|
||||
}
|
||||
token
|
||||
}
|
||||
|
||||
pub fn eat(&mut self) -> Token {
|
||||
use TokenKind as T;
|
||||
loop {
|
||||
let mut start = self.pos;
|
||||
|
||||
let Some(c) = self.advance() else {
|
||||
return Token { kind: T::Eof, start, end: self.pos };
|
||||
};
|
||||
|
||||
let advance_ident = |s: &mut Self| {
|
||||
while let Some(b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_' | 127..) = s.peek() {
|
||||
s.advance();
|
||||
}
|
||||
};
|
||||
|
||||
let identity = |s: u8| unsafe { core::mem::transmute::<u8, T>(s) };
|
||||
|
||||
let kind = match c {
|
||||
..=b' ' => continue,
|
||||
b'0' if self.advance_if(b'x') => {
|
||||
while let Some(b'0'..=b'9' | b'A'..=b'F' | b'a'..=b'f') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
T::Number
|
||||
}
|
||||
b'0' if self.advance_if(b'b') => {
|
||||
while let Some(b'0' | b'1') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
T::Number
|
||||
}
|
||||
b'0' if self.advance_if(b'o') => {
|
||||
while let Some(b'0'..=b'7') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
T::Number
|
||||
}
|
||||
b'0'..=b'9' => {
|
||||
while let Some(b'0'..=b'9') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
|
||||
if self.advance_if(b'.') {
|
||||
while let Some(b'0'..=b'9') = self.peek() {
|
||||
self.advance();
|
||||
}
|
||||
T::Float
|
||||
} else {
|
||||
T::Number
|
||||
}
|
||||
}
|
||||
b'a'..=b'z' | b'A'..=b'Z' | b'_' | 127.. => {
|
||||
advance_ident(self);
|
||||
let ident = &self.source[start as usize..self.pos as usize];
|
||||
T::from_ident(ident)
|
||||
}
|
||||
b'"' | b'\'' => loop {
|
||||
match self.advance() {
|
||||
Some(b'\\') => _ = self.advance(),
|
||||
Some(nc) if nc == c => break identity(c),
|
||||
Some(_) => {}
|
||||
None => break T::Eof,
|
||||
}
|
||||
},
|
||||
b'/' if self.advance_if(b'/') => {
|
||||
while let Some(l) = self.peek()
|
||||
&& l != b'\n'
|
||||
{
|
||||
self.pos += 1;
|
||||
}
|
||||
|
||||
let end = self.source[..self.pos as usize]
|
||||
.iter()
|
||||
.rposition(|&b| !b.is_ascii_whitespace())
|
||||
.map_or(self.pos, |i| i as u32 + 1);
|
||||
|
||||
return Token { kind: T::Comment, start, end };
|
||||
}
|
||||
b'/' if self.advance_if(b'*') => {
|
||||
let mut depth = 1;
|
||||
while let Some(l) = self.advance() {
|
||||
match l {
|
||||
b'/' if self.advance_if(b'*') => depth += 1,
|
||||
b'*' if self.advance_if(b'/') => match depth {
|
||||
1 => break,
|
||||
_ => depth -= 1,
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
T::Comment
|
||||
}
|
||||
b'.' if self.advance_if(b'{') => T::Ctor,
|
||||
b'.' if self.advance_if(b'(') => T::Tupl,
|
||||
b'=' if self.advance_if(b'>') => T::TArrow,
|
||||
b'&' if self.advance_if(b'&') => T::And,
|
||||
b'|' if self.advance_if(b'|') => T::Or,
|
||||
b'$' if self.advance_if(b':') => T::Ct,
|
||||
b'@' | b'$' => {
|
||||
start += 1;
|
||||
advance_ident(self);
|
||||
identity(c)
|
||||
}
|
||||
b'<' | b'>' if self.advance_if(c) => {
|
||||
identity(c - 5 + 128 * self.advance_if(b'=') as u8)
|
||||
}
|
||||
b':' | b'=' | b'!' | b'<' | b'>' | b'|' | b'+' | b'-' | b'*' | b'/' | b'%'
|
||||
| b'^' | b'&'
|
||||
if self.advance_if(b'=') =>
|
||||
{
|
||||
identity(c + 128)
|
||||
}
|
||||
_ => identity(c),
|
||||
};
|
||||
|
||||
return Token { kind, start, end: self.pos };
|
||||
}
|
||||
}
|
||||
|
||||
fn advance_if(&mut self, arg: u8) -> bool {
|
||||
if self.peek() == Some(arg) {
|
||||
self.advance();
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn line_col(bytes: &[u8], pos: u32) -> (usize, usize) {
|
||||
bytes[..pos as usize]
|
||||
.split(|&b| b == b'\n')
|
||||
.map(<[u8]>::len)
|
||||
.enumerate()
|
||||
.last()
|
||||
.map(|(line, col)| (line + 1, col + 1))
|
||||
.unwrap_or((1, 1))
|
||||
}
|
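`line_col` turns a byte offset into a 1-based (line, column) pair by measuring the newline-separated segments before `pos`. For instance (illustrative check):

    #[cfg(test)]
    #[test]
    fn line_col_example() {
        assert_eq!(line_col(b"ab\ncd", 4), (2, 2)); // points at the `d` on the second line
    }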
1551
lang/src/lib.rs
Normal file
File diff suppressed because it is too large
31
lang/src/main.rs
Normal file
|
@ -0,0 +1,31 @@
|
|||
#[cfg(feature = "std")]
|
||||
fn main() {
|
||||
use std::io::Write;
|
||||
|
||||
fn run(out: &mut Vec<u8>, warnings: &mut String) -> std::io::Result<()> {
|
||||
let args = std::env::args().collect::<Vec<_>>();
|
||||
let args = args.iter().map(String::as_str).collect::<Vec<_>>();
|
||||
|
||||
let opts = hblang::Options::from_args(&args, out)?;
|
||||
let file = args.iter().filter(|a| !a.starts_with('-')).nth(1).copied().unwrap_or("main.hb");
|
||||
|
||||
hblang::run_compiler(file, opts, out, warnings)
|
||||
}
|
||||
|
||||
log::set_logger(&hblang::fs::Logger).unwrap();
|
||||
log::set_max_level(log::LevelFilter::Error);
|
||||
|
||||
let mut out = Vec::new();
|
||||
let mut warnings = String::new();
|
||||
match run(&mut out, &mut warnings) {
|
||||
Ok(_) => {
|
||||
std::io::stderr().write_all(warnings.as_bytes()).unwrap();
|
||||
std::io::stdout().write_all(&out).unwrap()
|
||||
}
|
||||
Err(_) => {
|
||||
std::io::stderr().write_all(warnings.as_bytes()).unwrap();
|
||||
std::io::stderr().write_all(&out).unwrap();
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
1625
lang/src/parser.rs
Normal file
File diff suppressed because it is too large
5576
lang/src/son.rs
Normal file
File diff suppressed because it is too large
1172
lang/src/son/hbvm.rs
Normal file
File diff suppressed because it is too large
805
lang/src/son/hbvm/regalloc.rs
Normal file
|
@ -0,0 +1,805 @@
|
|||
use {
|
||||
super::{HbvmBackend, Nid, Nodes},
|
||||
crate::{
|
||||
parser,
|
||||
reg::{self, Reg},
|
||||
son::{debug_assert_matches, Kind, ARG_START, MEM, VOID},
|
||||
ty::{self, Arg, Loc},
|
||||
utils::BitSet,
|
||||
PLoc, Sig, Types,
|
||||
},
|
||||
alloc::{borrow::ToOwned, vec::Vec},
|
||||
core::{mem, ops::Range},
|
||||
hbbytecode::{self as instrs},
|
||||
};
|
||||
|
||||
impl HbvmBackend {
|
||||
pub(super) fn emit_body_code(
|
||||
&mut self,
|
||||
nodes: &Nodes,
|
||||
sig: Sig,
|
||||
tys: &Types,
|
||||
files: &[parser::Ast],
|
||||
) -> (usize, bool) {
|
||||
let tail = Function::build(nodes, tys, &mut self.ralloc, sig);
|
||||
nodes.basic_blocks();
|
||||
|
||||
let strip_load = |value| match nodes[value].kind {
|
||||
Kind::Load { .. } if nodes[value].ty.loc(tys) == Loc::Stack => nodes[value].inputs[1],
|
||||
_ => value,
|
||||
};
|
||||
|
||||
let mut res = mem::take(&mut self.ralloc);
|
||||
|
||||
Regalloc::run(nodes, &mut res);
|
||||
|
||||
'_open_function: {
|
||||
self.emit(instrs::addi64(reg::STACK_PTR, reg::STACK_PTR, 0));
|
||||
self.emit(instrs::st(reg::RET_ADDR + tail as u8, reg::STACK_PTR, 0, 0));
|
||||
}
|
||||
|
||||
if let Some(PLoc::Ref(..)) = tys.parama(sig.ret).0 {
|
||||
res.node_to_reg[MEM as usize] = res.bundles.len() as u8 + 1;
|
||||
res.bundles.push(Bundle::new(0));
|
||||
}
|
||||
|
||||
let reg_offset = if tail { reg::RET + 12 } else { reg::RET_ADDR + 1 };
|
||||
let bundle_count = res.bundles.len() + (reg_offset as usize);
|
||||
|
||||
res.node_to_reg.iter_mut().filter(|r| **r != 0).for_each(|r| {
|
||||
if *r == u8::MAX {
|
||||
*r = 0
|
||||
} else {
|
||||
*r += reg_offset - 1;
|
||||
if tail && *r >= reg::RET_ADDR {
|
||||
*r += 1;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
debug_assert!(!res
|
||||
.node_to_reg
|
||||
.iter()
|
||||
.any(|&a| a == reg::RET_ADDR || (reg::RET..reg_offset - 1).contains(&a)));
|
||||
|
||||
let atr = |allc: Nid| {
|
||||
let allc = strip_load(allc);
|
||||
debug_assert_eq!(
|
||||
nodes[allc].lock_rc.get(),
|
||||
0,
|
||||
"{:?} {}",
|
||||
nodes[allc],
|
||||
ty::Display::new(tys, files, nodes[allc].ty)
|
||||
);
|
||||
res.node_to_reg[allc as usize]
|
||||
};
|
||||
|
||||
let (retl, mut parama) = tys.parama(sig.ret);
|
||||
let mut typs = sig.args.args();
|
||||
let mut args = nodes[VOID].outputs[ARG_START..].iter();
|
||||
while let Some(aty) = typs.next(tys) {
|
||||
let Arg::Value(ty) = aty else { continue };
|
||||
let Some(loc) = parama.next(ty, tys) else { continue };
|
||||
let &arg = args.next().unwrap();
|
||||
let (rg, size) = match loc {
|
||||
PLoc::WideReg(rg, size) => (rg, size),
|
||||
PLoc::Reg(rg, size) if ty.loc(tys) == Loc::Stack => (rg, size),
|
||||
PLoc::Reg(r, ..) | PLoc::Ref(r, ..) => {
|
||||
self.emit_cp(atr(arg), r);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
self.emit(instrs::st(rg, reg::STACK_PTR, self.offsets[arg as usize] as _, size));
|
||||
if nodes.is_unlocked(arg) {
|
||||
self.emit(instrs::addi64(rg, reg::STACK_PTR, self.offsets[arg as usize] as _));
|
||||
}
|
||||
self.emit_cp(atr(arg), rg);
|
||||
}
|
||||
|
||||
let mut alloc_buf = vec![];
|
||||
for (i, block) in res.blocks.iter().enumerate() {
|
||||
self.offsets[block.entry as usize] = self.code.len() as _;
|
||||
for &nid in &res.instrs[block.range()] {
|
||||
if nid == VOID {
|
||||
continue;
|
||||
}
|
||||
|
||||
let node = &nodes[nid];
|
||||
alloc_buf.clear();
|
||||
|
||||
let atr = |allc: Nid| {
|
||||
let allc = strip_load(allc);
|
||||
debug_assert_eq!(
|
||||
nodes[allc].lock_rc.get(),
|
||||
0,
|
||||
"{:?} {}",
|
||||
nodes[allc],
|
||||
ty::Display::new(tys, files, nodes[allc].ty)
|
||||
);
|
||||
#[cfg(debug_assertions)]
|
||||
debug_assert!(
|
||||
res.marked.contains(&(allc, nid))
|
||||
|| nid == allc
|
||||
|| nodes.is_hard_zero(allc)
|
||||
|| allc == MEM
|
||||
|| matches!(node.kind, Kind::Loop | Kind::Region),
|
||||
"{nid} {:?}\n{allc} {:?}",
|
||||
nodes[nid],
|
||||
nodes[allc]
|
||||
);
|
||||
res.node_to_reg[allc as usize]
|
||||
};
|
||||
|
||||
let mut is_next_block = false;
|
||||
match node.kind {
|
||||
Kind::If => {
|
||||
let &[_, cnd] = node.inputs.as_slice() else { unreachable!() };
|
||||
if nodes.cond_op(cnd).is_some() {
|
||||
let &[_, lh, rh] = nodes[cnd].inputs.as_slice() else { unreachable!() };
|
||||
alloc_buf.extend([atr(lh), atr(rh)]);
|
||||
} else {
|
||||
alloc_buf.push(atr(cnd));
|
||||
}
|
||||
}
|
||||
Kind::Loop | Kind::Region => {
|
||||
let index = node
|
||||
.inputs
|
||||
.iter()
|
||||
.position(|&n| block.entry == nodes.idom_of(n))
|
||||
.unwrap()
|
||||
+ 1;
|
||||
|
||||
let mut moves = vec![];
|
||||
for &out in node.outputs.iter() {
|
||||
if nodes[out].is_data_phi() {
|
||||
let src = nodes[out].inputs[index];
|
||||
if atr(out) != atr(src) {
|
||||
moves.push([atr(out), atr(src), 0]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
debug_assert_eq!(moves.len(), {
|
||||
moves.sort_unstable();
|
||||
moves.dedup();
|
||||
moves.len()
|
||||
});
|
||||
|
||||
moves.sort_unstable_by(|[aa, ab, _], [ba, bb, _]| {
|
||||
if aa == bb && ab == ba {
|
||||
core::cmp::Ordering::Equal
|
||||
} else if aa == bb {
|
||||
core::cmp::Ordering::Greater
|
||||
} else {
|
||||
core::cmp::Ordering::Less
|
||||
}
|
||||
});
|
||||
|
||||
moves.dedup_by(|[aa, ab, _], [ba, bb, kind]| {
|
||||
if aa == bb && ab == ba {
|
||||
*kind = 1;
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
|
||||
for [dst, src, kind] in moves {
|
||||
if kind == 0 {
|
||||
self.emit(instrs::cp(dst, src));
|
||||
} else {
|
||||
self.emit(instrs::swa(dst, src));
|
||||
}
|
||||
}
|
||||
is_next_block = res.backrefs[nid as usize] as usize == i + 1;
|
||||
}
|
||||
Kind::Return { .. } => {
|
||||
let &[_, ret, ..] = node.inputs.as_slice() else { unreachable!() };
|
||||
match retl {
|
||||
Some(PLoc::Reg(r, _)) if sig.ret.loc(tys) == Loc::Reg => {
|
||||
alloc_buf.push(atr(ret));
|
||||
self.emit(instrs::cp(r, atr(ret)));
|
||||
}
|
||||
Some(PLoc::Ref(..)) => alloc_buf.extend([atr(ret), atr(MEM)]),
|
||||
Some(_) => alloc_buf.push(atr(ret)),
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
Kind::Die => {}
|
||||
Kind::CInt { .. } => alloc_buf.push(atr(nid)),
|
||||
Kind::UnOp { .. } => alloc_buf.extend([atr(nid), atr(node.inputs[1])]),
|
||||
Kind::BinOp { op } => {
|
||||
let &[.., lhs, rhs] = node.inputs.as_slice() else { unreachable!() };
|
||||
|
||||
if let Kind::CInt { .. } = nodes[rhs].kind
|
||||
&& nodes.is_locked(rhs)
|
||||
&& op.imm_binop(node.ty).is_some()
|
||||
{
|
||||
alloc_buf.extend([atr(nid), atr(lhs)]);
|
||||
} else {
|
||||
alloc_buf.extend([atr(nid), atr(lhs), atr(rhs)]);
|
||||
}
|
||||
}
|
||||
Kind::Call { args, .. } => {
|
||||
let (ret, mut parama) = tys.parama(node.ty);
|
||||
if ret.is_some() {
|
||||
alloc_buf.push(atr(nid));
|
||||
}
|
||||
let mut args = args.args();
|
||||
let mut allocs = node.inputs[1..].iter();
|
||||
while let Some(arg) = args.next(tys) {
|
||||
let Arg::Value(ty) = arg else { continue };
|
||||
let Some(loc) = parama.next(ty, tys) else { continue };
|
||||
|
||||
let arg = *allocs.next().unwrap();
|
||||
alloc_buf.push(atr(arg));
|
||||
match loc {
|
||||
PLoc::Reg(..) if ty.loc(tys) == Loc::Stack => {}
|
||||
PLoc::WideReg(..) => alloc_buf.push(0),
|
||||
PLoc::Reg(r, ..) | PLoc::Ref(r, ..) => {
|
||||
self.emit(instrs::cp(r, atr(arg)))
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
if node.ty.loc(tys) == Loc::Stack {
|
||||
alloc_buf.push(atr(*node.inputs.last().unwrap()));
|
||||
}
|
||||
|
||||
if let Some(PLoc::Ref(r, ..)) = ret {
|
||||
self.emit(instrs::cp(r, *alloc_buf.last().unwrap()))
|
||||
}
|
||||
}
|
||||
Kind::Stck | Kind::Global { .. } => alloc_buf.push(atr(nid)),
|
||||
Kind::Load => {
|
||||
let (region, _) = nodes.strip_offset(node.inputs[1], node.ty, tys);
|
||||
if node.ty.loc(tys) != Loc::Stack {
|
||||
alloc_buf.push(atr(nid));
|
||||
match nodes[region].kind {
|
||||
Kind::Stck => {}
|
||||
_ => alloc_buf.push(atr(region)),
|
||||
}
|
||||
}
|
||||
}
|
||||
Kind::Stre if node.inputs[1] == VOID => {}
|
||||
Kind::Stre => {
|
||||
let (region, _) = nodes.strip_offset(node.inputs[2], node.ty, tys);
|
||||
match nodes[region].kind {
|
||||
Kind::Stck if node.ty.loc(tys) == Loc::Reg => {
|
||||
alloc_buf.push(atr(node.inputs[1]))
|
||||
}
|
||||
_ => alloc_buf.extend([atr(region), atr(node.inputs[1])]),
|
||||
}
|
||||
}
|
||||
Kind::Mem => {
|
||||
self.emit(instrs::cp(atr(MEM), reg::RET));
|
||||
continue;
|
||||
}
|
||||
Kind::Arg => {
|
||||
continue;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self.emit_instr(super::InstrCtx {
|
||||
nid,
|
||||
sig,
|
||||
is_next_block,
|
||||
is_last_block: i == res.blocks.len() - 1,
|
||||
retl,
|
||||
allocs: &alloc_buf,
|
||||
nodes,
|
||||
tys,
|
||||
files,
|
||||
});
|
||||
|
||||
if let Kind::Call { .. } = node.kind {
|
||||
let (ret, ..) = tys.parama(node.ty);
|
||||
|
||||
match ret {
|
||||
Some(PLoc::WideReg(..)) => {}
|
||||
Some(PLoc::Reg(..)) if node.ty.loc(tys) == Loc::Stack => {}
|
||||
Some(PLoc::Reg(r, ..)) => self.emit_cp(atr(nid), r),
|
||||
None | Some(PLoc::Ref(..)) => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.ralloc = res;
|
||||
|
||||
debug_assert!(bundle_count < reg::STACK_PTR as usize, "TODO: spill memory");
|
||||
debug_assert_eq!(
|
||||
self.ralloc
|
||||
.node_to_reg
|
||||
.iter()
|
||||
.filter(|&&r| r
|
||||
> (bundle_count as u8
|
||||
+ (tail && bundle_count > (reg::RET_ADDR) as usize) as u8))
|
||||
.copied()
|
||||
.collect::<Vec<_>>(),
|
||||
vec![],
|
||||
"{bundle_count}"
|
||||
);
|
||||
(
|
||||
if tail {
|
||||
bundle_count.saturating_sub(reg::RET_ADDR as _)
|
||||
} else {
|
||||
self.ralloc.bundles.len()
|
||||
},
|
||||
tail,
|
||||
)
|
||||
}
|
||||
|
||||
fn emit_cp(&mut self, dst: Reg, src: Reg) {
|
||||
if dst != 0 {
|
||||
self.emit(instrs::cp(dst, src));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct Function<'a> {
|
||||
sig: Sig,
|
||||
tail: bool,
|
||||
nodes: &'a Nodes,
|
||||
tys: &'a Types,
|
||||
func: &'a mut Res,
|
||||
}
|
||||
|
||||
impl core::fmt::Debug for Function<'_> {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
for block in &self.func.blocks {
|
||||
writeln!(f, "{:?}", self.nodes[block.entry].kind)?;
|
||||
for &instr in &self.func.instrs[block.range()] {
|
||||
writeln!(f, "{:?}", self.nodes[instr].kind)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Function<'a> {
|
||||
fn build(nodes: &'a Nodes, tys: &'a Types, func: &'a mut Res, sig: Sig) -> bool {
|
||||
func.blocks.clear();
|
||||
func.instrs.clear();
|
||||
func.backrefs.resize(nodes.values.len(), u16::MAX);
|
||||
func.visited.clear(nodes.values.len());
|
||||
let mut s = Self { tail: true, nodes, tys, sig, func };
|
||||
s.emit_node(VOID);
|
||||
debug_assert!(s.func.blocks.array_chunks().all(|[a, b]| a.end == b.start));
|
||||
log::info!("{s:?}");
|
||||
s.tail
|
||||
}
|
||||
|
||||
fn add_block(&mut self, entry: Nid) {
|
||||
self.func.blocks.push(Block {
|
||||
start: self.func.instrs.len() as _,
|
||||
end: self.func.instrs.len() as _,
|
||||
entry,
|
||||
});
|
||||
self.func.backrefs[entry as usize] = self.func.blocks.len() as u16 - 1;
|
||||
}
|
||||
|
||||
fn close_block(&mut self, exit: Nid) {
|
||||
if !matches!(self.nodes[exit].kind, Kind::Loop | Kind::Region) {
|
||||
self.add_instr(exit);
|
||||
} else {
|
||||
self.func.instrs.push(exit);
|
||||
}
|
||||
let prev = self.func.blocks.last_mut().unwrap();
|
||||
prev.end = self.func.instrs.len() as _;
|
||||
}
|
||||
|
||||
fn add_instr(&mut self, nid: Nid) {
|
||||
debug_assert_ne!(self.nodes[nid].kind, Kind::Loop);
|
||||
self.func.backrefs[nid as usize] = self.func.instrs.len() as u16;
|
||||
self.func.instrs.push(nid);
|
||||
}
|
||||
|
||||
fn emit_node(&mut self, nid: Nid) {
|
||||
if matches!(self.nodes[nid].kind, Kind::Region | Kind::Loop) {
|
||||
match (self.nodes[nid].kind, self.func.visited.set(nid)) {
|
||||
(Kind::Loop, false) | (Kind::Region, true) => {
|
||||
self.close_block(nid);
|
||||
return;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
} else if !self.func.visited.set(nid) {
|
||||
return;
|
||||
}
|
||||
|
||||
if self.nodes.is_never_used(nid, self.tys) {
|
||||
self.nodes.lock(nid);
|
||||
return;
|
||||
}
|
||||
|
||||
let mut node = self.nodes[nid].clone();
|
||||
match node.kind {
|
||||
Kind::Start => {
|
||||
debug_assert_matches!(self.nodes[node.outputs[0]].kind, Kind::Entry);
|
||||
self.add_block(VOID);
|
||||
self.emit_node(node.outputs[0])
|
||||
}
|
||||
Kind::If => {
|
||||
let &[_, cnd] = node.inputs.as_slice() else { unreachable!() };
|
||||
let &[mut then, mut else_] = node.outputs.as_slice() else { unreachable!() };
|
||||
|
||||
if let Some((_, swapped)) = self.nodes.cond_op(cnd) {
|
||||
if swapped {
|
||||
mem::swap(&mut then, &mut else_);
|
||||
}
|
||||
} else {
|
||||
mem::swap(&mut then, &mut else_);
|
||||
}
|
||||
|
||||
self.close_block(nid);
|
||||
self.emit_node(then);
|
||||
self.emit_node(else_);
|
||||
}
|
||||
Kind::Region | Kind::Loop => {
|
||||
self.close_block(nid);
|
||||
self.add_block(nid);
|
||||
self.nodes.reschedule_block(nid, &mut node.outputs);
|
||||
for o in node.outputs.into_iter().rev() {
|
||||
self.emit_node(o);
|
||||
}
|
||||
}
|
||||
Kind::Return { .. } | Kind::Die => {
|
||||
self.close_block(nid);
|
||||
self.emit_node(node.outputs[0]);
|
||||
}
|
||||
Kind::Entry => {
|
||||
let (ret, mut parama) = self.tys.parama(self.sig.ret);
|
||||
|
||||
if let Some(PLoc::Ref(..)) = ret {
|
||||
self.add_instr(MEM);
|
||||
}
|
||||
|
||||
let mut typs = self.sig.args.args();
|
||||
#[expect(clippy::unnecessary_to_owned)]
|
||||
let mut args = self.nodes[VOID].outputs[ARG_START..].to_owned().into_iter();
|
||||
while let Some(ty) = typs.next_value(self.tys) {
|
||||
let arg = args.next().unwrap();
|
||||
debug_assert_eq!(self.nodes[arg].kind, Kind::Arg);
|
||||
match parama.next(ty, self.tys) {
|
||||
None => {}
|
||||
Some(_) => self.add_instr(arg),
|
||||
}
|
||||
}
|
||||
|
||||
self.nodes.reschedule_block(nid, &mut node.outputs);
|
||||
for o in node.outputs.into_iter().rev() {
|
||||
self.emit_node(o);
|
||||
}
|
||||
}
|
||||
Kind::Then | Kind::Else => {
|
||||
self.add_block(nid);
|
||||
self.nodes.reschedule_block(nid, &mut node.outputs);
|
||||
for o in node.outputs.into_iter().rev() {
|
||||
self.emit_node(o);
|
||||
}
|
||||
}
|
||||
Kind::Call { func, .. } => {
|
||||
self.tail &= func == ty::Func::ECA;
|
||||
|
||||
self.add_instr(nid);
|
||||
|
||||
self.nodes.reschedule_block(nid, &mut node.outputs);
|
||||
for o in node.outputs.into_iter().rev() {
|
||||
if self.nodes[o].inputs[0] == nid
|
||||
|| (matches!(self.nodes[o].kind, Kind::Loop | Kind::Region)
|
||||
&& self.nodes[o].inputs[1] == nid)
|
||||
{
|
||||
self.emit_node(o);
|
||||
}
|
||||
}
|
||||
}
|
||||
Kind::CInt { value: 0 } if self.nodes.is_hard_zero(nid) => {}
|
||||
Kind::CInt { .. }
|
||||
| Kind::BinOp { .. }
|
||||
| Kind::UnOp { .. }
|
||||
| Kind::Global { .. }
|
||||
| Kind::Load { .. }
|
||||
| Kind::Stre
|
||||
| Kind::Stck => self.add_instr(nid),
|
||||
Kind::End | Kind::Phi | Kind::Arg | Kind::Mem | Kind::Loops | Kind::Join => {}
|
||||
Kind::Assert { .. } => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Nodes {
|
||||
fn vreg_count(&self) -> usize {
|
||||
self.values.len()
|
||||
}
|
||||
|
||||
fn use_block_of(&self, inst: Nid, uinst: Nid) -> Nid {
|
||||
let mut block = self.use_block(inst, uinst, None);
|
||||
while !self[block].kind.starts_basic_block() {
|
||||
block = self.idom(block, None);
|
||||
}
|
||||
block
|
||||
}
|
||||
|
||||
fn phi_inputs_of(&self, nid: Nid) -> impl Iterator<Item = [Nid; 3]> + use<'_> {
|
||||
match self[nid].kind {
|
||||
Kind::Region | Kind::Loop => Some({
|
||||
self[nid]
|
||||
.outputs
|
||||
.as_slice()
|
||||
.iter()
|
||||
.filter(|&&n| self[n].is_data_phi())
|
||||
.map(|&n| [n, self[n].inputs[1], self[n].inputs[2]])
|
||||
})
|
||||
.into_iter()
|
||||
.flatten(),
|
||||
_ => None.into_iter().flatten(),
|
||||
}
|
||||
}
|
||||
|
||||
fn idom_of(&self, mut nid: Nid) -> Nid {
|
||||
while !self[nid].kind.starts_basic_block() {
|
||||
nid = self.idom(nid, None);
|
||||
}
|
||||
nid
|
||||
}
|
||||
|
||||
fn uses_of(&self, nid: Nid) -> impl Iterator<Item = (Nid, Nid)> + use<'_> {
|
||||
if self[nid].kind.is_cfg() && !matches!(self[nid].kind, Kind::Call { .. }) {
|
||||
return None.into_iter().flatten();
|
||||
}
|
||||
|
||||
Some(
|
||||
self[nid]
|
||||
.outputs
|
||||
.iter()
|
||||
.filter(move |&&n| self.is_data_dep(nid, n))
|
||||
.map(move |n| self.this_or_delegates(nid, n))
|
||||
.flat_map(|(p, ls)| ls.iter().map(move |l| (p, l)))
|
||||
.filter(|&(o, &n)| self.is_data_dep(o, n))
|
||||
.map(|(p, &n)| (self.use_block_of(p, n), n))
|
||||
.inspect(|&(_, n)| debug_assert_eq!(self[n].lock_rc.get(), 0)),
|
||||
)
|
||||
.into_iter()
|
||||
.flatten()
|
||||
}
|
||||
}
|
||||
|
||||
struct Regalloc<'a> {
|
||||
nodes: &'a Nodes,
|
||||
res: &'a mut Res,
|
||||
}
|
||||
|
||||
impl<'a> Regalloc<'a> {
|
||||
fn instr_of(&self, nid: Nid) -> Option<Nid> {
|
||||
if self.nodes[nid].kind == Kind::Phi || self.nodes.is_locked(nid) {
|
||||
return None;
|
||||
}
|
||||
debug_assert_ne!(self.res.backrefs[nid as usize], Nid::MAX, "{:?}", self.nodes[nid]);
|
||||
Some(self.res.backrefs[nid as usize])
|
||||
}
|
||||
|
||||
fn block_of(&self, nid: Nid) -> Nid {
|
||||
debug_assert!(self.nodes[nid].kind.starts_basic_block());
|
||||
self.res.backrefs[nid as usize]
|
||||
}
|
||||
|
||||
fn run(ctx: &'a Nodes, res: &'a mut Res) {
|
||||
Self { nodes: ctx, res }.run_low();
|
||||
}
|
||||
|
||||
fn run_low(&mut self) {
|
||||
self.res.bundles.clear();
|
||||
self.res.node_to_reg.clear();
|
||||
#[cfg(debug_assertions)]
|
||||
self.res.marked.clear();
|
||||
self.res.node_to_reg.resize(self.nodes.vreg_count(), 0);
|
||||
|
||||
debug_assert!(self.res.dfs_buf.is_empty());
|
||||
|
||||
let mut bundle = Bundle::new(self.res.instrs.len());
|
||||
self.res.visited.clear(self.nodes.values.len());
|
||||
|
||||
for i in (0..self.res.blocks.len()).rev() {
|
||||
for [a, rest @ ..] in self.nodes.phi_inputs_of(self.res.blocks[i].entry) {
|
||||
if self.res.visited.set(a) {
|
||||
self.append_bundle(a, &mut bundle, None);
|
||||
}
|
||||
|
||||
for r in rest {
|
||||
if !self.res.visited.set(r) {
|
||||
continue;
|
||||
}
|
||||
|
||||
self.append_bundle(
|
||||
r,
|
||||
&mut bundle,
|
||||
Some(self.res.node_to_reg[a as usize] as usize - 1),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let instrs = mem::take(&mut self.res.instrs);
|
||||
for &inst in &instrs {
|
||||
if self.nodes[inst].has_no_value() || self.res.visited.get(inst) || inst == 0 {
|
||||
continue;
|
||||
}
|
||||
self.append_bundle(inst, &mut bundle, None);
|
||||
}
|
||||
self.res.instrs = instrs;
|
||||
}
|
||||
|
||||
fn collect_bundle(&mut self, inst: Nid, into: &mut Bundle) {
|
||||
let dom = self.nodes.idom_of(inst);
|
||||
self.res.dfs_seem.clear(self.nodes.values.len());
|
||||
for (cursor, uinst) in self.nodes.uses_of(inst) {
|
||||
if !self.res.dfs_seem.set(uinst) {
|
||||
continue;
|
||||
}
|
||||
#[cfg(debug_assertions)]
|
||||
debug_assert!(self.res.marked.insert((inst, uinst)));
|
||||
|
||||
self.reverse_cfg_dfs(cursor, dom, |s, n, b| {
|
||||
let mut range = b.range();
|
||||
debug_assert!(range.start < range.end);
|
||||
range.start = range.start.max(s.instr_of(inst).map_or(0, |n| n + 1) as usize);
|
||||
debug_assert!(
|
||||
range.start < range.end,
|
||||
"{:?} {:?} {n} {inst}",
|
||||
range,
|
||||
self.nodes[inst]
|
||||
);
|
||||
let new = range.end.min(
|
||||
s.instr_of(uinst)
|
||||
.filter(|_| {
|
||||
n == cursor
|
||||
&& self.nodes.loop_depth(dom, None)
|
||||
== self.nodes.loop_depth(cursor, None)
|
||||
})
|
||||
.map_or(Nid::MAX, |n| n + 1) as usize,
|
||||
);
|
||||
|
||||
range.end = new;
|
||||
debug_assert!(range.start < range.end, "{:?} {inst} {uinst}", range);
|
||||
|
||||
into.add(range);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn append_bundle(&mut self, inst: Nid, tmp: &mut Bundle, prefered: Option<usize>) {
|
||||
self.collect_bundle(inst, tmp);
|
||||
|
||||
if tmp.is_empty() {
|
||||
self.res.node_to_reg[inst as usize] = u8::MAX;
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(prefered) = prefered
|
||||
&& !self.res.bundles[prefered].overlaps(tmp)
|
||||
{
|
||||
self.res.bundles[prefered].merge(tmp);
|
||||
tmp.clear();
|
||||
self.res.node_to_reg[inst as usize] = prefered as Reg + 1;
|
||||
return;
|
||||
}
|
||||
|
||||
match self.res.bundles.iter_mut().enumerate().find(|(_, b)| !b.overlaps(tmp)) {
|
||||
Some((i, other)) => {
|
||||
other.merge(tmp);
|
||||
tmp.clear();
|
||||
self.res.node_to_reg[inst as usize] = i as Reg + 1;
|
||||
}
|
||||
None => {
|
||||
self.res.bundles.push(tmp.take());
|
||||
self.res.node_to_reg[inst as usize] = self.res.bundles.len() as Reg;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn reverse_cfg_dfs(
|
||||
&mut self,
|
||||
from: Nid,
|
||||
until: Nid,
|
||||
mut each: impl FnMut(&mut Self, Nid, Block),
|
||||
) {
|
||||
debug_assert!(self.res.dfs_buf.is_empty());
|
||||
self.res.dfs_buf.push(from);
|
||||
|
||||
debug_assert!(self.nodes.dominates(until, from, None));
|
||||
|
||||
while let Some(nid) = self.res.dfs_buf.pop() {
|
||||
debug_assert!(
|
||||
self.nodes.dominates(until, nid, None),
|
||||
"{until} {:?}",
|
||||
self.nodes[until]
|
||||
);
|
||||
each(self, nid, self.res.blocks[self.block_of(nid) as usize]);
|
||||
if nid == until {
|
||||
continue;
|
||||
}
|
||||
match self.nodes[nid].kind {
|
||||
Kind::Then | Kind::Else | Kind::Region | Kind::Loop => {
|
||||
for &n in self.nodes[nid].inputs.iter() {
|
||||
if self.nodes[n].kind == Kind::Loops {
|
||||
continue;
|
||||
}
|
||||
let d = self.nodes.idom_of(n);
|
||||
if self.res.dfs_seem.set(d) {
|
||||
self.res.dfs_buf.push(d);
|
||||
}
|
||||
}
|
||||
}
|
||||
Kind::Start => {}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub(super) struct Res {
|
||||
blocks: Vec<Block>,
|
||||
instrs: Vec<Nid>,
|
||||
backrefs: Vec<u16>,
|
||||
|
||||
bundles: Vec<Bundle>,
|
||||
node_to_reg: Vec<Reg>,
|
||||
|
||||
visited: BitSet,
|
||||
dfs_buf: Vec<Nid>,
|
||||
dfs_seem: BitSet,
|
||||
#[cfg(debug_assertions)]
|
||||
marked: hashbrown::HashSet<(Nid, Nid), crate::FnvBuildHasher>,
|
||||
}
|
||||
|
||||
struct Bundle {
|
||||
taken: Vec<bool>,
|
||||
}
|
||||
|
||||
impl Bundle {
|
||||
fn new(size: usize) -> Self {
|
||||
Self { taken: vec![false; size] }
|
||||
}
|
||||
|
||||
fn add(&mut self, range: Range<usize>) {
|
||||
self.taken[range].fill(true);
|
||||
}
|
||||
|
||||
fn overlaps(&self, other: &Self) -> bool {
|
||||
self.taken.iter().zip(other.taken.iter()).any(|(a, b)| a & b)
|
||||
}
|
||||
|
||||
fn merge(&mut self, other: &Self) {
|
||||
debug_assert!(!self.overlaps(other));
|
||||
self.taken.iter_mut().zip(other.taken.iter()).for_each(|(a, b)| *a |= *b);
|
||||
}
|
||||
|
||||
fn clear(&mut self) {
|
||||
self.taken.fill(false);
|
||||
}
|
||||
|
||||
fn is_empty(&self) -> bool {
|
||||
!self.taken.contains(&true)
|
||||
}
|
||||
|
||||
fn take(&mut self) -> Self {
|
||||
mem::replace(self, Self::new(self.taken.len()))
|
||||
}
|
||||
}
|
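`Bundle` is the allocator's whole currency: a dense boolean map over the linearized instruction list in which a slot is `true` while some packed value is live there. `append_bundle` drops each node's live ranges into the first existing bundle they do not overlap (preferring the bundle of a phi partner), and the node's register is that bundle's index plus one, later shifted by `reg_offset` in `emit_body_code`. A toy illustration of the packing primitive (hypothetical test, not part of the commit):

    #[cfg(test)]
    #[test]
    fn disjoint_ranges_share_a_bundle() {
        let mut a = Bundle::new(8);
        let mut b = Bundle::new(8);
        a.add(0..3);
        b.add(5..8);
        assert!(!a.overlaps(&b)); // disjoint live ranges...
        a.merge(&b);              // ...can be packed into the same register bundle
        assert!(a.overlaps(&b));
    }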
||||
|
||||
#[derive(Clone, Copy)]
|
||||
struct Block {
|
||||
start: u16,
|
||||
end: u16,
|
||||
entry: Nid,
|
||||
}
|
||||
|
||||
impl Block {
|
||||
pub fn range(&self) -> Range<usize> {
|
||||
self.start as usize..self.end as usize
|
||||
}
|
||||
}
|
649
lang/src/utils.rs
Normal file
|
@ -0,0 +1,649 @@
|
|||
#![expect(dead_code)]
|
||||
use {
|
||||
alloc::alloc,
|
||||
core::{
|
||||
alloc::Layout,
|
||||
fmt::Debug,
|
||||
hint::unreachable_unchecked,
|
||||
marker::PhantomData,
|
||||
mem::MaybeUninit,
|
||||
ops::{Deref, DerefMut, Not},
|
||||
ptr::Unique,
|
||||
},
|
||||
};
|
||||
|
||||
fn decide(b: bool, name: &'static str) -> Result<(), &'static str> {
|
||||
b.then_some(()).ok_or(name)
|
||||
}
|
||||
|
||||
pub fn is_snake_case(str: &str) -> Result<(), &'static str> {
|
||||
decide(str.bytes().all(|c| matches!(c, b'a'..=b'z' | b'0'..=b'9' | b'_')), "snake_case")
|
||||
}
|
||||
|
||||
pub fn is_pascal_case(str: &str) -> Result<(), &'static str> {
|
||||
decide(
|
||||
str.as_bytes()[0].is_ascii_uppercase() && str.bytes().all(|c| c.is_ascii_alphanumeric()),
|
||||
"PascalCase",
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_screaming_case(str: &str) -> Result<(), &'static str> {
|
||||
decide(str.bytes().all(|c| matches!(c, b'A'..=b'Z' | b'0'..=b'9' | b'_')), "SCREAMING_CASE")
|
||||
}
|
||||
|
||||
type Nid = u16;
|
||||
|
||||
pub union BitSet {
|
||||
inline: usize,
|
||||
alloced: Unique<AllocedBitSet>,
|
||||
}
|
||||
|
||||
impl Debug for BitSet {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
f.debug_list().entries(self.iter()).finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for BitSet {
|
||||
fn clone(&self) -> Self {
|
||||
if self.is_inline() {
|
||||
Self { inline: unsafe { self.inline } }
|
||||
} else {
|
||||
let (data, _) = self.data_and_len();
|
||||
let (layout, _) = Self::layout(data.len());
|
||||
unsafe {
|
||||
let ptr = alloc::alloc(layout);
|
||||
ptr.copy_from_nonoverlapping(self.alloced.as_ptr() as _, layout.size());
|
||||
Self { alloced: Unique::new_unchecked(ptr as _) }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for BitSet {
|
||||
fn drop(&mut self) {
|
||||
if !self.is_inline() {
|
||||
unsafe {
|
||||
let cap = self.alloced.as_ref().cap;
|
||||
alloc::dealloc(self.alloced.as_ptr() as _, Self::layout(cap).0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for BitSet {
|
||||
fn default() -> Self {
|
||||
Self { inline: Self::FLAG }
|
||||
}
|
||||
}
|
||||
|
||||
impl BitSet {
|
||||
const FLAG: usize = 1 << (Self::UNIT - 1);
|
||||
const INLINE_ELEMS: usize = Self::UNIT - 1;
|
||||
const UNIT: usize = core::mem::size_of::<usize>() * 8;
|
||||
|
||||
pub fn with_capacity(len: usize) -> Self {
|
||||
let mut s = Self::default();
|
||||
s.reserve(len);
|
||||
s
|
||||
}
|
||||
|
||||
fn is_inline(&self) -> bool {
|
||||
unsafe { self.inline & Self::FLAG != 0 }
|
||||
}
|
||||
|
||||
fn data_and_len(&self) -> (&[usize], usize) {
|
||||
unsafe {
|
||||
if self.is_inline() {
|
||||
(core::slice::from_ref(&self.inline), Self::INLINE_ELEMS)
|
||||
} else {
|
||||
let small_vec = self.alloced.as_ref();
|
||||
(
|
||||
core::slice::from_raw_parts(
|
||||
&small_vec.data as *const _ as *const usize,
|
||||
small_vec.cap,
|
||||
),
|
||||
small_vec.cap * core::mem::size_of::<usize>() * 8,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn data_mut_and_len(&mut self) -> (&mut [usize], usize) {
|
||||
unsafe {
|
||||
if self.is_inline() {
|
||||
(core::slice::from_mut(&mut self.inline), Self::INLINE_ELEMS)
|
||||
} else {
|
||||
let small_vec = self.alloced.as_mut();
|
||||
(
|
||||
core::slice::from_raw_parts_mut(
|
||||
&mut small_vec.data as *mut _ as *mut usize,
|
||||
small_vec.cap,
|
||||
),
|
||||
small_vec.cap * Self::UNIT,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn indexes(index: usize) -> (usize, usize) {
|
||||
(index / Self::UNIT, index % Self::UNIT)
|
||||
}
|
||||
|
||||
pub fn get(&self, index: Nid) -> bool {
|
||||
let index = index as usize;
|
||||
let (data, len) = self.data_and_len();
|
||||
if index >= len {
|
||||
return false;
|
||||
}
|
||||
let (elem, bit) = Self::indexes(index);
|
||||
(unsafe { *data.get_unchecked(elem) }) & (1 << bit) != 0
|
||||
}
|
||||
|
||||
pub fn set(&mut self, index: Nid) -> bool {
|
||||
let index = index as usize;
|
||||
let (mut data, len) = self.data_mut_and_len();
|
||||
if core::intrinsics::unlikely(index >= len) {
|
||||
self.grow((index + 1).next_power_of_two().max(4 * Self::UNIT));
|
||||
(data, _) = self.data_mut_and_len();
|
||||
}
|
||||
|
||||
let (elem, bit) = Self::indexes(index);
|
||||
debug_assert!(elem < data.len(), "{} < {}", elem, data.len());
|
||||
let elem = unsafe { data.get_unchecked_mut(elem) };
|
||||
let prev = *elem;
|
||||
*elem |= 1 << bit;
|
||||
*elem != prev
|
||||
}
|
||||
|
||||
fn grow(&mut self, size: usize) {
|
||||
debug_assert!(size.is_power_of_two());
|
||||
let slot_count = size / Self::UNIT;
|
||||
let (layout, off) = Self::layout(slot_count);
|
||||
let (ptr, prev_len) = unsafe {
|
||||
if self.is_inline() {
|
||||
let ptr = alloc::alloc(layout);
|
||||
*ptr.add(off).cast::<usize>() = self.inline & !Self::FLAG;
|
||||
(ptr, 1)
|
||||
} else {
|
||||
let prev_len = self.alloced.as_ref().cap;
|
||||
let (prev_layout, _) = Self::layout(prev_len);
|
||||
(alloc::realloc(self.alloced.as_ptr() as _, prev_layout, layout.size()), prev_len)
|
||||
}
|
||||
};
|
||||
unsafe {
|
||||
MaybeUninit::fill(
|
||||
core::slice::from_raw_parts_mut(
|
||||
ptr.add(off).cast::<MaybeUninit<usize>>().add(prev_len),
|
||||
slot_count - prev_len,
|
||||
),
|
||||
0,
|
||||
);
|
||||
*ptr.cast::<usize>() = slot_count;
|
||||
core::ptr::write(self, Self { alloced: Unique::new_unchecked(ptr as _) });
|
||||
}
|
||||
}
|
||||
|
||||
fn layout(slot_count: usize) -> (core::alloc::Layout, usize) {
|
||||
unsafe {
|
||||
core::alloc::Layout::new::<AllocedBitSet>()
|
||||
.extend(Layout::array::<usize>(slot_count).unwrap_unchecked())
|
||||
.unwrap_unchecked()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> BitSetIter {
|
||||
if self.is_inline() {
|
||||
BitSetIter { index: 0, current: unsafe { self.inline & !Self::FLAG }, remining: &[] }
|
||||
} else {
|
||||
let &[current, ref remining @ ..] = self.data_and_len().0 else {
|
||||
unsafe { unreachable_unchecked() }
|
||||
};
|
||||
BitSetIter { index: 0, current, remining }
|
||||
}
|
||||
}
|
||||
|
||||
pub fn clear(&mut self, len: usize) {
|
||||
self.reserve(len);
|
||||
if self.is_inline() {
|
||||
unsafe { self.inline &= Self::FLAG };
|
||||
} else {
|
||||
self.data_mut_and_len().0.fill(0);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn units<'a>(&'a self, slot: &'a mut usize) -> &'a [usize] {
|
||||
if self.is_inline() {
|
||||
*slot = unsafe { self.inline } & !Self::FLAG;
|
||||
core::slice::from_ref(slot)
|
||||
} else {
|
||||
self.data_and_len().0
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reserve(&mut self, len: usize) {
|
||||
if len > self.data_and_len().1 {
|
||||
self.grow(len.next_power_of_two().max(4 * Self::UNIT));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn units_mut(&mut self) -> Result<&mut [usize], &mut InlineBitSetView> {
|
||||
if self.is_inline() {
|
||||
Err(unsafe {
|
||||
core::mem::transmute::<&mut usize, &mut InlineBitSetView>(&mut self.inline)
|
||||
})
|
||||
} else {
|
||||
Ok(self.data_mut_and_len().0)
|
||||
}
|
||||
}
|
||||
}
|
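In short, `BitSet` is a tagged union: while the top `FLAG` bit of the word is set, the remaining `UNIT - 1` bits are the set itself stored inline; once an index no longer fits, `grow` moves the bits behind a `Unique<AllocedBitSet>` whose leading word records the slot count. Telling the two states apart by reading the pointer as a `usize` relies on heap addresses never having the top bit set, which holds for typical user-space address spaces.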
||||
|
||||
pub struct InlineBitSetView(usize);
|
||||
|
||||
impl InlineBitSetView {
|
||||
pub(crate) fn add_mask(&mut self, tmp: usize) {
|
||||
debug_assert!(tmp & BitSet::FLAG == 0);
|
||||
self.0 |= tmp;
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BitSetIter<'a> {
|
||||
index: usize,
|
||||
current: usize,
|
||||
remining: &'a [usize],
|
||||
}
|
||||
|
||||
impl Iterator for BitSetIter<'_> {
|
||||
type Item = usize;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
while self.current == 0 {
|
||||
self.current = *self.remining.take_first()?;
|
||||
self.index += 1;
|
||||
}
|
||||
|
||||
let sub_idx = self.current.trailing_zeros() as usize;
|
||||
self.current &= self.current - 1;
|
||||
Some(self.index * BitSet::UNIT + sub_idx)
|
||||
}
|
||||
}
|
||||
|
||||
struct AllocedBitSet {
|
||||
cap: usize,
|
||||
data: [usize; 0],
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[test]
|
||||
fn test_small_bit_set() {
|
||||
use std::vec::Vec;
|
||||
|
||||
let mut sv = BitSet::default();
|
||||
|
||||
sv.set(10);
|
||||
debug_assert!(sv.get(10));
|
||||
sv.set(100);
|
||||
debug_assert!(sv.get(100));
|
||||
sv.set(10000);
|
||||
debug_assert!(sv.get(10000));
|
||||
debug_assert_eq!(sv.iter().collect::<Vec<_>>(), &[10, 100, 10000]);
|
||||
sv.clear(10000);
|
||||
debug_assert_eq!(sv.iter().collect::<Vec<_>>(), &[]);
|
||||
}
|
||||
|
||||
pub union Vc {
|
||||
inline: InlineVc,
|
||||
alloced: AllocedVc,
|
||||
}
|
||||
|
||||
impl Default for Vc {
|
||||
fn default() -> Self {
|
||||
Vc { inline: InlineVc { elems: MaybeUninit::uninit(), cap: Default::default() } }
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Vc {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
self.as_slice().fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<Nid> for Vc {
|
||||
fn from_iter<T: IntoIterator<Item = Nid>>(iter: T) -> Self {
|
||||
let mut slf = Self::default();
|
||||
for i in iter {
|
||||
slf.push(i);
|
||||
}
|
||||
slf
|
||||
}
|
||||
}
|
||||
|
||||
const INLINE_ELEMS: usize = VC_SIZE / 2 - 1;
|
||||
const VC_SIZE: usize = 16;
|
||||
|
||||
impl Vc {
|
||||
fn is_inline(&self) -> bool {
|
||||
unsafe { self.inline.cap <= INLINE_ELEMS as Nid }
|
||||
}
|
||||
|
||||
fn layout(&self) -> Option<core::alloc::Layout> {
|
||||
unsafe {
|
||||
self.is_inline().not().then(|| {
|
||||
core::alloc::Layout::array::<Nid>(self.alloced.cap as _).unwrap_unchecked()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
unsafe {
|
||||
if self.is_inline() {
|
||||
self.inline.cap as _
|
||||
} else {
|
||||
self.alloced.len as _
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn len_mut(&mut self) -> &mut Nid {
|
||||
unsafe {
|
||||
if self.is_inline() {
|
||||
&mut self.inline.cap
|
||||
} else {
|
||||
&mut self.alloced.len
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn as_ptr(&self) -> *const Nid {
|
||||
unsafe {
|
||||
match self.is_inline() {
|
||||
true => self.inline.elems.as_ptr().cast(),
|
||||
false => self.alloced.base.as_ptr(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn as_mut_ptr(&mut self) -> *mut Nid {
|
||||
unsafe {
|
||||
match self.is_inline() {
|
||||
true => self.inline.elems.as_mut_ptr().cast(),
|
||||
false => self.alloced.base.as_ptr(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_slice(&self) -> &[Nid] {
|
||||
unsafe { core::slice::from_raw_parts(self.as_ptr(), self.len()) }
|
||||
}
|
||||
|
||||
fn as_slice_mut(&mut self) -> &mut [Nid] {
|
||||
unsafe { core::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len()) }
|
||||
}
|
||||
|
||||
pub fn push(&mut self, value: Nid) {
|
||||
if let Some(layout) = self.layout()
|
||||
&& unsafe { self.alloced.len == self.alloced.cap }
|
||||
{
|
||||
unsafe {
|
||||
self.alloced.cap *= 2;
|
||||
self.alloced.base = Unique::new_unchecked(
|
||||
alloc::realloc(
|
||||
self.alloced.base.as_ptr().cast(),
|
||||
layout,
|
||||
self.alloced.cap as usize * core::mem::size_of::<Nid>(),
|
||||
)
|
||||
.cast(),
|
||||
);
|
||||
}
|
||||
} else if self.len() == INLINE_ELEMS {
|
||||
unsafe {
|
||||
let mut allcd =
|
||||
Self::alloc((self.inline.cap + 1).next_power_of_two() as _, self.len());
|
||||
core::ptr::copy_nonoverlapping(self.as_ptr(), allcd.as_mut_ptr(), self.len());
|
||||
*self = allcd;
|
||||
}
|
||||
}
|
||||
|
||||
unsafe {
|
||||
*self.len_mut() += 1;
|
||||
self.as_mut_ptr().add(self.len() - 1).write(value);
|
||||
}
|
||||
}
|
||||
|
||||
unsafe fn alloc(cap: usize, len: usize) -> Self {
|
||||
debug_assert!(cap > INLINE_ELEMS);
|
||||
let layout = unsafe { core::alloc::Layout::array::<Nid>(cap).unwrap_unchecked() };
|
||||
let alloc = unsafe { alloc::alloc(layout) };
|
||||
unsafe {
|
||||
Vc {
|
||||
alloced: AllocedVc {
|
||||
base: Unique::new_unchecked(alloc.cast()),
|
||||
len: len as _,
|
||||
cap: cap as _,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn swap_remove(&mut self, index: usize) {
|
||||
let len = self.len() - 1;
|
||||
self.as_slice_mut().swap(index, len);
|
||||
*self.len_mut() -= 1;
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, index: usize) {
|
||||
self.as_slice_mut().copy_within(index + 1.., index);
|
||||
*self.len_mut() -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for Vc {
|
||||
fn drop(&mut self) {
|
||||
if let Some(layout) = self.layout() {
|
||||
unsafe {
|
||||
alloc::dealloc(self.alloced.base.as_ptr().cast(), layout);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for Vc {
|
||||
fn clone(&self) -> Self {
|
||||
self.as_slice().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoIterator for Vc {
|
||||
type IntoIter = VcIntoIter;
|
||||
type Item = Nid;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
VcIntoIter { start: 0, end: self.len(), vc: self }
|
||||
}
|
||||
}
|
||||
|
||||
pub struct VcIntoIter {
|
||||
start: usize,
|
||||
end: usize,
|
||||
vc: Vc,
|
||||
}
|
||||
|
||||
impl Iterator for VcIntoIter {
|
||||
type Item = Nid;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.start == self.end {
|
||||
return None;
|
||||
}
|
||||
|
||||
let ret = unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.start)) };
|
||||
self.start += 1;
|
||||
Some(ret)
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
let len = self.end - self.start;
|
||||
(len, Some(len))
|
||||
}
|
||||
}
|
||||
|
||||
impl DoubleEndedIterator for VcIntoIter {
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
if self.start == self.end {
|
||||
return None;
|
||||
}
|
||||
|
||||
self.end -= 1;
|
||||
Some(unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.end)) })
|
||||
}
|
||||
}
|
||||
|
||||
impl ExactSizeIterator for VcIntoIter {}
|
||||
|
||||
impl<const SIZE: usize> From<[Nid; SIZE]> for Vc {
|
||||
fn from(value: [Nid; SIZE]) -> Self {
|
||||
value.as_slice().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a [Nid]> for Vc {
|
||||
fn from(value: &'a [Nid]) -> Self {
|
||||
if value.len() <= INLINE_ELEMS {
|
||||
let mut dflt = Self::default();
|
||||
unsafe {
|
||||
core::ptr::copy_nonoverlapping(value.as_ptr(), dflt.as_mut_ptr(), value.len())
|
||||
};
|
||||
dflt.inline.cap = value.len() as _;
|
||||
dflt
|
||||
} else {
|
||||
let mut allcd = unsafe { Self::alloc(value.len(), value.len()) };
|
||||
unsafe {
|
||||
core::ptr::copy_nonoverlapping(value.as_ptr(), allcd.as_mut_ptr(), value.len())
|
||||
};
|
||||
allcd
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Vc {
|
||||
type Target = [Nid];
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.as_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for Vc {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
self.as_slice_mut()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
#[repr(C)]
|
||||
struct InlineVc {
|
||||
cap: Nid,
|
||||
elems: MaybeUninit<[Nid; INLINE_ELEMS]>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
#[repr(C)]
|
||||
struct AllocedVc {
|
||||
cap: Nid,
|
||||
len: Nid,
|
||||
base: Unique<Nid>,
|
||||
}
|
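Both `InlineVc` and `AllocedVc` are `#[repr(C)]` with `cap: Nid` as their first field, so reading `self.inline.cap` through the union is valid in either state; `is_inline` then just checks whether that capacity still fits the in-place `INLINE_ELEMS` slots (7 node ids with `VC_SIZE = 16`), and `push` spills to a heap allocation once it does not.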
||||
|
||||
pub trait Ent: Copy {
|
||||
fn new(index: usize) -> Self;
|
||||
fn index(self) -> usize;
|
||||
}
|
||||
|
||||
pub struct EntVec<K: Ent, T> {
|
||||
data: ::alloc::vec::Vec<T>,
|
||||
k: PhantomData<fn(K)>,
|
||||
}
|
||||
|
||||
impl<K: Ent, T> Default for EntVec<K, T> {
|
||||
fn default() -> Self {
|
||||
Self { data: Default::default(), k: PhantomData }
|
||||
}
|
||||
}
|
||||
|
||||
impl<K: Ent, T> EntVec<K, T> {
|
||||
pub fn clear(&mut self) {
|
||||
self.data.clear();
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.data.is_empty()
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.data.len()
|
||||
}
|
||||
|
||||
pub fn push(&mut self, value: T) -> K {
|
||||
let k = K::new(self.data.len());
|
||||
self.data.push(value);
|
||||
k
|
||||
}
|
||||
|
||||
pub fn next(&self, index: K) -> Option<&T> {
|
||||
self.data.get(index.index() + 1)
|
||||
}
|
||||
|
||||
pub fn shadow(&mut self, len: usize)
|
||||
where
|
||||
T: Default,
|
||||
{
|
||||
if self.data.len() < len {
|
||||
self.data.resize_with(len, Default::default);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> core::slice::Iter<T> {
|
||||
self.data.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<K: Ent, T> core::ops::Index<K> for EntVec<K, T> {
|
||||
type Output = T;
|
||||
|
||||
fn index(&self, index: K) -> &Self::Output {
|
||||
&self.data[index.index()]
|
||||
}
|
||||
}
|
||||
|
||||
impl<K: Ent, T> core::ops::IndexMut<K> for EntVec<K, T> {
|
||||
fn index_mut(&mut self, index: K) -> &mut Self::Output {
|
||||
&mut self.data[index.index()]
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! decl_ent {
|
||||
($(
|
||||
$vis:vis struct $name:ident($index:ty);
|
||||
)*) => {$(
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
|
||||
$vis struct $name($index);
|
||||
|
||||
impl crate::utils::Ent for $name {
|
||||
fn new(index: usize) -> Self {
|
||||
Self(index as $index)
|
||||
}
|
||||
|
||||
fn index(self) -> usize {
|
||||
self.0 as _
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
impl core::fmt::Display for $name {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
write!(f, concat!(stringify!($name), "{}"), self.0)
|
||||
}
|
||||
}
|
||||
)*};
|
||||
}
|
||||
pub(crate) use decl_ent;
|
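`decl_ent!` pairs with `EntVec`: it stamps out a `Copy` newtype index implementing `Ent`, which `EntVec` then uses as its only key type. A hypothetical use (the `FuncId` name is illustrative, not from the commit):

    decl_ent! { pub struct FuncId(u32); }

    #[cfg(test)]
    #[test]
    fn ent_vec_keys() {
        let mut funcs: EntVec<FuncId, u32> = EntVec::default();
        let id = funcs.push(42);
        assert_eq!(funcs[id], 42);
    }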
48
lang/tests/son_tests_advanced_floating_point_arithmetic.txt
Normal file
|
@ -0,0 +1,48 @@
|
|||
main:
|
||||
ADDI64 r254, r254, -24d
|
||||
ST r31, r254, 0a, 24h
|
||||
LI32 r32, 1148846080w
|
||||
CP r2, r32
|
||||
JAL r31, r0, :sin
|
||||
CP r33, r1
|
||||
FMUL32 r32, r33, r32
|
||||
FTI32 r32, r32, 1b
|
||||
CP r1, r32
|
||||
LD r31, r254, 0a, 24h
|
||||
ADDI64 r254, r254, 24d
|
||||
JALA r0, r31, 0a
|
||||
sin:
|
||||
CP r13, r2
|
||||
LI32 r14, 1124073472w
|
||||
LI32 r15, 1078530011w
|
||||
FMUL32 r14, r13, r14
|
||||
FDIV32 r14, r14, r15
|
||||
FTI32 r14, r14, 1b
|
||||
ANDI r15, r14, 255d
|
||||
ITF64 r16, r14
|
||||
MULI64 r15, r15, 4d
|
||||
LRA r17, r0, :sin_table
|
||||
LI32 r18, 1086918619w
|
||||
FC64T32 r16, r16, 1b
|
||||
ADDI64 r14, r14, 64d
|
||||
ADD64 r15, r17, r15
|
||||
LI32 r19, 1132462080w
|
||||
FMUL32 r16, r16, r18
|
||||
ANDI r14, r14, 255d
|
||||
LI32 r18, 1056964608w
|
||||
LD r15, r15, 0a, 4h
|
||||
FDIV32 r16, r16, r19
|
||||
MULI64 r14, r14, 4d
|
||||
FMUL32 r18, r15, r18
|
||||
FSUB32 r13, r13, r16
|
||||
ADD64 r14, r17, r14
|
||||
FMUL32 r16, r13, r18
|
||||
LD r14, r14, 0a, 4h
|
||||
FSUB32 r14, r14, r16
|
||||
FMUL32 r13, r14, r13
|
||||
FADD32 r13, r15, r13
|
||||
CP r1, r13
|
||||
JALA r0, r31, 0a
|
||||
code size: 1315
|
||||
ret: 826
|
||||
status: Ok(())
|
lang/tests/son_tests_aliasing_overoptimization.txt (new file, 6 lines)
@@ -0,0 +1,6 @@
main:
CP r1, r0
JALA r0, r31, 0a
code size: 22
ret: 0
status: Ok(())
lang/tests/son_tests_arithmetic.txt (new file, 6 lines)
@@ -0,0 +1,6 @@
main:
CP r1, r0
JALA r0, r31, 0a
code size: 22
ret: 0
status: Ok(())
lang/tests/son_tests_arrays.txt (new file, 32 lines)
@@ -0,0 +1,32 @@
main:
ADDI64 r254, r254, -56d
ST r31, r254, 24a, 32h
LI64 r32, 1d
ADDI64 r33, r254, 0d
ST r32, r254, 0a, 8h
LI64 r34, 2d
ST r34, r254, 8a, 8h
LI64 r34, 4d
ST r34, r254, 16a, 8h
CP r2, r33
JAL r31, r0, :pass
CP r33, r1
ADD64 r32, r33, r32
CP r1, r32
LD r31, r254, 24a, 32h
ADDI64 r254, r254, 56d
JALA r0, r31, 0a
pass:
CP r13, r2
LD r14, r13, 8a, 8h
MULI64 r15, r14, 8d
LD r16, r13, 0a, 8h
ADD64 r13, r15, r13
ADD64 r14, r14, r16
LD r13, r13, 0a, 8h
ADD64 r13, r13, r14
CP r1, r13
JALA r0, r31, 0a
code size: 246
ret: 8
status: Ok(())
lang/tests/son_tests_big_array_crash.txt (new file, 8 lines)
@@ -0,0 +1,8 @@
main:
LRA r13, r0, :sin_table
LD r13, r13, 80a, 8h
CP r1, r13
JALA r0, r31, 0a
code size: 770
ret: 1736
status: Ok(())
lang/tests/son_tests_branch_assignments.txt (new file, 14 lines)
@@ -0,0 +1,14 @@
main:
CP r14, r2
LI64 r13, 1d
JNE r14, r13, :0
JMP :1
0: JNE r14, r0, :2
LI64 r13, 2d
JMP :1
2: LI64 r13, 3d
1: CP r1, r13
JALA r0, r31, 0a
code size: 75
ret: 2
status: Ok(())
lang/tests/son_tests_c_strings.txt (new file, 32 lines)
@@ -0,0 +1,32 @@
main:
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
LRA r32, r0, :"abඞ\n\r\t56789\0"
CP r2, r32
JAL r31, r0, :str_len
CP r32, r1
LRA r33, r0, :"fff\0"
CP r2, r33
JAL r31, r0, :str_len
CP r33, r1
ADD64 r32, r33, r32
CP r1, r32
LD r31, r254, 0a, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
str_len:
CP r15, r2
CP r14, r0
CP r13, r14
2: LD r16, r15, 0a, 1h
ANDI r16, r16, 255d
JNE r16, r14, :0
CP r1, r13
JMP :1
0: ADDI64 r15, r15, 1d
ADDI64 r13, r13, 1d
JMP :2
1: JALA r0, r31, 0a
code size: 216
ret: 16
status: Ok(())
lang/tests/son_tests_comments.txt (new file, 13 lines)
@@ -0,0 +1,13 @@
foo:
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -8d
ST r31, r254, 0a, 8h
JAL r31, r0, :foo
CP r1, r0
LD r31, r254, 0a, 8h
ADDI64 r254, r254, 8d
JALA r0, r31, 0a
code size: 88
ret: 0
status: Ok(())
@@ -0,0 +1,8 @@
main:
LRA r13, r0, :a
LD r13, r13, 0a, 8h
CP r1, r13
JALA r0, r31, 0a
code size: 50
ret: 50
status: Ok(())
lang/tests/son_tests_comptime_min_reg_leak.txt (new file, 8 lines)
@@ -0,0 +1,8 @@
main:
LRA r13, r0, :a
LD r13, r13, 0a, 8h
CP r1, r13
JALA r0, r31, 0a
code size: 50
ret: 50
status: Ok(())
lang/tests/son_tests_conditional_stores.txt (new file, 19 lines)
@@ -0,0 +1,19 @@
cond:
CP r1, r0
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
JAL r31, r0, :cond
CP r33, r1
CP r32, r0
JNE r33, r32, :0
JMP :1
0: LI64 r32, 2d
1: CP r1, r32
LD r31, r254, 0a, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
code size: 117
ret: 0
status: Ok(())
lang/tests/son_tests_const_folding_with_arg.txt (new file, 6 lines)
@@ -0,0 +1,6 @@
main:
CP r1, r0
JALA r0, r31, 0a
code size: 22
ret: 0
status: Ok(())
lang/tests/son_tests_constants.txt (new file, 7 lines)
@@ -0,0 +1,7 @@
main:
LI32 r13, 69w
CP r1, r13
JALA r0, r31, 0a
code size: 28
ret: 69
status: Ok(())
lang/tests/son_tests_dead_code_in_loop.txt (new file, 6 lines)
@@ -0,0 +1,6 @@
main:
CP r1, r0
JALA r0, r31, 0a
code size: 22
ret: 0
status: Ok(())
lang/tests/son_tests_die.txt (new file, 5 lines)
@@ -0,0 +1,5 @@
main:
UN
code size: 9
ret: 0
status: Err(Unreachable)
lang/tests/son_tests_different_function_destinations.txt (new file, 81 lines)
@@ -0,0 +1,81 @@
main:
ADDI64 r254, r254, -136d
ST r31, r254, 80a, 56h
LRA r32, r0, :glob_stru
JAL r31, r0, :new_stru
ST r1, r32, 0a, 16h
CP r33, r0
LD r34, r32, 0a, 8h
JEQ r34, r33, :0
LI64 r32, 300d
CP r1, r32
JMP :1
0: ST r33, r32, 0a, 8h
LD r34, r32, 0a, 8h
JEQ r34, r33, :2
LI64 r32, 200d
CP r1, r32
JMP :1
2: LI64 r34, 1d
ST r34, r32, 0a, 8h
ST r34, r32, 8a, 8h
ADDI64 r35, r254, 32d
ST r34, r254, 32a, 8h
ST r34, r254, 40a, 8h
ST r34, r254, 48a, 8h
ST r34, r254, 56a, 8h
ST r34, r254, 64a, 8h
ST r34, r254, 72a, 8h
LI64 r36, 3d
CP r32, r33
8: JNE r32, r36, :3
LD r32, r254, 64a, 8h
JEQ r32, r33, :4
LI64 r32, 100d
CP r1, r32
JMP :1
4: ST r34, r254, 32a, 8h
ST r34, r254, 40a, 8h
ST r34, r254, 48a, 8h
ST r34, r254, 56a, 8h
ST r34, r254, 64a, 8h
ST r34, r254, 72a, 8h
ST r33, r254, 0a, 8h
ST r33, r254, 8a, 8h
ST r33, r254, 16a, 8h
ST r33, r254, 24a, 8h
CP r32, r33
7: LD r37, r254, 64a, 8h
JNE r32, r36, :5
JEQ r37, r33, :6
LI64 r32, 10d
CP r1, r32
JMP :1
6: CP r1, r33
JMP :1
5: MULI64 r37, r32, 16d
ADD64 r37, r35, r37
ST r33, r37, 0a, 8h
ST r33, r37, 8a, 8h
ADD64 r32, r32, r34
JMP :7
3: MULI64 r37, r32, 16d
ADD64 r37, r35, r37
JAL r31, r0, :new_stru
ST r1, r37, 0a, 16h
ADD64 r32, r32, r34
JMP :8
1: LD r31, r254, 80a, 56h
ADDI64 r254, r254, 136d
JALA r0, r31, 0a
new_stru:
ADDI64 r254, r254, -16d
ADDI64 r13, r254, 0d
ST r0, r254, 0a, 8h
ST r0, r254, 8a, 8h
LD r1, r13, 0a, 16h
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
code size: 736
ret: 0
status: Ok(())
lang/tests/son_tests_different_types.txt (new file, 29 lines)
@@ -0,0 +1,29 @@
main:
ADDI64 r254, r254, -12d
LI8 r13, 255b
ST r13, r254, 0a, 1h
ST r0, r254, 1a, 1h
ST r0, r254, 2a, 1h
ST r13, r254, 3a, 1h
ST r0, r254, 4a, 4h
LD r13, r254, 4a, 4h
LI32 r14, 2w
ST r14, r254, 8a, 4h
LD r14, r254, 8a, 4h
LI64 r15, 2d
ANDI r14, r14, 4294967295d
JEQ r14, r15, :0
CP r1, r0
JMP :1
0: ANDI r13, r13, 4294967295d
JEQ r13, r0, :2
LI64 r13, 64d
CP r1, r13
JMP :1
2: LI64 r13, 512d
CP r1, r13
1: ADDI64 r254, r254, 12d
JALA r0, r31, 0a
code size: 235
ret: 512
status: Ok(())
lang/tests/son_tests_directives.txt (new file, 22 lines)
@@ -0,0 +1,22 @@
main:
ADDI64 r254, r254, -16d
LI64 r13, 10d
ADDI64 r14, r254, 0d
ST r13, r254, 0a, 8h
LI64 r13, 20d
ST r13, r254, 8a, 8h
LI64 r13, 6d
LI64 r15, 5d
LI64 r16, 1d
CP r2, r16
CP r5, r15
CP r6, r13
LD r3, r14, 0a, 16h
ECA
CP r1, r0
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
ev: Ecall
code size: 154
ret: 0
status: Ok(())
lang/tests/son_tests_enums.txt (new file, 20 lines)
@@ -0,0 +1,20 @@
main:
ADDI64 r254, r254, -16d
ST r31, r254, 0a, 16h
JAL r31, r0, :some_enum
CP r32, r1
ANDI r32, r32, 255d
JNE r32, r0, :0
CP r1, r0
JMP :1
0: LI64 r32, 100d
CP r1, r32
1: LD r31, r254, 0a, 16h
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
some_enum:
CP r1, r0
JALA r0, r31, 0a
code size: 128
ret: 0
status: Ok(())
lang/tests/son_tests_exhaustive_loop_testing.txt (new file, 114 lines)
@@ -0,0 +1,114 @@
continue_and_state_change:
CP r13, r2
CP r15, r0
LI64 r16, 3d
LI64 r14, 4d
LI64 r17, 2d
LI64 r18, 10d
6: JLTU r13, r18, :0
JMP :1
0: JNE r13, r17, :2
CP r13, r14
JMP :3
2: JNE r13, r16, :4
CP r13, r15
1: CP r1, r13
JMP :5
4: ADDI64 r13, r13, 1d
3: JMP :6
5: JALA r0, r31, 0a
infinite_loop:
ADDI64 r254, r254, -32d
ST r31, r254, 0a, 32h
LI64 r34, 1d
CP r33, r0
CP r32, r33
1: JNE r32, r34, :0
JMP :0
0: CP r2, r33
JAL r31, r0, :continue_and_state_change
CP r32, r1
JMP :1
LD r31, r254, 0a, 32h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -40d
ST r31, r254, 0a, 40h
CP r2, r0
JAL r31, r0, :multiple_breaks
CP r32, r1
LI64 r33, 3d
JEQ r32, r33, :0
LI64 r32, 1d
CP r1, r32
JMP :1
0: LI64 r32, 4d
CP r2, r32
JAL r31, r0, :multiple_breaks
CP r34, r1
LI64 r35, 10d
JEQ r34, r35, :2
LI64 r32, 2d
CP r1, r32
JMP :1
2: CP r2, r0
JAL r31, r0, :state_change_in_break
CP r34, r1
JEQ r34, r0, :3
CP r1, r33
JMP :1
3: CP r2, r32
JAL r31, r0, :state_change_in_break
CP r34, r1
JEQ r34, r35, :4
CP r1, r32
JMP :1
4: CP r2, r35
JAL r31, r0, :continue_and_state_change
CP r32, r1
JEQ r32, r35, :5
LI64 r32, 5d
CP r1, r32
JMP :1
5: CP r2, r33
JAL r31, r0, :continue_and_state_change
CP r32, r1
JEQ r32, r0, :6
LI64 r32, 6d
CP r1, r32
JMP :1
6: JAL r31, r0, :infinite_loop
CP r1, r0
1: LD r31, r254, 0a, 40h
ADDI64 r254, r254, 40d
JALA r0, r31, 0a
multiple_breaks:
CP r13, r2
LI64 r14, 3d
LI64 r15, 10d
4: JLTU r13, r15, :0
JMP :1
0: ADDI64 r13, r13, 1d
JNE r13, r14, :2
1: CP r1, r13
JMP :3
2: JMP :4
3: JALA r0, r31, 0a
state_change_in_break:
CP r13, r2
LI64 r14, 3d
LI64 r15, 10d
4: JLTU r13, r15, :0
JMP :1
0: JNE r13, r14, :2
CP r13, r0
1: CP r1, r13
JMP :3
2: ADDI64 r13, r13, 1d
JMP :4
3: JALA r0, r31, 0a
timed out
code size: 667
ret: 10
status: Ok(())
lang/tests/son_tests_fb_driver.txt (new file, 55 lines)
@@ -0,0 +1,55 @@
check_platform:
ADDI64 r254, r254, -16d
ST r31, r254, 0a, 16h
JAL r31, r0, :x86_fb_ptr
CP r32, r1
CP r1, r32
LD r31, r254, 0a, 16h
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -56d
ST r31, r254, 0a, 56h
JAL r31, r0, :check_platform
CP r33, r0
LI64 r36, 30d
LI64 r37, 100d
CP r35, r33
CP r34, r33
CP r32, r33
5: JLTU r32, r36, :0
ADDI64 r34, r34, 1d
CP r2, r33
CP r3, r34
CP r4, r36
JAL r31, r0, :set_pixel
CP r32, r1
JEQ r32, r35, :1
CP r1, r33
JMP :2
1: JNE r34, r37, :3
CP r1, r35
JMP :2
3: CP r32, r33
JMP :4
0: ADDI64 r35, r35, 1d
ADDI64 r32, r32, 1d
4: JMP :5
2: LD r31, r254, 0a, 56h
ADDI64 r254, r254, 56d
JALA r0, r31, 0a
set_pixel:
CP r13, r2
CP r14, r3
CP r15, r4
MUL64 r14, r14, r15
ADD64 r13, r14, r13
CP r1, r13
JALA r0, r31, 0a
x86_fb_ptr:
LI64 r13, 100d
CP r1, r13
JALA r0, r31, 0a
code size: 329
ret: 3000
status: Ok(())
lang/tests/son_tests_floating_point_arithmetic.txt (new file, 7 lines)
@@ -0,0 +1,7 @@
main:
LI32 r13, 3212836864w
CP r1, r13
JALA r0, r31, 0a
code size: 28
ret: 3212836864
status: Ok(())
lang/tests/son_tests_functions.txt (new file, 29 lines)
@@ -0,0 +1,29 @@
add_one:
CP r13, r2
ADDI64 r13, r13, 1d
CP r1, r13
JALA r0, r31, 0a
add_two:
CP r13, r2
ADDI64 r13, r13, 2d
CP r1, r13
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
LI64 r32, 10d
CP r2, r32
JAL r31, r0, :add_one
CP r32, r1
LI64 r33, 20d
CP r2, r33
JAL r31, r0, :add_two
CP r33, r1
ADD64 r32, r33, r32
CP r1, r32
LD r31, r254, 0a, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
code size: 176
ret: 33
status: Ok(())
lang/tests/son_tests_generic_functions.txt (new file, 38 lines)
@@ -0,0 +1,38 @@
add:
CP r13, r2
CP r14, r3
ADD64 r13, r13, r14
CP r1, r13
JALA r0, r31, 0a
add:
CP r13, r2
CP r14, r3
ADD32 r13, r13, r14
CP r1, r13
JALA r0, r31, 0a
add:
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -32d
ST r31, r254, 0a, 32h
JAL r31, r0, :add
LI32 r32, 2w
CP r2, r32
CP r3, r32
JAL r31, r0, :add
CP r32, r1
LI64 r33, 3d
LI64 r34, 1d
CP r2, r34
CP r3, r33
JAL r31, r0, :add
CP r33, r1
ANDI r32, r32, 4294967295d
SUB64 r32, r32, r33
CP r1, r32
LD r31, r254, 0a, 32h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
code size: 209
ret: 0
status: Ok(())
lang/tests/son_tests_generic_type_mishap.txt (new file, 32 lines)
@@ -0,0 +1,32 @@
main:
ADDI64 r254, r254, -8d
ST r31, r254, 0a, 8h
JAL r31, r0, :process
LD r31, r254, 0a, 8h
ADDI64 r254, r254, 8d
JALA r0, r31, 0a
opaque:
JALA r0, r31, 0a
process:
ADDI64 r254, r254, -48d
ST r31, r254, 16a, 32h
ADDI64 r33, r254, 0d
ST r0, r254, 0a, 1h
LI64 r32, 1000d
4: JGTU r32, r0, :0
JMP :1
0: CP r2, r33
JAL r31, r0, :opaque
LD r34, r254, 0a, 1h
ANDI r34, r34, 255d
JEQ r34, r0, :2
JMP :3
2: ADDI64 r32, r32, -1d
1: JMP :4
3: LD r31, r254, 16a, 32h
ADDI64 r254, r254, 48d
JALA r0, r31, 0a
timed out
code size: 248
ret: 0
status: Ok(())
lang/tests/son_tests_generic_types.txt (new file, 130 lines)
@@ -0,0 +1,130 @@
deinit:
ADDI64 r254, r254, -40d
ST r31, r254, 0a, 40h
CP r32, r2
LD r33, r32, 16a, 8h
LI64 r34, 8d
MUL64 r33, r33, r34
LD r35, r32, 0a, 8h
CP r2, r35
CP r3, r33
CP r4, r34
JAL r31, r0, :free
CP r1, r32
JAL r31, r0, :new
LD r31, r254, 0a, 40h
ADDI64 r254, r254, 40d
JALA r0, r31, 0a
free:
CP r13, r2
CP r14, r3
CP r15, r4
LRA r16, r0, :free_sys_call
LD r16, r16, 0a, 8h
CP r2, r16
CP r3, r13
CP r4, r14
CP r5, r15
ECA
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -56d
ST r31, r254, 24a, 32h
ADDI64 r32, r254, 0d
CP r1, r32
JAL r31, r0, :new
LI64 r33, 69d
CP r2, r32
CP r3, r33
JAL r31, r0, :push
CP r33, r1
LD r34, r254, 0a, 8h
LD r33, r34, 0a, 8h
CP r2, r32
JAL r31, r0, :deinit
CP r1, r33
LD r31, r254, 24a, 32h
ADDI64 r254, r254, 56d
JALA r0, r31, 0a
malloc:
CP r13, r2
CP r14, r3
LRA r15, r0, :malloc_sys_call
LD r15, r15, 0a, 8h
CP r2, r15
CP r3, r13
CP r4, r14
ECA
CP r13, r1
CP r1, r13
JALA r0, r31, 0a
new:
ADDI64 r254, r254, -24d
CP r14, r1
ADDI64 r13, r254, 0d
ST r0, r254, 0a, 8h
ST r0, r254, 8a, 8h
ST r0, r254, 16a, 8h
BMC r13, r14, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
push:
ADDI64 r254, r254, -88d
ST r31, r254, 0a, 88h
CP r38, r2
CP r39, r3
LI64 r37, 1d
LD r33, r38, 8a, 8h
LD r32, r38, 16a, 8h
JNE r32, r33, :0
JNE r32, r0, :1
CP r32, r37
JMP :2
1: MULI64 r32, r32, 2d
2: LI64 r40, 8d
MUL64 r34, r32, r40
CP r2, r34
CP r3, r40
JAL r31, r0, :malloc
CP r35, r1
ST r32, r38, 16a, 8h
JNE r35, r0, :3
CP r1, r0
JMP :4
3: MULI64 r33, r33, 8d
LD r32, r38, 0a, 8h
ADD64 r41, r32, r33
CP r34, r35
7: LD r33, r38, 0a, 8h
LD r36, r38, 8a, 8h
JNE r41, r32, :5
JEQ r36, r0, :6
MUL64 r32, r36, r40
CP r2, r33
CP r3, r32
CP r4, r40
JAL r31, r0, :free
JMP :6
6: ST r35, r38, 0a, 8h
JMP :0
5: ADDI64 r36, r34, 8d
ADDI64 r33, r32, 8d
LD r32, r32, 0a, 8h
ST r32, r34, 0a, 8h
CP r34, r36
CP r32, r33
JMP :7
0: LD r32, r38, 8a, 8h
MULI64 r33, r32, 8d
LD r34, r38, 0a, 8h
ADD64 r33, r34, r33
ST r39, r33, 0a, 8h
ADD64 r32, r32, r37
ST r32, r38, 8a, 8h
CP r1, r33
4: LD r31, r254, 0a, 88h
ADDI64 r254, r254, 88d
JALA r0, r31, 0a
code size: 923
ret: 69
status: Ok(())
lang/tests/son_tests_global_aliasing_overptimization.txt (new file, 19 lines)
@@ -0,0 +1,19 @@
clobber:
LRA r13, r0, :var
ST r0, r13, 0a, 8h
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -24d
ST r31, r254, 0a, 24h
LRA r32, r0, :var
LI64 r33, 2d
ST r33, r32, 0a, 8h
JAL r31, r0, :clobber
LD r32, r32, 0a, 8h
CP r1, r32
LD r31, r254, 0a, 24h
ADDI64 r254, r254, 24d
JALA r0, r31, 0a
code size: 159
ret: 0
status: Ok(())
lang/tests/son_tests_global_variable_wiredness.txt (new file, 23 lines)
@@ -0,0 +1,23 @@
inb:
CP r1, r0
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -32d
ST r31, r254, 0a, 32h
LRA r32, r0, :ports
LD r33, r32, 0a, 1h
ANDI r33, r33, 255d
JNE r33, r0, :0
JMP :1
0: JAL r31, r0, :inb
CP r33, r1
CMPU r34, r33, r0
CMPUI r34, r34, 0d
NOT r34, r34
ST r34, r32, 0a, 1h
1: LD r31, r254, 0a, 32h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
code size: 164
ret: 0
status: Ok(())
lang/tests/son_tests_global_variables.txt (new file, 10 lines)
@@ -0,0 +1,10 @@
main:
LRA r13, r0, :complex_global_var
LD r14, r13, 0a, 8h
ADDI64 r14, r14, 5d
ST r14, r13, 0a, 8h
CP r1, r14
JALA r0, r31, 0a
code size: 74
ret: 55
status: Ok(())
lang/tests/son_tests_hex_octal_binary_literals.txt (new file, 6 lines)
@@ -0,0 +1,6 @@
main:
CP r1, r0
JALA r0, r31, 0a
code size: 22
ret: 0
status: Ok(())
lang/tests/son_tests_idk.txt (new file, 21 lines)
@@ -0,0 +1,21 @@
main:
ADDI64 r254, r254, -128d
ADDI64 r15, r254, 0d
LI8 r16, 69b
LI64 r17, 128d
CP r13, r0
2: LD r14, r254, 42a, 1h
JLTU r13, r17, :0
ANDI r13, r14, 255d
CP r1, r13
JMP :1
0: ADDI64 r14, r13, 1d
ADD64 r13, r15, r13
ST r16, r13, 0a, 1h
CP r13, r14
JMP :2
1: ADDI64 r254, r254, 128d
JALA r0, r31, 0a
code size: 141
ret: 69
status: Ok(())
lang/tests/son_tests_if_statements.txt (new file, 36 lines)
@@ -0,0 +1,36 @@
fib:
ADDI64 r254, r254, -32d
ST r31, r254, 0a, 32h
CP r32, r2
LI64 r33, 1d
LI64 r34, 2d
JGTU r32, r34, :0
CP r1, r33
JMP :1
0: SUB64 r33, r32, r33
CP r2, r33
JAL r31, r0, :fib
CP r33, r1
SUB64 r32, r32, r34
CP r2, r32
JAL r31, r0, :fib
CP r32, r1
ADD64 r32, r32, r33
CP r1, r32
1: LD r31, r254, 0a, 32h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -16d
ST r31, r254, 0a, 16h
LI64 r32, 10d
CP r2, r32
JAL r31, r0, :fib
CP r32, r1
CP r1, r32
LD r31, r254, 0a, 16h
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
code size: 229
ret: 55
status: Ok(())
lang/tests/son_tests_infinite_loop_after_peephole.txt (new file, 9 lines)
@@ -0,0 +1,9 @@
main:
CP r13, r0
0: ADDI64 r13, r13, 1d
JMP :0
JALA r0, r31, 0a
timed out
code size: 38
ret: 0
status: Ok(())
lang/tests/son_tests_inline.txt (new file, 21 lines)
@@ -0,0 +1,21 @@
main:
LI64 r13, 8d
CP r2, r13
ECA
LI64 r14, 6d
LRA r13, r0, :gb
LD r13, r13, 0a, 8h
CMPU r13, r13, r0
CMPUI r13, r13, 0d
OR r13, r13, r0
ANDI r13, r13, 255d
JNE r13, r0, :0
CP r13, r14
JMP :1
0: LI64 r13, 1d
1: SUB64 r13, r13, r14
CP r1, r13
JALA r0, r31, 0a
code size: 131
ret: 0
status: Ok(())
lang/tests/son_tests_inline_return_stack.txt (new file, 6 lines)
@@ -0,0 +1,6 @@
main:
CP r1, r0
JALA r0, r31, 0a
code size: 22
ret: 0
status: Ok(())
lang/tests/son_tests_inline_test.txt (new file, 29 lines)
@@ -0,0 +1,29 @@
main:
ADDI64 r254, r254, -16d
ST r31, r254, 0a, 16h
JAL r31, r0, :scalar_values
CP r32, r1
JEQ r32, r0, :0
LI64 r32, 1d
CP r1, r32
JMP :1
0: JAL r31, r0, :structs
CP r32, r1
JEQ r32, r0, :2
JAL r31, r0, :structs
CP r32, r1
CP r1, r32
JMP :1
2: CP r1, r0
1: LD r31, r254, 0a, 16h
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
scalar_values:
CP r1, r0
JALA r0, r31, 0a
structs:
CP r1, r0
JALA r0, r31, 0a
code size: 164
ret: 0
status: Ok(())
lang/tests/son_tests_inlined_generic_functions.txt (new file, 7 lines)
@@ -0,0 +1,7 @@
main:
LI64 r13, 10d
CP r1, r13
JALA r0, r31, 0a
code size: 32
ret: 10
status: Ok(())
Some files were not shown because too many files have changed in this diff.