adding capability to run posts and displaying cumulative runs

Jakub Doka 2024-11-23 10:18:05 +01:00
parent b760d9ef75
commit d8d039b67a
GPG key ID: C6E9A89936B8C143
7 changed files with 161 additions and 60 deletions


@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="18px" viewBox="0 -960 960 960" width="24px" fill="#e8eaed"><path d="M480-320 280-520l56-58 104 104v-326h80v326l104-104 56 58-200 200ZM240-160q-33 0-56.5-23.5T160-240v-120h80v120h480v-120h80v120q0 33-23.5 56.5T720-160H240Z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 -960 960 960" width="24px" fill="#e8eaed"><path d="M480-320 280-520l56-58 104 104v-326h80v326l104-104 56 58-200 200ZM240-160q-33 0-56.5-23.5T160-240v-120h80v120h480v-120h80v120q0 33-23.5 56.5T720-160H240Z"/></svg>


depell/src/icons/run.svg Normal file

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="24px" viewBox="0 -960 960 960" width="24px" fill="#e8eaed"><path d="M320-200v-560l440 280-440 280Zm80-280Zm0 134 210-134-210-134v268Z"/></svg>



@@ -34,6 +34,11 @@ body {
}
div.preview {
margin: var(--small-gap) 0px;
display: flex;
flex-direction: column;
gap: var(--small-gap);
div.info {
display: flex;
gap: var(--small-gap);
@@ -45,9 +50,26 @@ div.preview {
div.stat {
display: flex;
svg {
height: 18px;
}
}
margin: var(--small-gap) 0px;
div.code {
position: relative;
nav {
position: absolute;
right: 0;
padding: var(--small-gap);
button {
display: flex;
padding: 0;
}
}
}
}
svg {
@@ -86,6 +108,8 @@ pre {
font-family: var(--monospace);
tab-size: 4;
overflow-x: auto;
white-space: pre-wrap;
word-wrap: break-word;
}
input {


@@ -14,7 +14,7 @@ const stack_pointer_offset = 1 << 20;
/** @param {WebAssembly.Instance} instance @param {Post[]} packages @param {number} fuel
* @returns {string} */
function compileCode(instance, packages, fuel) {
function compileCode(instance, packages, fuel = 1) {
let {
INPUT, INPUT_LEN,
LOG_MESSAGES, LOG_MESSAGES_LEN,
@@ -34,7 +34,7 @@ function compileCode(instance, packages, fuel) {
new DataView(memory.buffer).setUint32(INPUT_LEN.value, codeLength, true);
runWasmFunction(instance, compile_and_run, fuel);
return bufToString(memory, LOG_MESSAGES, LOG_MESSAGES_LEN);
return bufToString(memory, LOG_MESSAGES, LOG_MESSAGES_LEN).trim();
}
/**@type{WebAssembly.Instance}*/ let fmtInstance;
@@ -187,6 +187,61 @@ function loadCachedPackages(code, roots, buf, prevRoots) {
}
/**@type{Set<string>}*/ const prevRoots = new Set();
/**@typedef {Object} PackageCtx
* @property {AbortController} [cancelation]
* @property {string[]} keyBuf
* @property {Set<string>} prevParams
* @property {HTMLTextAreaElement} [edit] */
/** @param {string} source @param {Set<string>} importDiff @param {HTMLPreElement} errors @param {PackageCtx} ctx */
async function fetchPackages(source, importDiff, errors, ctx) {
importDiff.clear();
for (const match of source.matchAll(importRe)) {
if (localStorage["package-" + match[1]]) continue;
importDiff.add(match[1]);
}
if (importDiff.size !== 0 && (ctx.prevParams.size != importDiff.size
|| [...ctx.prevParams.keys()].every(e => importDiff.has(e)))) {
if (ctx.cancelation) ctx.cancelation.abort();
ctx.prevParams.clear();
ctx.prevParams = new Set([...importDiff]);
ctx.cancelation = new AbortController();
ctx.keyBuf.length = 0;
ctx.keyBuf.push(...importDiff.keys());
errors.textContent = "fetching: " + ctx.keyBuf.join(", ");
await fetch(`/code`, {
method: "POST",
signal: ctx.cancelation.signal,
headers: { "Content-Type": "application/json" },
body: JSON.stringify(ctx.keyBuf),
}).then(async e => {
try {
const json = await e.json();
if (e.status == 200) {
for (const [key, value] of Object.entries(json)) {
localStorage["package-" + key] = value;
}
const missing = ctx.keyBuf.filter(i => json[i] === undefined);
if (missing.length !== 0) {
errors.textContent = "deps not found: " + missing.join(", ");
} else {
ctx.cancelation = undefined;
ctx.edit?.dispatchEvent(new InputEvent("input"));
}
}
} catch (er) {
errors.textContent = "completely failed to fetch ("
+ e.status + "): " + ctx.keyBuf.join(", ");
console.error(e, er);
}
});
}
}
/** @param {HTMLElement} target */
async function bindCodeEdit(target) {
@@ -205,67 +260,24 @@ async function bindCodeEdit(target) {
const hbc = await getHbcInstance(), fmt = await getFmtInstance();
let importDiff = new Set();
const keyBuf = [];
/**@type{Post[]}*/
const packages = [{ path: "local.hb", code: "" }];
const debounce = 100;
/**@type{AbortController|undefined}*/
let cancelation = undefined;
let timeout = 0;
const ctx = { keyBuf: [], prevParams: new Set(), edit };
prevRoots.clear();
const onInput = () => {
importDiff.clear();
for (const match of edit.value.matchAll(importRe)) {
if (localStorage["package-" + match[1]]) continue;
importDiff.add(match[1]);
}
fetchPackages(edit.value, importDiff, errors, ctx);
if (importDiff.size !== 0) {
if (cancelation) cancelation.abort();
cancelation = new AbortController();
keyBuf.length = 0;
keyBuf.push(...importDiff.keys());
errors.textContent = "fetching: " + keyBuf.join(", ");
fetch(`/code`, {
method: "POST",
signal: cancelation.signal,
headers: { "Content-Type": "application/json" },
body: JSON.stringify(keyBuf),
}).then(async e => {
try {
const json = await e.json();
if (e.status == 200) {
for (const [key, value] of Object.entries(json)) {
localStorage["package-" + key] = value;
}
const missing = keyBuf.filter(i => json[i] === undefined);
if (missing.length !== 0) {
errors.textContent = "deps not found: " + missing.join(", ");
} else {
cancelation = undefined;
edit.dispatchEvent(new InputEvent("input"));
}
}
} catch (er) {
errors.textContent = "completely failed to fetch ("
+ e.status + "): " + keyBuf.join(", ");
console.error(e, er);
}
});
}
if (cancelation && importDiff.size !== 0) {
if (ctx.cancelation && importDiff.size !== 0) {
return;
}
loadCachedPackages(edit.value, keyBuf, packages, prevRoots);
loadCachedPackages(edit.value, ctx.keyBuf, packages, prevRoots);
errors.textContent = compileCode(hbc, packages, 1);
errors.textContent = compileCode(hbc, packages);
const minified_size = modifyCode(fmt, edit.value, "minify")?.length;
if (minified_size) {
codeSize.textContent = (MAX_CODE_SIZE - minified_size) + "";
@@ -445,7 +457,7 @@ if (window.location.hostname === 'localhost') {
code: "main:=fn():int{return 42}",
}];
const res = compileCode(await getHbcInstance(), posts, 1) ?? never();
const expected = "exit code: 42\n";
const expected = "exit code: 42";
if (expected != res) console.error(expected, res);
}
})()
@@ -507,6 +519,30 @@ getFmtInstance().then(inst => {
Object.assign(window, { filterCodeDeps });
});
/** @param {HTMLElement} target */
function runPost(target) {
while (!target.matches("div[class=preview]")) target = target.parentElement ?? never();
const code = target.querySelector("pre[apply=fmt]");
if (!(code instanceof HTMLPreElement)) never();
const output = target.querySelector("pre[id=compiler-output]");
if (!(output instanceof HTMLPreElement)) never();
getHbcInstance().then(async hbc => {
const ctx = { keyBuf: [], prevParams: new Set() };
await fetchPackages(code.textContent ?? never(), new Set(), output, ctx);
const posts = [{ path: "this", code: "" }];
loadCachedPackages(code.textContent ?? never(), ctx.keyBuf, posts, new Set());
output.textContent = compileCode(hbc, posts);
output.hidden = false;
});
let author = encodeURIComponent(target.dataset.author ?? never());
let name = encodeURIComponent(target.dataset.name ?? never());
fetch(`/post/run?author=${author}&name=${name}`, { method: "POST" })
}
Object.assign(window, { runPost });
updateTab();
wireUp(document.body);


@@ -4,7 +4,7 @@ use {
axum::{
body::Bytes,
extract::{DefaultBodyLimit, Path},
http::{header::COOKIE, request::Parts},
http::{header::COOKIE, request::Parts, StatusCode},
response::{AppendHeaders, Html},
},
const_format::formatcp,
@@ -69,6 +69,7 @@ async fn amain() {
.route("/post", get(Post::page))
.route("/post-view", get(Post::get))
.route("/post", post(Post::post))
.route("/post/run", post(Post::run))
.route("/code", post(fetch_code))
.route("/login", get(Login::page))
.route("/login-view", get(Login::get))
@@ -246,6 +247,12 @@ impl Page for Post {
}
}
#[derive(Deserialize)]
struct Run {
author: String,
name: String,
}
impl Post {
pub fn from_row(r: &rusqlite::Row) -> rusqlite::Result<Self> {
Ok(Post {
@@ -254,10 +261,24 @@ impl Post {
timestamp: r.get(2)?,
code: r.get(3)?,
imports: r.get(4)?,
runs: r.get(5)?,
..Default::default()
})
}
async fn run(
session: Session,
axum::extract::Query(run): axum::extract::Query<Run>,
) -> StatusCode {
match db::with(|qes| qes.creata_run.insert((run.name, run.author, session.name))) {
Ok(_) => StatusCode::OK,
Err(e) => {
log::error!("creating run record failed: {e}");
StatusCode::INTERNAL_SERVER_ERROR
}
}
}
async fn post(
session: Session,
axum::Form(mut data): axum::Form<Self>,
@@ -313,7 +334,7 @@ impl Post {
impl fmt::Display for Post {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let Self { author, name, timestamp, imports, runs, dependencies, code, .. } = self;
write_html! { f <div class="preview">
write_html! { f <div class="preview" "data-author"=author "data-name"=name>
<div class="info">
<span>
<a "hx-get"={format_args!("/profile-view/{author}")} href="" "hx-target"="main"
@@ -323,7 +344,7 @@ impl fmt::Display for Post {
name
</span>
<span apply="timestamp">timestamp</span>
for (name, count) in [include_str!("icons/download.svg"), "runs", "deps"]
for (name, count) in [include_str!("icons/download.svg"), include_str!("icons/run.svg"), "deps"]
.iter()
.zip([imports, runs, dependencies])
.filter(|(_, &c)| c != 0)
@@ -331,7 +352,13 @@ impl fmt::Display for Post {
<div class="stat">!name count</div>
}
</div>
<div class="code">
<nav>
<button onmousedown="runPost(this)">!{include_str!("icons/run.svg")}</button>
</nav>
<pre apply="fmt">code</pre>
</div>
<pre hidden id="compiler-output"></pre>
if *timestamp == 0 {
<button "hx-get"="/post" "hx-swap"="outerHTML"
"hx-target"="[preview]">"edit"</button>
@@ -772,7 +799,19 @@ mod db {
JOIN roots ON import.to_name = roots.name
AND import.to_author = roots.author
) SELECT (count(*) - 1) FROM roots
) AS imports FROM post AS outher WHERE timestamp < ?",
) AS imports, (
WITH RECURSIVE roots(name, author, code) AS (
SELECT name, author, code FROM post WHERE name = outher.name AND author = outher.author
UNION
SELECT post.name, post.author, post.code FROM post
JOIN import ON post.name = import.from_name
AND post.author = import.from_author
JOIN roots ON import.to_name = roots.name
AND import.to_author = roots.author
) SELECT count(*) FROM roots
JOIN run ON roots.name = run.code_name
AND roots.author = run.code_author
) as runs FROM post AS outher WHERE timestamp < ?",
create_post: "INSERT INTO post (name, author, timestamp, code) VALUES(?, ?, ?, ?)",
fetch_deps: "
WITH RECURSIVE roots(name, author, code) AS (
@@ -787,6 +826,7 @@
",
create_import: "INSERT INTO import(to_author, to_name, from_author, from_name)
VALUES(?, ?, ?, ?)",
creata_run: "INSERT OR IGNORE INTO run(code_name, code_author, runner) VALUES(?, ?, ?)",
}
}
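The creata_run insert and the cumulative-runs subquery above both target a run table whose migration is not part of this diff. A minimal sketch of the schema these queries appear to assume, based on the INSERT OR IGNORE over (code_name, code_author, runner) and the join on code_name/code_author; the column types, the composite primary key, and WITHOUT ROWID are assumptions, not taken from this commit:

-- assumed schema; the actual migration is not shown in this diff
CREATE TABLE IF NOT EXISTS run (
    code_name   TEXT NOT NULL, -- name of the post that was run
    code_author TEXT NOT NULL, -- author of that post
    runner      TEXT NOT NULL, -- user who triggered the run
    PRIMARY KEY (code_name, code_author, runner)
) WITHOUT ROWID;

If the table has such a key, INSERT OR IGNORE records at most one run per user per post, and the recursive runs subquery counts run rows for the post together with every post that transitively imports it, which is what makes the displayed count cumulative.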


@@ -1597,7 +1597,7 @@ impl ArenaChunk {
fn contains(&self, arg: *mut u8) -> bool {
(self.base <= arg && unsafe { self.base.add(self.size) } > arg)
|| self.next().map_or(false, |s| s.contains(arg))
|| self.next().is_some_and(|s| s.contains(arg))
}
pub fn size(&self) -> usize {


@@ -520,7 +520,7 @@ where
if swapped {
core::mem::swap(&mut a0, &mut a1);
}
self.write_reg(tg, a0.partial_cmp(&a1).map_or(false, |v| v == expected) as u8)
self.write_reg(tg, (a0.partial_cmp(&a1) == Some(expected)) as u8)
});
}
}