forked from AbleScript/ablescript
Merge pull request 'You know what could be cursed? Using Rust's block comment syntax as String delimiters!' (#5) from change/string-delimiters into master
Reviewed-on: https://git.ablecorp.us:443/AbleScript/able-script/pulls/5
commit 81f713a5e2

@@ -1,44 +1,44 @@
 functio arity_0() {
-    "this function has arity 0" print;
+    /*this function has arity 0*/ print;
 }

 functio arity_1(arg1) {
-    "this function has arity 1" print;
+    /*this function has arity 1*/ print;
     arg1 print;
 }

 functio arity_2(arg1, arg2) {
-    "this function has arity 2" print;
+    /*this function has arity 2*/ print;
     arg1 print;
     arg2 print;
 }

 functio arity_3(arg1, arg2, arg3) {
-    "this function has arity 3" print;
+    /*this function has arity 3*/ print;
     arg1 print;
     arg2 print;
     arg3 print;
 }

 owo arity_0();
-owo arity_1("foo");
-owo arity_2("foo", "bar");
-owo arity_3("foo", "bar", "baz");
+owo arity_1(/*foo*/);
+owo arity_2(/*foo*/, /*bar*/);
+owo arity_3(/*foo*/, /*bar*/, /*baz*/);

 var i1 = arity_0 * arity_1;
-i1("second");
+i1(/*second*/);

-"----" print;
+/*----*/ print;

 var i2 = arity_1 * arity_0;
-i2("first");
+i2(/*first*/);

-"----" print;
+/*----*/ print;

 var ifancy = arity_3 * arity_3;
-ifancy("left1", "right1", "left2", "right2", "left3", "right3");
+ifancy(/*left1*/, /*right1*/, /*left2*/, /*right2*/, /*left3*/, /*right3*/);

-"----" print;
+/*----*/ print;

 var another = arity_0 * arity_3;
-another("right1", "right2", "right3");
+another(/*right1*/, /*right2*/, /*right3*/);

@@ -1,8 +1,8 @@
 functio helloable() {
-    "Hello, Able!" print;
+    /*Hello, Able!*/ print;
 }

-var cart = ["able" <= 42, helloable <= "hello"];
+var cart = [/*able*/ <= 42, helloable <= /*hello*/];

 cart[42] print;
-cart["hello"]();
+cart[/*hello*/]();

@@ -1,4 +1,4 @@
 functio hello(words){
     words print;
 }
-hello("wonk");
+hello(/*wonk*/);

@@ -1,2 +1,2 @@
-var hello = "world";
+var hello = /*world*/;
 hello print;

@@ -1,3 +1,3 @@
-var hi = "wonk";
+var hi = /*wonk*/;
 melo hi;
 hi print; owo Should error out

@@ -7,17 +7,17 @@ functio swap(left, right) {
     right = tmp;
 }

-var foo = "hello";
-var bar = "world";
+var foo = /*hello*/;
+var bar = /*world*/;

 swap(foo, bar);

-if (foo != "world") {
-    "FAILED" print;
-    "foo should be 'world', is actually:" print;
+if (foo != /*world*/) {
+    /*FAILED*/ print;
+    /*foo should be 'world', is actually:*/ print;
     foo print;
 }

-if (foo == "world") {
-    "OK" print;
+if (foo == /*world*/) {
+    /*OK*/ print;
 }

@@ -673,7 +673,7 @@ mod tests {
                 span: 1..1
             })
             .unwrap(),
-            Value::Int(-2147483648)
+            Value::Int(-9223372036854775808)
         );

         // And the same for divide by zero.
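The expected value in this interpreter test widens from a 32-bit to a 64-bit constant: the removed literal is i32::MIN and the added one is i64::MIN. A standalone check of those two facts in plain Rust (independent of the AbleScript crate; the wrapping_add line merely illustrates 64-bit wrapping overflow, which is presumably what the surrounding test exercises given the divide-by-zero comment that follows):

```rust
fn main() {
    // The removed literal is the smallest 32-bit signed integer...
    assert_eq!(i32::MIN, -2147483648);
    // ...and the added one is the smallest 64-bit signed integer.
    assert_eq!(i64::MIN, -9223372036854775808);
    // With 64-bit wrapping arithmetic, stepping past i64::MAX lands on i64::MIN.
    assert_eq!(i64::MAX.wrapping_add(1), i64::MIN);
}
```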

@@ -728,7 +728,7 @@ mod tests {
         );

         // Assigning an existing variable.
-        eval(&mut env, "foo = \"hi\";").unwrap();
+        eval(&mut env, "foo = /*hi*/;").unwrap();
         assert_eq!(
             env.get_var(&Ident {
                 ident: "foo".to_owned(),

@@ -122,7 +122,7 @@ pub enum Token {
     Abool(Abool),

     /// String
-    #[regex("\"(\\.|[^\"])*\"", get_string)]
+    #[regex("/\\*(\\.|[^\\*/])*\\*/", get_string)]
     String(String),

     /// Integer
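As a quick sanity check of the swapped pattern, the old and new regexes can be exercised outside of logos. This sketch uses the regex crate purely for illustration (it is not a dependency shown in this diff); the two patterns are copied verbatim from the attributes above:

```rust
use regex::Regex;

fn main() {
    // Old pattern: double-quote-delimited strings.
    let old = Regex::new("\"(\\.|[^\"])*\"").unwrap();
    // New pattern: strings delimited by /* and */.
    let new = Regex::new("/\\*(\\.|[^\\*/])*\\*/").unwrap();

    assert!(old.is_match("\"Hello, Able!\""));
    assert!(!old.is_match("/*Hello, Able!*/"));

    assert!(new.is_match("/*Hello, Able!*/"));
    assert!(!new.is_match("\"Hello, Able!\""));
}
```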

@@ -149,7 +149,11 @@ fn get_value<T: std::str::FromStr>(lexer: &mut Lexer<Token>) -> Option<T> {
 }

 fn get_string(lexer: &mut Lexer<Token>) -> String {
-    lexer.slice().trim_matches('"').to_owned()
+    lexer
+        .slice()
+        .trim_start_matches("/*")
+        .trim_end_matches("*/")
+        .to_owned()
 }

 fn get_abool(lexer: &mut Lexer<Token>) -> Option<Abool> {
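Putting the new token pattern and the reworked get_string together, here is a minimal self-contained lexer sketch. It assumes a logos 0.12-style API (mandatory #[error] variant, callbacks returning the variant's field type); the real Token enum has many more variants than shown here:

```rust
use logos::{Lexer, Logos};

#[derive(Logos, Debug, PartialEq)]
enum Token {
    /// String, now delimited by /* and */
    #[regex("/\\*(\\.|[^\\*/])*\\*/", get_string)]
    String(String),

    // Whitespace is skipped; anything unrecognised falls into the error variant.
    #[error]
    #[regex(r"[ \t\n\f]+", logos::skip)]
    Error,
}

// Same delimiter stripping as the get_string shown in the diff above.
fn get_string(lexer: &mut Lexer<Token>) -> String {
    lexer
        .slice()
        .trim_start_matches("/*")
        .trim_end_matches("*/")
        .to_owned()
}

fn main() {
    let mut lex = Token::lexer("/*Hello, Able!*/ /*world*/");
    assert_eq!(lex.next(), Some(Token::String("Hello, Able!".to_owned())));
    assert_eq!(lex.next(), Some(Token::String("world".to_owned())));
    assert_eq!(lex.next(), None);
}
```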

@@ -579,7 +579,7 @@ mod tests {

     #[test]
     fn simple_math() {
-        let code = r#"1 * (a + 3) / 666 print;"#;
+        let code = "1 * (a + 3) / 666 print;";
         let expected = &[Stmt {
             kind: StmtKind::Print(Expr {
                 kind: ExprKind::BinOp {

@@ -624,7 +624,7 @@ mod tests {

     #[test]
     fn variable_declaration() {
-        let code = r#"var a = 42;"#;
+        let code = "var a = 42;";
         let expected = &[Stmt {
             kind: StmtKind::Var {
                 ident: Ident {

@@ -645,7 +645,7 @@ mod tests {

     #[test]
     fn if_flow() {
-        let code = r#"if (a == always) { "Buy Able products!" print; }"#;
+        let code = "if (a == always) { /*Buy Able products!*/ print; }";
         let expected = &[Stmt {
             kind: StmtKind::If {
                 cond: Expr {

@@ -681,7 +681,7 @@ mod tests {

     #[test]
     fn tdark() {
-        let code = r#"T-Dark { var lang = "lang" + lang; }"#;
+        let code = "T-Dark { var lang = /*lang*/ + lang; }";
         let expected = &[Stmt {
             kind: StmtKind::Var {
                 ident: Ident {

@@ -712,7 +712,7 @@ mod tests {

     #[test]
     fn cart_construction() {
-        let code = r#"["able" <= 1, "script" <= 3 - 1] print;"#;
+        let code = "[/*able*/ <= 1, /*script*/ <= 3 - 1] print;";
         let expected = &[Stmt {
             kind: StmtKind::Print(Expr {
                 kind: ExprKind::Cart(vec![

@@ -758,7 +758,7 @@ mod tests {

     #[test]
     fn cart_index() {
-        let code = r#"["able" <= "ablecorp"]["ablecorp"] print;"#;
+        let code = "[/*able*/ <= /*ablecorp*/][/*ablecorp*/] print;";
         let expected = &[Stmt {
             kind: StmtKind::Print(Expr {
                 kind: ExprKind::Index {
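A side effect visible across these parser tests: with /* and */ as the string delimiters, the embedded AbleScript sources no longer contain double quotes, so ordinary Rust string literals suffice where raw strings (r#"..."#) were previously used to avoid escaping. A small illustration in plain Rust (not code from the repository):

```rust
fn main() {
    // Before: the embedded source contains double quotes, so a raw string
    // literal avoids having to escape them.
    let old = r#"["able" <= "ablecorp"]["ablecorp"] print;"#;
    // After: the source uses /* */ delimiters, so a plain literal works.
    let new = "[/*able*/ <= /*ablecorp*/][/*ablecorp*/] print;";

    assert!(old.contains('"'));
    assert!(!new.contains('"'));
}
```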