some updates
commit 043d7fcb0d

.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
/target

Cargo.lock (generated, new file, 7 lines)
@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "skylang"
version = "0.1.0"

Cargo.toml (new file, 8 lines)
@@ -0,0 +1,8 @@
[package]
name = "skylang"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]

src/codegen/fasm.rs (new file, 53 lines)
@@ -0,0 +1,53 @@
use crate::parse::ast::*;

pub fn fasm_codegen(expr: Expr) -> String {
    let mut asm = String::new();

    asm.push_str("format ELF64 executable 3\n");
    asm.push_str("segment readable executable\n");
    asm.push_str("entry _start\n");
    asm.push_str("_start:\n");
    match expr {
        Expr::MathExpr(e) => {
            asm.push_str(format!("\tmov r10, {:?}\n", e.left).as_str());
            asm.push_str(format!("\tmov r11, {:?}\n", e.right).as_str());
            match e.operator {
                // If the operator is addition.
                MathOperator::OP_ADD => {
                    asm.push_str("\tadd r10, r11\n");
                    asm.push_str("\tmov rax, r10\n");
                },
                // If the operator is multiplication.
                MathOperator::OP_MULT => {
                    asm.push_str("\timul r10, r11\n");
                    asm.push_str("\tmov rax, r10\n");
                },
                // If the operator is division.
                MathOperator::OP_DIV => {
                    asm.push_str("\tmov rax, r10\n");
                    asm.push_str("\tmov rdx, r11\n");
                    asm.push_str("\tidiv r10, r11\n");
                    asm.push_str("\tmov rax, r10\n");
                },
                // If the operator is subtraction.
                MathOperator::OP_SUB => {
                    asm.push_str("\tsub r10, r11\n");
                    asm.push_str("\tmov rax, r10\n");
                },
                // If the operator is modulo.
                MathOperator::OP_MOD => {
                    asm.push_str("\tmov rax, r10\n");
                    asm.push_str("\tmov rdx, r11\n");
                    asm.push_str("\tidiv r10, r11\n");
                    asm.push_str("\tmov rax, rdx\n");

                },
                _ => unimplemented!("sorry unimplemented"),
            }
        },
        _ => unimplemented!("sorry unimplemented"),
    }

    println!("{}", asm);
    asm
}
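
A caveat on the OP_DIV and OP_MOD arms above: x86-64 idiv takes a single operand and divides the 128-bit value in rdx:rax by it, so the two-operand form `idiv r10, r11` emitted here would not assemble under FASM. A minimal sketch of what a working signed-division emitter could look like (hypothetical helper, not part of this commit; the register choice mirrors the code above):

// Hypothetical helper: emits a signed division that FASM accepts.
// Assumes the left operand is already in r10 and the right in r11.
fn emit_signed_div(asm: &mut String) {
    asm.push_str("\tmov rax, r10\n"); // dividend into rax
    asm.push_str("\tcqo\n");          // sign-extend rax into rdx:rax
    asm.push_str("\tidiv r11\n");     // quotient ends up in rax, remainder in rdx
}

For the modulo case, the same sequence applies, followed by moving rdx (the remainder) into rax.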

src/codegen/fasm.rs~ (new file, empty)

src/codegen/fasmarm.rs (new file, empty)

src/codegen/mod.rs (new file, 1 line)
@@ -0,0 +1 @@
pub mod fasm;

src/codegen/mod.rs~ (new file, empty)

src/lex/mod.rs (new file, 2 lines)
@@ -0,0 +1,2 @@
pub mod tok;
pub mod parse;

src/lex/mod.rs~ (new file, 1 line)
@@ -0,0 +1 @@
pub mod tok;

src/lex/parse.rs (new file, 156 lines)
@@ -0,0 +1,156 @@
#![allow(unused)]

use super::tok::*;


pub fn match_single_char<'a>(word: &'a str) -> Option<Token<'a>> {
    macro_rules! tok {
        ($tt:expr) => {
            Some(Token::new($tt, word))
        };
    };

    let tok = match word {
        ";" => tok!(Semicolon),
        "=" => tok!(Equal),
        "(" => tok!(LeftParen),
        ")" => tok!(RightParen),
        "{" => tok!(LeftBrace),
        "}" => tok!(RightBrace),
        "," => tok!(Comma),
        "." => tok!(Dot),
        "-" => tok!(Minus),
        "+" => tok!(Plus),
        "/" => tok!(Slash),
        "*" => tok!(Star),
        "%" => tok!(Percent),
        "!" => tok!(Bang),
        ":" => tok!(Colon),
        "<" => tok!(Less),
        ">" => tok!(Greater),

        _ => None
    };

    tok
}

pub fn match_keyword<'a>(word: &'a str) -> Option<Token<'a>> {
    macro_rules! tok {
        ($tt:expr) => {
            Some(Token::new($tt, word))
        };
    };

    let tok = match word {
        "fn" => tok!(Fn),
        "let" => tok!(Let),
        "if" => tok!(If),
        "else" => tok!(Else),
        "while" => tok!(While),
        "elif" => tok!(Elif),
        "return" => tok!(Return),
        "for" => tok!(For),
        "in" => tok!(In),
        "break" => tok!(Break),
        "continue" => tok!(Continue),
        "true" => tok!(True),
        "false" => tok!(False),

        _ => None
    };

    tok
}

pub fn match_two_char<'a>(word: &'a str) -> Option<Token<'a>> {
    macro_rules! tok {
        ($tt:expr) => {
            Some(Token::new($tt, word))
        };
    };

    let tok = match word {
        "==" => tok!(EqualEqual),
        "!=" => tok!(BangEqual),
        "<=" => tok!(LessEqual),
        ">=" => tok!(GreaterEqual),

        _ => None
    };

    tok
}

pub fn match_string_literal<'a>(word: &'a str) -> Option<Token<'a>> {
    macro_rules! tok {
        ($tt:expr) => {
            Some(Token::new($tt, word))
        };
    };


    let mut chars = word.chars();

    if word.starts_with("\"") {
        chars.next();
        while let Some(char) = chars.next() {
            if char == '\"' {
                return tok!(String);
            }
        }
    }
    if word.starts_with("\'") {
        while let Some(char) = chars.next() {
            if char == '\'' {
                return tok!(String);
            }
        }
    }

    None
}

pub fn match_int_literal<'a>(word: &'a str) -> Option<Token<'a>> {
    macro_rules! tok {
        ($tt:expr) => {
            Some(Token::new($tt, word))
        };
    };

    let mut chars = word.chars();
    let mut tok = None;
    while let Some(char) = chars.next() {
        if char.is_digit(10) {
            tok = tok!(Number);
        } else {
            return None;
        }
    }

    tok
}

pub fn match_identifier<'a>(word: &'a str) -> Option<Token<'a>> {
    macro_rules! tok {
        ($tt:expr) => {
            Some(Token::new($tt, word))
        };
    };

    let mut chars = word.chars().peekable();
    let mut tok: Option<Token<'a>> = None;
    if chars.peek().unwrap_or(&'❌').is_ascii_alphabetic() {
        while let Some(char) = chars.next() {
            if char.is_ascii() && match_single_char(char.to_string().as_str()).is_none() {
                tok = tok!(Identifier);
            } else {
                return None;
            }
        }
    } else {
        return None;
    }

    tok
}
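
Each matcher above classifies one already-split word, so a natural driver tries the more specific matchers first and falls back to identifiers last. A minimal sketch (hypothetical helper, not part of this commit; assumes it lives in the same module so the functions above are in scope):

// Hypothetical driver over the matchers defined above.
pub fn match_word<'a>(word: &'a str) -> Option<Token<'a>> {
    match_keyword(word)
        .or_else(|| match_two_char(word))
        .or_else(|| match_single_char(word))
        .or_else(|| match_string_literal(word))
        .or_else(|| match_int_literal(word))
        .or_else(|| match_identifier(word))
}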

src/lex/parse.rs~ (new file, 149 lines)
@@ -0,0 +1,149 @@
#![allow(unused)]

use super::tok::*;


fn check_single_char<'a>(word: &'a str) -> Option<Token<'a>> {
    macro_rules! tok {
        ($tt:expr) => {
            Some(Token::new($tt, word))
        };
    };

    let tok = match word {
        ";" => tok!(Semicolon),
        "=" => tok!(Equal),
        "(" => tok!(LeftParen),
        ")" => tok!(RightParen),
        "{" => tok!(LeftBrace),
        "}" => tok!(RightBrace),
        "," => tok!(Comma),
        "." => tok!(Dot),
        "-" => tok!(Minus),
        "+" => tok!(Plus),
        "/" => tok!(Slash),
        "*" => tok!(Star),
        "%" => tok!(Percent),
        "!" => tok!(Bang),
        ":" => tok!(Colon),
        "<" => tok!(Less),
        ">" => tok!(Greater),

        _ => None
    };

    tok
}

fn check_keyword<'a>(word: &'a str) -> Option<Token<'a>> {
    macro_rules! tok {
        ($tt:expr) => {
            Some(Token::new($tt, word))
        };
    };

    let tok = match word {
        "fn" => tok!(Fn),
        "let" => tok!(Let),
        "if" => tok!(If),
        "else" => tok!(Else),
        "while" => tok!(While),
        "elif" => tok!(Elif),
        "return" => tok!(Return),
        "for" => tok!(For),
        "in" => tok!(In),
        "break" => tok!(Break),
        "continue" => tok!(Continue),
        "true" => tok!(True),
        "false" => tok!(False),

        _ => None
    };

    tok
}

fn check_two_char<'a>(word: &'a str) -> Option<Token<'a>> {
    macro_rules! tok {
        ($tt:expr) => {
            Some(Token::new($tt, word))
        };
    };

    let tok = match word {
        "==" => tok!(EqualEqual),
        "!=" => tok!(BangEqual),
        "<=" => tok!(LessEqual),
        ">=" => tok!(GreaterEqual),

        _ => None
    };

    tok
}

fn match_string_literal<'a>(word: &'a str) -> Option<Token<'a>> {
    macro_rules! tok {
        ($tt:expr) => {
            Some(Token::new($tt, word))
        };
    };


    let chars = word.chars();

    if word.starts_with("\"") {
        while let Some(char) = chars.next() {
            if char == '\"' {
                return tok!(String);
            }
        }
    }
    if word.starts_with("\'") {
        while let Some(char) = chars.next() {
            if char == '\'' {
                return tok!(String);
            }
        }
    }

    None
}

fn match_int_literal<'a>(word: &'a str) -> Option<Token<'a>> {
    macro_rules! tok {
        ($tt:expr) => {
            Some(Token::new($tt, word))
        };
    };

    let chars = word.chars();

    while let Some(char) = chars.next() {
        if char.is_digit(10) {
            return tok!(Number);
        } else {
            return None;
        }
    }

    None
}

fn match_identifier<'a>(word: &'a str) -> Option<Token<'a>> {
    macro_rules! tok {
        ($tt:expr) => {
            Some(Token::new($tt, word))
        };
    };

    let chars = word.chars();

    if chars.next().is_ascii_alphabetic() {
        while let Some(char) = chars.next() {
            if chars.next().is_ascii() {

            }
        }
    }
}

src/lex/tok.rs (new file, 128 lines)
@@ -0,0 +1,128 @@
#![allow(unused)]
pub use TokenType::*;
use super::parse::*;

#[derive(Debug)]
pub struct Token<'a> {
    tt: TokenType,
    word: &'a str,
}

#[derive(Debug)]
pub enum TokenType {
    EOF,

    // SINGLE CHARACTER TOKENS
    Semicolon, // ;
    Equal, // =
    LeftParen, // (
    RightParen, // )
    LeftBrace, // {
    RightBrace, // }
    Comma, // ,
    Dot, // .
    Minus, // -
    Plus, // +
    Slash, // /
    Star, // *
    Percent, // %
    Bang, // !
    Colon, // :
    Less, // <
    Greater, // >

    // KEYWORDS
    Fn, // fn
    Let, // let
    If, // if
    Else, // else
    While, // while
    Elif, // elif
    Return, // return
    For, // for
    In, // in
    Break, // break
    Continue, // continue

    // TWO CHARACTER TOKENS
    EqualEqual, // ==
    BangEqual, // !=
    LessEqual, // <=
    GreaterEqual, // >=

    // LITERALS
    String, // A string literal.
    Number, // An integer.
    Identifier, // An identifier.
    True, // true
    False, // false
    Null, // None

    // ERROR
    Error, // A syntax error.
}

#[derive(Debug)]
pub struct Lexer<'a> {
    source: &'a str,
    tokens: Vec<Token<'a>>,
    current: usize,
    after: &'a str
}

impl<'a> Lexer<'a> {
    pub fn new() -> Self {
        Lexer {
            source: "",
            tokens: Vec::new(),
            current: 0,
            after: ""
        }
    }
}

impl<'a> std::iter::Iterator for Lexer<'a> {
    type Item = Option<char>;

    fn next(&mut self) -> Option<Self::Item> {
        unimplemented!("Iterating over lexer is not implemented.");
    }
}

impl<'a> From<&'a str> for Lexer<'a> {
    fn from(value: &'a str) -> Self {
        Lexer {
            source: value,
            tokens: Vec::new(),
            current: 0,
            after: value
        }
    }
}

impl<'a> From<&'a std::string::String> for Lexer<'a> {
    fn from(value: &'a std::string::String) -> Self {
        Lexer {
            source: value.as_str(),
            tokens: Vec::new(),
            current: 0,
            after: value.as_str()
        }
    }
}

impl<'a> Token<'a> {
    pub fn new(tt: TokenType, word: &'a str) -> Self {
        Token {
            tt,
            word
        }
    }

    pub fn empty() -> Self {
        Token {
            tt: EOF,
            word: ""
        }
    }
}
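
For reference, a short sketch of how the public constructors above fit together (hypothetical usage, not part of this commit; assumes Token, TokenType and Lexer are in scope):

// Hypothetical usage of the types defined above.
fn demo_tokens() {
    let lexer = Lexer::from("let x = 5;");   // via the From<&'a str> impl
    let keyword = Token::new(Let, "let");    // TokenType::Let, re-exported by `pub use TokenType::*`
    let end = Token::empty();                // EOF token with an empty lexeme
    println!("{:?}\n{:?}\n{:?}", lexer, keyword, end);
}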

src/lex/tok.rs~ (new file, 124 lines)
@@ -0,0 +1,124 @@
#![allow(unused)]
pub use TokenType::*;
use super::parse::*;

#[derive(Debug)]
pub struct Token<'a> {
    tt: TokenType,
    word: &'a str,
}

#[derive(Debug)]
pub enum TokenType {
    EOF,

    // SINGLE CHARACTER TOKENS
    Semicolon, // ;
    Equal, // =
    LeftParen, // (
    RightParen, // )
    LeftBrace, // {
    RightBrace, // }
    Comma, // ,
    Dot, // .
    Minus, // -
    Plus, // +
    Slash, // /
    Star, // *
    Percent, // %
    Bang, // !
    Colon, // :
    Less, // <
    Greater, // >

    // KEYWORDS
    Fn, // fn
    Let, // let
    If, // if
    Else, // else
    While, // while
    Elif, // elif
    Return, // return
    For, // for
    In, // in
    Break, // break
    Continue, // continue

    // TWO CHARACTER TOKENS
    EqualEqual, // ==
    BangEqual, // !=
    LessEqual, // <=
    GreaterEqual, // >=

    // LITERALS
    String, // A string literal.
    Number, // An integer.
    Identifier, // An identifier.
    True, // true
    False, // false
    Null, // None

    // ERROR
    Error, // A syntax error.
}

#[derive(Debug)]
pub struct Lexer<'a> {
    source: &'a str,
    tokens: Vec<Token<'a>>,
    current: usize,
    after: &'a str
}

impl<'a> Lexer<'a> {
    pub fn new() -> Self {
        Lexer {
            source: "",
            tokens: Vec::new(),
            current: 0,
            after: ""
        }
    }
}

impl<'a> std::iter::Iterator for Lexer<'a> {

}

impl<'a> From<&'a str> for Lexer<'a> {
    fn from(value: &'a str) -> Self {
        Lexer {
            source: value,
            tokens: Vec::new(),
            current: 0,
            after: value
        }
    }
}

impl<'a> From<&'a std::string::String> for Lexer<'a> {
    fn from(value: &'a std::string::String) -> Self {
        Lexer {
            source: value.as_str(),
            tokens: Vec::new(),
            current: 0,
            after: value.as_str()
        }
    }
}

impl<'a> Token<'a> {
    pub fn new(tt: TokenType, word: &'a str) -> Self {
        Token {
            tt,
            word
        }
    }

    pub fn empty() -> Self {
        Token {
            tt: EOF,
            word: ""
        }
    }
}

src/main.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
pub mod lex;
pub mod codegen;
use crate::codegen::fasm::*;
use crate::parse::ast::*;
pub mod parse;

fn main() {
    fasm_codegen(Expr::MathExpr(Math {left: 1, right: 2, operator: MathOperator::OP_DIV}));
}
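
For the Math value built here (left: 1, right: 2, OP_DIV), fasm_codegen as written would print roughly the following before returning it (the two-operand idiv is the issue flagged under src/codegen/fasm.rs above):

    format ELF64 executable 3
    segment readable executable
    entry _start
    _start:
        mov r10, 1
        mov r11, 2
        mov rax, r10
        mov rdx, r11
        idiv r10, r11
        mov rax, r10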

src/main.rs~ (new file, 5 lines)
@@ -0,0 +1,5 @@
pub mod lex;

fn main() {
    println!("{:?}", lex::parse::match_identifier("goren-"));
}

src/parse/ast.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
#[derive(Debug)]
pub enum Expr<'a> {
    MathExpr(Math),
    FunCallExpr(FunCall<'a>),
}

// MATH EXPRESSION

#[derive(Debug)]
pub struct Math {
    pub left: i64,
    pub right: i64,
    pub operator: MathOperator
}

#[derive(Debug)]
pub enum MathOperator {
    OP_ADD, // Addition
    OP_SUB, // Subtraction
    OP_DIV, // Division
    OP_MULT, // Multiplication
    OP_MOD, // Modulo
}

// FUNCTIONS

#[derive(Debug)]
pub struct FunCall<'a> {
    name: &'a str,
    params: Vec<FunParam<'a>>,
}

#[derive(Debug)]
pub struct FunDefention<'a> {
    name: &'a str,
    params: Vec<FunParam<'a>>,
    contents: Vec<Expr<'a>>,
    return_value: Expr<'a>,
}

#[derive(Debug)]
pub struct FunParam<'a> {
    name: &'a str,
}
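
Since Math exposes public fields while the function-related structs keep theirs private, the math path is the only variant constructible from outside this module for now. A minimal construction sketch (hypothetical, mirroring what main.rs does):

// Hypothetical example of building the AST value fed to the code generator.
fn example_expr() -> Expr<'static> {
    Expr::MathExpr(Math {
        left: 1,
        right: 2,
        operator: MathOperator::OP_ADD,
    })
}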

src/parse/ast.rs~ (new file, empty)

src/parse/mod.rs (new file, 1 line)
@@ -0,0 +1 @@
pub mod ast;

src/parse/mod.rs~ (new file, empty)

tags (new file, 166 lines)
@@ -0,0 +1,166 @@
!_TAG_EXTRA_DESCRIPTION anonymous /Include tags for non-named objects like lambda/
!_TAG_EXTRA_DESCRIPTION fileScope /Include tags of file scope/
!_TAG_EXTRA_DESCRIPTION pseudo /Include pseudo tags/
!_TAG_EXTRA_DESCRIPTION subparser /Include tags generated by subparsers/
!_TAG_FIELD_DESCRIPTION epoch /the last modified time of the input file (only for F\/file kind tag)/
!_TAG_FIELD_DESCRIPTION file /File-restricted scoping/
!_TAG_FIELD_DESCRIPTION input /input file/
!_TAG_FIELD_DESCRIPTION name /tag name/
!_TAG_FIELD_DESCRIPTION pattern /pattern/
!_TAG_FIELD_DESCRIPTION typeref /Type and name of a variable or typedef/
!_TAG_FILE_FORMAT 2 /extended format; --format=1 will not append ;" to lines/
!_TAG_FILE_SORTED 1 /0=unsorted, 1=sorted, 2=foldcase/
!_TAG_KIND_DESCRIPTION!D M,module /modules/
!_TAG_KIND_DESCRIPTION!D T,template /templates/
!_TAG_KIND_DESCRIPTION!D V,version /version statements/
!_TAG_KIND_DESCRIPTION!D X,mixin /mixins/
!_TAG_KIND_DESCRIPTION!D a,alias /aliases/
!_TAG_KIND_DESCRIPTION!D c,class /classes/
!_TAG_KIND_DESCRIPTION!D e,enumerator /enumerators (values inside an enumeration)/
!_TAG_KIND_DESCRIPTION!D f,function /function definitions/
!_TAG_KIND_DESCRIPTION!D g,enum /enumeration names/
!_TAG_KIND_DESCRIPTION!D i,interface /interfaces/
!_TAG_KIND_DESCRIPTION!D m,member /class, struct, and union members/
!_TAG_KIND_DESCRIPTION!D n,namespace /namespaces/
!_TAG_KIND_DESCRIPTION!D s,struct /structure names/
!_TAG_KIND_DESCRIPTION!D u,union /union names/
!_TAG_KIND_DESCRIPTION!D v,variable /variable definitions/
!_TAG_KIND_DESCRIPTION!JSON a,array /arrays/
!_TAG_KIND_DESCRIPTION!JSON b,boolean /booleans/
!_TAG_KIND_DESCRIPTION!JSON n,number /numbers/
!_TAG_KIND_DESCRIPTION!JSON o,object /objects/
!_TAG_KIND_DESCRIPTION!JSON s,string /strings/
!_TAG_KIND_DESCRIPTION!JSON z,null /nulls/
!_TAG_KIND_DESCRIPTION!Rust C,constant /A constant/
!_TAG_KIND_DESCRIPTION!Rust M,macro /Macro Definition/
!_TAG_KIND_DESCRIPTION!Rust P,method /A method/
!_TAG_KIND_DESCRIPTION!Rust c,implementation /implementation/
!_TAG_KIND_DESCRIPTION!Rust e,enumerator /An enum variant/
!_TAG_KIND_DESCRIPTION!Rust f,function /Function/
!_TAG_KIND_DESCRIPTION!Rust g,enum /Enum/
!_TAG_KIND_DESCRIPTION!Rust i,interface /trait interface/
!_TAG_KIND_DESCRIPTION!Rust m,field /A struct field/
!_TAG_KIND_DESCRIPTION!Rust n,module /module/
!_TAG_KIND_DESCRIPTION!Rust s,struct /structural type/
!_TAG_KIND_DESCRIPTION!Rust t,typedef /Type Alias/
!_TAG_KIND_DESCRIPTION!Rust v,variable /Global variable/
!_TAG_OUTPUT_EXCMD mixed /number, pattern, mixed, or combineV2/
!_TAG_OUTPUT_FILESEP slash /slash or backslash/
!_TAG_OUTPUT_MODE u-ctags /u-ctags or e-ctags/
!_TAG_OUTPUT_VERSION 0.0 /current.age/
!_TAG_PARSER_VERSION!D 0.0 /current.age/
!_TAG_PARSER_VERSION!JSON 0.0 /current.age/
!_TAG_PARSER_VERSION!Rust 0.0 /current.age/
!_TAG_PATTERN_LENGTH_LIMIT 96 /0 for no limit/
!_TAG_PROC_CWD /home/goren/Code/skylang/ //
!_TAG_PROGRAM_AUTHOR Universal Ctags Team //
!_TAG_PROGRAM_NAME Universal Ctags /Derived from Exuberant Ctags/
!_TAG_PROGRAM_URL https://ctags.io/ /official site/
!_TAG_PROGRAM_VERSION 6.0.0 /p6.0.20221218.0/
0 target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" o array:local
0 target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" o array:local
15729799797837862367 target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" o object:outputs
4614504638168534921 target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" o object:outputs
Bang src/lex/tok.rs /^ Bang, \/\/ !$/;" e enum:TokenType
BangEqual src/lex/tok.rs /^ BangEqual, \/\/ !=$/;" e enum:TokenType
Break src/lex/tok.rs /^ Break, \/\/ break$/;" e enum:TokenType
CheckDepInfo target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" o object:local.0
CheckDepInfo target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" o object:local.0
Colon src/lex/tok.rs /^ Colon, \/\/ :$/;" e enum:TokenType
Comma src/lex/tok.rs /^ Comma, \/\/ ,$/;" e enum:TokenType
Continue src/lex/tok.rs /^ Continue, \/\/ continue$/;" e enum:TokenType
Dot src/lex/tok.rs /^ Dot, \/\/ .$/;" e enum:TokenType
EOF src/lex/tok.rs /^ EOF,$/;" e enum:TokenType
Elif src/lex/tok.rs /^ Elif, \/\/ elif$/;" e enum:TokenType
Else src/lex/tok.rs /^ Else, \/\/ else$/;" e enum:TokenType
Equal src/lex/tok.rs /^ Equal, \/\/ =$/;" e enum:TokenType
EqualEqual src/lex/tok.rs /^ EqualEqual, \/\/ ==$/;" e enum:TokenType
Error src/lex/tok.rs /^ Error, \/\/ A syntax error.$/;" e enum:TokenType
False src/lex/tok.rs /^ False, \/\/ false$/;" e enum:TokenType
Fn src/lex/tok.rs /^ Fn, \/\/ fn$/;" e enum:TokenType
For src/lex/tok.rs /^ For, \/\/ for$/;" e enum:TokenType
Greater src/lex/tok.rs /^ Greater, \/\/ >$/;" e enum:TokenType
GreaterEqual src/lex/tok.rs /^ GreaterEqual, \/\/ >=$/;" e enum:TokenType
Identifier src/lex/tok.rs /^ Identifier, \/\/ An identifier.$/;" e enum:TokenType
If src/lex/tok.rs /^ If, \/\/ if$/;" e enum:TokenType
In src/lex/tok.rs /^ In, \/\/ in$/;" e enum:TokenType
LeftBrace src/lex/tok.rs /^ LeftBrace, \/\/ {$/;" e enum:TokenType
LeftParen src/lex/tok.rs /^ LeftParen, \/\/ ($/;" e enum:TokenType
Less src/lex/tok.rs /^ Less, \/\/ <$/;" e enum:TokenType
LessEqual src/lex/tok.rs /^ LessEqual, \/\/ <=$/;" e enum:TokenType
Let src/lex/tok.rs /^ Let, \/\/ let$/;" e enum:TokenType
Lexer src/lex/tok.rs /^impl<'a> From<&'a std::string::String> for Lexer<'a> {$/;" c
Lexer src/lex/tok.rs /^impl<'a> From<&'a str> for Lexer<'a> {$/;" c
Lexer src/lex/tok.rs /^impl<'a> Lexer<'a> {$/;" c
Lexer src/lex/tok.rs /^pub struct Lexer<'a> {$/;" s
Minus src/lex/tok.rs /^ Minus, \/\/ -$/;" e enum:TokenType
Null src/lex/tok.rs /^ Null, \/\/ None$/;" e enum:TokenType
Number src/lex/tok.rs /^ Number, \/\/ An integer.$/;" e enum:TokenType
Percent src/lex/tok.rs /^ Percent, \/\/ %$/;" e enum:TokenType
Plus src/lex/tok.rs /^ Plus, \/\/ +$/;" e enum:TokenType
Return src/lex/tok.rs /^ Return, \/\/ return$/;" e enum:TokenType
RightBrace src/lex/tok.rs /^ RightBrace, \/\/ }$/;" e enum:TokenType
RightParen src/lex/tok.rs /^ RightParen, \/\/ )$/;" e enum:TokenType
Semicolon src/lex/tok.rs /^ Semicolon, \/\/ ;$/;" e enum:TokenType
Slash src/lex/tok.rs /^ Slash, \/\/ \/$/;" e enum:TokenType
Star src/lex/tok.rs /^ Star, \/\/ *$/;" e enum:TokenType
String src/lex/tok.rs /^ String, \/\/ A string literal.$/;" e enum:TokenType
Token src/lex/tok.rs /^impl<'a> Token<'a> {$/;" c
Token src/lex/tok.rs /^pub struct Token<'a> {$/;" s
TokenType src/lex/tok.rs /^pub enum TokenType {$/;" g
True src/lex/tok.rs /^ True, \/\/ true$/;" e enum:TokenType
While src/lex/tok.rs /^ While, \/\/ while$/;" e enum:TokenType
after src/lex/tok.rs /^ after: &'a str$/;" m struct:Lexer
check_single_char src/lex/parse.rs /^fn check_single_char<'a>(word: &'a str) -> Option<Token<'a>> {$/;" f
code target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" n object:outputs.15729799797837862367
code target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" n object:outputs.4614504638168534921
compile_kind target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" n
compile_kind target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" n
config target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" n
config target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" n
current src/lex/tok.rs /^ current: usize,$/;" m struct:Lexer
dep_info target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" s object:local.0.CheckDepInfo
dep_info target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" s object:local.0.CheckDepInfo
deps target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" a
deps target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" a
empty src/lex/tok.rs /^ pub fn empty() -> Self {$/;" P implementation:Token
features target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" s
features target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" s
from src/lex/tok.rs /^ fn from(value: &'a std::string::String) -> Self {$/;" P implementation:Lexer
from src/lex/tok.rs /^ fn from(value: &'a str) -> Self {$/;" P implementation:Lexer
lex src/main.rs /^pub mod lex;$/;" n
local target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" a
local target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" a
main src/main.rs /^fn main() {$/;" f
metadata target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" n
metadata target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" n
new src/lex/tok.rs /^ pub fn new(tt: TokenType, word: &'a str) -> Self {$/;" P implementation:Token
new src/lex/tok.rs /^ pub fn new() -> Self {$/;" P implementation:Lexer
outputs target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" o
parse src/lex/mod.rs /^pub mod parse;$/;" n
path target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" n
path target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" n
profile target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" n
profile target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" n
rustc target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" n
rustc target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" n
rustc_fingerprint target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" n
rustflags target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" a
rustflags target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" a
source src/lex/tok.rs /^ source: &'a str,$/;" m struct:Lexer
status target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" s object:outputs.15729799797837862367
status target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" s object:outputs.4614504638168534921
stderr target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" s object:outputs.15729799797837862367
stderr target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" s object:outputs.4614504638168534921
stdout target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" s object:outputs.15729799797837862367
stdout target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" s object:outputs.4614504638168534921
success target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" b object:outputs.15729799797837862367
success target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" b object:outputs.4614504638168534921
successes target/.rustc_info.json /^{"rustc_fingerprint":8401154174455286909,"outputs":{"4614504638168534921":{"success":true,"statu/;" o
target target/debug/.fingerprint/skylang-2d34902ac17f98ba/test-bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":18326522262/;" n
target target/debug/.fingerprint/skylang-615655681e1bc164/bin-skylang.json /^{"rustc":16895609601713598366,"features":"[]","target":5185372002992495166,"profile":13126374248/;" n
tok src/lex/mod.rs /^pub mod tok;$/;" n
tok src/lex/parse.rs /^ macro_rules! tok {$/;" M function:check_single_char
tokens src/lex/tok.rs /^ tokens: Vec<Token<'a>>,$/;" m struct:Lexer
tt src/lex/tok.rs /^ tt: TokenType,$/;" m struct:Token
word src/lex/tok.rs /^ word: &'a str,$/;" m struct:Token