adding slices

Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
Jakub Doka 2024-12-21 23:44:33 +01:00
parent 418fd0039e
commit 5275a7e0fd
No known key found for this signature in database
GPG key ID: C6E9A89936B8C143
11 changed files with 350 additions and 126 deletions


@ -342,7 +342,7 @@ main := fn(): uint {
Foo := struct {a: ^uint, b: uint}
no_foo := fn(): ?Foo return null
new_foo := fn(): ?Foo return .(&0, 0)
use_foo := fn(foo: Foo, str: ^u8): void {
use_foo := fn(foo: Foo, str: []u8): void {
}
Bar := struct {a: ?^uint, b: uint}
@ -453,7 +453,6 @@ main := fn(): uint {
ecall_that_returns_uint := @as(uint, @eca(1, foo.Type.(10, 20), 5, 6))
embedded_array := @as([15]u8, @embed("text.txt"))
two_fields := @lenof(foo.Type)
string_length := @lenof("foo\0")
the_struct_kind := @kindof(foo.Type)
return @inline(foo.foo)
}
@ -501,9 +500,9 @@ str_len := fn(str: ^u8): uint {
main := fn(): uint {
// when a string ends with '\0' it's a C string and thus the type is '^u8'
some_str := "abඞ\n\r\t\{35}\{36373839}\0"
some_str := "abඞ\n\r\t\{35}\{36373839}\0".ptr
len := str_len(some_str)
some_other_str := "fff\0"
some_other_str := "fff\0".ptr
lep := str_len(some_other_str)
return lep + len
}
@ -557,6 +556,22 @@ pass := fn(arr: ^[3]uint): uint {
}
```
#### slices
```hb
main := fn(): uint {
one := &10
from_ptr: []uint = one[..1]
arr := .[0, 1, 2, 3]
start := arr[..2]
mid := arr[1..3]
end := arr[2..]
all := arr[..]
return start[0] + mid[0] + end[0] + all[3] + all.len - from_ptr[0]
}
```
#### inline
```hb
main := fn(): uint {
@ -723,10 +738,10 @@ main := fn(): uint {
#### string_array
```hb
strings := (^u8).["abcdefshijklmnop\0", "abcdefghijklnnop\0", "abcdefshijklmnop\0", "abcdefghijklmnop\0", "abcdefghijflmnop\0", "dbcdefghijklmnop\0", "abcdefghijklmnop\0"]
main := fn(): uint {
return @bitcast(strings[0])
strings := (^u8).["abcdefshijklmnop\0".ptr, "abcdefghijklnnop\0".ptr, "abcdefshijklmnop\0".ptr, "abcdefghijklmnop\0".ptr, "abcdefghijflmnop\0".ptr, "dbcdefghijklmnop\0".ptr, "abcdefghijklmnop\0".ptr]
return *strings[0]
}
```
@ -822,12 +837,12 @@ Struct2 := struct {
}
main := fn(): void {
lhs := Struct.("Hello, World!\0")
rhs := Struct.("Goodbye, World!\0")
lhs := Struct.("Hello, World!\0".ptr)
rhs := Struct.("Goodbye, World!\0".ptr)
lhs.print(rhs)
lhs2 := Struct2.("Hello, World!\0")
rhs2 := Struct2.("Goodbye, World!\0")
lhs2 := Struct2.("Hello, World!\0".ptr)
rhs2 := Struct2.("Goodbye, World!\0".ptr)
lhs2.print2(&rhs2)
}
```
@ -1684,7 +1699,7 @@ main := fn(): void {
#### request_page
```hb
request_page := fn(page_count: u8): ^u8 {
msg := "\{00}\{01}xxxxxxxx\0"
msg := "\{00}\{01}xxxxxxxx\0".ptr
msg_page_count := msg + 1;
*msg_page_count = page_count
return @eca(3, 2, msg, 12)


@ -51,27 +51,29 @@ enum TokenGroup {
Ctor,
}
fn token_group(kind: TokenKind) -> TokenGroup {
use {crate::lexer::TokenKind::*, TokenGroup as TG};
match kind {
BSlash | Pound | Eof | Ct => TG::Blank,
Comment => TG::Comment,
Directive => TG::Directive,
Colon => TG::Colon,
Semi | Comma => TG::Comma,
Dot => TG::Dot,
Ctor | Arr | Tupl | TArrow => TG::Ctor,
LParen | RParen => TG::Paren,
LBrace | RBrace | LBrack | RBrack => TG::Bracket,
Number | Float => TG::Number,
Under | CtIdent | Ident => TG::Identifier,
Tick | Tilde | Que | Not | Mod | Band | Bor | Xor | Mul | Add | Sub | Div | Shl | Shr
| Or | And | Lt | Gt | Eq | Le | Ge | Ne => TG::Op,
Decl | Assign | BorAss | XorAss | BandAss | AddAss | SubAss | MulAss | DivAss | ModAss
| ShrAss | ShlAss => TG::Assign,
DQuote | Quote => TG::String,
Slf | Defer | Return | If | Else | Loop | Break | Continue | Fn | Idk | Die | Struct
| Packed | True | False | Null | Match | Enum | Union | CtLoop => TG::Keyword,
impl TokenKind {
fn to_highlight_group(self) -> TokenGroup {
use {TokenGroup as TG, TokenKind::*};
match self {
BSlash | Pound | Eof | Ct => TG::Blank,
Comment => TG::Comment,
Directive => TG::Directive,
Colon => TG::Colon,
Semi | Comma => TG::Comma,
Dot => TG::Dot,
Ctor | Arr | Tupl | TArrow | Range => TG::Ctor,
LParen | RParen => TG::Paren,
LBrace | RBrace | LBrack | RBrack => TG::Bracket,
Number | Float => TG::Number,
Under | CtIdent | Ident => TG::Identifier,
Tick | Tilde | Que | Not | Mod | Band | Bor | Xor | Mul | Add | Sub | Div | Shl
| Shr | Or | And | Lt | Gt | Eq | Le | Ge | Ne => TG::Op,
Decl | Assign | BorAss | XorAss | BandAss | AddAss | SubAss | MulAss | DivAss
| ModAss | ShrAss | ShlAss => TG::Assign,
DQuote | Quote => TG::String,
Slf | Defer | Return | If | Else | Loop | Break | Continue | Fn | Idk | Die
| Struct | Packed | True | False | Null | Match | Enum | Union | CtLoop => TG::Keyword,
}
}
}
@ -89,7 +91,7 @@ pub fn get_token_kinds(mut source: &mut [u8]) -> usize {
let start = token.start as usize;
let end = token.end as usize;
source[..start].fill(0);
source[start..end].fill(token_group(token.kind) as u8);
source[start..end].fill(token.kind.to_highlight_group() as u8);
source = &mut source[end..];
}
len
@ -222,12 +224,12 @@ impl<'a> Formatter<'a> {
f.write_str(sep)?;
}
if let Some(expr) = list.get(i + 1)
&& let Some(rest) = self.source.get(expr.posi() as usize..)
&& let Some(prev) = self.source.get(..expr.posi() as usize)
{
if sep.is_empty() && insert_needed_semicolon(rest) {
if sep.is_empty() && prev.trim_end().ends_with(';') {
f.write_str(";")?;
}
if preserve_newlines(&self.source[..expr.posi() as usize]) > 1 {
if count_trailing_newlines(prev) > 1 {
f.write_str("\n")?;
}
}
@ -305,10 +307,6 @@ impl<'a> Formatter<'a> {
}
match *expr {
Expr::Ct { value, .. } => {
f.write_str("$: ")?;
self.fmt(value, f)
}
Expr::Defer { value, .. } => {
f.write_str("defer ")?;
self.fmt(value, f)
@ -324,6 +322,16 @@ impl<'a> Formatter<'a> {
f.write_str(".")?;
f.write_str(field)
}
Expr::Range { start, end, .. } => {
if let Some(start) = start {
self.fmt(start, f)?;
}
f.write_str("..")?;
if let Some(end) = end {
self.fmt(end, f)?;
}
Ok(())
}
Expr::Directive { name, args, .. } => {
f.write_str("@")?;
f.write_str(name)?;
@ -424,10 +432,10 @@ impl<'a> Formatter<'a> {
self.fmt(size, f)?;
}
f.write_str("]")?;
self.fmt(item, f)
self.fmt_paren(item, f, unary)
}
Expr::Index { base, index } => {
self.fmt(base, f)?;
self.fmt_paren(base, f, postfix)?;
f.write_str("[")?;
self.fmt(index, f)?;
f.write_str("]")
@ -550,7 +558,7 @@ impl<'a> Formatter<'a> {
prev.rfind(|c: char| c.is_ascii_whitespace()).map_or(prev.len(), |i| i + 1);
let exact_bound = lexer::Lexer::new(&prev[estimate_bound..]).last().start;
prev = &prev[..exact_bound as usize + estimate_bound];
if preserve_newlines(prev) > 0 {
if count_trailing_newlines(prev) > 0 {
f.write_str("\n")?;
for _ in 0..self.depth + 1 {
f.write_str("\t")?;
@ -575,15 +583,10 @@ impl<'a> Formatter<'a> {
}
}
pub fn preserve_newlines(source: &str) -> usize {
pub fn count_trailing_newlines(source: &str) -> usize {
source[source.trim_end().len()..].bytes().filter(|&c| c == b'\n').count()
}
pub fn insert_needed_semicolon(source: &str) -> bool {
let kind = lexer::Lexer::new(source).eat().kind;
kind.precedence().is_some() || matches!(kind, TokenKind::Ctor | TokenKind::Tupl)
}
impl core::fmt::Display for parser::Ast {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt_file(self.exprs(), &self.file, f)
@ -594,14 +597,14 @@ pub fn fmt_file(exprs: &[Expr], file: &str, f: &mut impl fmt::Write) -> fmt::Res
for (i, expr) in exprs.iter().enumerate() {
Formatter::new(file).fmt(expr, f)?;
if let Some(expr) = exprs.get(i + 1)
&& let Some(rest) = file.get(expr.pos() as usize..)
&& let Some(prefix) = file.get(..expr.pos() as usize)
{
if insert_needed_semicolon(rest) {
write!(f, ";")?;
if prefix.trim_end().ends_with(';') {
f.write_str(";")?;
}
if preserve_newlines(&file[..expr.pos() as usize]) > 1 {
writeln!(f)?;
if count_trailing_newlines(prefix) > 1 {
f.write_str("\n")?;
}
}
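
A quick check of the renamed helper's behaviour: it counts the newline characters after the last non-whitespace character, and the callers above emit an extra blank line only when that count exceeds one. The function body below is copied from this diff; the assertions are illustrative.

```rust
// count_trailing_newlines as introduced above; callers treat a count > 1
// as "there was a blank line here, keep it".
pub fn count_trailing_newlines(source: &str) -> usize {
    source[source.trim_end().len()..].bytes().filter(|&c| c == b'\n').count()
}

fn main() {
    assert_eq!(count_trailing_newlines("a := 1\n\n"), 2); // blank line preserved
    assert_eq!(count_trailing_newlines("a := 1\n"), 1); // single newline, no blank line
    assert_eq!(count_trailing_newlines("a := 1"), 0);
}
```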


@ -140,6 +140,7 @@ pub enum TokenKind {
Tupl,
Arr,
TArrow,
Range,
Or,
And,
@ -350,6 +351,7 @@ gen_token_kind! {
Tupl = ".(",
Arr = ".[",
TArrow = "=>",
Range = "..",
// #define OP: each `#[prec]` delimits a level of precedence, from lowest to highest
#[ops]
#[prec]
@ -432,6 +434,19 @@ impl<'a> Lexer<'a> {
Lexer { pos: self.pos, source: self.source }.eat()
}
fn peek_n<const N: usize>(&self) -> Option<&[u8; N]> {
if core::intrinsics::unlikely(self.pos as usize + N > self.source.len()) {
None
} else {
Some(unsafe {
self.source
.get_unchecked(self.pos as usize..self.pos as usize + N)
.first_chunk()
.unwrap_unchecked()
})
}
}
fn peek(&self) -> Option<u8> {
if core::intrinsics::unlikely(self.pos >= self.source.len() as u32) {
None
@ -500,7 +515,11 @@ impl<'a> Lexer<'a> {
self.advance();
}
if self.advance_if(b'.') {
if self
.peek_n()
.map_or_else(|| self.peek() == Some(b'.'), |&[a, b]| a == b'.' && b != b'.')
{
self.pos += 1;
while let Some(b'0'..=b'9') = self.peek() {
self.advance();
}
@ -553,6 +572,7 @@ impl<'a> Lexer<'a> {
b'.' if self.advance_if(b'{') => T::Ctor,
b'.' if self.advance_if(b'(') => T::Tupl,
b'.' if self.advance_if(b'[') => T::Arr,
b'.' if self.advance_if(b'.') => T::Range,
b'=' if self.advance_if(b'>') => T::TArrow,
b'&' if self.advance_if(b'&') => T::And,
b'|' if self.advance_if(b'|') => T::Or,
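
The two-byte lookahead added above is what keeps `1..3` lexing as `1`, `..`, `3` instead of two floats: after the integer digits, a `.` only starts a fractional part when it is not immediately followed by another `.`. A standalone sketch of that rule (the function and its name are illustrative, not the lexer's):

```rust
// Decide whether the bytes after a number's digits begin a fractional part
// or should be left alone so they lex as the `..` range token.
fn dot_starts_fraction(rest: &[u8]) -> bool {
    match rest {
        [b'.', b'.', ..] => false, // "1..3": leave both dots for Range
        [b'.', ..] => true,        // "1.5", or a trailing "1.": decimal point
        _ => false,
    }
}

fn main() {
    assert!(dot_starts_fraction(b".5 + x")); // 1.5
    assert!(!dot_starts_fraction(b"..3]")); // 1..3
    assert!(dot_starts_fraction(b".")); // "1." at the end of input is still a float
}
```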


@ -127,6 +127,8 @@ pub mod backend {
mod utils;
mod debug {
use {core::fmt::Debug, std::string::String};
pub fn panicking() -> bool {
#[cfg(feature = "std")]
{
@ -139,14 +141,14 @@ mod debug {
}
#[cfg(all(debug_assertions, feature = "std"))]
pub type Trace = std::rc::Rc<std::backtrace::Backtrace>;
pub type Trace = std::rc::Rc<(std::backtrace::Backtrace, String)>;
#[cfg(not(all(debug_assertions, feature = "std")))]
pub type Trace = ();
pub fn trace() -> Trace {
pub fn trace(_hint: impl Debug) -> Trace {
#[cfg(all(debug_assertions, feature = "std"))]
{
std::rc::Rc::new(std::backtrace::Backtrace::capture())
std::rc::Rc::new((std::backtrace::Backtrace::capture(), format!("{_hint:?}")))
}
#[cfg(not(all(debug_assertions, feature = "std")))]
{}


@ -699,7 +699,7 @@ impl Nodes {
if self.free == Nid::MAX {
self.free = self.values.len() as _;
self.values.push(Err((Nid::MAX, debug::trace())));
self.values.push(Err((Nid::MAX, debug::trace(""))));
}
let free = self.free;
@ -775,13 +775,11 @@ impl Nodes {
}
self.remove_node_lookup(target);
let trace = debug::trace(&self.values[target as usize]);
if cfg!(debug_assertions) {
mem::replace(&mut self.values[target as usize], Err((Nid::MAX, debug::trace())))
.unwrap();
mem::replace(&mut self.values[target as usize], Err((Nid::MAX, trace))).unwrap();
} else {
mem::replace(&mut self.values[target as usize], Err((self.free, debug::trace())))
.unwrap();
mem::replace(&mut self.values[target as usize], Err((self.free, trace))).unwrap();
self.free = target;
}
@ -1637,6 +1635,7 @@ impl Nodes {
}
pub fn replace(&mut self, target: Nid, with: Nid) {
self.patch_aclass(target, with);
debug_assert_ne!(target, with, "{:?}", self[target]);
for out in self[target].outputs.clone() {
let index = self[out].inputs.iter().position(|&p| p == target).unwrap();
@ -1981,6 +1980,25 @@ impl Nodes {
self[blocker].peep_triggers.push(target);
}
}
fn patch_aclass(&mut self, target: Nid, with: Nid) {
let (_, region) = self.aclass_index(target);
if region == 0 {
return;
}
fn patch_aclass_inner(s: &mut Nodes, root: Nid, with: Nid, matches: Nid) {
for out in s[root].outputs.clone() {
let (_, region) = s.aclass_index(out);
if region == matches {
s.pass_aclass(with, out);
patch_aclass_inner(s, out, with, matches);
}
}
}
patch_aclass_inner(self, target, with, target);
}
}
impl ops::Index<Nid> for Nodes {


@ -17,6 +17,7 @@ use {
ptr::NonNull,
sync::atomic::AtomicUsize,
},
std::panic,
};
pub type Pos = u32;
@ -31,7 +32,7 @@ pub enum FileKind {
Embed,
}
trait Trans {
pub trait Trans {
fn trans(self) -> Self;
}
@ -308,7 +309,6 @@ impl<'a, 'b> Parser<'a, 'b> {
let prev_captured = self.ctx.captured.len();
let mut must_trail = false;
let mut expr = match token.kind {
T::Ct => E::Ct { pos, value: self.ptr_expr()? },
T::Defer => E::Defer { pos, value: self.ptr_expr()? },
T::Slf => E::Slf { pos },
T::Directive if self.lexer.slice(token.range()) == "use" => {
@ -491,6 +491,15 @@ impl<'a, 'b> Parser<'a, 'b> {
},
body: self.ptr_expr()?,
},
T::Range => E::Range {
pos: token.start,
start: None,
end: if matches!(self.token.kind, TokenKind::RBrack) {
None
} else {
Some(self.ptr_expr()?)
},
},
T::Ctor => self.ctor(pos, None),
T::Tupl => self.tupl(pos, None, ListKind::Tuple),
T::Arr => self.tupl(pos, None, ListKind::Array),
@ -562,7 +571,14 @@ impl<'a, 'b> Parser<'a, 'b> {
let token = self.token;
if matches!(
token.kind,
T::LParen | T::Ctor | T::Dot | T::Tupl | T::Arr | T::LBrack | T::Colon
T::LParen
| T::Ctor
| T::Dot
| T::Tupl
| T::Arr
| T::LBrack
| T::Colon
| T::Range
) {
self.next();
}
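
Both the prefix arm above and the postfix arm in the next hunk build the same `Expr::Range` node; a missing bound is simply `None`, and the end is also left out when the next token is `]`. A simplified standalone mapping of the four textual forms (the types here are illustrative; the real node stores arena references and a position):

```rust
// Illustrative stand-in for Expr::Range { start, end }.
#[derive(Debug, PartialEq)]
struct Range {
    start: Option<u32>,
    end: Option<u32>,
}

fn main() {
    let forms = [
        ("arr[..]", Range { start: None, end: None }),
        ("arr[2..]", Range { start: Some(2), end: None }),
        ("arr[..3]", Range { start: None, end: Some(3) }),
        ("arr[1..3]", Range { start: Some(1), end: Some(3) }),
    ];
    for (src, node) in forms {
        println!("{src} -> {node:?}");
    }
}
```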
@ -584,6 +600,15 @@ impl<'a, 'b> Parser<'a, 'b> {
self.arena.alloc(index)
},
},
T::Range => E::Range {
pos: token.start,
start: Some(self.arena.alloc(expr)),
end: if matches!(self.token.kind, TokenKind::RBrack) {
None
} else {
Some(self.ptr_expr()?)
},
},
T::Colon => E::BinOp {
left: {
self.declare_rec(&expr, false);
@ -737,7 +762,9 @@ impl<'a, 'b> Parser<'a, 'b> {
) -> &'a [T] {
let mut trailing_sep = false;
let mut view = self.ctx.stack.view();
'o: while (keep_end && self.token.kind != end) || (!keep_end && !self.advance_if(end)) {
'o: while (keep_end && self.token.kind != end)
|| (!keep_end && !self.advance_if(end)) && self.token.kind != TokenKind::Eof
{
let val = match f(self) {
Some(val) => val,
None => {
@ -810,6 +837,9 @@ impl<'a, 'b> Parser<'a, 'b> {
#[track_caller]
fn report(&mut self, pos: Pos, msg: impl fmt::Display) -> Option<!> {
if log::log_enabled!(log::Level::Error) {
if self.ctx.errors.get_mut().len() > 1024 * 10 {
panic!("{}", self.ctx.errors.get_mut());
}
use core::fmt::Write;
writeln!(
self.ctx.errors.get_mut(),
@ -916,11 +946,6 @@ generate_expr! {
/// `OP := grep for `#define OP:`
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Expr<'a> {
/// `'ct' Expr`
Ct {
pos: Pos,
value: &'a Self,
},
/// `'defer' Expr`
Defer {
pos: Pos,
@ -1079,6 +1104,12 @@ generate_expr! {
base: &'a Self,
index: &'a Self,
},
/// `[ Expr ] .. [ Expr ]`
Range {
pos: u32,
start: Option<&'a Self>,
end: Option<&'a Self>,
},
/// `Expr '.' Ident`
Field {
target: &'a Self,


@ -897,16 +897,18 @@ impl<'a> Codegen<'a> {
self.gen_inferred_const(ctx, ty::Id::U8, value)
} else {
if data.last() != Some(&0) {
self.error(pos, "string literal must end with null byte (for now)");
}
let (global, ty) = self.create_string_global(&data);
let len = self.ci.nodes.new_const_lit(ty, data.len() as i64);
data.clear();
self.pool.lit_buf = data;
Some(Value::new(global).ty(ty))
let slc = self.tys.make_array(ty::Id::U8, ArrayLen::MAX);
let mem = self.new_stack(pos, slc);
for (off, value) in [(0u32, Value::ptr(global).ty(ty)), (8, len)] {
let region = self.offset(mem, off);
self.store_mem(region, value.ty, value.id);
}
Some(Value::ptr(mem).ty(slc))
}
}
Expr::Defer { pos, value } => {
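
With this change a string literal no longer evaluates to a bare `^u8`: the codegen allocates a two-word stack slot holding the pointer to the interned data at offset 0 and the byte length at offset 8, typed as a `[]u8` slice, and the `.ptr`/`.len` accessors added further down read those same offsets back. Roughly, in Rust terms (a sketch of the layout only, not a type from this codebase):

```rust
use std::mem::offset_of;

// Rough picture of the slice value built on the stack here:
// data pointer at offset 0, length at offset 8 (on a 64-bit target).
#[repr(C)]
struct RawSlice {
    ptr: *const u8, // read back by `.ptr`
    len: u64,       // read back by `.len`
}

fn main() {
    // the offsets match the 0/8 the codegen stores to above
    assert_eq!(offset_of!(RawSlice, ptr), 0);
    assert_eq!(offset_of!(RawSlice, len), 8);
}
```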
@ -1255,6 +1257,80 @@ impl<'a> Codegen<'a> {
.error(pos, fa!("'{} {op} _' is not supported", self.ty_display(lhs.ty))),
}
}
Expr::Index { base, index: &Expr::Range { start, end, pos } } => {
let mut bs = self.ptr_expr(base)?;
let start = match start {
Some(s) => self.checked_expr(s, ty::Id::UINT, "range start")?.id,
None => self.ci.nodes.new_const(ty::Id::UINT, 0),
};
self.ci.nodes.lock(start);
let end = match end {
Some(e) => self.checked_expr(e, ty::Id::UINT, "range end")?.id,
None => match bs.ty.expand() {
ty::Kind::Slice(s) if let Some(len) = self.tys.ins.slices[s].len() => {
self.ci.nodes.new_const(ty::Id::UINT, len as i64)
}
ty::Kind::Slice(_) => {
// the slice length currently lives at offset 8; this layout might change
let off = self.offset(bs.id, 8);
self.load_mem(off, ty::Id::UINT)
}
ty::Kind::Ptr(_) => {
return self
.error(pos, "upper bound is required when slicing a pointer")
}
_ => NEVER,
},
};
self.ci.nodes.lock(end);
let len = self.ci.nodes.new_node_lit(
ty::Id::UINT,
Kind::BinOp { op: TokenKind::Sub },
[VOID, end, start],
self.tys,
);
self.ci.nodes.lock(len.id);
let elem = match bs.ty.expand() {
ty::Kind::Slice(s) => self.tys.ins.slices[s].elem,
ty::Kind::Ptr(_) => {
if let Some(base) = self.tys.base_of(bs.ty) {
bs.ptr = true;
bs.ty = base;
}
bs.ty
}
_ => {
return self.error(
base.pos(),
fa!(
"cant slice into '{}' which is not array nor slice not a pointer",
self.ty_display(bs.ty)
),
)
}
};
let ptr = self.offset_ptr(bs.id, elem, start);
self.ci.nodes.lock(ptr.id);
let ty = self.tys.make_array(elem, ArrayLen::MAX);
let mem = self.new_stack(pos, ty);
for (off, value) in [(0u32, ptr), (8, len)] {
let region = self.offset(mem, off);
self.store_mem(region, value.ty, value.id);
}
self.ci.nodes.unlock(start);
self.ci.nodes.unlock(len.id);
self.ci.nodes.unlock(end);
self.ci.nodes.unlock(ptr.id);
Some(Value::ptr(mem).ty(ty))
}
Expr::Index { base, index } => {
let mut bs = self.ptr_expr(base)?;
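
The `base[start..end]` arm above fills in missing bounds (0 for the start; the known array length, or the loaded `.len`, for the end, while slicing a raw pointer requires an explicit end). It then computes the new length as `end - start`, offsets the base pointer by `start * size_of(elem)` through `offset_ptr`, and stores the resulting pointer/length pair into a fresh stack slot. A small sketch of just the bound and length arithmetic (plain Rust, hypothetical helper name):

```rust
// Hypothetical mirror of the bound handling in the range-index arm:
// missing start -> 0, missing end -> source length; the result is
// (element offset of the first item, number of items).
fn slice_bounds(source_len: u64, start: Option<u64>, end: Option<u64>) -> (u64, u64) {
    let start = start.unwrap_or(0);
    let end = end.unwrap_or(source_len);
    (start, end - start)
}

fn main() {
    assert_eq!(slice_bounds(4, Some(1), Some(3)), (1, 2)); // arr[1..3]
    assert_eq!(slice_bounds(4, None, Some(2)), (0, 2)); // arr[..2]
    assert_eq!(slice_bounds(4, Some(2), None), (2, 2)); // arr[2..]
    assert_eq!(slice_bounds(4, None, None), (0, 4)); // arr[..]
}
```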
@ -1266,27 +1342,16 @@ impl<'a> Codegen<'a> {
let idx = self.checked_expr(index, ty::Id::DINT, "subscript")?;
match bs.ty.expand() {
ty::Kind::Slice(s) if self.tys.ins.slices[s].len().is_some() => {
let elem = self.tys.ins.slices[s].elem;
Some(self.offset_ptr(bs.id, elem, idx.id))
}
ty::Kind::Slice(s) => {
let elem = self.tys.ins.slices[s].elem;
let size = self.ci.nodes.new_const(ty::Id::INT, self.tys.size_of(elem));
let inps = [VOID, idx.id, size];
let offset = self.ci.nodes.new_node(
ty::Id::INT,
Kind::BinOp { op: TokenKind::Mul },
inps,
self.tys,
);
let aclass = self.ci.nodes.aclass_index(bs.id).1;
let inps = [VOID, bs.id, offset];
let ptr = self.ci.nodes.new_node(
ty::Id::INT,
Kind::BinOp { op: TokenKind::Add },
inps,
self.tys,
);
self.ci.nodes.pass_aclass(aclass, ptr);
Some(Value::ptr(ptr).ty(elem))
let off = self.offset(bs.id, 0);
let base = self.tys.make_ptr(elem);
let bs = self.load_mem(off, base);
Some(self.offset_ptr(bs, elem, idx.id))
}
ty::Kind::Struct(s) => {
let Kind::CInt { value: idx } = self.ci.nodes[idx.id].kind else {
@ -2450,6 +2515,20 @@ impl<'a> Codegen<'a> {
}
}
fn offset_ptr(&mut self, bs: Nid, elem: ty::Id, idx: Nid) -> Value {
let size = self.ci.nodes.new_const(ty::Id::INT, self.tys.size_of(elem));
let inps = [VOID, idx, size];
let offset =
self.ci.nodes.new_node(ty::Id::INT, Kind::BinOp { op: TokenKind::Mul }, inps, self.tys);
let aclass = self.ci.nodes.aclass_index(bs).1;
let inps = [VOID, bs, offset];
let ptr =
self.ci.nodes.new_node(ty::Id::INT, Kind::BinOp { op: TokenKind::Add }, inps, self.tys);
self.ci.nodes.pass_aclass(aclass, ptr);
Value::ptr(ptr).ty(elem)
}
fn spill(&mut self, pos: Pos, value: &mut Value) {
debug_assert!(!value.ptr);
let stck = self.new_stack(pos, value.ty);
@ -2535,6 +2614,14 @@ impl<'a> Codegen<'a> {
)
}
}
ty::Kind::Slice(s) => {
let (offset, ty) = match name {
"len" => (8, ty::Id::UINT),
"ptr" => (0, self.tys.make_ptr(self.tys.ins.slices[s].elem)),
_ => return None,
};
Some(Value::ptr(self.offset(vtarget.id, offset)).ty(ty))
}
ty::Kind::TYPE => match self.ci.nodes.as_ty(vtarget.id).expand() {
ty::Kind::Module(m) => self.find_type_as_value(pos, m, name, ctx),
ty::Kind::Enum(e)
@ -4331,6 +4418,7 @@ mod tests {
c_strings;
struct_patterns;
arrays;
slices;
inline;
idk;
generic_functions;


@ -3,15 +3,15 @@ main:
ST r31, r254, 32a, 40h
LRA r32, r0, :"Goodbye, World!\0"
LRA r33, r0, :"Hello, World!\0"
ST r32, r254, 8a, 8h
ST r32, r254, 16a, 8h
ST r33, r254, 24a, 8h
LD r2, r254, 24a, 8h
LD r3, r254, 8a, 8h
LD r3, r254, 16a, 8h
JAL r31, r0, :print
ADDI64 r34, r254, 0d
ADDI64 r35, r254, 16d
ST r32, r254, 0a, 8h
ST r33, r254, 16a, 8h
ADDI64 r34, r254, 8d
ADDI64 r35, r254, 0d
ST r32, r254, 8a, 8h
ST r33, r254, 0a, 8h
CP r2, r35
CP r3, r34
JAL r31, r0, :print2


@ -3,11 +3,11 @@ decide:
CP r1, r13
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -128d
ST r31, r254, 80a, 48h
ADDI64 r254, r254, -144d
ST r31, r254, 96a, 48h
JAL r31, r0, :decide
CP r33, r0
ADDI64 r34, r254, 72d
ADDI64 r34, r254, 88d
CP r32, r1
ANDI r32, r32, 255d
JNE r32, r0, :0
@ -15,7 +15,7 @@ main:
JMP :1
0: CP r32, r34
1: LI64 r35, 1d
ST r35, r254, 72a, 8h
ST r35, r254, 88a, 8h
JNE r32, r33, :2
LI64 r32, 9001d
CP r1, r32
@ -25,12 +25,12 @@ main:
ANDI r33, r33, 255d
JNE r33, r0, :4
LI8 r33, 1b
ST r33, r254, 56a, 1h
ST r33, r254, 72a, 1h
LD r32, r32, 0a, 8h
ST r32, r254, 64a, 8h
ST r32, r254, 80a, 8h
JMP :5
4: ST r0, r254, 56a, 1h
5: LD r32, r254, 56a, 1h
4: ST r0, r254, 72a, 1h
5: LD r32, r254, 72a, 1h
ANDI r32, r32, 255d
JEQ r32, r0, :6
LI64 r32, 42d
@ -50,17 +50,20 @@ main:
LI64 r32, 69d
CP r1, r32
JMP :3
9: ADDI64 r33, r254, 40d
9: ADDI64 r33, r254, 56d
JAL r31, r0, :new_foo
ST r1, r33, 0a, 16h
LD r36, r254, 40a, 8h
LD r36, r254, 56a, 8h
JNE r36, r0, :10
LI64 r32, 999d
CP r1, r32
JMP :3
10: LRA r36, r0, :"foo\0"
ST r36, r254, 40a, 8h
LI64 r36, 4d
ST r36, r254, 48a, 8h
LD r2, r33, 0a, 16h
CP r4, r36
LD r4, r254, 40a, 16h
JAL r31, r0, :use_foo
ADDI64 r33, r254, 0d
JAL r31, r0, :no_foo
@ -98,8 +101,8 @@ main:
ANDI r32, r32, 65535d
SUB64 r32, r32, r33
CP r1, r32
3: LD r31, r254, 80a, 48h
ADDI64 r254, r254, 128d
3: LD r31, r254, 96a, 48h
ADDI64 r254, r254, 144d
JALA r0, r31, 0a
new_bar:
ADDI64 r254, r254, -24d
@ -129,11 +132,13 @@ no_foo:
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
use_foo:
ADDI64 r254, r254, -16d
ST r2, r254, 0a, 16h
ADDI64 r2, r254, 0d
ADDI64 r254, r254, 16d
ADDI64 r254, r254, -32d
ST r2, r254, 16a, 16h
ADDI64 r2, r254, 16d
ST r4, r254, 0a, 16h
ADDI64 r4, r254, 0d
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
code size: 1092
code size: 1162
ret: 0
status: Ok(())


@ -0,0 +1,41 @@
main:
ADDI64 r254, r254, -120d
ADDI64 r13, r254, 32d
ST r0, r254, 32a, 8h
ADDI64 r14, r13, 8d
LI64 r15, 1d
ST r15, r14, 0a, 8h
ADDI64 r15, r13, 16d
LI64 r16, 2d
ST r16, r15, 0a, 8h
LI64 r16, 3d
ADDI64 r17, r254, 112d
LI64 r18, 10d
ST r13, r254, 0a, 8h
ST r16, r254, 56a, 8h
ST r18, r254, 112a, 8h
ST r14, r254, 64a, 8h
LD r14, r254, 0a, 8h
LD r16, r254, 64a, 8h
ST r15, r254, 96a, 8h
ST r13, r254, 16a, 8h
LD r13, r14, 0a, 8h
LD r14, r16, 0a, 8h
LD r15, r254, 96a, 8h
LD r16, r254, 16a, 8h
ADD64 r13, r14, r13
LD r14, r15, 0a, 8h
ST r17, r254, 80a, 8h
ADD64 r13, r14, r13
LD r14, r16, 24a, 8h
LD r15, r254, 80a, 8h
ADD64 r13, r14, r13
LD r14, r15, 0a, 8h
ADDI64 r13, r13, 4d
SUB64 r13, r13, r14
CP r1, r13
ADDI64 r254, r254, 120d
JALA r0, r31, 0a
code size: 415
ret: 0
status: Ok(())


@ -1,8 +1,9 @@
main:
LRA r13, r0, :strings
LD r13, r13, 0a, 8h
LRA r13, r0, :"abcdefshijklmnop\0"
LD r13, r13, 0a, 1h
ANDI r13, r13, 255d
CP r1, r13
JALA r0, r31, 0a
code size: 98
ret: 134998808175692
code size: 70
ret: 97
status: Ok(())