Compare commits

..

15 commits

Author SHA1 Message Date
able 6c38f2b2f1 fic? 2024-02-10 16:24:37 -06:00
able 5be66ba32e Fix Broken deps for ableos 2024-02-10 16:13:39 -06:00
able 6df69e2f1a VFS: Updated the API and worked on some implementation 2023-09-13 05:31:35 -05:00
able a7414ca01a Bare metal test 2023-09-13 05:31:08 -05:00
able 917ca121e3 reworking system_config to better build an ableOS file tree 2023-05-23 06:39:19 -05:00
able 48ca72e613 update formating rules 2023-05-23 06:38:42 -05:00
able 138428fac3 nixOS support 2023-05-23 06:38:17 -05:00
Able ca80c17819 prelim VFS work 2023-05-09 01:07:52 -05:00
Able be96cb797a add log and try debugging the BB 2023-05-08 22:31:10 -05:00
Able 9239fe7f9e add back buffer support to agl 2023-05-08 21:56:10 -05:00
Able 7881ee2881 update v2 2023-05-06 05:13:52 -05:00
Able e75565d7ef make things public *Bleh* 2023-05-06 04:39:09 -05:00
Able 6b63d00de0 public display 2023-05-06 04:37:34 -05:00
Able 824697aa7c graphics lib update 2023-05-06 04:35:44 -05:00
Able a2bca6f901 update 2023-05-04 02:38:55 -05:00
54 changed files with 648 additions and 1964 deletions

168
Cargo.lock generated
View file

@ -6,6 +6,8 @@ version = 3
name = "able_graphics_library"
version = "0.1.2"
dependencies = [
"embedded-graphics",
"log",
"versioning",
]
@ -35,14 +37,7 @@ dependencies = [
name = "aidl"
version = "0.1.0"
dependencies = [
"codespan-reporting",
"derive_more",
"itertools",
"logos",
"proc-macro2",
"quote",
"syn 2.0.15",
"thiserror",
]
[[package]]
@ -83,6 +78,12 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
name = "axel2wat"
version = "0.1.0"
[[package]]
name = "az"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b7e4c2464d97fe331d41de9d5db0def0a96f4d823b8b32a2efd503578988973"
[[package]]
name = "base64"
version = "0.13.1"
@ -105,6 +106,12 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "byteorder"
version = "1.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
[[package]]
name = "cfg-if"
version = "1.0.0"
@ -117,25 +124,9 @@ version = "0.1.0"
dependencies = [
"hashbrown 0.13.1",
"log",
"toml 0.5.9 (git+https://git.ablecorp.us/theoddgarlic/toml-rs)",
"toml 0.5.9 (git+https://git.ablecorp.us/asya/toml-rs)",
]
[[package]]
name = "codespan-reporting"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
dependencies = [
"termcolor",
"unicode-width",
]
[[package]]
name = "convert_case"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
[[package]]
name = "cpuio"
version = "0.3.2"
@ -163,23 +154,36 @@ name = "derelict_microarchitecture"
version = "0.1.0"
[[package]]
name = "derive_more"
version = "0.99.17"
name = "embedded-graphics"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
checksum = "750082c65094fbcc4baf9ba31583ce9a8bb7f52cadfb96f6164b1bc7f922f32b"
dependencies = [
"convert_case",
"proc-macro2",
"quote",
"rustc_version",
"syn 1.0.105",
"az",
"byteorder",
"embedded-graphics-core",
"float-cmp",
"micromath",
]
[[package]]
name = "either"
version = "1.8.1"
name = "embedded-graphics-core"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
checksum = "b8b1239db5f3eeb7e33e35bd10bd014e7b2537b17e071f726a09351431337cfa"
dependencies = [
"az",
"byteorder",
]
[[package]]
name = "float-cmp"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1267f4ac4f343772758f7b1bdcbe767c218bbab93bb432acbf5162bbf85a6c4"
dependencies = [
"num-traits",
]
[[package]]
name = "fnv"
@ -236,15 +240,6 @@ dependencies = [
"hashbrown 0.12.3",
]
[[package]]
name = "itertools"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
dependencies = [
"either",
]
[[package]]
name = "libc"
version = "0.2.138"
@ -323,10 +318,25 @@ dependencies = [
"versioning",
]
[[package]]
name = "micromath"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc4010833aea396656c2f91ee704d51a6f1329ec2ab56ffd00bfd56f7481ea94"
[[package]]
name = "no_video"
version = "0.1.0"
[[package]]
name = "num-traits"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
dependencies = [
"autocfg",
]
[[package]]
name = "number_prefix"
version = "0.4.0"
@ -410,21 +420,6 @@ dependencies = [
"serde",
]
[[package]]
name = "rustc_version"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
"semver",
]
[[package]]
name = "semver"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
[[package]]
name = "serde"
version = "1.0.148"
@ -524,15 +519,6 @@ dependencies = [
"versioning",
]
[[package]]
name = "termcolor"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6"
dependencies = [
"winapi-util",
]
[[package]]
name = "thiserror"
version = "1.0.37"
@ -572,7 +558,7 @@ dependencies = [
[[package]]
name = "toml"
version = "0.5.9"
source = "git+https://git.ablecorp.us/theoddgarlic/toml-rs#34db433429f3ad38921d13ac9aba74c8a706f376"
source = "git+https://git.ablecorp.us/asya/toml-rs#34db433429f3ad38921d13ac9aba74c8a706f376"
dependencies = [
"hashbrown 0.12.3",
"serde",
@ -638,12 +624,6 @@ version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
[[package]]
name = "unicode-width"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
[[package]]
name = "uri"
version = "0.1.0"
@ -664,6 +644,13 @@ dependencies = [
"serde",
]
[[package]]
name = "vfs"
version = "0.1.0"
dependencies = [
"std",
]
[[package]]
name = "vgable"
version = "0.1.0"
@ -692,37 +679,6 @@ dependencies = [
"logos",
]
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
dependencies = [
"winapi",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "winnow"
version = "0.4.4"

View file

@ -11,6 +11,8 @@ members = [
"drivers/graphics/novideo",
"drivers/graphics/vgable",
"drivers/vfs",
"drivers/keyboards/ps2_keyboard",
"drivers/mice/ps2_mouse",

9
drivers/vfs/Cargo.toml Normal file
View file

@ -0,0 +1,9 @@
[package]
name = "vfs"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# std = { path = "../../libraries/std" }

42
drivers/vfs/src/cache.rs Normal file
View file

@ -0,0 +1,42 @@
/// A bounded in-memory cache of open files and their contents.
pub struct Cache {
    /// A limit on how many open files can be in this cache.
    /// Useful to prevent hundreds of processes caching thousands of files.
    /// 0 == no limit.
    file_id_limit: u64,
    /// Total bytes allowed to be cached before a file is pushed out of the cache.
    total_byte_size: u64,
    /// Bytes currently held; kept in sync via [`Cache::recalculate_cache`].
    current_byte_size: u64,
    /// Cached file contents keyed by file id.
    // NOTE(review): `FileID` is declared in main.rs and never imported here;
    // this module presumably needs `use crate::FileID;` to compile — confirm.
    cache: Vec<(FileID, Vec<u8>)>,
}

impl Default for Cache {
    /// Default limits: 1024 open files, 16 KiB of cached bytes.
    fn default() -> Self {
        Self {
            file_id_limit: 1024,
            total_byte_size: 1024 * 16,
            current_byte_size: 0,
            cache: Vec::new(),
        }
    }
}

impl Cache {
    /// Recompute `current_byte_size` by summing the length of every cached file.
    fn recalculate_cache(&mut self) {
        self.current_byte_size = self
            .cache
            .iter()
            .map(|(_, file)| file.len() as u64)
            .sum();
    }
}
#[test]
fn recalc_cache_test() {
    let mut cache = Cache::default();
    // Build eleven cached files of growing size: 1, 2, ..., 11 bytes.
    let mut contents: Vec<u8> = Vec::new();
    for id in 0u64..=10 {
        contents.push(id.try_into().unwrap());
        cache.cache.push((id, contents.clone()));
    }
    cache.recalculate_cache();
    // Total cached bytes: 1 + 2 + ... + 11 = 66.
    assert_eq!(cache.current_byte_size, 66);
}

105
drivers/vfs/src/main.rs Normal file
View file

@ -0,0 +1,105 @@
// #![no_std]
#![feature(async_fn_in_trait)]
pub mod cache;
extern crate alloc;
use alloc::vec::Vec;
/// A filesystem path; currently just an owned string.
pub type Path = String;

/// Entry point of the VFS driver. Only constructs the (currently unused)
/// VFS instance — the service loop is not implemented yet, see TODOs below.
fn main() {
    let vfs = VFS::new();
    // TODO: load config for the vfs
    // advertise the VFS service
    // wait on processes to subscribe and send messages
}
// NOTE: in the implementation this should be a basevalue of some randomized value to
// prevent the ability to check how many files are open on the system
pub type FileID = u64;

/// File operations exposed by the VFS. Every operation has a synchronous
/// (`s_*`) and an asynchronous (`a_*`) flavor with identical semantics.
pub trait FileIO {
    /// Open `path`. The returned `FileID` is stored in the VFS to allow
    /// multiple programs to have a `FileID` without conflicting.
    fn s_open(path: Path) -> Result<FileID, FileIOError>;
    async fn a_open(path: Path) -> Result<FileID, FileIOError>;
    /// Close the file and flush changes to disk.
    fn s_close(file_id: FileID) -> Result<(), FileIOError>;
    async fn a_close(file_id: FileID) -> Result<(), FileIOError>;
    /// Read `length` bytes starting at `offset` into the file — allows
    /// reading a specific value without fetching the whole file.
    fn s_read(file_id: FileID, offset: usize, length: u64) -> Result<Vec<u8>, FileIOError>;
    async fn a_read(file_id: FileID, offset: usize, length: u64) -> Result<Vec<u8>, FileIOError>;
    /// Write `data` starting at `offset` into the file.
    fn s_write(file_id: FileID, offset: usize, data: Vec<u8>) -> Result<(), FileIOError>;
    async fn a_write(file_id: FileID, offset: usize, data: Vec<u8>) -> Result<(), FileIOError>;
}
/// Errors returned by VFS file operations.
///
/// Derives `Debug` so callers can log or `expect` on failures — the original
/// enum had no derives, making it awkward to surface in diagnostics.
#[derive(Debug)]
pub enum FileIOError {
    /// No mount point matches the requested path.
    NoMountPoint,
}
/// Associates a mounted path with the filesystem driver process serving it.
pub struct MountPoint {
    /// Path at which the filesystem is mounted.
    mount: Path,
    // Use this to send the file requests to the right filesystem driver
    filesystem_proc_id: u64,
}
/// The virtual filesystem: two cache layers plus the mount table.
// NOTE(review): `Cache` is declared in the `cache` module but never imported
// in this file; presumably `use cache::Cache;` is needed to compile — confirm.
pub struct VFS {
    // If a file is used twice move it from first cache to second cache
    // This is under the assumption that if you write to a file twice you will write again
    first_layer: Cache,
    second_layer: Cache,
    // Mount table consulted to route a path to its filesystem driver.
    mount_point_list: Vec<MountPoint>,
}
impl VFS {
    /// Create an empty VFS: no mounts, both cache layers at their defaults.
    fn new() -> Self {
        Self {
            first_layer: Cache::default(),
            second_layer: Cache::default(),
            mount_point_list: Vec::new(),
        }
    }

    /// Map `path` to the mount point whose filesystem driver should serve it.
    ///
    /// Takes `&self` — the original signature consumed `self`, destroying the
    /// whole VFS on every lookup. Still a stub: mount resolution is not
    /// implemented, so every call reports `NoMountPoint`.
    fn resolve_mountpoint(&self, _path: Path) -> Result<MountPoint, FileIOError> {
        Err(FileIOError::NoMountPoint)
    }
}
/// Stub implementation: every operation is `todo!()` until mount resolution
/// and filesystem-driver messaging exist.
impl FileIO for VFS {
    fn s_open(path: Path) -> Result<FileID, FileIOError> {
        // Break up the path into a mountpoint and a path fragment
        todo!()
    }
    async fn a_open(path: Path) -> Result<FileID, FileIOError> {
        todo!()
    }
    fn s_close(file_id: FileID) -> Result<(), FileIOError> {
        todo!()
    }
    async fn a_close(file_id: FileID) -> Result<(), FileIOError> {
        todo!()
    }
    fn s_read(file_id: FileID, offset: usize, length: u64) -> Result<Vec<u8>, FileIOError> {
        todo!()
    }
    async fn a_read(file_id: FileID, offset: usize, length: u64) -> Result<Vec<u8>, FileIOError> {
        todo!()
    }
    fn s_write(file_id: FileID, offset: usize, data: Vec<u8>) -> Result<(), FileIOError> {
        todo!()
    }
    async fn a_write(file_id: FileID, offset: usize, data: Vec<u8>) -> Result<(), FileIOError> {
        todo!()
    }
}

View file

@ -7,3 +7,5 @@ edition = "2021"
[dependencies]
versioning = { path = "../versioning" }
embedded-graphics = "0.7.1"
log = "*"

View file

@ -6,8 +6,6 @@ use alloc::vec::Vec;
use super::error::GPUError;
pub type VertexBuffer = Vec<u8>;
#[repr(C)]
pub struct XYZ {
x: f64,
@ -22,28 +20,3 @@ pub struct XYZW {
z: f64,
w: f64,
}
pub struct GraphicsEngine {
vertex_buffer_attributes_editable: bool,
vertex_buffer: VertexBuffer,
vertex_size: usize,
}
impl GraphicsEngine {
pub fn new() -> Self {
Self {
vertex_buffer_attributes_editable: true,
vertex_buffer: Vec::new(),
vertex_size: 0,
}
}
pub fn append_vertex(&mut self, mut vertex: Vec<u8>) -> Result<(), GPUError> {
if self.vertex_buffer_attributes_editable {
return Err(GPUError::EngineNotInitialized);
}
if !vertex.len() == self.vertex_size {
return Err(GPUError::ImproperVertexSize);
}
self.vertex_buffer.append(&mut vertex);
Ok(())
}
}

View file

@ -1,21 +0,0 @@
pub struct Color3 {
pub r: u8,
pub g: u8,
pub b: u8,
}
impl Color3 {
/// Usage
/// The alpha value is a number between 0.0 and 1.0, which represents the transparency
/// or translucency of the other color.
pub fn blend(&self, other: &Self, alpha: f32) -> Self {
let r = (1.0 - alpha) * f32::from(self.r) + alpha * f32::from(other.r);
let g = (1.0 - alpha) * f32::from(self.g) + alpha * f32::from(other.g);
let b = (1.0 - alpha) * f32::from(self.b) + alpha * f32::from(other.b);
Self {
r: r as u8,
g: g as u8,
b: b as u8,
}
}
}

View file

@ -1,35 +0,0 @@
use alloc::vec::Vec;
pub enum DisplayError {
InvalidFormat,
InvalidWidth,
InvalidHeight,
InvalidBPP,
InvalidFPS,
}
pub fn parse_display_string(s: &str) -> Result<(u32, u32, u32, u32), DisplayError> {
let parts: Vec<&str> = s.split('x').collect();
if parts.len() != 3 {
return Err(DisplayError::InvalidFormat);
}
let width = parts[0]
.parse::<u32>()
.map_err(|_| DisplayError::InvalidWidth)?;
let height = parts[1]
.parse::<u32>()
.map_err(|_| DisplayError::InvalidHeight)?;
let bits: Vec<&str> = parts[2].split('@').collect();
if bits.len() != 2 {
return Err(DisplayError::InvalidFormat);
}
let bpp = bits[0]
.parse::<u32>()
.map_err(|_| DisplayError::InvalidBPP)?;
let fps = bits[1]
.parse::<u32>()
.map_err(|_| DisplayError::InvalidFPS)?;
Ok((width, height, bpp, fps))
}

View file

@ -1,4 +0,0 @@
pub enum GPUError {
ImproperVertexSize = 10,
EngineNotInitialized = 100,
}

View file

@ -1,54 +0,0 @@
use core::ptr;
use alloc::vec::Vec;
use super::color::Color3;
/// NOTE: Assumes the layout of RGBA
pub struct FrameBuffer {
pub width: u32,
pub height: u32,
pub data: Vec<u32>,
}
impl FrameBuffer {
pub fn new(width: u32, height: u32) -> Self {
let data = vec![0; (width * height) as usize];
FrameBuffer {
width,
height,
data,
}
}
/// WARNING: Slow
pub fn set_pixel(&mut self, x: u32, y: u32, color: u32) {
let index = (y * self.width + x) as usize;
self.data[index] = color;
}
/// WARNING: Slow
pub fn get_pixel(&self, x: u32, y: u32) -> u32 {
let index = (y * self.width + x) as usize;
self.data[index]
}
/// Quickly writes the provided color to the whole framebuffer
pub fn clear(&mut self, color: Color3) {
unsafe {
// TODO: properly clear instead of only copying red
ptr::write_bytes(self.data.as_mut_ptr(), color.r, self.data.len());
}
}
// TODO(Able): Test preformance of clear2
#[allow(dead_code)]
fn clear2(&mut self, color: u32) {
self.data.fill(color);
}
/// Check the size of one framebuffer vs the other.
/// NOTE: Just because this returns false does not mean that something is wrong
/// there are cases that this makes sense
pub fn check_size(&self, other: &FrameBuffer) -> bool {
self.width == other.width && self.height == other.height
}
}

View file

@ -1,5 +1 @@
pub mod buffer;
pub mod color;
pub mod display;
pub mod error;
pub mod framebuffer;
pub mod vertex;

View file

@ -0,0 +1,48 @@
use alloc::vec::Vec;
/// Raw vertex data: a flat byte buffer.
pub type VertexBuffer = Vec<u8>;

/// CPU-side vertex staging buffer for the graphics engine.
pub struct GraphicsEngine {
    /// While true the vertex attribute layout is still being configured and
    /// no vertex data may be appended.
    vertex_buffer_attributes_editable: bool,
    vertex_buffer: VertexBuffer,
    /// Size in bytes of a single vertex; appended data must match exactly.
    vertex_size: usize,
}

impl GraphicsEngine {
    /// New engine with an empty buffer and attributes still editable.
    pub fn new() -> Self {
        Self {
            vertex_buffer_attributes_editable: true,
            vertex_buffer: Vec::new(),
            vertex_size: 0,
        }
    }

    /// Append one vertex (raw bytes) to the vertex buffer.
    ///
    /// # Errors
    /// - `GPUError::EngineNotInitialized` while attributes are still editable.
    /// - `GPUError::ImproperVertexSize` when `vertex` is not exactly
    ///   `vertex_size` bytes long.
    pub fn append_vertex(&mut self, mut vertex: Vec<u8>) -> Result<(), GPUError> {
        if self.vertex_buffer_attributes_editable {
            return Err(GPUError::EngineNotInitialized);
        }
        // BUG FIX: the original wrote `!vertex.len() == self.vertex_size`,
        // which applies a bitwise NOT to the length, so the size check
        // effectively never fired. `!=` is what was intended.
        if vertex.len() != self.vertex_size {
            return Err(GPUError::ImproperVertexSize);
        }
        self.vertex_buffer.append(&mut vertex);
        Ok(())
    }
}

/// A 3-component vertex position (C layout for GPU interop).
#[repr(C)]
pub struct XYZ {
    x: f64,
    y: f64,
    z: f64,
}

/// A 4-component vertex position (C layout for GPU interop).
#[repr(C)]
pub struct XYZW {
    x: f64,
    y: f64,
    z: f64,
    w: f64,
}

/// Errors produced by the graphics engine.
/// Derives `Debug`/`PartialEq`/`Eq` so results are loggable and testable.
#[derive(Debug, PartialEq, Eq)]
pub enum GPUError {
    ImproperVertexSize,
    EngineNotInitialized,
}

View file

@ -0,0 +1 @@
pub mod x86;

View file

@ -0,0 +1,90 @@
use embedded_graphics::{
pixelcolor::Rgb888,
prelude::{DrawTarget, IntoStorage, OriginDimensions, PixelColor, Point, Size},
primitives::{Line, Primitive, PrimitiveStyle},
Drawable, Pixel,
};
/// A linear-framebuffer display that renders through a back buffer.
///
/// Holds raw pointers to two pixel buffers; drawing writes to `bb` and is
/// copied to `fb` by [`Display::swap_buffers`].
// NOTE(review): both buffers are assumed to cover `size.width * size.height`
// 32-bit pixels — confirm against the code that allocates them.
pub struct Display {
    /// Front buffer: the pixels actually displayed.
    pub fb: *mut u32,
    // Back buffer
    pub bb: *mut u32,
    /// Display dimensions in pixels.
    pub size: Size,
    /// Current stroke color used by [`Display::line`].
    pub color: Rgb888,
}

impl Display {
    /// Set the color used for subsequent drawing operations.
    pub fn set_color(&mut self, color: Rgb888) {
        self.color = color;
    }

    /// Copy the back buffer into the front buffer, making drawn content visible.
    pub fn swap_buffers(&mut self) {
        let size: usize = (self.size.height * self.size.width).try_into().unwrap();
        // SAFETY: assumes `fb` and `bb` each point to at least
        // `width * height` valid, non-overlapping u32 pixels — TODO confirm
        // against the framebuffer setup code.
        unsafe {
            let dst_ptr = self.fb;
            let src_ptr = self.bb;
            core::ptr::copy_nonoverlapping(src_ptr, dst_ptr, size);
        }
        log::trace!("Update");
    }

    /// Draw a line from (`x1`, `y1`) to (`x2`, `y2`) into the back buffer
    /// using the current color and the given stroke `thickness`.
    pub fn line(
        &mut self,
        x1: i32,
        y1: i32,
        x2: i32,
        y2: i32,
        thickness: u32,
    ) -> Result<(), BlitOutOfBoundsError> {
        let color = self.color;
        let style = PrimitiveStyle::with_stroke(color, thickness);
        Line::new(Point::new(x1, y1), Point::new(x2, y2))
            .into_styled(style)
            .draw(&mut *self)?;
        Ok(())
    }
}

// SAFETY: Display only holds raw pointers into framebuffer memory.
// NOTE(review): this is only sound if no other thread accesses the same
// buffers concurrently — confirm how Display instances are shared.
unsafe impl Send for Display {}
impl DrawTarget for Display {
    type Color = Rgb888;
    type Error = BlitOutOfBoundsError;

    /// Write each pixel into the back buffer, rejecting any coordinate that
    /// falls outside the display bounds.
    fn draw_iter<I>(&mut self, pixels: I) -> Result<(), Self::Error>
    where
        I: IntoIterator<Item = Pixel<Self::Color>>,
    {
        for Pixel(pos, rgb) in pixels {
            // Negative coordinates cannot be addressed — treat as out of bounds.
            let pos_x = u32::try_from(pos.x).map_err(|_| BlitOutOfBoundsError)?;
            let pos_y = u32::try_from(pos.y).map_err(|_| BlitOutOfBoundsError)?;
            // SAFETY: the bounds check below keeps the computed offset within
            // `width * height` pixels, which `bb` is assumed to cover —
            // TODO confirm against the allocation of `bb`.
            unsafe {
                if pos_x >= self.size.width || pos_y >= self.size.height {
                    return Err(BlitOutOfBoundsError);
                }
                self.bb
                    .add(
                        (pos_y * self.size.width + pos_x)
                            .try_into()
                            .map_err(|_| BlitOutOfBoundsError)?,
                    )
                    .write_volatile(rgb.into_storage());
            }
        }
        Ok(())
    }
}
impl OriginDimensions for Display {
    /// The drawable size of the display in pixels.
    #[inline]
    fn size(&self) -> Size {
        self.size
    }
}

/// Error returned when a drawing operation addresses a pixel outside the
/// display, or when a coordinate/offset conversion fails.
#[derive(Debug)]
pub struct BlitOutOfBoundsError;

View file

@ -1,18 +1,9 @@
mod arch;
use core::fmt::Error;
use alloc::vec::Vec;
pub struct Color {
r: u8,
g: u8,
b: u8,
a: u8,
}
pub use arch::x86::Display;
pub struct PixelBuffer {
width: usize,
height: usize,
data: Vec<Color>,
}
impl PixelBuffer {
pub fn xy_calc(x: usize, y: isize) {}
pub fn blit(&mut self, x: isize, y: isize, buff: PixelBuffer) {}
}
use self::arch::x86::BlitOutOfBoundsError;

View file

@ -0,0 +1,5 @@
Type Coordinate = i32;
Type Coordinates = (Coordinate, Coordinate);
Function line Takes(Coordinates, Coordinates) Returns(Nothing);

View file

@ -15,7 +15,7 @@ hashbrown = "*"
[dependencies.toml]
git = "https://git.ablecorp.us:443/theoddgarlic/toml-rs"
git = "https://git.ablecorp.us:443/asya/toml-rs"
# version = "0.5.8"
default-features = false

View file

@ -24,3 +24,7 @@ pub struct Result<T> {
pub ok: T,
pub err: ExternErrors,
}
pub struct Path {
parts: OSString,
}

View file

@ -0,0 +1,16 @@
use alloc::alloc::{GlobalAlloc, Layout};
/// Placeholder global allocator for the ableOS userspace `std`.
struct MyAllocator;

unsafe impl GlobalAlloc for MyAllocator {
    // Deliberate stub: any attempt to allocate crashes loudly so missing
    // allocator support is noticed immediately.
    // NOTE(review): `GlobalAlloc` implementations must not unwind; `panic!`
    // here is only sound on a panic=abort target — confirm the build profile.
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        panic!();
    }
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        panic!();
    }
}

/// Register the stub allocator as the crate-wide global allocator.
#[global_allocator]
static GLOBAL: MyAllocator = MyAllocator;

View file

@ -1,12 +1,3 @@
pub enum IOErrors {
UnknownError,
}
// pub struct Port<T> {
// inner: T,
// }
// impl<T> Port<T> {
// pub fn read(&self) -> Result<T, IOErrors> {
// Ok(self.inner)
// }
// }

View file

@ -9,6 +9,7 @@ pub mod env;
pub mod exit;
pub mod io;
pub mod allocator;
#[cfg(not(test))]
pub mod panic;
@ -23,6 +24,8 @@ pub mod prelude;
use versioning::Version;
pub mod path;
pub const VERSION: Version = Version::new(0, 1, 0);
// extern crate alloc;

18
libraries/std/src/path.rs Normal file
View file

@ -0,0 +1,18 @@
use alloc::string::{String, ToString};
/// A filesystem path, stored as its individual components.
pub struct Path {
    pub path: Vec<String>,
}

impl Path {
    /// Split `path` on either `\` or `/` and store the resulting segments.
    /// Empty segments (e.g. from a leading separator) are preserved.
    pub fn new(path: String) -> Self {
        let segments: Vec<String> = path
            .split(&['\\', '/'][..])
            .map(ToString::to_string)
            .collect();
        Path { path: segments }
    }
}

View file

@ -6,3 +6,6 @@ pub use crate::print_char;
pub use core::panic;
pub use versioning;
extern crate alloc;
pub use alloc::vec::Vec;

View file

@ -6,11 +6,4 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
codespan-reporting = "0.11.1"
derive_more = "0.99.17"
itertools = "0.10.5"
logos = "0"
proc-macro2 = "1.0.56"
quote = "1.0.26"
syn = "2.0.15"
thiserror = "1"
logos = "0.13.0"

View file

@ -4,7 +4,7 @@ The example implementation will be in rust
IDL | Rust
__________
Boolean | bool
boolean | bool
I8 | i8
I16 | i16
I32 | i32
@ -15,8 +15,8 @@ U32 | u32
U64 | u64
F32 | f32
F64 | f64
Constant X = Make Y { Z } | const X: Y = Y { Z };
Alias | type
Constant X Y Z | const X: Y = Z;
Type | type
Vector<X> | Vec<X>
Array<X, Y> | [X; Y]
Function X Takes(YX) Returns(ZX) | fn X(YX) -> ZX
Array[X;Y] | [X;Y]
Function X accepts(YX) returns(ZX) | fn X(YX) -> ZX

View file

@ -1,20 +1,20 @@
Alias Byte = u8;
Alias Int = u32;
Type Byte = U8;
Type String = Vector<Byte>;
Enumeration Nothing {}
Enumurate Boolean{
False = 0,
True = 1,
}
Enumeration Option<T> {
Union Option<T>{
None,
Some(T)
Some<T>
}
Enumeration Result<T, E> {
Ok(T),
Err(E)
}
Structure Version {
major: Byte,
minor: Byte,
patch: Byte,
}
};

View file

@ -1,21 +0,0 @@
#![crate_name = "aidl_core"]
#![crate_type = "rlib"]
#![no_implicit_prelude]
extern crate core as rust_core;
extern crate alloc as rust_alloc;
pub use self::rust_core::{option::Option, result::Result};
pub use self::rust_alloc::{vec::Vec as Vector, string::String};
pub type Nothing = ();
pub type Byte = u8;
pub type Int = u32;
#[derive(Debug, Clone, Copy)]
pub struct Version {
pub major: Byte,
pub minor: Byte,
pub patch: Byte
}

View file

@ -1,19 +0,0 @@
Type UUID = Array<U8, 16>;
Type Nanoseconds = U32;
Structure Duration {
secs: U64
nanos: Nanoseconds
}
Structure LinkedList<T> {
data: T,
child: Option<LinkedList<T>>,
}
Constant VERSION = Version {
major: 1,
minor: 0,
patch: 0
};

View file

@ -0,0 +1,13 @@
Type UUID = Array[U8; 16];
Type Nanoseconds = U32;
Structure Duration{
secs: U64,
nanos: Nanoseconds,
}
Structure LinkedList{
data: Any,
child: Option<LinkedList>,
}

View file

@ -1,6 +0,0 @@
Interface Thing {
Function moves Takes(Move Self) Returns(Self);
Function immut_ref Takes(Reference Self);
Function mut_ref Takes(Mutable Reference Self);
}

View file

@ -1,16 +1,13 @@
Module vfs;
// core provides lots of useful types like String and Byte
Use core.Version;
Use core.Vector;
Use core.String;
use core;
Constant VERSION = Make Version {
Constant VERSION Version{
major: 1,
minor: 0,
patch: 0,
};
}
Alias Path = String;
Type Path = String;
Structure File {
name: String,
@ -18,9 +15,10 @@ Structure File {
}
Interface File{
Function create Takes(Path) Returns(Nothing);
function new accepts(Path) returns(None);
Function open Takes(Path) Returns(File);
// Open in this iteration assumes the file exists
function open accepts(Path) returns(File);
Function close Takes(File) Returns(Nothing);
function close accepts(File) returns(None);
}

View file

@ -1,26 +0,0 @@
#![crate_name = "aidl_vfs"]
#![crate_type = "rlib"]
#![no_implicit_prelude]
extern crate aidl_core;
use aidl_core::{Vector, Version, String};
pub const VERSION: Version = Version {
major: 1,
minor: 0,
patch: 0,
};
pub type Path = String;
pub struct FFile {
pub name: String
}
pub trait File {
fn fields(&self) -> &FFile;
fn fields_mut(&mut self) -> &mut FFile;
fn into_fields(self) -> FFile where Self: Sized;
fn new(path: Path) -> Self;
}

View file

@ -1,28 +0,0 @@
Use core.Byte;
Use core.Int;
Constant Hi = "WHY???/\n";
Alias Yo = Byte;
Constant Version = Make Version {
major: 1, minor: 0, patch: 0
};
Interface Iface {
Function hello Takes(Int, Boolean,) Returns(Int);
}
Function a_free_function Returns(Boolean);
Structure Hello {
world: Boolean,
prompt: Option<String>,
}
Enumeration Reality {
Dead(Boolean, Boolean),
Alive {
health: Int,
dying: Boolean,
},
}

View file

@ -1,181 +0,0 @@
//! **note** the order of fields is the order of parsing.
use std::collections::HashMap;
/// An IDL module.
///
/// Parsing order:
/// - use declarations,
/// - items
#[derive(Debug)]
pub struct IDLModule {
pub name: String,
// why: only allow use before other items
// parser will error if use is present in any other place
pub uses: Vec<UseDecl>,
pub items: Vec<Item>,
}
#[derive(Debug)]
pub enum Item {
Interface(ItemInterface),
Alias(ItemAlias),
Constant(ItemConstant),
Function(Function),
Structure(ItemStructure),
Enumeration(ItemEnumeration),
}
#[derive(Debug, Default)]
pub struct Function {
pub name: String,
pub takes: Vec<Type>,
pub returns: Type,
}
#[derive(Debug)]
pub struct Type {
pub name: String,
pub arguments: TypeArguments,
}
impl Type {
pub fn infer() -> Self {
Self {
name: String::from(INFER_TYPE),
arguments: TypeArguments::None,
}
}
}
pub const NOTHING_TYPE: &str = "Nothing";
pub const INFER_TYPE: &str = "_";
impl Default for Type {
fn default() -> Self {
Self {
name: String::from(NOTHING_TYPE),
arguments: TypeArguments::None,
}
}
}
#[derive(Debug, Default)]
pub enum TypeArguments {
/// TypeName
#[default]
None,
/// TypeName<T1, T2, T3, TN>
AngleBracketed(Vec<Box<Type>>),
}
pub fn nothing() -> Type {
Type::default()
}
#[derive(Debug)]
pub struct ItemInterface {
pub name: String,
pub functions: Vec<Function>,
}
#[derive(Debug)]
pub struct ItemStructure {
pub name: String,
pub fields: HashMap<String, Type>,
pub arguments: TypeArguments,
}
#[derive(Debug)]
pub struct ItemAlias {
pub name: String,
pub referree: Type,
}
#[derive(Debug)]
pub struct ItemConstant {
pub name: String,
pub expr: Expr,
}
#[derive(Debug)]
pub struct ItemEnumeration {
pub name: String,
pub arguments: TypeArguments,
pub variants: Vec<EnumerationVariant>,
}
#[derive(Debug)]
pub struct EnumerationVariant {
pub name: String,
pub content: EnumerationContent,
}
#[derive(Debug, Default)]
pub enum EnumerationContent {
#[default]
None,
Tuple(Vec<Type>),
Structure(HashMap<String, Type>),
Value(NumberLiteral),
}
#[derive(Debug)]
pub struct UseDecl {
pub path: (String, Option<String>)
}
#[derive(Debug)]
pub enum Expr {
Literal(Literal),
_IdentAccess(String),
Make(Box<ExprMake>),
}
#[derive(Debug)]
pub struct ExprMake {
pub name: String,
pub params: HashMap<String, Expr>,
}
#[derive(Debug)]
pub enum Literal {
String(String),
Number(NumberLiteral),
Char(char),
}
#[derive(Debug, derive_more::Display)]
pub enum NumberLiteral {
#[display(fmt = "{_0}ptr")]
Ptr(usize),
#[display(fmt = "{_0}u8")]
U8(u8),
#[display(fmt = "{_0}i8")]
I8(i8),
#[display(fmt = "{_0}u16")]
U16(u16),
#[display(fmt = "{_0}i16")]
I16(i16),
#[display(fmt = "{_0}u32")]
U32(u32),
#[display(fmt = "{_0}i32")]
I32(i32),
#[display(fmt = "{_0}u64")]
U64(u64),
#[display(fmt = "{_0}i64")]
I64(i64),
#[display(fmt = "{_0}")]
Infer(i64),
}
/// seg1.seg2.seg3.segN
#[derive(Debug)]
pub struct ModulePath {
pub segments: Vec<String>,
}

View file

@ -1,130 +0,0 @@
use std::default::default;
use crate::{ast::{IDLModule, ItemInterface, TypeArguments, Type}, unwrap_match};
use itertools::Itertools;
use proc_macro2::{Ident, Span, TokenStream};
use quote::{quote, ToTokens};
use syn::{Attribute, ItemExternCrate, ItemTrait, ItemUse, LitStr, Meta, Path, UsePath, Generics, punctuated::Punctuated, TypeArray, LitInt};
fn attr_inner(meta: Meta) -> Attribute {
Attribute {
pound_token: default(),
style: syn::AttrStyle::Inner(default()),
bracket_token: default(),
meta,
}
}
fn attr_just(name: &'static str) -> Attribute {
attr_inner(Meta::Path(Path::from(Ident::new(name, Span::call_site()))))
}
fn attr_inner_eq(name: &'static str, expr: &str) -> Attribute {
attr_inner(Meta::NameValue(syn::MetaNameValue {
path: Path::from(Ident::new(name, Span::call_site())),
eq_token: default(),
value: syn::Expr::Lit(syn::ExprLit {
attrs: vec![],
lit: syn::Lit::Str(LitStr::new(expr, Span::call_site())),
}),
}))
}
fn extern_crate(name: &str) -> ItemExternCrate {
ItemExternCrate {
attrs: vec![],
vis: syn::Visibility::Inherited,
extern_token: default(),
crate_token: default(),
ident: Ident::new(name, Span::call_site()),
rename: None,
semi_token: default(),
}
}
fn make_use(a: &str, b: &str) -> ItemUse {
ItemUse {
attrs: vec![],
vis: syn::Visibility::Inherited,
use_token: default(),
tree: syn::UseTree::Path(UsePath {
tree: Box::new(syn::UseTree::Name(syn::UseName {
ident: Ident::new(b, Span::call_site()),
})),
ident: Ident::new(a, Span::call_site()),
colon2_token: default(),
}),
semi_token: default(),
leading_colon: None,
}
}
fn _gen_type(ty: Type) -> syn::Type {
fn make_array(mut args: Vec<Box<Type>>) -> TypeArray {
let box arity = args.pop().unwrap();
let box real = args.pop().unwrap();
drop(args);
TypeArray { bracket_token: default(), elem: Box::new(gen_type(real)), semi_token: default(), len: syn::Expr::Lit(syn::ExprLit { attrs: vec![], lit: syn::Lit::Int(LitInt::new(&arity.name, Span::call_site())) }) }
}
match ty.name.as_str() {
"Array" => syn::Type::Array(make_array(unwrap_match!(ty.arguments, TypeArguments::AngleBracketed(angle) => angle))),
name => syn::Type::Path(syn::TypePath { qself: None, path: Path::from(Ident::new(name, Span::call_site())) })
}
}
// fn gen_iface(interface: ItemInterface) -> ItemTrait {
// ItemTrait {
// attrs: default(),
// vis: syn::Visibility::Public(default()),
// unsafety: None,
// auto_token: None,
// restriction: None,
// trait_token: default(),
// ident: Ident::new(&interface.name, Span::call_site()),
// generics: ,
// colon_token: (),
// supertraits: (),
// brace_token: (),
// items: (),
// }
// }
pub fn generate(module: IDLModule) -> TokenStream {
let name = String::from("aidl_") + &module.name;
let attrs: TokenStream = [
attr_inner_eq("crate_name", &name),
attr_inner_eq("crate_type", "rlib"),
attr_just("no_implicit_prelude"),
]
.into_iter()
.map(ToTokens::into_token_stream)
.collect();
let uses: Vec<_> = module
.uses
.into_iter()
.map(|a| a.path)
.map(|(a, b)| (String::from("aidl_") + &a, b)) // aidl_core.Something
.collect();
let extern_crates: TokenStream = uses
.iter()
.map(|(a, _)| a.as_str())
.unique()
.map(extern_crate)
.map(ToTokens::into_token_stream)
.collect();
let use_defs: TokenStream = uses
.iter()
.filter_map(|(ref a, ref b)| b.as_ref().map(|b| make_use(a.as_str(), b.as_str())))
.map(ToTokens::into_token_stream)
.collect();
quote! {
#attrs
#extern_crates
#use_defs
}
}

View file

@ -1,232 +0,0 @@
use std::{
fmt::Display,
ops::{Add, AddAssign, Range},
};
use logos::Logos;
/// Every lexical token of the IDL language.
///
/// Generated [`Logos`] lexer; runs of whitespace (spaces, tabs, newlines,
/// form feeds) are skipped between tokens. `derive_more::Display` renders
/// each token back to (roughly) its source form, which the parser uses in
/// error messages.
#[derive(Logos, Debug, PartialEq, derive_more::Display, Clone)]
#[logos(skip r"[ \t\n\f]+")]
pub enum Token {
    // Punctuation.
    #[token("{")]
    #[display(fmt = "{{")]
    LeftCurly,
    #[token("}")]
    #[display(fmt = "}}")]
    RightCurly,
    #[token("(")]
    #[display(fmt = "(")]
    LeftParen,
    #[token(")")]
    #[display(fmt = ")")]
    RightParen,
    #[token(";")]
    #[display(fmt = ";")]
    Semicolon,
    #[token(":")]
    #[display(fmt = ":")]
    Colon,
    // `<` / `>`: used as the generic-argument brackets in types.
    #[token("<")]
    #[display(fmt = "<")]
    LeftArrow,
    #[token(">")]
    #[display(fmt = ">")]
    RightArrow,
    #[token(",")]
    #[display(fmt = ",")]
    Comma,
    #[token("=")]
    #[display(fmt = "=")]
    Equals,
    #[token(".")]
    #[display(fmt = ".")]
    Dot,
    // Double-quoted string; `(?s: ...)` lets escaped characters span
    // newlines. The callback strips the surrounding quotes but leaves
    // escape sequences unprocessed.
    #[regex("\"(?s:[^\"\\\\]|\\\\.)*\"", |lex| lex.slice().strip_prefix('"')?.strip_suffix('"').map(ToOwned::to_owned))]
    #[display(fmt = "\"{_0}\"")]
    StringLiteral(String),
    // Single-quoted, single-character literal (no escape support).
    #[regex(r"'.'", |lex| lex.slice().strip_prefix('\'')?.strip_suffix('\'')?.parse().ok())]
    #[display(fmt = "{_0}")]
    CharLiteral(char),
    // Optionally negated integer literal, lexed as i64.
    #[regex(r#"(-)?\d+"#, |lex| lex.slice().parse().ok())]
    #[display(fmt = "{_0}")]
    NumberLiteral(i64),
    // A bare numeric-type suffix; the slice is re-lexed into `NumberSuffix`.
    #[regex(
        "(ptr|u8|i8|u16|i16|u32|i32|u64|i64|f32|f64)",
        |lex| NumberSuffix::lexer(lex.slice()).next().and_then(Result::ok)
    )]
    #[display(fmt = "{_0}")]
    NumberSuffix(NumberSuffix),
    // Identifier or keyword; the slice is re-lexed into `Ident`.
    #[regex(r#"[a-zA-Z_][a-zA-Z\d_]*"#, |lex| Ident::lexer(lex.slice()).next().and_then(Result::ok))]
    #[display(fmt = "{_0}")]
    Ident(Ident),
    // Line comment. NOTE(review): `lex.slice()` still contains the leading
    // `//`, so the Display format below prints four slashes — confirm
    // whether the callback should strip the prefix instead.
    #[regex(r"//.*", |lex| lex.slice().parse().ok())]
    #[display(fmt = "//{_0}")]
    Comment(String),
}
/// Identifier-position tokens: the IDL keywords plus free-form names.
///
/// [`Token`]'s lexer matches the generic identifier pattern first, then
/// re-lexes the slice with this enum, so keywords take priority over
/// `Other`.
#[derive(Logos, Debug, Clone, PartialEq, Eq, derive_more::Display)]
pub enum Ident {
    #[token("Module")]
    #[display(fmt = "Module")]
    Module,
    #[token("Interface")]
    #[display(fmt = "Interface")]
    Interface,
    #[token("Function")]
    #[display(fmt = "Function")]
    Function,
    #[token("Constant")]
    #[display(fmt = "Constant")]
    Constant,
    #[token("Structure")]
    #[display(fmt = "Structure")]
    Structure,
    #[token("Alias")]
    #[display(fmt = "Alias")]
    Alias,
    #[token("Enumeration")]
    #[display(fmt = "Enumeration")]
    Enumeration,
    #[token("Use")]
    #[display(fmt = "Use")]
    Use,
    // `Make` introduces a structure-initialization expression.
    #[token("Make")]
    #[display(fmt = "Make")]
    Make,
    #[token("Takes")]
    #[display(fmt = "Takes")]
    Takes,
    #[token("Returns")]
    #[display(fmt = "Returns")]
    Returns,
    // The `_` wildcard identifier (accepted by `ask_ident`).
    #[token("_")]
    #[display(fmt = "_")]
    Underscore,
    // Any non-keyword identifier.
    #[regex(r"[a-zA-Z_][a-zA-Z\d_]*", |lex| lex.slice().parse().ok())]
    #[display(fmt = "{_0}")]
    Other(String),
}
/// Numeric type suffixes. Attachable to number literals (e.g. `42u8`, see
/// `_ask_number_literal`) and also accepted as plain type names by
/// `ask_ident`.
#[derive(Logos, Debug, Clone, Copy, PartialEq, Eq, derive_more::Display)]
pub enum NumberSuffix {
    // Pointer-sized integer.
    #[token("ptr")]
    #[display(fmt = "ptr")]
    Ptr,
    #[token("u8")]
    #[display(fmt = "u8")]
    U8,
    #[token("i8")]
    #[display(fmt = "i8")]
    I8,
    #[token("u16")]
    #[display(fmt = "u16")]
    U16,
    #[token("i16")]
    #[display(fmt = "i16")]
    I16,
    #[token("u32")]
    #[display(fmt = "u32")]
    U32,
    #[token("i32")]
    #[display(fmt = "i32")]
    I32,
    #[token("u64")]
    #[display(fmt = "u64")]
    U64,
    #[token("i64")]
    #[display(fmt = "i64")]
    I64,
    #[token("f32")]
    #[display(fmt = "f32")]
    F32,
    #[token("f64")]
    #[display(fmt = "f64")]
    F64,
}
impl NumberSuffix {
    /// Every textual suffix recognized by the lexer, in declaration order.
    ///
    /// The lifetime is spelled `'static` explicitly: eliding the lifetime
    /// of a reference in an associated `const` is deprecated and becomes a
    /// hard error in the 2024 edition.
    pub const ALL_SUFFIXES: [&'static str; 11] = [
        "ptr",
        "u8",
        "i8",
        "u16",
        "i16",
        "u32",
        "i32",
        "u64",
        "i64",
        "f32",
        "f64"
    ];
}
/// A byte range into the source text.
#[derive(Debug, Clone)]
pub struct Span(pub Range<usize>);

impl Span {
    /// The empty span at the very start of the input.
    pub const ZERO: Self = Self(0..0);

    /// Start offset (inclusive).
    pub fn lower(&self) -> usize {
        self.0.start
    }

    /// End offset (exclusive).
    pub fn upper(&self) -> usize {
        self.0.end
    }

    /// The smallest span covering both `self` and `other`.
    pub fn concat(self, other: Span) -> Self {
        let start = self.lower().min(other.lower());
        let end = self.upper().max(other.upper());
        Self(start..end)
    }
}

impl Display for Span {
    /// Renders as `start..end`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}..{}", self.0.start, self.0.end)
    }
}

impl Add for Span {
    type Output = Self;

    /// `a + b` is shorthand for [`Span::concat`].
    fn add(self, rhs: Self) -> Self::Output {
        self.concat(rhs)
    }
}

impl AddAssign for Span {
    fn add_assign(&mut self, rhs: Self) {
        let merged = self.clone().concat(rhs);
        *self = merged;
    }
}

/// A value of type `T` paired with the [`Span`] it came from.
#[derive(Debug, Clone)]
pub struct Spanned<T>(pub T, pub Span);

impl<T> Spanned<T> {
    /// Wraps `thing` with the union of `spans`.
    ///
    /// NOTE(review): the fold is seeded with `Span::ZERO`, so the resulting
    /// span always extends down to offset 0 — confirm this is intended.
    pub fn new<const N: usize>(thing: T, spans: [Span; N]) -> Self {
        let total = spans.into_iter().fold(Span::ZERO, Span::concat);
        Self(thing, total)
    }

    /// Applies `f` to the wrapped value, keeping the span.
    pub fn map<R>(self, f: impl Fn(T) -> R) -> Spanned<R> {
        Spanned(f(self.0), self.1)
    }
}

impl<T: Display> Display for Spanned<T> {
    /// Renders as `value @ start..end`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{} @ {}", self.0, self.1)
    }
}

View file

@ -1,168 +1,64 @@
#![feature(result_option_inspect)]
#![feature(box_patterns)]
#![feature(default_free_fn)]
#![allow(non_snake_case)]
use logos::Logos;
use std::{fmt::Display, path::Path, process::exit};
#[derive(Logos, Debug, PartialEq)]
#[logos(skip r"[ \t\n\f]+")] // Ignore this regex pattern between tokens
enum Token {
#[token("{")]
LeftBrace,
use ast::IDLModule;
use codegen::generate;
use codespan_reporting::{
diagnostic::{Diagnostic, Label, Severity},
files::SimpleFile,
term::{
emit,
termcolor::{StandardStream, StandardStreamLock},
Config,
},
};
use lexer::{NumberSuffix, Token};
use parser::TokenIterator;
#[token("}")]
RightBrace,
use crate::lexer::Spanned;
#[token("(")]
LeftParen,
mod ast;
mod codegen;
mod lexer;
mod parser;
#[token(")")]
RightParen,
fn precheck<N: Display + Clone, S: AsRef<str>>(
writer: &mut StandardStreamLock<'_>,
config: &Config,
file: &SimpleFile<N, S>,
) {
let mut lexer = TokenIterator::new(file.source().as_ref());
let mut diagnostics = vec![];
let mut previous = lexer.peek().ok().map(|Spanned(a, b)| Spanned(a.clone(), b));
#[token(";")]
Semicolon,
while let Ok(Spanned(token, span)) = lexer.next() {
let prev = Spanned(token.clone(), span.clone());
match token {
Token::Ident(lexer::Ident::Other(t)) if t == "Type" => {
diagnostics.push(
Diagnostic::error()
.with_labels(vec![Label::primary((), span.0)])
.with_message("`Type` is not supported anymore.")
.with_notes(vec!["use `Alias` instead of `Type`".into()]),
);
}
Token::Ident(lexer::Ident::Other(ident))
if lexer
.peek()
.is_ok_and(|Spanned(a, _)| matches!(a, Token::LeftCurly))
&& previous.is_some_and(|Spanned(ref a, _)| matches!(a, Token::Equals)) =>
{
diagnostics.push(
Diagnostic::error()
.with_message("Unknown expression")
.with_labels(vec![Label::primary((), span.0.clone())])
.with_notes(vec![
format!("add `Make` before the structure name to create a Make expression that will construct the `{ident}` structure"
),
]),
);
}
Token::Ident(lexer::Ident::Other(ident))
if NumberSuffix::ALL_SUFFIXES.contains(&ident.to_lowercase().as_str()) =>
{
diagnostics.push(
Diagnostic::warning()
.with_message("Potentially invalid use of an uppercased number type")
.with_labels(vec![Label::primary((), span.0)])
.with_notes(vec![
format!("Replace {ident} with {}", ident.to_lowercase()),
"Code generation might fail".into(),
]),
);
}
_ => {}
}
previous = Some(prev);
}
#[token(":")]
Colon,
if !diagnostics.is_empty() {
let mut was_fatal = false;
for diagnostic in diagnostics {
if let Severity::Error | Severity::Bug = &diagnostic.severity {
was_fatal = true;
}
#[token("<")]
LeftArrow,
emit(writer, config, file, &diagnostic).unwrap();
}
#[token(">")]
RightArrow,
if was_fatal {
exit(1);
}
}
#[token(",")]
Comma,
#[token("=")]
Equals,
#[regex(r#"[A-z]+"#, |lex| lex.slice().parse().ok())]
Literal(String),
#[regex("use [a-zA-Z/]+;", |lex| lex.slice().parse().ok())]
Component(String),
#[regex("U[0-9]+", |lex| lex.slice().parse().ok())]
UnsignedType(String),
#[regex("I[0-9]+", |lex| lex.slice().parse().ok())]
SignedType(String),
#[regex(r"//[ a-zA-Z!-+]+", |lex| lex.slice().parse().ok())]
Comment(String),
#[regex("[0-9]+", |lex| lex.slice().parse().ok())]
Number(String),
}
fn main() {
let mut args = std::env::args();
args.next().unwrap();
let mut lex = Token::lexer(include_str!("../../../programs/aidl/assets/vfs.idl"));
let mut ast: Option<IDLModule> = None;
if let Some(file) = args.next() {
let path = Path::new(&file);
let codespan_file = codespan_reporting::files::SimpleFile::new(
&file,
std::fs::read_to_string(path).unwrap(),
);
let writer = StandardStream::stdout(codespan_reporting::term::termcolor::ColorChoice::Auto);
let config = Config {
tab_width: 2,
..Default::default()
};
precheck(&mut writer.lock(), &config, &codespan_file);
match parser::parse(codespan_file.source()) {
Ok(ast_) => {
println!("{:#?}", ast_);
ast = Some(ast_);
}
Err(e) => {
let msg = e.to_string();
let label = match e {
parser::ParserError::UnexpectedEOF => Label::primary(
(),
(codespan_file.source().len() - 1)..codespan_file.source().len(),
)
.with_message("Unexpected end of file here"),
parser::ParserError::Unexpected(expected, Spanned(got, span)) => {
Label::primary((), span.0)
.with_message(format!("Unexpected `{got}`, expected {expected}"))
}
parser::ParserError::PleaseStopParsingUse => unsafe {
std::hint::unreachable_unchecked()
},
};
let diagnostic = codespan_reporting::diagnostic::Diagnostic::error()
.with_message(msg)
.with_labels(vec![label]);
codespan_reporting::term::emit(
&mut writer.lock(),
&config,
&codespan_file,
&diagnostic,
)
.unwrap();
for token in lex {
let ok_token = token.ok();
if ok_token.is_some() {
println!("{:?}", ok_token.unwrap());
}
}
} else {
eprintln!("No file given. Aborting.");
}
let rust = generate(ast.unwrap());
println!("{}", rust);
}
/// Matches `$x` against the single pattern `$m` and evaluates `$a` with the
/// pattern's bindings in scope; panics (`unreachable!`) on any other value.
///
/// Used where the caller has already established which variant it holds.
#[macro_export]
macro_rules! unwrap_match {
    ($x:expr, $m:pat => $a:expr) => {
        match $x {
            $m => $a,
            _ => unreachable!(),
        }
    };
}

View file

@ -1,145 +0,0 @@
use std::collections::HashMap;
use crate::{
ast::{EnumerationContent, EnumerationVariant, ItemEnumeration, Type},
lexer::{Ident, Spanned, Token},
};
use super::{Parser, ParserError};
impl<'a> Parser<'a> {
    /// Parses an `Enumeration` item:
    ///
    /// ```text
    /// Enumeration Name {
    ///     Unit,
    ///     Tuple(T, U),
    ///     Struct { field: T },
    ///     Valued = 3,
    /// }
    /// ```
    ///
    /// The header is parsed as a type so the enumeration can carry generic
    /// arguments. Each variant may have no payload, a tuple payload, a
    /// structure payload, or an explicit numeric value.
    pub fn ask_enumeration(&mut self) -> Result<Spanned<ItemEnumeration>, ParserError> {
        // `Enumeration` keyword; its span anchors the item's overall span.
        let Spanned(_, span) = self.get_real(
            |token| matches!(token, Token::Ident(Ident::Enumeration)),
            "the `Enumeration` keyword",
        )?;
        // Name plus optional generic arguments.
        let Spanned(Type { name, arguments }, _) = self.ask_type()?;
        self.get_real(
            |token| matches!(token, Token::LeftCurly),
            "an opening curly brace",
        )?;
        let mut variants = vec![];
        loop {
            match self.tokens.peek()?.0 {
                Token::Ident(Ident::Other(_)) => {
                    let Spanned(variant_name, _) = self.ask_ident()?;
                    // Payload defaults to a unit variant.
                    let mut content = EnumerationContent::None;
                    loop {
                        match self.tokens.peek()?.0 {
                            // Tuple payload: `(T, U, ...)`.
                            Token::LeftParen => {
                                self.eat();
                                let mut tuple = vec![];
                                loop {
                                    match self.tokens.peek()?.0 {
                                        Token::Ident(Ident::Other(_)) => {
                                            tuple.push(self.ask_type()?.0);
                                            // Comma is optional before the closing paren.
                                            match self.tokens.peek()?.0 {
                                                Token::Comma => {
                                                    self.eat();
                                                }
                                                Token::RightParen => {}
                                                _ => return Err(self.expected("a comma or closing parentheses")),
                                            };
                                        }
                                        Token::RightParen => {
                                            self.eat();
                                            break;
                                        }
                                        _ => {
                                            return Err(
                                                self.expected("closing parentheses or a type")
                                            )
                                        }
                                    }
                                }
                                content = EnumerationContent::Tuple(tuple);
                            }
                            // Structure payload: `{ field: Type, ... }`.
                            Token::LeftCurly => {
                                self.eat();
                                let mut structure = HashMap::<String, Type>::new();
                                loop {
                                    match self.tokens.peek()?.0 {
                                        Token::Ident(Ident::Other(_)) => {
                                            let Spanned(field_name, _) = self.ask_ident()?;
                                            self.get_real(
                                                |token| matches!(token, Token::Colon),
                                                "a colon",
                                            )?;
                                            structure.insert(field_name, self.ask_type()?.0);
                                            // Comma is optional before the closing brace.
                                            match self.tokens.peek()?.0 {
                                                Token::Comma => {
                                                    self.eat();
                                                }
                                                Token::RightCurly => {}
                                                _ => return Err(self.expected("a comma or closing curly braces")),
                                            };
                                        }
                                        Token::RightCurly => {
                                            self.eat();
                                            break;
                                        }
                                        _ => {
                                            return Err(
                                                self.expected("closing parentheses or a type")
                                            )
                                        }
                                    }
                                }
                                content = EnumerationContent::Structure(structure);
                            }
                            // Explicit discriminant: `= <number>`.
                            Token::Equals => {
                                self.eat();
                                content = EnumerationContent::Value(self._ask_number_literal()?.0);
                            }
                            _ => break,
                        }
                    }
                    // Separator after a variant; comma is optional before `}`.
                    match self.tokens.peek()?.0 {
                        Token::Comma => {
                            self.eat();
                        }
                        Token::RightCurly => {}
                        _ => return Err(self.expected("a comma or closing curly braces")),
                    }
                    variants.push(EnumerationVariant {
                        name: variant_name,
                        content,
                    });
                }
                Token::RightCurly => break,
                _ => return Err(self.expected("a closing curly brace or a variant")),
            }
        }
        // Consume the closing `}` and extend the span to cover it.
        if let Spanned(Token::RightCurly, _) = self.tokens.peek()? {
            self.eat();
            return Ok(Spanned(
                ItemEnumeration {
                    name,
                    arguments,
                    variants,
                },
                span + self.tokens.span(),
            ));
        };
        // Unreachable in practice: the loop above only exits on `RightCurly`.
        Err(self.expected("???"))
    }
}

View file

@ -1,103 +0,0 @@
use std::collections::HashMap;
use crate::{
ast::{Expr, ExprMake, Literal, NumberLiteral},
lexer::{Ident, NumberSuffix, Spanned, Token},
unwrap_match,
};
use super::{Parser, ParserError};
impl<'a> Parser<'a> {
    /// Parses an expression: currently either a literal or a `Make`
    /// structure-initialization expression.
    pub fn ask_expr(&mut self) -> Result<Spanned<Expr>, ParserError> {
        let Spanned(token, _) = self.tokens.peek()?;
        Ok(match token {
            Token::StringLiteral(_) | Token::NumberLiteral(_) | Token::CharLiteral(_) => {
                self._ask_literal()?.map(Expr::Literal)
            }
            Token::Ident(Ident::Make) => {
                self.eat();
                self._ask_struct_init()?.map(Box::new).map(Expr::Make)
            }
            _ => return Err(self.expected("an expression")),
        })
    }

    /// Parses an integer literal with an optional width suffix.
    ///
    /// Unsuffixed numbers become `NumberLiteral::Infer`; suffixed ones are
    /// cast to the suffix's type. Float suffixes are rejected here because
    /// the literal itself was lexed as an `i64`.
    pub fn _ask_number_literal(&mut self) -> Result<Spanned<NumberLiteral>, ParserError> {
        match self.tokens.next()? {
            Spanned(Token::NumberLiteral(number), mut span) => {
                let lit = if let Spanned(Token::NumberSuffix(_), sp) = self.tokens.peek()? {
                    // Grow the span to include the suffix token.
                    span += sp;
                    use NumberLiteral::*;
                    match unwrap_match!(
                        self.tokens.next()?, Spanned(Token::NumberSuffix(suffering), _) => suffering) // eat suffix
                    {
                        NumberSuffix::Ptr => Ptr(number as usize),
                        NumberSuffix::U8 => U8(number as u8),
                        NumberSuffix::I8 => I8(number as i8),
                        NumberSuffix::U16 => U16(number as u16),
                        NumberSuffix::I16 => I16(number as i16),
                        NumberSuffix::U32 => U32(number as u32),
                        NumberSuffix::I32 => I32(number as i32),
                        NumberSuffix::U64 => U64(number as u64),
                        NumberSuffix::I64 => I64(number),
                        _ => return Err(self.expected("a non-floating number suffix"))
                    }
                } else {
                    NumberLiteral::Infer(number)
                };
                Ok(Spanned(lit, span))
            }
            _ => Err(self.expected("a number literal")),
        }
    }

    /// Parses any literal: number (delegated so suffixes are handled in one
    /// place), string, or character.
    pub fn _ask_literal(&mut self) -> Result<Spanned<Literal>, ParserError> {
        if let Spanned(Token::NumberLiteral(_), _) = self.tokens.peek()? {
            return Ok(self._ask_number_literal()?.map(Literal::Number));
        };
        let Spanned(token, span) = self.tokens.next()?;
        Ok(match token {
            Token::StringLiteral(string) => Spanned(Literal::String(string), span),
            Token::CharLiteral(chr) => Spanned(Literal::Char(chr), span),
            _ => return Err(self.expected("a literal")),
        })
    }

    /// Parses the body of a `Make` expression after the keyword:
    /// `Name { field: <expr>, ... }`.
    fn _ask_struct_init(&mut self) -> Result<Spanned<ExprMake>, ParserError> {
        let Spanned(name, nSp) = self.ask_ident()?;
        let Spanned(_, _) = self.get_real(
            |token| matches!(token, Token::LeftCurly),
            "an opening curly brace (`{`)",
        )?;
        let mut params = HashMap::<String, Expr>::new();
        loop {
            match self.tokens.peek()?.0 {
                Token::Ident(_) => {
                    let Spanned(ident, _) = self.ask_ident().unwrap();
                    self.get_real(|token| matches!(token, Token::Colon), "a colon")?;
                    let Spanned(value, _) = self.ask_expr()?;
                    params.insert(ident, value);
                    // Comma is optional before the closing brace.
                    match self.tokens.peek()?.0 {
                        Token::Comma => self.eat(),
                        Token::RightCurly => {},
                        _ => return Err(self.expected("a comma or a closing curly brace"))
                    }
                }
                Token::RightCurly => break,
                _ => return Err(self.expected("an identifier or a closing curly brace (`}`)")),
            }
        }
        // Consume `}`; the span runs from the name to the closing brace.
        if let Spanned(Token::RightCurly, ccSp) = self.tokens.next()? {
            return Ok(Spanned(ExprMake { name, params }, nSp + ccSp));
        };
        Err(self.expected("closing curly braces"))
    }
}

View file

@ -1,144 +0,0 @@
use crate::{
ast::{nothing, Function, ItemInterface},
lexer::{Ident, Span, Spanned, Token},
};
use super::{Parser, ParserError};
impl<'a> Parser<'a> {
    /// Parses `Interface Name { <functions> }`.
    pub fn ask_interface(&mut self) -> Result<Spanned<ItemInterface>, ParserError> {
        // Interface
        let Spanned(_, mut span) = self.get_real(
            |token| matches!(token, Token::Ident(Ident::Interface)),
            "the `Interface` keyword",
        )?;
        // InterfaceName
        let Spanned(name, _) = self.ask_ident()?;
        // {
        self.get_real(
            |token| matches!(token, Token::LeftCurly),
            "opening curly brackets",
        )?;
        let mut functions = vec![];
        loop {
            match self.tokens.peek()? {
                Spanned(Token::RightCurly, end) => {
                    self.eat();
                    // The closing brace ends the interface's span.
                    span += end;
                    break;
                }
                Spanned(Token::Ident(Ident::Function), _) => functions.push(self.ask_function()?.0),
                _ => return Err(self.expected("A function or closing curly braces")),
            }
        }
        Ok(Spanned(ItemInterface { name, functions }, span))
    }

    /// Parses a `Function` declaration in one of three shapes:
    ///
    /// * `Function name;` — no parameters, no return value,
    /// * `Function name Takes(T, ...);` with an optional `Returns(T)` clause,
    /// * `Function name Returns(T);` — no parameters.
    pub fn ask_function(&mut self) -> Result<Spanned<Function>, ParserError> {
        let Spanned(_, bsp) = self.get_real(
            |token| matches!(token, Token::Ident(Ident::Function)),
            "the `Function` keyword",
        )?;
        let Spanned(name, _) = self.ask_ident()?;
        // The token after the name selects the declaration shape.
        let Spanned(next, esp) = self.tokens.next()?;
        match next {
            Token::Ident(Ident::Takes) => {
                self.get_real(
                    |token| matches!(token, Token::LeftParen),
                    "Opening parentheses",
                )?;
                let mut takes = vec![];
                // Return type defaults to "nothing" unless Returns follows.
                let mut returns = nothing();
                loop {
                    let Spanned(peeked, _) = self.tokens.peek()?;
                    match peeked {
                        Token::Ident(_) => {
                            takes.push(self.ask_type()?.0);
                            // Comma is optional before the closing paren.
                            match self.tokens.peek()?.0 {
                                Token::Comma => {
                                    self.eat();
                                }
                                Token::RightParen => {}
                                _ => return Err(self.expected("a comma or closing parentheses")),
                            };
                        }
                        Token::RightParen => {
                            self.eat();
                            break;
                        }
                        _ => return Err(self.expected("closing parentheses or a type name")),
                    }
                }
                // After the parameter list: either `;` or `Returns(T);`.
                match self.tokens.next()?.0 {
                    Token::Semicolon => {}
                    Token::Ident(Ident::Returns) => {
                        self.get_real(
                            |token| matches!(token, Token::LeftParen),
                            "opening parentheses",
                        )?;
                        let Spanned(returns_, _) = self.ask_type()?;
                        returns = returns_;
                        self.get_real(
                            |token| matches!(token, Token::RightParen),
                            "closing parentheses",
                        )?;
                        self.semi()?;
                    }
                    _ => return Err(self.expected("a semicolon or a Returns clause")),
                }
                Ok(Spanned(
                    Function {
                        name,
                        takes,
                        returns,
                    },
                    bsp + Span(self.tokens.lexer.span()),
                ))
            }
            Token::Ident(Ident::Returns) => {
                self.get_real(
                    |token| matches!(token, Token::LeftParen),
                    "Opening parentheses",
                )?;
                let Spanned(returns, _) = self.ask_type()?;
                self.get_real(
                    |token| matches!(token, Token::RightParen),
                    "Closing parentheses",
                )?;
                Ok(Spanned(
                    Function {
                        name,
                        takes: Vec::new(),
                        returns,
                    },
                    bsp + self.semi()?,
                ))
            }
            // Bare `Function name;`: no parameters, no return value.
            Token::Semicolon => Ok(Spanned(
                Function {
                    name,
                    takes: Vec::new(),
                    returns: nothing(),
                },
                bsp + esp,
            )),
            _ => Err(self.expected("a Takes clause, a Returns clause or a semicolon")),
        }
    }
}

View file

@ -1,244 +0,0 @@
mod enumeration;
mod expr;
mod interface;
mod structure;
mod types;
use logos::{Lexer, Logos};
use crate::{
ast::{IDLModule, Item, ItemAlias, ItemConstant, UseDecl},
lexer::{Ident, Span, Spanned, Token},
};
use std::iter::Iterator;
/// Lookahead-of-one token stream over the [`Logos`] lexer.
///
/// Comments and lexer errors are silently skipped; end of input surfaces as
/// [`ParserError::UnexpectedEOF`].
pub struct TokenIterator<'a> {
    lexer: Lexer<'a, Token>,
    // One-slot lookahead buffer: `Some(None)` means a peek already hit EOF.
    peeked: Option<Option<Token>>,
}

/// Stream filter: drops lexer errors and `Comment` tokens.
fn token_is_not_comment(a: &Result<Token, ()>) -> bool {
    !matches!(a, Err(_) | Ok(Token::Comment(..)))
}

impl<'a> TokenIterator<'a> {
    /// Creates a token stream over `src`.
    pub fn new(src: &'a str) -> Self {
        let lexer = Token::lexer(src);
        Self {
            lexer,
            peeked: None,
        }
    }

    /// Consumes and returns the next non-comment token (buffered lookahead
    /// first), or `UnexpectedEOF` when the input is exhausted.
    pub fn next(&mut self) -> Result<Spanned<Token>, ParserError> {
        let n = match self.peeked.take() {
            Some(thing) => thing,
            None => self.lexer.find(token_is_not_comment).and_then(Result::ok),
        };
        let nxt = n.map(|token| Spanned(token, Span(self.lexer.span())));
        // println!("[NEXT] {:#?}", nxt);
        nxt.ok_or(ParserError::UnexpectedEOF)
    }

    /// Fills the lookahead slot if empty and borrows its contents.
    fn _peek(&mut self) -> Option<&Token> {
        self.peeked
            .get_or_insert_with(|| self.lexer.find(token_is_not_comment).and_then(Result::ok))
            .as_ref()
    }

    /// Borrows the next token without consuming it.
    ///
    /// NOTE(review): the span is captured *before* the lookahead slot is
    /// filled, so on a fresh peek it refers to the previously lexed token's
    /// range — confirm this is intended.
    pub fn peek(&mut self) -> Result<Spanned<&Token>, ParserError> {
        let span = Span(self.lexer.span());
        let peek = self._peek().map(|token| Spanned(token, span));
        // println!("[PEEK] {:#?}", peek);
        peek.ok_or(ParserError::UnexpectedEOF)
    }

    /// The lexer's current slice with its span (used for error reporting).
    pub fn current(&self) -> Spanned<String> {
        Spanned(self.lexer.slice().to_owned(), self.span())
    }

    /// The span of the most recently lexed token.
    pub fn span(&self) -> Span {
        Span(self.lexer.span())
    }
}
/// Recursive-descent parser over a [`TokenIterator`].
pub(crate) struct Parser<'a> {
    tokens: TokenIterator<'a>,
}

impl<'a> Parser<'a> {
    /// Creates a parser over `src`.
    pub fn new(src: &'a str) -> Self {
        Self {
            tokens: TokenIterator::new(src),
        }
    }

    /// Consumes one token, discarding it (and any EOF error).
    fn eat(&mut self) {
        let _ = self.tokens.next();
    }

    /// Consumes and returns the next token if `matcher` accepts it,
    /// otherwise reports `expected` at the current position.
    fn get_real(
        &mut self,
        matcher: impl Fn(&Token) -> bool,
        expected: &'static str,
    ) -> Result<Spanned<Token>, ParserError> {
        if matcher(self.tokens.peek()?.0) {
            self.tokens.next()
        } else {
            Err(self.expected(expected))
        }
    }

    /// Builds an `Unexpected` error from the lexer's current slice.
    fn expected(&self, expected: &'static str) -> ParserError {
        ParserError::Unexpected(expected.to_owned(), self.tokens.current())
    }

    /// Requires a `;`, returning its span.
    fn semi(&mut self) -> Result<Span, ParserError> {
        Ok(self
            .get_real(|token| matches!(token, Token::Semicolon), "a semicolon")?
            .1)
    }

    /// Parses an identifier. Also accepts `_` and bare number suffixes
    /// (`u8`, `f64`, ...) so they can appear in identifier position.
    fn ask_ident(&mut self) -> Result<Spanned<String>, ParserError> {
        Ok(
            match self.get_real(
                |token| {
                    matches!(
                        token,
                        Token::Ident(Ident::Other(_) | Ident::Underscore) | Token::NumberSuffix(_)
                    )
                },
                "an identifier",
            )? {
                Spanned(Token::Ident(Ident::Other(ident)), span) => Spanned(ident, span),
                Spanned(Token::Ident(Ident::Underscore), span) => Spanned("_".to_owned(), span),
                Spanned(Token::NumberSuffix(suffix), span) => Spanned(suffix.to_string(), span),
                // get_real only admits the three variants handled above.
                _ => unreachable!(),
            },
        )
    }

    /// Parses `Alias Name = Type;`.
    fn ask_alias(&mut self) -> Result<Spanned<ItemAlias>, ParserError> {
        let Spanned(_, kSp) = self.get_real(
            |token| matches!(token, Token::Ident(Ident::Alias)),
            "`Alias`",
        )?;
        let Spanned(name, nSp) = self.ask_ident()?;
        let Spanned(_, eqSp) = self.get_real(|token| matches!(token, Token::Equals), "`=`")?;
        let Spanned(referree, rSp) = self.ask_type()?;
        Ok(Spanned::new(
            ItemAlias { name, referree },
            [kSp, nSp, eqSp, rSp, self.semi()?],
        ))
    }

    /// Parses `Constant Name = <expr>;`.
    fn ask_constant(&mut self) -> Result<Spanned<ItemConstant>, ParserError> {
        let Spanned(_, kSp) = self.get_real(
            |token| matches!(token, Token::Ident(Ident::Constant)),
            "`Constant`",
        )?;
        let Spanned(name, nSp) = self.ask_ident()?;
        let Spanned(_, eqSp) = self.get_real(|token| matches!(token, Token::Equals), "`=`")?;
        let Spanned(expr, exprSp) = self.ask_expr()?;
        Ok(Spanned::new(
            ItemConstant { name, expr },
            [kSp, nSp, eqSp, exprSp, self.semi()?],
        ))
    }

    /// Dispatches on the leading keyword to parse one top-level item.
    fn ask_item(&mut self) -> Result<Spanned<Item>, ParserError> {
        Ok(match self.tokens.peek()?.0 {
            Token::Ident(Ident::Other(_)) => {
                Err(self.expected("a keyword, not just an identifier"))?
            }
            Token::Ident(keyword) => match keyword {
                Ident::Interface => self.ask_interface()?.map(Item::Interface),
                Ident::Structure => self.ask_structure()?.map(Item::Structure),
                Ident::Alias => self.ask_alias()?.map(Item::Alias),
                Ident::Constant => self.ask_constant()?.map(Item::Constant),
                Ident::Function => self.ask_function()?.map(Item::Function),
                Ident::Enumeration => self.ask_enumeration()?.map(Item::Enumeration),
                _ => Err(self.expected("an item denoting keyword (Interface, Structure, Alias, Constant, Function, Enumeration)"))?,
            },
            _ => Err(self.expected("a keyword"))?,
        })
    }

    /// Parses `Use module;` or `Use module.Item;`.
    ///
    /// Returns `PleaseStopParsingUse` — a control-flow sentinel, not a real
    /// error — when the next token does not start a `Use` declaration.
    fn ask_use(&mut self) -> Result<Spanned<UseDecl>, ParserError> {
        let Spanned(_, span) = {
            match self.tokens.peek()? {
                Spanned(Token::Ident(Ident::Use), _) => Ok(self.tokens.next()?),
                _ => Err(ParserError::PleaseStopParsingUse),
            }
        }?;
        // Path is (module, optional item after a dot).
        let mut path = (self.ask_ident()?.0, None);
        if let Token::Dot = self.tokens.peek()?.0 {
            self.eat();
            path.1 = Some(self.ask_ident()?.0);
        }
        Ok(Spanned::new(UseDecl { path }, [span, self.semi()?]))
    }

    /// Parses a whole module: the `Module name;` header, then `Use`
    /// declarations, then the remaining items until end of input.
    pub fn parse(mut self) -> Result<IDLModule, ParserError> {
        Ok(IDLModule {
            name: {
                self.get_real(
                    |token| matches!(token, Token::Ident(Ident::Module)),
                    "the `Module` keyword",
                )?;
                let name = self.ask_ident()?.0;
                self.semi()?;
                name
            },
            uses: {
                let mut real = vec![];
                loop {
                    let r = self.ask_use();
                    match r {
                        Ok(Spanned(a, _)) => real.push(a),
                        Err(ParserError::UnexpectedEOF) => return Err(ParserError::UnexpectedEOF),
                        // Sentinel: the Use section is over.
                        Err(ParserError::PleaseStopParsingUse) => break,
                        Err(unexpected @ ParserError::Unexpected(..)) => return Err(unexpected),
                    }
                }
                Ok(real)
            }?,
            items: fill_while(|| self.ask_item())?,
        })
    }
}
/// Repeatedly calls `f`, collecting parsed values until end of input.
///
/// `Unexpected` errors abort and propagate; `UnexpectedEOF` terminates the
/// loop normally. `PleaseStopParsingUse` is only produced while parsing
/// `Use` declarations and can never reach this helper.
fn fill_while<T>(
    mut f: impl FnMut() -> Result<Spanned<T>, ParserError>,
) -> Result<Vec<T>, ParserError> {
    let mut items = Vec::new();
    loop {
        match f() {
            Ok(Spanned(item, _)) => items.push(item),
            Err(ParserError::UnexpectedEOF) => return Ok(items),
            Err(err @ ParserError::Unexpected(..)) => return Err(err),
            Err(ParserError::PleaseStopParsingUse) => unreachable!(),
        }
    }
}
/// Parses `source` into an [`IDLModule`] using a fresh [`Parser`].
pub fn parse(source: &str) -> Result<IDLModule, ParserError> {
    let parser = Parser::new(source);
    parser.parse()
}
/// Errors produced while parsing an IDL module.
#[derive(thiserror::Error, Debug)]
pub enum ParserError {
    // expected, got
    /// A token other than the expected one was found: the description of
    /// what was expected, and the offending token text with its span.
    #[error("Unexpected `{_1}`, expected {_0}")]
    Unexpected(String, Spanned<String>),
    /// Input ended in the middle of an item.
    #[error("Unexpected end of file")]
    UnexpectedEOF,
    /// Internal sentinel marking the end of the `Use` section; handled
    /// inside `Parser::parse` and never escapes the parser.
    #[error("please stop.")]
    PleaseStopParsingUse,
}

View file

@ -1,58 +0,0 @@
use std::collections::HashMap;
use crate::{
ast::{ItemStructure, Type},
lexer::{Ident, Spanned, Token},
};
use super::{Parser, ParserError};
impl<'a> Parser<'a> {
    /// Parses a `Structure` item: `Structure Name { field: Type, ... }`.
    ///
    /// The header is parsed as a type so the structure can carry generic
    /// arguments. Field order is not preserved (fields go into a `HashMap`).
    pub fn ask_structure(&mut self) -> Result<Spanned<ItemStructure>, ParserError> {
        let Spanned(_, span) = self.get_real(
            |token| matches!(token, Token::Ident(Ident::Structure)),
            "the `Structure` keyword",
        )?;
        let Spanned(Type { name, arguments }, _) = self.ask_type()?;
        let Spanned(_, _) = self.get_real(
            |token| matches!(token, Token::LeftCurly),
            "an opening curly brace (`{`)",
        )?;
        let mut fields = HashMap::<String, Type>::new();
        loop {
            match self.tokens.peek()?.0 {
                Token::Ident(_) => {
                    let Spanned(ident, _) = self.ask_ident().unwrap();
                    self.get_real(|token| matches!(token, Token::Colon), "a colon")?;
                    let Spanned(value, _) = self.ask_type()?;
                    fields.insert(ident, value);
                    // Comma is optional before the closing brace.
                    match self.tokens.peek()?.0 {
                        Token::Comma => {
                            self.eat();
                        }
                        Token::RightCurly => {}
                        _ => return Err(self.expected("a comma or closing curly braces")),
                    };
                }
                Token::RightCurly => break,
                _ => return Err(self.expected("an identifier or a closing curly brace (`}`)")),
            }
        }
        // Consume the closing `}`; its span ends the item's span.
        if let Spanned(Token::RightCurly, end) = self.tokens.next()? {
            return Ok(Spanned(
                ItemStructure {
                    name,
                    fields,
                    arguments,
                },
                span + end,
            ));
        };
        Err(self.expected("closing curly braces"))
    }
}

View file

@ -1,66 +0,0 @@
use crate::{
ast::{Type, TypeArguments, INFER_TYPE},
lexer::{Spanned, Token},
};
use super::{Parser, ParserError};
impl<'a> Parser<'a> {
    /// Parses a type reference: an identifier (or number literal) optionally
    /// followed by angle-bracketed arguments, e.g. `Vec<u8, 16>`.
    ///
    /// An identifier equal to `INFER_TYPE` yields the inferred type.
    pub fn ask_type(&mut self) -> Result<Spanned<Type>, ParserError> {
        // A bare number is allowed in type position (its text becomes the
        // type name, with no arguments).
        if let Spanned(Token::NumberLiteral(_), _) = self.tokens.peek()? {
            let Spanned(number, span) = self._ask_number_literal()?;
            return Ok(Spanned(
                Type {
                    name: number.to_string(),
                    arguments: TypeArguments::None,
                },
                span,
            ));
        };
        let Spanned(name, span) = self.ask_ident()?;
        if name == INFER_TYPE {
            return Ok(Spanned(Type::infer(), span));
        }
        let mut arguments = TypeArguments::None;
        // Optional `<...>` argument list.
        if let Spanned(crate::lexer::Token::LeftArrow, _) = self.tokens.peek()? {
            self.eat(); // eat `<`
            let mut args = vec![];
            // First argument, then the comma-separated rest.
            args.push(Box::new(self.ask_type()?.0));
            match self.tokens.peek()?.0 {
                Token::Comma => self.eat(),
                Token::RightArrow => {}
                _ => return Err(self.expected("a comma or closing angle brackets")),
            };
            loop {
                match self.tokens.peek()? {
                    Spanned(Token::Ident(_) | Token::NumberLiteral(_), _) => {
                        args.push(Box::new(self.ask_type()?.0));
                        match self.tokens.peek()?.0 {
                            Token::Comma => self.eat(),
                            Token::RightArrow => {}
                            _ => return Err(self.expected("a comma or closing angle brackets")),
                        }
                    }
                    Spanned(Token::RightArrow, _) => {
                        self.eat();
                        break;
                    }
                    _ => return Err(self.expected("closing angle brackets or a type name")),
                }
            }
            arguments = TypeArguments::AngleBracketed(args);
        };
        Ok(Spanned(Type { name, arguments }, span + self.tokens.span()))
    }
}

View file

@ -1,8 +0,0 @@
# aidl status
## parser
- [x] `Use module.thing`
- [x] `Alias Thing = OtherThing`
## codegen
TODO!

0
programs/std_test/run.sh Executable file → Normal file
View file

View file

@ -20,37 +20,42 @@ resolution = "1024x768x24"
core = "https://git.ablecorp.us/AbleOS/core"
userspace = "https://git.ablecorp.us/AbleOS/ableos_userland"
[packages]
[packages.list_files]
[apps]
[apps.list_files]
version = "0.1.1"
hash = ""
repo = "userspace"
authors = []
[packages.list_files.configuration]
[users]
[users.able]
home = "/home/able/"
password_hash = "abc123"
[users.able.repositories]
able_repo = "https://git.ablecorp.us/able/ableos_packages"
[users.able.packages]
[users.able.packages.ablecraft]
able_repo = "https://git.ablecorp.us/able/ableos_apps"
[users.able.apps]
[users.able.apps.able_editor]
version = "1.0"
[users.able.apps.able_editor.plugins.rustfmt]
version = "1.0"
hash = ""
repo = "default"
[users.able.apps.able_editor.plugins.repositories]
default = ["https://github.com/able_editor/plugins"]
[users.able.apps.ablecraft]
version = "0.1.1"
hash = ""
repo = "able_repo"
[users.able.packages.ablecraft.configuration]
[users.able.packages.list_files.configuration]
# [users.able.apps.ablecraft.configuration]
[users.able.apps.list_files.configuration]
use_color = true
[users.able.packages.list_files.permissions]
[users.able.apps.list_files.permissions]
file_paths = ["/"]
[users.chad]
home = "/chad/"
password_hash = "abc123"
[users.chad.repositories]
[users.chad.packages]

View file

@ -1,20 +1,25 @@
use std::{
use {
std::{
env,
error::Error,
fmt::format,
fs::{self, File},
io::{Read, Write},
path,
},
toml::{self, Table, Value},
};
use toml::Table;
use toml::{self, Value};
fn main() -> Result<(), Box<dyn Error>> {
let mut args: Vec<String> = env::args().collect();
// #[cfg(target_os="linux")]
if args.len() == 1 {
println!("Pass a file");
}
args.remove(0);
let file_path = &args[0];
println!("{}", file_path);
println!("configuration path - {}", file_path);
let mut file = File::open(file_path).expect("Unable to open the file");
let mut file = File::open(file_path).unwrap(); //.expect("Unable to open the file");
let mut contents = String::new();
file.read_to_string(&mut contents)
@ -28,7 +33,8 @@ fn main() -> Result<(), Box<dyn Error>> {
// HANDLE repolist
{
let repolist = value.get("repositories").unwrap();
let ret = make_repolist("repolist.toml".to_string(), repolist);
fs::create_dir("disk/System")?;
let ret = make_repolist("System/repolist.toml".to_string(), repolist);
println!("making repolist {:?}", ret);
}
@ -55,7 +61,8 @@ fn main() -> Result<(), Box<dyn Error>> {
for user in users_table.keys() {
let ut = users_table.get(user).unwrap();
let home_path = ut.get("home").unwrap();
// let home_path = ut.get("home").unwrap();
let home_path = format!("/Users/{}", user);
let pass_hash = ut.get("password_hash").unwrap();
passhash_list.push((user.to_string(), pass_hash.to_string()));
@ -63,27 +70,32 @@ fn main() -> Result<(), Box<dyn Error>> {
let ret = make_user(home_path.to_string());
println!("making user return {:?}", ret);
// Handle homepath generation of USER
{
let mut hp = home_path.clone().to_string();
hp.remove(0);
hp.remove(0);
hp.remove(hp.len() - 1);
hp.remove(hp.len() - 1);
for package in ut.get("packages").unwrap().as_table().unwrap().keys() {
let pack_folder: String = format!("disk/{}/{}", hp, package);
let pack_config: String = format!("disk/{}/{}/config.toml", hp, package);
fs::create_dir(pack_folder)?;
{
let hp = home_path.clone().to_string();
let apps = ut.get("apps");
fs::create_dir_all(format!("disk/{}/Apps/", hp))?;
println!("ok");
if apps.is_some() {
let apps = apps.unwrap();
for app in apps.as_table().unwrap().keys() {
let pack_folder: String = format!("disk/{}/Apps/{}", hp, app);
let pack_config: String = format!("disk/{}/Apps/{}/config.toml", hp, app);
fs::create_dir_all(pack_folder)?;
let mut file = File::create(pack_config)?;
// repo_list_str.as_bytes()
let abc = ut
.get("packages")
.unwrap()
.get(package)
.unwrap()
.get("configuration")
.unwrap();
let mut abc = abc.to_string();
let abc = apps.get(app).unwrap().get("configuration");
let tab = match abc {
Some(val) => val.clone(),
None => {
let tab = Table::new();
Value::Table(tab)
}
};
// .unwrap_or(&Value::Table(Table::new()));
let mut abc = tab.to_string();
if abc.len() > 2 {
abc.remove(0);
abc.remove(0);
@ -98,26 +110,33 @@ fn main() -> Result<(), Box<dyn Error>> {
file.write_all(abc.as_bytes())?;
}
}
{
let repolist = ut.get("repositories").unwrap();
let mut hp = home_path.clone().to_string();
hp.remove(0);
hp.remove(0);
hp.remove(hp.len() - 1);
hp.remove(hp.len() - 1);
}
let repolist = ut.get("repositories");
let hp = home_path.clone().to_string();
let user_path_repolist = format!("{}/repolist.toml", hp.to_string());
println!("{}", user_path_repolist);
match repolist {
Some(repolist) => {
let ret = make_repolist(user_path_repolist, repolist);
println!("making repolist {:?}", ret);
}
None => {
let repolist = Table::new();
let ret = make_repolist(user_path_repolist, &Value::Table(repolist));
println!("making repolist {:?}", ret);
}
}
}
let ret = generate_password_hashlist(passhash_list);
println!("making password hashlist {:?}", ret);
}
// TODO: Handle system wide things like repolist and Apps/
{}
// let mut file = File::create("disk/foo.txt")?;
// file.write_all(b"Hello, world!")?;
@ -192,10 +211,7 @@ TERM_BACKDROP={}
Ok(())
}
pub fn make_user(mut home_path: String) -> std::io::Result<()> {
home_path.remove(0);
home_path.remove(home_path.len() - 1);
pub fn make_user(home_path: String) -> std::io::Result<()> {
let path = format!("disk/{}", home_path);
fs::create_dir_all(path)?;
Ok(())
@ -203,6 +219,7 @@ pub fn make_user(mut home_path: String) -> std::io::Result<()> {
pub fn make_repolist(path: String, repolist: &Value) -> std::io::Result<()> {
let path = format!("disk/{}", path);
let mut file = File::create(path)?;
let mut repo_list_str = String::new();
for (repo_name, repo_url) in repolist.as_table().unwrap() {

1
rust-toolchain Normal file
View file

@ -0,0 +1 @@
nightly

View file

@ -1,3 +0,0 @@
[toolchain]
channel = "nightly"
components = ["cargo", "clippy", "rustfmt", "rust-analyzer"]

3
rustfmt.toml Normal file
View file

@ -0,0 +1,3 @@
hex_literal_case = "Upper"
imports_granularity = "One"
struct_field_align_threshold = 5

31
shell.nix Normal file
View file

@ -0,0 +1,31 @@
# Nix development shell for this workspace: provides clang, LLVM binutils
# and rustup, pinned to the toolchain named in ./rust-toolchain.
{ pkgs ? import <nixpkgs> { } }:
pkgs.mkShell rec {
  buildInputs = with pkgs; [ clang llvmPackages.bintools rustup ];
  # Toolchain channel (e.g. "nightly") read from the rust-toolchain file.
  RUSTC_VERSION = pkgs.lib.readFile ./rust-toolchain;
  # https://github.com/rust-lang/rust-bindgen#environment-variables
  LIBCLANG_PATH =
    pkgs.lib.makeLibraryPath [ pkgs.llvmPackages_latest.libclang.lib ];
  # Put cargo- and rustup-managed binaries on PATH for the shell session.
  shellHook = ''
    export PATH=$PATH:''${CARGO_HOME:-~/.cargo}/bin
    export PATH=$PATH:''${RUSTUP_HOME:-~/.rustup}/toolchains/$RUSTC_VERSION-x86_64-unknown-linux-gnu/bin/
  '';
  # Add precompiled library to rustc search path
  RUSTFLAGS = (builtins.map (a: "-L ${a}/lib") [
    # add libraries here (e.g. pkgs.libvmi)
  ]);
  # Add glibc, clang, glib and other headers to bindgen search path
  BINDGEN_EXTRA_CLANG_ARGS =
    # Includes with normal include path
    (builtins.map (a: ''-I"${a}/include"'') [
      # add dev libraries here (e.g. pkgs.libvmi.dev)
      pkgs.glibc.dev
    ])
    # Includes with special directory paths
    ++ [
      ''
        -I"${pkgs.llvmPackages_latest.libclang.lib}/lib/clang/${pkgs.llvmPackages_latest.libclang.version}/include"''
      ''-I"${pkgs.glib.dev}/include/glib-2.0"''
      "-I${pkgs.glib.out}/lib/glib-2.0/include/"
    ];
}
}