Compare commits


7 commits

Author SHA1 Message Date
griffi-gh 7108afa662 remove unused 2024-05-04 00:07:11 +02:00
griffi-gh 11ad2cdc77 preheat chunks on server 2024-05-04 00:05:16 +02:00
griffi-gh dac3c10aee fix mesh not updating 2024-05-03 23:43:01 +02:00
griffi-gh e9000af878 handle unsub requests 2024-05-03 23:39:37 +02:00
griffi-gh 49753ecc4c refactor state transitions 2024-05-03 23:29:52 +02:00
griffi-gh 94fa5268fa remove outdated info from readme 2024-05-03 20:27:39 +02:00
griffi-gh e3692bca94 update 2024-05-03 20:18:45 +02:00
9 changed files with 278 additions and 153 deletions

.gitignore vendored
View file

@@ -18,3 +18,12 @@ _visualizer.json
 /*_log*.txt
 /*.log
+# make sure build artifacts and binaries are not committed
+*.d
+*.pdb
+*.exe
+*.dll
+*.so
+*.dylib
+*.rlib

View file

@@ -11,23 +11,22 @@
 <p>
 <ul>
 <li>multithreaded procedural world generation</li>
-<li>procedural structures</li>
+<li>procedural structures and block queue</li>
 <li>multithreaded mesh generation</li>
 <li>cubic chunks (32x32x32)</li>
 <li>low-level OpenGL renderer, targetting OpenGL ES 3.0</li>
 <li>frustum culling</li>
-<li>multiplayer support <sup><code>[1]</code></sup></li>
-<li>block placement system</li>
-<li>basic gui<sup><code>[5]</code></sup></li>
-<li>cross platform: windows, linux, osx <sup><code>[2]</code></sup>, android <sup><code>[3]</code></sup></li>
-<li>universal input system: supports keyboard, mouse, gamepad and touch input <sup><code>[4]</code></sup></li>
+<li>multiplayer networking</li>
+<li>immediate ui system <sup><code>[1]</code></sup></li>
+<li>cross platform: windows, linux, osx, android <sup><code>[2]</code></sup></li>
+<li>universal input system: supports keyboard, mouse, gamepad and touch input <sup><code>[3]</code></sup></li>
+<li>support for semi-transparet blocks<sup><code>[4]</code></sup></li>
 </ul>
 <h6>
-<code>[1]</code> - multiplayer is work-in-progress<br>
-<code>[2]</code> - not tested on macos<br>
-<code>[3]</code> - android support is experimental<br>
-<code>[4]</code> - mouse/gamepad input is not supported on android<br>
-<code>[5]</code> - currently only used on the loading screen
+<code>[1]</code> - developed out-of-tree <a href="https://github.com/griffi-gh/hUI">here</a> since 2024<br>
+<code>[2]</code> - android support is experimental<br>
+<code>[3]</code> - mouse and gamepad input is not supported on android<br>
+<code>[4]</code> - work in progress, may cause issues<br>
 </h6>
 </p>
@@ -36,24 +35,17 @@
 <h2>build for windows/linux</h2>
-**build/run**
 ```bash
-cargo build --bin kubi
-cargo run --bin kubi
-```
-**build in release mode, with nightly optimizations**
-```bash
-cargo +nightly build --bin kubi --features nightly --release
+cargo build -p kubi
+cargo run -p kubi
+#or, build with release mode optimizations:
+cargo run -p kubi --release
 ```
 <h2>build for android</h2>
 please note that android support is highly experimental!\
-gamepad, mouse input is currently borked, and proper touch controls are not available.\
-srgb and blending are broken too, which leads to many rendering issues
+gamepad, mouse input is currently borked, as well as srgb, which leads to dark textures.
 prerequisites: Android SDK, NDK, command line tools, platform-tools, latest JDK\
 (make sure that your `PATH`, `ANDROID_HOME` and `ANDROID_NDK_ROOT` variables are configured properly)
@@ -95,7 +87,7 @@ cargo apk run -p kubi --lib --no-default-features
 <h2>mutiplayer</h2>
-to join a multiplayer server, just pass the ip address as an argument
+to join a multiplayer server, just pass the ip address as the first argument
 ```sh
 cargo run -p kubi -- 127.0.0.1:1234
@@ -119,7 +111,7 @@ name = "Kubi Server" # server name
 <h2>"In-house" libraries</h2>
 - [`hui`, `hui-glium`, `hui-winit`](https://github.com/griffi-gh/hui): semi-imm.mode backend-agnostic ui system\
-- [`kubi-logging`](kubi-logging) fancy-ass custom formatter for `env-logger`
+- [`kubi-logging`](kubi-logging) fancy custom formatter for `env-logger`
 deprecated:

View file

@@ -5,6 +5,7 @@ timeout_ms = 10000
 [world]
 seed = 0xfeb_face_dead_cafe
+preheat_radius = 8
 [query]
 name = "Kubi Server"

View file

@@ -13,6 +13,7 @@ pub struct ConfigTableServer {
 #[derive(Serialize, Deserialize)]
 pub struct ConfigTableWorld {
   pub seed: u64,
+  pub preheat_radius: u32,
 }
 #[derive(Serialize, Deserialize)]
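The new `preheat_radius` key in Server.toml maps onto the `pub preheat_radius: u32` field above. A minimal round-trip sketch, with trimmed-down local copies of the structs and the `toml` crate used purely for illustration (the server's actual config loader may take a different path):

```rust
use serde::Deserialize;

// Trimmed-down stand-ins for the config structs shown in the diff above.
#[derive(Deserialize)]
struct ConfigTableWorld {
  seed: u64,
  preheat_radius: u32,
}

#[derive(Deserialize)]
struct ConfigTable {
  world: ConfigTableWorld,
}

fn main() {
  // Mirrors the [world] table from Server.toml above.
  let cfg: ConfigTable = toml::from_str(
    "[world]\nseed = 0xfeb_face_dead_cafe\npreheat_radius = 8"
  ).unwrap();
  assert_eq!(cfg.world.preheat_radius, 8);
}
```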

View file

@@ -1,4 +1,4 @@
-use shipyard::{World, Workload, IntoWorkload};
+use shipyard::{IntoWorkload, Workload, WorkloadModificator, World};
 use std::{thread, time::Duration};
 mod util;
@@ -19,7 +19,7 @@ fn initialize() -> Workload {
     read_config,
     bind_server,
     init_client_maps,
-    init_world,
+    init_world.after_all(read_config),
   ).into_workload()
 }

View file

@@ -1,4 +1,4 @@
-use shipyard::{Unique, UniqueView, UniqueViewMut, Workload, IntoWorkload, AllStoragesView, View, Get, NonSendSync, IntoIter};
+use shipyard::{AllStoragesView, Get, IntoIter, IntoWorkload, NonSendSync, SystemModificator, Unique, UniqueView, UniqueViewMut, View, Workload};
 use glam::IVec3;
 use hashbrown::HashMap;
 use kubi_shared::{
@@ -13,7 +13,7 @@ use kubi_shared::{
 use uflow::{server::RemoteClient, SendMode};
 use lz4_flex::compress_prepend_size as lz4_compress;
 use anyhow::Result;
-use std::{rc::Rc, cell::RefCell};
+use std::{cell::RefCell, rc::Rc};
 use kubi_shared::networking::client::ClientIdMap;
 use crate::{
   server::{UdpServer, ServerEvents},
@@ -64,7 +64,7 @@ pub fn send_chunk_compressed(
   let ser_message = ser_message.into_boxed_slice();
   client.borrow_mut().send(
     ser_message,
-    Channel::World as usize,
+    Channel::WorldData as usize,
     SendMode::Reliable
   );
   Ok(())
@@ -168,6 +168,32 @@
   }
 }
+fn process_chunk_unsubscribe_events(
+  server: NonSendSync<UniqueView<UdpServer>>,
+  events: UniqueView<ServerEvents>,
+  mut chunk_manager: UniqueViewMut<ChunkManager>,
+  addr_map: UniqueView<ClientAddressMap>,
+  clients: View<Client>
+) {
+  for event in &events.0 {
+    let Some(message) = check_message_auth
+      ::<{ClientToServerMessageType::ChunkUnsubscribe as u8}>
+      (&server, event, &clients, &addr_map) else { continue };
+    let ClientToServerMessage::ChunkUnsubscribe { chunk: chunk_position } = message.message else {
+      unreachable!()
+    };
+    let Some(chunk) = chunk_manager.chunks.get_mut(&chunk_position) else {
+      log::warn!("tried to unsubscribe from non-existent chunk");
+      continue
+    };
+    chunk.subscriptions.remove(&message.client_id);
+    //TODO unload chunk if no more subscribers
+  }
+}
 fn process_block_queue_messages(
   server: NonSendSync<UniqueView<UdpServer>>,
   events: UniqueView<ServerEvents>,
@@ -239,10 +265,33 @@ fn init_chunk_manager_and_block_queue(
   storages.add_unique(LocalBlockQueue::default());
 }
+pub fn preheat_world(
+  mut chunk_manager: UniqueViewMut<ChunkManager>,
+  task_manager: UniqueView<ChunkTaskManager>,
+  config: UniqueView<ConfigTable>,
+) {
+  let r = config.world.preheat_radius as i32;
+  for x in -r..=r {
+    for y in -r..=r {
+      for z in -r..=r {
+        let chunk_position = IVec3::new(x, y, z);
+        let mut chunk = Chunk::new();
+        chunk.state = ChunkState::Loading;
+        chunk_manager.chunks.insert(chunk_position, chunk);
+        task_manager.spawn_task(ChunkTask::LoadChunk {
+          position: chunk_position,
+          seed: config.world.seed,
+        });
+      }
+    }
+  }
+}
 pub fn init_world() -> Workload {
   (
-    init_chunk_manager_and_block_queue,
-    init_chunk_task_manager,
+    init_chunk_manager_and_block_queue.before_all(preheat_world),
+    init_chunk_task_manager.before_all(preheat_world),
+    preheat_world,
   ).into_workload()
 }
@@ -251,6 +300,7 @@ pub fn update_world() -> Workload {
     process_finished_tasks,
     process_block_queue_messages,
     process_block_queue,
+    process_chunk_unsubscribe_events,
     process_chunk_requests,
   ).into_sequential_workload()
 }

View file

@@ -41,9 +41,8 @@ impl ChunkTaskManager {
     self.pool.spawn(move || {
       sender.send(match task {
         ChunkTask::LoadChunk { position: chunk_position, seed } => {
-          let Some((blocks, queue)) = generate_world(chunk_position, seed, None) else {
-            return
-          };
+          //unwrap is fine because abort is not possible
+          let (blocks, queue) = generate_world(chunk_position, seed, None).unwrap();
           ChunkTaskResponse::ChunkLoaded { chunk_position, blocks, queue }
         }
       }).unwrap()
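The `.unwrap()` here leans on the fact that the server passes `None` for the abort handle: judging by the client-side use of `AbortState`, `generate_world` presumably only returns `None` when an abort is signalled through that third argument, so the unwrap cannot fire on the server.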

View file

@@ -1,9 +1,17 @@
 #[repr(u8)]
 pub enum Channel {
+  #[deprecated]
   Generic = 0,
+  /// Used during the initial handshake process
   Auth = 1,
-  World = 2,
+  /// Used for sending chunk data from server to client
+  WorldData = 2,
+  /// Used for sending/receiving block place events
   Block = 3,
+  /// Used for sending/receiving player movements
   Move = 4,
+  /// Used for system events, like players joining or leaving
   SysEvt = 5,
+  /// Used for subscribing and unsubscribing from chunks
+  SubReq = 6,
 }
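For context, these variants are cast to uflow channel indices at the call sites. A minimal sketch mirroring the `client.0.send(...)` pattern in the client-side world code below; the helper function is hypothetical, and `uflow::client::Client` is assumed from how the `UdpClient` unique is used there:

```rust
use glam::IVec3;
use kubi_shared::networking::{channels::Channel, messages::ClientToServerMessage};
use uflow::SendMode;

// Hypothetical helper; mirrors the chunk-unsubscribe send in the client world systems.
fn send_chunk_unsubscribe(client: &mut uflow::client::Client, chunk: IVec3) {
  client.send(
    // Serialize the message with postcard, as done elsewhere in the codebase.
    postcard::to_allocvec(&ClientToServerMessage::ChunkUnsubscribe { chunk })
      .unwrap()
      .into_boxed_slice(),
    // Pick the dedicated subscription channel and send reliably.
    Channel::SubReq as usize,
    SendMode::Reliable,
  );
}
```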

View file

@@ -2,7 +2,7 @@ use std::sync::Arc;
 use atomic::{Atomic, Ordering};
 use glam::{IVec3, ivec3};
 use glium::{VertexBuffer, IndexBuffer, index::PrimitiveType};
-use kubi_shared::{networking::messages::ClientToServerMessage, worldgen::AbortState};
+use kubi_shared::{networking::{channels::Channel, messages::ClientToServerMessage}, worldgen::AbortState};
 use shipyard::{View, UniqueView, UniqueViewMut, IntoIter, Workload, IntoWorkload, NonSendSync, track};
 use uflow::SendMode;
 use crate::{
@@ -26,8 +26,7 @@ const MAX_CHUNK_OPS: usize = 32;
 pub fn update_loaded_world_around_player() -> Workload {
   (
     update_chunks_if_player_moved,
-    unload_downgrade_chunks,
-    start_required_tasks,
+    process_state_changes,
     process_completed_tasks,
   ).into_sequential_workload()
 }
@@ -91,64 +90,88 @@ pub fn update_chunks_if_player_moved(
   }
 }
-fn unload_downgrade_chunks(
-  mut vm_world: UniqueViewMut<ChunkStorage>,
-  mut vm_meshes: NonSendSync<UniqueViewMut<ChunkMeshStorage>>
-) {
-  if !vm_world.is_modified() {
-    return
-  }
-  //TODO refactor this
-  //TODO unsubscibe if in multiplayer
-  vm_world.chunks.retain(|_, chunk| {
-    if chunk.desired_state == DesiredChunkState::Unloaded {
-      if let Some(mesh_index) = chunk.mesh_index {
-        vm_meshes.remove(mesh_index).unwrap();
-      }
-      if let Some(abortion) = &chunk.abortion {
-        let _ = abortion.compare_exchange(
-          AbortState::Continue, AbortState::Abort,
-          Ordering::Relaxed, Ordering::Relaxed
-        );
-      }
-      false
-    } else {
-      match chunk.desired_state {
-        DesiredChunkState::Nothing if matches!(chunk.current_state, CurrentChunkState::Loading) => {
-          if let Some(abortion) = &chunk.abortion {
-            let _ = abortion.compare_exchange(
-              AbortState::Continue, AbortState::Abort,
-              Ordering::Relaxed, Ordering::Relaxed
-            );
-          }
-        },
-        DesiredChunkState::Loaded if matches!(chunk.current_state, CurrentChunkState::Rendered | CurrentChunkState::CalculatingMesh | CurrentChunkState::RecalculatingMesh) => {
-          if let Some(mesh_index) = chunk.mesh_index {
-            vm_meshes.remove(mesh_index).unwrap();
-          }
-          chunk.mesh_index = None;
-          chunk.current_state = CurrentChunkState::Loaded;
-        },
-        _ => (),
-      }
-      true
-    }
-  })
-}
-fn start_required_tasks(
+fn process_state_changes(
   task_manager: UniqueView<ChunkTaskManager>,
   mut udp_client: Option<UniqueViewMut<UdpClient>>,
   mut world: UniqueViewMut<ChunkStorage>,
+  mut vm_meshes: NonSendSync<UniqueViewMut<ChunkMeshStorage>>,
 ) {
   if !world.is_modified() {
     return
   }
   //HACK: cant iterate over chunks.keys() or chunk directly!
   let hashmap_keys: Vec<IVec3> = world.chunks.keys().copied().collect();
   for position in hashmap_keys {
-    let chunk = world.chunks.get(&position).unwrap();
+    let chunk = world.chunks.get_mut(&position).unwrap();
+    //If the chunk is being unloaded, it's essentially dead at this point and we shouldn't bother it
+    if chunk.current_state == CurrentChunkState::Unloading {
+      continue
+    }
+    // If the chunk is already in the desired state, skip it
+    // (except one annoying edge case where chunk is rendered but dirty, then we need to recalculate the mesh)
+    if chunk.desired_state.matches_current(chunk.current_state) &&
+      !(chunk.desired_state == DesiredChunkState::Rendered && chunk.mesh_dirty) {
+      continue
+    }
     match chunk.desired_state {
+      // DesiredChunkState::Unloaded | DesiredChunkState::Nothing:
+      // Loading -> Nothing
+      DesiredChunkState::Unloaded | DesiredChunkState::Nothing if chunk.current_state == CurrentChunkState::Loading => {
+        if let Some(abortion) = &chunk.abortion {
+          let _ = abortion.compare_exchange(
+            AbortState::Continue, AbortState::Abort,
+            Ordering::Relaxed, Ordering::Relaxed
+          );
+        }
+        chunk.abortion = None;
+        chunk.current_state = CurrentChunkState::Nothing;
+      },
+      // DesiredChunkState::Unloaded | DesiredChunkState::Nothing:
+      // (Loaded, CalculatingMesh) -> Nothing
+      DesiredChunkState::Unloaded | DesiredChunkState::Nothing if matches!(
+        chunk.current_state,
+        CurrentChunkState::Loaded | CurrentChunkState::CalculatingMesh,
+      ) => {
+        chunk.block_data = None;
+        chunk.current_state = CurrentChunkState::Nothing;
+      },
+      // DesiredChunkState::Unloaded | DesiredChunkState::Nothing:
+      // (Rendered | RecalculatingMesh) -> Nothing
+      DesiredChunkState::Unloaded | DesiredChunkState::Nothing if matches!(
+        chunk.current_state,
+        CurrentChunkState::Rendered | CurrentChunkState::RecalculatingMesh,
+      ) => {
+        if let Some(mesh_index) = chunk.mesh_index {
+          vm_meshes.remove(mesh_index).unwrap();
+        }
+        chunk.mesh_index = None;
+        chunk.current_state = CurrentChunkState::Nothing;
+      },
+      // DesiredChunkState::Loaded:
+      // CalculatingMesh -> Loaded
+      DesiredChunkState::Loaded if chunk.current_state == CurrentChunkState::CalculatingMesh => {
+        chunk.current_state = CurrentChunkState::Loaded;
+      },
+      // DesiredChunkState::Unloaded | DesiredChunkState::Nothing | DesiredChunkState::Loaded:
+      // (Rendered | RecalculatingMesh) -> Loaded
+      DesiredChunkState::Unloaded | DesiredChunkState::Nothing | DesiredChunkState::Loaded if matches!(
+        chunk.current_state, CurrentChunkState::Rendered | CurrentChunkState::RecalculatingMesh
+      ) => {
+        if let Some(mesh_index) = chunk.mesh_index {
+          vm_meshes.remove(mesh_index).unwrap();
+        }
+        chunk.mesh_index = None;
+        chunk.current_state = CurrentChunkState::Loaded;
+      },
+      // DesiredChunkState::Loaded | DesiredChunkState::Rendered:
+      // Nothing -> Loading
       DesiredChunkState::Loaded | DesiredChunkState::Rendered if chunk.current_state == CurrentChunkState::Nothing => {
         let mut abortion = None;
         //start load task
@@ -157,7 +180,7 @@ fn start_required_tasks(
           postcard::to_allocvec(&ClientToServerMessage::ChunkSubRequest {
             chunk: position,
           }).unwrap().into_boxed_slice(),
-          0,
+          Channel::SubReq as usize,
           SendMode::Reliable
         );
       } else {
@@ -176,6 +199,10 @@ fn start_required_tasks(
         // ===========
         //log::trace!("Started loading chunk {position}");
       },
+      // DesiredChunkState::Rendered:
+      // Loaded -> CalculatingMesh
+      // Rendered (dirty) -> RecalculatingMesh
       DesiredChunkState::Rendered if (chunk.current_state == CurrentChunkState::Loaded || chunk.mesh_dirty) => {
         //get needed data
         let Some(neighbors) = world.neighbors_all(position) else {
@@ -198,9 +225,41 @@
         // ===========
         //log::trace!("Started generating mesh for chunk {position}");
       }
-      _ => ()
+      _ => {}, //panic!("Illegal state transition: {:?} -> {:?}", chunk.current_state, chunk.desired_state),
     }
   }
+  //Now, separately process state change the state from Nothing to Unloading or Unloaded
+  world.chunks.retain(|&position, chunk: &mut Chunk| {
+    if chunk.desired_state == DesiredChunkState::Unloaded {
+      assert!(chunk.current_state == CurrentChunkState::Nothing, "looks like chunk did not get properly downgraded to Nothing before unloading, this is a bug");
+      chunk.current_state = CurrentChunkState::Unloading;
+      //If in multiplayer, send a message to the server to unsubscribe from the chunk
+      if let Some(client) = &mut udp_client {
+        client.0.send(
+          postcard::to_allocvec(
+            &ClientToServerMessage::ChunkUnsubscribe { chunk: position }
+          ).unwrap().into_boxed_slice(),
+          Channel::SubReq as usize,
+          SendMode::Reliable
+        );
+        // and i think that's it, just kill the chunk right away, the server will take care of the rest
+        //
+        // because uflow's reliable packets are ordered, there should be no need to wait for the server to confirm the unsubscription
+        // because client won't be able to subscribe to it again until the server finishes processing the unsubscription
+        // :ferrisClueless:
+        return false
+      }
+      //HACK, since save files are not implemented, just unload immediately
+      return false
+    }
+    true
+  });
 }
 fn process_completed_tasks(
@@ -215,6 +274,10 @@ fn process_completed_tasks(
   while let Some(res) = task_manager.receive() {
     match res {
       ChunkTaskResponse::LoadedChunk { position, chunk_data, mut queued } => {
+        //If unwanted chunk is already loaded
+        //It would be ~~...unethical~~ impossible to abort the operation at this point
+        //Instead, we'll just throw it away
         //check if chunk exists
         let Some(chunk) = world.chunks.get_mut(&position) else {
           //to compensate, actually push the ops counter back by one
           continue
         };
@@ -222,6 +285,8 @@
+        chunk.abortion = None;
         //check if chunk still wants it
         if !matches!(chunk.desired_state, DesiredChunkState::Loaded | DesiredChunkState::Rendered) {
           log::warn!("block data discarded: state undesirable: {:?}", chunk.desired_state);