Move away from the system where a layer stores its parent index; instead, let each layer store the index used to access its child data. This makes everything much clearer, nicer to read, and easier to develop on.

Redid the whole implementation and now use a trait for HashResult and VecResult, which solves the .get() problem and the .mat() problem
Added benchmarks; with repeated tests we gain 14 ns for one hash lookup, two vec lookups, and the result vec lookup
This commit is contained in:
Marcel Märtens 2019-10-14 15:22:35 +02:00
parent 1605fe202e
commit 7039d5a29c
12 changed files with 632 additions and 562 deletions

1
Cargo.lock generated
View File

@ -6648,6 +6648,7 @@ version = "0.1.0"
dependencies = [
"bincode",
"dot_vox",
"fxhash",
"log",
"mio 0.6.23",
"mio-extras",

View File

@ -19,3 +19,4 @@ serde_derive = "1.0"
bincode = "1.0"
log = "0.4"
rand = "0.5"
fxhash = "0.2"

View File

@ -1,5 +1,5 @@
use super::index::{
LodIndex,
use super::lodpos::{
LodPos,
};
/*
@ -8,19 +8,19 @@ use super::index::{
#[derive(PartialEq, Eq, Clone, Copy, Hash, Debug)]
pub struct LodArea {
pub lower: LodIndex,
pub upper: LodIndex,
pub lower: LodPos,
pub upper: LodPos,
}
impl LodArea {
pub fn new(lower: LodIndex, upper: LodIndex) -> Self {
pub fn new(lower: LodPos, upper: LodPos) -> Self {
LodArea {
lower,
upper,
}
}
pub fn is_inside(&self, lod: LodIndex) -> bool {
pub fn is_inside(&self, lod: LodPos) -> bool {
let lower = self.lower.get();
let upper = self.upper.get();
let lod = lod.get();

View File

@ -1,9 +1,9 @@
use std::u32;
use std::collections::HashMap;
use vek::*;
use super::index::{
use super::lodpos::{
self,
LodIndex,
LodPos,
AbsIndex,
relative_to_1d,
two_pow_u,
@ -15,6 +15,8 @@ use super::delta::{
LodDelta,
};
pub type LodIndex = LodPos;
/*
LOD Data contains different Entries in different Vecs, every entry has a "pointer" to it's child start.
This is the structure to store a region and all subscribed information
@ -228,7 +230,7 @@ impl<X: LodConfig> LodData<X>
// uses parent_child_index as a buffer, to not calculate it again
fn int_get(parent_abs: AbsIndex, child_lod: LodIndex, parent_lod: LodIndex, parent_child_index: usize) -> AbsIndex {
let child_layer = X::child_layer_id[parent_abs.layer as usize].unwrap();
let child_lod = child_lod.align_to_layer_id(child_layer);
let child_lod = child_lod.align_to_level(child_layer);
let child_offset = relative_to_1d(child_lod, parent_lod, child_layer, X::layer_volume[child_layer as usize]);
//println!("{} int_get - parent_abs {} child_lod {} parent_lod {} parent_child_index {} child_offset {}", Self::debug_offset(parent_abs.layer), parent_abs, child_lod, parent_lod, parent_child_index, child_offset);
AbsIndex::new(child_layer, parent_child_index + child_offset)
@ -236,7 +238,7 @@ impl<X: LodConfig> LodData<X>
// slower variant of int_get which requiere self lookups
fn int_get_lockup(&self, parent_abs: AbsIndex, child_lod: LodIndex) -> AbsIndex {
let parent_lod = child_lod.align_to_layer_id(parent_abs.layer);
let parent_lod = child_lod.align_to_level(parent_abs.layer);
let parent_child_index = self.int_get_child_index(parent_abs);
Self::int_get(parent_abs, child_lod, parent_lod, parent_child_index)
}
@ -255,7 +257,7 @@ impl<X: LodConfig> LodData<X>
}
pub fn int_get_n(&self, index: LodIndex, layer: u8) -> AbsIndex {
let anchor_lod = index.align_to_layer_id(X::anchor_layer_id);
let anchor_lod = index.align_to_level(X::anchor_layer_id);
let anchor_abs = AbsIndex::new(X::anchor_layer_id, self.anchor[&anchor_lod]);
let wanted_abs = self.int_recursive_get(anchor_abs, index, layer);
debug_assert_eq!(wanted_abs.layer, layer);
@ -272,10 +274,10 @@ impl<X: LodConfig> LodData<X>
parent_abs = self.int_get_lockup(parent_abs, child_lod);
if parent_abs.layer <= target_layer {
let parent_width = two_pow_u(old_parent_abs.layer ) as u32;
let parent_lod = child_lod.align_to_layer_id(old_parent_abs.layer);
let parent_lod = child_lod.align_to_level(old_parent_abs.layer);
//TODO: Dont recalc the first 3 values
cache.last_parent_area = LodArea::new(parent_lod, parent_lod + LodIndex::new(Vec3::new(parent_width,parent_width,parent_width)));
cache.last_parent_lod = child_lod.align_to_layer_id(old_parent_abs.layer);
cache.last_parent_lod = child_lod.align_to_level(old_parent_abs.layer);
cache.last_parent_child_index = self.int_get_child_index(old_parent_abs);
cache.last_parent_abs = old_parent_abs;
return parent_abs;
@ -292,7 +294,7 @@ impl<X: LodConfig> LodData<X>
//println!("nay");
//println!("{} {}", cache.last_parent_area.lower, cache.last_parent_area.upper);
//println!("{}", index);
let anchor_lod = index.align_to_layer_id(X::anchor_layer_id);
let anchor_lod = index.align_to_level(X::anchor_layer_id);
let anchor_abs = AbsIndex::new(X::anchor_layer_id, self.anchor[&anchor_lod]);
self.int_recursive_get_cached(cache, anchor_abs, index, layer)
};
@ -472,7 +474,7 @@ impl<X: LodConfig> LodData<X>
fn int_make_at_least(&mut self, parent: AbsIndex, /*parent_lod2: LodIndex,*/ area: LodArea, target_layer: u8, delta: &Option<&mut LodDelta<X>>) {
let child_layer = X::child_layer_id[parent.layer as usize];
let parent_lod_width = two_pow_u(parent.layer) as u32;
let parent_lod = area.lower.align_to_layer_id(parent.layer);
let parent_lod = area.lower.align_to_level(parent.layer);
//assert_eq!(parent_lod, parent_lod2);
//println!("{} lower, upper {} {} {} - {:?}", Self::debug_offset(parent.layer), area.lower, area.upper, parent_lod_width, child_layer);
//let delta = delta.unwrap();
@ -497,8 +499,8 @@ impl<X: LodConfig> LodData<X>
let child_layer = child_layer.unwrap();
let child_lod_width = two_pow_u(child_layer) as u32;
//calc childs which needs to be called recusivly, there childs will be the new parents
let child_lower = area.lower.align_to_layer_id(child_layer);
let child_upper = area.upper.align_to_layer_id(child_layer);
let child_lower = area.lower.align_to_level(child_layer);
let child_upper = area.upper.align_to_level(child_layer);
let child_base_abs_index = self.int_get_child_index(parent);
let child_volume = X::layer_volume[child_layer as usize];
// loop over childs and calculate correct lower and
@ -517,8 +519,8 @@ impl<X: LodConfig> LodData<X>
let child_lower = parent_lod + LodIndex::new(Vec3::new(x * child_lod_width, y * child_lod_width, z * child_lod_width));
let child_upper = child_lower + LodIndex::new(Vec3::new(child_lod_width-1, child_lod_width-1, child_lod_width-1));
let inner_lower = index::max(area.lower, child_lower);
let inner_upper = index::min(area.upper, child_upper);
let inner_lower = lodpos::max(area.lower, child_lower);
let inner_upper = lodpos::min(area.upper, child_upper);
//println!("{} restrict {} {} to {} {}", Self::debug_offset(parent.layer), area.lower, area.upper, inner_lower, inner_upper);
let inner_area = LodArea::new(inner_lower, inner_upper);
Self::int_make_at_least(self, child_abs, inner_area, target_layer, delta);
@ -536,11 +538,11 @@ impl<X: LodConfig> LodData<X>
pub fn make_at_least(&mut self, area: LodArea, target_layer: u8, delta: Option<&mut LodDelta<X>>) {
let anchor_layer_id = X::anchor_layer_id;
let anchor_lower = area.lower.align_to_layer_id(anchor_layer_id);
let anchor_upper = area.upper.align_to_layer_id(anchor_layer_id);
let anchor_lower = area.lower.align_to_level(anchor_layer_id);
let anchor_upper = area.upper.align_to_level(anchor_layer_id);
let lower_xyz = anchor_lower.get();
let upper_xyz = anchor_upper.get();
let anchor_width = index::two_pow_u(anchor_layer_id) as u32;
let anchor_width = lodpos::two_pow_u(anchor_layer_id) as u32;
let mut x = lower_xyz[0];
//println!("{} xxx lower, upper {} {} {}", Self::debug_offset(anchor_layer_id), lower_xyz, upper_xyz, anchor_width);
while x <= upper_xyz[0] {
@ -553,8 +555,8 @@ impl<X: LodConfig> LodData<X>
if anchor_abs.layer > target_layer {
let child_lod_upper = Self::get_last_child_lod(anchor_lod, anchor_abs.layer);
let inner_lower = index::max(area.lower, anchor_lod);
let inner_upper = index::min(area.upper, child_lod_upper);
let inner_lower = lodpos::max(area.lower, anchor_lod);
let inner_upper = lodpos::min(area.upper, child_lod_upper);
//println!("{}call child with lower, upper {} {} instead of {} {} ", Self::debug_offset(anchor_layer_id), inner_lower, inner_upper, anchor_lod, child_lod_upper);
let inner_area = LodArea::new(inner_lower, inner_upper);

View File

@ -3,10 +3,12 @@ use super::{
LodData,
LodConfig,
},
index::LodIndex,
lodpos::LodPos,
area::LodArea,
};
pub type LodIndex = LodPos;
/*
A LodDelta applies a change to a Lod
The rules for LodDeltas are strict in order to make them as simple as possible.

View File

@ -3,256 +3,27 @@ use std::ops::Sub;
use std::ops::Add;
use std::cmp;
use std::fmt;
use std::{u16, u32};
/*
A region owns the Values from in (0, 2048) in steps of 1/32.
But because regions can also subscribe we add support to the range (0, 2048*3).
which is 13 bits for the digits before the decimal point and 5 bits for the digits after the decimal point.
We use our own LodIndex type to store and compute based on these values, because u16 arithmetic (inside the owned area) is super easy to achieve and allows us to optimize a lot.
-- lower neighbor
0 -> 0
65535 -> 2047 31/32
-- owned
65536 -> 2048
131071 -> 4095 31/32
-- upper neighbor
196607 -> 6143 31/32
*/
#[derive(PartialEq, Eq, Clone, Copy, Hash, Debug)]
pub struct LodIndex {
/*
bit 0..17 -> x
bit 18..35 -> y
bit 36..53 -> z
bit 54..63 -> unused
*/
data: u64,
/// Abstraction over the compact integer types (u16/u32) used to store child
/// indices, treating the type's MAX value as the "no child" sentinel —
/// effectively a space-efficient stand-in for `Option<usize>`.
pub trait ToOptionUsize: Copy {
    /// Returns `true` when the value encodes a real index
    /// (i.e. is not the MAX sentinel).
    fn is_some(self) -> bool;
    /// Widens the stored index to `usize` for calculations.
    fn into_usize(self) -> usize;
}
/*does not work on big endian!*/
const BIT_X_MASK: u64 = 0b0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0011_1111_1111_1111_1111;
const BIT_Y_MASK: u64 = 0b0000_0000_0000_0000_0000_0000_0000_1111_1111_1111_1111_1100_0000_0000_0000_0000;
const BIT_Z_MASK: u64 = 0b0000_0000_0011_1111_1111_1111_1111_0000_0000_0000_0000_0000_0000_0000_0000_0000;
const BIT_X_MASK32: u32 = 0b0000_0000_0000_0011_1111_1111_1111_1111;
//TODO: Optimize!
impl LodIndex {
pub fn new(data: Vec3<u32>) -> Self {
let mut index = LodIndex {data: 0};
index.set(data);
index
impl ToOptionUsize for u32 {
fn is_some(self) -> bool {
self != u32::MAX
}
pub fn get(&self) -> Vec3<u32> {
let x = (self.data & BIT_X_MASK) as u32;
let y = ((self.data & BIT_Y_MASK) >> 18 ) as u32;
let z = ((self.data & BIT_Z_MASK) >> 36 ) as u32;
Vec3{x,y,z}
}
pub fn set(&mut self, data: Vec3<u32>) {
let x = (data.x & BIT_X_MASK32) as u64;
let y = ((data.y & BIT_X_MASK32) as u64 ) << 18;
let z = ((data.z & BIT_X_MASK32) as u64 ) << 36;
self.data = x + y + z;
}
pub fn align_to_layer_id(&self, layer: u8) -> LodIndex {
let xyz = self.get();
let f = two_pow_u(layer) as u32;
LodIndex::new(xyz.map(|i| {
(i / f) * f
}))
}
pub fn get_highest_layer_that_fits(&self) -> u8 {
let pos = self.get();
cmp::min( cmp::min(cmp::min(pos[0].trailing_zeros(),
pos[1].trailing_zeros()), pos[2].trailing_zeros()), 15) as u8
fn into_usize(self) -> usize {
self as usize
}
}
impl fmt::Display for LodIndex {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let xyz = self.get();
//write!(f, "({}|{}|{}) <{}>", xyz[0], xyz[1], xyz[2], self.data)
write!(f, "({}|{}|{})", xyz[0], xyz[1], xyz[2])
impl ToOptionUsize for u16 {
fn is_some(self) -> bool {
self != u16::MAX
}
}
#[derive(PartialEq, Eq, Clone, Copy, Hash, Debug)]
pub struct AbsIndex {
pub layer: u8,
pub index: usize,
}
impl AbsIndex {
pub fn new(layer: u8, index: usize) -> Self {
AbsIndex {
layer,
index,
}
fn into_usize(self) -> usize {
self as usize
}
}
impl fmt::Display for AbsIndex {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "[{}:{}]", self.layer, self.index)
}
}
impl Sub for LodIndex {
type Output = LodIndex;
fn sub(self, rhs: LodIndex) -> Self::Output {
LodIndex {
data: self.data - rhs.data /*fast but has overflow issues*/
}
}
}
impl Add for LodIndex {
type Output = LodIndex;
fn add(self, rhs: LodIndex) -> Self::Output {
LodIndex {
data: self.data + rhs.data /*fast but has overflow issues*/
}
}
}
pub const fn two_pow_u(n: u8) -> u16 {
1 << n
}
pub fn relative_to_1d(child_lod: LodIndex, parent_lod: LodIndex, child_layer: u8, relative_size: Vec3<u32>) -> usize {
let width = two_pow_u(child_layer) as u32;
let index = (child_lod.get() - parent_lod.get()).map(|e| e / width);
(index[0] * relative_size[2] * relative_size[1] + index[1] * relative_size[2] + index[2]) as usize
}
pub fn min(lhs: LodIndex, rhs: LodIndex) -> LodIndex {
let lhs = lhs.get();
let rhs = rhs.get();
LodIndex::new(lhs.map2(rhs, |a,b| cmp::min(a,b)))
}
pub fn max(lhs: LodIndex, rhs: LodIndex) -> LodIndex {
let lhs = lhs.get();
let rhs = rhs.get();
LodIndex::new(lhs.map2(rhs, |a,b| cmp::max(a,b)))
}
/*************
TESTS
**************/
#[cfg(test)]
mod tests {
use crate::{
lodstore::index::LodIndex,
};
use vek::*;
#[test]
fn setter_getter() {
let i = LodIndex::new(Vec3::new(0,0,0));
assert_eq!(i.get(), Vec3::new(0,0,0));
let i = LodIndex::new(Vec3::new(1337,0,0));
assert_eq!(i.get(), Vec3::new(1337,0,0));
let i = LodIndex::new(Vec3::new(0,1337,0));
assert_eq!(i.get(), Vec3::new(0,1337,0));
let i = LodIndex::new(Vec3::new(0,0,1337));
assert_eq!(i.get(), Vec3::new(0,0,1337));
let i = LodIndex::new(Vec3::new(1,1,1));
assert_eq!(i.get(), Vec3::new(1,1,1));
let i = LodIndex::new(Vec3::new(262143,262143,262143));
assert_eq!(i.get(), Vec3::new(262143,262143,262143));
let i = LodIndex::new(Vec3::new(262144,262144,262144)); //overflow
assert_eq!(i.get(), Vec3::new(0,0,0));
let i = LodIndex::new(Vec3::new(42,1337,69));
assert_eq!(i.get(), Vec3::new(42,1337,69));
}
#[test]
fn align() {
let i = LodIndex::new(Vec3::new(1337,0,0)).align_to_layer_id(4);
assert_eq!(i.get(), Vec3::new(1328,0,0));
let i = LodIndex::new(Vec3::new(1337,1800,0)).align_to_layer_id(5);
assert_eq!(i.get(), Vec3::new(1312,1792,0));
let i = LodIndex::new(Vec3::new(1337,0,50)).align_to_layer_id(3);
assert_eq!(i.get(), Vec3::new(1336,0,48));
let i = LodIndex::new(Vec3::new(1335,0,0)).align_to_layer_id(3);
assert_eq!(i.get(), Vec3::new(1328,0,0));
let i = LodIndex::new(Vec3::new(31337,22000,25000)).align_to_layer_id(7);
assert_eq!(i.get(), Vec3::new(31232,21888,24960));
let i = LodIndex::new(Vec3::new(31337,22000,25000)).align_to_layer_id(0);
assert_eq!(i.get(), Vec3::new(31337,22000,25000));
let i = LodIndex::new(Vec3::new(0,0,0)).align_to_layer_id(4);
assert_eq!(i.get(), Vec3::new(0,0,0));
}
#[test]
fn get_highest_layer_that_fits() {
let i = LodIndex::new(Vec3::new(0,0,0));
assert_eq!(i.get_highest_layer_that_fits(), 15);
let i = LodIndex::new(Vec3::new(1,0,0));
assert_eq!(i.get_highest_layer_that_fits(), 0);
let i = LodIndex::new(Vec3::new(2,0,0));
assert_eq!(i.get_highest_layer_that_fits(), 1);
let i = LodIndex::new(Vec3::new(3,0,0));
assert_eq!(i.get_highest_layer_that_fits(), 0);
let i = LodIndex::new(Vec3::new(4,0,0));
assert_eq!(i.get_highest_layer_that_fits(), 2);
let i = LodIndex::new(Vec3::new(5,0,0));
assert_eq!(i.get_highest_layer_that_fits(), 0);
let i = LodIndex::new(Vec3::new(1337,0,0));
assert_eq!(i.get_highest_layer_that_fits(), 0);
let i = LodIndex::new(Vec3::new(1337,1800,0));
assert_eq!(i.get_highest_layer_that_fits(), 0);
let i = LodIndex::new(Vec3::new(1338,0,50));
assert_eq!(i.get_highest_layer_that_fits(), 1);
let i = LodIndex::new(Vec3::new(1336,0,0));
assert_eq!(i.get_highest_layer_that_fits(), 3);
let i = LodIndex::new(Vec3::new(31348,22000,25000));
assert_eq!(i.get_highest_layer_that_fits(), 2);
let i = LodIndex::new(Vec3::new(0,0,0));
assert_eq!(i.get_highest_layer_that_fits(), 15);
let i = LodIndex::new(Vec3::new(65536,0,0));
assert_eq!(i.get_highest_layer_that_fits(), 15);
let i = LodIndex::new(Vec3::new(32768,0,0));
assert_eq!(i.get_highest_layer_that_fits(), 15);
let i = LodIndex::new(Vec3::new(16384,0,0));
assert_eq!(i.get_highest_layer_that_fits(), 14);
let i = LodIndex::new(Vec3::new(8192,0,0));
assert_eq!(i.get_highest_layer_that_fits(), 13);
let i = LodIndex::new(Vec3::new(65536,0,8192));
assert_eq!(i.get_highest_layer_that_fits(), 13);
}
}
}

View File

@ -0,0 +1,292 @@
use vek::*;
use std::ops::Sub;
use std::ops::Add;
use std::cmp;
use std::fmt;
/*
A region owns the Values from in (0, 2048) in steps of 1/32.
But because regions can also subscribe we add support to the range (0, 2048*3).
which is 13 bits for the digits before the decimal point and 5 bits for the digits after the decimal point.
We use our own LodPos type to store and compute based on these values, because u16 arithmetic (inside the owned area) is super easy to achieve and allows us to optimize a lot.
-- lower neighbor
0 -> 0
65535 -> 2047 31/32
-- owned
65536 -> 2048
131071 -> 4095 31/32
-- upper neighbor
196607 -> 6143 31/32
*/
/// A 3D position in LoD space, packed into a single `u64`.
///
/// Each axis occupies 18 bits (valid range 0..=262143); coordinates outside
/// that range are silently masked when stored (see the `setter_getter` test).
#[derive(PartialEq, Eq, Clone, Copy, Hash, Debug)]
pub struct LodPos {
    /*
       bit 0..17  -> x
       bit 18..35 -> y
       bit 36..53 -> z
       bit 54..63 -> unused
    */
    // Packed x/y/z; access through `get`/`set` rather than directly.
    data: u64,
}
/*does not work on big endian!*/
// NOTE(review): these are pure bit masks/shifts on a u64 value, which are
// endianness-independent in Rust — confirm whether the warning above refers
// to some external serialization of `data` instead.
// 18-bit masks for each axis inside the packed u64.
const BIT_X_MASK: u64 = 0b0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0011_1111_1111_1111_1111;
const BIT_Y_MASK: u64 = 0b0000_0000_0000_0000_0000_0000_0000_1111_1111_1111_1111_1100_0000_0000_0000_0000;
const BIT_Z_MASK: u64 = 0b0000_0000_0011_1111_1111_1111_1111_0000_0000_0000_0000_0000_0000_0000_0000_0000;
// Same 18-bit mask, as u32, applied to incoming coordinates before packing.
const BIT_X_MASK32: u32 = 0b0000_0000_0000_0011_1111_1111_1111_1111;
// Bit offsets of the y and z axes inside the packed u64.
const BIT_Y_OFFSET: u8 = 18;
const BIT_Z_OFFSET: u8 = 36;
//TODO: Optimize!
impl LodPos {
    /// Creates a `LodPos` from a coordinate vector.
    /// Coordinates are masked to 18 bits per axis.
    pub fn new(data: Vec3<u32>) -> Self {
        let mut index = LodPos { data: 0 };
        index.set(data);
        index
    }

    /// Convenience constructor from individual coordinates.
    pub fn xyz(x: u32, y: u32, z: u32) -> Self {
        LodPos { data: Self::encode(x, y, z) }
    }

    /// Unpacks the stored position into a coordinate vector.
    pub fn get(&self) -> Vec3<u32> {
        Vec3::from(Self::decode(self.data))
    }

    /// Overwrites the stored position (coordinates are masked to 18 bits).
    pub fn set(&mut self, data: Vec3<u32>) {
        self.data = Self::encode(data.x, data.y, data.z);
    }

    // Packs three 18-bit coordinates into one u64.
    // Takes the coordinates by value: u32 is Copy, the previous `&u32`
    // parameters were pure noise.
    fn encode(x: u32, y: u32, z: u32) -> u64 {
        let x = (x & BIT_X_MASK32) as u64;
        let y = ((y & BIT_X_MASK32) as u64) << BIT_Y_OFFSET;
        let z = ((z & BIT_X_MASK32) as u64) << BIT_Z_OFFSET;
        x + y + z
    }

    // Unpacks a u64 into its three coordinates.
    fn decode(data: u64) -> (u32, u32, u32) {
        let x = (data & BIT_X_MASK) as u32;
        let y = ((data & BIT_Y_MASK) >> BIT_Y_OFFSET) as u32;
        let z = ((data & BIT_Z_MASK) >> BIT_Z_OFFSET) as u32;
        (x, y, z)
    }

    /// Rounds every coordinate down to the grid of the given level,
    /// i.e. to multiples of 2^layer.
    pub fn align_to_level(&self, layer: u8) -> LodPos {
        // Shift in u32 directly: the old `two_pow_u(layer) as u32` computed
        // the width in u16 first and overflowed for layer >= 16.
        let f = 1u32 << layer;
        LodPos::new(self.get().map(|i| (i / f) * f))
    }

    /// Highest level this position is aligned to, capped at 15.
    /// (0,0,0) is aligned to every level and therefore returns the cap.
    pub fn get_highest_level_that_fits(&self) -> u8 {
        let pos = self.get();
        cmp::min(
            cmp::min(
                cmp::min(pos[0].trailing_zeros(), pos[1].trailing_zeros()),
                pos[2].trailing_zeros(),
            ),
            15,
        ) as u8
    }
}
/// Formats the position as `(x|y|z)` in unpacked coordinates.
impl fmt::Display for LodPos {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let xyz = self.get();
        // Alternative that also shows the raw packed value, kept for debugging:
        //write!(f, "({}|{}|{}) <{}>", xyz[0], xyz[1], xyz[2], self.data)
        write!(f, "({}|{}|{})", xyz[0], xyz[1], xyz[2])
    }
}
/// An absolute address into the LoD storage: which layer, and the index of
/// the element inside that layer's backing store.
#[derive(PartialEq, Eq, Clone, Copy, Hash, Debug)]
pub struct AbsIndex {
    // Layer id the index refers to.
    pub layer: u8,
    // Position inside that layer's storage.
    pub index: usize,
}
impl AbsIndex {
pub fn new(layer: u8, index: usize) -> Self {
AbsIndex {
layer,
index,
}
}
}
/// Formats the address as `[layer:index]`.
impl fmt::Display for AbsIndex {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "[{}:{}]", self.layer, self.index)
    }
}
impl Sub for LodPos {
    type Output = LodPos;

    /// Subtracts the raw packed representations in one u64 operation.
    /// NOTE(review): this equals per-axis subtraction only while every `rhs`
    /// coordinate is <= the matching `lhs` coordinate; otherwise the borrow
    /// bleeds into the neighbouring 18-bit axis — the "overflow issues"
    /// the original comment warns about.
    fn sub(self, rhs: LodPos) -> Self::Output {
        LodPos {
            data: self.data - rhs.data /*fast but has overflow issues*/
        }
    }
}
impl Add for LodPos {
    type Output = LodPos;

    /// Adds the raw packed representations in one u64 operation.
    /// NOTE(review): this equals per-axis addition only while no axis sum
    /// exceeds 18 bits; a carry past an axis boundary corrupts the
    /// neighbouring axis — the "overflow issues" the original comment
    /// warns about.
    fn add(self, rhs: LodPos) -> Self::Output {
        LodPos {
            data: self.data + rhs.data /*fast but has overflow issues*/
        }
    }
}
/// Returns `2^n` as a `u16`.
/// The shift overflows for `n >= 16`, so callers must stay below that.
pub const fn two_pow_u(n: u8) -> u16 {
    1u16 << n
}
/// Returns `2^n` as a `u32`.
/// The shift overflows for `n >= 32`, so callers must stay below that.
pub const fn two_pow_u32(n: u8) -> u32 {
    1u32 << n
}
/// Returns `a * 2^n`, computed as a shift instead of a multiplication.
/// (Name keeps the original's spelling — renaming would break callers.)
pub const fn multily_with_2_pow_n(a: usize, n: u8) -> usize {
    a << n
}
/// Maps a child position to its linear offset inside its parent's child block.
///
/// `relative_size` is the number of children per axis; the layout is x-major,
/// then y, with z as the fastest-varying axis.
/// Assumes `child_lod >= parent_lod` per axis and both aligned to
/// `child_layer` — TODO confirm against callers.
pub fn relative_to_1d(child_lod: LodPos, parent_lod: LodPos, child_layer: u8, relative_size: Vec3<u32>) -> usize {
    // `two_pow_u32` already returns u32 — the old `as u32` cast was redundant.
    let width = two_pow_u32(child_layer);
    let index = (child_lod.get() - parent_lod.get()).map(|e| e / width);
    (index[0] * relative_size[2] * relative_size[1] + index[1] * relative_size[2] + index[2]) as usize
}
/// Component-wise minimum of two positions.
pub fn min(lhs: LodPos, rhs: LodPos) -> LodPos {
    LodPos::new(lhs.get().map2(rhs.get(), |a, b| cmp::min(a, b)))
}
/// Component-wise maximum of two positions.
pub fn max(lhs: LodPos, rhs: LodPos) -> LodPos {
    LodPos::new(lhs.get().map2(rhs.get(), |a, b| cmp::max(a, b)))
}
/*************
TESTS
**************/
#[cfg(test)]
mod tests {
    use crate::{
        lodstore::lodpos::LodPos,
        lodstore::lodpos::two_pow_u32,
    };
    use vek::*;
    // `test::Bencher` needs a nightly compiler with `#![feature(test)]`
    // at the crate root.
    use test::Bencher;

    // Round-trips coordinates through the packed u64 representation,
    // including the 18-bit-per-axis limit: 262143 is the max, 262144
    // wraps (is masked) back to 0.
    #[test]
    fn setter_getter() {
        let i = LodPos::xyz(0,0,0);
        assert_eq!(i.get(), Vec3::new(0,0,0));
        let i = LodPos::xyz(1337,0,0);
        assert_eq!(i.get(), Vec3::new(1337,0,0));
        let i = LodPos::xyz(0,1337,0);
        assert_eq!(i.get(), Vec3::new(0,1337,0));
        let i = LodPos::xyz(0,0,1337);
        assert_eq!(i.get(), Vec3::new(0,0,1337));
        let i = LodPos::xyz(1,1,1);
        assert_eq!(i.get(), Vec3::new(1,1,1));
        let i = LodPos::xyz(262143,262143,262143);
        assert_eq!(i.get(), Vec3::new(262143,262143,262143));
        let i = LodPos::xyz(262144,262144,262144); //overflow
        assert_eq!(i.get(), Vec3::new(0,0,0));
        let i = LodPos::xyz(42,1337,69);
        assert_eq!(i.get(), Vec3::new(42,1337,69));
    }

    // align_to_level must round each axis DOWN to a multiple of 2^level.
    #[test]
    fn align() {
        let i = LodPos::xyz(1337,0,0).align_to_level(4);
        assert_eq!(i.get(), Vec3::new(1328,0,0));
        let i = LodPos::xyz(1337,1800,0).align_to_level(5);
        assert_eq!(i.get(), Vec3::new(1312,1792,0));
        let i = LodPos::xyz(1337,0,50).align_to_level(3);
        assert_eq!(i.get(), Vec3::new(1336,0,48));
        let i = LodPos::xyz(1335,0,0).align_to_level(3);
        assert_eq!(i.get(), Vec3::new(1328,0,0));
        let i = LodPos::xyz(31337,22000,25000).align_to_level(7);
        assert_eq!(i.get(), Vec3::new(31232,21888,24960));
        let i = LodPos::xyz(31337,22000,25000).align_to_level(0);
        assert_eq!(i.get(), Vec3::new(31337,22000,25000));
        let i = LodPos::xyz(0,0,0).align_to_level(4);
        assert_eq!(i.get(), Vec3::new(0,0,0));
    }

    // The result is min(trailing_zeros over all axes, 15) — the final
    // cases confirm the cap at 15 even for larger powers of two.
    #[test]
    fn get_highest_level_that_fits() {
        let i = LodPos::xyz(0,0,0);
        assert_eq!(i.get_highest_level_that_fits(), 15);
        let i = LodPos::xyz(1,0,0);
        assert_eq!(i.get_highest_level_that_fits(), 0);
        let i = LodPos::xyz(2,0,0);
        assert_eq!(i.get_highest_level_that_fits(), 1);
        let i = LodPos::xyz(3,0,0);
        assert_eq!(i.get_highest_level_that_fits(), 0);
        let i = LodPos::xyz(4,0,0);
        assert_eq!(i.get_highest_level_that_fits(), 2);
        let i = LodPos::xyz(5,0,0);
        assert_eq!(i.get_highest_level_that_fits(), 0);
        let i = LodPos::xyz(1337,0,0);
        assert_eq!(i.get_highest_level_that_fits(), 0);
        let i = LodPos::xyz(1337,1800,0);
        assert_eq!(i.get_highest_level_that_fits(), 0);
        let i = LodPos::xyz(1338,0,50);
        assert_eq!(i.get_highest_level_that_fits(), 1);
        let i = LodPos::xyz(1336,0,0);
        assert_eq!(i.get_highest_level_that_fits(), 3);
        let i = LodPos::xyz(31348,22000,25000);
        assert_eq!(i.get_highest_level_that_fits(), 2);
        let i = LodPos::xyz(0,0,0);
        assert_eq!(i.get_highest_level_that_fits(), 15);
        let i = LodPos::xyz(65536,0,0);
        assert_eq!(i.get_highest_level_that_fits(), 15);
        let i = LodPos::xyz(32768,0,0);
        assert_eq!(i.get_highest_level_that_fits(), 15);
        let i = LodPos::xyz(16384,0,0);
        assert_eq!(i.get_highest_level_that_fits(), 14);
        let i = LodPos::xyz(8192,0,0);
        assert_eq!(i.get_highest_level_that_fits(), 13);
        let i = LodPos::xyz(65536,0,8192);
        assert_eq!(i.get_highest_level_that_fits(), 13);
    }

    // Baseline cost of the power-of-two helper alone.
    #[bench]
    fn bench_access_two_pow(b: &mut Bencher) {
        b.iter(|| two_pow_u32(6));
    }

    // Cost of a full align (get + map + new) on a fixed position.
    #[bench]
    fn bench_access_align(b: &mut Bencher) {
        let access = LodPos::xyz(0, 0, 0);
        b.iter(|| access.align_to_level(6));
    }
}

View File

@ -1,5 +1,6 @@
pub mod index;
pub mod lodpos;
pub mod area;
pub mod index;
pub mod newdata;
pub mod data;
pub mod delta;

View File

@ -1,10 +1,12 @@
use super::area::LodArea;
use super::delta::LodDelta;
use super::index::{self, relative_to_1d, two_pow_u, AbsIndex, LodIndex};
use super::lodpos::{self, multily_with_2_pow_n, relative_to_1d, two_pow_u32, AbsIndex, LodPos};
use super::index::ToOptionUsize;
use fxhash::FxHashMap;
use std::collections::HashMap;
use std::u32;
use std::{u16, u32};
use vek::*;
use std::marker::PhantomData;
/*
Terminology:
- Layer: the layer of the LoDTree, a tree can have n layers, every layer contains their child layer, except for the last one.
@ -15,61 +17,65 @@ use std::marker::PhantomData;
- Index: This refers to the actually storage for the index for the next layer (often a u16,u32).
The Index is used to find the child in a spare storage.
- Key: always refers to the storage of a LAYER. Any keyword with KEY is either of type usize or LodPos.
- Prefix P always means parent, Prexix C always child, no prefix means for this layer.
traits:
- IndexStore: Every layer must implement this for either KEY = usize or KEY = LodPos and INDEX is often u16/u32. depending on the store of the parent detail.
It is accessed by parent layer to store the index when a detail is added or removed.
Every IndexStore has a parent, and a constant OWN_PER_PARENT which says, how many details fit into one element of the parent.
- DetailStore: Every layer must implement this for either KEY = usize or KEY = LodPos, independent from the parent.
This is used to store the actual detail of every layer.
- Nestable: All layers, except the lowest one implement this trait. It links the below layer to interact with the child layer.
!!Calculations will be implemented on these 3 Stores, rather than the actual structs to reduce duplciate coding!!
- Layer: Every layer must implement this. KEY is the storage Type and either usize/LodPos. Layer is also defined here.
- ParentLayer: Is a Layer that contains a CHILD layer and some const functions based on their const properties
- IndexStore: Every layer must implement this for their Layer::KEY and INDEX is often u16/u32.
The index is accessed by this layer to get the corresponding child.
Every Indexstore is a ParentLayer.
- DetailStore: Every layer must implement this for their KEY.
This is used to store the actual DETAIL of every layer.
!!Calculations will be implemented on these 2 Stores, rather than the actual structs, to reduce duplicate coding!!
- ToOptionUsize: to store INDEX in u16/u32 efficiently and move up to usize on calculation
- Traversable: trait is used to get child layer and child Index for a concrete position.
- Materializeable: trait is used to actually return a Detail for a concrete position.
Actual structs regarding of position in the chain. They represent the Layers and contain the Details, they implement (some of) the 3 Store traits
Naming Scheme is <Own Detail Type><Parent Detail Type>[Nest]Layer
- VecVecLayer/VecHashLayer: Vec Leaf Layers that have a vec/hash index and dont have a child layer.
- VecVecNestLayer/VecHashNestLayer: Vec Layers that have a vec/hash index and are middle layers
- HashNoneNestLayer: Hash Layer that has no index and must be parent layer
Actual structs regarding of position in the chain. They represent the Layers and contain the Details, they implement (some of) the 2 Store traits
Naming Scheme is <Own Detail Type>[Nest]Layer
- VecLayer: KEY=usize, stores in Vec, leaf layer
- HashLayer:KEY=LodPos, stores in Vec, leaf layer
- VecNestLayer: KEY=usize, stores in Vec, has childs
- HashNestLayer: KEY=LodPos, stores in Vec, has childs
Result Structs:
- LayerResult: Is used to access a layer meta information or Detail via LoDTree.traverse().get().get().get().mat().
When LoDTree.traverse() returns a LayerResult.
- HashIter/VecIter: Is used to access a layer meta information or Detail via LoDTree.trav().get().get().get().mat().
When LoDTree.trav() returns a HashIter.
It keeps information to next layer to not recalculate it
*/
pub type LodPos = LodIndex;
pub trait Index {}
pub trait Key {
fn from_index<T: Index>(index: T) -> Self;
fn to_index<T: Index>(self) -> T;
}
pub trait IndexStore {
fn load(&self, pos: LodPos) -> usize;
}
pub trait DetailStore {
type KEY: Key;
type DETAIL;
pub trait Layer {
type KEY;
const LEVEL: u8;
DER RETURNWERT MUSS EIN EIGENER TYPE SEIN JE NACHDEM VEC ODER HASH
fn load(&self, pos: LodPos) -> usize;
}
pub trait Nestable: DetailStore {
type NESTED: IndexStore<KEY = <Self as DetailStore>::KEY> + DetailStore;
fn nested(&self) -> &Self::NESTED;
pub trait ParentLayer: Layer {
type CHILD: Layer;
fn child(&self) -> &Self::CHILD;
fn CHILDS_PER_OWN_TOTAL() -> usize {
two_pow_u32(Self::LOG2_OF_CHILDS_PER_OWN_TOTAL()) as usize
}
fn LOG2_OF_CHILDS_PER_OWN_TOTAL() -> u8 {
3 * ({ Self::LEVEL } - Self::CHILD::LEVEL)
}
fn CHILDS_PER_OWN() -> Vec3<u32> {
Vec3 {
x: two_pow_u32(Self::LEVEL - Self::CHILD::LEVEL) as u32,
y: two_pow_u32(Self::LEVEL - Self::CHILD::LEVEL) as u32,
z: two_pow_u32(Self::LEVEL - Self::CHILD::LEVEL) as u32,
}
}
}
//TODO: make LodTree trait and make traverse a function which returns a LayerResult to the TOP Layer (and not one layer below that), or call it iter, lets see
pub trait IndexStore: ParentLayer {
type INDEX: ToOptionUsize;
fn load(&self, key: Self::KEY) -> Self::INDEX;
}
pub trait DetailStore: Layer {
type DETAIL;
fn load(&self, key: Self::KEY) -> &Self::DETAIL;
}
pub trait Traversable<C> {
fn get(self) -> C;
@ -78,280 +84,269 @@ pub trait Materializeable<T> {
fn mat(self) -> T;
}
//CK is childs key of IndexStore, its needed for Traversable, but IndexStore cannot be a dependency, because of last node which acets materializeable but not Traversable
pub struct LayerResult<'a, N: DetailStore> {
child: &'a N,
wanted: LodPos,
key: N::KEY,
}
//#######################################################
#[derive(Default)]
pub struct VecLayer<T, const L: u8> {
pub detail: Vec<T>,
}
#[derive(Default)]
pub struct HashLayer<T, const L: u8> {
pub detail: HashMap<LodPos, T>,
pub detail: FxHashMap<LodPos, T>,
}
pub struct VecNestLayer<N: DetailStore, T, I: Copy, const L: u8> {
#[derive(Default)]
pub struct VecNestLayer<C: DetailStore, T, I: ToOptionUsize, const L: u8> {
pub detail: Vec<T>,
pub index: Vec<I>,
pub nested: N,
pub child: C,
}
#[derive(Default)]
pub struct HashNestLayer<C: DetailStore, T, I: ToOptionUsize, const L: u8> {
pub detail_index: FxHashMap<LodPos, (T, I)>,
pub child: C,
}
pub struct HashNestLayer<N: DetailStore, T, I: Copy, const L: u8> {
pub detail_index: HashMap<LodPos, (T, I)>,
pub nested: N,
pub struct HashIter<'a, C: DetailStore> {
layer: &'a C,
wanted: LodPos,
layer_lod: LodPos, //LodPos aligned to layer::LEVEL
}
pub struct VecIter<'a, C: DetailStore> {
layer: &'a C,
wanted: LodPos,
layer_lod: LodPos, //LodPos aligned to layer::LEVEL
layer_key: usize,
}
#[rustfmt::skip]
impl<N: DetailStore, T, I: Copy, const L: u8> IndexStore for VecNestLayer<N, T, I, { L }> {
fn load(&self, pos: LodPos) -> usize {
let adjusted_pos = pos;
let pos_offset = 0;
let childs_per_own = 8;
let u1 = ( self.index[adjusted_pos * childs_per_own ) as usize + pos_offset;
return u1;
}
impl<T, const L: u8> Layer for VecLayer<T, { L }> {
type KEY = ( usize ); const LEVEL: u8 = { L };
}
#[rustfmt::skip]
impl<N: DetailStore, T, I: Copy, const L: u8> IndexStore for HashNestLayer<N, T, I, { L }> {
fn load(&self, pos: LodPos) -> usize {
let adjusted_pos = pos;
let pos_offset = 0;
let childs_per_own = 8;
let u1 = ( self.index[adjusted_pos * childs_per_own ) as usize + pos_offset;
return u1;
}
impl<T, const L: u8> Layer for HashLayer<T, { L }> {
type KEY = ( LodPos ); const LEVEL: u8 = { L };
}
#[rustfmt::skip]
impl<T, PI: Copy, const L: u8> IndexStore for HashNestLayer<T, PI, { L }> {
type KEY = LodPos; type INDEX=PI; const OWN_PER_PARENT: usize = 1337;
fn load(&self, key: LodPos) -> PI { *self.index.get(&key).unwrap() }
fn store(&mut self, key: LodPos, index: PI) { self.index.insert(key, index); }
impl<C: DetailStore, T, I: ToOptionUsize, const L: u8> Layer for VecNestLayer<C, T, I, { L }> {
type KEY = ( usize ); const LEVEL: u8 = { L };
}
#[rustfmt::skip]
impl<C: DetailStore, T, I: ToOptionUsize, const L: u8> Layer for HashNestLayer<C, T, I, { L }> {
type KEY = ( LodPos ); const LEVEL: u8 = { L };
}
#[rustfmt::skip]
impl<N: IndexStore<KEY = usize> + DetailStore, T, PI: Copy, const L: u8> IndexStore for VecHashNestLayer<N, T, PI, { L }> {
type KEY = LodPos; type INDEX=PI; const OWN_PER_PARENT: usize = 4096;
fn load(&self, key: LodPos) -> PI { *self.index.get(&key).unwrap() }
fn store(&mut self, key: LodPos, index: PI) { self.index.insert(key, index); }
// Exposes the nested child layer so traversal can descend one level.
impl<C: DetailStore, T, I: ToOptionUsize, const L: u8> ParentLayer for VecNestLayer<C, T, I, { L }> {
type CHILD = C;
fn child(&self) -> &Self::CHILD { &self.child }
}
#[rustfmt::skip]
// Exposes the nested child layer so traversal can descend one level.
impl<C: DetailStore, T, I: ToOptionUsize, const L: u8> ParentLayer for HashNestLayer<C, T, I, { L }> {
type CHILD = C;
fn child(&self) -> &Self::CHILD { &self.child }
}
#[rustfmt::skip]
// Detail storage for the leaf vec layer: flat `usize` keys into `detail`.
impl<T, PI: Copy, const L: u8> DetailStore for VecVecLayer<T, PI, { L }> {
type KEY = usize; type DETAIL=T; const LEVEL: u8 = { L };
// Panics (unwrap) when `key` is out of bounds.
fn load(&self, key: usize) -> &T { self.detail.get(key).unwrap() }
fn load_mut(&mut self, key: usize) -> &mut T { self.detail.get_mut(key).unwrap() }
// NOTE(review): `Vec::insert` shifts every element after `key` to the right;
// if "overwrite at index" was intended this silently invalidates sibling
// keys — TODO confirm intended semantics.
fn store(&mut self, key: usize, detail: T) { self.detail.insert(key, detail); }
}
#[rustfmt::skip]
// Detail storage for a vec-indexed, vec-backed nesting layer.
impl<N: IndexStore<KEY = usize> + DetailStore, T, PI: Copy, const L: u8> DetailStore for VecVecNestLayer<N, T, PI, { L }> {
type KEY = usize; type DETAIL=T; const LEVEL: u8 = { L };
// Panics (unwrap) when `key` is out of bounds.
fn load(&self, key: usize) -> &T { self.detail.get(key).unwrap() }
fn load_mut(&mut self, key: usize) -> &mut T { self.detail.get_mut(key).unwrap() }
// NOTE(review): `Vec::insert` shifts later elements — TODO confirm this is
// only ever called with `key == len` (append semantics).
fn store(&mut self, key: usize, detail: T) { self.detail.insert(key, detail); }
}
#[rustfmt::skip]
// Detail storage for the vec-backed leaf of a hash-indexed stack.
impl<T, PI: Copy, const L: u8> DetailStore for VecHashLayer<T, PI, { L }> {
type KEY = usize; type DETAIL=T; const LEVEL: u8 = { L };
// Panics (unwrap) when `key` is out of bounds.
fn load(&self, key: usize) -> &T { self.detail.get(key).unwrap() }
fn load_mut(&mut self, key: usize) -> &mut T { self.detail.get_mut(key).unwrap() }
// NOTE(review): `Vec::insert` shifts later elements — TODO confirm append-only use.
fn store(&mut self, key: usize, detail: T) { self.detail.insert(key, detail); }
}
#[rustfmt::skip]
// Detail storage for a hash-indexed, vec-backed nesting layer.
impl<N: IndexStore<KEY = usize> + DetailStore, T, PI: Copy, const L: u8> DetailStore for VecHashNestLayer<N, T, PI, { L }> {
type KEY = usize; type DETAIL=T; const LEVEL: u8 = { L };
// Panics (unwrap) when `key` is out of bounds.
fn load(&self, key: usize) -> &T { self.detail.get(key).unwrap() }
fn load_mut(&mut self, key: usize) -> &mut T { self.detail.get_mut(key).unwrap() }
// NOTE(review): `Vec::insert` shifts later elements — TODO confirm append-only use.
fn store(&mut self, key: usize, detail: T) { self.detail.insert(key, detail); }
}
#[rustfmt::skip]
// Detail storage for the top hash layer: details live directly in a HashMap
// keyed by level-aligned `LodPos`; load panics (unwrap) on a missing key.
impl<N: IndexStore<KEY = LodPos> + DetailStore, T, const L: u8> DetailStore for HashNoneNestLayer<N, T, { L }> {
type KEY = LodPos; type DETAIL=T; const LEVEL: u8 = { L };
fn load(&self, key: LodPos) -> &T { self.detail.get(&key).unwrap() }
fn load_mut(&mut self, key: LodPos) -> &mut T { self.detail.get_mut(&key).unwrap() }
fn store(&mut self, key: LodPos, detail: T) { self.detail.insert(key, detail); }
}
#[rustfmt::skip]
// NOTE(review): part of the older `nested`-field / LayerResult design that is
// being replaced by ParentLayer::child — verify whether this is still needed.
impl<N: IndexStore<KEY = usize> + DetailStore, T, PI: Copy, const L: u8> Nestable for VecVecNestLayer<N, T, PI, { L }> {
type NESTED=N;
fn nested(&self) -> &N { &self.nested }
}
#[rustfmt::skip]
// NOTE(review): part of the older `nested`-field / LayerResult design that is
// being replaced by ParentLayer::child — verify whether this is still needed.
impl<N: IndexStore<KEY = usize> + DetailStore, T, PI: Copy, const L: u8> Nestable for VecHashNestLayer<N, T, PI, { L }> {
type NESTED=N;
fn nested(&self) -> &N { &self.nested }
}
#[rustfmt::skip]
// NOTE(review): part of the older `nested`-field / LayerResult design that is
// being replaced by ParentLayer::child — verify whether this is still needed.
impl<N: IndexStore<KEY = LodPos> + DetailStore, T, const L: u8> Nestable for HashNoneNestLayer<N, T, { L }> {
type NESTED=N;
fn nested(&self) -> &N { &self.nested }
}
//#######################################################
impl<NC: IndexStore<KEY = LodPos> + DetailStore, T, const L: u8> HashNoneNestLayer<NC, T, { L }>
{
pub fn trav<'a>(&'a self, pos: LodPos) -> LayerResult<'a, Self> {
LayerResult {
child: self,
// Traversal entry point for the top hash layer.
//
// Builds a `HashIter` anchored at `pos`: `wanted` keeps the exact requested
// position for the descent, while `layer_lod` is `pos` aligned down to this
// layer's level so it can be used as the hash key.
//
// NOTE: the previous revision also initialized a stray `key` field left over
// from the old LayerResult design; `HashIter` only carries
// `layer`/`wanted`/`layer_lod` (see the Traversable impl below).
impl<C: DetailStore, T, I: ToOptionUsize, const L: u8> HashNestLayer<C, T, I, { L }> {
    fn trav(&self, pos: LodPos) -> HashIter<Self> {
        HashIter {
            layer: &self,
            wanted: pos,
            layer_lod: pos.align_to_level({ L }),
        }
    }
}
/*impl<'a, N: DetailStore + Nestable>
Traversable<LayerResult<'a, N::NESTED, <N::NESTED as IndexStore>::KEY>> for LayerResult<'a, N, <N::NESTED as IndexStore>::KEY>
where N::NESTED: IndexStore,
<N::NESTED as IndexStore>::KEY: Copy,
<N::NESTED as IndexStore>::KEY: KeyConvertable<KEY=<N::NESTED as IndexStore>::KEY, INDEX=<N::NESTED as IndexStore>::INDEX>
{
fn get(self) -> LayerResult<'a, N::NESTED, <N::NESTED as IndexStore>::KEY> {
println!("{}", N::LEVEL);
let child = self.child.nested();
let key = self.key;
//let index = self.index.align_to_layer_id(N::LEVEL);
LayerResult {
child,
wanted: self.wanted,
key: key.uncompress(&IndexStore::load(child, key)),
}
}
}*/
impl<'a, N: Nestable + DetailStore<KEY=Nestable::NESTED::KEY>>
Traversable<LayerResult<'a, N::NESTED>> for LayerResult<'a, N>
where N::NESTED: IndexStore,
<N::NESTED as IndexStore>::KEY: Copy,
{
fn get(self) -> LayerResult<'a, N::NESTED> {
println!("{}", N::LEVEL);
unimplemented!();
}
}
/*
impl<'a, N: Nestable>
Traversable<LayerResult<'a, N::NESTED>> for LayerResult<'a, N>
where N::NESTED: IndexStore,
<N::NESTED as IndexStore>::KEY: Copy,
<N as DetailStore>::KEY: KeyConvertable<KEY=<N::NESTED as DetailStore>::KEY, INDEX=<N::NESTED as IndexStore>::INDEX>,
{
fn get(self) -> LayerResult<'a, N::NESTED> {
println!("{}", N::LEVEL);
let child = self.child.nested();
let key = self.key;
//let index = self.index.align_to_layer_id(N::LEVEL);
LayerResult {
child,
wanted: self.wanted,
key: key.uncompress(&IndexStore::load(child, key)),
}
}
}
*/
// Materialize for the old `LayerResult` design.
impl<'a, N: IndexStore + DetailStore> Materializeable<N::DETAIL> for LayerResult<'a, N> {
fn mat(self) -> N::DETAIL {
// NOTE(review): unimplemented stub, superseded by the HashIter/VecIter
// Materializeable impls further down — candidate for removal.
unimplemented!();
}
}
#[rustfmt::skip]
pub type ExampleDelta =
// Index lookup for a vec-backed nesting layer: the flat `usize` key indexes
// straight into the `index` Vec (panics when out of bounds).
impl<C: DetailStore, T, I: ToOptionUsize, const L: u8> IndexStore for VecNestLayer<C, T, I, { L }> {
type INDEX = I;
fn load(&self, key: Self::KEY) -> Self::INDEX { self.index[key] }
}
#[rustfmt::skip]
// Index lookup for the hash-backed top layer: `detail_index` stores
// `(detail, child_index)` tuples, so the index is tuple element `.1`.
impl<C: DetailStore, T, I: ToOptionUsize, const L: u8> IndexStore for HashNestLayer<C, T, I, { L }> {
type INDEX = I;
fn load(&self, key: Self::KEY) -> Self::INDEX {
// Keys must already be aligned to this layer's level; catch misuse in debug builds.
debug_assert_eq!(key, key.align_to_level({ L }));
self.detail_index[&key].1
}
}
#[rustfmt::skip]
// Detail lookup for a vec-backed nesting layer: flat `usize` key, direct
// indexing (panics when out of bounds).
impl<C: DetailStore, I: ToOptionUsize, T, const L: u8> DetailStore for VecNestLayer<C, T, I, { L }> {
type DETAIL = T;
fn load(&self, key: Self::KEY) -> &Self::DETAIL {
&self.detail[key]
}
}
#[rustfmt::skip]
// Detail lookup for the hash-backed top layer: `detail_index` stores
// `(detail, child_index)` tuples, so the detail is tuple element `.0`.
impl<C: DetailStore, I: ToOptionUsize, T, const L: u8> DetailStore for HashNestLayer<C, T, I, { L }> {
type DETAIL = T;
fn load(&self, key: LodPos) -> &Self::DETAIL {
// Keys must already be aligned to this layer's level; catch misuse in debug builds.
debug_assert_eq!(key, key.align_to_level({ L }));
&self.detail_index[&key].0
}
}
#[rustfmt::skip]
// Detail lookup for the leaf vec layer: direct indexing by flat `usize` key
// (panics when out of bounds).
impl<T, const L: u8> DetailStore for VecLayer<T, { L }> {
type DETAIL = T;
fn load(&self, key: usize) -> &Self::DETAIL {
&self.detail[key]
}
}
#[rustfmt::skip]
// Detail lookup for the leaf hash layer: keyed by level-aligned `LodPos`
// (panics when the key is missing).
impl<T, const L: u8> DetailStore for HashLayer<T, { L }> {
type DETAIL = T;
fn load(&self, key: LodPos) -> &Self::DETAIL {
// Keys must already be aligned to this layer's level; catch misuse in debug builds.
debug_assert_eq!(key, key.align_to_level({ L }));
&self.detail[&key]
}
}
// Descend from the hash layer into its vec-backed child: resolve the stored
// child index for this layer's position and turn it into a flat `usize` key
// into the child layer's data.
impl<'a, L: DetailStore<KEY = LodPos> + IndexStore> Traversable<VecIter<'a, L::CHILD>>
for HashIter<'a, L>
where
L::CHILD: DetailStore, {
fn get(self) -> VecIter<'a, L::CHILD> {
// Wanted position aligned down to the child layer's level.
let child_lod = self.wanted.align_to_level(L::CHILD::LEVEL );
// Linear offset of that child cell within this parent's block of children.
let pos_offset = relative_to_1d(child_lod, self.layer_lod, L::CHILD::LEVEL, L::CHILDS_PER_OWN());
// Base = stored child index * total children per own (shift by log2), plus the offset.
let layer_key = ( multily_with_2_pow_n( IndexStore::load(self.layer, self.layer_lod).into_usize(), L::LOG2_OF_CHILDS_PER_OWN_TOTAL()) ) + pos_offset;
VecIter {
layer: self.layer.child(),
wanted: self.wanted,
layer_key,
layer_lod: child_lod,
}
}
}
// Descend one vec layer into the next: same index math as the hash case,
// except the stored child index is looked up by this iterator's own flat
// `layer_key` instead of a `LodPos`.
impl<'a, L: DetailStore<KEY = usize> + IndexStore> Traversable<VecIter<'a, L::CHILD>>
for VecIter<'a, L>
where
L::CHILD: DetailStore, {
fn get(self) -> VecIter<'a, L::CHILD> {
// Wanted position aligned down to the child layer's level.
let child_lod = self.wanted.align_to_level(L::CHILD::LEVEL );
// Linear offset of that child cell within this parent's block of children.
let pos_offset = relative_to_1d(child_lod, self.layer_lod, L::CHILD::LEVEL, L::CHILDS_PER_OWN());
// Base = stored child index * total children per own (shift by log2), plus the offset.
let layer_key = ( multily_with_2_pow_n( IndexStore::load(self.layer, self.layer_key).into_usize(), L::LOG2_OF_CHILDS_PER_OWN_TOTAL()) ) + pos_offset;
VecIter {
layer: self.layer.child(),
wanted: self.wanted,
layer_key,
layer_lod: child_lod,
}
}
}
// Materialize at a hash layer: details are keyed by the layer-aligned LodPos.
impl<'a, L: DetailStore<KEY=LodPos>> Materializeable<&'a L::DETAIL> for HashIter<'a, L> {
fn mat(self) -> &'a L::DETAIL {
DetailStore::load(self.layer, self.layer_lod)
}
}
// Materialize at a vec layer: details are keyed by the flat `usize` index
// accumulated during the descent.
impl<'a, L: DetailStore<KEY=usize>> Materializeable<&'a L::DETAIL> for VecIter<'a, L> {
fn mat(self) -> &'a L::DETAIL {
DetailStore::load(self.layer, self.layer_key)
}
}
#[rustfmt::skip]
pub type ExampleData =
HashNestLayer<
VecNestLayer<
VecNestLayer<
VecLayer<
(), 0
> ,(), u16, 4
> ,Option<()> , u32, 9
> ,() ,u16. 13
i8, 0
> ,Option<()>, u16, 2
> ,() , u32, 3
> ,() ,u16, 4
>;
/*
What we actually care about is NOT how the index is stored (whether it is a LodPos or a u8 index) —
what matters is what KIND of index it is!
DIRECT HASH LOOKUP?
NESTED VEC LOOKUP!
Different indices (an index trait) have different storage, and that storage behaves differently!
A (data: Hashmap)
B (data: Vec, Index: Hashmap)
C (data: Vec, Index: Vec)
D (data: Vec, Index: Vec)
A.data = [1 entry]
B.data = [8 entries]
C.data = [64 entries]
D.data = [8 entries]
B.index = [0] <- every entry in A has 8 childs. there is 1 entry in A, it has ... 8 childs. The first of its childs can be found at 0*8 in B.data
C.index = [0, 1, 2, 3, 4, 5, 6, 7] <- because created in order, these are in order too; they mark the positions i*8 = [0,8,16,24,32,40,48,56] in C.data
D.index = [/,/,/,/,0,/,/,/,<56 more />] <- the first 4 of C don't have childs, then 1 C has 8 childs starting at 0*8 in D.data, the last of the 59 are again empty
NEW IDEA: IndexStore
DirectHashLookup {
store() -> <empty>
load(LodPos) -> LodAbs
}
NestedVecLookup {
store(THESE PARAMETERS ARE NOT THE SAME) -> <empty>
load(LodPos) -> LodPos
}
Tree->iter() -> A()
A.mat() -> A.data[a.pos]
a.pos = B.index[LodPos]
A.get() -> B()
B.mat() -> B.data[b.pos]
b.pos = C.index[]
*/
// TODO: instead of storing the absolute index in index, we store (index / number of entities), which means a u16 in Block can not only hold 2 full Subblocks (32^3 subblocks per block). but the full 2^16-1 ones.
#[cfg(test)]
mod tests {
    use crate::lodstore::newdata::*;
    use test::Bencher;

    /// Builds a small, fully populated `ExampleData` tree with known leaf
    /// values so the traversal tests can assert exact results.
    ///
    /// NOTE: the previous revision interleaved the removed `newdata` test
    /// (building an `ExampleDelta`) into this function's body; this is the
    /// reconstructed clean version.
    fn gen_simple_example() -> ExampleData {
        let mut detail_index = FxHashMap::default();
        // One top-level hash entry at origin, whose children start at index 0.
        detail_index.insert(LodPos::xyz(0, 0, 0), ((), 0));
        ExampleData {
            detail_index,
            child: VecNestLayer {
                detail: vec!((),(),()),
                index: vec!(0,1,u32::MAX),
                child: VecNestLayer {
                    detail: vec!(None,None,None,Some(()),Some(()),None,None,None,None,None,None,None,None,None,None,None),
                    index: vec!(0,u16::MAX,u16::MAX,0,u16::MAX,u16::MAX,u16::MAX,u16::MAX,u16::MAX,u16::MAX,u16::MAX,u16::MAX,u16::MAX,u16::MAX,u16::MAX,u16::MAX),
                    child: VecLayer {
                        detail: vec!(7,6,5,4,3,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
                    },
                },
            },
        }
    }

    // Smoke test: the traversal API type-checks end to end; the `if false`
    // guard keeps it from running against default (empty) data.
    #[test]
    fn compilation() {
        let x = ExampleData::default();
        let i = LodPos::xyz(0, 1, 2);
        if false {
            let y = x.trav(i);
            let ttc = y.get().get().get();
            let tt = ttc.mat();
        }
    }

    #[test]
    fn access_first_element() {
        let x = gen_simple_example();
        let i = LodPos::xyz(0, 0, 0);
        assert_eq!(*x.trav(i).get().get().get().mat(), 7_i8);
    }

    #[test]
    fn access_simple_elements() {
        let x = gen_simple_example();
        assert_eq!(*x.trav(LodPos::xyz(0, 0, 0)).get().get().get().mat(), 7_i8);
        assert_eq!(*x.trav(LodPos::xyz(0, 0, 1)).get().get().get().mat(), 6_i8);
        assert_eq!(*x.trav(LodPos::xyz(0, 0, 2)).get().get().get().mat(), 5_i8);
        assert_eq!(*x.trav(LodPos::xyz(0, 0, 3)).get().get().get().mat(), 4_i8);
        assert_eq!(*x.trav(LodPos::xyz(0, 1, 0)).get().get().get().mat(), 3_i8);
        assert_eq!(*x.trav(LodPos::xyz(0, 1, 1)).get().get().get().mat(), 2_i8);
        assert_eq!(*x.trav(LodPos::xyz(0, 1, 2)).get().get().get().mat(), 1_i8);
        assert_eq!(*x.trav(LodPos::xyz(0, 1, 3)).get().get().get().mat(), 0_i8);
        assert_eq!(*x.trav(LodPos::xyz(0, 3, 0)).get().get().get().mat(), 0_i8);
        assert_eq!(*x.trav(LodPos::xyz(1, 0, 0)).get().get().get().mat(), 0_i8);
        assert_eq!(*x.trav(LodPos::xyz(0, 2, 0)).get().get().get().mat(), 0_i8);
    }

    // Baseline: cost of constructing the iterator alone (hash lookup deferred).
    #[bench]
    fn bench_access_trav(b: &mut Bencher) {
        let x = gen_simple_example();
        let access = LodPos::xyz(0, 0, 0);
        b.iter(|| x.trav(access));
    }

    // One hash lookup: materialize at the top layer (level 4).
    #[bench]
    fn bench_access_3(b: &mut Bencher) {
        let x = gen_simple_example();
        let access = LodPos::xyz(0, 0, 0);
        b.iter(|| x.trav(access).mat());
    }

    // Full descent to the leaf: 1 hash lookup + vec lookups down to level 0.
    #[bench]
    fn bench_access_0(b: &mut Bencher) {
        let x = gen_simple_example();
        let access = LodPos::xyz(0, 0, 0);
        b.iter(|| x.trav(access).get().get().get().mat());
    }

    // Same as bench_access_0, but after a warm-up pass to measure best-case
    // (cache-hot) time.
    #[bench]
    fn bench_access_0_best_time(b: &mut Bencher) {
        let x = gen_simple_example();
        let access = LodPos::xyz(0, 0, 0);
        for _ in 0..10000 {
            //fill up the caches
            x.trav(access).get().get().get().mat();
        }
        b.iter(|| x.trav(access).get().get().get().mat());
    }
}

View File

@ -2,14 +2,16 @@ use crate::lodstore::{
LodData,
LodConfig,
data::CacheLine,
index::LodIndex,
index::AbsIndex,
lodpos::LodPos,
lodpos::AbsIndex,
area::LodArea,
delta::LodDelta,
};
use vek::*;
use std::u32;
pub type LodIndex = LodPos;
#[derive(Clone)]
pub struct Example9 {
data: [u8; 700],
@ -215,14 +217,15 @@ mod tests {
region::lod::example::ExampleLodConfig,
region::lod::example::*,
lodstore::LodData,
lodstore::index::LodIndex,
lodstore::index,
lodstore::lodpos::LodPos,
lodstore::lodpos,
};
use std::{thread, time, mem::size_of};
use vek::*;
use rand::Rng;
use rand::ThreadRng;
use test::Bencher;
pub type LodIndex = LodPos;
fn randIndex(rng: &mut ThreadRng) -> LodIndex {
let x: u16 = rng.gen();
@ -239,10 +242,10 @@ mod tests {
let mut rng = rand::thread_rng();
let mut delta = ExampleDelta::new();
let mut result = Example::new();
let abs9 = (index::two_pow_u(15-13) as u64).pow(3);
let abs5 = (index::two_pow_u(15-9) as u64).pow(3);
let abs0 = (index::two_pow_u(15-4) as u64).pow(3);
let abs_4 = (index::two_pow_u(15) as u64).pow(3);
let abs9 = (lodpos::two_pow_u(15-13) as u64).pow(3);
let abs5 = (lodpos::two_pow_u(15-9) as u64).pow(3);
let abs0 = (lodpos::two_pow_u(15-4) as u64).pow(3);
let abs_4 = (lodpos::two_pow_u(15) as u64).pow(3);
let p_e9 = 1.0+p_foreign;
let p_e5 = p_e9*p_e5;
let p_e0 = p_e5*p_e0;
@ -252,7 +255,7 @@ mod tests {
let act0 = (abs0 as f32 * p_e0 ) as u32;
let act_4 = (abs_4 as f32 * p_e_4 ) as u32;
let w9 = index::two_pow_u(13) as u32;
let w9 = lodpos::two_pow_u(13) as u32;
result.layer13 = vec![Example9::new(); 8*8*8];
result.child13 = vec![u32::MAX; 8*8*8];
println!("size test {} -- {}", size_of::<usize>(), size_of::<Option<usize>>());
@ -267,19 +270,19 @@ mod tests {
println!("creating Region with {} 5er, {} 0er, {} -4er", act5, act0 , act_4);
while result.layer9.len() < act5 as usize {
let index = randIndex(&mut rng);
let low = index.align_to_layer_id(9);
let low = index.align_to_level(9);
let area = LodArea::new(low, low);
result.make_at_least(area,9, Some(&mut delta));
}
while result.layer4.len() < act0 as usize {
let index = randIndex(&mut rng);
let low = index.align_to_layer_id(4);
let low = index.align_to_level(4);
let area = LodArea::new(low, low);
result.make_at_least(area, 4, Some(&mut delta));
}
while result.layer0.len() < act_4 as usize {
let index = randIndex(&mut rng);
let low = index.align_to_layer_id(0);
let low = index.align_to_level(0);
let area = LodArea::new(low, low);
result.make_at_least(area, 0, Some(&mut delta));
}

View File

@ -1,12 +1,13 @@
use crate::lodstore::{
LodData,
LodConfig,
index::LodIndex,
index::LodPos,
index::AbsIndex,
delta::LodDelta,
};
use vek::*;
use std::u32;
pub type LodIndex = LodPos;
#[derive(Debug, Clone)]
pub struct Region9 {

View File

@ -1,12 +1,13 @@
use crate::lodstore::{
LodData,
LodConfig,
index::LodIndex,
index::AbsIndex,
lodpos::LodPos,
lodpos::AbsIndex,
delta::LodDelta,
};
use vek::*;
use std::u32;
pub type LodIndex = LodPos;
#[derive(Debug, Clone)]
pub struct Region9 {