veloren_common/volumes/
chunk.rs

1use crate::vol::{
2    BaseVol, IntoPosIterator, IntoVolIterator, RasterableVol, ReadVol, VolSize, WriteVol,
3};
4use core::{hash::Hash, iter::Iterator, marker::PhantomData, mem};
5use hashbrown::HashMap;
6use serde::{Deserialize, Serialize};
7use vek::*;
8
/// Errors returned by fallible `Chunk` volume accessors (`get`/`set`).
#[derive(Debug)]
pub enum ChunkError {
    /// The queried position lies outside `[0, S::SIZE)` on at least one axis.
    OutOfBounds,
}
13
/// The volume is spatially subdivided into groups of `4*4*4` blocks. Since a
/// `Chunk` is of total size `32*32*16`, this implies that there are `8*8*4`
/// groups. (These numbers are generic in the actual code such that there are
/// always `256` groups. I.e. the group size is chosen depending on the desired
/// total size of the `Chunk`.)
///
/// There's a single vector `self.vox` which consecutively stores these groups.
/// Each group might or might not be contained in `self.vox`. A group that is
/// not contained represents that the full group consists only of `self.default`
/// voxels. This saves a lot of memory because oftentimes a `Chunk` consists of
/// either a lot of air or a lot of stone.
///
/// To track whether a group is contained in `self.vox`, there's an index buffer
/// `self.indices : [u8; 256]`. It contains for each group
///
/// * (a) the order in which it has been inserted into `self.vox`, if the group
///   is contained in `self.vox` or
/// * (b) 255, otherwise. That case represents that the whole group consists
///   only of `self.default` voxels.
///
/// (Note that 255 is a valid insertion order for case (a) only if `self.vox` is
/// full and then no other group has the index 255. Therefore there's no
/// ambiguity.)
///
/// ## Rationale:
///
/// The index buffer should be small because:
///
/// * Small size increases the probability that it will always be in cache.
/// * The index buffer is allocated for every `Chunk` and an almost empty
///   `Chunk` shall not consume too much memory.
///
/// The number of 256 groups is particularly nice because it means that the
/// index buffer can consist of `u8`s. This keeps the space requirement for the
/// index buffer as low as 4 cache lines.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Chunk<V, S: VolSize, M> {
    // Per-group insertion order into `vox`, or 255 for implicit (all-default)
    // groups; see the disambiguation rule in the type-level docs above.
    indices: Vec<u8>, /* TODO (haslersn): Box<[u8; S::SIZE.x * S::SIZE.y * S::SIZE.z]>, this is
                       * however not possible in Rust yet */
    // Explicitly stored groups, each `GROUP_VOLUME` voxels, concatenated in
    // insertion order.
    vox: Vec<V>,
    // The voxel value that implicit groups consist of.
    default: V,
    // User-supplied per-chunk metadata.
    meta: M,
    // Ties the chunk to its compile-time size `S` without storing it.
    phantom: PhantomData<S>,
}
58
impl<V, S: VolSize, M> Chunk<V, S, M> {
    /// Number of groups along each axis (`SIZE / GROUP_SIZE`, componentwise).
    pub const GROUP_COUNT: Vec3<u32> = Vec3::new(
        S::SIZE.x / Self::GROUP_SIZE.x,
        S::SIZE.y / Self::GROUP_SIZE.y,
        S::SIZE.z / Self::GROUP_SIZE.z,
    );
    /// `GROUP_COUNT_TOTAL` is always `256`, except if `VOLUME < 256`
    const GROUP_COUNT_TOTAL: u32 = Self::VOLUME / Self::GROUP_VOLUME;
    // Since `GROUP_VOLUME` is a power of two, `GROUP_VOLUME * 4 - 1` is an
    // all-ones pattern of `log2(GROUP_VOLUME) + 2` bits, so this evaluates to
    // `2^((log2(GROUP_VOLUME) + 2) / 3)`: the cube root of `GROUP_VOLUME`,
    // rounded up to a power of two.
    const GROUP_LONG_SIDE_LEN: u32 = 1 << ((Self::GROUP_VOLUME * 4 - 1).count_ones() / 3);
    // Groups are `long x long x rest`, where `rest <= long` and the product
    // is exactly `GROUP_VOLUME`.
    const GROUP_SIZE: Vec3<u32> = Vec3::new(
        Self::GROUP_LONG_SIDE_LEN,
        Self::GROUP_LONG_SIDE_LEN,
        Self::GROUP_VOLUME / (Self::GROUP_LONG_SIDE_LEN * Self::GROUP_LONG_SIDE_LEN),
    );
    // Branchless select: `VOLUME / 256` voxels per group, but at least 1
    // (covers degenerate chunks smaller than 256 voxels).
    const GROUP_VOLUME: u32 = [Self::VOLUME / 256, 1][(Self::VOLUME < 256) as usize];
    /// Total number of voxels in the chunk.
    const VOLUME: u32 = S::SIZE.x * S::SIZE.y * S::SIZE.z;
76    /// Creates a new `Chunk` with the provided dimensions and all voxels filled
77    /// with duplicates of the provided voxel.
78    pub fn filled(default: V, meta: M) -> Self {
79        // TODO (haslersn): Alter into compile time assertions
80        //
81        // An extent is valid if it fulfils the following conditions.
82        //
83        // 1. In each direction, the extent is a power of two.
84        // 2. In each direction, the group size is in [1, 256].
85        // 3. In each direction, the group count is in [1, 256].
86        //
87        // Rationales:
88        //
89        // 1. We have code in the implementation that assumes it. In particular, code
90        //    using `.count_ones()`.
91        // 2. The maximum group size is `256x256x256`, because there's code that stores
92        //    group relative indices as `u8`.
93        // 3. There's code that stores group indices as `u8`.
94        debug_assert!(S::SIZE.x.is_power_of_two());
95        debug_assert!(S::SIZE.y.is_power_of_two());
96        debug_assert!(S::SIZE.z.is_power_of_two());
97        debug_assert!(0 < Self::GROUP_SIZE.x);
98        debug_assert!(0 < Self::GROUP_SIZE.y);
99        debug_assert!(0 < Self::GROUP_SIZE.z);
100        debug_assert!(Self::GROUP_SIZE.x <= 256);
101        debug_assert!(Self::GROUP_SIZE.y <= 256);
102        debug_assert!(Self::GROUP_SIZE.z <= 256);
103        debug_assert!(0 < Self::GROUP_COUNT.x);
104        debug_assert!(0 < Self::GROUP_COUNT.y);
105        debug_assert!(0 < Self::GROUP_COUNT.z);
106        debug_assert!(Self::GROUP_COUNT.x <= 256);
107        debug_assert!(Self::GROUP_COUNT.y <= 256);
108        debug_assert!(Self::GROUP_COUNT.z <= 256);
109
110        Self {
111            indices: vec![255; Self::GROUP_COUNT_TOTAL as usize],
112            vox: Vec::new(),
113            default,
114            meta,
115            phantom: PhantomData,
116        }
117    }
118
119    /// Compress this subchunk by frequency.
120    pub fn defragment(&mut self)
121    where
122        V: Clone + Eq + Hash,
123    {
124        // First, construct a HashMap with max capacity equal to GROUP_COUNT (since each
125        // filled group can have at most one slot).
126        let mut map: HashMap<_, Vec<_>> = HashMap::with_capacity(Self::GROUP_COUNT_TOTAL as usize);
127        let vox = &self.vox;
128        let default = &self.default;
129        self.indices
130            .iter()
131            .enumerate()
132            .for_each(|(grp_idx, &base)| {
133                let start = usize::from(base) * Self::GROUP_VOLUME as usize;
134                let end = start + Self::GROUP_VOLUME as usize;
135                if let Some(group) = vox.get(start..end) {
136                    // Check to see if all blocks in this group are the same.
137                    let mut group = group.iter();
138                    let first = group.next().expect("GROUP_VOLUME ≥ 1");
139                    if group.all(|block| block == first) {
140                        // All blocks in the group were the same, so add our position to this entry
141                        // in the HashMap.
142                        map.entry(first).or_default().push(grp_idx);
143                    }
144                } else {
145                    // This slot is empty (i.e. has the default value).
146                    map.entry(default).or_default().push(grp_idx);
147                }
148            });
149        // Now, find the block with max frequency in the HashMap and make that our new
150        // default.
151        let (new_default, default_groups) = if let Some((new_default, default_groups)) = map
152            .into_iter()
153            .max_by_key(|(_, default_groups)| default_groups.len())
154        {
155            (new_default.clone(), default_groups)
156        } else {
157            // There is no good choice for default group, so leave it as is.
158            return;
159        };
160
161        // For simplicity, we construct a completely new voxel array rather than
162        // attempting in-place updates (TODO: consider changing this).
163        let mut new_vox =
164            Vec::with_capacity(Self::GROUP_COUNT_TOTAL as usize - default_groups.len());
165        let num_groups = self.num_groups();
166        self.indices
167            .iter_mut()
168            .enumerate()
169            .for_each(|(grp_idx, base)| {
170                if default_groups.contains(&grp_idx) {
171                    // Default groups become 255
172                    *base = 255;
173                } else {
174                    // Other groups are allocated in increasing order by group index.
175                    // NOTE: Cannot overflow since the current implicit group index can't be at the
176                    // end of the vector until at the earliest after the 256th iteration.
177                    let old_base = usize::from(mem::replace(
178                        base,
179                        (new_vox.len() / Self::GROUP_VOLUME as usize) as u8,
180                    ));
181                    if old_base >= num_groups {
182                        // Old default, which (since we reached this branch) is not equal to the new
183                        // default, so we have to write out the old default.
184                        new_vox
185                            .resize(new_vox.len() + Self::GROUP_VOLUME as usize, default.clone());
186                    } else {
187                        let start = old_base * Self::GROUP_VOLUME as usize;
188                        let end = start + Self::GROUP_VOLUME as usize;
189                        new_vox.extend_from_slice(&vox[start..end]);
190                    }
191                }
192            });
193
194        // Finally, reset our vox and default values to the new ones.
195        self.vox = new_vox;
196        self.default = new_default;
197    }
198
199    /// Get a reference to the internal metadata.
200    pub fn metadata(&self) -> &M { &self.meta }
201
202    /// Get a mutable reference to the internal metadata.
203    pub fn metadata_mut(&mut self) -> &mut M { &mut self.meta }
204
205    pub fn num_groups(&self) -> usize { self.vox.len() / Self::GROUP_VOLUME as usize }
206
207    /// Returns `Some(v)` if the block is homogeneous and contains nothing but
208    /// voxels of value `v`, and `None` otherwise.  This method is
209    /// conservative (it may return None when the chunk is
210    /// actually homogeneous) unless called immediately after `defragment`.
211    pub fn homogeneous(&self) -> Option<&V> {
212        if self.num_groups() == 0 {
213            Some(&self.default)
214        } else {
215            None
216        }
217    }
218
219    #[inline(always)]
220    fn grp_idx(pos: Vec3<i32>) -> u32 {
221        let grp_pos = pos.map2(Self::GROUP_SIZE, |e, s| e as u32 / s);
222        (grp_pos.z * (Self::GROUP_COUNT.y * Self::GROUP_COUNT.x))
223            + (grp_pos.y * Self::GROUP_COUNT.x)
224            + (grp_pos.x)
225    }
226
227    #[inline(always)]
228    fn rel_idx(pos: Vec3<i32>) -> u32 {
229        let rel_pos = pos.map2(Self::GROUP_SIZE, |e, s| e as u32 % s);
230        (rel_pos.z * (Self::GROUP_SIZE.y * Self::GROUP_SIZE.x))
231            + (rel_pos.y * Self::GROUP_SIZE.x)
232            + (rel_pos.x)
233    }
234
235    #[inline(always)]
236    fn idx_unchecked(&self, pos: Vec3<i32>) -> Option<usize> {
237        let grp_idx = Self::grp_idx(pos);
238        let rel_idx = Self::rel_idx(pos);
239        let base = u32::from(self.indices[grp_idx as usize]);
240        let num_groups = self.vox.len() as u32 / Self::GROUP_VOLUME;
241        if base >= num_groups {
242            None
243        } else {
244            Some((base * Self::GROUP_VOLUME + rel_idx) as usize)
245        }
246    }
247
248    #[inline(always)]
249    fn force_idx_unchecked(&mut self, pos: Vec3<i32>) -> usize
250    where
251        V: Clone,
252    {
253        let grp_idx = Self::grp_idx(pos);
254        let rel_idx = Self::rel_idx(pos);
255        let base = &mut self.indices[grp_idx as usize];
256        let num_groups = self.vox.len() as u32 / Self::GROUP_VOLUME;
257        if u32::from(*base) >= num_groups {
258            *base = num_groups as u8;
259            self.vox.extend(std::iter::repeat_n(
260                self.default.clone(),
261                Self::GROUP_VOLUME as usize,
262            ));
263        }
264        (u32::from(*base) * Self::GROUP_VOLUME + rel_idx) as usize
265    }
266
267    #[inline(always)]
268    fn get_unchecked(&self, pos: Vec3<i32>) -> &V {
269        match self.idx_unchecked(pos) {
270            Some(idx) => &self.vox[idx],
271            None => &self.default,
272        }
273    }
274
275    #[inline(always)]
276    fn set_unchecked(&mut self, pos: Vec3<i32>, vox: V) -> V
277    where
278        V: Clone + PartialEq,
279    {
280        if vox != self.default {
281            let idx = self.force_idx_unchecked(pos);
282            mem::replace(&mut self.vox[idx], vox)
283        } else if let Some(idx) = self.idx_unchecked(pos) {
284            mem::replace(&mut self.vox[idx], vox)
285        } else {
286            self.default.clone()
287        }
288    }
289}
290
// `Chunk` exposes `V` as its voxel type and `ChunkError` for fallible access.
impl<V, S: VolSize, M> BaseVol for Chunk<V, S, M> {
    type Error = ChunkError;
    type Vox = V;
}
295
// The chunk's raster dimensions are fixed at compile time by `S`.
impl<V, S: VolSize, M> RasterableVol for Chunk<V, S, M> {
    const SIZE: Vec3<u32> = S::SIZE;
}
299
300impl<V, S: VolSize, M> ReadVol for Chunk<V, S, M> {
301    #[inline(always)]
302    fn get(&self, pos: Vec3<i32>) -> Result<&Self::Vox, Self::Error> {
303        if !pos
304            .map2(S::SIZE, |e, s| 0 <= e && e < s as i32)
305            .reduce_and()
306        {
307            Err(Self::Error::OutOfBounds)
308        } else {
309            Ok(self.get_unchecked(pos))
310        }
311    }
312
313    #[inline(always)]
314    fn get_unchecked(&self, pos: Vec3<i32>) -> &Self::Vox { self.get_unchecked(pos) }
315
316    fn for_each_in(&self, mut aabb: Aabb<i32>, mut f: impl FnMut(Vec3<i32>, Self::Vox))
317    where
318        Self::Vox: Copy,
319    {
320        aabb.intersect(Aabb {
321            min: Vec3::zero(),
322            max: S::SIZE.map(|e| e as i32) - 1,
323        });
324        for z in aabb.min.z..aabb.max.z + 1 {
325            for y in aabb.min.y..aabb.max.y + 1 {
326                for x in aabb.min.x..aabb.max.x + 1 {
327                    f(Vec3::new(x, y, z), *self.get_unchecked(Vec3::new(x, y, z)));
328                }
329            }
330        }
331    }
332}
333
334impl<V: Clone + PartialEq, S: VolSize, M> WriteVol for Chunk<V, S, M> {
335    #[inline(always)]
336    fn set(&mut self, pos: Vec3<i32>, vox: Self::Vox) -> Result<Self::Vox, Self::Error> {
337        if !pos
338            .map2(S::SIZE, |e, s| 0 <= e && e < s as i32)
339            .reduce_and()
340        {
341            Err(Self::Error::OutOfBounds)
342        } else {
343            Ok(self.set_unchecked(pos, vox))
344        }
345    }
346}
347
/// Iterator over all positions in `[lb, ub)`, visited group-by-group to match
/// `Chunk`'s storage layout (see `Iterator::next` below).
pub struct ChunkPosIter<V, S: VolSize, M> {
    // NOTE(review): an earlier comment claimed these are stored as `u8`s to
    // reduce memory footprint; the fields are plain `Vec3<i32>` bounds plus
    // the current cursor.
    lb: Vec3<i32>,
    ub: Vec3<i32>,
    pos: Vec3<i32>,
    phantom: PhantomData<Chunk<V, S, M>>,
}
355
356impl<V, S: VolSize, M> ChunkPosIter<V, S, M> {
357    fn new(lower_bound: Vec3<i32>, upper_bound: Vec3<i32>) -> Self {
358        // If the range is empty, then we have the special case `ub = lower_bound`.
359        let ub = if lower_bound.map2(upper_bound, |l, u| l < u).reduce_and() {
360            upper_bound
361        } else {
362            lower_bound
363        };
364        Self {
365            lb: lower_bound,
366            ub,
367            pos: lower_bound,
368            phantom: PhantomData,
369        }
370    }
371}
372
impl<V, S: VolSize, M> Iterator for ChunkPosIter<V, S, M> {
    type Item = Vec3<i32>;

    /// Advances in a group-aware order: all positions inside the current
    /// `GROUP_SIZE`-aligned group are visited first (x fastest, then y, then
    /// z), after which the cursor jumps to the next group (again x, then y,
    /// then z). This matches `Chunk`'s group-contiguous storage, which should
    /// improve cache locality over a plain row-major sweep. The mask
    /// arithmetic below relies on group side lengths being powers of two.
    #[inline(always)]
    fn next(&mut self) -> Option<Self::Item> {
        // `pos.z >= ub.z` marks exhaustion (`new` guarantees `ub == lb` for
        // empty ranges, so those are covered too).
        if self.pos.z >= self.ub.z {
            return None;
        }
        let res = Some(self.pos);

        // Phase 1: step within the current group. Landing on a multiple of
        // the group side means we crossed a group boundary, so fall through
        // and wrap instead of returning.
        self.pos.x += 1;
        if self.pos.x != self.ub.x && self.pos.x % Chunk::<V, S, M>::GROUP_SIZE.x as i32 != 0 {
            return res;
        }
        // Wrap x back to the start of the current group (clear the low bits),
        // but never below the lower bound.
        self.pos.x = std::cmp::max(
            self.lb.x,
            (self.pos.x - 1) & !(Chunk::<V, S, M>::GROUP_SIZE.x as i32 - 1),
        );

        self.pos.y += 1;
        if self.pos.y != self.ub.y && self.pos.y % Chunk::<V, S, M>::GROUP_SIZE.y as i32 != 0 {
            return res;
        }
        self.pos.y = std::cmp::max(
            self.lb.y,
            (self.pos.y - 1) & !(Chunk::<V, S, M>::GROUP_SIZE.y as i32 - 1),
        );

        self.pos.z += 1;
        if self.pos.z != self.ub.z && self.pos.z % Chunk::<V, S, M>::GROUP_SIZE.z as i32 != 0 {
            return res;
        }
        self.pos.z = std::cmp::max(
            self.lb.z,
            (self.pos.z - 1) & !(Chunk::<V, S, M>::GROUP_SIZE.z as i32 - 1),
        );

        // Phase 2: the current group is exhausted; advance to the next group.
        // `(v | (side - 1)) + 1` rounds v up to the next multiple of the
        // group side.
        self.pos.x = (self.pos.x | (Chunk::<V, S, M>::GROUP_SIZE.x as i32 - 1)) + 1;
        if self.pos.x < self.ub.x {
            return res;
        }
        self.pos.x = self.lb.x;

        self.pos.y = (self.pos.y | (Chunk::<V, S, M>::GROUP_SIZE.y as i32 - 1)) + 1;
        if self.pos.y < self.ub.y {
            return res;
        }
        self.pos.y = self.lb.y;

        // If this pushes z past `ub.z`, the next call will return `None`.
        self.pos.z = (self.pos.z | (Chunk::<V, S, M>::GROUP_SIZE.z as i32 - 1)) + 1;

        res
    }
}
427
/// Iterator over `(position, &voxel)` pairs of a chunk, in the same order as
/// `ChunkPosIter`.
pub struct ChunkVolIter<'a, V, S: VolSize, M> {
    chunk: &'a Chunk<V, S, M>,
    iter_impl: ChunkPosIter<V, S, M>,
}
432
433impl<'a, V, S: VolSize, M> Iterator for ChunkVolIter<'a, V, S, M> {
434    type Item = (Vec3<i32>, &'a V);
435
436    #[inline(always)]
437    fn next(&mut self) -> Option<Self::Item> {
438        self.iter_impl
439            .next()
440            .map(|pos| (pos, self.chunk.get_unchecked(pos)))
441    }
442}
443
444impl<V, S: VolSize, M> Chunk<V, S, M> {
445    /// It's possible to obtain a positional iterator without having a `Chunk`
446    /// instance.
447    pub fn pos_iter(lower_bound: Vec3<i32>, upper_bound: Vec3<i32>) -> ChunkPosIter<V, S, M> {
448        ChunkPosIter::<V, S, M>::new(lower_bound, upper_bound)
449    }
450}
451
452impl<V, S: VolSize, M> IntoPosIterator for &Chunk<V, S, M> {
453    type IntoIter = ChunkPosIter<V, S, M>;
454
455    fn pos_iter(self, lower_bound: Vec3<i32>, upper_bound: Vec3<i32>) -> Self::IntoIter {
456        Chunk::<V, S, M>::pos_iter(lower_bound, upper_bound)
457    }
458}
459
460impl<'a, V, S: VolSize, M> IntoVolIterator<'a> for &'a Chunk<V, S, M> {
461    type IntoIter = ChunkVolIter<'a, V, S, M>;
462
463    fn vol_iter(self, lower_bound: Vec3<i32>, upper_bound: Vec3<i32>) -> Self::IntoIter {
464        ChunkVolIter::<'a, V, S, M> {
465            chunk: self,
466            iter_impl: ChunkPosIter::<V, S, M>::new(lower_bound, upper_bound),
467        }
468    }
469}