iced_graphics/cache.rs

//! Cache computations and efficiently reuse them.
use std::cell::RefCell;
use std::fmt;
use std::mem;
use std::sync::atomic::{self, AtomicU64};

/// A simple cache that stores generated values to avoid recomputation.
///
/// Keeps track of the last generated value after clearing.
pub struct Cache<T> {
    group: Group,
    state: RefCell<State<T>>,
}

impl<T> Cache<T> {
    /// Creates a new empty [`Cache`].
    pub fn new() -> Self {
        Cache {
            group: Group::singleton(),
            state: RefCell::new(State::Empty { previous: None }),
        }
    }

    /// Creates a new empty [`Cache`] with the given [`Group`].
    ///
    /// Caches within the same group may reuse internal rendering storage.
    ///
    /// You should generally group caches that are likely to change
    /// together.
    pub fn with_group(group: Group) -> Self {
        assert!(
            !group.is_singleton(),
            "The group {group:?} cannot be shared!"
        );

        Cache {
            group,
            state: RefCell::new(State::Empty { previous: None }),
        }
    }

    /// Returns the [`Group`] of the [`Cache`].
    pub fn group(&self) -> Group {
        self.group
    }

    /// Puts the given value in the [`Cache`].
    ///
    /// Notice that, given this is a cache, a mutable reference is not
    /// necessary to call this method. You can safely update the cache in
    /// rendering code.
    pub fn put(&self, value: T) {
        *self.state.borrow_mut() = State::Filled { current: value };
    }

    /// Returns a reference cell to the internal [`State`] of the [`Cache`].
    pub fn state(&self) -> &RefCell<State<T>> {
        &self.state
    }

    /// Clears the [`Cache`].
    pub fn clear(&self) {
        let mut state = self.state.borrow_mut();

        let previous =
            mem::replace(&mut *state, State::Empty { previous: None });

        let previous = match previous {
            State::Empty { previous } => previous,
            State::Filled { current } => Some(current),
        };

        *state = State::Empty { previous };
    }
}
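
// Illustrative usage sketch (not part of the original module): exercises
// `put`, `state`, and `clear` to show that clearing empties the cache while
// moving the current value into `previous` rather than dropping it.
#[cfg(test)]
mod cache_example {
    use super::{Cache, State};

    #[test]
    fn clearing_keeps_the_previous_value() {
        // `put` only needs `&self`, so rendering code can fill the cache
        // through a shared reference.
        let cache: Cache<u32> = Cache::new();
        cache.put(42);

        // Clearing empties the cache but remembers the last value.
        cache.clear();

        match &*cache.state().borrow() {
            State::Empty { previous } => assert_eq!(*previous, Some(42)),
            State::Filled { .. } => panic!("the cache should be empty"),
        }
    }
}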

/// A cache group.
///
/// Caches that share the same group generally change together.
///
/// A cache group can be used to implement certain performance
/// optimizations during rendering, like batching or sharing atlases.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Group {
    id: u64,
    is_singleton: bool,
}

impl Group {
    /// Generates a new unique cache [`Group`].
    pub fn unique() -> Self {
        static NEXT: AtomicU64 = AtomicU64::new(0);

        Self {
            id: NEXT.fetch_add(1, atomic::Ordering::Relaxed),
            is_singleton: false,
        }
    }

    /// Returns `true` if the [`Group`] can only ever have a
    /// single [`Cache`] in it.
    ///
    /// This is the default kind of [`Group`] assigned when using
    /// [`Cache::new`].
    ///
    /// Knowing that a [`Group`] will never be shared may be
    /// useful for rendering backends to perform additional
    /// optimizations.
    pub fn is_singleton(self) -> bool {
        self.is_singleton
    }

    fn singleton() -> Self {
        Self {
            is_singleton: true,
            ..Self::unique()
        }
    }
}
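
// Illustrative sketch (not part of the original module): caches created with
// `Cache::with_group` can share one `Group`, while `Cache::new` assigns a
// private singleton group. A backend could use the shared group to batch them.
#[cfg(test)]
mod group_example {
    use super::{Cache, Group};

    #[test]
    fn shared_and_singleton_groups() {
        let shared = Group::unique();

        // `Group` is `Copy`, so the same group can be handed to many caches.
        let overlay: Cache<String> = Cache::with_group(shared);
        let tooltip: Cache<String> = Cache::with_group(shared);
        let background: Cache<String> = Cache::new();

        // The explicitly grouped caches report the same, shareable group...
        assert_eq!(overlay.group(), tooltip.group());
        assert!(!overlay.group().is_singleton());

        // ...while `Cache::new` creates a unique singleton group.
        assert!(background.group().is_singleton());
        assert_ne!(background.group(), overlay.group());
    }
}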

impl<T> fmt::Debug for Cache<T>
where
    T: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use std::ops::Deref;

        let state = self.state.borrow();

        match state.deref() {
            State::Empty { previous } => {
                write!(f, "Cache::Empty {{ previous: {previous:?} }}")
            }
            State::Filled { current } => {
                write!(f, "Cache::Filled {{ current: {current:?} }}")
            }
        }
    }
}

impl<T> Default for Cache<T> {
    fn default() -> Self {
        Self::new()
    }
}

/// The state of a [`Cache`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum State<T> {
    /// The [`Cache`] is empty.
    Empty {
        /// The previous value of the [`Cache`].
        previous: Option<T>,
    },
    /// The [`Cache`] is filled.
    Filled {
        /// The current value of the [`Cache`].
        current: T,
    },
}

/// A piece of data that can be cached.
pub trait Cached: Sized {
    /// The type of cache produced.
    type Cache: Clone;

    /// Loads the [`Cache`] into a proper instance.
    ///
    /// [`Cache`]: Self::Cache
    fn load(cache: &Self::Cache) -> Self;

    /// Caches this value, producing its corresponding [`Cache`].
    ///
    /// [`Cache`]: Self::Cache
    fn cache(self, group: Group, previous: Option<Self::Cache>) -> Self::Cache;
}

#[cfg(debug_assertions)]
impl Cached for () {
    type Cache = ();

    fn load(_cache: &Self::Cache) -> Self {}

    fn cache(
        self,
        _group: Group,
        _previous: Option<Self::Cache>,
    ) -> Self::Cache {
    }
}