iced_graphics/cache.rs

//! Cache computations and efficiently reuse them.
use std::cell::RefCell;
use std::fmt;
use std::mem;
use std::sync::atomic::{self, AtomicU64};

/// A simple cache that stores generated values to avoid recomputation.
///
/// Keeps track of the last generated value after clearing.
pub struct Cache<T> {
    group: Group,
    state: RefCell<State<T>>,
}

impl<T> Cache<T> {
    /// Creates a new empty [`Cache`].
    pub fn new() -> Self {
        Cache {
            group: Group::singleton(),
            state: RefCell::new(State::Empty { previous: None }),
        }
    }

    /// Creates a new empty [`Cache`] with the given [`Group`].
    ///
    /// Caches within the same group may reuse internal rendering storage.
    ///
    /// You should generally group caches that are likely to change
    /// together.
    pub fn with_group(group: Group) -> Self {
        assert!(
            !group.is_singleton(),
            "The group {group:?} cannot be shared!"
        );

        Cache {
            group,
            state: RefCell::new(State::Empty { previous: None }),
        }
    }

    /// Returns the [`Group`] of the [`Cache`].
    pub fn group(&self) -> Group {
        self.group
    }

    /// Puts the given value in the [`Cache`].
    ///
    /// Notice that, given this is a cache, a mutable reference is not
    /// necessary to call this method. You can safely update the cache in
    /// rendering code.
    pub fn put(&self, value: T) {
        *self.state.borrow_mut() = State::Filled { current: value };
    }

    /// Returns a reference cell to the internal [`State`] of the [`Cache`].
    pub fn state(&self) -> &RefCell<State<T>> {
        &self.state
    }

    /// Clears the [`Cache`].
    pub fn clear(&self) {
        let mut state = self.state.borrow_mut();

        let previous =
            mem::replace(&mut *state, State::Empty { previous: None });

        let previous = match previous {
            State::Empty { previous } => previous,
            State::Filled { current } => Some(current),
        };

        *state = State::Empty { previous };
    }
}
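
// Illustrative only: a minimal sketch of typical [`Cache`] usage, assuming a
// `String` payload. It is not part of the module's API; it just exercises
// `put`, `clear`, and `state` as documented above.
#[cfg(test)]
mod cache_usage_sketch {
    use super::{Cache, State};

    #[test]
    fn put_clear_and_inspect() {
        let cache: Cache<String> = Cache::new();

        // A shared reference is enough to update the cache.
        cache.put(String::from("expensive result"));

        // Clearing keeps the last generated value around as `previous`.
        cache.clear();

        match &*cache.state().borrow() {
            State::Empty { previous } => {
                assert_eq!(previous.as_deref(), Some("expensive result"));
            }
            State::Filled { .. } => panic!("the cache should be empty"),
        }
    }
}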

/// A cache group.
///
/// Caches that share the same group generally change together.
///
/// A cache group can be used to implement certain performance
/// optimizations during rendering, like batching or sharing atlases.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Group {
    id: u64,
    is_singleton: bool,
}

impl Group {
    /// Generates a new unique cache [`Group`].
    pub fn unique() -> Self {
        static NEXT: AtomicU64 = AtomicU64::new(0);

        Self {
            id: NEXT.fetch_add(1, atomic::Ordering::Relaxed),
            is_singleton: false,
        }
    }

    /// Returns `true` if the [`Group`] can only ever have a
    /// single [`Cache`] in it.
    ///
    /// This is the default kind of [`Group`] assigned when using
    /// [`Cache::new`].
    ///
    /// Knowing that a [`Group`] will never be shared may be
    /// useful for rendering backends to perform additional
    /// optimizations.
    pub fn is_singleton(self) -> bool {
        self.is_singleton
    }

    fn singleton() -> Self {
        Self {
            is_singleton: true,
            ..Self::unique()
        }
    }
}
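
// Illustrative only: a sketch of how related caches might share a [`Group`],
// assuming two layers that are redrawn together. The group comes from
// `Group::unique` and is handed to `Cache::with_group`; `Cache::new` keeps a
// private singleton group instead.
#[cfg(test)]
mod group_usage_sketch {
    use super::{Cache, Group};

    #[test]
    fn shared_group_is_not_a_singleton() {
        let group = Group::unique();

        // Both caches report the same, shareable group.
        let background: Cache<u32> = Cache::with_group(group);
        let foreground: Cache<u32> = Cache::with_group(group);

        assert_eq!(background.group(), foreground.group());
        assert!(!background.group().is_singleton());

        // `Cache::new` assigns a singleton group of its own.
        assert!(Cache::<u32>::new().group().is_singleton());
    }
}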

impl<T> fmt::Debug for Cache<T>
where
    T: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use std::ops::Deref;

        let state = self.state.borrow();

        match state.deref() {
            State::Empty { previous } => {
                write!(f, "Cache::Empty {{ previous: {previous:?} }}")
            }
            State::Filled { current } => {
                write!(f, "Cache::Filled {{ current: {current:?} }}")
            }
        }
    }
}

impl<T> Default for Cache<T> {
    fn default() -> Self {
        Self::new()
    }
}

/// The state of a [`Cache`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum State<T> {
    /// The [`Cache`] is empty.
    Empty {
        /// The previous value of the [`Cache`].
        previous: Option<T>,
    },
    /// The [`Cache`] is filled.
    Filled {
        /// The current value of the [`Cache`].
        current: T,
    },
}

/// A piece of data that can be cached.
pub trait Cached: Sized {
    /// The type of cache produced.
    type Cache: Clone;

    /// Loads the [`Cache`] into a proper instance.
    ///
    /// [`Cache`]: Self::Cache
    fn load(cache: &Self::Cache) -> Self;

    /// Caches this value, producing its corresponding [`Cache`].
    ///
    /// [`Cache`]: Self::Cache
    fn cache(self, group: Group, previous: Option<Self::Cache>) -> Self::Cache;
}

#[cfg(debug_assertions)]
impl Cached for () {
    type Cache = ();

    fn load(_cache: &Self::Cache) -> Self {}

    fn cache(
        self,
        _group: Group,
        _previous: Option<Self::Cache>,
    ) -> Self::Cache {
    }
}
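
// Illustrative only: a minimal sketch of how a rendering primitive might
// implement [`Cached`]. The `Mesh` type and its `Arc`-based cache below are
// hypothetical; real backends choose what to store and how to reuse
// `previous` per [`Group`].
#[cfg(test)]
mod cached_impl_sketch {
    use super::{Cached, Group};
    use std::sync::Arc;

    // Hypothetical primitive used only for this sketch.
    #[derive(Clone)]
    struct Mesh {
        vertices: Vec<[f32; 2]>,
    }

    impl Cached for Mesh {
        // Share the cached data cheaply across frames.
        type Cache = Arc<Mesh>;

        fn load(cache: &Self::Cache) -> Self {
            // Restoring is just cloning the data behind the `Arc`.
            (**cache).clone()
        }

        fn cache(
            self,
            _group: Group,
            _previous: Option<Self::Cache>,
        ) -> Self::Cache {
            Arc::new(self)
        }
    }

    #[test]
    fn round_trip() {
        let mesh = Mesh {
            vertices: vec![[0.0, 0.0], [1.0, 1.0]],
        };

        let cached = mesh.cache(Group::unique(), None);
        let restored = Mesh::load(&cached);

        assert_eq!(restored.vertices.len(), 2);
    }
}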