arti_rpcserver/objmap/weakrefs.rs

//! Support for weak references.
//!
//! Currently, this is unused in Arti; we may eventually remove it, which will let us simplify our
//! code.
//!
//! In any case, we should not use this until we have a solid idea about how weak references should
//! behave; see #868.

#![allow(dead_code)]

use std::{
    any,
    sync::{Arc, Weak},
};

use super::{raw_addr_of, raw_addr_of_weak, GenIdx, ObjMap, TaggedAddr};
use tor_rpcbase as rpc;

/// A single entry for a weak `Object` stored in the generational arena.
pub(super) struct WeakArenaEntry {
    /// The `Weak` reference to the object that we're storing here.
    pub(super) obj: Weak<dyn rpc::Object>,
    /// The true `TypeId` of the object that `obj` refers to.
    ///
    /// See the [`TaggedAddr`] documentation for more info on
    /// why this is needed.
    id: any::TypeId,
}

impl WeakArenaEntry {
    /// Create a new `WeakArenaEntry` for a weak reference.
    pub(super) fn new(object: &Arc<dyn rpc::Object>) -> Self {
        let id = (**object).type_id();
        Self {
            obj: Arc::downgrade(object),
            id,
        }
    }

    /// Return true if this `WeakArenaEntry` is really present.
    ///
    /// Note that this function can produce false positives (if the entry's
    /// last strong reference is dropped in another thread), but it can
    /// never produce false negatives.
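    ///
    /// Illustrative sketch (not a doctest, since this type is crate-private;
    /// `MyObject` is a hypothetical stand-in for any type implementing `rpc::Object`):
    ///
    /// ```ignore
    /// let strong: Arc<dyn rpc::Object> = Arc::new(MyObject::default());
    /// let entry = WeakArenaEntry::new(&strong);
    /// assert!(entry.is_present());  // a strong reference still exists
    /// drop(strong);                 // drop the last strong reference...
    /// assert!(!entry.is_present()); // ...and the entry is no longer present
    /// ```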
    pub(super) fn is_present(&self) -> bool {
        // This is safe from false negatives because: if we can ever
        // observe strong_count == 0, then there is no way for anybody
        // else to "resurrect" the object.
        self.obj.strong_count() > 0
    }

    /// Return a strong reference to the object in this entry, if possible.
    pub(super) fn strong(&self) -> Option<Arc<dyn rpc::Object>> {
        Weak::upgrade(&self.obj)
    }

    /// Return the [`TaggedAddr`] that can be used to identify this entry's object.
    pub(super) fn tagged_addr(&self) -> TaggedAddr {
        TaggedAddr {
            addr: raw_addr_of_weak(&self.obj),
            type_id: self.id,
        }
    }
}

impl TaggedAddr {
    /// Return the `TaggedAddr` to uniquely identify `obj` over the course of
    /// its existence.
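    ///
    /// Illustrative sketch (not a doctest; `MyObject` is a hypothetical stand-in
    /// for any type implementing `rpc::Object`): two clones of one `Arc` refer to
    /// the same object, so they yield the same `TaggedAddr`.
    ///
    /// ```ignore
    /// let a: Arc<dyn rpc::Object> = Arc::new(MyObject::default());
    /// let b = Arc::clone(&a);
    /// assert!(TaggedAddr::for_object(&a) == TaggedAddr::for_object(&b));
    /// ```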
    pub(super) fn for_object(obj: &Arc<dyn rpc::Object>) -> Self {
        let type_id = (*obj).type_id();
        let addr = raw_addr_of(obj);
        TaggedAddr { addr, type_id }
    }
}

impl ObjMap {
    /// Reclaim unused space in this map's weak arena.
    ///
    /// This runs in `O(n)` time.
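    ///
    /// Illustrative sketch (not a doctest; it assumes a way to construct an empty
    /// map, shown here as a hypothetical `ObjMap::new()`, and uses `MyObject` as a
    /// stand-in for any type implementing `rpc::Object`):
    ///
    /// ```ignore
    /// let mut map = ObjMap::new();
    /// let obj: Arc<dyn rpc::Object> = Arc::new(MyObject::default());
    /// map.insert_weak(Arc::clone(&obj));
    /// drop(obj);  // The weak entry is now dead, but it still occupies a slot.
    /// map.tidy(); // Remove the dead slot and its `reverse_map` entry.
    /// assert!(map.reverse_map.is_empty());
    /// ```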
    pub(super) fn tidy(&mut self) {
        #[cfg(test)]
        {
            self.n_tidies += 1;
        }
        self.weak_arena.retain(|index, entry| {
            let present = entry.is_present();
            if !present {
                // For everything we are removing from the `arena`, we must also
                // remove it from `reverse_map`.
                let ptr = entry.tagged_addr();
                let found = self.reverse_map.remove(&ptr);
                debug_assert_eq!(found, Some(index));
            }
            present
        });
    }

    /// If needed, clean the weak arena and resize it.
    ///
    /// (We call this whenever we're about to add an entry.  This ensures that
    /// our insertion operations run in amortized `O(1)` time.)
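    ///
    /// For example, with a capacity of 8 and all 8 slots filled, `tidy()` runs
    /// first. If more than 4 entries survive, we reserve room for 8 additional
    /// entries, roughly doubling the capacity; if 4 or fewer survive, we leave
    /// the capacity alone, since there is already enough free space.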
    fn adjust_size(&mut self) {
        // If we're about to fill the arena...
        if self.weak_arena.len() >= self.weak_arena.capacity() {
            // ... we delete any dead `Weak` entries.
            self.tidy();
            // Then, if the arena is still above half-full, we double the
            // capacity of the arena.
            //
            // (We have to grow the arena even if tidy() removed _some_
            // entries, or else we might re-run tidy() too soon.  But we don't
            // want to grow the arena if tidy() removed _most_ entries, or some
            // normal usage patterns will lead to unbounded growth.)
            if self.weak_arena.len() > self.weak_arena.capacity() / 2 {
                self.weak_arena.reserve(self.weak_arena.capacity());
            }
        }
    }

    /// Ensure that there is a weak entry for `value` in self, and return an
    /// index for it.
    ///
    /// If there is no entry, create a weak entry.
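    ///
    /// Illustrative sketch (not a doctest; it assumes a hypothetical `ObjMap::new()`
    /// constructor and uses `MyObject` as a stand-in for any type implementing
    /// `rpc::Object`): inserting the same object twice reuses the existing entry.
    ///
    /// ```ignore
    /// let mut map = ObjMap::new();
    /// let obj: Arc<dyn rpc::Object> = Arc::new(MyObject::default());
    /// let _idx1 = map.insert_weak(Arc::clone(&obj));
    /// let _idx2 = map.insert_weak(Arc::clone(&obj));
    /// assert_eq!(map.weak_arena.len(), 1); // the existing weak entry was reused
    /// ```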
    #[allow(clippy::needless_pass_by_value)] // TODO: Decide whether to make this take a reference.
    pub(crate) fn insert_weak(&mut self, value: Arc<dyn rpc::Object>) -> GenIdx {
        let ptr = TaggedAddr::for_object(&value);
        if let Some(idx) = self.reverse_map.get(&ptr) {
            #[cfg(debug_assertions)]
            match self.weak_arena.get(*idx) {
                Some(entry) => debug_assert!(entry.tagged_addr() == ptr),
                None => panic!("Found a dangling reference"),
            }
            return GenIdx::Weak(*idx);
        }

        self.adjust_size();
        let idx = self.weak_arena.insert(WeakArenaEntry::new(&value));
        self.reverse_map.insert(ptr, idx);
        GenIdx::Weak(idx)
    }
}