// hashiverse_lib/client/timeline/recent_posts_pen.rs
//! # Scratch pad for just-authored local posts
//!
//! When the user hits "post", the resulting signed post must make its way out onto the
//! DHT before it can be read back through the usual bucket fetch. The network path
//! takes at least a round trip plus proof-of-work — far too long for "post disappears
//! for 10 seconds then reappears in my own timeline" to feel acceptable.
//!
//! `RecentPostsPen` closes that gap. Every locally-authored post is deposited here,
//! indexed by `(BucketType, base_id)`, with a short TTL (~10 minutes). Every timeline
//! walk consults it before diving into the bucket fetch, so the user's own posts surface
//! in their own timelines immediately. Old entries age out automatically, and
//! deduplication against the timeline's seen set prevents duplicate rendering once the
//! network fetch catches up.

15use bytes::Bytes;
16use std::collections::HashSet;
17
18use crate::tools::buckets::{BucketLocation, BucketType};
19use crate::tools::time::{DurationMillis, MILLIS_IN_MINUTE, TimeMillis};
20use crate::tools::types::Id;
21
22const RECENT_POSTS_PEN_TTL: DurationMillis = MILLIS_IN_MINUTE.const_mul(10);
23
24pub struct RecentPostsPenEntry {
25    pub bucket_location: BucketLocation,
26    pub post_id: Id,
27    pub encoded_post_bytes: Bytes,
28    pub time_millis: TimeMillis,
29}
30
31/// Short-lived scratch space for posts the local client has just submitted.
32///
33/// After a successful `submit_post`, the resulting commit tokens and encoded post bytes are
34/// recorded here. When any `SingleTimeline` calls `get_more_posts`, it consults the pen for
35/// entries whose `BucketLocation` matches the timeline being viewed. This ensures the user's
36/// own posts appear immediately — even before the post bundles have propagated through the
37/// network caches — and works across all timeline types (User, Hashtag, Mention, Reply, Sequel, etc.).
38///
39/// Entries expire after 10 minutes (by which time the network caches will have refreshed and
40/// the posts will appear naturally from the `PostBundleManager`). Deduplication against the
41/// `SingleTimeline`'s `post_ids_already_seen` set prevents a post from showing up twice once
42/// it does arrive from the network.
43pub struct RecentPostsPen {
44    entries: Vec<RecentPostsPenEntry>,
45}
46
47impl RecentPostsPen {
48    pub fn new() -> Self {
49        Self { entries: Vec::new() }
50    }
51
52    /// Add pen entries from commit tokens. Each token represents the post committed to a
53    /// particular timeline (User, Hashtag, Mention, etc.) — we store one entry per token.
54    pub fn add_all(&mut self, bucket_locations_and_post_ids: &[(BucketLocation, Id)], encoded_post_bytes: Bytes, time_millis: TimeMillis) {
55        for (bucket_location, post_id) in bucket_locations_and_post_ids {
56            self.entries.push(RecentPostsPenEntry {
57                bucket_location: bucket_location.clone(),
58                post_id: *post_id,
59                encoded_post_bytes: encoded_post_bytes.clone(),
60                time_millis,
61            });
62        }
63    }
64
65    /// Returns matching pen posts for the given timeline, excluding expired and already-seen entries.
66    /// Multiple entries for the same post_id may be returned (e.g. from multiple commit tokens);
67    /// `SingleTimeline` handles deduplication via `post_ids_already_seen`.
68    pub fn get_matching_posts(
69        &mut self,
70        bucket_type: BucketType,
71        base_id: &Id,
72        already_seen_ids: &HashSet<Id>,
73        current_time: TimeMillis,
74    ) -> Vec<(BucketLocation, Bytes, Id)> {
75        // Prune expired entries
76        let cutoff = current_time - RECENT_POSTS_PEN_TTL;
77        self.entries.retain(|entry| entry.time_millis >= cutoff);
78
79        let mut matching_posts: Vec<(BucketLocation, Bytes, Id)> = Vec::new();
80
81        for entry in &self.entries {
82            if entry.bucket_location.bucket_type != bucket_type || entry.bucket_location.base_id != *base_id {
83                continue;
84            }
85            if already_seen_ids.contains(&entry.post_id) {
86                continue;
87            }
88
89            matching_posts.push((entry.bucket_location.clone(), entry.encoded_post_bytes.clone(), entry.post_id));
90        }
91
92        matching_posts
93    }
94}
95
96#[cfg(test)]
97mod tests {
98    use super::*;
99    use crate::tools::time::MILLIS_IN_MINUTE;
100
101    fn make_entry(bucket_type: BucketType, base_id: Id, post_id: Id, time: TimeMillis) -> (BucketLocation, Id) {
102        let bucket_location = BucketLocation::new(bucket_type, base_id, MILLIS_IN_MINUTE, time).unwrap();
103        (bucket_location, post_id)
104    }
105
106    #[test]
107    fn test_matching_by_bucket_type_and_base_id() {
108        let mut pen = RecentPostsPen::new();
109        let base_id = Id::random();
110        let other_base_id = Id::random();
111        let post_id = Id::random();
112        let time = TimeMillis::from_epoch_offset_str("1M").unwrap();
113
114        let entries = vec![
115            make_entry(BucketType::User, base_id, post_id, time),
116            make_entry(BucketType::Hashtag, other_base_id, post_id, time),
117        ];
118        pen.add_all(&entries, Bytes::from_static(b"test post"), time);
119
120        let already_seen = HashSet::new();
121
122        // Should match User timeline for base_id
123        let result = pen.get_matching_posts(BucketType::User, &base_id, &already_seen, time);
124        assert_eq!(result.len(), 1);
125
126        // Should match Hashtag timeline for other_base_id
127        let result = pen.get_matching_posts(BucketType::Hashtag, &other_base_id, &already_seen, time);
128        assert_eq!(result.len(), 1);
129
130        // Should NOT match User timeline for other_base_id
131        let result = pen.get_matching_posts(BucketType::User, &other_base_id, &already_seen, time);
132        assert_eq!(result.len(), 0);
133
134        // Should NOT match Hashtag timeline for base_id
135        let result = pen.get_matching_posts(BucketType::Hashtag, &base_id, &already_seen, time);
136        assert_eq!(result.len(), 0);
137    }
138
139    #[test]
140    fn test_ttl_expiration() {
141        let mut pen = RecentPostsPen::new();
142        let base_id = Id::random();
143        let post_id = Id::random();
144        let time = TimeMillis::from_epoch_offset_str("1M").unwrap();
145
146        pen.add_all(&[make_entry(BucketType::User, base_id, post_id, time)], Bytes::from_static(b"post"), time);
147
148        let already_seen = HashSet::new();
149
150        // Still within TTL
151        let within_ttl = time + MILLIS_IN_MINUTE.const_mul(9);
152        let result = pen.get_matching_posts(BucketType::User, &base_id, &already_seen, within_ttl);
153        assert_eq!(result.len(), 1);
154
155        // Past TTL
156        let past_ttl = time + MILLIS_IN_MINUTE.const_mul(11);
157        let result = pen.get_matching_posts(BucketType::User, &base_id, &already_seen, past_ttl);
158        assert_eq!(result.len(), 0);
159    }
160
161    #[test]
162    fn test_multiple_tokens_same_post_returns_all() {
163        let mut pen = RecentPostsPen::new();
164        let base_id = Id::random();
165        let post_id = Id::random();
166        let time = TimeMillis::from_epoch_offset_str("1M").unwrap();
167
168        // 3 commit tokens from 3 different peers for the same post on the same timeline —
169        // the pen returns all of them; deduplication by post_id is SingleTimeline's job.
170        let entries = vec![
171            make_entry(BucketType::User, base_id, post_id, time),
172            make_entry(BucketType::User, base_id, post_id, time),
173            make_entry(BucketType::User, base_id, post_id, time),
174        ];
175        pen.add_all(&entries, Bytes::from_static(b"post"), time);
176
177        let already_seen = HashSet::new();
178        let result = pen.get_matching_posts(BucketType::User, &base_id, &already_seen, time);
179        assert_eq!(result.len(), 3);
180    }
181
182    #[test]
183    fn test_already_seen_filtering() {
184        let mut pen = RecentPostsPen::new();
185        let base_id = Id::random();
186        let post_id = Id::random();
187        let time = TimeMillis::from_epoch_offset_str("1M").unwrap();
188
189        pen.add_all(&[make_entry(BucketType::User, base_id, post_id, time)], Bytes::from_static(b"post"), time);
190
191        let mut already_seen = HashSet::new();
192        already_seen.insert(post_id);
193
194        let result = pen.get_matching_posts(BucketType::User, &base_id, &already_seen, time);
195        assert_eq!(result.len(), 0);
196    }
197
198    #[test]
199    fn test_single_post_multiple_timelines() {
200        let mut pen = RecentPostsPen::new();
201        let user_id = Id::random();
202        let hashtag_id = Id::random();
203        let mention_id = Id::random();
204        let post_id = Id::random();
205        let time = TimeMillis::from_epoch_offset_str("1M").unwrap();
206
207        let entries = vec![
208            make_entry(BucketType::User, user_id, post_id, time),
209            make_entry(BucketType::Hashtag, hashtag_id, post_id, time),
210            make_entry(BucketType::Mention, mention_id, post_id, time),
211        ];
212        pen.add_all(&entries, Bytes::from_static(b"post"), time);
213
214        let already_seen = HashSet::new();
215
216        // Each timeline should independently find the post
217        assert_eq!(pen.get_matching_posts(BucketType::User, &user_id, &already_seen, time).len(), 1);
218        assert_eq!(pen.get_matching_posts(BucketType::Hashtag, &hashtag_id, &already_seen, time).len(), 1);
219        assert_eq!(pen.get_matching_posts(BucketType::Mention, &mention_id, &already_seen, time).len(), 1);
220    }
221}