Skip to content

Commit

Permalink
Merge rewrite deck serialization, timeline cache, add algo timelines #…
Browse files Browse the repository at this point in the history
…712

William Casarin (19):
      algos: introduce last_n_per_pubkey_from_tags
      wip algo timelines
      Initial token parser combinator
      token_parser: unify parsing and serialization
      token_serializer: introduce TokenWriter
      token_parser: simplify AddColumnRoute serialization
      tokens: add a more advanced tokens parser
      tokens: add AccountsRoute token serializer
      tokens: add PubkeySource and ListKinds token serializer
      tokens: add TimelineRoute token serializer
      tokens: initial Route token serializer
      add tokenator crate
      note_id: add hex helpers for root notes
      tokens: add token serialization for AlgoTimeline
      tokens: add token serialization for TimelineKind
      tokens: switch over to using token serialization
      Switch to unified timeline cache via TimelineKinds
      hashtags: click hashtags to open them
  • Loading branch information
jb55 committed Feb 6, 2025
2 parents ae85f2d + ac10c7e commit fd030f5
Show file tree
Hide file tree
Showing 46 changed files with 2,249 additions and 2,302 deletions.
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
.buildcmd
build.log
perf.data
rusty-tags.vi
notedeck-settings
perf.data.old
crates/notedeck_chrome/android/app/build
.privenv
Expand Down
8 changes: 8 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ members = [
"crates/notedeck_chrome",
"crates/notedeck_columns",

"crates/enostr",
"crates/enostr", "crates/tokenator",
]

[workspace.dependencies]
Expand Down Expand Up @@ -35,6 +35,7 @@ nostrdb = { git = "https://github.com/damus-io/nostrdb-rs", rev = "2111948b078b2
notedeck = { path = "crates/notedeck" }
notedeck_chrome = { path = "crates/notedeck_chrome" }
notedeck_columns = { path = "crates/notedeck_columns" }
tokenator = { path = "crates/tokenator" }
open = "5.3.0"
poll-promise = { version = "0.3.0", features = ["tokio"] }
puffin = { git = "https://github.com/jb55/puffin", package = "puffin", rev = "70ff86d5503815219b01a009afd3669b7903a057" }
Expand Down
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ check:
cargo check

tags: fake
find . -type d -name target -prune -o -type f -name '*.rs' -print | xargs ctags
rusty-tags vi

jni: fake
cargo ndk --target arm64-v8a -o $(ANDROID_DIR)/app/src/main/jniLibs/ build --profile release
Expand Down
2 changes: 1 addition & 1 deletion crates/enostr/src/note.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ pub struct NoteId([u8; 32]);

impl fmt::Debug for NoteId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.hex())
write!(f, "NoteId({})", self.hex())
}
}

Expand Down
6 changes: 6 additions & 0 deletions crates/notedeck/src/accounts.rs
Original file line number Diff line number Diff line change
Expand Up @@ -414,6 +414,12 @@ impl Accounts {
.or_else(|| self.accounts.iter().find_map(|a| a.to_full()))
}

/// Get the selected account's pubkey as bytes. Common operation so
/// we make it a helper here.
pub fn selected_account_pubkey_bytes(&self) -> Option<&[u8; 32]> {
    let selected = self.get_selected_account()?;
    Some(selected.pubkey.bytes())
}

pub fn get_selected_account(&self) -> Option<&UserAccount> {
if let Some(account_index) = self.currently_selected_account {
if let Some(account) = self.get_account(account_index) {
Expand Down
3 changes: 3 additions & 0 deletions crates/notedeck/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,9 @@ impl From<String> for Error {
pub enum FilterError {
#[error("empty contact list")]
EmptyContactList,

#[error("filter not ready")]
FilterNotReady,
}

#[derive(Debug, Eq, PartialEq, Copy, Clone, thiserror::Error)]
Expand Down
67 changes: 64 additions & 3 deletions crates/notedeck/src/filter.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
use crate::error::{Error, FilterError};
use crate::note::NoteRef;
use crate::Result;
use nostrdb::{Filter, FilterBuilder, Note, Subscription};
use std::collections::HashMap;
use tracing::{debug, warn};
Expand All @@ -24,7 +23,7 @@ pub struct FilterStates {
}

impl FilterStates {
pub fn get(&mut self, relay: &str) -> &FilterState {
pub fn get_mut(&mut self, relay: &str) -> &FilterState {
// if our initial state is ready, then just use that
if let FilterState::Ready(_) = self.initial_state {
&self.initial_state
Expand Down Expand Up @@ -190,13 +189,67 @@ impl FilteredTags {
}
}

/// Create a "last N notes per pubkey" query.
pub fn last_n_per_pubkey_from_tags(
note: &Note,
kind: u64,
notes_per_pubkey: u64,
) -> Result<Vec<Filter>, Error> {
let mut filters: Vec<Filter> = vec![];

for tag in note.tags() {
// TODO: fix arbitrary MAX_FILTER limit in nostrdb
if filters.len() == 15 {
break;
}

if tag.count() < 2 {
continue;
}

let t = if let Some(t) = tag.get_unchecked(0).variant().str() {
t
} else {
continue;
};

if t == "p" {
let author = if let Some(author) = tag.get_unchecked(1).variant().id() {
author
} else {
continue;
};

let mut filter = Filter::new();
filter.start_authors_field()?;
filter.add_id_element(author)?;
filter.end_field();
filters.push(filter.kinds([kind]).limit(notes_per_pubkey).build());
} else if t == "t" {
let hashtag = if let Some(hashtag) = tag.get_unchecked(1).variant().str() {
hashtag
} else {
continue;
};

let mut filter = Filter::new();
filter.start_tags_field('t')?;
filter.add_str_element(hashtag)?;
filter.end_field();
filters.push(filter.kinds([kind]).limit(notes_per_pubkey).build());
}
}

Ok(filters)
}

/// Create a filter from tags. This can be used to create a filter
/// from a contact list
pub fn filter_from_tags(
note: &Note,
add_pubkey: Option<&[u8; 32]>,
with_hashtags: bool,
) -> Result<FilteredTags> {
) -> Result<FilteredTags, Error> {
let mut author_filter = Filter::new();
let mut hashtag_filter = Filter::new();
let mut author_res: Option<FilterBuilder> = None;
Expand Down Expand Up @@ -284,3 +337,11 @@ pub fn filter_from_tags(
hashtags: hashtag_res,
})
}

/// Rebuild each filter in `raw` with an added `since` timestamp,
/// returning the new filters. The originals are left untouched.
pub fn make_filters_since(raw: &[Filter], since: u64) -> Vec<Filter> {
    raw.iter()
        .map(|filter| Filter::copy_from(filter).since(since).build())
        .collect()
}
17 changes: 16 additions & 1 deletion crates/notedeck/src/note.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,23 @@ use enostr::NoteId;
use nostrdb::{Ndb, Note, NoteKey, QueryResult, Transaction};
use std::borrow::Borrow;
use std::cmp::Ordering;
use std::fmt;

#[derive(Debug, Eq, PartialEq, Copy, Clone, Hash)]
pub struct NoteRef {
pub key: NoteKey,
pub created_at: u64,
}

#[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)]
#[derive(Clone, Copy, Eq, PartialEq, Hash)]
pub struct RootNoteIdBuf([u8; 32]);

// Manual Debug impl so the id renders as "RootNoteIdBuf(<hex>)" rather
// than a raw 32-byte array dump.
impl fmt::Debug for RootNoteIdBuf {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "RootNoteIdBuf({})", self.hex())
    }
}

#[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)]
pub struct RootNoteId<'a>(&'a [u8; 32]);

Expand All @@ -34,6 +41,10 @@ impl RootNoteIdBuf {
root_note_id_from_selected_id(ndb, note_cache, txn, id).map(|rnid| Self(*rnid.bytes()))
}

/// Lowercase hex encoding of the root note id bytes.
pub fn hex(&self) -> String {
    hex::encode(self.bytes())
}

pub fn new_unsafe(id: [u8; 32]) -> Self {
Self(id)
}
Expand All @@ -52,6 +63,10 @@ impl<'a> RootNoteId<'a> {
self.0
}

/// Lowercase hex encoding of the borrowed root note id bytes.
pub fn hex(&self) -> String {
    hex::encode(self.bytes())
}

pub fn to_owned(&self) -> RootNoteIdBuf {
RootNoteIdBuf::new_unsafe(*self.bytes())
}
Expand Down
15 changes: 7 additions & 8 deletions crates/notedeck_chrome/src/notedeck.rs
Original file line number Diff line number Diff line change
Expand Up @@ -183,21 +183,20 @@ mod tests {
.column(0)
.router()
.top()
.timeline_id();
.timeline_id()
.unwrap();

let tl2 = app
.columns(app_ctx.accounts)
.column(1)
.router()
.top()
.timeline_id();
.timeline_id()
.unwrap();

assert_eq!(tl1.is_some(), true);
assert_eq!(tl2.is_some(), true);

let timelines = app.columns(app_ctx.accounts).timelines();
assert!(timelines[0].kind.is_notifications());
assert!(timelines[1].kind.is_contacts());
let timelines = app.timeline_cache.timelines.len() == 2;
assert!(app.timeline_cache.timelines.get(&tl1).is_some());
assert!(app.timeline_cache.timelines.get(&tl2).is_some());

rmrf(tmpdir);
}
Expand Down
1 change: 1 addition & 0 deletions crates/notedeck_columns/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ crate-type = ["lib", "cdylib"]

[dependencies]
notedeck = { workspace = true }
tokenator = { workspace = true }
bitflags = { workspace = true }
dirs = { workspace = true }
eframe = { workspace = true }
Expand Down
75 changes: 75 additions & 0 deletions crates/notedeck_columns/src/accounts/route.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
use super::{AccountLoginResponse, AccountsViewResponse};
use serde::{Deserialize, Serialize};
use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};

pub enum AccountsRouteResponse {
Accounts(AccountsViewResponse),
Expand All @@ -11,3 +12,77 @@ pub enum AccountsRoute {
Accounts,
AddAccount,
}

impl AccountsRoute {
/// Route tokens use in both serialization and deserialization
fn tokens(&self) -> &'static [&'static str] {
match self {
Self::Accounts => &["accounts", "show"],
Self::AddAccount => &["accounts", "new"],
}
}
}

impl TokenSerializable for AccountsRoute {
    /// Write this route's token sequence (e.g. "accounts", "show")
    /// to the writer.
    fn serialize_tokens(&self, writer: &mut TokenWriter) {
        for token in self.tokens() {
            writer.write_token(token);
        }
    }

    /// Parse an AccountsRoute from a token stream. Requires a leading
    /// "accounts" token, then tries each route alternative in order.
    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
        // NOTE(review): peek_parse_token appears to check the token
        // without consuming it — each alternative below re-parses the
        // full sequence including "accounts". Confirm against tokenator.
        parser.peek_parse_token("accounts")?;

        TokenParser::alt(
            parser,
            &[
                |p| parse_accounts_route(p, AccountsRoute::Accounts),
                |p| parse_accounts_route(p, AccountsRoute::AddAccount),
            ],
        )
    }
}

/// Succeed only if every token of `route` can be parsed in sequence,
/// yielding `route` on success.
fn parse_accounts_route<'a>(
    parser: &mut TokenParser<'a>,
    route: AccountsRoute,
) -> Result<AccountsRoute, ParseError<'a>> {
    parser.parse_all(|p| {
        route
            .tokens()
            .iter()
            .try_for_each(|expected| p.parse_token(expected).map(|_| ()))?;
        Ok(route)
    })
}

#[cfg(test)]
mod tests {
    use super::*;
    use tokenator::{TokenParser, TokenSerializable, TokenWriter};

    /// Round-trip check: parse `data_str` into a route, compare it to
    /// `expected`, then serialize it back and compare to the original
    /// string. Shared by both route tests to avoid duplicating the body.
    fn assert_route_roundtrip(data_str: &str, expected: AccountsRoute) {
        let data = data_str.split(":").collect::<Vec<&str>>();
        let mut token_writer = TokenWriter::default();
        let mut parser = TokenParser::new(&data);
        let parsed = AccountsRoute::parse_from_tokens(&mut parser).unwrap();
        parsed.serialize_tokens(&mut token_writer);
        assert_eq!(expected, parsed);
        assert_eq!(token_writer.str(), data_str);
    }

    #[test]
    fn test_accounts_route_serialize() {
        assert_route_roundtrip("accounts:show", AccountsRoute::Accounts);
    }

    #[test]
    fn test_new_accounts_route_serialize() {
        assert_route_roundtrip("accounts:new", AccountsRoute::AddAccount);
    }
}
Loading

0 comments on commit fd030f5

Please sign in to comment.