Feed pagination, duplicate identity detection, pkarr leak fix, Android SAF
Feed pagination:
- Cursor-based pagination: get_feed_page/get_all_posts_page (20 posts/page; see the paging sketch below)
- Batched engagement queries (3 bulk SQL queries instead of 4 per post)
- IntersectionObserver for infinite scroll (sentinel at midpoint)
- Viewport-based media loading (blobs only load when a post enters view)
- Pre-fetch the next page immediately after the current page renders

Duplicate identity detection:
- Anchor detects when a NodeId is already mesh-connected during the initial exchange and sets the duplicate_active flag in its response
- Client skips sync tasks when a duplicate is detected
- Frontend shows a red warning banner

Privacy:
- Fixed pkarr leak: clear_address_lookup() removes the default dns.iroh.link publishing; only mDNS (local-network) discovery remains enabled

Android:
- SAF integration via tauri-plugin-android-fs: exports open the native "Save As" dialog so users can save to Downloads/Drive/etc.
- Download/export paths use the app data dir on Android (writable)
- File picker gated behind a desktop cfg (blocking_pick is not available on Android)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
parent 5e7eed9638
commit 288b53ffb1

12 changed files with 910 additions and 120 deletions
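A minimal sketch of the paging loop the new commands support, assuming the FeedPageDto { posts, has_more, oldest_ms } shape added in this commit; fetch_page is a hypothetical in-memory stand-in for the get_feed_page command:

// Drive the cursor contract: pass the previous page's oldest_ms as
// before_ms, stop when has_more is false.
struct FeedPage {
    posts: Vec<String>,     // stand-in for PostDto
    has_more: bool,
    oldest_ms: Option<u64>, // timestamp of the oldest post in this page
}

// Stand-in for get_feed_page: returns posts strictly older than
// before_ms, newest first, at most `limit` per page.
fn fetch_page(data: &[(u64, String)], before_ms: Option<u64>, limit: usize) -> FeedPage {
    let cutoff = before_ms.unwrap_or(u64::MAX);
    let mut posts: Vec<&(u64, String)> =
        data.iter().filter(|(ts, _)| *ts < cutoff).collect();
    posts.sort_by(|a, b| b.0.cmp(&a.0)); // newest first
    let has_more = posts.len() > limit;
    let page: Vec<&(u64, String)> = posts.into_iter().take(limit).collect();
    FeedPage {
        oldest_ms: page.last().map(|(ts, _)| *ts),
        posts: page.iter().map(|(_, c)| c.clone()).collect(),
        has_more,
    }
}

fn main() {
    let data: Vec<(u64, String)> =
        (1..=45).map(|i| (i, format!("post {}", i))).collect();
    let mut cursor = None;
    loop {
        let page = fetch_page(&data, cursor, 20);
        println!("got {} posts", page.posts.len());
        if !page.has_more {
            break;
        }
        cursor = page.oldest_ms; // next request: strictly older posts
    }
}

One caveat of single-column keyset cursors: the strict timestamp_ms < ?1 comparison skips any posts that share the cursor's exact millisecond timestamp.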
.gitignore (vendored, 1 change)

@@ -31,3 +31,4 @@ CLAUDE.md
 # Nextcloud sync
 .sync-exclude.lst
+*.zip
Cargo.lock (generated, 21 changes)

@@ -2732,7 +2732,7 @@ checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2"

 [[package]]
 name = "itsgoin-cli"
-version = "0.5.0"
+version = "0.5.1"
 dependencies = [
  "anyhow",
  "hex",

@@ -2744,7 +2744,7 @@ dependencies = [

 [[package]]
 name = "itsgoin-core"
-version = "0.5.0"
+version = "0.5.1"
 dependencies = [
  "anyhow",
  "base64 0.22.1",

@@ -2767,7 +2767,7 @@ dependencies = [

 [[package]]
 name = "itsgoin-desktop"
-version = "0.5.0"
+version = "0.5.1"
 dependencies = [
  "anyhow",
  "base64 0.22.1",

@@ -2780,6 +2780,7 @@ dependencies = [
  "serde_json",
  "tauri",
  "tauri-build",
+ "tauri-plugin-android-fs",
  "tauri-plugin-dialog",
  "tauri-plugin-notification",
  "tokio",

@@ -5623,6 +5624,20 @@ dependencies = [
  "walkdir",
 ]

+[[package]]
+name = "tauri-plugin-android-fs"
+version = "8.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c63a3cf10132c76d0194cb229411a12d9af4493f15695a27bbe5ab184f20d711"
+dependencies = [
+ "serde",
+ "serde_json",
+ "tauri",
+ "tauri-plugin",
+ "tauri-plugin-fs",
+ "thiserror 2.0.18",
+]
+
 [[package]]
 name = "tauri-plugin-dialog"
 version = "2.7.0"
@@ -74,7 +74,7 @@ pub type IntroId = [u8; 16];

 /// Result of initial exchange: accepted or refused with optional redirect peer.
 pub enum ExchangeResult {
-    Accepted,
+    Accepted { duplicate_active: bool },
     Refused { redirect: Option<PeerWithAddress> },
 }

@@ -1519,6 +1519,7 @@ impl ConnectionManager {
                 http_addr: self.http_addr.clone(),
                 device_role: None,
                 cache_pressure: None,
+                duplicate_active: None,
             }
         };

@@ -1658,6 +1659,7 @@ impl ConnectionManager {
                 http_addr: self.http_addr.clone(),
                 device_role: None,
                 cache_pressure: None,
+                duplicate_active: None,
             }
         };

@@ -3937,7 +3939,7 @@ impl ConnectionManager {
             let session_conn = session.connection.clone();
             drop(cm); // release lock before async work
             match initial_exchange_connect(&storage_clone, &our_node_id, &session_conn, requester, None, our_nat_type, our_http_capable, our_http_addr.clone(), None, None).await {
-                Ok(ExchangeResult::Accepted) => {
+                Ok(ExchangeResult::Accepted { .. }) => {
                     tracing::info!(peer = hex::encode(requester), "Target-side: initial exchange after hole punch");
                 }
                 Ok(ExchangeResult::Refused { .. }) => {

@@ -5474,11 +5476,13 @@ impl ConnectionManager {
                 ConnectionManager::handle_pull_request_unlocked(&storage, our_node_id, remote_node_id, recv, send).await?;
             }
             MessageType::InitialExchange => {
-                let (storage, our_node_id, anchor_addr, our_nat_type, our_http_capable, our_http_addr) = {
+                let (storage, our_node_id, anchor_addr, our_nat_type, our_http_capable, our_http_addr, is_duplicate) = {
                     let cm = conn_mgr.lock().await;
-                    (cm.storage_ref(), *cm.our_node_id(), cm.build_anchor_advertised_addr(), cm.nat_type(), cm.http_capable, cm.http_addr.clone())
+                    // Duplicate identity detection: is this NodeId already mesh-connected?
+                    let dup = cm.connections.contains_key(&remote_node_id);
+                    (cm.storage_ref(), *cm.our_node_id(), cm.build_anchor_advertised_addr(), cm.nat_type(), cm.http_capable, cm.http_addr.clone(), dup)
                 };
-                initial_exchange_accept(&storage, &our_node_id, send, recv, remote_node_id, anchor_addr, None, our_nat_type, our_http_capable, our_http_addr, None, None)
+                initial_exchange_accept(&storage, &our_node_id, send, recv, remote_node_id, anchor_addr, None, our_nat_type, our_http_capable, our_http_addr, None, None, is_duplicate)
                     .await?;
             }
             MessageType::AddressRequest => {

@@ -8331,6 +8335,7 @@ pub async fn initial_exchange_connect(
             http_addr: our_http_addr,
             device_role: our_device_role.map(|r| r.as_str().to_string()),
             cache_pressure: our_cache_pressure,
+            duplicate_active: None,
         }
     };

@@ -8349,8 +8354,12 @@ pub async fn initial_exchange_connect(
             anyhow::bail!("expected InitialExchange, got {:?}", msg_type);
         }
         let their_payload: InitialExchangePayload = read_payload(&mut recv, MAX_PAYLOAD).await?;
+        let dup = their_payload.duplicate_active.unwrap_or(false);
+        if dup {
+            tracing::warn!(peer = hex::encode(remote_node_id), "Anchor reports duplicate identity active on network");
+        }
         process_exchange_payload(storage, our_node_id, &remote_node_id, &their_payload).await?;
-        Ok(ExchangeResult::Accepted)
+        Ok(ExchangeResult::Accepted { duplicate_active: dup })
     };

     match tokio::time::timeout(std::time::Duration::from_secs(10), exchange_fut).await {

@@ -8377,6 +8386,7 @@ pub async fn initial_exchange_accept(
     our_http_addr: Option<String>,
     our_device_role: Option<crate::types::DeviceRole>,
     our_cache_pressure: Option<u8>,
+    duplicate_detected: bool,
 ) -> anyhow::Result<()> {
     let their_payload: InitialExchangePayload = read_payload(&mut recv, MAX_PAYLOAD).await?;

@@ -8404,9 +8414,14 @@ pub async fn initial_exchange_accept(
             http_addr: our_http_addr,
             device_role: our_device_role.map(|r| r.as_str().to_string()),
             cache_pressure: our_cache_pressure,
+            duplicate_active: if duplicate_detected { Some(true) } else { None },
         }
     };

+    if duplicate_detected {
+        tracing::warn!(peer = hex::encode(remote_node_id), "Duplicate identity detected — notifying connecting node");
+    }
+
     write_typed_message(&mut send, MessageType::InitialExchange, &our_payload).await?;
     send.finish()?;
@@ -46,6 +46,8 @@ pub struct Network {
     bind_addr: Option<SocketAddr>,
     /// CDN replication role: determines budget limits and pull ordering
     device_role: DeviceRole,
+    /// True if an anchor reported this identity is already connected from elsewhere
+    pub duplicate_detected: Arc<AtomicBool>,
 }

 fn is_public_ip(ip: IpAddr) -> bool {

@@ -100,9 +102,10 @@ impl Network {
         let mut builder = iroh::Endpoint::builder()
             .secret_key(secret_key)
             .relay_mode(iroh::RelayMode::Disabled)
-            .alpns(vec![ALPN_V2.to_vec()]);
+            .alpns(vec![ALPN_V2.to_vec()])
+            .clear_address_lookup(); // Remove default pkarr + DNS (no dns.iroh.link publishing)

-        // mDNS LAN discovery: enables automatic peer discovery on local network
+        // mDNS LAN discovery only: enables automatic peer discovery on local network
         builder = builder.address_lookup(
             iroh::address_lookup::MdnsAddressLookupBuilder::default(),
         );

@@ -271,6 +274,7 @@ impl Network {
             has_public_v6,
             bind_addr,
             device_role,
+            duplicate_detected: Arc::new(AtomicBool::new(false)),
         })
     }

@@ -672,7 +676,7 @@ impl Network {
     let our_nat_type = conn_handle.nat_type().await;
     let our_http_capable = conn_handle.is_http_capable();
     let our_http_addr = conn_handle.http_addr();
-    match initial_exchange_accept(storage, &our_node_id, send, recv, remote_node_id, anchor_addr, Some(remote_sock), our_nat_type, our_http_capable, our_http_addr, conn_handle.device_role(), None).await {
+    match initial_exchange_accept(storage, &our_node_id, send, recv, remote_node_id, anchor_addr, Some(remote_sock), our_nat_type, our_http_capable, our_http_addr, conn_handle.device_role(), None, false).await {
         Ok(()) => {
             info!(peer = hex::encode(remote_node_id), "Initial exchange complete (upgraded to mesh)");
             conn_handle.log_activity(ActivityLevel::Info, ActivityCategory::Connection, format!("Upgraded {} to mesh", &hex::encode(remote_node_id)[..8]), Some(remote_node_id));

@@ -723,7 +727,12 @@ impl Network {

     // Initial exchange WITHOUT holding conn_mgr lock
     match initial_exchange_connect(&self.storage, &self.our_node_id, &conn, peer_id, anchor_addr, our_nat_type, self.is_http_capable(), self.http_addr(), Some(self.device_role), None).await? {
-        ExchangeResult::Accepted => {
+        ExchangeResult::Accepted { duplicate_active } => {
+            if duplicate_active {
+                self.duplicate_detected.store(true, std::sync::atomic::Ordering::Relaxed);
+                warn!(peer = hex::encode(peer_id), "Duplicate identity detected by anchor — this identity is active elsewhere");
+                self.log_activity(ActivityLevel::Warn, ActivityCategory::Connection, "Duplicate identity active on network".into(), None);
+            }
             // Spawn the per-connection stream loop
             let conn_data = self.conn_handle.get_connection_map().await;
             if let Some((_, conn, _, last_activity)) = conn_data.into_iter().find(|(nid, _, _, _)| *nid == peer_id) {

@@ -1365,7 +1374,7 @@ impl Network {
     let our_nat_type = self.conn_handle.nat_type().await;

     match initial_exchange_connect(&self.storage, &self.our_node_id, &conn, peer_id, anchor_addr, our_nat_type, self.is_http_capable(), self.http_addr(), Some(self.device_role), None).await {
-        Ok(ExchangeResult::Accepted) => {
+        Ok(ExchangeResult::Accepted { .. }) => {
             self.conn_handle.register_connection(peer_id, conn.clone(), vec![], PeerSlotKind::Local).await;
             {
                 let s = self.storage.get().await;

@@ -1470,7 +1479,7 @@ impl Network {
     let conn = self.conn_handle.get_connection(peer_id).await;
     if let Some(conn) = conn {
         match initial_exchange_connect(&self.storage, &self.our_node_id, &conn, *peer_id, anchor_addr.clone(), our_nat_type, self.is_http_capable(), self.http_addr(), Some(self.device_role), None).await {
-            Ok(ExchangeResult::Accepted) => {}
+            Ok(ExchangeResult::Accepted { .. }) => {}
             Ok(ExchangeResult::Refused { redirect }) => {
                 debug!(peer = hex::encode(peer_id), "Auto-connect refused, disconnecting");
                 self.conn_handle.disconnect_peer(peer_id).await;
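The duplicate_detected flag above is a set-once latch: the exchange path stores true and other tasks only poll it, so Ordering::Relaxed is sufficient on both sides because no other data is published through the flag. A minimal sketch of the pattern; the names are illustrative:

use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};

fn main() {
    let duplicate_detected = Arc::new(AtomicBool::new(false));

    // Networking task: an anchor reported duplicate_active = true.
    let writer = Arc::clone(&duplicate_detected);
    std::thread::spawn(move || {
        writer.store(true, Ordering::Relaxed);
    })
    .join()
    .unwrap();

    // Sync task: the flag carries no payload, it only gates behavior,
    // so Relaxed ordering is enough.
    if duplicate_detected.load(Ordering::Relaxed) {
        println!("duplicate identity active; skipping sync tasks");
    }
}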
@@ -1,6 +1,6 @@
 use std::net::SocketAddr;
 use std::path::{Path, PathBuf};
-use std::sync::atomic::{AtomicU64, Ordering as AtomicOrdering};
+use std::sync::atomic::{AtomicBool, AtomicU64, Ordering as AtomicOrdering};
 use std::sync::Arc;

 use tracing::{debug, info, warn};

@@ -30,6 +30,8 @@ pub struct Node {
     pub blob_store: Arc<BlobStore>,
     secret_seed: [u8; 32],
     bootstrap_anchors: tokio::sync::Mutex<Vec<(NodeId, iroh::EndpointAddr)>>,
+    /// True if an anchor reported another instance of this identity is already active
+    pub duplicate_detected: Arc<AtomicBool>,
     #[allow(dead_code)]
     profile: DeviceProfile,
     pub activity_log: Arc<std::sync::Mutex<ActivityLog>>,

@@ -136,6 +138,7 @@ impl Node {
             blob_store,
             secret_seed,
             bootstrap_anchors: tokio::sync::Mutex::new(Vec::new()),
+            duplicate_detected: Arc::new(AtomicBool::new(false)),
             profile,
             activity_log: activity_log_ref,
             last_rebalance_ms,

@@ -927,6 +930,34 @@ impl Node {
         Ok(self.decrypt_posts(raw, &group_seeds))
     }

+    pub async fn get_feed_page(
+        &self,
+        before_ms: Option<u64>,
+        limit: usize,
+    ) -> anyhow::Result<Vec<(PostId, Post, PostVisibility, Option<String>)>> {
+        let (raw, group_seeds) = {
+            let storage = self.storage.get().await;
+            let posts = storage.get_feed_page(before_ms, limit)?;
+            let seeds = storage.get_all_group_seeds_map().unwrap_or_default();
+            (posts, seeds)
+        };
+        Ok(self.decrypt_posts(raw, &group_seeds))
+    }
+
+    pub async fn get_all_posts_page(
+        &self,
+        before_ms: Option<u64>,
+        limit: usize,
+    ) -> anyhow::Result<Vec<(PostId, Post, PostVisibility, Option<String>)>> {
+        let (raw, group_seeds) = {
+            let storage = self.storage.get().await;
+            let posts = storage.list_posts_page(before_ms, limit)?;
+            let seeds = storage.get_all_group_seeds_map().unwrap_or_default();
+            (posts, seeds)
+        };
+        Ok(self.decrypt_posts(raw, &group_seeds))
+    }
+
     fn decrypt_posts(
         &self,
         posts: Vec<(PostId, Post, PostVisibility)>,
@@ -184,6 +184,9 @@ pub struct InitialExchangePayload {
     /// CDN cache pressure: 0-255 availability score (255 = lots of capacity)
     #[serde(default)]
     pub cache_pressure: Option<u8>,
+    /// Set by anchor when it detects this NodeId is already connected from a different address
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub duplicate_active: Option<bool>,
 }

 /// Incremental N1/N2 changes
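#[serde(default)] is what keeps the new field backward compatible: payloads from peers that predate duplicate_active simply deserialize to None, and skip_serializing_if keeps the key off the wire when unset. A small round-trip sketch of that behavior, using JSON purely for illustration (the actual payload encoding may differ):

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Payload {
    cache_pressure: Option<u8>,
    // New field: absent from old peers' payloads, omitted when unset.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    duplicate_active: Option<bool>,
}

fn main() {
    // A payload from a peer that predates the field still deserializes.
    let old: Payload = serde_json::from_str(r#"{"cache_pressure":200}"#).unwrap();
    assert_eq!(old.duplicate_active, None);

    // When unset, the key never appears on the wire.
    let ours = Payload { cache_pressure: Some(200), duplicate_active: None };
    assert_eq!(serde_json::to_string(&ours).unwrap(), r#"{"cache_pressure":200}"#);
}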
@@ -892,6 +892,158 @@ impl Storage {
         Ok(posts)
     }

+    /// Feed: paginated — posts from followed users, cursor-based by timestamp
+    pub fn get_feed_page(&self, before_ms: Option<u64>, limit: usize) -> anyhow::Result<Vec<(PostId, Post, PostVisibility)>> {
+        let sql = if before_ms.is_some() {
+            "SELECT p.id, p.author, p.content, p.attachments, p.timestamp_ms, p.visibility
+             FROM posts p INNER JOIN follows f ON p.author = f.node_id
+             WHERE p.timestamp_ms < ?1
+             ORDER BY p.timestamp_ms DESC LIMIT ?2"
+        } else {
+            "SELECT p.id, p.author, p.content, p.attachments, p.timestamp_ms, p.visibility
+             FROM posts p INNER JOIN follows f ON p.author = f.node_id
+             ORDER BY p.timestamp_ms DESC LIMIT ?2"
+        };
+        let mut stmt = self.conn.prepare(sql)?;
+        let rows = if let Some(bms) = before_ms {
+            stmt.query_map(rusqlite::params![bms as i64, limit as i64], Self::parse_post_row)?
+        } else {
+            stmt.query_map(rusqlite::params![i64::MAX, limit as i64], Self::parse_post_row)?
+        };
+        Self::collect_posts(rows)
+    }
+
+    /// All posts: paginated — cursor-based by timestamp
+    pub fn list_posts_page(&self, before_ms: Option<u64>, limit: usize) -> anyhow::Result<Vec<(PostId, Post, PostVisibility)>> {
+        let sql = if before_ms.is_some() {
+            "SELECT id, author, content, attachments, timestamp_ms, visibility
+             FROM posts WHERE timestamp_ms < ?1
+             ORDER BY timestamp_ms DESC LIMIT ?2"
+        } else {
+            "SELECT id, author, content, attachments, timestamp_ms, visibility
+             FROM posts ORDER BY timestamp_ms DESC LIMIT ?2"
+        };
+        let mut stmt = self.conn.prepare(sql)?;
+        let rows = if let Some(bms) = before_ms {
+            stmt.query_map(rusqlite::params![bms as i64, limit as i64], Self::parse_post_row)?
+        } else {
+            stmt.query_map(rusqlite::params![i64::MAX, limit as i64], Self::parse_post_row)?
+        };
+        Self::collect_posts(rows)
+    }
+
+    /// Batch: reaction counts for multiple posts at once
+    pub fn get_reaction_counts_batch(&self, post_ids: &[PostId], our_node_id: &NodeId) -> anyhow::Result<std::collections::HashMap<PostId, Vec<(String, u64, bool)>>> {
+        use std::collections::HashMap;
+        let mut result: HashMap<PostId, Vec<(String, u64, bool)>> = HashMap::new();
+        if post_ids.is_empty() { return Ok(result); }
+        let placeholders: String = (0..post_ids.len()).map(|i| format!("?{}", i + 1)).collect::<Vec<_>>().join(",");
+        let sql = format!(
+            "SELECT post_id, emoji, COUNT(*) as cnt, SUM(CASE WHEN reactor = ?{} THEN 1 ELSE 0 END) as my_count
+             FROM reactions WHERE post_id IN ({}) AND deleted_at IS NULL
+             GROUP BY post_id, emoji ORDER BY cnt DESC",
+            post_ids.len() + 1, placeholders
+        );
+        let mut stmt = self.conn.prepare(&sql)?;
+        let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = post_ids.iter().map(|id| Box::new(id.to_vec()) as Box<dyn rusqlite::types::ToSql>).collect();
+        params.push(Box::new(our_node_id.to_vec()));
+        let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();
+        let rows = stmt.query_map(param_refs.as_slice(), |row| {
+            let pid: Vec<u8> = row.get(0)?;
+            let emoji: String = row.get(1)?;
+            let count: i64 = row.get(2)?;
+            let my_count: i64 = row.get(3)?;
+            Ok((pid, emoji, count as u64, my_count > 0))
+        })?;
+        for row in rows {
+            let (pid, emoji, count, reacted_by_me) = row?;
+            if let Ok(id) = blob_to_postid(pid) {
+                result.entry(id).or_default().push((emoji, count, reacted_by_me));
+            }
+        }
+        Ok(result)
+    }
+
+    /// Batch: comment counts for multiple posts at once
+    pub fn get_comment_counts_batch(&self, post_ids: &[PostId]) -> anyhow::Result<std::collections::HashMap<PostId, u64>> {
+        use std::collections::HashMap;
+        let mut result: HashMap<PostId, u64> = HashMap::new();
+        if post_ids.is_empty() { return Ok(result); }
+        let placeholders: String = (0..post_ids.len()).map(|i| format!("?{}", i + 1)).collect::<Vec<_>>().join(",");
+        let sql = format!(
+            "SELECT post_id, COUNT(*) FROM comments WHERE post_id IN ({}) AND deleted_at IS NULL GROUP BY post_id",
+            placeholders
+        );
+        let mut stmt = self.conn.prepare(&sql)?;
+        let params: Vec<Box<dyn rusqlite::types::ToSql>> = post_ids.iter().map(|id| Box::new(id.to_vec()) as Box<dyn rusqlite::types::ToSql>).collect();
+        let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();
+        let rows = stmt.query_map(param_refs.as_slice(), |row| {
+            let pid: Vec<u8> = row.get(0)?;
+            let count: i64 = row.get(1)?;
+            Ok((pid, count as u64))
+        })?;
+        for row in rows {
+            let (pid, count) = row?;
+            if let Ok(id) = blob_to_postid(pid) {
+                result.insert(id, count);
+            }
+        }
+        Ok(result)
+    }
+
+    /// Batch: visibility intents for multiple posts at once
+    pub fn get_post_intents_batch(&self, post_ids: &[PostId]) -> anyhow::Result<std::collections::HashMap<PostId, String>> {
+        use std::collections::HashMap;
+        let mut result: HashMap<PostId, String> = HashMap::new();
+        if post_ids.is_empty() { return Ok(result); }
+        let placeholders: String = (0..post_ids.len()).map(|i| format!("?{}", i + 1)).collect::<Vec<_>>().join(",");
+        let sql = format!(
+            "SELECT id, visibility_intent FROM posts WHERE id IN ({})",
+            placeholders
+        );
+        let mut stmt = self.conn.prepare(&sql)?;
+        let params: Vec<Box<dyn rusqlite::types::ToSql>> = post_ids.iter().map(|id| Box::new(id.to_vec()) as Box<dyn rusqlite::types::ToSql>).collect();
+        let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();
+        let rows = stmt.query_map(param_refs.as_slice(), |row| {
+            let pid: Vec<u8> = row.get(0)?;
+            let intent: Option<String> = row.get(1)?;
+            Ok((pid, intent.unwrap_or_default()))
+        })?;
+        for row in rows {
+            let (pid, intent) = row?;
+            if let Ok(id) = blob_to_postid(pid) {
+                result.insert(id, intent);
+            }
+        }
+        Ok(result)
+    }
+
+    /// Helper: parse a post row from a query
+    fn parse_post_row(row: &rusqlite::Row<'_>) -> rusqlite::Result<(Vec<u8>, Vec<u8>, String, String, i64, String)> {
+        Ok((row.get(0)?, row.get(1)?, row.get(2)?, row.get(3)?, row.get(4)?, row.get(5)?))
+    }
+
+    /// Helper: collect parsed post rows into typed results
+    fn collect_posts(rows: rusqlite::MappedRows<'_, impl FnMut(&rusqlite::Row<'_>) -> rusqlite::Result<(Vec<u8>, Vec<u8>, String, String, i64, String)>>) -> anyhow::Result<Vec<(PostId, Post, PostVisibility)>> {
+        let mut posts = Vec::new();
+        for row in rows {
+            let (id_bytes, author_bytes, content, attachments_json, timestamp_ms, vis_json) = row?;
+            let attachments: Vec<Attachment> = serde_json::from_str(&attachments_json).unwrap_or_default();
+            let visibility: PostVisibility = serde_json::from_str(&vis_json).unwrap_or_default();
+            posts.push((
+                blob_to_postid(id_bytes)?,
+                Post {
+                    author: blob_to_nodeid(author_bytes)?,
+                    content,
+                    attachments,
+                    timestamp_ms: timestamp_ms as u64,
+                },
+                visibility,
+            ));
+        }
+        Ok(posts)
+    }
+
     /// All posts with visibility (for sync protocol)
     pub fn list_posts_with_visibility(&self) -> anyhow::Result<Vec<(PostId, Post, PostVisibility)>> {
         self.list_posts_reverse_chron()
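All three batch queries above share one trick: build the IN (?1,...,?N) placeholder list at runtime, then bind N values positionally. A self-contained sketch of that pattern against an in-memory database; the table and ids are illustrative, and params_from_iter is shown as a lighter alternative to the boxed ToSql vector used in the diff:

use rusqlite::{Connection, Result};

fn main() -> Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute_batch(
        "CREATE TABLE comments (post_id BLOB, deleted_at INTEGER);
         INSERT INTO comments VALUES (x'01', NULL), (x'01', NULL), (x'02', NULL);",
    )?;

    // Two post ids we want counts for, as blobs.
    let post_ids: Vec<Vec<u8>> = vec![vec![0x01], vec![0x02]];

    // Build "?1,?2,...,?N" to match however many ids we have.
    let placeholders = (1..=post_ids.len())
        .map(|i| format!("?{}", i))
        .collect::<Vec<_>>()
        .join(",");
    let sql = format!(
        "SELECT post_id, COUNT(*) FROM comments \
         WHERE post_id IN ({}) AND deleted_at IS NULL GROUP BY post_id",
        placeholders
    );

    let mut stmt = conn.prepare(&sql)?;
    // params_from_iter binds each id positionally (?1, ?2, ...).
    let rows = stmt.query_map(rusqlite::params_from_iter(post_ids.iter()), |row| {
        Ok((row.get::<_, Vec<u8>>(0)?, row.get::<_, i64>(1)?))
    })?;
    for row in rows {
        let (post_id, count) = row?;
        println!("{:02x?} -> {}", post_id, count);
    }
    Ok(())
}

This is why the DTO assembly needs only three SQL round trips per page instead of four per post.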
@@ -26,3 +26,6 @@ open = "5"
 tauri-plugin-notification = "2"
 tauri-plugin-dialog = "2"
 notify-rust = "4"
+
+[target.'cfg(target_os = "android")'.dependencies]
+tauri-plugin-android-fs = "8"
@@ -176,6 +176,18 @@
     "Identifier": {
       "description": "Permission identifier",
       "oneOf": [
+        {
+          "description": "Default permissions for the plugin\n#### This default permission set includes:\n\n- `allow-noop`",
+          "type": "string",
+          "const": "android-fs:default",
+          "markdownDescription": "Default permissions for the plugin\n#### This default permission set includes:\n\n- `allow-noop`"
+        },
+        {
+          "description": "Enables the noop command.",
+          "type": "string",
+          "const": "android-fs:allow-noop",
+          "markdownDescription": "Enables the noop command."
+        },
         {
           "description": "Default core plugins set.\n#### This default permission set includes:\n\n- `core:path:default`\n- `core:event:default`\n- `core:window:default`\n- `core:webview:default`\n- `core:app:default`\n- `core:image:default`\n- `core:resources:default`\n- `core:menu:default`\n- `core:tray:default`",
           "type": "string",

@@ -2144,6 +2156,72 @@
           "const": "core:window:deny-unminimize",
           "markdownDescription": "Denies the unminimize command without any pre-configured scope."
         },
+        {
+          "description": "This permission set configures the types of dialogs\navailable from the dialog plugin.\n\n#### Granted Permissions\n\nAll dialog types are enabled.\n\n\n\n#### This default permission set includes:\n\n- `allow-message`\n- `allow-save`\n- `allow-open`",
+          "type": "string",
+          "const": "dialog:default",
+          "markdownDescription": "This permission set configures the types of dialogs\navailable from the dialog plugin.\n\n#### Granted Permissions\n\nAll dialog types are enabled.\n\n\n\n#### This default permission set includes:\n\n- `allow-message`\n- `allow-save`\n- `allow-open`"
+        },
+        {
+          "description": "Enables the ask command without any pre-configured scope. (**DEPRECATED**: This is now an alias to `allow-message` and will be removed in v3)",
+          "type": "string",
+          "const": "dialog:allow-ask",
+          "markdownDescription": "Enables the ask command without any pre-configured scope. (**DEPRECATED**: This is now an alias to `allow-message` and will be removed in v3)"
+        },
+        {
+          "description": "Enables the confirm command without any pre-configured scope. (**DEPRECATED**: This is now an alias to `allow-message` and will be removed in v3)",
+          "type": "string",
+          "const": "dialog:allow-confirm",
+          "markdownDescription": "Enables the confirm command without any pre-configured scope. (**DEPRECATED**: This is now an alias to `allow-message` and will be removed in v3)"
+        },
+        {
+          "description": "Enables the message command without any pre-configured scope.",
+          "type": "string",
+          "const": "dialog:allow-message",
+          "markdownDescription": "Enables the message command without any pre-configured scope."
+        },
+        {
+          "description": "Enables the open command without any pre-configured scope.",
+          "type": "string",
+          "const": "dialog:allow-open",
+          "markdownDescription": "Enables the open command without any pre-configured scope."
+        },
+        {
+          "description": "Enables the save command without any pre-configured scope.",
+          "type": "string",
+          "const": "dialog:allow-save",
+          "markdownDescription": "Enables the save command without any pre-configured scope."
+        },
+        {
+          "description": "Denies the ask command without any pre-configured scope. (**DEPRECATED**: This is now an alias to `deny-message` and will be removed in v3)",
+          "type": "string",
+          "const": "dialog:deny-ask",
+          "markdownDescription": "Denies the ask command without any pre-configured scope. (**DEPRECATED**: This is now an alias to `deny-message` and will be removed in v3)"
+        },
+        {
+          "description": "Denies the confirm command without any pre-configured scope. (**DEPRECATED**: This is now an alias to `deny-message` and will be removed in v3)",
+          "type": "string",
+          "const": "dialog:deny-confirm",
+          "markdownDescription": "Denies the confirm command without any pre-configured scope. (**DEPRECATED**: This is now an alias to `deny-message` and will be removed in v3)"
+        },
+        {
+          "description": "Denies the message command without any pre-configured scope.",
+          "type": "string",
+          "const": "dialog:deny-message",
+          "markdownDescription": "Denies the message command without any pre-configured scope."
+        },
+        {
+          "description": "Denies the open command without any pre-configured scope.",
+          "type": "string",
+          "const": "dialog:deny-open",
+          "markdownDescription": "Denies the open command without any pre-configured scope."
+        },
+        {
+          "description": "Denies the save command without any pre-configured scope.",
+          "type": "string",
+          "const": "dialog:deny-save",
+          "markdownDescription": "Denies the save command without any pre-configured scope."
+        },
         {
           "description": "This permission set configures which\nnotification features are by default exposed.\n\n#### Granted Permissions\n\nIt allows all notification related features.\n\n\n#### This default permission set includes:\n\n- `allow-is-permission-granted`\n- `allow-request-permission`\n- `allow-notify`\n- `allow-register-action-types`\n- `allow-register-listener`\n- `allow-cancel`\n- `allow-get-pending`\n- `allow-remove-active`\n- `allow-get-active`\n- `allow-check-permissions`\n- `allow-show`\n- `allow-batch`\n- `allow-list-channels`\n- `allow-delete-channel`\n- `allow-create-channel`\n- `allow-permission-state`",
           "type": "string",
A second generated permissions schema file receives the identical additions: the same android-fs entries at @@ -176,6 +176,18 @@ and the same dialog permission entries at @@ -2144,6 +2156,72 @@ as shown above.
@ -159,6 +159,7 @@ struct NodeInfoDto {
|
||||||
connect_string: String,
|
connect_string: String,
|
||||||
display_name: Option<String>,
|
display_name: Option<String>,
|
||||||
has_profile: bool,
|
has_profile: bool,
|
||||||
|
duplicate_detected: bool,
|
||||||
anchors: Vec<String>,
|
anchors: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -359,6 +360,7 @@ async fn get_node_info(state: State<'_, AppNode>) -> Result<NodeInfoDto, String>
|
||||||
connect_string,
|
connect_string,
|
||||||
display_name: profile.as_ref().map(|p| p.display_name.clone()),
|
display_name: profile.as_ref().map(|p| p.display_name.clone()),
|
||||||
has_profile: profile.is_some(),
|
has_profile: profile.is_some(),
|
||||||
|
duplicate_detected: node.network.duplicate_detected.load(std::sync::atomic::Ordering::Relaxed),
|
||||||
anchors,
|
anchors,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
@ -582,10 +584,9 @@ async fn save_and_open_blob(
|
||||||
let data = resolve_blob_data(&node, &cid, post_id_hex.as_deref()).await?;
|
let data = resolve_blob_data(&node, &cid, post_id_hex.as_deref()).await?;
|
||||||
let safe_name = sanitize_download_filename(&filename);
|
let safe_name = sanitize_download_filename(&filename);
|
||||||
|
|
||||||
// Save to Downloads
|
// Save to Downloads — use app cache dir on Android (no access to shared storage without SAF)
|
||||||
let downloads = dirs::download_dir()
|
let downloads = get_writable_download_dir(&node);
|
||||||
.or_else(|| dirs::home_dir().map(|h| h.join("Downloads")))
|
std::fs::create_dir_all(&downloads).map_err(|e| e.to_string())?;
|
||||||
.unwrap_or_else(|| std::path::PathBuf::from("/tmp"));
|
|
||||||
let dest = downloads.join(&safe_name);
|
let dest = downloads.join(&safe_name);
|
||||||
tokio::fs::write(&dest, &data).await.map_err(|e| e.to_string())?;
|
tokio::fs::write(&dest, &data).await.map_err(|e| e.to_string())?;
|
||||||
|
|
||||||
|
|
@ -595,6 +596,22 @@ async fn save_and_open_blob(
|
||||||
Ok(dest.to_string_lossy().to_string())
|
Ok(dest.to_string_lossy().to_string())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get a writable directory for downloads/exports.
|
||||||
|
/// On desktop: ~/Downloads. On Android: app data dir + "exports".
|
||||||
|
fn get_writable_download_dir(node: &Node) -> std::path::PathBuf {
|
||||||
|
#[cfg(target_os = "android")]
|
||||||
|
{
|
||||||
|
node.data_dir.join("exports")
|
||||||
|
}
|
||||||
|
#[cfg(not(target_os = "android"))]
|
||||||
|
{
|
||||||
|
let _ = node;
|
||||||
|
dirs::download_dir()
|
||||||
|
.or_else(|| dirs::home_dir().map(|h| h.join("Downloads")))
|
||||||
|
.unwrap_or_else(|| std::path::PathBuf::from("/tmp"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Save a blob to Downloads without opening it.
|
/// Save a blob to Downloads without opening it.
|
||||||
#[tauri::command]
|
#[tauri::command]
|
||||||
async fn save_blob(
|
async fn save_blob(
|
||||||
|
|
@ -612,9 +629,8 @@ async fn save_blob(
|
||||||
let data = resolve_blob_data(&node, &cid, post_id_hex.as_deref()).await?;
|
let data = resolve_blob_data(&node, &cid, post_id_hex.as_deref()).await?;
|
||||||
let safe_name = sanitize_download_filename(&filename);
|
let safe_name = sanitize_download_filename(&filename);
|
||||||
|
|
||||||
let downloads = dirs::download_dir()
|
let downloads = get_writable_download_dir(&node);
|
||||||
.or_else(|| dirs::home_dir().map(|h| h.join("Downloads")))
|
std::fs::create_dir_all(&downloads).map_err(|e| e.to_string())?;
|
||||||
.unwrap_or_else(|| std::path::PathBuf::from("/tmp"));
|
|
||||||
let dest = downloads.join(&safe_name);
|
let dest = downloads.join(&safe_name);
|
||||||
tokio::fs::write(&dest, &data).await.map_err(|e| e.to_string())?;
|
tokio::fs::write(&dest, &data).await.map_err(|e| e.to_string())?;
|
||||||
|
|
||||||
|
|
@ -649,6 +665,141 @@ async fn get_feed(state: State<'_, AppNode>) -> Result<Vec<PostDto>, String> {
|
||||||
Ok(dtos)
|
Ok(dtos)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
struct FeedPageDto {
|
||||||
|
posts: Vec<PostDto>,
|
||||||
|
has_more: bool,
|
||||||
|
oldest_ms: Option<u64>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tauri::command]
|
||||||
|
async fn get_feed_page(
|
||||||
|
state: State<'_, AppNode>,
|
||||||
|
before_ms: Option<u64>,
|
||||||
|
limit: Option<usize>,
|
||||||
|
) -> Result<FeedPageDto, String> {
|
||||||
|
let node = get_node(&state).await;
|
||||||
|
let page_size = limit.unwrap_or(20);
|
||||||
|
// Fetch one extra to know if there are more
|
||||||
|
let posts = node.get_feed_page(before_ms, page_size + 1).await.map_err(|e| e.to_string())?;
|
||||||
|
let has_more = posts.len() > page_size;
|
||||||
|
let page: Vec<_> = posts.into_iter().take(page_size).collect();
|
||||||
|
let oldest_ms = page.last().map(|(_, p, _, _)| p.timestamp_ms);
|
||||||
|
let dtos = post_to_dto_batch(&page, &node).await;
|
||||||
|
Ok(FeedPageDto { posts: dtos, has_more, oldest_ms })
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tauri::command]
|
||||||
|
async fn get_all_posts_page(
|
||||||
|
state: State<'_, AppNode>,
|
||||||
|
before_ms: Option<u64>,
|
||||||
|
limit: Option<usize>,
|
||||||
|
) -> Result<FeedPageDto, String> {
|
||||||
|
let node = get_node(&state).await;
|
||||||
|
let page_size = limit.unwrap_or(20);
|
||||||
|
let posts = node.get_all_posts_page(before_ms, page_size + 1).await.map_err(|e| e.to_string())?;
|
||||||
|
let has_more = posts.len() > page_size;
|
||||||
|
let page: Vec<_> = posts.into_iter().take(page_size).collect();
|
||||||
|
let oldest_ms = page.last().map(|(_, p, _, _)| p.timestamp_ms);
|
||||||
|
let dtos = post_to_dto_batch(&page, &node).await;
|
||||||
|
Ok(FeedPageDto { posts: dtos, has_more, oldest_ms })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Batched DTO assembly: 3 bulk queries instead of 4 per post
|
||||||
|
async fn post_to_dto_batch(
|
||||||
|
posts: &[(itsgoin_core::types::PostId, itsgoin_core::types::Post, itsgoin_core::types::PostVisibility, Option<String>)],
|
||||||
|
node: &Node,
|
||||||
|
) -> Vec<PostDto> {
|
||||||
|
use std::collections::HashMap;
|
||||||
|
if posts.is_empty() { return vec![]; }
|
||||||
|
|
||||||
|
let post_ids: Vec<itsgoin_core::types::PostId> = posts.iter().map(|(id, _, _, _)| *id).collect();
|
||||||
|
|
||||||
|
// Batch queries — 3 queries total instead of 4 × N
|
||||||
|
let (reaction_map, comment_map, intent_map) = {
|
||||||
|
let storage = node.storage.get().await;
|
||||||
|
let reactions = storage.get_reaction_counts_batch(&post_ids, &node.node_id).unwrap_or_default();
|
||||||
|
let comments = storage.get_comment_counts_batch(&post_ids).unwrap_or_default();
|
||||||
|
let intents = storage.get_post_intents_batch(&post_ids).unwrap_or_default();
|
||||||
|
(reactions, comments, intents)
|
||||||
|
};
|
||||||
|
|
||||||
|
// Batch resolve display names
|
||||||
|
let mut name_cache: HashMap<itsgoin_core::types::NodeId, Option<String>> = HashMap::new();
|
||||||
|
|
||||||
|
let mut dtos = Vec::with_capacity(posts.len());
|
||||||
|
for (id, post, vis, decrypted) in posts {
|
||||||
|
let is_me = post.author == node.node_id;
|
||||||
|
|
||||||
|
let author_name = if let Some(cached) = name_cache.get(&post.author) {
|
||||||
|
cached.clone()
|
||||||
|
} else {
|
||||||
|
let name = match node.resolve_display_name(&post.author).await {
|
||||||
|
Ok((name, _, _)) if !name.is_empty() => Some(name),
|
||||||
|
_ => None,
|
||||||
|
};
|
||||||
|
name_cache.insert(post.author, name.clone());
|
||||||
|
name
|
||||||
|
};
|
||||||
|
|
||||||
|
let intent_kind = if let Some(intent_json) = intent_map.get(id) {
|
||||||
|
match serde_json::from_str::<VisibilityIntent>(intent_json) {
|
||||||
|
Ok(VisibilityIntent::Public) => "public",
|
||||||
|
Ok(VisibilityIntent::Friends) => "friends",
|
||||||
|
Ok(VisibilityIntent::Circle(_)) => "circle",
|
||||||
|
Ok(VisibilityIntent::Direct(_)) => "direct",
|
||||||
|
_ => "unknown",
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
"unknown"
|
||||||
|
}.to_string();
|
||||||
|
|
||||||
|
let (visibility, decrypted_content) = match vis {
|
||||||
|
PostVisibility::Public => ("public".to_string(), None),
|
||||||
|
PostVisibility::Encrypted { .. } | PostVisibility::GroupEncrypted { .. } => match decrypted {
|
||||||
|
Some(text) if is_me => ("encrypted".to_string(), Some(text.clone())),
|
||||||
|
Some(text) => ("encrypted-for-me".to_string(), Some(text.clone())),
|
||||||
|
None => ("encrypted".to_string(), None),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
let recipients = match vis {
|
||||||
|
PostVisibility::Encrypted { recipients } => {
|
||||||
|
recipients.iter().map(|wk| hex::encode(wk.recipient)).collect()
|
||||||
|
}
|
||||||
|
_ => vec![],
|
||||||
|
};
|
||||||
|
let attachments = post.attachments.iter().map(|a| AttachmentDto {
|
||||||
|
cid: hex::encode(a.cid),
|
||||||
|
mime_type: a.mime_type.clone(),
|
||||||
|
size_bytes: a.size_bytes,
|
||||||
|
}).collect();
|
||||||
|
|
||||||
|
let reaction_counts = reaction_map.get(id).cloned().unwrap_or_default()
|
||||||
|
.into_iter()
|
||||||
|
.map(|(emoji, count, reacted_by_me)| ReactionCountDto { emoji, count, reacted_by_me })
|
||||||
|
.collect();
|
||||||
|
let comment_count = comment_map.get(id).copied().unwrap_or(0);
|
||||||
|
|
||||||
|
dtos.push(PostDto {
|
||||||
|
id: hex::encode(id),
|
||||||
|
author: hex::encode(post.author),
|
||||||
|
author_name,
|
||||||
|
content: post.content.clone(),
|
||||||
|
timestamp_ms: post.timestamp_ms,
|
||||||
|
is_me,
|
||||||
|
visibility,
|
||||||
|
intent_kind,
|
||||||
|
decrypted_content,
|
||||||
|
attachments,
|
||||||
|
recipients,
|
||||||
|
reaction_counts,
|
||||||
|
comment_count,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
dtos
|
||||||
|
}
|
||||||
|
|
||||||
#[tauri::command]
|
#[tauri::command]
|
||||||
async fn get_all_posts(state: State<'_, AppNode>) -> Result<Vec<PostDto>, String> {
|
async fn get_all_posts(state: State<'_, AppNode>) -> Result<Vec<PostDto>, String> {
|
||||||
let node = get_node(&state).await;
|
let node = get_node(&state).await;
|
||||||
|
|
@ -2311,13 +2462,23 @@ async fn export_data(
|
||||||
"everything" => itsgoin_core::export::ExportScope::Everything,
|
"everything" => itsgoin_core::export::ExportScope::Everything,
|
||||||
_ => return Err("Invalid scope".to_string()),
|
_ => return Err("Invalid scope".to_string()),
|
||||||
};
|
};
|
||||||
// Resolve relative paths against user's home directory
|
// Resolve output directory — on Android use app data dir, on desktop resolve relative to home
|
||||||
let resolved_dir = if std::path::Path::new(&output_dir).is_relative() {
|
let resolved_dir = {
|
||||||
dirs::home_dir()
|
#[cfg(target_os = "android")]
|
||||||
.unwrap_or_else(|| std::path::PathBuf::from("."))
|
{
|
||||||
.join(&output_dir)
|
let _ = &output_dir;
|
||||||
} else {
|
node.data_dir.join("exports")
|
||||||
std::path::PathBuf::from(&output_dir)
|
}
|
||||||
|
#[cfg(not(target_os = "android"))]
|
||||||
|
{
|
||||||
|
if std::path::Path::new(&output_dir).is_relative() {
|
||||||
|
dirs::home_dir()
|
||||||
|
.unwrap_or_else(|| std::path::PathBuf::from("."))
|
||||||
|
.join(&output_dir)
|
||||||
|
} else {
|
||||||
|
std::path::PathBuf::from(&output_dir)
|
||||||
|
}
|
||||||
|
}
|
||||||
};
|
};
|
||||||
let result = itsgoin_core::export::export_data(
|
let result = itsgoin_core::export::export_data(
|
||||||
&node.data_dir,
|
&node.data_dir,
|
||||||
|
|
@@ -2336,6 +2497,37 @@ async fn export_data(
         paths.join(", ")))
 }
 
+/// On Android: save a file from the app's internal storage to a user-chosen location via SAF.
+/// On desktop: no-op (files are already in ~/Downloads).
+#[tauri::command]
+async fn share_file(app: tauri::AppHandle, file_path: String, mime_type: String) -> Result<String, String> {
+    #[cfg(target_os = "android")]
+    {
+        use tauri_plugin_android_fs::{AndroidFsExt, AndroidFs};
+        let path = std::path::Path::new(&file_path);
+        let filename = path.file_name()
+            .map(|n| n.to_string_lossy().to_string())
+            .unwrap_or_else(|| "export.zip".to_string());
+        let data = std::fs::read(path).map_err(|e| format!("Failed to read file: {}", e))?;
+        let api = app.android_fs();
+        let uri = api.show_save_file_dialog(None, &filename, Some(&mime_type))
+            .map_err(|e| format!("Save dialog failed: {}", e))?;
+        match uri {
+            Some(uri) => {
+                api.write(&uri, &data).map_err(|e| format!("Write failed: {}", e))?;
+                Ok(format!("Saved to device"))
+            }
+            None => Ok("Cancelled".to_string()),
+        }
+    }
+    #[cfg(not(target_os = "android"))]
+    {
+        let _ = (app, mime_type);
+        // Desktop: just return the path — file is already accessible
+        Ok(file_path)
+    }
+}
+
 #[tauri::command]
 async fn import_summary(zip_path: String) -> Result<String, String> {
     let summary = itsgoin_core::import::read_import_summary(std::path::Path::new(&zip_path))
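Worth noting about the Android branch above: show_save_file_dialog hands back a content:// URI rather than a filesystem path, which is why the export is read into memory and pushed through the plugin's write call instead of being copied with std::fs; a None URI simply means the user dismissed the dialog.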
@@ -2416,6 +2608,12 @@ pub fn run() {
     tauri::Builder::default()
         .plugin(tauri_plugin_notification::init())
         .plugin(tauri_plugin_dialog::init())
+        .plugin({
+            #[cfg(target_os = "android")]
+            { tauri_plugin_android_fs::init() }
+            #[cfg(not(target_os = "android"))]
+            { tauri::plugin::Builder::<tauri::Wry>::new("android-fs-stub").build() }
+        })
         .setup(move |app| {
             // Desktop: store data next to the AppImage/executable so each copy
             // gets its own identity. Mobile: use the standard app data dir.
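The block passed to .plugin() keeps the builder chain unconditional: the cfg attributes select which expression the block evaluates to per target, so Android gets the real android-fs plugin while desktop registers a named no-op stub, without duplicating the whole builder chain behind cfg.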
@@ -2476,6 +2674,12 @@ pub fn run() {
                 tracing::warn!(error = %e, "Background bootstrap failed");
             }
 
+            // Skip sync if duplicate identity detected
+            if boot_node.network.duplicate_detected.load(std::sync::atomic::Ordering::Relaxed) {
+                tracing::warn!("Duplicate identity detected — skipping sync tasks");
+                return;
+            }
+
             // Start all background networking tasks
             boot_node.start_accept_loop();
             boot_node.start_pull_cycle(300);
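The duplicate_detected flag polled above is read with a relaxed atomic load. A sketch of the shape this implies — an AtomicBool on the network state, set when the anchor's initial-exchange response carries duplicate_active; field and method names here are assumed, not the actual itsgoin-core definitions:

    use std::sync::atomic::{AtomicBool, Ordering};

    pub struct Network {
        /// Set once the anchor reports this NodeId is already mesh-connected elsewhere.
        pub duplicate_detected: AtomicBool,
        // ...other connection state
    }

    impl Network {
        /// Called while handling the anchor's initial-exchange response.
        pub fn mark_duplicate(&self) {
            self.duplicate_detected.store(true, Ordering::Relaxed);
        }
    }

Relaxed ordering suffices here because the flag is a one-way latch consulted once before the sync tasks spawn.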
@@ -2520,6 +2724,8 @@ pub fn run() {
             save_and_open_blob,
             save_blob,
             get_feed,
+            get_feed_page,
+            get_all_posts_page,
             get_all_posts,
             get_stats,
             connect_peer,
@@ -2596,6 +2802,7 @@ pub fn run() {
             import_identity_key,
             get_active_identity,
             export_data,
+            share_file,
             import_summary,
             import_public_posts,
             import_as_new_identity,
378
frontend/app.js
@@ -713,130 +713,223 @@ async function loadStats() {
     }
   }
 
+// --- Feed pagination state ---
+let _feedCursor = null; // oldest_ms for next page
+let _feedHasMore = true;
+let _feedPrefetch = null; // pre-fetched next page (Promise)
+let _feedLoading = false;
+let _feedMediaObserver = null; // IntersectionObserver for viewport media
+let _feedScrollObserver = null; // IntersectionObserver for infinite scroll
+let _feedPostIds = new Set(); // track loaded post IDs to avoid duplicates
+
+function filterFeedPosts(posts) {
+  return posts.filter(p => p.intentKind !== 'direct' && !(p.intentKind === 'unknown' && (p.visibility === 'encrypted-for-me' || (p.isMe && p.recipients && p.recipients.length > 0))));
+}
+
 async function loadFeed(force) {
+  if (_feedLoading) return;
+  _feedLoading = true;
   try {
-    const allPosts = await invoke('get_feed');
-    const posts = allPosts.filter(p => p.intentKind !== 'direct' && !(p.intentKind === 'unknown' && (p.visibility === 'encrypted-for-me' || (p.isMe && p.recipients && p.recipients.length > 0))));
-    // Fingerprint: post IDs + reaction counts + comment counts
+    // First page or refresh: load newest 20
+    const result = await invoke('get_feed_page', { limit: 20 });
+    const posts = filterFeedPosts(result.posts);
+
+    // Fingerprint first page for refresh detection
     const fp = posts.map(p => `${p.id}:${(p.reactionCounts||[]).map(r=>r.emoji+r.count).join(',')}:${p.commentCount||0}`).join('|');
-    if (!force && fp === _feedFingerprint) return;
+    if (!force && fp === _feedFingerprint) { _feedLoading = false; return; }
     const oldFp = _feedFingerprint;
     _feedFingerprint = fp;
 
-    // Ticker for new posts from others
+    // Ticker for new posts
     if (_notifReady && oldFp) {
       const oldIds = new Set(oldFp.split('|').map(s => s.split(':')[0]));
       for (const p of posts) {
         if (!p.isMe && !oldIds.has(p.id)) {
-          const name = p.authorName || p.author.substring(0, 8);
-          showTicker(`New post from ${name}`);
-          break; // one ticker per cycle
+          showTicker(`New post from ${p.authorName || p.author.substring(0, 8)}`);
+          break;
         }
       }
     }
-    // Notify on engagement (DB-backed seen tracking)
-    if (_notifReady && oldFp) {
-      try {
-        const notifReacts = await invoke('get_setting', { key: 'notif_reacts' }).catch(() => null) || 'on';
-        for (const p of posts) {
-          if (!p.isMe) continue;
-          if (notifReacts === 'off') continue;
-          // Get DB-persisted seen counts
-          const seen = await invoke('get_seen_engagement', { postId: p.id }).catch(() => ({ seenReactCount: 0, seenCommentCount: 0 }));
-          const totalReacts = (p.reactionCounts || []).reduce((sum, r) => sum + r.count, 0);
-          const totalComments = p.commentCount || 0;
-          if (totalReacts > seen.seenReactCount) {
-            const newReacts = totalReacts - seen.seenReactCount;
-            maybeNotify('New reactions on your post', `${newReacts} new reaction${newReacts > 1 ? 's' : ''}`, `react-${p.id}`);
-            showTicker(`New reaction on your post`);
-          }
-          if (totalComments > seen.seenCommentCount) {
-            const newComments = totalComments - seen.seenCommentCount;
-            maybeNotify('New comment on your post', (p.content || '').slice(0, 40), `comment-${p.id}`);
-            showTicker(`New comment on your post`);
-          }
-        }
-      } catch (_) {}
-    }
 
-    // Skip full re-render if any video/audio is actively playing (prevents echo/restart)
+    // Skip re-render if media playing
     const mediaPlaying = [...feedList.querySelectorAll('video, audio')].some(el => !el.paused);
-    if (mediaPlaying) {
-      // Don't destroy the DOM while media is playing — re-render on next cycle when stopped
-      return;
-    }
+    if (mediaPlaying) { _feedLoading = false; return; }
 
-    // Revoke old object URLs to prevent memory leaks
+    // Revoke old blob URLs
     feedList.querySelectorAll('video[src^="blob:"], audio[src^="blob:"], img[src^="blob:"]').forEach(el => {
       if (el.src.startsWith('blob:')) URL.revokeObjectURL(el.src);
     });
 
-    // Preserve expanded comment threads
-    const expandedComments = new Set();
-    feedList.querySelectorAll('.comment-thread:not(.hidden)').forEach(el => {
-      const postEl = el.closest('.post');
-      if (postEl) expandedComments.add(postEl.dataset.postId);
-    });
+    // Reset pagination state
+    _feedCursor = result.oldestMs || null;
+    _feedHasMore = result.hasMore;
+    _feedPostIds = new Set(posts.map(p => p.id));
+
     if (posts.length === 0) {
-      // Don't lock in empty fingerprint — let next refresh re-render when posts arrive
       _feedFingerprint = null;
-      feedList.innerHTML = renderEmptyState(
-        'Your feed is empty',
-        'Follow peers on the People tab to see their posts here.'
-      );
+      feedList.innerHTML = renderEmptyState('Your feed is empty', 'Follow peers on the People tab to see their posts here.');
     } else {
       feedList.innerHTML = posts.map(renderPost).join('');
-      loadPostMedia(feedList);
-      // Restore expanded comment threads
-      for (const postId of expandedComments) {
-        const thread = feedList.querySelector(`#comments-${postId}`);
-        if (thread) {
-          thread.classList.remove('hidden');
-          loadCommentThread(postId, thread);
-        }
-      }
+      // Add scroll sentinel at midpoint
+      if (_feedHasMore) {
+        const sentinel = document.createElement('div');
+        sentinel.id = 'feed-scroll-sentinel';
+        const children = feedList.children;
+        const mid = Math.min(Math.floor(children.length / 2), children.length - 1);
+        children[mid].after(sentinel);
+        setupFeedScrollObserver();
+      }
+      setupFeedMediaObserver();
+    }
+
+    // Pre-fetch next page immediately
+    if (_feedHasMore && _feedCursor) {
+      _feedPrefetch = invoke('get_feed_page', { beforeMs: _feedCursor, limit: 20 }).catch(() => null);
     }
   } catch (e) {
     feedList.innerHTML = `<p class="status-err">Error: ${e}</p>`;
+  } finally {
+    _feedLoading = false;
   }
 }
 
-async function loadMyPosts(force) {
-  try {
-    const posts = await invoke('get_all_posts');
-    const mine = posts.filter(p => p.isMe && p.intentKind !== 'direct' && !(p.intentKind === 'unknown' && p.recipients && p.recipients.length > 0));
-    const fp = mine.map(p => `${p.id}:${(p.reactionCounts||[]).map(r=>r.emoji+r.count).join(',')}:${p.commentCount||0}`).join('|');
-    if (!force && fp === _myPostsFingerprint) return;
-    _myPostsFingerprint = fp;
-    // Skip re-render if media is playing
-    const mediaPlaying = [...myPostsList.querySelectorAll('video, audio')].some(el => !el.paused);
-    if (mediaPlaying) return;
-    // Revoke old blob URLs
-    myPostsList.querySelectorAll('video[src^="blob:"], audio[src^="blob:"], img[src^="blob:"]').forEach(el => {
-      if (el.src.startsWith('blob:')) URL.revokeObjectURL(el.src);
-    });
-    const expandedComments = new Set();
-    myPostsList.querySelectorAll('.comment-thread:not(.hidden)').forEach(el => {
-      const postEl = el.closest('.post');
-      if (postEl) expandedComments.add(postEl.dataset.postId);
-    });
-    if (mine.length === 0) {
-      myPostsList.innerHTML = renderEmptyState(
-        'No posts yet',
-        'Write your first post above!'
-      );
-    } else {
-      myPostsList.innerHTML = mine.map(renderPost).join('');
-      loadPostMedia(myPostsList);
-      for (const postId of expandedComments) {
-        const thread = myPostsList.querySelector(`#comments-${postId}`);
-        if (thread) {
-          thread.classList.remove('hidden');
-          loadCommentThread(postId, thread);
-        }
-      }
-    }
-    // Mark all visible own posts' engagement as seen (DB-backed) when viewing tab
+async function appendFeedPage() {
+  if (_feedLoading || !_feedHasMore) return;
+  _feedLoading = true;
+  try {
+    // Use pre-fetched data if available, otherwise fetch now
+    let result;
+    if (_feedPrefetch) {
+      result = await _feedPrefetch;
+      _feedPrefetch = null;
+    }
+    if (!result) {
+      result = await invoke('get_feed_page', { beforeMs: _feedCursor, limit: 20 });
+    }
+    const posts = filterFeedPosts(result.posts).filter(p => !_feedPostIds.has(p.id));
+    if (posts.length === 0) { _feedHasMore = false; _feedLoading = false; return; }
+
+    _feedCursor = result.oldestMs || null;
+    _feedHasMore = result.hasMore;
+    posts.forEach(p => _feedPostIds.add(p.id));
+
+    // Remove old sentinel
+    const oldSentinel = document.getElementById('feed-scroll-sentinel');
+    if (oldSentinel) oldSentinel.remove();
+
+    // Append posts
+    const fragment = document.createDocumentFragment();
+    const temp = document.createElement('div');
+    temp.innerHTML = posts.map(renderPost).join('');
+    while (temp.firstChild) fragment.appendChild(temp.firstChild);
+
+    // Insert new sentinel at midpoint of new posts
+    if (_feedHasMore) {
+      const sentinel = document.createElement('div');
+      sentinel.id = 'feed-scroll-sentinel';
+      const newNodes = [...fragment.children];
+      const mid = Math.min(Math.floor(newNodes.length / 2), newNodes.length - 1);
+      if (newNodes[mid]) newNodes[mid].after(sentinel);
+    }
+
+    feedList.appendChild(fragment);
+    setupFeedScrollObserver();
+    // Media observer auto-picks up new posts
+
+    // Pre-fetch next page
+    if (_feedHasMore && _feedCursor) {
+      _feedPrefetch = invoke('get_feed_page', { beforeMs: _feedCursor, limit: 20 }).catch(() => null);
+    }
+  } catch (e) {
+    console.error('appendFeedPage:', e);
+  } finally {
+    _feedLoading = false;
+  }
+}
+
+function setupFeedScrollObserver() {
+  if (_feedScrollObserver) _feedScrollObserver.disconnect();
+  const sentinel = document.getElementById('feed-scroll-sentinel');
+  if (!sentinel) return;
+  _feedScrollObserver = new IntersectionObserver((entries) => {
+    if (entries[0].isIntersecting) appendFeedPage();
+  }, { rootMargin: '200px' });
+  _feedScrollObserver.observe(sentinel);
+}
+
+function setupFeedMediaObserver() {
+  if (_feedMediaObserver) _feedMediaObserver.disconnect();
+  _feedMediaObserver = new IntersectionObserver((entries) => {
+    for (const entry of entries) {
+      if (entry.isIntersecting) {
+        const post = entry.target;
+        if (!post.dataset.mediaLoaded) {
+          post.dataset.mediaLoaded = '1';
+          loadPostMedia(post);
+        }
+      }
+    }
+  }, { rootMargin: '400px' }); // start loading 400px before viewport
+  feedList.querySelectorAll('.post').forEach(post => _feedMediaObserver.observe(post));
+  // Also observe new posts added later via MutationObserver
+  const mutObs = new MutationObserver((mutations) => {
+    for (const m of mutations) {
+      for (const node of m.addedNodes) {
+        if (node.classList && node.classList.contains('post')) {
+          _feedMediaObserver.observe(node);
+        }
+      }
+    }
+  });
+  mutObs.observe(feedList, { childList: true });
+}
+
+// --- My Posts pagination state ---
+let _myPostsCursor = null;
+let _myPostsHasMore = true;
+let _myPostsPrefetch = null;
+let _myPostsLoading = false;
+let _myPostsMediaObserver = null;
+let _myPostsScrollObserver = null;
+let _myPostsIds = new Set();
+
+async function loadMyPosts(force) {
+  if (_myPostsLoading) return;
+  _myPostsLoading = true;
+  try {
+    const result = await invoke('get_all_posts_page', { limit: 20 });
+    const mine = result.posts.filter(p => p.isMe && p.intentKind !== 'direct' && !(p.intentKind === 'unknown' && p.recipients && p.recipients.length > 0));
+    const fp = mine.map(p => `${p.id}:${(p.reactionCounts||[]).map(r=>r.emoji+r.count).join(',')}:${p.commentCount||0}`).join('|');
+    if (!force && fp === _myPostsFingerprint) { _myPostsLoading = false; return; }
+    _myPostsFingerprint = fp;
+
+    const mediaPlaying = [...myPostsList.querySelectorAll('video, audio')].some(el => !el.paused);
+    if (mediaPlaying) { _myPostsLoading = false; return; }
+
+    myPostsList.querySelectorAll('video[src^="blob:"], audio[src^="blob:"], img[src^="blob:"]').forEach(el => {
+      if (el.src.startsWith('blob:')) URL.revokeObjectURL(el.src);
+    });
+
+    _myPostsCursor = result.oldestMs || null;
+    _myPostsHasMore = result.hasMore;
+    _myPostsIds = new Set(mine.map(p => p.id));
+
+    if (mine.length === 0) {
+      myPostsList.innerHTML = renderEmptyState('No posts yet', 'Write your first post above!');
+    } else {
+      myPostsList.innerHTML = mine.map(renderPost).join('');
+      if (_myPostsHasMore) {
+        const sentinel = document.createElement('div');
+        sentinel.id = 'myposts-scroll-sentinel';
+        const children = myPostsList.children;
+        const mid = Math.min(Math.floor(children.length / 2), children.length - 1);
+        children[mid].after(sentinel);
+        setupMyPostsScrollObserver();
+      }
+      setupMyPostsMediaObserver();
+    }
+
+    // Mark visible own posts as seen
     if (currentTab === 'myposts') {
       for (const p of mine) {
         const totalReacts = (p.reactionCounts || []).reduce((sum, r) => sum + r.count, 0);
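Design note: the sentinel goes at the midpoint of the rendered page rather than the bottom, and the scroll observer adds a 200px rootMargin on top of that, so appendFeedPage fires (and the pre-fetched page has usually already resolved) well before the user reaches the last post — scrolling rarely stalls on a network round-trip.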
@@ -846,11 +939,88 @@ async function loadMyPosts(force) {
        }
      }
    }
+
+    if (_myPostsHasMore && _myPostsCursor) {
+      _myPostsPrefetch = invoke('get_all_posts_page', { beforeMs: _myPostsCursor, limit: 20 }).catch(() => null);
+    }
   } catch (e) {
     myPostsList.innerHTML = `<p class="status-err">Error: ${e}</p>`;
+  } finally {
+    _myPostsLoading = false;
   }
 }
 
+async function appendMyPostsPage() {
+  if (_myPostsLoading || !_myPostsHasMore) return;
+  _myPostsLoading = true;
+  try {
+    let result = _myPostsPrefetch ? await _myPostsPrefetch : null;
+    _myPostsPrefetch = null;
+    if (!result) result = await invoke('get_all_posts_page', { beforeMs: _myPostsCursor, limit: 20 });
+    const mine = result.posts.filter(p => p.isMe && p.intentKind !== 'direct' && !(p.intentKind === 'unknown' && p.recipients && p.recipients.length > 0))
+      .filter(p => !_myPostsIds.has(p.id));
+    if (mine.length === 0) { _myPostsHasMore = false; _myPostsLoading = false; return; }
+
+    _myPostsCursor = result.oldestMs || null;
+    _myPostsHasMore = result.hasMore;
+    mine.forEach(p => _myPostsIds.add(p.id));
+
+    const oldSentinel = document.getElementById('myposts-scroll-sentinel');
+    if (oldSentinel) oldSentinel.remove();
+
+    const fragment = document.createDocumentFragment();
+    const temp = document.createElement('div');
+    temp.innerHTML = mine.map(renderPost).join('');
+    while (temp.firstChild) fragment.appendChild(temp.firstChild);
+
+    if (_myPostsHasMore) {
+      const sentinel = document.createElement('div');
+      sentinel.id = 'myposts-scroll-sentinel';
+      const newNodes = [...fragment.children];
+      const mid = Math.min(Math.floor(newNodes.length / 2), newNodes.length - 1);
+      if (newNodes[mid]) newNodes[mid].after(sentinel);
+    }
+
+    myPostsList.appendChild(fragment);
+    setupMyPostsScrollObserver();
+
+    if (_myPostsHasMore && _myPostsCursor) {
+      _myPostsPrefetch = invoke('get_all_posts_page', { beforeMs: _myPostsCursor, limit: 20 }).catch(() => null);
+    }
+  } catch (_) {} finally { _myPostsLoading = false; }
+}
+
+function setupMyPostsScrollObserver() {
+  if (_myPostsScrollObserver) _myPostsScrollObserver.disconnect();
+  const sentinel = document.getElementById('myposts-scroll-sentinel');
+  if (!sentinel) return;
+  _myPostsScrollObserver = new IntersectionObserver((entries) => {
+    if (entries[0].isIntersecting) appendMyPostsPage();
+  }, { rootMargin: '200px' });
+  _myPostsScrollObserver.observe(sentinel);
+}
+
+function setupMyPostsMediaObserver() {
+  if (_myPostsMediaObserver) _myPostsMediaObserver.disconnect();
+  _myPostsMediaObserver = new IntersectionObserver((entries) => {
+    for (const entry of entries) {
+      if (entry.isIntersecting && !entry.target.dataset.mediaLoaded) {
+        entry.target.dataset.mediaLoaded = '1';
+        loadPostMedia(entry.target);
+      }
+    }
+  }, { rootMargin: '400px' });
+  myPostsList.querySelectorAll('.post').forEach(post => _myPostsMediaObserver.observe(post));
+  const mutObs = new MutationObserver((mutations) => {
+    for (const m of mutations) {
+      for (const node of m.addedNodes) {
+        if (node.classList && node.classList.contains('post')) _myPostsMediaObserver.observe(node);
+      }
+    }
+  });
+  mutObs.observe(myPostsList, { childList: true });
+}
+
 async function loadMessages(force) {
   try {
     const [posts, follows] = await Promise.all([
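The prefetch handoff works the same on both tabs: the stored promise is awaited and immediately cleared, and a null result (the .catch(() => null) above swallows prefetch failures) falls through to a fresh invoke — so a dropped prefetch costs only latency, never a page.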
@@ -3401,6 +3571,18 @@ $('#export-btn').addEventListener('click', () => {
   try {
     const result = await invoke('export_data', { scope, outputDir });
     status.textContent = result;
+    // On mobile: extract file path from result and offer to save via SAF
+    const pathMatch = result.match(/:\s*(.+\.zip)/);
+    if (pathMatch) {
+      try {
+        status.textContent = 'Saving to device...';
+        const shareResult = await invoke('share_file', { filePath: pathMatch[1], mimeType: 'application/zip' });
+        status.textContent = shareResult === 'Cancelled' ? 'Export saved internally. ' + result : shareResult;
+      } catch (shareErr) {
+        // share_file not available (desktop) or failed — that's ok, file is in app dir
+        status.textContent = result;
+      }
+    }
     toast('Export complete!');
   } catch (e) {
     status.textContent = 'Error: ' + e;
@@ -3645,6 +3827,7 @@ async function init() {
   if (feedTab) feedTab.classList.add('active');
   document.getElementById('view-feed').classList.add('active');
   currentTab = 'feed';
+  loadFeed(true);
   _lastFeedViewMs = Date.now();
   updateTabBadge('feed', 0);
 });
@@ -3721,12 +3904,27 @@ async function init() {
 
   // Auto-refresh every 10 seconds — only the active tab
   const _initTime = Date.now();
-  setInterval(() => {
+  let _duplicateWarningShown = false;
+  setInterval(async () => {
     const startup = Date.now() - _initTime < 30000; // force during first 30s
     if (currentTab === 'feed') loadFeed(startup);
     if (currentTab === 'myposts') loadMyPosts(startup);
     if (currentTab === 'people') { loadFollows(); loadPeers(); loadAudience(); }
     updateNetworkIndicator();
+    // Check for duplicate identity (set by anchor during bootstrap)
+    if (!_duplicateWarningShown) {
+      try {
+        const info = await invoke('get_node_info');
+        if (info && info.duplicateDetected) {
+          _duplicateWarningShown = true;
+          const banner = document.createElement('div');
+          banner.style.cssText = 'position:fixed;top:0;left:0;right:0;background:#c0392b;color:#fff;padding:0.5rem;text-align:center;font-size:0.8rem;z-index:999;';
+          banner.textContent = 'This identity is active on another device. Sync paused to prevent data conflicts.';
+          document.body.prepend(banner);
+          toast('Duplicate identity detected — sync paused');
+        }
+      } catch (_) {}
+    }
   }, 10000);
 
   // Badge updates for non-active tabs — every 30 seconds (single IPC call)
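The duplicateDetected check above implies the DTO returned by get_node_info gained a matching field. A sketch of the assumed addition — the field name and serde camelCase renaming are inferred from the JS access, not taken from the actual source:

    #[derive(serde::Serialize)]
    #[serde(rename_all = "camelCase")]
    struct NodeInfo {
        /// true once the anchor reports this NodeId is already mesh-connected elsewhere
        duplicate_detected: bool,
        // ...existing fields (node id, addresses, peer counts, etc.)
    }

On the Rust side this would simply load the same AtomicBool that gates the background sync tasks.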