Export/Import: ZIP export with scope selection, import with public post merge
Export (export.rs): ZIP archive with auto-chunking at 4GB. Four scopes: identity only, posts only, posts+identity, everything (posts+key+follows+profiles+settings). Includes blobs. Manifest JSON tracks metadata. Import (import.rs): Read ZIP summary without importing (preview). Import public posts into current identity with new PostIds + original timestamps. Import as new identity (creates identity subdir from key). Uses spawn_blocking for ZIP I/O to avoid Send issues with ZipArchive. Tauri IPC: export_data, import_summary, import_public_posts, import_as_new_identity commands. IdentityManager.base_dir() getter. Frontend: Export wizard lightbox with scope radio buttons + output dir. Import wizard with ZIP path, preview summary, action selection. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
fb1e92985c
commit
8ef32e6df6
7 changed files with 786 additions and 3 deletions
|
|
@ -18,6 +18,7 @@ curve25519-dalek = { version = "=5.0.0-pre.1", features = ["rand_core", "zeroize
|
||||||
ed25519-dalek = { version = "=3.0.0-pre.1", features = ["rand_core", "zeroize"] }
|
ed25519-dalek = { version = "=3.0.0-pre.1", features = ["rand_core", "zeroize"] }
|
||||||
chacha20poly1305 = "0.10"
|
chacha20poly1305 = "0.10"
|
||||||
base64 = "0.22"
|
base64 = "0.22"
|
||||||
|
zip = { version = "2", default-features = false, features = ["deflate"] }
|
||||||
igd-next = { version = "0.16", features = ["tokio"] }
|
igd-next = { version = "0.16", features = ["tokio"] }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
|
|
|
||||||
319
crates/core/src/export.rs
Normal file
319
crates/core/src/export.rs
Normal file
|
|
@ -0,0 +1,319 @@
|
||||||
|
//! Export data as ZIP archives with auto-chunking at 4GB.
|
||||||
|
//!
|
||||||
|
//! Export scopes:
|
||||||
|
//! - IdentityOnly: just identity.key (tiny backup)
|
||||||
|
//! - PostsOnly: public posts + blobs (no key — safe to share)
|
||||||
|
//! - PostsWithIdentity: posts + blobs + identity.key (full migration)
|
||||||
|
//! - Everything: posts + blobs + key + follows + profiles + settings (complete backup)
|
||||||
|
|
||||||
|
use std::io::Write;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use tracing::info;
|
||||||
|
|
||||||
|
use crate::blob::BlobStore;
|
||||||
|
use crate::storage::StoragePool;
|
||||||
|
use crate::types::NodeId;
|
||||||
|
|
||||||
|
/// Maximum bytes per ZIP chunk (4 GB).
|
||||||
|
const CHUNK_MAX_BYTES: u64 = 4 * 1024 * 1024 * 1024;
|
||||||
|
|
||||||
|
/// What to include in the export.
///
/// Serialized in snake_case so the variants line up with the scope strings
/// the frontend sends over IPC ("identity_only", "posts_only", …).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ExportScope {
    /// Just the identity key — a tiny key-only backup.
    IdentityOnly,
    /// Posts + blobs, no key — safe to share.
    PostsOnly,
    /// Posts + blobs + identity key — enough for a full migration.
    PostsWithIdentity,
    /// Posts + blobs + key + follows + profiles + settings (complete backup).
    Everything,
}
|
||||||
|
|
||||||
|
/// Manifest embedded in each ZIP chunk (at `itsgoin-export/manifest.json`).
#[derive(Debug, Serialize, Deserialize)]
pub struct ExportManifest {
    /// Format version of the export layout (written as 1 by `export_data`).
    pub version: u32,
    /// Scope the export was produced with.
    pub scope: ExportScope,
    /// Hex-encoded NodeId of the exporting identity.
    pub node_id: String,
    /// Export time in milliseconds since the Unix epoch.
    pub export_date: u64,
    /// 0-based index of this chunk within the export.
    pub chunk_index: u32,
    /// Total chunk count. NOTE(review): `export_data` writes 1 here with an
    /// "updated later" comment, but no code visibly rewrites it for
    /// multi-chunk exports — confirm before relying on this field.
    pub total_chunks: u32,
    /// Number of posts in the export (0 in continuation chunks).
    pub post_count: usize,
    /// Number of blob CIDs referenced by the export (0 in continuation chunks).
    pub blob_count: usize,
}
|
||||||
|
|
||||||
|
/// Exported post with visibility and header, as stored in `posts.json`.
#[derive(Debug, Serialize, Deserialize)]
pub struct ExportedPost {
    /// Hex-encoded post id.
    pub id: String,
    /// Hex-encoded NodeId of the author.
    pub author: String,
    /// Post body text.
    pub content: String,
    /// JSON-encoded attachment list (serialized from `post.attachments`).
    pub attachments_json: String,
    /// Original post time in milliseconds since the Unix epoch.
    pub timestamp_ms: u64,
    /// JSON-encoded `PostVisibility`.
    pub visibility_json: String,
    /// Blob header JSON, when the storage layer has one for this post.
    pub header_json: Option<String>,
    /// Debug-formatted post intent, when present.
    pub intent: Option<String>,
}
|
||||||
|
|
||||||
|
/// Result of an export operation.
pub struct ExportResult {
    /// Paths of every ZIP chunk written.
    pub paths: Vec<PathBuf>,
    /// Number of posts included in the export.
    pub post_count: usize,
    /// Number of blobs actually written into the archive(s).
    pub blob_count: usize,
}
|
||||||
|
|
||||||
|
/// Run an export to the given output directory.
|
||||||
|
pub async fn export_data(
|
||||||
|
data_dir: &Path,
|
||||||
|
storage: &StoragePool,
|
||||||
|
blob_store: &BlobStore,
|
||||||
|
node_id: &NodeId,
|
||||||
|
scope: ExportScope,
|
||||||
|
output_dir: &Path,
|
||||||
|
) -> anyhow::Result<ExportResult> {
|
||||||
|
std::fs::create_dir_all(output_dir)?;
|
||||||
|
|
||||||
|
let node_id_hex = hex::encode(node_id);
|
||||||
|
let now = now_ms();
|
||||||
|
|
||||||
|
// Gather data based on scope
|
||||||
|
let identity_key = if scope != ExportScope::PostsOnly {
|
||||||
|
let key_path = data_dir.join("identity.key");
|
||||||
|
if key_path.exists() {
|
||||||
|
Some(std::fs::read(&key_path)?)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
let (posts, blob_cids) = if scope == ExportScope::IdentityOnly {
|
||||||
|
(vec![], vec![])
|
||||||
|
} else {
|
||||||
|
gather_posts(storage, node_id).await?
|
||||||
|
};
|
||||||
|
|
||||||
|
let (follows, profiles, settings) = if scope == ExportScope::Everything {
|
||||||
|
gather_extras(storage).await?
|
||||||
|
} else {
|
||||||
|
(None, None, None)
|
||||||
|
};
|
||||||
|
|
||||||
|
let post_count = posts.len();
|
||||||
|
|
||||||
|
// Build the ZIP
|
||||||
|
let mut zip_paths: Vec<PathBuf> = Vec::new();
|
||||||
|
let mut chunk_index: u32 = 0;
|
||||||
|
let mut current_size: u64 = 0;
|
||||||
|
let base_name = format!("itsgoin-export-{}", &node_id_hex[..8]);
|
||||||
|
|
||||||
|
let chunk_path = |idx: u32| -> PathBuf {
|
||||||
|
if idx == 0 {
|
||||||
|
output_dir.join(format!("{}.zip", base_name))
|
||||||
|
} else {
|
||||||
|
output_dir.join(format!("{}.part{}.zip", base_name, idx + 1))
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let path = chunk_path(chunk_index);
|
||||||
|
let file = std::fs::File::create(&path)?;
|
||||||
|
let mut zip = zip::ZipWriter::new(file);
|
||||||
|
let options = zip::write::SimpleFileOptions::default()
|
||||||
|
.compression_method(zip::CompressionMethod::Deflated);
|
||||||
|
|
||||||
|
// Write manifest (placeholder — updated at end)
|
||||||
|
let manifest = ExportManifest {
|
||||||
|
version: 1,
|
||||||
|
scope,
|
||||||
|
node_id: node_id_hex.clone(),
|
||||||
|
export_date: now,
|
||||||
|
chunk_index: 0,
|
||||||
|
total_chunks: 1, // updated later if chunked
|
||||||
|
post_count,
|
||||||
|
blob_count: blob_cids.len(),
|
||||||
|
};
|
||||||
|
let manifest_json = serde_json::to_string_pretty(&manifest)?;
|
||||||
|
zip.start_file("itsgoin-export/manifest.json", options)?;
|
||||||
|
zip.write_all(manifest_json.as_bytes())?;
|
||||||
|
current_size += manifest_json.len() as u64;
|
||||||
|
|
||||||
|
// Identity key
|
||||||
|
if let Some(ref key) = identity_key {
|
||||||
|
zip.start_file("itsgoin-export/identity.key", options)?;
|
||||||
|
zip.write_all(key)?;
|
||||||
|
current_size += key.len() as u64;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Posts
|
||||||
|
if !posts.is_empty() {
|
||||||
|
let posts_json = serde_json::to_string_pretty(&posts)?;
|
||||||
|
zip.start_file("itsgoin-export/posts.json", options)?;
|
||||||
|
zip.write_all(posts_json.as_bytes())?;
|
||||||
|
current_size += posts_json.len() as u64;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Follows, profiles, settings
|
||||||
|
if let Some(ref data) = follows {
|
||||||
|
let json = serde_json::to_string_pretty(data)?;
|
||||||
|
zip.start_file("itsgoin-export/follows.json", options)?;
|
||||||
|
zip.write_all(json.as_bytes())?;
|
||||||
|
current_size += json.len() as u64;
|
||||||
|
}
|
||||||
|
if let Some(ref data) = profiles {
|
||||||
|
let json = serde_json::to_string_pretty(data)?;
|
||||||
|
zip.start_file("itsgoin-export/profiles.json", options)?;
|
||||||
|
zip.write_all(json.as_bytes())?;
|
||||||
|
current_size += json.len() as u64;
|
||||||
|
}
|
||||||
|
if let Some(ref data) = settings {
|
||||||
|
let json = serde_json::to_string_pretty(data)?;
|
||||||
|
zip.start_file("itsgoin-export/settings.json", options)?;
|
||||||
|
zip.write_all(json.as_bytes())?;
|
||||||
|
current_size += json.len() as u64;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Blobs
|
||||||
|
let mut blob_count = 0usize;
|
||||||
|
for cid in &blob_cids {
|
||||||
|
if let Some(data) = blob_store.get(cid)? {
|
||||||
|
// Check if we need to start a new chunk
|
||||||
|
if current_size + data.len() as u64 > CHUNK_MAX_BYTES && blob_count > 0 {
|
||||||
|
zip.finish()?;
|
||||||
|
zip_paths.push(path.clone());
|
||||||
|
chunk_index += 1;
|
||||||
|
let new_path = chunk_path(chunk_index);
|
||||||
|
let new_file = std::fs::File::create(&new_path)?;
|
||||||
|
zip = zip::ZipWriter::new(new_file);
|
||||||
|
current_size = 0;
|
||||||
|
// Continuation manifest
|
||||||
|
let cont_manifest = ExportManifest {
|
||||||
|
version: 1,
|
||||||
|
scope,
|
||||||
|
node_id: node_id_hex.clone(),
|
||||||
|
export_date: now,
|
||||||
|
chunk_index,
|
||||||
|
total_chunks: 0, // unknown yet
|
||||||
|
post_count: 0,
|
||||||
|
blob_count: 0,
|
||||||
|
};
|
||||||
|
let cont_json = serde_json::to_string_pretty(&cont_manifest)?;
|
||||||
|
zip.start_file("itsgoin-export/manifest.json", options)?;
|
||||||
|
zip.write_all(cont_json.as_bytes())?;
|
||||||
|
current_size += cont_json.len() as u64;
|
||||||
|
}
|
||||||
|
|
||||||
|
let cid_hex = hex::encode(cid);
|
||||||
|
zip.start_file(format!("itsgoin-export/blobs/{}", cid_hex), options)?;
|
||||||
|
zip.write_all(&data)?;
|
||||||
|
current_size += data.len() as u64;
|
||||||
|
blob_count += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
zip.finish()?;
|
||||||
|
zip_paths.push(chunk_path(chunk_index));
|
||||||
|
|
||||||
|
info!(
|
||||||
|
posts = post_count,
|
||||||
|
blobs = blob_count,
|
||||||
|
chunks = zip_paths.len(),
|
||||||
|
scope = ?scope,
|
||||||
|
"Export complete"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(ExportResult {
|
||||||
|
paths: zip_paths,
|
||||||
|
post_count,
|
||||||
|
blob_count,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Gather own posts and their blob CIDs.
|
||||||
|
async fn gather_posts(
|
||||||
|
storage: &StoragePool,
|
||||||
|
node_id: &NodeId,
|
||||||
|
) -> anyhow::Result<(Vec<ExportedPost>, Vec<[u8; 32]>)> {
|
||||||
|
let s = storage.get().await;
|
||||||
|
let posts_with_vis = s.list_posts_with_visibility()?;
|
||||||
|
let mut exported = Vec::new();
|
||||||
|
let mut blob_cids = Vec::new();
|
||||||
|
|
||||||
|
for (id, post, vis) in &posts_with_vis {
|
||||||
|
// Only export our own posts
|
||||||
|
if post.author != *node_id {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let header = s.get_blob_header(id).ok().flatten().map(|(json, _)| json);
|
||||||
|
let intent = s.get_post_intent(id).ok().flatten().map(|i| format!("{:?}", i));
|
||||||
|
|
||||||
|
exported.push(ExportedPost {
|
||||||
|
id: hex::encode(id),
|
||||||
|
author: hex::encode(post.author),
|
||||||
|
content: post.content.clone(),
|
||||||
|
attachments_json: serde_json::to_string(&post.attachments).unwrap_or_default(),
|
||||||
|
timestamp_ms: post.timestamp_ms,
|
||||||
|
visibility_json: serde_json::to_string(vis).unwrap_or_default(),
|
||||||
|
header_json: header,
|
||||||
|
intent,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Collect blob CIDs from attachments
|
||||||
|
for att in &post.attachments {
|
||||||
|
if !blob_cids.contains(&att.cid) {
|
||||||
|
blob_cids.push(att.cid);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok((exported, blob_cids))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Gather follows, profiles, and settings for "Everything" export.
|
||||||
|
async fn gather_extras(
|
||||||
|
storage: &StoragePool,
|
||||||
|
) -> anyhow::Result<(
|
||||||
|
Option<Vec<String>>, // follows (hex node_ids)
|
||||||
|
Option<Vec<serde_json::Value>>, // profiles
|
||||||
|
Option<Vec<(String, String)>>, // settings (key, value)
|
||||||
|
)> {
|
||||||
|
let s = storage.get().await;
|
||||||
|
|
||||||
|
// Follows
|
||||||
|
let follows: Vec<String> = s.list_follows()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.into_iter()
|
||||||
|
.map(|nid| hex::encode(nid))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Profiles
|
||||||
|
let profiles: Vec<serde_json::Value> = s.list_profiles()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.into_iter()
|
||||||
|
.map(|p| serde_json::to_value(&p).unwrap_or_default())
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Settings — gather all known keys
|
||||||
|
let setting_keys = ["text_size", "notif_messages", "notif_posts", "notif_nearby",
|
||||||
|
"notif_reacts", "cache_size_bytes", "public_visible"];
|
||||||
|
let mut settings_vec = Vec::new();
|
||||||
|
for key in &setting_keys {
|
||||||
|
if let Some(val) = s.get_setting(key).ok().flatten() {
|
||||||
|
settings_vec.push((key.to_string(), val));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
if follows.is_empty() { None } else { Some(follows) },
|
||||||
|
if profiles.is_empty() { None } else { Some(profiles) },
|
||||||
|
if settings_vec.is_empty() { None } else { Some(settings_vec) },
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Current wall-clock time in milliseconds since the Unix epoch
/// (0 if the system clock reads before the epoch).
fn now_ms() -> u64 {
    std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .map(|d| d.as_millis() as u64)
        .unwrap_or(0)
}
|
||||||
|
|
@ -100,6 +100,11 @@ impl IdentityManager {
|
||||||
Ok(mgr)
|
Ok(mgr)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
    /// Get the base data directory.
    ///
    /// Root directory for all app data; callers (e.g. the import IPC)
    /// join `identities/<node_id>` onto it to place per-identity subdirs.
    pub fn base_dir(&self) -> &Path {
        &self.base_dir
    }
|
||||||
|
|
||||||
/// Get the currently active Node, if any.
|
/// Get the currently active Node, if any.
|
||||||
pub fn active_node(&self) -> Option<&Arc<Node>> {
|
pub fn active_node(&self) -> Option<&Arc<Node>> {
|
||||||
self.active_node.as_ref()
|
self.active_node.as_ref()
|
||||||
|
|
|
||||||
249
crates/core/src/import.rs
Normal file
249
crates/core/src/import.rs
Normal file
|
|
@ -0,0 +1,249 @@
|
||||||
|
//! Import data from ZIP archives exported by the export module.
|
||||||
|
//!
|
||||||
|
//! Import actions:
|
||||||
|
//! - AddAsIdentity: create a new identity from the export's key + data
|
||||||
|
//! - ImportPublicPosts: import only public posts into the current identity (new PostIds)
|
||||||
|
//! - MergeWithKey: decrypt encrypted posts using provided key, re-encrypt for current identity
|
||||||
|
|
||||||
|
use std::io::Read;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use tracing::{debug, info, warn};
|
||||||
|
|
||||||
|
use crate::blob::BlobStore;
|
||||||
|
use crate::content::compute_post_id;
|
||||||
|
use crate::export::{ExportManifest, ExportedPost};
|
||||||
|
use crate::storage::StoragePool;
|
||||||
|
use crate::types::{Attachment, NodeId, Post, PostVisibility};
|
||||||
|
|
||||||
|
/// What to do with the imported data.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ImportAction {
    /// Create a new identity from the export's key and restore all data.
    AddAsIdentity,
    /// Import public posts into the current identity with new PostIds.
    ImportPublicPosts,
    /// Decrypt with the provided key, re-create posts under current identity.
    /// NOTE(review): no handler for this variant is visible in this file —
    /// confirm it is implemented elsewhere before exposing it in the UI.
    MergeWithKey { key_hex: String },
}
|
||||||
|
|
||||||
|
/// Summary of what an import ZIP contains (shown to user before importing).
#[derive(Debug, Serialize, Deserialize)]
pub struct ImportSummary {
    /// Hex node id from the export manifest.
    pub node_id: String,
    /// Debug-formatted `ExportScope` (e.g. "PostsOnly").
    pub scope: String,
    /// Export time in milliseconds since the Unix epoch.
    pub export_date: u64,
    /// Post count as reported by the manifest.
    pub post_count: usize,
    /// Blob count as reported by the manifest.
    pub blob_count: usize,
    /// True when the archive contains `identity.key`.
    pub has_identity_key: bool,
    /// True when the archive contains `follows.json`.
    pub has_follows: bool,
    /// True when the archive contains `settings.json`.
    pub has_settings: bool,
}
|
||||||
|
|
||||||
|
/// Result of an import operation.
#[derive(Debug, Serialize, Deserialize)]
pub struct ImportResult {
    /// Posts actually stored (already-present duplicates are not counted).
    pub posts_imported: usize,
    /// Posts skipped during parsing (non-public visibility).
    pub posts_skipped: usize,
    /// Blobs newly written to the blob store.
    pub blobs_imported: usize,
    /// Human-readable summary for the UI.
    pub message: String,
}
|
||||||
|
|
||||||
|
/// Read a ZIP and return a summary of its contents (without importing).
|
||||||
|
pub fn read_import_summary(zip_path: &Path) -> anyhow::Result<ImportSummary> {
|
||||||
|
let file = std::fs::File::open(zip_path)?;
|
||||||
|
let mut archive = zip::ZipArchive::new(file)?;
|
||||||
|
|
||||||
|
// Read manifest
|
||||||
|
let manifest: ExportManifest = {
|
||||||
|
let mut entry = archive.by_name("itsgoin-export/manifest.json")?;
|
||||||
|
let mut buf = String::new();
|
||||||
|
entry.read_to_string(&mut buf)?;
|
||||||
|
serde_json::from_str(&buf)?
|
||||||
|
};
|
||||||
|
|
||||||
|
let has_key = archive.by_name("itsgoin-export/identity.key").is_ok();
|
||||||
|
let has_follows = archive.by_name("itsgoin-export/follows.json").is_ok();
|
||||||
|
let has_settings = archive.by_name("itsgoin-export/settings.json").is_ok();
|
||||||
|
|
||||||
|
Ok(ImportSummary {
|
||||||
|
node_id: manifest.node_id,
|
||||||
|
scope: format!("{:?}", manifest.scope),
|
||||||
|
export_date: manifest.export_date,
|
||||||
|
post_count: manifest.post_count,
|
||||||
|
blob_count: manifest.blob_count,
|
||||||
|
has_identity_key: has_key,
|
||||||
|
has_follows,
|
||||||
|
has_settings,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parsed data from a ZIP ready for async import.
struct ParsedImport {
    /// Public posts to store: (post rewritten for the importing author,
    /// original visibility, attachment blobs found in the archive).
    posts: Vec<(Post, PostVisibility, Vec<(Attachment, Vec<u8>)>)>,
    /// Count of non-public posts skipped during parsing.
    skipped: usize,
}
|
||||||
|
|
||||||
|
/// Import public posts from a ZIP into the current identity.
/// Creates new posts with the current node_id as author, preserving original timestamps.
///
/// Two-phase design: all ZIP reading happens in `spawn_blocking` (ZipArchive
/// is not Send, so it must not cross an await point), then the parsed data
/// is stored via the async storage pool.
pub async fn import_public_posts(
    zip_path: &Path,
    storage: &StoragePool,
    blob_store: &BlobStore,
    our_node_id: &NodeId,
) -> anyhow::Result<ImportResult> {
    // Phase 1: Read everything from ZIP synchronously (no Send requirement)
    let parsed = {
        // Owned copies so the 'static blocking closure can take them by move.
        let zip_path = zip_path.to_path_buf();
        let our_node_id = *our_node_id;
        tokio::task::spawn_blocking(move || -> anyhow::Result<ParsedImport> {
            let file = std::fs::File::open(&zip_path)?;
            let mut archive = zip::ZipArchive::new(file)?;

            // posts.json is required; a ZIP without it fails the import here.
            let posts: Vec<ExportedPost> = {
                let mut entry = archive.by_name("itsgoin-export/posts.json")?;
                let mut buf = String::new();
                entry.read_to_string(&mut buf)?;
                serde_json::from_str(&buf)?
            };

            let mut result_posts = Vec::new();
            let mut skipped = 0usize;

            for ep in &posts {
                // Unparseable visibility defaults to Public — NOTE(review):
                // confirm this permissive fallback is intended.
                let vis: PostVisibility = serde_json::from_str(&ep.visibility_json).unwrap_or(PostVisibility::Public);
                if !matches!(vis, PostVisibility::Public) {
                    skipped += 1;
                    continue;
                }

                let attachments: Vec<Attachment> = serde_json::from_str(&ep.attachments_json).unwrap_or_default();
                // Re-author the post as us; keeping the original timestamp
                // means compute_post_id below yields a stable, de-dupable id.
                let new_post = Post {
                    author: our_node_id,
                    content: ep.content.clone(),
                    attachments: attachments.clone(),
                    timestamp_ms: ep.timestamp_ms,
                };

                // Read blob data from archive; missing blob entries are
                // silently tolerated (the post is kept without that blob).
                let mut blob_data = Vec::new();
                for att in &attachments {
                    let cid_hex = hex::encode(att.cid);
                    let blob_path = format!("itsgoin-export/blobs/{}", cid_hex);
                    if let Ok(mut blob_entry) = archive.by_name(&blob_path) {
                        let mut data = Vec::new();
                        blob_entry.read_to_end(&mut data)?;
                        blob_data.push((att.clone(), data));
                    }
                }

                result_posts.push((new_post, vis, blob_data));
            }

            Ok(ParsedImport { posts: result_posts, skipped })
        }).await??
    };

    // Phase 2: Store to DB + blob store (async — needs storage.get().await)
    let mut imported = 0usize;
    let mut blobs_imported = 0usize;

    for (new_post, _vis, blob_data) in &parsed.posts {
        let new_id = compute_post_id(new_post);

        let s = storage.get().await;
        if s.get_post(&new_id).ok().flatten().is_some() {
            continue; // duplicate
        }
        s.store_post_with_visibility(&new_id, new_post, &PostVisibility::Public)?;
        // Release the storage handle before the blob loop re-acquires it.
        drop(s);

        for (att, data) in blob_data {
            if !blob_store.has(&att.cid) {
                blob_store.store(&att.cid, data)?;
                let s = storage.get().await;
                // Best-effort bookkeeping; a record failure doesn't abort.
                let _ = s.record_blob(&att.cid, &new_id, our_node_id, data.len() as u64, &att.mime_type, att.size_bytes);
                blobs_imported += 1;
            }
        }

        imported += 1;
    }

    info!(imported, skipped = parsed.skipped, blobs = blobs_imported, "Public post import complete");

    Ok(ImportResult {
        posts_imported: imported,
        posts_skipped: parsed.skipped,
        blobs_imported,
        message: format!("Imported {} posts ({} skipped), {} blobs", imported, parsed.skipped, blobs_imported),
    })
}
|
||||||
|
|
||||||
|
/// Import a ZIP as a new identity (create identity subdir, extract everything).
|
||||||
|
pub fn import_as_identity(
|
||||||
|
zip_path: &Path,
|
||||||
|
base_dir: &Path,
|
||||||
|
) -> anyhow::Result<String> {
|
||||||
|
let file = std::fs::File::open(zip_path)?;
|
||||||
|
let mut archive = zip::ZipArchive::new(file)?;
|
||||||
|
|
||||||
|
// Read manifest
|
||||||
|
let manifest: ExportManifest = {
|
||||||
|
let mut entry = archive.by_name("itsgoin-export/manifest.json")?;
|
||||||
|
let mut buf = String::new();
|
||||||
|
entry.read_to_string(&mut buf)?;
|
||||||
|
serde_json::from_str(&buf)?
|
||||||
|
};
|
||||||
|
|
||||||
|
// Read identity key
|
||||||
|
let key_data = {
|
||||||
|
let mut entry = archive.by_name("itsgoin-export/identity.key")
|
||||||
|
.map_err(|_| anyhow::anyhow!("Export doesn't contain an identity key"))?;
|
||||||
|
let mut buf = Vec::new();
|
||||||
|
entry.read_to_end(&mut buf)?;
|
||||||
|
buf
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create identity directory
|
||||||
|
let id_dir = base_dir.join("identities").join(&manifest.node_id);
|
||||||
|
if id_dir.exists() {
|
||||||
|
anyhow::bail!("Identity {} already exists", &manifest.node_id[..12]);
|
||||||
|
}
|
||||||
|
std::fs::create_dir_all(&id_dir)?;
|
||||||
|
|
||||||
|
// Write identity key
|
||||||
|
let key_path = id_dir.join("identity.key");
|
||||||
|
std::fs::write(&key_path, &key_data)?;
|
||||||
|
#[cfg(unix)]
|
||||||
|
{
|
||||||
|
use std::os::unix::fs::PermissionsExt;
|
||||||
|
let _ = std::fs::set_permissions(&key_path, std::fs::Permissions::from_mode(0o600));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write metadata
|
||||||
|
let now = now_ms();
|
||||||
|
let meta = serde_json::json!({
|
||||||
|
"display_name": format!("Imported {}", &manifest.node_id[..12]),
|
||||||
|
"created_at": now,
|
||||||
|
"last_used_at": now,
|
||||||
|
});
|
||||||
|
std::fs::write(id_dir.join("meta.json"), serde_json::to_string_pretty(&meta)?)?;
|
||||||
|
|
||||||
|
info!(identity = manifest.node_id, "Imported identity from ZIP — switch to it to restore data");
|
||||||
|
|
||||||
|
// Note: posts, blobs, follows, settings will be restored when the user switches to this
|
||||||
|
// identity and opens the node. The full DB restore could be done here, but it's simpler
|
||||||
|
// to let the user switch and then import posts via the import wizard.
|
||||||
|
|
||||||
|
Ok(manifest.node_id)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Current wall-clock time in milliseconds since the Unix epoch
/// (0 if the system clock reads before the epoch).
fn now_ms() -> u64 {
    use std::time::{SystemTime, UNIX_EPOCH};
    match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_millis() as u64,
        Err(_) => 0,
    }
}
|
||||||
|
|
@ -4,7 +4,9 @@ pub mod connection;
|
||||||
pub mod content;
|
pub mod content;
|
||||||
pub mod crypto;
|
pub mod crypto;
|
||||||
pub mod http;
|
pub mod http;
|
||||||
|
pub mod export;
|
||||||
pub mod identity;
|
pub mod identity;
|
||||||
|
pub mod import;
|
||||||
pub mod network;
|
pub mod network;
|
||||||
pub mod node;
|
pub mod node;
|
||||||
pub mod protocol;
|
pub mod protocol;
|
||||||
|
|
|
||||||
|
|
@ -2146,6 +2146,76 @@ async fn get_active_identity(state: State<'_, AppIdentity>) -> Result<Option<Ide
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// --- Export/Import IPC ---
|
||||||
|
|
||||||
|
/// Tauri IPC: run an export with the given scope into `output_dir`.
///
/// `scope` must be one of the snake_case strings the frontend radio buttons
/// emit: "identity_only" | "posts_only" | "posts_with_identity" | "everything".
/// Returns a human-readable summary string for the export dialog status line.
#[tauri::command]
async fn export_data(
    state: State<'_, AppNode>,
    scope: String,
    output_dir: String,
) -> Result<String, String> {
    let node = get_node(&state).await;
    // Map the wire string onto the core enum; reject anything unknown.
    let export_scope = match scope.as_str() {
        "identity_only" => itsgoin_core::export::ExportScope::IdentityOnly,
        "posts_only" => itsgoin_core::export::ExportScope::PostsOnly,
        "posts_with_identity" => itsgoin_core::export::ExportScope::PostsWithIdentity,
        "everything" => itsgoin_core::export::ExportScope::Everything,
        _ => return Err("Invalid scope".to_string()),
    };
    let result = itsgoin_core::export::export_data(
        &node.data_dir,
        &node.storage,
        &node.blob_store,
        &node.node_id,
        export_scope,
        std::path::Path::new(&output_dir),
    ).await.map_err(|e| e.to_string())?;

    // Flatten the chunk paths into the summary message.
    let paths: Vec<String> = result.paths.iter()
        .map(|p| p.to_string_lossy().to_string())
        .collect();
    Ok(format!("Exported {} posts, {} blobs to {} file(s): {}",
        result.post_count, result.blob_count, paths.len(),
        paths.join(", ")))
}
|
||||||
|
|
||||||
|
/// Tauri IPC: read an export ZIP and return its `ImportSummary` as a JSON
/// string (used by the import wizard preview before the user commits).
#[tauri::command]
async fn import_summary(zip_path: String) -> Result<String, String> {
    let summary = itsgoin_core::import::read_import_summary(std::path::Path::new(&zip_path))
        .map_err(|e| e.to_string())?;
    serde_json::to_string(&summary).map_err(|e| e.to_string())
}
|
||||||
|
|
||||||
|
/// Tauri IPC: import public posts from an export ZIP into the active
/// identity; returns the human-readable result message.
#[tauri::command]
async fn import_public_posts(
    state: State<'_, AppNode>,
    zip_path: String,
) -> Result<String, String> {
    let node = get_node(&state).await;
    let result = itsgoin_core::import::import_public_posts(
        std::path::Path::new(&zip_path),
        &node.storage,
        &node.blob_store,
        &node.node_id,
    ).await.map_err(|e| e.to_string())?;
    Ok(result.message)
}
|
||||||
|
|
||||||
|
#[tauri::command]
|
||||||
|
async fn import_as_new_identity(
|
||||||
|
state: State<'_, AppIdentity>,
|
||||||
|
zip_path: String,
|
||||||
|
) -> Result<String, String> {
|
||||||
|
let mgr = state.lock().await;
|
||||||
|
let base_dir = mgr.base_dir().to_path_buf();
|
||||||
|
drop(mgr);
|
||||||
|
let node_id = itsgoin_core::import::import_as_identity(
|
||||||
|
std::path::Path::new(&zip_path),
|
||||||
|
&base_dir,
|
||||||
|
).map_err(|e| e.to_string())?;
|
||||||
|
Ok(format!("Identity {} imported — switch to it in Settings", &node_id[..12]))
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg_attr(mobile, tauri::mobile_entry_point)]
|
#[cfg_attr(mobile, tauri::mobile_entry_point)]
|
||||||
pub fn run() {
|
pub fn run() {
|
||||||
tracing_subscriber::fmt()
|
tracing_subscriber::fmt()
|
||||||
|
|
@ -2334,6 +2404,10 @@ pub fn run() {
|
||||||
delete_identity,
|
delete_identity,
|
||||||
import_identity_key,
|
import_identity_key,
|
||||||
get_active_identity,
|
get_active_identity,
|
||||||
|
export_data,
|
||||||
|
import_summary,
|
||||||
|
import_public_posts,
|
||||||
|
import_as_new_identity,
|
||||||
])
|
])
|
||||||
.build(tauri::generate_context!())
|
.build(tauri::generate_context!())
|
||||||
.expect("error while building tauri application")
|
.expect("error while building tauri application")
|
||||||
|
|
|
||||||
139
frontend/app.js
139
frontend/app.js
|
|
@ -3310,9 +3310,142 @@ $('#import-identity-btn').addEventListener('click', () => {
|
||||||
overlay.addEventListener('click', (e) => { if (e.target === overlay) overlay.remove(); });
|
overlay.addEventListener('click', (e) => { if (e.target === overlay) overlay.remove(); });
|
||||||
});
|
});
|
||||||
|
|
||||||
// Export wizard: lightbox with scope radio buttons + output dir, invoking the
// `export_data` Tauri command and showing its summary in the status line.
$('#export-btn').addEventListener('click', () => {
  const overlay = document.createElement('div');
  overlay.className = 'image-lightbox';
  overlay.style.cursor = 'default';
  overlay.innerHTML = `
    <div style="background:#1a1a2e;border:1px solid #333;border-radius:12px;padding:1.5rem;max-width:400px;width:90%;text-align:center">
      <h3 style="color:#7fdbca;margin:0 0 0.75rem">Export Data</h3>
      <p style="font-size:0.75rem;color:#888;margin-bottom:0.75rem">Choose what to include in the export ZIP.</p>
      <div style="display:flex;flex-direction:column;gap:0.5rem;text-align:left;margin-bottom:1rem">
        <label class="checkbox-label"><input type="radio" name="export-scope" value="identity_only" /> Identity key only (tiny backup)</label>
        <label class="checkbox-label"><input type="radio" name="export-scope" value="posts_only" /> Posts + media (no key — safe to share)</label>
        <label class="checkbox-label"><input type="radio" name="export-scope" value="posts_with_identity" checked /> Posts + media + identity key</label>
        <label class="checkbox-label"><input type="radio" name="export-scope" value="everything" /> Everything (posts, key, follows, settings)</label>
      </div>
      <div style="margin-bottom:0.75rem">
        <label style="font-size:0.75rem;color:#888">Save to folder:</label>
        <input id="export-output-dir" type="text" value="Downloads" style="width:100%;margin-top:0.25rem;font-size:0.8rem" />
        <p style="font-size:0.65rem;color:#555;margin-top:0.2rem">Relative to your home directory, or absolute path</p>
      </div>
      <div style="display:flex;gap:0.5rem;justify-content:center">
        <button class="btn btn-primary btn-sm" id="export-go">Export</button>
        <button class="btn btn-ghost btn-sm" id="export-cancel">Cancel</button>
      </div>
      <div id="export-status" style="margin-top:0.5rem;font-size:0.75rem;color:#888"></div>
    </div>`;
  document.body.appendChild(overlay);

  overlay.querySelector('#export-go').addEventListener('click', async () => {
    const scope = overlay.querySelector('input[name="export-scope"]:checked')?.value;
    if (!scope) { toast('Select a scope'); return; }
    // BUGFIX: removed a dead no-op branch (`if (!outputDir.startsWith('/')) {
    // outputDir = outputDir; }`). The path string is passed to the backend
    // as-is; resolving relative paths is the backend's job.
    const outputDir = overlay.querySelector('#export-output-dir').value.trim() || 'Downloads';
    const status = overlay.querySelector('#export-status');
    status.textContent = 'Exporting...';
    overlay.querySelector('#export-go').disabled = true;
    try {
      const result = await invoke('export_data', { scope, outputDir });
      status.textContent = result;
      toast('Export complete!');
    } catch (e) {
      status.textContent = 'Error: ' + e;
      toast('Export failed: ' + e);
    } finally {
      overlay.querySelector('#export-go').disabled = false;
    }
  });
  overlay.querySelector('#export-cancel').addEventListener('click', () => overlay.remove());
  overlay.addEventListener('click', (e) => { if (e.target === overlay) overlay.remove(); });
});
|
||||||
|
|
||||||
|
// Import wizard
|
||||||
|
// Import wizard: prompt for an export ZIP path, preview its contents via
// `import_summary` (no data is written during preview), then run the chosen
// import action: add the export as a new identity, or merge its public
// posts into the current identity.
$('#import-btn').addEventListener('click', () => {
    const overlay = document.createElement('div');
    overlay.className = 'image-lightbox';
    overlay.style.cursor = 'default';
    overlay.innerHTML = `
<div style="background:#1a1a2e;border:1px solid #333;border-radius:12px;padding:1.5rem;max-width:420px;width:90%;text-align:center">
<h3 style="color:#7fdbca;margin:0 0 0.75rem">Import Data</h3>
<p style="font-size:0.75rem;color:#888;margin-bottom:0.75rem">Enter the path to an ItsGoin export ZIP file.</p>
<input id="import-zip-path" type="text" placeholder="/path/to/itsgoin-export.zip" style="width:100%;margin-bottom:0.75rem;font-size:0.8rem" />
<div id="import-summary-box" style="display:none;text-align:left;background:#111;border-radius:8px;padding:0.75rem;margin-bottom:0.75rem;font-size:0.75rem"></div>
<div id="import-action-box" style="display:none;text-align:left;margin-bottom:0.75rem">
<label class="checkbox-label" style="font-size:0.8rem"><input type="radio" name="import-action" value="add_identity" /> Add as new identity (requires key in export)</label>
<label class="checkbox-label" style="font-size:0.8rem"><input type="radio" name="import-action" value="import_posts" checked /> Import public posts into current identity</label>
</div>
<div style="display:flex;gap:0.5rem;justify-content:center">
<button class="btn btn-ghost btn-sm" id="import-preview">Preview</button>
<button class="btn btn-primary btn-sm" id="import-go" style="display:none">Import</button>
<button class="btn btn-ghost btn-sm" id="import-cancel">Cancel</button>
</div>
<div id="import-status" style="margin-top:0.5rem;font-size:0.75rem;color:#888"></div>
</div>`;
    document.body.appendChild(overlay);

    // Preview: read the archive's manifest and show a summary so the user
    // can pick an action before anything is imported.
    overlay.querySelector('#import-preview').addEventListener('click', async () => {
        const zipPath = overlay.querySelector('#import-zip-path').value.trim();
        if (!zipPath) { toast('Enter a ZIP path'); return; }
        const status = overlay.querySelector('#import-status');
        status.textContent = 'Reading...';
        try {
            const summaryJson = await invoke('import_summary', { zipPath });
            const s = JSON.parse(summaryJson);
            const box_ = overlay.querySelector('#import-summary-box');
            box_.style.display = 'block';
            box_.innerHTML = `
<div><strong>Node:</strong> ${s.node_id.substring(0, 16)}...</div>
<div><strong>Posts:</strong> ${s.post_count} <strong>Blobs:</strong> ${s.blob_count}</div>
<div><strong>Has key:</strong> ${s.has_identity_key ? 'Yes' : 'No'} <strong>Follows:</strong> ${s.has_follows ? 'Yes' : 'No'}</div>
<div><strong>Exported:</strong> ${new Date(s.export_date).toLocaleDateString()}</div>`;
            overlay.querySelector('#import-action-box').style.display = 'block';
            overlay.querySelector('#import-go').style.display = '';
            // Hide "add as identity" if no key in export
            if (!s.has_identity_key) {
                const addIdRadio = overlay.querySelector('input[value="add_identity"]');
                addIdRadio.disabled = true;
                addIdRadio.parentElement.style.opacity = '0.4';
            }
            status.textContent = '';
        } catch (e) {
            status.textContent = 'Error: ' + e;
        }
    });

    // Import: dispatch to the selected backend command. The path is
    // re-validated here because the user can edit/clear the field after
    // a successful preview (previously this invoked with an empty path).
    overlay.querySelector('#import-go').addEventListener('click', async () => {
        const zipPath = overlay.querySelector('#import-zip-path').value.trim();
        if (!zipPath) { toast('Enter a ZIP path'); return; }
        const action = overlay.querySelector('input[name="import-action"]:checked')?.value;
        if (!action) { toast('Select an import action'); return; }
        const status = overlay.querySelector('#import-status');
        status.textContent = 'Importing...';
        // Disable the button while the backend works to prevent double-submits.
        overlay.querySelector('#import-go').disabled = true;
        try {
            let result;
            if (action === 'add_identity') {
                result = await invoke('import_as_new_identity', { zipPath });
            } else {
                result = await invoke('import_public_posts', { zipPath });
            }
            status.textContent = result;
            toast('Import complete!');
            // Force-refresh the feed so newly imported posts appear.
            loadFeed(true);
        } catch (e) {
            status.textContent = 'Error: ' + e;
            toast('Import failed: ' + e);
        } finally {
            overlay.querySelector('#import-go').disabled = false;
        }
    });

    overlay.querySelector('#import-cancel').addEventListener('click', () => overlay.remove());
    // Clicking the backdrop (but not the dialog itself) dismisses the wizard.
    overlay.addEventListener('click', (e) => { if (e.target === overlay) overlay.remove(); });
});
|
||||||
|
|
||||||
$('#notifications-btn').addEventListener('click', async () => {
|
$('#notifications-btn').addEventListener('click', async () => {
|
||||||
// Load current settings
|
// Load current settings
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue