Export/Import: ZIP export with scope selection, import with public post merge
Export (export.rs): ZIP archive with auto-chunking at 4GB. Four scopes: identity only, posts only, posts+identity, everything (posts+key+follows+ profiles+settings). Includes blobs. Manifest JSON tracks metadata. Import (import.rs): Read ZIP summary without importing (preview). Import public posts into current identity with new PostIds + original timestamps. Import as new identity (creates identity subdir from key). Uses spawn_blocking for ZIP I/O to avoid Send issues with ZipArchive. Tauri IPC: export_data, import_summary, import_public_posts, import_as_new_identity commands. IdentityManager.base_dir() getter. Frontend: Export wizard lightbox with scope radio buttons + output dir. Import wizard with ZIP path, preview summary, action selection. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
fb1e92985c
commit
8ef32e6df6
7 changed files with 786 additions and 3 deletions
249
crates/core/src/import.rs
Normal file
249
crates/core/src/import.rs
Normal file
|
|
@ -0,0 +1,249 @@
|
|||
//! Import data from ZIP archives exported by the export module.
|
||||
//!
|
||||
//! Import actions:
|
||||
//! - AddAsIdentity: create a new identity from the export's key + data
|
||||
//! - ImportPublicPosts: import only public posts into the current identity (new PostIds)
|
||||
//! - MergeWithKey: decrypt encrypted posts using provided key, re-encrypt for current identity
|
||||
|
||||
use std::io::Read;
|
||||
use std::path::Path;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tracing::{debug, info, warn};
|
||||
|
||||
use crate::blob::BlobStore;
|
||||
use crate::content::compute_post_id;
|
||||
use crate::export::{ExportManifest, ExportedPost};
|
||||
use crate::storage::StoragePool;
|
||||
use crate::types::{Attachment, NodeId, Post, PostVisibility};
|
||||
|
||||
/// What to do with the imported data.
///
/// Serialized in snake_case for the Tauri IPC layer (e.g. `add_as_identity`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ImportAction {
    /// Create a new identity from the export's key and restore all data.
    AddAsIdentity,
    /// Import public posts into the current identity with new PostIds.
    ImportPublicPosts,
    /// Decrypt with the provided key, re-create posts under current identity.
    /// `key_hex` is the hex-encoded identity key from the original export.
    MergeWithKey { key_hex: String },
}
|
||||
|
||||
/// Summary of what an import ZIP contains (shown to user before importing).
#[derive(Debug, Serialize, Deserialize)]
pub struct ImportSummary {
    /// Node id of the identity that produced the export.
    pub node_id: String,
    /// Export scope, Debug-formatted from the manifest — for display only.
    pub scope: String,
    /// When the export was created (epoch timestamp; presumably milliseconds
    /// — confirm against export.rs).
    pub export_date: u64,
    /// Number of posts recorded in the manifest.
    pub post_count: usize,
    /// Number of blobs recorded in the manifest.
    pub blob_count: usize,
    /// True if the archive contains `identity.key`.
    pub has_identity_key: bool,
    /// True if the archive contains `follows.json`.
    pub has_follows: bool,
    /// True if the archive contains `settings.json`.
    pub has_settings: bool,
}
|
||||
|
||||
/// Result of an import operation.
#[derive(Debug, Serialize, Deserialize)]
pub struct ImportResult {
    /// Posts actually written to storage.
    pub posts_imported: usize,
    /// Posts present in the archive but not imported (e.g. non-public).
    pub posts_skipped: usize,
    /// Blobs newly added to the blob store (pre-existing blobs not counted).
    pub blobs_imported: usize,
    /// Human-readable one-line summary for the UI.
    pub message: String,
}
|
||||
|
||||
/// Read a ZIP and return a summary of its contents (without importing).
|
||||
pub fn read_import_summary(zip_path: &Path) -> anyhow::Result<ImportSummary> {
|
||||
let file = std::fs::File::open(zip_path)?;
|
||||
let mut archive = zip::ZipArchive::new(file)?;
|
||||
|
||||
// Read manifest
|
||||
let manifest: ExportManifest = {
|
||||
let mut entry = archive.by_name("itsgoin-export/manifest.json")?;
|
||||
let mut buf = String::new();
|
||||
entry.read_to_string(&mut buf)?;
|
||||
serde_json::from_str(&buf)?
|
||||
};
|
||||
|
||||
let has_key = archive.by_name("itsgoin-export/identity.key").is_ok();
|
||||
let has_follows = archive.by_name("itsgoin-export/follows.json").is_ok();
|
||||
let has_settings = archive.by_name("itsgoin-export/settings.json").is_ok();
|
||||
|
||||
Ok(ImportSummary {
|
||||
node_id: manifest.node_id,
|
||||
scope: format!("{:?}", manifest.scope),
|
||||
export_date: manifest.export_date,
|
||||
post_count: manifest.post_count,
|
||||
blob_count: manifest.blob_count,
|
||||
has_identity_key: has_key,
|
||||
has_follows,
|
||||
has_settings,
|
||||
})
|
||||
}
|
||||
|
||||
/// Parsed data from a ZIP ready for async import.
///
/// Produced inside `spawn_blocking` (ZIP entries are not `Send`) and handed
/// to the async phase for storage.
struct ParsedImport {
    /// Each entry: the rebuilt post, its visibility (always Public after
    /// filtering), and the attachment blobs found in the archive as
    /// (attachment, raw bytes) pairs.
    posts: Vec<(Post, PostVisibility, Vec<(Attachment, Vec<u8>)>)>,
    /// Number of non-public posts that were filtered out.
    skipped: usize,
}
|
||||
|
||||
/// Import public posts from a ZIP into the current identity.
|
||||
/// Creates new posts with the current node_id as author, preserving original timestamps.
|
||||
pub async fn import_public_posts(
|
||||
zip_path: &Path,
|
||||
storage: &StoragePool,
|
||||
blob_store: &BlobStore,
|
||||
our_node_id: &NodeId,
|
||||
) -> anyhow::Result<ImportResult> {
|
||||
// Phase 1: Read everything from ZIP synchronously (no Send requirement)
|
||||
let parsed = {
|
||||
let zip_path = zip_path.to_path_buf();
|
||||
let our_node_id = *our_node_id;
|
||||
tokio::task::spawn_blocking(move || -> anyhow::Result<ParsedImport> {
|
||||
let file = std::fs::File::open(&zip_path)?;
|
||||
let mut archive = zip::ZipArchive::new(file)?;
|
||||
|
||||
let posts: Vec<ExportedPost> = {
|
||||
let mut entry = archive.by_name("itsgoin-export/posts.json")?;
|
||||
let mut buf = String::new();
|
||||
entry.read_to_string(&mut buf)?;
|
||||
serde_json::from_str(&buf)?
|
||||
};
|
||||
|
||||
let mut result_posts = Vec::new();
|
||||
let mut skipped = 0usize;
|
||||
|
||||
for ep in &posts {
|
||||
let vis: PostVisibility = serde_json::from_str(&ep.visibility_json).unwrap_or(PostVisibility::Public);
|
||||
if !matches!(vis, PostVisibility::Public) {
|
||||
skipped += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
let attachments: Vec<Attachment> = serde_json::from_str(&ep.attachments_json).unwrap_or_default();
|
||||
let new_post = Post {
|
||||
author: our_node_id,
|
||||
content: ep.content.clone(),
|
||||
attachments: attachments.clone(),
|
||||
timestamp_ms: ep.timestamp_ms,
|
||||
};
|
||||
|
||||
// Read blob data from archive
|
||||
let mut blob_data = Vec::new();
|
||||
for att in &attachments {
|
||||
let cid_hex = hex::encode(att.cid);
|
||||
let blob_path = format!("itsgoin-export/blobs/{}", cid_hex);
|
||||
if let Ok(mut blob_entry) = archive.by_name(&blob_path) {
|
||||
let mut data = Vec::new();
|
||||
blob_entry.read_to_end(&mut data)?;
|
||||
blob_data.push((att.clone(), data));
|
||||
}
|
||||
}
|
||||
|
||||
result_posts.push((new_post, vis, blob_data));
|
||||
}
|
||||
|
||||
Ok(ParsedImport { posts: result_posts, skipped })
|
||||
}).await??
|
||||
};
|
||||
|
||||
// Phase 2: Store to DB + blob store (async — needs storage.get().await)
|
||||
let mut imported = 0usize;
|
||||
let mut blobs_imported = 0usize;
|
||||
|
||||
for (new_post, _vis, blob_data) in &parsed.posts {
|
||||
let new_id = compute_post_id(new_post);
|
||||
|
||||
let s = storage.get().await;
|
||||
if s.get_post(&new_id).ok().flatten().is_some() {
|
||||
continue; // duplicate
|
||||
}
|
||||
s.store_post_with_visibility(&new_id, new_post, &PostVisibility::Public)?;
|
||||
drop(s);
|
||||
|
||||
for (att, data) in blob_data {
|
||||
if !blob_store.has(&att.cid) {
|
||||
blob_store.store(&att.cid, data)?;
|
||||
let s = storage.get().await;
|
||||
let _ = s.record_blob(&att.cid, &new_id, our_node_id, data.len() as u64, &att.mime_type, att.size_bytes);
|
||||
blobs_imported += 1;
|
||||
}
|
||||
}
|
||||
|
||||
imported += 1;
|
||||
}
|
||||
|
||||
info!(imported, skipped = parsed.skipped, blobs = blobs_imported, "Public post import complete");
|
||||
|
||||
Ok(ImportResult {
|
||||
posts_imported: imported,
|
||||
posts_skipped: parsed.skipped,
|
||||
blobs_imported,
|
||||
message: format!("Imported {} posts ({} skipped), {} blobs", imported, parsed.skipped, blobs_imported),
|
||||
})
|
||||
}
|
||||
|
||||
/// Import a ZIP as a new identity (create identity subdir, extract everything).
|
||||
pub fn import_as_identity(
|
||||
zip_path: &Path,
|
||||
base_dir: &Path,
|
||||
) -> anyhow::Result<String> {
|
||||
let file = std::fs::File::open(zip_path)?;
|
||||
let mut archive = zip::ZipArchive::new(file)?;
|
||||
|
||||
// Read manifest
|
||||
let manifest: ExportManifest = {
|
||||
let mut entry = archive.by_name("itsgoin-export/manifest.json")?;
|
||||
let mut buf = String::new();
|
||||
entry.read_to_string(&mut buf)?;
|
||||
serde_json::from_str(&buf)?
|
||||
};
|
||||
|
||||
// Read identity key
|
||||
let key_data = {
|
||||
let mut entry = archive.by_name("itsgoin-export/identity.key")
|
||||
.map_err(|_| anyhow::anyhow!("Export doesn't contain an identity key"))?;
|
||||
let mut buf = Vec::new();
|
||||
entry.read_to_end(&mut buf)?;
|
||||
buf
|
||||
};
|
||||
|
||||
// Create identity directory
|
||||
let id_dir = base_dir.join("identities").join(&manifest.node_id);
|
||||
if id_dir.exists() {
|
||||
anyhow::bail!("Identity {} already exists", &manifest.node_id[..12]);
|
||||
}
|
||||
std::fs::create_dir_all(&id_dir)?;
|
||||
|
||||
// Write identity key
|
||||
let key_path = id_dir.join("identity.key");
|
||||
std::fs::write(&key_path, &key_data)?;
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
let _ = std::fs::set_permissions(&key_path, std::fs::Permissions::from_mode(0o600));
|
||||
}
|
||||
|
||||
// Write metadata
|
||||
let now = now_ms();
|
||||
let meta = serde_json::json!({
|
||||
"display_name": format!("Imported {}", &manifest.node_id[..12]),
|
||||
"created_at": now,
|
||||
"last_used_at": now,
|
||||
});
|
||||
std::fs::write(id_dir.join("meta.json"), serde_json::to_string_pretty(&meta)?)?;
|
||||
|
||||
info!(identity = manifest.node_id, "Imported identity from ZIP — switch to it to restore data");
|
||||
|
||||
// Note: posts, blobs, follows, settings will be restored when the user switches to this
|
||||
// identity and opens the node. The full DB restore could be done here, but it's simpler
|
||||
// to let the user switch and then import posts via the import wizard.
|
||||
|
||||
Ok(manifest.node_id)
|
||||
}
|
||||
|
||||
/// Current wall-clock time in milliseconds since the Unix epoch.
///
/// Returns 0 if the system clock reads earlier than the epoch (the same
/// fallback the previous `unwrap_or_default` provided).
fn now_ms() -> u64 {
    use std::time::{SystemTime, UNIX_EPOCH};
    match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_millis() as u64,
        Err(_) => 0,
    }
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue