From bb014b7c43390f412511f566ad51ed20305a0588 Mon Sep 17 00:00:00 2001 From: Gregory Schier Date: Thu, 24 Apr 2025 19:57:02 -0700 Subject: [PATCH] Remove folder/environment foreign keys to make sync/import easier, and simplify batch upsert code. --- .../migrations/20250424152740_remove-fks.sql | 245 ++++++++++++++++++ src-tauri/src/lib.rs | 4 +- src-tauri/yaak-models/src/queries/batch.rs | 75 ++---- src-tauri/yaak-sync/src/sync.rs | 2 +- src-web/commands/openWorkspaceFromSyncDir.tsx | 1 + .../components/SyncToFilesystemSetting.tsx | 10 +- 6 files changed, 279 insertions(+), 58 deletions(-) create mode 100644 src-tauri/migrations/20250424152740_remove-fks.sql diff --git a/src-tauri/migrations/20250424152740_remove-fks.sql b/src-tauri/migrations/20250424152740_remove-fks.sql new file mode 100644 index 00000000..e84cb194 --- /dev/null +++ b/src-tauri/migrations/20250424152740_remove-fks.sql @@ -0,0 +1,245 @@ +-- NOTE: SQLite does not support dropping foreign keys, so we need to create new +-- tables and copy data instead. To prevent cascade deletes from wrecking stuff, +-- we start with the leaf tables and finish with the parent tables (eg. folder). + +---------------------------- +-- Remove http request FK -- +---------------------------- + +CREATE TABLE http_requests_dg_tmp +( + id TEXT NOT NULL + PRIMARY KEY, + model TEXT DEFAULT 'http_request' NOT NULL, + workspace_id TEXT NOT NULL + REFERENCES workspaces + ON DELETE CASCADE, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL, + deleted_at DATETIME, + name TEXT NOT NULL, + url TEXT NOT NULL, + method TEXT NOT NULL, + headers TEXT NOT NULL, + body_type TEXT, + sort_priority REAL DEFAULT 0 NOT NULL, + authentication TEXT DEFAULT '{}' NOT NULL, + authentication_type TEXT, + folder_id TEXT, + body TEXT DEFAULT '{}' NOT NULL, + url_parameters TEXT DEFAULT '[]' NOT NULL, + description TEXT DEFAULT '' NOT NULL +); + +INSERT INTO http_requests_dg_tmp(id, model, workspace_id, created_at, updated_at, deleted_at, name, url, method, + headers, body_type, sort_priority, authentication, authentication_type, folder_id, + body, url_parameters, description) +SELECT id, + model, + workspace_id, + created_at, + updated_at, + deleted_at, + name, + url, + method, + headers, + body_type, + sort_priority, + authentication, + authentication_type, + folder_id, + body, + url_parameters, + description +FROM http_requests; + +DROP TABLE http_requests; + +ALTER TABLE http_requests_dg_tmp + RENAME TO http_requests; + +---------------------------- +-- Remove grpc request FK -- +---------------------------- + +CREATE TABLE grpc_requests_dg_tmp +( + id TEXT NOT NULL + PRIMARY KEY, + model TEXT DEFAULT 'grpc_request' NOT NULL, + workspace_id TEXT NOT NULL + REFERENCES workspaces + ON DELETE CASCADE, + folder_id TEXT, + created_at DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL, + updated_at DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL, + name TEXT NOT NULL, + sort_priority REAL NOT NULL, + url TEXT NOT NULL, + service TEXT, + method TEXT, + message TEXT NOT NULL, + authentication TEXT DEFAULT '{}' NOT NULL, + authentication_type TEXT, + metadata TEXT DEFAULT '[]' NOT NULL, + description TEXT DEFAULT '' NOT NULL +); + +INSERT INTO grpc_requests_dg_tmp(id, model, workspace_id, folder_id, created_at, updated_at, name, sort_priority, url, + service, method, message, authentication, authentication_type, metadata, description) +SELECT id, + model, + workspace_id, + folder_id, + 
created_at, + updated_at, + name, + sort_priority, + url, + service, + method, + message, + authentication, + authentication_type, + metadata, + description +FROM grpc_requests; + +DROP TABLE grpc_requests; + +ALTER TABLE grpc_requests_dg_tmp + RENAME TO grpc_requests; + +--------------------------------- +-- Remove websocket request FK -- +--------------------------------- + +CREATE TABLE websocket_requests_dg_tmp +( + id TEXT NOT NULL + PRIMARY KEY, + model TEXT DEFAULT 'websocket_request' NOT NULL, + workspace_id TEXT NOT NULL + REFERENCES workspaces + ON DELETE CASCADE, + folder_id TEXT, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL, + deleted_at DATETIME, + authentication TEXT DEFAULT '{}' NOT NULL, + authentication_type TEXT, + description TEXT NOT NULL, + name TEXT NOT NULL, + url TEXT NOT NULL, + headers TEXT NOT NULL, + message TEXT NOT NULL, + sort_priority REAL NOT NULL, + url_parameters TEXT DEFAULT '[]' NOT NULL +); + +INSERT INTO websocket_requests_dg_tmp(id, model, workspace_id, folder_id, created_at, updated_at, deleted_at, + authentication, authentication_type, description, name, url, headers, message, + sort_priority, url_parameters) +SELECT id, + model, + workspace_id, + folder_id, + created_at, + updated_at, + deleted_at, + authentication, + authentication_type, + description, + name, + url, + headers, + message, + sort_priority, + url_parameters +FROM websocket_requests; + +DROP TABLE websocket_requests; + +ALTER TABLE websocket_requests_dg_tmp + RENAME TO websocket_requests; + +PRAGMA foreign_keys = ON; + +--------------------------- +-- Remove environment FK -- +--------------------------- + +CREATE TABLE environments_dg_tmp +( + id TEXT NOT NULL + PRIMARY KEY, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL, + deleted_at DATETIME, + workspace_id TEXT NOT NULL + REFERENCES workspaces + ON DELETE CASCADE, + name TEXT NOT NULL, + variables DEFAULT '[]' NOT NULL, + model TEXT DEFAULT 'environment', + environment_id TEXT +); + +INSERT INTO environments_dg_tmp(id, created_at, updated_at, deleted_at, workspace_id, name, variables, model, + environment_id) +SELECT id, + created_at, + updated_at, + deleted_at, + workspace_id, + name, + variables, + model, + environment_id +FROM environments; + +DROP TABLE environments; + +ALTER TABLE environments_dg_tmp + RENAME TO environments; + +---------------------- +-- Remove folder FK -- +---------------------- + +CREATE TABLE folders_dg_tmp +( + id TEXT NOT NULL + PRIMARY KEY, + model TEXT DEFAULT 'folder' NOT NULL, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL, + deleted_at DATETIME, + workspace_id TEXT NOT NULL + REFERENCES workspaces + ON DELETE CASCADE, + folder_id TEXT, + name TEXT NOT NULL, + sort_priority REAL DEFAULT 0 NOT NULL, + description TEXT DEFAULT '' NOT NULL +); + +INSERT INTO folders_dg_tmp(id, model, created_at, updated_at, deleted_at, workspace_id, folder_id, name, sort_priority, + description) +SELECT id, + model, + created_at, + updated_at, + deleted_at, + workspace_id, + folder_id, + name, + sort_priority, + description +FROM folders; + +DROP TABLE folders; + +ALTER TABLE folders_dg_tmp + RENAME TO folders; diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index 91c505a3..fe99d976 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -9,7 +9,7 @@ use crate::updates::{UpdateMode, UpdateTrigger, 
YaakUpdater};
 use crate::uri_scheme::handle_uri_scheme;
 use error::Result as YaakResult;
 use eventsource_client::{EventParser, SSE};
-use log::{debug, error, warn};
+use log::{debug, error, info, warn};
 use std::collections::{BTreeMap, HashMap};
 use std::fs::{File, create_dir_all};
 use std::path::PathBuf;
@@ -880,6 +880,8 @@ async fn cmd_import_data(
         })
         .collect();
 
+    info!("Importing data");
+
     let upserted = app_handle.with_tx(|tx| {
         tx.batch_upsert(
             workspaces,
diff --git a/src-tauri/yaak-models/src/queries/batch.rs b/src-tauri/yaak-models/src/queries/batch.rs
index c1c29339..7be4a498 100644
--- a/src-tauri/yaak-models/src/queries/batch.rs
+++ b/src-tauri/yaak-models/src/queries/batch.rs
@@ -1,8 +1,8 @@
+use crate::db_context::DbContext;
 use crate::error::Result;
 use crate::models::{Environment, Folder, GrpcRequest, HttpRequest, WebsocketRequest, Workspace};
 use crate::util::{BatchUpsertResult, UpdateSource};
 use log::info;
-use crate::db_context::DbContext;
 
 impl<'a> DbContext<'a> {
     pub fn batch_upsert(
@@ -18,64 +18,19 @@ impl<'a> DbContext<'a> {
         let mut imported_resources = BatchUpsertResult::default();
 
         if workspaces.len() > 0 {
-            info!("Batch inserting {} workspaces", workspaces.len());
             for v in workspaces {
                 let x = self.upsert_workspace(&v, source)?;
                 imported_resources.workspaces.push(x.clone());
             }
-        }
-
-        if environments.len() > 0 {
-            while imported_resources.environments.len() < environments.len() {
-                for v in environments.clone() {
-                    if let Some(id) = v.environment_id.clone() {
-                        let has_parent_to_import =
-                            environments.iter().find(|m| m.id == id).is_some();
-                        let imported_parent =
-                            imported_resources.environments.iter().find(|m| m.id == id);
-                        // If there's also a parent to upsert, wait for that one
-                        if imported_parent.is_none() && has_parent_to_import {
-                            continue;
-                        }
-                    }
-                    if let Some(_) = imported_resources.environments.iter().find(|f| f.id == v.id) {
-                        continue;
-                    }
-                    let x = self.upsert_environment(&v, source)?;
-                    imported_resources.environments.push(x.clone());
-                }
-            }
-            info!("Imported {} environments", imported_resources.environments.len());
-        }
-
-        if folders.len() > 0 {
-            while imported_resources.folders.len() < folders.len() {
-                for v in folders.clone() {
-                    if let Some(id) = v.folder_id.clone() {
-                        let has_parent_to_import = folders.iter().find(|m| m.id == id).is_some();
-                        let imported_parent =
-                            imported_resources.folders.iter().find(|m| m.id == id);
-                        // If there's also a parent to upsert, wait for that one
-                        if imported_parent.is_none() && has_parent_to_import {
-                            continue;
-                        }
-                    }
-                    if let Some(_) = imported_resources.folders.iter().find(|f| f.id == v.id) {
-                        continue;
-                    }
-                    let x = self.upsert_folder(&v, source)?;
-                    imported_resources.folders.push(x.clone());
-                }
-            }
-            info!("Imported {} folders", imported_resources.folders.len());
+            info!("Upserted {} workspaces", imported_resources.workspaces.len());
         }
 
         if http_requests.len() > 0 {
             for v in http_requests {
-                let x = self.upsert(&v, source)?;
+                let x = self.upsert_http_request(&v, source)?;
                 imported_resources.http_requests.push(x.clone());
             }
-            info!("Imported {} http_requests", imported_resources.http_requests.len());
+            info!("Upserted {} http_requests", imported_resources.http_requests.len());
         }
 
         if grpc_requests.len() > 0 {
@@ -83,7 +38,7 @@
                 let x = self.upsert_grpc_request(&v, source)?;
                 imported_resources.grpc_requests.push(x.clone());
             }
-            info!("Imported {} grpc_requests", imported_resources.grpc_requests.len());
+            info!("Upserted {} grpc_requests",
imported_resources.grpc_requests.len()); } if websocket_requests.len() > 0 { @@ -91,7 +46,25 @@ impl<'a> DbContext<'a> { let x = self.upsert_websocket_request(&v, source)?; imported_resources.websocket_requests.push(x.clone()); } - info!("Imported {} websocket_requests", imported_resources.websocket_requests.len()); + info!("Upserted {} websocket_requests", imported_resources.websocket_requests.len()); + } + + if environments.len() > 0 { + for x in environments { + let x = self.upsert_environment(&x, source)?; + imported_resources.environments.push(x.clone()); + } + info!("Upserted {} environments", imported_resources.environments.len()); + } + + // Do folders last so it doesn't cause the UI to render empty folders before populating + // immediately after. + if folders.len() > 0 { + for v in folders { + let x = self.upsert_folder(&v, source)?; + imported_resources.folders.push(x.clone()); + } + info!("Upserted {} folders", imported_resources.folders.len()); } Ok(imported_resources) diff --git a/src-tauri/yaak-sync/src/sync.rs b/src-tauri/yaak-sync/src/sync.rs index 02a43727..add74504 100644 --- a/src-tauri/yaak-sync/src/sync.rs +++ b/src-tauri/yaak-sync/src/sync.rs @@ -448,7 +448,7 @@ pub(crate) async fn apply_sync_ops( websocket_requests_to_upsert, &UpdateSource::Sync, )?; - + // Ensure we create WorkspaceMeta models for each new workspace, with the appropriate sync dir let sync_dir_string = sync_dir.to_string_lossy().to_string(); for workspace in upserted_models.workspaces { diff --git a/src-web/commands/openWorkspaceFromSyncDir.tsx b/src-web/commands/openWorkspaceFromSyncDir.tsx index ba354d12..83426ce9 100644 --- a/src-web/commands/openWorkspaceFromSyncDir.tsx +++ b/src-web/commands/openWorkspaceFromSyncDir.tsx @@ -11,6 +11,7 @@ export const openWorkspaceFromSyncDir = createFastMutation({ const workspace = ops .map((o) => (o.type === 'dbCreate' && o.fs.model.type === 'workspace' ? o.fs.model : null)) .filter((m) => m)[0]; + if (workspace == null) { showSimpleAlert('Failed to Open', 'No workspace found in directory'); return; diff --git a/src-web/components/SyncToFilesystemSetting.tsx b/src-web/components/SyncToFilesystemSetting.tsx index 3c363a16..b9afcc84 100644 --- a/src-web/components/SyncToFilesystemSetting.tsx +++ b/src-web/components/SyncToFilesystemSetting.tsx @@ -18,10 +18,10 @@ export function SyncToFilesystemSetting({ onCreateNewWorkspace, value, }: SyncToFilesystemSettingProps) { - const [isNonEmpty, setIsNonEmpty] = useState(null); + const [syncDir, setSyncDir] = useState(null); return ( - {isNonEmpty && ( + {syncDir && (

Directory is not empty. Do you want to open it instead?

@@ -31,7 +31,7 @@ export function SyncToFilesystemSetting({ size="xs" type="button" onClick={() => { - openWorkspaceFromSyncDir.mutate(isNonEmpty); + openWorkspaceFromSyncDir.mutate(syncDir); onCreateNewWorkspace(); }} > @@ -52,12 +52,12 @@ export function SyncToFilesystemSetting({ if (filePath != null) { const files = await readDir(filePath); if (files.length > 0) { - setIsNonEmpty(filePath); + setSyncDir(filePath); return; } } - setIsNonEmpty(null); + setSyncDir(null); onChange({ ...value, filePath }); }} />
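
A quick post-migration sanity check, not part of the patch: after 20250424152740_remove-fks.sql runs, SQLite's foreign_key_list pragma can confirm that the rebuilt tables kept only their workspace reference. Table names come from the migration above; the expected output is an assumption based on the new CREATE TABLE statements (one row per table pointing at workspaces, and none pointing at folders or environments).

-- Run against the migrated database, e.g. with the sqlite3 CLI.
-- Each pragma should report only workspace_id -> workspaces; no rows should
-- reference folders or environments anymore.
PRAGMA foreign_key_list(http_requests);
PRAGMA foreign_key_list(grpc_requests);
PRAGMA foreign_key_list(websocket_requests);
PRAGMA foreign_key_list(environments);
PRAGMA foreign_key_list(folders);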