diff --git a/.tauriignore b/.tauriignore
new file mode 100644
index 00000000..5a02d108
--- /dev/null
+++ b/.tauriignore
@@ -0,0 +1 @@
+plugins
diff --git a/src-tauri/plugins/hello-world/greet.js b/src-tauri/plugins/hello-world/greet.js
deleted file mode 100644
index 9e23f000..00000000
--- a/src-tauri/plugins/hello-world/greet.js
+++ /dev/null
@@ -1,4 +0,0 @@
-export function greet() {
-  // Call Rust-provided fn!
-  sayHello('Plugin');
-}
diff --git a/src-tauri/plugins/hello-world/index.js b/src-tauri/plugins/hello-world/index.js
deleted file mode 100644
index b739c110..00000000
--- a/src-tauri/plugins/hello-world/index.js
+++ /dev/null
@@ -1,7 +0,0 @@
-import { greet } from './greet.js';
-
-export function hello() {
-  greet();
-  console.log('Try JSON parse', JSON.parse(`{ "hello": 123 }`).hello);
-  console.log('Try RegExp', '123'.match(/[\d]+/));
-}
diff --git a/src-tauri/plugins/insomnia-importer/out/index.js b/src-tauri/plugins/insomnia-importer/out/index.js
index 0dcb068c..70add3d4 100644
--- a/src-tauri/plugins/insomnia-importer/out/index.js
+++ b/src-tauri/plugins/insomnia-importer/out/index.js
@@ -1,4 +1,4 @@
-function O(e, t) {
+function I(e, t) {
   return (
     console.log('IMPORTING Environment', e._id, e.name, JSON.stringify(e, null, 2)),
     {
@@ -8,64 +8,64 @@ function O(e, t) {
       workspaceId: t,
       model: 'environment',
       name: e.name,
-      variables: Object.entries(e.data).map(([i, s]) => ({
+      variables: Object.entries(e.data).map(([n, s]) => ({
         enabled: !0,
-        name: i,
+        name: n,
         value: `${s}`,
       })),
     }
   );
 }
-function g(e) {
+function S(e) {
   return m(e) && e._type === 'workspace';
 }
-function y(e) {
+function O(e) {
   return m(e) && e._type === 'request_group';
 }
-function _(e) {
+function g(e) {
   return m(e) && e._type === 'request';
 }
-function I(e) {
+function f(e) {
   return m(e) && e._type === 'environment';
 }
 function m(e) {
   return Object.prototype.toString.call(e) === '[object Object]';
 }
-function h(e) {
+function y(e) {
   return Object.prototype.toString.call(e) === '[object String]';
 }
-function N(e) {
-  return Object.entries(e).map(([t, i]) => ({
+function h(e) {
+  return Object.entries(e).map(([t, n]) => ({
     enabled: !0,
     name: t,
-    value: `${i}`,
+    value: `${n}`,
   }));
 }
-function p(e) {
-  return h(e) ? e.replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, '${[$2]}') : e;
+function d(e) {
+  return y(e) ? e.replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, '${[$2]}') : e;
 }
-function D(e, t, i = 0) {
-  var a, d;
+function _(e, t, n = 0) {
+  var u, r;
   console.log('IMPORTING REQUEST', e._id, e.name, JSON.stringify(e, null, 2));
   let s = null,
-    n = null;
-  ((a = e.body) == null ? void 0 : a.mimeType) === 'application/graphql'
-    ? ((s = 'graphql'), (n = p(e.body.text)))
-    : ((d = e.body) == null ? void 0 : d.mimeType) === 'application/json' &&
-      ((s = 'application/json'), (n = p(e.body.text)));
-  let u = null,
-    r = {};
+    o = null;
+  ((u = e.body) == null ? void 0 : u.mimeType) === 'application/graphql'
+    ? ((s = 'graphql'), (o = d(e.body.text)))
+    : ((r = e.body) == null ? void 0 : r.mimeType) === 'application/json' &&
+      ((s = 'application/json'), (o = d(e.body.text)));
+  let a = null,
+    l = {};
   return (
     e.authentication.type === 'bearer'
-      ? ((u = 'bearer'),
-        (r = {
-          token: p(e.authentication.token),
+      ? ((a = 'bearer'),
+        (l = {
+          token: d(e.authentication.token),
         }))
       : e.authentication.type === 'basic' &&
-        ((u = 'basic'),
-        (r = {
-          username: p(e.authentication.username),
-          password: p(e.authentication.password),
+        ((a = 'basic'),
+        (l = {
+          username: d(e.authentication.username),
+          password: d(e.authentication.password),
         })),
     {
       id: e._id,
@@ -74,25 +74,25 @@ function D(e, t, i = 0) {
       workspaceId: t,
      folderId: e.parentId === t ? null : e.parentId,
       model: 'http_request',
-      sortPriority: i,
+      sortPriority: n,
       name: e.name,
-      url: p(e.url),
-      body: n,
+      url: d(e.url),
+      body: o,
       bodyType: s,
-      authentication: r,
-      authenticationType: u,
+      authentication: l,
+      authenticationType: a,
       method: e.method,
       headers: (e.headers ?? [])
-        .map(({ name: c, value: o, disabled: f }) => ({
-          enabled: !f,
+        .map(({ name: c, value: p, disabled: i }) => ({
+          enabled: !i,
           name: c,
-          value: o,
+          value: p,
         }))
-        .filter(({ name: c, value: o }) => c !== '' || o !== ''),
+        .filter(({ name: c, value: p }) => c !== '' || p !== ''),
     }
   );
 }
-function w(e, t) {
+function N(e, t) {
   return (
     console.log('IMPORTING Workspace', e._id, e.name, JSON.stringify(e, null, 2)),
     {
@@ -105,7 +105,7 @@
     }
   );
 }
-function b(e, t) {
+function D(e, t) {
   return (
     console.log('IMPORTING FOLDER', e._id, e.name, JSON.stringify(e, null, 2)),
     {
@@ -119,34 +119,37 @@
     }
   );
 }
-function T(e) {
-  const t = JSON.parse(e);
-  if (!m(t)) return;
-  const { _type: i, __export_format: s } = t;
-  if (i !== 'export' || s !== 4 || !Array.isArray(t.resources)) return;
+function w(e) {
+  let t;
+  try {
+    t = JSON.parse(e);
+  } catch {
+    return;
+  }
+  if (!m(t) || !Array.isArray(t.resources)) return;
   const n = {
       workspaces: [],
       requests: [],
       environments: [],
      folders: [],
     },
-    u = t.resources.filter(g);
-  for (const r of u) {
-    console.log('IMPORTING WORKSPACE', r.name);
-    const a = t.resources.find((o) => I(o) && o.parentId === r._id);
+    s = t.resources.filter(S);
+  for (const o of s) {
+    console.log('IMPORTING WORKSPACE', o.name);
+    const a = t.resources.find((r) => f(r) && r.parentId === o._id);
     console.log('FOUND BASE ENV', a.name),
-      n.workspaces.push(w(r, a ? N(a.data) : [])),
+      n.workspaces.push(N(o, a ? h(a.data) : [])),
       console.log('IMPORTING ENVIRONMENTS', a.name);
-    const d = t.resources.filter((o) => I(o) && o.parentId === (a == null ? void 0 : a._id));
-    console.log('FOUND', d.length, 'ENVIRONMENTS'),
-      n.environments.push(...d.map((o) => O(o, r._id)));
-    const c = (o) => {
-      const f = t.resources.filter((l) => l.parentId === o);
-      let S = 0;
-      for (const l of f)
-        y(l) ? (n.folders.push(b(l, r._id)), c(l._id)) : _(l) && n.requests.push(D(l, r._id, S++));
+    const l = t.resources.filter((r) => f(r) && r.parentId === (a == null ? void 0 : a._id));
+    console.log('FOUND', l.length, 'ENVIRONMENTS'),
+      n.environments.push(...l.map((r) => I(r, o._id)));
+    const u = (r) => {
+      const c = t.resources.filter((i) => i.parentId === r);
+      let p = 0;
+      for (const i of c)
+        O(i) ? (n.folders.push(D(i, o._id)), u(i._id)) : g(i) && n.requests.push(_(i, o._id, p++));
     };
-    c(r._id);
+    u(o._id);
   }
   return (
     (n.requests = n.requests.filter(Boolean)),
@@ -155,4 +158,4 @@ function T(e) {
     n
  );
 }
-export { T as pluginHookImport };
+export { w as pluginHookImport };
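One rename in the bundle above matters for readers: `p` became `d`, the helper that rewrites Insomnia template tags into Yaak's syntax. Unminified, the transformation looks roughly like this; the regex is copied verbatim from the bundle, while the standalone name `convertTemplateTags` is made up for illustration:

```ts
// Sketch of what d() (formerly p) does in the bundle above: rewrite
// Insomnia template tags like {{ _.base_url }} into Yaak's ${[base_url]}.
// `convertTemplateTags` is an illustrative name, not from the source.
function convertTemplateTags(value: unknown): unknown {
  if (Object.prototype.toString.call(value) !== '[object String]') return value;
  return (value as string).replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, '${[$2]}');
}

convertTemplateTags('{{_.base_url}}/users'); // => '${[base_url]}/users'
```

Because the `(_\.)?` group is optional and only `$2` is kept in the replacement, both `{{_.base_url}}` and `{{base_url}}` collapse to the same `${[base_url]}` reference.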
diff --git a/src-tauri/plugins/insomnia-importer/src/index.js b/src-tauri/plugins/insomnia-importer/src/index.js
index 375b04c2..d25da234 100644
--- a/src-tauri/plugins/insomnia-importer/src/index.js
+++ b/src-tauri/plugins/insomnia-importer/src/index.js
@@ -12,14 +12,19 @@ import { parseVariables } from './helpers/variables.js';
 import { importFolder } from './importers/folder.js';
 
 export function pluginHookImport(contents) {
-  const parsed = JSON.parse(contents);
-  if (!isJSObject(parsed)) {
-    return;
+  let parsed;
+  try {
+    parsed = JSON.parse(contents);
+  } catch (e) {
+    return undefined;
   }
-  const { _type, __export_format } = parsed;
-  if (_type !== 'export' || __export_format !== 4 || !Array.isArray(parsed.resources)) {
-    return;
+  if (!isJSObject(parsed)) {
+    return undefined;
+  }
+
+  if (!Array.isArray(parsed.resources)) {
+    return undefined;
   }
 
   const resources = {
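For context, `pluginHookImport` above walks an Insomnia v4 export: one flat `resources` array in which `parentId` links environments, folders (`request_group`), and requests back to their workspace. A trimmed sketch of such an input, with all IDs and values made up:

```ts
// Minimal shape of an Insomnia export as consumed by pluginHookImport().
// The _type discriminators are the ones the importer matches on; every
// id, name, and value here is illustrative.
const insomniaExport = {
  _type: 'export',
  __export_format: 4,
  resources: [
    { _id: 'wrk_1', _type: 'workspace', name: 'My API' },
    {
      _id: 'env_1',
      _type: 'environment',
      parentId: 'wrk_1',
      data: { base_url: 'https://api.example.com' },
    },
    { _id: 'fld_1', _type: 'request_group', parentId: 'wrk_1', name: 'Users' },
    {
      _id: 'req_1',
      _type: 'request',
      parentId: 'fld_1',
      name: 'List Users',
      method: 'GET',
      url: '{{ _.base_url }}/users',
      headers: [],
      authentication: {},
    },
  ],
};
```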
diff --git a/src-tauri/plugins/yaak-importer/out/index.js b/src-tauri/plugins/yaak-importer/out/index.js
new file mode 100644
index 00000000..36a31114
--- /dev/null
+++ b/src-tauri/plugins/yaak-importer/out/index.js
@@ -0,0 +1,13 @@
+function u(r) {
+  let e;
+  try {
+    e = JSON.parse(r);
+  } catch {
+    return;
+  }
+  if (t(e) && e.yaakSchema === 1) return e.resources;
+}
+function t(r) {
+  return Object.prototype.toString.call(r) === '[object Object]';
+}
+export { t as isJSObject, u as pluginHookImport };
diff --git a/src-tauri/plugins/yaak-importer/src/index.js b/src-tauri/plugins/yaak-importer/src/index.js
new file mode 100644
index 00000000..b40c52a4
--- /dev/null
+++ b/src-tauri/plugins/yaak-importer/src/index.js
@@ -0,0 +1,20 @@
+export function pluginHookImport(contents) {
+  let parsed;
+  try {
+    parsed = JSON.parse(contents);
+  } catch (err) {
+    return undefined;
+  }
+
+  if (!isJSObject(parsed)) {
+    return undefined;
+  }
+
+  if (parsed.yaakSchema !== 1) return undefined;
+
+  return parsed.resources; // Should already be in the correct format
+}
+
+export function isJSObject(obj) {
+  return Object.prototype.toString.call(obj) === '[object Object]';
+}
diff --git a/src-tauri/plugins/yaak-importer/vite.config.js b/src-tauri/plugins/yaak-importer/vite.config.js
new file mode 100644
index 00000000..73b18586
--- /dev/null
+++ b/src-tauri/plugins/yaak-importer/vite.config.js
@@ -0,0 +1,13 @@
+import { resolve } from 'path';
+import { defineConfig } from 'vite';
+
+export default defineConfig({
+  build: {
+    lib: {
+      entry: resolve(__dirname, 'src/index.js'),
+      fileName: 'index',
+      formats: ['es'],
+    },
+    outDir: resolve(__dirname, 'out'),
+  },
+});
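The yaak-importer is the read side of the export format added to `models.rs` further down: it accepts any JSON object with `yaakSchema === 1` and returns its `resources` untouched. A sketch of an accepted document (field names follow the camelCase serde rename on `WorkspaceExport`; all values are illustrative):

```ts
// Minimal document accepted by the yaak-importer. This mirrors what
// get_workspace_export_resources() serializes; the version string and
// timestamp are example values only.
const yaakExport = {
  yaakVersion: '1.0.0',
  yaakSchema: 1,
  timestamp: '2023-09-01T00:00:00',
  resources: {
    workspaces: [],
    environments: [],
    folders: [],
    requests: [],
  },
};
```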
diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs
index 58ec38d1..f1db3106 100644
--- a/src-tauri/src/main.rs
+++ b/src-tauri/src/main.rs
@@ -11,12 +11,12 @@ use std::collections::HashMap;
 use std::env::current_dir;
 use std::fs::{create_dir_all, File};
 use std::io::Write;
-use std::path::Path;
 use std::process::exit;
 
 use base64::Engine;
 use http::header::{HeaderName, ACCEPT, USER_AGENT};
 use http::{HeaderMap, HeaderValue, Method};
+use log::info;
 use rand::random;
 use reqwest::redirect::Policy;
 use serde::Serialize;
@@ -35,6 +35,7 @@ use tokio::sync::Mutex;
 use window_ext::TrafficLightWindowExt;
 
 use crate::analytics::{track_event, AnalyticsAction, AnalyticsResource};
+use crate::plugin::ImportResources;
 
 mod analytics;
 mod models;
@@ -266,32 +267,89 @@ async fn import_data(
     window: Window,
     db_instance: State<'_, Mutex<Pool<Sqlite>>>,
     file_paths: Vec<&str>,
-) -> Result<ImportedResources, String> {
+) -> Result<ImportResources, String> {
     let pool = &*db_instance.lock().await;
-    let imported = plugin::run_plugin_import(
+    let mut resources = plugin::run_plugin_import(
         &window.app_handle(),
-        pool,
         "insomnia-importer",
         file_paths.first().unwrap(),
     )
     .await;
-    Ok(imported)
+    println!("Resources: {:?}", resources);
+
+    if resources.is_none() {
+        resources = plugin::run_plugin_import(
+            &window.app_handle(),
+            "yaak-importer",
+            file_paths.first().unwrap(),
+        )
+        .await;
+    }
+    println!("Resources: {:?}", resources);
+
+    match resources {
+        None => Err("Failed to import data".to_string()),
+        Some(r) => {
+            let mut imported_resources = ImportResources::default();
+
+            info!("Importing resources");
+            for w in r.workspaces {
+                let x = models::upsert_workspace(pool, w)
+                    .await
+                    .expect("Failed to create workspace");
+                imported_resources.workspaces.push(x.clone());
+                info!("Imported workspace: {}", x.name);
+            }
+
+            for e in r.environments {
+                let x = models::upsert_environment(pool, e)
+                    .await
+                    .expect("Failed to create environment");
+                imported_resources.environments.push(x.clone());
+                info!("Imported environment: {}", x.name);
+            }
+
+            for f in r.folders {
+                let x = models::upsert_folder(pool, f)
+                    .await
+                    .expect("Failed to create folder");
+                imported_resources.folders.push(x.clone());
+                info!("Imported folder: {}", x.name);
+            }
+
+            for r in r.requests {
+                let x = models::upsert_request(pool, r)
+                    .await
+                    .expect("Failed to create request");
+                imported_resources.requests.push(x.clone());
+                info!("Imported request: {}", x.name);
+            }
+
+            Ok(imported_resources)
+        }
+    }
 }
 
 #[tauri::command]
 async fn export_data(
+    app_handle: AppHandle,
     db_instance: State<'_, Mutex<Pool<Sqlite>>>,
-    root_dir: &str,
+    export_path: &str,
     workspace_id: &str,
 ) -> Result<(), String> {
-    let path = Path::new(root_dir).join("yaak-export.json");
     let pool = &*db_instance.lock().await;
-    let imported = models::get_workspace_export_resources(pool, workspace_id).await;
-    println!("Exporting {:?}", path);
-    let f = File::create(path).expect("Unable to create file");
-    serde_json::to_writer_pretty(f, &imported)
+    let export_data = models::get_workspace_export_resources(&app_handle, pool, workspace_id).await;
+    let f = File::options()
+        .create(true)
+        .truncate(true)
+        .write(true)
+        .open(export_path)
+        .expect("Unable to create file");
+    serde_json::to_writer_pretty(&f, &export_data)
         .map_err(|e| e.to_string())
         .expect("Failed to write");
+    f.sync_all().expect("Failed to sync");
+    info!("Exported Yaak workspace to {:?}", export_path);
 
     Ok(())
 }
@@ -775,37 +833,6 @@ fn main() {
                 let _ = models::cancel_pending_responses(&pool).await;
 
-                // TODO: Move this somewhere better
-                match app.get_cli_matches() {
-                    Ok(matches) => {
-                        let cmd = matches.subcommand.unwrap_or_default();
-                        if cmd.name == "import" {
-                            let arg_file = cmd
-                                .matches
-                                .args
-                                .get("file")
-                                .unwrap()
-                                .value
-                                .as_str()
-                                .unwrap();
-                            plugin::run_plugin_import(
-                                &app.handle(),
-                                &pool,
-                                "insomnia-importer",
-                                arg_file,
-                            )
-                            .await;
-                            exit(0);
-                        } else if cmd.name == "hello" {
-                            plugin::run_plugin_hello(&app.handle(), "hello-world");
-                            exit(0);
-                        }
-                    }
-                    Err(e) => {
-                        println!("Nothing found: {}", e);
-                    }
-                }
-
                 Ok(())
             })
         })
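With the ad-hoc CLI import path deleted from `main()`, both commands are reached from the webview. A sketch of the calls as the frontend would make them, assuming Tauri's default conversion of snake_case argument names to camelCase (the paths and workspace id are placeholders):

```ts
import { invoke } from '@tauri-apps/api/tauri';

// import_data tries the insomnia-importer first, then falls back to the
// yaak-importer; it returns the upserted resources on success.
const imported = await invoke('import_data', { filePaths: ['/tmp/export.json'] });

// export_data now takes the exact file path the user picked, instead of
// a directory plus a hard-coded "yaak-export.json" name.
await invoke('export_data', { workspaceId: 'wk_123', exportPath: '/tmp/yaak-export.json' });
```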
serde::{Deserialize, Serialize};
 use sqlx::types::chrono::NaiveDateTime;
 use sqlx::types::{Json, JsonValue};
 use sqlx::{Pool, Sqlite};
+use tauri::AppHandle;
 
 #[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, Default)]
 #[serde(default, rename_all = "camelCase")]
@@ -796,6 +797,16 @@ pub fn generate_id(prefix: Option<&str>) -> String {
 }
 
 #[derive(Default, Debug, Deserialize, Serialize)]
+#[serde(default, rename_all = "camelCase")]
+pub struct WorkspaceExport {
+    yaak_version: String,
+    yaak_schema: i64,
+    timestamp: NaiveDateTime,
+    resources: WorkspaceExportResources,
+}
+
+#[derive(Default, Debug, Deserialize, Serialize)]
+#[serde(default, rename_all = "camelCase")]
 pub struct WorkspaceExportResources {
     workspaces: Vec<Workspace>,
     environments: Vec<Environment>,
@@ -803,23 +814,29 @@ pub struct WorkspaceExportResources {
     requests: Vec<HttpRequest>,
 }
 
-pub(crate) async fn get_workspace_export_resources(
+pub async fn get_workspace_export_resources(
+    app_handle: &AppHandle,
     pool: &Pool<Sqlite>,
     workspace_id: &str,
-) -> WorkspaceExportResources {
+) -> WorkspaceExport {
     let workspace = get_workspace(workspace_id, pool)
         .await
         .expect("Failed to get workspace");
-    return WorkspaceExportResources {
-        workspaces: vec![workspace],
-        environments: find_environments(workspace_id, pool)
-            .await
-            .expect("Failed to get environments"),
-        folders: find_folders(workspace_id, pool)
-            .await
-            .expect("Failed to get folders"),
-        requests: find_requests(workspace_id, pool)
-            .await
-            .expect("Failed to get requests"),
+    return WorkspaceExport {
+        yaak_version: app_handle.package_info().version.clone().to_string(),
+        yaak_schema: 1,
+        timestamp: chrono::Utc::now().naive_utc(),
+        resources: WorkspaceExportResources {
+            workspaces: vec![workspace],
+            environments: find_environments(workspace_id, pool)
+                .await
+                .expect("Failed to get environments"),
+            folders: find_folders(workspace_id, pool)
+                .await
+                .expect("Failed to get folders"),
+            requests: find_requests(workspace_id, pool)
+                .await
+                .expect("Failed to get requests"),
+        },
     };
 }
diff --git a/src-tauri/src/plugin.rs b/src-tauri/src/plugin.rs
index 90c2b343..6a7a9f88 100644
--- a/src-tauri/src/plugin.rs
+++ b/src-tauri/src/plugin.rs
@@ -8,32 +8,25 @@ use boa_engine::{
     Context, JsArgs, JsNativeError, JsValue, Module, NativeFunction, Source,
 };
 use boa_runtime::Console;
-use log::info;
 use serde::{Deserialize, Serialize};
 use serde_json::json;
-use sqlx::{Pool, Sqlite};
 use tauri::AppHandle;
 
-use crate::models::{self, Environment, Folder, HttpRequest, Workspace};
-
-pub fn run_plugin_hello(app_handle: &AppHandle, plugin_name: &str) {
-    run_plugin(app_handle, plugin_name, "hello", &[]);
-}
+use crate::models::{Environment, Folder, HttpRequest, Workspace};
 
 #[derive(Default, Debug, Deserialize, Serialize)]
-pub struct ImportedResources {
-    workspaces: Vec<Workspace>,
-    environments: Vec<Environment>,
-    folders: Vec<Folder>,
-    requests: Vec<HttpRequest>,
+pub struct ImportResources {
+    pub workspaces: Vec<Workspace>,
+    pub environments: Vec<Environment>,
+    pub folders: Vec<Folder>,
+    pub requests: Vec<HttpRequest>,
 }
 
 pub async fn run_plugin_import(
     app_handle: &AppHandle,
-    pool: &Pool<Sqlite>,
     plugin_name: &str,
     file_path: &str,
-) -> ImportedResources {
+) -> Option<ImportResources> {
     let file = fs::read_to_string(file_path)
         .unwrap_or_else(|_| panic!("Unable to read file {}", file_path));
     let file_contents = file.as_str();
@@ -43,44 +36,14 @@ pub async fn run_plugin_import(
         "pluginHookImport",
         &[js_string!(file_contents).into()],
     );
-    let resources: ImportedResources =
+
+    if result_json.is_null() {
+        return None;
+    }
+
+    let resources: ImportResources =
         serde_json::from_value(result_json).expect("failed to parse result json");
-
-    let mut imported_resources = ImportedResources::default();
-
-    info!("Importing resources");
-    for w in resources.workspaces {
-        let x = models::upsert_workspace(pool, w)
-            .await
-            .expect("Failed to create workspace");
-        imported_resources.workspaces.push(x.clone());
-        info!("Imported workspace: {}", x.name);
-    }
-
-    for e in resources.environments {
-        let x = models::upsert_environment(pool, e)
-            .await
-            .expect("Failed to create environment");
-        imported_resources.environments.push(x.clone());
-        info!("Imported environment: {}", x.name);
-    }
-
-    for f in resources.folders {
-        let x = models::upsert_folder(pool, f)
-            .await
-            .expect("Failed to create folder");
-        imported_resources.folders.push(x.clone());
-        info!("Imported folder: {}", x.name);
-    }
-
-    for r in resources.requests {
-        let x = models::upsert_request(pool, r)
-            .await
-            .expect("Failed to create request");
-        imported_resources.requests.push(x.clone());
-        info!("Imported request: {}", x.name);
-    }
-
-    imported_resources
+    Some(resources)
 }
 
 fn run_plugin(
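Taken together, the slimmed-down `run_plugin_import` defines a small contract for importer plugins: export a `pluginHookImport(contents)` that returns the resource bundle on success, or `undefined`/`null` to signal "not my format" (a null result becomes `None` on the Rust side, which is what lets `import_data` fall through to the next importer). A sketch of that contract in TypeScript terms; the type name is descriptive, not part of the codebase:

```ts
// Hypothetical typing of the importer-plugin contract expected by
// run_plugin_import(): return the bundle, or undefined to pass.
interface ImportedBundle {
  workspaces: unknown[];
  environments: unknown[];
  folders: unknown[];
  requests: unknown[];
}

export function pluginHookImport(contents: string): ImportedBundle | undefined {
  let parsed: unknown;
  try {
    parsed = JSON.parse(contents);
  } catch {
    return undefined; // not JSON, let the next importer have a look
  }
  if (typeof parsed !== 'object' || parsed === null) return undefined;
  // ...format-specific detection and mapping of `parsed` would go here...
  return undefined;
}
```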
diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json
index 6ba2bf2b..49d00a4c 100644
--- a/src-tauri/tauri.conf.json
+++ b/src-tauri/tauri.conf.json
@@ -25,8 +25,7 @@
           "short": "f",
           "takesValue": true
         }]
-      },
-      "hello": {}
+      }
     }
   },
   "allowlist": {
diff --git a/src-web/components/SidebarActions.tsx b/src-web/components/SidebarActions.tsx
index 2e1000f0..b6d3f2aa 100644
--- a/src-web/components/SidebarActions.tsx
+++ b/src-web/components/SidebarActions.tsx
@@ -14,15 +14,13 @@ export const SidebarActions = memo(function SidebarActions() {
   return (
-      {hidden && (
-      )}
+      {
-          const rootDir = await save(saveArgs);
-          if (rootDir == null) {
+          const exportPath = await save(saveArgs);
+          if (exportPath == null) {
             return;
           }
-          await invoke('export_data', { workspaceId, rootDir });
+          await invoke('export_data', { workspaceId, exportPath });
         },
       });
 }