mirror of https://github.com/mountain-loop/yaak.git (synced 2026-04-23 00:58:32 +02:00)

Hacky Yaak import complete!

.tauriignore (new file, +1)
@@ -0,0 +1 @@
+plugins
(file path not captured)
@@ -1,4 +0,0 @@
-export function greet() {
-  // Call Rust-provided fn!
-  sayHello('Plugin');
-}
(file path not captured)
@@ -1,7 +0,0 @@
-import { greet } from './greet.js';
-
-export function hello() {
-  greet();
-  console.log('Try JSON parse', JSON.parse(`{ "hello": 123 }`).hello);
-  console.log('Try RegExp', '123'.match(/[\d]+/));
-}
(file path not captured)
@@ -1,4 +1,4 @@
-function O(e, t) {
+function I(e, t) {
   return (
     console.log('IMPORTING Environment', e._id, e.name, JSON.stringify(e, null, 2)),
     {
@@ -8,64 +8,64 @@ function O(e, t) {
       workspaceId: t,
       model: 'environment',
       name: e.name,
-      variables: Object.entries(e.data).map(([i, s]) => ({
+      variables: Object.entries(e.data).map(([n, s]) => ({
        enabled: !0,
-        name: i,
+        name: n,
        value: `${s}`,
      })),
    }
  );
 }
-function g(e) {
+function S(e) {
   return m(e) && e._type === 'workspace';
 }
-function y(e) {
+function O(e) {
   return m(e) && e._type === 'request_group';
 }
-function _(e) {
+function g(e) {
   return m(e) && e._type === 'request';
 }
-function I(e) {
+function f(e) {
   return m(e) && e._type === 'environment';
 }
 function m(e) {
   return Object.prototype.toString.call(e) === '[object Object]';
 }
-function h(e) {
+function y(e) {
   return Object.prototype.toString.call(e) === '[object String]';
 }
-function N(e) {
-  return Object.entries(e).map(([t, i]) => ({
+function h(e) {
+  return Object.entries(e).map(([t, n]) => ({
     enabled: !0,
     name: t,
-    value: `${i}`,
+    value: `${n}`,
   }));
 }
-function p(e) {
-  return h(e) ? e.replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, '${[$2]}') : e;
+function d(e) {
+  return y(e) ? e.replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, '${[$2]}') : e;
 }
-function D(e, t, i = 0) {
-  var a, d;
+function _(e, t, n = 0) {
+  var u, r;
   console.log('IMPORTING REQUEST', e._id, e.name, JSON.stringify(e, null, 2));
   let s = null,
-    n = null;
-  ((a = e.body) == null ? void 0 : a.mimeType) === 'application/graphql'
-    ? ((s = 'graphql'), (n = p(e.body.text)))
-    : ((d = e.body) == null ? void 0 : d.mimeType) === 'application/json' &&
-      ((s = 'application/json'), (n = p(e.body.text)));
-  let u = null,
-    r = {};
+    o = null;
+  ((u = e.body) == null ? void 0 : u.mimeType) === 'application/graphql'
+    ? ((s = 'graphql'), (o = d(e.body.text)))
+    : ((r = e.body) == null ? void 0 : r.mimeType) === 'application/json' &&
+      ((s = 'application/json'), (o = d(e.body.text)));
+  let a = null,
+    l = {};
   return (
     e.authentication.type === 'bearer'
-      ? ((u = 'bearer'),
-        (r = {
-          token: p(e.authentication.token),
+      ? ((a = 'bearer'),
+        (l = {
+          token: d(e.authentication.token),
         }))
       : e.authentication.type === 'basic' &&
-        ((u = 'basic'),
-        (r = {
-          username: p(e.authentication.username),
-          password: p(e.authentication.password),
+        ((a = 'basic'),
+        (l = {
+          username: d(e.authentication.username),
+          password: d(e.authentication.password),
         })),
     {
       id: e._id,
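The renamed helper `d` (formerly `p`) carries the interesting logic here: it rewrites Insomnia's `{{ _.var }}` template tags into Yaak's `${[var]}` placeholder syntax. A standalone sketch of that substitution, with an assumed variable name for illustration:

    // Same regex as the bundled d(): the optional '_.' prefix is matched
    // by group 1 and dropped; only the variable name ($2) is kept.
    const toYaakTemplate = (s) => s.replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, '${[$2]}');

    console.log(toYaakTemplate('{{_.base_url}}/users')); // "${[base_url]}/users"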
@@ -74,25 +74,25 @@ function D(e, t, i = 0) {
       workspaceId: t,
       folderId: e.parentId === t ? null : e.parentId,
       model: 'http_request',
-      sortPriority: i,
+      sortPriority: n,
       name: e.name,
-      url: p(e.url),
-      body: n,
+      url: d(e.url),
+      body: o,
       bodyType: s,
-      authentication: r,
-      authenticationType: u,
+      authentication: l,
+      authenticationType: a,
       method: e.method,
       headers: (e.headers ?? [])
-        .map(({ name: c, value: o, disabled: f }) => ({
-          enabled: !f,
+        .map(({ name: c, value: p, disabled: i }) => ({
+          enabled: !i,
           name: c,
-          value: o,
+          value: p,
         }))
-        .filter(({ name: c, value: o }) => c !== '' || o !== ''),
+        .filter(({ name: c, value: p }) => c !== '' || p !== ''),
     }
   );
 }
-function w(e, t) {
+function N(e, t) {
   return (
     console.log('IMPORTING Workspace', e._id, e.name, JSON.stringify(e, null, 2)),
     {
@@ -105,7 +105,7 @@ function w(e, t) {
     }
   );
 }
-function b(e, t) {
+function D(e, t) {
   return (
     console.log('IMPORTING FOLDER', e._id, e.name, JSON.stringify(e, null, 2)),
     {
@@ -119,34 +119,37 @@ function b(e, t) {
     }
   );
 }
-function T(e) {
-  const t = JSON.parse(e);
-  if (!m(t)) return;
-  const { _type: i, __export_format: s } = t;
-  if (i !== 'export' || s !== 4 || !Array.isArray(t.resources)) return;
+function w(e) {
+  let t;
+  try {
+    t = JSON.parse(e);
+  } catch {
+    return;
+  }
+  if (!m(t) || !Array.isArray(t.requests)) return;
   const n = {
       workspaces: [],
       requests: [],
      environments: [],
      folders: [],
    },
-    u = t.resources.filter(g);
-  for (const r of u) {
-    console.log('IMPORTING WORKSPACE', r.name);
-    const a = t.resources.find((o) => I(o) && o.parentId === r._id);
+    s = t.resources.filter(S);
+  for (const o of s) {
+    console.log('IMPORTING WORKSPACE', o.name);
+    const a = t.resources.find((r) => f(r) && r.parentId === o._id);
     console.log('FOUND BASE ENV', a.name),
-      n.workspaces.push(w(r, a ? N(a.data) : [])),
+      n.workspaces.push(N(o, a ? h(a.data) : [])),
       console.log('IMPORTING ENVIRONMENTS', a.name);
-    const d = t.resources.filter((o) => I(o) && o.parentId === (a == null ? void 0 : a._id));
-    console.log('FOUND', d.length, 'ENVIRONMENTS'),
-      n.environments.push(...d.map((o) => O(o, r._id)));
-    const c = (o) => {
-      const f = t.resources.filter((l) => l.parentId === o);
-      let S = 0;
-      for (const l of f)
-        y(l) ? (n.folders.push(b(l, r._id)), c(l._id)) : _(l) && n.requests.push(D(l, r._id, S++));
+    const l = t.resources.filter((r) => f(r) && r.parentId === (a == null ? void 0 : a._id));
+    console.log('FOUND', l.length, 'ENVIRONMENTS'),
+      n.environments.push(...l.map((r) => I(r, o._id)));
+    const u = (r) => {
+      const c = t.resources.filter((i) => i.parentId === r);
+      let p = 0;
+      for (const i of c)
+        O(i) ? (n.folders.push(D(i, o._id)), u(i._id)) : g(i) && n.requests.push(_(i, o._id, p++));
     };
-    c(r._id);
+    u(o._id);
  }
  return (
    (n.requests = n.requests.filter(Boolean)),
@@ -155,4 +158,4 @@ function T(e) {
     n
   );
 }
-export { T as pluginHookImport };
+export { w as pluginHookImport };
(file path not captured)
@@ -12,14 +12,19 @@ import { parseVariables } from './helpers/variables.js';
 import { importFolder } from './importers/folder.js';
 
 export function pluginHookImport(contents) {
-  const parsed = JSON.parse(contents);
-  if (!isJSObject(parsed)) {
-    return;
+  let parsed;
+  try {
+    parsed = JSON.parse(contents);
+  } catch (e) {
+    return undefined;
   }
 
-  const { _type, __export_format } = parsed;
-  if (_type !== 'export' || __export_format !== 4 || !Array.isArray(parsed.resources)) {
-    return;
+  if (!isJSObject(parsed)) {
+    return undefined;
+  }
+
+  if (!Array.isArray(parsed.requests)) {
+    return undefined;
   }
 
   const resources = {
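With parsing wrapped in try/catch, the Insomnia hook now answers "not my format" with `undefined` instead of throwing on malformed input. A hypothetical driver (not part of the commit) showing the observable behavior:

    import { pluginHookImport } from './index.js';

    console.log(pluginHookImport('not json'));         // undefined: JSON.parse threw
    console.log(pluginHookImport('{"requests": {}}')); // undefined: requests is not an array
    // A real Insomnia export (an object with a requests array) proceeds to the
    // conversion logic, which is truncated in this hunk.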
src-tauri/plugins/yaak-importer/out/index.js (new file, +13)
@@ -0,0 +1,13 @@
+function u(r) {
+  let e;
+  try {
+    e = JSON.parse(r);
+  } catch {
+    return;
+  }
+  if (t(e) && e.yaakSchema === 1) return e.resources;
+}
+function t(r) {
+  return Object.prototype.toString.call(r) === '[object Object]';
+}
+export { t as isJSObject, u as pluginHookImport };
src-tauri/plugins/yaak-importer/src/index.js (new file, +20)
@@ -0,0 +1,20 @@
+export function pluginHookImport(contents) {
+  let parsed;
+  try {
+    parsed = JSON.parse(contents);
+  } catch (err) {
+    return undefined;
+  }
+
+  if (!isJSObject(parsed)) {
+    return undefined;
+  }
+
+  if (parsed.yaakSchema !== 1) return undefined;
+
+  return parsed.resources; // Should already be in the correct format
+}
+
+export function isJSObject(obj) {
+  return Object.prototype.toString.call(obj) === '[object Object]';
+}
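The Yaak importer is nearly a pass-through: it validates the envelope and hands `resources` straight back. A small sketch of the minimal input it accepts; the empty collections mirror the `WorkspaceExportResources` fields defined later in this commit:

    import { pluginHookImport } from './index.js';

    const contents = JSON.stringify({
      yaakSchema: 1,
      resources: { workspaces: [], environments: [], folders: [], requests: [] },
    });
    console.log(pluginHookImport(contents)); // the resources object, unchanged
    console.log(pluginHookImport('{}'));     // undefined: yaakSchema missing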
src-tauri/plugins/yaak-importer/vite.config.js (new file, +13)
@@ -0,0 +1,13 @@
+import { resolve } from 'path';
+import { defineConfig } from 'vite';
+
+export default defineConfig({
+  build: {
+    lib: {
+      entry: resolve(__dirname, 'src/index.js'),
+      fileName: 'index',
+      formats: ['es'],
+    },
+    outDir: resolve(__dirname, 'out'),
+  },
+});
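The config is standard Vite library mode: `src/index.js` in, one ES-module bundle out, which is how the checked-in `out/index.js` above was produced. The build invocation itself is not part of the commit; presumably something like:

    // From src-tauri/plugins/yaak-importer/ (assumed; no package.json is shown):
    //   npx vite build
    // With fileName: 'index' and formats: ['es'], the bundle lands at out/index.js.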
(file path not captured)
@@ -11,12 +11,12 @@ use std::collections::HashMap;
 use std::env::current_dir;
 use std::fs::{create_dir_all, File};
 use std::io::Write;
-use std::path::Path;
 use std::process::exit;
 
 use base64::Engine;
 use http::header::{HeaderName, ACCEPT, USER_AGENT};
 use http::{HeaderMap, HeaderValue, Method};
+use log::info;
 use rand::random;
 use reqwest::redirect::Policy;
 use serde::Serialize;
@@ -35,6 +35,7 @@ use tokio::sync::Mutex;
 use window_ext::TrafficLightWindowExt;
 
 use crate::analytics::{track_event, AnalyticsAction, AnalyticsResource};
+use crate::plugin::ImportResources;
 
 mod analytics;
 mod models;
@@ -266,32 +267,89 @@ async fn import_data(
     window: Window<Wry>,
     db_instance: State<'_, Mutex<Pool<Sqlite>>>,
     file_paths: Vec<&str>,
-) -> Result<plugin::ImportedResources, String> {
+) -> Result<ImportResources, String> {
     let pool = &*db_instance.lock().await;
-    let imported = plugin::run_plugin_import(
+    let mut resources = plugin::run_plugin_import(
         &window.app_handle(),
-        pool,
         "insomnia-importer",
         file_paths.first().unwrap(),
     )
     .await;
-    Ok(imported)
+    println!("Resources: {:?}", resources);
+
+    if resources.is_none() {
+        resources = plugin::run_plugin_import(
+            &window.app_handle(),
+            "yaak-importer",
+            file_paths.first().unwrap(),
+        )
+        .await;
+    }
+    println!("Resources: {:?}", resources);
+
+    match resources {
+        None => Err("Failed to import data".to_string()),
+        Some(r) => {
+            let mut imported_resources = ImportResources::default();
+
+            info!("Importing resources");
+            for w in r.workspaces {
+                let x = models::upsert_workspace(pool, w)
+                    .await
+                    .expect("Failed to create workspace");
+                imported_resources.workspaces.push(x.clone());
+                info!("Imported workspace: {}", x.name);
+            }
+
+            for e in r.environments {
+                let x = models::upsert_environment(pool, e)
+                    .await
+                    .expect("Failed to create environment");
+                imported_resources.environments.push(x.clone());
+                info!("Imported environment: {}", x.name);
+            }
+
+            for f in r.folders {
+                let x = models::upsert_folder(pool, f)
+                    .await
+                    .expect("Failed to create folder");
+                imported_resources.folders.push(x.clone());
+                info!("Imported folder: {}", x.name);
+            }
+
+            for r in r.requests {
+                let x = models::upsert_request(pool, r)
+                    .await
+                    .expect("Failed to create request");
+                imported_resources.requests.push(x.clone());
+                info!("Imported request: {}", x.name);
+            }
+
+            Ok(imported_resources)
+        }
+    }
 }
 
 #[tauri::command]
 async fn export_data(
+    app_handle: AppHandle<Wry>,
     db_instance: State<'_, Mutex<Pool<Sqlite>>>,
-    root_dir: &str,
+    export_path: &str,
     workspace_id: &str,
 ) -> Result<(), String> {
-    let path = Path::new(root_dir).join("yaak-export.json");
     let pool = &*db_instance.lock().await;
-    let imported = models::get_workspace_export_resources(pool, workspace_id).await;
-    println!("Exporting {:?}", path);
-    let f = File::create(path).expect("Unable to create file");
-    serde_json::to_writer_pretty(f, &imported)
+    let export_data = models::get_workspace_export_resources(&app_handle, pool, workspace_id).await;
+    let f = File::options()
+        .create(true)
+        .truncate(true)
+        .write(true)
+        .open(export_path)
+        .expect("Unable to create file");
+    serde_json::to_writer_pretty(&f, &export_data)
         .map_err(|e| e.to_string())
         .expect("Failed to write");
+    f.sync_all().expect("Failed to sync");
+    info!("Exported Yaak workspace to {:?}", export_path);
     Ok(())
 }
 
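`import_data` now chains importers: insomnia-importer first, yaak-importer if that returns `None`, and only a double miss is an error; whichever plugin matches, its resources are upserted into SQLite (logic moved here from the plugin module). On the front end the command would be called roughly like this (a sketch; Tauri's snake_case-to-camelCase argument mapping is the same one the `export_data`/`exportPath` change below relies on):

    import { invoke } from '@tauri-apps/api/tauri';

    // Rejects with "Failed to import data" only when both importers return null.
    const imported = await invoke('import_data', {
      filePaths: ['/path/to/export.json'], // maps to file_paths: Vec<&str>
    });
    console.log(imported.workspaces.length, imported.requests.length);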
@@ -775,37 +833,6 @@ fn main() {
 
                 let _ = models::cancel_pending_responses(&pool).await;
 
-                // TODO: Move this somewhere better
-                match app.get_cli_matches() {
-                    Ok(matches) => {
-                        let cmd = matches.subcommand.unwrap_or_default();
-                        if cmd.name == "import" {
-                            let arg_file = cmd
-                                .matches
-                                .args
-                                .get("file")
-                                .unwrap()
-                                .value
-                                .as_str()
-                                .unwrap();
-                            plugin::run_plugin_import(
-                                &app.handle(),
-                                &pool,
-                                "insomnia-importer",
-                                arg_file,
-                            )
-                            .await;
-                            exit(0);
-                        } else if cmd.name == "hello" {
-                            plugin::run_plugin_hello(&app.handle(), "hello-world");
-                            exit(0);
-                        }
-                    }
-                    Err(e) => {
-                        println!("Nothing found: {}", e);
-                    }
-                }
-
                 Ok(())
             })
         })
(file path not captured)
@@ -6,6 +6,7 @@ use serde::{Deserialize, Serialize};
 use sqlx::types::chrono::NaiveDateTime;
 use sqlx::types::{Json, JsonValue};
 use sqlx::{Pool, Sqlite};
+use tauri::AppHandle;
 
 #[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, Default)]
 #[serde(default, rename_all = "camelCase")]
@@ -796,6 +797,16 @@ pub fn generate_id(prefix: Option<&str>) -> String {
 }
 
 #[derive(Default, Debug, Deserialize, Serialize)]
+#[serde(default, rename_all = "camelCase")]
+pub struct WorkspaceExport {
+    yaak_version: String,
+    yaak_schema: i64,
+    timestamp: NaiveDateTime,
+    resources: WorkspaceExportResources,
+}
+
+#[derive(Default, Debug, Deserialize, Serialize)]
+#[serde(default, rename_all = "camelCase")]
 pub struct WorkspaceExportResources {
     workspaces: Vec<Workspace>,
     environments: Vec<Environment>,
@@ -803,23 +814,29 @@ pub struct WorkspaceExportResources {
     requests: Vec<HttpRequest>,
 }
 
-pub(crate) async fn get_workspace_export_resources(
+pub async fn get_workspace_export_resources(
+    app_handle: &AppHandle,
     pool: &Pool<Sqlite>,
     workspace_id: &str,
-) -> WorkspaceExportResources {
+) -> WorkspaceExport {
     let workspace = get_workspace(workspace_id, pool)
         .await
         .expect("Failed to get workspace");
-    return WorkspaceExportResources {
-        workspaces: vec![workspace],
-        environments: find_environments(workspace_id, pool)
-            .await
-            .expect("Failed to get environments"),
-        folders: find_folders(workspace_id, pool)
-            .await
-            .expect("Failed to get folders"),
-        requests: find_requests(workspace_id, pool)
-            .await
-            .expect("Failed to get requests"),
+    return WorkspaceExport {
+        yaak_version: app_handle.package_info().version.clone().to_string(),
+        yaak_schema: 1,
+        timestamp: chrono::Utc::now().naive_utc(),
+        resources: WorkspaceExportResources {
+            workspaces: vec![workspace],
+            environments: find_environments(workspace_id, pool)
+                .await
+                .expect("Failed to get environments"),
+            folders: find_folders(workspace_id, pool)
+                .await
+                .expect("Failed to get folders"),
+            requests: find_requests(workspace_id, pool)
+                .await
+                .expect("Failed to get requests"),
+        },
     };
 }
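With `rename_all = "camelCase"` on both structs, the export file now carries a versioned envelope whose `yaakSchema` field is exactly what the new yaak-importer plugin checks (`e.yaakSchema === 1`). An illustrative (assumed) instance of the serialized JSON:

    const yaakExport = {
      yaakVersion: '1.0.0', // app_handle.package_info().version; placeholder value
      yaakSchema: 1,
      timestamp: '2023-09-01T00:00:00', // NaiveDateTime serializes without a timezone
      resources: {
        workspaces: [/* Workspace models */],
        environments: [],
        folders: [],
        requests: [],
      },
    };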
(file path not captured)
@@ -8,32 +8,25 @@ use boa_engine::{
     Context, JsArgs, JsNativeError, JsValue, Module, NativeFunction, Source,
 };
 use boa_runtime::Console;
-use log::info;
 use serde::{Deserialize, Serialize};
 use serde_json::json;
-use sqlx::{Pool, Sqlite};
 use tauri::AppHandle;
 
-use crate::models::{self, Environment, Folder, HttpRequest, Workspace};
+use crate::models::{Environment, Folder, HttpRequest, Workspace};
 
-pub fn run_plugin_hello(app_handle: &AppHandle, plugin_name: &str) {
-    run_plugin(app_handle, plugin_name, "hello", &[]);
-}
-
 #[derive(Default, Debug, Deserialize, Serialize)]
-pub struct ImportedResources {
-    workspaces: Vec<Workspace>,
-    environments: Vec<Environment>,
-    folders: Vec<Folder>,
-    requests: Vec<HttpRequest>,
+pub struct ImportResources {
+    pub workspaces: Vec<Workspace>,
+    pub environments: Vec<Environment>,
+    pub folders: Vec<Folder>,
+    pub requests: Vec<HttpRequest>,
 }
 
 pub async fn run_plugin_import(
     app_handle: &AppHandle,
-    pool: &Pool<Sqlite>,
     plugin_name: &str,
     file_path: &str,
-) -> ImportedResources {
+) -> Option<ImportResources> {
     let file = fs::read_to_string(file_path)
         .unwrap_or_else(|_| panic!("Unable to read file {}", file_path));
     let file_contents = file.as_str();
@@ -43,44 +36,14 @@ pub async fn run_plugin_import(
         "pluginHookImport",
         &[js_string!(file_contents).into()],
     );
-    let resources: ImportedResources =
+    if result_json.is_null() {
+        return None;
+    }
+
+    let resources: ImportResources =
         serde_json::from_value(result_json).expect("failed to parse result json");
-    let mut imported_resources = ImportedResources::default();
-
-    info!("Importing resources");
-    for w in resources.workspaces {
-        let x = models::upsert_workspace(pool, w)
-            .await
-            .expect("Failed to create workspace");
-        imported_resources.workspaces.push(x.clone());
-        info!("Imported workspace: {}", x.name);
-    }
-
-    for e in resources.environments {
-        let x = models::upsert_environment(pool, e)
-            .await
-            .expect("Failed to create environment");
-        imported_resources.environments.push(x.clone());
-        info!("Imported environment: {}", x.name);
-    }
-
-    for f in resources.folders {
-        let x = models::upsert_folder(pool, f)
-            .await
-            .expect("Failed to create folder");
-        imported_resources.folders.push(x.clone());
-        info!("Imported folder: {}", x.name);
-    }
-
-    for r in resources.requests {
-        let x = models::upsert_request(pool, r)
-            .await
-            .expect("Failed to create request");
-        imported_resources.requests.push(x.clone());
-        info!("Imported request: {}", x.name);
-    }
-
-    imported_resources
+    Some(resources)
 }
 
 fn run_plugin(
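`run_plugin_import` now maps a `null` hook result to `None`, which is the hinge the importer fallback chain in `import_data` turns on. The contract each importer plugin is assumed to follow, sketched in JS (`looksLikeMyFormat` and `convertToYaakResources` are hypothetical helpers):

    // Return the resources on success; return undefined (null once it crosses
    // the Boa boundary) so the caller can try the next importer.
    export function pluginHookImport(contents) {
      let parsed;
      try {
        parsed = JSON.parse(contents);
      } catch {
        return undefined; // not JSON, not ours
      }
      if (!looksLikeMyFormat(parsed)) return undefined; // hypothetical check
      return convertToYaakResources(parsed); // hypothetical conversion
    }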
(file path not captured)
@@ -25,8 +25,7 @@
           "short": "f",
           "takesValue": true
         }]
-      },
-      "hello": {}
+      }
     }
   },
   "allowlist": {
(file path not captured)
@@ -14,15 +14,13 @@ export const SidebarActions = memo(function SidebarActions() {
 
   return (
     <HStack>
-      {hidden && (
-        <IconButton
-          onClick={toggle}
-          className="pointer-events-auto"
-          size="sm"
-          title="Show sidebar"
-          icon={hidden ? 'leftPanelHidden' : 'leftPanelVisible'}
-        />
-      )}
+      <IconButton
+        onClick={toggle}
+        className="pointer-events-auto"
+        size="sm"
+        title="Show sidebar"
+        icon={hidden ? 'leftPanelHidden' : 'leftPanelVisible'}
+      />
       <Dropdown
         items={[
           {
(file path not captured)
@@ -21,6 +21,7 @@ export const WorkspaceHeader = memo(function WorkspaceHeader({ className }: Prop
 
   return (
     <HStack
+      space={2}
       justifyContent="center"
       alignItems="center"
       className={classNames(className, 'w-full h-full')}
(file path not captured)
@@ -14,12 +14,12 @@ export function useExportData() {
 
   return useMutation({
     mutationFn: async () => {
-      const rootDir = await save(saveArgs);
-      if (rootDir == null) {
+      const exportPath = await save(saveArgs);
+      if (exportPath == null) {
         return;
       }
 
-      await invoke('export_data', { workspaceId, rootDir });
+      await invoke('export_data', { workspaceId, exportPath });
     },
   });
 }