Context menu, logs in DevTools, export, tweaks

This commit is contained in:
Gregory Schier
2023-11-09 09:28:01 -08:00
parent aeda72f13e
commit 9ebb3ef532
20 changed files with 593 additions and 157 deletions

View File

@@ -11,6 +11,7 @@ use std::collections::HashMap;
use std::env::current_dir;
use std::fs::{create_dir_all, File};
use std::io::Write;
use std::path::Path;
use std::process::exit;
use base64::Engine;
@@ -20,13 +21,14 @@ use rand::random;
use reqwest::redirect::Policy;
use serde::Serialize;
use sqlx::migrate::Migrator;
use sqlx::sqlite::SqlitePoolOptions;
use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions};
use sqlx::types::Json;
use sqlx::{Pool, Sqlite};
use sqlx::{ConnectOptions, Pool, Sqlite};
#[cfg(target_os = "macos")]
use tauri::TitleBarStyle;
use tauri::{AppHandle, Menu, RunEvent, State, Submenu, Window, WindowUrl, Wry};
use tauri::{CustomMenuItem, Manager, WindowEvent};
use tauri_plugin_log::LogTarget;
use tauri_plugin_window_state::{StateFlags, WindowExt};
use tokio::sync::Mutex;
@@ -258,6 +260,7 @@ async fn actually_send_request(
Err(e) => response_err(response, e.to_string(), app_handle, pool).await,
}
}
#[tauri::command]
async fn import_data(
window: Window<Wry>,
@@ -275,6 +278,23 @@ async fn import_data(
Ok(imported)
}
#[tauri::command]
/// Export every resource in a workspace (workspace, environments, folders,
/// requests) as pretty-printed JSON to `<root_dir>/yaak-export.json`.
///
/// Returns `Err(String)` if the file cannot be created or the JSON cannot be
/// written; previously these cases panicked via `expect`, crashing the app
/// even though the command already returns `Result<(), String>`.
async fn export_data(
    db_instance: State<'_, Mutex<Pool<Sqlite>>>,
    root_dir: &str,
    workspace_id: &str,
) -> Result<(), String> {
    let path = Path::new(root_dir).join("yaak-export.json");
    let pool = &*db_instance.lock().await;
    // Renamed from `imported`: this is the data being *exported*.
    let export_data = models::get_workspace_export_resources(pool, workspace_id).await;
    println!("Exporting {:?}", path);
    // Propagate I/O and serialization failures to the frontend instead of panicking.
    let f = File::create(&path).map_err(|e| format!("Unable to create {:?}: {e}", path))?;
    serde_json::to_writer_pretty(f, &export_data).map_err(|e| e.to_string())?;
    Ok(())
}
#[tauri::command]
async fn send_request(
window: Window<Wry>,
@@ -715,7 +735,13 @@ async fn delete_workspace(
fn main() {
tauri::Builder::default()
.plugin(
tauri_plugin_log::Builder::default()
.targets([LogTarget::LogDir, LogTarget::Stdout, LogTarget::Webview])
.build(),
)
.plugin(tauri_plugin_window_state::Builder::default().build())
.plugin(tauri_plugin_context_menu::init())
.setup(|app| {
let dir = match is_dev() {
true => current_dir().unwrap(),
@@ -730,7 +756,13 @@ fn main() {
tauri::async_runtime::block_on(async move {
let pool = SqlitePoolOptions::new()
.connect(url.as_str())
.connect_with(
SqliteConnectOptions::new()
.filename(p)
.create_if_missing(true)
.disable_statement_logging()
.clone(),
)
.await
.expect("Failed to connect to database");
@@ -789,6 +821,7 @@ fn main() {
delete_response,
delete_workspace,
duplicate_request,
export_data,
get_key_value,
get_environment,
get_folder,

View File

@@ -794,3 +794,32 @@ pub fn generate_id(prefix: Option<&str>) -> String {
Some(p) => format!("{p}_{id}"),
};
}
/// All resources belonging to one workspace, bundled together for JSON
/// export (consumed by the `export_data` command).
///
/// Fields are `Vec`s so the serialized shape can hold multiple items of
/// each kind; `workspaces` currently carries exactly one entry but the
/// format leaves room for multi-workspace exports.
#[derive(Default, Debug, Deserialize, Serialize)]
pub struct WorkspaceExportResources {
    workspaces: Vec<Workspace>,
    environments: Vec<Environment>,
    folders: Vec<Folder>,
    requests: Vec<HttpRequest>,
}
/// Gather the workspace identified by `workspace_id` together with all of
/// its environments, folders, and requests, ready for serialization.
///
/// Panics (via `expect`) if any of the underlying queries fail; callers
/// treat a missing workspace as a programming error.
pub(crate) async fn get_workspace_export_resources(
    pool: &Pool<Sqlite>,
    workspace_id: &str,
) -> WorkspaceExportResources {
    let workspace = get_workspace(workspace_id, pool)
        .await
        .expect("Failed to get workspace");
    let environments = find_environments(workspace_id, pool)
        .await
        .expect("Failed to get environments");
    let folders = find_folders(workspace_id, pool)
        .await
        .expect("Failed to get folders");
    let requests = find_requests(workspace_id, pool)
        .await
        .expect("Failed to get requests");
    // Tail expression instead of an explicit `return …;`.
    WorkspaceExportResources {
        workspaces: vec![workspace],
        environments,
        folders,
        requests,
    }
}

View File

@@ -8,6 +8,7 @@ use boa_engine::{
Context, JsArgs, JsNativeError, JsValue, Module, NativeFunction, Source,
};
use boa_runtime::Console;
use log::info;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::{Pool, Sqlite};
@@ -34,7 +35,7 @@ pub async fn run_plugin_import(
file_path: &str,
) -> ImportedResources {
let file = fs::read_to_string(file_path)
.expect(format!("Unable to read file {}", file_path.to_string()).as_str());
.unwrap_or_else(|_| panic!("Unable to read file {}", file_path));
let file_contents = file.as_str();
let result_json = run_plugin(
app_handle,
@@ -46,41 +47,37 @@ pub async fn run_plugin_import(
serde_json::from_value(result_json).expect("failed to parse result json");
let mut imported_resources = ImportedResources::default();
println!("Importing resources");
info!("Importing resources");
for w in resources.workspaces {
println!("Importing workspace: {:?}", w);
let x = models::upsert_workspace(&pool, w)
let x = models::upsert_workspace(pool, w)
.await
.expect("Failed to create workspace");
imported_resources.workspaces.push(x.clone());
println!("Imported workspace: {}", x.name);
info!("Imported workspace: {}", x.name);
}
for e in resources.environments {
println!("Importing environment: {:?}", e);
let x = models::upsert_environment(&pool, e)
let x = models::upsert_environment(pool, e)
.await
.expect("Failed to create environment");
imported_resources.environments.push(x.clone());
println!("Imported environment: {}", x.name);
info!("Imported environment: {}", x.name);
}
for f in resources.folders {
println!("Importing folder: {:?}", f);
let x = models::upsert_folder(&pool, f)
let x = models::upsert_folder(pool, f)
.await
.expect("Failed to create folder");
imported_resources.folders.push(x.clone());
println!("Imported folder: {}", x.name);
info!("Imported folder: {}", x.name);
}
for r in resources.requests {
println!("Importing request: {:?}", r);
let x = models::upsert_request(&pool, r)
let x = models::upsert_request(pool, r)
.await
.expect("Failed to create request");
imported_resources.requests.push(x.clone());
println!("Imported request: {}", x.name);
info!("Imported request: {}", x.name);
}
imported_resources