Curl import (#24)

This commit is contained in:
Gregory Schier
2024-05-07 21:57:03 -07:00
committed by GitHub
parent 896e3d5831
commit 82e2a6b73e
27 changed files with 2054 additions and 724 deletions

View File

@@ -233,7 +233,7 @@ fn get_window_size(app_handle: &AppHandle) -> String {
async fn get_id(app_handle: &AppHandle) -> String {
let id = get_key_value_string(app_handle, "analytics", "id", "").await;
if id.is_empty() {
let new_id = generate_id(None);
let new_id = generate_id();
set_key_value_string(app_handle, "analytics", "id", new_id.as_str()).await;
new_id
} else {

View File

@@ -38,22 +38,7 @@ use window_ext::TrafficLightWindowExt;
use crate::analytics::{AnalyticsAction, AnalyticsResource};
use crate::grpc::metadata_to_map;
use crate::http::send_http_request;
use crate::models::{
cancel_pending_grpc_connections, cancel_pending_responses, create_http_response,
delete_all_grpc_connections, delete_all_http_responses, delete_cookie_jar, delete_environment,
delete_folder, delete_grpc_connection, delete_grpc_request, delete_http_request,
delete_http_response, delete_workspace, duplicate_grpc_request, duplicate_http_request,
get_cookie_jar, get_environment, get_folder, get_grpc_connection, get_grpc_request,
get_http_request, get_http_response, get_key_value_raw, get_or_create_settings, get_workspace,
get_workspace_export_resources, list_cookie_jars, list_environments, list_folders,
list_grpc_connections, list_grpc_events, list_grpc_requests, list_http_requests,
list_responses, list_workspaces, set_key_value_raw, update_response_if_id, update_settings,
upsert_cookie_jar, upsert_environment, upsert_folder, upsert_grpc_connection,
upsert_grpc_event, upsert_grpc_request, upsert_http_request, upsert_workspace, CookieJar,
Environment, EnvironmentVariable, Folder, GrpcConnection, GrpcEvent, GrpcEventType,
GrpcRequest, HttpRequest, HttpRequestHeader, HttpResponse, KeyValue, Settings, Workspace,
WorkspaceExportResources,
};
use crate::models::{cancel_pending_grpc_connections, cancel_pending_responses, create_http_response, delete_all_grpc_connections, delete_all_http_responses, delete_cookie_jar, delete_environment, delete_folder, delete_grpc_connection, delete_grpc_request, delete_http_request, delete_http_response, delete_workspace, duplicate_grpc_request, duplicate_http_request, get_cookie_jar, get_environment, get_folder, get_grpc_connection, get_grpc_request, get_http_request, get_http_response, get_key_value_raw, get_or_create_settings, get_workspace, get_workspace_export_resources, list_cookie_jars, list_environments, list_folders, list_grpc_connections, list_grpc_events, list_grpc_requests, list_http_requests, list_responses, list_workspaces, set_key_value_raw, update_response_if_id, update_settings, upsert_cookie_jar, upsert_environment, upsert_folder, upsert_grpc_connection, upsert_grpc_event, upsert_grpc_request, upsert_http_request, upsert_workspace, CookieJar, Environment, EnvironmentVariable, Folder, GrpcConnection, GrpcEvent, GrpcEventType, GrpcRequest, HttpRequest, HttpRequestHeader, HttpResponse, KeyValue, Settings, Workspace, WorkspaceExportResources, generate_model_id, ModelType};
use crate::plugin::ImportResult;
use crate::updates::{update_mode_from_str, UpdateMode, YaakUpdater};
use crate::window_menu::app_menu;
@@ -757,27 +742,59 @@ async fn cmd_import_data(
None => Err("No import handlers found".to_string()),
Some(r) => {
let mut imported_resources = WorkspaceExportResources::default();
let mut id_map: HashMap<String, String> = HashMap::new();
// Resolve a placeholder ID of the form "GENERATE_ID::<key>" into a freshly
// generated model ID, reusing the same generated ID for repeated keys so
// cross-references between imported resources stay consistent.
// Non-placeholder IDs pass through unchanged.
let maybe_gen_id = |id: &str, model: ModelType, ids: &mut HashMap<String, String>| -> String {
    match id.strip_prefix("GENERATE_ID::") {
        // Not a placeholder — keep the caller-supplied ID as-is.
        None => id.to_string(),
        // Single map lookup via the entry API; generate only on first sight
        // of this key (the previous get-then-insert did two lookups, and its
        // replace("GENERATE_ID", "") kept a stray "::" in the key).
        Some(unique_key) => ids
            .entry(unique_key.to_string())
            .or_insert_with(|| generate_model_id(model))
            .clone(),
    }
};
// Option-friendly wrapper around `maybe_gen_id` for nullable foreign keys
// (e.g. `folder_id`): resolves `Some(id)` through the placeholder logic and
// leaves `None` untouched.
let maybe_gen_id_opt = |id: Option<String>, model: ModelType, ids: &mut HashMap<String, String>| -> Option<String> {
    // `Option::map` replaces the manual Some/None match.
    id.map(|id| maybe_gen_id(id.as_str(), model, ids))
};
info!("Importing resources");
for v in r.resources.workspaces {
for mut v in r.resources.workspaces {
v.id = maybe_gen_id(v.id.as_str(), ModelType::Workspace, &mut id_map);
let x = upsert_workspace(&w, v).await.map_err(|e| e.to_string())?;
imported_resources.workspaces.push(x.clone());
info!("Imported workspace: {}", x.name);
}
for v in r.resources.environments {
for mut v in r.resources.environments {
v.id = maybe_gen_id(v.id.as_str(), ModelType::Environment, &mut id_map);
v.workspace_id = maybe_gen_id(v.workspace_id.as_str(), ModelType::Workspace, &mut id_map);
let x = upsert_environment(&w, v).await.map_err(|e| e.to_string())?;
imported_resources.environments.push(x.clone());
info!("Imported environment: {}", x.name);
}
for v in r.resources.folders {
for mut v in r.resources.folders {
v.id = maybe_gen_id(v.id.as_str(), ModelType::Folder, &mut id_map);
v.workspace_id = maybe_gen_id(v.workspace_id.as_str(), ModelType::Workspace, &mut id_map);
v.folder_id = maybe_gen_id_opt(v.folder_id, ModelType::Folder, &mut id_map);
let x = upsert_folder(&w, v).await.map_err(|e| e.to_string())?;
imported_resources.folders.push(x.clone());
info!("Imported folder: {}", x.name);
}
for v in r.resources.http_requests {
for mut v in r.resources.http_requests {
v.id = maybe_gen_id(v.id.as_str(), ModelType::HttpRequest, &mut id_map);
v.workspace_id = maybe_gen_id(v.workspace_id.as_str(), ModelType::Workspace, &mut id_map);
v.folder_id = maybe_gen_id_opt(v.folder_id, ModelType::Folder, &mut id_map);
let x = upsert_http_request(&w, v)
.await
.map_err(|e| e.to_string())?;
@@ -785,7 +802,10 @@ async fn cmd_import_data(
info!("Imported request: {}", x.name);
}
for v in r.resources.grpc_requests {
for mut v in r.resources.grpc_requests {
v.id = maybe_gen_id(v.id.as_str(), ModelType::GrpcRequest, &mut id_map);
v.workspace_id = maybe_gen_id(v.workspace_id.as_str(), ModelType::Workspace, &mut id_map);
v.folder_id = maybe_gen_id_opt(v.folder_id, ModelType::Folder, &mut id_map);
let x = upsert_grpc_request(&w, &v)
.await
.map_err(|e| e.to_string())?;
@@ -1657,7 +1677,7 @@ fn create_window(handle: &AppHandle, url: Option<&str>) -> WebviewWindow {
if !w.is_focused().unwrap() {
return;
}
match event.id().0.as_str() {
"quit" => exit(0),
"close" => w.close().unwrap(),

View File

@@ -4,12 +4,41 @@ use std::fs;
use log::error;
use rand::distributions::{Alphanumeric, DistString};
use serde::{Deserialize, Serialize};
use sqlx::{Pool, Sqlite};
use sqlx::types::{Json, JsonValue};
use sqlx::types::chrono::NaiveDateTime;
use sqlx::types::{Json, JsonValue};
use sqlx::{Pool, Sqlite};
use tauri::{AppHandle, Manager, WebviewWindow, Wry};
use tokio::sync::Mutex;
/// The kinds of persisted models. Used to derive the type-specific prefix
/// for newly generated model IDs (e.g. `wk_xxxxxxxxxx` for a workspace).
// Derives added: public types should be `Debug`; `Copy`/`Clone`/`Eq`/`Hash`
// are free for a fieldless discriminant enum and let callers pass it by
// value or use it as a map key.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ModelType {
    CookieJar,
    Environment,
    Folder,
    GrpcConnection,
    GrpcEvent,
    GrpcRequest,
    HttpRequest,
    HttpResponse,
    Workspace,
}

impl ModelType {
    /// Short prefix prepended to generated IDs so a model's type is
    /// recognizable from the ID alone (see `generate_model_id`).
    pub fn id_prefix(&self) -> String {
        match self {
            ModelType::CookieJar => "cj",
            ModelType::Environment => "ev",
            ModelType::Folder => "fl",
            ModelType::GrpcConnection => "gc",
            ModelType::GrpcEvent => "ge",
            ModelType::GrpcRequest => "gr",
            ModelType::HttpRequest => "rq",
            ModelType::HttpResponse => "rs",
            ModelType::Workspace => "wk",
        }
        .to_string()
    }
}
/// Returns `true`; presumably a serde field-default helper
/// (e.g. `#[serde(default = "default_true")]`) — TODO confirm at the
/// field attribute sites, which are outside this view.
fn default_true() -> bool {
    true
}
@@ -481,10 +510,7 @@ pub async fn list_cookie_jars(
.await
}
pub async fn delete_cookie_jar(
window: &WebviewWindow,
id: &str,
) -> Result<CookieJar, sqlx::Error> {
pub async fn delete_cookie_jar(window: &WebviewWindow, id: &str) -> Result<CookieJar, sqlx::Error> {
let cookie_jar = get_cookie_jar(window, id).await?;
let db = get_db(window).await;
@@ -516,7 +542,7 @@ pub async fn upsert_grpc_request(
) -> Result<GrpcRequest, sqlx::Error> {
let db = get_db(window).await;
let id = match request.id.as_str() {
"" => generate_id(Some("gr")),
"" => generate_model_id(ModelType::GrpcRequest),
_ => request.id.to_string(),
};
let trimmed_name = request.name.trim();
@@ -612,7 +638,7 @@ pub async fn upsert_grpc_connection(
) -> Result<GrpcConnection, sqlx::Error> {
let db = get_db(window).await;
let id = match connection.id.as_str() {
"" => generate_id(Some("gc")),
"" => generate_model_id(ModelType::GrpcConnection),
_ => connection.id.to_string(),
};
sqlx::query!(
@@ -701,7 +727,7 @@ pub async fn upsert_grpc_event(
) -> Result<GrpcEvent, sqlx::Error> {
let db = get_db(window).await;
let id = match event.id.as_str() {
"" => generate_id(Some("ge")),
"" => generate_model_id(ModelType::GrpcEvent),
_ => event.id.to_string(),
};
sqlx::query!(
@@ -782,7 +808,7 @@ pub async fn upsert_cookie_jar(
cookie_jar: &CookieJar,
) -> Result<CookieJar, sqlx::Error> {
let id = match cookie_jar.id.as_str() {
"" => generate_id(Some("cj")),
"" => generate_model_id(ModelType::CookieJar),
_ => cookie_jar.id.to_string(),
};
let trimmed_name = cookie_jar.name.trim();
@@ -914,7 +940,7 @@ pub async fn upsert_environment(
environment: Environment,
) -> Result<Environment, sqlx::Error> {
let id = match environment.id.as_str() {
"" => generate_id(Some("ev")),
"" => generate_model_id(ModelType::Environment),
_ => environment.id.to_string(),
};
let trimmed_name = environment.name.trim();
@@ -1017,7 +1043,7 @@ pub async fn delete_folder(window: &WebviewWindow, id: &str) -> Result<Folder, s
pub async fn upsert_folder(window: &WebviewWindow, r: Folder) -> Result<Folder, sqlx::Error> {
let id = match r.id.as_str() {
"" => generate_id(Some("fl")),
"" => generate_model_id(ModelType::Folder),
_ => r.id.to_string(),
};
let trimmed_name = r.name.trim();
@@ -1064,7 +1090,7 @@ pub async fn upsert_http_request(
r: HttpRequest,
) -> Result<HttpRequest, sqlx::Error> {
let id = match r.id.as_str() {
"" => generate_id(Some("rq")),
"" => generate_model_id(ModelType::HttpRequest),
_ => r.id.to_string(),
};
let trimmed_name = r.name.trim();
@@ -1203,7 +1229,7 @@ pub async fn create_http_response(
remote_addr: Option<&str>,
) -> Result<HttpResponse, sqlx::Error> {
let req = get_http_request(window, request_id).await?;
let id = generate_id(Some("rp"));
let id = generate_model_id(ModelType::HttpResponse);
let headers_json = Json(headers);
let db = get_db(window).await;
sqlx::query!(
@@ -1281,7 +1307,7 @@ pub async fn upsert_workspace(
workspace: Workspace,
) -> Result<Workspace, sqlx::Error> {
let id = match workspace.id.as_str() {
"" => generate_id(Some("wk")),
"" => generate_model_id(ModelType::Workspace),
_ => workspace.id.to_string(),
};
let trimmed_name = workspace.name.trim();
@@ -1513,12 +1539,13 @@ pub async fn delete_all_http_responses(
Ok(())
}
pub fn generate_id(prefix: Option<&str>) -> String {
let id = Alphanumeric.sample_string(&mut rand::thread_rng(), 10);
match prefix {
None => id,
Some(p) => format!("{p}_{id}"),
}
/// Generate a fresh random ID carrying the model's type prefix,
/// e.g. `rq_ab12cd34ef` for an HTTP request.
pub fn generate_model_id(model: ModelType) -> String {
    format!("{}_{}", model.id_prefix(), generate_id())
}
/// Generate a random 10-character alphanumeric identifier (no prefix).
pub fn generate_id() -> String {
    let mut rng = rand::thread_rng();
    Alphanumeric.sample_string(&mut rng, 10)
}
#[derive(Default, Debug, Deserialize, Serialize)]
@@ -1597,7 +1624,7 @@ struct ModelPayload<M: Serialize + Clone> {
}
fn emit_upserted_model<M: Serialize + Clone>(window: &WebviewWindow, model: M) -> M {
let payload = ModelPayload{
let payload = ModelPayload {
model: model.clone(),
window_label: window.label().to_string(),
};
@@ -1607,7 +1634,7 @@ fn emit_upserted_model<M: Serialize + Clone>(window: &WebviewWindow, model: M) -
}
fn emit_deleted_model<M: Serialize + Clone, E>(window: &WebviewWindow, model: M) -> Result<M, E> {
let payload = ModelPayload{
let payload = ModelPayload {
model: model.clone(),
window_label: window.label().to_string(),
};

View File

@@ -1,11 +1,11 @@
use std::fs;
use std::rc::Rc;
use boa_engine::{
Context, js_string, JsNativeError, JsValue, Module, module::SimpleModuleLoader,
property::Attribute, Source,
};
use boa_engine::builtins::promise::PromiseState;
use boa_engine::module::ModuleLoader;
use boa_runtime::Console;
use log::{debug, error};
use serde::{Deserialize, Serialize};
@@ -13,7 +13,7 @@ use serde_json::json;
use tauri::{AppHandle, Manager};
use tauri::path::BaseDirectory;
use crate::models::{WorkspaceExportResources};
use crate::models::WorkspaceExportResources;
#[derive(Default, Debug, Deserialize, Serialize)]
pub struct FilterResult {
@@ -67,8 +67,7 @@ pub async fn run_plugin_import(
return Ok(None);
}
let resources: ImportResult =
serde_json::from_value(result_json).map_err(|e| e.to_string())?;
let resources: ImportResult = serde_json::from_value(result_json).map_err(|e| e.to_string())?;
Ok(Some(resources))
}
@@ -90,12 +89,9 @@ fn run_plugin(
plugin_dir, plugin_index_file
);
// Module loader for the specific plugin
let loader = &SimpleModuleLoader::new(plugin_dir).expect("failed to create module loader");
let dyn_loader: &dyn ModuleLoader = loader;
let loader = Rc::new(SimpleModuleLoader::new(plugin_dir).unwrap());
let context = &mut Context::builder()
.module_loader(dyn_loader)
.module_loader(loader.clone())
.build()
.expect("failed to create context");
@@ -109,15 +105,13 @@ fn run_plugin(
// Insert parsed entrypoint into the module loader
loader.insert(plugin_index_file, module.clone());
let promise_result = module
.load_link_evaluate(context)
.expect("failed to evaluate module");
let promise_result = module.load_link_evaluate(context);
// Very important to push forward the job queue after queueing promises.
context.run_jobs();
// Checking if the final promise didn't return an error.
match promise_result.state().expect("failed to get promise state") {
match promise_result.state() {
PromiseState::Pending => {
panic!("Promise was pending");
}