Model and DB refactor (#61)

- [x] Move from `sqlx` to `rusqlite`
- [x] Generate TS types from Rust models
This commit is contained in:
Gregory Schier
2024-08-05 07:58:20 -07:00
committed by GitHub
parent 71013fd701
commit 989b5a8058
193 changed files with 7083 additions and 8337 deletions

View File

@@ -6,10 +6,9 @@ use serde_json::json;
use sqlx::types::JsonValue;
use tauri::{AppHandle, Manager};
use yaak_models::queries::{generate_id, get_key_value_int, get_key_value_string, set_key_value_int, set_key_value_string};
use crate::is_dev;
use crate::models::{
generate_id, get_key_value_int, get_key_value_string, set_key_value_int, set_key_value_string,
};
// Key-value namespace under which analytics counters are persisted.
const NAMESPACE: &str = "analytics";
// Key for the persisted count of application launches.
const NUM_LAUNCHES_KEY: &str = "num_launches";

View File

@@ -1,16 +0,0 @@
use deno_core::error::AnyError;
use deno_core::op2;
/// Deno op: parse a YAML document into a JSON value.
///
/// Returns an error if `text` is not valid YAML.
#[op2]
#[serde]
pub fn op_yaml_parse(#[string] text: String) -> Result<serde_json::Value, AnyError> {
    Ok(serde_yaml::from_str(&text)?)
}
/// Deno op: serialize a JSON value to a YAML string.
///
/// Returns an error if the value cannot be represented as YAML.
#[op2]
#[string]
pub fn op_yaml_stringify(#[serde] value: serde_json::Value) -> Result<String, AnyError> {
    Ok(serde_yaml::to_string(&value)?)
}

View File

@@ -0,0 +1,77 @@
use chrono::NaiveDateTime;
use serde::{Deserialize, Serialize};
use tauri::{Manager, WebviewWindow};
use yaak_models::models::{Environment, Folder, GrpcRequest, HttpRequest, Workspace};
/// Top-level payload written when exporting one or more workspaces.
///
/// Serialized with camelCase keys; missing fields fall back to `Default`
/// on deserialization (`#[serde(default)]`).
#[derive(Default, Debug, Deserialize, Serialize)]
#[serde(default, rename_all = "camelCase")]
pub struct WorkspaceExport {
// Version of the Yaak app that produced this export (from the Tauri package info).
pub yaak_version: String,
// Export schema version (currently 2); used to detect incompatible files.
pub yaak_schema: i64,
// UTC time at which the export was generated.
pub timestamp: NaiveDateTime,
// All exported model objects, grouped by type.
pub resources: WorkspaceExportResources,
}
/// Flat collections of every model type included in a workspace export,
/// possibly spanning multiple workspaces.
#[derive(Default, Debug, Deserialize, Serialize)]
#[serde(default, rename_all = "camelCase")]
pub struct WorkspaceExportResources {
pub workspaces: Vec<Workspace>,
pub environments: Vec<Environment>,
pub folders: Vec<Folder>,
pub http_requests: Vec<HttpRequest>,
pub grpc_requests: Vec<GrpcRequest>,
}
/// Result of importing data: the set of resources that were brought in.
/// Note: unlike the export structs, this serializes with default
/// (snake_case) field names.
#[derive(Default, Debug, Deserialize, Serialize)]
pub struct ImportResult {
pub resources: WorkspaceExportResources,
}
pub async fn get_workspace_export_resources(
window: &WebviewWindow,
workspace_ids: Vec<&str>,
) -> WorkspaceExport {
let app_handle = window.app_handle();
let mut data = WorkspaceExport {
yaak_version: app_handle.package_info().version.clone().to_string(),
yaak_schema: 2,
timestamp: chrono::Utc::now().naive_utc(),
resources: WorkspaceExportResources {
workspaces: Vec::new(),
environments: Vec::new(),
folders: Vec::new(),
http_requests: Vec::new(),
grpc_requests: Vec::new(),
},
};
for workspace_id in workspace_ids {
data.resources.workspaces.push(
yaak_models::queries::get_workspace(window, workspace_id)
.await
.expect("Failed to get workspace"),
);
data.resources.environments.append(
&mut yaak_models::queries::list_environments(window, workspace_id)
.await
.expect("Failed to get environments"),
);
data.resources.folders.append(
&mut yaak_models::queries::list_folders(window, workspace_id)
.await
.expect("Failed to get folders"),
);
data.resources.http_requests.append(
&mut yaak_models::queries::list_http_requests(window, workspace_id)
.await
.expect("Failed to get http requests"),
);
data.resources.grpc_requests.append(
&mut yaak_models::queries::list_grpc_requests(window, workspace_id)
.await
.expect("Failed to get grpc requests"),
);
}
return data;
}

View File

@@ -6,6 +6,8 @@ use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use crate::render::variables_from_environment;
use crate::{render, response_err};
use base64::Engine;
use http::header::{ACCEPT, USER_AGENT};
use http::{HeaderMap, HeaderName, HeaderValue};
@@ -14,25 +16,23 @@ use mime_guess::Mime;
use reqwest::redirect::Policy;
use reqwest::Method;
use reqwest::{multipart, Url};
use sqlx::types::{Json, JsonValue};
use tauri::{Manager, WebviewWindow};
use tokio::sync::oneshot;
use tokio::sync::watch::Receiver;
use crate::render::variables_from_environment;
use crate::{models, render, response_err};
use yaak_models::models::{Cookie, CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseHeader};
use yaak_models::queries::{get_workspace, update_response_if_id, upsert_cookie_jar};
pub async fn send_http_request(
window: &WebviewWindow,
request: models::HttpRequest,
response: &models::HttpResponse,
environment: Option<models::Environment>,
cookie_jar: Option<models::CookieJar>,
request: HttpRequest,
response: &HttpResponse,
environment: Option<Environment>,
cookie_jar: Option<CookieJar>,
download_path: Option<PathBuf>,
cancel_rx: &mut Receiver<bool>,
) -> Result<models::HttpResponse, String> {
) -> Result<HttpResponse, String> {
let environment_ref = environment.as_ref();
let workspace = models::get_workspace(window, &request.workspace_id)
let workspace = get_workspace(window, &request.workspace_id)
.await
.expect("Failed to get Workspace");
let vars = variables_from_environment(&workspace, environment_ref);
@@ -63,11 +63,10 @@ pub async fn send_http_request(
// HACK: Can't construct Cookie without serde, so we have to do this
let cookies = cj
.cookies
.0
.iter()
.map(|json_cookie| {
serde_json::from_value(json_cookie.clone())
.expect("Failed to deserialize cookie")
.map(|cookie| {
let json_cookie = serde_json::to_value(cookie).unwrap();
serde_json::from_value(json_cookie).expect("Failed to deserialize cookie")
})
.map(|c| Ok(c))
.collect::<Vec<Result<_, ()>>>();
@@ -85,7 +84,7 @@ pub async fn send_http_request(
if workspace.setting_request_timeout > 0 {
client_builder = client_builder.timeout(Duration::from_millis(
workspace.setting_request_timeout.unsigned_abs(),
workspace.setting_request_timeout.unsigned_abs() as u64,
));
}
@@ -138,7 +137,7 @@ pub async fn send_http_request(
// );
// }
for h in request.headers.0 {
for h in request.headers {
if h.name.is_empty() && h.value.is_empty() {
continue;
}
@@ -170,7 +169,7 @@ pub async fn send_http_request(
if let Some(b) = &request.authentication_type {
let empty_value = &serde_json::to_value("").unwrap();
let a = request.authentication.0;
let a = request.authentication;
if b == "basic" {
let raw_username = a
@@ -203,7 +202,7 @@ pub async fn send_http_request(
}
let mut query_params = Vec::new();
for p in request.url_parameters.0 {
for p in request.url_parameters {
if !p.enabled || p.name.is_empty() {
continue;
}
@@ -217,7 +216,7 @@ pub async fn send_http_request(
if let Some(body_type) = &request.body_type {
let empty_string = &serde_json::to_value("").unwrap();
let empty_bool = &serde_json::to_value(false).unwrap();
let request_body = request.body.0;
let request_body = request.body;
if request_body.contains_key("text") {
let raw_text = request_body
@@ -382,19 +381,17 @@ pub async fn send_http_request(
match raw_response {
Ok(v) => {
let mut response = response.clone();
response.elapsed_headers = start.elapsed().as_millis() as i64;
response.elapsed_headers = start.elapsed().as_millis() as i32;
let response_headers = v.headers().clone();
response.status = v.status().as_u16() as i64;
response.status = v.status().as_u16() as i32;
response.status_reason = v.status().canonical_reason().map(|s| s.to_string());
response.headers = Json(
response_headers
.iter()
.map(|(k, v)| models::HttpResponseHeader {
name: k.as_str().to_string(),
value: v.to_str().unwrap().to_string(),
})
.collect(),
);
response.headers = response_headers
.iter()
.map(|(k, v)| HttpResponseHeader {
name: k.as_str().to_string(),
value: v.to_str().unwrap_or_default().to_string(),
})
.collect();
response.url = v.url().to_string();
response.remote_addr = v.remote_addr().map(|a| a.to_string());
response.version = match v.version() {
@@ -408,12 +405,12 @@ pub async fn send_http_request(
let content_length = v.content_length();
let body_bytes = v.bytes().await.expect("Failed to get body").to_vec();
response.elapsed = start.elapsed().as_millis() as i64;
response.elapsed = start.elapsed().as_millis() as i32;
// Use content length if available, otherwise use body length
response.content_length = match content_length {
Some(l) => Some(l as i64),
None => Some(body_bytes.len() as i64),
Some(l) => Some(l as i32),
None => Some(body_bytes.len() as i32),
};
{
@@ -441,11 +438,11 @@ pub async fn send_http_request(
);
}
response = models::update_response_if_id(window, &response)
response = update_response_if_id(window, &response)
.await
.expect("Failed to update response");
// Copy response to download path, if specified
// Copy response to the download path, if specified
match (download_path, response.body_path.clone()) {
(Some(dl_path), Some(body_path)) => {
info!("Downloading response body to {}", dl_path.display());
@@ -464,16 +461,17 @@ pub async fn send_http_request(
// });
// store.store_response_cookies(cookies, &url);
let json_cookies: Json<Vec<JsonValue>> = Json(
cookie_store
.lock()
.unwrap()
.iter_any()
.map(|c| serde_json::to_value(&c).expect("Failed to serialize cookie"))
.collect::<Vec<_>>(),
);
let json_cookies: Vec<Cookie> = cookie_store
.lock()
.unwrap()
.iter_any()
.map(|c| {
let json_cookie = serde_json::to_value(&c).expect("Failed to serialize cookie");
serde_json::from_value(json_cookie).expect("Failed to deserialize cookie")
})
.collect::<Vec<_>>();
cookie_jar.cookies = json_cookies;
if let Err(e) = models::upsert_cookie_jar(window, &cookie_jar).await {
if let Err(e) = upsert_cookie_jar(window, &cookie_jar).await {
error!("Failed to update cookie jar: {}", e);
};
}

View File

@@ -6,7 +6,7 @@ extern crate objc;
use std::collections::HashMap;
use std::env::current_dir;
use std::fs;
use std::fs::{create_dir_all, File, read_to_string};
use std::fs::{create_dir_all, read_to_string, File};
use std::path::PathBuf;
use std::process::exit;
use std::str::FromStr;
@@ -17,66 +17,65 @@ use fern::colors::ColoredLevelConfig;
use log::{debug, error, info, warn};
use rand::random;
use serde_json::{json, Value};
use sqlx::{Pool, Sqlite, SqlitePool};
use sqlx::migrate::Migrator;
use sqlx::sqlite::SqliteConnectOptions;
use sqlx::types::Json;
use tauri::{AppHandle, Emitter, LogicalSize, RunEvent, State, WebviewUrl, WebviewWindow};
use tauri::{Manager, WindowEvent};
use tauri::Listener;
use sqlx::{Pool, Sqlite, SqlitePool};
use tauri::path::BaseDirectory;
use tauri::Listener;
#[cfg(target_os = "macos")]
use tauri::TitleBarStyle;
use tauri::{AppHandle, Emitter, LogicalSize, RunEvent, State, WebviewUrl, WebviewWindow};
use tauri::{Manager, WindowEvent};
use tauri_plugin_log::{fern, Target, TargetKind};
use tauri_plugin_shell::ShellExt;
use tokio::sync::Mutex;
use ::grpc::{Code, deserialize_message, serialize_message, ServiceDefinition};
use ::grpc::manager::{DynamicMessage, GrpcHandle};
use plugin_runtime::manager::PluginManager;
use ::grpc::{deserialize_message, serialize_message, Code, ServiceDefinition};
use yaak_plugin_runtime::manager::PluginManager;
use crate::analytics::{AnalyticsAction, AnalyticsResource};
use crate::grpc::metadata_to_map;
use crate::http_request::send_http_request;
use crate::models::{
cancel_pending_grpc_connections, cancel_pending_responses, CookieJar,
create_http_response, delete_all_grpc_connections, delete_all_http_responses, delete_cookie_jar,
delete_environment, delete_folder, delete_grpc_connection, delete_grpc_request,
delete_http_request, delete_http_response, delete_workspace, duplicate_grpc_request,
duplicate_http_request, Environment, EnvironmentVariable, Folder, generate_model_id,
get_cookie_jar, get_environment, get_folder, get_grpc_connection,
get_grpc_request, get_http_request, get_http_response, get_key_value_raw,
get_or_create_settings, get_workspace, get_workspace_export_resources, GrpcConnection, GrpcEvent,
GrpcEventType, GrpcRequest, HttpRequest, HttpResponse, KeyValue,
list_cookie_jars, list_environments, list_folders, list_grpc_connections, list_grpc_events,
list_grpc_requests, list_http_requests, list_responses, list_workspaces, ModelType,
set_key_value_raw, Settings, update_response_if_id, update_settings, upsert_cookie_jar, upsert_environment,
upsert_folder, upsert_grpc_connection, upsert_grpc_event, upsert_grpc_request, upsert_http_request, upsert_workspace, Workspace,
WorkspaceExportResources,
};
use crate::models::ImportResult;
use crate::export_resources::{get_workspace_export_resources, ImportResult, WorkspaceExportResources};
use crate::notifications::YaakNotifier;
use crate::render::{render_request, variables_from_environment};
use crate::updates::{UpdateMode, YaakUpdater};
use crate::window_menu::app_menu;
use yaak_models::models::{
CookieJar, Environment, EnvironmentVariable, Folder, GrpcConnection, GrpcEvent, GrpcEventType,
GrpcRequest, HttpRequest, HttpResponse, KeyValue, ModelType, Settings, Workspace,
};
use yaak_models::queries::{
cancel_pending_grpc_connections, cancel_pending_responses, create_http_response,
delete_all_grpc_connections, delete_all_http_responses, delete_cookie_jar, delete_environment,
delete_folder, delete_grpc_connection, delete_grpc_request, delete_http_request,
delete_http_response, delete_workspace, duplicate_grpc_request, duplicate_http_request,
generate_model_id, get_cookie_jar, get_environment, get_folder, get_grpc_connection,
get_grpc_request, get_http_request, get_http_response, get_key_value_raw,
get_or_create_settings, get_workspace, list_cookie_jars, list_environments, list_folders,
list_grpc_connections, list_grpc_events, list_grpc_requests, list_http_requests,
list_responses, list_workspaces, set_key_value_raw, update_response_if_id, update_settings,
upsert_cookie_jar, upsert_environment, upsert_folder, upsert_grpc_connection,
upsert_grpc_event, upsert_grpc_request, upsert_http_request, upsert_workspace,
};
mod analytics;
mod grpc;
mod http_request;
mod models;
mod export_resources;
mod notifications;
mod render;
#[cfg(target_os = "macos")]
mod tauri_plugin_mac_window;
mod template_fns;
mod updates;
mod window_menu;
mod template_fns;
// Initial main-window size in logical pixels — presumably used when no
// saved window state exists (window-state plugin is registered); confirm
// against window-creation code.
const DEFAULT_WINDOW_WIDTH: f64 = 1100.0;
const DEFAULT_WINDOW_HEIGHT: f64 = 600.0;
async fn migrate_db(app_handle: &AppHandle, db: &Mutex<Pool<Sqlite>>) -> Result<(), String> {
let pool = &*db.lock().await;
async fn migrate_db(app_handle: &AppHandle, pool: &Pool<Sqlite>) -> Result<(), String> {
let p = app_handle
.path()
.resolve("migrations", BaseDirectory::Resource)
@@ -170,7 +169,7 @@ async fn cmd_grpc_go(
let vars = variables_from_environment(&workspace, environment.as_ref());
// Add rest of metadata
for h in req.clone().metadata.0 {
for h in req.clone().metadata {
if h.name.is_empty() && h.value.is_empty() {
continue;
}
@@ -188,7 +187,7 @@ async fn cmd_grpc_go(
if let Some(b) = &req.authentication_type {
let req = req.clone();
let empty_value = &serde_json::to_value("").unwrap();
let a = req.authentication.0;
let a = req.authentication;
if b == "basic" {
let raw_username = a
@@ -276,7 +275,7 @@ async fn cmd_grpc_go(
upsert_grpc_connection(
&w,
&GrpcConnection {
elapsed: start.elapsed().as_millis() as i64,
elapsed: start.elapsed().as_millis() as i32,
error: Some(err.clone()),
..conn.clone()
},
@@ -392,7 +391,7 @@ async fn cmd_grpc_go(
&GrpcEvent {
content: format!("Connecting to {}", req.url),
event_type: GrpcEventType::ConnectionStart,
metadata: Json(metadata.clone()),
metadata: metadata.clone(),
..base_event.clone()
},
)
@@ -452,7 +451,7 @@ async fn cmd_grpc_go(
upsert_grpc_event(
&w,
&GrpcEvent {
metadata: Json(metadata_to_map(msg.metadata().clone())),
metadata: metadata_to_map(msg.metadata().clone()),
content: if msg.metadata().len() == 0 {
"Received response"
} else {
@@ -480,7 +479,7 @@ async fn cmd_grpc_go(
&GrpcEvent {
content: "Connection complete".to_string(),
event_type: GrpcEventType::ConnectionEnd,
status: Some(Code::Ok as i64),
status: Some(Code::Ok as i32),
..base_event.clone()
},
)
@@ -493,15 +492,15 @@ async fn cmd_grpc_go(
&(match e.status {
Some(s) => GrpcEvent {
error: Some(s.message().to_string()),
status: Some(s.code() as i64),
status: Some(s.code() as i32),
content: "Failed to connect".to_string(),
metadata: Json(metadata_to_map(s.metadata().clone())),
metadata: metadata_to_map(s.metadata().clone()),
event_type: GrpcEventType::ConnectionEnd,
..base_event.clone()
},
None => GrpcEvent {
error: Some(e.message),
status: Some(Code::Unknown as i64),
status: Some(Code::Unknown as i32),
content: "Failed to connect".to_string(),
event_type: GrpcEventType::ConnectionEnd,
..base_event.clone()
@@ -521,7 +520,7 @@ async fn cmd_grpc_go(
upsert_grpc_event(
&w,
&GrpcEvent {
metadata: Json(metadata_to_map(stream.metadata().clone())),
metadata: metadata_to_map(stream.metadata().clone()),
content: if stream.metadata().len() == 0 {
"Received response"
} else {
@@ -542,15 +541,15 @@ async fn cmd_grpc_go(
&(match e.status {
Some(s) => GrpcEvent {
error: Some(s.message().to_string()),
status: Some(s.code() as i64),
status: Some(s.code() as i32),
content: "Failed to connect".to_string(),
metadata: Json(metadata_to_map(s.metadata().clone())),
metadata: metadata_to_map(s.metadata().clone()),
event_type: GrpcEventType::ConnectionEnd,
..base_event.clone()
},
None => GrpcEvent {
error: Some(e.message),
status: Some(Code::Unknown as i64),
status: Some(Code::Unknown as i32),
content: "Failed to connect".to_string(),
event_type: GrpcEventType::ConnectionEnd,
..base_event.clone()
@@ -589,8 +588,8 @@ async fn cmd_grpc_go(
&w,
&GrpcEvent {
content: "Connection complete".to_string(),
status: Some(Code::Unavailable as i64),
metadata: Json(metadata_to_map(trailers)),
status: Some(Code::Unavailable as i32),
metadata: metadata_to_map(trailers),
event_type: GrpcEventType::ConnectionEnd,
..base_event.clone()
},
@@ -604,8 +603,8 @@ async fn cmd_grpc_go(
&w,
&GrpcEvent {
content: status.to_string(),
status: Some(status.code() as i64),
metadata: Json(metadata_to_map(status.metadata().clone())),
status: Some(status.code() as i32),
metadata: metadata_to_map(status.metadata().clone()),
event_type: GrpcEventType::ConnectionEnd,
..base_event.clone()
},
@@ -630,11 +629,11 @@ async fn cmd_grpc_go(
let closed_event = events
.iter()
.find(|e| GrpcEventType::ConnectionEnd == e.event_type);
let closed_status = closed_event.and_then(|e| e.status).unwrap_or(Code::Unavailable as i64);
let closed_status = closed_event.and_then(|e| e.status).unwrap_or(Code::Unavailable as i32);
upsert_grpc_connection(
&w,
&GrpcConnection{
elapsed: start.elapsed().as_millis() as i64,
elapsed: start.elapsed().as_millis() as i32,
status: closed_status,
..get_grpc_connection(&w, &conn_id).await.unwrap().clone()
},
@@ -646,15 +645,15 @@ async fn cmd_grpc_go(
&GrpcEvent {
content: "Cancelled".to_string(),
event_type: GrpcEventType::ConnectionEnd,
status: Some(Code::Cancelled as i64),
status: Some(Code::Cancelled as i32),
..base_msg.clone()
},
).await.unwrap();
upsert_grpc_connection(
&w,
&GrpcConnection {
elapsed: start.elapsed().as_millis() as i64,
status: Code::Cancelled as i64,
elapsed: start.elapsed().as_millis() as i32,
status: Code::Cancelled as i32,
..get_grpc_connection(&w, &conn_id).await.unwrap().clone()
},
)
@@ -1182,7 +1181,7 @@ async fn cmd_create_environment(
Environment {
workspace_id: workspace_id.to_string(),
name: name.to_string(),
variables: Json(variables),
variables,
..Default::default()
},
)
@@ -1194,7 +1193,7 @@ async fn cmd_create_environment(
async fn cmd_create_grpc_request(
workspace_id: &str,
name: &str,
sort_priority: f64,
sort_priority: f32,
folder_id: Option<&str>,
w: WebviewWindow,
) -> Result<GrpcRequest, String> {
@@ -1304,7 +1303,7 @@ async fn cmd_list_folders(workspace_id: &str, w: WebviewWindow) -> Result<Vec<Fo
async fn cmd_create_folder(
workspace_id: &str,
name: &str,
sort_priority: f64,
sort_priority: f32,
folder_id: Option<&str>,
w: WebviewWindow,
) -> Result<Folder, String> {
@@ -1612,11 +1611,12 @@ pub fn run() {
.plugin(tauri_plugin_clipboard_manager::init())
.plugin(tauri_plugin_window_state::Builder::default().build())
.plugin(tauri_plugin_shell::init())
.plugin(tauri_plugin_updater::Builder::new().build())
.plugin(tauri_plugin_updater::Builder::default().build())
.plugin(tauri_plugin_dialog::init())
.plugin(tauri_plugin_os::init())
.plugin(plugin_runtime::init())
.plugin(tauri_plugin_fs::init());
.plugin(tauri_plugin_fs::init())
.plugin(yaak_models::Builder::default().build())
.plugin(yaak_plugin_runtime::init());
#[cfg(target_os = "macos")]
{
@@ -1671,11 +1671,9 @@ pub fn run() {
let pool = SqlitePool::connect_with(opts)
.await
.expect("Failed to connect to database");
let m = Mutex::new(pool.clone());
migrate_db(app.handle(), &m)
migrate_db(app.handle(), &pool)
.await
.expect("Failed to migrate database");
app.manage(m);
let h = app.handle();
let _ = cancel_pending_responses(h).await;
let _ = cancel_pending_grpc_connections(h).await;

File diff suppressed because it is too large Load Diff

View File

@@ -5,9 +5,8 @@ use log::debug;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use tauri::{AppHandle, Emitter};
use yaak_models::queries::{get_key_value_raw, set_key_value_raw};
use crate::analytics::get_num_launches;
use crate::models::{get_key_value_raw, set_key_value_raw};
// Check for updates every hour
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;

View File

@@ -1,13 +1,12 @@
use std::collections::HashMap;
use sqlx::types::{Json, JsonValue};
use sqlx::types::JsonValue;
use crate::template_fns::timestamp;
use templates::parse_and_render;
use crate::models::{
use yaak_models::models::{
Environment, EnvironmentVariable, HttpRequest, HttpRequestHeader, HttpUrlParameter, Workspace,
};
use crate::template_fns::timestamp;
pub fn render_request(r: &HttpRequest, w: &Workspace, e: Option<&Environment>) -> HttpRequest {
let r = r.clone();
@@ -15,56 +14,48 @@ pub fn render_request(r: &HttpRequest, w: &Workspace, e: Option<&Environment>) -
HttpRequest {
url: render(r.url.as_str(), vars),
url_parameters: Json(
r.url_parameters
.0
.iter()
.map(|p| HttpUrlParameter {
enabled: p.enabled,
name: render(p.name.as_str(), vars),
value: render(p.value.as_str(), vars),
})
.collect::<Vec<HttpUrlParameter>>(),
),
headers: Json(
r.headers
.0
.iter()
.map(|p| HttpRequestHeader {
enabled: p.enabled,
name: render(p.name.as_str(), vars),
value: render(p.value.as_str(), vars),
})
.collect::<Vec<HttpRequestHeader>>(),
),
body: Json(
r.body
.0
.iter()
.map(|(k, v)| {
let v = if v.is_string() {
render(v.as_str().unwrap(), vars)
} else {
v.to_string()
};
(render(k, vars), JsonValue::from(v))
})
.collect::<HashMap<String, JsonValue>>(),
),
authentication: Json(
r.authentication
.0
.iter()
.map(|(k, v)| {
let v = if v.is_string() {
render(v.as_str().unwrap(), vars)
} else {
v.to_string()
};
(render(k, vars), JsonValue::from(v))
})
.collect::<HashMap<String, JsonValue>>(),
),
url_parameters: r
.url_parameters
.iter()
.map(|p| HttpUrlParameter {
enabled: p.enabled,
name: render(p.name.as_str(), vars),
value: render(p.value.as_str(), vars),
})
.collect::<Vec<HttpUrlParameter>>(),
headers: r
.headers
.iter()
.map(|p| HttpRequestHeader {
enabled: p.enabled,
name: render(p.name.as_str(), vars),
value: render(p.value.as_str(), vars),
})
.collect::<Vec<HttpRequestHeader>>(),
body: r
.body
.iter()
.map(|(k, v)| {
let v = if v.is_string() {
render(v.as_str().unwrap(), vars)
} else {
v.to_string()
};
(render(k, vars), JsonValue::from(v))
})
.collect::<HashMap<String, JsonValue>>(),
authentication: r
.authentication
.iter()
.map(|(k, v)| {
let v = if v.is_string() {
render(v.as_str().unwrap(), vars)
} else {
v.to_string()
};
(render(k, vars), JsonValue::from(v))
})
.collect::<HashMap<String, JsonValue>>(),
..r
}
}
@@ -95,10 +86,10 @@ pub fn variables_from_environment(
environment: Option<&Environment>,
) -> HashMap<String, String> {
let mut variables = HashMap::new();
variables = add_variable_to_map(variables, &workspace.variables.0);
variables = add_variable_to_map(variables, &workspace.variables);
if let Some(e) = environment {
variables = add_variable_to_map(variables, &e.variables.0);
variables = add_variable_to_map(variables, &e.variables);
}
recursively_render_variables(&variables, 0)

View File

@@ -12,7 +12,10 @@ pub fn timestamp(args: HashMap<String, String>) -> Result<String, String> {
}
_ => {
let json_from = serde_json::to_string(from).unwrap_or_default();
let now: DateTime<Utc> = serde_json::from_str(json_from.as_str()).unwrap();
let now: DateTime<Utc> = match serde_json::from_str(json_from.as_str()) {
Ok(r) => r,
Err(e) => return Err(e.to_string()),
};
now
}
};
@@ -43,7 +46,10 @@ mod tests {
fn timestamp_from() {
let mut args = HashMap::new();
args.insert("from".to_string(), "2024-07-31T14:16:41.983Z".to_string());
assert_eq!(timestamp(args), Ok("2024-07-31T14:16:41.983+00:00".to_string()));
assert_eq!(
timestamp(args),
Ok("2024-07-31T14:16:41.983+00:00".to_string())
);
}
#[test]