Compare commits

..

7 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
0cb42dedc9 Initial plan 2025-12-22 18:54:12 +00:00
Gregory Schier
089c7e8dce Http response events (#326) 2025-12-21 14:34:37 -08:00
Gregory Schier
7e0aa919fb Immediate cancellation 2025-12-21 06:28:36 -08:00
Gregory Schier
5776bab288 Tweak response pane and refactor timings 2025-12-21 06:24:01 -08:00
Gregory Schier
6b52a0cbed Try fix tests on Windows 2025-12-20 14:48:23 -08:00
Gregory Schier
46933059f6 Split up HTTP sending logic (#320) 2025-12-20 14:10:55 -08:00
Gregory Schier
cfbfd66eef Reformat project 2025-12-13 08:10:12 -08:00
114 changed files with 3646 additions and 1451 deletions

3
.gitattributes vendored
View File

@@ -1,2 +1,5 @@
src-tauri/vendored/**/* linguist-generated=true
src-tauri/gen/schemas/**/* linguist-generated=true
# Ensure consistent line endings for test files that check exact content
src-tauri/yaak-http/tests/test.txt text eol=lf

2
.gitignore vendored
View File

@@ -34,3 +34,5 @@ out
.tmp
tmp
.zed
codebook.toml

View File

@@ -5,3 +5,4 @@ chain_width = 100
max_width = 100
single_line_if_else_max_width = 100
fn_call_width = 100
struct_lit_width = 100

44
src-tauri/Cargo.lock generated
View File

@@ -192,12 +192,14 @@ version = "0.4.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b37fc50485c4f3f736a4fb14199f6d5f5ba008d7f28fe710306c92780f004c07"
dependencies = [
"brotli",
"brotli 8.0.1",
"flate2",
"futures-core",
"memchr",
"pin-project-lite",
"tokio",
"zstd",
"zstd-safe",
]
[[package]]
@@ -536,6 +538,17 @@ dependencies = [
"syn 2.0.101",
]
[[package]]
name = "brotli"
version = "7.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc97b8f16f944bba54f0433f07e30be199b6dc2bd25937444bbad560bcea29bd"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
"brotli-decompressor 4.0.3",
]
[[package]]
name = "brotli"
version = "8.0.1"
@@ -544,7 +557,17 @@ checksum = "9991eea70ea4f293524138648e41ee89b0b2b12ddef3b255effa43c8056e0e0d"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
"brotli-decompressor",
"brotli-decompressor 5.0.0",
]
[[package]]
name = "brotli-decompressor"
version = "4.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a334ef7c9e23abf0ce748e8cd309037da93e606ad52eb372e4ce327a0dcfbdfd"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
]
[[package]]
@@ -5762,7 +5785,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fa9844cefcf99554a16e0a278156ae73b0d8680bbc0e2ad1e4287aadd8489cf"
dependencies = [
"base64 0.22.1",
"brotli",
"brotli 8.0.1",
"ico",
"json-patch",
"plist",
@@ -6094,7 +6117,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76a423c51176eb3616ee9b516a9fa67fed5f0e78baaba680e44eb5dd2cc37490"
dependencies = [
"anyhow",
"brotli",
"brotli 8.0.1",
"cargo_metadata",
"ctor",
"dunce",
@@ -7903,6 +7926,7 @@ dependencies = [
"thiserror 2.0.17",
"tokio",
"tokio-stream",
"tokio-util",
"ts-rs",
"uuid",
"yaak-common",
@@ -7929,6 +7953,7 @@ dependencies = [
"regex",
"reqwest",
"serde",
"serde_json",
"tauri",
"thiserror 2.0.17",
]
@@ -8010,19 +8035,30 @@ dependencies = [
name = "yaak-http"
version = "0.1.0"
dependencies = [
"async-compression",
"async-trait",
"brotli 7.0.0",
"bytes",
"flate2",
"futures-util",
"hyper-util",
"log",
"mime_guess",
"regex",
"reqwest",
"reqwest_cookie_store",
"serde",
"serde_json",
"tauri",
"thiserror 2.0.17",
"tokio",
"tokio-util",
"tower-service",
"urlencoding",
"yaak-common",
"yaak-models",
"yaak-tls",
"zstd",
]
[[package]]

View File

@@ -74,6 +74,7 @@ tauri-plugin-window-state = "2.4.1"
thiserror = { workspace = true }
tokio = { workspace = true, features = ["sync"] }
tokio-stream = "0.1.17"
tokio-util = { version = "0.7", features = ["codec"] }
ts-rs = { workspace = true }
uuid = "1.12.1"
yaak-common = { workspace = true }

View File

@@ -1,5 +1,5 @@
use crate::error::Result;
use tauri::{command, AppHandle, Manager, Runtime, State, WebviewWindow};
use tauri::{AppHandle, Manager, Runtime, State, WebviewWindow, command};
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
use yaak_crypto::manager::EncryptionManagerExt;
use yaak_plugins::events::{GetThemesResponse, PluginContext};

View File

@@ -1,4 +1,4 @@
use mime_guess::{mime, Mime};
use mime_guess::{Mime, mime};
use std::path::Path;
use std::str::FromStr;
use tokio::fs;

View File

@@ -1,5 +1,5 @@
use std::io;
use serde::{Serialize, Serializer};
use std::io;
use thiserror::Error;
#[derive(Error, Debug)]
@@ -59,7 +59,7 @@ pub enum Error {
#[error("Request error: {0}")]
RequestError(#[from] reqwest::Error),
#[error("Generic error: {0}")]
#[error("{0}")]
GenericError(String),
}

View File

@@ -2,31 +2,24 @@ use crate::error::Error::GenericError;
use crate::error::Result;
use crate::render::render_http_request;
use crate::response_err;
use http::header::{ACCEPT, USER_AGENT};
use http::{HeaderMap, HeaderName, HeaderValue};
use log::{debug, error, warn};
use mime_guess::Mime;
use reqwest::{Method, Response};
use reqwest::{Url, multipart};
use log::debug;
use reqwest_cookie_store::{CookieStore, CookieStoreMutex};
use serde_json::Value;
use std::collections::BTreeMap;
use std::path::PathBuf;
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use tauri::{Manager, Runtime, WebviewWindow};
use tokio::fs;
use std::time::{Duration, Instant};
use tauri::{AppHandle, Manager, Runtime, WebviewWindow};
use tokio::fs::{File, create_dir_all};
use tokio::io::AsyncWriteExt;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::sync::Mutex;
use tokio::sync::watch::Receiver;
use tokio::sync::{Mutex, oneshot};
use yaak_http::client::{
HttpConnectionOptions, HttpConnectionProxySetting, HttpConnectionProxySettingAuth,
};
use yaak_http::manager::HttpConnectionManager;
use yaak_http::sender::ReqwestSender;
use yaak_http::transaction::HttpTransaction;
use yaak_http::types::{SendableHttpRequest, SendableHttpRequestOptions, append_query_params};
use yaak_models::models::{
Cookie, CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseHeader,
CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseHeader,
HttpResponseState, ProxySetting, ProxySettingAuth,
};
use yaak_models::query_manager::QueryManagerExt;
@@ -36,7 +29,7 @@ use yaak_plugins::events::{
};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_templates::{RenderErrorBehavior, RenderOptions};
use yaak_templates::RenderOptions;
use yaak_tls::find_client_certificate;
pub async fn send_http_request<R: Runtime>(
@@ -65,75 +58,75 @@ pub async fn send_http_request_with_context<R: Runtime>(
og_response: &HttpResponse,
environment: Option<Environment>,
cookie_jar: Option<CookieJar>,
cancelled_rx: &mut Receiver<bool>,
cancelled_rx: &Receiver<bool>,
plugin_context: &PluginContext,
) -> Result<HttpResponse> {
let app_handle = window.app_handle().clone();
let response = Arc::new(Mutex::new(og_response.clone()));
let update_source = UpdateSource::from_window(window);
// Execute the inner send logic and handle errors consistently
let result = send_http_request_inner(
window,
unrendered_request,
og_response,
environment,
cookie_jar,
cancelled_rx,
plugin_context,
)
.await;
match result {
Ok(response) => Ok(response),
Err(e) => {
Ok(response_err(&app_handle, &*response.lock().await, e.to_string(), &update_source))
}
}
}
async fn send_http_request_inner<R: Runtime>(
window: &WebviewWindow<R>,
unrendered_request: &HttpRequest,
og_response: &HttpResponse,
environment: Option<Environment>,
cookie_jar: Option<CookieJar>,
cancelled_rx: &Receiver<bool>,
plugin_context: &PluginContext,
) -> Result<HttpResponse> {
let app_handle = window.app_handle().clone();
let plugin_manager = app_handle.state::<PluginManager>();
let connection_manager = app_handle.state::<HttpConnectionManager>();
let settings = window.db().get_settings();
let workspace = window.db().get_workspace(&unrendered_request.workspace_id)?;
let environment_id = environment.map(|e| e.id);
let environment_chain = window.db().resolve_environments(
&unrendered_request.workspace_id,
unrendered_request.folder_id.as_deref(),
environment_id.as_deref(),
)?;
let response_id = og_response.id.clone();
let wrk_id = &unrendered_request.workspace_id;
let fld_id = unrendered_request.folder_id.as_deref();
let env_id = environment.map(|e| e.id);
let resp_id = og_response.id.clone();
let workspace = window.db().get_workspace(wrk_id)?;
let response = Arc::new(Mutex::new(og_response.clone()));
let update_source = UpdateSource::from_window(window);
let (resolved_request, auth_context_id) = match resolve_http_request(window, unrendered_request)
{
Ok(r) => r,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
));
}
};
let (resolved, auth_context_id) = resolve_http_request(window, unrendered_request)?;
let cb = PluginTemplateCallback::new(window.app_handle(), &plugin_context, RenderPurpose::Send);
let env_chain = window.db().resolve_environments(&workspace.id, fld_id, env_id.as_deref())?;
let request = render_http_request(&resolved, env_chain, &cb, &RenderOptions::throw()).await?;
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
// Build the sendable request using the new SendableHttpRequest type
let options = SendableHttpRequestOptions {
follow_redirects: workspace.setting_follow_redirects,
timeout: if workspace.setting_request_timeout > 0 {
Some(Duration::from_millis(workspace.setting_request_timeout.unsigned_abs() as u64))
} else {
None
},
};
let mut sendable_request = SendableHttpRequest::from_http_request(&request, options).await?;
let request = match render_http_request(&resolved_request, environment_chain, &cb, &opt).await {
Ok(r) => r,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
));
}
};
let mut url_string = request.url.clone();
url_string = ensure_proto(&url_string);
if !url_string.starts_with("http://") && !url_string.starts_with("https://") {
url_string = format!("http://{}", url_string);
}
debug!("Sending request to {} {url_string}", request.method);
debug!("Sending request to {} {}", sendable_request.method, sendable_request.url);
let proxy_setting = match settings.proxy {
None => HttpConnectionProxySetting::System,
Some(ProxySetting::Disabled) => HttpConnectionProxySetting::Disabled,
Some(ProxySetting::Enabled {
http,
https,
auth,
bypass,
disabled,
}) => {
Some(ProxySetting::Enabled { http, https, auth, bypass, disabled }) => {
if disabled {
HttpConnectionProxySetting::System
} else {
@@ -152,7 +145,8 @@ pub async fn send_http_request_with_context<R: Runtime>(
}
};
let client_certificate = find_client_certificate(&url_string, &settings.client_certificates);
let client_certificate =
find_client_certificate(&sendable_request.url, &settings.client_certificates);
// Add cookie store if specified
let maybe_cookie_manager = match cookie_jar.clone() {
@@ -183,523 +177,53 @@ pub async fn send_http_request_with_context<R: Runtime>(
let client = connection_manager
.get_client(&HttpConnectionOptions {
id: plugin_context.id.clone(),
follow_redirects: workspace.setting_follow_redirects,
validate_certificates: workspace.setting_validate_certificates,
proxy: proxy_setting,
cookie_provider: maybe_cookie_manager.as_ref().map(|(p, _)| Arc::clone(&p)),
client_certificate,
timeout: if workspace.setting_request_timeout > 0 {
Some(Duration::from_millis(workspace.setting_request_timeout.unsigned_abs() as u64))
} else {
None
},
})
.await?;
// Render query parameters
let mut query_params = Vec::new();
for p in request.url_parameters.clone() {
if !p.enabled || p.name.is_empty() {
continue;
}
query_params.push((p.name, p.value));
// Apply authentication to the request
apply_authentication(
&window,
&mut sendable_request,
&request,
auth_context_id,
&plugin_manager,
plugin_context,
)
.await?;
let start_for_cancellation = Instant::now();
let final_resp = execute_transaction(
client,
sendable_request,
response.clone(),
&resp_id,
&app_handle,
&update_source,
cancelled_rx.clone(),
)
.await;
match final_resp {
Ok(r) => Ok(r),
Err(e) => match app_handle.db().get_http_response(&resp_id) {
Ok(mut r) => {
r.state = HttpResponseState::Closed;
r.elapsed = start_for_cancellation.elapsed().as_millis() as i32;
r.elapsed_headers = start_for_cancellation.elapsed().as_millis() as i32;
r.error = Some(e.to_string());
app_handle
.db()
.update_http_response_if_id(&r, &UpdateSource::from_window(window))
.expect("Failed to update response");
Ok(r)
}
_ => Err(GenericError("Ephemeral request was cancelled".to_string())),
},
}
let url = match Url::from_str(&url_string) {
Ok(u) => u,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
format!("Failed to parse URL \"{}\": {}", url_string, e.to_string()),
&update_source,
));
}
};
let m = Method::from_str(&request.method.to_uppercase())
.map_err(|e| GenericError(e.to_string()))?;
let mut request_builder = client.request(m, url).query(&query_params);
let mut headers = HeaderMap::new();
headers.insert(USER_AGENT, HeaderValue::from_static("yaak"));
headers.insert(ACCEPT, HeaderValue::from_static("*/*"));
// TODO: Set cookie header ourselves once we also handle redirects. We need to do this
// because reqwest doesn't give us a way to inspect the headers it sent (we have to do
// everything manually to know that).
// if let Some(cookie_store) = maybe_cookie_store.clone() {
// let values1 = cookie_store.get_request_values(&url);
// let raw_value = cookie_store.get_request_values(&url)
// .map(|(name, value)| format!("{}={}", name, value))
// .collect::<Vec<_>>()
// .join("; ");
// headers.insert(
// COOKIE,
// HeaderValue::from_str(&raw_value).expect("Failed to create cookie header"),
// );
// }
for h in request.headers.clone() {
if h.name.is_empty() && h.value.is_empty() {
continue;
}
if !h.enabled {
continue;
}
let header_name = match HeaderName::from_str(&h.name) {
Ok(n) => n,
Err(e) => {
error!("Failed to create header name: {}", e);
continue;
}
};
let header_value = match HeaderValue::from_str(&h.value) {
Ok(n) => n,
Err(e) => {
error!("Failed to create header value: {}", e);
continue;
}
};
headers.insert(header_name, header_value);
}
let request_body = request.body.clone();
if let Some(body_type) = &request.body_type.clone() {
if body_type == "graphql" {
let query = get_str_h(&request_body, "query");
let variables = get_str_h(&request_body, "variables");
if request.method.to_lowercase() == "get" {
request_builder = request_builder.query(&[("query", query)]);
if !variables.trim().is_empty() {
request_builder = request_builder.query(&[("variables", variables)]);
}
} else {
let body = if variables.trim().is_empty() {
format!(r#"{{"query":{}}}"#, serde_json::to_string(query).unwrap_or_default())
} else {
format!(
r#"{{"query":{},"variables":{variables}}}"#,
serde_json::to_string(query).unwrap_or_default()
)
};
request_builder = request_builder.body(body.to_owned());
}
} else if body_type == "application/x-www-form-urlencoded"
&& request_body.contains_key("form")
{
let mut form_params = Vec::new();
let form = request_body.get("form");
if let Some(f) = form {
match f.as_array() {
None => {}
Some(a) => {
for p in a {
let enabled = get_bool(p, "enabled", true);
let name = get_str(p, "name");
if !enabled || name.is_empty() {
continue;
}
let value = get_str(p, "value");
form_params.push((name, value));
}
}
}
}
request_builder = request_builder.form(&form_params);
} else if body_type == "binary" && request_body.contains_key("filePath") {
let file_path = request_body
.get("filePath")
.ok_or(GenericError("filePath not set".to_string()))?
.as_str()
.unwrap_or_default();
match fs::read(file_path).await.map_err(|e| e.to_string()) {
Ok(f) => {
request_builder = request_builder.body(f);
}
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
e,
&update_source,
));
}
}
} else if body_type == "multipart/form-data" && request_body.contains_key("form") {
let mut multipart_form = multipart::Form::new();
if let Some(form_definition) = request_body.get("form") {
match form_definition.as_array() {
None => {}
Some(fd) => {
for p in fd {
let enabled = get_bool(p, "enabled", true);
let name = get_str(p, "name").to_string();
if !enabled || name.is_empty() {
continue;
}
let file_path = get_str(p, "file").to_owned();
let value = get_str(p, "value").to_owned();
let mut part = if file_path.is_empty() {
multipart::Part::text(value.clone())
} else {
match fs::read(file_path.clone()).await {
Ok(f) => multipart::Part::bytes(f),
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
));
}
}
};
let content_type = get_str(p, "contentType");
// Set or guess mimetype
if !content_type.is_empty() {
part = match part.mime_str(content_type) {
Ok(p) => p,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
format!("Invalid mime for multi-part entry {e:?}"),
&update_source,
));
}
};
} else if !file_path.is_empty() {
let default_mime =
Mime::from_str("application/octet-stream").unwrap();
let mime =
mime_guess::from_path(file_path.clone()).first_or(default_mime);
part = match part.mime_str(mime.essence_str()) {
Ok(p) => p,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
format!("Invalid mime for multi-part entry {e:?}"),
&update_source,
));
}
};
}
// Set a file path if it is not empty
if !file_path.is_empty() {
let user_filename = get_str(p, "filename").to_owned();
let filename = if user_filename.is_empty() {
PathBuf::from(file_path)
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string()
} else {
user_filename
};
part = part.file_name(filename);
}
multipart_form = multipart_form.part(name, part);
}
}
}
}
headers.remove("Content-Type"); // reqwest will add this automatically
request_builder = request_builder.multipart(multipart_form);
} else if request_body.contains_key("text") {
let body = get_str_h(&request_body, "text");
request_builder = request_builder.body(body.to_owned());
} else {
warn!("Unsupported body type: {}", body_type);
}
} else {
// No body set
let method = request.method.to_ascii_lowercase();
let is_body_method = method == "post" || method == "put" || method == "patch";
// Add Content-Length for methods that commonly accept a body because some servers
// will error if they don't receive it.
if is_body_method && !headers.contains_key("content-length") {
headers.insert("Content-Length", HeaderValue::from_static("0"));
}
}
// Add headers last, because previous steps may modify them
request_builder = request_builder.headers(headers);
let mut sendable_req = match request_builder.build() {
Ok(r) => r,
Err(e) => {
warn!("Failed to build request builder {e:?}");
return Ok(response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
));
}
};
match request.authentication_type {
None => {
// No authentication found. Not even inherited
}
Some(authentication_type) if authentication_type == "none" => {
// Explicitly no authentication
}
Some(authentication_type) => {
let req = CallHttpAuthenticationRequest {
context_id: format!("{:x}", md5::compute(auth_context_id)),
values: serde_json::from_value(serde_json::to_value(&request.authentication)?)?,
url: sendable_req.url().to_string(),
method: sendable_req.method().to_string(),
headers: sendable_req
.headers()
.iter()
.map(|(name, value)| HttpHeader {
name: name.to_string(),
value: value.to_str().unwrap_or_default().to_string(),
})
.collect(),
};
let auth_result = plugin_manager
.call_http_authentication(&window, &authentication_type, req, plugin_context)
.await;
let plugin_result = match auth_result {
Ok(r) => r,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
));
}
};
let headers = sendable_req.headers_mut();
for header in plugin_result.set_headers.unwrap_or_default() {
match (HeaderName::from_str(&header.name), HeaderValue::from_str(&header.value)) {
(Ok(name), Ok(value)) => {
headers.insert(name, value);
}
_ => continue,
};
}
if let Some(params) = plugin_result.set_query_parameters {
let mut query_pairs = sendable_req.url_mut().query_pairs_mut();
for p in params {
query_pairs.append_pair(&p.name, &p.value);
}
}
}
}
let (resp_tx, resp_rx) = oneshot::channel::<std::result::Result<Response, reqwest::Error>>();
let (done_tx, done_rx) = oneshot::channel::<HttpResponse>();
let start = std::time::Instant::now();
tokio::spawn(async move {
let _ = resp_tx.send(client.execute(sendable_req).await);
});
let raw_response = tokio::select! {
Ok(r) = resp_rx => r,
_ = cancelled_rx.changed() => {
let mut r = response.lock().await;
r.elapsed_headers = start.elapsed().as_millis() as i32;
r.elapsed = start.elapsed().as_millis() as i32;
return Ok(response_err(&app_handle, &r, "Request was cancelled".to_string(), &update_source));
}
};
{
let app_handle = app_handle.clone();
let window = window.clone();
let cancelled_rx = cancelled_rx.clone();
let response_id = response_id.clone();
let response = response.clone();
let update_source = update_source.clone();
tokio::spawn(async move {
match raw_response {
Ok(mut v) => {
let content_length = v.content_length();
let response_headers = v.headers().clone();
let dir = app_handle.path().app_data_dir().unwrap();
let base_dir = dir.join("responses");
create_dir_all(base_dir.clone()).await.expect("Failed to create responses dir");
let body_path = if response_id.is_empty() {
base_dir.join(uuid::Uuid::new_v4().to_string())
} else {
base_dir.join(response_id.clone())
};
{
let mut r = response.lock().await;
r.body_path = Some(body_path.to_str().unwrap().to_string());
r.elapsed_headers = start.elapsed().as_millis() as i32;
r.elapsed = start.elapsed().as_millis() as i32;
r.status = v.status().as_u16() as i32;
r.status_reason = v.status().canonical_reason().map(|s| s.to_string());
r.headers = response_headers
.iter()
.map(|(k, v)| HttpResponseHeader {
name: k.as_str().to_string(),
value: v.to_str().unwrap_or_default().to_string(),
})
.collect();
r.url = v.url().to_string();
r.remote_addr = v.remote_addr().map(|a| a.to_string());
r.version = match v.version() {
reqwest::Version::HTTP_09 => Some("HTTP/0.9".to_string()),
reqwest::Version::HTTP_10 => Some("HTTP/1.0".to_string()),
reqwest::Version::HTTP_11 => Some("HTTP/1.1".to_string()),
reqwest::Version::HTTP_2 => Some("HTTP/2".to_string()),
reqwest::Version::HTTP_3 => Some("HTTP/3".to_string()),
_ => None,
};
r.state = HttpResponseState::Connected;
app_handle
.db()
.update_http_response_if_id(&r, &update_source)
.expect("Failed to update response after connected");
}
// Write body to FS
let mut f = File::options()
.create(true)
.truncate(true)
.write(true)
.open(&body_path)
.await
.expect("Failed to open file");
let mut written_bytes: usize = 0;
loop {
let chunk = v.chunk().await;
if *cancelled_rx.borrow() {
// Request was canceled
return;
}
match chunk {
Ok(Some(bytes)) => {
let mut r = response.lock().await;
r.elapsed = start.elapsed().as_millis() as i32;
f.write_all(&bytes).await.expect("Failed to write to file");
f.flush().await.expect("Failed to flush file");
written_bytes += bytes.len();
r.content_length = Some(written_bytes as i32);
app_handle
.db()
.update_http_response_if_id(&r, &update_source)
.expect("Failed to update response");
}
Ok(None) => {
break;
}
Err(e) => {
response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
);
break;
}
}
}
// Set the final content length
{
let mut r = response.lock().await;
r.content_length = match content_length {
Some(l) => Some(l as i32),
None => Some(written_bytes as i32),
};
r.state = HttpResponseState::Closed;
app_handle
.db()
.update_http_response_if_id(&r, &UpdateSource::from_window(&window))
.expect("Failed to update response");
};
// Add cookie store if specified
if let Some((cookie_store, mut cookie_jar)) = maybe_cookie_manager {
// let cookies = response_headers.get_all(SET_COOKIE).iter().map(|h| {
// println!("RESPONSE COOKIE: {}", h.to_str().unwrap());
// cookie_store::RawCookie::from_str(h.to_str().unwrap())
// .expect("Failed to parse cookie")
// });
// store.store_response_cookies(cookies, &url);
let json_cookies: Vec<Cookie> = cookie_store
.lock()
.unwrap()
.iter_any()
.map(|c| {
let json_cookie =
serde_json::to_value(&c).expect("Failed to serialize cookie");
serde_json::from_value(json_cookie)
.expect("Failed to deserialize cookie")
})
.collect::<Vec<_>>();
cookie_jar.cookies = json_cookies;
if let Err(e) = app_handle
.db()
.upsert_cookie_jar(&cookie_jar, &UpdateSource::from_window(&window))
{
error!("Failed to update cookie jar: {}", e);
};
}
}
Err(e) => {
warn!("Failed to execute request {e}");
response_err(
&app_handle,
&*response.lock().await,
format!("{e}{e:?}"),
&update_source,
);
}
};
let r = response.lock().await.clone();
done_tx.send(r).unwrap();
});
};
let app_handle = app_handle.clone();
Ok(tokio::select! {
Ok(r) = done_rx => r,
_ = cancelled_rx.changed() => {
match app_handle.with_db(|c| c.get_http_response(&response_id)) {
Ok(mut r) => {
r.state = HttpResponseState::Closed;
r.elapsed = start.elapsed().as_millis() as i32;
r.elapsed_headers = start.elapsed().as_millis() as i32;
app_handle.db().update_http_response_if_id(&r, &UpdateSource::from_window(window))
.expect("Failed to update response")
},
_ => {
response_err(&app_handle, &*response.lock().await, "Ephemeral request was cancelled".to_string(), &update_source)
}.clone(),
}
}
})
}
pub fn resolve_http_request<R: Runtime>(
@@ -719,46 +243,209 @@ pub fn resolve_http_request<R: Runtime>(
Ok((new_request, authentication_context_id))
}
fn ensure_proto(url_str: &str) -> String {
if url_str.starts_with("http://") || url_str.starts_with("https://") {
return url_str.to_string();
async fn execute_transaction<R: Runtime>(
client: reqwest::Client,
sendable_request: SendableHttpRequest,
response: Arc<Mutex<HttpResponse>>,
response_id: &String,
app_handle: &AppHandle<R>,
update_source: &UpdateSource,
mut cancelled_rx: Receiver<bool>,
) -> Result<HttpResponse> {
let sender = ReqwestSender::with_client(client);
let transaction = HttpTransaction::new(sender);
let start = Instant::now();
// Capture request headers before sending
let request_headers: Vec<HttpResponseHeader> = sendable_request
.headers
.iter()
.map(|(name, value)| HttpResponseHeader { name: name.clone(), value: value.clone() })
.collect();
{
// Update response with headers info and mark as connected
let mut r = response.lock().await;
r.url = sendable_request.url.clone();
r.request_headers = request_headers.clone();
app_handle.db().update_http_response_if_id(&r, &update_source)?;
}
// Url::from_str will fail without a proto, so add one
let parseable_url = format!("http://{}", url_str);
if let Ok(u) = Url::from_str(parseable_url.as_str()) {
match u.host() {
Some(host) => {
let h = host.to_string();
// These TLDs force HTTPS
if h.ends_with(".app") || h.ends_with(".dev") || h.ends_with(".page") {
return format!("https://{url_str}");
}
// Create channel for receiving events and spawn a task to store them in DB
let (event_tx, mut event_rx) =
tokio::sync::mpsc::unbounded_channel::<yaak_http::sender::HttpResponseEvent>();
// Write events to DB in a task
{
let response_id = response_id.clone();
let workspace_id = response.lock().await.workspace_id.clone();
let app_handle = app_handle.clone();
let update_source = update_source.clone();
tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
let db_event = HttpResponseEvent::new(&response_id, &workspace_id, event.into());
let _ = app_handle.db().upsert(&db_event, &update_source);
}
});
};
// Execute the transaction with cancellation support
// This returns the response with headers, but body is not yet consumed
// Events (headers, settings, chunks) are sent through the channel
let mut http_response = transaction
.execute_with_cancellation(sendable_request, cancelled_rx.clone(), event_tx)
.await?;
// Prepare the response path before consuming the body
let dir = app_handle.path().app_data_dir()?;
let base_dir = dir.join("responses");
create_dir_all(&base_dir).await?;
let body_path = if response_id.is_empty() {
base_dir.join(uuid::Uuid::new_v4().to_string())
} else {
base_dir.join(&response_id)
};
// Extract metadata before consuming the body (headers are available immediately)
// Url might change, so update again
let headers: Vec<HttpResponseHeader> = http_response
.headers
.iter()
.map(|(name, value)| HttpResponseHeader { name: name.clone(), value: value.clone() })
.collect();
{
// Update response with headers info and mark as connected
let mut r = response.lock().await;
r.body_path = Some(body_path.to_string_lossy().to_string());
r.elapsed_headers = start.elapsed().as_millis() as i32;
r.status = http_response.status as i32;
r.status_reason = http_response.status_reason.clone().clone();
r.url = http_response.url.clone().clone();
r.remote_addr = http_response.remote_addr.clone();
r.version = http_response.version.clone().clone();
r.headers = headers.clone();
r.content_length = http_response.content_length.map(|l| l as i32);
r.request_headers = http_response
.request_headers
.iter()
.map(|(n, v)| HttpResponseHeader { name: n.clone(), value: v.clone() })
.collect();
r.state = HttpResponseState::Connected;
app_handle.db().update_http_response_if_id(&r, &update_source)?;
}
// Get the body stream for manual consumption
let mut body_stream = http_response.into_body_stream()?;
// Open file for writing
let mut file = File::options()
.create(true)
.truncate(true)
.write(true)
.open(&body_path)
.await
.map_err(|e| GenericError(format!("Failed to open file: {}", e)))?;
// Stream body to file, updating DB on each chunk
let mut written_bytes: usize = 0;
let mut buf = [0u8; 8192];
loop {
// Check for cancellation. If we already have headers/body, just close cleanly without error
if *cancelled_rx.borrow() {
break;
}
// Use select! to race between reading and cancellation, so cancellation is immediate
let read_result = tokio::select! {
biased;
_ = cancelled_rx.changed() => {
break;
}
result = body_stream.read(&mut buf) => result,
};
match read_result {
Ok(0) => break, // EOF
Ok(n) => {
file.write_all(&buf[..n])
.await
.map_err(|e| GenericError(format!("Failed to write to file: {}", e)))?;
file.flush()
.await
.map_err(|e| GenericError(format!("Failed to flush file: {}", e)))?;
written_bytes += n;
// Update response in DB with progress
let mut r = response.lock().await;
r.elapsed = start.elapsed().as_millis() as i32; // Approx until the end
r.content_length = Some(written_bytes as i32);
app_handle.db().update_http_response_if_id(&r, &update_source)?;
}
Err(e) => {
return Err(GenericError(format!("Failed to read response body: {}", e)));
}
None => {}
}
}
format!("http://{url_str}")
// Final update with closed state
let mut resp = response.lock().await.clone();
resp.elapsed = start.elapsed().as_millis() as i32;
resp.state = HttpResponseState::Closed;
resp.body_path = Some(
body_path.to_str().ok_or(GenericError(format!("Invalid path {body_path:?}",)))?.to_string(),
);
app_handle.db().update_http_response_if_id(&resp, &update_source)?;
Ok(resp)
}
fn get_bool(v: &Value, key: &str, fallback: bool) -> bool {
match v.get(key) {
None => fallback,
Some(v) => v.as_bool().unwrap_or(fallback),
}
}
/// Resolve and apply the request's configured authentication strategy (via the
/// plugin system) to `sendable_request`, mutating its headers and URL in place.
///
/// `auth_context_id` is md5-hashed into a stable `context_id` so the auth
/// plugin can key cached state per request context.
async fn apply_authentication<R: Runtime>(
    window: &WebviewWindow<R>,
    sendable_request: &mut SendableHttpRequest,
    request: &HttpRequest,
    auth_context_id: String,
    plugin_manager: &PluginManager,
    plugin_context: &PluginContext,
) -> Result<()> {
    match &request.authentication_type {
        None => {
            // No authentication found. Not even inherited
        }
        Some(authentication_type) if authentication_type == "none" => {
            // Explicitly no authentication
        }
        Some(authentication_type) => {
            // Build the plugin call payload from the already-rendered request state.
            let req = CallHttpAuthenticationRequest {
                context_id: format!("{:x}", md5::compute(auth_context_id)),
                values: serde_json::from_value(serde_json::to_value(&request.authentication)?)?,
                url: sendable_request.url.clone(),
                method: sendable_request.method.clone(),
                headers: sendable_request
                    .headers
                    .iter()
                    .map(|(name, value)| HttpHeader {
                        name: name.to_string(),
                        value: value.to_string(),
                    })
                    .collect(),
            };
            let plugin_result = plugin_manager
                .call_http_authentication(&window, &authentication_type, req, plugin_context)
                .await?;
            // NOTE(review): this nested helper appears unused in this function and
            // looks like diff-merge residue (equivalent helpers exist in the
            // yaak-common serde module) — confirm against the repository.
            fn get_str<'a>(v: &'a Value, key: &str) -> &'a str {
                match v.get(key) {
                    None => "",
                    Some(v) => v.as_str().unwrap_or_default(),
                }
            }
            // Apply any headers the auth plugin asked to set.
            for header in plugin_result.set_headers.unwrap_or_default() {
                sendable_request.insert_header((header.name, header.value));
            }
            // NOTE(review): this fragment is never closed before the `if let` below
            // and cannot be valid Rust as shown — it looks like interleaved old/new
            // diff lines. Reconstruct the real body from the repository.
            fn get_str_h<'a>(v: &'a BTreeMap<String, Value>, key: &str) -> &'a str {
                match v.get(key) {
                    None => "",
                    Some(v) => v.as_str().unwrap_or_default(),
            // Append any query parameters the auth plugin asked to set.
            if let Some(params) = plugin_result.set_query_parameters {
                let params = params.into_iter().map(|p| (p.name, p.value)).collect::<Vec<_>>();
                sendable_request.url = append_query_params(&sendable_request.url, params);
            }
        }
    }
    Ok(())
}

View File

@@ -34,8 +34,8 @@ use yaak_grpc::{Code, ServiceDefinition, serialize_message};
use yaak_mac_window::AppHandleMacWindowExt;
use yaak_models::models::{
AnyModel, CookieJar, Environment, GrpcConnection, GrpcConnectionState, GrpcEvent,
GrpcEventType, GrpcRequest, HttpRequest, HttpResponse, HttpResponseState, Plugin, Workspace,
WorkspaceMeta,
GrpcEventType, GrpcRequest, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseState,
Plugin, Workspace, WorkspaceMeta,
};
use yaak_models::query_manager::QueryManagerExt;
use yaak_models::util::{BatchUpsertResult, UpdateSource, get_workspace_export_resources};
@@ -180,9 +180,7 @@ async fn cmd_grpc_reflect<R: Runtime>(
&PluginContext::new(&window),
RenderPurpose::Send,
),
&RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
},
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await?;
@@ -234,9 +232,7 @@ async fn cmd_grpc_go<R: Runtime>(
&PluginContext::new(&window),
RenderPurpose::Send,
),
&RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
},
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await?;
@@ -362,9 +358,7 @@ async fn cmd_grpc_go<R: Runtime>(
&PluginContext::new(&window),
RenderPurpose::Send,
),
&RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
},
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await
.expect("Failed to render template")
@@ -414,9 +408,7 @@ async fn cmd_grpc_go<R: Runtime>(
&PluginContext::new(&window),
RenderPurpose::Send,
),
&RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
},
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await?;
@@ -813,10 +805,7 @@ async fn cmd_http_response_body<R: Runtime>(
Some(filter) if !filter.is_empty() => {
Ok(plugin_manager.filter_data(&window, filter, &body, content_type).await?)
}
_ => Ok(FilterResponse {
content: body,
error: None,
}),
_ => Ok(FilterResponse { content: body, error: None }),
}
}
@@ -841,6 +830,17 @@ async fn cmd_get_sse_events(file_path: &str) -> YaakResult<Vec<ServerSentEvent>>
Ok(events)
}
/// Tauri command: fetch all recorded events for a given HTTP response id.
#[tauri::command]
async fn cmd_get_http_response_events<R: Runtime>(
    app_handle: AppHandle<R>,
    response_id: &str,
) -> YaakResult<Vec<HttpResponseEvent>> {
    use yaak_models::models::HttpResponseEventIden;

    // Look up every event row whose ResponseId column matches the given id.
    Ok(app_handle.db().find_many(HttpResponseEventIden::ResponseId, response_id, None)?)
}
#[tauri::command]
async fn cmd_import_data<R: Runtime>(
window: WebviewWindow<R>,
@@ -1202,11 +1202,7 @@ async fn cmd_install_plugin<R: Runtime>(
plugin_manager.add_plugin_by_dir(&PluginContext::new(&window), &directory).await?;
Ok(app_handle.db().upsert_plugin(
&Plugin {
directory: directory.into(),
url,
..Default::default()
},
&Plugin { directory: directory.into(), url, ..Default::default() },
&UpdateSource::from_window(&window),
)?)
}
@@ -1477,6 +1473,7 @@ pub fn run() {
cmd_get_http_authentication_summaries,
cmd_get_http_authentication_config,
cmd_get_sse_events,
cmd_get_http_response_events,
cmd_get_workspace_meta,
cmd_grpc_go,
cmd_grpc_reflect,
@@ -1527,11 +1524,7 @@ pub fn run() {
let _ = db.cancel_pending_websocket_connections();
});
}
RunEvent::WindowEvent {
event: WindowEvent::Focused(true),
label,
..
} => {
RunEvent::WindowEvent { event: WindowEvent::Focused(true), label, .. } => {
if cfg!(feature = "updater") {
// Run update check whenever the window is focused
let w = app_handle.get_webview_window(&label).unwrap();
@@ -1566,10 +1559,7 @@ pub fn run() {
}
});
}
RunEvent::WindowEvent {
event: WindowEvent::CloseRequested { .. },
..
} => {
RunEvent::WindowEvent { event: WindowEvent::CloseRequested { .. }, .. } => {
if let Err(e) = app_handle.save_window_state(StateFlags::all()) {
warn!("Failed to save window state {e:?}");
} else {

View File

@@ -78,9 +78,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
environment_id.as_deref(),
)?;
let cb = PluginTemplateCallback::new(app_handle, &plugin_context, req.purpose);
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let grpc_request =
render_grpc_request(&req.grpc_request, environment_chain, &cb, &opt).await?;
Ok(Some(InternalEventPayload::RenderGrpcRequestResponse(RenderGrpcRequestResponse {
@@ -99,9 +97,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
environment_id.as_deref(),
)?;
let cb = PluginTemplateCallback::new(app_handle, &plugin_context, req.purpose);
let opt = &RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = &RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let http_request =
render_http_request(&req.http_request, environment_chain, &cb, &opt).await?;
Ok(Some(InternalEventPayload::RenderHttpRequestResponse(RenderHttpRequestResponse {
@@ -130,9 +126,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
environment_id.as_deref(),
)?;
let cb = PluginTemplateCallback::new(app_handle, &plugin_context, req.purpose);
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let data = render_json_value(req.data, environment_chain, &cb, &opt).await?;
Ok(Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data })))
}

View File

@@ -80,12 +80,7 @@ pub async fn render_grpc_request<T: TemplateCallback>(
let url = parse_and_render(r.url.as_str(), vars, cb, &opt).await?;
Ok(GrpcRequest {
url,
metadata,
authentication,
..r.to_owned()
})
Ok(GrpcRequest { url, metadata, authentication, ..r.to_owned() })
}
pub async fn render_http_request<T: TemplateCallback>(
@@ -162,14 +157,7 @@ pub async fn render_http_request<T: TemplateCallback>(
let url = parse_and_render(r.url.clone().as_str(), vars, cb, &opt).await?;
// This doesn't fit perfectly with the concept of "rendering" but it kind of does
let (url, url_parameters) = apply_path_placeholders(&url, url_parameters);
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
Ok(HttpRequest {
url,
url_parameters,
headers,
body,
authentication,
..r.to_owned()
})
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
}

View File

@@ -259,17 +259,11 @@ async fn start_integrated_update<R: Runtime>(
self.win.unlisten(self.id);
}
}
let _guard = Unlisten {
win: window,
id: event_id,
};
let _guard = Unlisten { win: window, id: event_id };
// 2) Emit the event now that listener is in place
let info = UpdateInfo {
version: update.version.to_string(),
downloaded,
reply_event_id: reply_id,
};
let info =
UpdateInfo { version: update.version.to_string(), downloaded, reply_event_id: reply_id };
window
.emit_to(window.label(), "update_available", &info)
.map_err(|e| GenericError(format!("Failed to emit update_available: {e}")))?;

View File

@@ -3,7 +3,8 @@ use crate::window_menu::app_menu;
use log::{info, warn};
use rand::random;
use tauri::{
AppHandle, Emitter, LogicalSize, Manager, PhysicalSize, Runtime, WebviewUrl, WebviewWindow, WindowEvent
AppHandle, Emitter, LogicalSize, Manager, PhysicalSize, Runtime, WebviewUrl, WebviewWindow,
WindowEvent,
};
use tauri_plugin_opener::OpenerExt;
use tokio::sync::mpsc;

View File

@@ -30,7 +30,8 @@ pub fn app_menu<R: Runtime>(app_handle: &AppHandle<R>) -> tauri::Result<Menu<R>>
],
)?;
#[cfg(target_os = "macos")] {
#[cfg(target_os = "macos")]
{
window_menu.set_as_windows_menu_for_nsapp()?;
}
@@ -48,7 +49,8 @@ pub fn app_menu<R: Runtime>(app_handle: &AppHandle<R>) -> tauri::Result<Menu<R>>
],
)?;
#[cfg(target_os = "macos")] {
#[cfg(target_os = "macos")]
{
help_menu.set_as_windows_menu_for_nsapp()?;
}
@@ -151,8 +153,11 @@ pub fn app_menu<R: Runtime>(app_handle: &AppHandle<R>) -> tauri::Result<Menu<R>>
.build(app_handle)?,
&MenuItemBuilder::with_id("dev.reset_size".to_string(), "Reset Size")
.build(app_handle)?,
&MenuItemBuilder::with_id("dev.reset_size_record".to_string(), "Reset Size 16x9")
.build(app_handle)?,
&MenuItemBuilder::with_id(
"dev.reset_size_record".to_string(),
"Reset Size 16x9",
)
.build(app_handle)?,
&MenuItemBuilder::with_id(
"dev.generate_theme_css".to_string(),
"Generate Theme CSS",

View File

@@ -10,3 +10,4 @@ reqwest = { workspace = true, features = ["system-proxy", "gzip"] }
thiserror = { workspace = true }
regex = "1.11.0"
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }

View File

@@ -1,4 +1,5 @@
pub mod window;
pub mod platform;
pub mod api_client;
pub mod error;
pub mod platform;
pub mod serde;
pub mod window;

View File

@@ -0,0 +1,23 @@
use serde_json::Value;
use std::collections::BTreeMap;
pub fn get_bool(v: &Value, key: &str, fallback: bool) -> bool {
match v.get(key) {
None => fallback,
Some(v) => v.as_bool().unwrap_or(fallback),
}
}
pub fn get_str<'a>(v: &'a Value, key: &str) -> &'a str {
match v.get(key) {
None => "",
Some(v) => v.as_str().unwrap_or_default(),
}
}
pub fn get_str_map<'a>(v: &'a BTreeMap<String, Value>, key: &str) -> &'a str {
match v.get(key) {
None => "",
Some(v) => v.as_str().unwrap_or_default(),
}
}

View File

@@ -96,18 +96,12 @@ impl EncryptionManager {
let workspace = tx.get_workspace(workspace_id)?;
let workspace_meta = tx.get_or_create_workspace_meta(workspace_id)?;
tx.upsert_workspace(
&Workspace {
encryption_key_challenge,
..workspace
},
&Workspace { encryption_key_challenge, ..workspace },
&UpdateSource::Background,
)?;
Ok(tx.upsert_workspace_meta(
&WorkspaceMeta {
encryption_key: Some(encrypted_key.clone()),
..workspace_meta
},
&WorkspaceMeta { encryption_key: Some(encrypted_key.clone()), ..workspace_meta },
&UpdateSource::Background,
)?)
})?;

View File

@@ -39,9 +39,7 @@ impl WorkspaceKey {
}
pub(crate) fn from_raw_key(key: &[u8]) -> Self {
Self {
key: Key::<XChaCha20Poly1305>::clone_from_slice(key),
}
Self { key: Key::<XChaCha20Poly1305>::clone_from_slice(key) }
}
pub(crate) fn raw_key(&self) -> &[u8] {

View File

@@ -34,8 +34,5 @@ pub(crate) async fn list() -> Result<Fonts> {
ui_fonts.sort();
editor_fonts.sort();
Ok(Fonts {
ui_fonts,
editor_fonts,
})
Ok(Fonts { ui_fonts, editor_fonts })
}

View File

@@ -1,10 +1,10 @@
use crate::error::Result;
use std::path::Path;
use std::process::{Command, Stdio};
use crate::error::Result;
use crate::error::Error::GitNotFound;
#[cfg(target_os = "windows")]
use std::os::windows::process::CommandExt;
use crate::error::Error::GitNotFound;
#[cfg(target_os = "windows")]
const CREATE_NO_WINDOW: u32 = 0x0800_0000;

View File

@@ -1,4 +1,7 @@
use crate::commands::{add, add_credential, add_remote, branch, checkout, commit, delete_branch, fetch_all, initialize, log, merge_branch, pull, push, remotes, rm_remote, status, unstage};
use crate::commands::{
add, add_credential, add_remote, branch, checkout, commit, delete_branch, fetch_all,
initialize, log, merge_branch, pull, push, remotes, rm_remote, status, unstage,
};
use tauri::{
Runtime, generate_handler,
plugin::{Builder, TauriPlugin},
@@ -10,6 +13,7 @@ mod branch;
mod commands;
mod commit;
mod credential;
pub mod error;
mod fetch;
mod init;
mod log;
@@ -21,7 +25,6 @@ mod repository;
mod status;
mod unstage;
mod util;
pub mod error;
pub fn init<R: Runtime>() -> TauriPlugin<R> {
Builder::new("yaak-git")

View File

@@ -37,10 +37,7 @@ pub(crate) fn git_pull(dir: &Path) -> Result<PullResult> {
info!("Pulled status={} {combined}", out.status);
if combined.to_lowercase().contains("could not read") {
return Ok(PullResult::NeedsCredentials {
url: remote_url.to_string(),
error: None,
});
return Ok(PullResult::NeedsCredentials { url: remote_url.to_string(), error: None });
}
if combined.to_lowercase().contains("unable to access") {
@@ -58,9 +55,7 @@ pub(crate) fn git_pull(dir: &Path) -> Result<PullResult> {
return Ok(PullResult::UpToDate);
}
Ok(PullResult::Success {
message: format!("Pulled from {}/{}", remote_name, branch_name),
})
Ok(PullResult::Success { message: format!("Pulled from {}/{}", remote_name, branch_name) })
}
// pub(crate) fn git_pull_old(dir: &Path) -> Result<PullResult> {

View File

@@ -37,10 +37,7 @@ pub(crate) fn git_push(dir: &Path) -> Result<PushResult> {
info!("Pushed to repo status={} {combined}", out.status);
if combined.to_lowercase().contains("could not read") {
return Ok(PushResult::NeedsCredentials {
url: remote_url.to_string(),
error: None,
});
return Ok(PushResult::NeedsCredentials { url: remote_url.to_string(), error: None });
}
if combined.to_lowercase().contains("unable to access") {
@@ -58,7 +55,5 @@ pub(crate) fn git_push(dir: &Path) -> Result<PushResult> {
return Err(GenericError(format!("Failed to push {combined}")));
}
Ok(PushResult::Success {
message: format!("Pushed to {}/{}", remote_name, branch_name),
})
Ok(PushResult::Success { message: format!("Pushed to {}/{}", remote_name, branch_name) })
}

View File

@@ -28,10 +28,7 @@ pub(crate) fn git_remotes(dir: &Path) -> Result<Vec<GitRemote>> {
continue;
}
};
remotes.push(GitRemote {
name: name.to_string(),
url: r.url().map(|u| u.to_string()),
});
remotes.push(GitRemote { name: name.to_string(), url: r.url().map(|u| u.to_string()) });
}
Ok(remotes)
@@ -40,10 +37,7 @@ pub(crate) fn git_remotes(dir: &Path) -> Result<Vec<GitRemote>> {
pub(crate) fn git_add_remote(dir: &Path, name: &str, url: &str) -> Result<GitRemote> {
let repo = open_repo(dir)?;
repo.remote(name, url)?;
Ok(GitRemote {
name: name.to_string(),
url: Some(url.to_string()),
})
Ok(GitRemote { name: name.to_string(), url: Some(url.to_string()) })
}
pub(crate) fn git_rm_remote(dir: &Path, name: &str) -> Result<()> {

View File

@@ -1,5 +1,5 @@
use std::path::Path;
use crate::error::Error::{GitRepoNotFound, GitUnknown};
use std::path::Path;
pub(crate) fn open_repo(dir: &Path) -> crate::error::Result<git2::Repository> {
match git2::Repository::discover(dir) {
@@ -8,4 +8,3 @@ pub(crate) fn open_repo(dir: &Path) -> crate::error::Result<git2::Repository> {
Err(e) => Err(GitUnknown(e)),
}
}

View File

@@ -1,6 +1,6 @@
use std::path::Path;
use log::info;
use crate::repository::open_repo;
use log::info;
use std::path::Path;
pub(crate) fn git_unstage(dir: &Path, rela_path: &Path) -> crate::error::Result<()> {
let repo = open_repo(dir)?;
@@ -25,4 +25,3 @@ pub(crate) fn git_unstage(dir: &Path, rela_path: &Path) -> crate::error::Result<
Ok(())
}

View File

@@ -42,11 +42,7 @@ impl AutoReflectionClient {
get_transport(validate_certificates, client_cert.clone())?,
uri.clone(),
);
Ok(AutoReflectionClient {
use_v1alpha: false,
client_v1,
client_v1alpha,
})
Ok(AutoReflectionClient { use_v1alpha: false, client_v1, client_v1alpha })
}
#[async_recursion]
@@ -140,9 +136,7 @@ fn to_v1_msg_response(
service: v
.service
.iter()
.map(|s| ServiceResponse {
name: s.name.clone(),
})
.map(|s| ServiceResponse { name: s.name.clone() })
.collect(),
})
}
@@ -176,10 +170,7 @@ fn to_v1alpha_msg_request(
extension_number,
containing_type,
}) => v1alpha::server_reflection_request::MessageRequest::FileContainingExtension(
v1alpha::ExtensionRequest {
extension_number,
containing_type,
},
v1alpha::ExtensionRequest { extension_number, containing_type },
),
MessageRequest::AllExtensionNumbersOfType(v) => {
v1alpha::server_reflection_request::MessageRequest::AllExtensionNumbersOfType(v)

View File

@@ -1,7 +1,7 @@
use prost_reflect::prost::Message;
use prost_reflect::{DynamicMessage, MethodDescriptor};
use tonic::codec::{Codec, DecodeBuf, Decoder, EncodeBuf, Encoder};
use tonic::Status;
use tonic::codec::{Codec, DecodeBuf, Decoder, EncodeBuf, Encoder};
#[derive(Clone)]
pub struct DynamicCodec(MethodDescriptor);

View File

@@ -1,8 +1,8 @@
use crate::manager::GrpcStreamError;
use prost::DecodeError;
use serde::{Serialize, Serializer};
use serde_json::Error as SerdeJsonError;
use std::io;
use prost::DecodeError;
use thiserror::Error;
use tonic::Status;

View File

@@ -11,9 +11,7 @@ struct JsonSchemaGenerator {
impl JsonSchemaGenerator {
pub fn new() -> Self {
JsonSchemaGenerator {
msg_mapping: HashMap::new(),
}
JsonSchemaGenerator { msg_mapping: HashMap::new() }
}
pub fn generate_json_schema(msg: MessageDescriptor) -> JsonSchemaEntry {
@@ -297,16 +295,10 @@ impl JsonSchemaEntry {
impl JsonSchemaEntry {
pub fn object() -> Self {
JsonSchemaEntry {
type_: Some(JsonType::Object),
..Default::default()
}
JsonSchemaEntry { type_: Some(JsonType::Object), ..Default::default() }
}
pub fn boolean() -> Self {
JsonSchemaEntry {
type_: Some(JsonType::Boolean),
..Default::default()
}
JsonSchemaEntry { type_: Some(JsonType::Boolean), ..Default::default() }
}
pub fn number<S: Into<String>>(format: S) -> Self {
JsonSchemaEntry {
@@ -316,10 +308,7 @@ impl JsonSchemaEntry {
}
}
pub fn string() -> Self {
JsonSchemaEntry {
type_: Some(JsonType::String),
..Default::default()
}
JsonSchemaEntry { type_: Some(JsonType::String), ..Default::default() }
}
pub fn string_with_format<S: Into<String>>(format: S) -> Self {
@@ -330,16 +319,10 @@ impl JsonSchemaEntry {
}
}
pub fn reference<S: AsRef<str>>(ref_: S) -> Self {
JsonSchemaEntry {
ref_: Some(format!("#/$defs/{}", ref_.as_ref())),
..Default::default()
}
JsonSchemaEntry { ref_: Some(format!("#/$defs/{}", ref_.as_ref())), ..Default::default() }
}
pub fn root_reference() -> Self{
JsonSchemaEntry {
ref_: Some("#".to_string()),
..Default::default()
}
pub fn root_reference() -> Self {
JsonSchemaEntry { ref_: Some("#".to_string()), ..Default::default() }
}
pub fn array(item: JsonSchemaEntry) -> Self {
JsonSchemaEntry {
@@ -349,11 +332,7 @@ impl JsonSchemaEntry {
}
}
pub fn enums(enums: Vec<String>) -> Self {
JsonSchemaEntry {
type_: Some(JsonType::String),
enum_: Some(enums),
..Default::default()
}
JsonSchemaEntry { type_: Some(JsonType::String), enum_: Some(enums), ..Default::default() }
}
pub fn map(value_type: JsonSchemaEntry) -> Self {
@@ -365,10 +344,7 @@ impl JsonSchemaEntry {
}
pub fn null() -> Self {
JsonSchemaEntry {
type_: Some(JsonType::Null),
..Default::default()
}
JsonSchemaEntry { type_: Some(JsonType::Null), ..Default::default() }
}
}

View File

@@ -2,17 +2,17 @@ use prost_reflect::{DynamicMessage, MethodDescriptor, SerializeOptions};
use serde::{Deserialize, Serialize};
use serde_json::Deserializer;
mod any;
mod client;
mod codec;
pub mod error;
mod json_schema;
pub mod manager;
mod reflection;
mod transport;
mod any;
pub mod error;
pub use tonic::metadata::*;
pub use tonic::Code;
pub use tonic::metadata::*;
pub fn serialize_options() -> SerializeOptions {
SerializeOptions::new().skip_default_fields(false)

View File

@@ -57,19 +57,13 @@ impl Display for GrpcStreamError {
impl From<String> for GrpcStreamError {
fn from(value: String) -> Self {
GrpcStreamError {
message: value.to_string(),
status: None,
}
GrpcStreamError { message: value.to_string(), status: None }
}
}
impl From<Status> for GrpcStreamError {
fn from(s: Status) -> Self {
GrpcStreamError {
message: s.message().to_string(),
status: Some(s),
}
GrpcStreamError { message: s.message().to_string(), status: Some(s) }
}
}
@@ -227,10 +221,10 @@ impl GrpcConnection {
decorate_req(metadata, &mut req)?;
client.ready().await.map_err(|e| GenericError(format!("Failed to connect: {}", e)))?;
Ok(client.client_streaming(req, path, codec).await.map_err(|e| GrpcStreamError {
message: e.message().to_string(),
status: Some(e),
})?)
Ok(client
.client_streaming(req, path, codec)
.await
.map_err(|e| GrpcStreamError { message: e.message().to_string(), status: Some(e) })?)
}
pub async fn server_streaming(
@@ -267,10 +261,7 @@ pub struct GrpcHandle {
impl GrpcHandle {
pub fn new(app_handle: &AppHandle) -> Self {
let pools = BTreeMap::new();
Self {
pools,
app_handle: app_handle.clone(),
}
Self { pools, app_handle: app_handle.clone() }
}
}
@@ -335,10 +326,8 @@ impl GrpcHandle {
fn services_from_pool(&self, pool: &DescriptorPool) -> Vec<ServiceDefinition> {
pool.services()
.map(|s| {
let mut def = ServiceDefinition {
name: s.full_name().to_string(),
methods: vec![],
};
let mut def =
ServiceDefinition { name: s.full_name().to_string(), methods: vec![] };
for method in s.methods() {
let input_message = method.input();
def.methods.push(MethodDefinition {
@@ -384,12 +373,7 @@ impl GrpcHandle {
.clone();
let uri = uri_from_str(uri)?;
let conn = get_transport(validate_certificates, client_cert.clone())?;
Ok(GrpcConnection {
pool: Arc::new(RwLock::new(pool)),
use_reflection,
conn,
uri,
})
Ok(GrpcConnection { pool: Arc::new(RwLock::new(pool)), use_reflection, conn, uri })
}
fn get_pool(&self, id: &str, uri: &str, proto_files: &Vec<PathBuf>) -> Option<&DescriptorPool> {

View File

@@ -327,10 +327,7 @@ mod topology {
T: Eq + std::hash::Hash + Clone,
{
pub fn new() -> Self {
SimpleTopoSort {
out_graph: HashMap::new(),
in_graph: HashMap::new(),
}
SimpleTopoSort { out_graph: HashMap::new(), in_graph: HashMap::new() }
}
pub fn insert<I: IntoIterator<Item = T>>(&mut self, node: T, deps: I) {
@@ -376,10 +373,7 @@ mod topology {
}
}
SimpleTopoSortIter {
data,
zero_indegree,
}
SimpleTopoSortIter { data, zero_indegree }
}
}

View File

@@ -1,11 +1,11 @@
use crate::error::Result;
use hyper_rustls::{HttpsConnector, HttpsConnectorBuilder};
use hyper_util::client::legacy::connect::HttpConnector;
use hyper_util::client::legacy::Client;
use hyper_util::client::legacy::connect::HttpConnector;
use hyper_util::rt::TokioExecutor;
use log::info;
use tonic::body::BoxBody;
use yaak_tls::{get_tls_config, ClientCertificateConfig};
use yaak_tls::{ClientCertificateConfig, get_tls_config};
// I think ALPN breaks this because we're specifying http2_only
const WITH_ALPN: bool = false;
@@ -14,8 +14,7 @@ pub(crate) fn get_transport(
validate_certificates: bool,
client_cert: Option<ClientCertificateConfig>,
) -> Result<Client<HttpsConnector<HttpConnector>, BoxBody>> {
let tls_config =
get_tls_config(validate_certificates, WITH_ALPN, client_cert.clone())?;
let tls_config = get_tls_config(validate_certificates, WITH_ALPN, client_cert.clone())?;
let mut http = HttpConnector::new();
http.enforce_http(false);

View File

@@ -5,16 +5,27 @@ edition = "2024"
publish = false
[dependencies]
async-compression = { version = "0.4", features = ["tokio", "gzip", "deflate", "brotli", "zstd"] }
async-trait = "0.1"
brotli = "7"
bytes = "1.5.0"
flate2 = "1"
futures-util = "0.3"
zstd = "0.13"
hyper-util = { version = "0.1.17", default-features = false, features = ["client-legacy"] }
log = { workspace = true }
mime_guess = "2.0.5"
regex = "1.11.1"
reqwest = { workspace = true, features = ["multipart", "cookies", "gzip", "brotli", "deflate", "json", "rustls-tls-manual-roots-no-provider", "socks", "http2"] }
reqwest = { workspace = true, features = ["cookies", "rustls-tls-manual-roots-no-provider", "socks", "http2", "stream"] }
reqwest_cookie_store = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tauri = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tokio = { workspace = true, features = ["macros", "rt", "fs", "io-util"] }
tokio-util = { version = "0.7", features = ["codec", "io", "io-util"] }
tower-service = "0.3.3"
urlencoding = "2.1.3"
yaak-common = { workspace = true }
yaak-models = { workspace = true }
yaak-tls = { workspace = true }

View File

@@ -0,0 +1,78 @@
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::io::{AsyncRead, ReadBuf};
/// A stream that chains multiple AsyncRead sources together
pub(crate) struct ChainedReader {
    // Ordered sources (in-memory bytes and/or file paths) still to be read.
    readers: Vec<ReaderType>,
    // Index into `readers` of the NEXT source to open.
    current_index: usize,
    // The source currently being drained; None until the next one is opened.
    current_reader: Option<Box<dyn AsyncRead + Send + Unpin + 'static>>,
}
#[derive(Clone)]
pub(crate) enum ReaderType {
    // An in-memory chunk of body bytes.
    Bytes(Vec<u8>),
    // A path to a file whose contents are streamed when this source is reached.
    FilePath(String),
}
impl ChainedReader {
pub(crate) fn new(readers: Vec<ReaderType>) -> Self {
Self { readers, current_index: 0, current_reader: None }
}
}
impl AsyncRead for ChainedReader {
    /// Read from the current source, transparently advancing to the next
    /// source whenever the current one reports EOF. Returns Ready(Ok(())) with
    /// no bytes filled once all sources are exhausted.
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> Poll<io::Result<()>> {
        loop {
            // Drain the currently-open reader first, if any.
            if let Some(ref mut reader) = self.current_reader {
                let before_len = buf.filled().len();
                return match Pin::new(reader).poll_read(cx, buf) {
                    Poll::Ready(Ok(())) => {
                        if buf.filled().len() == before_len && buf.remaining() > 0 {
                            // Current reader is exhausted, move to next
                            self.current_reader = None;
                            continue;
                        }
                        Poll::Ready(Ok(()))
                    }
                    Poll::Ready(Err(e)) => Poll::Ready(Err(e)),
                    Poll::Pending => Poll::Pending,
                };
            }

            // No open reader: advance to the next source, or signal EOF.
            if self.current_index >= self.readers.len() {
                return Poll::Ready(Ok(()));
            }

            // Take ownership of the next source instead of cloning it. The
            // previous implementation cloned the ReaderType, which copied the
            // entire Vec<u8> for Bytes variants every time a source was opened.
            // Each slot is consumed at most once (current_index only advances),
            // so swapping in an empty placeholder is safe.
            let idx = self.current_index;
            self.current_index += 1;
            let reader_type =
                std::mem::replace(&mut self.readers[idx], ReaderType::Bytes(Vec::new()));
            match reader_type {
                ReaderType::Bytes(bytes) => {
                    self.current_reader = Some(Box::new(io::Cursor::new(bytes)));
                }
                ReaderType::FilePath(path) => {
                    // We need to handle file opening synchronously in poll_read.
                    // This is a limitation - we'll use blocking file open.
                    match std::fs::File::open(&path) {
                        Ok(file) => {
                            // Convert std File to tokio File
                            let tokio_file = tokio::fs::File::from_std(file);
                            self.current_reader = Some(Box::new(tokio_file));
                        }
                        Err(e) => return Poll::Ready(Err(e)),
                    }
                }
            }
        }
    }
}

View File

@@ -1,11 +1,9 @@
use crate::dns::LocalhostResolver;
use crate::error::Result;
use log::{debug, info, warn};
use reqwest::redirect::Policy;
use reqwest::{Client, Proxy};
use reqwest::{Client, Proxy, redirect};
use reqwest_cookie_store::CookieStoreMutex;
use std::sync::Arc;
use std::time::Duration;
use yaak_tls::{ClientCertificateConfig, get_tls_config};
#[derive(Clone)]
@@ -29,11 +27,9 @@ pub enum HttpConnectionProxySetting {
#[derive(Clone)]
pub struct HttpConnectionOptions {
pub id: String,
pub follow_redirects: bool,
pub validate_certificates: bool,
pub proxy: HttpConnectionProxySetting,
pub cookie_provider: Option<Arc<CookieStoreMutex>>,
pub timeout: Option<Duration>,
pub client_certificate: Option<ClientCertificateConfig>,
}
@@ -41,9 +37,11 @@ impl HttpConnectionOptions {
pub(crate) fn build_client(&self) -> Result<Client> {
let mut client = Client::builder()
.connection_verbose(true)
.gzip(true)
.brotli(true)
.deflate(true)
.redirect(redirect::Policy::none())
// Decompression is handled by HttpTransaction, not reqwest
.no_gzip()
.no_brotli()
.no_deflate()
.referer(false)
.tls_info(true);
@@ -55,12 +53,6 @@ impl HttpConnectionOptions {
// Configure DNS resolver
client = client.dns_resolver(LocalhostResolver::new());
// Configure redirects
client = client.redirect(match self.follow_redirects {
true => Policy::limited(10), // TODO: Handle redirects natively
false => Policy::none(),
});
// Configure cookie provider
if let Some(p) = &self.cookie_provider {
client = client.cookie_provider(Arc::clone(&p));
@@ -72,23 +64,13 @@ impl HttpConnectionOptions {
HttpConnectionProxySetting::Disabled => {
client = client.no_proxy();
}
HttpConnectionProxySetting::Enabled {
http,
https,
auth,
bypass,
} => {
HttpConnectionProxySetting::Enabled { http, https, auth, bypass } => {
for p in build_enabled_proxy(http, https, auth, bypass) {
client = client.proxy(p)
}
}
}
// Configure timeout
if let Some(d) = self.timeout {
client = client.timeout(d);
}
info!(
"Building new HTTP client validate_certificates={} client_cert={}",
self.validate_certificates,

View File

@@ -0,0 +1,188 @@
use crate::error::{Error, Result};
use async_compression::tokio::bufread::{
BrotliDecoder, DeflateDecoder as AsyncDeflateDecoder, GzipDecoder,
ZstdDecoder as AsyncZstdDecoder,
};
use flate2::read::{DeflateDecoder, GzDecoder};
use std::io::Read;
use tokio::io::{AsyncBufRead, AsyncRead};
/// Supported compression encodings
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ContentEncoding {
    Gzip,
    Deflate,
    Brotli,
    Zstd,
    Identity,
}

impl ContentEncoding {
    /// Parse a Content-Encoding header value into an encoding type.
    /// Returns Identity for unknown or missing encodings.
    pub fn from_header(value: Option<&str>) -> Self {
        // Normalize the header token (trim whitespace, case-insensitive match).
        let normalized = match value {
            None => return ContentEncoding::Identity,
            Some(raw) => raw.trim().to_lowercase(),
        };
        match normalized.as_str() {
            "gzip" | "x-gzip" => ContentEncoding::Gzip,
            "deflate" => ContentEncoding::Deflate,
            "br" => ContentEncoding::Brotli,
            "zstd" => ContentEncoding::Zstd,
            _ => ContentEncoding::Identity,
        }
    }
}
/// Result of decompression, containing both the decompressed data and size info
#[derive(Debug)]
pub struct DecompressResult {
    // The decompressed payload bytes.
    pub data: Vec<u8>,
    // Byte length of the input before decompression.
    pub compressed_size: u64,
    // Byte length of `data` after decompression.
    pub decompressed_size: u64,
}
/// Decompress data based on the Content-Encoding.
/// Returns the original data unchanged if encoding is Identity or unknown.
pub fn decompress(data: Vec<u8>, encoding: ContentEncoding) -> Result<DecompressResult> {
    // Record the input size before the buffer is consumed.
    let compressed_size = data.len() as u64;
    // Dispatch to the matching single-shot decoder; Identity is a pass-through.
    let data = match encoding {
        ContentEncoding::Identity => data,
        ContentEncoding::Gzip => decompress_gzip(&data)?,
        ContentEncoding::Deflate => decompress_deflate(&data)?,
        ContentEncoding::Brotli => decompress_brotli(&data)?,
        ContentEncoding::Zstd => decompress_zstd(&data)?,
    };
    let decompressed_size = data.len() as u64;
    Ok(DecompressResult { data, compressed_size, decompressed_size })
}
/// Inflate a complete gzip-compressed buffer into a new Vec.
fn decompress_gzip(data: &[u8]) -> Result<Vec<u8>> {
    let mut out = Vec::new();
    GzDecoder::new(data)
        .read_to_end(&mut out)
        .map_err(|e| Error::DecompressionError(format!("gzip decompression failed: {}", e)))?;
    Ok(out)
}
/// Inflate a complete raw-deflate-compressed buffer into a new Vec.
fn decompress_deflate(data: &[u8]) -> Result<Vec<u8>> {
    let mut out = Vec::new();
    DeflateDecoder::new(data)
        .read_to_end(&mut out)
        .map_err(|e| Error::DecompressionError(format!("deflate decompression failed: {}", e)))?;
    Ok(out)
}
/// Decode a complete brotli-compressed buffer into a new Vec.
fn decompress_brotli(data: &[u8]) -> Result<Vec<u8>> {
    let mut input = std::io::Cursor::new(data);
    let mut out = Vec::new();
    brotli::BrotliDecompress(&mut input, &mut out)
        .map_err(|e| Error::DecompressionError(format!("brotli decompression failed: {}", e)))?;
    Ok(out)
}
/// Decode a complete zstd-compressed buffer into a new Vec.
fn decompress_zstd(data: &[u8]) -> Result<Vec<u8>> {
    let input = std::io::Cursor::new(data);
    zstd::stream::decode_all(input)
        .map_err(|e| Error::DecompressionError(format!("zstd decompression failed: {}", e)))
}
/// Create a streaming decompressor that wraps an async reader.
/// Returns an AsyncRead that decompresses data on-the-fly.
pub fn streaming_decoder<R: AsyncBufRead + Unpin + Send + 'static>(
reader: R,
encoding: ContentEncoding,
) -> Box<dyn AsyncRead + Unpin + Send> {
match encoding {
ContentEncoding::Identity => Box::new(reader),
ContentEncoding::Gzip => Box::new(GzipDecoder::new(reader)),
ContentEncoding::Deflate => Box::new(AsyncDeflateDecoder::new(reader)),
ContentEncoding::Brotli => Box::new(BrotliDecoder::new(reader)),
ContentEncoding::Zstd => Box::new(AsyncZstdDecoder::new(reader)),
}
}
// Unit tests: each test round-trips data through a real encoder for the
// matching codec, then verifies decompress() restores the original bytes and
// reports accurate compressed/decompressed sizes.
#[cfg(test)]
mod tests {
    use super::*;
    use flate2::Compression;
    use flate2::write::GzEncoder;
    use std::io::Write;

    // Header parsing is case-insensitive and unknown/missing values fall
    // back to Identity.
    #[test]
    fn test_content_encoding_from_header() {
        assert_eq!(ContentEncoding::from_header(Some("gzip")), ContentEncoding::Gzip);
        assert_eq!(ContentEncoding::from_header(Some("x-gzip")), ContentEncoding::Gzip);
        assert_eq!(ContentEncoding::from_header(Some("GZIP")), ContentEncoding::Gzip);
        assert_eq!(ContentEncoding::from_header(Some("deflate")), ContentEncoding::Deflate);
        assert_eq!(ContentEncoding::from_header(Some("br")), ContentEncoding::Brotli);
        assert_eq!(ContentEncoding::from_header(Some("zstd")), ContentEncoding::Zstd);
        assert_eq!(ContentEncoding::from_header(Some("identity")), ContentEncoding::Identity);
        assert_eq!(ContentEncoding::from_header(Some("unknown")), ContentEncoding::Identity);
        assert_eq!(ContentEncoding::from_header(None), ContentEncoding::Identity);
    }

    // Identity: data passes through, both sizes equal the input length.
    #[test]
    fn test_decompress_identity() {
        let data = b"hello world".to_vec();
        let result = decompress(data.clone(), ContentEncoding::Identity).unwrap();
        assert_eq!(result.data, data);
        assert_eq!(result.compressed_size, 11);
        assert_eq!(result.decompressed_size, 11);
    }

    #[test]
    fn test_decompress_gzip() {
        // Compress some data with gzip
        let original = b"hello world, this is a test of gzip compression";
        let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
        encoder.write_all(original).unwrap();
        let compressed = encoder.finish().unwrap();

        let result = decompress(compressed.clone(), ContentEncoding::Gzip).unwrap();
        assert_eq!(result.data, original);
        assert_eq!(result.compressed_size, compressed.len() as u64);
        assert_eq!(result.decompressed_size, original.len() as u64);
    }

    #[test]
    fn test_decompress_deflate() {
        // Compress some data with deflate
        let original = b"hello world, this is a test of deflate compression";
        let mut encoder = flate2::write::DeflateEncoder::new(Vec::new(), Compression::default());
        encoder.write_all(original).unwrap();
        let compressed = encoder.finish().unwrap();

        let result = decompress(compressed.clone(), ContentEncoding::Deflate).unwrap();
        assert_eq!(result.data, original);
        assert_eq!(result.compressed_size, compressed.len() as u64);
        assert_eq!(result.decompressed_size, original.len() as u64);
    }

    #[test]
    fn test_decompress_brotli() {
        // Compress some data with brotli
        let original = b"hello world, this is a test of brotli compression";
        let mut compressed = Vec::new();
        // Dropping the writer flushes and finalizes the brotli stream
        let mut writer = brotli::CompressorWriter::new(&mut compressed, 4096, 4, 22);
        writer.write_all(original).unwrap();
        drop(writer);

        let result = decompress(compressed.clone(), ContentEncoding::Brotli).unwrap();
        assert_eq!(result.data, original);
        assert_eq!(result.compressed_size, compressed.len() as u64);
        assert_eq!(result.decompressed_size, original.len() as u64);
    }

    #[test]
    fn test_decompress_zstd() {
        // Compress some data with zstd
        let original = b"hello world, this is a test of zstd compression";
        let compressed = zstd::stream::encode_all(std::io::Cursor::new(original), 3).unwrap();

        let result = decompress(compressed.clone(), ContentEncoding::Zstd).unwrap();
        assert_eq!(result.data, original);
        assert_eq!(result.compressed_size, compressed.len() as u64);
        assert_eq!(result.decompressed_size, original.len() as u64);
    }
}

View File

@@ -8,6 +8,21 @@ pub enum Error {
#[error(transparent)]
TlsError(#[from] yaak_tls::error::Error),
#[error("Request failed with {0:?}")]
RequestError(String),
#[error("Request canceled")]
RequestCanceledError,
#[error("Timeout of {0:?} reached")]
RequestTimeout(std::time::Duration),
#[error("Decompression error: {0}")]
DecompressionError(String),
#[error("Failed to read response body: {0}")]
BodyReadError(String),
}
impl Serialize for Error {

View File

@@ -2,11 +2,17 @@ use crate::manager::HttpConnectionManager;
use tauri::plugin::{Builder, TauriPlugin};
use tauri::{Manager, Runtime};
mod chained_reader;
pub mod client;
pub mod decompress;
pub mod dns;
pub mod error;
pub mod manager;
pub mod path_placeholders;
mod proto;
pub mod sender;
pub mod transaction;
pub mod types;
pub fn init<R: Runtime>() -> TauriPlugin<R> {
Builder::new("yaak-http")

View File

@@ -2,7 +2,7 @@ use yaak_models::models::HttpUrlParameter;
pub fn apply_path_placeholders(
url: &str,
parameters: Vec<HttpUrlParameter>,
parameters: &Vec<HttpUrlParameter>,
) -> (String, Vec<HttpUrlParameter>) {
let mut new_parameters = Vec::new();
@@ -18,7 +18,7 @@ pub fn apply_path_placeholders(
// Remove as param if it modified the URL
if old_url_string == *url {
new_parameters.push(p);
new_parameters.push(p.to_owned());
}
}
@@ -55,12 +55,8 @@ mod placeholder_tests {
#[test]
fn placeholder_middle() {
let p = HttpUrlParameter {
name: ":foo".into(),
value: "xxx".into(),
enabled: true,
id: None,
};
let p =
HttpUrlParameter { name: ":foo".into(), value: "xxx".into(), enabled: true, id: None };
assert_eq!(
replace_path_placeholder(&p, "https://example.com/:foo/bar"),
"https://example.com/xxx/bar",
@@ -69,12 +65,8 @@ mod placeholder_tests {
#[test]
fn placeholder_end() {
let p = HttpUrlParameter {
name: ":foo".into(),
value: "xxx".into(),
enabled: true,
id: None,
};
let p =
HttpUrlParameter { name: ":foo".into(), value: "xxx".into(), enabled: true, id: None };
assert_eq!(
replace_path_placeholder(&p, "https://example.com/:foo"),
"https://example.com/xxx",
@@ -83,12 +75,8 @@ mod placeholder_tests {
#[test]
fn placeholder_query() {
let p = HttpUrlParameter {
name: ":foo".into(),
value: "xxx".into(),
enabled: true,
id: None,
};
let p =
HttpUrlParameter { name: ":foo".into(), value: "xxx".into(), enabled: true, id: None };
assert_eq!(
replace_path_placeholder(&p, "https://example.com/:foo?:foo"),
"https://example.com/xxx?:foo",
@@ -125,12 +113,8 @@ mod placeholder_tests {
#[test]
fn placeholder_prefix() {
let p = HttpUrlParameter {
name: ":foo".into(),
value: "xxx".into(),
enabled: true,
id: None,
};
let p =
HttpUrlParameter { name: ":foo".into(), value: "xxx".into(), enabled: true, id: None };
assert_eq!(
replace_path_placeholder(&p, "https://example.com/:foooo"),
"https://example.com/:foooo",
@@ -172,7 +156,7 @@ mod placeholder_tests {
..Default::default()
};
let (url, url_parameters) = apply_path_placeholders(&req.url, req.url_parameters);
let (url, url_parameters) = apply_path_placeholders(&req.url, &req.url_parameters);
// Pattern match back to access it
assert_eq!(url, "example.com/aaa/bar");

View File

@@ -0,0 +1,29 @@
use reqwest::Url;
use std::str::FromStr;
/// Ensure a URL string has an explicit scheme, defaulting to HTTP.
///
/// - Empty input is returned unchanged.
/// - Input already starting with `http://` or `https://` is returned as-is.
/// - Otherwise the host is parsed and hosts under TLDs that force HTTPS
///   (`.app`, `.dev`, `.page`) get an `https://` prefix; everything else
///   gets `http://`.
pub(crate) fn ensure_proto(url_str: &str) -> String {
    // TLDs that require HTTPS, so default to it for those hosts
    const HTTPS_TLDS: &[&str] = &[".app", ".dev", ".page"];

    if url_str.is_empty() {
        return "".to_string();
    }

    if url_str.starts_with("http://") || url_str.starts_with("https://") {
        return url_str.to_string();
    }

    // Url::from_str will fail without a proto, so add one just for parsing
    let parseable_url = format!("http://{}", url_str);
    if let Ok(u) = Url::from_str(parseable_url.as_str()) {
        // Idiomatic `if let` instead of a `match` with an empty None arm
        if let Some(host) = u.host() {
            let h = host.to_string();
            if HTTPS_TLDS.iter().any(|tld| h.ends_with(tld)) {
                return format!("https://{url_str}");
            }
        }
    }

    format!("http://{url_str}")
}

View File

@@ -0,0 +1,482 @@
use crate::decompress::{ContentEncoding, streaming_decoder};
use crate::error::{Error, Result};
use crate::types::{SendableBody, SendableHttpRequest};
use async_trait::async_trait;
use futures_util::StreamExt;
use reqwest::{Client, Method, Version};
use std::collections::HashMap;
use std::fmt::Display;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::Duration;
use tokio::io::{AsyncRead, AsyncReadExt, BufReader, ReadBuf};
use tokio::sync::mpsc;
use tokio_util::io::StreamReader;
/// How a redirect affects the follow-up request.
#[derive(Debug, Clone)]
pub enum RedirectBehavior {
    /// 307/308: Method and body are preserved
    Preserve,
    /// 303 or 301/302 with POST: Method changed to GET, body dropped
    DropBody,
}
/// A single entry in the verbose request/response event log.
/// Rendered curl-style by the `Display` impl (`*` info, `>` out, `<` in).
#[derive(Debug, Clone)]
pub enum HttpResponseEvent {
    /// A client setting applied to this request: (name, value)
    Setting(String, String),
    /// Free-form informational message
    Info(String),
    /// A redirect is being followed
    Redirect {
        url: String,
        status: u16,
        behavior: RedirectBehavior,
    },
    /// The outgoing request line (method + path)
    SendUrl {
        method: String,
        path: String,
    },
    /// The incoming status line (HTTP version + status text)
    ReceiveUrl {
        version: Version,
        status: String,
    },
    /// A request header: (name, value)
    HeaderUp(String, String),
    /// A response header: (name, value)
    HeaderDown(String, String),
    /// A chunk of the request body was sent
    ChunkSent {
        bytes: usize,
    },
    /// A chunk of the response body was received
    ChunkReceived {
        bytes: usize,
    },
}
impl Display for HttpResponseEvent {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
HttpResponseEvent::Setting(name, value) => write!(f, "* Setting {}={}", name, value),
HttpResponseEvent::Info(s) => write!(f, "* {}", s),
HttpResponseEvent::Redirect { url, status, behavior } => {
let behavior_str = match behavior {
RedirectBehavior::Preserve => "preserve",
RedirectBehavior::DropBody => "drop body",
};
write!(f, "* Redirect {} -> {} ({})", status, url, behavior_str)
}
HttpResponseEvent::SendUrl { method, path } => write!(f, "> {} {}", method, path),
HttpResponseEvent::ReceiveUrl { version, status } => {
write!(f, "< {} {}", version_to_str(version), status)
}
HttpResponseEvent::HeaderUp(name, value) => write!(f, "> {}: {}", name, value),
HttpResponseEvent::HeaderDown(name, value) => write!(f, "< {}: {}", name, value),
HttpResponseEvent::ChunkSent { bytes } => write!(f, "> [{} bytes sent]", bytes),
HttpResponseEvent::ChunkReceived { bytes } => write!(f, "< [{} bytes received]", bytes),
}
}
}
impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
    /// Convert the in-memory event into the persisted model representation.
    fn from(event: HttpResponseEvent) -> Self {
        use yaak_models::models::HttpResponseEventData as D;
        match event {
            HttpResponseEvent::Setting(name, value) => D::Setting { name, value },
            HttpResponseEvent::Info(message) => D::Info { message },
            HttpResponseEvent::Redirect { url, status, behavior } => D::Redirect {
                url,
                status,
                // Stored as snake_case strings ("preserve" / "drop_body")
                behavior: match behavior {
                    RedirectBehavior::Preserve => "preserve".to_string(),
                    RedirectBehavior::DropBody => "drop_body".to_string(),
                },
            },
            HttpResponseEvent::SendUrl { method, path } => D::SendUrl { method, path },
            HttpResponseEvent::ReceiveUrl { version, status } => {
                // Debug formatting of http::Version, e.g. "HTTP/1.1"
                D::ReceiveUrl { version: format!("{:?}", version), status }
            }
            HttpResponseEvent::HeaderUp(name, value) => D::HeaderUp { name, value },
            HttpResponseEvent::HeaderDown(name, value) => D::HeaderDown { name, value },
            HttpResponseEvent::ChunkSent { bytes } => D::ChunkSent { bytes },
            HttpResponseEvent::ChunkReceived { bytes } => D::ChunkReceived { bytes },
        }
    }
}
/// Statistics about the body after consumption
#[derive(Debug, Default, Clone)]
pub struct BodyStats {
    /// Size of the body as received over the wire (before decompression).
    /// NOTE(review): `HttpResponse::bytes()` approximates this with
    /// Content-Length when streaming through a decoder — it may not be exact.
    pub size_compressed: u64,
    /// Size of the body after decompression
    pub size_decompressed: u64,
}
/// An AsyncRead wrapper that sends chunk events as data is read
pub struct TrackingRead<R> {
    // The wrapped reader all reads are delegated to
    inner: R,
    // Channel used to emit ChunkReceived events; send errors are ignored
    event_tx: mpsc::UnboundedSender<HttpResponseEvent>,
    // Latched to true once a zero-byte read (EOF) is observed
    ended: bool,
}

impl<R> TrackingRead<R> {
    /// Wrap `inner` so that every successful read emits a
    /// `ChunkReceived` event on `event_tx`.
    pub fn new(inner: R, event_tx: mpsc::UnboundedSender<HttpResponseEvent>) -> Self {
        Self { inner, event_tx, ended: false }
    }
}
impl<R: AsyncRead + Unpin> AsyncRead for TrackingRead<R> {
    // Delegate to the inner reader, then report how many bytes the read
    // appended to `buf` as a ChunkReceived event.
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> Poll<std::io::Result<()>> {
        // Compare the buffer's fill level before/after to measure bytes read
        let before = buf.filled().len();
        let result = Pin::new(&mut self.inner).poll_read(cx, buf);
        if let Poll::Ready(Ok(())) = &result {
            let bytes_read = buf.filled().len() - before;
            if bytes_read > 0 {
                // Ignore send errors - receiver may have been dropped
                let _ = self.event_tx.send(HttpResponseEvent::ChunkReceived { bytes: bytes_read });
            } else if !self.ended {
                // A zero-byte Ready read signals EOF; latch it once.
                // NOTE(review): `ended` is written but never read in this
                // view — confirm whether it is still needed.
                self.ended = true;
            }
        }
        result
    }
}
/// Type alias for the body stream
type BodyStream = Pin<Box<dyn AsyncRead + Send>>;

/// HTTP response with deferred body consumption.
/// Headers are available immediately after send(), body can be consumed in different ways.
/// Note: Debug is manually implemented since BodyStream doesn't implement Debug.
pub struct HttpResponse {
    /// HTTP status code
    pub status: u16,
    /// HTTP status reason phrase (e.g., "OK", "Not Found")
    pub status_reason: Option<String>,
    /// Response headers
    pub headers: HashMap<String, String>,
    /// Request headers
    pub request_headers: HashMap<String, String>,
    /// Content-Length from headers (may differ from actual body size)
    pub content_length: Option<u64>,
    /// Final URL (after redirects)
    pub url: String,
    /// Remote address of the server
    pub remote_addr: Option<String>,
    /// HTTP version (e.g., "HTTP/1.1", "HTTP/2")
    pub version: Option<String>,
    /// The body stream (consumed when calling bytes(), text(), write_to_file(), or drain()).
    /// `None` once the body has been consumed.
    body_stream: Option<BodyStream>,
    /// Content-Encoding for decompression
    encoding: ContentEncoding,
}
impl std::fmt::Debug for HttpResponse {
    // Manual Debug: the body stream cannot derive Debug, so it is shown as
    // a "<stream>" placeholder while the other fields print normally.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut dbg = f.debug_struct("HttpResponse");
        dbg.field("status", &self.status);
        dbg.field("status_reason", &self.status_reason);
        dbg.field("headers", &self.headers);
        dbg.field("content_length", &self.content_length);
        dbg.field("url", &self.url);
        dbg.field("remote_addr", &self.remote_addr);
        dbg.field("version", &self.version);
        dbg.field("body_stream", &"<stream>");
        dbg.field("encoding", &self.encoding);
        dbg.finish()
    }
}
impl HttpResponse {
    /// Create a new HttpResponse with an unconsumed body stream
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        status: u16,
        status_reason: Option<String>,
        headers: HashMap<String, String>,
        request_headers: HashMap<String, String>,
        content_length: Option<u64>,
        url: String,
        remote_addr: Option<String>,
        version: Option<String>,
        body_stream: BodyStream,
        encoding: ContentEncoding,
    ) -> Self {
        Self {
            status,
            status_reason,
            headers,
            request_headers,
            content_length,
            url,
            remote_addr,
            version,
            body_stream: Some(body_stream),
            encoding,
        }
    }

    /// Consume the body and return it as bytes (loads entire body into memory).
    /// Also decompresses the body if Content-Encoding is set.
    ///
    /// # Errors
    /// - `RequestError` if the body was already consumed
    /// - `BodyReadError` if reading/decompressing the stream fails
    pub async fn bytes(mut self) -> Result<(Vec<u8>, BodyStats)> {
        let stream = self.body_stream.take().ok_or_else(|| {
            Error::RequestError("Response body has already been consumed".to_string())
        })?;

        let buf_reader = BufReader::new(stream);
        let mut decoder = streaming_decoder(buf_reader, self.encoding);

        let mut decompressed = Vec::new();
        let mut bytes_read = 0u64;

        // Read through the decoder in chunks to track compressed size
        let mut buf = [0u8; 8192];
        loop {
            match decoder.read(&mut buf).await {
                Ok(0) => break,
                Ok(n) => {
                    decompressed.extend_from_slice(&buf[..n]);
                    // NOTE(review): this counts post-decompression bytes, not
                    // wire bytes; the stats below acknowledge the approximation.
                    bytes_read += n as u64;
                }
                Err(e) => {
                    return Err(Error::BodyReadError(e.to_string()));
                }
            }
        }

        let stats = BodyStats {
            // For now, we can't easily track compressed size when streaming through decoder
            // Use content_length as an approximation, or decompressed size if identity encoding
            size_compressed: self.content_length.unwrap_or(bytes_read),
            size_decompressed: decompressed.len() as u64,
        };

        Ok((decompressed, stats))
    }

    /// Consume the body and return it as a UTF-8 string.
    ///
    /// # Errors
    /// Returns `RequestError` if the decompressed body is not valid UTF-8.
    pub async fn text(self) -> Result<(String, BodyStats)> {
        let (bytes, stats) = self.bytes().await?;
        let text = String::from_utf8(bytes)
            .map_err(|e| Error::RequestError(format!("Response is not valid UTF-8: {}", e)))?;
        Ok((text, stats))
    }

    /// Take the body stream for manual consumption.
    /// Returns an AsyncRead that decompresses on-the-fly if Content-Encoding is set.
    /// The caller is responsible for reading and processing the stream.
    pub fn into_body_stream(&mut self) -> Result<Box<dyn AsyncRead + Unpin + Send>> {
        let stream = self.body_stream.take().ok_or_else(|| {
            Error::RequestError("Response body has already been consumed".to_string())
        })?;
        let buf_reader = BufReader::new(stream);
        let decoder = streaming_decoder(buf_reader, self.encoding);
        Ok(decoder)
    }

    /// Discard the body without reading it (useful for redirects).
    /// The raw bytes are still pulled off the stream (and seen by any
    /// tracking wrapper); they are just not stored or decompressed.
    pub async fn drain(mut self) -> Result<()> {
        let stream = self.body_stream.take().ok_or_else(|| {
            Error::RequestError("Response body has already been consumed".to_string())
        })?;

        // Just read and discard all bytes
        let mut reader = stream;
        let mut buf = [0u8; 8192];
        loop {
            match reader.read(&mut buf).await {
                Ok(0) => break,
                Ok(_) => continue,
                Err(e) => {
                    return Err(Error::RequestError(format!(
                        "Failed to drain response body: {}",
                        e
                    )));
                }
            }
        }
        Ok(())
    }
}
/// Trait for sending HTTP requests
#[async_trait]
pub trait HttpSender: Send + Sync {
    /// Send an HTTP request and return the response with headers.
    /// The body is not consumed until you call bytes(), text(), write_to_file(), or drain().
    /// Events are sent through the provided channel; implementations should
    /// ignore send failures (the receiver may have been dropped).
    async fn send(
        &self,
        request: SendableHttpRequest,
        event_tx: mpsc::UnboundedSender<HttpResponseEvent>,
    ) -> Result<HttpResponse>;
}
/// Reqwest-based implementation of HttpSender
pub struct ReqwestSender {
    // The reqwest client used for all requests sent through this sender
    client: Client,
}

impl ReqwestSender {
    /// Create a new ReqwestSender with a default client
    ///
    /// # Errors
    /// Returns `Error::Client` if the reqwest client fails to build.
    pub fn new() -> Result<Self> {
        let client = Client::builder().build().map_err(Error::Client)?;
        Ok(Self { client })
    }

    /// Create a new ReqwestSender with a custom client
    pub fn with_client(client: Client) -> Self {
        Self { client }
    }
}
#[async_trait]
impl HttpSender for ReqwestSender {
async fn send(
&self,
request: SendableHttpRequest,
event_tx: mpsc::UnboundedSender<HttpResponseEvent>,
) -> Result<HttpResponse> {
// Helper to send events (ignores errors if receiver is dropped)
let send_event = |event: HttpResponseEvent| {
let _ = event_tx.send(event);
};
// Parse the HTTP method
let method = Method::from_bytes(request.method.as_bytes())
.map_err(|e| Error::RequestError(format!("Invalid HTTP method: {}", e)))?;
// Build the request
let mut req_builder = self.client.request(method, &request.url);
// Add headers
for header in request.headers {
req_builder = req_builder.header(&header.0, &header.1);
}
// Configure timeout
if let Some(d) = request.options.timeout
&& !d.is_zero()
{
req_builder = req_builder.timeout(d);
}
// Add body
match request.body {
None => {}
Some(SendableBody::Bytes(bytes)) => {
req_builder = req_builder.body(bytes);
}
Some(SendableBody::Stream(stream)) => {
// Convert AsyncRead stream to reqwest Body
let stream = tokio_util::io::ReaderStream::new(stream);
let body = reqwest::Body::wrap_stream(stream);
req_builder = req_builder.body(body);
}
}
// Send the request
let sendable_req = req_builder.build()?;
send_event(HttpResponseEvent::Setting(
"timeout".to_string(),
if request.options.timeout.unwrap_or_default().is_zero() {
"Infinity".to_string()
} else {
format!("{:?}", request.options.timeout)
},
));
send_event(HttpResponseEvent::SendUrl {
path: sendable_req.url().path().to_string(),
method: sendable_req.method().to_string(),
});
let mut request_headers = HashMap::new();
for (name, value) in sendable_req.headers() {
let v = value.to_str().unwrap_or_default().to_string();
request_headers.insert(name.to_string(), v.clone());
send_event(HttpResponseEvent::HeaderUp(name.to_string(), v));
}
send_event(HttpResponseEvent::Info("Sending request to server".to_string()));
// Map some errors to our own, so they look nicer
let response = self.client.execute(sendable_req).await.map_err(|e| {
if reqwest::Error::is_timeout(&e) {
Error::RequestTimeout(
request.options.timeout.unwrap_or(Duration::from_secs(0)).clone(),
)
} else {
Error::Client(e)
}
})?;
let status = response.status().as_u16();
let status_reason = response.status().canonical_reason().map(|s| s.to_string());
let url = response.url().to_string();
let remote_addr = response.remote_addr().map(|a| a.to_string());
let version = Some(version_to_str(&response.version()));
let content_length = response.content_length();
send_event(HttpResponseEvent::ReceiveUrl {
version: response.version(),
status: response.status().to_string(),
});
// Extract headers
let mut headers = HashMap::new();
for (key, value) in response.headers() {
if let Ok(v) = value.to_str() {
send_event(HttpResponseEvent::HeaderDown(key.to_string(), v.to_string()));
headers.insert(key.to_string(), v.to_string());
}
}
// Determine content encoding for decompression
// HTTP headers are case-insensitive, so we need to search for any casing
let encoding = ContentEncoding::from_header(
headers
.iter()
.find(|(k, _)| k.eq_ignore_ascii_case("content-encoding"))
.map(|(_, v)| v.as_str()),
);
// Get the byte stream instead of loading into memory
let byte_stream = response.bytes_stream();
// Convert the stream to an AsyncRead
let stream_reader = StreamReader::new(
byte_stream.map(|result| result.map_err(|e| std::io::Error::other(e))),
);
// Wrap the stream with tracking to emit chunk received events via the same channel
let tracking_reader = TrackingRead::new(stream_reader, event_tx);
let body_stream: BodyStream = Box::pin(tracking_reader);
Ok(HttpResponse::new(
status,
status_reason,
headers,
request_headers,
content_length,
url,
remote_addr,
version,
body_stream,
encoding,
))
}
}
/// Human-readable name for an HTTP protocol version.
fn version_to_str(version: &Version) -> String {
    let name = match *version {
        Version::HTTP_09 => "HTTP/0.9",
        Version::HTTP_10 => "HTTP/1.0",
        Version::HTTP_11 => "HTTP/1.1",
        Version::HTTP_2 => "HTTP/2",
        Version::HTTP_3 => "HTTP/3",
        // Version is non-exhaustive, so a fallback arm is required
        _ => "unknown",
    };
    name.to_string()
}

View File

@@ -0,0 +1,391 @@
use crate::error::Result;
use crate::sender::{HttpResponse, HttpResponseEvent, HttpSender, RedirectBehavior};
use crate::types::SendableHttpRequest;
use tokio::sync::mpsc;
use tokio::sync::watch::Receiver;
/// HTTP Transaction that manages the lifecycle of a request, including redirect handling
pub struct HttpTransaction<S: HttpSender> {
    // Underlying sender used for each hop of the transaction
    sender: S,
    // Maximum number of redirects followed before erroring out
    max_redirects: usize,
}
impl<S: HttpSender> HttpTransaction<S> {
    /// Create a new transaction with default settings (10 redirects max)
    pub fn new(sender: S) -> Self {
        Self { sender, max_redirects: 10 }
    }

    /// Create a new transaction with custom max redirects
    pub fn with_max_redirects(sender: S, max_redirects: usize) -> Self {
        Self { sender, max_redirects }
    }

    /// Execute the request with cancellation support.
    /// Returns an HttpResponse with unconsumed body - caller decides how to consume it.
    /// Events are sent through the provided channel.
    ///
    /// Cancellation is checked both before each hop and concurrently with
    /// the in-flight send via `tokio::select!`.
    pub async fn execute_with_cancellation(
        &self,
        request: SendableHttpRequest,
        mut cancelled_rx: Receiver<bool>,
        event_tx: mpsc::UnboundedSender<HttpResponseEvent>,
    ) -> Result<HttpResponse> {
        let mut redirect_count = 0;
        // Mutable per-hop state, updated when following redirects
        let mut current_url = request.url;
        let mut current_method = request.method;
        let mut current_headers = request.headers;
        let mut current_body = request.body;

        // Helper to send events (ignores errors if receiver is dropped)
        let send_event = |event: HttpResponseEvent| {
            let _ = event_tx.send(event);
        };

        loop {
            // Check for cancellation before each request
            if *cancelled_rx.borrow() {
                return Err(crate::error::Error::RequestCanceledError);
            }

            // Build request for this iteration
            let req = SendableHttpRequest {
                url: current_url.clone(),
                method: current_method.clone(),
                headers: current_headers.clone(),
                body: current_body,
                options: request.options.clone(),
            };

            // Send the request
            send_event(HttpResponseEvent::Setting(
                "redirects".to_string(),
                request.options.follow_redirects.to_string(),
            ));

            // Execute with cancellation support: whichever completes first wins
            let response = tokio::select! {
                result = self.sender.send(req, event_tx.clone()) => result?,
                _ = cancelled_rx.changed() => {
                    return Err(crate::error::Error::RequestCanceledError);
                }
            };

            if !Self::is_redirect(response.status) {
                // Not a redirect - return the response for caller to consume body
                return Ok(response);
            }

            if !request.options.follow_redirects {
                // Redirects disabled - return the redirect response as-is
                return Ok(response);
            }

            // Check if we've exceeded max redirects
            if redirect_count >= self.max_redirects {
                // Drain the response before returning error
                let _ = response.drain().await;
                return Err(crate::error::Error::RequestError(format!(
                    "Maximum redirect limit ({}) exceeded",
                    self.max_redirects
                )));
            }

            // Extract Location header before draining (headers are available immediately)
            // HTTP headers are case-insensitive, so we need to search for any casing
            let location = response
                .headers
                .iter()
                .find(|(k, _)| k.eq_ignore_ascii_case("location"))
                .map(|(_, v)| v.clone())
                .ok_or_else(|| {
                    crate::error::Error::RequestError(
                        "Redirect response missing Location header".to_string(),
                    )
                })?;

            // Also get status before draining
            let status = response.status;

            send_event(HttpResponseEvent::Info("Ignoring the response body".to_string()));

            // Drain the redirect response body before following
            response.drain().await?;

            // Update the request URL
            current_url = if location.starts_with("http://") || location.starts_with("https://") {
                // Absolute URL
                location
            } else if location.starts_with('/') {
                // Absolute path - need to extract base URL from current request
                let base_url = Self::extract_base_url(&current_url)?;
                format!("{}{}", base_url, location)
            } else {
                // Relative path - need to resolve relative to current path
                let base_path = Self::extract_base_path(&current_url)?;
                format!("{}/{}", base_path, location)
            };

            // Determine redirect behavior based on status code and method
            let behavior = if status == 303 {
                // 303 See Other always changes to GET
                RedirectBehavior::DropBody
            } else if (status == 301 || status == 302) && current_method == "POST" {
                // For 301/302, change POST to GET (common browser behavior)
                RedirectBehavior::DropBody
            } else {
                // For 307 and 308, the method and body are preserved
                // Also for 301/302 with non-POST methods
                RedirectBehavior::Preserve
            };

            send_event(HttpResponseEvent::Redirect {
                url: current_url.clone(),
                status,
                behavior: behavior.clone(),
            });

            // Handle method changes for certain redirect codes
            if matches!(behavior, RedirectBehavior::DropBody) {
                if current_method != "GET" {
                    current_method = "GET".to_string();
                }
                // Remove content-related headers
                current_headers.retain(|h| {
                    let name_lower = h.0.to_lowercase();
                    !name_lower.starts_with("content-") && name_lower != "transfer-encoding"
                });
            }

            // Reset body for next iteration (since it was moved in the send call)
            // For redirects that change method to GET or for all redirects since body was consumed
            // NOTE(review): this means 307/308 "Preserve" redirects also re-send
            // WITHOUT a body — the original stream cannot be replayed. Confirm
            // this limitation is acceptable or buffer bodies for replay.
            current_body = None;

            redirect_count += 1;
        }
    }

    /// Check if a status code indicates a redirect
    fn is_redirect(status: u16) -> bool {
        matches!(status, 301 | 302 | 303 | 307 | 308)
    }

    /// Extract the base URL (scheme + host) from a full URL.
    /// Errors on strings without a "://" scheme separator.
    fn extract_base_url(url: &str) -> Result<String> {
        // Find the position after "://"
        let scheme_end = url.find("://").ok_or_else(|| {
            crate::error::Error::RequestError(format!("Invalid URL format: {}", url))
        })?;

        // Find the first '/' after the scheme
        let path_start = url[scheme_end + 3..].find('/');

        if let Some(idx) = path_start {
            Ok(url[..scheme_end + 3 + idx].to_string())
        } else {
            // No path, return entire URL
            Ok(url.to_string())
        }
    }

    /// Extract the base path (everything except the last segment) from a URL
    fn extract_base_path(url: &str) -> Result<String> {
        if let Some(last_slash) = url.rfind('/') {
            // Don't include the trailing slash if it's part of the host
            // (e.g. "https://" or a "scheme:" prefix)
            if url[..last_slash].ends_with("://") || url[..last_slash].ends_with(':') {
                Ok(url.to_string())
            } else {
                Ok(url[..last_slash].to_string())
            }
        } else {
            Ok(url.to_string())
        }
    }
}
// Unit tests for HttpTransaction using a scripted MockSender that replays a
// fixed queue of canned responses.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::decompress::ContentEncoding;
    use crate::sender::{HttpResponseEvent, HttpSender};
    use async_trait::async_trait;
    use std::collections::HashMap;
    use std::pin::Pin;
    use std::sync::Arc;
    use tokio::io::AsyncRead;
    use tokio::sync::Mutex;

    /// Mock sender for testing
    struct MockSender {
        // Queue of responses, popped from the front on each send()
        responses: Arc<Mutex<Vec<MockResponse>>>,
    }

    // A canned response: status, headers, and an in-memory body
    struct MockResponse {
        status: u16,
        headers: HashMap<String, String>,
        body: Vec<u8>,
    }

    impl MockSender {
        fn new(responses: Vec<MockResponse>) -> Self {
            Self { responses: Arc::new(Mutex::new(responses)) }
        }
    }

    #[async_trait]
    impl HttpSender for MockSender {
        // Pop the next canned response; errors when the queue is exhausted
        async fn send(
            &self,
            _request: SendableHttpRequest,
            _event_tx: mpsc::UnboundedSender<HttpResponseEvent>,
        ) -> Result<HttpResponse> {
            let mut responses = self.responses.lock().await;
            if responses.is_empty() {
                Err(crate::error::Error::RequestError("No more mock responses".to_string()))
            } else {
                let mock = responses.remove(0);
                // Create a simple in-memory stream from the body
                let body_stream: Pin<Box<dyn AsyncRead + Send>> =
                    Box::pin(std::io::Cursor::new(mock.body));
                Ok(HttpResponse::new(
                    mock.status,
                    None, // status_reason
                    mock.headers,
                    HashMap::new(),
                    None, // content_length
                    "https://example.com".to_string(), // url
                    None, // remote_addr
                    Some("HTTP/1.1".to_string()), // version
                    body_stream,
                    ContentEncoding::Identity,
                ))
            }
        }
    }

    // A 200 response is returned as-is with its body intact
    #[tokio::test]
    async fn test_transaction_no_redirect() {
        let response = MockResponse { status: 200, headers: HashMap::new(), body: b"OK".to_vec() };
        let sender = MockSender::new(vec![response]);
        let transaction = HttpTransaction::new(sender);

        let request = SendableHttpRequest {
            url: "https://example.com".to_string(),
            method: "GET".to_string(),
            headers: vec![],
            ..Default::default()
        };

        let (_tx, rx) = tokio::sync::watch::channel(false);
        let (event_tx, _event_rx) = mpsc::unbounded_channel();
        let result = transaction.execute_with_cancellation(request, rx, event_tx).await.unwrap();
        assert_eq!(result.status, 200);

        // Consume the body to verify it
        let (body, _) = result.bytes().await.unwrap();
        assert_eq!(body, b"OK");
    }

    // A 302 with a Location header is followed to the final 200
    #[tokio::test]
    async fn test_transaction_single_redirect() {
        let mut redirect_headers = HashMap::new();
        redirect_headers.insert("Location".to_string(), "https://example.com/new".to_string());

        let responses = vec![
            MockResponse { status: 302, headers: redirect_headers, body: vec![] },
            MockResponse { status: 200, headers: HashMap::new(), body: b"Final".to_vec() },
        ];

        let sender = MockSender::new(responses);
        let transaction = HttpTransaction::new(sender);

        let request = SendableHttpRequest {
            url: "https://example.com/old".to_string(),
            method: "GET".to_string(),
            options: crate::types::SendableHttpRequestOptions {
                follow_redirects: true,
                ..Default::default()
            },
            ..Default::default()
        };

        let (_tx, rx) = tokio::sync::watch::channel(false);
        let (event_tx, _event_rx) = mpsc::unbounded_channel();
        let result = transaction.execute_with_cancellation(request, rx, event_tx).await.unwrap();
        assert_eq!(result.status, 200);

        let (body, _) = result.bytes().await.unwrap();
        assert_eq!(body, b"Final");
    }

    // A redirect loop longer than max_redirects produces a RequestError
    #[tokio::test]
    async fn test_transaction_max_redirects_exceeded() {
        let mut redirect_headers = HashMap::new();
        redirect_headers.insert("Location".to_string(), "https://example.com/loop".to_string());

        // Create more redirects than allowed
        let responses: Vec<MockResponse> = (0..12)
            .map(|_| MockResponse { status: 302, headers: redirect_headers.clone(), body: vec![] })
            .collect();

        let sender = MockSender::new(responses);
        let transaction = HttpTransaction::with_max_redirects(sender, 10);

        let request = SendableHttpRequest {
            url: "https://example.com/start".to_string(),
            method: "GET".to_string(),
            options: crate::types::SendableHttpRequestOptions {
                follow_redirects: true,
                ..Default::default()
            },
            ..Default::default()
        };

        let (_tx, rx) = tokio::sync::watch::channel(false);
        let (event_tx, _event_rx) = mpsc::unbounded_channel();
        let result = transaction.execute_with_cancellation(request, rx, event_tx).await;

        if let Err(crate::error::Error::RequestError(msg)) = result {
            assert!(msg.contains("Maximum redirect limit"));
        } else {
            panic!("Expected RequestError with max redirect message. Got {result:?}");
        }
    }

    #[test]
    fn test_is_redirect() {
        assert!(HttpTransaction::<MockSender>::is_redirect(301));
        assert!(HttpTransaction::<MockSender>::is_redirect(302));
        assert!(HttpTransaction::<MockSender>::is_redirect(303));
        assert!(HttpTransaction::<MockSender>::is_redirect(307));
        assert!(HttpTransaction::<MockSender>::is_redirect(308));
        assert!(!HttpTransaction::<MockSender>::is_redirect(200));
        assert!(!HttpTransaction::<MockSender>::is_redirect(404));
        assert!(!HttpTransaction::<MockSender>::is_redirect(500));
    }

    #[test]
    fn test_extract_base_url() {
        let result =
            HttpTransaction::<MockSender>::extract_base_url("https://example.com/path/to/resource");
        assert_eq!(result.unwrap(), "https://example.com");

        let result = HttpTransaction::<MockSender>::extract_base_url("http://localhost:8080/api");
        assert_eq!(result.unwrap(), "http://localhost:8080");

        let result = HttpTransaction::<MockSender>::extract_base_url("invalid-url");
        assert!(result.is_err());
    }

    #[test]
    fn test_extract_base_path() {
        let result = HttpTransaction::<MockSender>::extract_base_path(
            "https://example.com/path/to/resource",
        );
        assert_eq!(result.unwrap(), "https://example.com/path/to");

        let result = HttpTransaction::<MockSender>::extract_base_path("https://example.com/single");
        assert_eq!(result.unwrap(), "https://example.com");

        let result = HttpTransaction::<MockSender>::extract_base_path("https://example.com/");
        assert_eq!(result.unwrap(), "https://example.com");
    }
}

View File

@@ -0,0 +1,975 @@
use crate::chained_reader::{ChainedReader, ReaderType};
use crate::error::Error::RequestError;
use crate::error::Result;
use crate::path_placeholders::apply_path_placeholders;
use crate::proto::ensure_proto;
use bytes::Bytes;
use log::warn;
use std::collections::BTreeMap;
use std::pin::Pin;
use std::time::Duration;
use tokio::io::AsyncRead;
use yaak_common::serde::{get_bool, get_str, get_str_map};
use yaak_models::models::HttpRequest;
pub(crate) const MULTIPART_BOUNDARY: &str = "------YaakFormBoundary";
/// A request body in wire-ready form: either fully buffered or streamed lazily.
pub enum SendableBody {
    /// Entire body held in memory.
    Bytes(Bytes),
    /// Body read on demand from an async source (e.g. file uploads, multipart payloads).
    Stream(Pin<Box<dyn AsyncRead + Send + 'static>>),
}
/// Internal body representation that also carries the known stream length, so
/// `build_body` can emit a Content-Length header before the metadata is discarded.
enum SendableBodyWithMeta {
    /// Buffered bytes; the length is implicit in the buffer.
    Bytes(Bytes),
    Stream {
        /// The async data source to stream.
        data: Pin<Box<dyn AsyncRead + Send + 'static>>,
        /// Total length in bytes when known up front (e.g. from file metadata).
        content_length: Option<usize>,
    },
}
impl From<SendableBodyWithMeta> for SendableBody {
    /// Strip the content-length metadata, keeping only the raw payload.
    fn from(value: SendableBodyWithMeta) -> Self {
        match value {
            SendableBodyWithMeta::Bytes(bytes) => Self::Bytes(bytes),
            SendableBodyWithMeta::Stream { data, .. } => Self::Stream(data),
        }
    }
}
/// A fully-resolved HTTP request, ready to be handed to a sender.
#[derive(Default)]
pub struct SendableHttpRequest {
    /// Absolute URL including query string and optional fragment.
    pub url: String,
    /// HTTP method; normalized to uppercase by [`Self::from_http_request`].
    pub method: String,
    /// Ordered header name/value pairs (duplicates allowed).
    pub headers: Vec<(String, String)>,
    /// Request body, if any.
    pub body: Option<SendableBody>,
    /// Transport-level options (timeout, redirect behavior).
    pub options: SendableHttpRequestOptions,
}
/// Options controlling how a [`SendableHttpRequest`] is sent.
#[derive(Default, Clone)]
pub struct SendableHttpRequestOptions {
    /// Optional request timeout; `None` means no explicit timeout is set here.
    pub timeout: Option<Duration>,
    /// Whether 3xx redirects should be followed automatically.
    pub follow_redirects: bool,
}
impl SendableHttpRequest {
    /// Build a sendable request from a stored [`HttpRequest`] model.
    ///
    /// Resolves the URL (path placeholders + query parameters), uppercases the
    /// method, and constructs the body/header pair for the request's body type.
    ///
    /// # Errors
    /// Returns an error if the body cannot be built (e.g. a missing file for a
    /// binary or multipart body).
    pub async fn from_http_request(
        r: &HttpRequest,
        options: SendableHttpRequestOptions,
    ) -> Result<Self> {
        let initial_headers = build_headers(r);
        let (body, headers) = build_body(&r.method, &r.body_type, &r.body, initial_headers).await?;
        Ok(Self {
            url: build_url(r),
            method: r.method.to_uppercase(),
            headers,
            body: body.into(),
            options,
        })
    }
    /// Insert a header, replacing the value of an existing header with the same
    /// (case-insensitive) name instead of appending a duplicate.
    pub fn insert_header(&mut self, header: (String, String)) {
        // eq_ignore_ascii_case avoids allocating two lowercased Strings per
        // comparison; header field names are ASCII per RFC 9110.
        if let Some(existing) =
            self.headers.iter_mut().find(|h| h.0.eq_ignore_ascii_case(&header.0))
        {
            existing.1 = header.1;
        } else {
            self.headers.push(header);
        }
    }
}
/// Append `params` to `url` as percent-encoded query parameters, preserving any
/// existing query string and keeping the fragment (everything from the first
/// `#`) at the end, untouched.
///
/// Returns `url` unchanged when `params` is empty.
pub fn append_query_params(url: &str, params: Vec<(String, String)>) -> String {
    // Early return before doing any other work (the original allocated an
    // intermediate String even when there was nothing to append).
    if params.is_empty() {
        return url.to_string();
    }
    // Percent-encode each name=value pair and join with '&'.
    let query_string = params
        .iter()
        .map(|(name, value)| {
            format!("{}={}", urlencoding::encode(name), urlencoding::encode(value))
        })
        .collect::<Vec<_>>()
        .join("&");
    // Split off the fragment first: fragments may legally contain '?', which
    // must not be mistaken for a query string.
    let (base_and_query, fragment) = match url.find('#') {
        Some(hash_pos) => (&url[..hash_pos], Some(&url[hash_pos..])),
        None => (url, None),
    };
    // Append to an existing query, fill in a trailing bare '?', or start a new query.
    let mut result = match base_and_query.split_once('?') {
        Some((_, existing)) if !existing.trim().is_empty() => {
            format!("{}&{}", base_and_query, query_string)
        }
        Some(_) => format!("{}{}", base_and_query, query_string),
        None => format!("{}?{}", base_and_query, query_string),
    };
    // Re-attach the fragment verbatim.
    if let Some(fragment) = fragment {
        result.push_str(fragment);
    }
    result
}
/// Construct the final request URL: protocol is ensured, :placeholder path
/// segments are substituted, and remaining enabled parameters become query params.
fn build_url(r: &HttpRequest) -> String {
    let (url_string, params) = apply_path_placeholders(&ensure_proto(&r.url), &r.url_parameters);
    // Only enabled parameters with a non-empty name are appended as a query.
    let query_params: Vec<(String, String)> = params
        .iter()
        .filter(|p| p.enabled && !p.name.is_empty())
        .map(|p| (p.name.clone(), p.value.clone()))
        .collect();
    append_query_params(&url_string, query_params)
}
/// Collect the request's enabled, non-empty-name headers as (name, value) pairs.
fn build_headers(r: &HttpRequest) -> Vec<(String, String)> {
    let mut headers = Vec::with_capacity(r.headers.len());
    for h in &r.headers {
        // Disabled or unnamed rows in the header editor are skipped.
        if h.enabled && !h.name.is_empty() {
            headers.push((h.name.clone(), h.value.clone()));
        }
    }
    headers
}
/// Build the request body for the given body type and adjust headers to match:
/// sets/overrides Content-Type when the body type implies one, and adds
/// Content-Length unless Transfer-Encoding: chunked is in effect.
///
/// Returns `(None, headers)` untouched when no body type is set.
///
/// # Errors
/// Propagates body-construction failures (e.g. missing files).
async fn build_body(
    method: &str,
    body_type: &Option<String>,
    body: &BTreeMap<String, serde_json::Value>,
    headers: Vec<(String, String)>,
) -> Result<(Option<SendableBody>, Vec<(String, String)>)> {
    let body_type = match body_type {
        None => return Ok((None, headers)),
        Some(t) => t,
    };
    // `body` and `method` are already references; passing them directly avoids
    // the needless `&&T` borrows the original had.
    let (body, content_type) = match body_type.as_str() {
        "binary" => (build_binary_body(body).await?, None),
        "graphql" => (build_graphql_body(method, body), Some("application/json".to_string())),
        "application/x-www-form-urlencoded" => {
            (build_form_body(body), Some("application/x-www-form-urlencoded".to_string()))
        }
        "multipart/form-data" => build_multipart_body(body, &headers).await?,
        _ if body.contains_key("text") => (build_text_body(body), None),
        t => {
            warn!("Unsupported body type: {}", t);
            (None, None)
        }
    };
    // Add or update the Content-Type header (case-insensitive match, no allocation)
    let mut headers = headers;
    if let Some(ct) = content_type {
        if let Some(existing) = headers.iter_mut().find(|h| h.0.eq_ignore_ascii_case("content-type"))
        {
            existing.1 = ct;
        } else {
            headers.push(("Content-Type".to_string(), ct));
        }
    }
    // Content-Length must not be sent alongside Transfer-Encoding: chunked (RFC 9112)
    let has_chunked_encoding = headers.iter().any(|h| {
        h.0.eq_ignore_ascii_case("transfer-encoding") && h.1.to_lowercase().contains("chunked")
    });
    if !has_chunked_encoding {
        let content_length = match body {
            Some(SendableBodyWithMeta::Bytes(ref bytes)) => Some(bytes.len()),
            Some(SendableBodyWithMeta::Stream { content_length, .. }) => content_length,
            None => None,
        };
        if let Some(cl) = content_length {
            headers.push(("Content-Length".to_string(), cl.to_string()));
        }
    }
    Ok((body.map(|b| b.into()), headers))
}
/// Build a urlencoded form body from the "form" array, skipping disabled or
/// unnamed parameters. Returns `None` when nothing would be sent.
fn build_form_body(body: &BTreeMap<String, serde_json::Value>) -> Option<SendableBodyWithMeta> {
    let form_params = body.get("form")?.as_array()?;
    // Percent-encode each enabled name/value pair and join with '&'.
    let encoded = form_params
        .iter()
        .filter(|p| get_bool(p, "enabled", true) && !get_str(p, "name").is_empty())
        .map(|p| {
            format!(
                "{}={}",
                urlencoding::encode(&get_str(p, "name")),
                urlencoding::encode(&get_str(p, "value"))
            )
        })
        .collect::<Vec<_>>()
        .join("&");
    if encoded.is_empty() { None } else { Some(SendableBodyWithMeta::Bytes(Bytes::from(encoded))) }
}
/// Build a streaming body from the configured "filePath", with the file size as
/// the known content length. Returns `Ok(None)` when no path is configured.
///
/// # Errors
/// Returns a `RequestError` when the file cannot be opened or stat'ed.
async fn build_binary_body(
    body: &BTreeMap<String, serde_json::Value>,
) -> Result<Option<SendableBodyWithMeta>> {
    let file_path = match body.get("filePath").map(|f| f.as_str()) {
        Some(Some(f)) => f,
        _ => return Ok(None),
    };
    // Open first, then stat the open handle. Querying metadata through the
    // handle (rather than the path) avoids a race where the file is replaced
    // between the stat and the open, which could yield a stale Content-Length.
    let file = tokio::fs::File::open(file_path)
        .await
        .map_err(|e| RequestError(format!("Failed to open file: {}", e)))?;
    let content_length = file
        .metadata()
        .await
        .map_err(|e| RequestError(format!("Failed to get file metadata: {}", e)))?
        .len();
    Ok(Some(SendableBodyWithMeta::Stream {
        data: Box::pin(file),
        content_length: Some(content_length as usize),
    }))
}
/// Build a buffered body from the "text" field. Empty text means "no body"
/// rather than a zero-length body.
fn build_text_body(body: &BTreeMap<String, serde_json::Value>) -> Option<SendableBodyWithMeta> {
    let text = get_str_map(body, "text");
    if text.is_empty() {
        return None;
    }
    Some(SendableBodyWithMeta::Bytes(Bytes::from(text.to_string())))
}
/// Build a GraphQL JSON body (`{"query":...}` with optional `"variables"`).
///
/// Returns `None` for GET requests, which carry the operation in query
/// parameters instead of a body.
fn build_graphql_body(
    method: &str,
    body: &BTreeMap<String, serde_json::Value>,
) -> Option<SendableBodyWithMeta> {
    // Check the method first: no point extracting query/variables on the GET
    // path, and eq_ignore_ascii_case avoids the to_lowercase() allocation.
    if method.eq_ignore_ascii_case("get") {
        return None;
    }
    let query = get_str_map(body, "query");
    let variables = get_str_map(body, "variables");
    // serde_json::to_string turns the query into a valid JSON string literal.
    // NOTE(review): `variables` is spliced in verbatim and is assumed to already
    // be valid JSON — invalid input would yield a malformed body. Confirm upstream.
    let body = if variables.trim().is_empty() {
        format!(r#"{{"query":{}}}"#, serde_json::to_string(&query).unwrap_or_default())
    } else {
        format!(
            r#"{{"query":{},"variables":{}}}"#,
            serde_json::to_string(&query).unwrap_or_default(),
            variables
        )
    };
    Some(SendableBodyWithMeta::Bytes(Bytes::from(body)))
}
/// Build a streaming multipart/form-data body from the "form" array.
///
/// Returns the body (a chained reader over boundary/header bytes and file
/// contents) plus the full Content-Type value including the boundary, or
/// `(None, None)` when there are no enabled parts. The total size is summed
/// in lockstep with each pushed reader so the Content-Length is exact.
///
/// # Errors
/// Returns a `RequestError` when a referenced file is missing or cannot be stat'ed.
async fn build_multipart_body(
    body: &BTreeMap<String, serde_json::Value>,
    headers: &Vec<(String, String)>,
) -> Result<(Option<SendableBodyWithMeta>, Option<String>)> {
    // Honor a user-supplied boundary from an existing Content-Type header, if any.
    let boundary = extract_boundary_from_headers(headers);
    let form_params = match body.get("form").map(|f| f.as_array()) {
        Some(Some(f)) => f,
        _ => return Ok((None, None)),
    };
    // Build a list of readers for streaming and calculate total content length
    let mut readers: Vec<ReaderType> = Vec::new();
    let mut has_content = false;
    let mut total_size: usize = 0;
    for p in form_params {
        let enabled = get_bool(p, "enabled", true);
        let name = get_str(p, "name");
        // Disabled or unnamed rows are skipped entirely (no boundary emitted).
        if !enabled || name.is_empty() {
            continue;
        }
        has_content = true;
        // Add boundary delimiter
        let boundary_bytes = format!("--{}\r\n", boundary).into_bytes();
        total_size += boundary_bytes.len();
        readers.push(ReaderType::Bytes(boundary_bytes));
        let file_path = get_str(p, "file");
        let value = get_str(p, "value");
        let content_type = get_str(p, "contentType");
        if file_path.is_empty() {
            // Text field
            let header =
                format!("Content-Disposition: form-data; name=\"{}\"\r\n\r\n{}", name, value);
            let header_bytes = header.into_bytes();
            total_size += header_bytes.len();
            readers.push(ReaderType::Bytes(header_bytes));
        } else {
            // File field - validate that file exists first
            if !tokio::fs::try_exists(file_path).await.unwrap_or(false) {
                return Err(RequestError(format!("File not found: {}", file_path)));
            }
            // Get file size for content length calculation
            let file_metadata = tokio::fs::metadata(file_path)
                .await
                .map_err(|e| RequestError(format!("Failed to get file metadata: {}", e)))?;
            let file_size = file_metadata.len() as usize;
            // An explicit filename wins; otherwise fall back to the path's basename.
            let filename = get_str(p, "filename");
            let filename = if filename.is_empty() {
                std::path::Path::new(file_path)
                    .file_name()
                    .and_then(|n| n.to_str())
                    .unwrap_or("file")
            } else {
                filename
            };
            // Add content type
            let mime_type = if !content_type.is_empty() {
                content_type.to_string()
            } else {
                // Guess mime type from file extension
                mime_guess::from_path(file_path).first_or_octet_stream().to_string()
            };
            let header = format!(
                "Content-Disposition: form-data; name=\"{}\"; filename=\"{}\"\r\nContent-Type: {}\r\n\r\n",
                name, filename, mime_type
            );
            let header_bytes = header.into_bytes();
            total_size += header_bytes.len();
            total_size += file_size;
            readers.push(ReaderType::Bytes(header_bytes));
            // Add a file path for streaming
            readers.push(ReaderType::FilePath(file_path.to_string()));
        }
        // Every part is terminated by CRLF before the next boundary.
        let line_ending = b"\r\n".to_vec();
        total_size += line_ending.len();
        readers.push(ReaderType::Bytes(line_ending));
    }
    if has_content {
        // Add the final boundary
        let final_boundary = format!("--{}--\r\n", boundary).into_bytes();
        total_size += final_boundary.len();
        readers.push(ReaderType::Bytes(final_boundary));
        let content_type = format!("multipart/form-data; boundary={}", boundary);
        let stream = ChainedReader::new(readers);
        Ok((
            Some(SendableBodyWithMeta::Stream {
                data: Box::pin(stream),
                content_length: Some(total_size),
            }),
            Some(content_type),
        ))
    } else {
        Ok((None, None))
    }
}
/// Extract the multipart boundary from an existing Content-Type header
/// (e.g. "multipart/form-data; boundary=xyz"), falling back to the default
/// [`MULTIPART_BOUNDARY`] when none is present.
fn extract_boundary_from_headers(headers: &Vec<(String, String)>) -> String {
    headers
        .iter()
        .find(|h| h.0.eq_ignore_ascii_case("content-type"))
        .and_then(|h| {
            h.1.split(';')
                .find(|part| part.trim().starts_with("boundary="))
                // split_once keeps '=' characters inside the boundary value
                // itself — RFC 2046 permits '=' in boundaries, and the previous
                // `split('=').nth(1)` silently truncated such values.
                .and_then(|boundary_part| boundary_part.split_once('=').map(|(_, v)| v))
                .map(|b| b.trim().to_string())
        })
        .unwrap_or_else(|| MULTIPART_BOUNDARY.to_string())
}
// Unit tests for URL construction, body builders, boundary extraction, and
// Content-Length / Transfer-Encoding interaction.
#[cfg(test)]
mod tests {
    use super::*;
    use bytes::Bytes;
    use serde_json::json;
    use std::collections::BTreeMap;
    use yaak_models::models::{HttpRequest, HttpUrlParameter};
    // ---- build_url: query parameter handling ----
    #[test]
    fn test_build_url_no_params() {
        let r = HttpRequest {
            url: "https://example.com/api".to_string(),
            url_parameters: vec![],
            ..Default::default()
        };
        let result = build_url(&r);
        assert_eq!(result, "https://example.com/api");
    }
    #[test]
    fn test_build_url_with_params() {
        let r = HttpRequest {
            url: "https://example.com/api".to_string(),
            url_parameters: vec![
                HttpUrlParameter {
                    enabled: true,
                    name: "foo".to_string(),
                    value: "bar".to_string(),
                    id: None,
                },
                HttpUrlParameter {
                    enabled: true,
                    name: "baz".to_string(),
                    value: "qux".to_string(),
                    id: None,
                },
            ],
            ..Default::default()
        };
        let result = build_url(&r);
        assert_eq!(result, "https://example.com/api?foo=bar&baz=qux");
    }
    #[test]
    fn test_build_url_with_disabled_params() {
        let r = HttpRequest {
            url: "https://example.com/api".to_string(),
            url_parameters: vec![
                HttpUrlParameter {
                    enabled: false,
                    name: "disabled".to_string(),
                    value: "value".to_string(),
                    id: None,
                },
                HttpUrlParameter {
                    enabled: true,
                    name: "enabled".to_string(),
                    value: "value".to_string(),
                    id: None,
                },
            ],
            ..Default::default()
        };
        let result = build_url(&r);
        assert_eq!(result, "https://example.com/api?enabled=value");
    }
    #[test]
    fn test_build_url_with_existing_query() {
        let r = HttpRequest {
            url: "https://example.com/api?existing=param".to_string(),
            url_parameters: vec![HttpUrlParameter {
                enabled: true,
                name: "new".to_string(),
                value: "value".to_string(),
                id: None,
            }],
            ..Default::default()
        };
        let result = build_url(&r);
        assert_eq!(result, "https://example.com/api?existing=param&new=value");
    }
    #[test]
    fn test_build_url_with_empty_existing_query() {
        // A trailing bare '?' must not produce "?&new=value".
        let r = HttpRequest {
            url: "https://example.com/api?".to_string(),
            url_parameters: vec![HttpUrlParameter {
                enabled: true,
                name: "new".to_string(),
                value: "value".to_string(),
                id: None,
            }],
            ..Default::default()
        };
        let result = build_url(&r);
        assert_eq!(result, "https://example.com/api?new=value");
    }
    #[test]
    fn test_build_url_with_special_chars() {
        // Names and values are percent-encoded.
        let r = HttpRequest {
            url: "https://example.com/api".to_string(),
            url_parameters: vec![HttpUrlParameter {
                enabled: true,
                name: "special chars!@#".to_string(),
                value: "value with spaces & symbols".to_string(),
                id: None,
            }],
            ..Default::default()
        };
        let result = build_url(&r);
        assert_eq!(
            result,
            "https://example.com/api?special%20chars%21%40%23=value%20with%20spaces%20%26%20symbols"
        );
    }
    #[test]
    fn test_build_url_adds_protocol() {
        let r = HttpRequest {
            url: "example.com/api".to_string(),
            url_parameters: vec![HttpUrlParameter {
                enabled: true,
                name: "foo".to_string(),
                value: "bar".to_string(),
                id: None,
            }],
            ..Default::default()
        };
        let result = build_url(&r);
        // ensure_proto defaults to http:// for regular domains
        assert_eq!(result, "http://example.com/api?foo=bar");
    }
    #[test]
    fn test_build_url_adds_https_for_dev_domain() {
        let r = HttpRequest {
            url: "example.dev/api".to_string(),
            url_parameters: vec![HttpUrlParameter {
                enabled: true,
                name: "foo".to_string(),
                value: "bar".to_string(),
                id: None,
            }],
            ..Default::default()
        };
        let result = build_url(&r);
        // .dev domains force https
        assert_eq!(result, "https://example.dev/api?foo=bar");
    }
    // ---- build_url: fragment handling ----
    #[test]
    fn test_build_url_with_fragment() {
        let r = HttpRequest {
            url: "https://example.com/api#section".to_string(),
            url_parameters: vec![HttpUrlParameter {
                enabled: true,
                name: "foo".to_string(),
                value: "bar".to_string(),
                id: None,
            }],
            ..Default::default()
        };
        let result = build_url(&r);
        assert_eq!(result, "https://example.com/api?foo=bar#section");
    }
    #[test]
    fn test_build_url_with_existing_query_and_fragment() {
        let r = HttpRequest {
            url: "https://yaak.app?foo=bar#some-hash".to_string(),
            url_parameters: vec![HttpUrlParameter {
                enabled: true,
                name: "baz".to_string(),
                value: "qux".to_string(),
                id: None,
            }],
            ..Default::default()
        };
        let result = build_url(&r);
        assert_eq!(result, "https://yaak.app?foo=bar&baz=qux#some-hash");
    }
    #[test]
    fn test_build_url_with_empty_query_and_fragment() {
        let r = HttpRequest {
            url: "https://example.com/api?#section".to_string(),
            url_parameters: vec![HttpUrlParameter {
                enabled: true,
                name: "foo".to_string(),
                value: "bar".to_string(),
                id: None,
            }],
            ..Default::default()
        };
        let result = build_url(&r);
        assert_eq!(result, "https://example.com/api?foo=bar#section");
    }
    #[test]
    fn test_build_url_with_fragment_containing_special_chars() {
        // '?' inside a fragment must not be treated as a query delimiter.
        let r = HttpRequest {
            url: "https://example.com#section/with/slashes?and=fake&query".to_string(),
            url_parameters: vec![HttpUrlParameter {
                enabled: true,
                name: "real".to_string(),
                value: "param".to_string(),
                id: None,
            }],
            ..Default::default()
        };
        let result = build_url(&r);
        assert_eq!(result, "https://example.com?real=param#section/with/slashes?and=fake&query");
    }
    #[test]
    fn test_build_url_preserves_empty_fragment() {
        let r = HttpRequest {
            url: "https://example.com/api#".to_string(),
            url_parameters: vec![HttpUrlParameter {
                enabled: true,
                name: "foo".to_string(),
                value: "bar".to_string(),
                id: None,
            }],
            ..Default::default()
        };
        let result = build_url(&r);
        assert_eq!(result, "https://example.com/api?foo=bar#");
    }
    #[test]
    fn test_build_url_with_multiple_fragments() {
        // Testing edge case where the URL has multiple # characters (though technically invalid)
        let r = HttpRequest {
            url: "https://example.com#section#subsection".to_string(),
            url_parameters: vec![HttpUrlParameter {
                enabled: true,
                name: "foo".to_string(),
                value: "bar".to_string(),
                id: None,
            }],
            ..Default::default()
        };
        let result = build_url(&r);
        // Should treat everything after first # as fragment
        assert_eq!(result, "https://example.com?foo=bar#section#subsection");
    }
    // ---- body builders: text, form, binary, GraphQL ----
    #[tokio::test]
    async fn test_text_body() {
        let mut body = BTreeMap::new();
        body.insert("text".to_string(), json!("Hello, World!"));
        let result = build_text_body(&body);
        match result {
            Some(SendableBodyWithMeta::Bytes(bytes)) => {
                assert_eq!(bytes, Bytes::from("Hello, World!"))
            }
            _ => panic!("Expected Some(SendableBody::Bytes)"),
        }
    }
    #[tokio::test]
    async fn test_text_body_empty() {
        // Empty text yields no body at all.
        let mut body = BTreeMap::new();
        body.insert("text".to_string(), json!(""));
        let result = build_text_body(&body);
        assert!(result.is_none());
    }
    #[tokio::test]
    async fn test_text_body_missing() {
        let body = BTreeMap::new();
        let result = build_text_body(&body);
        assert!(result.is_none());
    }
    #[tokio::test]
    async fn test_form_urlencoded_body() -> Result<()> {
        let mut body = BTreeMap::new();
        body.insert(
            "form".to_string(),
            json!([
                { "enabled": true, "name": "basic", "value": "aaa"},
                { "enabled": true, "name": "fUnkey Stuff!$*#(", "value": "*)%&#$)@ *$#)@&"},
                { "enabled": false, "name": "disabled", "value": "won't show"},
            ]),
        );
        let result = build_form_body(&body);
        match result {
            Some(SendableBodyWithMeta::Bytes(bytes)) => {
                let expected = "basic=aaa&fUnkey%20Stuff%21%24%2A%23%28=%2A%29%25%26%23%24%29%40%20%2A%24%23%29%40%26";
                assert_eq!(bytes, Bytes::from(expected));
            }
            _ => panic!("Expected Some(SendableBody::Bytes)"),
        }
        Ok(())
    }
    #[tokio::test]
    async fn test_form_urlencoded_body_missing_form() {
        let body = BTreeMap::new();
        let result = build_form_body(&body);
        assert!(result.is_none());
    }
    #[tokio::test]
    async fn test_binary_body() -> Result<()> {
        let mut body = BTreeMap::new();
        body.insert("filePath".to_string(), json!("./tests/test.txt"));
        let result = build_binary_body(&body).await?;
        assert!(matches!(result, Some(SendableBodyWithMeta::Stream { .. })));
        Ok(())
    }
    #[tokio::test]
    async fn test_binary_body_file_not_found() {
        let mut body = BTreeMap::new();
        body.insert("filePath".to_string(), json!("./nonexistent/file.txt"));
        let result = build_binary_body(&body).await;
        assert!(result.is_err());
        if let Err(e) = result {
            assert!(matches!(e, RequestError(_)));
        }
    }
    #[tokio::test]
    async fn test_graphql_body_with_variables() {
        let mut body = BTreeMap::new();
        body.insert("query".to_string(), json!("{ user(id: $id) { name } }"));
        body.insert("variables".to_string(), json!(r#"{"id": "123"}"#));
        let result = build_graphql_body("POST", &body);
        match result {
            Some(SendableBodyWithMeta::Bytes(bytes)) => {
                let expected =
                    r#"{"query":"{ user(id: $id) { name } }","variables":{"id": "123"}}"#;
                assert_eq!(bytes, Bytes::from(expected));
            }
            _ => panic!("Expected Some(SendableBody::Bytes)"),
        }
    }
    #[tokio::test]
    async fn test_graphql_body_without_variables() {
        let mut body = BTreeMap::new();
        body.insert("query".to_string(), json!("{ users { name } }"));
        body.insert("variables".to_string(), json!(""));
        let result = build_graphql_body("POST", &body);
        match result {
            Some(SendableBodyWithMeta::Bytes(bytes)) => {
                let expected = r#"{"query":"{ users { name } }"}"#;
                assert_eq!(bytes, Bytes::from(expected));
            }
            _ => panic!("Expected Some(SendableBody::Bytes)"),
        }
    }
    #[tokio::test]
    async fn test_graphql_body_get_method() {
        // GET GraphQL requests carry the operation in query params, not a body.
        let mut body = BTreeMap::new();
        body.insert("query".to_string(), json!("{ users { name } }"));
        let result = build_graphql_body("GET", &body);
        assert!(result.is_none());
    }
    // ---- multipart bodies ----
    #[tokio::test]
    async fn test_multipart_body_text_fields() -> Result<()> {
        let mut body = BTreeMap::new();
        body.insert(
            "form".to_string(),
            json!([
                { "enabled": true, "name": "field1", "value": "value1", "file": "" },
                { "enabled": true, "name": "field2", "value": "value2", "file": "" },
                { "enabled": false, "name": "disabled", "value": "won't show", "file": "" },
            ]),
        );
        let (result, content_type) = build_multipart_body(&body, &vec![]).await?;
        assert!(content_type.is_some());
        match result {
            Some(SendableBodyWithMeta::Stream { data: mut stream, content_length }) => {
                // Read the entire stream to verify content
                let mut buf = Vec::new();
                use tokio::io::AsyncReadExt;
                stream.read_to_end(&mut buf).await.expect("Failed to read stream");
                let body_str = String::from_utf8_lossy(&buf);
                assert_eq!(
                    body_str,
                    "--------YaakFormBoundary\r\nContent-Disposition: form-data; name=\"field1\"\r\n\r\nvalue1\r\n--------YaakFormBoundary\r\nContent-Disposition: form-data; name=\"field2\"\r\n\r\nvalue2\r\n--------YaakFormBoundary--\r\n",
                );
                assert_eq!(content_length, Some(body_str.len()));
            }
            _ => panic!("Expected Some(SendableBody::Stream)"),
        }
        assert_eq!(
            content_type.unwrap(),
            format!("multipart/form-data; boundary={}", MULTIPART_BOUNDARY)
        );
        Ok(())
    }
    #[tokio::test]
    async fn test_multipart_body_with_file() -> Result<()> {
        let mut body = BTreeMap::new();
        body.insert(
            "form".to_string(),
            json!([
                { "enabled": true, "name": "file_field", "file": "./tests/test.txt", "filename": "custom.txt", "contentType": "text/plain" },
            ]),
        );
        let (result, content_type) = build_multipart_body(&body, &vec![]).await?;
        assert!(content_type.is_some());
        match result {
            Some(SendableBodyWithMeta::Stream { data: mut stream, content_length }) => {
                // Read the entire stream to verify content
                let mut buf = Vec::new();
                use tokio::io::AsyncReadExt;
                stream.read_to_end(&mut buf).await.expect("Failed to read stream");
                let body_str = String::from_utf8_lossy(&buf);
                assert_eq!(
                    body_str,
                    "--------YaakFormBoundary\r\nContent-Disposition: form-data; name=\"file_field\"; filename=\"custom.txt\"\r\nContent-Type: text/plain\r\n\r\nThis is a test file!\n\r\n--------YaakFormBoundary--\r\n"
                );
                assert_eq!(content_length, Some(body_str.len()));
            }
            _ => panic!("Expected Some(SendableBody::Stream)"),
        }
        assert_eq!(
            content_type.unwrap(),
            format!("multipart/form-data; boundary={}", MULTIPART_BOUNDARY)
        );
        Ok(())
    }
    #[tokio::test]
    async fn test_multipart_body_empty() -> Result<()> {
        let body = BTreeMap::new();
        let (result, content_type) = build_multipart_body(&body, &vec![]).await?;
        assert!(result.is_none());
        assert_eq!(content_type, None);
        Ok(())
    }
    // ---- boundary extraction ----
    #[test]
    fn test_extract_boundary_from_headers_with_custom_boundary() {
        let headers = vec![(
            "Content-Type".to_string(),
            "multipart/form-data; boundary=customBoundary123".to_string(),
        )];
        let boundary = extract_boundary_from_headers(&headers);
        assert_eq!(boundary, "customBoundary123");
    }
    #[test]
    fn test_extract_boundary_from_headers_default() {
        let headers = vec![("Accept".to_string(), "*/*".to_string())];
        let boundary = extract_boundary_from_headers(&headers);
        assert_eq!(boundary, MULTIPART_BOUNDARY);
    }
    #[test]
    fn test_extract_boundary_from_headers_no_boundary_in_content_type() {
        let headers = vec![("Content-Type".to_string(), "multipart/form-data".to_string())];
        let boundary = extract_boundary_from_headers(&headers);
        assert_eq!(boundary, MULTIPART_BOUNDARY);
    }
    #[test]
    fn test_extract_boundary_case_insensitive() {
        let headers = vec![(
            "Content-Type".to_string(),
            "multipart/form-data; boundary=myBoundary".to_string(),
        )];
        let boundary = extract_boundary_from_headers(&headers);
        assert_eq!(boundary, "myBoundary");
    }
    // ---- Content-Length vs Transfer-Encoding: chunked ----
    #[tokio::test]
    async fn test_no_content_length_with_chunked_encoding() -> Result<()> {
        let mut body = BTreeMap::new();
        body.insert("text".to_string(), json!("Hello, World!"));
        // Headers with Transfer-Encoding: chunked
        let headers = vec![("Transfer-Encoding".to_string(), "chunked".to_string())];
        let (_, result_headers) =
            build_body("POST", &Some("text/plain".to_string()), &body, headers).await?;
        // Verify that Content-Length is NOT present when Transfer-Encoding: chunked is set
        let has_content_length =
            result_headers.iter().any(|h| h.0.to_lowercase() == "content-length");
        assert!(!has_content_length, "Content-Length should not be present with chunked encoding");
        // Verify that the Transfer-Encoding header is still present
        let has_chunked = result_headers.iter().any(|h| {
            h.0.to_lowercase() == "transfer-encoding" && h.1.to_lowercase().contains("chunked")
        });
        assert!(has_chunked, "Transfer-Encoding: chunked should be preserved");
        Ok(())
    }
    #[tokio::test]
    async fn test_content_length_without_chunked_encoding() -> Result<()> {
        let mut body = BTreeMap::new();
        body.insert("text".to_string(), json!("Hello, World!"));
        // Headers without Transfer-Encoding: chunked
        let headers = vec![];
        let (_, result_headers) =
            build_body("POST", &Some("text/plain".to_string()), &body, headers).await?;
        // Verify that Content-Length IS present when Transfer-Encoding: chunked is NOT set
        let content_length_header =
            result_headers.iter().find(|h| h.0.to_lowercase() == "content-length");
        assert!(
            content_length_header.is_some(),
            "Content-Length should be present without chunked encoding"
        );
        assert_eq!(
            content_length_header.unwrap().1,
            "13",
            "Content-Length should match the body size"
        );
        Ok(())
    }
}

View File

@@ -0,0 +1 @@
This is a test file!

View File

@@ -1,7 +1,6 @@
use tauri::{
generate_handler,
Runtime, generate_handler,
plugin::{Builder, TauriPlugin},
Runtime,
};
mod commands;

View File

@@ -114,10 +114,7 @@ pub async fn activate_license<R: Runtime>(
if response.status().is_client_error() {
let body: APIErrorResponsePayload = response.json().await?;
return Err(ClientError {
message: body.message,
error: body.error,
});
return Err(ClientError { message: body.message, error: body.error });
}
if response.status().is_server_error() {
@@ -154,10 +151,7 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
if response.status().is_client_error() {
let body: APIErrorResponsePayload = response.json().await?;
return Err(ClientError {
message: body.message,
error: body.error,
});
return Err(ClientError { message: body.message, error: body.error });
}
if response.status().is_server_error() {
@@ -192,9 +186,7 @@ pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<Lice
match (has_activation_id, trial_period_active) {
(false, true) => Ok(LicenseCheckStatus::Trialing { end: trial_end }),
(false, false) => Ok(LicenseCheckStatus::PersonalUse {
trial_ended: trial_end,
}),
(false, false) => Ok(LicenseCheckStatus::PersonalUse { trial_ended: trial_end }),
(true, _) => {
info!("Checking license activation");
// A license has been activated, so let's check the license server
@@ -204,10 +196,7 @@ pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<Lice
if response.status().is_client_error() {
let body: APIErrorResponsePayload = response.json().await?;
return Err(ClientError {
message: body.message,
error: body.error,
});
return Err(ClientError { message: body.message, error: body.error });
}
if response.status().is_server_error() {

View File

@@ -1,4 +1,4 @@
use tauri::{command, Runtime, Window};
use tauri::{Runtime, Window, command};
#[command]
pub(crate) fn set_title<R: Runtime>(window: Window<R>, title: &str) {

View File

@@ -27,9 +27,7 @@ pub(crate) struct PluginState {
pub fn init<R: Runtime>() -> TauriPlugin<R> {
let mut builder = plugin::Builder::new("yaak-mac-window")
.setup(move |app, _| {
app.manage(PluginState {
native_titlebar: AtomicBool::new(false),
});
app.manage(PluginState { native_titlebar: AtomicBool::new(false) });
Ok(())
})
.invoke_handler(generate_handler![set_title, set_theme]);

View File

@@ -371,9 +371,7 @@ pub fn setup_traffic_light_positioner<R: Runtime>(window: &Window<R>) {
// Are we de-allocing this properly? (I miss safe Rust :( )
let window_label = window.label().to_string();
let app_state = WindowState {
window: window.clone(),
};
let app_state = WindowState { window: window.clone() };
let app_box = Box::into_raw(Box::new(app_state)) as *mut c_void;
let random_str: String =
rand::rng().sample_iter(&Alphanumeric).take(20).map(char::from).collect();

View File

@@ -1,6 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type AnyModel = CookieJar | Environment | Folder | GraphQlIntrospection | GrpcConnection | GrpcEvent | GrpcRequest | HttpRequest | HttpResponse | KeyValue | Plugin | Settings | SyncState | WebsocketConnection | WebsocketEvent | WebsocketRequest | Workspace | WorkspaceMeta;
export type AnyModel = CookieJar | Environment | Folder | GraphQlIntrospection | GrpcConnection | GrpcEvent | GrpcRequest | HttpRequest | HttpResponse | HttpResponseEvent | KeyValue | Plugin | Settings | SyncState | WebsocketConnection | WebsocketEvent | WebsocketRequest | Workspace | WorkspaceMeta;
export type ClientCertificate = { host: string, port: number | null, crtFile: string | null, keyFile: string | null, pfxFile: string | null, passphrase: string | null, enabled?: boolean, };
@@ -38,7 +38,16 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
/**
* Serializable representation of HTTP response events for DB storage.
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
* The `From` impl is in yaak-http to avoid circular dependencies.
*/
export type HttpResponseEventData = { "type": "start_request" } | { "type": "end_request" } | { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
export type HttpResponseHeader = { name: string, value: string, };

View File

@@ -15,6 +15,7 @@ export const grpcEventsAtom = createOrderedModelAtom('grpc_event', 'createdAt',
export const grpcRequestsAtom = createModelAtom('grpc_request');
export const httpRequestsAtom = createModelAtom('http_request');
export const httpResponsesAtom = createOrderedModelAtom('http_response', 'createdAt', 'desc');
export const httpResponseEventsAtom = createOrderedModelAtom('http_response_event', 'createdAt', 'asc');
export const keyValuesAtom = createModelAtom('key_value');
export const pluginsAtom = createModelAtom('plugin');
export const settingsAtom = createSingularModelAtom('settings');

View File

@@ -11,6 +11,7 @@ export function newStoreData(): ModelStoreData {
grpc_request: {},
http_request: {},
http_response: {},
http_response_event: {},
key_value: {},
plugin: {},
settings: {},

View File

@@ -0,0 +1,15 @@
-- Add default User-Agent header to workspaces that don't already have one (case-insensitive check).
-- json_insert with the '$[#]' path appends to the end of the `headers` JSON array.
-- NOTE(review): assumes `headers` is always a valid JSON array (never NULL) — confirm the column
-- default in the workspaces schema; json_insert(NULL, ...) would yield NULL and clear the column.
UPDATE workspaces
SET headers = json_insert(headers, '$[#]', json('{"enabled":true,"name":"User-Agent","value":"yaak"}'))
WHERE NOT EXISTS (
    SELECT 1 FROM json_each(workspaces.headers)
    WHERE LOWER(json_extract(value, '$.name')) = 'user-agent'
);

-- Add default Accept header to workspaces that don't already have one (case-insensitive check).
UPDATE workspaces
SET headers = json_insert(headers, '$[#]', json('{"enabled":true,"name":"Accept","value":"*/*"}'))
WHERE NOT EXISTS (
    SELECT 1 FROM json_each(workspaces.headers)
    WHERE LOWER(json_extract(value, '$.name')) = 'accept'
);

View File

@@ -0,0 +1,3 @@
-- Add request_headers and content_length_compressed columns to http_responses table.
-- request_headers: JSON array of header objects sent with the request; defaults to an
-- empty array so existing rows remain valid for the NOT NULL constraint.
ALTER TABLE http_responses ADD COLUMN request_headers TEXT NOT NULL DEFAULT '[]';
-- content_length_compressed: NOTE(review): presumably the compressed (on-the-wire) body
-- size, distinct from the decoded content_length — confirm against the sender code.
-- Nullable because it is unknown for pre-existing responses.
ALTER TABLE http_responses ADD COLUMN content_length_compressed INTEGER;

View File

@@ -0,0 +1,15 @@
-- One row per event recorded while an HTTP request was being sent, linked to the
-- response it belongs to. Rows are removed automatically when either the parent
-- workspace or the parent response is deleted (ON DELETE CASCADE).
CREATE TABLE http_response_events
(
    id           TEXT NOT NULL
        PRIMARY KEY,
    -- Model discriminator; always 'http_response_event'
    model        TEXT DEFAULT 'http_response_event' NOT NULL,
    workspace_id TEXT NOT NULL
        REFERENCES workspaces
            ON DELETE CASCADE,
    response_id  TEXT NOT NULL
        REFERENCES http_responses
            ON DELETE CASCADE,
    -- Millisecond-precision timestamps, defaulted by SQLite at insert time
    created_at   DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
    updated_at   DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
    -- JSON-serialized event payload (see HttpResponseEventData in the Rust models)
    event        TEXT NOT NULL
);

View File

@@ -18,7 +18,7 @@ pub enum Error {
#[error("Model serialization error: {0}")]
ModelSerializationError(String),
#[error("Model error: {0}")]
#[error("HTTP error: {0}")]
GenericError(String),
#[error("DB Migration Failed: {0}")]

View File

@@ -1323,11 +1323,13 @@ pub struct HttpResponse {
pub body_path: Option<String>,
pub content_length: Option<i32>,
pub content_length_compressed: Option<i32>,
pub elapsed: i32,
pub elapsed_headers: i32,
pub error: Option<String>,
pub headers: Vec<HttpResponseHeader>,
pub remote_addr: Option<String>,
pub request_headers: Vec<HttpResponseHeader>,
pub status: i32,
pub status_reason: Option<String>,
pub state: HttpResponseState,
@@ -1368,11 +1370,13 @@ impl UpsertModelInfo for HttpResponse {
(WorkspaceId, self.workspace_id.into()),
(BodyPath, self.body_path.into()),
(ContentLength, self.content_length.into()),
(ContentLengthCompressed, self.content_length_compressed.into()),
(Elapsed, self.elapsed.into()),
(ElapsedHeaders, self.elapsed_headers.into()),
(Error, self.error.into()),
(Headers, serde_json::to_string(&self.headers)?.into()),
(RemoteAddr, self.remote_addr.into()),
(RequestHeaders, serde_json::to_string(&self.request_headers)?.into()),
(State, serde_json::to_value(self.state)?.as_str().into()),
(Status, self.status.into()),
(StatusReason, self.status_reason.into()),
@@ -1386,11 +1390,13 @@ impl UpsertModelInfo for HttpResponse {
HttpResponseIden::UpdatedAt,
HttpResponseIden::BodyPath,
HttpResponseIden::ContentLength,
HttpResponseIden::ContentLengthCompressed,
HttpResponseIden::Elapsed,
HttpResponseIden::ElapsedHeaders,
HttpResponseIden::Error,
HttpResponseIden::Headers,
HttpResponseIden::RemoteAddr,
HttpResponseIden::RequestHeaders,
HttpResponseIden::State,
HttpResponseIden::Status,
HttpResponseIden::StatusReason,
@@ -1415,6 +1421,7 @@ impl UpsertModelInfo for HttpResponse {
error: r.get("error")?,
url: r.get("url")?,
content_length: r.get("content_length")?,
content_length_compressed: r.get("content_length_compressed").unwrap_or_default(),
version: r.get("version")?,
elapsed: r.get("elapsed")?,
elapsed_headers: r.get("elapsed_headers")?,
@@ -1424,10 +1431,151 @@ impl UpsertModelInfo for HttpResponse {
state: serde_json::from_str(format!(r#""{state}""#).as_str()).unwrap(),
body_path: r.get("body_path")?,
headers: serde_json::from_str(headers.as_str()).unwrap_or_default(),
request_headers: serde_json::from_str(
r.get::<_, String>("request_headers").unwrap_or_default().as_str(),
)
.unwrap_or_default(),
})
}
}
/// Serializable representation of HTTP response events for DB storage.
/// This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
/// The `From` impl is in yaak-http to avoid circular dependencies.
///
/// Serialized with an internal `"type"` tag in snake_case, matching the
/// generated TypeScript union in `gen_models.ts`.
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(tag = "type", rename_all = "snake_case")]
#[ts(export, export_to = "gen_models.ts")]
pub enum HttpResponseEventData {
    /// Marks the start of the request lifecycle.
    /// Included so the Rust enum matches the generated TS union, which
    /// exposes a `"start_request"` variant.
    StartRequest,
    /// Marks the end of the request lifecycle (mirrors TS `"end_request"`).
    EndRequest,
    /// A client setting that was applied while sending (name/value pair).
    Setting {
        name: String,
        value: String,
    },
    /// Free-form informational message.
    Info {
        message: String,
    },
    /// A redirect was encountered; `behavior` describes how it was handled.
    Redirect {
        url: String,
        status: u16,
        behavior: String,
    },
    /// The request line that was sent (method + path).
    SendUrl {
        method: String,
        path: String,
    },
    /// The response status line that was received (HTTP version + status).
    ReceiveUrl {
        version: String,
        status: String,
    },
    /// A request header that was sent.
    HeaderUp {
        name: String,
        value: String,
    },
    /// A response header that was received.
    HeaderDown {
        name: String,
        value: String,
    },
    /// A chunk of the request body was written (`bytes` = chunk size).
    ChunkSent {
        bytes: usize,
    },
    /// A chunk of the response body was read (`bytes` = chunk size).
    ChunkReceived {
        bytes: usize,
    },
}

impl Default for HttpResponseEventData {
    /// Fallback used when a stored event fails to deserialize: an empty
    /// informational message rather than a panic.
    fn default() -> Self {
        Self::Info { message: String::new() }
    }
}
/// A single event recorded while sending an HTTP request, persisted per
/// response (one row per event in `http_response_events`).
#[derive(Debug, Clone, Serialize, Deserialize, Default, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
#[enum_def(table_name = "http_response_events")]
pub struct HttpResponseEvent {
    // Model discriminator; always "http_response_event" (pinned in the TS type)
    #[ts(type = "\"http_response_event\"")]
    pub model: String,
    pub id: String,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
    // Owning workspace (FK; rows cascade-delete with the workspace per the migration)
    pub workspace_id: String,
    // Parent http_response this event belongs to (FK; cascade-deleted with the response)
    pub response_id: String,
    // Event payload; stored as JSON text in the `event` column
    pub event: HttpResponseEventData,
}
/// DB persistence glue for `HttpResponseEvent` (table/column mapping,
/// upsert values, and row hydration).
impl UpsertModelInfo for HttpResponseEvent {
    fn table_name() -> impl IntoTableRef + IntoIden {
        HttpResponseEventIden::Table
    }

    fn id_column() -> impl IntoIden + Eq + Clone {
        HttpResponseEventIden::Id
    }

    // IDs are prefixed with "re" (response event)
    fn generate_id() -> String {
        generate_prefixed_id("re")
    }

    // Oldest-first, matching the frontend's ascending createdAt ordering
    fn order_by() -> (impl IntoColumnRef, Order) {
        (HttpResponseEventIden::CreatedAt, Order::Asc)
    }

    fn get_id(&self) -> String {
        self.id.clone()
    }

    /// Column/value pairs written on insert; the event payload is serialized
    /// to JSON text for the `event` column.
    fn insert_values(
        self,
        source: &UpdateSource,
    ) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
        use HttpResponseEventIden::*;
        Ok(vec![
            (CreatedAt, upsert_date(source, self.created_at)),
            (UpdatedAt, upsert_date(source, self.updated_at)),
            (WorkspaceId, self.workspace_id.into()),
            (ResponseId, self.response_id.into()),
            (Event, serde_json::to_string(&self.event)?.into()),
        ])
    }

    // Only these columns are overwritten on conflicting upserts; the
    // workspace/response associations and created_at are left untouched
    fn update_columns() -> Vec<impl IntoIden> {
        vec![
            HttpResponseEventIden::UpdatedAt,
            HttpResponseEventIden::Event,
        ]
    }

    /// Hydrate a model from a DB row. A malformed `event` JSON value falls
    /// back to `HttpResponseEventData::default()` (empty Info) rather than
    /// failing the whole query.
    fn from_row(r: &Row) -> rusqlite::Result<Self>
    where
        Self: Sized,
    {
        let event: String = r.get("event")?;
        Ok(Self {
            id: r.get("id")?,
            model: r.get("model")?,
            workspace_id: r.get("workspace_id")?,
            response_id: r.get("response_id")?,
            created_at: r.get("created_at")?,
            updated_at: r.get("updated_at")?,
            event: serde_json::from_str(&event).unwrap_or_default(),
        })
    }
}
impl HttpResponseEvent {
    /// Construct a new event for the given response/workspace with a freshly
    /// generated ID. Both timestamps are stamped from a single clock read so
    /// `created_at` and `updated_at` are identical on creation (the original
    /// called `Utc::now()` twice, which could yield sub-millisecond drift).
    pub fn new(response_id: &str, workspace_id: &str, event: HttpResponseEventData) -> Self {
        let now = Utc::now().naive_utc();
        Self {
            model: "http_response_event".to_string(),
            id: Self::generate_id(),
            created_at: now,
            updated_at: now,
            workspace_id: workspace_id.to_string(),
            response_id: response_id.to_string(),
            event,
        }
    }
}
#[derive(Debug, Clone, Serialize, Deserialize, Default, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
@@ -2178,6 +2326,7 @@ define_any_model! {
GrpcRequest,
HttpRequest,
HttpResponse,
HttpResponseEvent,
KeyValue,
Plugin,
Settings,

View File

@@ -1,8 +1,6 @@
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{
GrpcRequest, HttpRequest, WebsocketRequest,
};
use crate::models::{GrpcRequest, HttpRequest, WebsocketRequest};
pub enum AnyRequest {
HttpRequest(HttpRequest),

View File

@@ -143,11 +143,7 @@ impl<'a> DbContext<'a> {
}
self.upsert(
&Environment {
name,
variables: cleaned_variables,
..environment.clone()
},
&Environment { name, variables: cleaned_variables, ..environment.clone() },
source,
)
}

View File

@@ -1,7 +1,10 @@
use crate::connection_or_tx::ConnectionOrTx;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{Environment, EnvironmentIden, Folder, FolderIden, GrpcRequest, GrpcRequestIden, HttpRequest, HttpRequestHeader, HttpRequestIden, WebsocketRequest, WebsocketRequestIden};
use crate::models::{
Environment, EnvironmentIden, Folder, FolderIden, GrpcRequest, GrpcRequestIden, HttpRequest,
HttpRequestHeader, HttpRequestIden, WebsocketRequest, WebsocketRequestIden,
};
use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
@@ -69,57 +72,35 @@ impl<'a> DbContext<'a> {
for m in self.find_many::<HttpRequest>(HttpRequestIden::FolderId, fid, None)? {
self.upsert_http_request(
&HttpRequest {
id: "".into(),
folder_id: Some(new_folder.id.clone()),
..m
},
&HttpRequest { id: "".into(), folder_id: Some(new_folder.id.clone()), ..m },
source,
)?;
}
for m in self.find_many::<WebsocketRequest>(WebsocketRequestIden::FolderId, fid, None)? {
self.upsert_websocket_request(
&WebsocketRequest {
id: "".into(),
folder_id: Some(new_folder.id.clone()),
..m
},
&WebsocketRequest { id: "".into(), folder_id: Some(new_folder.id.clone()), ..m },
source,
)?;
}
for m in self.find_many::<GrpcRequest>(GrpcRequestIden::FolderId, fid, None)? {
self.upsert_grpc_request(
&GrpcRequest {
id: "".into(),
folder_id: Some(new_folder.id.clone()),
..m
},
&GrpcRequest { id: "".into(), folder_id: Some(new_folder.id.clone()), ..m },
source,
)?;
}
for m in self.find_many::<Environment>(EnvironmentIden::ParentId, fid, None)? {
self.upsert_environment(
&Environment {
id: "".into(),
parent_id: Some(new_folder.id.clone()),
..m
},
&Environment { id: "".into(), parent_id: Some(new_folder.id.clone()), ..m },
source,
)?;
}
for m in self.find_many::<Folder>(FolderIden::FolderId, fid, None)? {
// Recurse down
self.duplicate_folder(
&Folder {
folder_id: Some(new_folder.id.clone()),
..m
},
source,
)?;
self.duplicate_folder(&Folder { folder_id: Some(new_folder.id.clone()), ..m }, source)?;
}
Ok(new_folder)

View File

@@ -31,13 +31,9 @@ impl<'a> DbContext<'a> {
},
source,
),
Some(introspection) => self.upsert(
&GraphQlIntrospection {
content,
..introspection
},
source,
),
Some(introspection) => {
self.upsert(&GraphQlIntrospection { content, ..introspection }, source)
}
}
}

View File

@@ -1,12 +1,12 @@
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{HttpResponse, HttpResponseIden, HttpResponseState};
use crate::queries::MAX_HISTORY_ITEMS;
use crate::util::UpdateSource;
use log::{debug, error};
use sea_query::{Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
use std::fs;
use crate::db_context::DbContext;
use crate::queries::MAX_HISTORY_ITEMS;
impl<'a> DbContext<'a> {
pub fn get_http_response(&self, id: &str) -> Result<HttpResponse> {
@@ -101,10 +101,6 @@ impl<'a> DbContext<'a> {
response: &HttpResponse,
source: &UpdateSource,
) -> Result<HttpResponse> {
if response.id.is_empty() {
Ok(response.clone())
} else {
self.upsert(response, source)
}
if response.id.is_empty() { Ok(response.clone()) } else { self.upsert(response, source) }
}
}

View File

@@ -1,8 +1,8 @@
use chrono::NaiveDateTime;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{KeyValue, KeyValueIden, UpsertModelInfo};
use crate::util::UpdateSource;
use chrono::NaiveDateTime;
use log::error;
use sea_query::{Asterisk, Cond, Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
@@ -39,7 +39,12 @@ impl<'a> DbContext<'a> {
}
}
pub fn get_key_value_dte(&self, namespace: &str, key: &str, default: NaiveDateTime) -> NaiveDateTime {
pub fn get_key_value_dte(
&self,
namespace: &str,
key: &str,
default: NaiveDateTime,
) -> NaiveDateTime {
match self.get_key_value_raw(namespace, key) {
None => default,
Some(v) => {
@@ -139,14 +144,8 @@ impl<'a> DbContext<'a> {
true,
),
Some(kv) => (
self.upsert_key_value(
&KeyValue {
value: value.to_string(),
..kv
},
source,
)
.expect("Failed to update key value"),
self.upsert_key_value(&KeyValue { value: value.to_string(), ..kv }, source)
.expect("Failed to update key value"),
false,
),
}

View File

@@ -1,10 +1,10 @@
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{SyncState, SyncStateIden, UpsertModelInfo};
use crate::util::UpdateSource;
use sea_query::{Asterisk, Cond, Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
use std::path::Path;
use crate::db_context::DbContext;
impl<'a> DbContext<'a> {
pub fn get_sync_state(&self, id: &str) -> Result<SyncState> {

View File

@@ -1,9 +1,6 @@
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{
WebsocketEvent,
WebsocketEventIden,
};
use crate::models::{WebsocketEvent, WebsocketEventIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {

View File

@@ -56,7 +56,11 @@ impl<'a> DbContext<'a> {
websocket_request: &WebsocketRequest,
) -> Result<(Option<String>, BTreeMap<String, Value>, String)> {
if let Some(at) = websocket_request.authentication_type.clone() {
return Ok((Some(at), websocket_request.authentication.clone(), websocket_request.id.clone()));
return Ok((
Some(at),
websocket_request.authentication.clone(),
websocket_request.id.clone(),
));
}
if let Some(folder_id) = websocket_request.folder_id.clone() {

View File

@@ -14,10 +14,7 @@ impl<'a> DbContext<'a> {
self.find_many(WorkspaceMetaIden::WorkspaceId, workspace_id, None)?;
if workspace_metas.is_empty() {
let wm = WorkspaceMeta {
workspace_id: workspace_id.to_string(),
..Default::default()
};
let wm = WorkspaceMeta { workspace_id: workspace_id.to_string(), ..Default::default() };
workspace_metas.push(self.upsert_workspace_meta(&wm, &UpdateSource::Background)?)
}
@@ -30,10 +27,8 @@ impl<'a> DbContext<'a> {
return Ok(workspace_meta);
}
let workspace_meta = WorkspaceMeta {
workspace_id: workspace_id.to_string(),
..Default::default()
};
let workspace_meta =
WorkspaceMeta { workspace_id: workspace_id.to_string(), ..Default::default() };
info!("Creating WorkspaceMeta for {workspace_id}");

View File

@@ -6,7 +6,7 @@ use crate::util::ModelPayload;
use r2d2::Pool;
use r2d2_sqlite::SqliteConnectionManager;
use rusqlite::TransactionBehavior;
use std::sync::{mpsc, Arc, Mutex};
use std::sync::{Arc, Mutex, mpsc};
use tauri::{Manager, Runtime, State};
pub trait QueryManagerExt<'a, R> {
@@ -58,10 +58,7 @@ impl QueryManager {
pool: Pool<SqliteConnectionManager>,
events_tx: mpsc::Sender<ModelPayload>,
) -> Self {
QueryManager {
pool: Arc::new(Mutex::new(pool)),
events_tx,
}
QueryManager { pool: Arc::new(Mutex::new(pool)), events_tx }
}
pub fn connect(&self) -> DbContext<'_> {
@@ -71,10 +68,7 @@ impl QueryManager {
.expect("Failed to gain lock on DB")
.get()
.expect("Failed to get a new DB connection from the pool");
DbContext {
events_tx: self.events_tx.clone(),
conn: ConnectionOrTx::Connection(conn),
}
DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) }
}
pub fn with_conn<F, T>(&self, func: F) -> T
@@ -88,10 +82,8 @@ impl QueryManager {
.get()
.expect("Failed to get new DB connection from the pool");
let db_context = DbContext {
events_tx: self.events_tx.clone(),
conn: ConnectionOrTx::Connection(conn),
};
let db_context =
DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) };
func(&db_context)
}
@@ -113,10 +105,8 @@ impl QueryManager {
.transaction_with_behavior(TransactionBehavior::Immediate)
.expect("Failed to start DB transaction");
let db_context = DbContext {
events_tx: self.events_tx.clone(),
conn: ConnectionOrTx::Transaction(&tx),
};
let db_context =
DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Transaction(&tx) };
match func(&db_context) {
Ok(val) => {

View File

@@ -62,9 +62,7 @@ pub enum UpdateSource {
impl UpdateSource {
pub fn from_window<R: Runtime>(window: &WebviewWindow<R>) -> Self {
Self::Window {
label: window.label().to_string(),
}
Self::Window { label: window.label().to_string() }
}
}

View File

@@ -12,7 +12,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponseHeader = { name: string, value: string, };

View File

@@ -58,10 +58,7 @@ pub async fn check_plugin_updates<R: Runtime>(
.list_plugins()?
.into_iter()
.filter_map(|p| match get_plugin_meta(&Path::new(&p.directory)) {
Ok(m) => Some(PluginNameVersion {
name: m.name,
version: m.version,
}),
Ok(m) => Some(PluginNameVersion { name: m.name, version: m.version }),
Err(e) => {
warn!("Failed to get plugin metadata: {}", e);
None
@@ -70,9 +67,7 @@ pub async fn check_plugin_updates<R: Runtime>(
.collect();
let url = build_url("/updates");
let body = serde_json::to_vec(&PluginUpdatesResponse {
plugins: name_versions,
})?;
let body = serde_json::to_vec(&PluginUpdatesResponse { plugins: name_versions })?;
let resp = yaak_api_client(app_handle)?.post(url.clone()).body(body).send().await?;
if !resp.status().is_success() {
return Err(ApiErr(format!("{} response to {}", resp.status(), url.to_string())));

View File

@@ -1,9 +1,9 @@
use crate::api::{
check_plugin_updates, search_plugins, PluginSearchResponse, PluginUpdatesResponse,
PluginSearchResponse, PluginUpdatesResponse, check_plugin_updates, search_plugins,
};
use crate::error::Result;
use crate::install::{delete_and_uninstall, download_and_install};
use tauri::{command, AppHandle, Runtime, WebviewWindow};
use tauri::{AppHandle, Runtime, WebviewWindow, command};
use yaak_models::models::Plugin;
#[command]

View File

@@ -45,11 +45,7 @@ pub struct PluginContext {
impl PluginContext {
pub fn new_empty() -> Self {
Self {
id: "default".to_string(),
label: None,
workspace_id: None,
}
Self { id: "default".to_string(), label: None, workspace_id: None }
}
pub fn new<R: Runtime>(window: &WebviewWindow<R>) -> Self {
Self {
@@ -1049,9 +1045,7 @@ pub enum Content {
impl Default for Content {
fn default() -> Self {
Self::Text {
content: String::default(),
}
Self::Text { content: String::default() }
}
}

View File

@@ -1,24 +1,24 @@
use std::sync::atomic::{AtomicBool, Ordering};
use crate::commands::{install, search, uninstall, updates};
use crate::manager::PluginManager;
use log::info;
use std::sync::atomic::{AtomicBool, Ordering};
use tauri::plugin::{Builder, TauriPlugin};
use tauri::{generate_handler, Manager, RunEvent, Runtime, State};
use tauri::{Manager, RunEvent, Runtime, State, generate_handler};
pub mod api;
mod checksum;
mod commands;
pub mod error;
pub mod events;
pub mod install;
pub mod manager;
pub mod native_template_functions;
mod nodejs;
pub mod plugin_handle;
pub mod plugin_meta;
mod server_ws;
pub mod template_callback;
mod util;
mod checksum;
pub mod api;
pub mod install;
pub mod plugin_meta;
static EXITING: AtomicBool = AtomicBool::new(false);

View File

@@ -185,12 +185,8 @@ impl PluginManager {
.collect();
let plugins = app_handle.db().list_plugins().unwrap_or_default();
let installed_plugin_dirs: Vec<PluginCandidate> = plugins
.iter()
.map(|p| PluginCandidate {
dir: p.directory.to_owned(),
})
.collect();
let installed_plugin_dirs: Vec<PluginCandidate> =
plugins.iter().map(|p| PluginCandidate { dir: p.directory.to_owned() }).collect();
[bundled_plugin_dirs, installed_plugin_dirs].concat()
}
@@ -524,9 +520,7 @@ impl PluginManager {
RenderPurpose::Preview,
);
// We don't want to fail for this op because the UI will not be able to list any auth types then
let render_opt = RenderOptions {
error_behavior: RenderErrorBehavior::ReturnEmpty,
};
let render_opt = RenderOptions { error_behavior: RenderErrorBehavior::ReturnEmpty };
let rendered_values = render_json_value_raw(json!(values), vars, &cb, &render_opt).await?;
let context_id = format!("{:x}", md5::compute(model_id.to_string()));
@@ -643,9 +637,7 @@ impl PluginManager {
RenderPurpose::Preview,
);
// We don't want to fail for this op because the UI will not be able to list any auth types then
let render_opt = RenderOptions {
error_behavior: RenderErrorBehavior::ReturnEmpty,
};
let render_opt = RenderOptions { error_behavior: RenderErrorBehavior::ReturnEmpty };
let rendered_values = render_json_value_raw(json!(values), vars, &cb, &render_opt).await?;
let context_id = format!("{:x}", md5::compute(model_id.to_string()));
let event = self
@@ -688,9 +680,7 @@ impl PluginManager {
&PluginContext::new(&window),
RenderPurpose::Preview,
),
&RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
},
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await?;
let results = self.get_http_authentication_summaries(window).await?;
@@ -807,21 +797,20 @@ impl PluginManager {
.await
.map_err(|e| RenderError(format!("Failed to call template function {e:}")))?;
let value = events.into_iter().find_map(|e| match e.payload {
// Error returned
InternalEventPayload::CallTemplateFunctionResponse(CallTemplateFunctionResponse {
error: Some(error),
..
}) => Some(Err(error)),
// Value or null returned
InternalEventPayload::CallTemplateFunctionResponse(CallTemplateFunctionResponse {
value,
..
}) => Some(Ok(value.unwrap_or_default())),
// Generic error returned
InternalEventPayload::ErrorResponse(ErrorResponse { error }) => Some(Err(error)),
_ => None,
});
let value =
events.into_iter().find_map(|e| match e.payload {
// Error returned
InternalEventPayload::CallTemplateFunctionResponse(
CallTemplateFunctionResponse { error: Some(error), .. },
) => Some(Err(error)),
// Value or null returned
InternalEventPayload::CallTemplateFunctionResponse(
CallTemplateFunctionResponse { value, .. },
) => Some(Ok(value.unwrap_or_default())),
// Generic error returned
InternalEventPayload::ErrorResponse(ErrorResponse { error }) => Some(Err(error)),
_ => None,
});
match value {
None => Err(RenderError(format!("Template function {fn_name}(…) not found "))),

View File

@@ -4,17 +4,17 @@ use crate::events::{
TemplateFunctionPreviewType,
};
use crate::template_callback::PluginTemplateCallback;
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use base64::prelude::BASE64_STANDARD;
use keyring::Error::NoEntry;
use log::{debug, info};
use std::collections::HashMap;
use tauri::{AppHandle, Runtime};
use yaak_common::platform::{get_os, OperatingSystem};
use yaak_common::platform::{OperatingSystem, get_os};
use yaak_crypto::manager::EncryptionManagerExt;
use yaak_templates::error::Error::RenderError;
use yaak_templates::error::Result;
use yaak_templates::{transform_args, FnArg, Parser, Token, Tokens, Val};
use yaak_templates::{FnArg, Parser, Token, Tokens, Val, transform_args};
pub(crate) fn template_function_secure() -> TemplateFunction {
TemplateFunction {
@@ -179,9 +179,7 @@ pub fn decrypt_secure_template_function<R: Runtime>(
for token in parsed.tokens.iter() {
match token {
Token::Tag {
val: Val::Fn { name, args },
} if name == "secure" => {
Token::Tag { val: Val::Fn { name, args } } if name == "secure" => {
let mut args_map = HashMap::new();
for a in args {
match a.clone().value {
@@ -228,7 +226,7 @@ pub fn encrypt_secure_template_function<R: Runtime>(
tokens,
&PluginTemplateCallback::new(app_handle, plugin_context, RenderPurpose::Preview),
)?
.to_string())
.to_string())
}
pub fn template_function_keychain_run(args: HashMap<String, serde_json::Value>) -> Result<String> {

View File

@@ -3,8 +3,8 @@ use log::{info, warn};
use std::net::SocketAddr;
use tauri::path::BaseDirectory;
use tauri::{AppHandle, Manager, Runtime};
use tauri_plugin_shell::process::CommandEvent;
use tauri_plugin_shell::ShellExt;
use tauri_plugin_shell::process::CommandEvent;
use tokio::sync::watch::Receiver;
pub async fn start_nodejs_plugin_runtime<R: Runtime>(

View File

@@ -3,10 +3,10 @@ use futures_util::{SinkExt, StreamExt};
use log::{error, info, warn};
use std::sync::Arc;
use tokio::net::{TcpListener, TcpStream};
use tokio::sync::{mpsc, Mutex};
use tokio::sync::{Mutex, mpsc};
use tokio_tungstenite::accept_async_with_config;
use tokio_tungstenite::tungstenite::protocol::WebSocketConfig;
use tokio_tungstenite::tungstenite::Message;
use tokio_tungstenite::tungstenite::protocol::WebSocketConfig;
#[derive(Clone)]
pub(crate) struct PluginRuntimeServerWebsocket {

View File

@@ -1,5 +1,5 @@
use rand::distr::Alphanumeric;
use rand::Rng;
use rand::distr::Alphanumeric;
pub fn gen_id() -> String {
rand::rng().sample_iter(&Alphanumeric).take(5).map(char::from).collect()

View File

@@ -1,18 +1,18 @@
use crate::error::Error::InvalidSyncDirectory;
use crate::error::Result;
use crate::sync::{
apply_sync_ops, apply_sync_state_ops, compute_sync_ops, get_db_candidates, get_fs_candidates, FsCandidate,
SyncOp,
FsCandidate, SyncOp, apply_sync_ops, apply_sync_state_ops, compute_sync_ops, get_db_candidates,
get_fs_candidates,
};
use crate::watch::{watch_directory, WatchEvent};
use crate::watch::{WatchEvent, watch_directory};
use chrono::Utc;
use log::warn;
use serde::{Deserialize, Serialize};
use std::path::Path;
use tauri::ipc::Channel;
use tauri::{command, AppHandle, Listener, Runtime};
use tauri::{AppHandle, Listener, Runtime, command};
use tokio::sync::watch;
use ts_rs::TS;
use crate::error::Error::InvalidSyncDirectory;
#[command]
pub async fn calculate<R: Runtime>(
@@ -21,7 +21,7 @@ pub async fn calculate<R: Runtime>(
sync_dir: &Path,
) -> Result<Vec<SyncOp>> {
if !sync_dir.exists() {
return Err(InvalidSyncDirectory(sync_dir.to_string_lossy().to_string()))
return Err(InvalidSyncDirectory(sync_dir.to_string_lossy().to_string()));
}
let db_candidates = get_db_candidates(&app_handle, workspace_id, sync_dir)?;

View File

@@ -1,8 +1,7 @@
use crate::commands::{apply, calculate, calculate_fs, watch};
use tauri::{
generate_handler,
Runtime, generate_handler,
plugin::{Builder, TauriPlugin},
Runtime,
};
mod commands;

View File

@@ -208,6 +208,7 @@ impl TryFrom<AnyModel> for SyncModel {
AnyModel::GrpcConnection(m) => return Err(UnknownModel(m.model)),
AnyModel::GrpcEvent(m) => return Err(UnknownModel(m.model)),
AnyModel::HttpResponse(m) => return Err(UnknownModel(m.model)),
AnyModel::HttpResponseEvent(m) => return Err(UnknownModel(m.model)),
AnyModel::KeyValue(m) => return Err(UnknownModel(m.model)),
AnyModel::Plugin(m) => return Err(UnknownModel(m.model)),
AnyModel::Settings(m) => return Err(UnknownModel(m.model)),

View File

@@ -202,11 +202,7 @@ pub(crate) fn get_fs_candidates(dir: &Path) -> Result<Vec<FsCandidate>> {
};
let rel_path = Path::new(&dir_entry.file_name()).to_path_buf();
candidates.push(FsCandidate {
rel_path,
model,
checksum,
})
candidates.push(FsCandidate { rel_path, model, checksum })
}
Ok(candidates)
@@ -236,28 +232,25 @@ pub(crate) fn compute_sync_ops(
(None, Some(fs)) => SyncOp::DbCreate { fs: fs.to_owned() },
// DB unchanged <-> FS missing
(Some(DbCandidate::Unmodified(model, sync_state)), None) => SyncOp::DbDelete {
model: model.to_owned(),
state: sync_state.to_owned(),
},
(Some(DbCandidate::Unmodified(model, sync_state)), None) => {
SyncOp::DbDelete { model: model.to_owned(), state: sync_state.to_owned() }
}
// DB modified <-> FS missing
(Some(DbCandidate::Modified(model, sync_state)), None) => SyncOp::FsUpdate {
model: model.to_owned(),
state: sync_state.to_owned(),
},
(Some(DbCandidate::Modified(model, sync_state)), None) => {
SyncOp::FsUpdate { model: model.to_owned(), state: sync_state.to_owned() }
}
// DB added <-> FS missing
(Some(DbCandidate::Added(model)), None) => SyncOp::FsCreate {
model: model.to_owned(),
},
(Some(DbCandidate::Added(model)), None) => {
SyncOp::FsCreate { model: model.to_owned() }
}
// DB deleted <-> FS missing
// Already deleted on FS, but sending it so the SyncState gets dealt with
(Some(DbCandidate::Deleted(sync_state)), None) => SyncOp::FsDelete {
state: sync_state.to_owned(),
fs: None,
},
(Some(DbCandidate::Deleted(sync_state)), None) => {
SyncOp::FsDelete { state: sync_state.to_owned(), fs: None }
}
// DB unchanged <-> FS exists
(Some(DbCandidate::Unmodified(_, sync_state)), Some(fs_candidate)) => {
@@ -274,10 +267,7 @@ pub(crate) fn compute_sync_ops(
// DB modified <-> FS exists
(Some(DbCandidate::Modified(model, sync_state)), Some(fs_candidate)) => {
if sync_state.checksum == fs_candidate.checksum {
SyncOp::FsUpdate {
model: model.to_owned(),
state: sync_state.to_owned(),
}
SyncOp::FsUpdate { model: model.to_owned(), state: sync_state.to_owned() }
} else if model.updated_at() < fs_candidate.model.updated_at() {
// CONFLICT! Write to DB if the fs model is newer
SyncOp::DbUpdate {
@@ -286,19 +276,14 @@ pub(crate) fn compute_sync_ops(
}
} else {
// CONFLICT! Write to FS if the db model is newer
SyncOp::FsUpdate {
model: model.to_owned(),
state: sync_state.to_owned(),
}
SyncOp::FsUpdate { model: model.to_owned(), state: sync_state.to_owned() }
}
}
// DB added <-> FS anything
(Some(DbCandidate::Added(model)), Some(_)) => {
// This would be super rare (impossible?), so let's follow the user's intention
SyncOp::FsCreate {
model: model.to_owned(),
}
SyncOp::FsCreate { model: model.to_owned() }
}
// DB deleted <-> FS exists
@@ -389,11 +374,7 @@ pub(crate) fn apply_sync_ops<R: Runtime>(
let (content, checksum) = model.to_file_contents(&rel_path)?;
let mut f = File::create(&abs_path)?;
f.write_all(&content)?;
SyncStateOp::Create {
model_id: model.id(),
checksum,
rel_path,
}
SyncStateOp::Create { model_id: model.id(), checksum, rel_path }
}
SyncOp::FsUpdate { model, state } => {
// Always write the existing path
@@ -408,21 +389,14 @@ pub(crate) fn apply_sync_ops<R: Runtime>(
rel_path: rel_path.to_owned(),
}
}
SyncOp::FsDelete {
state,
fs: fs_candidate,
} => match fs_candidate {
None => SyncStateOp::Delete {
state: state.to_owned(),
},
SyncOp::FsDelete { state, fs: fs_candidate } => match fs_candidate {
None => SyncStateOp::Delete { state: state.to_owned() },
Some(_) => {
// Always delete the existing path
let rel_path = Path::new(&state.rel_path);
let abs_path = Path::new(&state.sync_dir).join(&rel_path);
fs::remove_file(&abs_path)?;
SyncStateOp::Delete {
state: state.to_owned(),
}
SyncStateOp::Delete { state: state.to_owned() }
}
},
SyncOp::DbCreate { fs } => {
@@ -463,9 +437,7 @@ pub(crate) fn apply_sync_ops<R: Runtime>(
}
SyncOp::DbDelete { model, state } => {
delete_model(app_handle, &model)?;
SyncStateOp::Delete {
state: state.to_owned(),
}
SyncStateOp::Delete { state: state.to_owned() }
}
SyncOp::IgnorePrivate { .. } => SyncStateOp::NoOp,
});
@@ -541,11 +513,7 @@ pub(crate) fn apply_sync_state_ops<R: Runtime>(
) -> Result<()> {
for op in ops {
match op {
SyncStateOp::Create {
checksum,
rel_path,
model_id,
} => {
SyncStateOp::Create { checksum, rel_path, model_id } => {
let sync_state = SyncState {
workspace_id: workspace_id.to_string(),
model_id,
@@ -557,11 +525,7 @@ pub(crate) fn apply_sync_state_ops<R: Runtime>(
};
app_handle.db().upsert_sync_state(&sync_state)?;
}
SyncStateOp::Update {
state: sync_state,
checksum,
rel_path,
} => {
SyncStateOp::Update { state: sync_state, checksum, rel_path } => {
let sync_state = SyncState {
checksum,
sync_dir: sync_dir.to_str().unwrap().to_string(),

View File

@@ -97,10 +97,7 @@ impl Display for Token {
fn transform_val<T: TemplateCallback>(val: &Val, cb: &T) -> Result<Val> {
let val = match val {
Val::Fn {
name: fn_name,
args,
} => {
Val::Fn { name: fn_name, args } => {
let mut new_args: Vec<FnArg> = Vec::new();
for arg in args {
let value = match arg.clone().value {
@@ -112,15 +109,9 @@ fn transform_val<T: TemplateCallback>(val: &Val, cb: &T) -> Result<Val> {
};
let arg_name = arg.name.clone();
new_args.push(FnArg {
name: arg_name,
value,
});
}
Val::Fn {
name: fn_name.clone(),
args: new_args,
new_args.push(FnArg { name: arg_name, value });
}
Val::Fn { name: fn_name.clone(), args: new_args }
}
_ => val.clone(),
};
@@ -160,10 +151,7 @@ pub struct Parser {
impl Parser {
pub fn new(text: &str) -> Parser {
Parser {
chars: text.chars().collect(),
..Parser::default()
}
Parser { chars: text.chars().collect(), ..Parser::default() }
}
pub fn parse(&mut self) -> Result<Tokens> {
@@ -195,9 +183,7 @@ impl Parser {
}
self.push_token(Token::Eof);
Ok(Tokens {
tokens: self.tokens.clone(),
})
Ok(Tokens { tokens: self.tokens.clone() })
}
fn parse_tag(&mut self) -> Result<Option<Token>> {
@@ -463,9 +449,7 @@ impl Parser {
fn push_token(&mut self, token: Token) {
// Push any text we've accumulated
if !self.curr_text.is_empty() {
let text_token = Token::Raw {
text: self.curr_text.clone(),
};
let text_token = Token::Raw { text: self.curr_text.clone() };
self.tokens.push(text_token);
self.curr_text.clear();
}
@@ -501,12 +485,7 @@ mod tests {
let mut p = Parser::new(r#"\${[ foo ]}"#);
assert_eq!(
p.parse()?.tokens,
vec![
Token::Raw {
text: "${[ foo ]}".to_string()
},
Token::Eof
]
vec![Token::Raw { text: "${[ foo ]}".to_string() }, Token::Eof]
);
Ok(())
}
@@ -517,12 +496,8 @@ mod tests {
assert_eq!(
p.parse()?.tokens,
vec![
Token::Raw {
text: r#"\\"#.to_string()
},
Token::Tag {
val: Val::Var { name: "foo".into() }
},
Token::Raw { text: r#"\\"#.to_string() },
Token::Tag { val: Val::Var { name: "foo".into() } },
Token::Eof
]
);
@@ -535,9 +510,7 @@ mod tests {
assert_eq!(
p.parse()?.tokens,
vec![
Token::Tag {
val: Val::Var { name: "foo".into() }
},
Token::Tag { val: Val::Var { name: "foo".into() } },
Token::Eof
]
);
@@ -550,9 +523,7 @@ mod tests {
assert_eq!(
p.parse()?.tokens,
vec![
Token::Tag {
val: Val::Var { name: "a-b".into() }
},
Token::Tag { val: Val::Var { name: "a-b".into() } },
Token::Eof
]
);
@@ -566,9 +537,7 @@ mod tests {
assert_eq!(
p.parse()?.tokens,
vec![
Token::Tag {
val: Val::Var { name: "a_b".into() }
},
Token::Tag { val: Val::Var { name: "a_b".into() } },
Token::Eof
]
);
@@ -599,9 +568,7 @@ mod tests {
assert_eq!(
p.parse()?.tokens,
vec![
Token::Tag {
val: Val::Var { name: "_a".into() }
},
Token::Tag { val: Val::Var { name: "_a".into() } },
Token::Eof
]
);
@@ -615,12 +582,8 @@ mod tests {
assert_eq!(
p.parse()?.tokens,
vec![
Token::Tag {
val: Val::Bool { value: true },
},
Token::Tag {
val: Val::Bool { value: false },
},
Token::Tag { val: Val::Bool { value: true } },
Token::Tag { val: Val::Bool { value: false } },
Token::Eof
]
);
@@ -633,12 +596,7 @@ mod tests {
let mut p = Parser::new("${[ foo bar ]}");
assert_eq!(
p.parse()?.tokens,
vec![
Token::Raw {
text: "${[ foo bar ]}".into()
},
Token::Eof
]
vec![Token::Raw { text: "${[ foo bar ]}".into() }, Token::Eof]
);
Ok(())
@@ -650,11 +608,7 @@ mod tests {
assert_eq!(
p.parse()?.tokens,
vec![
Token::Tag {
val: Val::Str {
text: r#"foo 'bar' baz"#.into()
}
},
Token::Tag { val: Val::Str { text: r#"foo 'bar' baz"#.into() } },
Token::Eof
]
);
@@ -668,11 +622,7 @@ mod tests {
assert_eq!(
p.parse()?.tokens,
vec![
Token::Tag {
val: Val::Str {
text: r#"foo 'bar' baz"#.into()
}
},
Token::Tag { val: Val::Str { text: r#"foo 'bar' baz"#.into() } },
Token::Eof
]
);
@@ -686,15 +636,9 @@ mod tests {
assert_eq!(
p.parse()?.tokens,
vec![
Token::Raw {
text: "Hello ".to_string()
},
Token::Tag {
val: Val::Var { name: "foo".into() }
},
Token::Raw {
text: "!".to_string()
},
Token::Raw { text: "Hello ".to_string() },
Token::Tag { val: Val::Var { name: "foo".into() } },
Token::Raw { text: "!".to_string() },
Token::Eof,
]
);
@@ -708,12 +652,7 @@ mod tests {
assert_eq!(
p.parse()?.tokens,
vec![
Token::Tag {
val: Val::Fn {
name: "foo".into(),
args: Vec::new(),
}
},
Token::Tag { val: Val::Fn { name: "foo".into(), args: Vec::new() } },
Token::Eof
]
);
@@ -727,12 +666,7 @@ mod tests {
assert_eq!(
p.parse()?.tokens,
vec![
Token::Tag {
val: Val::Fn {
name: "foo.bar.baz".into(),
args: Vec::new(),
}
},
Token::Tag { val: Val::Fn { name: "foo.bar.baz".into(), args: Vec::new() } },
Token::Eof
]
);
@@ -772,18 +706,9 @@ mod tests {
val: Val::Fn {
name: "foo".into(),
args: vec![
FnArg {
name: "a".into(),
value: Val::Var { name: "bar".into() }
},
FnArg {
name: "b".into(),
value: Val::Var { name: "baz".into() }
},
FnArg {
name: "c".into(),
value: Val::Var { name: "qux".into() }
},
FnArg { name: "a".into(), value: Val::Var { name: "bar".into() } },
FnArg { name: "b".into(), value: Val::Var { name: "baz".into() } },
FnArg { name: "c".into(), value: Val::Var { name: "qux".into() } },
],
}
},
@@ -804,24 +729,13 @@ mod tests {
val: Val::Fn {
name: "foo".into(),
args: vec![
FnArg {
name: "aaa".into(),
value: Val::Var { name: "bar".into() }
},
FnArg { name: "aaa".into(), value: Val::Var { name: "bar".into() } },
FnArg {
name: "bb".into(),
value: Val::Str {
text: r#"baz 'hi'"#.into()
}
},
FnArg {
name: "c".into(),
value: Val::Var { name: "qux".into() }
},
FnArg {
name: "z".into(),
value: Val::Bool { value: true }
value: Val::Str { text: r#"baz 'hi'"#.into() }
},
FnArg { name: "c".into(), value: Val::Var { name: "qux".into() } },
FnArg { name: "z".into(), value: Val::Bool { value: true } },
],
}
},
@@ -843,10 +757,7 @@ mod tests {
name: "foo".into(),
args: vec![FnArg {
name: "b".into(),
value: Val::Fn {
name: "bar".into(),
args: vec![],
}
value: Val::Fn { name: "bar".into(), args: vec![] }
}],
}
},
@@ -883,10 +794,7 @@ mod tests {
],
}
},
FnArg {
name: "c".into(),
value: Val::Str { text: "o".into() }
},
FnArg { name: "c".into(), value: Val::Str { text: "o".into() } },
],
}
},
@@ -899,26 +807,14 @@ mod tests {
#[test]
fn token_display_var() -> Result<()> {
assert_eq!(
Val::Var {
name: "foo".to_string()
}
.to_string(),
"foo"
);
assert_eq!(Val::Var { name: "foo".to_string() }.to_string(), "foo");
Ok(())
}
#[test]
fn token_display_str() -> Result<()> {
assert_eq!(
Val::Str {
text: "Hello You".to_string()
}
.to_string(),
"'Hello You'"
);
assert_eq!(Val::Str { text: "Hello You".to_string() }.to_string(), "'Hello You'");
Ok(())
}
@@ -926,10 +822,7 @@ mod tests {
#[test]
fn token_display_complex_str() -> Result<()> {
assert_eq!(
Val::Str {
text: "Hello 'You'".to_string()
}
.to_string(),
Val::Str { text: "Hello 'You'".to_string() }.to_string(),
"b64'SGVsbG8gJ1lvdSc'"
);
@@ -942,16 +835,8 @@ mod tests {
Val::Fn {
name: "fn".to_string(),
args: vec![
FnArg {
name: "n".to_string(),
value: Null,
},
FnArg {
name: "a".to_string(),
value: Val::Str {
text: "aaa".to_string()
}
}
FnArg { name: "n".to_string(), value: Null },
FnArg { name: "a".to_string(), value: Val::Str { text: "aaa".to_string() } }
]
}
.to_string(),
@@ -970,15 +855,11 @@ mod tests {
args: vec![
FnArg {
name: "arg".to_string(),
value: Val::Str {
text: "v 'x'".to_string()
}
value: Val::Str { text: "v 'x'".to_string() }
},
FnArg {
name: "arg2".to_string(),
value: Val::Var {
name: "my_var".to_string()
}
value: Val::Var { name: "my_var".to_string() }
}
]
}
@@ -995,19 +876,9 @@ mod tests {
assert_eq!(
Tokens {
tokens: vec![
Token::Tag {
val: Val::Var {
name: "my_var".to_string()
}
},
Token::Raw {
text: " Some cool text ".to_string(),
},
Token::Tag {
val: Val::Str {
text: "Hello World".to_string()
}
}
Token::Tag { val: Val::Var { name: "my_var".to_string() } },
Token::Raw { text: " Some cool text ".to_string() },
Token::Tag { val: Val::Str { text: "Hello World".to_string() } }
]
}
.to_string(),

View File

@@ -77,6 +77,12 @@ pub struct RenderOptions {
pub error_behavior: RenderErrorBehavior,
}
impl RenderOptions {
    /// Convenience constructor for the strict rendering mode: template errors
    /// are propagated as `Err` (see `RenderErrorBehavior::Throw`) instead of
    /// being replaced with an empty string.
    pub fn throw() -> Self {
        Self { error_behavior: RenderErrorBehavior::Throw }
    }
}
impl RenderErrorBehavior {
pub fn handle(&self, r: Result<String>) -> Result<String> {
match (self, r) {
@@ -194,9 +200,7 @@ mod parse_and_render_tests {
let template = "";
let vars = HashMap::new();
let result = "";
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
assert_eq!(parse_and_render(template, &vars, &empty_cb, &opt).await?, result.to_string());
Ok(())
}
@@ -207,9 +211,7 @@ mod parse_and_render_tests {
let template = "Hello World!";
let vars = HashMap::new();
let result = "Hello World!";
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
assert_eq!(parse_and_render(template, &vars, &empty_cb, &opt).await?, result.to_string());
Ok(())
}
@@ -220,9 +222,7 @@ mod parse_and_render_tests {
let template = "${[ foo ]}";
let vars = HashMap::from([("foo".to_string(), "bar".to_string())]);
let result = "bar";
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
assert_eq!(parse_and_render(template, &vars, &empty_cb, &opt).await?, result.to_string());
Ok(())
}
@@ -237,9 +237,7 @@ mod parse_and_render_tests {
vars.insert("baz".to_string(), "baz".to_string());
let result = "foo: bar: baz";
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
assert_eq!(parse_and_render(template, &vars, &empty_cb, &opt).await?, result.to_string());
Ok(())
}
@@ -249,9 +247,7 @@ mod parse_and_render_tests {
let empty_cb = EmptyCB {};
let template = "${[ foo ]}";
let vars = HashMap::new();
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
assert_eq!(
parse_and_render(template, &vars, &empty_cb, &opt).await,
Err(VariableNotFound("foo".to_string()))
@@ -265,13 +261,8 @@ mod parse_and_render_tests {
let template = "${[ foo ]}";
let mut vars = HashMap::new();
vars.insert("foo".to_string(), "".to_string());
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
assert_eq!(
parse_and_render(template, &vars, &empty_cb, &opt).await,
Ok("".to_string())
);
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
assert_eq!(parse_and_render(template, &vars, &empty_cb, &opt).await, Ok("".to_string()));
Ok(())
}
@@ -281,9 +272,7 @@ mod parse_and_render_tests {
let template = "${[ foo ]}";
let mut vars = HashMap::new();
vars.insert("foo".to_string(), "${[ foo ]}".to_string());
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
assert_eq!(
parse_and_render(template, &vars, &empty_cb, &opt).await,
Err(RenderStackExceededError)
@@ -297,9 +286,7 @@ mod parse_and_render_tests {
let template = "hello ${[ word ]} world!";
let vars = HashMap::from([("word".to_string(), "cruel".to_string())]);
let result = "hello cruel world!";
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
assert_eq!(parse_and_render(template, &vars, &empty_cb, &opt).await?, result.to_string());
Ok(())
}
@@ -309,9 +296,7 @@ mod parse_and_render_tests {
let vars = HashMap::new();
let template = r#"${[ say_hello(a='John', b='Kate') ]}"#;
let result = r#"say_hello: 2, Some(String("John")) Some(String("Kate"))"#;
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
struct CB {}
impl TemplateCallback for CB {
@@ -341,9 +326,7 @@ mod parse_and_render_tests {
let vars = HashMap::new();
let template = r#"${[ upper(foo='bar') ]}"#;
let result = r#""BAR""#;
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
struct CB {}
impl TemplateCallback for CB {
async fn run(
@@ -378,9 +361,7 @@ mod parse_and_render_tests {
vars.insert("foo".to_string(), "bar".to_string());
let template = r#"${[ upper(foo=b64'Zm9vICdiYXInIGJheg') ]}"#;
let result = r#""FOO 'BAR' BAZ""#;
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
struct CB {}
impl TemplateCallback for CB {
async fn run(
@@ -414,9 +395,7 @@ mod parse_and_render_tests {
vars.insert("foo".to_string(), "bar".to_string());
let template = r#"${[ upper(foo='${[ foo ]}') ]}"#;
let result = r#""BAR""#;
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
struct CB {}
impl TemplateCallback for CB {
@@ -452,9 +431,7 @@ mod parse_and_render_tests {
vars.insert("foo".to_string(), "bar".to_string());
let template = r#"${[ no_op(inner='${[ foo ]}') ]}"#;
let result = r#""bar""#;
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
struct CB {}
impl TemplateCallback for CB {
@@ -489,9 +466,7 @@ mod parse_and_render_tests {
let template = r#"${[ upper(foo=secret()) ]}"#;
let result = r#""ABC""#;
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
struct CB {}
impl TemplateCallback for CB {
async fn run(
@@ -523,9 +498,7 @@ mod parse_and_render_tests {
async fn render_fn_err() -> Result<()> {
let vars = HashMap::new();
let template = r#"hello ${[ error() ]}"#;
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
struct CB {}
impl TemplateCallback for CB {
@@ -591,9 +564,7 @@ mod render_json_value_raw_tests {
let v = json!("${[a]}");
let mut vars = HashMap::new();
vars.insert("a".to_string(), "aaa".to_string());
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
assert_eq!(render_json_value_raw(v, &vars, &EmptyCB {}, &opt).await?, json!("aaa"));
Ok(())
@@ -604,9 +575,7 @@ mod render_json_value_raw_tests {
let v = json!(["${[a]}", "${[a]}"]);
let mut vars = HashMap::new();
vars.insert("a".to_string(), "aaa".to_string());
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let result = render_json_value_raw(v, &vars, &EmptyCB {}, &opt).await?;
assert_eq!(result, json!(["aaa", "aaa"]));
@@ -619,9 +588,7 @@ mod render_json_value_raw_tests {
let v = json!({"${[a]}": "${[a]}"});
let mut vars = HashMap::new();
vars.insert("a".to_string(), "aaa".to_string());
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let result = render_json_value_raw(v, &vars, &EmptyCB {}, &opt).await?;
assert_eq!(result, json!({"aaa": "aaa"}));
@@ -641,9 +608,7 @@ mod render_json_value_raw_tests {
]);
let mut vars = HashMap::new();
vars.insert("a".to_string(), "aaa".to_string());
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let result = render_json_value_raw(v, &vars, &EmptyCB {}, &opt).await?;
assert_eq!(
@@ -664,9 +629,7 @@ mod render_json_value_raw_tests {
#[tokio::test]
async fn render_opt_return_empty() -> Result<()> {
let vars = HashMap::new();
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::ReturnEmpty,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::ReturnEmpty };
let result = parse_and_render("DNE: ${[hello]}", &vars, &EmptyCB {}, &opt).await?;
assert_eq!(result, "DNE: ".to_string());

View File

@@ -1,7 +1,7 @@
use crate::error::Result;
use crate::{escape, Parser};
use wasm_bindgen::prelude::wasm_bindgen;
use crate::{Parser, escape};
use wasm_bindgen::JsValue;
use wasm_bindgen::prelude::wasm_bindgen;
#[wasm_bindgen]
pub fn parse_template(template: &str) -> Result<JsValue> {

View File

@@ -128,9 +128,7 @@ pub(crate) async fn send<R: Runtime>(
&PluginContext::new(&window),
RenderPurpose::Send,
),
&RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
},
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await?;
@@ -164,10 +162,7 @@ pub(crate) async fn close<R: Runtime>(
let db = app_handle.db();
let connection = db.get_websocket_connection(connection_id)?;
db.upsert_websocket_connection(
&WebsocketConnection {
state: WebsocketConnectionState::Closing,
..connection
},
&WebsocketConnection { state: WebsocketConnectionState::Closing, ..connection },
&UpdateSource::from_window(&window),
)?
};
@@ -208,9 +203,7 @@ pub(crate) async fn connect<R: Runtime>(
&PluginContext::new(&window),
RenderPurpose::Send,
),
&RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
},
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await?;
@@ -223,7 +216,7 @@ pub(crate) async fn connect<R: Runtime>(
&UpdateSource::from_window(&window),
)?;
let (mut url, url_parameters) = apply_path_placeholders(&request.url, request.url_parameters);
let (mut url, url_parameters) = apply_path_placeholders(&request.url, &request.url_parameters);
if !url.starts_with("ws://") && !url.starts_with("wss://") {
url.insert_str(0, "ws://");
}
@@ -278,10 +271,7 @@ pub(crate) async fn connect<R: Runtime>(
.headers
.clone()
.into_iter()
.map(|h| HttpHeader {
name: h.name,
value: h.value,
})
.map(|h| HttpHeader { name: h.name, value: h.value })
.collect(),
};
let plugin_result = plugin_manager

View File

@@ -11,7 +11,7 @@ use crate::commands::{
};
use crate::manager::WebsocketManager;
use tauri::plugin::{Builder, TauriPlugin};
use tauri::{generate_handler, Manager, Runtime};
use tauri::{Manager, Runtime, generate_handler};
use tokio::sync::Mutex;
pub fn init<R: Runtime>() -> TauriPlugin<R> {

View File

@@ -23,10 +23,7 @@ pub struct WebsocketManager {
impl WebsocketManager {
pub fn new() -> Self {
WebsocketManager {
connections: Default::default(),
read_tasks: Default::default(),
}
WebsocketManager { connections: Default::default(), read_tasks: Default::default() }
}
pub async fn connect(

View File

@@ -70,12 +70,5 @@ pub async fn render_websocket_request<T: TemplateCallback>(
let message = parse_and_render(&r.message.clone(), vars, cb, opt).await?;
Ok(WebsocketRequest {
url,
url_parameters,
headers,
authentication,
message,
..r.to_owned()
})
Ok(WebsocketRequest { url, url_parameters, headers, authentication, message, ..r.to_owned() })
}

View File

@@ -128,7 +128,7 @@ function ExportDataDialogContent({
))}
</tbody>
</table>
<DetailsBanner color="secondary" open summary="Extra Settings">
<DetailsBanner color="secondary" defaultOpen summary="Extra Settings">
<Checkbox
checked={includePrivateEnvironments}
onChange={setIncludePrivateEnvironments}

View File

@@ -188,7 +188,10 @@ function HttpRequestCard({ request }: { request: HttpRequest }) {
<span>&bull;</span>
<HttpResponseDurationTag response={latestResponse} />
<span>&bull;</span>
<SizeTag contentLength={latestResponse.contentLength ?? 0} />
<SizeTag
contentLength={latestResponse.contentLength ?? 0}
contentLengthCompressed={latestResponse.contentLength}
/>
</HStack>
</button>
) : (

View File

@@ -34,11 +34,11 @@ export function HeadersEditor({
const validInheritedHeaders =
inheritedHeaders?.filter((pair) => pair.enabled && (pair.name || pair.value)) ?? [];
return (
<div className="@container w-full h-full grid grid-rows-[auto_minmax(0,1fr)]">
<div className="@container w-full h-full grid grid-rows-[auto_minmax(0,1fr)] gap-y-1.5">
{validInheritedHeaders.length > 0 ? (
<DetailsBanner
color="secondary"
className="text-sm mb-1.5"
className="text-sm"
summary={
<HStack>
Inherited <CountBadge count={validInheritedHeaders.length} />

View File

@@ -4,6 +4,7 @@ import type { ComponentType, CSSProperties } from 'react';
import { lazy, Suspense, useCallback, useMemo } from 'react';
import { useLocalStorage } from 'react-use';
import { useCancelHttpResponse } from '../hooks/useCancelHttpResponse';
import { useHttpResponseEvents } from '../hooks/useHttpResponseEvents';
import { usePinnedHttpResponse } from '../hooks/usePinnedHttpResponse';
import { useResponseViewMode } from '../hooks/useResponseViewMode';
import { getMimeTypeFromContentType } from '../lib/contentType';
@@ -23,6 +24,7 @@ import { TabContent, Tabs } from './core/Tabs/Tabs';
import { EmptyStateText } from './EmptyStateText';
import { ErrorBoundary } from './ErrorBoundary';
import { RecentHttpResponsesDropdown } from './RecentHttpResponsesDropdown';
import { ResponseEvents } from './ResponseEvents';
import { ResponseHeaders } from './ResponseHeaders';
import { ResponseInfo } from './ResponseInfo';
import { AudioViewer } from './responseViewers/AudioViewer';
@@ -46,6 +48,7 @@ interface Props {
const TAB_BODY = 'body';
const TAB_HEADERS = 'headers';
const TAB_INFO = 'info';
const TAB_EVENTS = 'events';
export function HttpResponsePane({ style, className, activeRequestId }: Props) {
const { activeResponse, setPinnedResponseId, responses } = usePinnedHttpResponse(activeRequestId);
@@ -57,6 +60,8 @@ export function HttpResponsePane({ style, className, activeRequestId }: Props) {
const contentType = getContentTypeFromHeaders(activeResponse?.headers ?? null);
const mimeType = contentType == null ? null : getMimeTypeFromContentType(contentType).essence;
const responseEvents = useHttpResponseEvents(activeResponse);
const tabs = useMemo<TabItem[]>(
() => [
{
@@ -76,16 +81,29 @@ export function HttpResponsePane({ style, className, activeRequestId }: Props) {
label: 'Headers',
rightSlot: (
<CountBadge
count={activeResponse?.headers.filter((h) => h.name && h.value).length ?? 0}
count2={activeResponse?.headers.length ?? 0}
count={activeResponse?.requestHeaders.length ?? 0}
/>
),
},
{
value: TAB_EVENTS,
label: 'Timeline',
rightSlot: <CountBadge count={responseEvents.data?.length ?? 0} />,
},
{
value: TAB_INFO,
label: 'Info',
},
],
[activeResponse?.headers, mimeType, setViewMode, viewMode],
[
activeResponse?.headers,
mimeType,
setViewMode,
viewMode,
activeResponse?.requestHeaders.length,
responseEvents.data?.length,
],
);
const activeTab = activeTabs?.[activeRequestId];
const setActiveTab = useCallback(
@@ -133,7 +151,10 @@ export function HttpResponsePane({ style, className, activeRequestId }: Props) {
<span>&bull;</span>
<HttpResponseDurationTag response={activeResponse} />
<span>&bull;</span>
<SizeTag contentLength={activeResponse.contentLength ?? 0} />
<SizeTag
contentLength={activeResponse.contentLength ?? 0}
contentLengthCompressed={activeResponse.contentLengthCompressed}
/>
<div className="ml-auto">
<RecentHttpResponsesDropdown
@@ -146,18 +167,20 @@ export function HttpResponsePane({ style, className, activeRequestId }: Props) {
)}
</HStack>
{activeResponse?.error ? (
<Banner color="danger" className="m-2">
{activeResponse.error}
</Banner>
) : (
<div className="overflow-hidden flex flex-col min-h-0">
{activeResponse?.error && (
<Banner color="danger" className="mx-3 mt-1 flex-shrink-0">
{activeResponse.error}
</Banner>
)}
{/* Show tabs if we have any data (headers, body, etc.) even if there's an error */}
<Tabs
key={activeRequestId} // Freshen tabs on request change
value={activeTab}
onChangeValue={setActiveTab}
tabs={tabs}
label="Response"
className="ml-3 mr-3 mb-3"
className="ml-3 mr-3 mb-3 min-h-0 flex-1"
tabListClassName="mt-0.5"
>
<TabContent value={TAB_BODY}>
@@ -210,8 +233,11 @@ export function HttpResponsePane({ style, className, activeRequestId }: Props) {
<TabContent value={TAB_INFO}>
<ResponseInfo response={activeResponse} />
</TabContent>
<TabContent value={TAB_EVENTS}>
<ResponseEvents response={activeResponse} />
</TabContent>
</Tabs>
)}
</div>
</div>
)}
</div>

View File

@@ -0,0 +1,341 @@
import type {
HttpResponse,
HttpResponseEvent,
HttpResponseEventData,
} from '@yaakapp-internal/models';
import classNames from 'classnames';
import { format } from 'date-fns';
import { Fragment, type ReactNode, useMemo, useState } from 'react';
import { useHttpResponseEvents } from '../hooks/useHttpResponseEvents';
import { AutoScroller } from './core/AutoScroller';
import { Banner } from './core/Banner';
import { HttpMethodTagRaw } from './core/HttpMethodTag';
import { HttpStatusTagRaw } from './core/HttpStatusTag';
import { Icon, type IconProps } from './core/Icon';
import { KeyValueRow, KeyValueRows } from './core/KeyValueRow';
import { Separator } from './core/Separator';
import { SplitLayout } from './core/SplitLayout';
interface Props {
response: HttpResponse;
}
export function ResponseEvents({ response }: Props) {
return (
<Fragment key={response.id}>
<ActualResponseEvents response={response} />
</Fragment>
);
}
/**
 * Fetches and renders the event timeline for a response: a scrolling list of
 * events on top and, when a row is selected, a details pane below.
 */
function ActualResponseEvents({ response }: Props) {
  // Index (not id) of the currently-selected row; null = nothing selected.
  const [activeEventIndex, setActiveEventIndex] = useState<number | null>(null);
  const { data: events, error, isLoading } = useHttpResponseEvents(response);

  // Resolve the selected index to an event object; stays null-safe while
  // events are (re)loading.
  const activeEvent = useMemo(
    () => (activeEventIndex == null ? null : events?.[activeEventIndex]),
    [activeEventIndex, events],
  );

  if (isLoading) {
    return <div className="p-3 text-text-subtlest italic">Loading events...</div>;
  }

  if (error) {
    return (
      <Banner color="danger" className="m-3">
        {String(error)}
      </Banner>
    );
  }

  if (!events || events.length === 0) {
    return <div className="p-3 text-text-subtlest italic">No events recorded</div>;
  }

  return (
    <SplitLayout
      layout="vertical"
      name="http_response_events"
      defaultRatio={0.5}
      minHeightPx={20}
      firstSlot={() => (
        <AutoScroller
          data={events}
          render={(event, i) => (
            <EventRow
              key={event.id}
              event={event}
              isActive={i === activeEventIndex}
              onClick={() => {
                // Clicking the active row toggles the selection off.
                if (i === activeEventIndex) setActiveEventIndex(null);
                else setActiveEventIndex(i);
              }}
            />
          )}
        />
      )}
      secondSlot={
        // Passing null collapses the bottom pane when no row is selected.
        activeEvent
          ? () => (
              <div className="grid grid-rows-[auto_minmax(0,1fr)]">
                <div className="pb-3 px-2">
                  <Separator />
                </div>
                <div className="mx-2 overflow-y-auto">
                  <EventDetails event={activeEvent} />
                </div>
              </div>
            )
          : null
      }
    />
  );
}
function EventRow({
onClick,
isActive,
event,
}: {
onClick: () => void;
isActive: boolean;
event: HttpResponseEvent;
}) {
const display = getEventDisplay(event.event);
const { icon, color, summary } = display;
return (
<div className="px-1">
<button
type="button"
onClick={onClick}
className={classNames(
'w-full grid grid-cols-[auto_minmax(0,1fr)_auto] gap-2 items-center text-left',
'px-1.5 h-xs font-mono cursor-default group focus:outline-none focus:text-text rounded',
isActive && '!bg-surface-active !text-text',
'text-text-subtle hover:text',
)}
>
<Icon color={color} icon={icon} size="sm" />
<div className="w-full truncate text-xs">{summary}</div>
<div className="text-xs opacity-50">{format(`${event.createdAt}Z`, 'HH:mm:ss.SSS')}</div>
</button>
</div>
);
}
/**
 * Render a byte count as a human-readable size: whole bytes below 1 KB,
 * otherwise KB/MB with one decimal place.
 */
function formatBytes(bytes: number): string {
  const KB = 1024;
  const MB = KB * KB;
  if (bytes >= MB) return `${(bytes / MB).toFixed(1)} MB`;
  if (bytes >= KB) return `${(bytes / KB).toFixed(1)} KB`;
  return `${bytes} B`;
}
/**
 * Details pane for a selected timeline event. Known event types get a
 * structured key/value layout; anything else falls back to the one-line
 * summary used in the list.
 */
function EventDetails({ event }: { event: HttpResponseEvent }) {
  const { label } = getEventDisplay(event.event);
  // createdAt presumably has no timezone suffix; "Z" forces UTC parsing —
  // TODO confirm against event storage.
  const timestamp = format(new Date(`${event.createdAt}Z`), 'HH:mm:ss.SSS');
  const e = event.event;

  // Headers — show name and value as rows
  if (e.type === 'header_up' || e.type === 'header_down') {
    return (
      <div className="flex flex-col gap-2 h-full">
        <DetailHeader
          title={e.type === 'header_down' ? 'Header Received' : 'Header Sent'}
          timestamp={timestamp}
        />
        <KeyValueRows>
          <KeyValueRow label="Header">{e.name}</KeyValueRow>
          <KeyValueRow label="Value">{e.value}</KeyValueRow>
        </KeyValueRows>
      </div>
    );
  }

  // Request URL - show method and path separately
  if (e.type === 'send_url') {
    return (
      <div className="flex flex-col gap-2">
        <DetailHeader title="Request" timestamp={timestamp} />
        <KeyValueRows>
          <KeyValueRow label="Method">
            <HttpMethodTagRaw forceColor method={e.method} />
          </KeyValueRow>
          <KeyValueRow label="Path">{e.path}</KeyValueRow>
        </KeyValueRows>
      </div>
    );
  }

  // Response status - show version and status separately
  if (e.type === 'receive_url') {
    return (
      <div className="flex flex-col gap-2">
        <DetailHeader title="Response" timestamp={timestamp} />
        <KeyValueRows>
          <KeyValueRow label="HTTP Version">{e.version}</KeyValueRow>
          <KeyValueRow label="Status">
            <HttpStatusTagRaw status={e.status} />
          </KeyValueRow>
        </KeyValueRows>
      </div>
    );
  }

  // Redirect - show status, URL, and behavior
  if (e.type === 'redirect') {
    return (
      <div className="flex flex-col gap-2">
        <DetailHeader title="Redirect" timestamp={timestamp} />
        <KeyValueRows>
          <KeyValueRow label="Status">
            <HttpStatusTagRaw status={e.status} />
          </KeyValueRow>
          <KeyValueRow label="Location">{e.url}</KeyValueRow>
          <KeyValueRow label="Behavior">
            {e.behavior === 'drop_body' ? 'Drop body, change to GET' : 'Preserve method and body'}
          </KeyValueRow>
        </KeyValueRows>
      </div>
    );
  }

  // Settings - show as key/value
  if (e.type === 'setting') {
    return (
      <div className="flex flex-col gap-2">
        <DetailHeader title="Apply Setting" timestamp={timestamp} />
        <KeyValueRows>
          <KeyValueRow label="Setting">{e.name}</KeyValueRow>
          <KeyValueRow label="Value">{e.value}</KeyValueRow>
        </KeyValueRows>
      </div>
    );
  }

  // Chunks - show formatted bytes
  if (e.type === 'chunk_sent' || e.type === 'chunk_received') {
    const direction = e.type === 'chunk_sent' ? 'Sent' : 'Received';
    return (
      <div className="flex flex-col gap-2">
        <DetailHeader title={`Data ${direction}`} timestamp={timestamp} />
        <div className="font-mono text-sm">{formatBytes(e.bytes)}</div>
      </div>
    );
  }

  // Default - use summary
  const { summary } = getEventDisplay(event.event);
  return (
    <div className="flex flex-col gap-1">
      <DetailHeader title={label} timestamp={timestamp} />
      <div className="font-mono text-sm">{summary}</div>
    </div>
  );
}
/** Heading row for the details pane: title on the left, event time on the right. */
function DetailHeader(props: { title: string; timestamp: string }) {
  const { title, timestamp } = props;
  return (
    <div className="flex items-center justify-between gap-2">
      <h3 className="font-semibold select-auto cursor-auto">{title}</h3>
      <span className="text-text-subtlest font-mono text-editor">{timestamp}</span>
    </div>
  );
}
type EventDisplay = {
  icon: IconProps['icon'];
  color: IconProps['color'];
  label: string;
  summary: ReactNode;
};

/**
 * Map a response event payload to its list-row presentation: icon, icon
 * color, short label, and a one-line summary. Unrecognized event types fall
 * back to a generic "Unknown" row.
 */
function getEventDisplay(event: HttpResponseEventData): EventDisplay {
  // Lifecycle markers
  if (event.type === 'start_request') {
    return { icon: 'info', color: 'secondary', label: 'Start', summary: 'Request started' };
  }
  if (event.type === 'end_request') {
    return { icon: 'info', color: 'secondary', label: 'End', summary: 'Request complete' };
  }

  // Informational
  if (event.type === 'setting') {
    return {
      icon: 'settings',
      color: 'secondary',
      label: 'Setting',
      summary: `${event.name} = ${event.value}`,
    };
  }
  if (event.type === 'info') {
    return { icon: 'info', color: 'secondary', label: 'Info', summary: event.message };
  }
  if (event.type === 'redirect') {
    return {
      icon: 'arrow_big_right_dash',
      color: 'warning',
      label: 'Redirect',
      summary: `Redirecting ${event.status} ${event.url}${event.behavior === 'drop_body' ? ' (drop body)' : ''}`,
    };
  }

  // Request/response lines (up = sent, down = received)
  if (event.type === 'send_url') {
    return {
      icon: 'arrow_big_up_dash',
      color: 'primary',
      label: 'Request',
      summary: `${event.method} ${event.path}`,
    };
  }
  if (event.type === 'receive_url') {
    return {
      icon: 'arrow_big_down_dash',
      color: 'info',
      label: 'Response',
      summary: `${event.version} ${event.status}`,
    };
  }
  if (event.type === 'header_up') {
    return {
      icon: 'arrow_big_up_dash',
      color: 'primary',
      label: 'Header',
      summary: `${event.name}: ${event.value}`,
    };
  }
  if (event.type === 'header_down') {
    return {
      icon: 'arrow_big_down_dash',
      color: 'info',
      label: 'Header',
      summary: `${event.name}: ${event.value}`,
    };
  }

  // Body transfer
  if (event.type === 'chunk_sent') {
    return {
      icon: 'info',
      color: 'secondary',
      label: 'Chunk',
      summary: `${event.bytes} bytes sent`,
    };
  }
  if (event.type === 'chunk_received') {
    return {
      icon: 'info',
      color: 'secondary',
      label: 'Chunk',
      summary: `${event.bytes} bytes received`,
    };
  }

  // Fallback for event types this UI doesn't know about yet.
  return { icon: 'info', color: 'secondary', label: 'Unknown', summary: 'Unknown event' };
}

View File

@@ -1,5 +1,7 @@
import type { HttpResponse } from '@yaakapp-internal/models';
import { useMemo } from 'react';
import { CountBadge } from './core/CountBadge';
import { DetailsBanner } from './core/DetailsBanner';
import { KeyValueRow, KeyValueRows } from './core/KeyValueRow';
interface Props {
@@ -7,20 +9,69 @@ interface Props {
}
/**
 * Renders the headers tab of the response pane: a collapsible "Request"
 * banner (headers that were sent) and a collapsible, default-open
 * "Response" banner (headers that were received), each with a count badge
 * and an empty-state placeholder.
 *
 * NOTE(review): this span was diff residue — removed lines from the old
 * single-list implementation (`sortedHeaders`) were interleaved with the
 * new two-banner implementation, leaving an invalid body. Reconstructed
 * here as the new (post-change) side of the diff.
 */
export function ResponseHeaders({ response }: Props) {
  // Sort case-insensitively so e.g. "content-type" and "Content-Type"
  // group together regardless of how the server cased them.
  const responseHeaders = useMemo(
    () =>
      [...response.headers].sort((a, b) =>
        a.name.toLocaleLowerCase().localeCompare(b.name.toLocaleLowerCase()),
      ),
    [response.headers],
  );

  const requestHeaders = useMemo(
    () =>
      [...response.requestHeaders].sort((a, b) =>
        a.name.toLocaleLowerCase().localeCompare(b.name.toLocaleLowerCase()),
      ),
    [response.requestHeaders],
  );

  return (
    <div className="overflow-auto h-full pb-4 gap-y-3 flex flex-col pr-0.5">
      <DetailsBanner
        storageKey={`${response.requestId}.request_headers`}
        summary={
          <h2 className="flex items-center">
            Request <CountBadge showZero count={requestHeaders.length} />
          </h2>
        }
      >
        {requestHeaders.length === 0 ? (
          <NoHeaders />
        ) : (
          <KeyValueRows>
            {requestHeaders.map((h, i) => (
              // biome-ignore lint/suspicious/noArrayIndexKey: none
              <KeyValueRow labelColor="primary" key={i} label={h.name}>
                {h.value}
              </KeyValueRow>
            ))}
          </KeyValueRows>
        )}
      </DetailsBanner>
      <DetailsBanner
        defaultOpen
        storageKey={`${response.requestId}.response_headers`}
        summary={
          <h2 className="flex items-center">
            Response <CountBadge showZero count={responseHeaders.length} />
          </h2>
        }
      >
        {responseHeaders.length === 0 ? (
          <NoHeaders />
        ) : (
          <KeyValueRows>
            {responseHeaders.map((h, i) => (
              // biome-ignore lint/suspicious/noArrayIndexKey: none
              <KeyValueRow labelColor="primary" key={i} label={h.name}>
                {h.value}
              </KeyValueRow>
            ))}
          </KeyValueRows>
        )}
      </DetailsBanner>
    </div>
  );
}
// Empty-state placeholder shown inside a headers banner with no entries.
function NoHeaders() {
  const message = 'No Headers';
  return <span className="text-text-subtlest text-sm italic">{message}</span>;
}

Some files were not shown because too many files have changed in this diff Show More