Cargo fmt

Gregory Schier
2026-05-08 12:03:34 -07:00
parent 19ed8c2f0d
commit b0b282535f
25 changed files with 199 additions and 252 deletions

View File

@@ -65,8 +65,7 @@ impl<'a> DbContext<'a> {
.cond_where(Expr::col(col).eq(value))
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.prepare(sql.as_str()).expect("Failed to prepare query");
stmt.query_row(&*params.as_params(), M::from_row)
.ok()
stmt.query_row(&*params.as_params(), M::from_row).ok()
}
pub fn find_all<M>(&self) -> Result<Vec<M>>
@@ -126,9 +125,8 @@ impl<'a> DbContext<'a> {
let other_values = model.clone().insert_values(source)?;
let mut column_vec = vec![id_iden.clone()];
let mut value_vec = vec![
if id_val.is_empty() { M::generate_id().into() } else { id_val.into() },
];
let mut value_vec =
vec![if id_val.is_empty() { M::generate_id().into() } else { id_val.into() }];
for (col, val) in other_values {
value_vec.push(val.into());
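For context, the pattern these DbContext hunks reformat is: sea-query builds the SQL string plus bound values, and sea-query-rusqlite's `as_params()` adapts those values for rusqlite. Below is a minimal sketch of the same shape, with illustrative table and column names that are not from this diff.

```rust
use sea_query::{Expr, Iden, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;

// Hypothetical table, used only to illustrate the builder pattern above.
#[derive(Iden)]
enum Workspace {
    Table,
    Id,
    Name,
}

fn find_name_by_id(conn: &rusqlite::Connection, id: &str) -> Option<String> {
    // Build the SQL and its bound parameters together.
    let (sql, params) = Query::select()
        .column(Workspace::Name)
        .from(Workspace::Table)
        .cond_where(Expr::col(Workspace::Id).eq(id))
        .build_rusqlite(SqliteQueryBuilder);
    // Same prepare / query_row / ok() shape as the hunk above.
    let mut stmt = conn.prepare(sql.as_str()).ok()?;
    stmt.query_row(&*params.as_params(), |row| row.get(0)).ok()
}

fn main() -> rusqlite::Result<()> {
    let conn = rusqlite::Connection::open_in_memory()?;
    conn.execute_batch(
        "CREATE TABLE workspace (id TEXT PRIMARY KEY, name TEXT); \
         INSERT INTO workspace VALUES ('wk_1', 'Default');",
    )?;
    assert_eq!(find_name_by_id(&conn, "wk_1"), Some("Default".to_string()));
    Ok(())
}
```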

View File

@@ -55,8 +55,7 @@ pub fn run_migrations(pool: &Pool<SqliteConnectionManager>, dir: &Dir<'_>) -> Re
continue;
}
let sql =
entry.as_file().unwrap().contents_utf8().expect("Failed to read migration file");
let sql = entry.as_file().unwrap().contents_utf8().expect("Failed to read migration file");
info!("Applying migration: {}", filename);
let conn = pool.get()?;

View File

@@ -10,10 +10,10 @@ pub fn generate_id() -> String {
pub fn generate_id_of_length(n: usize) -> String {
let alphabet: [char; 57] = [
'2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i',
'j', 'k', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A',
'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T',
'U', 'V', 'W', 'X', 'Y', 'Z',
'2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j',
'k', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C',
'D', 'E', 'F', 'G', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W',
'X', 'Y', 'Z',
];
nanoid!(n, &alphabet)
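The only change in this hunk is rustfmt re-wrapping the 57-character alphabet, which omits visually ambiguous characters (0, 1, l, I, O). A small self-contained sketch of the same call shape, assuming the `nanoid` crate used above:

```rust
use nanoid::nanoid;

fn main() {
    // Same 57-character alphabet as above: digits 2-9 plus letters, minus 0/1/l/I/O.
    let alphabet: Vec<char> =
        "23456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ".chars().collect();
    assert_eq!(alphabet.len(), 57);

    // Equivalent to generate_id_of_length(10).
    let id = nanoid!(10, &alphabet);
    assert_eq!(id.chars().count(), 10);
    assert!(id.chars().all(|c| alphabet.contains(&c)));
    println!("{id}");
}
```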

View File

@@ -3,7 +3,8 @@ use std::collections::HashMap;
use std::sync::mpsc;
/// Type-erased handler function: takes context + JSON payload, returns JSON or error.
type HandlerFn<Ctx> = Box<dyn Fn(&Ctx, serde_json::Value) -> Result<serde_json::Value, RpcError> + Send + Sync>;
type HandlerFn<Ctx> =
Box<dyn Fn(&Ctx, serde_json::Value) -> Result<serde_json::Value, RpcError> + Send + Sync>;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RpcError {
@@ -57,9 +58,7 @@ pub struct RpcRouter<Ctx> {
impl<Ctx> RpcRouter<Ctx> {
pub fn new() -> Self {
Self {
handlers: HashMap::new(),
}
Self { handlers: HashMap::new() }
}
/// Register a handler for a command name.
@@ -77,23 +76,15 @@ impl<Ctx> RpcRouter<Ctx> {
) -> Result<serde_json::Value, RpcError> {
match self.handlers.get(cmd) {
Some(handler) => handler(ctx, payload),
None => Err(RpcError {
message: format!("unknown command: {cmd}"),
}),
None => Err(RpcError { message: format!("unknown command: {cmd}") }),
}
}
/// Handle a full `RpcRequest`, returning an `RpcResponse`.
pub fn handle(&self, req: RpcRequest, ctx: &Ctx) -> RpcResponse {
match self.dispatch(&req.cmd, req.payload, ctx) {
Ok(payload) => RpcResponse::Success {
id: req.id,
payload,
},
Err(e) => RpcResponse::Error {
id: req.id,
error: e.message,
},
Ok(payload) => RpcResponse::Success { id: req.id, payload },
Err(e) => RpcResponse::Error { id: req.id, error: e.message },
}
}
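Since this file's hunks mostly condense the router's struct literals, here is a minimal self-contained sketch of the same dispatch pattern for reference. `new` and `dispatch` mirror the diffed code; the `register` helper is an assumption, since only its doc comment appears above.

```rust
use std::collections::HashMap;

struct RpcError {
    message: String,
}

// Type-erased handler, as in the hunk above.
type HandlerFn<Ctx> =
    Box<dyn Fn(&Ctx, serde_json::Value) -> Result<serde_json::Value, RpcError> + Send + Sync>;

struct RpcRouter<Ctx> {
    handlers: HashMap<String, HandlerFn<Ctx>>,
}

impl<Ctx> RpcRouter<Ctx> {
    fn new() -> Self {
        Self { handlers: HashMap::new() }
    }

    // Assumed registration helper: boxes the closure and stores it under `cmd`.
    fn register<F>(&mut self, cmd: &str, f: F)
    where
        F: Fn(&Ctx, serde_json::Value) -> Result<serde_json::Value, RpcError>
            + Send
            + Sync
            + 'static,
    {
        self.handlers.insert(cmd.to_string(), Box::new(f));
    }

    fn dispatch(
        &self,
        cmd: &str,
        payload: serde_json::Value,
        ctx: &Ctx,
    ) -> Result<serde_json::Value, RpcError> {
        match self.handlers.get(cmd) {
            Some(handler) => handler(ctx, payload),
            None => Err(RpcError { message: format!("unknown command: {cmd}") }),
        }
    }
}

fn main() {
    let mut router: RpcRouter<String> = RpcRouter::new();
    router.register("echo", |ctx, payload| Ok(serde_json::json!(format!("{ctx}: {payload}"))));

    let ctx = "demo".to_string();
    match router.dispatch("echo", serde_json::json!("hi"), &ctx) {
        Ok(v) => println!("ok: {v}"),
        Err(e) => println!("err: {}", e.message),
    }
}
```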

View File

@@ -304,7 +304,10 @@ async fn build_binary_body(
}))
}
fn build_text_body(body: &BTreeMap<String, serde_json::Value>, body_type: &str) -> Option<SendableBodyWithMeta> {
fn build_text_body(
body: &BTreeMap<String, serde_json::Value>,
body_type: &str,
) -> Option<SendableBodyWithMeta> {
let text = get_str_map(body, "text");
if text.is_empty() {
return None;

View File

@@ -16,8 +16,8 @@ use std::collections::HashMap;
use std::fmt::{Debug, Display};
use std::str::FromStr;
use ts_rs::TS;
use yaak_database::{Result as DbResult, UpdateSource};
pub use yaak_database::{UpsertModelInfo, upsert_date};
use yaak_database::{UpdateSource, Result as DbResult};
#[macro_export]
macro_rules! impl_model {
@@ -2526,4 +2526,3 @@ impl AnyModel {
}
}
}

View File

@@ -1,5 +1,5 @@
use crate::connection_or_tx::ConnectionOrTx;
use crate::client_db::ClientDb;
use crate::connection_or_tx::ConnectionOrTx;
use crate::error::Result;
use crate::models::{
Environment, EnvironmentIden, Folder, FolderIden, GrpcRequest, GrpcRequestIden, HttpRequest,

View File

@@ -16,7 +16,10 @@ impl<'a> ClientDb<'a> {
.add(Expr::col(PluginKeyValueIden::Key).eq(key)),
)
.build_rusqlite(SqliteQueryBuilder);
self.conn().resolve().query_row(sql.as_str(), &*params.as_params(), |row| row.try_into()).ok()
self.conn()
.resolve()
.query_row(sql.as_str(), &*params.as_params(), |row| row.try_into())
.ok()
}
pub fn set_plugin_key_value(

View File

@@ -10,7 +10,9 @@ use std::collections::BTreeMap;
use ts_rs::TS;
use yaak_core::WorkspaceContext;
pub use yaak_database::{ModelChangeEvent, generate_id, generate_id_of_length, generate_prefixed_id};
pub use yaak_database::{
ModelChangeEvent, generate_id, generate_id_of_length, generate_prefixed_id,
};
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]

View File

@@ -79,10 +79,9 @@ where
let len = data.len();
self.bytes_count += len as u64;
self.chunks.push(data.clone());
let _ = self.event_tx.send(ProxyEvent::ResponseBodyChunk {
id: self.request_id,
bytes: len,
});
let _ = self
.event_tx
.send(ProxyEvent::ResponseBodyChunk { id: self.request_id, bytes: len });
}
Poll::Ready(Some(Ok(frame)))
}

View File

@@ -18,23 +18,14 @@ impl CertificateAuthority {
params.is_ca = IsCa::Ca(BasicConstraints::Unconstrained);
params.key_usages.push(KeyUsagePurpose::KeyCertSign);
params.key_usages.push(KeyUsagePurpose::CrlSign);
params
.distinguished_name
.push(rcgen::DnType::CommonName, "Debug Proxy CA");
params
.distinguished_name
.push(rcgen::DnType::OrganizationName, "Debug Proxy");
params.distinguished_name.push(rcgen::DnType::CommonName, "Debug Proxy CA");
params.distinguished_name.push(rcgen::DnType::OrganizationName, "Debug Proxy");
let key = KeyPair::generate()?;
let ca_cert = params.self_signed(&key)?;
let ca_cert_der = ca_cert.der().clone();
Ok(Self {
ca_cert,
ca_cert_der,
ca_key: key,
cache: Mutex::new(HashMap::new()),
})
Ok(Self { ca_cert, ca_cert_der, ca_key: key, cache: Mutex::new(HashMap::new()) })
}
pub fn ca_pem(&self) -> String {
@@ -53,9 +44,7 @@ impl CertificateAuthority {
}
let mut params = CertificateParams::new(vec![domain.to_string()])?;
params
.distinguished_name
.push(rcgen::DnType::CommonName, domain);
params.distinguished_name.push(rcgen::DnType::CommonName, domain);
let leaf_key = KeyPair::generate()?;
let leaf_cert = params.signed_by(&leaf_key, &self.ca_cert, &self.ca_key)?;
@@ -63,20 +52,18 @@ impl CertificateAuthority {
let cert_der = leaf_cert.der().clone();
let key_der = leaf_key.serialize_der();
let mut config = ServerConfig::builder_with_provider(Arc::new(rustls::crypto::ring::default_provider()))
.with_safe_default_protocol_versions()?
.with_no_client_auth()
.with_single_cert(
vec![cert_der, self.ca_cert_der.clone()],
PrivateKeyDer::Pkcs8(PrivatePkcs8KeyDer::from(key_der)),
)?;
let mut config =
ServerConfig::builder_with_provider(Arc::new(rustls::crypto::ring::default_provider()))
.with_safe_default_protocol_versions()?
.with_no_client_auth()
.with_single_cert(
vec![cert_der, self.ca_cert_der.clone()],
PrivateKeyDer::Pkcs8(PrivatePkcs8KeyDer::from(key_der)),
)?;
config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
let config = Arc::new(config);
self.cache
.lock()
.unwrap()
.insert(domain.to_string(), config.clone());
self.cache.lock().unwrap().insert(domain.to_string(), config.clone());
Ok(config)
}
}
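For orientation, a hedged sketch of how this certificate authority would be consumed. The constructor is not visible in these hunks, so it is not shown; `server_config` and `ca_pem` are taken from the diff, and the error handling here is simplified.

```rust
use std::sync::Arc;

// Illustrative only; assumes the CertificateAuthority type from the hunks above.
fn mint_tls_config(ca: &CertificateAuthority, domain: &str) {
    // Leaf certs are generated on demand, signed by the in-memory CA, and cached per
    // domain, so repeated CONNECTs to the same host reuse the same rustls ServerConfig.
    if let Ok(config) = ca.server_config(domain) {
        let config: Arc<rustls::ServerConfig> = config;
        // ALPN advertises h2 and http/1.1, matching the config built above.
        assert!(config.alpn_protocols.contains(&b"h2".to_vec()));
    }
    // The CA certificate PEM can be exported so clients can trust the proxy's leaf certs.
    let _ca_pem: String = ca.ca_pem();
}
```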

View File

@@ -1,5 +1,5 @@
use std::sync::mpsc as std_mpsc;
use std::sync::Arc;
use std::sync::mpsc as std_mpsc;
use hyper::server::conn::http1;
use hyper::service::service_fn;

View File

@@ -4,9 +4,9 @@ mod connection;
mod request;
use std::net::SocketAddr;
use std::sync::Arc;
use std::sync::atomic::AtomicU64;
use std::sync::mpsc as std_mpsc;
use std::sync::Arc;
use cert::CertificateAuthority;
use tokio::net::TcpListener;
@@ -27,7 +27,11 @@ pub enum ProxyEvent {
http_version: String,
},
/// A request header sent to the upstream server.
RequestHeader { id: u64, name: String, value: String },
RequestHeader {
id: u64,
name: String,
value: String,
},
/// The full request body (buffered before forwarding).
RequestBody { id: u64, body: Vec<u8> },
/// Response headers received from upstream.
@@ -38,7 +42,11 @@ pub enum ProxyEvent {
elapsed_ms: u64,
},
/// A response header received from the upstream server.
ResponseHeader { id: u64, name: String, value: String },
ResponseHeader {
id: u64,
name: String,
value: String,
},
/// A chunk of the response body was received (emitted per-frame).
ResponseBodyChunk { id: u64, bytes: usize },
/// The response body stream has completed.
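These mod.rs hunks only reformat enum variants, but as a reading aid, here is a hedged sketch of draining the proxy's events from the std mpsc channel used throughout this module. The receiver handle is assumed, and the match arms are limited to variants visible in the diff.

```rust
use std::sync::mpsc;

// Illustrative consumer; assumes a Receiver<ProxyEvent> handed out when the proxy starts.
fn log_events(rx: mpsc::Receiver<ProxyEvent>) {
    for event in rx {
        match event {
            ProxyEvent::RequestStart { id, method, url, http_version } => {
                println!("[{id}] {method} {url} ({http_version})");
            }
            ProxyEvent::RequestHeader { id, name, value } => println!("[{id}] > {name}: {value}"),
            ProxyEvent::ResponseHeader { id, name, value } => println!("[{id}] < {name}: {value}"),
            ProxyEvent::ResponseBodyChunk { id, bytes } => println!("[{id}] {bytes} body bytes"),
            // Remaining variants (request/response bodies, completion, errors) omitted here.
            _ => {}
        }
    }
}
```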

View File

@@ -63,10 +63,7 @@ fn emit_request_events(
});
}
if let Some(body) = body {
let _ = tx.send(ProxyEvent::RequestBody {
id,
body: body.clone(),
});
let _ = tx.send(ProxyEvent::RequestBody { id, body: body.clone() });
}
}
@@ -123,22 +120,13 @@ async fn handle_http(
let http_version = version_str(req.version());
let start = Instant::now();
let _ = event_tx.send(ProxyEvent::RequestStart {
id,
method,
url: uri.clone(),
http_version,
});
let _ = event_tx.send(ProxyEvent::RequestStart { id, method, url: uri.clone(), http_version });
let client: Client<_, Full<Bytes>> = Client::builder(TokioExecutor::new()).build_http();
let (parts, body) = req.into_parts();
let body_bytes = body.collect().await?.to_bytes();
let request_body = if body_bytes.is_empty() {
None
} else {
Some(body_bytes.to_vec())
};
let request_body = if body_bytes.is_empty() { None } else { Some(body_bytes.to_vec()) };
emit_request_events(&event_tx, id, &parts.headers, &request_body);
let outgoing_req = Request::from_parts(parts, Full::new(body_bytes));
@@ -148,16 +136,10 @@ async fn handle_http(
emit_response_events(&event_tx, id, &resp, &start);
let (parts, body) = resp.into_parts();
Ok(Response::from_parts(
parts,
measured_incoming(body, id, start, event_tx),
))
Ok(Response::from_parts(parts, measured_incoming(body, id, start, event_tx)))
}
Err(e) => {
let _ = event_tx.send(ProxyEvent::Error {
id,
error: e.to_string(),
});
let _ = event_tx.send(ProxyEvent::Error { id, error: e.to_string() });
Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>)
}
}
@@ -168,11 +150,7 @@ async fn handle_connect(
event_tx: std_mpsc::Sender<ProxyEvent>,
ca: Arc<CertificateAuthority>,
) -> Result<Response<BoxBody>, Box<dyn std::error::Error + Send + Sync>> {
let authority = req
.uri()
.authority()
.map(|a| a.to_string())
.unwrap_or_default();
let authority = req.uri().authority().map(|a| a.to_string()).unwrap_or_default();
let (host, port) = parse_host_port(&authority);
let server_config = ca.server_config(&host)?;
@@ -189,10 +167,7 @@ async fn handle_connect(
}
};
let tls_stream = match acceptor
.accept(hyper_util::rt::TokioIo::new(upgraded))
.await
{
let tls_stream = match acceptor.accept(hyper_util::rt::TokioIo::new(upgraded)).await {
Ok(s) => s,
Err(e) => {
eprintln!("TLS accept failed for {host}: {e}");
@@ -203,10 +178,7 @@ async fn handle_connect(
let tx = event_tx.clone();
let host_for_requests = host.clone();
let mut builder = auto::Builder::new(TokioExecutor::new());
builder
.http1()
.preserve_header_case(true)
.title_case_headers(true);
builder.http1().preserve_header_case(true).title_case_headers(true);
if let Err(e) = builder
.serve_connection_with_upgrades(
hyper_util::rt::TokioIo::new(tls_stream),
@@ -271,20 +243,12 @@ async fn forward_https(
let id = REQUEST_ID.fetch_add(1, Ordering::Relaxed);
let method = req.method().to_string();
let http_version = version_str(req.version());
let path = req
.uri()
.path_and_query()
.map(|pq| pq.to_string())
.unwrap_or_else(|| "/".into());
let path = req.uri().path_and_query().map(|pq| pq.to_string()).unwrap_or_else(|| "/".into());
let uri_str = format!("https://{host}{path}");
let start = Instant::now();
let _ = event_tx.send(ProxyEvent::RequestStart {
id,
method,
url: uri_str.clone(),
http_version,
});
let _ =
event_tx.send(ProxyEvent::RequestStart { id, method, url: uri_str.clone(), http_version });
// Connect to upstream with TLS
let tcp_stream = TcpStream::connect(target_addr).await?;
@@ -305,18 +269,13 @@ async fn forward_https(
let server_name = ServerName::try_from(host.to_string())?;
let tls_stream = connector.connect(server_name, tcp_stream).await?;
let negotiated_h2 = tls_stream
.get_ref()
.1
.alpn_protocol()
.map_or(false, |p| p == b"h2");
let negotiated_h2 = tls_stream.get_ref().1.alpn_protocol().map_or(false, |p| p == b"h2");
let io = hyper_util::rt::TokioIo::new(tls_stream);
let mut sender = if negotiated_h2 {
let (sender, conn) = hyper::client::conn::http2::Builder::new(TokioExecutor::new())
.handshake(io)
.await?;
let (sender, conn) =
hyper::client::conn::http2::Builder::new(TokioExecutor::new()).handshake(io).await?;
tokio::spawn(async move {
if let Err(e) = conn.await {
eprintln!("Upstream h2 connection error: {e}");
@@ -340,11 +299,7 @@ async fn forward_https(
// Capture request metadata
let (mut parts, body) = req.into_parts();
let body_bytes = body.collect().await?.to_bytes();
let request_body = if body_bytes.is_empty() {
None
} else {
Some(body_bytes.to_vec())
};
let request_body = if body_bytes.is_empty() { None } else { Some(body_bytes.to_vec()) };
emit_request_events(&event_tx, id, &parts.headers, &request_body);
if negotiated_h2 {
@@ -365,16 +320,10 @@ async fn forward_https(
emit_response_events(&event_tx, id, &resp, &start);
let (parts, body) = resp.into_parts();
Ok(Response::from_parts(
parts,
measured_incoming(body, id, start, event_tx),
))
Ok(Response::from_parts(parts, measured_incoming(body, id, start, event_tx)))
}
Err(e) => {
let _ = event_tx.send(ProxyEvent::Error {
id,
error: e.to_string(),
});
let _ = event_tx.send(ProxyEvent::Error { id, error: e.to_string() });
Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>)
}
}

View File

@@ -1,9 +1,9 @@
pub mod error;
pub mod escape;
pub mod format_json;
pub mod strip_json_comments;
pub mod parser;
pub mod renderer;
pub mod strip_json_comments;
pub mod wasm;
pub use parser::*;

View File

@@ -113,11 +113,8 @@ pub fn strip_json_comments(text: &str) -> String {
}
// Remove lines that are now empty (were comment-only lines)
let result = result
.lines()
.filter(|line| !line.trim().is_empty())
.collect::<Vec<&str>>()
.join("\n");
let result =
result.lines().filter(|line| !line.trim().is_empty()).collect::<Vec<&str>>().join("\n");
// Remove trailing commas before } or ]
strip_trailing_commas(&result)
@@ -192,10 +189,12 @@ mod tests {
#[test]
fn test_trailing_line_comment() {
assert_eq!(
strip_json_comments(r#"{
strip_json_comments(
r#"{
"foo": "bar", // this is a comment
"baz": 123
}"#),
}"#
),
r#"{
"foo": "bar",
"baz": 123
@@ -206,10 +205,12 @@ mod tests {
#[test]
fn test_whole_line_comment() {
assert_eq!(
strip_json_comments(r#"{
strip_json_comments(
r#"{
// this is a comment
"foo": "bar"
}"#),
}"#
),
r#"{
"foo": "bar"
}"#
@@ -219,9 +220,11 @@ mod tests {
#[test]
fn test_inline_block_comment() {
assert_eq!(
strip_json_comments(r#"{
strip_json_comments(
r#"{
"foo": /* a comment */ "bar"
}"#),
}"#
),
r#"{
"foo": "bar"
}"#
@@ -231,10 +234,12 @@ mod tests {
#[test]
fn test_whole_line_block_comment() {
assert_eq!(
strip_json_comments(r#"{
strip_json_comments(
r#"{
/* a comment */
"foo": "bar"
}"#),
}"#
),
r#"{
"foo": "bar"
}"#
@@ -244,12 +249,14 @@ mod tests {
#[test]
fn test_multiline_block_comment() {
assert_eq!(
strip_json_comments(r#"{
strip_json_comments(
r#"{
/**
* Hello World!
*/
"foo": "bar"
}"#),
}"#
),
r#"{
"foo": "bar"
}"#
@@ -276,12 +283,14 @@ mod tests {
#[test]
fn test_multiple_comments() {
assert_eq!(
strip_json_comments(r#"{
strip_json_comments(
r#"{
// first comment
"foo": "bar", // trailing
/* block */
"baz": 123
}"#),
}"#
),
r#"{
"foo": "bar",
"baz": 123
@@ -292,10 +301,12 @@ mod tests {
#[test]
fn test_trailing_comma_after_comment_removed() {
assert_eq!(
strip_json_comments(r#"{
strip_json_comments(
r#"{
"a": "aaa",
// "b": "bbb"
}"#),
}"#
),
r#"{
"a": "aaa"
}"#
@@ -304,10 +315,7 @@ mod tests {
#[test]
fn test_trailing_comma_in_array() {
assert_eq!(
strip_json_comments(r#"[1, 2, /* 3 */]"#),
r#"[1, 2]"#
);
assert_eq!(strip_json_comments(r#"[1, 2, /* 3 */]"#), r#"[1, 2]"#);
}
#[test]

View File

@@ -2,7 +2,9 @@ use log::info;
use serde_json::Value;
use std::collections::BTreeMap;
use yaak_http::path_placeholders::apply_path_placeholders;
use yaak_models::models::{Environment, GrpcRequest, HttpRequest, HttpRequestHeader, HttpUrlParameter};
use yaak_models::models::{
Environment, GrpcRequest, HttpRequest, HttpRequestHeader, HttpUrlParameter,
};
use yaak_models::render::make_vars_hashmap;
use yaak_templates::{RenderOptions, TemplateCallback, parse_and_render, render_json_value_raw};