Split codebase (#455)

This commit is contained in:
Gregory Schier
2026-05-07 15:50:10 -07:00
committed by GitHub
parent d2dc719cc6
commit 10559c8f4f
742 changed files with 7686 additions and 3249 deletions

View File

@@ -0,0 +1,20 @@
[package]
name = "yaak-database"
version = "0.1.0"
edition = "2024"
publish = false
[dependencies]
chrono = { version = "0.4.38", features = ["serde"] }
include_dir = "0.7"
log = { workspace = true }
nanoid = "0.4.0"
r2d2 = "0.8.10"
r2d2_sqlite = { version = "0.25.0" }
rusqlite = { version = "0.32.1", features = ["bundled", "chrono"] }
sea-query = { version = "0.32.1", features = ["with-chrono", "attr"] }
sea-query-rusqlite = { version = "0.7.0", features = ["with-chrono"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
thiserror = { workspace = true }
ts-rs = { workspace = true }

View File

@@ -0,0 +1,25 @@
use r2d2::PooledConnection;
use r2d2_sqlite::SqliteConnectionManager;
use rusqlite::{Connection, Statement, ToSql, Transaction};
/// Either a pooled SQLite connection or a borrowed in-progress transaction,
/// so query helpers can run against whichever the caller currently holds.
pub enum ConnectionOrTx<'a> {
    /// An owned connection checked out of the r2d2 pool.
    Connection(PooledConnection<SqliteConnectionManager>),
    /// A borrowed, already-open transaction.
    Transaction(&'a Transaction<'a>),
}

impl<'a> ConnectionOrTx<'a> {
    /// Borrow the underlying `rusqlite::Connection`, whichever variant this is.
    pub fn resolve(&self) -> &Connection {
        match self {
            Self::Connection(conn) => conn,
            Self::Transaction(tx) => tx,
        }
    }

    /// Prepare a statement on the underlying connection.
    pub fn prepare(&self, sql: &str) -> rusqlite::Result<Statement<'_>> {
        self.resolve().prepare(sql)
    }

    /// Execute a statement with positional parameters, returning the number
    /// of rows affected.
    pub fn execute(&self, sql: &str, params: &[&dyn ToSql]) -> rusqlite::Result<usize> {
        self.resolve().execute(sql, params)
    }
}

View File

@@ -1,33 +1,37 @@
use crate::connection_or_tx::ConnectionOrTx;
use crate::error::Error::ModelNotFound;
use crate::error::Result;
use crate::models::{AnyModel, UpsertModelInfo};
use crate::util::{ModelChangeEvent, ModelPayload, UpdateSource};
use rusqlite::{OptionalExtension, params};
use crate::traits::UpsertModelInfo;
use crate::update_source::UpdateSource;
use sea_query::{
Asterisk, Expr, Func, IntoColumnRef, IntoIden, IntoTableRef, OnConflict, Query, SimpleExpr,
Asterisk, Expr, Func, IntoColumnRef, IntoIden, OnConflict, Query, SimpleExpr,
SqliteQueryBuilder,
};
use sea_query_rusqlite::RusqliteBinder;
use std::fmt::Debug;
use std::sync::mpsc;
pub struct DbContext<'a> {
pub(crate) _events_tx: mpsc::Sender<ModelPayload>,
pub(crate) conn: ConnectionOrTx<'a>,
conn: ConnectionOrTx<'a>,
}
impl<'a> DbContext<'a> {
pub(crate) fn find_one<'s, M>(
pub fn new(conn: ConnectionOrTx<'a>) -> Self {
Self { conn }
}
pub fn conn(&self) -> &ConnectionOrTx<'a> {
&self.conn
}
pub fn find_one<M>(
&self,
col: impl IntoColumnRef + IntoIden + Clone,
value: impl Into<SimpleExpr> + Debug,
) -> Result<M>
where
M: Into<AnyModel> + Clone + UpsertModelInfo,
M: UpsertModelInfo,
{
let value_debug = format!("{:?}", value);
let value_expr = value.into();
let (sql, params) = Query::select()
.from(M::table_name())
@@ -47,13 +51,13 @@ impl<'a> DbContext<'a> {
}
}
pub(crate) fn find_optional<'s, M>(
pub fn find_optional<M>(
&self,
col: impl IntoColumnRef,
value: impl Into<SimpleExpr>,
) -> Option<M>
where
M: Into<AnyModel> + Clone + UpsertModelInfo,
M: UpsertModelInfo,
{
let (sql, params) = Query::select()
.from(M::table_name())
@@ -62,13 +66,12 @@ impl<'a> DbContext<'a> {
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.prepare(sql.as_str()).expect("Failed to prepare query");
stmt.query_row(&*params.as_params(), M::from_row)
.optional()
.expect("Failed to run find on DB")
.ok()
}
pub(crate) fn find_all<'s, M>(&self) -> Result<Vec<M>>
pub fn find_all<M>(&self) -> Result<Vec<M>>
where
M: Into<AnyModel> + Clone + UpsertModelInfo,
M: UpsertModelInfo,
{
let (order_by_col, order_by_dir) = M::order_by();
let (sql, params) = Query::select()
@@ -81,16 +84,15 @@ impl<'a> DbContext<'a> {
Ok(items.map(|v| v.unwrap()).collect())
}
pub(crate) fn find_many<'s, M>(
pub fn find_many<M>(
&self,
col: impl IntoColumnRef,
value: impl Into<SimpleExpr>,
limit: Option<u64>,
) -> Result<Vec<M>>
where
M: Into<AnyModel> + Clone + UpsertModelInfo,
M: UpsertModelInfo,
{
// TODO: Figure out how to do this conditional builder better
let (order_by_col, order_by_dir) = M::order_by();
let (sql, params) = if let Some(limit) = limit {
Query::select()
@@ -114,46 +116,30 @@ impl<'a> DbContext<'a> {
Ok(items.map(|v| v.unwrap()).collect())
}
pub(crate) fn upsert<M>(&self, model: &M, source: &UpdateSource) -> Result<M>
/// Upsert a model. Returns `(model, created)` where `created` is true if a new row was inserted.
pub fn upsert<M>(&self, model: &M, source: &UpdateSource) -> Result<(M, bool)>
where
M: Into<AnyModel> + From<AnyModel> + UpsertModelInfo + Clone,
M: UpsertModelInfo + Clone,
{
self.upsert_one(
M::table_name(),
M::id_column(),
model.get_id().as_str(),
model.clone().insert_values(source)?,
M::update_columns(),
source,
)
}
let id_iden = M::id_column().into_iden();
let id_val = model.get_id();
let other_values = model.clone().insert_values(source)?;
fn upsert_one<M>(
&self,
table: impl IntoTableRef,
id_col: impl IntoIden + Eq + Clone,
id_val: &str,
other_values: Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>,
update_columns: Vec<impl IntoIden>,
source: &UpdateSource,
) -> Result<M>
where
M: Into<AnyModel> + From<AnyModel> + UpsertModelInfo + Clone,
{
let id_iden = id_col.into_iden();
let mut column_vec = vec![id_iden.clone()];
let mut value_vec =
vec![if id_val == "" { M::generate_id().into() } else { id_val.into() }];
let mut value_vec = vec![
if id_val.is_empty() { M::generate_id().into() } else { id_val.into() },
];
for (col, val) in other_values {
value_vec.push(val.into());
column_vec.push(col.into_iden());
}
let on_conflict = OnConflict::column(id_iden).update_columns(update_columns).to_owned();
let on_conflict =
OnConflict::column(id_iden).update_columns(M::update_columns()).to_owned();
let (sql, params) = Query::insert()
.into_table(table)
.into_table(M::table_name())
.columns(column_vec)
.values_panic(value_vec)
.on_conflict(on_conflict)
@@ -173,59 +159,19 @@ impl<'a> DbContext<'a> {
})
})?;
let payload = ModelPayload {
model: m.clone().into(),
update_source: source.clone(),
change: ModelChangeEvent::Upsert { created },
};
self.record_model_change(&payload)?;
let _ = self._events_tx.send(payload);
Ok(m)
Ok((m, created))
}
pub(crate) fn delete<'s, M>(&self, m: &M, source: &UpdateSource) -> Result<M>
/// Delete a model by its ID. Returns the number of rows deleted.
pub fn delete<M>(&self, m: &M) -> Result<usize>
where
M: Into<AnyModel> + Clone + UpsertModelInfo,
M: UpsertModelInfo,
{
let (sql, params) = Query::delete()
.from_table(M::table_name())
.cond_where(Expr::col(M::id_column().into_iden()).eq(m.get_id()))
.build_rusqlite(SqliteQueryBuilder);
self.conn.execute(sql.as_str(), &*params.as_params())?;
let payload = ModelPayload {
model: m.clone().into(),
update_source: source.clone(),
change: ModelChangeEvent::Delete,
};
self.record_model_change(&payload)?;
let _ = self._events_tx.send(payload);
Ok(m.clone())
}
fn record_model_change(&self, payload: &ModelPayload) -> Result<()> {
let payload_json = serde_json::to_string(payload)?;
let source_json = serde_json::to_string(&payload.update_source)?;
let change_json = serde_json::to_string(&payload.change)?;
self.conn.resolve().execute(
r#"
INSERT INTO model_changes (model, model_id, change, update_source, payload)
VALUES (?1, ?2, ?3, ?4, ?5)
"#,
params![
payload.model.model(),
payload.model.id(),
change_json,
source_json,
payload_json,
],
)?;
Ok(())
let count = self.conn.execute(sql.as_str(), &*params.as_params())?;
Ok(count)
}
}

View File

@@ -0,0 +1,37 @@
use serde::{Serialize, Serializer};
use thiserror::Error;
/// All error conditions surfaced by this crate.
#[derive(Error, Debug)]
pub enum Error {
    /// A rusqlite-level SQL failure.
    #[error("SQL error: {0}")]
    SqlError(#[from] rusqlite::Error),

    /// Failure checking a connection out of the r2d2 pool.
    #[error("SQL Pool error: {0}")]
    SqlPoolError(#[from] r2d2::Error),

    /// A database-related failure described by a plain message.
    #[error("Database error: {0}")]
    Database(String),

    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),

    #[error("JSON error: {0}")]
    JsonError(#[from] serde_json::Error),

    /// A lookup that expected a row found none.
    #[error("Model not found: {0}")]
    ModelNotFound(String),

    #[error("DB Migration Failed: {0}")]
    MigrationError(String),
}

/// Errors serialize as their human-readable `Display` string (e.g. when sent
/// across an IPC boundary).
impl Serialize for Error {
    fn serialize<S>(&self, ser: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        ser.serialize_str(&self.to_string())
    }
}

/// Convenience alias used throughout this crate.
pub type Result<T> = std::result::Result<T, Error>;

View File

@@ -0,0 +1,23 @@
// Crate module layout: `db_context` holds the query helpers, `migrate` runs
// embedded SQL migrations, `traits` defines the model upsert contract.
pub mod connection_or_tx;
pub mod db_context;
pub mod error;
pub mod migrate;
pub mod traits;
pub mod update_source;
pub mod util;

// Re-export key types for convenience
pub use connection_or_tx::ConnectionOrTx;
pub use db_context::DbContext;
pub use error::{Error, Result};
pub use migrate::run_migrations;
pub use traits::{UpsertModelInfo, upsert_date};
pub use update_source::{ModelChangeEvent, UpdateSource};
pub use util::{generate_id, generate_id_of_length, generate_prefixed_id};

// Re-export pool types that consumers will need
pub use r2d2;
pub use r2d2_sqlite;
pub use rusqlite;
pub use sea_query;
pub use sea_query_rusqlite;

View File

@@ -0,0 +1,81 @@
use crate::error::Result;
use include_dir::Dir;
use log::{debug, info};
use r2d2::Pool;
use r2d2_sqlite::SqliteConnectionManager;
use rusqlite::{OptionalExtension, params};
const TRACKING_TABLE: &str = "_sqlx_migrations";
/// Run SQL migrations from an embedded directory.
///
/// Migrations are sorted by filename (use timestamp prefixes like `00000001_init.sql`).
/// Applied migrations are tracked in `_sqlx_migrations`.
///
/// A single pooled connection is reused for the whole run, and each migration
/// is applied and recorded inside one transaction so a failed migration is
/// never recorded and a recorded migration is never half-applied.
pub fn run_migrations(pool: &Pool<SqliteConnectionManager>, dir: &Dir<'_>) -> Result<()> {
    info!("Running migrations");

    // One connection for the entire run (previously a fresh connection was
    // checked out of the pool for every statement).
    let mut conn = pool.get()?;

    // Create tracking table on first run.
    conn.execute(
        &format!(
            "CREATE TABLE IF NOT EXISTS {TRACKING_TABLE} (
                version TEXT PRIMARY KEY,
                description TEXT NOT NULL,
                applied_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL
            )"
        ),
        [],
    )?;

    // Read and sort all .sql files so they apply in filename order.
    let mut entries: Vec<_> = dir
        .entries()
        .iter()
        .filter(|e| e.path().extension().map(|ext| ext == "sql").unwrap_or(false))
        .collect();
    entries.sort_by_key(|e| e.path());

    let mut ran_migrations = 0;
    for entry in &entries {
        let filename = entry.path().file_name().unwrap().to_str().unwrap();
        // The version is everything before the first underscore.
        let version = filename.split('_').next().unwrap();

        // Skip migrations that were already recorded.
        let already_applied: Option<i64> = conn
            .query_row(
                &format!("SELECT 1 FROM {TRACKING_TABLE} WHERE version = ?"),
                [version],
                |r| r.get(0),
            )
            .optional()?;
        if already_applied.is_some() {
            debug!("Skipping already applied migration: {}", filename);
            continue;
        }

        let sql =
            entry.as_file().unwrap().contents_utf8().expect("Failed to read migration file");
        info!("Applying migration: {}", filename);

        // Apply + record atomically. NOTE(review): this assumes migration
        // files do not contain their own BEGIN/COMMIT statements — confirm
        // against the embedded migrations.
        let tx = conn.transaction()?;
        tx.execute_batch(sql)?;
        tx.execute(
            &format!("INSERT INTO {TRACKING_TABLE} (version, description) VALUES (?, ?)"),
            params![version, filename],
        )?;
        tx.commit()?;
        ran_migrations += 1;
    }

    if ran_migrations == 0 {
        info!("No migrations to run");
    } else {
        info!("Ran {} migration(s)", ran_migrations);
    }
    Ok(())
}

View File

@@ -0,0 +1,36 @@
use crate::error::Result;
use crate::update_source::UpdateSource;
use chrono::{NaiveDateTime, Utc};
use rusqlite::Row;
use sea_query::{IntoColumnRef, IntoIden, IntoTableRef, Order, SimpleExpr};
/// Contract a model type must implement to be stored and queried through the
/// crate's upsert/find helpers.
pub trait UpsertModelInfo {
    /// The SQL table this model is stored in.
    fn table_name() -> impl IntoTableRef + IntoIden;
    /// The primary-key column.
    fn id_column() -> impl IntoIden + Eq + Clone;
    /// Generate a fresh ID for newly inserted rows.
    fn generate_id() -> String;
    /// Default `(column, direction)` ordering for list queries.
    fn order_by() -> (impl IntoColumnRef, Order);
    /// This instance's ID value (may be empty for not-yet-inserted models).
    fn get_id(&self) -> String;
    /// The `(column, value)` pairs written on insert/upsert. Consumes `self`.
    fn insert_values(
        self,
        source: &UpdateSource,
    ) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>>;
    /// Columns to overwrite when an upsert hits an existing row.
    fn update_columns() -> Vec<impl IntoIden>;
    /// Construct a model from a SQL result row.
    fn from_row(row: &Row) -> rusqlite::Result<Self>
    where
        Self: Sized;
}
/// Generate timestamps for upsert operations.
/// Sync and import operations preserve existing timestamps; other sources use current time.
pub fn upsert_date(update_source: &UpdateSource, dt: NaiveDateTime) -> SimpleExpr {
    let preserves_timestamps =
        matches!(update_source, UpdateSource::Sync | UpdateSource::Import);
    // A zero (epoch) timestamp is treated as "unset" and replaced with now,
    // even for sources that otherwise preserve timestamps.
    if preserves_timestamps && dt.and_utc().timestamp() != 0 {
        dt.into()
    } else {
        Utc::now().naive_utc().into()
    }
}

View File

@@ -0,0 +1,25 @@
use serde::{Deserialize, Serialize};
use ts_rs::TS;
/// Where a model change originated. Serialized with a snake_case `type` tag,
/// e.g. `{"type": "window", "label": "..."}`.
///
/// NOTE(review): unlike `ModelChangeEvent` below, this enum lacks the `TS`
/// derive — confirm whether its TypeScript binding is generated elsewhere.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case", tag = "type")]
pub enum UpdateSource {
    Background,
    Import,
    Plugin,
    Sync,
    // A change made from a specific app window, identified by its label.
    Window { label: String },
}

impl UpdateSource {
    /// Convenience constructor for window-originated changes.
    pub fn from_window_label(label: impl Into<String>) -> Self {
        Self::Window { label: label.into() }
    }
}

/// The kind of change applied to a model, tagged for the frontend.
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
pub enum ModelChangeEvent {
    // `created` is true when the upsert inserted a new row (vs. updated one).
    Upsert { created: bool },
    Delete,
}

View File

@@ -0,0 +1,20 @@
use nanoid::nanoid;
/// Alphabet for generated IDs: alphanumerics minus the easily-confused
/// characters `0`, `1`, `l`, `I`, and `O`.
const ID_ALPHABET: [char; 57] = [
    '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i',
    'j', 'k', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A',
    'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T',
    'U', 'V', 'W', 'X', 'Y', 'Z',
];

/// Generate a default-length random ID with the given prefix, joined by `_`.
pub fn generate_prefixed_id(prefix: &str) -> String {
    format!("{prefix}_{}", generate_id())
}

/// Generate a 10-character random ID.
pub fn generate_id() -> String {
    generate_id_of_length(10)
}

/// Generate a random ID of `n` characters drawn from [`ID_ALPHABET`].
pub fn generate_id_of_length(n: usize) -> String {
    nanoid!(n, &ID_ALPHABET)
}

View File

@@ -0,0 +1,12 @@
[package]
name = "yaak-rpc"
version = "0.0.0"
edition = "2024"
authors = ["Gregory Schier"]
publish = false
[dependencies]
log = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
ts-rs = { workspace = true }

View File

@@ -0,0 +1,228 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::mpsc;
/// Type-erased handler function: takes context + JSON payload, returns JSON or error.
type HandlerFn<Ctx> = Box<dyn Fn(&Ctx, serde_json::Value) -> Result<serde_json::Value, RpcError> + Send + Sync>;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RpcError {
pub message: String,
}
impl std::fmt::Display for RpcError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.message)
}
}
impl std::error::Error for RpcError {}
impl From<serde_json::Error> for RpcError {
fn from(e: serde_json::Error) -> Self {
Self { message: e.to_string() }
}
}
/// Incoming message from a client (Tauri invoke, WebSocket, etc.).
#[derive(Debug, Deserialize)]
pub struct RpcRequest {
    // Client-chosen correlation ID, echoed back in the response.
    pub id: String,
    // Command name, matched against registered handlers.
    pub cmd: String,
    // Arbitrary JSON arguments; `serde(default)` makes it `null` when omitted.
    #[serde(default)]
    pub payload: serde_json::Value,
}

/// Outgoing response to a client. Serialized with a `type` tag
/// ("Success" / "Error") and the request's correlation `id`.
#[derive(Debug, Serialize)]
#[serde(tag = "type")]
pub enum RpcResponse {
    Success {
        id: String,
        payload: serde_json::Value,
    },
    Error {
        id: String,
        error: String,
    },
}
/// Transport-agnostic command router.
///
/// Register typed handler functions, then dispatch incoming JSON messages.
/// Each transport adapter (Tauri, WebSocket, etc.) calls `dispatch()`.
pub struct RpcRouter<Ctx> {
    handlers: HashMap<&'static str, HandlerFn<Ctx>>,
}

// `Default` alongside `new()` (clippy::new_without_default) so the router can
// be used in derive(Default) contexts.
impl<Ctx> Default for RpcRouter<Ctx> {
    fn default() -> Self {
        Self::new()
    }
}

impl<Ctx> RpcRouter<Ctx> {
    /// Create an empty router with no registered handlers.
    pub fn new() -> Self {
        Self { handlers: HashMap::new() }
    }

    /// Register a handler for a command name. Registering the same name twice
    /// replaces the earlier handler.
    /// Use the `rpc_handler!` macro to wrap a typed function.
    pub fn register(&mut self, cmd: &'static str, handler: HandlerFn<Ctx>) {
        self.handlers.insert(cmd, handler);
    }

    /// Dispatch a command by name with a JSON payload.
    ///
    /// Returns the handler's JSON result, or an `RpcError` when `cmd` is not
    /// registered.
    pub fn dispatch(
        &self,
        cmd: &str,
        payload: serde_json::Value,
        ctx: &Ctx,
    ) -> Result<serde_json::Value, RpcError> {
        match self.handlers.get(cmd) {
            Some(handler) => handler(ctx, payload),
            None => Err(RpcError {
                message: format!("unknown command: {cmd}"),
            }),
        }
    }

    /// Handle a full `RpcRequest`, returning an `RpcResponse` that echoes the
    /// request's correlation ID.
    pub fn handle(&self, req: RpcRequest, ctx: &Ctx) -> RpcResponse {
        match self.dispatch(&req.cmd, req.payload, ctx) {
            Ok(payload) => RpcResponse::Success {
                id: req.id,
                payload,
            },
            Err(e) => RpcResponse::Error {
                id: req.id,
                error: e.message,
            },
        }
    }

    /// Names of all registered commands (in arbitrary order).
    pub fn commands(&self) -> Vec<&'static str> {
        self.handlers.keys().copied().collect()
    }
}
/// A named event carrying a JSON payload, emitted from backend to frontend.
#[derive(Debug, Clone, Serialize)]
pub struct RpcEvent {
    // Static event name.
    pub event: &'static str,
    // Pre-serialized JSON payload.
    pub payload: serde_json::Value,
}
/// Channel-based event emitter. The backend calls `emit()`, the transport
/// adapter (Tauri, WebSocket, etc.) drains the receiver and delivers events.
#[derive(Clone)]
pub struct RpcEventEmitter {
    tx: mpsc::Sender<RpcEvent>,
}

impl RpcEventEmitter {
    /// Create an emitter plus the receiver the transport adapter should drain.
    pub fn new() -> (Self, mpsc::Receiver<RpcEvent>) {
        let (tx, rx) = mpsc::channel();
        (Self { tx }, rx)
    }

    /// Emit a typed event. Serializes the payload to JSON.
    ///
    /// Best-effort: a payload that fails to serialize, or a disconnected
    /// receiver, is silently ignored.
    pub fn emit<T: Serialize>(&self, event: &'static str, payload: &T) {
        let Ok(value) = serde_json::to_value(payload) else {
            return;
        };
        let _ = self.tx.send(RpcEvent { event, payload: value });
    }
}
/// Define RPC commands and events with a single source of truth.
///
/// Generates:
/// - `build_router()` — creates an `RpcRouter` with all handlers registered
/// - `RpcSchema` — a struct with ts-rs derives for TypeScript type generation
/// - `RpcEventSchema` — (if events declared) a struct mapping event names to payload types
///
/// The wire name for each command/event is derived from `stringify!($ident)`.
///
/// # Example
/// ```ignore
/// define_rpc! {
///     ProxyCtx;
///     commands {
///         proxy_start(ProxyStartRequest) -> ProxyStartResponse,
///         proxy_stop(ProxyStopRequest) -> bool,
///     }
///     events {
///         model_write(ModelPayload),
///     }
/// }
/// ```
#[macro_export]
macro_rules! define_rpc {
    // Single arm: the `events { ... }` section is optional via `$( ... )?`,
    // replacing the two near-duplicate arms this macro previously needed.
    (
        $ctx:ty;
        commands {
            $( $handler:ident ( $req:ty ) -> $res:ty ),* $(,)?
        }
        $(
            events {
                $( $evt_ident:ident ( $evt_payload:ty ) ),* $(,)?
            }
        )?
    ) => {
        pub fn build_router() -> $crate::RpcRouter<$ctx> {
            let mut router = $crate::RpcRouter::new();
            $( router.register(stringify!($handler), $crate::rpc_handler!($handler)); )*
            router
        }

        #[derive(ts_rs::TS)]
        #[ts(export, export_to = "gen_rpc.ts")]
        pub struct RpcSchema {
            $( pub $handler: ($req, $res), )*
        }

        // Only generated when an `events { ... }` section was provided.
        $(
            #[derive(ts_rs::TS)]
            #[ts(export, export_to = "gen_rpc.ts")]
            pub struct RpcEventSchema {
                $( pub $evt_ident: $evt_payload, )*
            }
        )?
    };
}
/// Wrap a typed handler function into a type-erased `HandlerFn`.
///
/// The function must have the signature:
/// `fn(ctx: &Ctx, req: Req) -> Result<Res, RpcError>`
/// where `Req: DeserializeOwned` and `Res: Serialize`.
///
/// Payload deserialization errors and result serialization errors are both
/// converted to `RpcError` via its `From<serde_json::Error>` impl.
///
/// # Example
/// ```ignore
/// fn proxy_start(ctx: &MyCtx, req: StartReq) -> Result<StartRes, RpcError> { ... }
///
/// router.register("proxy_start", rpc_handler!(proxy_start));
/// ```
#[macro_export]
macro_rules! rpc_handler {
    ($f:expr) => {
        Box::new(|ctx, payload| {
            // Deserialize the JSON payload into the handler's typed request.
            let req = serde_json::from_value(payload).map_err($crate::RpcError::from)?;
            let res = $f(ctx, req)?;
            // Serialize the typed response back to JSON for the transport.
            serde_json::to_value(res).map_err($crate::RpcError::from)
        })
    };
}

View File

@@ -2,22 +2,44 @@
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null, variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null,
/**
* Variables defined in this environment scope.
* Child environments override parent variables by name.
*/
variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };
export type Folder = { model: "folder", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, sortPriority: number, };
export type GrpcRequest = { model: "grpc_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authenticationType: string | null, authentication: Record<string, any>, description: string, message: string, metadata: Array<HttpRequestHeader>, method: string | null, name: string, service: string | null, sortPriority: number, url: string, };
export type GrpcRequest = { model: "grpc_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authenticationType: string | null, authentication: Record<string, any>, description: string, message: string, metadata: Array<HttpRequestHeader>, method: string | null, name: string, service: string | null, sortPriority: number,
/**
* Server URL (http for plaintext or https for secure)
*/
url: string, };
export type HttpRequest = { model: "http_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, body: Record<string, any>, bodyType: string | null, description: string, headers: Array<HttpRequestHeader>, method: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type HttpRequest = { model: "http_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, body: Record<string, any>, bodyType: string | null, description: string, headers: Array<HttpRequestHeader>, method: string, name: string, sortPriority: number, url: string,
/**
* URL parameters used for both path placeholders (`:id`) and query string entries.
*/
urlParameters: Array<HttpUrlParameter>, };
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpUrlParameter = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpUrlParameter = { enabled?: boolean,
/**
* Colon-prefixed parameters are treated as path parameters if they match, like `/users/:id`
* Other entries are appended as query parameters
*/
name: string, value: string, id?: string, };
export type SyncModel = { "type": "workspace" } & Workspace | { "type": "environment" } & Environment | { "type": "folder" } & Folder | { "type": "http_request" } & HttpRequest | { "type": "grpc_request" } & GrpcRequest | { "type": "websocket_request" } & WebsocketRequest;
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string,
/**
* URL parameters used for both path placeholders (`:id`) and query string entries.
*/
urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };

View File

@@ -1,7 +1,7 @@
import { useQuery } from "@tanstack/react-query";
import { invoke } from "@tauri-apps/api/core";
import { createFastMutation } from "@yaakapp/app/hooks/useFastMutation";
import { queryClient } from "@yaakapp/app/lib/queryClient";
import { createFastMutation } from "@yaakapp/yaak-client/hooks/useFastMutation";
import { queryClient } from "@yaakapp/yaak-client/lib/queryClient";
import { useMemo } from "react";
import {
BranchDeleteResult,
@@ -12,7 +12,7 @@ import {
PullResult,
PushResult,
} from "./bindings/gen_git";
import { showToast } from "@yaakapp/app/lib/toast";
import { showToast } from "@yaakapp/yaak-client/lib/toast";
export * from "./bindings/gen_git";
export * from "./bindings/gen_models";
@@ -95,8 +95,8 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
const handleError = (err: unknown) => {
showToast({
id: err instanceof Error ? err.message : String(err),
message: err instanceof Error ? err.message : String(err),
id: String(err),
message: String(err),
color: "danger",
timeout: 5000,
});
@@ -272,7 +272,10 @@ export async function gitClone(
if (result.type !== "needs_credentials") return result;
// Prompt for credentials
const creds = await promptCredentials({ url: result.url, error: result.error });
const creds = await promptCredentials({
url: result.url,
error: result.error,
});
if (creds == null) return { type: "cancelled" };
// Store credentials and retry

View File

@@ -18,9 +18,11 @@ zstd = "0.13"
hyper-util = { version = "0.1.17", default-features = false, features = ["client-legacy"] }
log = { workspace = true }
mime_guess = "2.0.5"
native-tls = "0.2"
regex = "1.11.1"
reqwest = { workspace = true, features = [
"rustls-tls-manual-roots-no-provider",
"native-tls",
"socks",
"http2",
"stream",

View File

@@ -31,6 +31,56 @@ impl ConfiguredClient {
}
}
/// Build a native-tls connector for maximum compatibility when certificate
/// validation is disabled. Unlike rustls, native-tls uses the OS TLS stack
/// (Secure Transport on macOS, SChannel on Windows, OpenSSL on Linux) which
/// supports TLS 1.0+ for legacy servers.
fn build_native_tls_connector(
    client_cert: Option<ClientCertificateConfig>,
) -> Result<native_tls::TlsConnector> {
    let mut builder = native_tls::TlsConnector::builder();
    // Certificate and hostname verification are deliberately disabled: this
    // connector is only used on the caller's `validate_certificates == false`
    // branch.
    builder.danger_accept_invalid_certs(true);
    builder.danger_accept_invalid_hostnames(true);
    // Allow legacy servers down to TLS 1.0 (the rustls path is TLS 1.2+ only).
    builder.min_protocol_version(Some(native_tls::Protocol::Tlsv10));
    // Attach a client (mTLS) identity if one is configured.
    if let Some(identity) = build_native_tls_identity(client_cert)? {
        builder.identity(identity);
    }
    Ok(builder.build()?)
}
/// Build an optional client (mTLS) identity from the configured certificate
/// files. PFX/PKCS#12 takes precedence over separate CRT + KEY files; empty
/// or missing paths mean "not configured" and yield `Ok(None)`.
fn build_native_tls_identity(
    client_cert: Option<ClientCertificateConfig>,
) -> Result<Option<native_tls::Identity>> {
    let config = match client_cert {
        None => return Ok(None),
        Some(c) => c,
    };
    // Try PFX/PKCS12 first
    if let Some(pfx_path) = &config.pfx_file {
        if !pfx_path.is_empty() {
            let pfx_data = std::fs::read(pfx_path)?;
            // A missing passphrase is treated as an empty password.
            let password = config.passphrase.as_deref().unwrap_or("");
            let identity = native_tls::Identity::from_pkcs12(&pfx_data, password)?;
            return Ok(Some(identity));
        }
    }
    // Try CRT + KEY files
    if let (Some(crt_path), Some(key_path)) = (&config.crt_file, &config.key_file) {
        if !crt_path.is_empty() && !key_path.is_empty() {
            let crt_data = std::fs::read(crt_path)?;
            let key_data = std::fs::read(key_path)?;
            // NOTE(review): `Identity::from_pkcs8` expects PEM-encoded cert
            // chain + key — confirm the configured files are PEM, not DER.
            let identity = native_tls::Identity::from_pkcs8(&crt_data, &key_data)?;
            return Ok(Some(identity));
        }
    }
    Ok(None)
}
#[derive(Clone)]
pub struct HttpConnectionProxySettingAuth {
pub user: String,
@@ -76,10 +126,16 @@ impl HttpConnectionOptions {
// This is needed so we can emit DNS timing events for each request
.pool_max_idle_per_host(0);
// Configure TLS with optional client certificate
let config =
get_tls_config(self.validate_certificates, true, self.client_certificate.clone())?;
client = client.use_preconfigured_tls(config);
// Configure TLS
if self.validate_certificates {
// Use rustls with platform certificate verification (TLS 1.2+ only)
let config = get_tls_config(true, true, self.client_certificate.clone())?;
client = client.use_preconfigured_tls(config);
} else {
// Use native TLS for maximum compatibility (supports TLS 1.0+)
let connector = build_native_tls_connector(self.client_certificate.clone())?;
client = client.use_preconfigured_tls(connector);
}
// Configure DNS resolver - keep a reference to configure per-request
let resolver = LocalhostResolver::new(self.dns_overrides.clone());

View File

@@ -9,6 +9,12 @@ pub enum Error {
#[error(transparent)]
TlsError(#[from] yaak_tls::error::Error),
#[error("Native TLS error: {0}")]
NativeTlsError(#[from] native_tls::Error),
#[error("IO error: {0}")]
IoError(#[from] std::io::Error),
#[error("Request failed with {0:?}")]
RequestError(String),

View File

@@ -6,6 +6,7 @@ publish = false
[dependencies]
chrono = { version = "0.4.38", features = ["serde"] }
yaak-database = { workspace = true }
hex = { workspace = true }
include_dir = "0.7"
log = { workspace = true }

View File

@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type ModelChangeEvent = { "type": "upsert", created: boolean, } | { "type": "delete" };

View File

@@ -1,4 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ModelChangeEvent } from "./ModelChangeEvent";
export type AnyModel = CookieJar | Environment | Folder | GraphQlIntrospection | GrpcConnection | GrpcEvent | GrpcRequest | HttpRequest | HttpResponse | HttpResponseEvent | KeyValue | Plugin | Settings | SyncState | WebsocketConnection | WebsocketEvent | WebsocketRequest | Workspace | WorkspaceMeta;
@@ -18,7 +19,12 @@ export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
export type EncryptedKey = { encryptedKey: string, };
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null, variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null,
/**
* Variables defined in this environment scope.
* Child environments override parent variables by name.
*/
variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };
@@ -34,9 +40,17 @@ export type GrpcEvent = { model: "grpc_event", id: string, createdAt: string, up
export type GrpcEventType = "info" | "error" | "client_message" | "server_message" | "connection_start" | "connection_end";
export type GrpcRequest = { model: "grpc_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authenticationType: string | null, authentication: Record<string, any>, description: string, message: string, metadata: Array<HttpRequestHeader>, method: string | null, name: string, service: string | null, sortPriority: number, url: string, };
export type GrpcRequest = { model: "grpc_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authenticationType: string | null, authentication: Record<string, any>, description: string, message: string, metadata: Array<HttpRequestHeader>, method: string | null, name: string, service: string | null, sortPriority: number,
/**
* Server URL (http for plaintext or https for secure)
*/
url: string, };
export type HttpRequest = { model: "http_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, body: Record<string, any>, bodyType: string | null, description: string, headers: Array<HttpRequestHeader>, method: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type HttpRequest = { model: "http_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, body: Record<string, any>, bodyType: string | null, description: string, headers: Array<HttpRequestHeader>, method: string, name: string, sortPriority: number, url: string,
/**
* URL parameters used for both path placeholders (`:id`) and query string entries.
*/
urlParameters: Array<HttpUrlParameter>, };
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
@@ -55,12 +69,15 @@ export type HttpResponseHeader = { name: string, value: string, };
export type HttpResponseState = "initialized" | "connected" | "closed";
export type HttpUrlParameter = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpUrlParameter = { enabled?: boolean,
/**
* Colon-prefixed parameters are treated as path parameters if they match, like `/users/:id`
* Other entries are appended as query parameters
*/
name: string, value: string, id?: string, };
export type KeyValue = { model: "key_value", id: string, createdAt: string, updatedAt: string, key: string, namespace: string, value: string, };
export type ModelChangeEvent = { "type": "upsert", created: boolean, } | { "type": "delete" };
export type ModelPayload = { model: AnyModel, updateSource: UpdateSource, change: ModelChangeEvent, };
export type ParentAuthentication = { authentication: Record<string, any>, authenticationType: string | null, };
@@ -69,10 +86,10 @@ export type ParentHeaders = { headers: Array<HttpRequestHeader>, };
export type Plugin = { model: "plugin", id: string, createdAt: string, updatedAt: string, checkedAt: string | null, directory: string, enabled: boolean, url: string | null, source: PluginSource, };
export type PluginSource = "bundled" | "filesystem" | "registry";
export type PluginKeyValue = { model: "plugin_key_value", createdAt: string, updatedAt: string, pluginName: string, key: string, value: string, };
export type PluginSource = "bundled" | "filesystem" | "registry";
export type ProxySetting = { "type": "enabled", http: string, https: string, auth: ProxySettingAuth | null, bypass: string, disabled: boolean, } | { "type": "disabled" };
export type ProxySettingAuth = { user: string, password: string, };
@@ -93,7 +110,11 @@ export type WebsocketEventType = "binary" | "close" | "frame" | "open" | "ping"
export type WebsocketMessageType = "text" | "binary";
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string,
/**
* URL parameters used for both path placeholders (`:id`) and query string entries.
*/
urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };

View File

@@ -1,6 +1,6 @@
import { invoke } from "@tauri-apps/api/core";
import { getCurrentWebviewWindow } from "@tauri-apps/api/webviewWindow";
import { resolvedModelName } from "@yaakapp/app/lib/resolvedModelName";
import { resolvedModelName } from "@yaakapp/yaak-client/lib/resolvedModelName";
import { AnyModel, ModelPayload } from "../bindings/gen_models";
import { modelStoreDataAtom } from "./atoms";
import { ExtractModel, JotaiStore, ModelStoreData } from "./types";
@@ -86,7 +86,7 @@ export function getModel<M extends AnyModel["model"], T extends ExtractModel<Any
export function getAnyModel(id: string): AnyModel | null {
let data = mustStore().get(modelStoreDataAtom);
for (const t of Object.keys(data)) {
// oxlint-disable-next-line no-explicit-any
// oxlint-disable-next-line no-explicit-any -- dynamic key access
let v = (data as any)[t]?.[id];
if (v?.model === t) return v;
}

View File

@@ -0,0 +1,127 @@
use crate::error::Result;
use crate::models::{AnyModel, UpsertModelInfo};
use crate::util::{ModelChangeEvent, ModelPayload, UpdateSource};
use rusqlite::params;
use sea_query::{IntoColumnRef, IntoIden, SimpleExpr};
use std::fmt::Debug;
use std::sync::mpsc;
use yaak_database::DbContext;
/// Client-side database facade: wraps a [`DbContext`] and, for every write it
/// performs, records the change and publishes a `ModelPayload` on `events_tx`.
pub struct ClientDb<'a> {
    // Underlying query context (pooled connection or transaction) from yaak-database.
    pub(crate) ctx: DbContext<'a>,
    // Channel on which upsert/delete change events are published to listeners.
    pub(crate) events_tx: mpsc::Sender<ModelPayload>,
}
impl<'a> ClientDb<'a> {
    /// Build a client-facing DB handle from a database context plus the channel
    /// on which model-change events are broadcast.
    pub fn new(ctx: DbContext<'a>, events_tx: mpsc::Sender<ModelPayload>) -> Self {
        Self { ctx, events_tx }
    }

    /// Access the underlying connection for custom queries.
    pub(crate) fn conn(&self) -> &yaak_database::ConnectionOrTx<'a> {
        self.ctx.conn()
    }

    // --- Read delegates (thin wrappers over DbContext) ---

    /// Fetch the single model whose `col` equals `value`; errors when no row matches.
    pub(crate) fn find_one<M>(
        &self,
        col: impl IntoColumnRef + IntoIden + Clone,
        value: impl Into<SimpleExpr> + Debug,
    ) -> Result<M>
    where
        M: UpsertModelInfo,
    {
        let found = self.ctx.find_one(col, value)?;
        Ok(found)
    }

    /// Fetch the model whose `col` equals `value`, or `None` when absent.
    pub(crate) fn find_optional<M>(
        &self,
        col: impl IntoColumnRef,
        value: impl Into<SimpleExpr>,
    ) -> Option<M>
    where
        M: UpsertModelInfo,
    {
        self.ctx.find_optional(col, value)
    }

    /// List every row of the model's table.
    pub(crate) fn find_all<M>(&self) -> Result<Vec<M>>
    where
        M: UpsertModelInfo,
    {
        let all = self.ctx.find_all()?;
        Ok(all)
    }

    /// List rows where `col` equals `value`, optionally capped at `limit`.
    pub(crate) fn find_many<M>(
        &self,
        col: impl IntoColumnRef,
        value: impl Into<SimpleExpr>,
        limit: Option<u64>,
    ) -> Result<Vec<M>>
    where
        M: UpsertModelInfo,
    {
        let many = self.ctx.find_many(col, value, limit)?;
        Ok(many)
    }

    // --- Write operations (with event recording) ---

    /// Insert or update `model`, then persist and broadcast the resulting change event.
    pub(crate) fn upsert<M>(&self, model: &M, source: &UpdateSource) -> Result<M>
    where
        M: Into<AnyModel> + UpsertModelInfo + Clone,
    {
        let (upserted, created) = self.ctx.upsert(model, &source.to_db())?;
        self.publish(ModelPayload {
            model: upserted.clone().into(),
            update_source: source.clone(),
            change: ModelChangeEvent::Upsert { created },
        })?;
        Ok(upserted)
    }

    /// Delete `m`, then persist and broadcast the resulting change event.
    pub(crate) fn delete<M>(&self, m: &M, source: &UpdateSource) -> Result<M>
    where
        M: Into<AnyModel> + Clone + UpsertModelInfo,
    {
        self.ctx.delete(m)?;
        self.publish(ModelPayload {
            model: m.clone().into(),
            update_source: source.clone(),
            change: ModelChangeEvent::Delete,
        })?;
        Ok(m.clone())
    }

    /// Record a change, then notify listeners. Send failures are deliberately
    /// ignored: there may be no live receiver on the channel.
    fn publish(&self, payload: ModelPayload) -> Result<()> {
        self.record_model_change(&payload)?;
        let _ = self.events_tx.send(payload);
        Ok(())
    }

    /// Append one row to the `model_changes` table describing `payload`.
    fn record_model_change(&self, payload: &ModelPayload) -> Result<()> {
        let payload_json = serde_json::to_string(payload)?;
        let source_json = serde_json::to_string(&payload.update_source)?;
        let change_json = serde_json::to_string(&payload.change)?;
        self.ctx.conn().resolve().execute(
            r#"
            INSERT INTO model_changes (model, model_id, change, update_source, payload)
            VALUES (?1, ?2, ?3, ?4, ?5)
            "#,
            params![
                payload.model.model(),
                payload.model.id(),
                change_json,
                source_json,
                payload_json,
            ],
        )?;
        Ok(())
    }
}

View File

@@ -1,25 +1 @@
use r2d2::PooledConnection;
use r2d2_sqlite::SqliteConnectionManager;
use rusqlite::{Connection, Statement, ToSql, Transaction};
/// Either a pooled SQLite connection or a borrowed open transaction, so query
/// helpers can run against whichever the caller currently holds.
pub enum ConnectionOrTx<'a> {
    /// An owned connection checked out of the r2d2 pool.
    Connection(PooledConnection<SqliteConnectionManager>),
    /// A borrowed transaction; statements executed through it join that transaction.
    Transaction(&'a Transaction<'a>),
}
impl<'a> ConnectionOrTx<'a> {
    /// Borrow the underlying rusqlite `Connection`, whichever variant holds it
    /// (both variants coerce to `&Connection` via deref).
    pub(crate) fn resolve(&self) -> &Connection {
        match self {
            Self::Connection(conn) => conn,
            Self::Transaction(tx) => tx,
        }
    }

    /// Prepare `sql` against the resolved connection.
    pub(crate) fn prepare(&self, sql: &str) -> rusqlite::Result<Statement<'_>> {
        self.resolve().prepare(sql)
    }

    /// Execute `sql` with positional `params`, returning the affected-row count.
    pub(crate) fn execute(&self, sql: &str, params: &[&dyn ToSql]) -> rusqlite::Result<usize> {
        self.resolve().execute(sql, params)
    }
}
pub use yaak_database::connection_or_tx::ConnectionOrTx;

View File

@@ -40,6 +40,20 @@ pub enum Error {
Unknown,
}
impl From<yaak_database::Error> for Error {
fn from(e: yaak_database::Error) -> Self {
match e {
yaak_database::Error::SqlError(e) => Error::SqlError(e),
yaak_database::Error::SqlPoolError(e) => Error::SqlPoolError(e),
yaak_database::Error::Database(s) => Error::Database(s),
yaak_database::Error::Io(e) => Error::Io(e),
yaak_database::Error::JsonError(e) => Error::JsonError(e),
yaak_database::Error::ModelNotFound(s) => Error::ModelNotFound(s),
yaak_database::Error::MigrationError(s) => Error::MigrationError(s),
}
}
}
impl Serialize for Error {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where

View File

@@ -12,8 +12,8 @@ use std::sync::mpsc;
use std::time::Duration;
pub mod blob_manager;
pub mod client_db;
mod connection_or_tx;
pub mod db_context;
pub mod error;
pub mod migrate;
pub mod models;

View File

@@ -3,7 +3,7 @@ use crate::models::HttpRequestIden::{
Authentication, AuthenticationType, Body, BodyType, CreatedAt, Description, FolderId, Headers,
Method, Name, SortPriority, UpdatedAt, Url, UrlParameters, WorkspaceId,
};
use crate::util::{UpdateSource, generate_prefixed_id};
use crate::util::generate_prefixed_id;
use chrono::{NaiveDateTime, Utc};
use rusqlite::Row;
use schemars::JsonSchema;
@@ -16,6 +16,8 @@ use std::collections::HashMap;
use std::fmt::{Debug, Display};
use std::str::FromStr;
use ts_rs::TS;
pub use yaak_database::{UpsertModelInfo, upsert_date};
use yaak_database::{UpdateSource, Result as DbResult};
#[macro_export]
macro_rules! impl_model {
@@ -190,7 +192,7 @@ impl UpsertModelInfo for Settings {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use SettingsIden::*;
let proxy = match self.proxy {
None => None,
@@ -346,7 +348,7 @@ impl UpsertModelInfo for Workspace {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use WorkspaceIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -453,7 +455,7 @@ impl UpsertModelInfo for WorkspaceMeta {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use WorkspaceMetaIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -554,7 +556,7 @@ impl UpsertModelInfo for CookieJar {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use CookieJarIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -642,7 +644,7 @@ impl UpsertModelInfo for Environment {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use EnvironmentIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -775,7 +777,7 @@ impl UpsertModelInfo for Folder {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use FolderIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -909,7 +911,7 @@ impl UpsertModelInfo for HttpRequest {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
(UpdatedAt, upsert_date(source, self.updated_at)),
@@ -1036,7 +1038,7 @@ impl UpsertModelInfo for WebsocketConnection {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use WebsocketConnectionIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1151,7 +1153,7 @@ impl UpsertModelInfo for WebsocketRequest {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use WebsocketRequestIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1276,7 +1278,7 @@ impl UpsertModelInfo for WebsocketEvent {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use WebsocketEventIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1397,7 +1399,7 @@ impl UpsertModelInfo for HttpResponse {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use HttpResponseIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1593,7 +1595,7 @@ impl UpsertModelInfo for HttpResponseEvent {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use HttpResponseEventIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1681,7 +1683,7 @@ impl UpsertModelInfo for GraphQlIntrospection {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use GraphQlIntrospectionIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1766,7 +1768,7 @@ impl UpsertModelInfo for GrpcRequest {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use GrpcRequestIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1893,7 +1895,7 @@ impl UpsertModelInfo for GrpcConnection {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use GrpcConnectionIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -2013,7 +2015,7 @@ impl UpsertModelInfo for GrpcEvent {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use GrpcEventIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -2144,7 +2146,7 @@ impl UpsertModelInfo for Plugin {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use PluginIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -2229,7 +2231,7 @@ impl UpsertModelInfo for SyncState {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use SyncStateIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -2312,7 +2314,7 @@ impl UpsertModelInfo for KeyValue {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use KeyValueIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -2525,36 +2527,3 @@ impl AnyModel {
}
}
/// Metadata and (de)serialization hooks a model type must provide so the
/// generic upsert/find/delete helpers can build SQL for it.
pub trait UpsertModelInfo {
    /// Table the model's rows live in.
    fn table_name() -> impl IntoTableRef + IntoIden;
    /// Primary-key column used for lookups and upsert conflict targets.
    fn id_column() -> impl IntoIden + Eq + Clone;
    /// Produce a fresh ID for a newly inserted row.
    fn generate_id() -> String;
    /// Default ordering (column + direction) for list queries.
    fn order_by() -> (impl IntoColumnRef, Order);
    /// The model instance's current ID value.
    fn get_id(&self) -> String;
    /// Column/value pairs to write on insert, derived from `self` and the
    /// update source (which determines how timestamps are treated).
    fn insert_values(
        self,
        source: &UpdateSource,
    ) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>>;
    /// Columns that are overwritten when an upsert hits an existing row.
    fn update_columns() -> Vec<impl IntoIden>;
    /// Build a model instance from a SQL result row.
    fn from_row(row: &Row) -> rusqlite::Result<Self>
    where
        Self: Sized;
}
/// Compute the timestamp value to write for `created_at`/`updated_at` during
/// an upsert, based on where the update originated.
///
/// Sync and import operations preserve the incoming timestamp (falling back to
/// "now" when the data carries no timestamp at all), while every other update
/// source always stamps the current time.
fn upsert_date(update_source: &UpdateSource, dt: NaiveDateTime) -> SimpleExpr {
    let preserves_timestamps =
        matches!(update_source, UpdateSource::Sync | UpdateSource::Import);
    if preserves_timestamps && dt.and_utc().timestamp() != 0 {
        dt.into()
    } else {
        // Either this source always stamps "now", or the incoming (partial)
        // data had a zero/missing timestamp.
        Utc::now().naive_utc().into()
    }
}

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{GrpcRequest, HttpRequest, WebsocketRequest};
@@ -8,7 +8,7 @@ pub enum AnyRequest {
WebsocketRequest(WebsocketRequest),
}
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_any_request(&self, id: &str) -> Result<AnyRequest> {
if let Ok(http_request) = self.get_http_request(id) {
Ok(AnyRequest::HttpRequest(http_request))

View File

@@ -1,10 +1,10 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{Environment, Folder, GrpcRequest, HttpRequest, WebsocketRequest, Workspace};
use crate::util::{BatchUpsertResult, UpdateSource};
use log::info;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn batch_upsert(
&self,
workspaces: Vec<Workspace>,

View File

@@ -1,9 +1,9 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{CookieJar, CookieJarIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_cookie_jar(&self, id: &str) -> Result<CookieJar> {
self.find_one(CookieJarIden::Id, id)
}

View File

@@ -1,11 +1,11 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Error::{MissingBaseEnvironment, MultipleBaseEnvironments};
use crate::error::Result;
use crate::models::{Environment, EnvironmentIden, EnvironmentVariable};
use crate::util::UpdateSource;
use log::{info, warn};
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_environment(&self, id: &str) -> Result<Environment> {
self.find_one(EnvironmentIden::Id, id)
}

View File

@@ -1,5 +1,5 @@
use crate::connection_or_tx::ConnectionOrTx;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{
Environment, EnvironmentIden, Folder, FolderIden, GrpcRequest, GrpcRequestIden, HttpRequest,
@@ -9,7 +9,7 @@ use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_folder(&self, id: &str) -> Result<Folder> {
self.find_one(FolderIden::Id, id)
}
@@ -19,7 +19,7 @@ impl<'a> DbContext<'a> {
}
pub fn delete_folder(&self, folder: &Folder, source: &UpdateSource) -> Result<Folder> {
match self.conn {
match self.conn() {
ConnectionOrTx::Connection(_) => {}
ConnectionOrTx::Transaction(_) => {}
}

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{GraphQlIntrospection, GraphQlIntrospectionIden};
use crate::util::UpdateSource;
@@ -6,7 +6,7 @@ use chrono::{Duration, Utc};
use sea_query::{Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_graphql_introspection(&self, request_id: &str) -> Option<GraphQlIntrospection> {
self.find_optional(GraphQlIntrospectionIden::RequestId, request_id)
}
@@ -44,7 +44,7 @@ impl<'a> DbContext<'a> {
.cond_where(Expr::col(GraphQlIntrospectionIden::UpdatedAt).lt(cutoff))
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.resolve().prepare(sql.as_str())?;
let mut stmt = self.conn().resolve().prepare(sql.as_str())?;
stmt.execute(&*params.as_params())?;
Ok(())
}

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{GrpcConnection, GrpcConnectionIden, GrpcConnectionState};
use crate::queries::MAX_HISTORY_ITEMS;
@@ -7,7 +7,7 @@ use log::debug;
use sea_query::{Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_grpc_connection(&self, id: &str) -> Result<GrpcConnection> {
self.find_one(GrpcConnectionIden::Id, id)
}
@@ -71,7 +71,7 @@ impl<'a> DbContext<'a> {
.values([(GrpcConnectionIden::State, closed.as_str().into())])
.cond_where(Expr::col(GrpcConnectionIden::State).ne(closed.as_str()))
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.prepare(sql.as_str())?;
let mut stmt = self.conn().prepare(sql.as_str())?;
stmt.execute(&*params.as_params())?;
Ok(())
}

View File

@@ -1,9 +1,9 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{GrpcEvent, GrpcEventIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_grpc_events(&self, id: &str) -> Result<GrpcEvent> {
self.find_one(GrpcEventIden::Id, id)
}

View File

@@ -1,12 +1,12 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{Folder, FolderIden, GrpcRequest, GrpcRequestIden, HttpRequestHeader};
use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_grpc_request(&self, id: &str) -> Result<GrpcRequest> {
self.find_one(GrpcRequestIden::Id, id)
}

View File

@@ -1,12 +1,12 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{Folder, FolderIden, HttpRequest, HttpRequestHeader, HttpRequestIden};
use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_http_request(&self, id: &str) -> Result<HttpRequest> {
self.find_one(HttpRequestIden::Id, id)
}

View File

@@ -1,9 +1,9 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{HttpResponseEvent, HttpResponseEventIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn list_http_response_events(&self, response_id: &str) -> Result<Vec<HttpResponseEvent>> {
self.find_many(HttpResponseEventIden::ResponseId, response_id, None)
}

View File

@@ -1,5 +1,5 @@
use crate::blob_manager::BlobManager;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{HttpResponse, HttpResponseIden, HttpResponseState};
use crate::queries::MAX_HISTORY_ITEMS;
@@ -9,7 +9,7 @@ use sea_query::{Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
use std::fs;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_http_response(&self, id: &str) -> Result<HttpResponse> {
self.find_one(HttpResponseIden::Id, id)
}
@@ -101,7 +101,7 @@ impl<'a> DbContext<'a> {
.values([(HttpResponseIden::State, closed.as_str().into())])
.cond_where(Expr::col(HttpResponseIden::State).ne(closed.as_str()))
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.prepare(sql.as_str())?;
let mut stmt = self.conn().prepare(sql.as_str())?;
stmt.execute(&*params.as_params())?;
Ok(())
}

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{KeyValue, KeyValueIden, UpsertModelInfo};
use crate::util::UpdateSource;
@@ -7,7 +7,7 @@ use log::error;
use sea_query::{Asterisk, Cond, Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn list_key_values(&self) -> Result<Vec<KeyValue>> {
let (sql, params) = Query::select()
.from(KeyValueIden::Table)
@@ -18,7 +18,7 @@ impl<'a> DbContext<'a> {
// TODO: Add migration to delete key/values with NULL IDs later on, then remove this
.cond_where(Expr::col(KeyValueIden::Id).is_not_null())
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.prepare(sql.as_str())?;
let mut stmt = self.conn().prepare(sql.as_str())?;
let items = stmt.query_map(&*params.as_params(), KeyValue::from_row)?;
Ok(items.map(|v| v.unwrap()).collect())
}
@@ -86,7 +86,7 @@ impl<'a> DbContext<'a> {
.add(Expr::col(KeyValueIden::Key).eq(key)),
)
.build_rusqlite(SqliteQueryBuilder);
self.conn.resolve().query_row(sql.as_str(), &*params.as_params(), KeyValue::from_row).ok()
self.conn().resolve().query_row(sql.as_str(), &*params.as_params(), KeyValue::from_row).ok()
}
pub fn set_key_value_dte(

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::util::ModelPayload;
use rusqlite::params;
@@ -11,13 +11,13 @@ pub struct PersistedModelChange {
pub payload: ModelPayload,
}
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn list_model_changes_after(
&self,
after_id: i64,
limit: usize,
) -> Result<Vec<PersistedModelChange>> {
let mut stmt = self.conn.prepare(
let mut stmt = self.conn().prepare(
r#"
SELECT id, created_at, payload
FROM model_changes
@@ -46,7 +46,7 @@ impl<'a> DbContext<'a> {
since_id: i64,
limit: usize,
) -> Result<Vec<PersistedModelChange>> {
let mut stmt = self.conn.prepare(
let mut stmt = self.conn().prepare(
r#"
SELECT id, created_at, payload
FROM model_changes
@@ -72,7 +72,7 @@ impl<'a> DbContext<'a> {
pub fn prune_model_changes_older_than_days(&self, days: i64) -> Result<usize> {
let offset = format!("-{days} days");
Ok(self.conn.resolve().execute(
Ok(self.conn().resolve().execute(
r#"
DELETE FROM model_changes
WHERE created_at < STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', ?1)
@@ -83,7 +83,7 @@ impl<'a> DbContext<'a> {
pub fn prune_model_changes_older_than_hours(&self, hours: i64) -> Result<usize> {
let offset = format!("-{hours} hours");
Ok(self.conn.resolve().execute(
Ok(self.conn().resolve().execute(
r#"
DELETE FROM model_changes
WHERE created_at < STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', ?1)
@@ -162,7 +162,7 @@ mod tests {
let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
assert_eq!(changes.len(), 1);
db.conn
db.conn()
.resolve()
.execute(
"UPDATE model_changes SET created_at = '2000-01-01 00:00:00.000' WHERE id = ?1",
@@ -199,7 +199,7 @@ mod tests {
assert_eq!(all.len(), 2);
let fixed_ts = "2026-02-16 00:00:00.000";
db.conn
db.conn()
.resolve()
.execute("UPDATE model_changes SET created_at = ?1", params![fixed_ts])
.expect("Failed to normalize timestamps");
@@ -229,7 +229,7 @@ mod tests {
let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
assert_eq!(changes.len(), 1);
db.conn
db.conn()
.resolve()
.execute(
"UPDATE model_changes SET created_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', '-2 hours') WHERE id = ?1",
@@ -264,7 +264,7 @@ mod tests {
"change": { "type": "upsert", "created": false }
});
db.conn
db.conn()
.resolve()
.execute(
r#"

View File

@@ -1,11 +1,11 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{PluginKeyValue, PluginKeyValueIden};
use sea_query::Keyword::CurrentTimestamp;
use sea_query::{Asterisk, Cond, Expr, OnConflict, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_plugin_key_value(&self, plugin_name: &str, key: &str) -> Option<PluginKeyValue> {
let (sql, params) = Query::select()
.from(PluginKeyValueIden::Table)
@@ -16,7 +16,7 @@ impl<'a> DbContext<'a> {
.add(Expr::col(PluginKeyValueIden::Key).eq(key)),
)
.build_rusqlite(SqliteQueryBuilder);
self.conn.resolve().query_row(sql.as_str(), &*params.as_params(), |row| row.try_into()).ok()
self.conn().resolve().query_row(sql.as_str(), &*params.as_params(), |row| row.try_into()).ok()
}
pub fn set_plugin_key_value(
@@ -52,7 +52,7 @@ impl<'a> DbContext<'a> {
.build_rusqlite(SqliteQueryBuilder);
let mut stmt =
self.conn.prepare(sql.as_str()).expect("Failed to prepare PluginKeyValue upsert");
self.conn().prepare(sql.as_str()).expect("Failed to prepare PluginKeyValue upsert");
let m: PluginKeyValue = stmt
.query_row(&*params.as_params(), |row| row.try_into())
.expect("Failed to upsert KeyValue");
@@ -73,7 +73,7 @@ impl<'a> DbContext<'a> {
.add(Expr::col(PluginKeyValueIden::Key).eq(key)),
)
.build_rusqlite(SqliteQueryBuilder);
self.conn.execute(sql.as_str(), &*params.as_params())?;
self.conn().execute(sql.as_str(), &*params.as_params())?;
Ok(true)
}
}

View File

@@ -1,9 +1,9 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{Plugin, PluginIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_plugin(&self, id: &str) -> Result<Plugin> {
self.find_one(PluginIden::Id, id)
}

View File

@@ -1,11 +1,11 @@
use std::collections::HashMap;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{EditorKeymap, Settings, SettingsIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_settings(&self) -> Settings {
let id = "default".to_string();

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{SyncState, SyncStateIden, UpsertModelInfo};
use crate::util::UpdateSource;
@@ -6,7 +6,7 @@ use sea_query::{Asterisk, Cond, Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
use std::path::Path;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_sync_state(&self, id: &str) -> Result<SyncState> {
self.find_one(SyncStateIden::Id, id)
}
@@ -29,7 +29,7 @@ impl<'a> DbContext<'a> {
.add(Expr::col(SyncStateIden::SyncDir).eq(sync_dir.to_string_lossy())),
)
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.prepare(sql.as_str())?;
let mut stmt = self.conn().prepare(sql.as_str())?;
let items = stmt.query_map(&*params.as_params(), SyncState::from_row)?;
Ok(items.map(|v| v.unwrap()).collect())
}

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{WebsocketConnection, WebsocketConnectionIden, WebsocketConnectionState};
use crate::queries::MAX_HISTORY_ITEMS;
@@ -7,7 +7,7 @@ use log::debug;
use sea_query::{Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_websocket_connection(&self, id: &str) -> Result<WebsocketConnection> {
self.find_one(WebsocketConnectionIden::Id, id)
}
@@ -90,7 +90,7 @@ impl<'a> DbContext<'a> {
.values([(WebsocketConnectionIden::State, closed.as_str().into())])
.cond_where(Expr::col(WebsocketConnectionIden::State).ne(closed.as_str()))
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.prepare(sql.as_str())?;
let mut stmt = self.conn().prepare(sql.as_str())?;
stmt.execute(&*params.as_params())?;
Ok(())
}

View File

@@ -1,9 +1,9 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{WebsocketEvent, WebsocketEventIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_websocket_event(&self, id: &str) -> Result<WebsocketEvent> {
self.find_one(WebsocketEventIden::Id, id)
}

View File

@@ -1,5 +1,5 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{
Folder, FolderIden, HttpRequestHeader, WebsocketRequest, WebsocketRequestIden,
@@ -8,7 +8,7 @@ use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_websocket_request(&self, id: &str) -> Result<WebsocketRequest> {
self.find_one(WebsocketRequestIden::Id, id)
}

View File

@@ -1,10 +1,10 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{WorkspaceMeta, WorkspaceMetaIden};
use crate::util::UpdateSource;
use log::info;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_workspace_meta(&self, workspace_id: &str) -> Option<WorkspaceMeta> {
self.find_optional(WorkspaceMetaIden::WorkspaceId, workspace_id)
}

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{
EnvironmentIden, FolderIden, GrpcRequestIden, HttpRequestHeader, HttpRequestIden,
@@ -8,7 +8,7 @@ use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_workspace(&self, id: &str) -> Result<Workspace> {
self.find_one(WorkspaceIden::Id, id)
}

View File

@@ -1,11 +1,11 @@
use crate::connection_or_tx::ConnectionOrTx;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Error::GenericError;
use crate::util::ModelPayload;
use r2d2::Pool;
use r2d2_sqlite::SqliteConnectionManager;
use rusqlite::TransactionBehavior;
use std::sync::{Arc, Mutex, mpsc};
use yaak_database::{ConnectionOrTx, DbContext};
#[derive(Debug, Clone)]
pub struct QueryManager {
@@ -18,19 +18,20 @@ impl QueryManager {
QueryManager { pool: Arc::new(Mutex::new(pool)), events_tx }
}
pub fn connect(&self) -> DbContext<'_> {
pub fn connect(&self) -> ClientDb<'_> {
let conn = self
.pool
.lock()
.expect("Failed to gain lock on DB")
.get()
.expect("Failed to get a new DB connection from the pool");
DbContext { _events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) }
let ctx = DbContext::new(ConnectionOrTx::Connection(conn));
ClientDb::new(ctx, self.events_tx.clone())
}
pub fn with_conn<F, T>(&self, func: F) -> T
where
F: FnOnce(&DbContext) -> T,
F: FnOnce(&ClientDb) -> T,
{
let conn = self
.pool
@@ -39,17 +40,15 @@ impl QueryManager {
.get()
.expect("Failed to get new DB connection from the pool");
let db_context = DbContext {
_events_tx: self.events_tx.clone(),
conn: ConnectionOrTx::Connection(conn),
};
let ctx = DbContext::new(ConnectionOrTx::Connection(conn));
let db = ClientDb::new(ctx, self.events_tx.clone());
func(&db_context)
func(&db)
}
pub fn with_tx<T, E>(
&self,
func: impl FnOnce(&DbContext) -> std::result::Result<T, E>,
func: impl FnOnce(&ClientDb) -> std::result::Result<T, E>,
) -> std::result::Result<T, E>
where
E: From<crate::error::Error>,
@@ -64,12 +63,10 @@ impl QueryManager {
.transaction_with_behavior(TransactionBehavior::Immediate)
.expect("Failed to start DB transaction");
let db_context = DbContext {
_events_tx: self.events_tx.clone(),
conn: ConnectionOrTx::Transaction(&tx),
};
let ctx = DbContext::new(ConnectionOrTx::Transaction(&tx));
let db = ClientDb::new(ctx, self.events_tx.clone());
match func(&db_context) {
match func(&db) {
Ok(val) => {
tx.commit()
.map_err(|e| GenericError(format!("Failed to commit transaction {e:?}")))?;

View File

@@ -1,34 +1,16 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{
AnyModel, Environment, Folder, GrpcRequest, HttpRequest, UpsertModelInfo, WebsocketRequest,
Workspace, WorkspaceIden,
};
use chrono::{NaiveDateTime, Utc};
use nanoid::nanoid;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use ts_rs::TS;
use yaak_core::WorkspaceContext;
pub fn generate_prefixed_id(prefix: &str) -> String {
format!("{prefix}_{}", generate_id())
}
pub fn generate_id() -> String {
generate_id_of_length(10)
}
pub fn generate_id_of_length(n: usize) -> String {
let alphabet: [char; 57] = [
'2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j',
'k', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C',
'D', 'E', 'F', 'G', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W',
'X', 'Y', 'Z',
];
nanoid!(n, &alphabet)
}
pub use yaak_database::{ModelChangeEvent, generate_id, generate_id_of_length, generate_prefixed_id};
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
@@ -39,14 +21,6 @@ pub struct ModelPayload {
pub change: ModelChangeEvent,
}
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "gen_models.ts")]
pub enum ModelChangeEvent {
Upsert { created: bool },
Delete,
}
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "gen_models.ts")]
@@ -62,6 +36,30 @@ impl UpdateSource {
pub fn from_window_label(label: impl Into<String>) -> Self {
Self::Window { label: label.into() }
}
pub fn to_db(&self) -> yaak_database::UpdateSource {
match self {
UpdateSource::Background => yaak_database::UpdateSource::Background,
UpdateSource::Import => yaak_database::UpdateSource::Import,
UpdateSource::Plugin => yaak_database::UpdateSource::Plugin,
UpdateSource::Sync => yaak_database::UpdateSource::Sync,
UpdateSource::Window { label } => {
yaak_database::UpdateSource::Window { label: label.clone() }
}
}
}
}
impl From<yaak_database::UpdateSource> for UpdateSource {
fn from(source: yaak_database::UpdateSource) -> Self {
match source {
yaak_database::UpdateSource::Background => UpdateSource::Background,
yaak_database::UpdateSource::Import => UpdateSource::Import,
yaak_database::UpdateSource::Plugin => UpdateSource::Plugin,
yaak_database::UpdateSource::Sync => UpdateSource::Sync,
yaak_database::UpdateSource::Window { label } => UpdateSource::Window { label },
}
}
}
#[derive(Default, Debug, Deserialize, Serialize)]
@@ -86,7 +84,7 @@ pub struct BatchUpsertResult {
}
pub fn get_workspace_export_resources(
db: &DbContext,
db: &ClientDb,
yaak_version: &str,
workspace_ids: Vec<&str>,
include_private_environments: bool,

View File

@@ -18,12 +18,12 @@ export type CallHttpAuthenticationActionRequest = { index: number, pluginRefId:
export type CallHttpAuthenticationRequest = { contextId: string, values: { [key in string]?: JsonPrimitive }, method: string, url: string, headers: Array<HttpHeader>, };
export type CallHttpAuthenticationResponse = {
export type CallHttpAuthenticationResponse = {
/**
* HTTP headers to add to the request. Existing headers will be replaced, while
* new headers will be added.
*/
setHeaders?: Array<HttpHeader>,
setHeaders?: Array<HttpHeader>,
/**
* Query parameters to add to the request. Existing params will be replaced, while
* new params will be added.
@@ -78,7 +78,7 @@ export type ExportHttpRequestRequest = { httpRequest: HttpRequest, };
export type ExportHttpRequestResponse = { content: string, };
export type FileFilter = { name: string,
export type FileFilter = { name: string,
/**
* File extensions to require
*/
@@ -100,149 +100,149 @@ export type FormInputAccordion = { label: string, inputs?: Array<FormInput>, hid
export type FormInputBanner = { inputs?: Array<FormInput>, hidden?: boolean, color?: Color, };
export type FormInputBase = {
export type FormInputBase = {
/**
* The name of the input. The value will be stored at this object attribute in the resulting data
*/
name: string,
name: string,
/**
* Whether this input is visible for the given configuration. Use this to
* make branching forms.
*/
hidden?: boolean,
hidden?: boolean,
/**
* Whether the user must fill in the argument
*/
optional?: boolean,
optional?: boolean,
/**
* The label of the input
*/
label?: string,
label?: string,
/**
* Visually hide the label of the input
*/
hideLabel?: boolean,
hideLabel?: boolean,
/**
* The default value
*/
defaultValue?: string, disabled?: boolean,
defaultValue?: string, disabled?: boolean,
/**
* Longer description of the input, likely shown in a tooltip
*/
description?: string, };
export type FormInputCheckbox = {
export type FormInputCheckbox = {
/**
* The name of the input. The value will be stored at this object attribute in the resulting data
*/
name: string,
name: string,
/**
* Whether this input is visible for the given configuration. Use this to
* make branching forms.
*/
hidden?: boolean,
hidden?: boolean,
/**
* Whether the user must fill in the argument
*/
optional?: boolean,
optional?: boolean,
/**
* The label of the input
*/
label?: string,
label?: string,
/**
* Visually hide the label of the input
*/
hideLabel?: boolean,
hideLabel?: boolean,
/**
* The default value
*/
defaultValue?: string, disabled?: boolean,
defaultValue?: string, disabled?: boolean,
/**
* Longer description of the input, likely shown in a tooltip
*/
description?: string, };
export type FormInputEditor = {
export type FormInputEditor = {
/**
* Placeholder for the text input
*/
placeholder?: string | null,
placeholder?: string | null,
/**
* Don't show the editor gutter (line numbers, folds, etc.)
*/
hideGutter?: boolean,
hideGutter?: boolean,
/**
* Language for syntax highlighting
*/
language?: EditorLanguage, readOnly?: boolean,
language?: EditorLanguage, readOnly?: boolean,
/**
* Fixed number of visible rows
*/
rows?: number, completionOptions?: Array<GenericCompletionOption>,
rows?: number, completionOptions?: Array<GenericCompletionOption>,
/**
* The name of the input. The value will be stored at this object attribute in the resulting data
*/
name: string,
name: string,
/**
* Whether this input is visible for the given configuration. Use this to
* make branching forms.
*/
hidden?: boolean,
hidden?: boolean,
/**
* Whether the user must fill in the argument
*/
optional?: boolean,
optional?: boolean,
/**
* The label of the input
*/
label?: string,
label?: string,
/**
* Visually hide the label of the input
*/
hideLabel?: boolean,
hideLabel?: boolean,
/**
* The default value
*/
defaultValue?: string, disabled?: boolean,
defaultValue?: string, disabled?: boolean,
/**
* Longer description of the input, likely shown in a tooltip
*/
description?: string, };
export type FormInputFile = {
export type FormInputFile = {
/**
* The title of the file selection window
*/
title: string,
title: string,
/**
* Allow selecting multiple files
*/
multiple?: boolean, directory?: boolean, defaultPath?: string, filters?: Array<FileFilter>,
multiple?: boolean, directory?: boolean, defaultPath?: string, filters?: Array<FileFilter>,
/**
* The name of the input. The value will be stored at this object attribute in the resulting data
*/
name: string,
name: string,
/**
* Whether this input is visible for the given configuration. Use this to
* make branching forms.
*/
hidden?: boolean,
hidden?: boolean,
/**
* Whether the user must fill in the argument
*/
optional?: boolean,
optional?: boolean,
/**
* The label of the input
*/
label?: string,
label?: string,
/**
* Visually hide the label of the input
*/
hideLabel?: boolean,
hideLabel?: boolean,
/**
* The default value
*/
defaultValue?: string, disabled?: boolean,
defaultValue?: string, disabled?: boolean,
/**
* Longer description of the input, likely shown in a tooltip
*/
@@ -250,63 +250,63 @@ description?: string, };
export type FormInputHStack = { inputs?: Array<FormInput>, hidden?: boolean, };
export type FormInputHttpRequest = {
export type FormInputHttpRequest = {
/**
* The name of the input. The value will be stored at this object attribute in the resulting data
*/
name: string,
name: string,
/**
* Whether this input is visible for the given configuration. Use this to
* make branching forms.
*/
hidden?: boolean,
hidden?: boolean,
/**
* Whether the user must fill in the argument
*/
optional?: boolean,
optional?: boolean,
/**
* The label of the input
*/
label?: string,
label?: string,
/**
* Visually hide the label of the input
*/
hideLabel?: boolean,
hideLabel?: boolean,
/**
* The default value
*/
defaultValue?: string, disabled?: boolean,
defaultValue?: string, disabled?: boolean,
/**
* Longer description of the input, likely shown in a tooltip
*/
description?: string, };
export type FormInputKeyValue = {
export type FormInputKeyValue = {
/**
* The name of the input. The value will be stored at this object attribute in the resulting data
*/
name: string,
name: string,
/**
* Whether this input is visible for the given configuration. Use this to
* make branching forms.
*/
hidden?: boolean,
hidden?: boolean,
/**
* Whether the user must fill in the argument
*/
optional?: boolean,
optional?: boolean,
/**
* The label of the input
*/
label?: string,
label?: string,
/**
* Visually hide the label of the input
*/
hideLabel?: boolean,
hideLabel?: boolean,
/**
* The default value
*/
defaultValue?: string, disabled?: boolean,
defaultValue?: string, disabled?: boolean,
/**
* Longer description of the input, likely shown in a tooltip
*/
@@ -314,36 +314,36 @@ description?: string, };
export type FormInputMarkdown = { content: string, hidden?: boolean, };
export type FormInputSelect = {
export type FormInputSelect = {
/**
* The options that will be available in the select input
*/
options: Array<FormInputSelectOption>,
options: Array<FormInputSelectOption>,
/**
* The name of the input. The value will be stored at this object attribute in the resulting data
*/
name: string,
name: string,
/**
* Whether this input is visible for the given configuration. Use this to
* make branching forms.
*/
hidden?: boolean,
hidden?: boolean,
/**
* Whether the user must fill in the argument
*/
optional?: boolean,
optional?: boolean,
/**
* The label of the input
*/
label?: string,
label?: string,
/**
* Visually hide the label of the input
*/
hideLabel?: boolean,
hideLabel?: boolean,
/**
* The default value
*/
defaultValue?: string, disabled?: boolean,
defaultValue?: string, disabled?: boolean,
/**
* Longer description of the input, likely shown in a tooltip
*/
@@ -351,44 +351,44 @@ description?: string, };
export type FormInputSelectOption = { label: string, value: string, };
export type FormInputText = {
export type FormInputText = {
/**
* Placeholder for the text input
*/
placeholder?: string | null,
placeholder?: string | null,
/**
* Placeholder for the text input
*/
password?: boolean,
password?: boolean,
/**
* Whether to allow newlines in the input, like a <textarea/>
*/
multiLine?: boolean, completionOptions?: Array<GenericCompletionOption>,
multiLine?: boolean, completionOptions?: Array<GenericCompletionOption>,
/**
* The name of the input. The value will be stored at this object attribute in the resulting data
*/
name: string,
name: string,
/**
* Whether this input is visible for the given configuration. Use this to
* make branching forms.
*/
hidden?: boolean,
hidden?: boolean,
/**
* Whether the user must fill in the argument
*/
optional?: boolean,
optional?: boolean,
/**
* The label of the input
*/
label?: string,
label?: string,
/**
* Visually hide the label of the input
*/
hideLabel?: boolean,
hideLabel?: boolean,
/**
* The default value
*/
defaultValue?: string, disabled?: boolean,
defaultValue?: string, disabled?: boolean,
/**
* Longer description of the input, likely shown in a tooltip
*/
@@ -474,7 +474,7 @@ export type ListOpenWorkspacesResponse = { workspaces: Array<WorkspaceInfo>, };
export type OpenExternalUrlRequest = { url: string, };
export type OpenWindowRequest = { url: string,
export type OpenWindowRequest = { url: string,
/**
* Label for the window. If not provided, a random one will be generated.
*/
@@ -486,15 +486,15 @@ export type PromptFormRequest = { id: string, title: string, description?: strin
export type PromptFormResponse = { values: { [key in string]?: JsonPrimitive } | null, done?: boolean, };
export type PromptTextRequest = { id: string, title: string, label: string, description?: string, defaultValue?: string, placeholder?: string,
export type PromptTextRequest = { id: string, title: string, label: string, description?: string, defaultValue?: string, placeholder?: string,
/**
* Text to add to the confirmation button
*/
confirmText?: string, password?: boolean,
confirmText?: string, password?: boolean,
/**
* Text to add to the cancel button
*/
cancelText?: string,
cancelText?: string,
/**
* Require the user to enter a non-empty value
*/
@@ -524,12 +524,12 @@ export type SetKeyValueResponse = {};
export type ShowToastRequest = { message: string, color?: Color, icon?: Icon, timeout?: number, };
export type TemplateFunction = { name: string, previewType?: TemplateFunctionPreviewType, description?: string,
export type TemplateFunction = { name: string, previewType?: TemplateFunctionPreviewType, description?: string,
/**
* Also support alternative names. This is useful for not breaking existing
* tags when changing the `name` property
*/
aliases?: Array<string>, args: Array<TemplateFunctionArg>,
aliases?: Array<string>, args: Array<TemplateFunctionArg>,
/**
* A list of arg names to show in the inline preview. If not provided, none will be shown (for privacy reasons).
*/
@@ -546,23 +546,23 @@ export type TemplateRenderRequest = { data: JsonValue, purpose: RenderPurpose, }
export type TemplateRenderResponse = { data: JsonValue, };
export type Theme = {
export type Theme = {
/**
* How the theme is identified. This should never be changed
*/
id: string,
id: string,
/**
* The friendly name of the theme to be displayed to the user
*/
label: string,
label: string,
/**
* Whether the theme will be used for dark or light appearance
*/
dark: boolean,
dark: boolean,
/**
* The default top-level colors for the theme
*/
base: ThemeComponentColors,
base: ThemeComponentColors,
/**
* Optionally override theme for individual UI components for more control
*/

View File

@@ -205,7 +205,7 @@ impl PluginManager {
pub fn get_plugins_dir(&self) -> PathBuf {
if self.dev_mode {
// Use plugins directly for easy development
// Tauri runs from crates-tauri/yaak-app/, so go up two levels to reach project root
// Tauri runs from crates-tauri/yaak-app-client/, so go up two levels to reach project root
env::current_dir()
.map(|cwd| cwd.join("../../plugins").canonicalize().unwrap())
.unwrap_or_else(|_| self.vendored_plugin_dir.clone())

View File

@@ -0,0 +1,25 @@
[package]
name = "yaak-proxy"
version = "0.1.0"
edition = "2024"
publish = false
[dependencies]
hyper = { version = "1", features = ["http1", "http2", "server", "client"] }
hyper-util = { version = "0.1", features = ["tokio", "server-auto", "client-legacy"] }
http-body-util = "0.1"
http = "1"
bytes = "1"
tokio = { workspace = true, features = [
"rt-multi-thread",
"net",
"sync",
"macros",
"time",
"io-util",
] }
rcgen = "0.13"
rustls = { workspace = true, features = ["ring"] }
rustls-native-certs = "0.8"
tokio-rustls = "0.26"
pem = "3"

View File

@@ -0,0 +1,114 @@
use std::pin::Pin;
use std::sync::mpsc as std_mpsc;
use std::task::{Context, Poll};
use std::time::Instant;
use bytes::Bytes;
use hyper::body::{Body, Frame};
use crate::ProxyEvent;
/// A body wrapper that emits `ResponseBodyChunk` per frame and
/// `ResponseBodyComplete` when the stream finishes.
pub struct MeasuredBody<B> {
    // Wrapped inner body whose frames are forwarded unchanged.
    inner: B,
    // Correlation id so consumers can match events to a request.
    request_id: u64,
    // Running total of body bytes observed so far.
    bytes_count: u64,
    // Data frames buffered so the complete body can be attached to the
    // final `ResponseBodyComplete` event.
    chunks: Vec<Bytes>,
    // Channel events are emitted on; send failures are ignored throughout.
    event_tx: std_mpsc::Sender<ProxyEvent>,
    // Request start time, used to compute `elapsed_ms` on completion.
    start: Instant,
    // Set once the completion event has been sent, so it fires exactly once.
    finished: bool,
}
impl<B> MeasuredBody<B> {
    /// Wrap `inner`, tagging all emitted events with `request_id` and
    /// measuring elapsed time relative to `start`.
    pub fn new(
        inner: B,
        request_id: u64,
        start: Instant,
        event_tx: std_mpsc::Sender<ProxyEvent>,
    ) -> Self {
        Self {
            inner,
            request_id,
            bytes_count: 0,
            chunks: Vec::new(),
            event_tx,
            start,
            finished: false,
        }
    }

    /// Emit the `ResponseBodyComplete` event, concatenating all buffered
    /// chunks into the final body payload. Idempotent: only the first call
    /// sends anything.
    fn send_complete(&mut self) {
        // Guard: completion must be reported at most once.
        if self.finished {
            return;
        }
        self.finished = true;

        // Assemble the buffered frames into one contiguous buffer, or
        // `None` when no data frames were ever observed.
        let body = if self.chunks.is_empty() {
            None
        } else {
            let mut assembled = Vec::with_capacity(self.bytes_count as usize);
            for piece in self.chunks.drain(..) {
                assembled.extend_from_slice(&piece);
            }
            Some(assembled)
        };

        // The receiver may already be gone during shutdown; ignore errors.
        let _ = self.event_tx.send(ProxyEvent::ResponseBodyComplete {
            id: self.request_id,
            body,
            size: self.bytes_count,
            elapsed_ms: self.start.elapsed().as_millis() as u64,
        });
    }
}
impl<B> Body for MeasuredBody<B>
where
    B: Body<Data = Bytes> + Unpin,
    B::Error: std::error::Error + Send + Sync + 'static,
{
    type Data = Bytes;
    type Error = B::Error;

    // Forward frames from the inner body, recording each data frame's size
    // and emitting one `ResponseBodyChunk` event per frame. Completion is
    // reported both on normal end-of-stream and on error.
    fn poll_frame(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Option<Result<Frame<Self::Data>, Self::Error>>> {
        // `B: Unpin` (and all other fields are Unpin), so re-pinning the
        // inner body with `Pin::new` is sound.
        let inner = Pin::new(&mut self.inner);
        match inner.poll_frame(cx) {
            Poll::Ready(Some(Ok(frame))) => {
                // Only data frames carry bytes; trailer frames have no data.
                if let Some(data) = frame.data_ref() {
                    let len = data.len();
                    self.bytes_count += len as u64;
                    self.chunks.push(data.clone());
                    let _ = self.event_tx.send(ProxyEvent::ResponseBodyChunk {
                        id: self.request_id,
                        bytes: len,
                    });
                }
                Poll::Ready(Some(Ok(frame)))
            }
            Poll::Ready(Some(Err(e))) => {
                // Treat a stream error as end-of-body for measurement.
                self.send_complete();
                Poll::Ready(Some(Err(e)))
            }
            Poll::Ready(None) => {
                // Stream finished normally.
                self.send_complete();
                Poll::Ready(None)
            }
            Poll::Pending => Poll::Pending,
        }
    }

    fn is_end_stream(&self) -> bool {
        self.inner.is_end_stream()
    }

    fn size_hint(&self) -> hyper::body::SizeHint {
        self.inner.size_hint()
    }
}
impl<B> Drop for MeasuredBody<B> {
    // Ensure the completion event still fires when the body is dropped
    // before being polled to the end of the stream.
    fn drop(&mut self) {
        self.send_complete();
    }
}

View File

@@ -0,0 +1,82 @@
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use rcgen::{BasicConstraints, Certificate, CertificateParams, IsCa, KeyPair, KeyUsagePurpose};
use rustls::ServerConfig;
use rustls::pki_types::{CertificateDer, PrivateKeyDer, PrivatePkcs8KeyDer};
/// A self-signed certificate authority that mints per-domain leaf
/// certificates for TLS interception.
pub struct CertificateAuthority {
    // The self-signed CA certificate used to sign leaf certificates.
    ca_cert: Certificate,
    // DER encoding of the CA cert; appended to every leaf chain and
    // exported as PEM via `ca_pem`.
    ca_cert_der: CertificateDer<'static>,
    // The CA's key pair, used when signing leaf certificates.
    ca_key: KeyPair,
    // Per-domain cache of generated `ServerConfig`s so each domain's leaf
    // certificate is only generated once.
    cache: Mutex<HashMap<String, Arc<ServerConfig>>>,
}
impl CertificateAuthority {
    /// Generate a fresh self-signed CA certificate and key pair.
    ///
    /// # Errors
    /// Returns an error if key generation or self-signing fails.
    pub fn new() -> Result<Self, Box<dyn std::error::Error + Send + Sync>> {
        let mut params = CertificateParams::default();
        // Mark the certificate as a CA so it can sign leaf certificates.
        params.is_ca = IsCa::Ca(BasicConstraints::Unconstrained);
        params.key_usages.push(KeyUsagePurpose::KeyCertSign);
        params.key_usages.push(KeyUsagePurpose::CrlSign);
        params
            .distinguished_name
            .push(rcgen::DnType::CommonName, "Debug Proxy CA");
        params
            .distinguished_name
            .push(rcgen::DnType::OrganizationName, "Debug Proxy");
        let key = KeyPair::generate()?;
        let ca_cert = params.self_signed(&key)?;
        let ca_cert_der = ca_cert.der().clone();
        Ok(Self {
            ca_cert,
            ca_cert_der,
            ca_key: key,
            cache: Mutex::new(HashMap::new()),
        })
    }

    /// The CA certificate as a PEM string, suitable for installing into a
    /// client's trust store.
    pub fn ca_pem(&self) -> String {
        pem::encode(&pem::Pem::new("CERTIFICATE", self.ca_cert_der.to_vec()))
    }

    /// Build (or fetch from cache) a rustls `ServerConfig` whose leaf
    /// certificate covers `domain` and is signed by this CA.
    ///
    /// # Errors
    /// Returns an error if certificate/key generation or `ServerConfig`
    /// construction fails.
    pub fn server_config(
        &self,
        domain: &str,
    ) -> Result<Arc<ServerConfig>, Box<dyn std::error::Error + Send + Sync>> {
        // Fast path: reuse a previously generated config. The lock is scoped
        // so it is released before the slow generation path below.
        {
            let cache = self.cache.lock().unwrap();
            if let Some(config) = cache.get(domain) {
                return Ok(config.clone());
            }
        }
        let mut params = CertificateParams::new(vec![domain.to_string()])?;
        params
            .distinguished_name
            .push(rcgen::DnType::CommonName, domain);
        let leaf_key = KeyPair::generate()?;
        // Sign the leaf with the CA so clients that trust the CA accept it.
        let leaf_cert = params.signed_by(&leaf_key, &self.ca_cert, &self.ca_key)?;
        let cert_der = leaf_cert.der().clone();
        let key_der = leaf_key.serialize_der();
        let mut config = ServerConfig::builder_with_provider(Arc::new(rustls::crypto::ring::default_provider()))
            .with_safe_default_protocol_versions()?
            .with_no_client_auth()
            .with_single_cert(
                // Present the full chain: leaf first, then the CA.
                vec![cert_der, self.ca_cert_der.clone()],
                PrivateKeyDer::Pkcs8(PrivatePkcs8KeyDer::from(key_der)),
            )?;
        // Offer both HTTP/2 and HTTP/1.1 via ALPN.
        config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
        let config = Arc::new(config);
        // Two concurrent callers may both generate a config for the same
        // domain; the later insert simply overwrites, which is harmless.
        self.cache
            .lock()
            .unwrap()
            .insert(domain.to_string(), config.clone());
        Ok(config)
    }
}

View File

@@ -0,0 +1,32 @@
use std::sync::mpsc as std_mpsc;
use std::sync::Arc;
use hyper::server::conn::http1;
use hyper::service::service_fn;
use tokio::net::TcpStream;
use crate::ProxyEvent;
use crate::cert::CertificateAuthority;
use crate::request::handle_request;
/// Serve one client TCP connection over HTTP/1, dispatching every request
/// received on it to `handle_request`.
///
/// # Errors
/// Returns any error produced while serving the connection.
pub(crate) async fn handle_connection(
    stream: TcpStream,
    event_tx: std_mpsc::Sender<ProxyEvent>,
    ca: Arc<CertificateAuthority>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Every request handled on this connection gets its own clone of the
    // event channel and the certificate authority.
    let service = service_fn(move |req| {
        let tx = event_tx.clone();
        let ca = ca.clone();
        async move { handle_request(req, tx, ca).await }
    });

    http1::Builder::new()
        // Keep the client's exact header casing when forwarding.
        .preserve_header_case(true)
        .title_case_headers(true)
        .serve_connection(hyper_util::rt::TokioIo::new(stream), service)
        // Allow CONNECT upgrades so HTTPS tunnels can be established.
        .with_upgrades()
        .await
        .map_err(|e| Box::new(e) as Box<dyn std::error::Error + Send + Sync>)
}

View File

@@ -0,0 +1,175 @@
pub mod body;
pub mod cert;
mod connection;
mod request;
use std::net::SocketAddr;
use std::sync::atomic::AtomicU64;
use std::sync::mpsc as std_mpsc;
use std::sync::Arc;
use cert::CertificateAuthority;
use tokio::net::TcpListener;
use connection::handle_connection;
static REQUEST_ID: AtomicU64 = AtomicU64::new(1);
/// Granular events emitted during request/response lifecycle.
/// Each event carries a request `id` so consumers can correlate events.
#[derive(Debug, Clone)]
pub enum ProxyEvent {
    /// A new request has been received from the client.
    RequestStart {
        id: u64,
        method: String,
        url: String,
        http_version: String,
    },
    /// A request header sent to the upstream server.
    RequestHeader { id: u64, name: String, value: String },
    /// The full request body (buffered before forwarding).
    RequestBody { id: u64, body: Vec<u8> },
    /// Response headers received from upstream.
    ResponseStart {
        id: u64,
        status: u16,
        http_version: String,
        /// Milliseconds from request start until response headers arrived.
        elapsed_ms: u64,
    },
    /// A response header received from the upstream server.
    ResponseHeader { id: u64, name: String, value: String },
    /// A chunk of the response body was received (emitted per-frame).
    ResponseBodyChunk { id: u64, bytes: usize },
    /// The response body stream has completed.
    ResponseBodyComplete {
        id: u64,
        /// The full buffered body, or `None` if no data frames were seen.
        body: Option<Vec<u8>>,
        /// Total body size in bytes.
        size: u64,
        /// Milliseconds from request start until the body completed.
        elapsed_ms: u64,
    },
    /// The upstream request failed.
    Error { id: u64, error: String },
}
/// Accumulated view of a proxied request, built from `ProxyEvent`s.
#[derive(Debug, Clone)]
pub struct CapturedRequest {
    // Correlation id matching the `id` on `ProxyEvent`s.
    pub id: u64,
    pub method: String,
    pub url: String,
    // NOTE(review): presumably filled from `ResponseStart` by the consumer —
    // this type is not populated anywhere in this file; confirm at call sites.
    pub status: Option<u16>,
    pub elapsed_ms: Option<u64>,
    // HTTP version used by the client.
    pub http_version: String,
    // HTTP version reported by the upstream server, once known.
    pub remote_http_version: Option<String>,
    pub request_headers: Vec<(String, String)>,
    pub request_body: Option<Vec<u8>>,
    pub response_headers: Vec<(String, String)>,
    pub response_body: Option<Vec<u8>>,
    pub response_body_size: u64,
    pub state: RequestState,
    pub error: Option<String>,
}
/// Lifecycle state of a `CapturedRequest`.
#[derive(Debug, Clone, PartialEq)]
pub enum RequestState {
    /// The request is being sent upstream.
    Sending,
    /// The response is being received.
    Receiving,
    /// The request/response cycle finished.
    Complete,
    /// The request failed; see `CapturedRequest::error`.
    Error,
}
/// Handle to a running proxy server. Dropping it shuts the proxy down.
pub struct ProxyHandle {
    // Oneshot used by `Drop` to tell the accept loop to exit.
    shutdown_tx: Option<tokio::sync::oneshot::Sender<()>>,
    // Joined on drop so the runtime thread fully terminates.
    thread_handle: Option<std::thread::JoinHandle<()>>,
    // Handed out (at most once) via `take_event_rx`.
    event_rx: Option<std_mpsc::Receiver<ProxyEvent>>,
    /// The port the proxy is actually listening on.
    pub port: u16,
    /// The CA certificate in PEM form, for installing in trust stores.
    pub ca_pem: String,
}
impl ProxyHandle {
    /// Take the event receiver. Can only be called once — returns `None` after the first call.
    ///
    /// All `ProxyEvent`s emitted by the proxy arrive on this receiver.
    pub fn take_event_rx(&mut self) -> Option<std_mpsc::Receiver<ProxyEvent>> {
        self.event_rx.take()
    }
}
impl Drop for ProxyHandle {
    /// Signal the proxy to shut down and wait for its thread to exit.
    fn drop(&mut self) {
        // Ask the accept loop to stop; the receiver may already be gone.
        if let Some(shutdown) = self.shutdown_tx.take() {
            let _ = shutdown.send(());
        }
        // Block until the runtime thread has fully wound down.
        if let Some(join_handle) = self.thread_handle.take() {
            let _ = join_handle.join();
        }
    }
}
/// Start the proxy on `127.0.0.1:port` (use port `0` for an OS-assigned
/// port) on a dedicated thread with its own Tokio runtime.
///
/// Blocks until the listener has bound, then returns a [`ProxyHandle`]
/// carrying the bound port, the CA PEM, and the event receiver.
///
/// # Errors
/// Returns an error string if CA creation, runtime creation, or binding
/// the listener fails.
pub fn start_proxy(port: u16) -> Result<ProxyHandle, String> {
    let ca = CertificateAuthority::new().map_err(|e| format!("Failed to create CA: {e}"))?;
    let ca_pem = ca.ca_pem();
    let ca = Arc::new(ca);
    // Events flow from connection tasks to whoever holds the receiver.
    let (event_tx, event_rx) = std_mpsc::channel();
    // Oneshot fired by `ProxyHandle::drop` to stop the accept loop.
    let (shutdown_tx, shutdown_rx) = tokio::sync::oneshot::channel();
    // Used to report the bind result (or a startup failure) synchronously.
    let (ready_tx, ready_rx) = std_mpsc::channel();
    let thread_handle = std::thread::spawn(move || {
        let rt = match tokio::runtime::Runtime::new() {
            Ok(rt) => rt,
            Err(e) => {
                let _ = ready_tx.send(Err(format!("Failed to create runtime: {e}")));
                return;
            }
        };
        rt.block_on(async move {
            let addr = SocketAddr::from(([127, 0, 0, 1], port));
            let listener = match TcpListener::bind(addr).await {
                Ok(l) => l,
                Err(e) => {
                    let _ = ready_tx.send(Err(format!("Failed to bind: {e}")));
                    return;
                }
            };
            // Report the actual port (meaningful when `port` was 0).
            let bound_port = listener.local_addr().unwrap().port();
            let _ = ready_tx.send(Ok(bound_port));
            let mut shutdown_rx = shutdown_rx;
            loop {
                // Accept connections until the shutdown oneshot fires.
                tokio::select! {
                    result = listener.accept() => {
                        match result {
                            Ok((stream, _addr)) => {
                                let tx = event_tx.clone();
                                let ca = ca.clone();
                                // Each connection is served concurrently.
                                tokio::spawn(async move {
                                    if let Err(e) = handle_connection(stream, tx, ca).await {
                                        eprintln!("Connection error: {e}");
                                    }
                                });
                            }
                            Err(e) => eprintln!("Accept error: {e}"),
                        }
                    }
                    _ = &mut shutdown_rx => {
                        break;
                    }
                }
            }
        });
    });
    // Block the caller until the server thread reports its bind outcome.
    match ready_rx.recv() {
        Ok(Ok(bound_port)) => Ok(ProxyHandle {
            shutdown_tx: Some(shutdown_tx),
            thread_handle: Some(thread_handle),
            event_rx: Some(event_rx),
            port: bound_port,
            ca_pem,
        }),
        Ok(Err(e)) => Err(e),
        // The thread died before sending a ready signal (channel closed).
        Err(_) => Err("Proxy thread died before binding".into()),
    }
}

View File

@@ -0,0 +1,390 @@
use std::convert::Infallible;
use std::sync::Arc;
use std::sync::atomic::Ordering;
use std::sync::mpsc as std_mpsc;
use std::time::Instant;
use bytes::Bytes;
use http_body_util::{BodyExt, Full};
use hyper::body::Incoming;
use hyper::header::HeaderMap;
use hyper::service::service_fn;
use hyper::{Method, Request, Response, StatusCode, Uri};
use hyper_util::client::legacy::Client;
use hyper_util::rt::TokioExecutor;
use hyper_util::server::conn::auto;
use rustls::ClientConfig;
use rustls::pki_types::ServerName;
use tokio::net::TcpStream;
use tokio_rustls::TlsAcceptor;
use crate::body::MeasuredBody;
use crate::cert::CertificateAuthority;
use crate::{ProxyEvent, REQUEST_ID};
type BoxBody = http_body_util::combinators::BoxBody<Bytes, hyper::Error>;
/// Wrap a fixed byte buffer as a boxed response body.
fn full_body(bytes: Bytes) -> BoxBody {
    // `Full`'s error type is `Infallible`; the empty match converts it to
    // the box's error type without ever constructing an error.
    let body = Full::new(bytes);
    body.map_err(|never| match never {}).boxed()
}
/// Wrap an upstream response body so it emits measurement events tagged
/// with request `id`, then box it for the response.
fn measured_incoming(
    incoming: Incoming,
    id: u64,
    start: Instant,
    tx: std_mpsc::Sender<ProxyEvent>,
) -> BoxBody {
    let measured = MeasuredBody::new(incoming, id, start, tx);
    measured.boxed()
}
/// Human-readable name for an HTTP protocol version.
fn version_str(v: hyper::Version) -> String {
    let name = match v {
        hyper::Version::HTTP_09 => "HTTP/0.9",
        hyper::Version::HTTP_10 => "HTTP/1.0",
        hyper::Version::HTTP_11 => "HTTP/1.1",
        hyper::Version::HTTP_2 => "HTTP/2",
        hyper::Version::HTTP_3 => "HTTP/3",
        // `hyper::Version` is an opaque struct, so a fallback arm is required.
        _ => "unknown",
    };
    name.to_string()
}
/// Emit one `RequestHeader` event per header, plus a single `RequestBody`
/// event when a request body is present.
fn emit_request_events(
    tx: &std_mpsc::Sender<ProxyEvent>,
    id: u64,
    headers: &HeaderMap,
    body: &Option<Vec<u8>>,
) {
    for (header_name, header_value) in headers.iter() {
        // Non-UTF-8 header values are reported with a placeholder.
        let value = header_value.to_str().unwrap_or("<binary>").to_string();
        let _ = tx.send(ProxyEvent::RequestHeader {
            id,
            name: header_name.to_string(),
            value,
        });
    }

    if let Some(bytes) = body.as_ref() {
        let _ = tx.send(ProxyEvent::RequestBody {
            id,
            body: bytes.clone(),
        });
    }
}
/// Emit `ResponseStart` (status, version, elapsed time) followed by one
/// `ResponseHeader` event per response header.
fn emit_response_events(
    tx: &std_mpsc::Sender<ProxyEvent>,
    id: u64,
    resp: &Response<Incoming>,
    start: &Instant,
) {
    let _ = tx.send(ProxyEvent::ResponseStart {
        id,
        status: resp.status().as_u16(),
        http_version: version_str(resp.version()),
        elapsed_ms: start.elapsed().as_millis() as u64,
    });

    for (header_name, header_value) in resp.headers().iter() {
        let _ = tx.send(ProxyEvent::ResponseHeader {
            id,
            name: header_name.to_string(),
            // Non-UTF-8 header values are reported with a placeholder.
            value: header_value.to_str().unwrap_or("<binary>").to_string(),
        });
    }
}
/// Entry point for every proxied request: CONNECT requests become MITM
/// tunnels, everything else is forwarded as plain HTTP.
///
/// Never fails — any proxy error is converted into a 502 response.
pub(crate) async fn handle_request(
    req: Request<Incoming>,
    event_tx: std_mpsc::Sender<ProxyEvent>,
    ca: Arc<CertificateAuthority>,
) -> Result<Response<BoxBody>, Infallible> {
    let outcome = if req.method() == Method::CONNECT {
        handle_connect(req, event_tx, ca).await
    } else {
        handle_http(req, event_tx).await
    };

    // Map errors to a 502 so the client always receives a response.
    Ok(outcome.unwrap_or_else(|e| {
        eprintln!("Proxy error: {e}");
        Response::builder()
            .status(StatusCode::BAD_GATEWAY)
            .body(full_body(Bytes::from(format!("Proxy error: {e}"))))
            .unwrap()
    }))
}
/// Forward a plain (non-CONNECT) HTTP request upstream and stream the
/// response back, emitting `ProxyEvent`s along the way.
///
/// The request body is fully buffered before forwarding; the response body
/// is streamed through `MeasuredBody` so chunk/completion events fire as
/// the client consumes it.
///
/// # Errors
/// Returns the client error when the upstream request fails (an `Error`
/// event is also emitted).
async fn handle_http(
    req: Request<Incoming>,
    event_tx: std_mpsc::Sender<ProxyEvent>,
) -> Result<Response<BoxBody>, Box<dyn std::error::Error + Send + Sync>> {
    // Process-unique id used to correlate all events for this request.
    let id = REQUEST_ID.fetch_add(1, Ordering::Relaxed);
    let method = req.method().to_string();
    let uri = req.uri().to_string();
    let http_version = version_str(req.version());
    let start = Instant::now();
    let _ = event_tx.send(ProxyEvent::RequestStart {
        id,
        method,
        url: uri.clone(),
        http_version,
    });
    let client: Client<_, Full<Bytes>> = Client::builder(TokioExecutor::new()).build_http();
    // Buffer the entire request body so it can be reported in one event.
    let (parts, body) = req.into_parts();
    let body_bytes = body.collect().await?.to_bytes();
    let request_body = if body_bytes.is_empty() {
        None
    } else {
        Some(body_bytes.to_vec())
    };
    emit_request_events(&event_tx, id, &parts.headers, &request_body);
    let outgoing_req = Request::from_parts(parts, Full::new(body_bytes));
    match client.request(outgoing_req).await {
        Ok(resp) => {
            emit_response_events(&event_tx, id, &resp, &start);
            let (parts, body) = resp.into_parts();
            // Wrap the response body so chunk/completion events fire as it
            // streams to the client.
            Ok(Response::from_parts(
                parts,
                measured_incoming(body, id, start, event_tx),
            ))
        }
        Err(e) => {
            let _ = event_tx.send(ProxyEvent::Error {
                id,
                error: e.to_string(),
            });
            Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>)
        }
    }
}
/// Handle a `CONNECT` request by intercepting (MITM'ing) the TLS tunnel.
///
/// Builds a per-host server TLS config from the local CA (presumably a minted
/// leaf certificate for `host` — confirm in `CertificateAuthority`), then
/// spawns a task that waits for hyper's connection upgrade, terminates the
/// client's TLS on the upgraded stream, and serves the decrypted requests,
/// forwarding each one through `handle_tunneled_request`. Returns an empty
/// `200 OK` immediately; hyper performs the upgrade only after this response
/// is written.
async fn handle_connect(
    req: Request<Incoming>,
    event_tx: std_mpsc::Sender<ProxyEvent>,
    ca: Arc<CertificateAuthority>,
) -> Result<Response<BoxBody>, Box<dyn std::error::Error + Send + Sync>> {
    // CONNECT targets arrive in authority form, e.g. "example.com:443".
    let authority = req
        .uri()
        .authority()
        .map(|a| a.to_string())
        .unwrap_or_default();
    let (host, port) = parse_host_port(&authority);
    let server_config = ca.server_config(&host)?;
    let acceptor = TlsAcceptor::from(server_config);
    let target_addr = format!("{host}:{port}");
    // Everything below runs after the 200 response is sent, on the upgraded
    // raw connection. Errors here can only be logged — the response is gone.
    tokio::spawn(async move {
        let upgraded = match hyper::upgrade::on(req).await {
            Ok(u) => u,
            Err(e) => {
                eprintln!("CONNECT upgrade failed: {e}");
                return;
            }
        };
        // Terminate the client's TLS using our own certificate for `host`.
        let tls_stream = match acceptor
            .accept(hyper_util::rt::TokioIo::new(upgraded))
            .await
        {
            Ok(s) => s,
            Err(e) => {
                eprintln!("TLS accept failed for {host}: {e}");
                return;
            }
        };
        // Clones moved into the per-request service closure below.
        let tx = event_tx.clone();
        let host_for_requests = host.clone();
        // Auto-detect HTTP/1.1 vs HTTP/2 from the client on the decrypted stream.
        let mut builder = auto::Builder::new(TokioExecutor::new());
        builder
            .http1()
            .preserve_header_case(true)
            .title_case_headers(true);
        if let Err(e) = builder
            .serve_connection_with_upgrades(
                hyper_util::rt::TokioIo::new(tls_stream),
                service_fn(move |req| {
                    let tx = tx.clone();
                    let host = host_for_requests.clone();
                    let target_addr = target_addr.clone();
                    async move { handle_tunneled_request(req, tx, &host, &target_addr).await }
                }),
            )
            .await
        {
            eprintln!("MITM connection error for {host}: {e}");
        }
    });
    // Empty 200 tells the client the tunnel is established and lets hyper
    // proceed with the upgrade handled in the spawned task above.
    Ok(Response::new(full_body(Bytes::new())))
}
/// Handle one decrypted request inside an intercepted CONNECT tunnel.
///
/// Delegates to `forward_https`; any forwarding error is logged and converted
/// into a `502 Bad Gateway` response, so this function is infallible.
async fn handle_tunneled_request(
    req: Request<Incoming>,
    event_tx: std_mpsc::Sender<ProxyEvent>,
    host: &str,
    target_addr: &str,
) -> Result<Response<BoxBody>, Infallible> {
    Ok(forward_https(req, event_tx, host, target_addr)
        .await
        .unwrap_or_else(|e| {
            eprintln!("HTTPS forward error: {e:?}");
            Response::builder()
                .status(StatusCode::BAD_GATEWAY)
                .body(full_body(Bytes::from(format!("Proxy error: {e}"))))
                .unwrap()
        }))
}
/// A version-agnostic handle to an upstream connection: wraps either an
/// HTTP/1.1 or an HTTP/2 hyper `SendRequest`, chosen from the ALPN result.
enum HttpSender {
    H1(hyper::client::conn::http1::SendRequest<Full<Bytes>>),
    H2(hyper::client::conn::http2::SendRequest<Full<Bytes>>),
}
impl HttpSender {
    /// Send `req` on whichever protocol variant this sender wraps,
    /// forwarding hyper's result unchanged.
    async fn send_request(
        &mut self,
        req: Request<Full<Bytes>>,
    ) -> Result<Response<Incoming>, hyper::Error> {
        match self {
            HttpSender::H1(s) => s.send_request(req).await,
            HttpSender::H2(s) => s.send_request(req).await,
        }
    }
}
/// Forward one intercepted (MITM'd) request to the real HTTPS origin.
///
/// Opens a TLS connection to `target_addr` (verified against the native OS
/// root store), negotiates HTTP/2 vs HTTP/1.1 via ALPN, replays the buffered
/// request, and returns the origin's response wrapped in a measuring body.
/// Request/response metadata is reported on `event_tx` along the way; send
/// failures on the channel are ignored.
async fn forward_https(
    req: Request<Incoming>,
    event_tx: std_mpsc::Sender<ProxyEvent>,
    host: &str,
    target_addr: &str,
) -> Result<Response<BoxBody>, Box<dyn std::error::Error + Send + Sync>> {
    let id = REQUEST_ID.fetch_add(1, Ordering::Relaxed);
    let method = req.method().to_string();
    let http_version = version_str(req.version());
    let path = req
        .uri()
        .path_and_query()
        .map(|pq| pq.to_string())
        .unwrap_or_else(|| "/".into());
    let uri_str = format!("https://{host}{path}");
    let start = Instant::now();
    let _ = event_tx.send(ProxyEvent::RequestStart {
        id,
        method,
        url: uri_str.clone(),
        http_version,
    });
    // Connect to upstream with TLS, trusting the OS certificate store.
    let tcp_stream = TcpStream::connect(target_addr).await?;
    let mut root_store = rustls::RootCertStore::empty();
    for cert in rustls_native_certs::load_native_certs().certs {
        let _ = root_store.add(cert);
    }
    let mut tls_config =
        ClientConfig::builder_with_provider(Arc::new(rustls::crypto::ring::default_provider()))
            .with_safe_default_protocol_versions()?
            .with_root_certificates(root_store)
            .with_no_client_auth();
    // Offer h2 first so capable origins pick it; fall back to HTTP/1.1.
    tls_config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
    let connector = tokio_rustls::TlsConnector::from(Arc::new(tls_config));
    let server_name = ServerName::try_from(host.to_string())?;
    let tls_stream = connector.connect(server_name, tcp_stream).await?;
    let negotiated_h2 = tls_stream
        .get_ref()
        .1
        .alpn_protocol()
        .is_some_and(|p| p == b"h2");
    let io = hyper_util::rt::TokioIo::new(tls_stream);
    let mut sender = if negotiated_h2 {
        let (sender, conn) = hyper::client::conn::http2::Builder::new(TokioExecutor::new())
            .handshake(io)
            .await?;
        // The connection future must be polled for requests to make progress;
        // drive it in the background and keep only the sender handle.
        tokio::spawn(async move {
            if let Err(e) = conn.await {
                eprintln!("Upstream h2 connection error: {e}");
            }
        });
        HttpSender::H2(sender)
    } else {
        let (sender, conn) = hyper::client::conn::http1::Builder::new()
            .preserve_header_case(true)
            .title_case_headers(true)
            .handshake(io)
            .await?;
        tokio::spawn(async move {
            if let Err(e) = conn.await {
                eprintln!("Upstream h1 connection error: {e}");
            }
        });
        HttpSender::H1(sender)
    };
    // Capture request metadata with the headers exactly as the client sent them.
    let (mut parts, body) = req.into_parts();
    let body_bytes = body.collect().await?.to_bytes();
    let request_body = if body_bytes.is_empty() {
        None
    } else {
        Some(body_bytes.to_vec())
    };
    emit_request_events(&event_tx, id, &parts.headers, &request_body);
    // Strip hop-by-hop headers before replaying (RFC 9110 §7.6.1): they apply
    // to the client<->proxy hop only, a stale `Transfer-Encoding` conflicts
    // with the buffered body, and HTTP/2 forbids connection-specific headers.
    for name in [
        "connection",
        "proxy-connection",
        "keep-alive",
        "proxy-authorization",
        "te",
        "trailer",
        "transfer-encoding",
        "upgrade",
    ] {
        parts.headers.remove(name);
    }
    if negotiated_h2 {
        // HTTP/2 requires absolute-form URI with scheme + authority; the
        // authority becomes `:authority`, so no Host header is needed.
        parts.uri = uri_str.parse::<Uri>()?;
    } else {
        parts.uri = path.parse::<Uri>()?;
        if !parts.headers.contains_key(hyper::header::HOST) {
            parts.headers.insert(hyper::header::HOST, host.parse()?);
        }
    }
    let outgoing = Request::from_parts(parts, Full::new(body_bytes));
    match sender.send_request(outgoing).await {
        Ok(resp) => {
            emit_response_events(&event_tx, id, &resp, &start);
            let (parts, body) = resp.into_parts();
            Ok(Response::from_parts(
                parts,
                measured_incoming(body, id, start, event_tx),
            ))
        }
        Err(e) => {
            let _ = event_tx.send(ProxyEvent::Error {
                id,
                error: e.to_string(),
            });
            Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>)
        }
    }
}
/// Split a CONNECT authority (`"host:port"`) into host and port.
///
/// If no `:` is present, or the suffix after the last `:` is not a valid
/// `u16` (e.g. a bracketed IPv6 literal without a port, `"[::1]"`), the whole
/// input is treated as the host and the port defaults to 443.
fn parse_host_port(authority: &str) -> (String, u16) {
    authority
        .rsplit_once(':')
        .and_then(|(host, port)| port.parse::<u16>().ok().map(|p| (host.to_string(), p)))
        .unwrap_or_else(|| (authority.to_string(), 443))
}

View File

@@ -2,24 +2,46 @@
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null, variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null,
/**
* Variables defined in this environment scope.
* Child environments override parent variables by name.
*/
variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };
export type Folder = { model: "folder", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, sortPriority: number, };
export type GrpcRequest = { model: "grpc_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authenticationType: string | null, authentication: Record<string, any>, description: string, message: string, metadata: Array<HttpRequestHeader>, method: string | null, name: string, service: string | null, sortPriority: number, url: string, };
export type GrpcRequest = { model: "grpc_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authenticationType: string | null, authentication: Record<string, any>, description: string, message: string, metadata: Array<HttpRequestHeader>, method: string | null, name: string, service: string | null, sortPriority: number,
/**
* Server URL (http for plaintext or https for secure)
*/
url: string, };
export type HttpRequest = { model: "http_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, body: Record<string, any>, bodyType: string | null, description: string, headers: Array<HttpRequestHeader>, method: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type HttpRequest = { model: "http_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, body: Record<string, any>, bodyType: string | null, description: string, headers: Array<HttpRequestHeader>, method: string, name: string, sortPriority: number, url: string,
/**
* URL parameters used for both path placeholders (`:id`) and query string entries.
*/
urlParameters: Array<HttpUrlParameter>, };
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpUrlParameter = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpUrlParameter = { enabled?: boolean,
/**
* Colon-prefixed parameters are treated as path parameters if they match, like `/users/:id`
* Other entries are appended as query parameters
*/
name: string, value: string, id?: string, };
export type SyncModel = { "type": "workspace" } & Workspace | { "type": "environment" } & Environment | { "type": "folder" } & Folder | { "type": "http_request" } & HttpRequest | { "type": "grpc_request" } & GrpcRequest | { "type": "websocket_request" } & WebsocketRequest;
export type SyncState = { model: "sync_state", id: string, workspaceId: string, createdAt: string, updatedAt: string, flushedAt: string, modelId: string, checksum: string, relPath: string, syncDir: string, };
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string,
/**
* URL parameters used for both path placeholders (`:id`) and query string entries.
*/
urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };

View File

@@ -1,6 +1,6 @@
import { Channel, invoke } from "@tauri-apps/api/core";
import { emit } from "@tauri-apps/api/event";
import type { WatchResult } from "@yaakapp-internal/tauri";
import type { WatchResult } from "@yaakapp-internal/tauri-client";
import { SyncOp } from "./bindings/gen_sync";
import { WatchEvent } from "./bindings/gen_watch";

View File

@@ -10,7 +10,7 @@ use std::fs::File;
use std::io::Write;
use std::path::{Path, PathBuf};
use ts_rs::TS;
use yaak_models::db_context::DbContext;
use yaak_models::client_db::ClientDb;
use yaak_models::models::{SyncState, WorkspaceMeta};
use yaak_models::util::{UpdateSource, get_workspace_export_resources};
@@ -106,7 +106,7 @@ pub struct FsCandidate {
}
pub fn get_db_candidates(
db: &DbContext,
db: &ClientDb,
version: &str,
workspace_id: &str,
sync_dir: &Path,
@@ -296,7 +296,7 @@ pub fn compute_sync_ops(
.collect()
}
fn workspace_models(db: &DbContext, version: &str, workspace_id: &str) -> Result<Vec<SyncModel>> {
fn workspace_models(db: &ClientDb, version: &str, workspace_id: &str) -> Result<Vec<SyncModel>> {
// We want to include private environments here so that we can take them into account during
// the sync process. Otherwise, they would be treated as deleted.
let include_private_environments = true;
@@ -338,7 +338,7 @@ fn workspace_models(db: &DbContext, version: &str, workspace_id: &str) -> Result
/// Apply sync operations to the filesystem and database.
/// Returns a list of SyncStateOps that should be applied afterward.
pub fn apply_sync_ops(
db: &DbContext,
db: &ClientDb,
workspace_id: &str,
sync_dir: &Path,
sync_ops: Vec<SyncOp>,
@@ -502,7 +502,7 @@ pub enum SyncStateOp {
}
pub fn apply_sync_state_ops(
db: &DbContext,
db: &ClientDb,
workspace_id: &str,
sync_dir: &Path,
ops: Vec<SyncStateOp>,
@@ -547,7 +547,7 @@ fn derive_model_filename(m: &SyncModel) -> PathBuf {
Path::new(&rel).to_path_buf()
}
fn delete_model(db: &DbContext, model: &SyncModel) -> Result<()> {
fn delete_model(db: &ClientDb, model: &SyncModel) -> Result<()> {
match model {
SyncModel::Workspace(m) => {
db.delete_workspace(&m, &UpdateSource::Sync)?;

View File

@@ -1,5 +1,5 @@
/* tslint:disable */
/* eslint-disable */
export function unescape_template(template: string): any;
export function escape_template(template: string): any;
export function parse_template(template: string): any;
export function unescape_template(template: string): any;

View File

@@ -161,6 +161,20 @@ function takeFromExternrefTable0(idx) {
wasm.__externref_table_dealloc(idx);
return value;
}
/**
* @param {string} template
* @returns {any}
*/
export function unescape_template(template) {
const ptr0 = passStringToWasm0(template, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
const len0 = WASM_VECTOR_LEN;
const ret = wasm.unescape_template(ptr0, len0);
if (ret[2]) {
throw takeFromExternrefTable0(ret[1]);
}
return takeFromExternrefTable0(ret[0]);
}
/**
* @param {string} template
* @returns {any}
@@ -189,20 +203,6 @@ export function parse_template(template) {
return takeFromExternrefTable0(ret[0]);
}
/**
* @param {string} template
* @returns {any}
*/
export function unescape_template(template) {
const ptr0 = passStringToWasm0(template, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
const len0 = WASM_VECTOR_LEN;
const ret = wasm.unescape_template(ptr0, len0);
if (ret[2]) {
throw takeFromExternrefTable0(ret[1]);
}
return takeFromExternrefTable0(ret[0]);
}
export function __wbg_new_405e22f390576ce2() {
const ret = new Object();
return ret;

Binary file not shown.