Start extracting DBContext

This commit is contained in:
Gregory Schier
2026-03-08 08:56:08 -07:00
parent cf28229f5f
commit 4c37e62146
45 changed files with 695 additions and 242 deletions

View File

@@ -5,12 +5,15 @@ edition = "2024"
publish = false
[dependencies]
chrono = { version = "0.4.38", features = ["serde"] }
include_dir = "0.7"
log = { workspace = true }
nanoid = "0.4.0"
r2d2 = "0.8.10"
r2d2_sqlite = { version = "0.25.0" }
rusqlite = { version = "0.32.1", features = ["bundled", "chrono"] }
sea-query = { version = "0.32.1", features = ["with-chrono", "attr"] }
sea-query-rusqlite = { version = "0.7.0", features = ["with-chrono"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
thiserror = { workspace = true }

View File

@@ -1,33 +1,37 @@
use crate::connection_or_tx::ConnectionOrTx;
use crate::error::Error::ModelNotFound;
use crate::error::Result;
use crate::models::{AnyModel, UpsertModelInfo};
use crate::util::{ModelChangeEvent, ModelPayload, UpdateSource};
use rusqlite::{OptionalExtension, params};
use crate::traits::UpsertModelInfo;
use crate::update_source::UpdateSource;
use sea_query::{
Asterisk, Expr, Func, IntoColumnRef, IntoIden, IntoTableRef, OnConflict, Query, SimpleExpr,
Asterisk, Expr, Func, IntoColumnRef, IntoIden, OnConflict, Query, SimpleExpr,
SqliteQueryBuilder,
};
use sea_query_rusqlite::RusqliteBinder;
use std::fmt::Debug;
use std::sync::mpsc;
pub struct DbContext<'a> {
pub(crate) _events_tx: mpsc::Sender<ModelPayload>,
pub(crate) conn: ConnectionOrTx<'a>,
conn: ConnectionOrTx<'a>,
}
impl<'a> DbContext<'a> {
pub(crate) fn find_one<'s, M>(
pub fn new(conn: ConnectionOrTx<'a>) -> Self {
Self { conn }
}
pub fn conn(&self) -> &ConnectionOrTx<'a> {
&self.conn
}
pub fn find_one<M>(
&self,
col: impl IntoColumnRef + IntoIden + Clone,
value: impl Into<SimpleExpr> + Debug,
) -> Result<M>
where
M: Into<AnyModel> + Clone + UpsertModelInfo,
M: UpsertModelInfo,
{
let value_debug = format!("{:?}", value);
let value_expr = value.into();
let (sql, params) = Query::select()
.from(M::table_name())
@@ -47,13 +51,13 @@ impl<'a> DbContext<'a> {
}
}
pub(crate) fn find_optional<'s, M>(
pub fn find_optional<M>(
&self,
col: impl IntoColumnRef,
value: impl Into<SimpleExpr>,
) -> Option<M>
where
M: Into<AnyModel> + Clone + UpsertModelInfo,
M: UpsertModelInfo,
{
let (sql, params) = Query::select()
.from(M::table_name())
@@ -62,13 +66,12 @@ impl<'a> DbContext<'a> {
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.prepare(sql.as_str()).expect("Failed to prepare query");
stmt.query_row(&*params.as_params(), M::from_row)
.optional()
.expect("Failed to run find on DB")
.ok()
}
pub(crate) fn find_all<'s, M>(&self) -> Result<Vec<M>>
pub fn find_all<M>(&self) -> Result<Vec<M>>
where
M: Into<AnyModel> + Clone + UpsertModelInfo,
M: UpsertModelInfo,
{
let (order_by_col, order_by_dir) = M::order_by();
let (sql, params) = Query::select()
@@ -81,16 +84,15 @@ impl<'a> DbContext<'a> {
Ok(items.map(|v| v.unwrap()).collect())
}
pub(crate) fn find_many<'s, M>(
pub fn find_many<M>(
&self,
col: impl IntoColumnRef,
value: impl Into<SimpleExpr>,
limit: Option<u64>,
) -> Result<Vec<M>>
where
M: Into<AnyModel> + Clone + UpsertModelInfo,
M: UpsertModelInfo,
{
// TODO: Figure out how to do this conditional builder better
let (order_by_col, order_by_dir) = M::order_by();
let (sql, params) = if let Some(limit) = limit {
Query::select()
@@ -114,46 +116,30 @@ impl<'a> DbContext<'a> {
Ok(items.map(|v| v.unwrap()).collect())
}
pub(crate) fn upsert<M>(&self, model: &M, source: &UpdateSource) -> Result<M>
/// Upsert a model. Returns `(model, created)` where `created` is true if a new row was inserted.
pub fn upsert<M>(&self, model: &M, source: &UpdateSource) -> Result<(M, bool)>
where
M: Into<AnyModel> + From<AnyModel> + UpsertModelInfo + Clone,
M: UpsertModelInfo + Clone,
{
self.upsert_one(
M::table_name(),
M::id_column(),
model.get_id().as_str(),
model.clone().insert_values(source)?,
M::update_columns(),
source,
)
}
let id_iden = M::id_column().into_iden();
let id_val = model.get_id();
let other_values = model.clone().insert_values(source)?;
fn upsert_one<M>(
&self,
table: impl IntoTableRef,
id_col: impl IntoIden + Eq + Clone,
id_val: &str,
other_values: Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>,
update_columns: Vec<impl IntoIden>,
source: &UpdateSource,
) -> Result<M>
where
M: Into<AnyModel> + From<AnyModel> + UpsertModelInfo + Clone,
{
let id_iden = id_col.into_iden();
let mut column_vec = vec![id_iden.clone()];
let mut value_vec =
vec![if id_val == "" { M::generate_id().into() } else { id_val.into() }];
let mut value_vec = vec![
if id_val.is_empty() { M::generate_id().into() } else { id_val.into() },
];
for (col, val) in other_values {
value_vec.push(val.into());
column_vec.push(col.into_iden());
}
let on_conflict = OnConflict::column(id_iden).update_columns(update_columns).to_owned();
let on_conflict =
OnConflict::column(id_iden).update_columns(M::update_columns()).to_owned();
let (sql, params) = Query::insert()
.into_table(table)
.into_table(M::table_name())
.columns(column_vec)
.values_panic(value_vec)
.on_conflict(on_conflict)
@@ -173,59 +159,19 @@ impl<'a> DbContext<'a> {
})
})?;
let payload = ModelPayload {
model: m.clone().into(),
update_source: source.clone(),
change: ModelChangeEvent::Upsert { created },
};
self.record_model_change(&payload)?;
let _ = self._events_tx.send(payload);
Ok(m)
Ok((m, created))
}
pub(crate) fn delete<'s, M>(&self, m: &M, source: &UpdateSource) -> Result<M>
/// Delete a model by its ID. Returns the number of rows deleted.
pub fn delete<M>(&self, m: &M) -> Result<usize>
where
M: Into<AnyModel> + Clone + UpsertModelInfo,
M: UpsertModelInfo,
{
let (sql, params) = Query::delete()
.from_table(M::table_name())
.cond_where(Expr::col(M::id_column().into_iden()).eq(m.get_id()))
.build_rusqlite(SqliteQueryBuilder);
self.conn.execute(sql.as_str(), &*params.as_params())?;
let payload = ModelPayload {
model: m.clone().into(),
update_source: source.clone(),
change: ModelChangeEvent::Delete,
};
self.record_model_change(&payload)?;
let _ = self._events_tx.send(payload);
Ok(m.clone())
}
fn record_model_change(&self, payload: &ModelPayload) -> Result<()> {
let payload_json = serde_json::to_string(payload)?;
let source_json = serde_json::to_string(&payload.update_source)?;
let change_json = serde_json::to_string(&payload.change)?;
self.conn.resolve().execute(
r#"
INSERT INTO model_changes (model, model_id, change, update_source, payload)
VALUES (?1, ?2, ?3, ?4, ?5)
"#,
params![
payload.model.model(),
payload.model.id(),
change_json,
source_json,
payload_json,
],
)?;
Ok(())
let count = self.conn.execute(sql.as_str(), &*params.as_params())?;
Ok(count)
}
}

View File

@@ -1,15 +1,23 @@
pub mod connection_or_tx;
pub mod db_context;
pub mod error;
pub mod migrate;
pub mod traits;
pub mod update_source;
pub mod util;
// Re-export key types for convenience
pub use connection_or_tx::ConnectionOrTx;
pub use db_context::DbContext;
pub use error::{Error, Result};
pub use migrate::run_migrations;
pub use traits::{UpsertModelInfo, upsert_date};
pub use update_source::UpdateSource;
pub use util::{generate_id, generate_id_of_length, generate_prefixed_id};
// Re-export pool types that consumers will need
pub use r2d2;
pub use r2d2_sqlite;
pub use rusqlite;
pub use sea_query;
pub use sea_query_rusqlite;

View File

@@ -0,0 +1,36 @@
use crate::error::Result;
use crate::update_source::UpdateSource;
use chrono::{NaiveDateTime, Utc};
use rusqlite::Row;
use sea_query::{IntoColumnRef, IntoIden, IntoTableRef, Order, SimpleExpr};
/// Metadata and row-mapping hooks a model must provide to participate in the
/// generic find/upsert/delete queries on `DbContext`.
pub trait UpsertModelInfo {
    /// The SQL table this model is stored in.
    fn table_name() -> impl IntoTableRef + IntoIden;
    /// The primary-key column used for lookups and upsert conflict resolution.
    fn id_column() -> impl IntoIden + Eq + Clone;
    /// Produce a fresh unique ID for a newly inserted row.
    fn generate_id() -> String;
    /// Default `(column, direction)` ordering for list queries.
    fn order_by() -> (impl IntoColumnRef, Order);
    /// The current ID of this instance (may be empty for a not-yet-saved model).
    fn get_id(&self) -> String;
    /// Column/value pairs to write on insert-or-update. `source` lets
    /// implementations pick timestamps appropriately (see `upsert_date`).
    fn insert_values(
        self,
        source: &UpdateSource,
    ) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>>;
    /// Columns to overwrite when an insert conflicts on the ID column.
    fn update_columns() -> Vec<impl IntoIden>;
    /// Construct the model from a database row.
    fn from_row(row: &Row) -> rusqlite::Result<Self>
    where
        Self: Sized;
}
/// Pick the timestamp value to write during an upsert.
///
/// Sync and import operations keep the model's existing timestamp so that
/// synced/imported data round-trips unchanged; every other source stamps the
/// current time. An epoch-zero timestamp is treated as missing (partial data)
/// and also replaced with the current time.
pub fn upsert_date(update_source: &UpdateSource, dt: NaiveDateTime) -> SimpleExpr {
    let preserves_timestamps =
        matches!(update_source, UpdateSource::Sync | UpdateSource::Import);
    if preserves_timestamps && dt.and_utc().timestamp() != 0 {
        dt.into()
    } else {
        Utc::now().naive_utc().into()
    }
}

View File

@@ -0,0 +1,17 @@
use serde::{Deserialize, Serialize};
/// Where a model change originated. Serialized with an internally-tagged
/// `type` field in snake_case (e.g. `{"type": "window", "label": "..."}`).
///
/// `Sync` and `Import` preserve existing model timestamps on upsert (see
/// `upsert_date`); other sources stamp the current time.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case", tag = "type")]
pub enum UpdateSource {
    // Presumably a non-interactive background task — TODO confirm with callers.
    Background,
    // Change made while importing external data.
    Import,
    // Change made by a plugin.
    Plugin,
    // Change arriving via data sync.
    Sync,
    // Change made from a specific application window, identified by `label`.
    Window { label: String },
}
impl UpdateSource {
pub fn from_window_label(label: impl Into<String>) -> Self {
Self::Window { label: label.into() }
}
}

View File

@@ -0,0 +1,127 @@
use crate::error::Result;
use crate::models::{AnyModel, UpsertModelInfo};
use crate::util::{ModelChangeEvent, ModelPayload, UpdateSource};
use rusqlite::params;
use sea_query::{IntoColumnRef, IntoIden, SimpleExpr};
use std::fmt::Debug;
use std::sync::mpsc;
use yaak_database::DbContext;
/// High-level database handle: wraps the low-level `DbContext` and layers
/// change journaling (`model_changes` table) plus event broadcasting on top
/// of every write operation.
pub struct ClientDb<'a> {
    // Low-level query context owning the connection or transaction.
    pub(crate) ctx: DbContext<'a>,
    // Channel used to broadcast a ModelPayload after each successful write.
    pub(crate) events_tx: mpsc::Sender<ModelPayload>,
}
impl<'a> ClientDb<'a> {
pub fn new(ctx: DbContext<'a>, events_tx: mpsc::Sender<ModelPayload>) -> Self {
Self { ctx, events_tx }
}
/// Access the underlying connection for custom queries.
pub(crate) fn conn(&self) -> &yaak_database::ConnectionOrTx<'a> {
self.ctx.conn()
}
// --- Read delegates (thin wrappers over DbContext) ---
pub(crate) fn find_one<M>(
&self,
col: impl IntoColumnRef + IntoIden + Clone,
value: impl Into<SimpleExpr> + Debug,
) -> Result<M>
where
M: UpsertModelInfo,
{
Ok(self.ctx.find_one(col, value)?)
}
pub(crate) fn find_optional<M>(
&self,
col: impl IntoColumnRef,
value: impl Into<SimpleExpr>,
) -> Option<M>
where
M: UpsertModelInfo,
{
self.ctx.find_optional(col, value)
}
pub(crate) fn find_all<M>(&self) -> Result<Vec<M>>
where
M: UpsertModelInfo,
{
Ok(self.ctx.find_all()?)
}
pub(crate) fn find_many<M>(
&self,
col: impl IntoColumnRef,
value: impl Into<SimpleExpr>,
limit: Option<u64>,
) -> Result<Vec<M>>
where
M: UpsertModelInfo,
{
Ok(self.ctx.find_many(col, value, limit)?)
}
// --- Write operations (with event recording) ---
pub(crate) fn upsert<M>(&self, model: &M, source: &UpdateSource) -> Result<M>
where
M: Into<AnyModel> + UpsertModelInfo + Clone,
{
let (m, created) = self.ctx.upsert(model, &source.to_db())?;
let payload = ModelPayload {
model: m.clone().into(),
update_source: source.clone(),
change: ModelChangeEvent::Upsert { created },
};
self.record_model_change(&payload)?;
let _ = self.events_tx.send(payload);
Ok(m)
}
pub(crate) fn delete<M>(&self, m: &M, source: &UpdateSource) -> Result<M>
where
M: Into<AnyModel> + Clone + UpsertModelInfo,
{
self.ctx.delete(m)?;
let payload = ModelPayload {
model: m.clone().into(),
update_source: source.clone(),
change: ModelChangeEvent::Delete,
};
self.record_model_change(&payload)?;
let _ = self.events_tx.send(payload);
Ok(m.clone())
}
fn record_model_change(&self, payload: &ModelPayload) -> Result<()> {
let payload_json = serde_json::to_string(payload)?;
let source_json = serde_json::to_string(&payload.update_source)?;
let change_json = serde_json::to_string(&payload.change)?;
self.ctx.conn().resolve().execute(
r#"
INSERT INTO model_changes (model, model_id, change, update_source, payload)
VALUES (?1, ?2, ?3, ?4, ?5)
"#,
params![
payload.model.model(),
payload.model.id(),
change_json,
source_json,
payload_json,
],
)?;
Ok(())
}
}

View File

@@ -40,6 +40,20 @@ pub enum Error {
Unknown,
}
impl From<yaak_database::Error> for Error {
fn from(e: yaak_database::Error) -> Self {
match e {
yaak_database::Error::SqlError(e) => Error::SqlError(e),
yaak_database::Error::SqlPoolError(e) => Error::SqlPoolError(e),
yaak_database::Error::Database(s) => Error::Database(s),
yaak_database::Error::Io(e) => Error::Io(e),
yaak_database::Error::JsonError(e) => Error::JsonError(e),
yaak_database::Error::ModelNotFound(s) => Error::ModelNotFound(s),
yaak_database::Error::MigrationError(s) => Error::MigrationError(s),
}
}
}
impl Serialize for Error {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where

View File

@@ -12,8 +12,8 @@ use std::sync::mpsc;
use std::time::Duration;
pub mod blob_manager;
pub mod client_db;
mod connection_or_tx;
pub mod db_context;
pub mod error;
pub mod migrate;
pub mod models;

View File

@@ -3,7 +3,7 @@ use crate::models::HttpRequestIden::{
Authentication, AuthenticationType, Body, BodyType, CreatedAt, Description, FolderId, Headers,
Method, Name, SortPriority, UpdatedAt, Url, UrlParameters, WorkspaceId,
};
use crate::util::{UpdateSource, generate_prefixed_id};
use crate::util::generate_prefixed_id;
use chrono::{NaiveDateTime, Utc};
use rusqlite::Row;
use schemars::JsonSchema;
@@ -16,6 +16,8 @@ use std::collections::HashMap;
use std::fmt::{Debug, Display};
use std::str::FromStr;
use ts_rs::TS;
pub use yaak_database::{UpsertModelInfo, upsert_date};
use yaak_database::{UpdateSource, Result as DbResult};
#[macro_export]
macro_rules! impl_model {
@@ -190,7 +192,7 @@ impl UpsertModelInfo for Settings {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use SettingsIden::*;
let proxy = match self.proxy {
None => None,
@@ -346,7 +348,7 @@ impl UpsertModelInfo for Workspace {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use WorkspaceIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -453,7 +455,7 @@ impl UpsertModelInfo for WorkspaceMeta {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use WorkspaceMetaIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -554,7 +556,7 @@ impl UpsertModelInfo for CookieJar {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use CookieJarIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -642,7 +644,7 @@ impl UpsertModelInfo for Environment {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use EnvironmentIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -775,7 +777,7 @@ impl UpsertModelInfo for Folder {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use FolderIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -909,7 +911,7 @@ impl UpsertModelInfo for HttpRequest {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
(UpdatedAt, upsert_date(source, self.updated_at)),
@@ -1036,7 +1038,7 @@ impl UpsertModelInfo for WebsocketConnection {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use WebsocketConnectionIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1151,7 +1153,7 @@ impl UpsertModelInfo for WebsocketRequest {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use WebsocketRequestIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1276,7 +1278,7 @@ impl UpsertModelInfo for WebsocketEvent {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use WebsocketEventIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1397,7 +1399,7 @@ impl UpsertModelInfo for HttpResponse {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use HttpResponseIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1593,7 +1595,7 @@ impl UpsertModelInfo for HttpResponseEvent {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use HttpResponseEventIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1681,7 +1683,7 @@ impl UpsertModelInfo for GraphQlIntrospection {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use GraphQlIntrospectionIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1766,7 +1768,7 @@ impl UpsertModelInfo for GrpcRequest {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use GrpcRequestIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -1893,7 +1895,7 @@ impl UpsertModelInfo for GrpcConnection {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use GrpcConnectionIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -2013,7 +2015,7 @@ impl UpsertModelInfo for GrpcEvent {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use GrpcEventIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -2144,7 +2146,7 @@ impl UpsertModelInfo for Plugin {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use PluginIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -2229,7 +2231,7 @@ impl UpsertModelInfo for SyncState {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use SyncStateIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -2312,7 +2314,7 @@ impl UpsertModelInfo for KeyValue {
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
) -> DbResult<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use KeyValueIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
@@ -2525,36 +2527,3 @@ impl AnyModel {
}
}
pub trait UpsertModelInfo {
fn table_name() -> impl IntoTableRef + IntoIden;
fn id_column() -> impl IntoIden + Eq + Clone;
fn generate_id() -> String;
fn order_by() -> (impl IntoColumnRef, Order);
fn get_id(&self) -> String;
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>>;
fn update_columns() -> Vec<impl IntoIden>;
fn from_row(row: &Row) -> rusqlite::Result<Self>
where
Self: Sized;
}
// Generate the created_at or updated_at timestamps for an upsert operation, depending on the ID
// provided.
fn upsert_date(update_source: &UpdateSource, dt: NaiveDateTime) -> SimpleExpr {
match update_source {
// Sync and import operations always preserve timestamps
UpdateSource::Sync | UpdateSource::Import => {
if dt.and_utc().timestamp() == 0 {
// Sometimes data won't have timestamps (partial data)
Utc::now().naive_utc().into()
} else {
dt.into()
}
}
// Other sources will always update to the latest time
_ => Utc::now().naive_utc().into(),
}
}

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{GrpcRequest, HttpRequest, WebsocketRequest};
@@ -8,7 +8,7 @@ pub enum AnyRequest {
WebsocketRequest(WebsocketRequest),
}
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_any_request(&self, id: &str) -> Result<AnyRequest> {
if let Ok(http_request) = self.get_http_request(id) {
Ok(AnyRequest::HttpRequest(http_request))

View File

@@ -1,10 +1,10 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{Environment, Folder, GrpcRequest, HttpRequest, WebsocketRequest, Workspace};
use crate::util::{BatchUpsertResult, UpdateSource};
use log::info;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn batch_upsert(
&self,
workspaces: Vec<Workspace>,

View File

@@ -1,9 +1,9 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{CookieJar, CookieJarIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_cookie_jar(&self, id: &str) -> Result<CookieJar> {
self.find_one(CookieJarIden::Id, id)
}

View File

@@ -1,11 +1,11 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Error::{MissingBaseEnvironment, MultipleBaseEnvironments};
use crate::error::Result;
use crate::models::{Environment, EnvironmentIden, EnvironmentVariable};
use crate::util::UpdateSource;
use log::{info, warn};
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_environment(&self, id: &str) -> Result<Environment> {
self.find_one(EnvironmentIden::Id, id)
}

View File

@@ -1,5 +1,5 @@
use crate::connection_or_tx::ConnectionOrTx;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{
Environment, EnvironmentIden, Folder, FolderIden, GrpcRequest, GrpcRequestIden, HttpRequest,
@@ -9,7 +9,7 @@ use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_folder(&self, id: &str) -> Result<Folder> {
self.find_one(FolderIden::Id, id)
}
@@ -19,7 +19,7 @@ impl<'a> DbContext<'a> {
}
pub fn delete_folder(&self, folder: &Folder, source: &UpdateSource) -> Result<Folder> {
match self.conn {
match self.conn() {
ConnectionOrTx::Connection(_) => {}
ConnectionOrTx::Transaction(_) => {}
}

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{GraphQlIntrospection, GraphQlIntrospectionIden};
use crate::util::UpdateSource;
@@ -6,7 +6,7 @@ use chrono::{Duration, Utc};
use sea_query::{Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_graphql_introspection(&self, request_id: &str) -> Option<GraphQlIntrospection> {
self.find_optional(GraphQlIntrospectionIden::RequestId, request_id)
}
@@ -44,7 +44,7 @@ impl<'a> DbContext<'a> {
.cond_where(Expr::col(GraphQlIntrospectionIden::UpdatedAt).lt(cutoff))
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.resolve().prepare(sql.as_str())?;
let mut stmt = self.conn().resolve().prepare(sql.as_str())?;
stmt.execute(&*params.as_params())?;
Ok(())
}

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{GrpcConnection, GrpcConnectionIden, GrpcConnectionState};
use crate::queries::MAX_HISTORY_ITEMS;
@@ -7,7 +7,7 @@ use log::debug;
use sea_query::{Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_grpc_connection(&self, id: &str) -> Result<GrpcConnection> {
self.find_one(GrpcConnectionIden::Id, id)
}
@@ -71,7 +71,7 @@ impl<'a> DbContext<'a> {
.values([(GrpcConnectionIden::State, closed.as_str().into())])
.cond_where(Expr::col(GrpcConnectionIden::State).ne(closed.as_str()))
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.prepare(sql.as_str())?;
let mut stmt = self.conn().prepare(sql.as_str())?;
stmt.execute(&*params.as_params())?;
Ok(())
}

View File

@@ -1,9 +1,9 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{GrpcEvent, GrpcEventIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_grpc_events(&self, id: &str) -> Result<GrpcEvent> {
self.find_one(GrpcEventIden::Id, id)
}

View File

@@ -1,12 +1,12 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{Folder, FolderIden, GrpcRequest, GrpcRequestIden, HttpRequestHeader};
use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_grpc_request(&self, id: &str) -> Result<GrpcRequest> {
self.find_one(GrpcRequestIden::Id, id)
}

View File

@@ -1,12 +1,12 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{Folder, FolderIden, HttpRequest, HttpRequestHeader, HttpRequestIden};
use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_http_request(&self, id: &str) -> Result<HttpRequest> {
self.find_one(HttpRequestIden::Id, id)
}

View File

@@ -1,9 +1,9 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{HttpResponseEvent, HttpResponseEventIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn list_http_response_events(&self, response_id: &str) -> Result<Vec<HttpResponseEvent>> {
self.find_many(HttpResponseEventIden::ResponseId, response_id, None)
}

View File

@@ -1,5 +1,5 @@
use crate::blob_manager::BlobManager;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{HttpResponse, HttpResponseIden, HttpResponseState};
use crate::queries::MAX_HISTORY_ITEMS;
@@ -9,7 +9,7 @@ use sea_query::{Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
use std::fs;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_http_response(&self, id: &str) -> Result<HttpResponse> {
self.find_one(HttpResponseIden::Id, id)
}
@@ -101,7 +101,7 @@ impl<'a> DbContext<'a> {
.values([(HttpResponseIden::State, closed.as_str().into())])
.cond_where(Expr::col(HttpResponseIden::State).ne(closed.as_str()))
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.prepare(sql.as_str())?;
let mut stmt = self.conn().prepare(sql.as_str())?;
stmt.execute(&*params.as_params())?;
Ok(())
}

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{KeyValue, KeyValueIden, UpsertModelInfo};
use crate::util::UpdateSource;
@@ -7,7 +7,7 @@ use log::error;
use sea_query::{Asterisk, Cond, Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn list_key_values(&self) -> Result<Vec<KeyValue>> {
let (sql, params) = Query::select()
.from(KeyValueIden::Table)
@@ -18,7 +18,7 @@ impl<'a> DbContext<'a> {
// TODO: Add migration to delete key/values with NULL IDs later on, then remove this
.cond_where(Expr::col(KeyValueIden::Id).is_not_null())
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.prepare(sql.as_str())?;
let mut stmt = self.conn().prepare(sql.as_str())?;
let items = stmt.query_map(&*params.as_params(), KeyValue::from_row)?;
Ok(items.map(|v| v.unwrap()).collect())
}
@@ -86,7 +86,7 @@ impl<'a> DbContext<'a> {
.add(Expr::col(KeyValueIden::Key).eq(key)),
)
.build_rusqlite(SqliteQueryBuilder);
self.conn.resolve().query_row(sql.as_str(), &*params.as_params(), KeyValue::from_row).ok()
self.conn().resolve().query_row(sql.as_str(), &*params.as_params(), KeyValue::from_row).ok()
}
pub fn set_key_value_dte(

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::util::ModelPayload;
use rusqlite::params;
@@ -11,13 +11,13 @@ pub struct PersistedModelChange {
pub payload: ModelPayload,
}
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn list_model_changes_after(
&self,
after_id: i64,
limit: usize,
) -> Result<Vec<PersistedModelChange>> {
let mut stmt = self.conn.prepare(
let mut stmt = self.conn().prepare(
r#"
SELECT id, created_at, payload
FROM model_changes
@@ -46,7 +46,7 @@ impl<'a> DbContext<'a> {
since_id: i64,
limit: usize,
) -> Result<Vec<PersistedModelChange>> {
let mut stmt = self.conn.prepare(
let mut stmt = self.conn().prepare(
r#"
SELECT id, created_at, payload
FROM model_changes
@@ -72,7 +72,7 @@ impl<'a> DbContext<'a> {
pub fn prune_model_changes_older_than_days(&self, days: i64) -> Result<usize> {
let offset = format!("-{days} days");
Ok(self.conn.resolve().execute(
Ok(self.conn().resolve().execute(
r#"
DELETE FROM model_changes
WHERE created_at < STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', ?1)
@@ -83,7 +83,7 @@ impl<'a> DbContext<'a> {
pub fn prune_model_changes_older_than_hours(&self, hours: i64) -> Result<usize> {
let offset = format!("-{hours} hours");
Ok(self.conn.resolve().execute(
Ok(self.conn().resolve().execute(
r#"
DELETE FROM model_changes
WHERE created_at < STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', ?1)

View File

@@ -1,11 +1,11 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{PluginKeyValue, PluginKeyValueIden};
use sea_query::Keyword::CurrentTimestamp;
use sea_query::{Asterisk, Cond, Expr, OnConflict, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_plugin_key_value(&self, plugin_name: &str, key: &str) -> Option<PluginKeyValue> {
let (sql, params) = Query::select()
.from(PluginKeyValueIden::Table)
@@ -16,7 +16,7 @@ impl<'a> DbContext<'a> {
.add(Expr::col(PluginKeyValueIden::Key).eq(key)),
)
.build_rusqlite(SqliteQueryBuilder);
self.conn.resolve().query_row(sql.as_str(), &*params.as_params(), |row| row.try_into()).ok()
self.conn().resolve().query_row(sql.as_str(), &*params.as_params(), |row| row.try_into()).ok()
}
pub fn set_plugin_key_value(
@@ -52,7 +52,7 @@ impl<'a> DbContext<'a> {
.build_rusqlite(SqliteQueryBuilder);
let mut stmt =
self.conn.prepare(sql.as_str()).expect("Failed to prepare PluginKeyValue upsert");
self.conn().prepare(sql.as_str()).expect("Failed to prepare PluginKeyValue upsert");
let m: PluginKeyValue = stmt
.query_row(&*params.as_params(), |row| row.try_into())
.expect("Failed to upsert KeyValue");
@@ -73,7 +73,7 @@ impl<'a> DbContext<'a> {
.add(Expr::col(PluginKeyValueIden::Key).eq(key)),
)
.build_rusqlite(SqliteQueryBuilder);
self.conn.execute(sql.as_str(), &*params.as_params())?;
self.conn().execute(sql.as_str(), &*params.as_params())?;
Ok(true)
}
}

View File

@@ -1,9 +1,9 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{Plugin, PluginIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_plugin(&self, id: &str) -> Result<Plugin> {
self.find_one(PluginIden::Id, id)
}

View File

@@ -1,11 +1,11 @@
use std::collections::HashMap;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{EditorKeymap, Settings, SettingsIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_settings(&self) -> Settings {
let id = "default".to_string();

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{SyncState, SyncStateIden, UpsertModelInfo};
use crate::util::UpdateSource;
@@ -6,7 +6,7 @@ use sea_query::{Asterisk, Cond, Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
use std::path::Path;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_sync_state(&self, id: &str) -> Result<SyncState> {
self.find_one(SyncStateIden::Id, id)
}
@@ -29,7 +29,7 @@ impl<'a> DbContext<'a> {
.add(Expr::col(SyncStateIden::SyncDir).eq(sync_dir.to_string_lossy())),
)
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.prepare(sql.as_str())?;
let mut stmt = self.conn().prepare(sql.as_str())?;
let items = stmt.query_map(&*params.as_params(), SyncState::from_row)?;
Ok(items.map(|v| v.unwrap()).collect())
}

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{WebsocketConnection, WebsocketConnectionIden, WebsocketConnectionState};
use crate::queries::MAX_HISTORY_ITEMS;
@@ -7,7 +7,7 @@ use log::debug;
use sea_query::{Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_websocket_connection(&self, id: &str) -> Result<WebsocketConnection> {
self.find_one(WebsocketConnectionIden::Id, id)
}
@@ -90,7 +90,7 @@ impl<'a> DbContext<'a> {
.values([(WebsocketConnectionIden::State, closed.as_str().into())])
.cond_where(Expr::col(WebsocketConnectionIden::State).ne(closed.as_str()))
.build_rusqlite(SqliteQueryBuilder);
let mut stmt = self.conn.prepare(sql.as_str())?;
let mut stmt = self.conn().prepare(sql.as_str())?;
stmt.execute(&*params.as_params())?;
Ok(())
}

View File

@@ -1,9 +1,9 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{WebsocketEvent, WebsocketEventIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_websocket_event(&self, id: &str) -> Result<WebsocketEvent> {
self.find_one(WebsocketEventIden::Id, id)
}

View File

@@ -1,5 +1,5 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{
Folder, FolderIden, HttpRequestHeader, WebsocketRequest, WebsocketRequestIden,
@@ -8,7 +8,7 @@ use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_websocket_request(&self, id: &str) -> Result<WebsocketRequest> {
self.find_one(WebsocketRequestIden::Id, id)
}

View File

@@ -1,10 +1,10 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{WorkspaceMeta, WorkspaceMetaIden};
use crate::util::UpdateSource;
use log::info;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_workspace_meta(&self, workspace_id: &str) -> Option<WorkspaceMeta> {
self.find_optional(WorkspaceMetaIden::WorkspaceId, workspace_id)
}

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{
EnvironmentIden, FolderIden, GrpcRequestIden, HttpRequestHeader, HttpRequestIden,
@@ -8,7 +8,7 @@ use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
impl<'a> DbContext<'a> {
impl<'a> ClientDb<'a> {
pub fn get_workspace(&self, id: &str) -> Result<Workspace> {
self.find_one(WorkspaceIden::Id, id)
}

View File

@@ -1,11 +1,11 @@
use crate::connection_or_tx::ConnectionOrTx;
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Error::GenericError;
use crate::util::ModelPayload;
use r2d2::Pool;
use r2d2_sqlite::SqliteConnectionManager;
use rusqlite::TransactionBehavior;
use std::sync::{Arc, Mutex, mpsc};
use yaak_database::{ConnectionOrTx, DbContext};
#[derive(Debug, Clone)]
pub struct QueryManager {
@@ -18,19 +18,20 @@ impl QueryManager {
QueryManager { pool: Arc::new(Mutex::new(pool)), events_tx }
}
pub fn connect(&self) -> DbContext<'_> {
pub fn connect(&self) -> ClientDb<'_> {
let conn = self
.pool
.lock()
.expect("Failed to gain lock on DB")
.get()
.expect("Failed to get a new DB connection from the pool");
DbContext { _events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) }
let ctx = DbContext::new(ConnectionOrTx::Connection(conn));
ClientDb::new(ctx, self.events_tx.clone())
}
pub fn with_conn<F, T>(&self, func: F) -> T
where
F: FnOnce(&DbContext) -> T,
F: FnOnce(&ClientDb) -> T,
{
let conn = self
.pool
@@ -39,17 +40,15 @@ impl QueryManager {
.get()
.expect("Failed to get new DB connection from the pool");
let db_context = DbContext {
_events_tx: self.events_tx.clone(),
conn: ConnectionOrTx::Connection(conn),
};
let ctx = DbContext::new(ConnectionOrTx::Connection(conn));
let db = ClientDb::new(ctx, self.events_tx.clone());
func(&db_context)
func(&db)
}
pub fn with_tx<T, E>(
&self,
func: impl FnOnce(&DbContext) -> std::result::Result<T, E>,
func: impl FnOnce(&ClientDb) -> std::result::Result<T, E>,
) -> std::result::Result<T, E>
where
E: From<crate::error::Error>,
@@ -64,12 +63,10 @@ impl QueryManager {
.transaction_with_behavior(TransactionBehavior::Immediate)
.expect("Failed to start DB transaction");
let db_context = DbContext {
_events_tx: self.events_tx.clone(),
conn: ConnectionOrTx::Transaction(&tx),
};
let ctx = DbContext::new(ConnectionOrTx::Transaction(&tx));
let db = ClientDb::new(ctx, self.events_tx.clone());
match func(&db_context) {
match func(&db) {
Ok(val) => {
tx.commit()
.map_err(|e| GenericError(format!("Failed to commit transaction {e:?}")))?;

View File

@@ -1,4 +1,4 @@
use crate::db_context::DbContext;
use crate::client_db::ClientDb;
use crate::error::Result;
use crate::models::{
AnyModel, Environment, Folder, GrpcRequest, HttpRequest, UpsertModelInfo, WebsocketRequest,
@@ -44,6 +44,30 @@ impl UpdateSource {
pub fn from_window_label(label: impl Into<String>) -> Self {
Self::Window { label: label.into() }
}
pub fn to_db(&self) -> yaak_database::UpdateSource {
match self {
UpdateSource::Background => yaak_database::UpdateSource::Background,
UpdateSource::Import => yaak_database::UpdateSource::Import,
UpdateSource::Plugin => yaak_database::UpdateSource::Plugin,
UpdateSource::Sync => yaak_database::UpdateSource::Sync,
UpdateSource::Window { label } => {
yaak_database::UpdateSource::Window { label: label.clone() }
}
}
}
}
impl From<yaak_database::UpdateSource> for UpdateSource {
fn from(source: yaak_database::UpdateSource) -> Self {
match source {
yaak_database::UpdateSource::Background => UpdateSource::Background,
yaak_database::UpdateSource::Import => UpdateSource::Import,
yaak_database::UpdateSource::Plugin => UpdateSource::Plugin,
yaak_database::UpdateSource::Sync => UpdateSource::Sync,
yaak_database::UpdateSource::Window { label } => UpdateSource::Window { label },
}
}
}
#[derive(Default, Debug, Deserialize, Serialize)]
@@ -68,7 +92,7 @@ pub struct BatchUpsertResult {
}
pub fn get_workspace_export_resources(
db: &DbContext,
db: &ClientDb,
yaak_version: &str,
workspace_ids: Vec<&str>,
include_private_environments: bool,

View File

@@ -10,7 +10,7 @@ use std::fs::File;
use std::io::Write;
use std::path::{Path, PathBuf};
use ts_rs::TS;
use yaak_models::db_context::DbContext;
use yaak_models::client_db::ClientDb;
use yaak_models::models::{SyncState, WorkspaceMeta};
use yaak_models::util::{UpdateSource, get_workspace_export_resources};
@@ -106,7 +106,7 @@ pub struct FsCandidate {
}
pub fn get_db_candidates(
db: &DbContext,
db: &ClientDb,
version: &str,
workspace_id: &str,
sync_dir: &Path,
@@ -296,7 +296,7 @@ pub fn compute_sync_ops(
.collect()
}
fn workspace_models(db: &DbContext, version: &str, workspace_id: &str) -> Result<Vec<SyncModel>> {
fn workspace_models(db: &ClientDb, version: &str, workspace_id: &str) -> Result<Vec<SyncModel>> {
// We want to include private environments here so that we can take them into account during
// the sync process. Otherwise, they would be treated as deleted.
let include_private_environments = true;
@@ -338,7 +338,7 @@ fn workspace_models(db: &DbContext, version: &str, workspace_id: &str) -> Result
/// Apply sync operations to the filesystem and database.
/// Returns a list of SyncStateOps that should be applied afterward.
pub fn apply_sync_ops(
db: &DbContext,
db: &ClientDb,
workspace_id: &str,
sync_dir: &Path,
sync_ops: Vec<SyncOp>,
@@ -502,7 +502,7 @@ pub enum SyncStateOp {
}
pub fn apply_sync_state_ops(
db: &DbContext,
db: &ClientDb,
workspace_id: &str,
sync_dir: &Path,
ops: Vec<SyncStateOp>,
@@ -547,7 +547,7 @@ fn derive_model_filename(m: &SyncModel) -> PathBuf {
Path::new(&rel).to_path_buf()
}
fn delete_model(db: &DbContext, model: &SyncModel) -> Result<()> {
fn delete_model(db: &ClientDb, model: &SyncModel) -> Result<()> {
match model {
SyncModel::Workspace(m) => {
db.delete_workspace(&m, &UpdateSource::Sync)?;