Cookie Support (#19)

This commit is contained in:
Gregory Schier
2024-01-28 14:39:51 -08:00
committed by GitHub
parent 5ffc75e0ad
commit 6798331ce5
45 changed files with 1152 additions and 145 deletions

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO cookie_jars (\n id,\n workspace_id,\n name,\n cookies\n )\n VALUES (?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n cookies = excluded.cookies\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "b5ed4dc77f8cf21de1a06f146e47821bdb51fcfe747170bea41e7a366d736bda"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n DELETE FROM cookie_jars\n WHERE id = ?\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "b98609f65dd3a6bbd1ea8dc8bed2840a6d5d13fec1bbc0aa61ca4f60de98a09c"
}

View File

@@ -0,0 +1,56 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id,\n model,\n created_at,\n updated_at,\n workspace_id,\n name,\n cookies AS \"cookies!: sqlx::types::Json<Vec<JsonValue>>\"\n FROM cookie_jars WHERE workspace_id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 2,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "workspace_id",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "name",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "cookies!: sqlx::types::Json<Vec<JsonValue>>",
"ordinal": 6,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
false,
false
]
},
"hash": "cb939b45a715d91f7631dea6b2d1bdc59fb3dffbd44ff99bc15adb34ea7093f7"
}

View File

@@ -0,0 +1,56 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id,\n model,\n created_at,\n updated_at,\n workspace_id,\n name,\n cookies AS \"cookies!: sqlx::types::Json<Vec<JsonValue>>\"\n FROM cookie_jars WHERE id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 2,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "workspace_id",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "name",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "cookies!: sqlx::types::Json<Vec<JsonValue>>",
"ordinal": 6,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
false,
false
]
},
"hash": "f2ba4708d4a9ff9ce74c407a730040bd7883e9a5c0eb79ef0d8a6782a8eae299"
}

129
src-tauri/Cargo.lock generated
View File

@@ -83,9 +83,9 @@ checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
[[package]]
name = "async-compression"
version = "0.4.5"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc2d0cfb2a7388d34f590e76686704c494ed7aaceed62ee1ba35cbf363abc2a5"
checksum = "a116f46a969224200a0a97f29cfd4c50e7534e4b4826bd23ea2c3c533039c82c"
dependencies = [
"brotli",
"flate2",
@@ -614,6 +614,72 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
[[package]]
name = "cookie"
version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb"
dependencies = [
"percent-encoding",
"time",
"version_check 0.9.4",
]
[[package]]
name = "cookie"
version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7efb37c3e1ccb1ff97164ad95ac1606e8ccd35b3fa0a7d99a304c7f4a428cc24"
dependencies = [
"percent-encoding",
"time",
"version_check 0.9.4",
]
[[package]]
name = "cookie"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3cd91cf61412820176e137621345ee43b3f4423e589e7ae4e50d601d93e35ef8"
dependencies = [
"time",
"version_check 0.9.4",
]
[[package]]
name = "cookie_store"
version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d606d0fba62e13cf04db20536c05cb7f13673c161cb47a47a82b9b9e7d3f1daa"
dependencies = [
"cookie 0.16.2",
"idna 0.2.3",
"log",
"publicsuffix",
"serde",
"serde_derive",
"serde_json",
"time",
"url",
]
[[package]]
name = "cookie_store"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "387461abbc748185c3a6e1673d826918b450b87ff22639429c694619a83b6cf6"
dependencies = [
"cookie 0.17.0",
"idna 0.3.0",
"log",
"publicsuffix",
"serde",
"serde_derive",
"serde_json",
"time",
"url",
]
[[package]]
name = "core-foundation"
version = "0.9.3"
@@ -1955,6 +2021,27 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "idna"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8"
dependencies = [
"matches",
"unicode-bidi",
"unicode-normalization",
]
[[package]]
name = "idna"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6"
dependencies = [
"unicode-bidi",
"unicode-normalization",
]
[[package]]
name = "idna"
version = "0.4.0"
@@ -3143,6 +3230,22 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "psl-types"
version = "2.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33cb294fe86a74cbcf50d4445b37da762029549ebeea341421c7c70370f86cac"
[[package]]
name = "publicsuffix"
version = "2.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96a8c1bda5ae1af7f99a2962e49df150414a43d62404644d98dd5c3a93d07457"
dependencies = [
"idna 0.3.0",
"psl-types",
]
[[package]]
name = "quick-xml"
version = "0.31.0"
@@ -3339,13 +3442,15 @@ dependencies = [
[[package]]
name = "reqwest"
version = "0.11.22"
version = "0.11.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b"
checksum = "37b1ae8d9ac08420c66222fb9096fc5de435c3c48542bc5336c51892cffafb41"
dependencies = [
"async-compression",
"base64 0.21.5",
"bytes",
"cookie 0.16.2",
"cookie_store 0.16.2",
"encoding_rs",
"futures-core",
"futures-util",
@@ -3379,6 +3484,18 @@ dependencies = [
"winreg 0.50.0",
]
[[package]]
name = "reqwest_cookie_store"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba529055ea150e42e4eb9c11dcd380a41025ad4d594b0cb4904ef28b037e1061"
dependencies = [
"bytes",
"cookie_store 0.20.0",
"reqwest",
"url",
]
[[package]]
name = "rfd"
version = "0.10.0"
@@ -4974,7 +5091,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5"
dependencies = [
"form_urlencoded",
"idna",
"idna 0.4.0",
"percent-encoding",
"serde",
]
@@ -5805,6 +5922,7 @@ dependencies = [
"boa_runtime",
"chrono",
"cocoa 0.25.0",
"cookie 0.18.0",
"datetime",
"futures",
"http",
@@ -5813,6 +5931,7 @@ dependencies = [
"openssl-sys",
"rand 0.8.5",
"reqwest",
"reqwest_cookie_store",
"serde",
"serde_json",
"sqlx",

View File

@@ -28,7 +28,8 @@ chrono = { version = "0.4.31", features = ["serde"] }
futures = "0.3.26"
http = "0.2.8"
rand = "0.8.5"
reqwest = { version = "0.11.14", features = ["json", "multipart", "gzip", "brotli", "deflate"] }
reqwest = { version = "0.11.23", features = ["multipart", "cookies", "gzip", "brotli", "deflate"] }
cookie = { version = "0.18.0" }
serde = { version = "1.0.195", features = ["derive"] }
serde_json = { version = "1.0.111", features = ["raw_value"] }
sqlx = { version = "0.7.2", features = ["sqlite", "runtime-tokio-rustls", "json", "chrono", "time"] }
@@ -57,6 +58,7 @@ uuid = "1.3.0"
log = "0.4.20"
datetime = "0.5.2"
window-shadows = "0.2.2"
reqwest_cookie_store = "0.6.0"
[features]
# by default Tauri runs in production mode

View File

@@ -0,0 +1,10 @@
-- One row per cookie jar. A jar is a named, workspace-scoped collection of
-- cookies; the cookies themselves are stored denormalized as a JSON array of
-- serialized cookies in the `cookies` column (defaults to an empty list).
CREATE TABLE cookie_jars
(
-- Application-generated id (see generate_id with the "cj" prefix in models)
id TEXT NOT NULL PRIMARY KEY,
-- Constant model discriminator used by the frontend to route updates
model TEXT DEFAULT 'cookie_jar' NOT NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
name TEXT NOT NULL,
-- JSON array of serialized cookies; '[]' when the jar is empty
cookies TEXT DEFAULT '[]' NOT NULL,
workspace_id TEXT NOT NULL
);

View File

@@ -14,6 +14,7 @@ pub enum AnalyticsResource {
App,
Sidebar,
Workspace,
CookieJar,
Environment,
Folder,
HttpRequest,
@@ -28,6 +29,7 @@ impl AnalyticsResource {
"Sidebar" => Some(AnalyticsResource::Sidebar),
"Workspace" => Some(AnalyticsResource::Workspace),
"Environment" => Some(AnalyticsResource::Environment),
"CookieJar" => Some(AnalyticsResource::CookieJar),
"Folder" => Some(AnalyticsResource::Folder),
"HttpRequest" => Some(AnalyticsResource::HttpRequest),
"HttpResponse" => Some(AnalyticsResource::HttpResponse),
@@ -50,6 +52,8 @@ pub enum AnalyticsAction {
Send,
Toggle,
Duplicate,
Import,
Export,
}
impl AnalyticsAction {
@@ -66,6 +70,8 @@ impl AnalyticsAction {
"Send" => Some(AnalyticsAction::Send),
"Duplicate" => Some(AnalyticsAction::Duplicate),
"Toggle" => Some(AnalyticsAction::Toggle),
"Import" => Some(AnalyticsAction::Import),
"Export" => Some(AnalyticsAction::Export),
_ => None,
}
}
@@ -77,6 +83,7 @@ fn resource_name(resource: AnalyticsResource) -> &'static str {
AnalyticsResource::Sidebar => "sidebar",
AnalyticsResource::Workspace => "workspace",
AnalyticsResource::Environment => "environment",
AnalyticsResource::CookieJar => "cookie_jar",
AnalyticsResource::Folder => "folder",
AnalyticsResource::HttpRequest => "http_request",
AnalyticsResource::HttpResponse => "http_response",
@@ -97,6 +104,8 @@ fn action_name(action: AnalyticsAction) -> &'static str {
AnalyticsAction::Send => "send",
AnalyticsAction::Duplicate => "duplicate",
AnalyticsAction::Toggle => "toggle",
AnalyticsAction::Import => "import",
AnalyticsAction::Export => "export",
}
}

View File

@@ -2,16 +2,18 @@ use std::fs;
use std::fs::{create_dir_all, File};
use std::io::Write;
use std::path::PathBuf;
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use base64::Engine;
use http::{HeaderMap, HeaderName, HeaderValue, Method};
use http::header::{ACCEPT, USER_AGENT};
use http::{HeaderMap, HeaderName, HeaderValue, Method};
use log::{error, info, warn};
use reqwest::multipart;
use reqwest::redirect::Policy;
use reqwest::{multipart, Url};
use sqlx::types::{Json, JsonValue};
use sqlx::{Pool, Sqlite};
use sqlx::types::Json;
use tauri::{AppHandle, Wry};
use crate::{emit_side_effect, models, render, response_err};
@@ -19,13 +21,13 @@ use crate::{emit_side_effect, models, render, response_err};
pub async fn send_http_request(
request: models::HttpRequest,
response: &models::HttpResponse,
environment_id: &str,
environment: Option<models::Environment>,
cookie_jar: Option<models::CookieJar>,
app_handle: &AppHandle<Wry>,
pool: &Pool<Sqlite>,
download_path: Option<PathBuf>,
) -> Result<models::HttpResponse, String> {
let start = std::time::Instant::now();
let environment = models::get_environment(environment_id, pool).await.ok();
let environment_ref = environment.as_ref();
let workspace = models::get_workspace(&request.workspace_id, pool)
.await
@@ -49,6 +51,32 @@ pub async fn send_http_request(
.danger_accept_invalid_certs(!workspace.setting_validate_certificates)
.tls_info(true);
// Add cookie store if specified
let maybe_cookie_manager = match cookie_jar.clone() {
Some(cj) => {
// HACK: Can't construct Cookie without serde, so we have to do this
let cookies = cj
.cookies
.0
.iter()
.map(|json_cookie| {
serde_json::from_value(json_cookie.clone())
.expect("Failed to deserialize cookie")
})
.map(|c| Ok(c))
.collect::<Vec<Result<_, ()>>>();
let store = reqwest_cookie_store::CookieStore::from_cookies(cookies, true)
.expect("Failed to create cookie store");
let cookie_store = reqwest_cookie_store::CookieStoreMutex::new(store);
let cookie_store = Arc::new(cookie_store);
client_builder = client_builder.cookie_provider(Arc::clone(&cookie_store));
Some((cookie_store, cj))
}
None => None,
};
if workspace.setting_request_timeout > 0 {
client_builder = client_builder.timeout(Duration::from_millis(
workspace.setting_request_timeout.unsigned_abs(),
@@ -58,14 +86,37 @@ pub async fn send_http_request(
// .use_rustls_tls() // TODO: Make this configurable (maybe)
let client = client_builder.build().expect("Failed to build client");
let url = match Url::from_str(url_string.as_str()) {
Ok(u) => u,
Err(e) => {
return response_err(response, e.to_string(), app_handle, pool).await;
}
};
let m = Method::from_bytes(request.method.to_uppercase().as_bytes())
.expect("Failed to create method");
let mut request_builder = client.request(m, url_string.to_string());
let mut request_builder = client.request(m, url.clone());
let mut headers = HeaderMap::new();
headers.insert(USER_AGENT, HeaderValue::from_static("yaak"));
headers.insert(ACCEPT, HeaderValue::from_static("*/*"));
// TODO: Set cookie header ourselves once we also handle redirects. We need to do this
// because reqwest doesn't give us a way to inspect the headers it sent (we have to do
// everything manually to know that).
// if let Some(cookie_store) = maybe_cookie_store.clone() {
// let values1 = cookie_store.get_request_values(&url);
// println!("COOKIE VLUAES: {:?}", values1.collect::<Vec<_>>());
// let raw_value = cookie_store.get_request_values(&url)
// .map(|(name, value)| format!("{}={}", name, value))
// .collect::<Vec<_>>()
// .join("; ");
// headers.insert(
// COOKIE,
// HeaderValue::from_str(&raw_value).expect("Failed to create cookie header"),
// );
// }
for h in request.headers.0 {
if h.name.is_empty() && h.value.is_empty() {
continue;
@@ -252,10 +303,11 @@ pub async fn send_http_request(
match raw_response {
Ok(v) => {
let mut response = response.clone();
let response_headers = v.headers().clone();
response.status = v.status().as_u16() as i64;
response.status_reason = v.status().canonical_reason().map(|s| s.to_string());
response.headers = Json(
v.headers()
response_headers
.iter()
.map(|(k, v)| models::HttpResponseHeader {
name: k.as_str().to_string(),
@@ -304,15 +356,42 @@ pub async fn send_http_request(
match (download_path, response.body_path.clone()) {
(Some(dl_path), Some(body_path)) => {
info!("Downloading response body to {}", dl_path.display());
fs::copy(body_path, dl_path).expect("Failed to copy file for response download");
fs::copy(body_path, dl_path)
.expect("Failed to copy file for response download");
}
_ => {}
};
// Add cookie store if specified
if let Some((cookie_store, mut cookie_jar)) = maybe_cookie_manager {
// let cookies = response_headers.get_all(SET_COOKIE).iter().map(|h| {
// println!("RESPONSE COOKIE: {}", h.to_str().unwrap());
// cookie_store::RawCookie::from_str(h.to_str().unwrap())
// .expect("Failed to parse cookie")
// });
// store.store_response_cookies(cookies, &url);
let json_cookies: Json<Vec<JsonValue>> = Json(
cookie_store
.lock()
.unwrap()
.iter_any()
.map(|c| serde_json::to_value(&c).expect("Failed to serialize cookie"))
.collect::<Vec<_>>(),
);
cookie_jar.cookies = json_cookies;
match models::upsert_cookie_jar(pool, &cookie_jar).await {
Ok(updated_jar) => {
emit_side_effect(app_handle, "updated_model", &updated_jar);
}
Err(e) => {
error!("Failed to update cookie jar: {}", e);
}
};
}
Ok(response)
}
Err(e) => {
response_err(response, e.to_string(), app_handle, pool).await
}
Err(e) => response_err(response, e.to_string(), app_handle, pool).await,
}
}

View File

@@ -3,27 +3,28 @@
windows_subsystem = "windows"
)]
extern crate core;
#[cfg(target_os = "macos")]
#[macro_use]
extern crate objc;
use std::collections::HashMap;
use std::env::current_dir;
use std::fs::{create_dir_all, read_to_string, File};
use std::fs::{create_dir_all, File, read_to_string};
use std::process::exit;
use fern::colors::ColoredLevelConfig;
use log::{debug, error, info, warn};
use rand::random;
use serde::Serialize;
use serde_json::Value;
use serde_json::{json, Value};
use sqlx::{Pool, Sqlite, SqlitePool};
use sqlx::migrate::Migrator;
use sqlx::types::Json;
use sqlx::{Pool, Sqlite, SqlitePool};
#[cfg(target_os = "macos")]
use tauri::TitleBarStyle;
use tauri::{AppHandle, RunEvent, State, Window, WindowUrl, Wry};
use tauri::{Manager, WindowEvent};
#[cfg(target_os = "macos")]
use tauri::TitleBarStyle;
use tauri_plugin_log::{fern, LogTarget};
use tauri_plugin_window_state::{StateFlags, WindowExt};
use tokio::sync::Mutex;
@@ -78,17 +79,36 @@ async fn migrate_db(
async fn send_ephemeral_request(
mut request: models::HttpRequest,
environment_id: Option<&str>,
cookie_jar_id: Option<&str>,
app_handle: AppHandle<Wry>,
db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<models::HttpResponse, String> {
let pool = &*db_instance.lock().await;
let response = models::HttpResponse::new();
let environment_id2 = environment_id.unwrap_or("n/a").to_string();
request.id = "".to_string();
let environment = match environment_id {
Some(id) => Some(
models::get_environment(id, pool)
.await
.expect("Failed to get environment"),
),
None => None,
};
let cookie_jar = match cookie_jar_id {
Some(id) => Some(
models::get_cookie_jar(id, pool)
.await
.expect("Failed to get cookie jar"),
),
None => None,
};
// let cookie_jar_id2 = cookie_jar_id.unwrap_or("").to_string();
send_http_request(
request,
&response,
&environment_id2,
environment,
cookie_jar,
&app_handle,
pool,
None,
@@ -151,6 +171,13 @@ async fn import_data(
)
.await
{
analytics::track_event(
&window.app_handle(),
AnalyticsResource::App,
AnalyticsAction::Import,
Some(json!({ "plugin": plugin_name })),
)
.await;
result = Some(r);
break;
}
@@ -217,8 +244,17 @@ async fn export_data(
serde_json::to_writer_pretty(&f, &export_data)
.map_err(|e| e.to_string())
.expect("Failed to write");
f.sync_all().expect("Failed to sync");
info!("Exported Yaak workspace to {:?}", export_path);
analytics::track_event(
&app_handle,
AnalyticsResource::App,
AnalyticsAction::Export,
None,
)
.await;
Ok(())
}
@@ -228,47 +264,56 @@ async fn send_request(
db_instance: State<'_, Mutex<Pool<Sqlite>>>,
request_id: &str,
environment_id: Option<&str>,
cookie_jar_id: Option<&str>,
download_dir: Option<&str>,
) -> Result<models::HttpResponse, String> {
let pool = &*db_instance.lock().await;
let app_handle = window.app_handle();
let req = models::get_request(request_id, pool)
let request = models::get_request(request_id, pool)
.await
.expect("Failed to get request");
let response = models::create_response(&req.id, 0, "", 0, None, None, None, vec![], pool)
let environment = match environment_id {
Some(id) => Some(
models::get_environment(id, pool)
.await
.expect("Failed to get environment"),
),
None => None,
};
let cookie_jar = match cookie_jar_id {
Some(id) => Some(
models::get_cookie_jar(id, pool)
.await
.expect("Failed to get cookie jar"),
),
None => None,
};
let response = models::create_response(&request.id, 0, "", 0, None, None, None, vec![], pool)
.await
.expect("Failed to create response");
let response2 = response.clone();
let environment_id2 = environment_id.unwrap_or("n/a").to_string();
let app_handle2 = window.app_handle().clone();
let pool2 = pool.clone();
let download_path = if let Some(p) = download_dir {
Some(std::path::Path::new(p).to_path_buf())
} else {
None
};
tokio::spawn(async move {
if let Err(e) = send_http_request(
req,
&response2,
&environment_id2,
&app_handle2,
&pool2,
download_path,
)
.await
{
response_err(&response2, e, &app_handle2, &pool2)
.await
.expect("Failed to update response");
}
});
emit_side_effect(&app_handle, "created_model", response.clone());
emit_and_return(&window, "created_model", response)
send_http_request(
request.clone(),
&response,
environment,
cookie_jar,
&app_handle,
&pool,
download_path,
)
.await
}
async fn response_err(
@@ -362,6 +407,57 @@ async fn create_workspace(
emit_and_return(&window, "created_model", created_workspace)
}
#[tauri::command]
/// Tauri command: upsert the given cookie jar and broadcast the updated model
/// to the frontend.
///
/// Returns the updated jar, or an error string if the database write fails.
async fn update_cookie_jar(
    cookie_jar: models::CookieJar,
    window: Window<Wry>,
    db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<models::CookieJar, String> {
    let pool = &*db_instance.lock().await;
    // Surface DB failures to the caller instead of panicking; this command
    // already returns Result<_, String> (matching get_cookie_jar's map_err
    // style). A leftover debug `println!` of the cookie count was removed.
    let updated = models::upsert_cookie_jar(pool, &cookie_jar)
        .await
        .map_err(|e| e.to_string())?;
    emit_and_return(&window, "updated_model", updated)
}
#[tauri::command]
/// Tauri command: delete a cookie jar by id and notify the frontend.
///
/// The model layer returns the deleted row so the UI can drop it from view.
async fn delete_cookie_jar(
    window: Window<Wry>,
    db_instance: State<'_, Mutex<Pool<Sqlite>>>,
    cookie_jar_id: &str,
) -> Result<models::CookieJar, String> {
    let pool = &*db_instance.lock().await;
    let deleted_jar = models::delete_cookie_jar(cookie_jar_id, pool)
        .await
        .expect("Failed to delete cookie jar");
    emit_and_return(&window, "deleted_model", deleted_jar)
}
#[tauri::command]
/// Tauri command: create a new cookie jar in the given workspace.
///
/// The jar is built with an empty id so the upsert generates one ("cj" prefix);
/// the created model is emitted to the frontend and returned.
async fn create_cookie_jar(
    workspace_id: &str,
    name: &str,
    window: Window<Wry>,
    db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<models::CookieJar, String> {
    // Everything not specified here falls back to CookieJar::default().
    let new_jar = models::CookieJar {
        name: name.to_string(),
        workspace_id: workspace_id.to_string(),
        ..Default::default()
    };

    let pool = &*db_instance.lock().await;
    let created_cookie_jar = models::upsert_cookie_jar(pool, &new_jar)
        .await
        .expect("Failed to create cookie jar");
    emit_and_return(&window, "created_model", created_cookie_jar)
}
#[tauri::command]
async fn create_environment(
workspace_id: &str,
@@ -627,6 +723,44 @@ async fn get_request(
.map_err(|e| e.to_string())
}
#[tauri::command]
/// Tauri command: look up a single cookie jar by id.
async fn get_cookie_jar(
    id: &str,
    db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<models::CookieJar, String> {
    let pool = &*db_instance.lock().await;
    // Stringify the sqlx error for the IPC boundary.
    match models::get_cookie_jar(id, pool).await {
        Ok(jar) => Ok(jar),
        Err(e) => Err(e.to_string()),
    }
}
#[tauri::command]
/// Tauri command: list all cookie jars for a workspace.
///
/// A workspace is guaranteed to have at least one jar: when none exist yet,
/// a jar named "Default" is lazily created and returned.
async fn list_cookie_jars(
    workspace_id: &str,
    db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<Vec<models::CookieJar>, String> {
    let pool = &*db_instance.lock().await;
    let cookie_jars = models::find_cookie_jars(workspace_id, pool)
        .await
        .expect("Failed to find cookie jars");

    if !cookie_jars.is_empty() {
        return Ok(cookie_jars);
    }

    // First access for this workspace — seed it with a default jar.
    let default_jar = models::upsert_cookie_jar(
        pool,
        &models::CookieJar {
            name: "Default".to_string(),
            workspace_id: workspace_id.to_string(),
            ..Default::default()
        },
    )
    .await
    .expect("Failed to create CookieJar");
    Ok(vec![default_jar])
}
#[tauri::command]
async fn get_environment(
id: &str,
@@ -755,6 +889,7 @@ fn main() {
.level_for("tracing", log::LevelFilter::Info)
.level_for("reqwest", log::LevelFilter::Info)
.level_for("tokio_util", log::LevelFilter::Info)
.level_for("cookie_store", log::LevelFilter::Info)
.with_colors(ColoredLevelConfig::default())
.level(log::LevelFilter::Trace)
.build(),
@@ -807,11 +942,13 @@ fn main() {
})
.invoke_handler(tauri::generate_handler![
check_for_updates,
create_cookie_jar,
create_environment,
create_folder,
create_request,
create_workspace,
delete_all_responses,
delete_cookie_jar,
delete_environment,
delete_folder,
delete_request,
@@ -820,13 +957,15 @@ fn main() {
duplicate_request,
export_data,
filter_response,
get_key_value,
get_cookie_jar,
get_environment,
get_folder,
get_key_value,
get_request,
get_settings,
get_workspace,
import_data,
list_cookie_jars,
list_environments,
list_folders,
list_requests,
@@ -838,6 +977,7 @@ fn main() {
set_key_value,
set_update_mode,
track_event,
update_cookie_jar,
update_environment,
update_folder,
update_request,

View File

@@ -1,7 +1,7 @@
use std::collections::HashMap;
use std::fs;
use log::error;
use log::error;
use rand::distributions::{Alphanumeric, DistString};
use serde::{Deserialize, Serialize};
use sqlx::{Pool, Sqlite};
@@ -57,6 +57,23 @@ impl Workspace {
}
}
// NOTE(review): empty struct with no fields and no visible references in this
// change — appears to be an accidental leftover. Confirm it is unused and
// remove if dead.
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, Default)]
pub struct CookieX {
}
/// A named collection of cookies belonging to one workspace.
///
/// Cookies are stored as raw JSON values (serde-serialized cookies from the
/// HTTP send path) rather than a typed cookie struct; the send code
/// round-trips them through the in-memory cookie store.
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, Default)]
#[serde(default, rename_all = "camelCase")]
pub struct CookieJar {
pub id: String,
// Model discriminator; DB column defaults to 'cookie_jar'
pub model: String,
pub created_at: NaiveDateTime,
pub updated_at: NaiveDateTime,
pub workspace_id: String,
pub name: String,
// Serialized cookies as a JSON array; DB column defaults to '[]'
pub cookies: Json<Vec<JsonValue>>,
}
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, Default)]
#[serde(default, rename_all = "camelCase")]
pub struct Environment {
@@ -351,6 +368,96 @@ pub async fn delete_workspace(id: &str, pool: &Pool<Sqlite>) -> Result<Workspace
Ok(workspace)
}
/// Fetch a single cookie jar by id.
///
/// Returns `sqlx::Error::RowNotFound` when no row matches. The query text
/// must stay byte-identical to the sqlx offline-cache entry, so do not
/// reformat the SQL string.
pub async fn get_cookie_jar(id: &str, pool: &Pool<Sqlite>) -> Result<CookieJar, sqlx::Error> {
sqlx::query_as!(
CookieJar,
r#"
SELECT
id,
model,
created_at,
updated_at,
workspace_id,
name,
cookies AS "cookies!: sqlx::types::Json<Vec<JsonValue>>"
FROM cookie_jars WHERE id = ?
"#,
id,
)
.fetch_one(pool)
.await
}
/// List every cookie jar in the given workspace (empty Vec when none exist).
///
/// The query text must stay byte-identical to the sqlx offline-cache entry,
/// so do not reformat the SQL string.
pub async fn find_cookie_jars(workspace_id: &str, pool: &Pool<Sqlite>) -> Result<Vec<CookieJar>, sqlx::Error> {
sqlx::query_as!(
CookieJar,
r#"
SELECT
id,
model,
created_at,
updated_at,
workspace_id,
name,
cookies AS "cookies!: sqlx::types::Json<Vec<JsonValue>>"
FROM cookie_jars WHERE workspace_id = ?
"#,
workspace_id,
)
.fetch_all(pool)
.await
}
/// Delete a cookie jar by id, returning the deleted row.
///
/// The jar is fetched first so callers (and UI "deleted_model" emitters) get
/// the full deleted model back; the DELETE is then executed.
pub async fn delete_cookie_jar(id: &str, pool: &Pool<Sqlite>) -> Result<CookieJar, sqlx::Error> {
let cookie_jar = get_cookie_jar(id, pool).await?;
// Propagate DELETE failures instead of discarding them — the previous
// `let _ = …` silently swallowed any sqlx error, so a failed delete would
// still report success to the caller.
sqlx::query!(
r#"
DELETE FROM cookie_jars
WHERE id = ?
"#,
id,
)
.execute(pool)
.await?;
Ok(cookie_jar)
}
/// Insert or update a cookie jar.
///
/// An empty `id` means "create": a fresh id with the "cj" prefix is
/// generated. On conflict the row's name/cookies are replaced and
/// `updated_at` is bumped. Returns the row as re-read from the database.
/// The query text must stay byte-identical to the sqlx offline-cache entry,
/// so do not reformat the SQL string.
pub async fn upsert_cookie_jar(
pool: &Pool<Sqlite>,
cookie_jar: &CookieJar,
) -> Result<CookieJar, sqlx::Error> {
let id = match cookie_jar.id.as_str() {
"" => generate_id(Some("cj")),
_ => cookie_jar.id.to_string(),
};
// Names are stored trimmed so UI-entered whitespace never persists.
let trimmed_name = cookie_jar.name.trim();
sqlx::query!(
r#"
INSERT INTO cookie_jars (
id,
workspace_id,
name,
cookies
)
VALUES (?, ?, ?, ?)
ON CONFLICT (id) DO UPDATE SET
updated_at = CURRENT_TIMESTAMP,
name = excluded.name,
cookies = excluded.cookies
"#,
id,
cookie_jar.workspace_id,
trimmed_name,
cookie_jar.cookies,
)
.execute(pool)
.await?;
// Re-fetch so DB-assigned defaults/timestamps are reflected in the return.
get_cookie_jar(&id, pool).await
}
pub async fn find_environments(
workspace_id: &str,
pool: &Pool<Sqlite>,

View File

@@ -8,7 +8,7 @@
},
"package": {
"productName": "Yaak",
"version": "2024.1.0"
"version": "2024.2.0"
},
"tauri": {
"windows": [],