Cookie Support (#19)

This commit is contained in:
Gregory Schier
2024-01-28 14:39:51 -08:00
committed by GitHub
parent 0555420ad9
commit 7d183c6580
45 changed files with 1152 additions and 145 deletions

View File

@@ -14,6 +14,7 @@ pub enum AnalyticsResource {
App,
Sidebar,
Workspace,
CookieJar,
Environment,
Folder,
HttpRequest,
@@ -28,6 +29,7 @@ impl AnalyticsResource {
"Sidebar" => Some(AnalyticsResource::Sidebar),
"Workspace" => Some(AnalyticsResource::Workspace),
"Environment" => Some(AnalyticsResource::Environment),
"CookieJar" => Some(AnalyticsResource::CookieJar),
"Folder" => Some(AnalyticsResource::Folder),
"HttpRequest" => Some(AnalyticsResource::HttpRequest),
"HttpResponse" => Some(AnalyticsResource::HttpResponse),
@@ -50,6 +52,8 @@ pub enum AnalyticsAction {
Send,
Toggle,
Duplicate,
Import,
Export,
}
impl AnalyticsAction {
@@ -66,6 +70,8 @@ impl AnalyticsAction {
"Send" => Some(AnalyticsAction::Send),
"Duplicate" => Some(AnalyticsAction::Duplicate),
"Toggle" => Some(AnalyticsAction::Toggle),
"Import" => Some(AnalyticsAction::Import),
"Export" => Some(AnalyticsAction::Export),
_ => None,
}
}
@@ -77,6 +83,7 @@ fn resource_name(resource: AnalyticsResource) -> &'static str {
AnalyticsResource::Sidebar => "sidebar",
AnalyticsResource::Workspace => "workspace",
AnalyticsResource::Environment => "environment",
AnalyticsResource::CookieJar => "cookie_jar",
AnalyticsResource::Folder => "folder",
AnalyticsResource::HttpRequest => "http_request",
AnalyticsResource::HttpResponse => "http_response",
@@ -97,6 +104,8 @@ fn action_name(action: AnalyticsAction) -> &'static str {
AnalyticsAction::Send => "send",
AnalyticsAction::Duplicate => "duplicate",
AnalyticsAction::Toggle => "toggle",
AnalyticsAction::Import => "import",
AnalyticsAction::Export => "export",
}
}

View File

@@ -2,16 +2,18 @@ use std::fs;
use std::fs::{create_dir_all, File};
use std::io::Write;
use std::path::PathBuf;
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use base64::Engine;
use http::{HeaderMap, HeaderName, HeaderValue, Method};
use http::header::{ACCEPT, USER_AGENT};
use http::{HeaderMap, HeaderName, HeaderValue, Method};
use log::{error, info, warn};
use reqwest::multipart;
use reqwest::redirect::Policy;
use reqwest::{multipart, Url};
use sqlx::types::{Json, JsonValue};
use sqlx::{Pool, Sqlite};
use sqlx::types::Json;
use tauri::{AppHandle, Wry};
use crate::{emit_side_effect, models, render, response_err};
@@ -19,13 +21,13 @@ use crate::{emit_side_effect, models, render, response_err};
pub async fn send_http_request(
request: models::HttpRequest,
response: &models::HttpResponse,
environment_id: &str,
environment: Option<models::Environment>,
cookie_jar: Option<models::CookieJar>,
app_handle: &AppHandle<Wry>,
pool: &Pool<Sqlite>,
download_path: Option<PathBuf>,
) -> Result<models::HttpResponse, String> {
let start = std::time::Instant::now();
let environment = models::get_environment(environment_id, pool).await.ok();
let environment_ref = environment.as_ref();
let workspace = models::get_workspace(&request.workspace_id, pool)
.await
@@ -49,6 +51,32 @@ pub async fn send_http_request(
.danger_accept_invalid_certs(!workspace.setting_validate_certificates)
.tls_info(true);
// Add cookie store if specified
let maybe_cookie_manager = match cookie_jar.clone() {
Some(cj) => {
// HACK: Can't construct Cookie without serde, so we have to do this
let cookies = cj
.cookies
.0
.iter()
.map(|json_cookie| {
serde_json::from_value(json_cookie.clone())
.expect("Failed to deserialize cookie")
})
.map(|c| Ok(c))
.collect::<Vec<Result<_, ()>>>();
let store = reqwest_cookie_store::CookieStore::from_cookies(cookies, true)
.expect("Failed to create cookie store");
let cookie_store = reqwest_cookie_store::CookieStoreMutex::new(store);
let cookie_store = Arc::new(cookie_store);
client_builder = client_builder.cookie_provider(Arc::clone(&cookie_store));
Some((cookie_store, cj))
}
None => None,
};
if workspace.setting_request_timeout > 0 {
client_builder = client_builder.timeout(Duration::from_millis(
workspace.setting_request_timeout.unsigned_abs(),
@@ -58,14 +86,37 @@ pub async fn send_http_request(
// .use_rustls_tls() // TODO: Make this configurable (maybe)
let client = client_builder.build().expect("Failed to build client");
let url = match Url::from_str(url_string.as_str()) {
Ok(u) => u,
Err(e) => {
return response_err(response, e.to_string(), app_handle, pool).await;
}
};
let m = Method::from_bytes(request.method.to_uppercase().as_bytes())
.expect("Failed to create method");
let mut request_builder = client.request(m, url_string.to_string());
let mut request_builder = client.request(m, url.clone());
let mut headers = HeaderMap::new();
headers.insert(USER_AGENT, HeaderValue::from_static("yaak"));
headers.insert(ACCEPT, HeaderValue::from_static("*/*"));
// TODO: Set cookie header ourselves once we also handle redirects. We need to do this
// because reqwest doesn't give us a way to inspect the headers it sent (we have to do
// everything manually to know that).
// if let Some(cookie_store) = maybe_cookie_store.clone() {
// let values1 = cookie_store.get_request_values(&url);
// println!("COOKIE VALUES: {:?}", values1.collect::<Vec<_>>());
// let raw_value = cookie_store.get_request_values(&url)
// .map(|(name, value)| format!("{}={}", name, value))
// .collect::<Vec<_>>()
// .join("; ");
// headers.insert(
// COOKIE,
// HeaderValue::from_str(&raw_value).expect("Failed to create cookie header"),
// );
// }
for h in request.headers.0 {
if h.name.is_empty() && h.value.is_empty() {
continue;
@@ -252,10 +303,11 @@ pub async fn send_http_request(
match raw_response {
Ok(v) => {
let mut response = response.clone();
let response_headers = v.headers().clone();
response.status = v.status().as_u16() as i64;
response.status_reason = v.status().canonical_reason().map(|s| s.to_string());
response.headers = Json(
v.headers()
response_headers
.iter()
.map(|(k, v)| models::HttpResponseHeader {
name: k.as_str().to_string(),
@@ -304,15 +356,42 @@ pub async fn send_http_request(
match (download_path, response.body_path.clone()) {
(Some(dl_path), Some(body_path)) => {
info!("Downloading response body to {}", dl_path.display());
fs::copy(body_path, dl_path).expect("Failed to copy file for response download");
fs::copy(body_path, dl_path)
.expect("Failed to copy file for response download");
}
_ => {}
};
// Add cookie store if specified
if let Some((cookie_store, mut cookie_jar)) = maybe_cookie_manager {
// let cookies = response_headers.get_all(SET_COOKIE).iter().map(|h| {
// println!("RESPONSE COOKIE: {}", h.to_str().unwrap());
// cookie_store::RawCookie::from_str(h.to_str().unwrap())
// .expect("Failed to parse cookie")
// });
// store.store_response_cookies(cookies, &url);
let json_cookies: Json<Vec<JsonValue>> = Json(
cookie_store
.lock()
.unwrap()
.iter_any()
.map(|c| serde_json::to_value(&c).expect("Failed to serialize cookie"))
.collect::<Vec<_>>(),
);
cookie_jar.cookies = json_cookies;
match models::upsert_cookie_jar(pool, &cookie_jar).await {
Ok(updated_jar) => {
emit_side_effect(app_handle, "updated_model", &updated_jar);
}
Err(e) => {
error!("Failed to update cookie jar: {}", e);
}
};
}
Ok(response)
}
Err(e) => {
response_err(response, e.to_string(), app_handle, pool).await
}
Err(e) => response_err(response, e.to_string(), app_handle, pool).await,
}
}

View File

@@ -3,27 +3,28 @@
windows_subsystem = "windows"
)]
extern crate core;
#[cfg(target_os = "macos")]
#[macro_use]
extern crate objc;
use std::collections::HashMap;
use std::env::current_dir;
use std::fs::{create_dir_all, read_to_string, File};
use std::fs::{create_dir_all, File, read_to_string};
use std::process::exit;
use fern::colors::ColoredLevelConfig;
use log::{debug, error, info, warn};
use rand::random;
use serde::Serialize;
use serde_json::Value;
use serde_json::{json, Value};
use sqlx::{Pool, Sqlite, SqlitePool};
use sqlx::migrate::Migrator;
use sqlx::types::Json;
use sqlx::{Pool, Sqlite, SqlitePool};
#[cfg(target_os = "macos")]
use tauri::TitleBarStyle;
use tauri::{AppHandle, RunEvent, State, Window, WindowUrl, Wry};
use tauri::{Manager, WindowEvent};
#[cfg(target_os = "macos")]
use tauri::TitleBarStyle;
use tauri_plugin_log::{fern, LogTarget};
use tauri_plugin_window_state::{StateFlags, WindowExt};
use tokio::sync::Mutex;
@@ -78,17 +79,36 @@ async fn migrate_db(
async fn send_ephemeral_request(
mut request: models::HttpRequest,
environment_id: Option<&str>,
cookie_jar_id: Option<&str>,
app_handle: AppHandle<Wry>,
db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<models::HttpResponse, String> {
let pool = &*db_instance.lock().await;
let response = models::HttpResponse::new();
let environment_id2 = environment_id.unwrap_or("n/a").to_string();
request.id = "".to_string();
let environment = match environment_id {
Some(id) => Some(
models::get_environment(id, pool)
.await
.expect("Failed to get environment"),
),
None => None,
};
let cookie_jar = match cookie_jar_id {
Some(id) => Some(
models::get_cookie_jar(id, pool)
.await
.expect("Failed to get cookie jar"),
),
None => None,
};
// let cookie_jar_id2 = cookie_jar_id.unwrap_or("").to_string();
send_http_request(
request,
&response,
&environment_id2,
environment,
cookie_jar,
&app_handle,
pool,
None,
@@ -151,6 +171,13 @@ async fn import_data(
)
.await
{
analytics::track_event(
&window.app_handle(),
AnalyticsResource::App,
AnalyticsAction::Import,
Some(json!({ "plugin": plugin_name })),
)
.await;
result = Some(r);
break;
}
@@ -217,8 +244,17 @@ async fn export_data(
serde_json::to_writer_pretty(&f, &export_data)
.map_err(|e| e.to_string())
.expect("Failed to write");
f.sync_all().expect("Failed to sync");
info!("Exported Yaak workspace to {:?}", export_path);
analytics::track_event(
&app_handle,
AnalyticsResource::App,
AnalyticsAction::Export,
None,
)
.await;
Ok(())
}
@@ -228,47 +264,56 @@ async fn send_request(
db_instance: State<'_, Mutex<Pool<Sqlite>>>,
request_id: &str,
environment_id: Option<&str>,
cookie_jar_id: Option<&str>,
download_dir: Option<&str>,
) -> Result<models::HttpResponse, String> {
let pool = &*db_instance.lock().await;
let app_handle = window.app_handle();
let req = models::get_request(request_id, pool)
let request = models::get_request(request_id, pool)
.await
.expect("Failed to get request");
let response = models::create_response(&req.id, 0, "", 0, None, None, None, vec![], pool)
let environment = match environment_id {
Some(id) => Some(
models::get_environment(id, pool)
.await
.expect("Failed to get environment"),
),
None => None,
};
let cookie_jar = match cookie_jar_id {
Some(id) => Some(
models::get_cookie_jar(id, pool)
.await
.expect("Failed to get cookie jar"),
),
None => None,
};
let response = models::create_response(&request.id, 0, "", 0, None, None, None, vec![], pool)
.await
.expect("Failed to create response");
let response2 = response.clone();
let environment_id2 = environment_id.unwrap_or("n/a").to_string();
let app_handle2 = window.app_handle().clone();
let pool2 = pool.clone();
let download_path = if let Some(p) = download_dir {
Some(std::path::Path::new(p).to_path_buf())
} else {
None
};
tokio::spawn(async move {
if let Err(e) = send_http_request(
req,
&response2,
&environment_id2,
&app_handle2,
&pool2,
download_path,
)
.await
{
response_err(&response2, e, &app_handle2, &pool2)
.await
.expect("Failed to update response");
}
});
emit_side_effect(&app_handle, "created_model", response.clone());
emit_and_return(&window, "created_model", response)
send_http_request(
request.clone(),
&response,
environment,
cookie_jar,
&app_handle,
&pool,
download_path,
)
.await
}
async fn response_err(
@@ -362,6 +407,57 @@ async fn create_workspace(
emit_and_return(&window, "created_model", created_workspace)
}
/// Persist edits to an existing cookie jar and notify the frontend.
///
/// Returns the row as re-read from the database by the upsert, so the caller
/// sees any DB-applied values (e.g. the bumped `updated_at`).
#[tauri::command]
async fn update_cookie_jar(
    cookie_jar: models::CookieJar,
    window: Window<Wry>,
    db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<models::CookieJar, String> {
    let pool = &*db_instance.lock().await;
    // Removed a leftover debug `println!` of the cookie count, and propagate
    // DB errors to the frontend (the command already returns Result<_, String>)
    // instead of panicking the backend via `.expect`.
    let updated = models::upsert_cookie_jar(pool, &cookie_jar)
        .await
        .map_err(|e| e.to_string())?;
    emit_and_return(&window, "updated_model", updated)
}
/// Delete a cookie jar by id and broadcast the deletion to the frontend.
///
/// The deleted row is returned so the UI can identify which model vanished.
#[tauri::command]
async fn delete_cookie_jar(
    window: Window<Wry>,
    db_instance: State<'_, Mutex<Pool<Sqlite>>>,
    cookie_jar_id: &str,
) -> Result<models::CookieJar, String> {
    let pool = &*db_instance.lock().await;
    // Remove the row, then emit the "deleted_model" side effect.
    let deleted_jar = models::delete_cookie_jar(cookie_jar_id, pool)
        .await
        .expect("Failed to delete cookie jar");
    emit_and_return(&window, "deleted_model", deleted_jar)
}
/// Create a new cookie jar in the given workspace and notify the frontend.
#[tauri::command]
async fn create_cookie_jar(
    workspace_id: &str,
    name: &str,
    window: Window<Wry>,
    db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<models::CookieJar, String> {
    // Only the user-supplied fields are set here; id, timestamps, etc. come
    // from `Default` and are filled in during the upsert.
    let new_jar = models::CookieJar {
        name: name.to_string(),
        workspace_id: workspace_id.to_string(),
        ..Default::default()
    };
    let pool = &*db_instance.lock().await;
    let created_cookie_jar = models::upsert_cookie_jar(pool, &new_jar)
        .await
        .expect("Failed to create cookie jar");
    emit_and_return(&window, "created_model", created_cookie_jar)
}
#[tauri::command]
async fn create_environment(
workspace_id: &str,
@@ -627,6 +723,44 @@ async fn get_request(
.map_err(|e| e.to_string())
}
/// Look up a single cookie jar by id, stringifying any database error for
/// the frontend.
#[tauri::command]
async fn get_cookie_jar(
    id: &str,
    db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<models::CookieJar, String> {
    let pool = &*db_instance.lock().await;
    match models::get_cookie_jar(id, pool).await {
        Ok(jar) => Ok(jar),
        Err(e) => Err(e.to_string()),
    }
}
/// List all cookie jars in a workspace.
///
/// A workspace is guaranteed at least one jar: if the query comes back empty,
/// a "Default" jar is created on the fly and returned instead.
#[tauri::command]
async fn list_cookie_jars(
    workspace_id: &str,
    db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<Vec<models::CookieJar>, String> {
    let pool = &*db_instance.lock().await;
    let cookie_jars = models::find_cookie_jars(workspace_id, pool)
        .await
        .expect("Failed to find cookie jars");

    // Common case: jars already exist — return them directly.
    if !cookie_jars.is_empty() {
        return Ok(cookie_jars);
    }

    // Lazily seed the workspace with its default jar.
    let default_jar = models::CookieJar {
        name: "Default".to_string(),
        workspace_id: workspace_id.to_string(),
        ..Default::default()
    };
    let cookie_jar = models::upsert_cookie_jar(pool, &default_jar)
        .await
        .expect("Failed to create CookieJar");
    Ok(vec![cookie_jar])
}
#[tauri::command]
async fn get_environment(
id: &str,
@@ -755,6 +889,7 @@ fn main() {
.level_for("tracing", log::LevelFilter::Info)
.level_for("reqwest", log::LevelFilter::Info)
.level_for("tokio_util", log::LevelFilter::Info)
.level_for("cookie_store", log::LevelFilter::Info)
.with_colors(ColoredLevelConfig::default())
.level(log::LevelFilter::Trace)
.build(),
@@ -807,11 +942,13 @@ fn main() {
})
.invoke_handler(tauri::generate_handler![
check_for_updates,
create_cookie_jar,
create_environment,
create_folder,
create_request,
create_workspace,
delete_all_responses,
delete_cookie_jar,
delete_environment,
delete_folder,
delete_request,
@@ -820,13 +957,15 @@ fn main() {
duplicate_request,
export_data,
filter_response,
get_key_value,
get_cookie_jar,
get_environment,
get_folder,
get_key_value,
get_request,
get_settings,
get_workspace,
import_data,
list_cookie_jars,
list_environments,
list_folders,
list_requests,
@@ -838,6 +977,7 @@ fn main() {
set_key_value,
set_update_mode,
track_event,
update_cookie_jar,
update_environment,
update_folder,
update_request,

View File

@@ -1,7 +1,7 @@
use std::collections::HashMap;
use std::fs;
use log::error;
use log::error;
use rand::distributions::{Alphanumeric, DistString};
use serde::{Deserialize, Serialize};
use sqlx::{Pool, Sqlite};
@@ -57,6 +57,23 @@ impl Workspace {
}
}
// NOTE(review): `CookieX` has no fields and nothing visible in this change
// references it — it looks like a leftover from development. Confirm it is
// unused elsewhere and remove it; kept here only because callers outside this
// view can't be ruled out.
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, Default)]
pub struct CookieX {
}
/// A named collection of cookies scoped to a single workspace.
///
/// Serialized to the frontend in camelCase; missing fields fall back to
/// `Default` on deserialization (`#[serde(default)]`).
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, Default)]
#[serde(default, rename_all = "camelCase")]
pub struct CookieJar {
    pub id: String,
    // Model-type discriminator column — presumably set by a DB default; TODO confirm.
    pub model: String,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
    pub workspace_id: String,
    pub name: String,
    // Cookies are stored as raw JSON values (one element per serialized
    // cookie) rather than a typed cookie struct; the sender round-trips them
    // through serde_json when building/saving the reqwest cookie store.
    pub cookies: Json<Vec<JsonValue>>,
}
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, Default)]
#[serde(default, rename_all = "camelCase")]
pub struct Environment {
@@ -351,6 +368,96 @@ pub async fn delete_workspace(id: &str, pool: &Pool<Sqlite>) -> Result<Workspace
Ok(workspace)
}
/// Fetch a single cookie jar row by id.
///
/// Uses `query_as!`, so the SQL and column list are checked at compile time;
/// the `cookies` column needs the explicit `"cookies!: Json<Vec<JsonValue>>"`
/// override because sqlx cannot infer the JSON wrapper type on its own.
/// Returns `sqlx::Error::RowNotFound` if no row matches.
pub async fn get_cookie_jar(id: &str, pool: &Pool<Sqlite>) -> Result<CookieJar, sqlx::Error> {
    sqlx::query_as!(
        CookieJar,
        r#"
            SELECT
                id,
                model,
                created_at,
                updated_at,
                workspace_id,
                name,
                cookies AS "cookies!: sqlx::types::Json<Vec<JsonValue>>"
            FROM cookie_jars WHERE id = ?
        "#,
        id,
    )
    .fetch_one(pool)
    .await
}
/// Fetch every cookie jar belonging to a workspace.
///
/// Same compile-checked `query_as!` shape as `get_cookie_jar`, including the
/// explicit JSON type override for the `cookies` column. Returns an empty
/// vector (not an error) when the workspace has no jars.
pub async fn find_cookie_jars(workspace_id: &str, pool: &Pool<Sqlite>) -> Result<Vec<CookieJar>, sqlx::Error> {
    sqlx::query_as!(
        CookieJar,
        r#"
            SELECT
                id,
                model,
                created_at,
                updated_at,
                workspace_id,
                name,
                cookies AS "cookies!: sqlx::types::Json<Vec<JsonValue>>"
            FROM cookie_jars WHERE workspace_id = ?
        "#,
        workspace_id,
    )
    .fetch_all(pool)
    .await
}
/// Delete a cookie jar by id, returning the row as it existed beforehand.
///
/// The jar is fetched first so the caller (and any "deleted_model" event)
/// still has the full record after the row is gone.
pub async fn delete_cookie_jar(id: &str, pool: &Pool<Sqlite>) -> Result<CookieJar, sqlx::Error> {
    let cookie_jar = get_cookie_jar(id, pool).await?;
    // Propagate DELETE failures with `?` — the previous `let _ = ...` discarded
    // the Result, reporting success even when the row was never removed.
    sqlx::query!(
        r#"
            DELETE FROM cookie_jars
            WHERE id = ?
        "#,
        id,
    )
    .execute(pool)
    .await?;
    Ok(cookie_jar)
}
/// Insert a new cookie jar or update an existing one, returning the row as
/// re-read from the database (so DB-generated values like timestamps are
/// present on the returned model).
///
/// An empty `id` means "create": a fresh id with the `cj` prefix is generated.
/// On id conflict, `name` and `cookies` are replaced and `updated_at` is
/// bumped; `workspace_id` is intentionally left untouched by the update arm.
pub async fn upsert_cookie_jar(
    pool: &Pool<Sqlite>,
    cookie_jar: &CookieJar,
) -> Result<CookieJar, sqlx::Error> {
    // Empty id ⇒ brand-new jar; otherwise keep the caller's id so the
    // ON CONFLICT clause turns this into an update.
    let id = match cookie_jar.id.as_str() {
        "" => generate_id(Some("cj")),
        _ => cookie_jar.id.to_string(),
    };
    // Normalize whitespace on the user-supplied name before storing.
    let trimmed_name = cookie_jar.name.trim();
    sqlx::query!(
        r#"
            INSERT INTO cookie_jars (
                id,
                workspace_id,
                name,
                cookies
            )
            VALUES (?, ?, ?, ?)
            ON CONFLICT (id) DO UPDATE SET
               updated_at = CURRENT_TIMESTAMP,
               name = excluded.name,
               cookies = excluded.cookies
        "#,
        id,
        cookie_jar.workspace_id,
        trimmed_name,
        cookie_jar.cookies,
    )
    .execute(pool)
    .await?;
    // Re-read rather than echo the input, so defaults/timestamps are accurate.
    get_cookie_jar(&id, pool).await
}
pub async fn find_environments(
workspace_id: &str,
pool: &Pool<Sqlite>,