A bunch more small things

Gregory Schier
2023-02-25 23:04:31 -08:00
parent 83bb18df03
commit d85c021305
25 changed files with 1749 additions and 918 deletions


@@ -8,7 +8,7 @@ CREATE TABLE workspaces
description TEXT NOT NULL
);
CREATE TABLE requests
CREATE TABLE http_requests
(
id TEXT NOT NULL PRIMARY KEY,
workspace_id TEXT NOT NULL REFERENCES workspaces (id) ON DELETE CASCADE,
@@ -22,17 +22,18 @@ CREATE TABLE requests
body TEXT
);
CREATE TABLE responses
CREATE TABLE http_responses
(
id TEXT NOT NULL PRIMARY KEY,
request_id TEXT NOT NULL REFERENCES requests (id) ON DELETE CASCADE,
request_id TEXT NOT NULL REFERENCES http_requests (id) ON DELETE CASCADE,
workspace_id TEXT NOT NULL REFERENCES workspaces (id) ON DELETE CASCADE,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at DATETIME,
elapsed INTEGER NOT NULL,
status INTEGER NOT NULL,
status_reason TEXT NOT NULL,
status_reason TEXT,
url TEXT NOT NULL,
body TEXT NOT NULL,
headers TEXT NOT NULL
);
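The headers columns on these tables are plain TEXT holding a JSON-encoded array of name/value objects; sqlx's Json wrapper (introduced later in this commit) handles the (de)serialization. A small illustrative sketch of what ends up in that column, assuming serde and serde_json as dependencies:

use serde::Serialize;

// Header shape assumed from the HttpRequestHeader/HttpResponseHeader structs
// added later in this commit.
#[derive(Serialize)]
struct Header {
    name: String,
    value: String,
}

fn main() {
    let headers = vec![Header {
        name: "Content-Type".to_string(),
        value: "application/json".to_string(),
    }];
    // Prints the JSON that gets stored in the TEXT headers column:
    // [{"name":"Content-Type","value":"application/json"}]
    println!("{}", serde_json::to_string(&headers).unwrap());
}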

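The diff below appears to be sqlx's offline query cache (typically sqlx-data.json, regenerated with cargo sqlx prepare): each entry keys a prepared statement by hash and records its column types, nullability, and parameter count, which is how the renamed http_requests/http_responses tables and the JSON-typed header columns keep type-checking without a live database.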

@@ -1,14 +1,346 @@
{
"db": "SQLite",
"74850a49fa21f4cb5f30905b8ede1fa76935c1ff7ad13c105c6de772d10ff742": {
"07d1a1c7b4f3d9625a766e60fd57bb779b71dae30e5bbce34885a911a5a42428": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 6
"Right": 1
}
},
"query": "\n INSERT INTO requests (id, workspace_id, name, url, method, body, updated_at, headers)\n VALUES (?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, '{}')\n ON CONFLICT (id) DO UPDATE SET\n name = excluded.name,\n method = excluded.method,\n body = excluded.body,\n url = excluded.url;\n "
"query": "\n DELETE FROM http_responses\n WHERE id = ?\n "
},
"0fa36011553f7ca91113459a5cefd47f990f9b548a95e475ffd6e4b017059488": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 1
}
},
"query": "\n DELETE FROM http_responses\n WHERE request_id = ?\n "
},
"28675cd7ad73860417a667050694675e132b5e92cf6d3195a6eec218834e3a1d": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "request_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "updated_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "deleted_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "created_at",
"ordinal": 5,
"type_info": "Datetime"
},
{
"name": "status",
"ordinal": 6,
"type_info": "Int64"
},
{
"name": "status_reason",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "body",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "elapsed",
"ordinal": 9,
"type_info": "Int64"
},
{
"name": "url",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "headers!: sqlx::types::Json<Vec<HttpResponseHeader>>",
"ordinal": 11,
"type_info": "Text"
}
],
"nullable": [
false,
false,
false,
false,
true,
false,
false,
true,
false,
false,
false,
false
],
"parameters": {
"Right": 1
}
},
"query": "\n SELECT id, workspace_id, request_id, updated_at, deleted_at, created_at, status, status_reason, body, elapsed, url,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpResponseHeader>>\"\n FROM http_responses\n WHERE request_id = ?\n ORDER BY created_at DESC\n "
},
"3d2a542964d946ff9854d053b1adf04985d97a6de27b713188505c1f99c77707": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 2,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "deleted_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "name",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "url",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "method",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "body",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "headers!: sqlx::types::Json<Vec<HttpRequestHeader>>",
"ordinal": 9,
"type_info": "Text"
}
],
"nullable": [
false,
false,
false,
false,
true,
false,
false,
false,
true,
false
],
"parameters": {
"Right": 1
}
},
"query": "\n SELECT id, workspace_id, created_at, updated_at, deleted_at, name, url, method, body,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpRequestHeader>>\"\n FROM http_requests\n WHERE workspace_id = ?\n "
},
"3d3cc959cd3844950dde2426945bad638fa5f1a46c4681b5fe2bff60780dea62": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 7
}
},
"query": "\n INSERT INTO http_requests (id, workspace_id, name, url, method, body, headers, updated_at)\n VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n method = excluded.method,\n body = excluded.body,\n url = excluded.url\n "
},
"55eae4b20a2c313134579b0ea43bad4dc2dd313db6cd1654f783bac12602db8a": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "request_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "updated_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "deleted_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "created_at",
"ordinal": 5,
"type_info": "Datetime"
},
{
"name": "status",
"ordinal": 6,
"type_info": "Int64"
},
{
"name": "status_reason",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "body",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "elapsed",
"ordinal": 9,
"type_info": "Int64"
},
{
"name": "url",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "headers!: sqlx::types::Json<Vec<HttpResponseHeader>>",
"ordinal": 11,
"type_info": "Text"
}
],
"nullable": [
false,
false,
false,
false,
true,
false,
false,
true,
false,
false,
false,
false
],
"parameters": {
"Right": 1
}
},
"query": "\n SELECT id, workspace_id, request_id, updated_at, deleted_at, created_at, status, status_reason, body, elapsed, url,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpResponseHeader>>\"\n FROM http_responses\n WHERE id = ?\n "
},
"7ec60cbc3c9f26e8af86a21ef6b66e564f4fa518925c92308b04f882237a244e": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 2,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "deleted_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "name",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "url",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "method",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "body",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "headers!: sqlx::types::Json<Vec<HttpRequestHeader>>",
"ordinal": 9,
"type_info": "Text"
}
],
"nullable": [
false,
false,
false,
false,
true,
false,
false,
false,
true,
false
],
"parameters": {
"Right": 1
}
},
"query": "\n SELECT id, workspace_id, created_at, updated_at, deleted_at, name, url, method, body,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpRequestHeader>>\"\n FROM http_requests\n WHERE id = ?\n ORDER BY created_at DESC\n "
},
"8069c0bd326f659faca7b45b03e5317d7339a168f4cd7776d9f84304bb7ae7ac": {
"describe": {
@@ -58,149 +390,15 @@
},
"query": "\n SELECT id, created_at, updated_at, deleted_at, name, description\n FROM workspaces\n "
},
"d461b9471bdc1fd3f85ca9351f686def07634b4906c8429eeef343b11992b445": {
"e767522f92c8c49cd2e563e58737a05092daf9b1dc763bacc82a5c14d696d78e": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 2,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "deleted_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "name",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "url",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "method",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "body",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "headers",
"ordinal": 9,
"type_info": "Text"
}
],
"nullable": [
false,
false,
false,
false,
true,
false,
false,
false,
true,
false
],
"columns": [],
"nullable": [],
"parameters": {
"Right": 1
"Right": 9
}
},
"query": "\n SELECT id, workspace_id, created_at, updated_at, deleted_at, name, url, method, body, headers\n FROM requests\n WHERE id = ?\n "
},
"da08ebedec0942fd5c54ed1e180d7dc399629f83bfa1341c1c09a048123adac1": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 2,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "deleted_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "name",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "url",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "method",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "body",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "headers",
"ordinal": 9,
"type_info": "Text"
}
],
"nullable": [
false,
false,
false,
false,
true,
false,
false,
false,
true,
false
],
"parameters": {
"Right": 1
}
},
"query": "\n SELECT id, workspace_id, created_at, updated_at, deleted_at, name, url, method, body, headers\n FROM requests\n WHERE workspace_id = ?;\n "
"query": "\n INSERT INTO http_responses (id, request_id, workspace_id, elapsed, url, status, status_reason, body, headers)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);\n "
},
"f116d8cf9aad828135bb8c3a4c8b8e6b857ae13303989e9133a33b2d1cf20e96": {
"describe": {


@@ -21,9 +21,10 @@ use tauri::{AppHandle, State, Wry};
use tauri::{CustomMenuItem, Manager, SystemTray, SystemTrayEvent, SystemTrayMenu, WindowEvent};
use tokio::sync::Mutex;
use crate::models::{create_workspace, find_workspaces, Request, Workspace};
use window_ext::WindowExt;
use crate::models::HttpRequestHeader;
mod models;
mod runtime;
mod window_ext;
@@ -54,15 +55,17 @@ async fn load_db(db_instance: State<'_, Mutex<Pool<Sqlite>>>) -> Result<(), Stri
async fn send_request(
app_handle: AppHandle<Wry>,
db_instance: State<'_, Mutex<Pool<Sqlite>>>,
url: &str,
method: &str,
body: Option<&str>,
) -> Result<CustomResponse, String> {
request_id: &str,
) -> Result<models::HttpResponse, String> {
let pool = &*db_instance.lock().await;
let req = models::get_request(request_id, pool)
.await
.expect("Failed to get request");
let start = std::time::Instant::now();
let mut abs_url = url.to_string();
let mut abs_url = req.url.to_string();
if !abs_url.starts_with("http://") && !abs_url.starts_with("https://") {
abs_url = format!("http://{}", url);
abs_url = format!("http://{}", req.url);
}
let client = reqwest::Client::builder()
@@ -78,24 +81,16 @@ async fn send_request(
HeaderValue::from_static("123-123-123"),
);
let m = Method::from_bytes(method.to_uppercase().as_bytes()).unwrap();
let m = Method::from_bytes(req.method.to_uppercase().as_bytes()).unwrap();
let builder = client.request(m, abs_url.to_string()).headers(headers);
let req = match body {
Some(b) => builder.body(b.to_string()).build(),
let sendable_req = match req.body {
Some(b) => builder.body(b).build(),
None => builder.build(),
};
}
.expect("Failed to build request");
let req = match req {
Ok(v) => v,
Err(e) => {
println!("Error: {}", e);
return Err(e.to_string());
}
};
let resp = client.execute(req).await;
let elapsed = start.elapsed().as_millis();
let resp = client.execute(sendable_req).await;
let p = app_handle
.path_resolver()
@@ -106,27 +101,33 @@ async fn send_request(
match resp {
Ok(v) => {
let url = v.url().to_string();
let status = v.status().as_u16();
let status = v.status().as_u16() as i64;
let status_reason = v.status().canonical_reason();
let method = method.to_string();
let headers = v
.headers()
.iter()
.map(|(k, v)| (k.as_str().to_string(), v.to_str().unwrap().to_string()))
.collect::<HashMap<String, String>>();
let body = v.text().await.unwrap();
let elapsed2 = start.elapsed().as_millis();
Ok(CustomResponse {
.map(|(k, v)| models::HttpResponseHeader {
name: k.as_str().to_string(),
value: v.to_str().unwrap().to_string(),
})
.collect();
let url = v.url().clone();
let body = v.text().await.expect("Failed to get body");
let elapsed = start.elapsed().as_millis() as i64;
let response = models::create_response(
&req.id,
elapsed,
url.as_str(),
status,
status_reason,
body,
elapsed,
elapsed2,
method,
url,
body.as_str(),
headers,
})
pool,
)
.await
.expect("Failed to create response");
Ok(response)
}
Err(e) => {
println!("Error: {}", e);
@@ -140,27 +141,23 @@ async fn upsert_request(
id: Option<&str>,
workspace_id: &str,
name: &str,
url: &str,
body: Option<&str>,
headers: Vec<HttpRequestHeader>,
method: &str,
db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<Request, String> {
) -> Result<models::HttpRequest, String> {
let pool = &*db_instance.lock().await;
models::upsert_request(
id,
workspace_id,
name,
"GET",
None,
"https://google.com",
pool,
)
.await
.map_err(|e| e.to_string())
models::upsert_request(id, workspace_id, name, method, body, url, headers, pool)
.await
.map_err(|e| e.to_string())
}
#[tauri::command]
async fn requests(
workspace_id: &str,
db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<Vec<Request>, String> {
) -> Result<Vec<models::HttpRequest>, String> {
let pool = &*db_instance.lock().await;
models::find_requests(workspace_id, pool)
.await
@@ -168,13 +165,48 @@ async fn requests(
}
#[tauri::command]
async fn workspaces(db_instance: State<'_, Mutex<Pool<Sqlite>>>) -> Result<Vec<Workspace>, String> {
async fn responses(
request_id: &str,
db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<Vec<models::HttpResponse>, String> {
let pool = &*db_instance.lock().await;
let workspaces = find_workspaces(pool)
models::find_responses(request_id, pool)
.await
.map_err(|e| e.to_string())
}
#[tauri::command]
async fn delete_response(
id: &str,
db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<(), String> {
let pool = &*db_instance.lock().await;
models::delete_response(id, pool)
.await
.map_err(|e| e.to_string())
}
#[tauri::command]
async fn delete_all_responses(
request_id: &str,
db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<(), String> {
let pool = &*db_instance.lock().await;
models::delete_all_responses(request_id, pool)
.await
.map_err(|e| e.to_string())
}
#[tauri::command]
async fn workspaces(
db_instance: State<'_, Mutex<Pool<Sqlite>>>,
) -> Result<Vec<models::Workspace>, String> {
let pool = &*db_instance.lock().await;
let workspaces = models::find_workspaces(pool)
.await
.expect("Failed to find workspaces");
if workspaces.is_empty() {
let workspace = create_workspace("Default", "This is the default workspace", pool)
let workspace = models::create_workspace("Default", "This is the default workspace", pool)
.await
.expect("Failed to create workspace");
Ok(vec![workspace])
@@ -205,7 +237,7 @@ fn main() {
let dir = app.path_resolver().app_data_dir().unwrap();
create_dir_all(dir.clone()).expect("Problem creating App directory!");
let p = dir.join("db.sqlite");
let p_string = p.to_string_lossy().replace(" ", "%20");
let p_string = p.to_string_lossy().replace(' ', "%20");
let url = format!("sqlite://{}?mode=rwc", p_string);
println!("DB PATH: {}", p_string);
tauri::async_runtime::block_on(async move {
@@ -244,12 +276,15 @@ fn main() {
}
})
.invoke_handler(tauri::generate_handler![
send_request,
greet,
load_db,
workspaces,
requests,
send_request,
upsert_request,
responses,
delete_response,
delete_all_responses,
])
.run(tauri::generate_context!())
.expect("error while running tauri application");


@@ -1,8 +1,7 @@
use std::collections::HashMap;
use rand::distributions::{Alphanumeric, DistString};
use serde::{Deserialize, Serialize};
use sqlx::types::chrono::NaiveDateTime;
use sqlx::types::Json;
use sqlx::{Pool, Sqlite};
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize)]
@@ -16,8 +15,16 @@ pub struct Workspace {
pub description: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HttpRequestHeader {
pub name: String,
pub value: String,
}
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize)]
pub struct Request {
#[serde(rename_all = "camelCase")]
pub struct HttpRequest {
pub id: String,
pub created_at: NaiveDateTime,
pub updated_at: NaiveDateTime,
@@ -27,26 +34,31 @@ pub struct Request {
pub url: String,
pub method: String,
pub body: Option<String>,
pub headers: String,
pub headers: Json<Vec<HttpRequestHeader>>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HttpResponseHeader {
pub name: String,
pub value: String,
}
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize)]
pub struct Response {
#[serde(rename_all = "camelCase")]
pub struct HttpResponse {
pub id: String,
pub workspace_id: String,
pub request_id: String,
pub created_at: NaiveDateTime,
pub updated_at: NaiveDateTime,
pub deleted_at: Option<NaiveDateTime>,
pub name: String,
pub status: u16,
pub status_reason: Option<&'static str>,
pub body: String,
pub url: String,
pub method: String,
pub elapsed: u128,
pub elapsed2: u128,
pub headers: HashMap<String, String>,
pub elapsed: i64,
pub status: i64,
pub status_reason: Option<String>,
pub body: String,
pub headers: Json<Vec<HttpResponseHeader>>,
}
pub async fn find_workspaces(pool: &Pool<Sqlite>) -> Result<Vec<Workspace>, sqlx::Error> {
@@ -104,18 +116,28 @@ pub async fn upsert_request(
method: &str,
body: Option<&str>,
url: &str,
headers: Vec<HttpRequestHeader>,
pool: &Pool<Sqlite>,
) -> Result<Request, sqlx::Error> {
let id = generate_id("rq");
) -> Result<HttpRequest, sqlx::Error> {
let generated_id;
let id = match id {
Some(v) => v,
None => {
generated_id = generate_id("rq");
generated_id.as_str()
}
};
let headers_json = Json(headers);
sqlx::query!(
r#"
INSERT INTO requests (id, workspace_id, name, url, method, body, updated_at, headers)
VALUES (?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, '{}')
INSERT INTO http_requests (id, workspace_id, name, url, method, body, headers, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
ON CONFLICT (id) DO UPDATE SET
updated_at = CURRENT_TIMESTAMP,
name = excluded.name,
method = excluded.method,
body = excluded.body,
url = excluded.url;
url = excluded.url
"#,
id,
workspace_id,
@@ -123,23 +145,25 @@ pub async fn upsert_request(
url,
method,
body,
headers_json,
)
.execute(pool)
.await
.expect("Failed to insert new request");
get_request(&id, pool).await
get_request(id, pool).await
}
pub async fn find_requests(
workspace_id: &str,
pool: &Pool<Sqlite>,
) -> Result<Vec<Request>, sqlx::Error> {
) -> Result<Vec<HttpRequest>, sqlx::Error> {
sqlx::query_as!(
Request,
HttpRequest,
r#"
SELECT id, workspace_id, created_at, updated_at, deleted_at, name, url, method, body, headers
FROM requests
WHERE workspace_id = ?;
SELECT id, workspace_id, created_at, updated_at, deleted_at, name, url, method, body,
headers AS "headers!: sqlx::types::Json<Vec<HttpRequestHeader>>"
FROM http_requests
WHERE workspace_id = ?
"#,
workspace_id,
)
@@ -147,12 +171,66 @@ pub async fn find_requests(
.await
}
pub async fn get_request(id: &str, pool: &Pool<Sqlite>) -> Result<Request, sqlx::Error> {
pub async fn get_request(id: &str, pool: &Pool<Sqlite>) -> Result<HttpRequest, sqlx::Error> {
sqlx::query_as!(
Request,
HttpRequest,
r#"
SELECT id, workspace_id, created_at, updated_at, deleted_at, name, url, method, body, headers
FROM requests
SELECT id, workspace_id, created_at, updated_at, deleted_at, name, url, method, body,
headers AS "headers!: sqlx::types::Json<Vec<HttpRequestHeader>>"
FROM http_requests
WHERE id = ?
ORDER BY created_at DESC
"#,
id,
)
.fetch_one(pool)
.await
}
pub async fn create_response(
request_id: &str,
elapsed: i64,
url: &str,
status: i64,
status_reason: Option<&str>,
body: &str,
headers: Vec<HttpResponseHeader>,
pool: &Pool<Sqlite>,
) -> Result<HttpResponse, sqlx::Error> {
let req = get_request(request_id, pool)
.await
.expect("Failed to get request");
let id = generate_id("rp");
let headers_json = Json(headers);
sqlx::query!(
r#"
INSERT INTO http_responses (id, request_id, workspace_id, elapsed, url, status, status_reason, body, headers)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);
"#,
id,
request_id,
req.workspace_id,
elapsed,
url,
status,
status_reason,
body,
headers_json,
)
.execute(pool)
.await
.expect("Failed to insert new response");
get_response(&id, pool).await
}
pub async fn get_response(id: &str, pool: &Pool<Sqlite>) -> Result<HttpResponse, sqlx::Error> {
sqlx::query_as!(
HttpResponse,
r#"
SELECT id, workspace_id, request_id, updated_at, deleted_at, created_at, status, status_reason, body, elapsed, url,
headers AS "headers!: sqlx::types::Json<Vec<HttpResponseHeader>>"
FROM http_responses
WHERE id = ?
"#,
id,
@@ -161,6 +239,56 @@ pub async fn get_request(id: &str, pool: &Pool<Sqlite>) -> Result<Request, sqlx:
.await
}
pub async fn find_responses(
request_id: &str,
pool: &Pool<Sqlite>,
) -> Result<Vec<HttpResponse>, sqlx::Error> {
sqlx::query_as!(
HttpResponse,
r#"
SELECT id, workspace_id, request_id, updated_at, deleted_at, created_at, status, status_reason, body, elapsed, url,
headers AS "headers!: sqlx::types::Json<Vec<HttpResponseHeader>>"
FROM http_responses
WHERE request_id = ?
ORDER BY created_at DESC
"#,
request_id,
)
.fetch_all(pool)
.await
}
pub async fn delete_response(id: &str, pool: &Pool<Sqlite>) -> Result<(), sqlx::Error> {
let _ = sqlx::query!(
r#"
DELETE FROM http_responses
WHERE id = ?
"#,
id,
)
.execute(pool)
.await;
Ok(())
}
pub async fn delete_all_responses(
request_id: &str,
pool: &Pool<Sqlite>,
) -> Result<(), sqlx::Error> {
let _ = sqlx::query!(
r#"
DELETE FROM http_responses
WHERE request_id = ?
"#,
request_id,
)
.execute(pool)
.await;
Ok(())
}
fn generate_id(prefix: &str) -> String {
format!(
"{prefix}_{}",