mirror of
https://github.com/mountain-loop/yaak.git
synced 2026-04-20 15:51:23 +02:00
CLI command architecture and DB-backed model update syncing (#397)
This commit is contained in:
3
.gitignore
vendored
3
.gitignore
vendored
@@ -51,3 +51,6 @@ flatpak-repo/
|
|||||||
flatpak/flatpak-builder-tools/
|
flatpak/flatpak-builder-tools/
|
||||||
flatpak/cargo-sources.json
|
flatpak/cargo-sources.json
|
||||||
flatpak/node-sources.json
|
flatpak/node-sources.json
|
||||||
|
|
||||||
|
# Local Codex desktop env state
|
||||||
|
.codex/environments/environment.toml
|
||||||
|
|||||||
116
Cargo.lock
generated
116
Cargo.lock
generated
@@ -221,6 +221,21 @@ dependencies = [
|
|||||||
"zbus",
|
"zbus",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "assert_cmd"
|
||||||
|
version = "2.1.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9c5bcfa8749ac45dd12cb11055aeeb6b27a3895560d60d71e3c23bf979e60514"
|
||||||
|
dependencies = [
|
||||||
|
"anstyle",
|
||||||
|
"bstr",
|
||||||
|
"libc",
|
||||||
|
"predicates",
|
||||||
|
"predicates-core",
|
||||||
|
"predicates-tree",
|
||||||
|
"wait-timeout",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "async-broadcast"
|
name = "async-broadcast"
|
||||||
version = "0.7.2"
|
version = "0.7.2"
|
||||||
@@ -639,6 +654,17 @@ dependencies = [
|
|||||||
"alloc-stdlib",
|
"alloc-stdlib",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bstr"
|
||||||
|
version = "1.12.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab"
|
||||||
|
dependencies = [
|
||||||
|
"memchr",
|
||||||
|
"regex-automata",
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bumpalo"
|
name = "bumpalo"
|
||||||
version = "3.18.1"
|
version = "3.18.1"
|
||||||
@@ -1366,6 +1392,12 @@ dependencies = [
|
|||||||
"cipher",
|
"cipher",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "difflib"
|
||||||
|
version = "0.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "digest"
|
name = "digest"
|
||||||
version = "0.10.7"
|
version = "0.10.7"
|
||||||
@@ -1744,6 +1776,15 @@ dependencies = [
|
|||||||
"miniz_oxide",
|
"miniz_oxide",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "float-cmp"
|
||||||
|
version = "0.10.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8"
|
||||||
|
dependencies = [
|
||||||
|
"num-traits",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "fnv"
|
name = "fnv"
|
||||||
version = "1.0.7"
|
version = "1.0.7"
|
||||||
@@ -3496,6 +3537,12 @@ dependencies = [
|
|||||||
"minimal-lexical",
|
"minimal-lexical",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "normalize-line-endings"
|
||||||
|
version = "0.3.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "notify"
|
name = "notify"
|
||||||
version = "8.0.0"
|
version = "8.0.0"
|
||||||
@@ -4373,6 +4420,36 @@ version = "0.1.1"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
|
checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "predicates"
|
||||||
|
version = "3.1.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ada8f2932f28a27ee7b70dd6c1c39ea0675c55a36879ab92f3a715eaa1e63cfe"
|
||||||
|
dependencies = [
|
||||||
|
"anstyle",
|
||||||
|
"difflib",
|
||||||
|
"float-cmp",
|
||||||
|
"normalize-line-endings",
|
||||||
|
"predicates-core",
|
||||||
|
"regex 1.11.1",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "predicates-core"
|
||||||
|
version = "1.0.10"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "cad38746f3166b4031b1a0d39ad9f954dd291e7854fcc0eed52ee41a0b50d144"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "predicates-tree"
|
||||||
|
version = "1.0.13"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d0de1b847b39c8131db0467e9df1ff60e6d0562ab8e9a16e568ad0fdb372e2f2"
|
||||||
|
dependencies = [
|
||||||
|
"predicates-core",
|
||||||
|
"termtree",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "proc-macro-crate"
|
name = "proc-macro-crate"
|
||||||
version = "1.3.1"
|
version = "1.3.1"
|
||||||
@@ -6411,6 +6488,12 @@ dependencies = [
|
|||||||
"winapi-util",
|
"winapi-util",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "termtree"
|
||||||
|
version = "0.5.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "thiserror"
|
name = "thiserror"
|
||||||
version = "1.0.69"
|
version = "1.0.69"
|
||||||
@@ -7184,6 +7267,15 @@ dependencies = [
|
|||||||
"libc",
|
"libc",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wait-timeout"
|
||||||
|
version = "0.2.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11"
|
||||||
|
dependencies = [
|
||||||
|
"libc",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "walkdir"
|
name = "walkdir"
|
||||||
version = "2.5.0"
|
version = "2.5.0"
|
||||||
@@ -8147,6 +8239,24 @@ dependencies = [
|
|||||||
"rustix 1.0.7",
|
"rustix 1.0.7",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "yaak"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"async-trait",
|
||||||
|
"log 0.4.29",
|
||||||
|
"md5 0.8.0",
|
||||||
|
"serde_json",
|
||||||
|
"thiserror 2.0.17",
|
||||||
|
"tokio",
|
||||||
|
"yaak-crypto",
|
||||||
|
"yaak-http",
|
||||||
|
"yaak-models",
|
||||||
|
"yaak-plugins",
|
||||||
|
"yaak-templates",
|
||||||
|
"yaak-tls",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "yaak-api"
|
name = "yaak-api"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
@@ -8198,6 +8308,7 @@ dependencies = [
|
|||||||
"ts-rs",
|
"ts-rs",
|
||||||
"url",
|
"url",
|
||||||
"uuid",
|
"uuid",
|
||||||
|
"yaak",
|
||||||
"yaak-api",
|
"yaak-api",
|
||||||
"yaak-common",
|
"yaak-common",
|
||||||
"yaak-core",
|
"yaak-core",
|
||||||
@@ -8222,12 +8333,17 @@ dependencies = [
|
|||||||
name = "yaak-cli"
|
name = "yaak-cli"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"assert_cmd",
|
||||||
"clap",
|
"clap",
|
||||||
"dirs",
|
"dirs",
|
||||||
"env_logger",
|
"env_logger",
|
||||||
"log 0.4.29",
|
"log 0.4.29",
|
||||||
|
"predicates",
|
||||||
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
|
"tempfile",
|
||||||
"tokio",
|
"tokio",
|
||||||
|
"yaak",
|
||||||
"yaak-crypto",
|
"yaak-crypto",
|
||||||
"yaak-http",
|
"yaak-http",
|
||||||
"yaak-models",
|
"yaak-models",
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
[workspace]
|
[workspace]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
members = [
|
members = [
|
||||||
|
"crates/yaak",
|
||||||
# Shared crates (no Tauri dependency)
|
# Shared crates (no Tauri dependency)
|
||||||
"crates/yaak-core",
|
"crates/yaak-core",
|
||||||
"crates/yaak-common",
|
"crates/yaak-common",
|
||||||
@@ -47,6 +48,7 @@ ts-rs = "11.1.0"
|
|||||||
|
|
||||||
# Internal crates - shared
|
# Internal crates - shared
|
||||||
yaak-core = { path = "crates/yaak-core" }
|
yaak-core = { path = "crates/yaak-core" }
|
||||||
|
yaak = { path = "crates/yaak" }
|
||||||
yaak-common = { path = "crates/yaak-common" }
|
yaak-common = { path = "crates/yaak-common" }
|
||||||
yaak-crypto = { path = "crates/yaak-crypto" }
|
yaak-crypto = { path = "crates/yaak-crypto" }
|
||||||
yaak-git = { path = "crates/yaak-git" }
|
yaak-git = { path = "crates/yaak-git" }
|
||||||
|
|||||||
@@ -13,10 +13,17 @@ clap = { version = "4", features = ["derive"] }
|
|||||||
dirs = "6"
|
dirs = "6"
|
||||||
env_logger = "0.11"
|
env_logger = "0.11"
|
||||||
log = { workspace = true }
|
log = { workspace = true }
|
||||||
|
serde = { workspace = true }
|
||||||
serde_json = { workspace = true }
|
serde_json = { workspace = true }
|
||||||
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
|
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
|
||||||
|
yaak = { workspace = true }
|
||||||
yaak-crypto = { workspace = true }
|
yaak-crypto = { workspace = true }
|
||||||
yaak-http = { workspace = true }
|
yaak-http = { workspace = true }
|
||||||
yaak-models = { workspace = true }
|
yaak-models = { workspace = true }
|
||||||
yaak-plugins = { workspace = true }
|
yaak-plugins = { workspace = true }
|
||||||
yaak-templates = { workspace = true }
|
yaak-templates = { workspace = true }
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
assert_cmd = "2"
|
||||||
|
predicates = "3"
|
||||||
|
tempfile = "3"
|
||||||
|
|||||||
340
crates-cli/yaak-cli/PLAN.md
Normal file
340
crates-cli/yaak-cli/PLAN.md
Normal file
@@ -0,0 +1,340 @@
|
|||||||
|
# CLI Command Architecture Plan
|
||||||
|
|
||||||
|
## Goal
|
||||||
|
|
||||||
|
Redesign the yaak-cli command structure to use a resource-oriented `<resource> <action>`
|
||||||
|
pattern that scales well, is discoverable, and supports both human and LLM workflows.
|
||||||
|
|
||||||
|
## Status Snapshot
|
||||||
|
|
||||||
|
Current branch state:
|
||||||
|
|
||||||
|
- Modular CLI structure with command modules and shared `CliContext`
|
||||||
|
- Resource/action hierarchy in place for:
|
||||||
|
- `workspace list|show|create|update|delete`
|
||||||
|
- `request list|show|create|update|send|delete`
|
||||||
|
- `folder list|show|create|update|delete`
|
||||||
|
- `environment list|show|create|update|delete`
|
||||||
|
- Top-level `send` exists as a request-send shortcut (not yet flexible request/folder/workspace resolution)
|
||||||
|
- Legacy `get` command removed
|
||||||
|
- JSON create/update flow implemented (`--json` and positional JSON shorthand)
|
||||||
|
- No `request schema` command yet
|
||||||
|
|
||||||
|
Progress checklist:
|
||||||
|
|
||||||
|
- [x] Phase 1 complete
|
||||||
|
- [x] Phase 2 complete
|
||||||
|
- [x] Phase 3 complete
|
||||||
|
- [ ] Phase 4 complete
|
||||||
|
- [ ] Phase 5 complete
|
||||||
|
- [ ] Phase 6 complete
|
||||||
|
|
||||||
|
## Command Architecture
|
||||||
|
|
||||||
|
### Design Principles
|
||||||
|
|
||||||
|
- **Resource-oriented**: top-level commands are nouns, subcommands are verbs
|
||||||
|
- **Polymorphic requests**: `request` covers HTTP, gRPC, and WebSocket — the CLI
|
||||||
|
resolves the type via `get_any_request` and adapts behavior accordingly
|
||||||
|
- **Simple creation, full-fidelity via JSON**: human-friendly flags for basic creation,
|
||||||
|
`--json` for full control (targeted at LLM and scripting workflows)
|
||||||
|
- **Runtime schema introspection**: `request schema` outputs JSON Schema for the request
|
||||||
|
models, with dynamic auth fields populated from loaded plugins at runtime
|
||||||
|
- **Destructive actions require confirmation**: `delete` commands prompt for user
|
||||||
|
confirmation before proceeding. Can be bypassed with `--yes` / `-y` for scripting
|
||||||
|
|
||||||
|
### Commands
|
||||||
|
|
||||||
|
```
|
||||||
|
# Top-level shortcut
|
||||||
|
yaakcli send <id> [-e <env_id>] # id can be a request, folder, or workspace
|
||||||
|
|
||||||
|
# Resource commands
|
||||||
|
yaakcli workspace list
|
||||||
|
yaakcli workspace show <id>
|
||||||
|
yaakcli workspace create --name <name>
|
||||||
|
yaakcli workspace create --json '{"name": "My Workspace"}'
|
||||||
|
yaakcli workspace create '{"name": "My Workspace"}' # positional JSON shorthand
|
||||||
|
yaakcli workspace update --json '{"id": "wk_abc", "name": "New Name"}'
|
||||||
|
yaakcli workspace delete <id>
|
||||||
|
|
||||||
|
yaakcli request list <workspace_id>
|
||||||
|
yaakcli request show <id>
|
||||||
|
yaakcli request create <workspace_id> --name <name> --url <url> [--method GET]
|
||||||
|
yaakcli request create --json '{"workspaceId": "wk_abc", "url": "..."}'
|
||||||
|
yaakcli request update --json '{"id": "rq_abc", "url": "https://new.com"}'
|
||||||
|
yaakcli request send <id> [-e <env_id>]
|
||||||
|
yaakcli request delete <id>
|
||||||
|
yaakcli request schema <http|grpc|websocket>
|
||||||
|
|
||||||
|
yaakcli folder list <workspace_id>
|
||||||
|
yaakcli folder show <id>
|
||||||
|
yaakcli folder create <workspace_id> --name <name>
|
||||||
|
yaakcli folder create --json '{"workspaceId": "wk_abc", "name": "Auth"}'
|
||||||
|
yaakcli folder update --json '{"id": "fl_abc", "name": "New Name"}'
|
||||||
|
yaakcli folder delete <id>
|
||||||
|
|
||||||
|
yaakcli environment list <workspace_id>
|
||||||
|
yaakcli environment show <id>
|
||||||
|
yaakcli environment create <workspace_id> --name <name>
|
||||||
|
yaakcli environment create --json '{"workspaceId": "wk_abc", "name": "Production"}'
|
||||||
|
yaakcli environment update --json '{"id": "ev_abc", ...}'
|
||||||
|
yaakcli environment delete <id>
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
### `send` — Top-Level Shortcut
|
||||||
|
|
||||||
|
`yaakcli send <id>` is a convenience alias that accepts any sendable ID. It tries
|
||||||
|
each type in order via DB lookups (short-circuiting on first match):
|
||||||
|
|
||||||
|
1. Request (HTTP, gRPC, or WebSocket via `get_any_request`)
|
||||||
|
2. Folder (sends all requests in the folder)
|
||||||
|
3. Workspace (sends all requests in the workspace)
|
||||||
|
|
||||||
|
ID prefixes exist (e.g. `rq_`, `fl_`, `wk_`) but are not relied upon — resolution
|
||||||
|
is purely by DB lookup.
|
||||||
|
|
||||||
|
`request send <id>` is the same but restricted to request IDs only.
|
||||||
|
|
||||||
|
### Request Send — Polymorphic Behavior
|
||||||
|
|
||||||
|
`send` means "execute this request" regardless of protocol:
|
||||||
|
|
||||||
|
- **HTTP**: send request, print response, exit
|
||||||
|
- **gRPC**: invoke the method; for streaming, stream output to stdout until done/Ctrl+C
|
||||||
|
- **WebSocket**: connect, stream messages to stdout until closed/Ctrl+C
|
||||||
|
|
||||||
|
### `request schema` — Runtime JSON Schema
|
||||||
|
|
||||||
|
Outputs a JSON Schema describing the full request shape, including dynamic fields:
|
||||||
|
|
||||||
|
1. Generate base schema from `schemars::JsonSchema` derive on the Rust model structs
|
||||||
|
2. Load plugins, collect auth strategy definitions and their form inputs
|
||||||
|
3. Merge plugin-defined auth fields into the `authentication` property as a `oneOf`
|
||||||
|
4. Output the combined schema as JSON
|
||||||
|
|
||||||
|
This lets an LLM call `schema`, read the shape, and construct valid JSON for
|
||||||
|
`create --json` or `update --json`.
|
||||||
|
|
||||||
|
## Implementation Steps
|
||||||
|
|
||||||
|
### Phase 1: Restructure commands (no new functionality)
|
||||||
|
|
||||||
|
Refactor `main.rs` into the new resource/action pattern using clap subcommand nesting.
|
||||||
|
Existing behavior stays the same, just reorganized. Remove the `get` command.
|
||||||
|
|
||||||
|
1. Create module structure: `commands/workspace.rs`, `commands/request.rs`, etc.
|
||||||
|
2. Define nested clap enums:
|
||||||
|
```rust
|
||||||
|
enum Commands {
|
||||||
|
Send(SendArgs),
|
||||||
|
Workspace(WorkspaceArgs),
|
||||||
|
Request(RequestArgs),
|
||||||
|
Folder(FolderArgs),
|
||||||
|
Environment(EnvironmentArgs),
|
||||||
|
}
|
||||||
|
```
|
||||||
|
3. Move existing `Workspaces` logic into `workspace list`
|
||||||
|
4. Move existing `Requests` logic into `request list`
|
||||||
|
5. Move existing `Send` logic into `request send`
|
||||||
|
6. Move existing `Create` logic into `request create`
|
||||||
|
7. Delete the `Get` command entirely
|
||||||
|
8. Extract shared setup (DB init, plugin init, encryption) into a reusable context struct
|
||||||
|
|
||||||
|
### Phase 2: Add missing CRUD commands
|
||||||
|
|
||||||
|
Status: complete
|
||||||
|
|
||||||
|
1. `workspace show <id>`
|
||||||
|
2. `workspace create --name <name>` (and `--json`)
|
||||||
|
3. `workspace update --json`
|
||||||
|
4. `workspace delete <id>`
|
||||||
|
5. `request show <id>` (JSON output of the full request model)
|
||||||
|
6. `request delete <id>`
|
||||||
|
7. `folder list <workspace_id>`
|
||||||
|
8. `folder show <id>`
|
||||||
|
9. `folder create <workspace_id> --name <name>` (and `--json`)
|
||||||
|
10. `folder update --json`
|
||||||
|
11. `folder delete <id>`
|
||||||
|
12. `environment list <workspace_id>`
|
||||||
|
13. `environment show <id>`
|
||||||
|
14. `environment create <workspace_id> --name <name>` (and `--json`)
|
||||||
|
15. `environment update --json`
|
||||||
|
16. `environment delete <id>`
|
||||||
|
|
||||||
|
### Phase 3: JSON input for create/update
|
||||||
|
|
||||||
|
Both commands accept JSON via `--json <string>` or as a positional argument (detected
|
||||||
|
by leading `{`). They follow the same upsert pattern as the plugin API.
|
||||||
|
|
||||||
|
- **`create --json`**: JSON must include `workspaceId`. Must NOT include `id` (or
|
||||||
|
use empty string `""`). Deserializes into the model with defaults for missing fields,
|
||||||
|
then upserts (insert).
|
||||||
|
- **`update --json`**: JSON must include `id`. Performs a fetch-merge-upsert:
|
||||||
|
1. Fetch the existing model from DB
|
||||||
|
2. Serialize it to `serde_json::Value`
|
||||||
|
3. Deep-merge the user's partial JSON on top (JSON Merge Patch / RFC 7386 semantics)
|
||||||
|
4. Deserialize back into the typed model
|
||||||
|
5. Upsert (update)
|
||||||
|
|
||||||
|
This matches how the MCP server plugin already does it (fetch existing, spread, override),
|
||||||
|
but the CLI handles the merge server-side so callers don't have to.
|
||||||
|
|
||||||
|
Setting a field to `null` removes it (for `Option<T>` fields), per RFC 7386.
|
||||||
|
|
||||||
|
Implementation:
|
||||||
|
1. Add `--json` flag and positional JSON detection to `create` commands
|
||||||
|
2. Add `update` commands with required `--json` flag
|
||||||
|
3. Implement JSON merge utility (or use `json-patch` crate)
|
||||||
|
|
||||||
|
### Phase 4: Runtime schema generation
|
||||||
|
|
||||||
|
1. Add `schemars` dependency to `yaak-models`
|
||||||
|
2. Derive `JsonSchema` on `HttpRequest`, `GrpcRequest`, `WebsocketRequest`, and their
|
||||||
|
nested types (`HttpRequestHeader`, `HttpUrlParameter`, etc.)
|
||||||
|
3. Implement `request schema` command:
|
||||||
|
- Generate base schema from schemars
|
||||||
|
- Query plugins for auth strategy form inputs
|
||||||
|
- Convert plugin form inputs into JSON Schema properties
|
||||||
|
- Merge into the `authentication` field
|
||||||
|
- Print to stdout
|
||||||
|
|
||||||
|
### Phase 5: Polymorphic send
|
||||||
|
|
||||||
|
1. Update `request send` to use `get_any_request` to resolve the request type
|
||||||
|
2. Match on `AnyRequest` variant and dispatch to the appropriate sender:
|
||||||
|
- `AnyRequest::HttpRequest` — existing HTTP send logic
|
||||||
|
- `AnyRequest::GrpcRequest` — gRPC invoke (future implementation)
|
||||||
|
- `AnyRequest::WebsocketRequest` — WebSocket connect (future implementation)
|
||||||
|
3. gRPC and WebSocket send can initially return "not yet implemented" errors
|
||||||
|
|
||||||
|
### Phase 6: Top-level `send` and folder/workspace send
|
||||||
|
|
||||||
|
1. Add top-level `yaakcli send <id>` command
|
||||||
|
2. Resolve ID by trying DB lookups in order: any_request → folder → workspace
|
||||||
|
3. For folder: list all requests in folder, send each
|
||||||
|
4. For workspace: list all requests in workspace, send each
|
||||||
|
5. Add execution options: `--sequential` (default), `--parallel`, `--fail-fast`
|
||||||
|
|
||||||
|
## Execution Plan (PR Slices)
|
||||||
|
|
||||||
|
### PR 1: Command tree refactor + compatibility aliases
|
||||||
|
|
||||||
|
Scope:
|
||||||
|
|
||||||
|
1. Introduce `commands/` modules and a `CliContext` for shared setup
|
||||||
|
2. Add new clap hierarchy (`workspace`, `request`, `folder`, `environment`)
|
||||||
|
3. Route existing behavior into:
|
||||||
|
- `workspace list`
|
||||||
|
- `request list <workspace_id>`
|
||||||
|
- `request send <id>`
|
||||||
|
- `request create <workspace_id> ...`
|
||||||
|
4. Keep compatibility aliases temporarily:
|
||||||
|
- `workspaces` -> `workspace list`
|
||||||
|
- `requests <workspace_id>` -> `request list <workspace_id>`
|
||||||
|
- `create ...` -> `request create ...`
|
||||||
|
5. Remove `get` and update help text
|
||||||
|
|
||||||
|
Acceptance criteria:
|
||||||
|
|
||||||
|
- `yaakcli --help` shows noun/verb structure
|
||||||
|
- Existing list/send/create workflows still work
|
||||||
|
- No behavior change in HTTP send output format
|
||||||
|
|
||||||
|
### PR 2: CRUD surface area
|
||||||
|
|
||||||
|
Scope:
|
||||||
|
|
||||||
|
1. Implement `show/create/update/delete` for `workspace`, `request`, `folder`, `environment`
|
||||||
|
2. Ensure delete commands require confirmation by default (`--yes` bypass)
|
||||||
|
3. Normalize output format for list/show/create/update/delete responses
|
||||||
|
|
||||||
|
Acceptance criteria:
|
||||||
|
|
||||||
|
- Every command listed in the "Commands" section parses and executes
|
||||||
|
- Delete commands are safe by default in interactive terminals
|
||||||
|
- `--yes` supports non-interactive scripts
|
||||||
|
|
||||||
|
### PR 3: JSON input + merge patch semantics
|
||||||
|
|
||||||
|
Scope:
|
||||||
|
|
||||||
|
1. Add shared parser for `--json` and positional JSON shorthand
|
||||||
|
2. Add `create --json` and `update --json` for all mutable resources
|
||||||
|
3. Implement server-side RFC 7386 merge patch behavior
|
||||||
|
4. Add guardrails:
|
||||||
|
- `create --json`: reject non-empty `id`
|
||||||
|
- `update --json`: require `id`
|
||||||
|
|
||||||
|
Acceptance criteria:
|
||||||
|
|
||||||
|
- Partial `update --json` only modifies provided keys
|
||||||
|
- `null` clears optional values
|
||||||
|
- Invalid JSON and missing required fields return actionable errors
|
||||||
|
|
||||||
|
### PR 4: `request schema` and plugin auth integration
|
||||||
|
|
||||||
|
Scope:
|
||||||
|
|
||||||
|
1. Add `schemars` to `yaak-models` and derive `JsonSchema` for request models
|
||||||
|
2. Implement `request schema <http|grpc|websocket>`
|
||||||
|
3. Merge plugin auth form inputs into `authentication` schema at runtime
|
||||||
|
|
||||||
|
Acceptance criteria:
|
||||||
|
|
||||||
|
- Command prints valid JSON schema
|
||||||
|
- Schema reflects installed auth providers at runtime
|
||||||
|
- No panic when plugins fail to initialize (degrade gracefully)
|
||||||
|
|
||||||
|
### PR 5: Polymorphic request send
|
||||||
|
|
||||||
|
Scope:
|
||||||
|
|
||||||
|
1. Replace request resolution in `request send` with `get_any_request`
|
||||||
|
2. Dispatch by request type
|
||||||
|
3. Keep HTTP fully functional
|
||||||
|
4. Return explicit NYI errors for gRPC/WebSocket until implemented
|
||||||
|
|
||||||
|
Acceptance criteria:
|
||||||
|
|
||||||
|
- HTTP behavior remains unchanged
|
||||||
|
- gRPC/WebSocket IDs are recognized and return explicit status
|
||||||
|
|
||||||
|
### PR 6: Top-level `send` + bulk execution
|
||||||
|
|
||||||
|
Scope:
|
||||||
|
|
||||||
|
1. Add top-level `send <id>` for request/folder/workspace IDs
|
||||||
|
2. Implement folder/workspace fan-out execution
|
||||||
|
3. Add execution controls: `--sequential`, `--parallel`, `--fail-fast`
|
||||||
|
|
||||||
|
Acceptance criteria:
|
||||||
|
|
||||||
|
- Correct ID dispatch order: request -> folder -> workspace
|
||||||
|
- Deterministic summary output (success/failure counts)
|
||||||
|
- Non-zero exit code when any request fails (unless explicitly configured otherwise)
|
||||||
|
|
||||||
|
## Validation Matrix
|
||||||
|
|
||||||
|
1. CLI parsing tests for every command path (including aliases while retained)
|
||||||
|
2. Integration tests against temp SQLite DB for CRUD flows
|
||||||
|
3. Snapshot tests for output text where scripting compatibility matters
|
||||||
|
4. Manual smoke tests:
|
||||||
|
- Send HTTP request with template/rendered vars
|
||||||
|
- JSON create/update for each resource
|
||||||
|
- Delete confirmation and `--yes`
|
||||||
|
- Top-level `send` on request/folder/workspace
|
||||||
|
|
||||||
|
## Open Questions
|
||||||
|
|
||||||
|
1. Should compatibility aliases (`workspaces`, `requests`, `create`) be removed immediately or after one release cycle?
|
||||||
|
2. For bulk `send`, should default behavior stop on first failure or continue and summarize?
|
||||||
|
3. Should command output default to human-readable text with an optional `--format json`, or return JSON by default for `show`/`list`?
|
||||||
|
4. For `request schema`, should plugin-derived auth fields be namespaced by plugin ID to avoid collisions?
|
||||||
|
|
||||||
|
## Crate Changes
|
||||||
|
|
||||||
|
- **yaak-cli**: restructure into modules, new clap hierarchy
|
||||||
|
- **yaak-models**: add `schemars` dependency, derive `JsonSchema` on model structs
|
||||||
|
(current derives: `Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS`)
|
||||||
87
crates-cli/yaak-cli/README.md
Normal file
87
crates-cli/yaak-cli/README.md
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
# yaak-cli
|
||||||
|
|
||||||
|
Command-line interface for Yaak.
|
||||||
|
|
||||||
|
## Command Overview
|
||||||
|
|
||||||
|
Current top-level commands:
|
||||||
|
|
||||||
|
```text
|
||||||
|
yaakcli send <request_id>
|
||||||
|
yaakcli workspace list
|
||||||
|
yaakcli workspace show <workspace_id>
|
||||||
|
yaakcli workspace create --name <name>
|
||||||
|
yaakcli workspace create --json '{"name":"My Workspace"}'
|
||||||
|
yaakcli workspace create '{"name":"My Workspace"}'
|
||||||
|
yaakcli workspace update --json '{"id":"wk_abc","description":"Updated"}'
|
||||||
|
yaakcli workspace delete <workspace_id> [--yes]
|
||||||
|
yaakcli request list <workspace_id>
|
||||||
|
yaakcli request show <request_id>
|
||||||
|
yaakcli request send <request_id>
|
||||||
|
yaakcli request create <workspace_id> --name <name> --url <url> [--method GET]
|
||||||
|
yaakcli request create --json '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
|
||||||
|
yaakcli request create '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
|
||||||
|
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
|
||||||
|
yaakcli request delete <request_id> [--yes]
|
||||||
|
yaakcli folder list <workspace_id>
|
||||||
|
yaakcli folder show <folder_id>
|
||||||
|
yaakcli folder create <workspace_id> --name <name>
|
||||||
|
yaakcli folder create --json '{"workspaceId":"wk_abc","name":"Auth"}'
|
||||||
|
yaakcli folder create '{"workspaceId":"wk_abc","name":"Auth"}'
|
||||||
|
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
|
||||||
|
yaakcli folder delete <folder_id> [--yes]
|
||||||
|
yaakcli environment list <workspace_id>
|
||||||
|
yaakcli environment show <environment_id>
|
||||||
|
yaakcli environment create <workspace_id> --name <name>
|
||||||
|
yaakcli environment create --json '{"workspaceId":"wk_abc","name":"Production"}'
|
||||||
|
yaakcli environment create '{"workspaceId":"wk_abc","name":"Production"}'
|
||||||
|
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
|
||||||
|
yaakcli environment delete <environment_id> [--yes]
|
||||||
|
```
|
||||||
|
|
||||||
|
Global options:
|
||||||
|
|
||||||
|
- `--data-dir <path>`: use a custom data directory
|
||||||
|
- `-e, --environment <id>`: environment to use during request rendering/sending
|
||||||
|
- `-v, --verbose`: verbose logging and send output
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
|
||||||
|
- `send` is currently a shortcut for sending an HTTP request ID.
|
||||||
|
- `delete` commands prompt for confirmation unless `--yes` is provided.
|
||||||
|
- In non-interactive mode, `delete` commands require `--yes`.
|
||||||
|
- `create` and `update` commands support `--json` and positional JSON shorthand.
|
||||||
|
- `update` uses JSON Merge Patch semantics (RFC 7386) for partial updates.
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
yaakcli workspace list
|
||||||
|
yaakcli workspace create --name "My Workspace"
|
||||||
|
yaakcli workspace show wk_abc
|
||||||
|
yaakcli workspace update --json '{"id":"wk_abc","description":"Team workspace"}'
|
||||||
|
yaakcli request list wk_abc
|
||||||
|
yaakcli request show rq_abc
|
||||||
|
yaakcli request create wk_abc --name "Users" --url "https://api.example.com/users"
|
||||||
|
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
|
||||||
|
yaakcli request send rq_abc -e ev_abc
|
||||||
|
yaakcli request delete rq_abc --yes
|
||||||
|
yaakcli folder create wk_abc --name "Auth"
|
||||||
|
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
|
||||||
|
yaakcli environment create wk_abc --name "Production"
|
||||||
|
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Roadmap
|
||||||
|
|
||||||
|
Planned command expansion (request schema and polymorphic send) is tracked in `PLAN.md`.
|
||||||
|
|
||||||
|
When command behavior changes, update this README and verify with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cargo run -q -p yaak-cli -- --help
|
||||||
|
cargo run -q -p yaak-cli -- request --help
|
||||||
|
cargo run -q -p yaak-cli -- workspace --help
|
||||||
|
cargo run -q -p yaak-cli -- folder --help
|
||||||
|
cargo run -q -p yaak-cli -- environment --help
|
||||||
|
```
|
||||||
282
crates-cli/yaak-cli/src/cli.rs
Normal file
282
crates-cli/yaak-cli/src/cli.rs
Normal file
@@ -0,0 +1,282 @@
|
|||||||
|
use clap::{Args, Parser, Subcommand};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(name = "yaakcli")]
|
||||||
|
#[command(about = "Yaak CLI - API client from the command line")]
|
||||||
|
pub struct Cli {
|
||||||
|
/// Use a custom data directory
|
||||||
|
#[arg(long, global = true)]
|
||||||
|
pub data_dir: Option<PathBuf>,
|
||||||
|
|
||||||
|
/// Environment ID to use for variable substitution
|
||||||
|
#[arg(long, short, global = true)]
|
||||||
|
pub environment: Option<String>,
|
||||||
|
|
||||||
|
/// Enable verbose logging
|
||||||
|
#[arg(long, short, global = true)]
|
||||||
|
pub verbose: bool,
|
||||||
|
|
||||||
|
#[command(subcommand)]
|
||||||
|
pub command: Commands,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Top-level subcommands. `Send` is a legacy shortcut for `request send`;
// the rest group CRUD operations per model type.
#[derive(Subcommand)]
pub enum Commands {
    /// Send an HTTP request by ID
    Send(SendArgs),

    /// Workspace commands
    Workspace(WorkspaceArgs),

    /// Request commands
    Request(RequestArgs),

    /// Folder commands
    Folder(FolderArgs),

    /// Environment commands
    Environment(EnvironmentArgs),
}
|
||||||
|
|
||||||
|
// Arguments for the legacy top-level `send` command.
#[derive(Args)]
pub struct SendArgs {
    /// Request ID
    pub request_id: String,
}
|
||||||
|
|
||||||
|
// Wrapper carrying the `workspace` subcommand selection.
#[derive(Args)]
pub struct WorkspaceArgs {
    #[command(subcommand)]
    pub command: WorkspaceCommands,
}
|
||||||
|
|
||||||
|
// Subcommands for `yaakcli workspace`. Create/Update accept the payload either
// via `--json` or as a bare positional JSON argument; the two forms are
// mutually exclusive (`conflicts_with`).
#[derive(Subcommand)]
pub enum WorkspaceCommands {
    /// List all workspaces
    List,

    /// Show a workspace as JSON
    Show {
        /// Workspace ID
        workspace_id: String,
    },

    /// Create a workspace
    Create {
        /// Workspace name
        #[arg(short, long)]
        name: Option<String>,

        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },

    /// Update a workspace
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },

    /// Delete a workspace
    Delete {
        /// Workspace ID
        workspace_id: String,

        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}
|
||||||
|
|
||||||
|
// Wrapper carrying the `request` subcommand selection.
#[derive(Args)]
pub struct RequestArgs {
    #[command(subcommand)]
    pub command: RequestCommands,
}
|
||||||
|
|
||||||
|
// Subcommands for `yaakcli request`. Unlike workspace, `Create` reuses the
// positional `workspace_id` slot as a JSON-payload shorthand (the handler
// distinguishes the two at runtime).
#[derive(Subcommand)]
pub enum RequestCommands {
    /// List requests in a workspace
    List {
        /// Workspace ID
        workspace_id: String,
    },

    /// Show a request as JSON
    Show {
        /// Request ID
        request_id: String,
    },

    /// Send an HTTP request by ID
    Send {
        /// Request ID
        request_id: String,
    },

    /// Create a new HTTP request
    Create {
        /// Workspace ID (or positional JSON payload shorthand)
        workspace_id: Option<String>,

        /// Request name
        #[arg(short, long)]
        name: Option<String>,

        /// HTTP method
        #[arg(short, long)]
        method: Option<String>,

        /// URL
        #[arg(short, long)]
        url: Option<String>,

        /// JSON payload
        #[arg(long)]
        json: Option<String>,
    },

    /// Update an HTTP request
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },

    /// Delete a request
    Delete {
        /// Request ID
        request_id: String,

        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}
|
||||||
|
|
||||||
|
// Wrapper carrying the `folder` subcommand selection.
#[derive(Args)]
pub struct FolderArgs {
    #[command(subcommand)]
    pub command: FolderCommands,
}
|
||||||
|
|
||||||
|
// Subcommands for `yaakcli folder`. `Create` reuses the positional
// `workspace_id` slot as a JSON-payload shorthand (resolved by the handler).
#[derive(Subcommand)]
pub enum FolderCommands {
    /// List folders in a workspace
    List {
        /// Workspace ID
        workspace_id: String,
    },

    /// Show a folder as JSON
    Show {
        /// Folder ID
        folder_id: String,
    },

    /// Create a folder
    Create {
        /// Workspace ID (or positional JSON payload shorthand)
        workspace_id: Option<String>,

        /// Folder name
        #[arg(short, long)]
        name: Option<String>,

        /// JSON payload
        #[arg(long)]
        json: Option<String>,
    },

    /// Update a folder
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },

    /// Delete a folder
    Delete {
        /// Folder ID
        folder_id: String,

        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}
|
||||||
|
|
||||||
|
// Wrapper carrying the `environment` subcommand selection.
#[derive(Args)]
pub struct EnvironmentArgs {
    #[command(subcommand)]
    pub command: EnvironmentCommands,
}
|
||||||
|
|
||||||
|
// Subcommands for `yaakcli environment`. `Create` reuses the positional
// `workspace_id` slot as a JSON-payload shorthand (resolved by the handler).
#[derive(Subcommand)]
pub enum EnvironmentCommands {
    /// List environments in a workspace
    List {
        /// Workspace ID
        workspace_id: String,
    },

    /// Show an environment as JSON
    Show {
        /// Environment ID
        environment_id: String,
    },

    /// Create an environment
    Create {
        /// Workspace ID (or positional JSON payload shorthand)
        workspace_id: Option<String>,

        /// Environment name
        #[arg(short, long)]
        name: Option<String>,

        /// JSON payload
        #[arg(long)]
        json: Option<String>,
    },

    /// Update an environment
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },

    /// Delete an environment
    Delete {
        /// Environment ID
        environment_id: String,

        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}
|
||||||
159
crates-cli/yaak-cli/src/commands/environment.rs
Normal file
159
crates-cli/yaak-cli/src/commands/environment.rs
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
use crate::cli::{EnvironmentArgs, EnvironmentCommands};
|
||||||
|
use crate::context::CliContext;
|
||||||
|
use crate::utils::confirm::confirm_delete;
|
||||||
|
use crate::utils::json::{
|
||||||
|
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
|
||||||
|
validate_create_id,
|
||||||
|
};
|
||||||
|
use yaak_models::models::Environment;
|
||||||
|
use yaak_models::util::UpdateSource;
|
||||||
|
|
||||||
|
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||||
|
|
||||||
|
/// Dispatch an `environment` subcommand and map the outcome to a process exit
/// code: 0 on success, 1 on failure (error text goes to stderr).
pub fn run(ctx: &CliContext, args: EnvironmentArgs) -> i32 {
    let result = match args.command {
        EnvironmentCommands::List { workspace_id } => list(ctx, &workspace_id),
        EnvironmentCommands::Show { environment_id } => show(ctx, &environment_id),
        EnvironmentCommands::Create { workspace_id, name, json } => {
            create(ctx, workspace_id, name, json)
        }
        EnvironmentCommands::Update { json, json_input } => update(ctx, json, json_input),
        EnvironmentCommands::Delete { environment_id, yes } => delete(ctx, &environment_id, yes),
    };

    match result {
        Ok(()) => 0,
        Err(error) => {
            eprintln!("Error: {error}");
            1
        }
    }
}
|
||||||
|
|
||||||
|
/// Print one line per environment in the workspace: `id - name (parent_model)`.
fn list(ctx: &CliContext, workspace_id: &str) -> CommandResult {
    let environments = ctx
        .db()
        .list_environments_ensure_base(workspace_id)
        .map_err(|e| format!("Failed to list environments: {e}"))?;

    if environments.is_empty() {
        println!("No environments found in workspace {}", workspace_id);
        return Ok(());
    }

    environments
        .iter()
        .for_each(|env| println!("{} - {} ({})", env.id, env.name, env.parent_model));
    Ok(())
}
|
||||||
|
|
||||||
|
/// Fetch one environment by ID and print it to stdout as pretty-printed JSON.
fn show(ctx: &CliContext, environment_id: &str) -> CommandResult {
    let environment = ctx
        .db()
        .get_environment(environment_id)
        .map_err(|e| format!("Failed to get environment: {e}"))?;
    let output =
        serde_json::to_string_pretty(&environment).map_err(|e| format!("Failed to serialize environment: {e}"))?;
    println!("{output}");
    Ok(())
}
|
||||||
|
|
||||||
|
fn create(
|
||||||
|
ctx: &CliContext,
|
||||||
|
workspace_id: Option<String>,
|
||||||
|
name: Option<String>,
|
||||||
|
json: Option<String>,
|
||||||
|
) -> CommandResult {
|
||||||
|
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
|
||||||
|
return Err(
|
||||||
|
"environment create cannot combine workspace_id with --json payload".to_string()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let payload = parse_optional_json(
|
||||||
|
json,
|
||||||
|
workspace_id.clone().filter(|v| is_json_shorthand(v)),
|
||||||
|
"environment create",
|
||||||
|
)?;
|
||||||
|
|
||||||
|
if let Some(payload) = payload {
|
||||||
|
if name.is_some() {
|
||||||
|
return Err("environment create cannot combine --name with JSON payload".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_create_id(&payload, "environment")?;
|
||||||
|
let mut environment: Environment =
|
||||||
|
serde_json::from_value(payload)
|
||||||
|
.map_err(|e| format!("Failed to parse environment create JSON: {e}"))?;
|
||||||
|
|
||||||
|
if environment.workspace_id.is_empty() {
|
||||||
|
return Err("environment create JSON requires non-empty \"workspaceId\"".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
if environment.parent_model.is_empty() {
|
||||||
|
environment.parent_model = "environment".to_string();
|
||||||
|
}
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_environment(&environment, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create environment: {e}"))?;
|
||||||
|
|
||||||
|
println!("Created environment: {}", created.id);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let workspace_id = workspace_id.ok_or_else(|| {
|
||||||
|
"environment create requires workspace_id unless JSON payload is provided".to_string()
|
||||||
|
})?;
|
||||||
|
let name = name
|
||||||
|
.ok_or_else(|| "environment create requires --name unless JSON payload is provided".to_string())?;
|
||||||
|
|
||||||
|
let environment = Environment {
|
||||||
|
workspace_id,
|
||||||
|
name,
|
||||||
|
parent_model: "environment".to_string(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_environment(&environment, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create environment: {e}"))?;
|
||||||
|
|
||||||
|
println!("Created environment: {}", created.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Apply a JSON merge patch to an existing environment. The patch must carry
/// the target `id`; the merged model is persisted via upsert.
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) -> CommandResult {
    let patch = parse_required_json(json, json_input, "environment update")?;
    let id = require_id(&patch, "environment update")?;

    let existing = ctx
        .db()
        .get_environment(&id)
        .map_err(|e| format!("Failed to get environment for update: {e}"))?;
    let updated = apply_merge_patch(&existing, &patch, &id, "environment update")?;

    let saved = ctx
        .db()
        .upsert_environment(&updated, &UpdateSource::Sync)
        .map_err(|e| format!("Failed to update environment: {e}"))?;

    println!("Updated environment: {}", saved.id);
    Ok(())
}
|
||||||
|
|
||||||
|
/// Delete an environment by ID, prompting for confirmation unless `yes` is
/// set. Declining the prompt aborts with exit code 0 (not an error).
fn delete(ctx: &CliContext, environment_id: &str, yes: bool) -> CommandResult {
    if !yes && !confirm_delete("environment", environment_id) {
        println!("Aborted");
        return Ok(());
    }

    let deleted = ctx
        .db()
        .delete_environment_by_id(environment_id, &UpdateSource::Sync)
        .map_err(|e| format!("Failed to delete environment: {e}"))?;

    println!("Deleted environment: {}", deleted.id);
    Ok(())
}
|
||||||
139
crates-cli/yaak-cli/src/commands/folder.rs
Normal file
139
crates-cli/yaak-cli/src/commands/folder.rs
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
use crate::cli::{FolderArgs, FolderCommands};
|
||||||
|
use crate::context::CliContext;
|
||||||
|
use crate::utils::confirm::confirm_delete;
|
||||||
|
use crate::utils::json::{
|
||||||
|
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
|
||||||
|
validate_create_id,
|
||||||
|
};
|
||||||
|
use yaak_models::models::Folder;
|
||||||
|
use yaak_models::util::UpdateSource;
|
||||||
|
|
||||||
|
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||||
|
|
||||||
|
/// Dispatch a `folder` subcommand and map the outcome to a process exit code:
/// 0 on success, 1 on failure (error text goes to stderr).
pub fn run(ctx: &CliContext, args: FolderArgs) -> i32 {
    let result = match args.command {
        FolderCommands::List { workspace_id } => list(ctx, &workspace_id),
        FolderCommands::Show { folder_id } => show(ctx, &folder_id),
        FolderCommands::Create { workspace_id, name, json } => {
            create(ctx, workspace_id, name, json)
        }
        FolderCommands::Update { json, json_input } => update(ctx, json, json_input),
        FolderCommands::Delete { folder_id, yes } => delete(ctx, &folder_id, yes),
    };

    match result {
        Ok(()) => 0,
        Err(error) => {
            eprintln!("Error: {error}");
            1
        }
    }
}
|
||||||
|
|
||||||
|
/// Print one line per folder in the workspace: `id - name`.
fn list(ctx: &CliContext, workspace_id: &str) -> CommandResult {
    let folders =
        ctx.db().list_folders(workspace_id).map_err(|e| format!("Failed to list folders: {e}"))?;

    if folders.is_empty() {
        println!("No folders found in workspace {}", workspace_id);
        return Ok(());
    }

    folders.iter().for_each(|folder| println!("{} - {}", folder.id, folder.name));
    Ok(())
}
|
||||||
|
|
||||||
|
/// Fetch one folder by ID and print it to stdout as pretty-printed JSON.
fn show(ctx: &CliContext, folder_id: &str) -> CommandResult {
    let folder = ctx.db().get_folder(folder_id).map_err(|e| format!("Failed to get folder: {e}"))?;
    let output =
        serde_json::to_string_pretty(&folder).map_err(|e| format!("Failed to serialize folder: {e}"))?;
    println!("{output}");
    Ok(())
}
|
||||||
|
|
||||||
|
fn create(
|
||||||
|
ctx: &CliContext,
|
||||||
|
workspace_id: Option<String>,
|
||||||
|
name: Option<String>,
|
||||||
|
json: Option<String>,
|
||||||
|
) -> CommandResult {
|
||||||
|
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
|
||||||
|
return Err("folder create cannot combine workspace_id with --json payload".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
let payload = parse_optional_json(
|
||||||
|
json,
|
||||||
|
workspace_id.clone().filter(|v| is_json_shorthand(v)),
|
||||||
|
"folder create",
|
||||||
|
)?;
|
||||||
|
|
||||||
|
if let Some(payload) = payload {
|
||||||
|
if name.is_some() {
|
||||||
|
return Err("folder create cannot combine --name with JSON payload".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_create_id(&payload, "folder")?;
|
||||||
|
let folder: Folder =
|
||||||
|
serde_json::from_value(payload).map_err(|e| format!("Failed to parse folder create JSON: {e}"))?;
|
||||||
|
|
||||||
|
if folder.workspace_id.is_empty() {
|
||||||
|
return Err("folder create JSON requires non-empty \"workspaceId\"".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_folder(&folder, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create folder: {e}"))?;
|
||||||
|
|
||||||
|
println!("Created folder: {}", created.id);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let workspace_id = workspace_id
|
||||||
|
.ok_or_else(|| "folder create requires workspace_id unless JSON payload is provided".to_string())?;
|
||||||
|
let name =
|
||||||
|
name.ok_or_else(|| "folder create requires --name unless JSON payload is provided".to_string())?;
|
||||||
|
|
||||||
|
let folder = Folder { workspace_id, name, ..Default::default() };
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_folder(&folder, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create folder: {e}"))?;
|
||||||
|
|
||||||
|
println!("Created folder: {}", created.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Apply a JSON merge patch to an existing folder. The patch must carry the
/// target `id`; the merged model is persisted via upsert.
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) -> CommandResult {
    let patch = parse_required_json(json, json_input, "folder update")?;
    let id = require_id(&patch, "folder update")?;

    let existing = ctx
        .db()
        .get_folder(&id)
        .map_err(|e| format!("Failed to get folder for update: {e}"))?;
    let updated = apply_merge_patch(&existing, &patch, &id, "folder update")?;

    let saved = ctx
        .db()
        .upsert_folder(&updated, &UpdateSource::Sync)
        .map_err(|e| format!("Failed to update folder: {e}"))?;

    println!("Updated folder: {}", saved.id);
    Ok(())
}
|
||||||
|
|
||||||
|
/// Delete a folder by ID, prompting for confirmation unless `yes` is set.
/// Declining the prompt aborts with exit code 0 (not an error).
fn delete(ctx: &CliContext, folder_id: &str, yes: bool) -> CommandResult {
    if !yes && !confirm_delete("folder", folder_id) {
        println!("Aborted");
        return Ok(());
    }

    let deleted = ctx
        .db()
        .delete_folder_by_id(folder_id, &UpdateSource::Sync)
        .map_err(|e| format!("Failed to delete folder: {e}"))?;

    println!("Deleted folder: {}", deleted.id);
    Ok(())
}
|
||||||
5
crates-cli/yaak-cli/src/commands/mod.rs
Normal file
5
crates-cli/yaak-cli/src/commands/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
pub mod environment;
|
||||||
|
pub mod folder;
|
||||||
|
pub mod request;
|
||||||
|
pub mod send;
|
||||||
|
pub mod workspace;
|
||||||
233
crates-cli/yaak-cli/src/commands/request.rs
Normal file
233
crates-cli/yaak-cli/src/commands/request.rs
Normal file
@@ -0,0 +1,233 @@
|
|||||||
|
use crate::cli::{RequestArgs, RequestCommands};
|
||||||
|
use crate::context::CliContext;
|
||||||
|
use crate::utils::confirm::confirm_delete;
|
||||||
|
use crate::utils::json::{
|
||||||
|
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
|
||||||
|
validate_create_id,
|
||||||
|
};
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
use yaak::send::{SendHttpRequestByIdWithPluginsParams, send_http_request_by_id_with_plugins};
|
||||||
|
use yaak_models::models::HttpRequest;
|
||||||
|
use yaak_models::util::UpdateSource;
|
||||||
|
use yaak_plugins::events::PluginContext;
|
||||||
|
|
||||||
|
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||||
|
|
||||||
|
pub async fn run(
|
||||||
|
ctx: &CliContext,
|
||||||
|
args: RequestArgs,
|
||||||
|
environment: Option<&str>,
|
||||||
|
verbose: bool,
|
||||||
|
) -> i32 {
|
||||||
|
let result = match args.command {
|
||||||
|
RequestCommands::List { workspace_id } => list(ctx, &workspace_id),
|
||||||
|
RequestCommands::Show { request_id } => show(ctx, &request_id),
|
||||||
|
RequestCommands::Send { request_id } => {
|
||||||
|
return match send_request_by_id(ctx, &request_id, environment, verbose).await {
|
||||||
|
Ok(()) => 0,
|
||||||
|
Err(error) => {
|
||||||
|
eprintln!("Error: {error}");
|
||||||
|
1
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
RequestCommands::Create { workspace_id, name, method, url, json } => {
|
||||||
|
create(ctx, workspace_id, name, method, url, json)
|
||||||
|
}
|
||||||
|
RequestCommands::Update { json, json_input } => update(ctx, json, json_input),
|
||||||
|
RequestCommands::Delete { request_id, yes } => delete(ctx, &request_id, yes),
|
||||||
|
};
|
||||||
|
|
||||||
|
match result {
|
||||||
|
Ok(()) => 0,
|
||||||
|
Err(error) => {
|
||||||
|
eprintln!("Error: {error}");
|
||||||
|
1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Print one line per HTTP request in the workspace: `id - METHOD name`.
fn list(ctx: &CliContext, workspace_id: &str) -> CommandResult {
    let requests = ctx
        .db()
        .list_http_requests(workspace_id)
        .map_err(|e| format!("Failed to list requests: {e}"))?;
    if requests.is_empty() {
        println!("No requests found in workspace {}", workspace_id);
    } else {
        for request in requests {
            println!("{} - {} {}", request.id, request.method, request.name);
        }
    }
    Ok(())
}
|
||||||
|
|
||||||
|
fn create(
|
||||||
|
ctx: &CliContext,
|
||||||
|
workspace_id: Option<String>,
|
||||||
|
name: Option<String>,
|
||||||
|
method: Option<String>,
|
||||||
|
url: Option<String>,
|
||||||
|
json: Option<String>,
|
||||||
|
) -> CommandResult {
|
||||||
|
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
|
||||||
|
return Err("request create cannot combine workspace_id with --json payload".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
let payload = parse_optional_json(
|
||||||
|
json,
|
||||||
|
workspace_id.clone().filter(|v| is_json_shorthand(v)),
|
||||||
|
"request create",
|
||||||
|
)?;
|
||||||
|
|
||||||
|
if let Some(payload) = payload {
|
||||||
|
if name.is_some() || method.is_some() || url.is_some() {
|
||||||
|
return Err("request create cannot combine simple flags with JSON payload".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_create_id(&payload, "request")?;
|
||||||
|
let request: HttpRequest = serde_json::from_value(payload)
|
||||||
|
.map_err(|e| format!("Failed to parse request create JSON: {e}"))?;
|
||||||
|
|
||||||
|
if request.workspace_id.is_empty() {
|
||||||
|
return Err("request create JSON requires non-empty \"workspaceId\"".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create request: {e}"))?;
|
||||||
|
|
||||||
|
println!("Created request: {}", created.id);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let workspace_id = workspace_id.ok_or_else(|| {
|
||||||
|
"request create requires workspace_id unless JSON payload is provided".to_string()
|
||||||
|
})?;
|
||||||
|
let name = name.unwrap_or_default();
|
||||||
|
let url = url.unwrap_or_default();
|
||||||
|
let method = method.unwrap_or_else(|| "GET".to_string());
|
||||||
|
|
||||||
|
let request = HttpRequest {
|
||||||
|
workspace_id,
|
||||||
|
name,
|
||||||
|
method: method.to_uppercase(),
|
||||||
|
url,
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create request: {e}"))?;
|
||||||
|
|
||||||
|
println!("Created request: {}", created.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Apply a JSON merge patch to an existing HTTP request. The patch must carry
/// the target `id`; the merged model is persisted via upsert.
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) -> CommandResult {
    let patch = parse_required_json(json, json_input, "request update")?;
    let id = require_id(&patch, "request update")?;

    let existing = ctx
        .db()
        .get_http_request(&id)
        .map_err(|e| format!("Failed to get request for update: {e}"))?;
    let updated = apply_merge_patch(&existing, &patch, &id, "request update")?;

    let saved = ctx
        .db()
        .upsert_http_request(&updated, &UpdateSource::Sync)
        .map_err(|e| format!("Failed to update request: {e}"))?;

    println!("Updated request: {}", saved.id);
    Ok(())
}
|
||||||
|
|
||||||
|
/// Fetch one HTTP request by ID and print it to stdout as pretty-printed JSON.
fn show(ctx: &CliContext, request_id: &str) -> CommandResult {
    let request = ctx
        .db()
        .get_http_request(request_id)
        .map_err(|e| format!("Failed to get request: {e}"))?;
    let output =
        serde_json::to_string_pretty(&request).map_err(|e| format!("Failed to serialize request: {e}"))?;
    println!("{output}");
    Ok(())
}
|
||||||
|
|
||||||
|
/// Delete an HTTP request by ID, prompting for confirmation unless `yes` is
/// set. Declining the prompt aborts with exit code 0 (not an error).
fn delete(ctx: &CliContext, request_id: &str, yes: bool) -> CommandResult {
    if !yes && !confirm_delete("request", request_id) {
        println!("Aborted");
        return Ok(());
    }

    let deleted = ctx
        .db()
        .delete_http_request_by_id(request_id, &UpdateSource::Sync)
        .map_err(|e| format!("Failed to delete request: {e}"))?;
    println!("Deleted request: {}", deleted.id);
    Ok(())
}
|
||||||
|
|
||||||
|
/// Send a request by ID and print response in the same format as legacy `send`.
///
/// Flow: load the request, spin up a background task that drains send events
/// (echoed to stdout only when `verbose`), run the plugin-aware send, then
/// print the status line, headers (verbose-only), and the body.
pub async fn send_request_by_id(
    ctx: &CliContext,
    request_id: &str,
    environment: Option<&str>,
    verbose: bool,
) -> Result<(), String> {
    let request =
        ctx.db().get_http_request(request_id).map_err(|e| format!("Failed to get request: {e}"))?;

    let plugin_context = PluginContext::new(None, Some(request.workspace_id.clone()));

    // Events must be drained even when not verbose, otherwise a full channel
    // (capacity 100) could stall the sender.
    let (event_tx, mut event_rx) = mpsc::channel(100);
    let event_handle = tokio::spawn(async move {
        while let Some(event) = event_rx.recv().await {
            if verbose {
                println!("{}", event);
            }
        }
    });
    // Response bodies are written under the CLI data dir.
    let response_dir = ctx.data_dir().join("responses");

    let result = send_http_request_by_id_with_plugins(SendHttpRequestByIdWithPluginsParams {
        query_manager: ctx.query_manager(),
        blob_manager: ctx.blob_manager(),
        request_id,
        environment_id: environment,
        update_source: UpdateSource::Sync,
        cookie_jar_id: None,
        response_dir: &response_dir,
        emit_events_to: Some(event_tx),
        plugin_manager: ctx.plugin_manager(),
        encryption_manager: ctx.encryption_manager.clone(),
        plugin_context: &plugin_context,
        cancelled_rx: None,
        connection_manager: None,
    })
    .await;

    // `event_tx` was moved into the params and dropped when the send finished,
    // so the channel is closed and the drain task exits on its own.
    let _ = event_handle.await;
    let result = result.map_err(|e| e.to_string())?;

    if verbose {
        println!();
    }
    println!(
        "HTTP {} {}",
        result.response.status,
        result.response.status_reason.as_deref().unwrap_or("")
    );
    if verbose {
        for header in &result.response.headers {
            println!("{}: {}", header.name, header.value);
        }
        println!();
    }
    // NOTE(review): a non-UTF-8 (binary) response body makes this fail with an
    // error instead of printing lossily — confirm that is intended.
    let body = String::from_utf8(result.response_body)
        .map_err(|e| format!("Failed to read response body: {e}"))?;
    println!("{}", body);
    Ok(())
}
|
||||||
18
crates-cli/yaak-cli/src/commands/send.rs
Normal file
18
crates-cli/yaak-cli/src/commands/send.rs
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
use crate::cli::SendArgs;
|
||||||
|
use crate::commands::request;
|
||||||
|
use crate::context::CliContext;
|
||||||
|
|
||||||
|
/// Legacy top-level `send` command: delegates to `request send` and maps the
/// outcome to a process exit code (0 = success, 1 = failure).
pub async fn run(
    ctx: &CliContext,
    args: SendArgs,
    environment: Option<&str>,
    verbose: bool,
) -> i32 {
    match request::send_request_by_id(ctx, &args.request_id, environment, verbose).await {
        Ok(()) => 0,
        Err(error) => {
            eprintln!("Error: {error}");
            1
        }
    }
}
|
||||||
121
crates-cli/yaak-cli/src/commands/workspace.rs
Normal file
121
crates-cli/yaak-cli/src/commands/workspace.rs
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
use crate::cli::{WorkspaceArgs, WorkspaceCommands};
|
||||||
|
use crate::context::CliContext;
|
||||||
|
use crate::utils::confirm::confirm_delete;
|
||||||
|
use crate::utils::json::{
|
||||||
|
apply_merge_patch, parse_optional_json, parse_required_json, require_id, validate_create_id,
|
||||||
|
};
|
||||||
|
use yaak_models::models::Workspace;
|
||||||
|
use yaak_models::util::UpdateSource;
|
||||||
|
|
||||||
|
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||||
|
|
||||||
|
/// Dispatch a `workspace` subcommand and map the outcome to a process exit
/// code: 0 on success, 1 on failure (error text goes to stderr).
pub fn run(ctx: &CliContext, args: WorkspaceArgs) -> i32 {
    let result = match args.command {
        WorkspaceCommands::List => list(ctx),
        WorkspaceCommands::Show { workspace_id } => show(ctx, &workspace_id),
        WorkspaceCommands::Create { name, json, json_input } => create(ctx, name, json, json_input),
        WorkspaceCommands::Update { json, json_input } => update(ctx, json, json_input),
        WorkspaceCommands::Delete { workspace_id, yes } => delete(ctx, &workspace_id, yes),
    };

    match result {
        Ok(()) => 0,
        Err(error) => {
            eprintln!("Error: {error}");
            1
        }
    }
}
|
||||||
|
|
||||||
|
/// Print one line per workspace: `id - name`.
fn list(ctx: &CliContext) -> CommandResult {
    let workspaces = ctx.db().list_workspaces().map_err(|e| format!("Failed to list workspaces: {e}"))?;

    if workspaces.is_empty() {
        println!("No workspaces found");
        return Ok(());
    }

    workspaces.iter().for_each(|ws| println!("{} - {}", ws.id, ws.name));
    Ok(())
}
|
||||||
|
|
||||||
|
/// Fetch one workspace by ID and print it to stdout as pretty-printed JSON.
fn show(ctx: &CliContext, workspace_id: &str) -> CommandResult {
    let workspace = ctx
        .db()
        .get_workspace(workspace_id)
        .map_err(|e| format!("Failed to get workspace: {e}"))?;
    let output = serde_json::to_string_pretty(&workspace)
        .map_err(|e| format!("Failed to serialize workspace: {e}"))?;
    println!("{output}");
    Ok(())
}
|
||||||
|
|
||||||
|
fn create(
|
||||||
|
ctx: &CliContext,
|
||||||
|
name: Option<String>,
|
||||||
|
json: Option<String>,
|
||||||
|
json_input: Option<String>,
|
||||||
|
) -> CommandResult {
|
||||||
|
let payload = parse_optional_json(json, json_input, "workspace create")?;
|
||||||
|
|
||||||
|
if let Some(payload) = payload {
|
||||||
|
if name.is_some() {
|
||||||
|
return Err("workspace create cannot combine --name with JSON payload".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_create_id(&payload, "workspace")?;
|
||||||
|
let workspace: Workspace = serde_json::from_value(payload)
|
||||||
|
.map_err(|e| format!("Failed to parse workspace create JSON: {e}"))?;
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_workspace(&workspace, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create workspace: {e}"))?;
|
||||||
|
println!("Created workspace: {}", created.id);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let name =
|
||||||
|
name.ok_or_else(|| "workspace create requires --name unless JSON payload is provided".to_string())?;
|
||||||
|
|
||||||
|
let workspace = Workspace { name, ..Default::default() };
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_workspace(&workspace, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create workspace: {e}"))?;
|
||||||
|
println!("Created workspace: {}", created.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) -> CommandResult {
|
||||||
|
let patch = parse_required_json(json, json_input, "workspace update")?;
|
||||||
|
let id = require_id(&patch, "workspace update")?;
|
||||||
|
|
||||||
|
let existing = ctx
|
||||||
|
.db()
|
||||||
|
.get_workspace(&id)
|
||||||
|
.map_err(|e| format!("Failed to get workspace for update: {e}"))?;
|
||||||
|
let updated = apply_merge_patch(&existing, &patch, &id, "workspace update")?;
|
||||||
|
|
||||||
|
let saved = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_workspace(&updated, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to update workspace: {e}"))?;
|
||||||
|
|
||||||
|
println!("Updated workspace: {}", saved.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn delete(ctx: &CliContext, workspace_id: &str, yes: bool) -> CommandResult {
|
||||||
|
if !yes && !confirm_delete("workspace", workspace_id) {
|
||||||
|
println!("Aborted");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let deleted = ctx
|
||||||
|
.db()
|
||||||
|
.delete_workspace_by_id(workspace_id, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to delete workspace: {e}"))?;
|
||||||
|
println!("Deleted workspace: {}", deleted.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
96
crates-cli/yaak-cli/src/context.rs
Normal file
96
crates-cli/yaak-cli/src/context.rs
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::sync::Arc;
|
||||||
|
use yaak_crypto::manager::EncryptionManager;
|
||||||
|
use yaak_models::blob_manager::BlobManager;
|
||||||
|
use yaak_models::db_context::DbContext;
|
||||||
|
use yaak_models::query_manager::QueryManager;
|
||||||
|
use yaak_plugins::events::PluginContext;
|
||||||
|
use yaak_plugins::manager::PluginManager;
|
||||||
|
|
||||||
|
pub struct CliContext {
|
||||||
|
data_dir: PathBuf,
|
||||||
|
query_manager: QueryManager,
|
||||||
|
blob_manager: BlobManager,
|
||||||
|
pub encryption_manager: Arc<EncryptionManager>,
|
||||||
|
plugin_manager: Option<Arc<PluginManager>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CliContext {
|
||||||
|
pub async fn initialize(data_dir: PathBuf, app_id: &str, with_plugins: bool) -> Self {
|
||||||
|
let db_path = data_dir.join("db.sqlite");
|
||||||
|
let blob_path = data_dir.join("blobs.sqlite");
|
||||||
|
|
||||||
|
let (query_manager, blob_manager, _rx) = yaak_models::init_standalone(&db_path, &blob_path)
|
||||||
|
.expect("Failed to initialize database");
|
||||||
|
|
||||||
|
let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));
|
||||||
|
|
||||||
|
let plugin_manager = if with_plugins {
|
||||||
|
let vendored_plugin_dir = data_dir.join("vendored-plugins");
|
||||||
|
let installed_plugin_dir = data_dir.join("installed-plugins");
|
||||||
|
let node_bin_path = PathBuf::from("node");
|
||||||
|
|
||||||
|
let plugin_runtime_main =
|
||||||
|
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
|
||||||
|
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
||||||
|
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
|
||||||
|
});
|
||||||
|
|
||||||
|
let plugin_manager = Arc::new(
|
||||||
|
PluginManager::new(
|
||||||
|
vendored_plugin_dir,
|
||||||
|
installed_plugin_dir,
|
||||||
|
node_bin_path,
|
||||||
|
plugin_runtime_main,
|
||||||
|
false,
|
||||||
|
)
|
||||||
|
.await,
|
||||||
|
);
|
||||||
|
|
||||||
|
let plugins = query_manager.connect().list_plugins().unwrap_or_default();
|
||||||
|
if !plugins.is_empty() {
|
||||||
|
let errors = plugin_manager
|
||||||
|
.initialize_all_plugins(plugins, &PluginContext::new_empty())
|
||||||
|
.await;
|
||||||
|
for (plugin_dir, error_msg) in errors {
|
||||||
|
eprintln!(
|
||||||
|
"Warning: Failed to initialize plugin '{}': {}",
|
||||||
|
plugin_dir, error_msg
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(plugin_manager)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
Self { data_dir, query_manager, blob_manager, encryption_manager, plugin_manager }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn data_dir(&self) -> &Path {
|
||||||
|
&self.data_dir
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn db(&self) -> DbContext<'_> {
|
||||||
|
self.query_manager.connect()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn query_manager(&self) -> &QueryManager {
|
||||||
|
&self.query_manager
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn blob_manager(&self) -> &BlobManager {
|
||||||
|
&self.blob_manager
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn plugin_manager(&self) -> Arc<PluginManager> {
|
||||||
|
self.plugin_manager.clone().expect("Plugin manager was not initialized for this command")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn shutdown(&self) {
|
||||||
|
if let Some(plugin_manager) = &self.plugin_manager {
|
||||||
|
plugin_manager.terminate().await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,409 +1,49 @@
|
|||||||
use clap::{Parser, Subcommand};
|
mod cli;
|
||||||
use log::info;
|
mod commands;
|
||||||
use serde_json::Value;
|
mod context;
|
||||||
use std::collections::BTreeMap;
|
mod utils;
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use tokio::sync::mpsc;
|
|
||||||
use yaak_crypto::manager::EncryptionManager;
|
|
||||||
use yaak_http::path_placeholders::apply_path_placeholders;
|
|
||||||
use yaak_http::sender::{HttpSender, ReqwestSender};
|
|
||||||
use yaak_http::types::{SendableHttpRequest, SendableHttpRequestOptions};
|
|
||||||
use yaak_models::models::{HttpRequest, HttpRequestHeader, HttpUrlParameter};
|
|
||||||
use yaak_models::render::make_vars_hashmap;
|
|
||||||
use yaak_models::util::UpdateSource;
|
|
||||||
use yaak_plugins::events::{PluginContext, RenderPurpose};
|
|
||||||
use yaak_plugins::manager::PluginManager;
|
|
||||||
use yaak_plugins::template_callback::PluginTemplateCallback;
|
|
||||||
use yaak_templates::{RenderOptions, parse_and_render, render_json_value_raw};
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
use clap::Parser;
|
||||||
#[command(name = "yaakcli")]
|
use cli::{Cli, Commands, RequestCommands};
|
||||||
#[command(about = "Yaak CLI - API client from the command line")]
|
use context::CliContext;
|
||||||
struct Cli {
|
|
||||||
/// Use a custom data directory
|
|
||||||
#[arg(long, global = true)]
|
|
||||||
data_dir: Option<PathBuf>,
|
|
||||||
|
|
||||||
/// Environment ID to use for variable substitution
|
|
||||||
#[arg(long, short, global = true)]
|
|
||||||
environment: Option<String>,
|
|
||||||
|
|
||||||
/// Enable verbose logging
|
|
||||||
#[arg(long, short, global = true)]
|
|
||||||
verbose: bool,
|
|
||||||
|
|
||||||
#[command(subcommand)]
|
|
||||||
command: Commands,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Subcommand)]
|
|
||||||
enum Commands {
|
|
||||||
/// List all workspaces
|
|
||||||
Workspaces,
|
|
||||||
/// List requests in a workspace
|
|
||||||
Requests {
|
|
||||||
/// Workspace ID
|
|
||||||
workspace_id: String,
|
|
||||||
},
|
|
||||||
/// Send an HTTP request by ID
|
|
||||||
Send {
|
|
||||||
/// Request ID
|
|
||||||
request_id: String,
|
|
||||||
},
|
|
||||||
/// Send a GET request to a URL
|
|
||||||
Get {
|
|
||||||
/// URL to request
|
|
||||||
url: String,
|
|
||||||
},
|
|
||||||
/// Create a new HTTP request
|
|
||||||
Create {
|
|
||||||
/// Workspace ID
|
|
||||||
workspace_id: String,
|
|
||||||
/// Request name
|
|
||||||
#[arg(short, long)]
|
|
||||||
name: String,
|
|
||||||
/// HTTP method
|
|
||||||
#[arg(short, long, default_value = "GET")]
|
|
||||||
method: String,
|
|
||||||
/// URL
|
|
||||||
#[arg(short, long)]
|
|
||||||
url: String,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Render an HTTP request with template variables and plugin functions
|
|
||||||
async fn render_http_request(
|
|
||||||
r: &HttpRequest,
|
|
||||||
environment_chain: Vec<yaak_models::models::Environment>,
|
|
||||||
cb: &PluginTemplateCallback,
|
|
||||||
opt: &RenderOptions,
|
|
||||||
) -> yaak_templates::error::Result<HttpRequest> {
|
|
||||||
let vars = &make_vars_hashmap(environment_chain);
|
|
||||||
|
|
||||||
let mut url_parameters = Vec::new();
|
|
||||||
for p in r.url_parameters.clone() {
|
|
||||||
if !p.enabled {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
url_parameters.push(HttpUrlParameter {
|
|
||||||
enabled: p.enabled,
|
|
||||||
name: parse_and_render(p.name.as_str(), vars, cb, opt).await?,
|
|
||||||
value: parse_and_render(p.value.as_str(), vars, cb, opt).await?,
|
|
||||||
id: p.id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut headers = Vec::new();
|
|
||||||
for p in r.headers.clone() {
|
|
||||||
if !p.enabled {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
headers.push(HttpRequestHeader {
|
|
||||||
enabled: p.enabled,
|
|
||||||
name: parse_and_render(p.name.as_str(), vars, cb, opt).await?,
|
|
||||||
value: parse_and_render(p.value.as_str(), vars, cb, opt).await?,
|
|
||||||
id: p.id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut body = BTreeMap::new();
|
|
||||||
for (k, v) in r.body.clone() {
|
|
||||||
body.insert(k, render_json_value_raw(v, vars, cb, opt).await?);
|
|
||||||
}
|
|
||||||
|
|
||||||
let authentication = {
|
|
||||||
let mut disabled = false;
|
|
||||||
let mut auth = BTreeMap::new();
|
|
||||||
match r.authentication.get("disabled") {
|
|
||||||
Some(Value::Bool(true)) => {
|
|
||||||
disabled = true;
|
|
||||||
}
|
|
||||||
Some(Value::String(tmpl)) => {
|
|
||||||
disabled = parse_and_render(tmpl.as_str(), vars, cb, opt)
|
|
||||||
.await
|
|
||||||
.unwrap_or_default()
|
|
||||||
.is_empty();
|
|
||||||
info!(
|
|
||||||
"Rendering authentication.disabled as a template: {disabled} from \"{tmpl}\""
|
|
||||||
);
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
if disabled {
|
|
||||||
auth.insert("disabled".to_string(), Value::Bool(true));
|
|
||||||
} else {
|
|
||||||
for (k, v) in r.authentication.clone() {
|
|
||||||
if k == "disabled" {
|
|
||||||
auth.insert(k, Value::Bool(false));
|
|
||||||
} else {
|
|
||||||
auth.insert(k, render_json_value_raw(v, vars, cb, opt).await?);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
auth
|
|
||||||
};
|
|
||||||
|
|
||||||
let url = parse_and_render(r.url.clone().as_str(), vars, cb, opt).await?;
|
|
||||||
|
|
||||||
// Apply path placeholders (e.g., /users/:id -> /users/123)
|
|
||||||
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
|
|
||||||
|
|
||||||
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() {
|
async fn main() {
|
||||||
let cli = Cli::parse();
|
let Cli { data_dir, environment, verbose, command } = Cli::parse();
|
||||||
|
|
||||||
// Initialize logging
|
if verbose {
|
||||||
if cli.verbose {
|
|
||||||
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
|
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
|
||||||
}
|
}
|
||||||
|
|
||||||
// Use the same app_id for both data directory and keyring
|
|
||||||
let app_id = if cfg!(debug_assertions) { "app.yaak.desktop.dev" } else { "app.yaak.desktop" };
|
let app_id = if cfg!(debug_assertions) { "app.yaak.desktop.dev" } else { "app.yaak.desktop" };
|
||||||
|
|
||||||
let data_dir = cli.data_dir.unwrap_or_else(|| {
|
let data_dir = data_dir.unwrap_or_else(|| {
|
||||||
dirs::data_dir().expect("Could not determine data directory").join(app_id)
|
dirs::data_dir().expect("Could not determine data directory").join(app_id)
|
||||||
});
|
});
|
||||||
|
|
||||||
let db_path = data_dir.join("db.sqlite");
|
let needs_plugins = matches!(
|
||||||
let blob_path = data_dir.join("blobs.sqlite");
|
&command,
|
||||||
|
Commands::Send(_)
|
||||||
let (query_manager, _blob_manager, _rx) =
|
| Commands::Request(cli::RequestArgs { command: RequestCommands::Send { .. } })
|
||||||
yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize database");
|
|
||||||
|
|
||||||
let db = query_manager.connect();
|
|
||||||
|
|
||||||
// Initialize encryption manager for secure() template function
|
|
||||||
// Use the same app_id as the Tauri app for keyring access
|
|
||||||
let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));
|
|
||||||
|
|
||||||
// Initialize plugin manager for template functions
|
|
||||||
let vendored_plugin_dir = data_dir.join("vendored-plugins");
|
|
||||||
let installed_plugin_dir = data_dir.join("installed-plugins");
|
|
||||||
|
|
||||||
// Use system node for CLI (must be in PATH)
|
|
||||||
let node_bin_path = PathBuf::from("node");
|
|
||||||
|
|
||||||
// Find the plugin runtime - check YAAK_PLUGIN_RUNTIME env var, then fallback to development path
|
|
||||||
let plugin_runtime_main =
|
|
||||||
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
|
|
||||||
// Development fallback: look relative to crate root
|
|
||||||
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
|
||||||
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
|
|
||||||
});
|
|
||||||
|
|
||||||
// Create plugin manager (plugins may not be available in CLI context)
|
|
||||||
let plugin_manager = Arc::new(
|
|
||||||
PluginManager::new(
|
|
||||||
vendored_plugin_dir,
|
|
||||||
installed_plugin_dir,
|
|
||||||
node_bin_path,
|
|
||||||
plugin_runtime_main,
|
|
||||||
false,
|
|
||||||
)
|
|
||||||
.await,
|
|
||||||
);
|
);
|
||||||
|
|
||||||
// Initialize plugins from database
|
let context = CliContext::initialize(data_dir, app_id, needs_plugins).await;
|
||||||
let plugins = db.list_plugins().unwrap_or_default();
|
|
||||||
if !plugins.is_empty() {
|
let exit_code = match command {
|
||||||
let errors =
|
Commands::Send(args) => {
|
||||||
plugin_manager.initialize_all_plugins(plugins, &PluginContext::new_empty()).await;
|
commands::send::run(&context, args, environment.as_deref(), verbose).await
|
||||||
for (plugin_dir, error_msg) in errors {
|
|
||||||
eprintln!("Warning: Failed to initialize plugin '{}': {}", plugin_dir, error_msg);
|
|
||||||
}
|
}
|
||||||
|
Commands::Workspace(args) => commands::workspace::run(&context, args),
|
||||||
|
Commands::Request(args) => {
|
||||||
|
commands::request::run(&context, args, environment.as_deref(), verbose).await
|
||||||
|
}
|
||||||
|
Commands::Folder(args) => commands::folder::run(&context, args),
|
||||||
|
Commands::Environment(args) => commands::environment::run(&context, args),
|
||||||
|
};
|
||||||
|
|
||||||
|
context.shutdown().await;
|
||||||
|
|
||||||
|
if exit_code != 0 {
|
||||||
|
std::process::exit(exit_code);
|
||||||
}
|
}
|
||||||
|
|
||||||
match cli.command {
|
|
||||||
Commands::Workspaces => {
|
|
||||||
let workspaces = db.list_workspaces().expect("Failed to list workspaces");
|
|
||||||
if workspaces.is_empty() {
|
|
||||||
println!("No workspaces found");
|
|
||||||
} else {
|
|
||||||
for ws in workspaces {
|
|
||||||
println!("{} - {}", ws.id, ws.name);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Commands::Requests { workspace_id } => {
|
|
||||||
let requests = db.list_http_requests(&workspace_id).expect("Failed to list requests");
|
|
||||||
if requests.is_empty() {
|
|
||||||
println!("No requests found in workspace {}", workspace_id);
|
|
||||||
} else {
|
|
||||||
for req in requests {
|
|
||||||
println!("{} - {} {}", req.id, req.method, req.name);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Commands::Send { request_id } => {
|
|
||||||
let request = db.get_http_request(&request_id).expect("Failed to get request");
|
|
||||||
|
|
||||||
// Resolve environment chain for variable substitution
|
|
||||||
let environment_chain = db
|
|
||||||
.resolve_environments(
|
|
||||||
&request.workspace_id,
|
|
||||||
request.folder_id.as_deref(),
|
|
||||||
cli.environment.as_deref(),
|
|
||||||
)
|
|
||||||
.unwrap_or_default();
|
|
||||||
|
|
||||||
// Create template callback with plugin support
|
|
||||||
let plugin_context = PluginContext::new(None, Some(request.workspace_id.clone()));
|
|
||||||
let template_callback = PluginTemplateCallback::new(
|
|
||||||
plugin_manager.clone(),
|
|
||||||
encryption_manager.clone(),
|
|
||||||
&plugin_context,
|
|
||||||
RenderPurpose::Send,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Render templates in the request
|
|
||||||
let rendered_request = render_http_request(
|
|
||||||
&request,
|
|
||||||
environment_chain,
|
|
||||||
&template_callback,
|
|
||||||
&RenderOptions::throw(),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.expect("Failed to render request templates");
|
|
||||||
|
|
||||||
if cli.verbose {
|
|
||||||
println!("> {} {}", rendered_request.method, rendered_request.url);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert to sendable request
|
|
||||||
let sendable = SendableHttpRequest::from_http_request(
|
|
||||||
&rendered_request,
|
|
||||||
SendableHttpRequestOptions::default(),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.expect("Failed to build request");
|
|
||||||
|
|
||||||
// Create event channel for progress
|
|
||||||
let (event_tx, mut event_rx) = mpsc::channel(100);
|
|
||||||
|
|
||||||
// Spawn task to print events if verbose
|
|
||||||
let verbose = cli.verbose;
|
|
||||||
let verbose_handle = if verbose {
|
|
||||||
Some(tokio::spawn(async move {
|
|
||||||
while let Some(event) = event_rx.recv().await {
|
|
||||||
println!("{}", event);
|
|
||||||
}
|
|
||||||
}))
|
|
||||||
} else {
|
|
||||||
// Drain events silently
|
|
||||||
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
// Send the request
|
|
||||||
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
|
|
||||||
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
|
|
||||||
|
|
||||||
// Wait for event handler to finish
|
|
||||||
if let Some(handle) = verbose_handle {
|
|
||||||
let _ = handle.await;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Print response
|
|
||||||
if verbose {
|
|
||||||
println!();
|
|
||||||
}
|
|
||||||
println!(
|
|
||||||
"HTTP {} {}",
|
|
||||||
response.status,
|
|
||||||
response.status_reason.as_deref().unwrap_or("")
|
|
||||||
);
|
|
||||||
|
|
||||||
if verbose {
|
|
||||||
for (name, value) in &response.headers {
|
|
||||||
println!("{}: {}", name, value);
|
|
||||||
}
|
|
||||||
println!();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Print body
|
|
||||||
let (body, _stats) = response.text().await.expect("Failed to read response body");
|
|
||||||
println!("{}", body);
|
|
||||||
}
|
|
||||||
Commands::Get { url } => {
|
|
||||||
if cli.verbose {
|
|
||||||
println!("> GET {}", url);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build a simple GET request
|
|
||||||
let sendable = SendableHttpRequest {
|
|
||||||
url: url.clone(),
|
|
||||||
method: "GET".to_string(),
|
|
||||||
headers: vec![],
|
|
||||||
body: None,
|
|
||||||
options: SendableHttpRequestOptions::default(),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Create event channel for progress
|
|
||||||
let (event_tx, mut event_rx) = mpsc::channel(100);
|
|
||||||
|
|
||||||
// Spawn task to print events if verbose
|
|
||||||
let verbose = cli.verbose;
|
|
||||||
let verbose_handle = if verbose {
|
|
||||||
Some(tokio::spawn(async move {
|
|
||||||
while let Some(event) = event_rx.recv().await {
|
|
||||||
println!("{}", event);
|
|
||||||
}
|
|
||||||
}))
|
|
||||||
} else {
|
|
||||||
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
// Send the request
|
|
||||||
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
|
|
||||||
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
|
|
||||||
|
|
||||||
if let Some(handle) = verbose_handle {
|
|
||||||
let _ = handle.await;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Print response
|
|
||||||
if verbose {
|
|
||||||
println!();
|
|
||||||
}
|
|
||||||
println!(
|
|
||||||
"HTTP {} {}",
|
|
||||||
response.status,
|
|
||||||
response.status_reason.as_deref().unwrap_or("")
|
|
||||||
);
|
|
||||||
|
|
||||||
if verbose {
|
|
||||||
for (name, value) in &response.headers {
|
|
||||||
println!("{}: {}", name, value);
|
|
||||||
}
|
|
||||||
println!();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Print body
|
|
||||||
let (body, _stats) = response.text().await.expect("Failed to read response body");
|
|
||||||
println!("{}", body);
|
|
||||||
}
|
|
||||||
Commands::Create { workspace_id, name, method, url } => {
|
|
||||||
let request = HttpRequest {
|
|
||||||
workspace_id,
|
|
||||||
name,
|
|
||||||
method: method.to_uppercase(),
|
|
||||||
url,
|
|
||||||
..Default::default()
|
|
||||||
};
|
|
||||||
|
|
||||||
let created = db
|
|
||||||
.upsert_http_request(&request, &UpdateSource::Sync)
|
|
||||||
.expect("Failed to create request");
|
|
||||||
|
|
||||||
println!("Created request: {}", created.id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Terminate plugin manager gracefully
|
|
||||||
plugin_manager.terminate().await;
|
|
||||||
}
|
}
|
||||||
|
|||||||
16
crates-cli/yaak-cli/src/utils/confirm.rs
Normal file
16
crates-cli/yaak-cli/src/utils/confirm.rs
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
use std::io::{self, IsTerminal, Write};
|
||||||
|
|
||||||
|
pub fn confirm_delete(resource_name: &str, resource_id: &str) -> bool {
|
||||||
|
if !io::stdin().is_terminal() {
|
||||||
|
eprintln!("Refusing to delete in non-interactive mode without --yes");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
print!("Delete {resource_name} {resource_id}? [y/N]: ");
|
||||||
|
io::stdout().flush().expect("Failed to flush stdout");
|
||||||
|
|
||||||
|
let mut input = String::new();
|
||||||
|
io::stdin().read_line(&mut input).expect("Failed to read confirmation");
|
||||||
|
|
||||||
|
matches!(input.trim().to_lowercase().as_str(), "y" | "yes")
|
||||||
|
}
|
||||||
110
crates-cli/yaak-cli/src/utils/json.rs
Normal file
110
crates-cli/yaak-cli/src/utils/json.rs
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
use serde::Serialize;
|
||||||
|
use serde::de::DeserializeOwned;
|
||||||
|
use serde_json::{Map, Value};
|
||||||
|
|
||||||
|
type JsonResult<T> = std::result::Result<T, String>;
|
||||||
|
|
||||||
|
pub fn is_json_shorthand(input: &str) -> bool {
|
||||||
|
input.trim_start().starts_with('{')
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse_json_object(raw: &str, context: &str) -> JsonResult<Value> {
|
||||||
|
let value: Value = serde_json::from_str(raw)
|
||||||
|
.map_err(|error| format!("Invalid JSON for {context}: {error}"))?;
|
||||||
|
|
||||||
|
if !value.is_object() {
|
||||||
|
return Err(format!("JSON payload for {context} must be an object"));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse_optional_json(
|
||||||
|
json_flag: Option<String>,
|
||||||
|
json_shorthand: Option<String>,
|
||||||
|
context: &str,
|
||||||
|
) -> JsonResult<Option<Value>> {
|
||||||
|
match (json_flag, json_shorthand) {
|
||||||
|
(Some(_), Some(_)) => Err(format!(
|
||||||
|
"Cannot provide both --json and positional JSON for {context}"
|
||||||
|
)),
|
||||||
|
(Some(raw), None) => parse_json_object(&raw, context).map(Some),
|
||||||
|
(None, Some(raw)) => parse_json_object(&raw, context).map(Some),
|
||||||
|
(None, None) => Ok(None),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse_required_json(
|
||||||
|
json_flag: Option<String>,
|
||||||
|
json_shorthand: Option<String>,
|
||||||
|
context: &str,
|
||||||
|
) -> JsonResult<Value> {
|
||||||
|
parse_optional_json(json_flag, json_shorthand, context)?.ok_or_else(|| {
|
||||||
|
format!("Missing JSON payload for {context}. Use --json or positional JSON")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn require_id(payload: &Value, context: &str) -> JsonResult<String> {
|
||||||
|
payload
|
||||||
|
.get("id")
|
||||||
|
.and_then(|value| value.as_str())
|
||||||
|
.filter(|value| !value.is_empty())
|
||||||
|
.map(|value| value.to_string())
|
||||||
|
.ok_or_else(|| format!("{context} requires a non-empty \"id\" field"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn validate_create_id(payload: &Value, context: &str) -> JsonResult<()> {
|
||||||
|
let Some(id_value) = payload.get("id") else {
|
||||||
|
return Ok(());
|
||||||
|
};
|
||||||
|
|
||||||
|
match id_value {
|
||||||
|
Value::String(id) if id.is_empty() => Ok(()),
|
||||||
|
_ => Err(format!(
|
||||||
|
"{context} create JSON must omit \"id\" or set it to an empty string"
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn apply_merge_patch<T>(existing: &T, patch: &Value, id: &str, context: &str) -> JsonResult<T>
|
||||||
|
where
|
||||||
|
T: Serialize + DeserializeOwned,
|
||||||
|
{
|
||||||
|
let mut base = serde_json::to_value(existing)
|
||||||
|
.map_err(|error| format!("Failed to serialize existing model for {context}: {error}"))?;
|
||||||
|
merge_patch(&mut base, patch);
|
||||||
|
|
||||||
|
let Some(base_object) = base.as_object_mut() else {
|
||||||
|
return Err(format!("Merged payload for {context} must be an object"));
|
||||||
|
};
|
||||||
|
base_object.insert("id".to_string(), Value::String(id.to_string()));
|
||||||
|
|
||||||
|
serde_json::from_value(base)
|
||||||
|
.map_err(|error| format!("Failed to deserialize merged payload for {context}: {error}"))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn merge_patch(target: &mut Value, patch: &Value) {
|
||||||
|
match patch {
|
||||||
|
Value::Object(patch_map) => {
|
||||||
|
if !target.is_object() {
|
||||||
|
*target = Value::Object(Map::new());
|
||||||
|
}
|
||||||
|
|
||||||
|
let target_map =
|
||||||
|
target.as_object_mut().expect("merge_patch target expected to be object");
|
||||||
|
|
||||||
|
for (key, patch_value) in patch_map {
|
||||||
|
if patch_value.is_null() {
|
||||||
|
target_map.remove(key);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let target_entry = target_map.entry(key.clone()).or_insert(Value::Null);
|
||||||
|
merge_patch(target_entry, patch_value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
*target = patch.clone();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
2
crates-cli/yaak-cli/src/utils/mod.rs
Normal file
2
crates-cli/yaak-cli/src/utils/mod.rs
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
pub mod confirm;
|
||||||
|
pub mod json;
|
||||||
42
crates-cli/yaak-cli/tests/common/http_server.rs
Normal file
42
crates-cli/yaak-cli/tests/common/http_server.rs
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
use std::io::{Read, Write};
|
||||||
|
use std::net::TcpListener;
|
||||||
|
use std::thread;
|
||||||
|
|
||||||
|
pub struct TestHttpServer {
|
||||||
|
pub url: String,
|
||||||
|
handle: Option<thread::JoinHandle<()>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TestHttpServer {
|
||||||
|
pub fn spawn_ok(body: &'static str) -> Self {
|
||||||
|
let listener = TcpListener::bind("127.0.0.1:0").expect("Failed to bind test HTTP server");
|
||||||
|
let addr = listener.local_addr().expect("Failed to get local addr");
|
||||||
|
let url = format!("http://{addr}/test");
|
||||||
|
let body_bytes = body.as_bytes().to_vec();
|
||||||
|
|
||||||
|
let handle = thread::spawn(move || {
|
||||||
|
if let Ok((mut stream, _)) = listener.accept() {
|
||||||
|
let mut request_buf = [0u8; 4096];
|
||||||
|
let _ = stream.read(&mut request_buf);
|
||||||
|
|
||||||
|
let response = format!(
|
||||||
|
"HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\nContent-Length: {}\r\nConnection: close\r\n\r\n",
|
||||||
|
body_bytes.len()
|
||||||
|
);
|
||||||
|
let _ = stream.write_all(response.as_bytes());
|
||||||
|
let _ = stream.write_all(&body_bytes);
|
||||||
|
let _ = stream.flush();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
Self { url, handle: Some(handle) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for TestHttpServer {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
if let Some(handle) = self.handle.take() {
|
||||||
|
let _ = handle.join();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
62
crates-cli/yaak-cli/tests/common/mod.rs
Normal file
62
crates-cli/yaak-cli/tests/common/mod.rs
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
#![allow(dead_code)]
|
||||||
|
|
||||||
|
pub mod http_server;
|
||||||
|
|
||||||
|
use assert_cmd::Command;
|
||||||
|
use assert_cmd::cargo::cargo_bin_cmd;
|
||||||
|
use std::path::Path;
|
||||||
|
use yaak_models::models::{HttpRequest, Workspace};
|
||||||
|
use yaak_models::query_manager::QueryManager;
|
||||||
|
use yaak_models::util::UpdateSource;
|
||||||
|
|
||||||
|
pub fn cli_cmd(data_dir: &Path) -> Command {
|
||||||
|
let mut cmd = cargo_bin_cmd!("yaakcli");
|
||||||
|
cmd.arg("--data-dir").arg(data_dir);
|
||||||
|
cmd
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse_created_id(stdout: &[u8], label: &str) -> String {
|
||||||
|
String::from_utf8_lossy(stdout)
|
||||||
|
.trim()
|
||||||
|
.split_once(": ")
|
||||||
|
.map(|(_, id)| id.to_string())
|
||||||
|
.unwrap_or_else(|| panic!("Expected id in '{label}' output"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn query_manager(data_dir: &Path) -> QueryManager {
|
||||||
|
let db_path = data_dir.join("db.sqlite");
|
||||||
|
let blob_path = data_dir.join("blobs.sqlite");
|
||||||
|
let (query_manager, _blob_manager, _rx) =
|
||||||
|
yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize DB");
|
||||||
|
query_manager
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn seed_workspace(data_dir: &Path, workspace_id: &str) {
|
||||||
|
let workspace = Workspace {
|
||||||
|
id: workspace_id.to_string(),
|
||||||
|
name: "Seed Workspace".to_string(),
|
||||||
|
description: "Seeded for integration tests".to_string(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
query_manager(data_dir)
|
||||||
|
.connect()
|
||||||
|
.upsert_workspace(&workspace, &UpdateSource::Sync)
|
||||||
|
.expect("Failed to seed workspace");
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn seed_request(data_dir: &Path, workspace_id: &str, request_id: &str) {
|
||||||
|
let request = HttpRequest {
|
||||||
|
id: request_id.to_string(),
|
||||||
|
workspace_id: workspace_id.to_string(),
|
||||||
|
name: "Seeded Request".to_string(),
|
||||||
|
method: "GET".to_string(),
|
||||||
|
url: "https://example.com".to_string(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
query_manager(data_dir)
|
||||||
|
.connect()
|
||||||
|
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||||
|
.expect("Failed to seed request");
|
||||||
|
}
|
||||||
80
crates-cli/yaak-cli/tests/environment_commands.rs
Normal file
80
crates-cli/yaak-cli/tests/environment_commands.rs
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
mod common;
|
||||||
|
|
||||||
|
use common::{cli_cmd, parse_created_id, query_manager, seed_workspace};
|
||||||
|
use predicates::str::contains;
|
||||||
|
use tempfile::TempDir;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn create_list_show_delete_round_trip() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["environment", "list", "wk_test"])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains("Global Variables"));
|
||||||
|
|
||||||
|
let create_assert = cli_cmd(data_dir)
|
||||||
|
.args(["environment", "create", "wk_test", "--name", "Production"])
|
||||||
|
.assert()
|
||||||
|
.success();
|
||||||
|
let environment_id = parse_created_id(&create_assert.get_output().stdout, "environment create");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["environment", "list", "wk_test"])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(&environment_id))
|
||||||
|
.stdout(contains("Production"));
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["environment", "show", &environment_id])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("\"id\": \"{environment_id}\"")))
|
||||||
|
.stdout(contains("\"parentModel\": \"environment\""));
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["environment", "delete", &environment_id, "--yes"])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("Deleted environment: {environment_id}")));
|
||||||
|
|
||||||
|
assert!(query_manager(data_dir).connect().get_environment(&environment_id).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn json_create_and_update_merge_patch_round_trip() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
|
||||||
|
let create_assert = cli_cmd(data_dir)
|
||||||
|
.args([
|
||||||
|
"environment",
|
||||||
|
"create",
|
||||||
|
r#"{"workspaceId":"wk_test","name":"Json Environment"}"#,
|
||||||
|
])
|
||||||
|
.assert()
|
||||||
|
.success();
|
||||||
|
let environment_id = parse_created_id(&create_assert.get_output().stdout, "environment create");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args([
|
||||||
|
"environment",
|
||||||
|
"update",
|
||||||
|
&format!(r##"{{"id":"{}","color":"#00ff00"}}"##, environment_id),
|
||||||
|
])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("Updated environment: {environment_id}")));
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["environment", "show", &environment_id])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains("\"name\": \"Json Environment\""))
|
||||||
|
.stdout(contains("\"color\": \"#00ff00\""));
|
||||||
|
}
|
||||||
74
crates-cli/yaak-cli/tests/folder_commands.rs
Normal file
74
crates-cli/yaak-cli/tests/folder_commands.rs
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
mod common;
|
||||||
|
|
||||||
|
use common::{cli_cmd, parse_created_id, query_manager, seed_workspace};
|
||||||
|
use predicates::str::contains;
|
||||||
|
use tempfile::TempDir;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn create_list_show_delete_round_trip() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
|
||||||
|
let create_assert = cli_cmd(data_dir)
|
||||||
|
.args(["folder", "create", "wk_test", "--name", "Auth"])
|
||||||
|
.assert()
|
||||||
|
.success();
|
||||||
|
let folder_id = parse_created_id(&create_assert.get_output().stdout, "folder create");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["folder", "list", "wk_test"])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(&folder_id))
|
||||||
|
.stdout(contains("Auth"));
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["folder", "show", &folder_id])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("\"id\": \"{folder_id}\"")))
|
||||||
|
.stdout(contains("\"workspaceId\": \"wk_test\""));
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["folder", "delete", &folder_id, "--yes"])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("Deleted folder: {folder_id}")));
|
||||||
|
|
||||||
|
assert!(query_manager(data_dir).connect().get_folder(&folder_id).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn json_create_and_update_merge_patch_round_trip() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
|
||||||
|
let create_assert = cli_cmd(data_dir)
|
||||||
|
.args([
|
||||||
|
"folder",
|
||||||
|
"create",
|
||||||
|
r#"{"workspaceId":"wk_test","name":"Json Folder"}"#,
|
||||||
|
])
|
||||||
|
.assert()
|
||||||
|
.success();
|
||||||
|
let folder_id = parse_created_id(&create_assert.get_output().stdout, "folder create");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args([
|
||||||
|
"folder",
|
||||||
|
"update",
|
||||||
|
&format!(r#"{{"id":"{}","description":"Folder Description"}}"#, folder_id),
|
||||||
|
])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("Updated folder: {folder_id}")));
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["folder", "show", &folder_id])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains("\"name\": \"Json Folder\""))
|
||||||
|
.stdout(contains("\"description\": \"Folder Description\""));
|
||||||
|
}
|
||||||
179
crates-cli/yaak-cli/tests/request_commands.rs
Normal file
179
crates-cli/yaak-cli/tests/request_commands.rs
Normal file
@@ -0,0 +1,179 @@
|
|||||||
|
mod common;
|
||||||
|
|
||||||
|
use common::http_server::TestHttpServer;
|
||||||
|
use common::{cli_cmd, parse_created_id, query_manager, seed_request, seed_workspace};
|
||||||
|
use predicates::str::contains;
|
||||||
|
use tempfile::TempDir;
|
||||||
|
use yaak_models::models::HttpResponseState;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn show_and_delete_yes_round_trip() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
|
||||||
|
let create_assert = cli_cmd(data_dir)
|
||||||
|
.args([
|
||||||
|
"request",
|
||||||
|
"create",
|
||||||
|
"wk_test",
|
||||||
|
"--name",
|
||||||
|
"Smoke Test",
|
||||||
|
"--url",
|
||||||
|
"https://example.com",
|
||||||
|
])
|
||||||
|
.assert()
|
||||||
|
.success();
|
||||||
|
|
||||||
|
let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["request", "show", &request_id])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("\"id\": \"{request_id}\"")))
|
||||||
|
.stdout(contains("\"workspaceId\": \"wk_test\""));
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["request", "delete", &request_id, "--yes"])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("Deleted request: {request_id}")));
|
||||||
|
|
||||||
|
assert!(query_manager(data_dir).connect().get_http_request(&request_id).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn delete_without_yes_fails_in_non_interactive_mode() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
seed_request(data_dir, "wk_test", "rq_seed_delete_noninteractive");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["request", "delete", "rq_seed_delete_noninteractive"])
|
||||||
|
.assert()
|
||||||
|
.failure()
|
||||||
|
.code(1)
|
||||||
|
.stderr(contains("Refusing to delete in non-interactive mode without --yes"));
|
||||||
|
|
||||||
|
assert!(
|
||||||
|
query_manager(data_dir).connect().get_http_request("rq_seed_delete_noninteractive").is_ok()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn json_create_and_update_merge_patch_round_trip() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
|
||||||
|
let create_assert = cli_cmd(data_dir)
|
||||||
|
.args([
|
||||||
|
"request",
|
||||||
|
"create",
|
||||||
|
r#"{"workspaceId":"wk_test","name":"Json Request","url":"https://example.com"}"#,
|
||||||
|
])
|
||||||
|
.assert()
|
||||||
|
.success();
|
||||||
|
let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args([
|
||||||
|
"request",
|
||||||
|
"update",
|
||||||
|
&format!(r#"{{"id":"{}","name":"Renamed Request"}}"#, request_id),
|
||||||
|
])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("Updated request: {request_id}")));
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["request", "show", &request_id])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains("\"name\": \"Renamed Request\""))
|
||||||
|
.stdout(contains("\"url\": \"https://example.com\""));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn update_requires_id_in_json_payload() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["request", "update", r#"{"name":"No ID"}"#])
|
||||||
|
.assert()
|
||||||
|
.failure()
|
||||||
|
.stderr(contains("request update requires a non-empty \"id\" field"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn create_allows_workspace_only_with_empty_defaults() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
|
||||||
|
let create_assert =
|
||||||
|
cli_cmd(data_dir).args(["request", "create", "wk_test"]).assert().success();
|
||||||
|
let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");
|
||||||
|
|
||||||
|
let request = query_manager(data_dir)
|
||||||
|
.connect()
|
||||||
|
.get_http_request(&request_id)
|
||||||
|
.expect("Failed to load created request");
|
||||||
|
assert_eq!(request.workspace_id, "wk_test");
|
||||||
|
assert_eq!(request.method, "GET");
|
||||||
|
assert_eq!(request.name, "");
|
||||||
|
assert_eq!(request.url, "");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn request_send_persists_response_body_and_events() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
|
||||||
|
let server = TestHttpServer::spawn_ok("hello from integration test");
|
||||||
|
|
||||||
|
let create_assert = cli_cmd(data_dir)
|
||||||
|
.args([
|
||||||
|
"request",
|
||||||
|
"create",
|
||||||
|
"wk_test",
|
||||||
|
"--name",
|
||||||
|
"Send Test",
|
||||||
|
"--url",
|
||||||
|
&server.url,
|
||||||
|
])
|
||||||
|
.assert()
|
||||||
|
.success();
|
||||||
|
let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["request", "send", &request_id])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains("HTTP 200 OK"))
|
||||||
|
.stdout(contains("hello from integration test"));
|
||||||
|
|
||||||
|
let qm = query_manager(data_dir);
|
||||||
|
let db = qm.connect();
|
||||||
|
let responses =
|
||||||
|
db.list_http_responses_for_request(&request_id, None).expect("Failed to load responses");
|
||||||
|
assert_eq!(responses.len(), 1, "expected exactly one persisted response");
|
||||||
|
|
||||||
|
let response = &responses[0];
|
||||||
|
assert_eq!(response.status, 200);
|
||||||
|
assert!(matches!(response.state, HttpResponseState::Closed));
|
||||||
|
assert!(response.error.is_none());
|
||||||
|
|
||||||
|
let body_path =
|
||||||
|
response.body_path.as_ref().expect("expected persisted response body path").to_string();
|
||||||
|
let body = std::fs::read_to_string(&body_path).expect("Failed to read response body file");
|
||||||
|
assert_eq!(body, "hello from integration test");
|
||||||
|
|
||||||
|
let events =
|
||||||
|
db.list_http_response_events(&response.id).expect("Failed to load response events");
|
||||||
|
assert!(!events.is_empty(), "expected at least one persisted response event");
|
||||||
|
}
|
||||||
59
crates-cli/yaak-cli/tests/workspace_commands.rs
Normal file
59
crates-cli/yaak-cli/tests/workspace_commands.rs
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
mod common;
|
||||||
|
|
||||||
|
use common::{cli_cmd, parse_created_id, query_manager};
|
||||||
|
use predicates::str::contains;
|
||||||
|
use tempfile::TempDir;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn create_show_delete_round_trip() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
|
||||||
|
let create_assert =
|
||||||
|
cli_cmd(data_dir).args(["workspace", "create", "--name", "WS One"]).assert().success();
|
||||||
|
let workspace_id = parse_created_id(&create_assert.get_output().stdout, "workspace create");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["workspace", "show", &workspace_id])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("\"id\": \"{workspace_id}\"")))
|
||||||
|
.stdout(contains("\"name\": \"WS One\""));
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["workspace", "delete", &workspace_id, "--yes"])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("Deleted workspace: {workspace_id}")));
|
||||||
|
|
||||||
|
assert!(query_manager(data_dir).connect().get_workspace(&workspace_id).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn json_create_and_update_merge_patch_round_trip() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
|
||||||
|
let create_assert = cli_cmd(data_dir)
|
||||||
|
.args(["workspace", "create", r#"{"name":"Json Workspace"}"#])
|
||||||
|
.assert()
|
||||||
|
.success();
|
||||||
|
let workspace_id = parse_created_id(&create_assert.get_output().stdout, "workspace create");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args([
|
||||||
|
"workspace",
|
||||||
|
"update",
|
||||||
|
&format!(r#"{{"id":"{}","description":"Updated via JSON"}}"#, workspace_id),
|
||||||
|
])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("Updated workspace: {workspace_id}")));
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["workspace", "show", &workspace_id])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains("\"name\": \"Json Workspace\""))
|
||||||
|
.stdout(contains("\"description\": \"Updated via JSON\""));
|
||||||
|
}
|
||||||
@@ -61,6 +61,7 @@ yaak-api = { workspace = true }
|
|||||||
yaak-common = { workspace = true }
|
yaak-common = { workspace = true }
|
||||||
yaak-tauri-utils = { workspace = true }
|
yaak-tauri-utils = { workspace = true }
|
||||||
yaak-core = { workspace = true }
|
yaak-core = { workspace = true }
|
||||||
|
yaak = { workspace = true }
|
||||||
yaak-crypto = { workspace = true }
|
yaak-crypto = { workspace = true }
|
||||||
yaak-fonts = { workspace = true }
|
yaak-fonts = { workspace = true }
|
||||||
yaak-git = { workspace = true }
|
yaak-git = { workspace = true }
|
||||||
|
|||||||
@@ -3,45 +3,18 @@ use crate::error::Error::GenericError;
|
|||||||
use crate::error::Result;
|
use crate::error::Result;
|
||||||
use crate::models_ext::BlobManagerExt;
|
use crate::models_ext::BlobManagerExt;
|
||||||
use crate::models_ext::QueryManagerExt;
|
use crate::models_ext::QueryManagerExt;
|
||||||
use crate::render::render_http_request;
|
use log::warn;
|
||||||
use log::{debug, warn};
|
|
||||||
use std::pin::Pin;
|
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::sync::atomic::{AtomicI32, Ordering};
|
use std::time::Instant;
|
||||||
use std::time::{Duration, Instant};
|
|
||||||
use tauri::{AppHandle, Manager, Runtime, WebviewWindow};
|
use tauri::{AppHandle, Manager, Runtime, WebviewWindow};
|
||||||
use tokio::fs::{File, create_dir_all};
|
|
||||||
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWriteExt};
|
|
||||||
use tokio::sync::watch::Receiver;
|
use tokio::sync::watch::Receiver;
|
||||||
use tokio_util::bytes::Bytes;
|
use yaak::send::{SendHttpRequestWithPluginsParams, send_http_request_with_plugins};
|
||||||
use yaak_crypto::manager::EncryptionManager;
|
use yaak_crypto::manager::EncryptionManager;
|
||||||
use yaak_http::client::{
|
use yaak_http::manager::HttpConnectionManager;
|
||||||
HttpConnectionOptions, HttpConnectionProxySetting, HttpConnectionProxySettingAuth,
|
use yaak_models::models::{CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseState};
|
||||||
};
|
|
||||||
use yaak_http::cookies::CookieStore;
|
|
||||||
use yaak_http::manager::{CachedClient, HttpConnectionManager};
|
|
||||||
use yaak_http::sender::ReqwestSender;
|
|
||||||
use yaak_http::tee_reader::TeeReader;
|
|
||||||
use yaak_http::transaction::HttpTransaction;
|
|
||||||
use yaak_http::types::{
|
|
||||||
SendableBody, SendableHttpRequest, SendableHttpRequestOptions, append_query_params,
|
|
||||||
};
|
|
||||||
use yaak_models::blob_manager::BodyChunk;
|
|
||||||
use yaak_models::models::{
|
|
||||||
CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseHeader,
|
|
||||||
HttpResponseState, ProxySetting, ProxySettingAuth,
|
|
||||||
};
|
|
||||||
use yaak_models::util::UpdateSource;
|
use yaak_models::util::UpdateSource;
|
||||||
use yaak_plugins::events::{
|
use yaak_plugins::events::PluginContext;
|
||||||
CallHttpAuthenticationRequest, HttpHeader, PluginContext, RenderPurpose,
|
|
||||||
};
|
|
||||||
use yaak_plugins::manager::PluginManager;
|
use yaak_plugins::manager::PluginManager;
|
||||||
use yaak_plugins::template_callback::PluginTemplateCallback;
|
|
||||||
use yaak_templates::RenderOptions;
|
|
||||||
use yaak_tls::find_client_certificate;
|
|
||||||
|
|
||||||
/// Chunk size for storing request bodies (1MB)
|
|
||||||
const REQUEST_BODY_CHUNK_SIZE: usize = 1024 * 1024;
|
|
||||||
|
|
||||||
/// Context for managing response state during HTTP transactions.
|
/// Context for managing response state during HTTP transactions.
|
||||||
/// Handles both persisted responses (stored in DB) and ephemeral responses (in-memory only).
|
/// Handles both persisted responses (stored in DB) and ephemeral responses (in-memory only).
|
||||||
@@ -168,148 +141,30 @@ async fn send_http_request_inner<R: Runtime>(
|
|||||||
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
|
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
|
||||||
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
|
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
|
||||||
let connection_manager = app_handle.state::<HttpConnectionManager>();
|
let connection_manager = app_handle.state::<HttpConnectionManager>();
|
||||||
let settings = window.db().get_settings();
|
|
||||||
let workspace_id = &unrendered_request.workspace_id;
|
|
||||||
let folder_id = unrendered_request.folder_id.as_deref();
|
|
||||||
let environment_id = environment.map(|e| e.id);
|
let environment_id = environment.map(|e| e.id);
|
||||||
let workspace = window.db().get_workspace(workspace_id)?;
|
let cookie_jar_id = cookie_jar.as_ref().map(|jar| jar.id.clone());
|
||||||
let (resolved, auth_context_id) = resolve_http_request(window, unrendered_request)?;
|
|
||||||
let cb = PluginTemplateCallback::new(
|
|
||||||
plugin_manager.clone(),
|
|
||||||
encryption_manager.clone(),
|
|
||||||
&plugin_context,
|
|
||||||
RenderPurpose::Send,
|
|
||||||
);
|
|
||||||
let env_chain =
|
|
||||||
window.db().resolve_environments(&workspace.id, folder_id, environment_id.as_deref())?;
|
|
||||||
let mut cancel_rx = cancelled_rx.clone();
|
|
||||||
let render_options = RenderOptions::throw();
|
|
||||||
let request = tokio::select! {
|
|
||||||
result = render_http_request(&resolved, env_chain, &cb, &render_options) => result?,
|
|
||||||
_ = cancel_rx.changed() => {
|
|
||||||
return Err(GenericError("Request canceled".to_string()));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Build the sendable request using the new SendableHttpRequest type
|
let response_dir = app_handle.path().app_data_dir()?.join("responses");
|
||||||
let options = SendableHttpRequestOptions {
|
let result = send_http_request_with_plugins(SendHttpRequestWithPluginsParams {
|
||||||
follow_redirects: workspace.setting_follow_redirects,
|
query_manager: app_handle.db_manager().inner(),
|
||||||
timeout: if workspace.setting_request_timeout > 0 {
|
blob_manager: app_handle.blob_manager().inner(),
|
||||||
Some(Duration::from_millis(workspace.setting_request_timeout.unsigned_abs() as u64))
|
request: unrendered_request.clone(),
|
||||||
} else {
|
environment_id: environment_id.as_deref(),
|
||||||
None
|
update_source: response_ctx.update_source.clone(),
|
||||||
},
|
cookie_jar_id,
|
||||||
};
|
response_dir: &response_dir,
|
||||||
let mut sendable_request = SendableHttpRequest::from_http_request(&request, options).await?;
|
emit_events_to: None,
|
||||||
|
existing_response: Some(response_ctx.response().clone()),
|
||||||
|
plugin_manager,
|
||||||
|
encryption_manager,
|
||||||
|
plugin_context,
|
||||||
|
cancelled_rx: Some(cancelled_rx.clone()),
|
||||||
|
connection_manager: Some(connection_manager.inner()),
|
||||||
|
})
|
||||||
|
.await
|
||||||
|
.map_err(|e| GenericError(e.to_string()))?;
|
||||||
|
|
||||||
debug!("Sending request to {} {}", sendable_request.method, sendable_request.url);
|
Ok(result.response)
|
||||||
|
|
||||||
let proxy_setting = match settings.proxy {
|
|
||||||
None => HttpConnectionProxySetting::System,
|
|
||||||
Some(ProxySetting::Disabled) => HttpConnectionProxySetting::Disabled,
|
|
||||||
Some(ProxySetting::Enabled { http, https, auth, bypass, disabled }) => {
|
|
||||||
if disabled {
|
|
||||||
HttpConnectionProxySetting::System
|
|
||||||
} else {
|
|
||||||
HttpConnectionProxySetting::Enabled {
|
|
||||||
http,
|
|
||||||
https,
|
|
||||||
bypass,
|
|
||||||
auth: match auth {
|
|
||||||
None => None,
|
|
||||||
Some(ProxySettingAuth { user, password }) => {
|
|
||||||
Some(HttpConnectionProxySettingAuth { user, password })
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let client_certificate =
|
|
||||||
find_client_certificate(&sendable_request.url, &settings.client_certificates);
|
|
||||||
|
|
||||||
// Create cookie store if a cookie jar is specified
|
|
||||||
let maybe_cookie_store = match cookie_jar.clone() {
|
|
||||||
Some(CookieJar { id, .. }) => {
|
|
||||||
// NOTE: We need to refetch the cookie jar because a chained request might have
|
|
||||||
// updated cookies when we rendered the request.
|
|
||||||
let cj = window.db().get_cookie_jar(&id)?;
|
|
||||||
let cookie_store = CookieStore::from_cookies(cj.cookies.clone());
|
|
||||||
Some((cookie_store, cj))
|
|
||||||
}
|
|
||||||
None => None,
|
|
||||||
};
|
|
||||||
|
|
||||||
let cached_client = connection_manager
|
|
||||||
.get_client(&HttpConnectionOptions {
|
|
||||||
id: plugin_context.id.clone(),
|
|
||||||
validate_certificates: workspace.setting_validate_certificates,
|
|
||||||
proxy: proxy_setting,
|
|
||||||
client_certificate,
|
|
||||||
dns_overrides: workspace.setting_dns_overrides.clone(),
|
|
||||||
})
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
// Apply authentication to the request, racing against cancellation since
|
|
||||||
// auth plugins (e.g. OAuth2) can block indefinitely waiting for user action.
|
|
||||||
let mut cancel_rx = cancelled_rx.clone();
|
|
||||||
tokio::select! {
|
|
||||||
result = apply_authentication(
|
|
||||||
&window,
|
|
||||||
&mut sendable_request,
|
|
||||||
&request,
|
|
||||||
auth_context_id,
|
|
||||||
&plugin_manager,
|
|
||||||
plugin_context,
|
|
||||||
) => result?,
|
|
||||||
_ = cancel_rx.changed() => {
|
|
||||||
return Err(GenericError("Request canceled".to_string()));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let cookie_store = maybe_cookie_store.as_ref().map(|(cs, _)| cs.clone());
|
|
||||||
let result = execute_transaction(
|
|
||||||
cached_client,
|
|
||||||
sendable_request,
|
|
||||||
response_ctx,
|
|
||||||
cancelled_rx.clone(),
|
|
||||||
cookie_store,
|
|
||||||
)
|
|
||||||
.await;
|
|
||||||
|
|
||||||
// Wait for blob writing to complete and check for errors
|
|
||||||
let final_result = match result {
|
|
||||||
Ok((response, maybe_blob_write_handle)) => {
|
|
||||||
// Check if blob writing failed
|
|
||||||
if let Some(handle) = maybe_blob_write_handle {
|
|
||||||
if let Ok(Err(e)) = handle.await {
|
|
||||||
// Update response with the storage error
|
|
||||||
let _ = response_ctx.update(|r| {
|
|
||||||
let error_msg =
|
|
||||||
format!("Request succeeded but failed to store request body: {}", e);
|
|
||||||
r.error = Some(match &r.error {
|
|
||||||
Some(existing) => format!("{}; {}", existing, error_msg),
|
|
||||||
None => error_msg,
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(response)
|
|
||||||
}
|
|
||||||
Err(e) => Err(e),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Persist cookies back to the database after the request completes
|
|
||||||
if let Some((cookie_store, mut cj)) = maybe_cookie_store {
|
|
||||||
let cookies = cookie_store.get_all_cookies();
|
|
||||||
cj.cookies = cookies;
|
|
||||||
if let Err(e) = window.db().upsert_cookie_jar(&cj, &UpdateSource::Background) {
|
|
||||||
warn!("Failed to persist cookies to database: {}", e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
final_result
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn resolve_http_request<R: Runtime>(
|
pub fn resolve_http_request<R: Runtime>(
|
||||||
@@ -328,395 +183,3 @@ pub fn resolve_http_request<R: Runtime>(
|
|||||||
|
|
||||||
Ok((new_request, authentication_context_id))
|
Ok((new_request, authentication_context_id))
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn execute_transaction<R: Runtime>(
|
|
||||||
cached_client: CachedClient,
|
|
||||||
mut sendable_request: SendableHttpRequest,
|
|
||||||
response_ctx: &mut ResponseContext<R>,
|
|
||||||
mut cancelled_rx: Receiver<bool>,
|
|
||||||
cookie_store: Option<CookieStore>,
|
|
||||||
) -> Result<(HttpResponse, Option<tauri::async_runtime::JoinHandle<Result<()>>>)> {
|
|
||||||
let app_handle = &response_ctx.app_handle.clone();
|
|
||||||
let response_id = response_ctx.response().id.clone();
|
|
||||||
let workspace_id = response_ctx.response().workspace_id.clone();
|
|
||||||
let is_persisted = response_ctx.is_persisted();
|
|
||||||
|
|
||||||
// Keep a reference to the resolver for DNS timing events
|
|
||||||
let resolver = cached_client.resolver.clone();
|
|
||||||
|
|
||||||
let sender = ReqwestSender::with_client(cached_client.client);
|
|
||||||
let transaction = match cookie_store {
|
|
||||||
Some(cs) => HttpTransaction::with_cookie_store(sender, cs),
|
|
||||||
None => HttpTransaction::new(sender),
|
|
||||||
};
|
|
||||||
let start = Instant::now();
|
|
||||||
|
|
||||||
// Capture request headers before sending
|
|
||||||
let request_headers: Vec<HttpResponseHeader> = sendable_request
|
|
||||||
.headers
|
|
||||||
.iter()
|
|
||||||
.map(|(name, value)| HttpResponseHeader { name: name.clone(), value: value.clone() })
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
// Update response with headers info
|
|
||||||
response_ctx.update(|r| {
|
|
||||||
r.url = sendable_request.url.clone();
|
|
||||||
r.request_headers = request_headers;
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// Create bounded channel for receiving events and spawn a task to store them in DB
|
|
||||||
// Buffer size of 100 events provides back pressure if DB writes are slow
|
|
||||||
let (event_tx, mut event_rx) =
|
|
||||||
tokio::sync::mpsc::channel::<yaak_http::sender::HttpResponseEvent>(100);
|
|
||||||
|
|
||||||
// Set the event sender on the DNS resolver so it can emit DNS timing events
|
|
||||||
resolver.set_event_sender(Some(event_tx.clone())).await;
|
|
||||||
|
|
||||||
// Shared state to capture DNS timing from the event processing task
|
|
||||||
let dns_elapsed = Arc::new(AtomicI32::new(0));
|
|
||||||
|
|
||||||
// Write events to DB in a task (only for persisted responses)
|
|
||||||
if is_persisted {
|
|
||||||
let response_id = response_id.clone();
|
|
||||||
let app_handle = app_handle.clone();
|
|
||||||
let update_source = response_ctx.update_source.clone();
|
|
||||||
let workspace_id = workspace_id.clone();
|
|
||||||
let dns_elapsed = dns_elapsed.clone();
|
|
||||||
tokio::spawn(async move {
|
|
||||||
while let Some(event) = event_rx.recv().await {
|
|
||||||
// Capture DNS timing when we see a DNS event
|
|
||||||
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
|
|
||||||
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
|
|
||||||
}
|
|
||||||
let db_event = HttpResponseEvent::new(&response_id, &workspace_id, event.into());
|
|
||||||
let _ = app_handle.db().upsert_http_response_event(&db_event, &update_source);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
// For ephemeral responses, just drain the events but still capture DNS timing
|
|
||||||
let dns_elapsed = dns_elapsed.clone();
|
|
||||||
tokio::spawn(async move {
|
|
||||||
while let Some(event) = event_rx.recv().await {
|
|
||||||
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
|
|
||||||
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
// Capture request body as it's sent (only for persisted responses)
|
|
||||||
let body_id = format!("{}.request", response_id);
|
|
||||||
let maybe_blob_write_handle = match sendable_request.body {
|
|
||||||
Some(SendableBody::Bytes(bytes)) => {
|
|
||||||
if is_persisted {
|
|
||||||
write_bytes_to_db_sync(response_ctx, &body_id, bytes.clone())?;
|
|
||||||
}
|
|
||||||
sendable_request.body = Some(SendableBody::Bytes(bytes));
|
|
||||||
None
|
|
||||||
}
|
|
||||||
Some(SendableBody::Stream { data: stream, content_length }) => {
|
|
||||||
// Wrap stream with TeeReader to capture data as it's read
|
|
||||||
// Use unbounded channel to ensure all data is captured without blocking the HTTP request
|
|
||||||
let (body_chunk_tx, body_chunk_rx) = tokio::sync::mpsc::unbounded_channel::<Vec<u8>>();
|
|
||||||
let tee_reader = TeeReader::new(stream, body_chunk_tx);
|
|
||||||
let pinned: Pin<Box<dyn AsyncRead + Send + 'static>> = Box::pin(tee_reader);
|
|
||||||
|
|
||||||
let handle = if is_persisted {
|
|
||||||
// Spawn task to write request body chunks to blob DB
|
|
||||||
let app_handle = app_handle.clone();
|
|
||||||
let response_id = response_id.clone();
|
|
||||||
let workspace_id = workspace_id.clone();
|
|
||||||
let body_id = body_id.clone();
|
|
||||||
let update_source = response_ctx.update_source.clone();
|
|
||||||
Some(tauri::async_runtime::spawn(async move {
|
|
||||||
write_stream_chunks_to_db(
|
|
||||||
app_handle,
|
|
||||||
&body_id,
|
|
||||||
&workspace_id,
|
|
||||||
&response_id,
|
|
||||||
&update_source,
|
|
||||||
body_chunk_rx,
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
}))
|
|
||||||
} else {
|
|
||||||
// For ephemeral responses, just drain the body chunks
|
|
||||||
tauri::async_runtime::spawn(async move {
|
|
||||||
let mut rx = body_chunk_rx;
|
|
||||||
while rx.recv().await.is_some() {}
|
|
||||||
});
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
sendable_request.body = Some(SendableBody::Stream { data: pinned, content_length });
|
|
||||||
handle
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
sendable_request.body = None;
|
|
||||||
None
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Execute the transaction with cancellation support
|
|
||||||
// This returns the response with headers, but body is not yet consumed
|
|
||||||
// Events (headers, settings, chunks) are sent through the channel
|
|
||||||
let mut http_response = transaction
|
|
||||||
.execute_with_cancellation(sendable_request, cancelled_rx.clone(), event_tx)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
// Prepare the response path before consuming the body
|
|
||||||
let body_path = if response_id.is_empty() {
|
|
||||||
// Ephemeral responses: use OS temp directory for automatic cleanup
|
|
||||||
let temp_dir = std::env::temp_dir().join("yaak-ephemeral-responses");
|
|
||||||
create_dir_all(&temp_dir).await?;
|
|
||||||
temp_dir.join(uuid::Uuid::new_v4().to_string())
|
|
||||||
} else {
|
|
||||||
// Persisted responses: use app data directory
|
|
||||||
let dir = app_handle.path().app_data_dir()?;
|
|
||||||
let base_dir = dir.join("responses");
|
|
||||||
create_dir_all(&base_dir).await?;
|
|
||||||
base_dir.join(&response_id)
|
|
||||||
};
|
|
||||||
|
|
||||||
// Extract metadata before consuming the body (headers are available immediately)
|
|
||||||
// Url might change, so update again
|
|
||||||
response_ctx.update(|r| {
|
|
||||||
r.body_path = Some(body_path.to_string_lossy().to_string());
|
|
||||||
r.elapsed_headers = start.elapsed().as_millis() as i32;
|
|
||||||
r.status = http_response.status as i32;
|
|
||||||
r.status_reason = http_response.status_reason.clone();
|
|
||||||
r.url = http_response.url.clone();
|
|
||||||
r.remote_addr = http_response.remote_addr.clone();
|
|
||||||
r.version = http_response.version.clone();
|
|
||||||
r.headers = http_response
|
|
||||||
.headers
|
|
||||||
.iter()
|
|
||||||
.map(|(name, value)| HttpResponseHeader { name: name.clone(), value: value.clone() })
|
|
||||||
.collect();
|
|
||||||
r.content_length = http_response.content_length.map(|l| l as i32);
|
|
||||||
r.state = HttpResponseState::Connected;
|
|
||||||
r.request_headers = http_response
|
|
||||||
.request_headers
|
|
||||||
.iter()
|
|
||||||
.map(|(n, v)| HttpResponseHeader { name: n.clone(), value: v.clone() })
|
|
||||||
.collect();
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// Get the body stream for manual consumption
|
|
||||||
let mut body_stream = http_response.into_body_stream()?;
|
|
||||||
|
|
||||||
// Open file for writing
|
|
||||||
let mut file = File::options()
|
|
||||||
.create(true)
|
|
||||||
.truncate(true)
|
|
||||||
.write(true)
|
|
||||||
.open(&body_path)
|
|
||||||
.await
|
|
||||||
.map_err(|e| GenericError(format!("Failed to open file: {}", e)))?;
|
|
||||||
|
|
||||||
// Stream body to file, with throttled DB updates to avoid excessive writes
|
|
||||||
let mut written_bytes: usize = 0;
|
|
||||||
let mut last_update_time = start;
|
|
||||||
let mut buf = [0u8; 8192];
|
|
||||||
|
|
||||||
// Throttle settings: update DB at most every 100ms
|
|
||||||
const UPDATE_INTERVAL_MS: u128 = 100;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
// Check for cancellation. If we already have headers/body, just close cleanly without error
|
|
||||||
if *cancelled_rx.borrow() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use select! to race between reading and cancellation, so cancellation is immediate
|
|
||||||
let read_result = tokio::select! {
|
|
||||||
biased;
|
|
||||||
_ = cancelled_rx.changed() => {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
result = body_stream.read(&mut buf) => result,
|
|
||||||
};
|
|
||||||
|
|
||||||
match read_result {
|
|
||||||
Ok(0) => break, // EOF
|
|
||||||
Ok(n) => {
|
|
||||||
file.write_all(&buf[..n])
|
|
||||||
.await
|
|
||||||
.map_err(|e| GenericError(format!("Failed to write to file: {}", e)))?;
|
|
||||||
file.flush()
|
|
||||||
.await
|
|
||||||
.map_err(|e| GenericError(format!("Failed to flush file: {}", e)))?;
|
|
||||||
written_bytes += n;
|
|
||||||
|
|
||||||
// Throttle DB updates: only update if enough time has passed
|
|
||||||
let now = Instant::now();
|
|
||||||
let elapsed_since_update = now.duration_since(last_update_time).as_millis();
|
|
||||||
|
|
||||||
if elapsed_since_update >= UPDATE_INTERVAL_MS {
|
|
||||||
response_ctx.update(|r| {
|
|
||||||
r.elapsed = start.elapsed().as_millis() as i32;
|
|
||||||
r.content_length = Some(written_bytes as i32);
|
|
||||||
})?;
|
|
||||||
last_update_time = now;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
return Err(GenericError(format!("Failed to read response body: {}", e)));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Final update with closed state and accurate byte count
|
|
||||||
response_ctx.update(|r| {
|
|
||||||
r.elapsed = start.elapsed().as_millis() as i32;
|
|
||||||
r.elapsed_dns = dns_elapsed.load(Ordering::SeqCst);
|
|
||||||
r.content_length = Some(written_bytes as i32);
|
|
||||||
r.state = HttpResponseState::Closed;
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// Clear the event sender from the resolver since this request is done
|
|
||||||
resolver.set_event_sender(None).await;
|
|
||||||
|
|
||||||
Ok((response_ctx.response().clone(), maybe_blob_write_handle))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn write_bytes_to_db_sync<R: Runtime>(
|
|
||||||
response_ctx: &mut ResponseContext<R>,
|
|
||||||
body_id: &str,
|
|
||||||
data: Bytes,
|
|
||||||
) -> Result<()> {
|
|
||||||
if data.is_empty() {
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Write in chunks if data is large
|
|
||||||
let mut offset = 0;
|
|
||||||
let mut chunk_index = 0;
|
|
||||||
while offset < data.len() {
|
|
||||||
let end = std::cmp::min(offset + REQUEST_BODY_CHUNK_SIZE, data.len());
|
|
||||||
let chunk_data = data.slice(offset..end).to_vec();
|
|
||||||
let chunk = BodyChunk::new(body_id, chunk_index, chunk_data);
|
|
||||||
response_ctx.app_handle.blobs().insert_chunk(&chunk)?;
|
|
||||||
offset = end;
|
|
||||||
chunk_index += 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update the response with the total request body size
|
|
||||||
response_ctx.update(|r| {
|
|
||||||
r.request_content_length = Some(data.len() as i32);
|
|
||||||
})?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Drain a streamed request body from `rx` and persist it to the blob DB in
/// `REQUEST_BODY_CHUNK_SIZE` pieces, emitting a `ChunkSent` response event per
/// chunk and finally recording the total body length on the response row.
///
/// The receiver side of a `TeeReader` feeds `rx`, so this runs concurrently with
/// the HTTP request and must never block the sender (the channel is unbounded).
async fn write_stream_chunks_to_db<R: Runtime>(
    app_handle: AppHandle<R>,
    body_id: &str,
    workspace_id: &str,
    response_id: &str,
    update_source: &UpdateSource,
    mut rx: tokio::sync::mpsc::UnboundedReceiver<Vec<u8>>,
) -> Result<()> {
    // Accumulates partial data until a full chunk can be written.
    let mut buffer = Vec::with_capacity(REQUEST_BODY_CHUNK_SIZE);
    let mut chunk_index = 0;
    let mut total_bytes: usize = 0;

    // Channel closes (recv -> None) when the TeeReader/sender is dropped.
    while let Some(data) = rx.recv().await {
        total_bytes += data.len();
        buffer.extend_from_slice(&data);

        // Flush when buffer reaches chunk size (may flush multiple chunks per recv)
        while buffer.len() >= REQUEST_BODY_CHUNK_SIZE {
            debug!("Writing chunk {chunk_index} to DB");
            let chunk_data: Vec<u8> = buffer.drain(..REQUEST_BODY_CHUNK_SIZE).collect();
            let chunk = BodyChunk::new(body_id, chunk_index, chunk_data);
            app_handle.blobs().insert_chunk(&chunk)?;
            app_handle.db().upsert_http_response_event(
                &HttpResponseEvent::new(
                    response_id,
                    workspace_id,
                    yaak_http::sender::HttpResponseEvent::ChunkSent {
                        // Full chunks are always exactly REQUEST_BODY_CHUNK_SIZE bytes
                        bytes: REQUEST_BODY_CHUNK_SIZE,
                    }
                    .into(),
                ),
                update_source,
            )?;
            chunk_index += 1;
        }
    }

    // Flush remaining data (final short chunk, if any)
    if !buffer.is_empty() {
        let chunk = BodyChunk::new(body_id, chunk_index, buffer);
        debug!("Flushing remaining data {chunk_index} {}", chunk.data.len());
        app_handle.blobs().insert_chunk(&chunk)?;
        app_handle.db().upsert_http_response_event(
            &HttpResponseEvent::new(
                response_id,
                workspace_id,
                yaak_http::sender::HttpResponseEvent::ChunkSent { bytes: chunk.data.len() }.into(),
            ),
            update_source,
        )?;
    }

    // Update the response with the total request body size. Done in a transaction
    // because the response row may have been updated concurrently; skip silently
    // if the response no longer exists.
    app_handle.with_tx(|tx| {
        debug!("Updating final body length {total_bytes}");
        if let Ok(mut response) = tx.get_http_response(&response_id) {
            response.request_content_length = Some(total_bytes as i32);
            tx.update_http_response_if_id(&response, update_source)?;
        }
        Ok(())
    })?;

    Ok(())
}
|
|
||||||
|
|
||||||
async fn apply_authentication<R: Runtime>(
|
|
||||||
_window: &WebviewWindow<R>,
|
|
||||||
sendable_request: &mut SendableHttpRequest,
|
|
||||||
request: &HttpRequest,
|
|
||||||
auth_context_id: String,
|
|
||||||
plugin_manager: &PluginManager,
|
|
||||||
plugin_context: &PluginContext,
|
|
||||||
) -> Result<()> {
|
|
||||||
match &request.authentication_type {
|
|
||||||
None => {
|
|
||||||
// No authentication found. Not even inherited
|
|
||||||
}
|
|
||||||
Some(authentication_type) if authentication_type == "none" => {
|
|
||||||
// Explicitly no authentication
|
|
||||||
}
|
|
||||||
Some(authentication_type) => {
|
|
||||||
let req = CallHttpAuthenticationRequest {
|
|
||||||
context_id: format!("{:x}", md5::compute(auth_context_id)),
|
|
||||||
values: serde_json::from_value(serde_json::to_value(&request.authentication)?)?,
|
|
||||||
url: sendable_request.url.clone(),
|
|
||||||
method: sendable_request.method.clone(),
|
|
||||||
headers: sendable_request
|
|
||||||
.headers
|
|
||||||
.iter()
|
|
||||||
.map(|(name, value)| HttpHeader {
|
|
||||||
name: name.to_string(),
|
|
||||||
value: value.to_string(),
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
};
|
|
||||||
let plugin_result = plugin_manager
|
|
||||||
.call_http_authentication(plugin_context, &authentication_type, req)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
for header in plugin_result.set_headers.unwrap_or_default() {
|
|
||||||
sendable_request.insert_header((header.name, header.value));
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(params) = plugin_result.set_query_parameters {
|
|
||||||
let params = params.into_iter().map(|p| (p.name, p.value)).collect::<Vec<_>>();
|
|
||||||
sendable_request.url = append_query_params(&sendable_request.url, params);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1095,13 +1095,9 @@ async fn cmd_get_http_authentication_config<R: Runtime>(
|
|||||||
|
|
||||||
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
|
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
|
||||||
let values_json: serde_json::Value = serde_json::to_value(&values)?;
|
let values_json: serde_json::Value = serde_json::to_value(&values)?;
|
||||||
let rendered_json = render_json_value(
|
let rendered_json =
|
||||||
values_json,
|
render_json_value(values_json, environment_chain, &cb, &RenderOptions::return_empty())
|
||||||
environment_chain,
|
.await?;
|
||||||
&cb,
|
|
||||||
&RenderOptions::return_empty(),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
// Convert back to HashMap<String, JsonPrimitive>
|
// Convert back to HashMap<String, JsonPrimitive>
|
||||||
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
|
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
|
||||||
|
|||||||
@@ -3,6 +3,9 @@
|
|||||||
//! This module provides the Tauri plugin initialization and extension traits
|
//! This module provides the Tauri plugin initialization and extension traits
|
||||||
//! that allow accessing QueryManager and BlobManager from Tauri's Manager types.
|
//! that allow accessing QueryManager and BlobManager from Tauri's Manager types.
|
||||||
|
|
||||||
|
use chrono::Utc;
|
||||||
|
use log::error;
|
||||||
|
use std::time::Duration;
|
||||||
use tauri::plugin::TauriPlugin;
|
use tauri::plugin::TauriPlugin;
|
||||||
use tauri::{Emitter, Manager, Runtime, State};
|
use tauri::{Emitter, Manager, Runtime, State};
|
||||||
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
|
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
|
||||||
@@ -13,6 +16,74 @@ use yaak_models::models::{AnyModel, GraphQlIntrospection, GrpcEvent, Settings, W
|
|||||||
use yaak_models::query_manager::QueryManager;
|
use yaak_models::query_manager::QueryManager;
|
||||||
use yaak_models::util::UpdateSource;
|
use yaak_models::util::UpdateSource;
|
||||||
|
|
||||||
|
const MODEL_CHANGES_RETENTION_HOURS: i64 = 1;
|
||||||
|
const MODEL_CHANGES_POLL_INTERVAL_MS: u64 = 1000;
|
||||||
|
const MODEL_CHANGES_POLL_BATCH_SIZE: usize = 200;
|
||||||
|
|
||||||
|
/// Cursor into the `model_changes` table, marking the last change row that has
/// already been forwarded to the frontend by the poller.
struct ModelChangeCursor {
    // Timestamp of the last-seen change row, formatted to match the DB's
    // stored timestamp strings (see `from_launch_time`).
    created_at: String,
    // Row id of the last-seen change; breaks ties among rows sharing a timestamp.
    id: i64,
}

impl ModelChangeCursor {
    /// Create a cursor positioned at "now" so that only writes made after this
    /// app launch are streamed to the UI.
    fn from_launch_time() -> Self {
        Self {
            // Millisecond precision matches the created_at comparison in the query.
            created_at: Utc::now().naive_utc().format("%Y-%m-%d %H:%M:%S%.3f").to_string(),
            // id 0 sorts before any real row id at the same timestamp.
            id: 0,
        }
    }
}
|
||||||
|
|
||||||
|
fn drain_model_changes_batch<R: Runtime>(
|
||||||
|
query_manager: &QueryManager,
|
||||||
|
app_handle: &tauri::AppHandle<R>,
|
||||||
|
cursor: &mut ModelChangeCursor,
|
||||||
|
) -> bool {
|
||||||
|
let changes = match query_manager.connect().list_model_changes_since(
|
||||||
|
&cursor.created_at,
|
||||||
|
cursor.id,
|
||||||
|
MODEL_CHANGES_POLL_BATCH_SIZE,
|
||||||
|
) {
|
||||||
|
Ok(changes) => changes,
|
||||||
|
Err(err) => {
|
||||||
|
error!("Failed to poll model_changes rows: {err:?}");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if changes.is_empty() {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
let fetched_count = changes.len();
|
||||||
|
for change in changes {
|
||||||
|
cursor.created_at = change.created_at;
|
||||||
|
cursor.id = change.id;
|
||||||
|
|
||||||
|
// Local window-originated writes are forwarded immediately from the
|
||||||
|
// in-memory model event channel.
|
||||||
|
if matches!(change.payload.update_source, UpdateSource::Window { .. }) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if let Err(err) = app_handle.emit("model_write", change.payload) {
|
||||||
|
error!("Failed to emit model_write event: {err:?}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fetched_count == MODEL_CHANGES_POLL_BATCH_SIZE
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn run_model_change_poller<R: Runtime>(
|
||||||
|
query_manager: QueryManager,
|
||||||
|
app_handle: tauri::AppHandle<R>,
|
||||||
|
mut cursor: ModelChangeCursor,
|
||||||
|
) {
|
||||||
|
loop {
|
||||||
|
while drain_model_changes_batch(&query_manager, &app_handle, &mut cursor) {}
|
||||||
|
tokio::time::sleep(Duration::from_millis(MODEL_CHANGES_POLL_INTERVAL_MS)).await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Extension trait for accessing the QueryManager from Tauri Manager types.
|
/// Extension trait for accessing the QueryManager from Tauri Manager types.
|
||||||
pub trait QueryManagerExt<'a, R> {
|
pub trait QueryManagerExt<'a, R> {
|
||||||
fn db_manager(&'a self) -> State<'a, QueryManager>;
|
fn db_manager(&'a self) -> State<'a, QueryManager>;
|
||||||
@@ -262,14 +333,37 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let db = query_manager.connect();
|
||||||
|
if let Err(err) = db.prune_model_changes_older_than_hours(MODEL_CHANGES_RETENTION_HOURS)
|
||||||
|
{
|
||||||
|
error!("Failed to prune model_changes rows on startup: {err:?}");
|
||||||
|
}
|
||||||
|
// Only stream writes that happen after this app launch.
|
||||||
|
let cursor = ModelChangeCursor::from_launch_time();
|
||||||
|
|
||||||
|
let poll_query_manager = query_manager.clone();
|
||||||
|
|
||||||
app_handle.manage(query_manager);
|
app_handle.manage(query_manager);
|
||||||
app_handle.manage(blob_manager);
|
app_handle.manage(blob_manager);
|
||||||
|
|
||||||
// Forward model change events to the frontend
|
// Poll model_changes so all writers (including external CLI processes) update the UI.
|
||||||
let app_handle = app_handle.clone();
|
let app_handle_poll = app_handle.clone();
|
||||||
|
let query_manager = poll_query_manager;
|
||||||
|
tauri::async_runtime::spawn(async move {
|
||||||
|
run_model_change_poller(query_manager, app_handle_poll, cursor).await;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Fast path for local app writes initiated by frontend windows. This keeps the
|
||||||
|
// current sync-model UX snappy, while DB polling handles external writers (CLI).
|
||||||
|
let app_handle_local = app_handle.clone();
|
||||||
tauri::async_runtime::spawn(async move {
|
tauri::async_runtime::spawn(async move {
|
||||||
for payload in rx {
|
for payload in rx {
|
||||||
app_handle.emit("model_write", payload).unwrap();
|
if !matches!(payload.update_source, UpdateSource::Window { .. }) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if let Err(err) = app_handle_local.emit("model_write", payload) {
|
||||||
|
error!("Failed to emit local model_write event: {err:?}");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -8,9 +8,9 @@ use serde::{Deserialize, Serialize};
|
|||||||
use std::time::Instant;
|
use std::time::Instant;
|
||||||
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
|
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
|
||||||
use ts_rs::TS;
|
use ts_rs::TS;
|
||||||
|
use yaak_api::yaak_api_client;
|
||||||
use yaak_common::platform::get_os_str;
|
use yaak_common::platform::get_os_str;
|
||||||
use yaak_models::util::UpdateSource;
|
use yaak_models::util::UpdateSource;
|
||||||
use yaak_api::yaak_api_client;
|
|
||||||
|
|
||||||
// Check for updates every hour
|
// Check for updates every hour
|
||||||
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;
|
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ use tauri::{
|
|||||||
};
|
};
|
||||||
use tokio::sync::Mutex;
|
use tokio::sync::Mutex;
|
||||||
use ts_rs::TS;
|
use ts_rs::TS;
|
||||||
|
use yaak_api::yaak_api_client;
|
||||||
use yaak_models::models::Plugin;
|
use yaak_models::models::Plugin;
|
||||||
use yaak_models::util::UpdateSource;
|
use yaak_models::util::UpdateSource;
|
||||||
use yaak_plugins::api::{
|
use yaak_plugins::api::{
|
||||||
@@ -31,7 +32,6 @@ use yaak_plugins::events::{Color, Icon, PluginContext, ShowToastRequest};
|
|||||||
use yaak_plugins::install::{delete_and_uninstall, download_and_install};
|
use yaak_plugins::install::{delete_and_uninstall, download_and_install};
|
||||||
use yaak_plugins::manager::PluginManager;
|
use yaak_plugins::manager::PluginManager;
|
||||||
use yaak_plugins::plugin_meta::get_plugin_meta;
|
use yaak_plugins::plugin_meta::get_plugin_meta;
|
||||||
use yaak_api::yaak_api_client;
|
|
||||||
|
|
||||||
static EXITING: AtomicBool = AtomicBool::new(false);
|
static EXITING: AtomicBool = AtomicBool::new(false);
|
||||||
|
|
||||||
|
|||||||
@@ -1,10 +1,8 @@
|
|||||||
use log::info;
|
use log::info;
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
use yaak_http::path_placeholders::apply_path_placeholders;
|
pub use yaak::render::render_http_request;
|
||||||
use yaak_models::models::{
|
use yaak_models::models::{Environment, GrpcRequest, HttpRequestHeader};
|
||||||
Environment, GrpcRequest, HttpRequest, HttpRequestHeader, HttpUrlParameter,
|
|
||||||
};
|
|
||||||
use yaak_models::render::make_vars_hashmap;
|
use yaak_models::render::make_vars_hashmap;
|
||||||
use yaak_templates::{RenderOptions, TemplateCallback, parse_and_render, render_json_value_raw};
|
use yaak_templates::{RenderOptions, TemplateCallback, parse_and_render, render_json_value_raw};
|
||||||
|
|
||||||
@@ -85,151 +83,3 @@ pub async fn render_grpc_request<T: TemplateCallback>(
|
|||||||
|
|
||||||
Ok(GrpcRequest { url, metadata, authentication, ..r.to_owned() })
|
Ok(GrpcRequest { url, metadata, authentication, ..r.to_owned() })
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Render every templated part of an HTTP request (URL, query parameters,
/// headers, body, authentication) against the given environment chain.
///
/// Disabled parameters/headers and disabled form body entries are dropped
/// entirely. Path placeholders in the URL are substituted from the rendered
/// query parameters at the end. Returns a new `HttpRequest`; `r` is untouched.
pub async fn render_http_request<T: TemplateCallback>(
    r: &HttpRequest,
    environment_chain: Vec<Environment>,
    cb: &T,
    opt: &RenderOptions,
) -> yaak_templates::error::Result<HttpRequest> {
    // Flatten the environment chain into a single variable lookup table.
    let vars = &make_vars_hashmap(environment_chain);

    // Render enabled query parameters; disabled ones are omitted from the output.
    let mut url_parameters = Vec::new();
    for p in r.url_parameters.clone() {
        if !p.enabled {
            continue;
        }
        url_parameters.push(HttpUrlParameter {
            enabled: p.enabled,
            name: parse_and_render(p.name.as_str(), vars, cb, &opt).await?,
            value: parse_and_render(p.value.as_str(), vars, cb, &opt).await?,
            id: p.id,
        })
    }

    // Render enabled headers; disabled ones are omitted from the output.
    let mut headers = Vec::new();
    for p in r.headers.clone() {
        if !p.enabled {
            continue;
        }
        headers.push(HttpRequestHeader {
            enabled: p.enabled,
            name: parse_and_render(p.name.as_str(), vars, cb, &opt).await?,
            value: parse_and_render(p.value.as_str(), vars, cb, &opt).await?,
            id: p.id,
        })
    }

    // Render the body values. The "form" entry gets disabled rows stripped
    // before rendering so their templates are never evaluated.
    let mut body = BTreeMap::new();
    for (k, v) in r.body.clone() {
        let v = if k == "form" { strip_disabled_form_entries(v) } else { v };
        body.insert(k, render_json_value_raw(v, vars, cb, &opt).await?);
    }

    let authentication = {
        let mut disabled = false;
        let mut auth = BTreeMap::new();
        // "disabled" may be a literal bool or itself a template string; a
        // template rendering to an empty string counts as disabled.
        match r.authentication.get("disabled") {
            Some(Value::Bool(true)) => {
                disabled = true;
            }
            Some(Value::String(tmpl)) => {
                disabled = parse_and_render(tmpl.as_str(), vars, cb, &opt)
                    .await
                    .unwrap_or_default()
                    .is_empty();
                info!(
                    "Rendering authentication.disabled as a template: {disabled} from \"{tmpl}\""
                );
            }
            _ => {}
        }
        if disabled {
            // Short-circuit: don't render any other auth values when disabled.
            auth.insert("disabled".to_string(), Value::Bool(true));
        } else {
            for (k, v) in r.authentication.clone() {
                if k == "disabled" {
                    // Normalize to a literal false after evaluation.
                    auth.insert(k, Value::Bool(false));
                } else {
                    auth.insert(k, render_json_value_raw(v, vars, cb, &opt).await?);
                }
            }
        }
        auth
    };

    let url = parse_and_render(r.url.clone().as_str(), vars, cb, &opt).await?;

    // This doesn't fit perfectly with the concept of "rendering" but it kind of does
    let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);

    // Everything not rendered here is carried over from the original request.
    Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
}
|
||||||
|
|
||||||
/// Strip disabled entries from a JSON array of form objects.
|
|
||||||
fn strip_disabled_form_entries(v: Value) -> Value {
|
|
||||||
match v {
|
|
||||||
Value::Array(items) => Value::Array(
|
|
||||||
items
|
|
||||||
.into_iter()
|
|
||||||
.filter(|item| item.get("enabled").and_then(|e| e.as_bool()).unwrap_or(true))
|
|
||||||
.collect(),
|
|
||||||
),
|
|
||||||
v => v,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    // Disabled form entries are removed; enabled ones are preserved in order.
    #[test]
    fn test_strip_disabled_form_entries() {
        let input = json!([
            {"enabled": true, "name": "foo", "value": "bar"},
            {"enabled": false, "name": "disabled", "value": "gone"},
            {"enabled": true, "name": "baz", "value": "qux"},
        ]);
        let result = strip_disabled_form_entries(input);
        assert_eq!(
            result,
            json!([
                {"enabled": true, "name": "foo", "value": "bar"},
                {"enabled": true, "name": "baz", "value": "qux"},
            ])
        );
    }

    // When every entry is disabled, the result is an empty array (not null).
    #[test]
    fn test_strip_disabled_form_entries_all_disabled() {
        let input = json!([
            {"enabled": false, "name": "a", "value": "b"},
            {"enabled": false, "name": "c", "value": "d"},
        ]);
        let result = strip_disabled_form_entries(input);
        assert_eq!(result, json!([]));
    }

    // A missing "enabled" field defaults to enabled, so the entry is kept.
    #[test]
    fn test_strip_disabled_form_entries_missing_enabled_defaults_to_kept() {
        let input = json!([
            {"name": "no_enabled_field", "value": "kept"},
            {"enabled": false, "name": "disabled", "value": "gone"},
        ]);
        let result = strip_disabled_form_entries(input);
        assert_eq!(
            result,
            json!([
                {"name": "no_enabled_field", "value": "kept"},
            ])
        );
    }

    // Non-array values (strings, objects, etc.) must pass through untouched.
    #[test]
    fn test_strip_disabled_form_entries_non_array_passthrough() {
        let input = json!("just a string");
        let result = strip_disabled_form_entries(input.clone());
        assert_eq!(result, input);
    }
}
|
|
||||||
|
|||||||
@@ -8,11 +8,11 @@ use std::fs;
|
|||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use tauri::{AppHandle, Emitter, Manager, Runtime, Url};
|
use tauri::{AppHandle, Emitter, Manager, Runtime, Url};
|
||||||
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogKind};
|
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogKind};
|
||||||
|
use yaak_api::yaak_api_client;
|
||||||
use yaak_models::util::generate_id;
|
use yaak_models::util::generate_id;
|
||||||
use yaak_plugins::events::{Color, ShowToastRequest};
|
use yaak_plugins::events::{Color, ShowToastRequest};
|
||||||
use yaak_plugins::install::download_and_install;
|
use yaak_plugins::install::download_and_install;
|
||||||
use yaak_plugins::manager::PluginManager;
|
use yaak_plugins::manager::PluginManager;
|
||||||
use yaak_api::yaak_api_client;
|
|
||||||
|
|
||||||
pub(crate) async fn handle_deep_link<R: Runtime>(
|
pub(crate) async fn handle_deep_link<R: Runtime>(
|
||||||
app_handle: &AppHandle<R>,
|
app_handle: &AppHandle<R>,
|
||||||
|
|||||||
@@ -153,11 +153,8 @@ pub fn app_menu<R: Runtime>(app_handle: &AppHandle<R>) -> tauri::Result<Menu<R>>
|
|||||||
.build(app_handle)?,
|
.build(app_handle)?,
|
||||||
&MenuItemBuilder::with_id("dev.reset_size".to_string(), "Reset Size")
|
&MenuItemBuilder::with_id("dev.reset_size".to_string(), "Reset Size")
|
||||||
.build(app_handle)?,
|
.build(app_handle)?,
|
||||||
&MenuItemBuilder::with_id(
|
&MenuItemBuilder::with_id("dev.reset_size_16x9".to_string(), "Resize to 16x9")
|
||||||
"dev.reset_size_16x9".to_string(),
|
.build(app_handle)?,
|
||||||
"Resize to 16x9",
|
|
||||||
)
|
|
||||||
.build(app_handle)?,
|
|
||||||
&MenuItemBuilder::with_id(
|
&MenuItemBuilder::with_id(
|
||||||
"dev.reset_size_16x10".to_string(),
|
"dev.reset_size_16x10".to_string(),
|
||||||
"Resize to 16x10",
|
"Resize to 16x10",
|
||||||
|
|||||||
@@ -7,11 +7,11 @@ use std::ops::Add;
|
|||||||
use std::time::Duration;
|
use std::time::Duration;
|
||||||
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow, is_dev};
|
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow, is_dev};
|
||||||
use ts_rs::TS;
|
use ts_rs::TS;
|
||||||
|
use yaak_api::yaak_api_client;
|
||||||
use yaak_common::platform::get_os_str;
|
use yaak_common::platform::get_os_str;
|
||||||
use yaak_models::db_context::DbContext;
|
use yaak_models::db_context::DbContext;
|
||||||
use yaak_models::query_manager::QueryManager;
|
use yaak_models::query_manager::QueryManager;
|
||||||
use yaak_models::util::UpdateSource;
|
use yaak_models::util::UpdateSource;
|
||||||
use yaak_api::yaak_api_client;
|
|
||||||
|
|
||||||
/// Extension trait for accessing the QueryManager from Tauri Manager types.
|
/// Extension trait for accessing the QueryManager from Tauri Manager types.
|
||||||
/// This is needed temporarily until all crates are refactored to not use Tauri.
|
/// This is needed temporarily until all crates are refactored to not use Tauri.
|
||||||
@@ -159,10 +159,8 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
|
|||||||
let app_version = window.app_handle().package_info().version.to_string();
|
let app_version = window.app_handle().package_info().version.to_string();
|
||||||
let client = yaak_api_client(&app_version)?;
|
let client = yaak_api_client(&app_version)?;
|
||||||
let path = format!("/licenses/activations/{}/deactivate", activation_id);
|
let path = format!("/licenses/activations/{}/deactivate", activation_id);
|
||||||
let payload = DeactivateLicenseRequestPayload {
|
let payload =
|
||||||
app_platform: get_os_str().to_string(),
|
DeactivateLicenseRequestPayload { app_platform: get_os_str().to_string(), app_version };
|
||||||
app_version,
|
|
||||||
};
|
|
||||||
let response = client.post(build_url(&path)).json(&payload).send().await?;
|
let response = client.post(build_url(&path)).json(&payload).send().await?;
|
||||||
|
|
||||||
if response.status().is_client_error() {
|
if response.status().is_client_error() {
|
||||||
@@ -189,10 +187,8 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
|
|||||||
|
|
||||||
pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<LicenseCheckStatus> {
|
pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<LicenseCheckStatus> {
|
||||||
let app_version = window.app_handle().package_info().version.to_string();
|
let app_version = window.app_handle().package_info().version.to_string();
|
||||||
let payload = CheckActivationRequestPayload {
|
let payload =
|
||||||
app_platform: get_os_str().to_string(),
|
CheckActivationRequestPayload { app_platform: get_os_str().to_string(), app_version };
|
||||||
app_version,
|
|
||||||
};
|
|
||||||
let activation_id = get_activation_id(window.app_handle()).await;
|
let activation_id = get_activation_id(window.app_handle()).await;
|
||||||
|
|
||||||
let settings = window.db().get_settings();
|
let settings = window.db().get_settings();
|
||||||
|
|||||||
@@ -74,15 +74,31 @@ impl Display for HttpResponseEvent {
|
|||||||
};
|
};
|
||||||
write!(f, "* Redirect {} -> {} ({})", status, url, behavior_str)
|
write!(f, "* Redirect {} -> {} ({})", status, url, behavior_str)
|
||||||
}
|
}
|
||||||
HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
|
HttpResponseEvent::SendUrl {
|
||||||
|
method,
|
||||||
|
scheme,
|
||||||
|
username,
|
||||||
|
password,
|
||||||
|
host,
|
||||||
|
port,
|
||||||
|
path,
|
||||||
|
query,
|
||||||
|
fragment,
|
||||||
|
} => {
|
||||||
let auth_str = if username.is_empty() && password.is_empty() {
|
let auth_str = if username.is_empty() && password.is_empty() {
|
||||||
String::new()
|
String::new()
|
||||||
} else {
|
} else {
|
||||||
format!("{}:{}@", username, password)
|
format!("{}:{}@", username, password)
|
||||||
};
|
};
|
||||||
let query_str = if query.is_empty() { String::new() } else { format!("?{}", query) };
|
let query_str =
|
||||||
let fragment_str = if fragment.is_empty() { String::new() } else { format!("#{}", fragment) };
|
if query.is_empty() { String::new() } else { format!("?{}", query) };
|
||||||
write!(f, "> {} {}://{}{}:{}{}{}{}", method, scheme, auth_str, host, port, path, query_str, fragment_str)
|
let fragment_str =
|
||||||
|
if fragment.is_empty() { String::new() } else { format!("#{}", fragment) };
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"> {} {}://{}{}:{}{}{}{}",
|
||||||
|
method, scheme, auth_str, host, port, path, query_str, fragment_str
|
||||||
|
)
|
||||||
}
|
}
|
||||||
HttpResponseEvent::ReceiveUrl { version, status } => {
|
HttpResponseEvent::ReceiveUrl { version, status } => {
|
||||||
write!(f, "< {} {}", version_to_str(version), status)
|
write!(f, "< {} {}", version_to_str(version), status)
|
||||||
@@ -122,7 +138,17 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
|
|||||||
RedirectBehavior::DropBody => "drop_body".to_string(),
|
RedirectBehavior::DropBody => "drop_body".to_string(),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
|
HttpResponseEvent::SendUrl {
|
||||||
|
method,
|
||||||
|
scheme,
|
||||||
|
username,
|
||||||
|
password,
|
||||||
|
host,
|
||||||
|
port,
|
||||||
|
path,
|
||||||
|
query,
|
||||||
|
fragment,
|
||||||
|
} => {
|
||||||
D::SendUrl { method, scheme, username, password, host, port, path, query, fragment }
|
D::SendUrl { method, scheme, username, password, host, port, path, query, fragment }
|
||||||
}
|
}
|
||||||
HttpResponseEvent::ReceiveUrl { version, status } => {
|
HttpResponseEvent::ReceiveUrl { version, status } => {
|
||||||
@@ -546,7 +572,10 @@ impl<S> SizedBody<S> {
|
|||||||
|
|
||||||
impl<S> HttpBody for SizedBody<S>
|
impl<S> HttpBody for SizedBody<S>
|
||||||
where
|
where
|
||||||
S: futures_util::Stream<Item = std::result::Result<Bytes, std::io::Error>> + Send + Unpin + 'static,
|
S: futures_util::Stream<Item = std::result::Result<Bytes, std::io::Error>>
|
||||||
|
+ Send
|
||||||
|
+ Unpin
|
||||||
|
+ 'static,
|
||||||
{
|
{
|
||||||
type Data = Bytes;
|
type Data = Bytes;
|
||||||
type Error = std::io::Error;
|
type Error = std::io::Error;
|
||||||
|
|||||||
@@ -37,10 +37,9 @@ impl From<SendableBodyWithMeta> for SendableBody {
|
|||||||
fn from(value: SendableBodyWithMeta) -> Self {
|
fn from(value: SendableBodyWithMeta) -> Self {
|
||||||
match value {
|
match value {
|
||||||
SendableBodyWithMeta::Bytes(b) => SendableBody::Bytes(b),
|
SendableBodyWithMeta::Bytes(b) => SendableBody::Bytes(b),
|
||||||
SendableBodyWithMeta::Stream { data, content_length } => SendableBody::Stream {
|
SendableBodyWithMeta::Stream { data, content_length } => {
|
||||||
data,
|
SendableBody::Stream { data, content_length: content_length.map(|l| l as u64) }
|
||||||
content_length: content_length.map(|l| l as u64),
|
}
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,12 @@
|
|||||||
|
CREATE TABLE model_changes
|
||||||
|
(
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
model TEXT NOT NULL,
|
||||||
|
model_id TEXT NOT NULL,
|
||||||
|
change TEXT NOT NULL,
|
||||||
|
update_source TEXT NOT NULL,
|
||||||
|
payload TEXT NOT NULL,
|
||||||
|
created_at DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_model_changes_created_at ON model_changes (created_at);
|
||||||
@@ -3,8 +3,7 @@ use crate::error::Error::ModelNotFound;
|
|||||||
use crate::error::Result;
|
use crate::error::Result;
|
||||||
use crate::models::{AnyModel, UpsertModelInfo};
|
use crate::models::{AnyModel, UpsertModelInfo};
|
||||||
use crate::util::{ModelChangeEvent, ModelPayload, UpdateSource};
|
use crate::util::{ModelChangeEvent, ModelPayload, UpdateSource};
|
||||||
use log::error;
|
use rusqlite::{OptionalExtension, params};
|
||||||
use rusqlite::OptionalExtension;
|
|
||||||
use sea_query::{
|
use sea_query::{
|
||||||
Asterisk, Expr, Func, IntoColumnRef, IntoIden, IntoTableRef, OnConflict, Query, SimpleExpr,
|
Asterisk, Expr, Func, IntoColumnRef, IntoIden, IntoTableRef, OnConflict, Query, SimpleExpr,
|
||||||
SqliteQueryBuilder,
|
SqliteQueryBuilder,
|
||||||
@@ -14,7 +13,7 @@ use std::fmt::Debug;
|
|||||||
use std::sync::mpsc;
|
use std::sync::mpsc;
|
||||||
|
|
||||||
pub struct DbContext<'a> {
|
pub struct DbContext<'a> {
|
||||||
pub(crate) events_tx: mpsc::Sender<ModelPayload>,
|
pub(crate) _events_tx: mpsc::Sender<ModelPayload>,
|
||||||
pub(crate) conn: ConnectionOrTx<'a>,
|
pub(crate) conn: ConnectionOrTx<'a>,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -180,9 +179,8 @@ impl<'a> DbContext<'a> {
|
|||||||
change: ModelChangeEvent::Upsert { created },
|
change: ModelChangeEvent::Upsert { created },
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Err(e) = self.events_tx.send(payload.clone()) {
|
self.record_model_change(&payload)?;
|
||||||
error!("Failed to send model change {source:?}: {e:?}");
|
let _ = self._events_tx.send(payload);
|
||||||
}
|
|
||||||
|
|
||||||
Ok(m)
|
Ok(m)
|
||||||
}
|
}
|
||||||
@@ -203,9 +201,31 @@ impl<'a> DbContext<'a> {
|
|||||||
change: ModelChangeEvent::Delete,
|
change: ModelChangeEvent::Delete,
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Err(e) = self.events_tx.send(payload) {
|
self.record_model_change(&payload)?;
|
||||||
error!("Failed to send model change {source:?}: {e:?}");
|
let _ = self._events_tx.send(payload);
|
||||||
}
|
|
||||||
Ok(m.clone())
|
Ok(m.clone())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn record_model_change(&self, payload: &ModelPayload) -> Result<()> {
|
||||||
|
let payload_json = serde_json::to_string(payload)?;
|
||||||
|
let source_json = serde_json::to_string(&payload.update_source)?;
|
||||||
|
let change_json = serde_json::to_string(&payload.change)?;
|
||||||
|
|
||||||
|
self.conn.resolve().execute(
|
||||||
|
r#"
|
||||||
|
INSERT INTO model_changes (model, model_id, change, update_source, payload)
|
||||||
|
VALUES (?1, ?2, ?3, ?4, ?5)
|
||||||
|
"#,
|
||||||
|
params![
|
||||||
|
payload.model.model(),
|
||||||
|
payload.model.id(),
|
||||||
|
change_json,
|
||||||
|
source_json,
|
||||||
|
payload_json,
|
||||||
|
],
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2347,6 +2347,15 @@ macro_rules! define_any_model {
|
|||||||
)*
|
)*
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
pub fn model(&self) -> &str {
|
||||||
|
match self {
|
||||||
|
$(
|
||||||
|
AnyModel::$type(inner) => &inner.model,
|
||||||
|
)*
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
$(
|
$(
|
||||||
@@ -2400,30 +2409,29 @@ impl<'de> Deserialize<'de> for AnyModel {
|
|||||||
{
|
{
|
||||||
let value = Value::deserialize(deserializer)?;
|
let value = Value::deserialize(deserializer)?;
|
||||||
let model = value.as_object().unwrap();
|
let model = value.as_object().unwrap();
|
||||||
|
use AnyModel::*;
|
||||||
use serde_json::from_value as fv;
|
use serde_json::from_value as fv;
|
||||||
|
|
||||||
let model = match model.get("model") {
|
let model = match model.get("model") {
|
||||||
Some(m) if m == "cookie_jar" => AnyModel::CookieJar(fv(value).unwrap()),
|
Some(m) if m == "cookie_jar" => CookieJar(fv(value).unwrap()),
|
||||||
Some(m) if m == "environment" => AnyModel::Environment(fv(value).unwrap()),
|
Some(m) if m == "environment" => Environment(fv(value).unwrap()),
|
||||||
Some(m) if m == "folder" => AnyModel::Folder(fv(value).unwrap()),
|
Some(m) if m == "folder" => Folder(fv(value).unwrap()),
|
||||||
Some(m) if m == "graphql_introspection" => {
|
Some(m) if m == "graphql_introspection" => GraphQlIntrospection(fv(value).unwrap()),
|
||||||
AnyModel::GraphQlIntrospection(fv(value).unwrap())
|
Some(m) if m == "grpc_connection" => GrpcConnection(fv(value).unwrap()),
|
||||||
}
|
Some(m) if m == "grpc_event" => GrpcEvent(fv(value).unwrap()),
|
||||||
Some(m) if m == "grpc_connection" => AnyModel::GrpcConnection(fv(value).unwrap()),
|
Some(m) if m == "grpc_request" => GrpcRequest(fv(value).unwrap()),
|
||||||
Some(m) if m == "grpc_event" => AnyModel::GrpcEvent(fv(value).unwrap()),
|
Some(m) if m == "http_request" => HttpRequest(fv(value).unwrap()),
|
||||||
Some(m) if m == "grpc_request" => AnyModel::GrpcRequest(fv(value).unwrap()),
|
Some(m) if m == "http_response" => HttpResponse(fv(value).unwrap()),
|
||||||
Some(m) if m == "http_request" => AnyModel::HttpRequest(fv(value).unwrap()),
|
Some(m) if m == "http_response_event" => HttpResponseEvent(fv(value).unwrap()),
|
||||||
Some(m) if m == "http_response" => AnyModel::HttpResponse(fv(value).unwrap()),
|
Some(m) if m == "key_value" => KeyValue(fv(value).unwrap()),
|
||||||
Some(m) if m == "key_value" => AnyModel::KeyValue(fv(value).unwrap()),
|
Some(m) if m == "plugin" => Plugin(fv(value).unwrap()),
|
||||||
Some(m) if m == "plugin" => AnyModel::Plugin(fv(value).unwrap()),
|
Some(m) if m == "settings" => Settings(fv(value).unwrap()),
|
||||||
Some(m) if m == "settings" => AnyModel::Settings(fv(value).unwrap()),
|
Some(m) if m == "sync_state" => SyncState(fv(value).unwrap()),
|
||||||
Some(m) if m == "websocket_connection" => {
|
Some(m) if m == "websocket_connection" => WebsocketConnection(fv(value).unwrap()),
|
||||||
AnyModel::WebsocketConnection(fv(value).unwrap())
|
Some(m) if m == "websocket_event" => WebsocketEvent(fv(value).unwrap()),
|
||||||
}
|
Some(m) if m == "websocket_request" => WebsocketRequest(fv(value).unwrap()),
|
||||||
Some(m) if m == "websocket_event" => AnyModel::WebsocketEvent(fv(value).unwrap()),
|
Some(m) if m == "workspace" => Workspace(fv(value).unwrap()),
|
||||||
Some(m) if m == "websocket_request" => AnyModel::WebsocketRequest(fv(value).unwrap()),
|
Some(m) if m == "workspace_meta" => WorkspaceMeta(fv(value).unwrap()),
|
||||||
Some(m) if m == "workspace" => AnyModel::Workspace(fv(value).unwrap()),
|
|
||||||
Some(m) if m == "workspace_meta" => AnyModel::WorkspaceMeta(fv(value).unwrap()),
|
|
||||||
Some(m) => {
|
Some(m) => {
|
||||||
return Err(serde::de::Error::custom(format!(
|
return Err(serde::de::Error::custom(format!(
|
||||||
"Failed to deserialize AnyModel {}",
|
"Failed to deserialize AnyModel {}",
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ mod http_requests;
|
|||||||
mod http_response_events;
|
mod http_response_events;
|
||||||
mod http_responses;
|
mod http_responses;
|
||||||
mod key_values;
|
mod key_values;
|
||||||
|
mod model_changes;
|
||||||
mod plugin_key_values;
|
mod plugin_key_values;
|
||||||
mod plugins;
|
mod plugins;
|
||||||
mod settings;
|
mod settings;
|
||||||
@@ -20,6 +21,7 @@ mod websocket_events;
|
|||||||
mod websocket_requests;
|
mod websocket_requests;
|
||||||
mod workspace_metas;
|
mod workspace_metas;
|
||||||
pub mod workspaces;
|
pub mod workspaces;
|
||||||
|
pub use model_changes::PersistedModelChange;
|
||||||
|
|
||||||
const MAX_HISTORY_ITEMS: usize = 20;
|
const MAX_HISTORY_ITEMS: usize = 20;
|
||||||
|
|
||||||
|
|||||||
289
crates/yaak-models/src/queries/model_changes.rs
Normal file
289
crates/yaak-models/src/queries/model_changes.rs
Normal file
@@ -0,0 +1,289 @@
|
|||||||
|
use crate::db_context::DbContext;
|
||||||
|
use crate::error::Result;
|
||||||
|
use crate::util::ModelPayload;
|
||||||
|
use rusqlite::params;
|
||||||
|
use rusqlite::types::Type;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct PersistedModelChange {
|
||||||
|
pub id: i64,
|
||||||
|
pub created_at: String,
|
||||||
|
pub payload: ModelPayload,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> DbContext<'a> {
|
||||||
|
pub fn list_model_changes_after(
|
||||||
|
&self,
|
||||||
|
after_id: i64,
|
||||||
|
limit: usize,
|
||||||
|
) -> Result<Vec<PersistedModelChange>> {
|
||||||
|
let mut stmt = self.conn.prepare(
|
||||||
|
r#"
|
||||||
|
SELECT id, created_at, payload
|
||||||
|
FROM model_changes
|
||||||
|
WHERE id > ?1
|
||||||
|
ORDER BY id ASC
|
||||||
|
LIMIT ?2
|
||||||
|
"#,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let items = stmt.query_map(params![after_id, limit as i64], |row| {
|
||||||
|
let id: i64 = row.get(0)?;
|
||||||
|
let created_at: String = row.get(1)?;
|
||||||
|
let payload_raw: String = row.get(2)?;
|
||||||
|
let payload = serde_json::from_str::<ModelPayload>(&payload_raw).map_err(|e| {
|
||||||
|
rusqlite::Error::FromSqlConversionFailure(2, Type::Text, Box::new(e))
|
||||||
|
})?;
|
||||||
|
Ok(PersistedModelChange { id, created_at, payload })
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok(items.collect::<std::result::Result<Vec<_>, rusqlite::Error>>()?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn list_model_changes_since(
|
||||||
|
&self,
|
||||||
|
since_created_at: &str,
|
||||||
|
since_id: i64,
|
||||||
|
limit: usize,
|
||||||
|
) -> Result<Vec<PersistedModelChange>> {
|
||||||
|
let mut stmt = self.conn.prepare(
|
||||||
|
r#"
|
||||||
|
SELECT id, created_at, payload
|
||||||
|
FROM model_changes
|
||||||
|
WHERE created_at > ?1
|
||||||
|
OR (created_at = ?1 AND id > ?2)
|
||||||
|
ORDER BY created_at ASC, id ASC
|
||||||
|
LIMIT ?3
|
||||||
|
"#,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let items = stmt.query_map(params![since_created_at, since_id, limit as i64], |row| {
|
||||||
|
let id: i64 = row.get(0)?;
|
||||||
|
let created_at: String = row.get(1)?;
|
||||||
|
let payload_raw: String = row.get(2)?;
|
||||||
|
let payload = serde_json::from_str::<ModelPayload>(&payload_raw).map_err(|e| {
|
||||||
|
rusqlite::Error::FromSqlConversionFailure(2, Type::Text, Box::new(e))
|
||||||
|
})?;
|
||||||
|
Ok(PersistedModelChange { id, created_at, payload })
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok(items.collect::<std::result::Result<Vec<_>, rusqlite::Error>>()?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn prune_model_changes_older_than_days(&self, days: i64) -> Result<usize> {
|
||||||
|
let offset = format!("-{days} days");
|
||||||
|
Ok(self.conn.resolve().execute(
|
||||||
|
r#"
|
||||||
|
DELETE FROM model_changes
|
||||||
|
WHERE created_at < STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', ?1)
|
||||||
|
"#,
|
||||||
|
params![offset],
|
||||||
|
)?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn prune_model_changes_older_than_hours(&self, hours: i64) -> Result<usize> {
|
||||||
|
let offset = format!("-{hours} hours");
|
||||||
|
Ok(self.conn.resolve().execute(
|
||||||
|
r#"
|
||||||
|
DELETE FROM model_changes
|
||||||
|
WHERE created_at < STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', ?1)
|
||||||
|
"#,
|
||||||
|
params![offset],
|
||||||
|
)?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use crate::init_in_memory;
|
||||||
|
use crate::models::Workspace;
|
||||||
|
use crate::util::{ModelChangeEvent, UpdateSource};
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn records_model_changes_for_upsert_and_delete() {
|
||||||
|
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
|
||||||
|
let db = query_manager.connect();
|
||||||
|
|
||||||
|
let workspace = db
|
||||||
|
.upsert_workspace(
|
||||||
|
&Workspace {
|
||||||
|
name: "Changes Test".to_string(),
|
||||||
|
setting_follow_redirects: true,
|
||||||
|
setting_validate_certificates: true,
|
||||||
|
..Default::default()
|
||||||
|
},
|
||||||
|
&UpdateSource::Sync,
|
||||||
|
)
|
||||||
|
.expect("Failed to upsert workspace");
|
||||||
|
|
||||||
|
let created_changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
|
||||||
|
assert_eq!(created_changes.len(), 1);
|
||||||
|
assert_eq!(created_changes[0].payload.model.id(), workspace.id);
|
||||||
|
assert_eq!(created_changes[0].payload.model.model(), "workspace");
|
||||||
|
assert!(matches!(
|
||||||
|
created_changes[0].payload.change,
|
||||||
|
ModelChangeEvent::Upsert { created: true }
|
||||||
|
));
|
||||||
|
assert!(matches!(created_changes[0].payload.update_source, UpdateSource::Sync));
|
||||||
|
|
||||||
|
db.delete_workspace_by_id(&workspace.id, &UpdateSource::Sync)
|
||||||
|
.expect("Failed to delete workspace");
|
||||||
|
|
||||||
|
let all_changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
|
||||||
|
assert_eq!(all_changes.len(), 2);
|
||||||
|
assert!(matches!(all_changes[1].payload.change, ModelChangeEvent::Delete));
|
||||||
|
assert!(all_changes[1].id > all_changes[0].id);
|
||||||
|
|
||||||
|
let changes_after_first = db
|
||||||
|
.list_model_changes_after(all_changes[0].id, 10)
|
||||||
|
.expect("Failed to list changes after cursor");
|
||||||
|
assert_eq!(changes_after_first.len(), 1);
|
||||||
|
assert!(matches!(changes_after_first[0].payload.change, ModelChangeEvent::Delete));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn prunes_old_model_changes() {
|
||||||
|
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
|
||||||
|
let db = query_manager.connect();
|
||||||
|
|
||||||
|
db.upsert_workspace(
|
||||||
|
&Workspace {
|
||||||
|
name: "Prune Test".to_string(),
|
||||||
|
setting_follow_redirects: true,
|
||||||
|
setting_validate_certificates: true,
|
||||||
|
..Default::default()
|
||||||
|
},
|
||||||
|
&UpdateSource::Sync,
|
||||||
|
)
|
||||||
|
.expect("Failed to upsert workspace");
|
||||||
|
|
||||||
|
let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
|
||||||
|
assert_eq!(changes.len(), 1);
|
||||||
|
|
||||||
|
db.conn
|
||||||
|
.resolve()
|
||||||
|
.execute(
|
||||||
|
"UPDATE model_changes SET created_at = '2000-01-01 00:00:00.000' WHERE id = ?1",
|
||||||
|
params![changes[0].id],
|
||||||
|
)
|
||||||
|
.expect("Failed to age model change row");
|
||||||
|
|
||||||
|
let pruned =
|
||||||
|
db.prune_model_changes_older_than_days(30).expect("Failed to prune model changes");
|
||||||
|
assert_eq!(pruned, 1);
|
||||||
|
assert!(db.list_model_changes_after(0, 10).expect("Failed to list changes").is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn list_model_changes_since_uses_timestamp_with_id_tiebreaker() {
|
||||||
|
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
|
||||||
|
let db = query_manager.connect();
|
||||||
|
|
||||||
|
let workspace = db
|
||||||
|
.upsert_workspace(
|
||||||
|
&Workspace {
|
||||||
|
name: "Cursor Test".to_string(),
|
||||||
|
setting_follow_redirects: true,
|
||||||
|
setting_validate_certificates: true,
|
||||||
|
..Default::default()
|
||||||
|
},
|
||||||
|
&UpdateSource::Sync,
|
||||||
|
)
|
||||||
|
.expect("Failed to upsert workspace");
|
||||||
|
db.delete_workspace_by_id(&workspace.id, &UpdateSource::Sync)
|
||||||
|
.expect("Failed to delete workspace");
|
||||||
|
|
||||||
|
let all = db.list_model_changes_after(0, 10).expect("Failed to list changes");
|
||||||
|
assert_eq!(all.len(), 2);
|
||||||
|
|
||||||
|
let fixed_ts = "2026-02-16 00:00:00.000";
|
||||||
|
db.conn
|
||||||
|
.resolve()
|
||||||
|
.execute("UPDATE model_changes SET created_at = ?1", params![fixed_ts])
|
||||||
|
.expect("Failed to normalize timestamps");
|
||||||
|
|
||||||
|
let after_first =
|
||||||
|
db.list_model_changes_since(fixed_ts, all[0].id, 10).expect("Failed to query cursor");
|
||||||
|
assert_eq!(after_first.len(), 1);
|
||||||
|
assert_eq!(after_first[0].id, all[1].id);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn prunes_old_model_changes_by_hours() {
|
||||||
|
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
|
||||||
|
let db = query_manager.connect();
|
||||||
|
|
||||||
|
db.upsert_workspace(
|
||||||
|
&Workspace {
|
||||||
|
name: "Prune Hour Test".to_string(),
|
||||||
|
setting_follow_redirects: true,
|
||||||
|
setting_validate_certificates: true,
|
||||||
|
..Default::default()
|
||||||
|
},
|
||||||
|
&UpdateSource::Sync,
|
||||||
|
)
|
||||||
|
.expect("Failed to upsert workspace");
|
||||||
|
|
||||||
|
let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
|
||||||
|
assert_eq!(changes.len(), 1);
|
||||||
|
|
||||||
|
db.conn
|
||||||
|
.resolve()
|
||||||
|
.execute(
|
||||||
|
"UPDATE model_changes SET created_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', '-2 hours') WHERE id = ?1",
|
||||||
|
params![changes[0].id],
|
||||||
|
)
|
||||||
|
.expect("Failed to age model change row");
|
||||||
|
|
||||||
|
let pruned =
|
||||||
|
db.prune_model_changes_older_than_hours(1).expect("Failed to prune model changes");
|
||||||
|
assert_eq!(pruned, 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn list_model_changes_deserializes_http_response_event_payload() {
|
||||||
|
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
|
||||||
|
let db = query_manager.connect();
|
||||||
|
|
||||||
|
let payload = json!({
|
||||||
|
"model": {
|
||||||
|
"model": "http_response_event",
|
||||||
|
"id": "re_test",
|
||||||
|
"createdAt": "2026-02-16T21:01:34.809162",
|
||||||
|
"updatedAt": "2026-02-16T21:01:34.809163",
|
||||||
|
"workspaceId": "wk_test",
|
||||||
|
"responseId": "rs_test",
|
||||||
|
"event": {
|
||||||
|
"type": "info",
|
||||||
|
"message": "hello"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"updateSource": { "type": "sync" },
|
||||||
|
"change": { "type": "upsert", "created": false }
|
||||||
|
});
|
||||||
|
|
||||||
|
db.conn
|
||||||
|
.resolve()
|
||||||
|
.execute(
|
||||||
|
r#"
|
||||||
|
INSERT INTO model_changes (model, model_id, change, update_source, payload)
|
||||||
|
VALUES (?1, ?2, ?3, ?4, ?5)
|
||||||
|
"#,
|
||||||
|
params![
|
||||||
|
"http_response_event",
|
||||||
|
"re_test",
|
||||||
|
r#"{"type":"upsert","created":false}"#,
|
||||||
|
r#"{"type":"sync"}"#,
|
||||||
|
payload.to_string(),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
.expect("Failed to insert model change row");
|
||||||
|
|
||||||
|
let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
|
||||||
|
assert_eq!(changes.len(), 1);
|
||||||
|
assert_eq!(changes[0].payload.model.model(), "http_response_event");
|
||||||
|
assert_eq!(changes[0].payload.model.id(), "re_test");
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -25,7 +25,7 @@ impl QueryManager {
|
|||||||
.expect("Failed to gain lock on DB")
|
.expect("Failed to gain lock on DB")
|
||||||
.get()
|
.get()
|
||||||
.expect("Failed to get a new DB connection from the pool");
|
.expect("Failed to get a new DB connection from the pool");
|
||||||
DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) }
|
DbContext { _events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn with_conn<F, T>(&self, func: F) -> T
|
pub fn with_conn<F, T>(&self, func: F) -> T
|
||||||
@@ -39,8 +39,10 @@ impl QueryManager {
|
|||||||
.get()
|
.get()
|
||||||
.expect("Failed to get new DB connection from the pool");
|
.expect("Failed to get new DB connection from the pool");
|
||||||
|
|
||||||
let db_context =
|
let db_context = DbContext {
|
||||||
DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) };
|
_events_tx: self.events_tx.clone(),
|
||||||
|
conn: ConnectionOrTx::Connection(conn),
|
||||||
|
};
|
||||||
|
|
||||||
func(&db_context)
|
func(&db_context)
|
||||||
}
|
}
|
||||||
@@ -62,8 +64,10 @@ impl QueryManager {
|
|||||||
.transaction_with_behavior(TransactionBehavior::Immediate)
|
.transaction_with_behavior(TransactionBehavior::Immediate)
|
||||||
.expect("Failed to start DB transaction");
|
.expect("Failed to start DB transaction");
|
||||||
|
|
||||||
let db_context =
|
let db_context = DbContext {
|
||||||
DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Transaction(&tx) };
|
_events_tx: self.events_tx.clone(),
|
||||||
|
conn: ConnectionOrTx::Transaction(&tx),
|
||||||
|
};
|
||||||
|
|
||||||
match func(&db_context) {
|
match func(&db_context) {
|
||||||
Ok(val) => {
|
Ok(val) => {
|
||||||
|
|||||||
@@ -68,7 +68,9 @@ pub async fn start_nodejs_plugin_runtime(
|
|||||||
// Handle kill signal
|
// Handle kill signal
|
||||||
let mut kill_rx = kill_rx.clone();
|
let mut kill_rx = kill_rx.clone();
|
||||||
tokio::spawn(async move {
|
tokio::spawn(async move {
|
||||||
kill_rx.wait_for(|b| *b == true).await.expect("Kill channel errored");
|
if kill_rx.wait_for(|b| *b == true).await.is_err() {
|
||||||
|
warn!("Kill channel closed before explicit shutdown; terminating plugin runtime");
|
||||||
|
}
|
||||||
info!("Killing plugin runtime");
|
info!("Killing plugin runtime");
|
||||||
if let Err(e) = child.kill().await {
|
if let Err(e) = child.kill().await {
|
||||||
warn!("Failed to kill plugin runtime: {e}");
|
warn!("Failed to kill plugin runtime: {e}");
|
||||||
|
|||||||
19
crates/yaak/Cargo.toml
Normal file
19
crates/yaak/Cargo.toml
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
[package]
|
||||||
|
name = "yaak"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2024"
|
||||||
|
publish = false
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
async-trait = "0.1"
|
||||||
|
log = { workspace = true }
|
||||||
|
md5 = "0.8.0"
|
||||||
|
serde_json = { workspace = true }
|
||||||
|
thiserror = { workspace = true }
|
||||||
|
tokio = { workspace = true, features = ["sync", "rt"] }
|
||||||
|
yaak-http = { workspace = true }
|
||||||
|
yaak-crypto = { workspace = true }
|
||||||
|
yaak-models = { workspace = true }
|
||||||
|
yaak-plugins = { workspace = true }
|
||||||
|
yaak-templates = { workspace = true }
|
||||||
|
yaak-tls = { workspace = true }
|
||||||
9
crates/yaak/src/error.rs
Normal file
9
crates/yaak/src/error.rs
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
use thiserror::Error;
|
||||||
|
|
||||||
|
#[derive(Debug, Error)]
|
||||||
|
pub enum Error {
|
||||||
|
#[error(transparent)]
|
||||||
|
Send(#[from] crate::send::SendHttpRequestError),
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type Result<T> = std::result::Result<T, Error>;
|
||||||
6
crates/yaak/src/lib.rs
Normal file
6
crates/yaak/src/lib.rs
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
pub mod error;
|
||||||
|
pub mod render;
|
||||||
|
pub mod send;
|
||||||
|
|
||||||
|
pub use error::Error;
|
||||||
|
pub type Result<T> = error::Result<T>;
|
||||||
157
crates/yaak/src/render.rs
Normal file
157
crates/yaak/src/render.rs
Normal file
@@ -0,0 +1,157 @@
|
|||||||
|
use log::info;
|
||||||
|
use serde_json::Value;
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
use yaak_http::path_placeholders::apply_path_placeholders;
|
||||||
|
use yaak_models::models::{Environment, HttpRequest, HttpRequestHeader, HttpUrlParameter};
|
||||||
|
use yaak_models::render::make_vars_hashmap;
|
||||||
|
use yaak_templates::{RenderOptions, TemplateCallback, parse_and_render, render_json_value_raw};
|
||||||
|
|
||||||
|
pub async fn render_http_request<T: TemplateCallback>(
|
||||||
|
request: &HttpRequest,
|
||||||
|
environment_chain: Vec<Environment>,
|
||||||
|
callback: &T,
|
||||||
|
options: &RenderOptions,
|
||||||
|
) -> yaak_templates::error::Result<HttpRequest> {
|
||||||
|
let vars = &make_vars_hashmap(environment_chain);
|
||||||
|
|
||||||
|
let mut url_parameters = Vec::new();
|
||||||
|
for parameter in request.url_parameters.clone() {
|
||||||
|
if !parameter.enabled {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
url_parameters.push(HttpUrlParameter {
|
||||||
|
enabled: parameter.enabled,
|
||||||
|
name: parse_and_render(parameter.name.as_str(), vars, callback, options).await?,
|
||||||
|
value: parse_and_render(parameter.value.as_str(), vars, callback, options).await?,
|
||||||
|
id: parameter.id,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut headers = Vec::new();
|
||||||
|
for header in request.headers.clone() {
|
||||||
|
if !header.enabled {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
headers.push(HttpRequestHeader {
|
||||||
|
enabled: header.enabled,
|
||||||
|
name: parse_and_render(header.name.as_str(), vars, callback, options).await?,
|
||||||
|
value: parse_and_render(header.value.as_str(), vars, callback, options).await?,
|
||||||
|
id: header.id,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut body = BTreeMap::new();
|
||||||
|
for (key, value) in request.body.clone() {
|
||||||
|
let value = if key == "form" { strip_disabled_form_entries(value) } else { value };
|
||||||
|
body.insert(key, render_json_value_raw(value, vars, callback, options).await?);
|
||||||
|
}
|
||||||
|
|
||||||
|
let authentication = {
|
||||||
|
let mut disabled = false;
|
||||||
|
let mut auth = BTreeMap::new();
|
||||||
|
|
||||||
|
match request.authentication.get("disabled") {
|
||||||
|
Some(Value::Bool(true)) => {
|
||||||
|
disabled = true;
|
||||||
|
}
|
||||||
|
Some(Value::String(template)) => {
|
||||||
|
disabled = parse_and_render(template.as_str(), vars, callback, options)
|
||||||
|
.await
|
||||||
|
.unwrap_or_default()
|
||||||
|
.is_empty();
|
||||||
|
info!(
|
||||||
|
"Rendering authentication.disabled as a template: {disabled} from \"{template}\""
|
||||||
|
);
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
if disabled {
|
||||||
|
auth.insert("disabled".to_string(), Value::Bool(true));
|
||||||
|
} else {
|
||||||
|
for (key, value) in request.authentication.clone() {
|
||||||
|
if key == "disabled" {
|
||||||
|
auth.insert(key, Value::Bool(false));
|
||||||
|
} else {
|
||||||
|
auth.insert(key, render_json_value_raw(value, vars, callback, options).await?);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
auth
|
||||||
|
};
|
||||||
|
|
||||||
|
let url = parse_and_render(request.url.clone().as_str(), vars, callback, options).await?;
|
||||||
|
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
|
||||||
|
|
||||||
|
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..request.to_owned() })
|
||||||
|
}
|
||||||
|
|
||||||
|
fn strip_disabled_form_entries(v: Value) -> Value {
|
||||||
|
match v {
|
||||||
|
Value::Array(items) => Value::Array(
|
||||||
|
items
|
||||||
|
.into_iter()
|
||||||
|
.filter(|item| item.get("enabled").and_then(|e| e.as_bool()).unwrap_or(true))
|
||||||
|
.collect(),
|
||||||
|
),
|
||||||
|
v => v,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_strip_disabled_form_entries() {
|
||||||
|
let input = json!([
|
||||||
|
{"enabled": true, "name": "foo", "value": "bar"},
|
||||||
|
{"enabled": false, "name": "disabled", "value": "gone"},
|
||||||
|
{"enabled": true, "name": "baz", "value": "qux"},
|
||||||
|
]);
|
||||||
|
let result = strip_disabled_form_entries(input);
|
||||||
|
assert_eq!(
|
||||||
|
result,
|
||||||
|
json!([
|
||||||
|
{"enabled": true, "name": "foo", "value": "bar"},
|
||||||
|
{"enabled": true, "name": "baz", "value": "qux"},
|
||||||
|
])
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_strip_disabled_form_entries_all_disabled() {
|
||||||
|
let input = json!([
|
||||||
|
{"enabled": false, "name": "a", "value": "b"},
|
||||||
|
{"enabled": false, "name": "c", "value": "d"},
|
||||||
|
]);
|
||||||
|
let result = strip_disabled_form_entries(input);
|
||||||
|
assert_eq!(result, json!([]));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_strip_disabled_form_entries_missing_enabled_defaults_to_kept() {
|
||||||
|
let input = json!([
|
||||||
|
{"name": "no_enabled_field", "value": "kept"},
|
||||||
|
{"enabled": false, "name": "disabled", "value": "gone"},
|
||||||
|
]);
|
||||||
|
let result = strip_disabled_form_entries(input);
|
||||||
|
assert_eq!(
|
||||||
|
result,
|
||||||
|
json!([
|
||||||
|
{"name": "no_enabled_field", "value": "kept"},
|
||||||
|
])
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_strip_disabled_form_entries_non_array_passthrough() {
|
||||||
|
let input = json!("just a string");
|
||||||
|
let result = strip_disabled_form_entries(input.clone());
|
||||||
|
assert_eq!(result, input);
|
||||||
|
}
|
||||||
|
}
|
||||||
813
crates/yaak/src/send.rs
Normal file
813
crates/yaak/src/send.rs
Normal file
@@ -0,0 +1,813 @@
|
|||||||
|
use crate::render::render_http_request;
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use log::warn;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::sync::Arc;
|
||||||
|
use std::time::Instant;
|
||||||
|
use thiserror::Error;
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
use tokio::sync::watch;
|
||||||
|
use yaak_crypto::manager::EncryptionManager;
|
||||||
|
use yaak_http::client::{
|
||||||
|
HttpConnectionOptions, HttpConnectionProxySetting, HttpConnectionProxySettingAuth,
|
||||||
|
};
|
||||||
|
use yaak_http::cookies::CookieStore;
|
||||||
|
use yaak_http::manager::HttpConnectionManager;
|
||||||
|
use yaak_http::sender::{HttpResponseEvent as SenderHttpResponseEvent, ReqwestSender};
|
||||||
|
use yaak_http::transaction::HttpTransaction;
|
||||||
|
use yaak_http::types::{
|
||||||
|
SendableBody, SendableHttpRequest, SendableHttpRequestOptions, append_query_params,
|
||||||
|
};
|
||||||
|
use yaak_models::blob_manager::BlobManager;
|
||||||
|
use yaak_models::models::{
|
||||||
|
ClientCertificate, CookieJar, DnsOverride, Environment, HttpRequest, HttpResponse,
|
||||||
|
HttpResponseEvent, HttpResponseHeader, HttpResponseState, ProxySetting, ProxySettingAuth,
|
||||||
|
};
|
||||||
|
use yaak_models::query_manager::QueryManager;
|
||||||
|
use yaak_models::util::UpdateSource;
|
||||||
|
use yaak_plugins::events::{
|
||||||
|
CallHttpAuthenticationRequest, HttpHeader, PluginContext, RenderPurpose,
|
||||||
|
};
|
||||||
|
use yaak_plugins::manager::PluginManager;
|
||||||
|
use yaak_plugins::template_callback::PluginTemplateCallback;
|
||||||
|
use yaak_templates::{RenderOptions, TemplateCallback};
|
||||||
|
use yaak_tls::find_client_certificate;
|
||||||
|
|
||||||
|
/// Bound on the in-flight response-event channel; the persisting task reads
/// from it, so a slow DB applies backpressure to the sender after 100 events.
const HTTP_EVENT_CHANNEL_CAPACITY: usize = 100;

/// Everything that can go wrong while sending an HTTP request, one variant
/// per pipeline stage so callers can tell *where* the send failed.
#[derive(Debug, Error)]
pub enum SendHttpRequestError {
    #[error("Failed to load request: {0}")]
    LoadRequest(#[source] yaak_models::error::Error),

    #[error("Failed to load workspace: {0}")]
    LoadWorkspace(#[source] yaak_models::error::Error),

    #[error("Failed to resolve environments: {0}")]
    ResolveEnvironments(#[source] yaak_models::error::Error),

    #[error("Failed to resolve inherited request settings: {0}")]
    ResolveRequestInheritance(#[source] yaak_models::error::Error),

    #[error("Failed to load cookie jar: {0}")]
    LoadCookieJar(#[source] yaak_models::error::Error),

    #[error("Failed to persist cookie jar: {0}")]
    PersistCookieJar(#[source] yaak_models::error::Error),

    #[error("Failed to render request templates: {0}")]
    RenderRequest(#[source] yaak_templates::error::Error),

    // Stringly-typed because the prepare hook trait reports errors as String
    // (plugin failures have no common error type).
    #[error("Failed to prepare request before send: {0}")]
    PrepareSendableRequest(String),

    #[error("Failed to persist response metadata: {0}")]
    PersistResponse(#[source] yaak_models::error::Error),

    #[error("Failed to create HTTP client: {0}")]
    CreateHttpClient(#[source] yaak_http::error::Error),

    #[error("Failed to build sendable request: {0}")]
    BuildSendableRequest(#[source] yaak_http::error::Error),

    #[error("Failed to send request: {0}")]
    SendRequest(#[source] yaak_http::error::Error),

    #[error("Failed to read response body: {0}")]
    ReadResponseBody(#[source] yaak_http::error::Error),

    // Filesystem failures carry the path so the user can see what was being
    // written when the error occurred.
    #[error("Failed to create response directory {path:?}: {source}")]
    CreateResponseDirectory {
        path: PathBuf,
        #[source]
        source: std::io::Error,
    },

    #[error("Failed to write response body to {path:?}: {source}")]
    WriteResponseBody {
        path: PathBuf,
        #[source]
        source: std::io::Error,
    },
}
|
||||||
|
|
||||||
|
/// Module-wide result alias for the send pipeline.
pub type Result<T> = std::result::Result<T, SendHttpRequestError>;

/// Hook invoked after template rendering but before the request is sent,
/// allowing callers (e.g. auth plugins) to mutate the outgoing request.
#[async_trait]
pub trait PrepareSendableRequest: Send + Sync {
    /// Mutate `sendable_request` in place. `rendered_request` is the
    /// fully-rendered model the sendable was built from; `auth_context_id`
    /// identifies the auth inheritance scope (request/folder/workspace).
    /// Errors are plain strings because implementations wrap plugin failures.
    async fn prepare_sendable_request(
        &self,
        rendered_request: &HttpRequest,
        auth_context_id: &str,
        sendable_request: &mut SendableHttpRequest,
    ) -> std::result::Result<(), String>;
}
|
||||||
|
|
||||||
|
/// Transport abstraction: given a fully-prepared request, actually perform
/// the HTTP exchange. Implementations decide client construction, connection
/// reuse, and cancellation. Events emitted during the exchange are pushed to
/// `event_tx`; `cookie_store` (when present) is read and updated in place.
#[async_trait]
pub trait SendRequestExecutor: Send + Sync {
    async fn send(
        &self,
        sendable_request: SendableHttpRequest,
        event_tx: mpsc::Sender<SenderHttpResponseEvent>,
        cookie_store: Option<CookieStore>,
    ) -> yaak_http::error::Result<yaak_http::sender::HttpResponse>;
}
|
||||||
|
|
||||||
|
/// Fallback executor used when the caller supplies no custom transport:
/// builds a fresh reqwest client per send (no connection pooling across
/// sends, no proxy/TLS/DNS customization).
struct DefaultSendRequestExecutor;

#[async_trait]
impl SendRequestExecutor for DefaultSendRequestExecutor {
    async fn send(
        &self,
        sendable_request: SendableHttpRequest,
        event_tx: mpsc::Sender<SenderHttpResponseEvent>,
        cookie_store: Option<CookieStore>,
    ) -> yaak_http::error::Result<yaak_http::sender::HttpResponse> {
        let sender = ReqwestSender::new()?;
        let transaction = match cookie_store {
            Some(store) => HttpTransaction::with_cookie_store(sender, store),
            None => HttpTransaction::new(sender),
        };
        // This path has no caller-driven cancellation; keep `_cancel_tx`
        // alive for the duration of the call so the watch channel stays open.
        let (_cancel_tx, cancel_rx) = watch::channel(false);
        transaction.execute_with_cancellation(sendable_request, cancel_rx, event_tx).await
    }
}
|
||||||
|
|
||||||
|
/// [`PrepareSendableRequest`] implementation that delegates request
/// preparation (currently: authentication) to the plugin system.
struct PluginPrepareSendableRequest {
    plugin_manager: Arc<PluginManager>,
    /// Context forwarded to plugin calls (identifies window/session).
    plugin_context: PluginContext,
    /// When present, a signal that aborts the plugin call mid-flight.
    cancelled_rx: Option<watch::Receiver<bool>>,
}
|
||||||
|
|
||||||
|
#[async_trait]
impl PrepareSendableRequest for PluginPrepareSendableRequest {
    /// Apply plugin-driven authentication to the outgoing request, racing it
    /// against the cancellation signal when one was provided.
    async fn prepare_sendable_request(
        &self,
        rendered_request: &HttpRequest,
        auth_context_id: &str,
        sendable_request: &mut SendableHttpRequest,
    ) -> std::result::Result<(), String> {
        if let Some(cancelled_rx) = &self.cancelled_rx {
            // Clone so we don't consume the shared receiver's "changed" state.
            let mut cancelled_rx = cancelled_rx.clone();
            tokio::select! {
                result = apply_plugin_authentication(
                    sendable_request,
                    rendered_request,
                    auth_context_id,
                    &self.plugin_manager,
                    &self.plugin_context,
                ) => result,
                // Any change on the watch channel (or the sender dropping)
                // is treated as a cancellation of the in-flight plugin call.
                _ = cancelled_rx.changed() => Err("Request canceled".to_string()),
            }
        } else {
            // No cancellation signal: run the plugin call to completion.
            apply_plugin_authentication(
                sendable_request,
                rendered_request,
                auth_context_id,
                &self.plugin_manager,
                &self.plugin_context,
            )
            .await
        }
    }
}
|
||||||
|
|
||||||
|
/// Executor that reuses clients cached in an [`HttpConnectionManager`],
/// keyed by plugin context, resolving proxy/TLS/DNS settings from the DB at
/// send time so the latest workspace settings always apply.
struct ConnectionManagerSendRequestExecutor<'a> {
    connection_manager: &'a HttpConnectionManager,
    /// Cache key: one pooled client per plugin context.
    plugin_context_id: String,
    query_manager: QueryManager,
    /// Workspace whose settings (proxy, certs, DNS) configure the client.
    workspace_id: String,
    /// Optional signal to abort an in-flight request.
    cancelled_rx: Option<watch::Receiver<bool>>,
}
|
||||||
|
|
||||||
|
#[async_trait]
impl SendRequestExecutor for ConnectionManagerSendRequestExecutor<'_> {
    async fn send(
        &self,
        sendable_request: SendableHttpRequest,
        event_tx: mpsc::Sender<SenderHttpResponseEvent>,
        cookie_store: Option<CookieStore>,
    ) -> yaak_http::error::Result<yaak_http::sender::HttpResponse> {
        // Re-read settings on every send so workspace changes take effect
        // without restarting; errors are coerced into the transport error type.
        let runtime_config =
            resolve_http_send_runtime_config(&self.query_manager, &self.workspace_id)
                .map_err(|e| yaak_http::error::Error::RequestError(e.to_string()))?;
        let client_certificate =
            find_client_certificate(&sendable_request.url, &runtime_config.client_certificates);
        let cached_client = self
            .connection_manager
            .get_client(&HttpConnectionOptions {
                id: self.plugin_context_id.clone(),
                validate_certificates: runtime_config.validate_certificates,
                proxy: runtime_config.proxy,
                client_certificate,
                dns_overrides: runtime_config.dns_overrides,
            })
            .await?;

        // Attach the event sink to the shared resolver so DNS events for this
        // send are reported; detached again below (the resolver is shared
        // across sends using the same cached client).
        cached_client.resolver.set_event_sender(Some(event_tx.clone())).await;

        let sender = ReqwestSender::with_client(cached_client.client);
        let transaction = match cookie_store {
            Some(cs) => HttpTransaction::with_cookie_store(sender, cs),
            None => HttpTransaction::new(sender),
        };

        let result = if let Some(cancelled_rx) = self.cancelled_rx.clone() {
            transaction.execute_with_cancellation(sendable_request, cancelled_rx, event_tx).await
        } else {
            // No caller cancellation: synthesize a never-fired channel.
            let (_cancel_tx, cancel_rx) = watch::channel(false);
            transaction.execute_with_cancellation(sendable_request, cancel_rx, event_tx).await
        };
        // Always detach the sink, success or failure, before returning.
        cached_client.resolver.set_event_sender(None).await;
        result
    }
}
|
||||||
|
|
||||||
|
/// Parameters for [`send_http_request_by_id`]: load an `HttpRequest` from
/// the DB by id, resolve inherited settings, then send it.
pub struct SendHttpRequestByIdParams<'a, T: TemplateCallback> {
    pub query_manager: &'a QueryManager,
    pub blob_manager: &'a BlobManager,
    /// Id of the request row to load and send.
    pub request_id: &'a str,
    /// Environment used for template rendering (`None` = base values only).
    pub environment_id: Option<&'a str>,
    /// Callback that resolves template functions/variables during render.
    pub template_callback: &'a T,
    /// Attribution for DB writes (which actor caused the change).
    pub update_source: UpdateSource,
    /// Cookie jar to read/update; `None` disables cookie handling.
    pub cookie_jar_id: Option<String>,
    /// Directory the raw response body file is written into.
    pub response_dir: &'a Path,
    /// Optional live stream of response events to the caller.
    pub emit_events_to: Option<mpsc::Sender<SenderHttpResponseEvent>>,
    /// Optional pre-send mutation hook (e.g. plugin auth).
    pub prepare_sendable_request: Option<&'a dyn PrepareSendableRequest>,
    /// Custom transport; a fresh default client is used when `None`.
    pub executor: Option<&'a dyn SendRequestExecutor>,
}
|
||||||
|
|
||||||
|
/// Parameters for [`send_http_request`], the core send pipeline operating on
/// an already-loaded (possibly unsaved) `HttpRequest` model.
pub struct SendHttpRequestParams<'a, T: TemplateCallback> {
    pub query_manager: &'a QueryManager,
    pub blob_manager: &'a BlobManager,
    /// The request to send (need not exist in the DB).
    pub request: HttpRequest,
    pub environment_id: Option<&'a str>,
    pub template_callback: &'a T,
    /// Overrides workspace-derived options (timeout, redirects) when `Some`.
    pub send_options: Option<SendableHttpRequestOptions>,
    pub update_source: UpdateSource,
    pub cookie_jar_id: Option<String>,
    pub response_dir: &'a Path,
    pub emit_events_to: Option<mpsc::Sender<SenderHttpResponseEvent>>,
    /// Auth inheritance scope id; resolved from the request when `None`.
    pub auth_context_id: Option<String>,
    /// Reuse this response row instead of creating a new one (e.g. retries).
    pub existing_response: Option<HttpResponse>,
    pub prepare_sendable_request: Option<&'a dyn PrepareSendableRequest>,
    pub executor: Option<&'a dyn SendRequestExecutor>,
}
|
||||||
|
|
||||||
|
/// Parameters for [`send_http_request_with_plugins`]: like
/// [`SendHttpRequestParams`] but wires template rendering, authentication,
/// and transport through the plugin system automatically.
pub struct SendHttpRequestWithPluginsParams<'a> {
    pub query_manager: &'a QueryManager,
    pub blob_manager: &'a BlobManager,
    pub request: HttpRequest,
    pub environment_id: Option<&'a str>,
    pub update_source: UpdateSource,
    pub cookie_jar_id: Option<String>,
    pub response_dir: &'a Path,
    pub emit_events_to: Option<mpsc::Sender<SenderHttpResponseEvent>>,
    pub existing_response: Option<HttpResponse>,
    pub plugin_manager: Arc<PluginManager>,
    /// Used by the plugin template callback to decrypt secure values.
    pub encryption_manager: Arc<EncryptionManager>,
    pub plugin_context: &'a PluginContext,
    /// Optional cancellation signal propagated to plugin calls and transport.
    pub cancelled_rx: Option<watch::Receiver<bool>>,
    /// When present, sends go through the pooled-client executor.
    pub connection_manager: Option<&'a HttpConnectionManager>,
}
|
||||||
|
|
||||||
|
/// Parameters for [`send_http_request_by_id_with_plugins`]: the by-id variant
/// of [`SendHttpRequestWithPluginsParams`].
pub struct SendHttpRequestByIdWithPluginsParams<'a> {
    pub query_manager: &'a QueryManager,
    pub blob_manager: &'a BlobManager,
    /// Id of the request row to load and send.
    pub request_id: &'a str,
    pub environment_id: Option<&'a str>,
    pub update_source: UpdateSource,
    pub cookie_jar_id: Option<String>,
    pub response_dir: &'a Path,
    pub emit_events_to: Option<mpsc::Sender<SenderHttpResponseEvent>>,
    pub plugin_manager: Arc<PluginManager>,
    pub encryption_manager: Arc<EncryptionManager>,
    pub plugin_context: &'a PluginContext,
    pub cancelled_rx: Option<watch::Receiver<bool>>,
    pub connection_manager: Option<&'a HttpConnectionManager>,
}
|
||||||
|
|
||||||
|
/// Outcome of a successful send.
pub struct SendHttpRequestResult {
    /// The request after template rendering and inheritance resolution.
    pub rendered_request: HttpRequest,
    /// The final persisted response row (state == Closed).
    pub response: HttpResponse,
    /// The full decoded response body (also written to `response.body_path`).
    pub response_body: Vec<u8>,
}
|
||||||
|
|
||||||
|
/// Workspace/settings-derived configuration needed to build and send an HTTP
/// request; see [`resolve_http_send_runtime_config`].
pub struct HttpSendRuntimeConfig {
    /// Per-request options (redirect following, timeout).
    pub send_options: SendableHttpRequestOptions,
    /// Whether TLS certificates are verified.
    pub validate_certificates: bool,
    pub proxy: HttpConnectionProxySetting,
    pub dns_overrides: Vec<DnsOverride>,
    /// Global client certificates; matched against the URL at send time.
    pub client_certificates: Vec<ClientCertificate>,
}
|
||||||
|
|
||||||
|
/// Load the workspace row and global settings, and derive everything needed
/// to configure an HTTP client for `workspace_id` (timeout, redirects, TLS
/// validation, proxy, DNS overrides, client certificates).
pub fn resolve_http_send_runtime_config(
    query_manager: &QueryManager,
    workspace_id: &str,
) -> Result<HttpSendRuntimeConfig> {
    let db = query_manager.connect();
    let workspace = db.get_workspace(workspace_id).map_err(SendHttpRequestError::LoadWorkspace)?;
    let settings = db.get_settings();

    Ok(HttpSendRuntimeConfig {
        send_options: SendableHttpRequestOptions {
            follow_redirects: workspace.setting_follow_redirects,
            // A timeout <= 0 means "no timeout". The value is treated as
            // milliseconds here; `unsigned_abs` is a no-op under the `> 0`
            // guard and only exists to make the signed->unsigned cast lossless.
            timeout: if workspace.setting_request_timeout > 0 {
                Some(std::time::Duration::from_millis(
                    workspace.setting_request_timeout.unsigned_abs() as u64,
                ))
            } else {
                None
            },
        },
        validate_certificates: workspace.setting_validate_certificates,
        proxy: proxy_setting_from_settings(settings.proxy),
        dns_overrides: workspace.setting_dns_overrides,
        client_certificates: settings.client_certificates,
    })
}
|
||||||
|
|
||||||
|
/// Load a request from the DB by id and send it through the plugin-aware
/// pipeline. Thin wrapper over [`send_http_request_with_plugins`]; note that
/// inheritance resolution happens downstream, not here.
pub async fn send_http_request_by_id_with_plugins(
    params: SendHttpRequestByIdWithPluginsParams<'_>,
) -> Result<SendHttpRequestResult> {
    let request = params
        .query_manager
        .connect()
        .get_http_request(params.request_id)
        .map_err(SendHttpRequestError::LoadRequest)?;

    send_http_request_with_plugins(SendHttpRequestWithPluginsParams {
        query_manager: params.query_manager,
        blob_manager: params.blob_manager,
        request,
        environment_id: params.environment_id,
        update_source: params.update_source,
        cookie_jar_id: params.cookie_jar_id,
        response_dir: params.response_dir,
        emit_events_to: params.emit_events_to,
        // By-id sends always create a fresh response row.
        existing_response: None,
        plugin_manager: params.plugin_manager,
        encryption_manager: params.encryption_manager,
        plugin_context: params.plugin_context,
        cancelled_rx: params.cancelled_rx,
        connection_manager: params.connection_manager,
    })
    .await
}
|
||||||
|
|
||||||
|
/// Send a request with plugin integration: templates render through the
/// plugin template callback, authentication runs through plugin auth hooks,
/// and (when a connection manager is supplied) the pooled-client executor is
/// used. Delegates the actual pipeline to [`send_http_request`].
pub async fn send_http_request_with_plugins(
    params: SendHttpRequestWithPluginsParams<'_>,
) -> Result<SendHttpRequestResult> {
    // Template functions render with "Send" purpose (real values, not previews).
    let template_callback = PluginTemplateCallback::new(
        params.plugin_manager.clone(),
        params.encryption_manager.clone(),
        params.plugin_context,
        RenderPurpose::Send,
    );
    let auth_hook = PluginPrepareSendableRequest {
        plugin_manager: params.plugin_manager,
        plugin_context: params.plugin_context.clone(),
        cancelled_rx: params.cancelled_rx.clone(),
    };
    // Only build the pooled executor when a connection manager exists;
    // otherwise `send_http_request` falls back to its default executor.
    let executor =
        params.connection_manager.map(|connection_manager| ConnectionManagerSendRequestExecutor {
            connection_manager,
            plugin_context_id: params.plugin_context.id.clone(),
            query_manager: params.query_manager.clone(),
            workspace_id: params.request.workspace_id.clone(),
            cancelled_rx: params.cancelled_rx.clone(),
        });

    send_http_request(SendHttpRequestParams {
        query_manager: params.query_manager,
        blob_manager: params.blob_manager,
        request: params.request,
        environment_id: params.environment_id,
        template_callback: &template_callback,
        send_options: None,
        update_source: params.update_source,
        cookie_jar_id: params.cookie_jar_id,
        response_dir: params.response_dir,
        emit_events_to: params.emit_events_to,
        // None => the pipeline resolves the auth context from the request.
        auth_context_id: None,
        existing_response: params.existing_response,
        prepare_sendable_request: Some(&auth_hook),
        executor: executor.as_ref().map(|e| e as &dyn SendRequestExecutor),
    })
    .await
}
|
||||||
|
|
||||||
|
/// Load a request by id, resolve auth/header inheritance from its folder and
/// workspace ancestry, then run the core send pipeline.
pub async fn send_http_request_by_id<T: TemplateCallback>(
    params: SendHttpRequestByIdParams<'_, T>,
) -> Result<SendHttpRequestResult> {
    let request = params
        .query_manager
        .connect()
        .get_http_request(params.request_id)
        .map_err(SendHttpRequestError::LoadRequest)?;
    // Resolve inheritance eagerly so the pipeline skips its own resolution
    // (we pass the resulting auth_context_id down).
    let (request, auth_context_id) = resolve_inherited_request(params.query_manager, &request)?;

    send_http_request(SendHttpRequestParams {
        query_manager: params.query_manager,
        blob_manager: params.blob_manager,
        request,
        environment_id: params.environment_id,
        template_callback: params.template_callback,
        send_options: None,
        update_source: params.update_source,
        cookie_jar_id: params.cookie_jar_id,
        response_dir: params.response_dir,
        emit_events_to: params.emit_events_to,
        existing_response: None,
        prepare_sendable_request: params.prepare_sendable_request,
        executor: params.executor,
        auth_context_id: Some(auth_context_id),
    })
    .await
}
|
||||||
|
|
||||||
|
/// Core send pipeline. Stages, in order:
///   1. Resolve environments + inherited auth/headers.
///   2. Render templates, build the sendable request, run the prepare hook.
///   3. Persist an `Initialized` response row (so the UI can show progress).
///   4. Spawn a task that persists/forwards streaming response events.
///   5. Execute via the supplied (or default) executor.
///   6. Persist `Connected` metadata, stream the body to disk, persist
///      `Closed`, and save cookies.
/// On transport failure the response row is closed with the error message and
/// `SendRequest` is returned; DB persistence failures abort the pipeline.
pub async fn send_http_request<T: TemplateCallback>(
    params: SendHttpRequestParams<'_, T>,
) -> Result<SendHttpRequestResult> {
    let environment_chain =
        resolve_environment_chain(params.query_manager, &params.request, params.environment_id)?;
    // Callers that already resolved inheritance pass auth_context_id;
    // otherwise resolve it (and the inherited auth/headers) here.
    let (resolved_request, auth_context_id) =
        if let Some(auth_context_id) = params.auth_context_id.clone() {
            (params.request.clone(), auth_context_id)
        } else {
            resolve_inherited_request(params.query_manager, &params.request)?
        };
    let runtime_config =
        resolve_http_send_runtime_config(params.query_manager, &params.request.workspace_id)?;
    // Explicit caller options win over workspace-derived defaults.
    let send_options = params.send_options.unwrap_or(runtime_config.send_options);
    let mut cookie_jar = load_cookie_jar(params.query_manager, params.cookie_jar_id.as_deref())?;
    let cookie_store =
        cookie_jar.as_ref().map(|jar| CookieStore::from_cookies(jar.cookies.clone()));

    // `throw()` => template failures abort the send instead of rendering empty.
    let rendered_request = render_http_request(
        &resolved_request,
        environment_chain,
        params.template_callback,
        &RenderOptions::throw(),
    )
    .await
    .map_err(SendHttpRequestError::RenderRequest)?;

    let mut sendable_request =
        SendableHttpRequest::from_http_request(&rendered_request, send_options)
            .await
            .map_err(SendHttpRequestError::BuildSendableRequest)?;

    // Pre-send hook (e.g. plugin auth) may add headers/query params.
    if let Some(hook) = params.prepare_sendable_request {
        hook.prepare_sendable_request(&rendered_request, &auth_context_id, &mut sendable_request)
            .await
            .map_err(SendHttpRequestError::PrepareSendableRequest)?;
    }

    // Reset/initialize the response row. When reusing an existing response
    // (retry), every per-attempt field is cleared explicitly so stale data
    // from the previous attempt can't leak through.
    let request_content_length = sendable_body_length(sendable_request.body.as_ref());
    let mut response = params.existing_response.unwrap_or_default();
    response.request_id = params.request.id.clone();
    response.workspace_id = params.request.workspace_id.clone();
    response.request_content_length = request_content_length;
    response.request_headers = sendable_request
        .headers
        .iter()
        .map(|(name, value)| HttpResponseHeader { name: name.clone(), value: value.clone() })
        .collect();
    response.url = sendable_request.url.clone();
    response.state = HttpResponseState::Initialized;
    response.error = None;
    response.content_length = None;
    response.content_length_compressed = None;
    response.body_path = None;
    response.status = 0;
    response.status_reason = None;
    response.headers = Vec::new();
    response.remote_addr = None;
    response.version = None;
    response.elapsed = 0;
    response.elapsed_headers = 0;
    response.elapsed_dns = 0;
    response = params
        .query_manager
        .connect()
        .upsert_http_response(&response, &params.update_source, params.blob_manager)
        .map_err(SendHttpRequestError::PersistResponse)?;

    // Background task: persist each streamed event to the DB and optionally
    // forward to the caller. It ends when `event_tx` (and its clones) drop.
    let (event_tx, mut event_rx) =
        mpsc::channel::<SenderHttpResponseEvent>(HTTP_EVENT_CHANNEL_CAPACITY);
    let event_query_manager = params.query_manager.clone();
    let event_response_id = response.id.clone();
    let event_workspace_id = params.request.workspace_id.clone();
    let event_update_source = params.update_source.clone();
    let emit_events_to = params.emit_events_to.clone();
    let event_handle = tokio::spawn(async move {
        while let Some(event) = event_rx.recv().await {
            let db_event = HttpResponseEvent::new(
                &event_response_id,
                &event_workspace_id,
                event.clone().into(),
            );
            // Event persistence is best-effort; a DB failure must not kill the send.
            if let Err(err) = event_query_manager
                .connect()
                .upsert_http_response_event(&db_event, &event_update_source)
            {
                warn!("Failed to persist HTTP response event: {}", err);
            }

            // `try_send` so a slow caller can't stall the persisting loop.
            if let Some(tx) = emit_events_to.as_ref() {
                let _ = tx.try_send(event);
            }
        }
    });

    let default_executor = DefaultSendRequestExecutor;
    let executor = params.executor.unwrap_or(&default_executor);
    let started_at = Instant::now();
    // Keep the original URL for error reporting (redirects may change it).
    let request_started_url = sendable_request.url.clone();

    let http_response = match executor.send(sendable_request, event_tx, cookie_store.clone()).await
    {
        Ok(response) => response,
        Err(err) => {
            // Save whatever cookies were set before the failure, close the
            // response row with the error (best-effort), and drain the event task.
            persist_cookie_jar(params.query_manager, cookie_jar.as_mut(), cookie_store.as_ref())?;
            let _ = persist_response_error(
                params.query_manager,
                params.blob_manager,
                &params.update_source,
                &response,
                started_at,
                err.to_string(),
                request_started_url,
            );
            if let Err(join_err) = event_handle.await {
                warn!("Failed to join response event task: {}", join_err);
            }
            return Err(SendHttpRequestError::SendRequest(err));
        }
    };

    // Headers received: persist status/headers before reading the body so the
    // UI can show metadata while a large body streams.
    let headers_elapsed = duration_to_i32(started_at.elapsed());
    response = params
        .query_manager
        .connect()
        .upsert_http_response(
            &HttpResponse {
                state: HttpResponseState::Connected,
                elapsed_headers: headers_elapsed,
                status: i32::from(http_response.status),
                status_reason: http_response.status_reason.clone(),
                url: http_response.url.clone(),
                remote_addr: http_response.remote_addr.clone(),
                version: http_response.version.clone(),
                headers: http_response
                    .headers
                    .iter()
                    .map(|(name, value)| HttpResponseHeader {
                        name: name.clone(),
                        value: value.clone(),
                    })
                    .collect(),
                request_headers: http_response
                    .request_headers
                    .iter()
                    .map(|(name, value)| HttpResponseHeader {
                        name: name.clone(),
                        value: value.clone(),
                    })
                    .collect(),
                ..response
            },
            &params.update_source,
            params.blob_manager,
        )
        .map_err(SendHttpRequestError::PersistResponse)?;

    let (response_body, body_stats) =
        http_response.bytes().await.map_err(SendHttpRequestError::ReadResponseBody)?;

    // NOTE(review): blocking std::fs calls inside an async fn — acceptable for
    // small bodies, but consider tokio::fs/spawn_blocking for large ones.
    std::fs::create_dir_all(params.response_dir).map_err(|source| {
        SendHttpRequestError::CreateResponseDirectory {
            path: params.response_dir.to_path_buf(),
            source,
        }
    })?;

    // Body file is named after the response id, making it unique per response.
    let body_path = params.response_dir.join(&response.id);
    std::fs::write(&body_path, &response_body).map_err(|source| {
        SendHttpRequestError::WriteResponseBody { path: body_path.clone(), source }
    })?;

    // Final persist: body location, sizes, total elapsed, Closed state.
    response = params
        .query_manager
        .connect()
        .upsert_http_response(
            &HttpResponse {
                body_path: Some(body_path.to_string_lossy().to_string()),
                content_length: Some(usize_to_i32(response_body.len())),
                content_length_compressed: Some(u64_to_i32(body_stats.size_compressed)),
                elapsed: duration_to_i32(started_at.elapsed()),
                elapsed_headers: headers_elapsed,
                state: HttpResponseState::Closed,
                ..response
            },
            &params.update_source,
            params.blob_manager,
        )
        .map_err(SendHttpRequestError::PersistResponse)?;

    // All event_tx clones are dropped by now, so the task terminates.
    if let Err(join_err) = event_handle.await {
        warn!("Failed to join response event task: {}", join_err);
    }
    persist_cookie_jar(params.query_manager, cookie_jar.as_mut(), cookie_store.as_ref())?;

    Ok(SendHttpRequestResult { rendered_request, response, response_body })
}
|
||||||
|
|
||||||
|
/// Resolve the ordered list of environments applying to this request
/// (workspace ancestry + folder + active environment), delegating the actual
/// precedence rules to the model layer.
fn resolve_environment_chain(
    query_manager: &QueryManager,
    request: &HttpRequest,
    environment_id: Option<&str>,
) -> Result<Vec<Environment>> {
    let db = query_manager.connect();
    db.resolve_environments(&request.workspace_id, request.folder_id.as_deref(), environment_id)
        .map_err(SendHttpRequestError::ResolveEnvironments)
}
|
||||||
|
|
||||||
|
/// Apply folder/workspace inheritance to a request: resolve the effective
/// authentication (type + values) and the merged header list, returning a
/// copy of the request with those fields replaced, plus the id of the model
/// the auth was inherited from (the "auth context").
fn resolve_inherited_request(
    query_manager: &QueryManager,
    request: &HttpRequest,
) -> Result<(HttpRequest, String)> {
    let db = query_manager.connect();
    let (authentication_type, authentication, auth_context_id) = db
        .resolve_auth_for_http_request(request)
        .map_err(SendHttpRequestError::ResolveRequestInheritance)?;
    let resolved_headers = db
        .resolve_headers_for_http_request(request)
        .map_err(SendHttpRequestError::ResolveRequestInheritance)?;

    // The original request is left untouched; callers get a resolved copy.
    let mut request = request.clone();
    request.authentication_type = authentication_type;
    request.authentication = authentication;
    request.headers = resolved_headers;

    Ok((request, auth_context_id))
}
|
||||||
|
|
||||||
|
fn load_cookie_jar(
|
||||||
|
query_manager: &QueryManager,
|
||||||
|
cookie_jar_id: Option<&str>,
|
||||||
|
) -> Result<Option<CookieJar>> {
|
||||||
|
let Some(cookie_jar_id) = cookie_jar_id else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
query_manager
|
||||||
|
.connect()
|
||||||
|
.get_cookie_jar(cookie_jar_id)
|
||||||
|
.map(Some)
|
||||||
|
.map_err(SendHttpRequestError::LoadCookieJar)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn persist_cookie_jar(
|
||||||
|
query_manager: &QueryManager,
|
||||||
|
cookie_jar: Option<&mut CookieJar>,
|
||||||
|
cookie_store: Option<&CookieStore>,
|
||||||
|
) -> Result<()> {
|
||||||
|
match (cookie_jar, cookie_store) {
|
||||||
|
(Some(cookie_jar), Some(cookie_store)) => {
|
||||||
|
cookie_jar.cookies = cookie_store.get_all_cookies();
|
||||||
|
query_manager
|
||||||
|
.connect()
|
||||||
|
.upsert_cookie_jar(cookie_jar, &UpdateSource::Background)
|
||||||
|
.map_err(SendHttpRequestError::PersistCookieJar)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
_ => Ok(()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Translate the persisted proxy setting into the HTTP layer's proxy config.
/// `None` (never configured) and an enabled-but-`disabled` config both fall
/// back to the system proxy.
fn proxy_setting_from_settings(proxy: Option<ProxySetting>) -> HttpConnectionProxySetting {
    match proxy {
        None => HttpConnectionProxySetting::System,
        Some(ProxySetting::Disabled) => HttpConnectionProxySetting::Disabled,
        Some(ProxySetting::Enabled { http, https, auth, bypass, disabled }) => {
            if disabled {
                // A configured-but-toggled-off proxy behaves like "unset".
                HttpConnectionProxySetting::System
            } else {
                HttpConnectionProxySetting::Enabled {
                    http,
                    https,
                    bypass,
                    auth: auth.map(|ProxySettingAuth { user, password }| {
                        HttpConnectionProxySettingAuth { user, password }
                    }),
                }
            }
        }
    }
}
|
||||||
|
|
||||||
|
/// Run the request's authentication through the matching plugin and apply
/// the plugin's output (extra headers and/or query parameters) to the
/// sendable request. A missing or `"none"` auth type is a no-op.
/// Errors are strings because plugin failures have no shared error type.
pub async fn apply_plugin_authentication(
    sendable_request: &mut SendableHttpRequest,
    request: &HttpRequest,
    auth_context_id: &str,
    plugin_manager: &PluginManager,
    plugin_context: &PluginContext,
) -> std::result::Result<(), String> {
    match &request.authentication_type {
        None => {}
        Some(authentication_type) if authentication_type == "none" => {}
        Some(authentication_type) => {
            let req = CallHttpAuthenticationRequest {
                // The context id is hashed (md5, hex) so plugins get a stable
                // opaque key per auth scope rather than a raw model id.
                context_id: format!("{:x}", md5::compute(auth_context_id)),
                // Round-trip through serde_json to convert the model's auth
                // map into the shape the plugin event expects.
                values: serde_json::from_value(
                    serde_json::to_value(&request.authentication)
                        .map_err(|e| format!("Failed to serialize auth values: {e}"))?,
                )
                .map_err(|e| format!("Failed to parse auth values: {e}"))?,
                url: sendable_request.url.clone(),
                method: sendable_request.method.clone(),
                headers: sendable_request
                    .headers
                    .iter()
                    .map(|(name, value)| HttpHeader {
                        name: name.to_string(),
                        value: value.to_string(),
                    })
                    .collect(),
            };
            let plugin_result = plugin_manager
                .call_http_authentication(plugin_context, authentication_type, req)
                .await
                .map_err(|e| format!("Failed to apply authentication plugin: {e}"))?;

            // `insert_header` applies each header the plugin asked to set.
            for header in plugin_result.set_headers.unwrap_or_default() {
                sendable_request.insert_header((header.name, header.value));
            }

            // Query parameters requested by the plugin are appended to the URL.
            if let Some(params) = plugin_result.set_query_parameters {
                let params = params.into_iter().map(|p| (p.name, p.value)).collect::<Vec<_>>();
                sendable_request.url = append_query_params(&sendable_request.url, params);
            }
        }
    }
    Ok(())
}
|
||||||
|
|
||||||
|
/// Close a response row with a send failure: mark it `Closed`, record the
/// error message and elapsed time, and keep previously-recorded fields where
/// the failure happened after they were set.
fn persist_response_error(
    query_manager: &QueryManager,
    blob_manager: &BlobManager,
    update_source: &UpdateSource,
    response: &HttpResponse,
    started_at: Instant,
    error: String,
    fallback_url: String,
) -> Result<HttpResponse> {
    let elapsed = duration_to_i32(started_at.elapsed());
    query_manager
        .connect()
        .upsert_http_response(
            &HttpResponse {
                state: HttpResponseState::Closed,
                elapsed,
                // If headers were never received, total elapsed doubles as
                // the header time so the row isn't left with a zero.
                elapsed_headers: if response.elapsed_headers == 0 {
                    elapsed
                } else {
                    response.elapsed_headers
                },
                error: Some(error),
                // Use the pre-send URL when the failure occurred before any
                // URL was recorded on the response.
                url: if response.url.is_empty() { fallback_url } else { response.url.clone() },
                ..response.clone()
            },
            update_source,
            blob_manager,
        )
        .map_err(SendHttpRequestError::PersistResponse)
}
|
||||||
|
|
||||||
|
/// Best-effort length (bytes) of the outgoing request body, for display on
/// the response record. `None` when there is no body or the stream does not
/// declare a content length. Values are clamped to `i32` range.
fn sendable_body_length(body: Option<&SendableBody>) -> Option<i32> {
    match body {
        Some(SendableBody::Bytes(bytes)) => Some(usize_to_i32(bytes.len())),
        Some(SendableBody::Stream { content_length: Some(length), .. }) => {
            Some(u64_to_i32(*length))
        }
        _ => None,
    }
}
|
||||||
|
|
||||||
|
/// Convert an elapsed [`std::time::Duration`] to whole milliseconds as `i32`,
/// saturating at `i32::MAX` (elapsed-time DB columns are i32).
///
/// `try_from` fails exactly when the millisecond count exceeds `i32::MAX`,
/// so `unwrap_or(i32::MAX)` yields the same saturating behavior as the
/// previous manual compare, without the hand-rolled helper or an `as` cast.
fn duration_to_i32(duration: std::time::Duration) -> i32 {
    i32::try_from(duration.as_millis()).unwrap_or(i32::MAX)
}
|
||||||
|
|
||||||
|
/// Clamp a `usize` into `i32` range, saturating at `i32::MAX` (used for
/// content-length fields stored as i32).
///
/// `i32::try_from` fails exactly when the value exceeds `i32::MAX`, making
/// this a saturating conversion with no silently-truncating `as` cast.
fn usize_to_i32(value: usize) -> i32 {
    i32::try_from(value).unwrap_or(i32::MAX)
}
|
||||||
|
|
||||||
|
/// Saturating conversion from `u64` to `i32`: values above `i32::MAX` clamp
/// to `i32::MAX` (used for sizes stored in i32 DB columns).
fn u64_to_i32(value: u64) -> i32 {
    match i32::try_from(value) {
        Ok(v) => v,
        // Conversion only fails when the value exceeds i32::MAX.
        Err(_) => i32::MAX,
    }
}
|
||||||
|
|
||||||
|
/// Saturating conversion from `u128` to `i32` (used for millisecond counts
/// stored in i32 DB columns).
fn u128_to_i32(value: u128) -> i32 {
    // Clamp first so the narrowing cast below is guaranteed lossless.
    value.min(i32::MAX as u128) as i32
}
|
||||||
Reference in New Issue
Block a user