Compare commits

..

17 Commits

Author SHA1 Message Date
Gregory Schier
9856383566 Refine AnyModel deserialization 2026-02-16 13:20:36 -08:00
Gregory Schier
b75e9479e6 Refine model change polling and move queries out of db_context 2026-02-16 13:00:50 -08:00
Gregory Schier
0d1d8d4afa Merge remote-tracking branch 'origin/main' into cli-command-architecture 2026-02-16 09:37:20 -08:00
Gregory Schier
5888e69956 Ignore local Codex environment state file 2026-02-16 09:34:44 -08:00
Gregory Schier
e48a0894de Handle CLI send errors without panicking 2026-02-16 09:33:43 -08:00
Gregory Schier
ea0b083d25 Limit CLI plugin runtime startup and harden shutdown watcher 2026-02-16 09:31:46 -08:00
Gregory Schier
f6c20283f0 Add DB-backed model change polling and startup pruning 2026-02-16 09:21:48 -08:00
Gregory Schier
0d57f91ca4 add json create/update workflows across cli resources 2026-02-16 09:08:05 -08:00
Pathik
0a4ffde319 Support moving multiple requests to another workspace (#396)
Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com>
Co-authored-by: Gregory Schier <gschier1990@gmail.com>
2026-02-16 08:42:42 -08:00
Gregory Schier
570676dffb chore: apply rustfmt formatting updates 2026-02-16 08:40:48 -08:00
Gregory Schier
0bf24c0dc1 add workspace/folder/environment commands and reorganize cli tests 2026-02-16 08:29:35 -08:00
Gregory Schier
6a23f0a5ee merge origin/main into cli-command-architecture 2026-02-16 07:27:57 -08:00
Gregory Schier
91e0660a7a add request show/delete commands and cli integration tests 2026-02-16 07:27:15 -08:00
Gregory Schier
26e145942a refactor yaak-cli phase 1 command architecture 2026-02-16 06:59:23 -08:00
Gregory Schier
cc4d598af3 Update skill 2026-02-16 06:02:03 -08:00
Davide Becker
f5d11cb6d3 Add support for client assertions in the OAuth 2 plugin (#395)
Co-authored-by: Davide Becker <github@reg.davide.me>
Co-authored-by: Gregory Schier <gschier1990@gmail.com>
2026-02-14 07:38:54 -08:00
Gregory Schier
8023603ebe Add CLI command architecture plan 2026-02-08 08:02:34 -08:00
47 changed files with 3008 additions and 530 deletions

View File

@@ -1,35 +1,46 @@
---
description: Review a PR in a new worktree
allowed-tools: Bash(git worktree:*), Bash(gh pr:*)
allowed-tools: Bash(git worktree:*), Bash(gh pr:*), Bash(git branch:*)
---
Review a GitHub pull request in a new git worktree.
Check out a GitHub pull request for review.
## Usage
```
/review-pr <PR_NUMBER>
/check-out-pr <PR_NUMBER>
```
## What to do
1. List all open pull requests and ask the user to select one
1. If no PR number is provided, list all open pull requests and ask the user to select one
2. Get PR information using `gh pr view <PR_NUMBER> --json number,headRefName`
3. Extract the branch name from the PR
4. Create a new worktree at `../yaak-worktrees/pr-<PR_NUMBER>` using `git worktree add` with a timeout of at least 300000ms (5 minutes) since the post-checkout hook runs a bootstrap script
5. Checkout the PR branch in the new worktree using `gh pr checkout <PR_NUMBER>`
6. The post-checkout hook will automatically:
3. **Ask the user** whether they want to:
- **A) Check out in current directory** — simple `gh pr checkout <PR_NUMBER>`
- **B) Create a new worktree** — isolated copy at `../yaak-worktrees/pr-<PR_NUMBER>`
4. Follow the appropriate path below
## Option A: Check out in current directory
1. Run `gh pr checkout <PR_NUMBER>`
2. Inform the user which branch they're now on
## Option B: Create a new worktree
1. Create a new worktree at `../yaak-worktrees/pr-<PR_NUMBER>` using `git worktree add` with a timeout of at least 300000ms (5 minutes) since the post-checkout hook runs a bootstrap script
2. Checkout the PR branch in the new worktree using `gh pr checkout <PR_NUMBER>`
3. The post-checkout hook will automatically:
- Create `.env.local` with unique ports
- Copy editor config folders
- Run `npm install && npm run bootstrap`
7. Inform the user:
4. Inform the user:
- Where the worktree was created
- What ports were assigned
- How to access it (cd command)
- How to run the dev server
- How to remove the worktree when done
## Example Output
### Example worktree output
```
Created worktree for PR #123 at ../yaak-worktrees/pr-123

3
.gitignore vendored
View File

@@ -51,3 +51,6 @@ flatpak-repo/
flatpak/flatpak-builder-tools/
flatpak/cargo-sources.json
flatpak/node-sources.json
# Local Codex desktop env state
.codex/environments/environment.toml

96
Cargo.lock generated
View File

@@ -221,6 +221,21 @@ dependencies = [
"zbus",
]
[[package]]
name = "assert_cmd"
version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c5bcfa8749ac45dd12cb11055aeeb6b27a3895560d60d71e3c23bf979e60514"
dependencies = [
"anstyle",
"bstr",
"libc",
"predicates",
"predicates-core",
"predicates-tree",
"wait-timeout",
]
[[package]]
name = "async-broadcast"
version = "0.7.2"
@@ -639,6 +654,17 @@ dependencies = [
"alloc-stdlib",
]
[[package]]
name = "bstr"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab"
dependencies = [
"memchr",
"regex-automata",
"serde",
]
[[package]]
name = "bumpalo"
version = "3.18.1"
@@ -1366,6 +1392,12 @@ dependencies = [
"cipher",
]
[[package]]
name = "difflib"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8"
[[package]]
name = "digest"
version = "0.10.7"
@@ -1744,6 +1776,15 @@ dependencies = [
"miniz_oxide",
]
[[package]]
name = "float-cmp"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8"
dependencies = [
"num-traits",
]
[[package]]
name = "fnv"
version = "1.0.7"
@@ -3496,6 +3537,12 @@ dependencies = [
"minimal-lexical",
]
[[package]]
name = "normalize-line-endings"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
[[package]]
name = "notify"
version = "8.0.0"
@@ -4373,6 +4420,36 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
[[package]]
name = "predicates"
version = "3.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ada8f2932f28a27ee7b70dd6c1c39ea0675c55a36879ab92f3a715eaa1e63cfe"
dependencies = [
"anstyle",
"difflib",
"float-cmp",
"normalize-line-endings",
"predicates-core",
"regex 1.11.1",
]
[[package]]
name = "predicates-core"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cad38746f3166b4031b1a0d39ad9f954dd291e7854fcc0eed52ee41a0b50d144"
[[package]]
name = "predicates-tree"
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0de1b847b39c8131db0467e9df1ff60e6d0562ab8e9a16e568ad0fdb372e2f2"
dependencies = [
"predicates-core",
"termtree",
]
[[package]]
name = "proc-macro-crate"
version = "1.3.1"
@@ -6411,6 +6488,12 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "termtree"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683"
[[package]]
name = "thiserror"
version = "1.0.69"
@@ -7184,6 +7267,15 @@ dependencies = [
"libc",
]
[[package]]
name = "wait-timeout"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11"
dependencies = [
"libc",
]
[[package]]
name = "walkdir"
version = "2.5.0"
@@ -8222,11 +8314,15 @@ dependencies = [
name = "yaak-cli"
version = "0.1.0"
dependencies = [
"assert_cmd",
"clap",
"dirs",
"env_logger",
"log 0.4.29",
"predicates",
"serde",
"serde_json",
"tempfile",
"tokio",
"yaak-crypto",
"yaak-http",

View File

@@ -13,6 +13,7 @@ clap = { version = "4", features = ["derive"] }
dirs = "6"
env_logger = "0.11"
log = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
yaak-crypto = { workspace = true }
@@ -20,3 +21,8 @@ yaak-http = { workspace = true }
yaak-models = { workspace = true }
yaak-plugins = { workspace = true }
yaak-templates = { workspace = true }
[dev-dependencies]
assert_cmd = "2"
predicates = "3"
tempfile = "3"

340
crates-cli/yaak-cli/PLAN.md Normal file
View File

@@ -0,0 +1,340 @@
# CLI Command Architecture Plan
## Goal
Redesign the yaak-cli command structure to use a resource-oriented `<resource> <action>`
pattern that scales well, is discoverable, and supports both human and LLM workflows.
## Status Snapshot
Current branch state:
- Modular CLI structure with command modules and shared `CliContext`
- Resource/action hierarchy in place for:
- `workspace list|show|create|update|delete`
- `request list|show|create|update|send|delete`
- `folder list|show|create|update|delete`
- `environment list|show|create|update|delete`
- Top-level `send` exists as a request-send shortcut (not yet flexible request/folder/workspace resolution)
- Legacy `get` command removed
- JSON create/update flow implemented (`--json` and positional JSON shorthand)
- No `request schema` command yet
Progress checklist:
- [x] Phase 1 complete
- [x] Phase 2 complete
- [x] Phase 3 complete
- [ ] Phase 4 complete
- [ ] Phase 5 complete
- [ ] Phase 6 complete
## Command Architecture
### Design Principles
- **Resource-oriented**: top-level commands are nouns, subcommands are verbs
- **Polymorphic requests**: `request` covers HTTP, gRPC, and WebSocket — the CLI
resolves the type via `get_any_request` and adapts behavior accordingly
- **Simple creation, full-fidelity via JSON**: human-friendly flags for basic creation,
`--json` for full control (targeted at LLM and scripting workflows)
- **Runtime schema introspection**: `request schema` outputs JSON Schema for the request
models, with dynamic auth fields populated from loaded plugins at runtime
- **Destructive actions require confirmation**: `delete` commands prompt for user
confirmation before proceeding. Can be bypassed with `--yes` / `-y` for scripting
### Commands
```
# Top-level shortcut
yaakcli send <id> [-e <env_id>] # id can be a request, folder, or workspace
# Resource commands
yaakcli workspace list
yaakcli workspace show <id>
yaakcli workspace create --name <name>
yaakcli workspace create --json '{"name": "My Workspace"}'
yaakcli workspace create '{"name": "My Workspace"}' # positional JSON shorthand
yaakcli workspace update --json '{"id": "wk_abc", "name": "New Name"}'
yaakcli workspace delete <id>
yaakcli request list <workspace_id>
yaakcli request show <id>
yaakcli request create <workspace_id> --name <name> --url <url> [--method GET]
yaakcli request create --json '{"workspaceId": "wk_abc", "url": "..."}'
yaakcli request update --json '{"id": "rq_abc", "url": "https://new.com"}'
yaakcli request send <id> [-e <env_id>]
yaakcli request delete <id>
yaakcli request schema <http|grpc|websocket>
yaakcli folder list <workspace_id>
yaakcli folder show <id>
yaakcli folder create <workspace_id> --name <name>
yaakcli folder create --json '{"workspaceId": "wk_abc", "name": "Auth"}'
yaakcli folder update --json '{"id": "fl_abc", "name": "New Name"}'
yaakcli folder delete <id>
yaakcli environment list <workspace_id>
yaakcli environment show <id>
yaakcli environment create <workspace_id> --name <name>
yaakcli environment create --json '{"workspaceId": "wk_abc", "name": "Production"}'
yaakcli environment update --json '{"id": "ev_abc", ...}'
yaakcli environment delete <id>
```
### `send` — Top-Level Shortcut
`yaakcli send <id>` is a convenience alias that accepts any sendable ID. It tries
each type in order via DB lookups (short-circuiting on first match):
1. Request (HTTP, gRPC, or WebSocket via `get_any_request`)
2. Folder (sends all requests in the folder)
3. Workspace (sends all requests in the workspace)
ID prefixes exist (e.g. `rq_`, `fl_`, `wk_`) but are not relied upon — resolution
is purely by DB lookup.
`request send <id>` is the same but restricted to request IDs only.
### Request Send — Polymorphic Behavior
`send` means "execute this request" regardless of protocol:
- **HTTP**: send request, print response, exit
- **gRPC**: invoke the method; for streaming, stream output to stdout until done/Ctrl+C
- **WebSocket**: connect, stream messages to stdout until closed/Ctrl+C
### `request schema` — Runtime JSON Schema
Outputs a JSON Schema describing the full request shape, including dynamic fields:
1. Generate base schema from `schemars::JsonSchema` derive on the Rust model structs
2. Load plugins, collect auth strategy definitions and their form inputs
3. Merge plugin-defined auth fields into the `authentication` property as a `oneOf`
4. Output the combined schema as JSON
This lets an LLM call `schema`, read the shape, and construct valid JSON for
`create --json` or `update --json`.
## Implementation Steps
### Phase 1: Restructure commands (no new functionality)
Refactor `main.rs` into the new resource/action pattern using clap subcommand nesting.
Existing behavior stays the same, just reorganized. Remove the `get` command.
1. Create module structure: `commands/workspace.rs`, `commands/request.rs`, etc.
2. Define nested clap enums:
```rust
enum Commands {
Send(SendArgs),
Workspace(WorkspaceArgs),
Request(RequestArgs),
Folder(FolderArgs),
Environment(EnvironmentArgs),
}
```
3. Move existing `Workspaces` logic into `workspace list`
4. Move existing `Requests` logic into `request list`
5. Move existing `Send` logic into `request send`
6. Move existing `Create` logic into `request create`
7. Delete the `Get` command entirely
8. Extract shared setup (DB init, plugin init, encryption) into a reusable context struct
### Phase 2: Add missing CRUD commands
Status: complete
1. `workspace show <id>`
2. `workspace create --name <name>` (and `--json`)
3. `workspace update --json`
4. `workspace delete <id>`
5. `request show <id>` (JSON output of the full request model)
6. `request delete <id>`
7. `folder list <workspace_id>`
8. `folder show <id>`
9. `folder create <workspace_id> --name <name>` (and `--json`)
10. `folder update --json`
11. `folder delete <id>`
12. `environment list <workspace_id>`
13. `environment show <id>`
14. `environment create <workspace_id> --name <name>` (and `--json`)
15. `environment update --json`
16. `environment delete <id>`
### Phase 3: JSON input for create/update
Both commands accept JSON via `--json <string>` or as a positional argument (detected
by leading `{`). They follow the same upsert pattern as the plugin API.
- **`create --json`**: JSON must include `workspaceId`. Must NOT include `id` (or
use empty string `""`). Deserializes into the model with defaults for missing fields,
then upserts (insert).
- **`update --json`**: JSON must include `id`. Performs a fetch-merge-upsert:
1. Fetch the existing model from DB
2. Serialize it to `serde_json::Value`
3. Deep-merge the user's partial JSON on top (JSON Merge Patch / RFC 7386 semantics)
4. Deserialize back into the typed model
5. Upsert (update)
This matches how the MCP server plugin already does it (fetch existing, spread, override),
but the CLI handles the merge server-side so callers don't have to.
Setting a field to `null` removes it (for `Option<T>` fields), per RFC 7386.
Implementation:
1. Add `--json` flag and positional JSON detection to `create` commands
2. Add `update` commands with required `--json` flag
3. Implement JSON merge utility (or use `json-patch` crate)
### Phase 4: Runtime schema generation
1. Add `schemars` dependency to `yaak-models`
2. Derive `JsonSchema` on `HttpRequest`, `GrpcRequest`, `WebsocketRequest`, and their
nested types (`HttpRequestHeader`, `HttpUrlParameter`, etc.)
3. Implement `request schema` command:
- Generate base schema from schemars
- Query plugins for auth strategy form inputs
- Convert plugin form inputs into JSON Schema properties
- Merge into the `authentication` field
- Print to stdout
### Phase 5: Polymorphic send
1. Update `request send` to use `get_any_request` to resolve the request type
2. Match on `AnyRequest` variant and dispatch to the appropriate sender:
- `AnyRequest::HttpRequest` — existing HTTP send logic
- `AnyRequest::GrpcRequest` — gRPC invoke (future implementation)
- `AnyRequest::WebsocketRequest` — WebSocket connect (future implementation)
3. gRPC and WebSocket send can initially return "not yet implemented" errors
### Phase 6: Top-level `send` and folder/workspace send
1. Add top-level `yaakcli send <id>` command
2. Resolve ID by trying DB lookups in order: any_request → folder → workspace
3. For folder: list all requests in folder, send each
4. For workspace: list all requests in workspace, send each
5. Add execution options: `--sequential` (default), `--parallel`, `--fail-fast`
## Execution Plan (PR Slices)
### PR 1: Command tree refactor + compatibility aliases
Scope:
1. Introduce `commands/` modules and a `CliContext` for shared setup
2. Add new clap hierarchy (`workspace`, `request`, `folder`, `environment`)
3. Route existing behavior into:
- `workspace list`
- `request list <workspace_id>`
- `request send <id>`
- `request create <workspace_id> ...`
4. Keep compatibility aliases temporarily:
- `workspaces` -> `workspace list`
- `requests <workspace_id>` -> `request list <workspace_id>`
- `create ...` -> `request create ...`
5. Remove `get` and update help text
Acceptance criteria:
- `yaakcli --help` shows noun/verb structure
- Existing list/send/create workflows still work
- No behavior change in HTTP send output format
### PR 2: CRUD surface area
Scope:
1. Implement `show/create/update/delete` for `workspace`, `request`, `folder`, `environment`
2. Ensure delete commands require confirmation by default (`--yes` bypass)
3. Normalize output format for list/show/create/update/delete responses
Acceptance criteria:
- Every command listed in the "Commands" section parses and executes
- Delete commands are safe by default in interactive terminals
- `--yes` supports non-interactive scripts
### PR 3: JSON input + merge patch semantics
Scope:
1. Add shared parser for `--json` and positional JSON shorthand
2. Add `create --json` and `update --json` for all mutable resources
3. Implement server-side RFC 7386 merge patch behavior
4. Add guardrails:
- `create --json`: reject non-empty `id`
- `update --json`: require `id`
Acceptance criteria:
- Partial `update --json` only modifies provided keys
- `null` clears optional values
- Invalid JSON and missing required fields return actionable errors
### PR 4: `request schema` and plugin auth integration
Scope:
1. Add `schemars` to `yaak-models` and derive `JsonSchema` for request models
2. Implement `request schema <http|grpc|websocket>`
3. Merge plugin auth form inputs into `authentication` schema at runtime
Acceptance criteria:
- Command prints valid JSON schema
- Schema reflects installed auth providers at runtime
- No panic when plugins fail to initialize (degrade gracefully)
### PR 5: Polymorphic request send
Scope:
1. Replace request resolution in `request send` with `get_any_request`
2. Dispatch by request type
3. Keep HTTP fully functional
4. Return explicit NYI errors for gRPC/WebSocket until implemented
Acceptance criteria:
- HTTP behavior remains unchanged
- gRPC/WebSocket IDs are recognized and return explicit status
### PR 6: Top-level `send` + bulk execution
Scope:
1. Add top-level `send <id>` for request/folder/workspace IDs
2. Implement folder/workspace fan-out execution
3. Add execution controls: `--sequential`, `--parallel`, `--fail-fast`
Acceptance criteria:
- Correct ID dispatch order: request -> folder -> workspace
- Deterministic summary output (success/failure counts)
- Non-zero exit code when any request fails (unless explicitly configured otherwise)
## Validation Matrix
1. CLI parsing tests for every command path (including aliases while retained)
2. Integration tests against temp SQLite DB for CRUD flows
3. Snapshot tests for output text where scripting compatibility matters
4. Manual smoke tests:
- Send HTTP request with template/rendered vars
- JSON create/update for each resource
- Delete confirmation and `--yes`
- Top-level `send` on request/folder/workspace
## Open Questions
1. Should compatibility aliases (`workspaces`, `requests`, `create`) be removed immediately or after one release cycle?
2. For bulk `send`, should default behavior stop on first failure or continue and summarize?
3. Should command output default to human-readable text with an optional `--format json`, or return JSON by default for `show`/`list`?
4. For `request schema`, should plugin-derived auth fields be namespaced by plugin ID to avoid collisions?
## Crate Changes
- **yaak-cli**: restructure into modules, new clap hierarchy
- **yaak-models**: add `schemars` dependency, derive `JsonSchema` on model structs
(current derives: `Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS`)

View File

@@ -0,0 +1,87 @@
# yaak-cli
Command-line interface for Yaak.
## Command Overview
Current top-level commands:
```text
yaakcli send <request_id>
yaakcli workspace list
yaakcli workspace show <workspace_id>
yaakcli workspace create --name <name>
yaakcli workspace create --json '{"name":"My Workspace"}'
yaakcli workspace create '{"name":"My Workspace"}'
yaakcli workspace update --json '{"id":"wk_abc","description":"Updated"}'
yaakcli workspace delete <workspace_id> [--yes]
yaakcli request list <workspace_id>
yaakcli request show <request_id>
yaakcli request send <request_id>
yaakcli request create <workspace_id> --name <name> --url <url> [--method GET]
yaakcli request create --json '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
yaakcli request create '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
yaakcli request delete <request_id> [--yes]
yaakcli folder list <workspace_id>
yaakcli folder show <folder_id>
yaakcli folder create <workspace_id> --name <name>
yaakcli folder create --json '{"workspaceId":"wk_abc","name":"Auth"}'
yaakcli folder create '{"workspaceId":"wk_abc","name":"Auth"}'
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
yaakcli folder delete <folder_id> [--yes]
yaakcli environment list <workspace_id>
yaakcli environment show <environment_id>
yaakcli environment create <workspace_id> --name <name>
yaakcli environment create --json '{"workspaceId":"wk_abc","name":"Production"}'
yaakcli environment create '{"workspaceId":"wk_abc","name":"Production"}'
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
yaakcli environment delete <environment_id> [--yes]
```
Global options:
- `--data-dir <path>`: use a custom data directory
- `-e, --environment <id>`: environment to use during request rendering/sending
- `-v, --verbose`: verbose logging and send output
Notes:
- `send` is currently a shortcut for sending an HTTP request by ID.
- `delete` commands prompt for confirmation unless `--yes` is provided.
- In non-interactive mode, `delete` commands require `--yes`.
- `create` and `update` commands support `--json` and positional JSON shorthand.
- `update` uses JSON Merge Patch semantics (RFC 7386) for partial updates.
## Examples
```bash
yaakcli workspace list
yaakcli workspace create --name "My Workspace"
yaakcli workspace show wk_abc
yaakcli workspace update --json '{"id":"wk_abc","description":"Team workspace"}'
yaakcli request list wk_abc
yaakcli request show rq_abc
yaakcli request create wk_abc --name "Users" --url "https://api.example.com/users"
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
yaakcli request send rq_abc -e ev_abc
yaakcli request delete rq_abc --yes
yaakcli folder create wk_abc --name "Auth"
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
yaakcli environment create wk_abc --name "Production"
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
```
## Roadmap
Planned command expansion (request schema and polymorphic send) is tracked in `PLAN.md`.
When command behavior changes, update this README and verify with:
```bash
cargo run -q -p yaak-cli -- --help
cargo run -q -p yaak-cli -- request --help
cargo run -q -p yaak-cli -- workspace --help
cargo run -q -p yaak-cli -- folder --help
cargo run -q -p yaak-cli -- environment --help
```

View File

@@ -0,0 +1,282 @@
use clap::{Args, Parser, Subcommand};
use std::path::PathBuf;
// Top-level CLI definition, parsed with clap's derive API.
// NOTE(review): `///` doc comments on fields become `--help` text at runtime,
// so explanatory notes below use plain `//` to avoid changing CLI output.
#[derive(Parser)]
#[command(name = "yaakcli")]
#[command(about = "Yaak CLI - API client from the command line")]
pub struct Cli {
    /// Use a custom data directory
    // `global = true` lets the flag appear after any subcommand.
    #[arg(long, global = true)]
    pub data_dir: Option<PathBuf>,
    /// Environment ID to use for variable substitution
    #[arg(long, short, global = true)]
    pub environment: Option<String>,
    /// Enable verbose logging
    #[arg(long, short, global = true)]
    pub verbose: bool,
    // The selected resource/action subcommand (see `Commands`).
    #[command(subcommand)]
    pub command: Commands,
}
// Resource-oriented top-level commands: nouns with verb subcommands,
// plus `send` as a convenience shortcut for sending a request by ID.
#[derive(Subcommand)]
pub enum Commands {
    /// Send an HTTP request by ID
    Send(SendArgs),
    /// Workspace commands
    Workspace(WorkspaceArgs),
    /// Request commands
    Request(RequestArgs),
    /// Folder commands
    Folder(FolderArgs),
    /// Environment commands
    Environment(EnvironmentArgs),
}
// Arguments for the top-level `send` shortcut (currently HTTP request IDs only).
#[derive(Args)]
pub struct SendArgs {
    /// Request ID
    pub request_id: String,
}
// Wrapper so `workspace` nests its own verb subcommands.
#[derive(Args)]
pub struct WorkspaceArgs {
    #[command(subcommand)]
    pub command: WorkspaceCommands,
}

// Verbs for the `workspace` resource.
#[derive(Subcommand)]
pub enum WorkspaceCommands {
    /// List all workspaces
    List,
    /// Show a workspace as JSON
    Show {
        /// Workspace ID
        workspace_id: String,
    },
    /// Create a workspace
    Create {
        /// Workspace name
        #[arg(short, long)]
        name: Option<String>,
        /// JSON payload
        // `--json` and the positional JSON shorthand are mutually exclusive
        // at the clap level; only one source of JSON may be given.
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,
        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },
    /// Update a workspace
    Update {
        /// JSON payload
        // The payload must carry the workspace "id" (enforced by the handler).
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,
        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },
    /// Delete a workspace
    Delete {
        /// Workspace ID
        workspace_id: String,
        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}
// Wrapper so `request` nests its own verb subcommands.
#[derive(Args)]
pub struct RequestArgs {
    #[command(subcommand)]
    pub command: RequestCommands,
}

// Verbs for the `request` resource.
#[derive(Subcommand)]
pub enum RequestCommands {
    /// List requests in a workspace
    List {
        /// Workspace ID
        workspace_id: String,
    },
    /// Show a request as JSON
    Show {
        /// Request ID
        request_id: String,
    },
    /// Send an HTTP request by ID
    Send {
        /// Request ID
        request_id: String,
    },
    /// Create a new HTTP request
    Create {
        /// Workspace ID (or positional JSON payload shorthand)
        // A leading `{` makes this argument a JSON payload instead of an ID
        // (detected at runtime by the command handler).
        workspace_id: Option<String>,
        /// Request name
        #[arg(short, long)]
        name: Option<String>,
        /// HTTP method
        #[arg(short, long)]
        method: Option<String>,
        /// URL
        #[arg(short, long)]
        url: Option<String>,
        /// JSON payload
        // No clap-level conflict here: invalid flag/payload combinations are
        // rejected at runtime by the handler (same pattern as environment create).
        #[arg(long)]
        json: Option<String>,
    },
    /// Update an HTTP request
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,
        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },
    /// Delete a request
    Delete {
        /// Request ID
        request_id: String,
        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}
// Wrapper so `folder` nests its own verb subcommands.
#[derive(Args)]
pub struct FolderArgs {
    #[command(subcommand)]
    pub command: FolderCommands,
}

// Verbs for the `folder` resource.
#[derive(Subcommand)]
pub enum FolderCommands {
    /// List folders in a workspace
    List {
        /// Workspace ID
        workspace_id: String,
    },
    /// Show a folder as JSON
    Show {
        /// Folder ID
        folder_id: String,
    },
    /// Create a folder
    Create {
        /// Workspace ID (or positional JSON payload shorthand)
        // A leading `{` makes this argument a JSON payload instead of an ID.
        workspace_id: Option<String>,
        /// Folder name
        #[arg(short, long)]
        name: Option<String>,
        /// JSON payload
        // Flag/payload combinations are validated at runtime by the handler.
        #[arg(long)]
        json: Option<String>,
    },
    /// Update a folder
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,
        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },
    /// Delete a folder
    Delete {
        /// Folder ID
        folder_id: String,
        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}
// Wrapper so `environment` nests its own verb subcommands.
#[derive(Args)]
pub struct EnvironmentArgs {
    #[command(subcommand)]
    pub command: EnvironmentCommands,
}

// Verbs for the `environment` resource.
#[derive(Subcommand)]
pub enum EnvironmentCommands {
    /// List environments in a workspace
    List {
        /// Workspace ID
        workspace_id: String,
    },
    /// Show an environment as JSON
    Show {
        /// Environment ID
        environment_id: String,
    },
    /// Create an environment
    Create {
        /// Workspace ID (or positional JSON payload shorthand)
        // A leading `{` makes this argument a JSON payload instead of an ID.
        workspace_id: Option<String>,
        /// Environment name
        #[arg(short, long)]
        name: Option<String>,
        /// JSON payload
        // Flag/payload combinations are validated at runtime by the handler.
        #[arg(long)]
        json: Option<String>,
    },
    /// Update an environment
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,
        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },
    /// Delete an environment
    Delete {
        /// Environment ID
        environment_id: String,
        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}

View File

@@ -0,0 +1,16 @@
use std::io::{self, IsTerminal, Write};
/// Prompt the user to confirm deletion of `resource_name` with `resource_id`.
///
/// Returns `true` only on an explicit "y"/"yes" answer (case-insensitive).
/// When stdin is not a terminal (scripts, CI) the process exits with status 1
/// immediately: non-interactive callers must pass `--yes` instead.
pub fn confirm_delete(resource_name: &str, resource_id: &str) -> bool {
    let interactive = io::stdin().is_terminal();
    if !interactive {
        eprintln!("Refusing to delete in non-interactive mode without --yes");
        std::process::exit(1);
    }

    // Prompt on the same line; flush so it is visible before blocking on input.
    print!("Delete {resource_name} {resource_id}? [y/N]: ");
    io::stdout().flush().expect("Failed to flush stdout");

    let mut answer = String::new();
    io::stdin().read_line(&mut answer).expect("Failed to read confirmation");
    let normalized = answer.trim().to_lowercase();
    normalized == "y" || normalized == "yes"
}

View File

@@ -0,0 +1,134 @@
use crate::cli::{EnvironmentArgs, EnvironmentCommands};
use crate::commands::confirm::confirm_delete;
use crate::commands::json::{
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
validate_create_id,
};
use crate::context::CliContext;
use yaak_models::models::Environment;
use yaak_models::util::UpdateSource;
pub fn run(ctx: &CliContext, args: EnvironmentArgs) {
match args.command {
EnvironmentCommands::List { workspace_id } => list(ctx, &workspace_id),
EnvironmentCommands::Show { environment_id } => show(ctx, &environment_id),
EnvironmentCommands::Create { workspace_id, name, json } => {
create(ctx, workspace_id, name, json)
}
EnvironmentCommands::Update { json, json_input } => update(ctx, json, json_input),
EnvironmentCommands::Delete { environment_id, yes } => delete(ctx, &environment_id, yes),
}
}
/// Print one line per environment in the workspace, or a placeholder when
/// the workspace has none.
fn list(ctx: &CliContext, workspace_id: &str) {
    let environments = ctx
        .db()
        .list_environments_ensure_base(workspace_id)
        .expect("Failed to list environments");

    // Guard clause: nothing to iterate over.
    if environments.is_empty() {
        println!("No environments found in workspace {}", workspace_id);
        return;
    }

    for env in environments {
        println!("{} - {} ({})", env.id, env.name, env.parent_model);
    }
}
/// Print the full environment model as pretty-printed JSON.
fn show(ctx: &CliContext, environment_id: &str) {
    let environment = ctx.db().get_environment(environment_id).expect("Failed to get environment");
    println!(
        "{}",
        serde_json::to_string_pretty(&environment).expect("Failed to serialize environment")
    );
}
// Create an environment either from a JSON payload (via --json or a positional
// argument starting with `{`) or from the flag-based path (workspace_id + --name).
// Panics with an actionable message on invalid argument combinations.
fn create(
    ctx: &CliContext,
    workspace_id: Option<String>,
    name: Option<String>,
    json: Option<String>,
) {
    // A real (non-JSON) positional workspace_id cannot be combined with --json:
    // the JSON payload must carry its own "workspaceId".
    if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
        panic!("environment create cannot combine workspace_id with --json payload");
    }
    // Resolve a payload from --json or the positional shorthand, if either was given.
    // NOTE(review): if both --json and a positional JSON payload are passed,
    // precedence is decided inside parse_optional_json — confirm its behavior.
    let payload = parse_optional_json(
        json,
        workspace_id.clone().filter(|v| is_json_shorthand(v)),
        "environment create",
    );
    if let Some(payload) = payload {
        if name.is_some() {
            panic!("environment create cannot combine --name with JSON payload");
        }
        // Create payloads must not specify a non-empty "id".
        validate_create_id(&payload, "environment");
        let mut environment: Environment =
            serde_json::from_value(payload).expect("Failed to parse environment create JSON");
        if environment.workspace_id.is_empty() {
            panic!("environment create JSON requires non-empty \"workspaceId\"");
        }
        // Default the parent model when the payload omitted it.
        if environment.parent_model.is_empty() {
            environment.parent_model = "environment".to_string();
        }
        let created = ctx
            .db()
            .upsert_environment(&environment, &UpdateSource::Sync)
            .expect("Failed to create environment");
        println!("Created environment: {}", created.id);
        return;
    }
    // Flag-based path: both a workspace_id and --name are required.
    let workspace_id = workspace_id.unwrap_or_else(|| {
        panic!("environment create requires workspace_id unless JSON payload is provided")
    });
    let name = name.unwrap_or_else(|| {
        panic!("environment create requires --name unless JSON payload is provided")
    });
    let environment = Environment {
        workspace_id,
        name,
        parent_model: "environment".to_string(),
        ..Default::default()
    };
    let created = ctx
        .db()
        .upsert_environment(&environment, &UpdateSource::Sync)
        .expect("Failed to create environment");
    println!("Created environment: {}", created.id);
}
/// Apply a JSON merge patch to an existing environment; the patch must carry
/// a non-empty "id" identifying the record to update.
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) {
    let patch = parse_required_json(json, json_input, "environment update");
    let id = require_id(&patch, "environment update");
    let existing = ctx.db().get_environment(&id).expect("Failed to get environment for update");
    let merged = apply_merge_patch(&existing, &patch, &id, "environment update");
    let saved = ctx
        .db()
        .upsert_environment(&merged, &UpdateSource::Sync)
        .expect("Failed to update environment");
    println!("Updated environment: {}", saved.id);
}
/// Delete an environment; `--yes` skips the interactive confirmation prompt.
fn delete(ctx: &CliContext, environment_id: &str, yes: bool) {
    let confirmed = yes || confirm_delete("environment", environment_id);
    if !confirmed {
        println!("Aborted");
        return;
    }
    let deleted = ctx
        .db()
        .delete_environment_by_id(environment_id, &UpdateSource::Sync)
        .expect("Failed to delete environment");
    println!("Deleted environment: {}", deleted.id);
}

View File

@@ -0,0 +1,115 @@
use crate::cli::{FolderArgs, FolderCommands};
use crate::commands::confirm::confirm_delete;
use crate::commands::json::{
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
validate_create_id,
};
use crate::context::CliContext;
use yaak_models::models::Folder;
use yaak_models::util::UpdateSource;
/// Dispatch a `folder` subcommand to its handler.
pub fn run(ctx: &CliContext, args: FolderArgs) {
    let command = args.command;
    match command {
        FolderCommands::Show { folder_id } => show(ctx, &folder_id),
        FolderCommands::List { workspace_id } => list(ctx, &workspace_id),
        FolderCommands::Create { workspace_id, name, json } => {
            create(ctx, workspace_id, name, json)
        }
        FolderCommands::Delete { folder_id, yes } => delete(ctx, &folder_id, yes),
        FolderCommands::Update { json, json_input } => update(ctx, json, json_input),
    }
}
/// Print every folder in `workspace_id` as `<id> - <name>`.
fn list(ctx: &CliContext, workspace_id: &str) {
    let folders = ctx.db().list_folders(workspace_id).expect("Failed to list folders");
    if folders.is_empty() {
        println!("No folders found in workspace {}", workspace_id);
        return;
    }
    for folder in &folders {
        println!("{} - {}", folder.id, folder.name);
    }
}
/// Pretty-print a single folder as JSON.
fn show(ctx: &CliContext, folder_id: &str) {
    let folder = ctx.db().get_folder(folder_id).expect("Failed to get folder");
    println!("{}", serde_json::to_string_pretty(&folder).expect("Failed to serialize folder"));
}
fn create(
ctx: &CliContext,
workspace_id: Option<String>,
name: Option<String>,
json: Option<String>,
) {
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
panic!("folder create cannot combine workspace_id with --json payload");
}
let payload = parse_optional_json(
json,
workspace_id.clone().filter(|v| is_json_shorthand(v)),
"folder create",
);
if let Some(payload) = payload {
if name.is_some() {
panic!("folder create cannot combine --name with JSON payload");
}
validate_create_id(&payload, "folder");
let folder: Folder =
serde_json::from_value(payload).expect("Failed to parse folder create JSON");
if folder.workspace_id.is_empty() {
panic!("folder create JSON requires non-empty \"workspaceId\"");
}
let created =
ctx.db().upsert_folder(&folder, &UpdateSource::Sync).expect("Failed to create folder");
println!("Created folder: {}", created.id);
return;
}
let workspace_id = workspace_id.unwrap_or_else(|| {
panic!("folder create requires workspace_id unless JSON payload is provided")
});
let name = name
.unwrap_or_else(|| panic!("folder create requires --name unless JSON payload is provided"));
let folder = Folder { workspace_id, name, ..Default::default() };
let created =
ctx.db().upsert_folder(&folder, &UpdateSource::Sync).expect("Failed to create folder");
println!("Created folder: {}", created.id);
}
/// Apply a JSON merge patch to an existing folder, identified by its "id".
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) {
    let patch = parse_required_json(json, json_input, "folder update");
    let id = require_id(&patch, "folder update");
    let existing = ctx.db().get_folder(&id).expect("Failed to get folder for update");
    let merged = apply_merge_patch(&existing, &patch, &id, "folder update");
    let saved =
        ctx.db().upsert_folder(&merged, &UpdateSource::Sync).expect("Failed to update folder");
    println!("Updated folder: {}", saved.id);
}
/// Delete a folder; `--yes` skips the interactive confirmation prompt.
fn delete(ctx: &CliContext, folder_id: &str, yes: bool) {
    let confirmed = yes || confirm_delete("folder", folder_id);
    if !confirmed {
        println!("Aborted");
        return;
    }
    let deleted = ctx
        .db()
        .delete_folder_by_id(folder_id, &UpdateSource::Sync)
        .expect("Failed to delete folder");
    println!("Deleted folder: {}", deleted.id);
}

View File

@@ -0,0 +1,108 @@
use serde::Serialize;
use serde::de::DeserializeOwned;
use serde_json::{Map, Value};
/// Heuristic: an argument counts as inline-JSON shorthand when its first
/// non-whitespace character is `{`.
pub fn is_json_shorthand(input: &str) -> bool {
    matches!(input.trim_start().as_bytes().first(), Some(b'{'))
}
/// Parse `raw` as JSON and require the result to be an object; panics with a
/// contextual message on invalid JSON or a non-object payload.
pub fn parse_json_object(raw: &str, context: &str) -> Value {
    match serde_json::from_str::<Value>(raw) {
        Err(error) => panic!("Invalid JSON for {context}: {error}"),
        Ok(value) if value.is_object() => value,
        Ok(_) => panic!("JSON payload for {context} must be an object"),
    }
}
/// Resolve the JSON payload from either `--json` or a positional shorthand
/// argument. Providing both is an error; providing neither yields `None`.
pub fn parse_optional_json(
    json_flag: Option<String>,
    json_shorthand: Option<String>,
    context: &str,
) -> Option<Value> {
    if json_flag.is_some() && json_shorthand.is_some() {
        panic!("Cannot provide both --json and positional JSON for {context}");
    }
    json_flag.or(json_shorthand).map(|raw| parse_json_object(&raw, context))
}
/// Like [`parse_optional_json`] but panics when no payload was supplied.
pub fn parse_required_json(
    json_flag: Option<String>,
    json_shorthand: Option<String>,
    context: &str,
) -> Value {
    match parse_optional_json(json_flag, json_shorthand, context) {
        Some(value) => value,
        None => panic!("Missing JSON payload for {context}. Use --json or positional JSON"),
    }
}
pub fn require_id(payload: &Value, context: &str) -> String {
payload
.get("id")
.and_then(|value| value.as_str())
.filter(|value| !value.is_empty())
.map(|value| value.to_string())
.unwrap_or_else(|| panic!("{context} requires a non-empty \"id\" field"))
}
/// Create payloads must not carry a meaningful "id": the field may be absent
/// or an empty string (letting the DB generate one), but nothing else.
pub fn validate_create_id(payload: &Value, context: &str) {
    match payload.get("id") {
        None => {}
        Some(Value::String(id)) if id.is_empty() => {}
        Some(_) => panic!("{context} create JSON must omit \"id\" or set it to an empty string"),
    }
}
pub fn apply_merge_patch<T>(existing: &T, patch: &Value, id: &str, context: &str) -> T
where
T: Serialize + DeserializeOwned,
{
let mut base = serde_json::to_value(existing).unwrap_or_else(|error| {
panic!("Failed to serialize existing model for {context}: {error}")
});
merge_patch(&mut base, patch);
let Some(base_object) = base.as_object_mut() else {
panic!("Merged payload for {context} must be an object");
};
base_object.insert("id".to_string(), Value::String(id.to_string()));
serde_json::from_value(base).unwrap_or_else(|error| {
panic!("Failed to deserialize merged payload for {context}: {error}")
})
}
/// JSON Merge Patch (RFC 7396 style): an object patch merges key-by-key,
/// with `null` removing a key; any non-object patch replaces the target
/// wholesale.
fn merge_patch(target: &mut Value, patch: &Value) {
    let Value::Object(patch_map) = patch else {
        *target = patch.clone();
        return;
    };
    // Merging into a non-object resets the target to an empty object first.
    if !target.is_object() {
        *target = Value::Object(Map::new());
    }
    let target_map = target.as_object_mut().expect("merge_patch target expected to be object");
    for (key, patch_value) in patch_map {
        if patch_value.is_null() {
            target_map.remove(key);
        } else {
            merge_patch(target_map.entry(key.clone()).or_insert(Value::Null), patch_value);
        }
    }
}

View File

@@ -0,0 +1,7 @@
// CLI command modules: one per resource type, plus shared helpers.
pub mod confirm; // interactive delete-confirmation prompt
pub mod environment; // `environment` subcommands
pub mod folder; // `folder` subcommands
pub mod json; // shared JSON payload parsing and merge-patch helpers
pub mod request; // `request` subcommands (including send)
pub mod send; // top-level `send` command (delegates to request::send_request_by_id)
pub mod workspace; // `workspace` subcommands

View File

@@ -0,0 +1,338 @@
use crate::cli::{RequestArgs, RequestCommands};
use crate::commands::confirm::confirm_delete;
use crate::commands::json::{
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
validate_create_id,
};
use crate::context::CliContext;
use log::info;
use serde_json::Value;
use std::collections::BTreeMap;
use tokio::sync::mpsc;
use yaak_http::path_placeholders::apply_path_placeholders;
use yaak_http::sender::{HttpSender, ReqwestSender};
use yaak_http::types::{SendableHttpRequest, SendableHttpRequestOptions};
use yaak_models::models::{Environment, HttpRequest, HttpRequestHeader, HttpUrlParameter};
use yaak_models::render::make_vars_hashmap;
use yaak_models::util::UpdateSource;
use yaak_plugins::events::{PluginContext, RenderPurpose};
use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_templates::{RenderOptions, parse_and_render, render_json_value_raw};
pub async fn run(
ctx: &CliContext,
args: RequestArgs,
environment: Option<&str>,
verbose: bool,
) -> i32 {
match args.command {
RequestCommands::List { workspace_id } => {
list(ctx, &workspace_id);
0
}
RequestCommands::Show { request_id } => {
show(ctx, &request_id);
0
}
RequestCommands::Send { request_id } => {
match send_request_by_id(ctx, &request_id, environment, verbose).await {
Ok(()) => 0,
Err(error) => {
eprintln!("Error: {error}");
1
}
}
}
RequestCommands::Create { workspace_id, name, method, url, json } => {
create(ctx, workspace_id, name, method, url, json);
0
}
RequestCommands::Update { json, json_input } => {
update(ctx, json, json_input);
0
}
RequestCommands::Delete { request_id, yes } => {
delete(ctx, &request_id, yes);
0
}
}
}
/// Print every HTTP request in `workspace_id` as `<id> - <METHOD> <name>`.
fn list(ctx: &CliContext, workspace_id: &str) {
    let requests = ctx.db().list_http_requests(workspace_id).expect("Failed to list requests");
    if requests.is_empty() {
        println!("No requests found in workspace {}", workspace_id);
        return;
    }
    for request in &requests {
        println!("{} - {} {}", request.id, request.method, request.name);
    }
}
fn create(
ctx: &CliContext,
workspace_id: Option<String>,
name: Option<String>,
method: Option<String>,
url: Option<String>,
json: Option<String>,
) {
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
panic!("request create cannot combine workspace_id with --json payload");
}
let payload = parse_optional_json(
json,
workspace_id.clone().filter(|v| is_json_shorthand(v)),
"request create",
);
if let Some(payload) = payload {
if name.is_some() || method.is_some() || url.is_some() {
panic!("request create cannot combine simple flags with JSON payload");
}
validate_create_id(&payload, "request");
let request: HttpRequest =
serde_json::from_value(payload).expect("Failed to parse request create JSON");
if request.workspace_id.is_empty() {
panic!("request create JSON requires non-empty \"workspaceId\"");
}
let created = ctx
.db()
.upsert_http_request(&request, &UpdateSource::Sync)
.expect("Failed to create request");
println!("Created request: {}", created.id);
return;
}
let workspace_id = workspace_id.unwrap_or_else(|| {
panic!("request create requires workspace_id unless JSON payload is provided")
});
let name = name.unwrap_or_else(|| {
panic!("request create requires --name unless JSON payload is provided")
});
let url = url
.unwrap_or_else(|| panic!("request create requires --url unless JSON payload is provided"));
let method = method.unwrap_or_else(|| "GET".to_string());
let request = HttpRequest {
workspace_id,
name,
method: method.to_uppercase(),
url,
..Default::default()
};
let created = ctx
.db()
.upsert_http_request(&request, &UpdateSource::Sync)
.expect("Failed to create request");
println!("Created request: {}", created.id);
}
/// Apply a JSON merge patch to an existing HTTP request, identified by "id".
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) {
    let patch = parse_required_json(json, json_input, "request update");
    let id = require_id(&patch, "request update");
    let existing = ctx.db().get_http_request(&id).expect("Failed to get request for update");
    let merged = apply_merge_patch(&existing, &patch, &id, "request update");
    let saved = ctx
        .db()
        .upsert_http_request(&merged, &UpdateSource::Sync)
        .expect("Failed to update request");
    println!("Updated request: {}", saved.id);
}
/// Pretty-print a single HTTP request as JSON.
fn show(ctx: &CliContext, request_id: &str) {
    let request = ctx.db().get_http_request(request_id).expect("Failed to get request");
    println!("{}", serde_json::to_string_pretty(&request).expect("Failed to serialize request"));
}
/// Delete an HTTP request; `--yes` skips the interactive confirmation prompt.
fn delete(ctx: &CliContext, request_id: &str, yes: bool) {
    let confirmed = yes || confirm_delete("request", request_id);
    if !confirmed {
        println!("Aborted");
        return;
    }
    let deleted = ctx
        .db()
        .delete_http_request_by_id(request_id, &UpdateSource::Sync)
        .expect("Failed to delete request");
    println!("Deleted request: {}", deleted.id);
}
/// Send a request by ID and print response in the same format as legacy `send`.
///
/// Loads the request, renders its templates against the resolved environment
/// chain (plugins + encryption are available to template functions), sends it,
/// and prints status / optional headers / body to stdout. Returns
/// `Err(message)` rather than panicking so callers can map failures to an
/// exit code.
pub async fn send_request_by_id(
    ctx: &CliContext,
    request_id: &str,
    environment: Option<&str>,
    verbose: bool,
) -> Result<(), String> {
    let request =
        ctx.db().get_http_request(request_id).map_err(|e| format!("Failed to get request: {e}"))?;
    // Environments are resolved from the request's workspace and folder;
    // `environment` optionally selects a specific environment by id.
    let environment_chain = ctx
        .db()
        .resolve_environments(&request.workspace_id, request.folder_id.as_deref(), environment)
        .map_err(|e| format!("Failed to resolve environments: {e}"))?;
    let plugin_context = PluginContext::new(None, Some(request.workspace_id.clone()));
    // NOTE: ctx.plugin_manager() panics if plugins were not initialized for
    // this command (see CliContext::plugin_manager).
    let template_callback = PluginTemplateCallback::new(
        ctx.plugin_manager(),
        ctx.encryption_manager.clone(),
        &plugin_context,
        RenderPurpose::Send,
    );
    // RenderOptions::throw() surfaces template errors instead of ignoring them.
    let rendered_request = render_http_request(
        &request,
        environment_chain,
        &template_callback,
        &RenderOptions::throw(),
    )
    .await
    .map_err(|e| format!("Failed to render request templates: {e}"))?;
    if verbose {
        println!("> {} {}", rendered_request.method, rendered_request.url);
    }
    let sendable = SendableHttpRequest::from_http_request(
        &rendered_request,
        SendableHttpRequestOptions::default(),
    )
    .await
    .map_err(|e| format!("Failed to build request: {e}"))?;
    // Progress events from the sender are either printed (verbose) or drained
    // silently; in both cases the channel must be consumed so the sender
    // never blocks on a full buffer.
    let (event_tx, mut event_rx) = mpsc::channel(100);
    let verbose_handle = if verbose {
        Some(tokio::spawn(async move {
            while let Some(event) = event_rx.recv().await {
                println!("{}", event);
            }
        }))
    } else {
        tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
        None
    };
    let sender = ReqwestSender::new().map_err(|e| format!("Failed to create HTTP client: {e}"))?;
    let response = sender
        .send(sendable, event_tx)
        .await
        .map_err(|e| format!("Failed to send request: {e}"))?;
    // Wait for the printer task so event output lands before the response.
    if let Some(handle) = verbose_handle {
        let _ = handle.await;
    }
    if verbose {
        println!();
    }
    println!("HTTP {} {}", response.status, response.status_reason.as_deref().unwrap_or(""));
    if verbose {
        for (name, value) in &response.headers {
            println!("{}: {}", name, value);
        }
        println!();
    }
    let (body, _stats) =
        response.text().await.map_err(|e| format!("Failed to read response body: {e}"))?;
    println!("{}", body);
    Ok(())
}
/// Render an HTTP request with template variables and plugin functions.
///
/// Renders URL, query parameters, headers, body values, and authentication
/// config; disabled parameters/headers are dropped entirely. Path
/// placeholders in the URL are substituted from the rendered parameters.
async fn render_http_request(
    request: &HttpRequest,
    environment_chain: Vec<Environment>,
    callback: &PluginTemplateCallback,
    options: &RenderOptions,
) -> yaak_templates::error::Result<HttpRequest> {
    let vars = &make_vars_hashmap(environment_chain);
    // Only enabled URL parameters survive rendering.
    let mut url_parameters = Vec::new();
    for parameter in request.url_parameters.clone() {
        if !parameter.enabled {
            continue;
        }
        url_parameters.push(HttpUrlParameter {
            enabled: parameter.enabled,
            name: parse_and_render(parameter.name.as_str(), vars, callback, options).await?,
            value: parse_and_render(parameter.value.as_str(), vars, callback, options).await?,
            id: parameter.id,
        })
    }
    // Only enabled headers survive rendering.
    let mut headers = Vec::new();
    for header in request.headers.clone() {
        if !header.enabled {
            continue;
        }
        headers.push(HttpRequestHeader {
            enabled: header.enabled,
            name: parse_and_render(header.name.as_str(), vars, callback, options).await?,
            value: parse_and_render(header.value.as_str(), vars, callback, options).await?,
            id: header.id,
        })
    }
    // Body values may be arbitrary JSON; render templates nested inside them.
    let mut body = BTreeMap::new();
    for (key, value) in request.body.clone() {
        body.insert(key, render_json_value_raw(value, vars, callback, options).await?);
    }
    let authentication = {
        let mut disabled = false;
        let mut auth = BTreeMap::new();
        match request.authentication.get("disabled") {
            Some(Value::Bool(true)) => {
                disabled = true;
            }
            Some(Value::String(template)) => {
                // NOTE(review): a "disabled" template that renders to an EMPTY
                // string marks auth as disabled, while any non-empty result
                // (even "true") leaves it enabled. This mirrors the previous
                // implementation, but looks inverted relative to truthiness —
                // confirm against the desktop app's convention.
                disabled = parse_and_render(template.as_str(), vars, callback, options)
                    .await
                    .unwrap_or_default()
                    .is_empty();
                info!(
                    "Rendering authentication.disabled as a template: {disabled} from \"{template}\""
                );
            }
            _ => {}
        }
        if disabled {
            // Disabled auth collapses to just the flag; no values are rendered.
            auth.insert("disabled".to_string(), Value::Bool(true));
        } else {
            for (key, value) in request.authentication.clone() {
                if key == "disabled" {
                    auth.insert(key, Value::Bool(false));
                } else {
                    auth.insert(key, render_json_value_raw(value, vars, callback, options).await?);
                }
            }
        }
        auth
    };
    let url = parse_and_render(request.url.clone().as_str(), vars, callback, options).await?;
    // Substitute path placeholders (e.g. /users/:id) from rendered parameters.
    let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
    Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..request.to_owned() })
}

View File

@@ -0,0 +1,18 @@
use crate::cli::SendArgs;
use crate::commands::request;
use crate::context::CliContext;
/// Top-level `send` command: delegate to the shared request sender and map
/// the outcome to a process exit code (0 on success, 1 on failure).
pub async fn run(
    ctx: &CliContext,
    args: SendArgs,
    environment: Option<&str>,
    verbose: bool,
) -> i32 {
    let result = request::send_request_by_id(ctx, &args.request_id, environment, verbose).await;
    if let Err(error) = result {
        eprintln!("Error: {error}");
        return 1;
    }
    0
}

View File

@@ -0,0 +1,100 @@
use crate::cli::{WorkspaceArgs, WorkspaceCommands};
use crate::commands::confirm::confirm_delete;
use crate::commands::json::{
apply_merge_patch, parse_optional_json, parse_required_json, require_id, validate_create_id,
};
use crate::context::CliContext;
use yaak_models::models::Workspace;
use yaak_models::util::UpdateSource;
/// Dispatch a `workspace` subcommand to its handler.
pub fn run(ctx: &CliContext, args: WorkspaceArgs) {
    let command = args.command;
    match command {
        WorkspaceCommands::Show { workspace_id } => show(ctx, &workspace_id),
        WorkspaceCommands::List => list(ctx),
        WorkspaceCommands::Create { name, json, json_input } => create(ctx, name, json, json_input),
        WorkspaceCommands::Delete { workspace_id, yes } => delete(ctx, &workspace_id, yes),
        WorkspaceCommands::Update { json, json_input } => update(ctx, json, json_input),
    }
}
/// Print every workspace as `<id> - <name>`.
fn list(ctx: &CliContext) {
    let workspaces = ctx.db().list_workspaces().expect("Failed to list workspaces");
    if workspaces.is_empty() {
        println!("No workspaces found");
        return;
    }
    for workspace in &workspaces {
        println!("{} - {}", workspace.id, workspace.name);
    }
}
/// Pretty-print a single workspace as JSON.
fn show(ctx: &CliContext, workspace_id: &str) {
    let workspace = ctx.db().get_workspace(workspace_id).expect("Failed to get workspace");
    println!(
        "{}",
        serde_json::to_string_pretty(&workspace).expect("Failed to serialize workspace")
    );
}
/// Create a workspace from a JSON payload (`--json` or `--json-input`) or
/// from a plain `--name`. The two modes may not be mixed.
fn create(
    ctx: &CliContext,
    name: Option<String>,
    json: Option<String>,
    json_input: Option<String>,
) {
    // Shared tail: write the model and report its generated id.
    let persist = |workspace: &Workspace| {
        let created = ctx
            .db()
            .upsert_workspace(workspace, &UpdateSource::Sync)
            .expect("Failed to create workspace");
        println!("Created workspace: {}", created.id);
    };
    match parse_optional_json(json, json_input, "workspace create") {
        Some(payload) => {
            if name.is_some() {
                panic!("workspace create cannot combine --name with JSON payload");
            }
            validate_create_id(&payload, "workspace");
            let workspace: Workspace =
                serde_json::from_value(payload).expect("Failed to parse workspace create JSON");
            persist(&workspace);
        }
        None => {
            let name = name.unwrap_or_else(|| {
                panic!("workspace create requires --name unless JSON payload is provided")
            });
            persist(&Workspace { name, ..Default::default() });
        }
    }
}
/// Apply a JSON merge patch to an existing workspace, identified by "id".
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) {
    let patch = parse_required_json(json, json_input, "workspace update");
    let id = require_id(&patch, "workspace update");
    let existing = ctx.db().get_workspace(&id).expect("Failed to get workspace for update");
    let merged = apply_merge_patch(&existing, &patch, &id, "workspace update");
    let saved = ctx
        .db()
        .upsert_workspace(&merged, &UpdateSource::Sync)
        .expect("Failed to update workspace");
    println!("Updated workspace: {}", saved.id);
}
/// Delete a workspace; `--yes` skips the interactive confirmation prompt.
fn delete(ctx: &CliContext, workspace_id: &str, yes: bool) {
    let confirmed = yes || confirm_delete("workspace", workspace_id);
    if !confirmed {
        println!("Aborted");
        return;
    }
    let deleted = ctx
        .db()
        .delete_workspace_by_id(workspace_id, &UpdateSource::Sync)
        .expect("Failed to delete workspace");
    println!("Deleted workspace: {}", deleted.id);
}

View File

@@ -0,0 +1,82 @@
use std::path::PathBuf;
use std::sync::Arc;
use yaak_crypto::manager::EncryptionManager;
use yaak_models::db_context::DbContext;
use yaak_models::query_manager::QueryManager;
use yaak_plugins::events::PluginContext;
use yaak_plugins::manager::PluginManager;
/// Shared state for a single CLI invocation: database access, encryption,
/// and (optionally) the plugin runtime.
pub struct CliContext {
    // Connection factory for per-operation database contexts (see `db()`).
    query_manager: QueryManager,
    // Encryption for template functions; created with the app_id so it can
    // share keyring entries with the desktop app.
    pub encryption_manager: Arc<EncryptionManager>,
    // Populated only when `initialize` was called with `with_plugins = true`;
    // plain CRUD commands run without a plugin runtime.
    plugin_manager: Option<Arc<PluginManager>>,
}
impl CliContext {
    /// Build the CLI context: open the databases under `data_dir`, set up
    /// encryption for `app_id`, and (when `with_plugins` is true) boot the
    /// Node-based plugin runtime and initialize installed plugins.
    pub async fn initialize(data_dir: PathBuf, app_id: &str, with_plugins: bool) -> Self {
        let db_path = data_dir.join("db.sqlite");
        let blob_path = data_dir.join("blobs.sqlite");
        // Blob manager and model-change receiver are unused in the CLI.
        let (query_manager, _blob_manager, _rx) =
            yaak_models::init_standalone(&db_path, &blob_path)
                .expect("Failed to initialize database");
        let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));
        let plugin_manager = if with_plugins {
            let vendored_plugin_dir = data_dir.join("vendored-plugins");
            let installed_plugin_dir = data_dir.join("installed-plugins");
            // Uses the system `node` binary; it must be on PATH.
            let node_bin_path = PathBuf::from("node");
            // Runtime entry point: YAAK_PLUGIN_RUNTIME env var, falling back
            // to the development path relative to this crate.
            let plugin_runtime_main =
                std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
                    PathBuf::from(env!("CARGO_MANIFEST_DIR"))
                        .join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
                });
            let plugin_manager = Arc::new(
                PluginManager::new(
                    vendored_plugin_dir,
                    installed_plugin_dir,
                    node_bin_path,
                    plugin_runtime_main,
                    false,
                )
                .await,
            );
            // Initialize plugins recorded in the DB; failures are warnings,
            // not fatal, so the command can still run.
            let plugins = query_manager.connect().list_plugins().unwrap_or_default();
            if !plugins.is_empty() {
                let errors = plugin_manager
                    .initialize_all_plugins(plugins, &PluginContext::new_empty())
                    .await;
                for (plugin_dir, error_msg) in errors {
                    eprintln!(
                        "Warning: Failed to initialize plugin '{}': {}",
                        plugin_dir, error_msg
                    );
                }
            }
            Some(plugin_manager)
        } else {
            None
        };
        Self { query_manager, encryption_manager, plugin_manager }
    }
    /// Open a database context for one logical operation.
    pub fn db(&self) -> DbContext<'_> {
        self.query_manager.connect()
    }
    /// Plugin runtime handle; panics if `initialize` ran with
    /// `with_plugins = false` for this command.
    pub fn plugin_manager(&self) -> Arc<PluginManager> {
        self.plugin_manager.clone().expect("Plugin manager was not initialized for this command")
    }
    /// Gracefully stop the plugin runtime (no-op when plugins were not started).
    pub async fn shutdown(&self) {
        if let Some(plugin_manager) = &self.plugin_manager {
            plugin_manager.terminate().await;
        }
    }
}

View File

@@ -1,409 +1,57 @@
use clap::{Parser, Subcommand};
use log::info;
use serde_json::Value;
use std::collections::BTreeMap;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::mpsc;
use yaak_crypto::manager::EncryptionManager;
use yaak_http::path_placeholders::apply_path_placeholders;
use yaak_http::sender::{HttpSender, ReqwestSender};
use yaak_http::types::{SendableHttpRequest, SendableHttpRequestOptions};
use yaak_models::models::{HttpRequest, HttpRequestHeader, HttpUrlParameter};
use yaak_models::render::make_vars_hashmap;
use yaak_models::util::UpdateSource;
use yaak_plugins::events::{PluginContext, RenderPurpose};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_templates::{RenderOptions, parse_and_render, render_json_value_raw};
mod cli;
mod commands;
mod context;
#[derive(Parser)]
#[command(name = "yaakcli")]
#[command(about = "Yaak CLI - API client from the command line")]
struct Cli {
/// Use a custom data directory
#[arg(long, global = true)]
data_dir: Option<PathBuf>,
/// Environment ID to use for variable substitution
#[arg(long, short, global = true)]
environment: Option<String>,
/// Enable verbose logging
#[arg(long, short, global = true)]
verbose: bool,
#[command(subcommand)]
command: Commands,
}
#[derive(Subcommand)]
enum Commands {
/// List all workspaces
Workspaces,
/// List requests in a workspace
Requests {
/// Workspace ID
workspace_id: String,
},
/// Send an HTTP request by ID
Send {
/// Request ID
request_id: String,
},
/// Send a GET request to a URL
Get {
/// URL to request
url: String,
},
/// Create a new HTTP request
Create {
/// Workspace ID
workspace_id: String,
/// Request name
#[arg(short, long)]
name: String,
/// HTTP method
#[arg(short, long, default_value = "GET")]
method: String,
/// URL
#[arg(short, long)]
url: String,
},
}
/// Render an HTTP request with template variables and plugin functions
async fn render_http_request(
r: &HttpRequest,
environment_chain: Vec<yaak_models::models::Environment>,
cb: &PluginTemplateCallback,
opt: &RenderOptions,
) -> yaak_templates::error::Result<HttpRequest> {
let vars = &make_vars_hashmap(environment_chain);
let mut url_parameters = Vec::new();
for p in r.url_parameters.clone() {
if !p.enabled {
continue;
}
url_parameters.push(HttpUrlParameter {
enabled: p.enabled,
name: parse_and_render(p.name.as_str(), vars, cb, opt).await?,
value: parse_and_render(p.value.as_str(), vars, cb, opt).await?,
id: p.id,
})
}
let mut headers = Vec::new();
for p in r.headers.clone() {
if !p.enabled {
continue;
}
headers.push(HttpRequestHeader {
enabled: p.enabled,
name: parse_and_render(p.name.as_str(), vars, cb, opt).await?,
value: parse_and_render(p.value.as_str(), vars, cb, opt).await?,
id: p.id,
})
}
let mut body = BTreeMap::new();
for (k, v) in r.body.clone() {
body.insert(k, render_json_value_raw(v, vars, cb, opt).await?);
}
let authentication = {
let mut disabled = false;
let mut auth = BTreeMap::new();
match r.authentication.get("disabled") {
Some(Value::Bool(true)) => {
disabled = true;
}
Some(Value::String(tmpl)) => {
disabled = parse_and_render(tmpl.as_str(), vars, cb, opt)
.await
.unwrap_or_default()
.is_empty();
info!(
"Rendering authentication.disabled as a template: {disabled} from \"{tmpl}\""
);
}
_ => {}
}
if disabled {
auth.insert("disabled".to_string(), Value::Bool(true));
} else {
for (k, v) in r.authentication.clone() {
if k == "disabled" {
auth.insert(k, Value::Bool(false));
} else {
auth.insert(k, render_json_value_raw(v, vars, cb, opt).await?);
}
}
}
auth
};
let url = parse_and_render(r.url.clone().as_str(), vars, cb, opt).await?;
// Apply path placeholders (e.g., /users/:id -> /users/123)
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
}
use clap::Parser;
use cli::{Cli, Commands, RequestCommands};
use context::CliContext;
#[tokio::main]
async fn main() {
let cli = Cli::parse();
let Cli { data_dir, environment, verbose, command } = Cli::parse();
// Initialize logging
if cli.verbose {
if verbose {
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
}
// Use the same app_id for both data directory and keyring
let app_id = if cfg!(debug_assertions) { "app.yaak.desktop.dev" } else { "app.yaak.desktop" };
let data_dir = cli.data_dir.unwrap_or_else(|| {
let data_dir = data_dir.unwrap_or_else(|| {
dirs::data_dir().expect("Could not determine data directory").join(app_id)
});
let db_path = data_dir.join("db.sqlite");
let blob_path = data_dir.join("blobs.sqlite");
let (query_manager, _blob_manager, _rx) =
yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize database");
let db = query_manager.connect();
// Initialize encryption manager for secure() template function
// Use the same app_id as the Tauri app for keyring access
let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));
// Initialize plugin manager for template functions
let vendored_plugin_dir = data_dir.join("vendored-plugins");
let installed_plugin_dir = data_dir.join("installed-plugins");
// Use system node for CLI (must be in PATH)
let node_bin_path = PathBuf::from("node");
// Find the plugin runtime - check YAAK_PLUGIN_RUNTIME env var, then fallback to development path
let plugin_runtime_main =
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
// Development fallback: look relative to crate root
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
});
// Create plugin manager (plugins may not be available in CLI context)
let plugin_manager = Arc::new(
PluginManager::new(
vendored_plugin_dir,
installed_plugin_dir,
node_bin_path,
plugin_runtime_main,
false,
)
.await,
let needs_plugins = matches!(
&command,
Commands::Send(_)
| Commands::Request(cli::RequestArgs { command: RequestCommands::Send { .. } })
);
// Initialize plugins from database
let plugins = db.list_plugins().unwrap_or_default();
if !plugins.is_empty() {
let errors =
plugin_manager.initialize_all_plugins(plugins, &PluginContext::new_empty()).await;
for (plugin_dir, error_msg) in errors {
eprintln!("Warning: Failed to initialize plugin '{}': {}", plugin_dir, error_msg);
let context = CliContext::initialize(data_dir, app_id, needs_plugins).await;
let exit_code = match command {
Commands::Send(args) => {
commands::send::run(&context, args, environment.as_deref(), verbose).await
}
Commands::Workspace(args) => {
commands::workspace::run(&context, args);
0
}
Commands::Request(args) => {
commands::request::run(&context, args, environment.as_deref(), verbose).await
}
Commands::Folder(args) => {
commands::folder::run(&context, args);
0
}
Commands::Environment(args) => {
commands::environment::run(&context, args);
0
}
};
context.shutdown().await;
if exit_code != 0 {
std::process::exit(exit_code);
}
match cli.command {
Commands::Workspaces => {
let workspaces = db.list_workspaces().expect("Failed to list workspaces");
if workspaces.is_empty() {
println!("No workspaces found");
} else {
for ws in workspaces {
println!("{} - {}", ws.id, ws.name);
}
}
}
Commands::Requests { workspace_id } => {
let requests = db.list_http_requests(&workspace_id).expect("Failed to list requests");
if requests.is_empty() {
println!("No requests found in workspace {}", workspace_id);
} else {
for req in requests {
println!("{} - {} {}", req.id, req.method, req.name);
}
}
}
Commands::Send { request_id } => {
let request = db.get_http_request(&request_id).expect("Failed to get request");
// Resolve environment chain for variable substitution
let environment_chain = db
.resolve_environments(
&request.workspace_id,
request.folder_id.as_deref(),
cli.environment.as_deref(),
)
.unwrap_or_default();
// Create template callback with plugin support
let plugin_context = PluginContext::new(None, Some(request.workspace_id.clone()));
let template_callback = PluginTemplateCallback::new(
plugin_manager.clone(),
encryption_manager.clone(),
&plugin_context,
RenderPurpose::Send,
);
// Render templates in the request
let rendered_request = render_http_request(
&request,
environment_chain,
&template_callback,
&RenderOptions::throw(),
)
.await
.expect("Failed to render request templates");
if cli.verbose {
println!("> {} {}", rendered_request.method, rendered_request.url);
}
// Convert to sendable request
let sendable = SendableHttpRequest::from_http_request(
&rendered_request,
SendableHttpRequestOptions::default(),
)
.await
.expect("Failed to build request");
// Create event channel for progress
let (event_tx, mut event_rx) = mpsc::channel(100);
// Spawn task to print events if verbose
let verbose = cli.verbose;
let verbose_handle = if verbose {
Some(tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
println!("{}", event);
}
}))
} else {
// Drain events silently
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
None
};
// Send the request
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
// Wait for event handler to finish
if let Some(handle) = verbose_handle {
let _ = handle.await;
}
// Print response
if verbose {
println!();
}
println!(
"HTTP {} {}",
response.status,
response.status_reason.as_deref().unwrap_or("")
);
if verbose {
for (name, value) in &response.headers {
println!("{}: {}", name, value);
}
println!();
}
// Print body
let (body, _stats) = response.text().await.expect("Failed to read response body");
println!("{}", body);
}
Commands::Get { url } => {
if cli.verbose {
println!("> GET {}", url);
}
// Build a simple GET request
let sendable = SendableHttpRequest {
url: url.clone(),
method: "GET".to_string(),
headers: vec![],
body: None,
options: SendableHttpRequestOptions::default(),
};
// Create event channel for progress
let (event_tx, mut event_rx) = mpsc::channel(100);
// Spawn task to print events if verbose
let verbose = cli.verbose;
let verbose_handle = if verbose {
Some(tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
println!("{}", event);
}
}))
} else {
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
None
};
// Send the request
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
if let Some(handle) = verbose_handle {
let _ = handle.await;
}
// Print response
if verbose {
println!();
}
println!(
"HTTP {} {}",
response.status,
response.status_reason.as_deref().unwrap_or("")
);
if verbose {
for (name, value) in &response.headers {
println!("{}: {}", name, value);
}
println!();
}
// Print body
let (body, _stats) = response.text().await.expect("Failed to read response body");
println!("{}", body);
}
Commands::Create { workspace_id, name, method, url } => {
let request = HttpRequest {
workspace_id,
name,
method: method.to_uppercase(),
url,
..Default::default()
};
let created = db
.upsert_http_request(&request, &UpdateSource::Sync)
.expect("Failed to create request");
println!("Created request: {}", created.id);
}
}
// Terminate plugin manager gracefully
plugin_manager.terminate().await;
}

View File

@@ -0,0 +1,60 @@
#![allow(dead_code)]
use assert_cmd::Command;
use assert_cmd::cargo::cargo_bin_cmd;
use std::path::Path;
use yaak_models::models::{HttpRequest, Workspace};
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
/// Build a `yaakcli` invocation whose state lives entirely under `data_dir`,
/// keeping each integration test isolated from the others.
pub fn cli_cmd(data_dir: &Path) -> Command {
    let mut command = cargo_bin_cmd!("yaakcli");
    command.arg("--data-dir").arg(data_dir);
    command
}
/// Extract the trailing id from CLI output of the form `Created <thing>: <id>`.
///
/// `label` names the command under test; on failure the panic message now
/// includes the actual (trimmed) output so a CI failure is diagnosable without
/// re-running the test locally.
pub fn parse_created_id(stdout: &[u8], label: &str) -> String {
    let text = String::from_utf8_lossy(stdout);
    let trimmed = text.trim();
    trimmed
        .split_once(": ")
        .map(|(_, id)| id.to_string())
        .unwrap_or_else(|| panic!("Expected id in '{label}' output, got: {trimmed:?}"))
}
/// Open the standalone SQLite databases under `data_dir` and hand back just
/// the `QueryManager` (the blob manager and event receiver are dropped).
pub fn query_manager(data_dir: &Path) -> QueryManager {
    let db_path = data_dir.join("db.sqlite");
    let blob_path = data_dir.join("blobs.sqlite");
    let (manager, _blobs, _events) =
        yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize DB");
    manager
}
/// Insert a workspace with a fixed id so CLI commands under test have a
/// known parent to operate on.
pub fn seed_workspace(data_dir: &Path, workspace_id: &str) {
    let seeded = Workspace {
        id: workspace_id.to_string(),
        name: "Seed Workspace".to_string(),
        description: "Seeded for integration tests".to_string(),
        ..Default::default()
    };
    let manager = query_manager(data_dir);
    manager
        .connect()
        .upsert_workspace(&seeded, &UpdateSource::Sync)
        .expect("Failed to seed workspace");
}
/// Insert an HTTP request with a fixed id into the given workspace, for tests
/// that need a pre-existing request rather than one created via the CLI.
pub fn seed_request(data_dir: &Path, workspace_id: &str, request_id: &str) {
    let seeded = HttpRequest {
        id: request_id.to_string(),
        workspace_id: workspace_id.to_string(),
        name: "Seeded Request".to_string(),
        method: "GET".to_string(),
        url: "https://example.com".to_string(),
        ..Default::default()
    };
    let manager = query_manager(data_dir);
    manager
        .connect()
        .upsert_http_request(&seeded, &UpdateSource::Sync)
        .expect("Failed to seed request");
}

View File

@@ -0,0 +1,80 @@
mod common;
use common::{cli_cmd, parse_created_id, query_manager, seed_workspace};
use predicates::str::contains;
use tempfile::TempDir;
#[test]
fn create_list_show_delete_round_trip() {
    // Fresh throwaway data dir so runs are fully isolated.
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");

    // A fresh workspace only exposes the built-in global environment.
    cli_cmd(dir)
        .args(["environment", "list", "wk_test"])
        .assert()
        .success()
        .stdout(contains("Global Variables"));

    // Create an environment and capture its generated id.
    let created = cli_cmd(dir)
        .args(["environment", "create", "wk_test", "--name", "Production"])
        .assert()
        .success();
    let environment_id = parse_created_id(&created.get_output().stdout, "environment create");

    // The new environment now appears in the listing.
    cli_cmd(dir)
        .args(["environment", "list", "wk_test"])
        .assert()
        .success()
        .stdout(contains(&environment_id))
        .stdout(contains("Production"));

    // `show` renders the model as JSON.
    cli_cmd(dir)
        .args(["environment", "show", &environment_id])
        .assert()
        .success()
        .stdout(contains(format!("\"id\": \"{environment_id}\"")))
        .stdout(contains("\"parentModel\": \"environment\""));

    // `--yes` skips the interactive confirmation; the row must really be gone.
    cli_cmd(dir)
        .args(["environment", "delete", &environment_id, "--yes"])
        .assert()
        .success()
        .stdout(contains(format!("Deleted environment: {environment_id}")));
    assert!(query_manager(dir).connect().get_environment(&environment_id).is_err());
}
#[test]
fn json_create_and_update_merge_patch_round_trip() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");

    // Create directly from a JSON payload instead of flag arguments.
    let created = cli_cmd(dir)
        .args(["environment", "create", r#"{"workspaceId":"wk_test","name":"Json Environment"}"#])
        .assert()
        .success();
    let environment_id = parse_created_id(&created.get_output().stdout, "environment create");

    // Update applies a JSON merge patch keyed by id.
    let patch = format!(r##"{{"id":"{}","color":"#00ff00"}}"##, environment_id);
    cli_cmd(dir)
        .args(["environment", "update", &patch])
        .assert()
        .success()
        .stdout(contains(format!("Updated environment: {environment_id}")));

    // Unpatched fields survive; patched fields take the new value.
    cli_cmd(dir)
        .args(["environment", "show", &environment_id])
        .assert()
        .success()
        .stdout(contains("\"name\": \"Json Environment\""))
        .stdout(contains("\"color\": \"#00ff00\""));
}

View File

@@ -0,0 +1,74 @@
mod common;
use common::{cli_cmd, parse_created_id, query_manager, seed_workspace};
use predicates::str::contains;
use tempfile::TempDir;
#[test]
fn create_list_show_delete_round_trip() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");

    // Create a folder and capture its generated id.
    let created = cli_cmd(dir)
        .args(["folder", "create", "wk_test", "--name", "Auth"])
        .assert()
        .success();
    let folder_id = parse_created_id(&created.get_output().stdout, "folder create");

    // The new folder shows up in the workspace listing.
    cli_cmd(dir)
        .args(["folder", "list", "wk_test"])
        .assert()
        .success()
        .stdout(contains(&folder_id))
        .stdout(contains("Auth"));

    // `show` prints JSON, including the workspace linkage.
    cli_cmd(dir)
        .args(["folder", "show", &folder_id])
        .assert()
        .success()
        .stdout(contains(format!("\"id\": \"{folder_id}\"")))
        .stdout(contains("\"workspaceId\": \"wk_test\""));

    // Delete with --yes, then verify the row is gone from the DB.
    cli_cmd(dir)
        .args(["folder", "delete", &folder_id, "--yes"])
        .assert()
        .success()
        .stdout(contains(format!("Deleted folder: {folder_id}")));
    assert!(query_manager(dir).connect().get_folder(&folder_id).is_err());
}
#[test]
fn json_create_and_update_merge_patch_round_trip() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");

    // Create a folder from a raw JSON payload.
    let created = cli_cmd(dir)
        .args(["folder", "create", r#"{"workspaceId":"wk_test","name":"Json Folder"}"#])
        .assert()
        .success();
    let folder_id = parse_created_id(&created.get_output().stdout, "folder create");

    // Merge-patch the description via `update`.
    let patch = format!(r#"{{"id":"{}","description":"Folder Description"}}"#, folder_id);
    cli_cmd(dir)
        .args(["folder", "update", &patch])
        .assert()
        .success()
        .stdout(contains(format!("Updated folder: {folder_id}")));

    // Both the original name and the patched description are present.
    cli_cmd(dir)
        .args(["folder", "show", &folder_id])
        .assert()
        .success()
        .stdout(contains("\"name\": \"Json Folder\""))
        .stdout(contains("\"description\": \"Folder Description\""));
}

View File

@@ -0,0 +1,107 @@
mod common;
use common::{cli_cmd, parse_created_id, query_manager, seed_request, seed_workspace};
use predicates::str::contains;
use tempfile::TempDir;
#[test]
fn show_and_delete_yes_round_trip() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");

    // Create a request via flags and capture its id.
    let created = cli_cmd(dir)
        .args([
            "request",
            "create",
            "wk_test",
            "--name",
            "Smoke Test",
            "--url",
            "https://example.com",
        ])
        .assert()
        .success();
    let request_id = parse_created_id(&created.get_output().stdout, "request create");

    // `show` renders the request as JSON with its workspace linkage.
    cli_cmd(dir)
        .args(["request", "show", &request_id])
        .assert()
        .success()
        .stdout(contains(format!("\"id\": \"{request_id}\"")))
        .stdout(contains("\"workspaceId\": \"wk_test\""));

    // Delete with --yes, then confirm the row no longer exists.
    cli_cmd(dir)
        .args(["request", "delete", &request_id, "--yes"])
        .assert()
        .success()
        .stdout(contains(format!("Deleted request: {request_id}")));
    assert!(query_manager(dir).connect().get_http_request(&request_id).is_err());
}
#[test]
fn delete_without_yes_fails_in_non_interactive_mode() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");
    seed_request(dir, "wk_test", "rq_seed_delete_noninteractive");

    // Without --yes (and no TTY under the test runner) delete must refuse
    // and exit with code 1.
    cli_cmd(dir)
        .args(["request", "delete", "rq_seed_delete_noninteractive"])
        .assert()
        .failure()
        .code(1)
        .stderr(contains("Refusing to delete in non-interactive mode without --yes"));

    // The refused delete must not have touched the row.
    assert!(
        query_manager(dir).connect().get_http_request("rq_seed_delete_noninteractive").is_ok()
    );
}
#[test]
fn json_create_and_update_merge_patch_round_trip() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");

    // Create a request from a raw JSON payload.
    let created = cli_cmd(dir)
        .args([
            "request",
            "create",
            r#"{"workspaceId":"wk_test","name":"Json Request","url":"https://example.com"}"#,
        ])
        .assert()
        .success();
    let request_id = parse_created_id(&created.get_output().stdout, "request create");

    // Merge-patch only the name via `update`.
    let patch = format!(r#"{{"id":"{}","name":"Renamed Request"}}"#, request_id);
    cli_cmd(dir)
        .args(["request", "update", &patch])
        .assert()
        .success()
        .stdout(contains(format!("Updated request: {request_id}")));

    // The name changed; the untouched URL survived the patch.
    cli_cmd(dir)
        .args(["request", "show", &request_id])
        .assert()
        .success()
        .stdout(contains("\"name\": \"Renamed Request\""))
        .stdout(contains("\"url\": \"https://example.com\""));
}
#[test]
fn update_requires_id_in_json_payload() {
    // No seeding: payload validation should fail before any DB lookup.
    let tmp = TempDir::new().expect("Failed to create temp dir");
    cli_cmd(tmp.path())
        .args(["request", "update", r#"{"name":"No ID"}"#])
        .assert()
        .failure()
        .stderr(contains("request update requires a non-empty \"id\" field"));
}

View File

@@ -0,0 +1,59 @@
mod common;
use common::{cli_cmd, parse_created_id, query_manager};
use predicates::str::contains;
use tempfile::TempDir;
#[test]
fn create_show_delete_round_trip() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();

    // Workspaces need no seeding; create one from scratch.
    let created = cli_cmd(dir)
        .args(["workspace", "create", "--name", "WS One"])
        .assert()
        .success();
    let workspace_id = parse_created_id(&created.get_output().stdout, "workspace create");

    // `show` renders the workspace as JSON.
    cli_cmd(dir)
        .args(["workspace", "show", &workspace_id])
        .assert()
        .success()
        .stdout(contains(format!("\"id\": \"{workspace_id}\"")))
        .stdout(contains("\"name\": \"WS One\""));

    // Delete with --yes and confirm the row is gone from the DB.
    cli_cmd(dir)
        .args(["workspace", "delete", &workspace_id, "--yes"])
        .assert()
        .success()
        .stdout(contains(format!("Deleted workspace: {workspace_id}")));
    assert!(query_manager(dir).connect().get_workspace(&workspace_id).is_err());
}
#[test]
fn json_create_and_update_merge_patch_round_trip() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();

    // Create a workspace from a raw JSON payload.
    let created = cli_cmd(dir)
        .args(["workspace", "create", r#"{"name":"Json Workspace"}"#])
        .assert()
        .success();
    let workspace_id = parse_created_id(&created.get_output().stdout, "workspace create");

    // Merge-patch the description via `update`.
    let patch = format!(r#"{{"id":"{}","description":"Updated via JSON"}}"#, workspace_id);
    cli_cmd(dir)
        .args(["workspace", "update", &patch])
        .assert()
        .success()
        .stdout(contains(format!("Updated workspace: {workspace_id}")));

    // Name survives; description reflects the patch.
    cli_cmd(dir)
        .args(["workspace", "show", &workspace_id])
        .assert()
        .success()
        .stdout(contains("\"name\": \"Json Workspace\""))
        .stdout(contains("\"description\": \"Updated via JSON\""));
}

View File

@@ -1095,13 +1095,9 @@ async fn cmd_get_http_authentication_config<R: Runtime>(
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
let values_json: serde_json::Value = serde_json::to_value(&values)?;
let rendered_json = render_json_value(
values_json,
environment_chain,
&cb,
&RenderOptions::return_empty(),
)
.await?;
let rendered_json =
render_json_value(values_json, environment_chain, &cb, &RenderOptions::return_empty())
.await?;
// Convert back to HashMap<String, JsonPrimitive>
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;

View File

@@ -3,6 +3,9 @@
//! This module provides the Tauri plugin initialization and extension traits
//! that allow accessing QueryManager and BlobManager from Tauri's Manager types.
use chrono::Utc;
use log::error;
use std::time::Duration;
use tauri::plugin::TauriPlugin;
use tauri::{Emitter, Manager, Runtime, State};
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
@@ -13,6 +16,74 @@ use yaak_models::models::{AnyModel, GraphQlIntrospection, GrpcEvent, Settings, W
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
const MODEL_CHANGES_RETENTION_HOURS: i64 = 1;
const MODEL_CHANGES_POLL_INTERVAL_MS: u64 = 250;
const MODEL_CHANGES_POLL_BATCH_SIZE: usize = 200;
/// Resume point for the `model_changes` poller. Rows strictly after the
/// (`created_at`, `id`) pair are considered new; `id` breaks ties between
/// rows sharing the same timestamp.
struct ModelChangeCursor {
    created_at: String,
    id: i64,
}

impl ModelChangeCursor {
    /// Start the cursor at "now" so only writes made after this app launch
    /// are streamed to the frontend.
    fn from_launch_time() -> Self {
        let launch_stamp = Utc::now().naive_utc().format("%Y-%m-%d %H:%M:%S%.3f").to_string();
        Self { created_at: launch_stamp, id: 0 }
    }
}
/// Fetch and forward one batch of persisted model changes.
///
/// Advances `cursor` past every fetched row (even skipped ones, so nothing is
/// re-read) and emits a `model_write` event for each change that did NOT
/// originate from a local window — window writes are forwarded via the
/// in-memory channel instead, avoiding double delivery.
///
/// Returns `true` when a full batch was fetched, meaning more rows may be
/// pending and the caller should drain again before sleeping.
fn drain_model_changes_batch<R: Runtime>(
    query_manager: &QueryManager,
    app_handle: &tauri::AppHandle<R>,
    cursor: &mut ModelChangeCursor,
) -> bool {
    let changes = match query_manager.connect().list_model_changes_since(
        &cursor.created_at,
        cursor.id,
        MODEL_CHANGES_POLL_BATCH_SIZE,
    ) {
        Ok(changes) => changes,
        Err(err) => {
            // Polling failure is non-fatal; log and retry on the next tick.
            error!("Failed to poll model_changes rows: {err:?}");
            return false;
        }
    };
    if changes.is_empty() {
        return false;
    }
    let fetched_count = changes.len();
    for change in changes {
        // Advance the cursor first so even skipped rows are never re-read.
        cursor.created_at = change.created_at;
        cursor.id = change.id;
        // Local window-originated writes are forwarded immediately from the
        // in-memory model event channel.
        if matches!(change.payload.update_source, UpdateSource::Window { .. }) {
            continue;
        }
        if let Err(err) = app_handle.emit("model_write", change.payload) {
            error!("Failed to emit model_write event: {err:?}");
        }
    }
    // A full batch implies more rows may already be queued.
    fetched_count == MODEL_CHANGES_POLL_BATCH_SIZE
}
/// Poll the `model_changes` table forever, forwarding rows to the frontend.
///
/// Drains all pending batches back-to-back, then sleeps for the poll interval
/// before checking again. Never returns; intended to be spawned as a task.
async fn run_model_change_poller<R: Runtime>(
    query_manager: QueryManager,
    app_handle: tauri::AppHandle<R>,
    mut cursor: ModelChangeCursor,
) {
    loop {
        // Keep draining while full batches come back, then idle until next tick.
        while drain_model_changes_batch(&query_manager, &app_handle, &mut cursor) {}
        tokio::time::sleep(Duration::from_millis(MODEL_CHANGES_POLL_INTERVAL_MS)).await;
    }
}
/// Extension trait for accessing the QueryManager from Tauri Manager types.
pub trait QueryManagerExt<'a, R> {
fn db_manager(&'a self) -> State<'a, QueryManager>;
@@ -262,14 +333,37 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
}
};
let db = query_manager.connect();
if let Err(err) = db.prune_model_changes_older_than_hours(MODEL_CHANGES_RETENTION_HOURS)
{
error!("Failed to prune model_changes rows on startup: {err:?}");
}
// Only stream writes that happen after this app launch.
let cursor = ModelChangeCursor::from_launch_time();
let poll_query_manager = query_manager.clone();
app_handle.manage(query_manager);
app_handle.manage(blob_manager);
// Forward model change events to the frontend
let app_handle = app_handle.clone();
// Poll model_changes so all writers (including external CLI processes) update the UI.
let app_handle_poll = app_handle.clone();
let query_manager = poll_query_manager;
tauri::async_runtime::spawn(async move {
run_model_change_poller(query_manager, app_handle_poll, cursor).await;
});
// Fast path for local app writes initiated by frontend windows. This keeps the
// current sync-model UX snappy, while DB polling handles external writers (CLI).
let app_handle_local = app_handle.clone();
tauri::async_runtime::spawn(async move {
for payload in rx {
app_handle.emit("model_write", payload).unwrap();
if !matches!(payload.update_source, UpdateSource::Window { .. }) {
continue;
}
if let Err(err) = app_handle_local.emit("model_write", payload) {
error!("Failed to emit local model_write event: {err:?}");
}
}
});

View File

@@ -8,9 +8,9 @@ use serde::{Deserialize, Serialize};
use std::time::Instant;
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
use ts_rs::TS;
use yaak_api::yaak_api_client;
use yaak_common::platform::get_os_str;
use yaak_models::util::UpdateSource;
use yaak_api::yaak_api_client;
// Check for updates every hour
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;

View File

@@ -21,6 +21,7 @@ use tauri::{
};
use tokio::sync::Mutex;
use ts_rs::TS;
use yaak_api::yaak_api_client;
use yaak_models::models::Plugin;
use yaak_models::util::UpdateSource;
use yaak_plugins::api::{
@@ -31,7 +32,6 @@ use yaak_plugins::events::{Color, Icon, PluginContext, ShowToastRequest};
use yaak_plugins::install::{delete_and_uninstall, download_and_install};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::plugin_meta::get_plugin_meta;
use yaak_api::yaak_api_client;
static EXITING: AtomicBool = AtomicBool::new(false);

View File

@@ -8,11 +8,11 @@ use std::fs;
use std::sync::Arc;
use tauri::{AppHandle, Emitter, Manager, Runtime, Url};
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogKind};
use yaak_api::yaak_api_client;
use yaak_models::util::generate_id;
use yaak_plugins::events::{Color, ShowToastRequest};
use yaak_plugins::install::download_and_install;
use yaak_plugins::manager::PluginManager;
use yaak_api::yaak_api_client;
pub(crate) async fn handle_deep_link<R: Runtime>(
app_handle: &AppHandle<R>,

View File

@@ -153,11 +153,8 @@ pub fn app_menu<R: Runtime>(app_handle: &AppHandle<R>) -> tauri::Result<Menu<R>>
.build(app_handle)?,
&MenuItemBuilder::with_id("dev.reset_size".to_string(), "Reset Size")
.build(app_handle)?,
&MenuItemBuilder::with_id(
"dev.reset_size_16x9".to_string(),
"Resize to 16x9",
)
.build(app_handle)?,
&MenuItemBuilder::with_id("dev.reset_size_16x9".to_string(), "Resize to 16x9")
.build(app_handle)?,
&MenuItemBuilder::with_id(
"dev.reset_size_16x10".to_string(),
"Resize to 16x10",

View File

@@ -7,11 +7,11 @@ use std::ops::Add;
use std::time::Duration;
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow, is_dev};
use ts_rs::TS;
use yaak_api::yaak_api_client;
use yaak_common::platform::get_os_str;
use yaak_models::db_context::DbContext;
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
use yaak_api::yaak_api_client;
/// Extension trait for accessing the QueryManager from Tauri Manager types.
/// This is needed temporarily until all crates are refactored to not use Tauri.
@@ -159,10 +159,8 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
let app_version = window.app_handle().package_info().version.to_string();
let client = yaak_api_client(&app_version)?;
let path = format!("/licenses/activations/{}/deactivate", activation_id);
let payload = DeactivateLicenseRequestPayload {
app_platform: get_os_str().to_string(),
app_version,
};
let payload =
DeactivateLicenseRequestPayload { app_platform: get_os_str().to_string(), app_version };
let response = client.post(build_url(&path)).json(&payload).send().await?;
if response.status().is_client_error() {
@@ -189,10 +187,8 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<LicenseCheckStatus> {
let app_version = window.app_handle().package_info().version.to_string();
let payload = CheckActivationRequestPayload {
app_platform: get_os_str().to_string(),
app_version,
};
let payload =
CheckActivationRequestPayload { app_platform: get_os_str().to_string(), app_version };
let activation_id = get_activation_id(window.app_handle()).await;
let settings = window.db().get_settings();

View File

@@ -74,15 +74,31 @@ impl Display for HttpResponseEvent {
};
write!(f, "* Redirect {} -> {} ({})", status, url, behavior_str)
}
HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
HttpResponseEvent::SendUrl {
method,
scheme,
username,
password,
host,
port,
path,
query,
fragment,
} => {
let auth_str = if username.is_empty() && password.is_empty() {
String::new()
} else {
format!("{}:{}@", username, password)
};
let query_str = if query.is_empty() { String::new() } else { format!("?{}", query) };
let fragment_str = if fragment.is_empty() { String::new() } else { format!("#{}", fragment) };
write!(f, "> {} {}://{}{}:{}{}{}{}", method, scheme, auth_str, host, port, path, query_str, fragment_str)
let query_str =
if query.is_empty() { String::new() } else { format!("?{}", query) };
let fragment_str =
if fragment.is_empty() { String::new() } else { format!("#{}", fragment) };
write!(
f,
"> {} {}://{}{}:{}{}{}{}",
method, scheme, auth_str, host, port, path, query_str, fragment_str
)
}
HttpResponseEvent::ReceiveUrl { version, status } => {
write!(f, "< {} {}", version_to_str(version), status)
@@ -122,7 +138,17 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
RedirectBehavior::DropBody => "drop_body".to_string(),
},
},
HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
HttpResponseEvent::SendUrl {
method,
scheme,
username,
password,
host,
port,
path,
query,
fragment,
} => {
D::SendUrl { method, scheme, username, password, host, port, path, query, fragment }
}
HttpResponseEvent::ReceiveUrl { version, status } => {
@@ -546,7 +572,10 @@ impl<S> SizedBody<S> {
impl<S> HttpBody for SizedBody<S>
where
S: futures_util::Stream<Item = std::result::Result<Bytes, std::io::Error>> + Send + Unpin + 'static,
S: futures_util::Stream<Item = std::result::Result<Bytes, std::io::Error>>
+ Send
+ Unpin
+ 'static,
{
type Data = Bytes;
type Error = std::io::Error;

View File

@@ -37,10 +37,9 @@ impl From<SendableBodyWithMeta> for SendableBody {
fn from(value: SendableBodyWithMeta) -> Self {
match value {
SendableBodyWithMeta::Bytes(b) => SendableBody::Bytes(b),
SendableBodyWithMeta::Stream { data, content_length } => SendableBody::Stream {
data,
content_length: content_length.map(|l| l as u64),
},
SendableBodyWithMeta::Stream { data, content_length } => {
SendableBody::Stream { data, content_length: content_length.map(|l| l as u64) }
}
}
}
}

View File

@@ -0,0 +1,12 @@
CREATE TABLE model_changes
(
id INTEGER PRIMARY KEY AUTOINCREMENT,
model TEXT NOT NULL,
model_id TEXT NOT NULL,
change TEXT NOT NULL,
update_source TEXT NOT NULL,
payload TEXT NOT NULL,
created_at DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL
);
CREATE INDEX idx_model_changes_created_at ON model_changes (created_at);

View File

@@ -3,8 +3,7 @@ use crate::error::Error::ModelNotFound;
use crate::error::Result;
use crate::models::{AnyModel, UpsertModelInfo};
use crate::util::{ModelChangeEvent, ModelPayload, UpdateSource};
use log::error;
use rusqlite::OptionalExtension;
use rusqlite::{OptionalExtension, params};
use sea_query::{
Asterisk, Expr, Func, IntoColumnRef, IntoIden, IntoTableRef, OnConflict, Query, SimpleExpr,
SqliteQueryBuilder,
@@ -14,7 +13,7 @@ use std::fmt::Debug;
use std::sync::mpsc;
pub struct DbContext<'a> {
pub(crate) events_tx: mpsc::Sender<ModelPayload>,
pub(crate) _events_tx: mpsc::Sender<ModelPayload>,
pub(crate) conn: ConnectionOrTx<'a>,
}
@@ -180,9 +179,8 @@ impl<'a> DbContext<'a> {
change: ModelChangeEvent::Upsert { created },
};
if let Err(e) = self.events_tx.send(payload.clone()) {
error!("Failed to send model change {source:?}: {e:?}");
}
self.record_model_change(&payload)?;
let _ = self._events_tx.send(payload);
Ok(m)
}
@@ -203,9 +201,31 @@ impl<'a> DbContext<'a> {
change: ModelChangeEvent::Delete,
};
if let Err(e) = self.events_tx.send(payload) {
error!("Failed to send model change {source:?}: {e:?}");
}
self.record_model_change(&payload)?;
let _ = self._events_tx.send(payload);
Ok(m.clone())
}
/// Persist a model change to the `model_changes` table so external processes
/// can observe writes by polling, in addition to the in-memory event channel.
fn record_model_change(&self, payload: &ModelPayload) -> Result<()> {
    // Store the full payload plus denormalized columns for cheap filtering.
    let payload_json = serde_json::to_string(payload)?;
    let source_json = serde_json::to_string(&payload.update_source)?;
    let change_json = serde_json::to_string(&payload.change)?;
    self.conn.resolve().execute(
        r#"
        INSERT INTO model_changes (model, model_id, change, update_source, payload)
        VALUES (?1, ?2, ?3, ?4, ?5)
        "#,
        params![
            payload.model.model(),
            payload.model.id(),
            change_json,
            source_json,
            payload_json,
        ],
    )?;
    Ok(())
}
}

View File

@@ -2347,6 +2347,15 @@ macro_rules! define_any_model {
)*
}
}
#[inline]
pub fn model(&self) -> &str {
match self {
$(
AnyModel::$type(inner) => &inner.model,
)*
}
}
}
$(
@@ -2400,30 +2409,29 @@ impl<'de> Deserialize<'de> for AnyModel {
{
let value = Value::deserialize(deserializer)?;
let model = value.as_object().unwrap();
use AnyModel::*;
use serde_json::from_value as fv;
let model = match model.get("model") {
Some(m) if m == "cookie_jar" => AnyModel::CookieJar(fv(value).unwrap()),
Some(m) if m == "environment" => AnyModel::Environment(fv(value).unwrap()),
Some(m) if m == "folder" => AnyModel::Folder(fv(value).unwrap()),
Some(m) if m == "graphql_introspection" => {
AnyModel::GraphQlIntrospection(fv(value).unwrap())
}
Some(m) if m == "grpc_connection" => AnyModel::GrpcConnection(fv(value).unwrap()),
Some(m) if m == "grpc_event" => AnyModel::GrpcEvent(fv(value).unwrap()),
Some(m) if m == "grpc_request" => AnyModel::GrpcRequest(fv(value).unwrap()),
Some(m) if m == "http_request" => AnyModel::HttpRequest(fv(value).unwrap()),
Some(m) if m == "http_response" => AnyModel::HttpResponse(fv(value).unwrap()),
Some(m) if m == "key_value" => AnyModel::KeyValue(fv(value).unwrap()),
Some(m) if m == "plugin" => AnyModel::Plugin(fv(value).unwrap()),
Some(m) if m == "settings" => AnyModel::Settings(fv(value).unwrap()),
Some(m) if m == "websocket_connection" => {
AnyModel::WebsocketConnection(fv(value).unwrap())
}
Some(m) if m == "websocket_event" => AnyModel::WebsocketEvent(fv(value).unwrap()),
Some(m) if m == "websocket_request" => AnyModel::WebsocketRequest(fv(value).unwrap()),
Some(m) if m == "workspace" => AnyModel::Workspace(fv(value).unwrap()),
Some(m) if m == "workspace_meta" => AnyModel::WorkspaceMeta(fv(value).unwrap()),
Some(m) if m == "cookie_jar" => CookieJar(fv(value).unwrap()),
Some(m) if m == "environment" => Environment(fv(value).unwrap()),
Some(m) if m == "folder" => Folder(fv(value).unwrap()),
Some(m) if m == "graphql_introspection" => GraphQlIntrospection(fv(value).unwrap()),
Some(m) if m == "grpc_connection" => GrpcConnection(fv(value).unwrap()),
Some(m) if m == "grpc_event" => GrpcEvent(fv(value).unwrap()),
Some(m) if m == "grpc_request" => GrpcRequest(fv(value).unwrap()),
Some(m) if m == "http_request" => HttpRequest(fv(value).unwrap()),
Some(m) if m == "http_response" => HttpResponse(fv(value).unwrap()),
Some(m) if m == "http_response_event" => HttpResponseEvent(fv(value).unwrap()),
Some(m) if m == "key_value" => KeyValue(fv(value).unwrap()),
Some(m) if m == "plugin" => Plugin(fv(value).unwrap()),
Some(m) if m == "settings" => Settings(fv(value).unwrap()),
Some(m) if m == "sync_state" => SyncState(fv(value).unwrap()),
Some(m) if m == "websocket_connection" => WebsocketConnection(fv(value).unwrap()),
Some(m) if m == "websocket_event" => WebsocketEvent(fv(value).unwrap()),
Some(m) if m == "websocket_request" => WebsocketRequest(fv(value).unwrap()),
Some(m) if m == "workspace" => Workspace(fv(value).unwrap()),
Some(m) if m == "workspace_meta" => WorkspaceMeta(fv(value).unwrap()),
Some(m) => {
return Err(serde::de::Error::custom(format!(
"Failed to deserialize AnyModel {}",

View File

@@ -11,6 +11,7 @@ mod http_requests;
mod http_response_events;
mod http_responses;
mod key_values;
mod model_changes;
mod plugin_key_values;
mod plugins;
mod settings;
@@ -20,6 +21,7 @@ mod websocket_events;
mod websocket_requests;
mod workspace_metas;
pub mod workspaces;
pub use model_changes::PersistedModelChange;
const MAX_HISTORY_ITEMS: usize = 20;

View File

@@ -0,0 +1,289 @@
use crate::db_context::DbContext;
use crate::error::Result;
use crate::util::ModelPayload;
use rusqlite::params;
use rusqlite::types::Type;
/// One row of the `model_changes` table: a model write that was persisted so
/// other processes can replay it.
#[derive(Debug, Clone)]
pub struct PersistedModelChange {
    /// Monotonic rowid; used as the pagination cursor and timestamp tie-breaker.
    pub id: i64,
    /// SQLite timestamp string (`%Y-%m-%d %H:%M:%f` format) of when the row was inserted.
    pub created_at: String,
    /// The full model payload that was broadcast for this change.
    pub payload: ModelPayload,
}
impl<'a> DbContext<'a> {
pub fn list_model_changes_after(
&self,
after_id: i64,
limit: usize,
) -> Result<Vec<PersistedModelChange>> {
let mut stmt = self.conn.prepare(
r#"
SELECT id, created_at, payload
FROM model_changes
WHERE id > ?1
ORDER BY id ASC
LIMIT ?2
"#,
)?;
let items = stmt.query_map(params![after_id, limit as i64], |row| {
let id: i64 = row.get(0)?;
let created_at: String = row.get(1)?;
let payload_raw: String = row.get(2)?;
let payload = serde_json::from_str::<ModelPayload>(&payload_raw).map_err(|e| {
rusqlite::Error::FromSqlConversionFailure(2, Type::Text, Box::new(e))
})?;
Ok(PersistedModelChange { id, created_at, payload })
})?;
Ok(items.collect::<std::result::Result<Vec<_>, rusqlite::Error>>()?)
}
pub fn list_model_changes_since(
&self,
since_created_at: &str,
since_id: i64,
limit: usize,
) -> Result<Vec<PersistedModelChange>> {
let mut stmt = self.conn.prepare(
r#"
SELECT id, created_at, payload
FROM model_changes
WHERE created_at > ?1
OR (created_at = ?1 AND id > ?2)
ORDER BY created_at ASC, id ASC
LIMIT ?3
"#,
)?;
let items = stmt.query_map(params![since_created_at, since_id, limit as i64], |row| {
let id: i64 = row.get(0)?;
let created_at: String = row.get(1)?;
let payload_raw: String = row.get(2)?;
let payload = serde_json::from_str::<ModelPayload>(&payload_raw).map_err(|e| {
rusqlite::Error::FromSqlConversionFailure(2, Type::Text, Box::new(e))
})?;
Ok(PersistedModelChange { id, created_at, payload })
})?;
Ok(items.collect::<std::result::Result<Vec<_>, rusqlite::Error>>()?)
}
pub fn prune_model_changes_older_than_days(&self, days: i64) -> Result<usize> {
let offset = format!("-{days} days");
Ok(self.conn.resolve().execute(
r#"
DELETE FROM model_changes
WHERE created_at < STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', ?1)
"#,
params![offset],
)?)
}
pub fn prune_model_changes_older_than_hours(&self, hours: i64) -> Result<usize> {
let offset = format!("-{hours} hours");
Ok(self.conn.resolve().execute(
r#"
DELETE FROM model_changes
WHERE created_at < STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', ?1)
"#,
params![offset],
)?)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::init_in_memory;
use crate::models::Workspace;
use crate::util::{ModelChangeEvent, UpdateSource};
use serde_json::json;
#[test]
fn records_model_changes_for_upsert_and_delete() {
    // Every write through DbContext should append a row to `model_changes`.
    let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
    let db = query_manager.connect();
    let workspace = db
        .upsert_workspace(
            &Workspace {
                name: "Changes Test".to_string(),
                setting_follow_redirects: true,
                setting_validate_certificates: true,
                ..Default::default()
            },
            &UpdateSource::Sync,
        )
        .expect("Failed to upsert workspace");
    // The insert is recorded as Upsert { created: true } with its source intact.
    let created_changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
    assert_eq!(created_changes.len(), 1);
    assert_eq!(created_changes[0].payload.model.id(), workspace.id);
    assert_eq!(created_changes[0].payload.model.model(), "workspace");
    assert!(matches!(
        created_changes[0].payload.change,
        ModelChangeEvent::Upsert { created: true }
    ));
    assert!(matches!(created_changes[0].payload.update_source, UpdateSource::Sync));
    db.delete_workspace_by_id(&workspace.id, &UpdateSource::Sync)
        .expect("Failed to delete workspace");
    // The delete appends a second row with a strictly larger id.
    let all_changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
    assert_eq!(all_changes.len(), 2);
    assert!(matches!(all_changes[1].payload.change, ModelChangeEvent::Delete));
    assert!(all_changes[1].id > all_changes[0].id);
    // Paging from the first row's id returns only the later delete row.
    let changes_after_first = db
        .list_model_changes_after(all_changes[0].id, 10)
        .expect("Failed to list changes after cursor");
    assert_eq!(changes_after_first.len(), 1);
    assert!(matches!(changes_after_first[0].payload.change, ModelChangeEvent::Delete));
}
#[test]
fn prunes_old_model_changes() {
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
let db = query_manager.connect();
db.upsert_workspace(
&Workspace {
name: "Prune Test".to_string(),
setting_follow_redirects: true,
setting_validate_certificates: true,
..Default::default()
},
&UpdateSource::Sync,
)
.expect("Failed to upsert workspace");
let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
assert_eq!(changes.len(), 1);
db.conn
.resolve()
.execute(
"UPDATE model_changes SET created_at = '2000-01-01 00:00:00.000' WHERE id = ?1",
params![changes[0].id],
)
.expect("Failed to age model change row");
let pruned =
db.prune_model_changes_older_than_days(30).expect("Failed to prune model changes");
assert_eq!(pruned, 1);
assert!(db.list_model_changes_after(0, 10).expect("Failed to list changes").is_empty());
}
#[test]
fn list_model_changes_since_uses_timestamp_with_id_tiebreaker() {
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
let db = query_manager.connect();
let workspace = db
.upsert_workspace(
&Workspace {
name: "Cursor Test".to_string(),
setting_follow_redirects: true,
setting_validate_certificates: true,
..Default::default()
},
&UpdateSource::Sync,
)
.expect("Failed to upsert workspace");
db.delete_workspace_by_id(&workspace.id, &UpdateSource::Sync)
.expect("Failed to delete workspace");
let all = db.list_model_changes_after(0, 10).expect("Failed to list changes");
assert_eq!(all.len(), 2);
let fixed_ts = "2026-02-16 00:00:00.000";
db.conn
.resolve()
.execute("UPDATE model_changes SET created_at = ?1", params![fixed_ts])
.expect("Failed to normalize timestamps");
let after_first =
db.list_model_changes_since(fixed_ts, all[0].id, 10).expect("Failed to query cursor");
assert_eq!(after_first.len(), 1);
assert_eq!(after_first[0].id, all[1].id);
}
#[test]
fn prunes_old_model_changes_by_hours() {
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
let db = query_manager.connect();
db.upsert_workspace(
&Workspace {
name: "Prune Hour Test".to_string(),
setting_follow_redirects: true,
setting_validate_certificates: true,
..Default::default()
},
&UpdateSource::Sync,
)
.expect("Failed to upsert workspace");
let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
assert_eq!(changes.len(), 1);
db.conn
.resolve()
.execute(
"UPDATE model_changes SET created_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', '-2 hours') WHERE id = ?1",
params![changes[0].id],
)
.expect("Failed to age model change row");
let pruned =
db.prune_model_changes_older_than_hours(1).expect("Failed to prune model changes");
assert_eq!(pruned, 1);
}
#[test]
fn list_model_changes_deserializes_http_response_event_payload() {
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
let db = query_manager.connect();
let payload = json!({
"model": {
"model": "http_response_event",
"id": "re_test",
"createdAt": "2026-02-16T21:01:34.809162",
"updatedAt": "2026-02-16T21:01:34.809163",
"workspaceId": "wk_test",
"responseId": "rs_test",
"event": {
"type": "info",
"message": "hello"
}
},
"updateSource": { "type": "sync" },
"change": { "type": "upsert", "created": false }
});
db.conn
.resolve()
.execute(
r#"
INSERT INTO model_changes (model, model_id, change, update_source, payload)
VALUES (?1, ?2, ?3, ?4, ?5)
"#,
params![
"http_response_event",
"re_test",
r#"{"type":"upsert","created":false}"#,
r#"{"type":"sync"}"#,
payload.to_string(),
],
)
.expect("Failed to insert model change row");
let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
assert_eq!(changes.len(), 1);
assert_eq!(changes[0].payload.model.model(), "http_response_event");
assert_eq!(changes[0].payload.model.id(), "re_test");
}
}

View File

@@ -25,7 +25,7 @@ impl QueryManager {
.expect("Failed to gain lock on DB")
.get()
.expect("Failed to get a new DB connection from the pool");
DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) }
DbContext { _events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) }
}
pub fn with_conn<F, T>(&self, func: F) -> T
@@ -39,8 +39,10 @@ impl QueryManager {
.get()
.expect("Failed to get new DB connection from the pool");
let db_context =
DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) };
let db_context = DbContext {
_events_tx: self.events_tx.clone(),
conn: ConnectionOrTx::Connection(conn),
};
func(&db_context)
}
@@ -62,8 +64,10 @@ impl QueryManager {
.transaction_with_behavior(TransactionBehavior::Immediate)
.expect("Failed to start DB transaction");
let db_context =
DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Transaction(&tx) };
let db_context = DbContext {
_events_tx: self.events_tx.clone(),
conn: ConnectionOrTx::Transaction(&tx),
};
match func(&db_context) {
Ok(val) => {

View File

@@ -68,7 +68,9 @@ pub async fn start_nodejs_plugin_runtime(
// Handle kill signal
let mut kill_rx = kill_rx.clone();
tokio::spawn(async move {
kill_rx.wait_for(|b| *b == true).await.expect("Kill channel errored");
if kill_rx.wait_for(|b| *b == true).await.is_err() {
warn!("Kill channel closed before explicit shutdown; terminating plugin runtime");
}
info!("Killing plugin runtime");
if let Err(e) = child.kill().await {
warn!("Failed to kill plugin runtime: {e}");

View File

@@ -13,5 +13,11 @@
"build": "yaakcli build",
"dev": "yaakcli dev",
"test": "vitest --run tests"
},
"dependencies": {
"jsonwebtoken": "^9.0.2"
},
"devDependencies": {
"@types/jsonwebtoken": "^9.0.7"
}
}

View File

@@ -4,26 +4,16 @@ import type { AccessTokenRawResponse } from './store';
export async function fetchAccessToken(
ctx: Context,
{
accessTokenUrl,
scope,
audience,
params,
grantType,
credentialsInBody,
clientId,
clientSecret,
}: {
args: {
clientId: string;
clientSecret: string;
grantType: string;
accessTokenUrl: string;
scope: string | null;
audience: string | null;
credentialsInBody: boolean;
params: HttpUrlParameter[];
},
} & ({ clientAssertion: string } | { clientSecret: string; credentialsInBody: boolean }),
): Promise<AccessTokenRawResponse> {
const { clientId, grantType, accessTokenUrl, scope, audience, params } = args;
console.log('[oauth2] Getting access token', accessTokenUrl);
const httpRequest: Partial<HttpRequest> = {
method: 'POST',
@@ -34,7 +24,10 @@ export async function fetchAccessToken(
},
headers: [
{ name: 'User-Agent', value: 'yaak' },
{ name: 'Accept', value: 'application/x-www-form-urlencoded, application/json' },
{
name: 'Accept',
value: 'application/x-www-form-urlencoded, application/json',
},
{ name: 'Content-Type', value: 'application/x-www-form-urlencoded' },
],
};
@@ -42,11 +35,24 @@ export async function fetchAccessToken(
if (scope) httpRequest.body?.form.push({ name: 'scope', value: scope });
if (audience) httpRequest.body?.form.push({ name: 'audience', value: audience });
if (credentialsInBody) {
if ('clientAssertion' in args) {
httpRequest.body?.form.push({ name: 'client_id', value: clientId });
httpRequest.body?.form.push({ name: 'client_secret', value: clientSecret });
httpRequest.body?.form.push({
name: 'client_assertion_type',
value: 'urn:ietf:params:oauth:client-assertion-type:jwt-bearer',
});
httpRequest.body?.form.push({
name: 'client_assertion',
value: args.clientAssertion,
});
} else if (args.credentialsInBody) {
httpRequest.body?.form.push({ name: 'client_id', value: clientId });
httpRequest.body?.form.push({
name: 'client_secret',
value: args.clientSecret,
});
} else {
const value = `Basic ${Buffer.from(`${clientId}:${clientSecret}`).toString('base64')}`;
const value = `Basic ${Buffer.from(`${clientId}:${args.clientSecret}`).toString('base64')}`;
httpRequest.headers?.push({ name: 'Authorization', value });
}

View File

@@ -1,9 +1,99 @@
import { createPrivateKey, randomUUID } from 'node:crypto';
import type { Context } from '@yaakapp/api';
import jwt, { type Algorithm } from 'jsonwebtoken';
import { fetchAccessToken } from '../fetchAccessToken';
import type { TokenStoreArgs } from '../store';
import { getToken, storeToken } from '../store';
import { isTokenExpired } from '../util';
export const jwtAlgorithms = [
'HS256',
'HS384',
'HS512',
'RS256',
'RS384',
'RS512',
'PS256',
'PS384',
'PS512',
'ES256',
'ES384',
'ES512',
'none',
] as const;
export const defaultJwtAlgorithm = jwtAlgorithms[0];
/**
* Build a signed JWT for the client_assertion parameter (RFC 7523).
*
* The `secret` value is auto-detected as one of:
* - **JWK** a JSON string containing a private-key object (has a `kty` field).
* - **PEM** a string whose trimmed form starts with `-----`.
* - **HMAC secret** anything else, used as-is for HS* algorithms.
*/
function buildClientAssertionJwt(params: {
clientId: string;
accessTokenUrl: string;
secret: string;
algorithm: Algorithm;
}): string {
const { clientId, accessTokenUrl, secret, algorithm } = params;
const isHmac = algorithm.startsWith('HS') || algorithm === 'none';
// Resolve the signing key depending on format
let signingKey: jwt.Secret;
let kid: string | undefined;
const trimmed = secret.trim();
if (isHmac) {
// HMAC algorithms use the raw secret (string or Buffer)
signingKey = secret;
} else if (trimmed.startsWith('{')) {
// Looks like JSON - treat as JWK. There is surely a better way to detect JWK vs a raw secret, but this should work in most cases.
let jwk: any;
try {
jwk = JSON.parse(trimmed);
} catch {
throw new Error('Client Assertion secret looks like JSON but is not valid');
}
kid = jwk?.kid;
signingKey = createPrivateKey({ key: jwk, format: 'jwk' });
} else if (trimmed.startsWith('-----')) {
// PEM-encoded key
signingKey = createPrivateKey({ key: trimmed, format: 'pem' });
} else {
throw new Error(
'Client Assertion secret must be a JWK JSON object, a PEM-encoded key ' +
'(starting with -----), or a raw secret for HMAC algorithms.',
);
}
const now = Math.floor(Date.now() / 1000);
const payload = {
iss: clientId,
sub: clientId,
aud: accessTokenUrl,
iat: now,
exp: now + 300, // 5 minutes
jti: randomUUID(),
};
// Build the JWT header; include "kid" when available
const header: jwt.JwtHeader = { alg: algorithm, typ: 'JWT' };
if (kid) {
header.kid = kid;
}
return jwt.sign(JSON.stringify(payload), signingKey, {
algorithm: algorithm as jwt.Algorithm,
header,
});
}
export async function getClientCredentials(
ctx: Context,
contextId: string,
@@ -14,6 +104,10 @@ export async function getClientCredentials(
scope,
audience,
credentialsInBody,
clientAssertionSecret,
clientAssertionSecretBase64,
clientCredentialsMethod,
clientAssertionAlgorithm,
}: {
accessTokenUrl: string;
clientId: string;
@@ -21,6 +115,10 @@ export async function getClientCredentials(
scope: string | null;
audience: string | null;
credentialsInBody: boolean;
clientAssertionSecret: string;
clientAssertionSecretBase64: boolean;
clientCredentialsMethod: string;
clientAssertionAlgorithm: string;
},
) {
const tokenArgs: TokenStoreArgs = {
@@ -34,16 +132,38 @@ export async function getClientCredentials(
return token;
}
const response = await fetchAccessToken(ctx, {
const common: Omit<
Parameters<typeof fetchAccessToken>[1],
'clientAssertion' | 'clientSecret' | 'credentialsInBody'
> = {
grantType: 'client_credentials',
accessTokenUrl,
audience,
clientId,
clientSecret,
scope,
credentialsInBody,
params: [],
});
};
const fetchParams: Parameters<typeof fetchAccessToken>[1] =
clientCredentialsMethod === 'client_assertion'
? {
...common,
clientAssertion: buildClientAssertionJwt({
clientId,
algorithm: clientAssertionAlgorithm as Algorithm,
accessTokenUrl,
secret: clientAssertionSecretBase64
? Buffer.from(clientAssertionSecret, 'base64').toString('utf-8')
: clientAssertionSecret,
}),
}
: {
...common,
clientSecret,
credentialsInBody,
};
const response = await fetchAccessToken(ctx, fetchParams);
return storeToken(ctx, tokenArgs, response);
}

View File

@@ -5,6 +5,7 @@ import type {
JsonPrimitive,
PluginDefinition,
} from '@yaakapp/api';
import type { Algorithm } from 'jsonwebtoken';
import { DEFAULT_LOCALHOST_PORT, HOSTED_CALLBACK_URL, stopActiveServer } from './callbackServer';
import {
type CallbackType,
@@ -14,7 +15,11 @@ import {
PKCE_PLAIN,
PKCE_SHA256,
} from './grants/authorizationCode';
import { getClientCredentials } from './grants/clientCredentials';
import {
defaultJwtAlgorithm,
getClientCredentials,
jwtAlgorithms,
} from './grants/clientCredentials';
import { getImplicit } from './grants/implicit';
import { getPassword } from './grants/password';
import type { AccessToken, TokenStoreArgs } from './store';
@@ -97,7 +102,10 @@ export const plugin: PluginDefinition = {
};
const token = await getToken(ctx, tokenArgs);
if (token == null) {
await ctx.toast.show({ message: 'No token to copy', color: 'warning' });
await ctx.toast.show({
message: 'No token to copy',
color: 'warning',
});
} else {
await ctx.clipboard.copyText(token.response.access_token);
await ctx.toast.show({
@@ -118,9 +126,15 @@ export const plugin: PluginDefinition = {
clientId: stringArg(values, 'clientId'),
};
if (await deleteToken(ctx, tokenArgs)) {
await ctx.toast.show({ message: 'Token deleted', color: 'success' });
await ctx.toast.show({
message: 'Token deleted',
color: 'success',
});
} else {
await ctx.toast.show({ message: 'No token to delete', color: 'warning' });
await ctx.toast.show({
message: 'No token to delete',
color: 'warning',
});
}
},
},
@@ -139,6 +153,19 @@ export const plugin: PluginDefinition = {
defaultValue: defaultGrantType,
options: grantTypes,
},
{
type: 'select',
name: 'clientCredentialsMethod',
label: 'Authentication Method',
description:
'"Client Secret" sends client_secret. \n' + '"Client Assertion" sends a signed JWT.',
defaultValue: 'client_secret',
options: [
{ label: 'Client Secret', value: 'client_secret' },
{ label: 'Client Assertion', value: 'client_assertion' },
],
dynamic: hiddenIfNot(['client_credentials']),
},
{
type: 'text',
name: 'clientId',
@@ -151,7 +178,47 @@ export const plugin: PluginDefinition = {
label: 'Client Secret',
optional: true,
password: true,
dynamic: hiddenIfNot(['authorization_code', 'password', 'client_credentials']),
dynamic: hiddenIfNot(
['authorization_code', 'password', 'client_credentials'],
(values) => values.clientCredentialsMethod === 'client_secret',
),
},
{
type: 'select',
name: 'clientAssertionAlgorithm',
label: 'JWT Algorithm',
defaultValue: defaultJwtAlgorithm,
options: jwtAlgorithms.map((value) => ({
label: value === 'none' ? 'None' : value,
value,
})),
dynamic: hiddenIfNot(
['client_credentials'],
({ clientCredentialsMethod }) => clientCredentialsMethod === 'client_assertion',
),
},
{
type: 'text',
name: 'clientAssertionSecret',
label: 'JWT Secret',
description:
'Can be HMAC, PEM or JWK. Make sure you pick the correct algorithm type above.',
password: true,
optional: true,
multiLine: true,
dynamic: hiddenIfNot(
['client_credentials'],
({ clientCredentialsMethod }) => clientCredentialsMethod === 'client_assertion',
),
},
{
type: 'checkbox',
name: 'clientAssertionSecretBase64',
label: 'JWT secret is base64 encoded',
dynamic: hiddenIfNot(
['client_credentials'],
({ clientCredentialsMethod }) => clientCredentialsMethod === 'client_assertion',
),
},
{
type: 'text',
@@ -160,7 +227,10 @@ export const plugin: PluginDefinition = {
label: 'Authorization URL',
dynamic: hiddenIfNot(['authorization_code', 'implicit']),
placeholder: authorizationUrls[0],
completionOptions: authorizationUrls.map((url) => ({ label: url, value: url })),
completionOptions: authorizationUrls.map((url) => ({
label: url,
value: url,
})),
},
{
type: 'text',
@@ -169,7 +239,10 @@ export const plugin: PluginDefinition = {
label: 'Access Token URL',
placeholder: accessTokenUrls[0],
dynamic: hiddenIfNot(['authorization_code', 'password', 'client_credentials']),
completionOptions: accessTokenUrls.map((url) => ({ label: url, value: url })),
completionOptions: accessTokenUrls.map((url) => ({
label: url,
value: url,
})),
},
{
type: 'banner',
@@ -186,7 +259,8 @@ export const plugin: PluginDefinition = {
{
type: 'text',
name: 'redirectUri',
label: 'Redirect URI',
label: 'Redirect URI (can be any valid URL)',
placeholder: 'https://mysite.example.com/oauth/callback',
description:
'URI the OAuth provider redirects to after authorization. Yaak intercepts this automatically in its embedded browser so any valid URI will work.',
optional: true,
@@ -383,6 +457,11 @@ export const plugin: PluginDefinition = {
{ label: 'In Request Body', value: 'body' },
{ label: 'As Basic Authentication', value: 'basic' },
],
dynamic: (_ctx: Context, { values }: GetHttpAuthenticationConfigRequest) => ({
hidden:
values.grantType === 'client_credentials' &&
values.clientCredentialsMethod === 'client_assertion',
}),
},
],
},
@@ -484,7 +563,11 @@ export const plugin: PluginDefinition = {
? accessTokenUrl
: `https://${accessTokenUrl}`,
clientId: stringArg(values, 'clientId'),
clientAssertionAlgorithm: stringArg(values, 'clientAssertionAlgorithm') as Algorithm,
clientSecret: stringArg(values, 'clientSecret'),
clientCredentialsMethod: stringArg(values, 'clientCredentialsMethod'),
clientAssertionSecret: stringArg(values, 'clientAssertionSecret'),
clientAssertionSecretBase64: !!values.clientAssertionSecretBase64,
scope: stringArgOrNull(values, 'scope'),
audience: stringArgOrNull(values, 'audience'),
credentialsInBody,

View File

@@ -3,23 +3,30 @@ import type { GrpcRequest, HttpRequest, WebsocketRequest } from '@yaakapp-intern
import { MoveToWorkspaceDialog } from '../components/MoveToWorkspaceDialog';
import { activeWorkspaceIdAtom } from '../hooks/useActiveWorkspace';
import { createFastMutation } from '../hooks/useFastMutation';
import { pluralizeCount } from '../lib/pluralize';
import { showDialog } from '../lib/dialog';
import { jotaiStore } from '../lib/jotai';
export const moveToWorkspace = createFastMutation({
mutationKey: ['move_workspace'],
mutationFn: async (request: HttpRequest | GrpcRequest | WebsocketRequest) => {
mutationFn: async (requests: (HttpRequest | GrpcRequest | WebsocketRequest)[]) => {
const activeWorkspaceId = jotaiStore.get(activeWorkspaceIdAtom);
if (activeWorkspaceId == null) return;
if (requests.length === 0) return;
const title =
requests.length === 1
? 'Move Request'
: `Move ${pluralizeCount('Request', requests.length)}`;
showDialog({
id: 'change-workspace',
title: 'Move Workspace',
title,
size: 'sm',
render: ({ hide }) => (
<MoveToWorkspaceDialog
onDone={hide}
request={request}
requests={requests}
activeWorkspaceId={activeWorkspaceId}
/>
),

View File

@@ -616,5 +616,16 @@ function KeyValueArg({
function hasVisibleInputs(inputs: FormInput[] | undefined): boolean {
if (!inputs) return false;
return inputs.some((i) => !i.hidden);
for (const input of inputs) {
if ('inputs' in input && !hasVisibleInputs(input.inputs)) {
// Has children, but none are visible
return false;
}
if (!input.hidden) {
return true;
}
}
return false;
}

View File

@@ -2,6 +2,7 @@ import type { GrpcRequest, HttpRequest, WebsocketRequest } from '@yaakapp-intern
import { patchModel, workspacesAtom } from '@yaakapp-internal/models';
import { useAtomValue } from 'jotai';
import { useState } from 'react';
import { pluralizeCount } from '../lib/pluralize';
import { resolvedModelName } from '../lib/resolvedModelName';
import { router } from '../lib/router';
import { showToast } from '../lib/toast';
@@ -12,18 +13,21 @@ import { VStack } from './core/Stacks';
interface Props {
activeWorkspaceId: string;
request: HttpRequest | GrpcRequest | WebsocketRequest;
requests: (HttpRequest | GrpcRequest | WebsocketRequest)[];
onDone: () => void;
}
export function MoveToWorkspaceDialog({ onDone, request, activeWorkspaceId }: Props) {
export function MoveToWorkspaceDialog({ onDone, requests, activeWorkspaceId }: Props) {
const workspaces = useAtomValue(workspacesAtom);
const [selectedWorkspaceId, setSelectedWorkspaceId] = useState<string>(activeWorkspaceId);
const targetWorkspace = workspaces.find((w) => w.id === selectedWorkspaceId);
const isSameWorkspace = selectedWorkspaceId === activeWorkspaceId;
return (
<VStack space={4} className="mb-4">
<Select
label="New Workspace"
label="Target Workspace"
name="workspace"
value={selectedWorkspaceId}
onChange={setSelectedWorkspaceId}
@@ -34,27 +38,31 @@ export function MoveToWorkspaceDialog({ onDone, request, activeWorkspaceId }: Pr
/>
<Button
color="primary"
disabled={selectedWorkspaceId === activeWorkspaceId}
disabled={isSameWorkspace}
onClick={async () => {
const patch = {
workspaceId: selectedWorkspaceId,
folderId: null,
};
await patchModel(request, patch);
await Promise.all(requests.map((r) => patchModel(r, patch)));
// Hide after a moment, to give time for request to disappear
// Hide after a moment, to give time for requests to disappear
setTimeout(onDone, 100);
showToast({
id: 'workspace-moved',
message: (
<>
<InlineCode>{resolvedModelName(request)}</InlineCode> moved to{' '}
<InlineCode>
{workspaces.find((w) => w.id === selectedWorkspaceId)?.name ?? 'unknown'}
</InlineCode>
</>
),
message:
requests.length === 1 && requests[0] != null ? (
<>
<InlineCode>{resolvedModelName(requests[0])}</InlineCode> moved to{' '}
<InlineCode>{targetWorkspace?.name ?? 'unknown'}</InlineCode>
</>
) : (
<>
{pluralizeCount('request', requests.length)} moved to{' '}
<InlineCode>{targetWorkspace?.name ?? 'unknown'}</InlineCode>
</>
),
action: ({ hide }) => (
<Button
size="xs"
@@ -74,7 +82,7 @@ export function MoveToWorkspaceDialog({ onDone, request, activeWorkspaceId }: Pr
});
}}
>
Move
{requests.length === 1 ? 'Move' : `Move ${pluralizeCount('Request', requests.length)}`}
</Button>
</VStack>
);

View File

@@ -278,6 +278,7 @@ function Sidebar({ className }: { className?: string }) {
},
},
'sidebar.selected.duplicate': {
// Higher priority so this takes precedence over model.duplicate (same Meta+d binding)
priority: 10,
enable,
cb: async (items: SidebarModel[]) => {
@@ -290,6 +291,18 @@ function Sidebar({ className }: { className?: string }) {
}
},
},
'sidebar.selected.move': {
enable,
cb: async (items: SidebarModel[]) => {
const requests = items.filter(
(i): i is HttpRequest | GrpcRequest | WebsocketRequest =>
i.model === 'http_request' || i.model === 'grpc_request' || i.model === 'websocket_request'
);
if (requests.length > 0) {
moveToWorkspace.mutate(requests);
}
},
},
'request.send': {
enable,
cb: async (items: SidebarModel[]) => {
@@ -320,6 +333,10 @@ function Sidebar({ className }: { className?: string }) {
const workspaces = jotaiStore.get(workspacesAtom);
const onlyHttpRequests = items.every((i) => i.model === 'http_request');
const requestItems = items.filter(
(i) =>
i.model === 'http_request' || i.model === 'grpc_request' || i.model === 'websocket_request',
);
const initialItems: ContextMenuProps['items'] = [
{
@@ -416,16 +433,13 @@ function Sidebar({ className }: { className?: string }) {
onSelect: () => actions['sidebar.selected.duplicate'].cb(items),
},
{
label: 'Move',
label: items.length <= 1 ? 'Move' : `Move ${requestItems.length} Requests`,
hotKeyAction: 'sidebar.selected.move',
hotKeyLabelOnly: true,
leftSlot: <Icon icon="arrow_right_circle" />,
hidden:
workspaces.length <= 1 ||
items.length > 1 ||
child.model === 'folder' ||
child.model === 'workspace',
hidden: workspaces.length <= 1 || requestItems.length === 0 || requestItems.length !== items.length,
onSelect: () => {
if (child.model === 'folder' || child.model === 'workspace') return;
moveToWorkspace.mutate(child);
actions['sidebar.selected.move'].cb(items);
},
},
{

View File

@@ -28,6 +28,7 @@ export type HotkeyAction =
| 'sidebar.filter'
| 'sidebar.selected.delete'
| 'sidebar.selected.duplicate'
| 'sidebar.selected.move'
| 'sidebar.selected.rename'
| 'sidebar.expand_all'
| 'sidebar.collapse_all'
@@ -58,6 +59,7 @@ const defaultHotkeysMac: Record<HotkeyAction, string[]> = {
'sidebar.collapse_all': ['Meta+Shift+Minus'],
'sidebar.selected.delete': ['Delete', 'Meta+Backspace'],
'sidebar.selected.duplicate': ['Meta+d'],
'sidebar.selected.move': [],
'sidebar.selected.rename': ['Enter'],
'sidebar.focus': ['Meta+b'],
'sidebar.context_menu': ['Control+Enter'],
@@ -87,6 +89,7 @@ const defaultHotkeysOther: Record<HotkeyAction, string[]> = {
'sidebar.collapse_all': ['Control+Shift+Minus'],
'sidebar.selected.delete': ['Delete', 'Control+Backspace'],
'sidebar.selected.duplicate': ['Control+d'],
'sidebar.selected.move': [],
'sidebar.selected.rename': ['Enter'],
'sidebar.focus': ['Control+b'],
'sidebar.context_menu': ['Alt+Insert'],
@@ -141,6 +144,7 @@ const hotkeyLabels: Record<HotkeyAction, string> = {
'sidebar.collapse_all': 'Collapse All Folders',
'sidebar.selected.delete': 'Delete Selected Sidebar Item',
'sidebar.selected.duplicate': 'Duplicate Selected Sidebar Item',
'sidebar.selected.move': 'Move Selected to Workspace',
'sidebar.selected.rename': 'Rename Selected Sidebar Item',
'sidebar.focus': 'Focus or Toggle Sidebar',
'sidebar.context_menu': 'Show Context Menu',