Compare commits

..

14 Commits

Author SHA1 Message Date
Gregory Schier
50c7992b42 Unify plugin bootstrap and prep vendored assets in CLI release 2026-02-22 15:01:34 -08:00
Gregory Schier
5e9aebda6f Embed CLI plugin assets and share bundled plugin registration 2026-02-22 14:44:40 -08:00
Gregory Schier
a1e84c7785 Bump @yaakapp/cli to 0.4.0-beta.2 2026-02-22 14:28:49 -08:00
Gregory Schier
fea4411afa Remove recursive API npm publish script 2026-02-22 14:21:07 -08:00
Gregory Schier
8315e4afad Add API release workflow and harden CLI npm publish 2026-02-22 14:11:26 -08:00
Gregory Schier
a19ee9b502 fix(cli-release): set npm dist-tag for prerelease publishes 2026-02-22 10:42:14 -08:00
Gregory Schier
0130bdee6f Install dbus build deps for Linux CLI release builds 2026-02-22 10:22:11 -08:00
Gregory Schier
71ae9f41ed Use npm trusted publishing for CLI release workflow 2026-02-22 10:10:06 -08:00
Gregory Schier
d06b6ce636 Merge plugin CLI into here (#404) 2026-02-22 10:06:24 -08:00
Gregory Schier
f5727b28c4 faker: render Date outputs as ISO strings 2026-02-21 07:24:07 -08:00
Gregory Schier
c62db7be06 Add contribution policy docs and PR checklist template 2026-02-20 14:09:59 -08:00
Gregory Schier
4e56daa555 CLI send enhancements and shared plugin event routing (#398) 2026-02-20 13:21:55 -08:00
dependabot[bot]
746bedf885 Bump hono from 4.11.7 to 4.11.10 (#403)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-02-20 09:01:23 -08:00
Gregory Schier
949c4a445a Fix NTLM challenge parsing when WWW-Authenticate has Negotiate first (#402) 2026-02-20 08:48:27 -08:00
74 changed files with 5656 additions and 810 deletions

18
.github/pull_request_template.md vendored Normal file
View File

@@ -0,0 +1,18 @@
## Summary
<!-- Describe the bug and the fix in 1-3 sentences. -->
## Submission
- [ ] This PR is a bug fix or small-scope improvement.
- [ ] If this PR is not a bug fix or small-scope improvement, I linked an approved feedback item below.
- [ ] I have read and followed [`CONTRIBUTING.md`](CONTRIBUTING.md).
- [ ] I tested this change locally.
- [ ] I added or updated tests when reasonable.
Approved feedback item (required if not a bug fix or small-scope improvement):
<!-- https://yaak.app/feedback/... -->
## Related
<!-- Link related issues, discussions, or feedback items. -->

59
.github/workflows/release-api-npm.yml vendored Normal file
View File

@@ -0,0 +1,59 @@
---
# Publishes @yaakapp/api to npm. Triggered either by pushing a `yaak-api-*`
# tag or manually via workflow_dispatch with an explicit version input.
name: Release API to NPM

on:
  push:
    tags: ["yaak-api-*"]
  workflow_dispatch:
    inputs:
      version:
        description: API version to publish (for example 0.9.0 or v0.9.0)
        required: true
        type: string

# Default to read-only; the publish job opts in to id-token below.
permissions:
  contents: read

jobs:
  publish-npm:
    name: Publish @yaakapp/api
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write  # required for `npm publish --provenance`
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: lts/*
          registry-url: https://registry.npmjs.org

      - name: Install dependencies
        run: npm ci

      - name: Set @yaakapp/api version
        shell: bash
        env:
          WORKFLOW_VERSION: ${{ inputs.version }}
        run: |
          set -euo pipefail
          # Manual runs take the version from the input; tag pushes derive it
          # from the tag name (yaak-api-<version>). A leading "v" is stripped
          # in both cases so 0.9.0 and v0.9.0 are equivalent.
          if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then
            VERSION="$WORKFLOW_VERSION"
          else
            VERSION="${GITHUB_REF_NAME#yaak-api-}"
          fi
          VERSION="${VERSION#v}"
          echo "Preparing @yaakapp/api version: $VERSION"
          cd packages/plugin-runtime-types
          npm version "$VERSION" --no-git-tag-version --allow-same-version

      - name: Build @yaakapp/api
        working-directory: packages/plugin-runtime-types
        run: npm run build

      - name: Publish @yaakapp/api
        working-directory: packages/plugin-runtime-types
        run: npm publish --provenance --access public

View File

@@ -1,4 +1,4 @@
name: Generate Artifacts
name: Release App Artifacts
on:
push:
tags: [v*]

198
.github/workflows/release-cli-npm.yml vendored Normal file
View File

@@ -0,0 +1,198 @@
---
# Builds the `yaak` CLI binary for all supported platforms and publishes the
# per-platform npm packages plus the @yaakapp/cli meta package. Triggered by
# pushing a `yaak-cli-*` tag or manually via workflow_dispatch.
name: Release CLI to NPM

on:
  push:
    tags: ["yaak-cli-*"]
  workflow_dispatch:
    inputs:
      version:
        description: CLI version to publish (for example 0.4.0 or v0.4.0)
        required: true
        type: string

# Default to read-only; the publish job opts in to id-token below.
permissions:
  contents: read

jobs:
  # Build the JS plugin assets once and share them with every binary build.
  prepare-vendored-assets:
    name: Prepare vendored plugin assets
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: lts/*

      - name: Install dependencies
        run: npm ci

      - name: Build plugin assets
        run: |
          npm run build-plugins
          npm run vendor:vendor-plugins

      - name: Upload vendored assets
        uses: actions/upload-artifact@v4
        with:
          name: vendored-assets
          path: |
            crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs
            crates-tauri/yaak-app/vendored/plugins
          if-no-files-found: error

  build-binaries:
    name: Build ${{ matrix.pkg }}
    needs: prepare-vendored-assets
    runs-on: ${{ matrix.runner }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - pkg: cli-darwin-arm64
            runner: macos-latest
            target: aarch64-apple-darwin
            binary: yaak
          - pkg: cli-darwin-x64
            runner: macos-latest
            target: x86_64-apple-darwin
            binary: yaak
          - pkg: cli-linux-arm64
            runner: ubuntu-22.04-arm
            target: aarch64-unknown-linux-gnu
            binary: yaak
          - pkg: cli-linux-x64
            runner: ubuntu-22.04
            target: x86_64-unknown-linux-gnu
            binary: yaak
          - pkg: cli-win32-arm64
            runner: windows-latest
            target: aarch64-pc-windows-msvc
            binary: yaak.exe
          - pkg: cli-win32-x64
            runner: windows-latest
            target: x86_64-pc-windows-msvc
            binary: yaak.exe
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Rust stable
        uses: dtolnay/rust-toolchain@stable
        with:
          targets: ${{ matrix.target }}

      - name: Restore Rust cache
        uses: Swatinem/rust-cache@v2
        with:
          shared-key: release-cli-npm
          cache-on-failure: true

      # dbus headers are needed by the keyring crate's secret-service backend.
      - name: Install Linux build dependencies
        if: startsWith(matrix.runner, 'ubuntu')
        run: |
          sudo apt-get update
          sudo apt-get install -y pkg-config libdbus-1-dev

      - name: Download vendored assets
        uses: actions/download-artifact@v4
        with:
          name: vendored-assets
          path: crates-tauri/yaak-app/vendored

      - name: Build yaak
        run: cargo build --locked --release -p yaak-cli --bin yaak --target ${{ matrix.target }}

      - name: Stage binary artifact
        shell: bash
        run: |
          set -euo pipefail
          mkdir -p "npm/dist/${{ matrix.pkg }}"
          cp "target/${{ matrix.target }}/release/${{ matrix.binary }}" "npm/dist/${{ matrix.pkg }}/${{ matrix.binary }}"

      - name: Upload binary artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.pkg }}
          path: npm/dist/${{ matrix.pkg }}/${{ matrix.binary }}
          if-no-files-found: error

  publish-npm:
    name: Publish @yaakapp/cli packages
    needs: build-binaries
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write  # required for `npm publish --provenance`
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: lts/*
          registry-url: https://registry.npmjs.org

      - name: Download binary artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: cli-*
          path: npm/dist
          merge-multiple: false  # keep one sub-directory per artifact/package

      - name: Prepare npm packages
        shell: bash
        env:
          WORKFLOW_VERSION: ${{ inputs.version }}
        run: |
          set -euo pipefail
          # Manual runs take the version from the input; tag pushes derive it
          # from the tag name (yaak-cli-<version>). A leading "v" is stripped.
          if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then
            VERSION="$WORKFLOW_VERSION"
          else
            VERSION="${GITHUB_REF_NAME#yaak-cli-}"
          fi
          VERSION="${VERSION#v}"
          # Prerelease versions (e.g. 0.4.0-beta.2) publish under their
          # prerelease channel ("beta"); stable versions publish as "latest".
          if [[ "$VERSION" == *-* ]]; then
            PRERELEASE="${VERSION#*-}"
            NPM_TAG="${PRERELEASE%%.*}"
          else
            NPM_TAG="latest"
          fi
          echo "Preparing CLI npm packages for version: $VERSION"
          echo "Publishing with npm dist-tag: $NPM_TAG"
          echo "NPM_TAG=$NPM_TAG" >> "$GITHUB_ENV"
          YAAK_CLI_VERSION="$VERSION" node npm/prepare-publish.js

      - name: Publish npm packages
        shell: bash
        run: |
          set -euo pipefail
          # Platform packages are published before the meta package so
          # @yaakapp/cli never references a version that is not yet available.
          for pkg in cli-darwin-arm64 cli-darwin-x64 cli-linux-arm64 \
                     cli-linux-x64 cli-win32-arm64 cli-win32-x64 cli; do
            (cd "npm/$pkg" && npm publish --provenance --access public --tag "$NPM_TAG")
          done

16
CONTRIBUTING.md Normal file
View File

@@ -0,0 +1,16 @@
# Contributing to Yaak
Yaak accepts community pull requests for:
- Bug fixes
- Small-scope improvements directly tied to existing behavior
Pull requests that introduce broad new features, major redesigns, or large refactors are out of scope unless explicitly approved first.
## Approval for Non-Bugfix Changes
If your PR is not a bug fix or small-scope improvement, include a link to the approved [feedback item](https://yaak.app/feedback) where contribution approval was explicitly stated.
## Development Setup
For local setup and development workflows, see [`DEVELOPMENT.md`](DEVELOPMENT.md).

2010
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -35,6 +35,7 @@ log = "0.4.29"
reqwest = "0.12.20"
rustls = { version = "0.23.34", default-features = false }
rustls-platform-verifier = "0.6.2"
schemars = { version = "0.8.22", features = ["chrono"] }
serde = "1.0.228"
serde_json = "1.0.145"
sha2 = "0.10.9"

View File

@@ -58,8 +58,10 @@ Built with [Tauri](https://tauri.app), Rust, and React, it's fast, lightweight
## Contribution Policy
Yaak is open source but only accepting contributions for bug fixes. To get started,
visit [`DEVELOPMENT.md`](DEVELOPMENT.md) for tips on setting up your environment.
> [!IMPORTANT]
> Community PRs are currently limited to bug fixes and small-scope improvements.
> If your PR is out of scope, link an approved feedback item from [yaak.app/feedback](https://yaak.app/feedback).
> See [`CONTRIBUTING.md`](CONTRIBUTING.md) for policy details and [`DEVELOPMENT.md`](DEVELOPMENT.md) for local setup.
## Useful Resources

View File

@@ -5,17 +5,32 @@ edition = "2024"
publish = false
[[bin]]
name = "yaakcli"
name = "yaak"
path = "src/main.rs"
[dependencies]
base64 = "0.22"
clap = { version = "4", features = ["derive"] }
console = "0.15"
dirs = "6"
env_logger = "0.11"
futures = "0.3"
hex = { workspace = true }
include_dir = "0.7"
keyring = { workspace = true, features = ["apple-native", "windows-native", "sync-secret-service"] }
log = { workspace = true }
rand = "0.8"
reqwest = { workspace = true }
rolldown = "0.1.0"
oxc_resolver = "=11.10.0"
schemars = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
sha2 = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread", "macros", "io-util", "net", "signal", "time"] }
walkdir = "2"
webbrowser = "1"
zip = "4"
yaak = { workspace = true }
yaak-crypto = { workspace = true }
yaak-http = { workspace = true }

View File

@@ -1,340 +0,0 @@
# CLI Command Architecture Plan
## Goal
Redesign the yaak-cli command structure to use a resource-oriented `<resource> <action>`
pattern that scales well, is discoverable, and supports both human and LLM workflows.
## Status Snapshot
Current branch state:
- Modular CLI structure with command modules and shared `CliContext`
- Resource/action hierarchy in place for:
- `workspace list|show|create|update|delete`
- `request list|show|create|update|send|delete`
- `folder list|show|create|update|delete`
- `environment list|show|create|update|delete`
- Top-level `send` exists as a request-send shortcut (not yet flexible request/folder/workspace resolution)
- Legacy `get` command removed
- JSON create/update flow implemented (`--json` and positional JSON shorthand)
- No `request schema` command yet
Progress checklist:
- [x] Phase 1 complete
- [x] Phase 2 complete
- [x] Phase 3 complete
- [ ] Phase 4 complete
- [ ] Phase 5 complete
- [ ] Phase 6 complete
## Command Architecture
### Design Principles
- **Resource-oriented**: top-level commands are nouns, subcommands are verbs
- **Polymorphic requests**: `request` covers HTTP, gRPC, and WebSocket — the CLI
resolves the type via `get_any_request` and adapts behavior accordingly
- **Simple creation, full-fidelity via JSON**: human-friendly flags for basic creation,
`--json` for full control (targeted at LLM and scripting workflows)
- **Runtime schema introspection**: `request schema` outputs JSON Schema for the request
models, with dynamic auth fields populated from loaded plugins at runtime
- **Destructive actions require confirmation**: `delete` commands prompt for user
confirmation before proceeding. Can be bypassed with `--yes` / `-y` for scripting
### Commands
```
# Top-level shortcut
yaakcli send <id> [-e <env_id>] # id can be a request, folder, or workspace
# Resource commands
yaakcli workspace list
yaakcli workspace show <id>
yaakcli workspace create --name <name>
yaakcli workspace create --json '{"name": "My Workspace"}'
yaakcli workspace create '{"name": "My Workspace"}' # positional JSON shorthand
yaakcli workspace update --json '{"id": "wk_abc", "name": "New Name"}'
yaakcli workspace delete <id>
yaakcli request list <workspace_id>
yaakcli request show <id>
yaakcli request create <workspace_id> --name <name> --url <url> [--method GET]
yaakcli request create --json '{"workspaceId": "wk_abc", "url": "..."}'
yaakcli request update --json '{"id": "rq_abc", "url": "https://new.com"}'
yaakcli request send <id> [-e <env_id>]
yaakcli request delete <id>
yaakcli request schema <http|grpc|websocket>
yaakcli folder list <workspace_id>
yaakcli folder show <id>
yaakcli folder create <workspace_id> --name <name>
yaakcli folder create --json '{"workspaceId": "wk_abc", "name": "Auth"}'
yaakcli folder update --json '{"id": "fl_abc", "name": "New Name"}'
yaakcli folder delete <id>
yaakcli environment list <workspace_id>
yaakcli environment show <id>
yaakcli environment create <workspace_id> --name <name>
yaakcli environment create --json '{"workspaceId": "wk_abc", "name": "Production"}'
yaakcli environment update --json '{"id": "ev_abc", ...}'
yaakcli environment delete <id>
```
### `send` — Top-Level Shortcut
`yaakcli send <id>` is a convenience alias that accepts any sendable ID. It tries
each type in order via DB lookups (short-circuiting on first match):
1. Request (HTTP, gRPC, or WebSocket via `get_any_request`)
2. Folder (sends all requests in the folder)
3. Workspace (sends all requests in the workspace)
ID prefixes exist (e.g. `rq_`, `fl_`, `wk_`) but are not relied upon — resolution
is purely by DB lookup.
`request send <id>` is the same but restricted to request IDs only.
### Request Send — Polymorphic Behavior
`send` means "execute this request" regardless of protocol:
- **HTTP**: send request, print response, exit
- **gRPC**: invoke the method; for streaming, stream output to stdout until done/Ctrl+C
- **WebSocket**: connect, stream messages to stdout until closed/Ctrl+C
### `request schema` — Runtime JSON Schema
Outputs a JSON Schema describing the full request shape, including dynamic fields:
1. Generate base schema from `schemars::JsonSchema` derive on the Rust model structs
2. Load plugins, collect auth strategy definitions and their form inputs
3. Merge plugin-defined auth fields into the `authentication` property as a `oneOf`
4. Output the combined schema as JSON
This lets an LLM call `schema`, read the shape, and construct valid JSON for
`create --json` or `update --json`.
## Implementation Steps
### Phase 1: Restructure commands (no new functionality)
Refactor `main.rs` into the new resource/action pattern using clap subcommand nesting.
Existing behavior stays the same, just reorganized. Remove the `get` command.
1. Create module structure: `commands/workspace.rs`, `commands/request.rs`, etc.
2. Define nested clap enums:
```rust
enum Commands {
Send(SendArgs),
Workspace(WorkspaceArgs),
Request(RequestArgs),
Folder(FolderArgs),
Environment(EnvironmentArgs),
}
```
3. Move existing `Workspaces` logic into `workspace list`
4. Move existing `Requests` logic into `request list`
5. Move existing `Send` logic into `request send`
6. Move existing `Create` logic into `request create`
7. Delete the `Get` command entirely
8. Extract shared setup (DB init, plugin init, encryption) into a reusable context struct
### Phase 2: Add missing CRUD commands
Status: complete
1. `workspace show <id>`
2. `workspace create --name <name>` (and `--json`)
3. `workspace update --json`
4. `workspace delete <id>`
5. `request show <id>` (JSON output of the full request model)
6. `request delete <id>`
7. `folder list <workspace_id>`
8. `folder show <id>`
9. `folder create <workspace_id> --name <name>` (and `--json`)
10. `folder update --json`
11. `folder delete <id>`
12. `environment list <workspace_id>`
13. `environment show <id>`
14. `environment create <workspace_id> --name <name>` (and `--json`)
15. `environment update --json`
16. `environment delete <id>`
### Phase 3: JSON input for create/update
Both commands accept JSON via `--json <string>` or as a positional argument (detected
by leading `{`). They follow the same upsert pattern as the plugin API.
- **`create --json`**: JSON must include `workspaceId`. Must NOT include `id` (or
use empty string `""`). Deserializes into the model with defaults for missing fields,
then upserts (insert).
- **`update --json`**: JSON must include `id`. Performs a fetch-merge-upsert:
1. Fetch the existing model from DB
2. Serialize it to `serde_json::Value`
3. Deep-merge the user's partial JSON on top (JSON Merge Patch / RFC 7386 semantics)
4. Deserialize back into the typed model
5. Upsert (update)
This matches how the MCP server plugin already does it (fetch existing, spread, override),
but the CLI handles the merge server-side so callers don't have to.
Setting a field to `null` removes it (for `Option<T>` fields), per RFC 7386.
Implementation:
1. Add `--json` flag and positional JSON detection to `create` commands
2. Add `update` commands with required `--json` flag
3. Implement JSON merge utility (or use `json-patch` crate)
### Phase 4: Runtime schema generation
1. Add `schemars` dependency to `yaak-models`
2. Derive `JsonSchema` on `HttpRequest`, `GrpcRequest`, `WebsocketRequest`, and their
nested types (`HttpRequestHeader`, `HttpUrlParameter`, etc.)
3. Implement `request schema` command:
- Generate base schema from schemars
- Query plugins for auth strategy form inputs
- Convert plugin form inputs into JSON Schema properties
- Merge into the `authentication` field
- Print to stdout
### Phase 5: Polymorphic send
1. Update `request send` to use `get_any_request` to resolve the request type
2. Match on `AnyRequest` variant and dispatch to the appropriate sender:
- `AnyRequest::HttpRequest` — existing HTTP send logic
- `AnyRequest::GrpcRequest` — gRPC invoke (future implementation)
- `AnyRequest::WebsocketRequest` — WebSocket connect (future implementation)
3. gRPC and WebSocket send can initially return "not yet implemented" errors
### Phase 6: Top-level `send` and folder/workspace send
1. Add top-level `yaakcli send <id>` command
2. Resolve ID by trying DB lookups in order: any_request → folder → workspace
3. For folder: list all requests in folder, send each
4. For workspace: list all requests in workspace, send each
5. Add execution options: `--sequential` (default), `--parallel`, `--fail-fast`
## Execution Plan (PR Slices)
### PR 1: Command tree refactor + compatibility aliases
Scope:
1. Introduce `commands/` modules and a `CliContext` for shared setup
2. Add new clap hierarchy (`workspace`, `request`, `folder`, `environment`)
3. Route existing behavior into:
- `workspace list`
- `request list <workspace_id>`
- `request send <id>`
- `request create <workspace_id> ...`
4. Keep compatibility aliases temporarily:
- `workspaces` -> `workspace list`
- `requests <workspace_id>` -> `request list <workspace_id>`
- `create ...` -> `request create ...`
5. Remove `get` and update help text
Acceptance criteria:
- `yaakcli --help` shows noun/verb structure
- Existing list/send/create workflows still work
- No behavior change in HTTP send output format
### PR 2: CRUD surface area
Scope:
1. Implement `show/create/update/delete` for `workspace`, `request`, `folder`, `environment`
2. Ensure delete commands require confirmation by default (`--yes` bypass)
3. Normalize output format for list/show/create/update/delete responses
Acceptance criteria:
- Every command listed in the "Commands" section parses and executes
- Delete commands are safe by default in interactive terminals
- `--yes` supports non-interactive scripts
### PR 3: JSON input + merge patch semantics
Scope:
1. Add shared parser for `--json` and positional JSON shorthand
2. Add `create --json` and `update --json` for all mutable resources
3. Implement server-side RFC 7386 merge patch behavior
4. Add guardrails:
- `create --json`: reject non-empty `id`
- `update --json`: require `id`
Acceptance criteria:
- Partial `update --json` only modifies provided keys
- `null` clears optional values
- Invalid JSON and missing required fields return actionable errors
### PR 4: `request schema` and plugin auth integration
Scope:
1. Add `schemars` to `yaak-models` and derive `JsonSchema` for request models
2. Implement `request schema <http|grpc|websocket>`
3. Merge plugin auth form inputs into `authentication` schema at runtime
Acceptance criteria:
- Command prints valid JSON schema
- Schema reflects installed auth providers at runtime
- No panic when plugins fail to initialize (degrade gracefully)
### PR 5: Polymorphic request send
Scope:
1. Replace request resolution in `request send` with `get_any_request`
2. Dispatch by request type
3. Keep HTTP fully functional
4. Return explicit NYI errors for gRPC/WebSocket until implemented
Acceptance criteria:
- HTTP behavior remains unchanged
- gRPC/WebSocket IDs are recognized and return explicit status
### PR 6: Top-level `send` + bulk execution
Scope:
1. Add top-level `send <id>` for request/folder/workspace IDs
2. Implement folder/workspace fan-out execution
3. Add execution controls: `--sequential`, `--parallel`, `--fail-fast`
Acceptance criteria:
- Correct ID dispatch order: request -> folder -> workspace
- Deterministic summary output (success/failure counts)
- Non-zero exit code when any request fails (unless explicitly configured otherwise)
## Validation Matrix
1. CLI parsing tests for every command path (including aliases while retained)
2. Integration tests against temp SQLite DB for CRUD flows
3. Snapshot tests for output text where scripting compatibility matters
4. Manual smoke tests:
- Send HTTP request with template/rendered vars
- JSON create/update for each resource
- Delete confirmation and `--yes`
- Top-level `send` on request/folder/workspace
## Open Questions
1. Should compatibility aliases (`workspaces`, `requests`, `create`) be removed immediately or after one release cycle?
2. For bulk `send`, should default behavior stop on first failure or continue and summarize?
3. Should command output default to human-readable text with an optional `--format json`, or return JSON by default for `show`/`list`?
4. For `request schema`, should plugin-derived auth fields be namespaced by plugin ID to avoid collisions?
## Crate Changes
- **yaak-cli**: restructure into modules, new clap hierarchy
- **yaak-models**: add `schemars` dependency, derive `JsonSchema` on model structs
(current derives: `Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS`)

View File

@@ -1,9 +1,10 @@
use clap::{Args, Parser, Subcommand};
use clap::{Args, Parser, Subcommand, ValueEnum};
use std::path::PathBuf;
#[derive(Parser)]
#[command(name = "yaakcli")]
#[command(name = "yaak")]
#[command(about = "Yaak CLI - API client from the command line")]
#[command(version)]
pub struct Cli {
/// Use a custom data directory
#[arg(long, global = true)]
@@ -23,7 +24,19 @@ pub struct Cli {
#[derive(Subcommand)]
pub enum Commands {
/// Send an HTTP request by ID
/// Authentication commands
Auth(AuthArgs),
/// Plugin development and publishing commands
Plugin(PluginArgs),
#[command(hide = true)]
Build(PluginPathArg),
#[command(hide = true)]
Dev(PluginPathArg),
/// Send a request, folder, or workspace by ID
Send(SendArgs),
/// Workspace commands
@@ -41,8 +54,20 @@ pub enum Commands {
#[derive(Args)]
pub struct SendArgs {
/// Request ID
pub request_id: String,
/// Request, folder, or workspace ID
pub id: String,
/// Execute requests sequentially (default)
#[arg(long, conflicts_with = "parallel")]
pub sequential: bool,
/// Execute requests in parallel
#[arg(long, conflicts_with = "sequential")]
pub parallel: bool,
/// Stop on first request failure when sending folders/workspaces
#[arg(long, conflicts_with = "parallel")]
pub fail_fast: bool,
}
#[derive(Args)]
@@ -119,12 +144,18 @@ pub enum RequestCommands {
request_id: String,
},
/// Send an HTTP request by ID
/// Send a request by ID
Send {
/// Request ID
request_id: String,
},
/// Output JSON schema for request create/update payloads
Schema {
#[arg(value_enum)]
request_type: RequestSchemaType,
},
/// Create a new HTTP request
Create {
/// Workspace ID (or positional JSON payload shorthand)
@@ -169,6 +200,13 @@ pub enum RequestCommands {
},
}
/// Request protocol selector for the `request schema` command.
#[derive(Clone, Copy, Debug, ValueEnum)]
pub enum RequestSchemaType {
    Http,
    Grpc,
    Websocket,
}
#[derive(Args)]
pub struct FolderArgs {
#[command(subcommand)]
@@ -280,3 +318,59 @@ pub enum EnvironmentCommands {
yes: bool,
},
}
/// Arguments for the `auth` command group.
#[derive(Args)]
pub struct AuthArgs {
    #[command(subcommand)]
    pub command: AuthCommands,
}

/// Subcommands of `yaak auth`.
#[derive(Subcommand)]
pub enum AuthCommands {
    /// Login to Yaak via web browser
    Login,
    /// Sign out of the Yaak CLI
    Logout,
    /// Print the current logged-in user's info
    Whoami,
}

/// Arguments for the `plugin` command group.
#[derive(Args)]
pub struct PluginArgs {
    #[command(subcommand)]
    pub command: PluginCommands,
}

/// Subcommands of `yaak plugin`.
#[derive(Subcommand)]
pub enum PluginCommands {
    /// Transpile code into a runnable plugin bundle
    Build(PluginPathArg),
    /// Build plugin bundle continuously when the filesystem changes
    Dev(PluginPathArg),
    /// Generate a "Hello World" Yaak plugin
    Generate(GenerateArgs),
    /// Publish a Yaak plugin version to the plugin registry
    Publish(PluginPathArg),
}

/// Shared positional path argument used by the plugin commands.
#[derive(Args, Clone)]
pub struct PluginPathArg {
    /// Path to plugin directory (defaults to current working directory)
    pub path: Option<PathBuf>,
}

/// Options for `plugin generate`.
#[derive(Args, Clone)]
pub struct GenerateArgs {
    /// Plugin name (defaults to a generated name in interactive mode)
    #[arg(long)]
    pub name: Option<String>,
    /// Output directory for the generated plugin (defaults to ./<name> in interactive mode)
    #[arg(long)]
    pub dir: Option<PathBuf>,
}

View File

@@ -0,0 +1,556 @@
use crate::cli::{AuthArgs, AuthCommands};
use crate::ui;
use base64::Engine as _;
use keyring::Entry;
use rand::RngCore;
use rand::rngs::OsRng;
use reqwest::Url;
use serde_json::Value;
use sha2::{Digest, Sha256};
use std::io::{self, IsTerminal, Write};
use std::time::Duration;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::net::{TcpListener, TcpStream};
// OAuth client ID registered for the Yaak CLI application.
const OAUTH_CLIENT_ID: &str = "a1fe44800c2d7e803cad1b4bf07a291c";
// Keyring "user" component under which the session token is stored.
const KEYRING_USER: &str = "yaak";
// How long to wait for the browser-based login before giving up.
const AUTH_TIMEOUT: Duration = Duration::from_secs(300);
// Upper bound on bytes read from the OAuth callback HTTP request.
const MAX_REQUEST_BYTES: usize = 16 * 1024;

// Command results carry a user-facing error message string on failure.
type CommandResult<T = ()> = std::result::Result<T, String>;
/// Which Yaak backend the CLI talks to; selected via the `ENVIRONMENT`
/// env var (see `parse_environment`). Defaults to production.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum Environment {
    Production,
    Staging,
    Development,
}

impl Environment {
    /// Base URL of the web app (used for the OAuth authorize/token endpoints).
    fn app_base_url(self) -> &'static str {
        match self {
            Environment::Production => "https://yaak.app",
            // NOTE(review): staging URL looks like a placeholder — confirm.
            Environment::Staging => "https://todo.yaak.app",
            Environment::Development => "http://localhost:9444",
        }
    }

    /// Base URL of the API (used for `whoami`).
    fn api_base_url(self) -> &'static str {
        match self {
            Environment::Production => "https://api.yaak.app",
            // NOTE(review): staging URL looks like a placeholder — confirm.
            Environment::Staging => "https://todo.yaak.app",
            Environment::Development => "http://localhost:9444",
        }
    }

    /// Keyring service name for this environment's token, so tokens for
    /// different environments never collide.
    fn keyring_service(self) -> &'static str {
        match self {
            Environment::Production => "app.yaak.cli.Token",
            Environment::Staging => "app.yaak.cli.staging.Token",
            Environment::Development => "app.yaak.cli.dev.Token",
        }
    }
}
/// Everything needed to drive one PKCE OAuth login attempt.
struct OAuthFlow {
    // Web-app base URL, used for the post-login success redirect.
    app_base_url: String,
    // Fully-built authorize URL the browser is sent to.
    auth_url: Url,
    // Endpoint for exchanging the authorization code for a token.
    token_url: String,
    // Localhost redirect URI for this attempt's callback server.
    redirect_url: String,
    // Random CSRF state echoed back in the callback.
    state: String,
    // PKCE code verifier matching the challenge embedded in `auth_url`.
    code_verifier: String,
}
pub async fn run(args: AuthArgs) -> i32 {
let result = match args.command {
AuthCommands::Login => login().await,
AuthCommands::Logout => logout(),
AuthCommands::Whoami => whoami().await,
};
match result {
Ok(()) => 0,
Err(error) => {
ui::error(&error);
1
}
}
}
/// Run the browser-based OAuth login flow and store the resulting token.
///
/// Binds a localhost callback server on an ephemeral port, opens the
/// authorization URL in the user's browser, waits for the redirect carrying
/// the authorization code, exchanges it for a token, and stores the token
/// via the keyring. Declining the browser prompt is treated as success.
async fn login() -> CommandResult {
    let environment = current_environment();
    // Port 0 lets the OS pick a free port; the chosen port is baked into the
    // OAuth redirect URL below.
    let listener = TcpListener::bind("127.0.0.1:0")
        .await
        .map_err(|e| format!("Failed to start OAuth callback server: {e}"))?;
    let port = listener
        .local_addr()
        .map_err(|e| format!("Failed to determine callback server port: {e}"))?
        .port();
    let oauth = build_oauth_flow(environment, port)?;
    ui::info(&format!("Initiating login to {}", oauth.auth_url));
    if !confirm_open_browser()? {
        ui::info("Login canceled");
        return Ok(());
    }
    // Browser failure is non-fatal: the user can open the URL manually.
    if let Err(err) = webbrowser::open(oauth.auth_url.as_ref()) {
        ui::warning(&format!("Failed to open browser: {err}"));
        ui::info(&format!("Open this URL manually:\n{}", oauth.auth_url));
    }
    ui::info("Waiting for authentication...");
    // Race the callback against Ctrl+C and the overall login timeout.
    let code = tokio::select! {
        result = receive_oauth_code(listener, &oauth.state, &oauth.app_base_url) => result?,
        _ = tokio::signal::ctrl_c() => {
            return Err("Interrupted by user".to_string());
        }
        _ = tokio::time::sleep(AUTH_TIMEOUT) => {
            return Err("Timeout waiting for authentication".to_string());
        }
    };
    let token = exchange_access_token(&oauth, &code).await?;
    store_auth_token(environment, &token)?;
    ui::success("Authentication successful!");
    Ok(())
}
/// Remove the stored session token for the active environment.
fn logout() -> CommandResult {
    let environment = current_environment();
    delete_auth_token(environment)?;
    ui::success("Signed out of Yaak");
    Ok(())
}
/// Print the current user's info from the `/api/v1/whoami` endpoint.
///
/// When no token is stored, prints a hint and returns `Ok` (not an error).
/// A 401 response clears the stored token (best-effort) and reports an
/// error telling the user to log in again.
async fn whoami() -> CommandResult {
    let environment = current_environment();
    let token = match get_auth_token(environment)? {
        Some(token) => token,
        None => {
            ui::warning("Not logged in");
            ui::info("Please run `yaak auth login`");
            return Ok(());
        }
    };
    let url = format!("{}/api/v1/whoami", environment.api_base_url());
    let response = reqwest::Client::new()
        .get(url)
        .header("X-Yaak-Session", token)
        .header(reqwest::header::USER_AGENT, user_agent())
        .send()
        .await
        .map_err(|e| format!("Failed to call whoami endpoint: {e}"))?;
    // Read the body before branching so error responses can be surfaced too.
    let status = response.status();
    let body =
        response.text().await.map_err(|e| format!("Failed to read whoami response body: {e}"))?;
    if !status.is_success() {
        if status.as_u16() == 401 {
            // Stored token is no longer valid; drop it (ignore failures).
            let _ = delete_auth_token(environment);
            return Err(
                "Unauthorized to access CLI. Run `yaak auth login` to refresh credentials."
                    .to_string(),
            );
        }
        return Err(parse_api_error(status.as_u16(), &body));
    }
    // Success: print the response body verbatim.
    println!("{body}");
    Ok(())
}
/// Resolve the active environment from the `ENVIRONMENT` variable,
/// defaulting to production when unset or unrecognized.
fn current_environment() -> Environment {
    parse_environment(std::env::var("ENVIRONMENT").ok().as_deref())
}
/// Map an optional environment name to an `Environment`.
///
/// Only the exact strings "staging" and "development" are recognized;
/// anything else (including `None`) falls back to production.
fn parse_environment(value: Option<&str>) -> Environment {
    if value == Some("staging") {
        Environment::Staging
    } else if value == Some("development") {
        Environment::Development
    } else {
        Environment::Production
    }
}
/// Construct the PKCE OAuth authorization-flow parameters.
///
/// Generates a random code verifier and CSRF state, derives the S256 code
/// challenge, and builds the authorize URL pointing back at the localhost
/// callback server on `callback_port`.
fn build_oauth_flow(environment: Environment, callback_port: u16) -> CommandResult<OAuthFlow> {
    let code_verifier = random_hex(32);
    let state = random_hex(24);
    let redirect_url = format!("http://127.0.0.1:{callback_port}/oauth/callback");
    // PKCE S256: challenge = base64url(sha256(verifier)), no padding.
    let code_challenge = base64::engine::general_purpose::URL_SAFE_NO_PAD
        .encode(Sha256::digest(code_verifier.as_bytes()));
    let mut auth_url = Url::parse(&format!("{}/login/oauth/authorize", environment.app_base_url()))
        .map_err(|e| format!("Failed to build OAuth authorize URL: {e}"))?;
    auth_url
        .query_pairs_mut()
        .append_pair("response_type", "code")
        .append_pair("client_id", OAUTH_CLIENT_ID)
        .append_pair("state", &state)
        .append_pair("redirect_uri", &redirect_url)
        .append_pair("code_challenge_method", "S256")
        .append_pair("code_challenge", &code_challenge);
    Ok(OAuthFlow {
        app_base_url: environment.app_base_url().to_string(),
        auth_url,
        token_url: format!("{}/login/oauth/access_token", environment.app_base_url()),
        redirect_url,
        state,
        code_verifier,
    })
}
/// Accept loopback connections until a valid OAuth callback arrives.
///
/// Loops over incoming connections: a state mismatch or malformed request
/// gets a 400 response and the server keeps listening (stray requests must
/// not kill the flow). A provider-reported error (e.g. user denied access)
/// aborts immediately. On success the browser is redirected to the web
/// app's success page and the authorization code is returned.
async fn receive_oauth_code(
    listener: TcpListener,
    expected_state: &str,
    app_base_url: &str,
) -> CommandResult<String> {
    loop {
        let (mut stream, _) = listener
            .accept()
            .await
            .map_err(|e| format!("OAuth callback server accept error: {e}"))?;
        match parse_callback_request(&mut stream).await {
            Ok((state, code)) => {
                // CSRF check: the state must round-trip unchanged.
                if state != expected_state {
                    let _ = write_bad_request(&mut stream, "Invalid OAuth state").await;
                    continue;
                }
                let success_redirect = format!("{app_base_url}/login/oauth/success");
                write_redirect(&mut stream, &success_redirect)
                    .await
                    .map_err(|e| format!("Failed responding to OAuth callback: {e}"))?;
                return Ok(code);
            }
            Err(error) => {
                // Best-effort 400 to the client; response failures are ignored.
                let _ = write_bad_request(&mut stream, &error).await;
                // Only provider errors are terminal; other parse failures
                // keep the server alive for a retry.
                if error.starts_with("OAuth provider returned error:") {
                    return Err(error);
                }
            }
        }
    }
}
/// Parse the OAuth redirect request and extract `(state, code)`.
///
/// Rejects paths other than `/oauth/callback`, surfaces provider-reported
/// `error` / `error_description` parameters, and requires a `state`
/// parameter plus a non-empty `code` parameter.
async fn parse_callback_request(stream: &mut TcpStream) -> CommandResult<(String, String)> {
    let target = read_http_target(stream).await?;
    if !target.starts_with("/oauth/callback") {
        return Err("Expected /oauth/callback path".to_string());
    }
    // Prefix a dummy origin so the relative request target parses as a URL.
    let url = Url::parse(&format!("http://127.0.0.1{target}"))
        .map_err(|e| format!("Failed to parse callback URL: {e}"))?;
    let mut state: Option<String> = None;
    let mut code: Option<String> = None;
    let mut oauth_error: Option<String> = None;
    let mut oauth_error_description: Option<String> = None;
    for (key, value) in url.query_pairs() {
        let value = value.into_owned();
        match key.as_ref() {
            "state" => state = Some(value),
            "code" => code = Some(value),
            "error" => oauth_error = Some(value),
            "error_description" => oauth_error_description = Some(value),
            _ => {}
        }
    }
    // A provider-reported error takes precedence over missing parameters.
    if let Some(error) = oauth_error {
        return Err(match oauth_error_description.filter(|d| !d.is_empty()) {
            Some(description) => {
                format!("OAuth provider returned error: {error} ({description})")
            }
            None => format!("OAuth provider returned error: {error}"),
        });
    }
    let state = state.ok_or_else(|| "Missing 'state' query parameter".to_string())?;
    let code = code.ok_or_else(|| "Missing 'code' query parameter".to_string())?;
    if code.is_empty() {
        return Err("Missing 'code' query parameter".to_string());
    }
    Ok((state, code))
}
/// Read the HTTP request head from `stream` and return the request target.
///
/// Reads until the header terminator (`\r\n\r\n`) is seen, the peer closes
/// the connection, or `MAX_REQUEST_BYTES` is reached (then errors). Only a
/// `GET` method is accepted; the body (if any) is never read.
async fn read_http_target(stream: &mut TcpStream) -> CommandResult<String> {
    // Fixed-size buffer doubles as the request size cap.
    let mut buf = vec![0_u8; MAX_REQUEST_BYTES];
    let mut total_read = 0_usize;
    loop {
        let n = stream
            .read(&mut buf[total_read..])
            .await
            .map_err(|e| format!("Failed reading callback request: {e}"))?;
        if n == 0 {
            // Peer closed; parse whatever we got.
            break;
        }
        total_read += n;
        // Stop once the full header block has arrived. Rescans the buffer
        // each iteration, which is fine at this small size.
        if buf[..total_read].windows(4).any(|w| w == b"\r\n\r\n") {
            break;
        }
        if total_read == MAX_REQUEST_BYTES {
            return Err("OAuth callback request too large".to_string());
        }
    }
    // Lossy decode: the request line is ASCII in practice.
    let req = String::from_utf8_lossy(&buf[..total_read]);
    let request_line =
        req.lines().next().ok_or_else(|| "Invalid callback request line".to_string())?;
    // Request line shape: "<METHOD> <TARGET> <VERSION>".
    let mut parts = request_line.split_whitespace();
    let method = parts.next().unwrap_or_default();
    let target = parts.next().unwrap_or_default();
    if method != "GET" {
        return Err(format!("Expected GET callback request, got '{method}'"));
    }
    if target.is_empty() {
        return Err("Missing callback request target".to_string());
    }
    Ok(target.to_string())
}
/// Send a plain-text HTTP 400 response carrying `message`, then close the
/// connection. Shown in the user's browser when the callback is rejected.
async fn write_bad_request(stream: &mut TcpStream, message: &str) -> std::io::Result<()> {
    let body = format!("Failed to authenticate: {message}");
    let mut response = String::from("HTTP/1.1 400 Bad Request\r\n");
    response.push_str("Content-Type: text/plain; charset=utf-8\r\n");
    response.push_str(&format!("Content-Length: {}\r\n", body.len()));
    response.push_str("Connection: close\r\n\r\n");
    response.push_str(&body);
    stream.write_all(response.as_bytes()).await?;
    stream.shutdown().await
}
/// Send a body-less HTTP 302 redirect to `location` and close the connection.
async fn write_redirect(stream: &mut TcpStream, location: &str) -> std::io::Result<()> {
    let headers = [
        "HTTP/1.1 302 Found".to_string(),
        format!("Location: {location}"),
        "Content-Length: 0".to_string(),
        "Connection: close".to_string(),
    ];
    let response = format!("{}\r\n\r\n", headers.join("\r\n"));
    stream.write_all(response.as_bytes()).await?;
    stream.shutdown().await
}
/// Exchange the authorization `code` for an access token at the token URL.
///
/// Posts the standard authorization_code grant (including the PKCE
/// `code_verifier`) as a form body and extracts a non-empty
/// `access_token` string from the JSON response.
async fn exchange_access_token(oauth: &OAuthFlow, code: &str) -> CommandResult<String> {
    let response = reqwest::Client::new()
        .post(&oauth.token_url)
        .header(reqwest::header::USER_AGENT, user_agent())
        .form(&[
            ("grant_type", "authorization_code"),
            ("client_id", OAUTH_CLIENT_ID),
            ("code", code),
            ("redirect_uri", oauth.redirect_url.as_str()),
            ("code_verifier", oauth.code_verifier.as_str()),
        ])
        .send()
        .await
        .map_err(|e| format!("Failed to exchange OAuth code for access token: {e}"))?;
    // Read the body before checking status so error responses are included
    // in the failure message.
    let status = response.status();
    let body =
        response.text().await.map_err(|e| format!("Failed to read token response body: {e}"))?;
    if !status.is_success() {
        return Err(format!(
            "Failed to fetch access token: status={} body={}",
            status.as_u16(),
            body
        ));
    }
    let parsed: Value =
        serde_json::from_str(&body).map_err(|e| format!("Invalid token response JSON: {e}"))?;
    // An empty access_token is treated the same as a missing one.
    let token = parsed
        .get("access_token")
        .and_then(Value::as_str)
        .filter(|s| !s.is_empty())
        .ok_or_else(|| format!("Token response missing access_token: {body}"))?;
    Ok(token.to_string())
}
/// Build the OS keyring entry used to store this environment's session token.
fn keyring_entry(environment: Environment) -> CommandResult<Entry> {
    Entry::new(environment.keyring_service(), KEYRING_USER)
        .map_err(|e| format!("Failed to initialize auth keyring entry: {e}"))
}
/// Read the stored session token for `environment`.
///
/// Returns `Ok(None)` when no entry exists (the user never logged in);
/// any other keyring failure is reported as an error string.
fn get_auth_token(environment: Environment) -> CommandResult<Option<String>> {
    let lookup = keyring_entry(environment)?.get_password();
    match lookup {
        Err(keyring::Error::NoEntry) => Ok(None),
        Err(other) => Err(format!("Failed to read auth token: {other}")),
        Ok(token) => Ok(Some(token)),
    }
}
/// Persist `token` in the OS keyring for `environment`, replacing any
/// previously stored value.
fn store_auth_token(environment: Environment, token: &str) -> CommandResult {
    let entry = keyring_entry(environment)?;
    entry.set_password(token).map_err(|e| format!("Failed to store auth token: {e}"))
}
fn delete_auth_token(environment: Environment) -> CommandResult {
let entry = keyring_entry(environment)?;
match entry.delete_credential() {
Ok(()) | Err(keyring::Error::NoEntry) => Ok(()),
Err(err) => Err(format!("Failed to delete auth token: {err}")),
}
}
/// Turn an API error response into a user-facing message.
///
/// Prefers the server's own "message" field, then "error", from a JSON
/// body; otherwise falls back to a generic "API error <status>: <body>".
fn parse_api_error(status: u16, body: &str) -> String {
    let friendly = serde_json::from_str::<Value>(body).ok().and_then(|value| {
        ["message", "error"]
            .iter()
            .find_map(|key| value.get(*key).and_then(Value::as_str).map(str::to_string))
    });
    friendly.unwrap_or_else(|| format!("API error {status}: {body}"))
}
/// Generate `bytes` cryptographically-secure random bytes and return them
/// hex-encoded (2 output characters per byte).
fn random_hex(bytes: usize) -> String {
    let mut data = Vec::new();
    data.resize(bytes, 0_u8);
    OsRng.fill_bytes(&mut data);
    hex::encode(&data)
}
/// User-Agent string for outgoing API requests, e.g. "YaakCli/1.2.3 (Linux)".
fn user_agent() -> String {
    format!("YaakCli/{} ({})", env!("CARGO_PKG_VERSION"), ua_platform())
}
/// Short platform label for the User-Agent string.
///
/// Fix: `std::env::consts::OS` reports "macos" on macOS (never "darwin"),
/// so the previous "darwin" arm was dead code and Macs reported "Unknown".
fn ua_platform() -> &'static str {
    match std::env::consts::OS {
        "windows" => "Win",
        "macos" => "Mac",
        "linux" => "Linux",
        _ => "Unknown",
    }
}
/// Ask the user whether to open the default browser for login.
///
/// Non-interactive stdin (piped/CI) auto-confirms. Otherwise re-prompts
/// until the user answers yes (default on empty input) or no.
fn confirm_open_browser() -> CommandResult<bool> {
    if !io::stdin().is_terminal() {
        // No terminal to prompt on; proceed without asking.
        return Ok(true);
    }
    loop {
        print!("Open default browser? [Y/n]: ");
        // Flush so the prompt appears before blocking on read_line.
        io::stdout().flush().map_err(|e| format!("Failed to flush stdout: {e}"))?;
        let mut input = String::new();
        io::stdin().read_line(&mut input).map_err(|e| format!("Failed to read input: {e}"))?;
        match input.trim().to_ascii_lowercase().as_str() {
            "" | "y" | "yes" => return Ok(true),
            "n" | "no" => return Ok(false),
            _ => ui::warning("Please answer y or n"),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // ENVIRONMENT variable values map to the expected variants, with
    // production as the fallback for unset values.
    #[test]
    fn environment_mapping() {
        assert_eq!(parse_environment(Some("staging")), Environment::Staging);
        assert_eq!(parse_environment(Some("development")), Environment::Development);
        assert_eq!(parse_environment(Some("production")), Environment::Production);
        assert_eq!(parse_environment(None), Environment::Production);
    }
    // A well-formed callback request yields (state, code).
    #[tokio::test]
    async fn parses_callback_request() {
        let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind");
        let addr = listener.local_addr().expect("local addr");
        // Server side: accept one connection and parse it.
        let server = tokio::spawn(async move {
            let (mut stream, _) = listener.accept().await.expect("accept");
            parse_callback_request(&mut stream).await
        });
        let mut client = TcpStream::connect(addr).await.expect("connect");
        client
            .write_all(
                b"GET /oauth/callback?code=abc123&state=xyz HTTP/1.1\r\nHost: localhost\r\n\r\n",
            )
            .await
            .expect("write");
        let parsed = server.await.expect("join").expect("parse");
        assert_eq!(parsed.0, "xyz");
        assert_eq!(parsed.1, "abc123");
    }
    // Provider-reported errors become Err with the error and description.
    #[tokio::test]
    async fn parse_callback_request_oauth_error() {
        let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind");
        let addr = listener.local_addr().expect("local addr");
        let server = tokio::spawn(async move {
            let (mut stream, _) = listener.accept().await.expect("accept");
            parse_callback_request(&mut stream).await
        });
        let mut client = TcpStream::connect(addr).await.expect("connect");
        client
            .write_all(
                b"GET /oauth/callback?error=access_denied&error_description=User%20denied&state=xyz HTTP/1.1\r\nHost: localhost\r\n\r\n",
            )
            .await
            .expect("write");
        let err = server.await.expect("join").expect_err("should fail");
        assert!(err.contains("OAuth provider returned error: access_denied"));
        assert!(err.contains("User denied"));
    }
    // A provider error aborts the accept loop instead of waiting forever.
    #[tokio::test]
    async fn receive_oauth_code_fails_fast_on_provider_error() {
        let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind");
        let addr = listener.local_addr().expect("local addr");
        let server = tokio::spawn(async move {
            receive_oauth_code(listener, "expected-state", "http://localhost:9444").await
        });
        let mut client = TcpStream::connect(addr).await.expect("connect");
        client
            .write_all(
                b"GET /oauth/callback?error=access_denied&state=expected-state HTTP/1.1\r\nHost: localhost\r\n\r\n",
            )
            .await
            .expect("write");
        // The timeout guards against a regression where the loop keeps listening.
        let result = tokio::time::timeout(std::time::Duration::from_secs(2), server)
            .await
            .expect("should not timeout")
            .expect("join");
        let err = result.expect_err("should return oauth error");
        assert!(err.contains("OAuth provider returned error: access_denied"));
    }
    // The authorize URL carries the PKCE challenge and loopback redirect.
    #[test]
    fn builds_oauth_flow_with_pkce() {
        let flow = build_oauth_flow(Environment::Development, 8080).expect("flow");
        assert!(flow.auth_url.as_str().contains("code_challenge_method=S256"));
        assert!(
            flow.auth_url
                .as_str()
                .contains("redirect_uri=http%3A%2F%2F127.0.0.1%3A8080%2Foauth%2Fcallback")
        );
        assert_eq!(flow.redirect_url, "http://127.0.0.1:8080/oauth/callback");
        assert_eq!(flow.token_url, "http://localhost:9444/login/oauth/access_token");
    }
}

View File

@@ -51,8 +51,8 @@ fn show(ctx: &CliContext, environment_id: &str) -> CommandResult {
.db()
.get_environment(environment_id)
.map_err(|e| format!("Failed to get environment: {e}"))?;
let output =
serde_json::to_string_pretty(&environment).map_err(|e| format!("Failed to serialize environment: {e}"))?;
let output = serde_json::to_string_pretty(&environment)
.map_err(|e| format!("Failed to serialize environment: {e}"))?;
println!("{output}");
Ok(())
}
@@ -81,9 +81,8 @@ fn create(
}
validate_create_id(&payload, "environment")?;
let mut environment: Environment =
serde_json::from_value(payload)
.map_err(|e| format!("Failed to parse environment create JSON: {e}"))?;
let mut environment: Environment = serde_json::from_value(payload)
.map_err(|e| format!("Failed to parse environment create JSON: {e}"))?;
if environment.workspace_id.is_empty() {
return Err("environment create JSON requires non-empty \"workspaceId\"".to_string());
@@ -105,8 +104,9 @@ fn create(
let workspace_id = workspace_id.ok_or_else(|| {
"environment create requires workspace_id unless JSON payload is provided".to_string()
})?;
let name = name
.ok_or_else(|| "environment create requires --name unless JSON payload is provided".to_string())?;
let name = name.ok_or_else(|| {
"environment create requires --name unless JSON payload is provided".to_string()
})?;
let environment = Environment {
workspace_id,

View File

@@ -31,7 +31,8 @@ pub fn run(ctx: &CliContext, args: FolderArgs) -> i32 {
}
fn list(ctx: &CliContext, workspace_id: &str) -> CommandResult {
let folders = ctx.db().list_folders(workspace_id).map_err(|e| format!("Failed to list folders: {e}"))?;
let folders =
ctx.db().list_folders(workspace_id).map_err(|e| format!("Failed to list folders: {e}"))?;
if folders.is_empty() {
println!("No folders found in workspace {}", workspace_id);
} else {
@@ -43,9 +44,10 @@ fn list(ctx: &CliContext, workspace_id: &str) -> CommandResult {
}
fn show(ctx: &CliContext, folder_id: &str) -> CommandResult {
let folder = ctx.db().get_folder(folder_id).map_err(|e| format!("Failed to get folder: {e}"))?;
let output =
serde_json::to_string_pretty(&folder).map_err(|e| format!("Failed to serialize folder: {e}"))?;
let folder =
ctx.db().get_folder(folder_id).map_err(|e| format!("Failed to get folder: {e}"))?;
let output = serde_json::to_string_pretty(&folder)
.map_err(|e| format!("Failed to serialize folder: {e}"))?;
println!("{output}");
Ok(())
}
@@ -72,8 +74,8 @@ fn create(
}
validate_create_id(&payload, "folder")?;
let folder: Folder =
serde_json::from_value(payload).map_err(|e| format!("Failed to parse folder create JSON: {e}"))?;
let folder: Folder = serde_json::from_value(payload)
.map_err(|e| format!("Failed to parse folder create JSON: {e}"))?;
if folder.workspace_id.is_empty() {
return Err("folder create JSON requires non-empty \"workspaceId\"".to_string());
@@ -88,10 +90,12 @@ fn create(
return Ok(());
}
let workspace_id = workspace_id
.ok_or_else(|| "folder create requires workspace_id unless JSON payload is provided".to_string())?;
let name =
name.ok_or_else(|| "folder create requires --name unless JSON payload is provided".to_string())?;
let workspace_id = workspace_id.ok_or_else(|| {
"folder create requires workspace_id unless JSON payload is provided".to_string()
})?;
let name = name.ok_or_else(|| {
"folder create requires --name unless JSON payload is provided".to_string()
})?;
let folder = Folder { workspace_id, name, ..Default::default() };
@@ -108,10 +112,8 @@ fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) ->
let patch = parse_required_json(json, json_input, "folder update")?;
let id = require_id(&patch, "folder update")?;
let existing = ctx
.db()
.get_folder(&id)
.map_err(|e| format!("Failed to get folder for update: {e}"))?;
let existing =
ctx.db().get_folder(&id).map_err(|e| format!("Failed to get folder for update: {e}"))?;
let updated = apply_merge_patch(&existing, &patch, &id, "folder update")?;
let saved = ctx

View File

@@ -1,5 +1,7 @@
pub mod auth;
pub mod environment;
pub mod folder;
pub mod plugin;
pub mod request;
pub mod send;
pub mod workspace;

View File

@@ -0,0 +1,553 @@
use crate::cli::{GenerateArgs, PluginArgs, PluginCommands, PluginPathArg};
use crate::ui;
use keyring::Entry;
use rand::Rng;
use rolldown::{
Bundler, BundlerOptions, ExperimentalOptions, InputItem, LogLevel, OutputFormat, Platform,
WatchOption, Watcher,
};
use serde::Deserialize;
use serde_json::Value;
use std::collections::HashSet;
use std::fs;
use std::io::{self, IsTerminal, Read, Write};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tokio::sync::Mutex;
use walkdir::WalkDir;
use zip::CompressionMethod;
use zip::write::SimpleFileOptions;
// All plugin subcommands report failures as user-facing error strings.
type CommandResult<T = ()> = std::result::Result<T, String>;
// Fixed keyring "user" component; the service name varies per environment.
const KEYRING_USER: &str = "yaak";
/// Backend environment the CLI talks to, chosen via the ENVIRONMENT variable.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum Environment {
    Production,
    Staging,
    Development,
}
impl Environment {
    /// Base URL of the Yaak API for this environment.
    fn api_base_url(self) -> &'static str {
        match self {
            Environment::Production => "https://api.yaak.app",
            // NOTE(review): "todo.yaak.app" looks like a placeholder for the
            // real staging host — confirm before relying on staging.
            Environment::Staging => "https://todo.yaak.app",
            Environment::Development => "http://localhost:9444",
        }
    }
    /// Keyring service name under which this environment's token is stored.
    /// Distinct per environment so tokens never cross environments.
    fn keyring_service(self) -> &'static str {
        match self {
            Environment::Production => "app.yaak.cli.Token",
            Environment::Staging => "app.yaak.cli.staging.Token",
            Environment::Development => "app.yaak.cli.dev.Token",
        }
    }
}
pub async fn run_build(args: PluginPathArg) -> i32 {
match build(args).await {
Ok(()) => 0,
Err(error) => {
ui::error(&error);
1
}
}
}
/// Dispatch `yaak plugin <subcommand>` to its handler; returns the exit code.
pub async fn run(args: PluginArgs) -> i32 {
    match args.command {
        PluginCommands::Build(args) => run_build(args).await,
        PluginCommands::Dev(args) => run_dev(args).await,
        PluginCommands::Generate(args) => run_generate(args).await,
        PluginCommands::Publish(args) => run_publish(args).await,
    }
}
pub async fn run_dev(args: PluginPathArg) -> i32 {
match dev(args).await {
Ok(()) => 0,
Err(error) => {
ui::error(&error);
1
}
}
}
pub async fn run_generate(args: GenerateArgs) -> i32 {
match generate(args) {
Ok(()) => 0,
Err(error) => {
ui::error(&error);
1
}
}
}
pub async fn run_publish(args: PluginPathArg) -> i32 {
match publish(args).await {
Ok(()) => 0,
Err(error) => {
ui::error(&error);
1
}
}
}
/// Build the plugin in the given (or current) directory once, printing any
/// bundler warnings and the output bundle path.
async fn build(args: PluginPathArg) -> CommandResult {
    let plugin_dir = resolve_plugin_dir(args.path)?;
    // Fail early if package.json or src/index.ts is missing.
    ensure_plugin_build_inputs(&plugin_dir)?;
    ui::info(&format!("Building plugin {}...", plugin_dir.display()));
    let warnings = build_plugin_bundle(&plugin_dir).await?;
    for warning in warnings {
        ui::warning(&warning);
    }
    ui::success(&format!("Built plugin bundle at {}", plugin_dir.join("build/index.js").display()));
    Ok(())
}
/// Run the bundler in watch mode, rebuilding on source changes until the
/// user interrupts with Ctrl-C.
async fn dev(args: PluginPathArg) -> CommandResult {
    let plugin_dir = resolve_plugin_dir(args.path)?;
    ensure_plugin_build_inputs(&plugin_dir)?;
    ui::info(&format!("Watching plugin {}...", plugin_dir.display()));
    ui::info("Press Ctrl-C to stop");
    // watch=true enables incremental rebuilds (see bundler_options).
    let bundler = Bundler::new(bundler_options(&plugin_dir, true))
        .map_err(|err| format!("Failed to initialize Rolldown watcher: {err}"))?;
    let watcher = Watcher::new(vec![Arc::new(Mutex::new(bundler))], None)
        .map_err(|err| format!("Failed to start Rolldown watcher: {err}"))?;
    watcher.start().await;
    Ok(())
}
/// Scaffold a new plugin project from the built-in templates.
///
/// Prompts for a name and directory when not supplied (falling back to a
/// random "y-word" name and "./<name>"), refuses to overwrite an existing
/// directory, and writes the template files with the plugin name
/// substituted in.
fn generate(args: GenerateArgs) -> CommandResult {
    let default_name = random_name();
    let name = match args.name {
        Some(name) => name,
        None => prompt_with_default("Plugin name", &default_name)?,
    };
    let default_dir = format!("./{name}");
    let output_dir = match args.dir {
        Some(dir) => dir,
        None => PathBuf::from(prompt_with_default("Plugin dir", &default_dir)?),
    };
    // Never clobber an existing directory.
    if output_dir.exists() {
        return Err(format!("Plugin directory already exists: {}", output_dir.display()));
    }
    ui::info(&format!("Generating plugin in {}", output_dir.display()));
    fs::create_dir_all(output_dir.join("src"))
        .map_err(|e| format!("Failed creating plugin directory {}: {e}", output_dir.display()))?;
    write_file(&output_dir.join(".gitignore"), TEMPLATE_GITIGNORE)?;
    // Templates use the literal "yaak-plugin-name" as the substitution slot.
    write_file(
        &output_dir.join("package.json"),
        &TEMPLATE_PACKAGE_JSON.replace("yaak-plugin-name", &name),
    )?;
    write_file(&output_dir.join("tsconfig.json"), TEMPLATE_TSCONFIG)?;
    write_file(&output_dir.join("README.md"), &TEMPLATE_README.replace("yaak-plugin-name", &name))?;
    write_file(
        &output_dir.join("src/index.ts"),
        &TEMPLATE_INDEX_TS.replace("yaak-plugin-name", &name),
    )?;
    write_file(&output_dir.join("src/index.test.ts"), TEMPLATE_INDEX_TEST_TS)?;
    ui::success("Plugin scaffold generated");
    ui::info("Next steps:");
    println!("  1. cd {}", output_dir.display());
    println!("  2. npm install");
    println!("  3. yaak plugin build");
    Ok(())
}
/// Build, archive, and upload the plugin to the Yaak plugin registry.
///
/// Requires a stored session token (`yaak auth login`). Builds the bundle,
/// zips the publishable files, and POSTs the archive to the publish API,
/// printing the published version and URL on success.
async fn publish(args: PluginPathArg) -> CommandResult {
    let plugin_dir = resolve_plugin_dir(args.path)?;
    ensure_plugin_build_inputs(&plugin_dir)?;
    let environment = current_environment();
    let token = get_auth_token(environment)?
        .ok_or_else(|| "Not logged in. Run `yaak auth login`.".to_string())?;
    ui::info(&format!("Building plugin {}...", plugin_dir.display()));
    let warnings = build_plugin_bundle(&plugin_dir).await?;
    for warning in warnings {
        ui::warning(&warning);
    }
    ui::info("Archiving plugin");
    // Entire archive is held in memory before upload.
    let archive = create_publish_archive(&plugin_dir)?;
    ui::info("Uploading plugin");
    let url = format!("{}/api/v1/plugins/publish", environment.api_base_url());
    let response = reqwest::Client::new()
        .post(url)
        .header("X-Yaak-Session", token)
        .header(reqwest::header::USER_AGENT, user_agent())
        .header(reqwest::header::CONTENT_TYPE, "application/zip")
        .body(archive)
        .send()
        .await
        .map_err(|e| format!("Failed to upload plugin: {e}"))?;
    // Read the body before checking status so API errors can be surfaced.
    let status = response.status();
    let body =
        response.text().await.map_err(|e| format!("Failed reading publish response body: {e}"))?;
    if !status.is_success() {
        return Err(parse_api_error(status.as_u16(), &body));
    }
    let published: PublishResponse = serde_json::from_str(&body)
        .map_err(|e| format!("Failed parsing publish response JSON: {e}\nResponse: {body}"))?;
    ui::success(&format!("Plugin published {}", published.version));
    println!("  -> {}", published.url);
    Ok(())
}
/// Subset of the publish API's JSON response that the CLI displays.
#[derive(Deserialize)]
struct PublishResponse {
    version: String,
    url: String,
}
/// Run a single Rolldown build for the plugin and return its warnings as
/// strings. The bundle is written to `<plugin_dir>/build/index.js`.
async fn build_plugin_bundle(plugin_dir: &Path) -> CommandResult<Vec<String>> {
    // Start from a clean build/ so stale artifacts never get published.
    prepare_build_output_dir(plugin_dir)?;
    let mut bundler = Bundler::new(bundler_options(plugin_dir, false))
        .map_err(|err| format!("Failed to initialize Rolldown: {err}"))?;
    let output = bundler.write().await.map_err(|err| format!("Plugin build failed:\n{err}"))?;
    Ok(output.warnings.into_iter().map(|w| w.to_string()).collect())
}
/// Delete and recreate `<plugin_dir>/build` so each build starts empty.
fn prepare_build_output_dir(plugin_dir: &Path) -> CommandResult {
    let build_dir = plugin_dir.join("build");
    if build_dir.exists() {
        fs::remove_dir_all(&build_dir)
            .map_err(|e| format!("Failed to clean build directory {}: {e}", build_dir.display()))?;
    }
    fs::create_dir_all(&build_dir)
        .map_err(|e| format!("Failed to create build directory {}: {e}", build_dir.display()))
}
/// Rolldown configuration shared by one-shot builds and watch mode.
///
/// Bundles `src/index.ts` into a CommonJS `build/index.js` targeting Node.
/// When `watch` is true, watch mode and incremental rebuilds are enabled.
fn bundler_options(plugin_dir: &Path, watch: bool) -> BundlerOptions {
    BundlerOptions {
        input: Some(vec![InputItem { import: "./src/index.ts".to_string(), ..Default::default() }]),
        // All relative paths below resolve against the plugin directory.
        cwd: Some(plugin_dir.to_path_buf()),
        file: Some("build/index.js".to_string()),
        format: Some(OutputFormat::Cjs),
        platform: Some(Platform::Node),
        log_level: Some(LogLevel::Info),
        experimental: watch
            .then_some(ExperimentalOptions { incremental_build: Some(true), ..Default::default() }),
        watch: watch.then_some(WatchOption::default()),
        ..Default::default()
    }
}
/// Resolve the plugin directory argument to an absolute, canonical path.
///
/// `None` means the current working directory; relative paths are joined
/// onto it. Errors if the path does not exist or is not a directory.
fn resolve_plugin_dir(path: Option<PathBuf>) -> CommandResult<PathBuf> {
    let cwd =
        std::env::current_dir().map_err(|e| format!("Failed to read current directory: {e}"))?;
    let dir = match path {
        None => cwd,
        Some(p) if p.is_absolute() => p,
        Some(p) => cwd.join(p),
    };
    if !dir.exists() {
        return Err(format!("Plugin directory does not exist: {}", dir.display()));
    }
    if !dir.is_dir() {
        return Err(format!("Plugin path is not a directory: {}", dir.display()));
    }
    // Canonicalize so later joins and archive paths share a stable root.
    dir.canonicalize()
        .map_err(|e| format!("Failed to resolve plugin directory {}: {e}", dir.display()))
}
/// Verify the directory looks like a plugin project before building.
///
/// Requires both a `package.json` manifest and the `src/index.ts`
/// entrypoint; the manifest is checked first.
fn ensure_plugin_build_inputs(plugin_dir: &Path) -> CommandResult {
    let manifest = plugin_dir.join("package.json");
    if !manifest.is_file() {
        return Err(format!(
            "{} does not exist. Ensure that you are in a plugin directory.",
            manifest.display()
        ));
    }
    let entrypoint = plugin_dir.join("src/index.ts");
    if !entrypoint.is_file() {
        return Err(format!("Required entrypoint missing: {}", entrypoint.display()));
    }
    Ok(())
}
/// Build an in-memory zip archive of the plugin's publishable files.
///
/// Includes everything under `src/` and `build/`, the required top-level
/// files (README.md, package.json, the bundle, the entrypoint), and the
/// optional package-lock.json if present; everything else is skipped.
/// Returns the raw zip bytes, or an error if a required file is missing.
fn create_publish_archive(plugin_dir: &Path) -> CommandResult<Vec<u8>> {
    let required_files = [
        "README.md",
        "package.json",
        "build/index.js",
        "src/index.ts",
    ];
    let optional_files = ["package-lock.json"];
    let mut selected = HashSet::new();
    for required in required_files {
        // Fail early with a clear message rather than uploading a broken plugin.
        let required_path = plugin_dir.join(required);
        if !required_path.is_file() {
            return Err(format!("Missing required file: {required}"));
        }
        selected.insert(required.to_string());
    }
    for optional in optional_files {
        // Allow-listed but not required: missing ones simply never match below.
        selected.insert(optional.to_string());
    }
    let cursor = std::io::Cursor::new(Vec::new());
    let mut zip = zip::ZipWriter::new(cursor);
    let options = SimpleFileOptions::default().compression_method(CompressionMethod::Deflated);
    for entry in WalkDir::new(plugin_dir) {
        let entry = entry.map_err(|e| format!("Failed walking plugin directory: {e}"))?;
        if !entry.file_type().is_file() {
            continue;
        }
        let path = entry.path();
        let rel = path
            .strip_prefix(plugin_dir)
            .map_err(|e| format!("Failed deriving relative path for {}: {e}", path.display()))?;
        // Normalize to forward slashes so archives built on Windows match.
        let rel = rel.to_string_lossy().replace('\\', "/");
        let keep = rel.starts_with("src/") || rel.starts_with("build/") || selected.contains(&rel);
        if !keep {
            continue;
        }
        zip.start_file(rel, options).map_err(|e| format!("Failed adding file to archive: {e}"))?;
        // fs::read is the idiomatic one-shot replacement for open + read_to_end.
        let contents =
            fs::read(path).map_err(|e| format!("Failed reading file {}: {e}", path.display()))?;
        zip.write_all(&contents).map_err(|e| format!("Failed writing archive contents: {e}"))?;
    }
    let cursor = zip.finish().map_err(|e| format!("Failed finalizing plugin archive: {e}"))?;
    Ok(cursor.into_inner())
}
/// Write `contents` to `path`, creating any missing parent directories.
fn write_file(path: &Path, contents: &str) -> CommandResult {
    match path.parent() {
        Some(parent) => fs::create_dir_all(parent)
            .map_err(|e| format!("Failed creating directory {}: {e}", parent.display()))?,
        None => {}
    }
    fs::write(path, contents).map_err(|e| format!("Failed writing file {}: {e}", path.display()))
}
/// Prompt the user for a value, returning `default` on empty input.
///
/// When stdin is not a terminal (piped/CI) the default is returned
/// immediately without prompting.
fn prompt_with_default(label: &str, default: &str) -> CommandResult<String> {
    if !io::stdin().is_terminal() {
        return Ok(default.to_string());
    }
    print!("{label} [{default}]: ");
    // Flush so the prompt appears before blocking on read_line.
    io::stdout().flush().map_err(|e| format!("Failed to flush stdout: {e}"))?;
    let mut input = String::new();
    io::stdin().read_line(&mut input).map_err(|e| format!("Failed to read input: {e}"))?;
    let trimmed = input.trim();
    if trimmed.is_empty() { Ok(default.to_string()) } else { Ok(trimmed.to_string()) }
}
/// Resolve the target backend environment from the `ENVIRONMENT` variable.
/// Unset or unrecognized values default to production.
fn current_environment() -> Environment {
    let value = std::env::var("ENVIRONMENT").ok();
    match value.as_deref() {
        Some("staging") => Environment::Staging,
        Some("development") => Environment::Development,
        _ => Environment::Production,
    }
}
/// Build the OS keyring entry used to store this environment's session token.
fn keyring_entry(environment: Environment) -> CommandResult<Entry> {
    Entry::new(environment.keyring_service(), KEYRING_USER)
        .map_err(|e| format!("Failed to initialize auth keyring entry: {e}"))
}
/// Read the stored session token for `environment`.
///
/// A missing keyring entry means "not logged in" and yields `Ok(None)`;
/// any other keyring failure is an error.
fn get_auth_token(environment: Environment) -> CommandResult<Option<String>> {
    match keyring_entry(environment)?.get_password() {
        Err(keyring::Error::NoEntry) => Ok(None),
        Err(other) => Err(format!("Failed to read auth token: {other}")),
        Ok(token) => Ok(Some(token)),
    }
}
/// Turn an API error response into a user-facing message.
///
/// Prefers the server's own "message" field, then "error", from a JSON
/// body; otherwise falls back to "API error <status>: <body>".
fn parse_api_error(status: u16, body: &str) -> String {
    if let Ok(parsed) = serde_json::from_str::<Value>(body) {
        for key in ["message", "error"] {
            if let Some(text) = parsed.get(key).and_then(Value::as_str) {
                return text.to_string();
            }
        }
    }
    format!("API error {status}: {body}")
}
/// User-Agent string for outgoing API requests, e.g. "YaakCli/1.2.3 (Linux)".
fn user_agent() -> String {
    format!("YaakCli/{} ({})", env!("CARGO_PKG_VERSION"), ua_platform())
}
/// Short platform label for the User-Agent string.
///
/// Fix: `std::env::consts::OS` reports "macos" on macOS (never "darwin"),
/// so the previous "darwin" arm was dead code and Macs reported "Unknown".
fn ua_platform() -> &'static str {
    match std::env::consts::OS {
        "windows" => "Win",
        "macos" => "Mac",
        "linux" => "Linux",
        _ => "Unknown",
    }
}
/// Generate a default plugin name as a random "adjective-noun" pair
/// (all y-words, in the spirit of the Yaak brand).
fn random_name() -> String {
    const ADJECTIVES: &[&str] = &[
        "young", "youthful", "yellow", "yielding", "yappy", "yawning", "yummy", "yucky", "yearly",
        "yester", "yeasty", "yelling",
    ];
    const NOUNS: &[&str] = &[
        "yak", "yarn", "year", "yell", "yoke", "yoga", "yam", "yacht", "yodel",
    ];
    let mut rng = rand::thread_rng();
    let adjective_index = rng.gen_range(0..ADJECTIVES.len());
    let noun_index = rng.gen_range(0..NOUNS.len());
    format!("{}-{}", ADJECTIVES[adjective_index], NOUNS[noun_index])
}
// --- Scaffold templates written by `generate` ---
// "yaak-plugin-name" is the substitution slot replaced with the real name.
const TEMPLATE_GITIGNORE: &str = "node_modules\n";
// package.json with build/dev scripts delegating back to this CLI.
const TEMPLATE_PACKAGE_JSON: &str = r#"{
  "name": "yaak-plugin-name",
  "private": true,
  "version": "0.0.1",
  "scripts": {
    "build": "yaak plugin build",
    "dev": "yaak plugin dev"
  },
  "devDependencies": {
    "@types/node": "^24.10.1",
    "typescript": "^5.9.3",
    "vitest": "^4.0.14"
  },
  "dependencies": {
    "@yaakapp/api": "^0.7.0"
  }
}
"#;
// Type-check-only tsconfig (noEmit); bundling is done by Rolldown.
const TEMPLATE_TSCONFIG: &str = r#"{
  "compilerOptions": {
    "target": "es2021",
    "lib": ["DOM", "DOM.Iterable", "ESNext"],
    "useDefineForClassFields": true,
    "allowJs": false,
    "skipLibCheck": true,
    "esModuleInterop": false,
    "allowSyntheticDefaultImports": true,
    "strict": true,
    "noUncheckedIndexedAccess": true,
    "forceConsistentCasingInFileNames": true,
    "module": "ESNext",
    "moduleResolution": "Node",
    "resolveJsonModule": true,
    "isolatedModules": true,
    "noEmit": true,
    "jsx": "react-jsx"
  },
  "include": ["src"]
}
"#;
const TEMPLATE_README: &str = r#"# yaak-plugin-name
Describe what your plugin does.
"#;
// Minimal working plugin: one HTTP-request action that shows a toast.
const TEMPLATE_INDEX_TS: &str = r#"import type { PluginDefinition } from "@yaakapp/api";
export const plugin: PluginDefinition = {
  httpRequestActions: [
    {
      label: "Hello, From Plugin",
      icon: "info",
      async onSelect(ctx, args) {
        await ctx.toast.show({
          color: "success",
          message: `You clicked the request ${args.httpRequest.id}`,
        });
      },
    },
  ],
};
"#;
// Vitest smoke test verifying the plugin module exports an object.
const TEMPLATE_INDEX_TEST_TS: &str = r#"import { describe, expect, test } from "vitest";
import { plugin } from "./index";
describe("Example Plugin", () => {
  test("Exports plugin object", () => {
    expect(plugin).toBeTypeOf("object");
  });
});
"#;
#[cfg(test)]
mod tests {
    use super::create_publish_archive;
    use std::collections::HashSet;
    use std::fs;
    use std::io::Cursor;
    use tempfile::TempDir;
    use zip::ZipArchive;
    // The archive must contain the allow-listed files and nothing else.
    #[test]
    fn publish_archive_includes_required_and_optional_files() {
        let dir = TempDir::new().expect("temp dir");
        let root = dir.path();
        fs::create_dir_all(root.join("src")).expect("create src");
        fs::create_dir_all(root.join("build")).expect("create build");
        // Directory outside the allow-list; must be excluded from the zip.
        fs::create_dir_all(root.join("ignored")).expect("create ignored");
        fs::write(root.join("README.md"), "# Demo\n").expect("write README");
        fs::write(root.join("package.json"), "{}").expect("write package.json");
        fs::write(root.join("package-lock.json"), "{}").expect("write package-lock.json");
        fs::write(root.join("src/index.ts"), "export const plugin = {};\n")
            .expect("write src/index.ts");
        fs::write(root.join("build/index.js"), "exports.plugin = {};\n")
            .expect("write build/index.js");
        fs::write(root.join("ignored/secret.txt"), "do-not-ship").expect("write ignored file");
        let archive = create_publish_archive(root).expect("create archive");
        let mut zip = ZipArchive::new(Cursor::new(archive)).expect("open zip");
        let mut names = HashSet::new();
        for i in 0..zip.len() {
            let file = zip.by_index(i).expect("zip entry");
            names.insert(file.name().to_string());
        }
        assert!(names.contains("README.md"));
        assert!(names.contains("package.json"));
        assert!(names.contains("package-lock.json"));
        assert!(names.contains("src/index.ts"));
        assert!(names.contains("build/index.js"));
        assert!(!names.contains("ignored/secret.txt"));
    }
}

View File

@@ -1,15 +1,19 @@
use crate::cli::{RequestArgs, RequestCommands};
use crate::cli::{RequestArgs, RequestCommands, RequestSchemaType};
use crate::context::CliContext;
use crate::utils::confirm::confirm_delete;
use crate::utils::json::{
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
validate_create_id,
};
use schemars::schema_for;
use serde_json::{Map, Value, json};
use std::collections::HashMap;
use tokio::sync::mpsc;
use yaak::send::{SendHttpRequestByIdWithPluginsParams, send_http_request_by_id_with_plugins};
use yaak_models::models::HttpRequest;
use yaak_models::models::{GrpcRequest, HttpRequest, WebsocketRequest};
use yaak_models::queries::any_request::AnyRequest;
use yaak_models::util::UpdateSource;
use yaak_plugins::events::PluginContext;
use yaak_plugins::events::{FormInput, FormInputBase, JsonPrimitive, PluginContext};
type CommandResult<T = ()> = std::result::Result<T, String>;
@@ -31,6 +35,15 @@ pub async fn run(
}
};
}
RequestCommands::Schema { request_type } => {
return match schema(ctx, request_type).await {
Ok(()) => 0,
Err(error) => {
eprintln!("Error: {error}");
1
}
};
}
RequestCommands::Create { workspace_id, name, method, url, json } => {
create(ctx, workspace_id, name, method, url, json)
}
@@ -62,6 +75,221 @@ fn list(ctx: &CliContext, workspace_id: &str) -> CommandResult {
Ok(())
}
/// Print the JSON Schema for the chosen request type to stdout.
///
/// Plugin-provided authentication schemas are merged in on a best-effort
/// basis; failures there only produce a warning, never an error.
async fn schema(ctx: &CliContext, request_type: RequestSchemaType) -> CommandResult {
    let mut schema = match request_type {
        RequestSchemaType::Http => serde_json::to_value(schema_for!(HttpRequest))
            .map_err(|e| format!("Failed to serialize HTTP request schema: {e}"))?,
        RequestSchemaType::Grpc => serde_json::to_value(schema_for!(GrpcRequest))
            .map_err(|e| format!("Failed to serialize gRPC request schema: {e}"))?,
        RequestSchemaType::Websocket => serde_json::to_value(schema_for!(WebsocketRequest))
            .map_err(|e| format!("Failed to serialize WebSocket request schema: {e}"))?,
    };
    // Best-effort enrichment: warn and continue on plugin errors.
    if let Err(error) = merge_auth_schema_from_plugins(ctx, &mut schema).await {
        eprintln!("Warning: Failed to enrich authentication schema from plugins: {error}");
    }
    let output = serde_json::to_string_pretty(&schema)
        .map_err(|e| format!("Failed to format schema JSON: {e}"))?;
    println!("{output}");
    Ok(())
}
/// Replace the `authentication` property of `schema` with a `oneOf` union of
/// the original schema plus one object-schema variant per plugin-provided
/// authentication strategy.
///
/// Returns `Err` only when the summary listing itself fails; per-strategy
/// config failures are reported as warnings and that strategy is skipped.
/// If `schema` has no `properties.authentication`, the schema is left as-is.
async fn merge_auth_schema_from_plugins(
    ctx: &CliContext,
    schema: &mut Value,
) -> Result<(), String> {
    let plugin_context = PluginContext::new_empty();
    // NOTE(review): assumes `ctx.plugin_manager()` is usable even when the CLI
    // was started without plugins — confirm against CliContext.
    let plugin_manager = ctx.plugin_manager();

    let summaries = plugin_manager
        .get_http_authentication_summaries(&plugin_context)
        .await
        .map_err(|e| e.to_string())?;

    let mut auth_variants = Vec::new();
    for (_, summary) in summaries {
        // Ask the plugin for its form definition with empty current values;
        // "yaakcli_request_schema" identifies this caller to the plugin.
        let config = match plugin_manager
            .get_http_authentication_config(
                &plugin_context,
                &summary.name,
                HashMap::<String, JsonPrimitive>::new(),
                "yaakcli_request_schema",
            )
            .await
        {
            Ok(config) => config,
            Err(error) => {
                // Skip broken strategies instead of failing the whole command.
                eprintln!(
                    "Warning: Failed to load auth config for strategy '{}': {}",
                    summary.name, error
                );
                continue;
            }
        };

        auth_variants.push(auth_variant_schema(&summary.name, &summary.label, &config.args));
    }

    // Nothing to merge into if the schema has no authentication property.
    let Some(properties) = schema.get_mut("properties").and_then(Value::as_object_mut) else {
        return Ok(());
    };
    let Some(auth_schema) = properties.get_mut("authentication") else {
        return Ok(());
    };

    if !auth_variants.is_empty() {
        // Keep the original schema as the first `oneOf` alternative.
        let mut one_of = vec![auth_schema.clone()];
        one_of.extend(auth_variants);
        *auth_schema = json!({ "oneOf": one_of });
    }

    Ok(())
}
/// Build a JSON-Schema object describing one authentication strategy's form
/// inputs. `required` is only emitted when at least one input is mandatory.
fn auth_variant_schema(auth_name: &str, auth_label: &str, args: &[FormInput]) -> Value {
    let mut props = Map::new();
    let mut required_names: Vec<String> = Vec::new();
    args.iter().for_each(|input| add_input_schema(input, &mut props, &mut required_names));

    let mut variant = json!({
        "title": auth_label,
        "description": format!("Authentication values for strategy '{}'", auth_name),
        "type": "object",
        "properties": props,
        "additionalProperties": true
    });
    if !required_names.is_empty() {
        variant["required"] = json!(required_names);
    }
    variant
}
/// Translate one form input into JSON-Schema property entries.
///
/// Leaf inputs contribute a single property (via `add_base_schema`);
/// container inputs (accordion/hstack/banner) only recurse into their
/// children, and markdown contributes nothing.
fn add_input_schema(
    input: &FormInput,
    properties: &mut Map<String, Value>,
    required: &mut Vec<String>,
) {
    // Leaf variants yield a (base, schema) pair; containers return early.
    let (base, leaf_schema) = match input {
        FormInput::Text(v) => (
            &v.base,
            // writeOnly marks password fields per JSON-Schema convention.
            json!({ "type": "string", "writeOnly": v.password.unwrap_or(false) }),
        ),
        FormInput::Editor(v) => {
            (&v.base, json!({ "type": "string", "x-editorLanguage": v.language.clone() }))
        }
        FormInput::Select(v) => {
            let choices: Vec<Value> =
                v.options.iter().map(|o| Value::String(o.value.clone())).collect();
            (&v.base, json!({ "type": "string", "enum": choices }))
        }
        FormInput::Checkbox(v) => (&v.base, json!({ "type": "boolean" })),
        FormInput::File(v) => {
            // Multi-file pickers accept a list of paths; otherwise one path.
            let schema = if v.multiple.unwrap_or(false) {
                json!({ "type": "array", "items": { "type": "string" } })
            } else {
                json!({ "type": "string" })
            };
            (&v.base, schema)
        }
        FormInput::HttpRequest(v) => (&v.base, json!({ "type": "string" })),
        FormInput::KeyValue(v) => {
            (&v.base, json!({ "type": "object", "additionalProperties": true }))
        }
        FormInput::Accordion(v) => {
            for child in v.inputs.iter().flatten() {
                add_input_schema(child, properties, required);
            }
            return;
        }
        FormInput::HStack(v) => {
            for child in v.inputs.iter().flatten() {
                add_input_schema(child, properties, required);
            }
            return;
        }
        FormInput::Banner(v) => {
            for child in v.inputs.iter().flatten() {
                add_input_schema(child, properties, required);
            }
            return;
        }
        // Markdown is display-only and has no value to collect.
        FormInput::Markdown(_) => return,
    };

    add_base_schema(base, leaf_schema, properties, required);
}
/// Insert `schema` into `properties` under the input's name, decorated with
/// the optional title/description/default from the form definition, and
/// record the name in `required` unless the input is marked optional.
/// Hidden or unnamed inputs are skipped entirely.
fn add_base_schema(
    base: &FormInputBase,
    mut schema: Value,
    properties: &mut Map<String, Value>,
    required: &mut Vec<String>,
) {
    // Hidden inputs and inputs without a usable name are not addressable
    // properties, so they never appear in the schema.
    let skip = base.hidden.unwrap_or(false) || base.name.trim().is_empty();
    if skip {
        return;
    }

    if let Some(text) = base.description.as_ref() {
        schema["description"] = Value::String(text.clone());
    }
    if let Some(text) = base.label.as_ref() {
        schema["title"] = Value::String(text.clone());
    }
    if let Some(text) = base.default_value.as_ref() {
        schema["default"] = Value::String(text.clone());
    }

    let property_name = base.name.clone();
    properties.insert(property_name.clone(), schema);
    // Inputs are required unless explicitly marked optional.
    if !base.optional.unwrap_or(false) {
        required.push(property_name);
    }
}
fn create(
ctx: &CliContext,
workspace_id: Option<String>,
@@ -146,12 +374,10 @@ fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) ->
}
fn show(ctx: &CliContext, request_id: &str) -> CommandResult {
let request = ctx
.db()
.get_http_request(request_id)
.map_err(|e| format!("Failed to get request: {e}"))?;
let output =
serde_json::to_string_pretty(&request).map_err(|e| format!("Failed to serialize request: {e}"))?;
let request =
ctx.db().get_http_request(request_id).map_err(|e| format!("Failed to get request: {e}"))?;
let output = serde_json::to_string_pretty(&request)
.map_err(|e| format!("Failed to serialize request: {e}"))?;
println!("{output}");
Ok(())
}
@@ -178,9 +404,35 @@ pub async fn send_request_by_id(
verbose: bool,
) -> Result<(), String> {
let request =
ctx.db().get_http_request(request_id).map_err(|e| format!("Failed to get request: {e}"))?;
ctx.db().get_any_request(request_id).map_err(|e| format!("Failed to get request: {e}"))?;
match request {
AnyRequest::HttpRequest(http_request) => {
send_http_request_by_id(
ctx,
&http_request.id,
&http_request.workspace_id,
environment,
verbose,
)
.await
}
AnyRequest::GrpcRequest(_) => {
Err("gRPC request send is not implemented yet in yaak-cli".to_string())
}
AnyRequest::WebsocketRequest(_) => {
Err("WebSocket request send is not implemented yet in yaak-cli".to_string())
}
}
}
let plugin_context = PluginContext::new(None, Some(request.workspace_id.clone()));
async fn send_http_request_by_id(
ctx: &CliContext,
request_id: &str,
workspace_id: &str,
environment: Option<&str>,
verbose: bool,
) -> Result<(), String> {
let plugin_context = PluginContext::new(None, Some(workspace_id.to_string()));
let (event_tx, mut event_rx) = mpsc::channel(100);
let event_handle = tokio::spawn(async move {

View File

@@ -1,6 +1,12 @@
use crate::cli::SendArgs;
use crate::commands::request;
use crate::context::CliContext;
use futures::future::join_all;
/// How `send_many` executes a batch of requests.
enum ExecutionMode {
    // One request at a time; honors --fail-fast.
    Sequential,
    // All requests concurrently via join_all; always runs to completion.
    Parallel,
}
pub async fn run(
ctx: &CliContext,
@@ -8,7 +14,7 @@ pub async fn run(
environment: Option<&str>,
verbose: bool,
) -> i32 {
match request::send_request_by_id(ctx, &args.request_id, environment, verbose).await {
match send_target(ctx, args, environment, verbose).await {
Ok(()) => 0,
Err(error) => {
eprintln!("Error: {error}");
@@ -16,3 +22,163 @@ pub async fn run(
}
}
}
/// Resolve `args.id` as a request, then a folder, then a workspace (in that
/// precedence order) and dispatch the send. Folder/workspace targets fan out
/// to every contained request via `send_many`.
async fn send_target(
    ctx: &CliContext,
    args: SendArgs,
    environment: Option<&str>,
    verbose: bool,
) -> Result<(), String> {
    let mode = match args.parallel {
        true => ExecutionMode::Parallel,
        false => ExecutionMode::Sequential,
    };

    // A direct request ID takes precedence over folders and workspaces.
    if ctx.db().get_any_request(&args.id).is_ok() {
        return request::send_request_by_id(ctx, &args.id, environment, verbose).await;
    }

    if ctx.db().get_folder(&args.id).is_ok() {
        let ids = collect_folder_request_ids(ctx, &args.id)?;
        return if ids.is_empty() {
            println!("No requests found in folder {}", args.id);
            Ok(())
        } else {
            send_many(ctx, ids, mode, args.fail_fast, environment, verbose).await
        };
    }

    if ctx.db().get_workspace(&args.id).is_ok() {
        let ids = collect_workspace_request_ids(ctx, &args.id)?;
        return if ids.is_empty() {
            println!("No requests found in workspace {}", args.id);
            Ok(())
        } else {
            send_many(ctx, ids, mode, args.fail_fast, environment, verbose).await
        };
    }

    Err(format!("Could not resolve ID '{}' as request, folder, or workspace", args.id))
}
/// Gather the IDs of every HTTP, gRPC, and WebSocket request nested (at any
/// depth) under the given folder, in that type order.
fn collect_folder_request_ids(ctx: &CliContext, folder_id: &str) -> Result<Vec<String>, String> {
    let http = ctx
        .db()
        .list_http_requests_for_folder_recursive(folder_id)
        .map_err(|e| format!("Failed to list HTTP requests in folder: {e}"))?;
    let grpc = ctx
        .db()
        .list_grpc_requests_for_folder_recursive(folder_id)
        .map_err(|e| format!("Failed to list gRPC requests in folder: {e}"))?;
    let websocket = ctx
        .db()
        .list_websocket_requests_for_folder_recursive(folder_id)
        .map_err(|e| format!("Failed to list WebSocket requests in folder: {e}"))?;

    // Flatten the three model types into one list of IDs, HTTP first.
    Ok(http
        .into_iter()
        .map(|r| r.id)
        .chain(grpc.into_iter().map(|r| r.id))
        .chain(websocket.into_iter().map(|r| r.id))
        .collect())
}
/// Gather the IDs of every HTTP, gRPC, and WebSocket request in the given
/// workspace, in that type order.
fn collect_workspace_request_ids(
    ctx: &CliContext,
    workspace_id: &str,
) -> Result<Vec<String>, String> {
    let http = ctx
        .db()
        .list_http_requests(workspace_id)
        .map_err(|e| format!("Failed to list HTTP requests in workspace: {e}"))?;
    let grpc = ctx
        .db()
        .list_grpc_requests(workspace_id)
        .map_err(|e| format!("Failed to list gRPC requests in workspace: {e}"))?;
    let websocket = ctx
        .db()
        .list_websocket_requests(workspace_id)
        .map_err(|e| format!("Failed to list WebSocket requests in workspace: {e}"))?;

    // Flatten the three model types into one list of IDs, HTTP first.
    Ok(http
        .into_iter()
        .map(|r| r.id)
        .chain(grpc.into_iter().map(|r| r.id))
        .chain(websocket.into_iter().map(|r| r.id))
        .collect())
}
/// Send every request in `request_ids`, print a success/failure summary, and
/// return `Err` if any request failed.
///
/// `fail_fast` is only consulted in sequential mode (it stops after the
/// first failure); parallel mode always runs every task to completion
/// because all futures are joined together.
async fn send_many(
    ctx: &CliContext,
    request_ids: Vec<String>,
    mode: ExecutionMode,
    fail_fast: bool,
    environment: Option<&str>,
    verbose: bool,
) -> Result<(), String> {
    let mut success_count = 0usize;
    // (request_id, error message) for each failed send.
    let mut failures: Vec<(String, String)> = Vec::new();

    match mode {
        ExecutionMode::Sequential => {
            for request_id in request_ids {
                match request::send_request_by_id(ctx, &request_id, environment, verbose).await {
                    Ok(()) => success_count += 1,
                    Err(error) => {
                        failures.push((request_id, error));
                        if fail_fast {
                            break;
                        }
                    }
                }
            }
        }
        ExecutionMode::Parallel => {
            // Pair each result with its request ID so failures stay attributable.
            let tasks = request_ids
                .iter()
                .map(|request_id| async move {
                    (
                        request_id.clone(),
                        request::send_request_by_id(ctx, request_id, environment, verbose).await,
                    )
                })
                .collect::<Vec<_>>();
            for (request_id, result) in join_all(tasks).await {
                match result {
                    Ok(()) => success_count += 1,
                    Err(error) => failures.push((request_id, error)),
                }
            }
        }
    }

    let failure_count = failures.len();
    println!("Send summary: {success_count} succeeded, {failure_count} failed");
    if failure_count == 0 {
        return Ok(());
    }
    // List each failure on stderr before returning the aggregate error.
    for (request_id, error) in failures {
        eprintln!(" {}: {}", request_id, error);
    }
    Err("One or more requests failed".to_string())
}

View File

@@ -28,7 +28,8 @@ pub fn run(ctx: &CliContext, args: WorkspaceArgs) -> i32 {
}
fn list(ctx: &CliContext) -> CommandResult {
let workspaces = ctx.db().list_workspaces().map_err(|e| format!("Failed to list workspaces: {e}"))?;
let workspaces =
ctx.db().list_workspaces().map_err(|e| format!("Failed to list workspaces: {e}"))?;
if workspaces.is_empty() {
println!("No workspaces found");
} else {
@@ -75,8 +76,9 @@ fn create(
return Ok(());
}
let name =
name.ok_or_else(|| "workspace create requires --name unless JSON payload is provided".to_string())?;
let name = name.ok_or_else(|| {
"workspace create requires --name unless JSON payload is provided".to_string()
})?;
let workspace = Workspace { name, ..Default::default() };
let created = ctx

View File

@@ -1,18 +1,31 @@
use crate::plugin_events::CliPluginEventBridge;
use include_dir::{Dir, include_dir};
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tokio::sync::Mutex;
use yaak_crypto::manager::EncryptionManager;
use yaak_models::blob_manager::BlobManager;
use yaak_models::db_context::DbContext;
use yaak_models::query_manager::QueryManager;
use yaak_plugins::bootstrap;
use yaak_plugins::events::PluginContext;
use yaak_plugins::manager::PluginManager;
const EMBEDDED_PLUGIN_RUNTIME: &str = include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs"
));
static EMBEDDED_VENDORED_PLUGINS: Dir<'_> =
include_dir!("$CARGO_MANIFEST_DIR/../../crates-tauri/yaak-app/vendored/plugins");
/// Shared state for one CLI invocation: data directory, database access,
/// encryption, and (when the command needs them) the plugin runtime.
pub struct CliContext {
    data_dir: PathBuf,
    query_manager: QueryManager,
    blob_manager: BlobManager,
    pub encryption_manager: Arc<EncryptionManager>,
    // `None` when plugins weren't requested or their initialization failed.
    plugin_manager: Option<Arc<PluginManager>>,
    // Host-event bridge for plugins; taken exactly once during `shutdown`.
    plugin_event_bridge: Mutex<Option<CliPluginEventBridge>>,
}
impl CliContext {
@@ -30,42 +43,50 @@ impl CliContext {
let installed_plugin_dir = data_dir.join("installed-plugins");
let node_bin_path = PathBuf::from("node");
prepare_embedded_vendored_plugins(&vendored_plugin_dir)
.expect("Failed to prepare bundled plugins");
let plugin_runtime_main =
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
prepare_embedded_plugin_runtime(&data_dir)
.expect("Failed to prepare embedded plugin runtime")
});
let plugin_manager = Arc::new(
PluginManager::new(
vendored_plugin_dir,
installed_plugin_dir,
node_bin_path,
plugin_runtime_main,
false,
)
.await,
);
let plugins = query_manager.connect().list_plugins().unwrap_or_default();
if !plugins.is_empty() {
let errors = plugin_manager
.initialize_all_plugins(plugins, &PluginContext::new_empty())
.await;
for (plugin_dir, error_msg) in errors {
eprintln!(
"Warning: Failed to initialize plugin '{}': {}",
plugin_dir, error_msg
);
match bootstrap::create_and_initialize_manager(
vendored_plugin_dir,
installed_plugin_dir,
node_bin_path,
plugin_runtime_main,
&query_manager,
&PluginContext::new_empty(),
false,
)
.await
{
Ok(plugin_manager) => Some(plugin_manager),
Err(err) => {
eprintln!("Warning: Failed to initialize plugins: {err}");
None
}
}
Some(plugin_manager)
} else {
None
};
Self { data_dir, query_manager, blob_manager, encryption_manager, plugin_manager }
let plugin_event_bridge = if let Some(plugin_manager) = &plugin_manager {
Some(CliPluginEventBridge::start(plugin_manager.clone(), query_manager.clone()).await)
} else {
None
};
Self {
data_dir,
query_manager,
blob_manager,
encryption_manager,
plugin_manager,
plugin_event_bridge: Mutex::new(plugin_event_bridge),
}
}
pub fn data_dir(&self) -> &Path {
@@ -90,7 +111,24 @@ impl CliContext {
pub async fn shutdown(&self) {
if let Some(plugin_manager) = &self.plugin_manager {
if let Some(plugin_event_bridge) = self.plugin_event_bridge.lock().await.take() {
plugin_event_bridge.shutdown(plugin_manager).await;
}
plugin_manager.terminate().await;
}
}
}
/// Write the plugin-runtime bundle compiled into this binary out to
/// `<data_dir>/vendored/plugin-runtime/index.cjs` and return that path.
/// The file is (re)written on every call, so upgrades take effect.
fn prepare_embedded_plugin_runtime(data_dir: &Path) -> std::io::Result<PathBuf> {
    let target_dir = data_dir.join("vendored").join("plugin-runtime");
    fs::create_dir_all(&target_dir)?;
    let entrypoint = target_dir.join("index.cjs");
    fs::write(&entrypoint, EMBEDDED_PLUGIN_RUNTIME)?;
    Ok(entrypoint)
}
/// Extract the vendored plugins embedded in the binary into the on-disk
/// vendored-plugin directory, creating the directory if necessary.
fn prepare_embedded_vendored_plugins(vendored_plugin_dir: &Path) -> std::io::Result<()> {
    fs::create_dir_all(vendored_plugin_dir)?;
    Ok(EMBEDDED_VENDORED_PLUGINS.extract(vendored_plugin_dir)?)
}

View File

@@ -1,6 +1,8 @@
mod cli;
mod commands;
mod context;
mod plugin_events;
mod ui;
mod utils;
use clap::Parser;
@@ -21,27 +23,68 @@ async fn main() {
dirs::data_dir().expect("Could not determine data directory").join(app_id)
});
let needs_context = matches!(
&command,
Commands::Send(_)
| Commands::Workspace(_)
| Commands::Request(_)
| Commands::Folder(_)
| Commands::Environment(_)
);
let needs_plugins = matches!(
&command,
Commands::Send(_)
| Commands::Request(cli::RequestArgs { command: RequestCommands::Send { .. } })
| Commands::Request(cli::RequestArgs {
command: RequestCommands::Send { .. } | RequestCommands::Schema { .. },
})
);
let context = CliContext::initialize(data_dir, app_id, needs_plugins).await;
let exit_code = match command {
Commands::Send(args) => {
commands::send::run(&context, args, environment.as_deref(), verbose).await
}
Commands::Workspace(args) => commands::workspace::run(&context, args),
Commands::Request(args) => {
commands::request::run(&context, args, environment.as_deref(), verbose).await
}
Commands::Folder(args) => commands::folder::run(&context, args),
Commands::Environment(args) => commands::environment::run(&context, args),
let context = if needs_context {
Some(CliContext::initialize(data_dir, app_id, needs_plugins).await)
} else {
None
};
context.shutdown().await;
let exit_code = match command {
Commands::Auth(args) => commands::auth::run(args).await,
Commands::Plugin(args) => commands::plugin::run(args).await,
Commands::Build(args) => commands::plugin::run_build(args).await,
Commands::Dev(args) => commands::plugin::run_dev(args).await,
Commands::Send(args) => {
commands::send::run(
context.as_ref().expect("context initialized for send"),
args,
environment.as_deref(),
verbose,
)
.await
}
Commands::Workspace(args) => commands::workspace::run(
context.as_ref().expect("context initialized for workspace"),
args,
),
Commands::Request(args) => {
commands::request::run(
context.as_ref().expect("context initialized for request"),
args,
environment.as_deref(),
verbose,
)
.await
}
Commands::Folder(args) => {
commands::folder::run(context.as_ref().expect("context initialized for folder"), args)
}
Commands::Environment(args) => commands::environment::run(
context.as_ref().expect("context initialized for environment"),
args,
),
};
if let Some(context) = &context {
context.shutdown().await;
}
if exit_code != 0 {
std::process::exit(exit_code);

View File

@@ -0,0 +1,212 @@
use std::sync::Arc;
use tokio::task::JoinHandle;
use yaak::plugin_events::{
GroupedPluginEvent, HostRequest, SharedPluginEventContext, handle_shared_plugin_event,
};
use yaak_models::query_manager::QueryManager;
use yaak_plugins::events::{
EmptyPayload, ErrorResponse, InternalEvent, InternalEventPayload, ListOpenWorkspacesResponse,
WorkspaceInfo,
};
use yaak_plugins::manager::PluginManager;
/// Background bridge that answers host-side requests sent by plugins while a
/// CLI command runs (the CLI has no app window to route them to).
pub struct CliPluginEventBridge {
    // Subscription ID from the plugin manager; used to unsubscribe on shutdown.
    rx_id: String,
    // The event-pump task spawned in `start`; aborted in `shutdown`.
    task: JoinHandle<()>,
}
impl CliPluginEventBridge {
    /// Subscribe to the plugin manager's event stream and spawn a task that
    /// replies to plugin-originated requests via `build_plugin_reply`.
    pub async fn start(plugin_manager: Arc<PluginManager>, query_manager: QueryManager) -> Self {
        let (rx_id, mut rx) = plugin_manager.subscribe("cli").await;
        let rx_id_for_task = rx_id.clone();
        let pm = plugin_manager.clone();

        let task = tokio::spawn(async move {
            while let Some(event) = rx.recv().await {
                // Events with reply IDs are replies to app-originated requests.
                if event.reply_id.is_some() {
                    continue;
                }

                // Drop events whose originating plugin can no longer be found.
                let Some(plugin_handle) = pm.get_plugin_by_ref_id(&event.plugin_ref_id).await
                else {
                    eprintln!(
                        "Warning: Ignoring plugin event with unknown plugin ref '{}'",
                        event.plugin_ref_id
                    );
                    continue;
                };
                let plugin_name = plugin_handle.info().name;

                // `None` means the event needs no reply (or was already logged).
                let Some(reply_payload) = build_plugin_reply(&query_manager, &event, &plugin_name)
                else {
                    continue;
                };

                if let Err(err) = pm.reply(&event, &reply_payload).await {
                    eprintln!("Warning: Failed replying to plugin event: {err}");
                }
            }
            // Channel closed: clean up our subscription from inside the task.
            pm.unsubscribe(&rx_id_for_task).await;
        });

        Self { rx_id, task }
    }

    /// Unsubscribe, abort the pump task, and wait for it to finish.
    /// Consumes `self`, so the bridge cannot be reused afterwards.
    pub async fn shutdown(self, plugin_manager: &PluginManager) {
        plugin_manager.unsubscribe(&self.rx_id).await;
        self.task.abort();
        // Awaiting the aborted handle ensures the task has fully stopped.
        let _ = self.task.await;
    }
}
/// Translate a plugin-originated event into the reply payload the CLI should
/// send back, or `None` when no reply is needed.
///
/// Shared events (key-value storage, etc.) are delegated to
/// `handle_shared_plugin_event`; remaining host requests get CLI-appropriate
/// fallbacks (stderr output, DB-backed workspace listing) or an
/// `ErrorResponse` for things only the desktop app can do.
fn build_plugin_reply(
    query_manager: &QueryManager,
    event: &InternalEvent,
    plugin_name: &str,
) -> Option<InternalEventPayload> {
    match handle_shared_plugin_event(
        query_manager,
        &event.payload,
        SharedPluginEventContext {
            plugin_name,
            workspace_id: event.context.workspace_id.as_deref(),
        },
    ) {
        // Shared handler fully dealt with the event (reply may still be None).
        GroupedPluginEvent::Handled(payload) => payload,
        // Host-level requests the shared handler cannot answer itself.
        GroupedPluginEvent::ToHandle(host_request) => match host_request {
            HostRequest::ErrorResponse(resp) => {
                // Surface plugin errors on stderr; no reply is expected.
                eprintln!("[plugin:{}] error: {}", plugin_name, resp.error);
                None
            }
            // Reload is a UI concern; nothing to do in a one-shot CLI run.
            HostRequest::ReloadResponse(_) => None,
            HostRequest::ShowToast(req) => {
                // No toast UI in the CLI — print the message instead.
                eprintln!("[plugin:{}] {}", plugin_name, req.message);
                Some(InternalEventPayload::ShowToastResponse(EmptyPayload {}))
            }
            HostRequest::ListOpenWorkspaces(_) => {
                // The CLI has no "open" windows, so report all workspaces.
                let workspaces = match query_manager.connect().list_workspaces() {
                    Ok(workspaces) => workspaces
                        .into_iter()
                        .map(|w| WorkspaceInfo { id: w.id.clone(), name: w.name, label: w.id })
                        .collect(),
                    Err(err) => {
                        return Some(InternalEventPayload::ErrorResponse(ErrorResponse {
                            error: format!("Failed to list workspaces in CLI: {err}"),
                        }));
                    }
                };
                Some(InternalEventPayload::ListOpenWorkspacesResponse(ListOpenWorkspacesResponse {
                    workspaces,
                }))
            }
            // Anything else (window info, clipboard, ...) needs the desktop app.
            req => Some(InternalEventPayload::ErrorResponse(ErrorResponse {
                error: format!("Unsupported plugin request in CLI: {}", req.type_name()),
            })),
        },
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;
    use yaak_plugins::events::{GetKeyValueRequest, PluginContext, WindowInfoRequest};

    // Stand up a throwaway SQLite-backed QueryManager; the TempDir must be
    // kept alive for the duration of the test or the DB files disappear.
    fn query_manager_for_test() -> (QueryManager, TempDir) {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let db_path = temp_dir.path().join("db.sqlite");
        let blob_path = temp_dir.path().join("blobs.sqlite");
        let (query_manager, _blob_manager, _rx) =
            yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize DB");
        (query_manager, temp_dir)
    }

    // Wrap a payload in a minimal plugin-originated InternalEvent.
    fn event(payload: InternalEventPayload) -> InternalEvent {
        InternalEvent {
            id: "evt_1".to_string(),
            plugin_ref_id: "plugin_ref_1".to_string(),
            plugin_name: "@yaak/test-plugin".to_string(),
            reply_id: None,
            context: PluginContext::new_empty(),
            payload,
        }
    }

    // Get (missing) -> Set -> Get (present) -> Delete over the shared
    // key-value store, verifying each reply payload shape and value.
    #[test]
    fn key_value_requests_round_trip() {
        let (query_manager, _temp_dir) = query_manager_for_test();
        let plugin_name = "@yaak/test-plugin";

        let get_missing = build_plugin_reply(
            &query_manager,
            &event(InternalEventPayload::GetKeyValueRequest(GetKeyValueRequest {
                key: "missing".to_string(),
            })),
            plugin_name,
        );
        match get_missing {
            Some(InternalEventPayload::GetKeyValueResponse(r)) => assert_eq!(r.value, None),
            other => panic!("unexpected payload for missing get: {other:?}"),
        }

        let set = build_plugin_reply(
            &query_manager,
            &event(InternalEventPayload::SetKeyValueRequest(
                yaak_plugins::events::SetKeyValueRequest {
                    key: "token".to_string(),
                    value: "{\"access_token\":\"abc\"}".to_string(),
                },
            )),
            plugin_name,
        );
        assert!(matches!(set, Some(InternalEventPayload::SetKeyValueResponse(_))));

        let get_present = build_plugin_reply(
            &query_manager,
            &event(InternalEventPayload::GetKeyValueRequest(GetKeyValueRequest {
                key: "token".to_string(),
            })),
            plugin_name,
        );
        match get_present {
            Some(InternalEventPayload::GetKeyValueResponse(r)) => {
                assert_eq!(r.value, Some("{\"access_token\":\"abc\"}".to_string()))
            }
            other => panic!("unexpected payload for present get: {other:?}"),
        }

        let delete = build_plugin_reply(
            &query_manager,
            &event(InternalEventPayload::DeleteKeyValueRequest(
                yaak_plugins::events::DeleteKeyValueRequest { key: "token".to_string() },
            )),
            plugin_name,
        );
        match delete {
            Some(InternalEventPayload::DeleteKeyValueResponse(r)) => assert!(r.deleted),
            other => panic!("unexpected payload for delete: {other:?}"),
        }
    }

    // Requests only the desktop app can satisfy must come back as an
    // ErrorResponse naming the offending request type.
    #[test]
    fn unsupported_request_gets_error_reply() {
        let (query_manager, _temp_dir) = query_manager_for_test();
        let payload = build_plugin_reply(
            &query_manager,
            &event(InternalEventPayload::WindowInfoRequest(WindowInfoRequest {
                label: "main".to_string(),
            })),
            "@yaak/test-plugin",
        );
        match payload {
            Some(InternalEventPayload::ErrorResponse(err)) => {
                assert!(err.error.contains("Unsupported plugin request in CLI"));
                assert!(err.error.contains("window_info_request"));
            }
            other => panic!("unexpected payload for unsupported request: {other:?}"),
        }
    }
}

View File

@@ -0,0 +1,34 @@
use console::style;
use std::io::{self, IsTerminal};
/// Print an informational message, colorized only when stdout is a TTY.
pub fn info(message: &str) {
    match io::stdout().is_terminal() {
        true => println!("{:<8} {}", style("INFO").cyan().bold(), style(message).cyan()),
        false => println!("INFO {message}"),
    }
}
/// Print a warning message, colorized only when stdout is a TTY.
pub fn warning(message: &str) {
    match io::stdout().is_terminal() {
        true => println!("{:<8} {}", style("WARNING").yellow().bold(), style(message).yellow()),
        false => println!("WARNING {message}"),
    }
}
/// Print a success message, colorized only when stdout is a TTY.
pub fn success(message: &str) {
    match io::stdout().is_terminal() {
        true => println!("{:<8} {}", style("SUCCESS").green().bold(), style(message).green()),
        false => println!("SUCCESS {message}"),
    }
}
pub fn error(message: &str) {
if io::stderr().is_terminal() {
eprintln!("{:<8} {}", style("ERROR").red().bold(), style(message).red());
} else {
eprintln!("Error: {message}");
}
}

View File

@@ -25,9 +25,9 @@ pub fn parse_optional_json(
context: &str,
) -> JsonResult<Option<Value>> {
match (json_flag, json_shorthand) {
(Some(_), Some(_)) => Err(format!(
"Cannot provide both --json and positional JSON for {context}"
)),
(Some(_), Some(_)) => {
Err(format!("Cannot provide both --json and positional JSON for {context}"))
}
(Some(raw), None) => parse_json_object(&raw, context).map(Some),
(None, Some(raw)) => parse_json_object(&raw, context).map(Some),
(None, None) => Ok(None),
@@ -39,9 +39,8 @@ pub fn parse_required_json(
json_shorthand: Option<String>,
context: &str,
) -> JsonResult<Value> {
parse_optional_json(json_flag, json_shorthand, context)?.ok_or_else(|| {
format!("Missing JSON payload for {context}. Use --json or positional JSON")
})
parse_optional_json(json_flag, json_shorthand, context)?
.ok_or_else(|| format!("Missing JSON payload for {context}. Use --json or positional JSON"))
}
pub fn require_id(payload: &Value, context: &str) -> JsonResult<String> {
@@ -60,9 +59,7 @@ pub fn validate_create_id(payload: &Value, context: &str) -> JsonResult<()> {
match id_value {
Value::String(id) if id.is_empty() => Ok(()),
_ => Err(format!(
"{context} create JSON must omit \"id\" or set it to an empty string"
)),
_ => Err(format!("{context} create JSON must omit \"id\" or set it to an empty string")),
}
}

View File

@@ -5,12 +5,12 @@ pub mod http_server;
use assert_cmd::Command;
use assert_cmd::cargo::cargo_bin_cmd;
use std::path::Path;
use yaak_models::models::{HttpRequest, Workspace};
use yaak_models::models::{Folder, GrpcRequest, HttpRequest, WebsocketRequest, Workspace};
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
pub fn cli_cmd(data_dir: &Path) -> Command {
let mut cmd = cargo_bin_cmd!("yaakcli");
let mut cmd = cargo_bin_cmd!("yaak");
cmd.arg("--data-dir").arg(data_dir);
cmd
}
@@ -60,3 +60,47 @@ pub fn seed_request(data_dir: &Path, workspace_id: &str, request_id: &str) {
.upsert_http_request(&request, &UpdateSource::Sync)
.expect("Failed to seed request");
}
/// Insert a folder row with the given IDs so tests can reference it.
pub fn seed_folder(data_dir: &Path, workspace_id: &str, folder_id: &str) {
    let model = Folder {
        id: folder_id.to_string(),
        workspace_id: workspace_id.to_string(),
        name: "Seed Folder".to_string(),
        ..Default::default()
    };
    query_manager(data_dir)
        .connect()
        .upsert_folder(&model, &UpdateSource::Sync)
        .expect("Failed to seed folder");
}
/// Insert a gRPC request row with the given IDs so tests can reference it.
pub fn seed_grpc_request(data_dir: &Path, workspace_id: &str, request_id: &str) {
    let model = GrpcRequest {
        id: request_id.to_string(),
        workspace_id: workspace_id.to_string(),
        name: "Seeded gRPC Request".to_string(),
        url: "https://example.com".to_string(),
        ..Default::default()
    };
    query_manager(data_dir)
        .connect()
        .upsert_grpc_request(&model, &UpdateSource::Sync)
        .expect("Failed to seed gRPC request");
}
/// Insert a WebSocket request row with the given IDs so tests can reference it.
pub fn seed_websocket_request(data_dir: &Path, workspace_id: &str, request_id: &str) {
    let model = WebsocketRequest {
        id: request_id.to_string(),
        workspace_id: workspace_id.to_string(),
        name: "Seeded WebSocket Request".to_string(),
        url: "wss://example.com/socket".to_string(),
        ..Default::default()
    };
    query_manager(data_dir)
        .connect()
        .upsert_websocket_request(&model, &UpdateSource::Sync)
        .expect("Failed to seed WebSocket request");
}

View File

@@ -1,7 +1,10 @@
mod common;
use common::http_server::TestHttpServer;
use common::{cli_cmd, parse_created_id, query_manager, seed_request, seed_workspace};
use common::{
cli_cmd, parse_created_id, query_manager, seed_grpc_request, seed_request,
seed_websocket_request, seed_workspace,
};
use predicates::str::contains;
use tempfile::TempDir;
use yaak_models::models::HttpResponseState;
@@ -114,8 +117,7 @@ fn create_allows_workspace_only_with_empty_defaults() {
let data_dir = temp_dir.path();
seed_workspace(data_dir, "wk_test");
let create_assert =
cli_cmd(data_dir).args(["request", "create", "wk_test"]).assert().success();
let create_assert = cli_cmd(data_dir).args(["request", "create", "wk_test"]).assert().success();
let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");
let request = query_manager(data_dir)
@@ -177,3 +179,46 @@ fn request_send_persists_response_body_and_events() {
db.list_http_response_events(&response.id).expect("Failed to load response events");
assert!(!events.is_empty(), "expected at least one persisted response event");
}
/// `request schema http` should emit pretty-printed JSON Schema containing at
/// least the object type and the `authentication` property.
#[test]
fn request_schema_http_outputs_json_schema() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();

    cli_cmd(data_dir)
        .args(["request", "schema", "http"])
        .assert()
        .success()
        .stdout(contains("\"type\": \"object\""))
        .stdout(contains("\"authentication\""));
}
/// Sending a seeded gRPC request must fail with exit code 1 and the explicit
/// not-yet-implemented message (rather than a generic lookup error).
#[test]
fn request_send_grpc_returns_explicit_nyi_error() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();
    seed_workspace(data_dir, "wk_test");
    seed_grpc_request(data_dir, "wk_test", "gr_seed_nyi");

    cli_cmd(data_dir)
        .args(["request", "send", "gr_seed_nyi"])
        .assert()
        .failure()
        .code(1)
        .stderr(contains("gRPC request send is not implemented yet in yaak-cli"));
}
/// Sending a seeded WebSocket request must fail with exit code 1 and the
/// explicit not-yet-implemented message.
#[test]
fn request_send_websocket_returns_explicit_nyi_error() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();
    seed_workspace(data_dir, "wk_test");
    seed_websocket_request(data_dir, "wk_test", "wr_seed_nyi");

    cli_cmd(data_dir)
        .args(["request", "send", "wr_seed_nyi"])
        .assert()
        .failure()
        .code(1)
        .stderr(contains("WebSocket request send is not implemented yet in yaak-cli"));
}

View File

@@ -0,0 +1,81 @@
mod common;
use common::http_server::TestHttpServer;
use common::{cli_cmd, query_manager, seed_folder, seed_workspace};
use predicates::str::contains;
use tempfile::TempDir;
use yaak_models::models::HttpRequest;
use yaak_models::util::UpdateSource;
/// `send <workspace_id>` should fan out to the workspace's HTTP requests,
/// hit the local test server, and print a success summary.
#[test]
fn top_level_send_workspace_sends_http_requests_and_prints_summary() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();
    seed_workspace(data_dir, "wk_test");
    let server = TestHttpServer::spawn_ok("workspace bulk send");

    let request = HttpRequest {
        id: "rq_workspace_send".to_string(),
        workspace_id: "wk_test".to_string(),
        name: "Workspace Send".to_string(),
        method: "GET".to_string(),
        url: server.url.clone(),
        ..Default::default()
    };
    query_manager(data_dir)
        .connect()
        .upsert_http_request(&request, &UpdateSource::Sync)
        .expect("Failed to seed workspace request");

    cli_cmd(data_dir)
        .args(["send", "wk_test"])
        .assert()
        .success()
        .stdout(contains("HTTP 200 OK"))
        .stdout(contains("workspace bulk send"))
        .stdout(contains("Send summary: 1 succeeded, 0 failed"));
}
/// `send <folder_id>` should fan out to the folder's HTTP requests, hit the
/// local test server, and print a success summary.
#[test]
fn top_level_send_folder_sends_http_requests_and_prints_summary() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();
    seed_workspace(data_dir, "wk_test");
    seed_folder(data_dir, "wk_test", "fl_test");
    let server = TestHttpServer::spawn_ok("folder bulk send");

    let request = HttpRequest {
        id: "rq_folder_send".to_string(),
        workspace_id: "wk_test".to_string(),
        folder_id: Some("fl_test".to_string()),
        name: "Folder Send".to_string(),
        method: "GET".to_string(),
        url: server.url.clone(),
        ..Default::default()
    };
    query_manager(data_dir)
        .connect()
        .upsert_http_request(&request, &UpdateSource::Sync)
        .expect("Failed to seed folder request");

    cli_cmd(data_dir)
        .args(["send", "fl_test"])
        .assert()
        .success()
        .stdout(contains("HTTP 200 OK"))
        .stdout(contains("folder bulk send"))
        .stdout(contains("Send summary: 1 succeeded, 0 failed"));
}
/// An ID that matches no request, folder, or workspace must fail with exit
/// code 1 and an error naming all three resolution attempts.
#[test]
fn top_level_send_unknown_id_fails_with_clear_error() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();

    cli_cmd(data_dir)
        .args(["send", "does_not_exist"])
        .assert()
        .failure()
        .code(1)
        .stderr(contains("Could not resolve ID 'does_not_exist' as request, folder, or workspace"));
}

View File

@@ -15,18 +15,20 @@ use std::sync::Arc;
use tauri::{AppHandle, Emitter, Listener, Manager, Runtime};
use tauri_plugin_clipboard_manager::ClipboardExt;
use tauri_plugin_opener::OpenerExt;
use yaak::plugin_events::{
GroupedPluginEvent, HostRequest, SharedPluginEventContext, handle_shared_plugin_event,
};
use yaak_crypto::manager::EncryptionManager;
use yaak_models::models::{AnyModel, HttpResponse, Plugin};
use yaak_models::queries::any_request::AnyRequest;
use yaak_models::util::UpdateSource;
use yaak_plugins::error::Error::PluginErr;
use yaak_plugins::events::{
Color, DeleteKeyValueResponse, EmptyPayload, ErrorResponse, FindHttpResponsesResponse,
GetCookieValueResponse, GetHttpRequestByIdResponse, GetKeyValueResponse, Icon, InternalEvent,
InternalEventPayload, ListCookieNamesResponse, ListHttpRequestsResponse,
ListWorkspacesResponse, RenderGrpcRequestResponse, RenderHttpRequestResponse,
SendHttpRequestResponse, SetKeyValueResponse, ShowToastRequest, TemplateRenderResponse,
WindowInfoResponse, WindowNavigateEvent, WorkspaceInfo,
Color, EmptyPayload, ErrorResponse, FindHttpResponsesResponse, GetCookieValueResponse, Icon,
InternalEvent, InternalEventPayload, ListCookieNamesResponse, ListOpenWorkspacesResponse,
RenderGrpcRequestResponse, RenderHttpRequestResponse, SendHttpRequestResponse,
ShowToastRequest, TemplateRenderResponse, WindowInfoResponse, WindowNavigateEvent,
WorkspaceInfo,
};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::plugin_handle::PluginHandle;
@@ -41,30 +43,112 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
) -> Result<Option<InternalEventPayload>> {
// log::debug!("Got event to app {event:?}");
let plugin_context = event.context.to_owned();
match event.clone().payload {
InternalEventPayload::CopyTextRequest(req) => {
let plugin_name = plugin_handle.info().name;
let fallback_workspace_id = plugin_context.workspace_id.clone().or_else(|| {
plugin_context
.label
.as_ref()
.and_then(|label| app_handle.get_webview_window(label))
.and_then(|window| workspace_from_window(&window).map(|workspace| workspace.id))
});
match handle_shared_plugin_event(
app_handle.db_manager().inner(),
&event.payload,
SharedPluginEventContext {
plugin_name: &plugin_name,
workspace_id: fallback_workspace_id.as_deref(),
},
) {
GroupedPluginEvent::Handled(payload) => Ok(payload),
GroupedPluginEvent::ToHandle(host_request) => {
handle_host_plugin_request(
app_handle,
event,
plugin_handle,
&plugin_context,
host_request,
)
.await
}
}
}
async fn handle_host_plugin_request<R: Runtime>(
app_handle: &AppHandle<R>,
event: &InternalEvent,
plugin_handle: &PluginHandle,
plugin_context: &yaak_plugins::events::PluginContext,
host_request: HostRequest<'_>,
) -> Result<Option<InternalEventPayload>> {
match host_request {
HostRequest::ErrorResponse(resp) => {
error!("Plugin error: {}: {:?}", resp.error, resp);
let toast_event = plugin_handle.build_event_to_send(
plugin_context,
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
message: format!(
"Plugin error from {}: {}",
plugin_handle.info().name,
resp.error
),
color: Some(Color::Danger),
timeout: Some(30000),
..Default::default()
}),
None,
);
Box::pin(handle_plugin_event(app_handle, &toast_event, plugin_handle)).await
}
HostRequest::ReloadResponse(req) => {
let plugins = app_handle.db().list_plugins()?;
for plugin in plugins {
if plugin.directory != plugin_handle.dir {
continue;
}
let new_plugin = Plugin { updated_at: Utc::now().naive_utc(), ..plugin };
app_handle.db().upsert_plugin(&new_plugin, &UpdateSource::Plugin)?;
}
if !req.silent {
let info = plugin_handle.info();
let toast_event = plugin_handle.build_event_to_send(
plugin_context,
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
message: format!("Reloaded plugin {}@{}", info.name, info.version),
icon: Some(Icon::Info),
timeout: Some(3000),
..Default::default()
}),
None,
);
Box::pin(handle_plugin_event(app_handle, &toast_event, plugin_handle)).await
} else {
Ok(None)
}
}
HostRequest::CopyText(req) => {
app_handle.clipboard().write_text(req.text.as_str())?;
Ok(Some(InternalEventPayload::CopyTextResponse(EmptyPayload {})))
}
InternalEventPayload::ShowToastRequest(req) => {
match plugin_context.label {
HostRequest::ShowToast(req) => {
match &plugin_context.label {
Some(label) => app_handle.emit_to(label, "show_toast", req)?,
None => app_handle.emit("show_toast", req)?,
};
Ok(Some(InternalEventPayload::ShowToastResponse(EmptyPayload {})))
}
InternalEventPayload::PromptTextRequest(_) => {
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
HostRequest::PromptText(_) => {
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
Ok(call_frontend(&window, event).await)
}
InternalEventPayload::PromptFormRequest(_) => {
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
HostRequest::PromptForm(_) => {
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
if event.reply_id.is_some() {
// Follow-up update from plugin runtime with resolved inputs — forward to frontend
window.emit_to(window.label(), "plugin_event", event.clone())?;
Ok(None)
} else {
// Initial request — set up bidirectional communication
window.emit_to(window.label(), "plugin_event", event.clone()).unwrap();
let event_id = event.id.clone();
@@ -72,17 +156,14 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let plugin_context = plugin_context.clone();
let window = window.clone();
// Spawn async task to handle bidirectional form communication
tauri::async_runtime::spawn(async move {
let (tx, mut rx) = tokio::sync::mpsc::channel::<InternalEvent>(128);
// Listen for replies from the frontend
let listener_id = window.listen(event_id, move |ev: tauri::Event| {
let resp: InternalEvent = serde_json::from_str(ev.payload()).unwrap();
let _ = tx.try_send(resp);
});
// Forward each reply to the plugin runtime
while let Some(resp) = rx.recv().await {
let is_done = matches!(
&resp.payload,
@@ -109,7 +190,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
Ok(None)
}
}
InternalEventPayload::FindHttpResponsesRequest(req) => {
HostRequest::FindHttpResponses(req) => {
let http_responses = app_handle
.db()
.list_http_responses_for_request(&req.request_id, req.limit.map(|l| l as u64))
@@ -118,32 +199,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
http_responses,
})))
}
InternalEventPayload::ListHttpRequestsRequest(req) => {
let w = get_window_from_plugin_context(app_handle, &plugin_context)?;
let workspace = workspace_from_window(&w)
.ok_or(PluginErr("Failed to get workspace from window".into()))?;
let http_requests = if let Some(folder_id) = req.folder_id {
app_handle.db().list_http_requests_for_folder_recursive(&folder_id)?
} else {
app_handle.db().list_http_requests(&workspace.id)?
};
Ok(Some(InternalEventPayload::ListHttpRequestsResponse(ListHttpRequestsResponse {
http_requests,
})))
}
InternalEventPayload::ListFoldersRequest(_req) => {
let w = get_window_from_plugin_context(app_handle, &plugin_context)?;
let workspace = workspace_from_window(&w)
.ok_or(PluginErr("Failed to get workspace from window".into()))?;
let folders = app_handle.db().list_folders(&workspace.id)?;
Ok(Some(InternalEventPayload::ListFoldersResponse(
yaak_plugins::events::ListFoldersResponse { folders },
)))
}
InternalEventPayload::UpsertModelRequest(req) => {
HostRequest::UpsertModel(req) => {
use AnyModel::*;
let model = match &req.model {
HttpRequest(m) => {
@@ -171,7 +227,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
yaak_plugins::events::UpsertModelResponse { model },
)))
}
InternalEventPayload::DeleteModelRequest(req) => {
HostRequest::DeleteModel(req) => {
let model = match req.model.as_str() {
"http_request" => AnyModel::HttpRequest(
app_handle.db().delete_http_request_by_id(&req.id, &UpdateSource::Plugin)?,
@@ -199,14 +255,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
yaak_plugins::events::DeleteModelResponse { model },
)))
}
InternalEventPayload::GetHttpRequestByIdRequest(req) => {
let http_request = app_handle.db().get_http_request(&req.id).ok();
Ok(Some(InternalEventPayload::GetHttpRequestByIdResponse(GetHttpRequestByIdResponse {
http_request,
})))
}
InternalEventPayload::RenderGrpcRequestRequest(req) => {
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
HostRequest::RenderGrpcRequest(req) => {
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
let workspace =
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
@@ -221,8 +271,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
plugin_context,
req.purpose.clone(),
);
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let grpc_request =
@@ -231,8 +281,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
grpc_request,
})))
}
InternalEventPayload::RenderHttpRequestRequest(req) => {
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
HostRequest::RenderHttpRequest(req) => {
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
let workspace =
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
@@ -247,18 +297,18 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
plugin_context,
req.purpose.clone(),
);
let opt = &RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let http_request =
render_http_request(&req.http_request, environment_chain, &cb, &opt).await?;
render_http_request(&req.http_request, environment_chain, &cb, opt).await?;
Ok(Some(InternalEventPayload::RenderHttpRequestResponse(RenderHttpRequestResponse {
http_request,
})))
}
InternalEventPayload::TemplateRenderRequest(req) => {
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
HostRequest::TemplateRender(req) => {
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
let workspace =
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
@@ -283,65 +333,16 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
plugin_context,
req.purpose.clone(),
);
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let data = render_json_value(req.data, environment_chain, &cb, &opt).await?;
let data = render_json_value(req.data.clone(), environment_chain, &cb, &opt).await?;
Ok(Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data })))
}
InternalEventPayload::ErrorResponse(resp) => {
error!("Plugin error: {}: {:?}", resp.error, resp);
let toast_event = plugin_handle.build_event_to_send(
&plugin_context,
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
message: format!(
"Plugin error from {}: {}",
plugin_handle.info().name,
resp.error
),
color: Some(Color::Danger),
timeout: Some(30000),
..Default::default()
}),
None,
);
Box::pin(handle_plugin_event(app_handle, &toast_event, plugin_handle)).await
}
InternalEventPayload::ReloadResponse(req) => {
let plugins = app_handle.db().list_plugins()?;
for plugin in plugins {
if plugin.directory != plugin_handle.dir {
continue;
}
let new_plugin = Plugin {
updated_at: Utc::now().naive_utc(), // TODO: Add reloaded_at field to use instead
..plugin
};
app_handle.db().upsert_plugin(&new_plugin, &UpdateSource::Plugin)?;
}
if !req.silent {
let info = plugin_handle.info();
let toast_event = plugin_handle.build_event_to_send(
&plugin_context,
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
message: format!("Reloaded plugin {}@{}", info.name, info.version),
icon: Some(Icon::Info),
timeout: Some(3000),
..Default::default()
}),
None,
);
Box::pin(handle_plugin_event(app_handle, &toast_event, plugin_handle)).await
} else {
Ok(None)
}
}
InternalEventPayload::SendHttpRequestRequest(req) => {
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
let mut http_request = req.http_request;
HostRequest::SendHttpRequest(req) => {
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
let mut http_request = req.http_request.clone();
let workspace =
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
let cookie_jar = cookie_jar_from_window(&window);
@@ -372,8 +373,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
&http_response,
environment,
cookie_jar,
&mut tokio::sync::watch::channel(false).1, // No-op cancel channel
&plugin_context,
&mut tokio::sync::watch::channel(false).1,
plugin_context,
)
.await?;
@@ -381,7 +382,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
http_response,
})))
}
InternalEventPayload::OpenWindowRequest(req) => {
HostRequest::OpenWindow(req) => {
let (navigation_tx, mut navigation_rx) = tokio::sync::mpsc::channel(128);
let (close_tx, mut close_rx) = tokio::sync::mpsc::channel(128);
let win_config = CreateWindowConfig {
@@ -396,7 +397,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
};
if let Err(e) = create_window(app_handle, win_config) {
let error_event = plugin_handle.build_event_to_send(
&plugin_context,
plugin_context,
&InternalEventPayload::ErrorResponse(ErrorResponse {
error: format!("Failed to create window: {:?}", e),
}),
@@ -414,7 +415,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
while let Some(url) = navigation_rx.recv().await {
let url = url.to_string();
let event_to_send = plugin_handle.build_event_to_send(
&plugin_context, // NOTE: Sending existing context on purpose here
&plugin_context,
&InternalEventPayload::WindowNavigateEvent(WindowNavigateEvent { url }),
Some(event_id.clone()),
);
@@ -428,7 +429,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let plugin_handle = plugin_handle.clone();
let plugin_context = plugin_context.clone();
tauri::async_runtime::spawn(async move {
while let Some(_) = close_rx.recv().await {
while close_rx.recv().await.is_some() {
let event_to_send = plugin_handle.build_event_to_send(
&plugin_context,
&InternalEventPayload::WindowCloseEvent,
@@ -441,35 +442,33 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
Ok(None)
}
InternalEventPayload::CloseWindowRequest(req) => {
HostRequest::CloseWindow(req) => {
if let Some(window) = app_handle.webview_windows().get(&req.label) {
window.close()?;
}
Ok(None)
}
InternalEventPayload::OpenExternalUrlRequest(req) => {
HostRequest::OpenExternalUrl(req) => {
app_handle.opener().open_url(&req.url, None::<&str>)?;
Ok(Some(InternalEventPayload::OpenExternalUrlResponse(EmptyPayload {})))
}
InternalEventPayload::SetKeyValueRequest(req) => {
let name = plugin_handle.info().name;
app_handle.db().set_plugin_key_value(&name, &req.key, &req.value);
Ok(Some(InternalEventPayload::SetKeyValueResponse(SetKeyValueResponse {})))
}
InternalEventPayload::GetKeyValueRequest(req) => {
let name = plugin_handle.info().name;
let value = app_handle.db().get_plugin_key_value(&name, &req.key).map(|v| v.value);
Ok(Some(InternalEventPayload::GetKeyValueResponse(GetKeyValueResponse { value })))
}
InternalEventPayload::DeleteKeyValueRequest(req) => {
let name = plugin_handle.info().name;
let deleted = app_handle.db().delete_plugin_key_value(&name, &req.key)?;
Ok(Some(InternalEventPayload::DeleteKeyValueResponse(DeleteKeyValueResponse {
deleted,
HostRequest::ListOpenWorkspaces(_) => {
let mut workspaces = Vec::new();
for (_, window) in app_handle.webview_windows() {
if let Some(workspace) = workspace_from_window(&window) {
workspaces.push(WorkspaceInfo {
id: workspace.id.clone(),
name: workspace.name.clone(),
label: window.label().to_string(),
});
}
}
Ok(Some(InternalEventPayload::ListOpenWorkspacesResponse(ListOpenWorkspacesResponse {
workspaces,
})))
}
InternalEventPayload::ListCookieNamesRequest(_req) => {
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
HostRequest::ListCookieNames(_) => {
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
let names = match cookie_jar_from_window(&window) {
None => Vec::new(),
Some(j) => j
@@ -482,8 +481,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
names,
})))
}
InternalEventPayload::GetCookieValueRequest(req) => {
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
HostRequest::GetCookieValue(req) => {
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
let value = match cookie_jar_from_window(&window) {
None => None,
Some(j) => j.cookies.into_iter().find_map(|c| match Cookie::parse(c.raw_cookie) {
@@ -495,12 +494,11 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
};
Ok(Some(InternalEventPayload::GetCookieValueResponse(GetCookieValueResponse { value })))
}
InternalEventPayload::WindowInfoRequest(req) => {
HostRequest::WindowInfo(req) => {
let w = app_handle
.get_webview_window(&req.label)
.ok_or(PluginErr(format!("Failed to find window for {}", req.label)))?;
// Actually look up the data so we never return an invalid ID
let environment_id = environment_from_window(&w).map(|m| m.id);
let workspace_id = workspace_from_window(&w).map(|m| m.id);
let request_id =
@@ -518,25 +516,13 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
environment_id,
})))
}
InternalEventPayload::ListWorkspacesRequest(_) => {
let mut workspaces = Vec::new();
for (_, window) in app_handle.webview_windows() {
if let Some(workspace) = workspace_from_window(&window) {
workspaces.push(WorkspaceInfo {
id: workspace.id.clone(),
name: workspace.name.clone(),
label: window.label().to_string(),
});
}
}
Ok(Some(InternalEventPayload::ListWorkspacesResponse(ListWorkspacesResponse {
workspaces,
HostRequest::OtherRequest(req) => {
Ok(Some(InternalEventPayload::ErrorResponse(ErrorResponse {
error: format!(
"Unsupported plugin request in app host handler: {}",
req.type_name()
),
})))
}
_ => Ok(None),
}
}

View File

@@ -23,12 +23,12 @@ use tokio::sync::Mutex;
use ts_rs::TS;
use yaak_api::yaak_api_client;
use yaak_models::models::Plugin;
use yaak_models::util::UpdateSource;
use yaak_plugins::api::{
PluginNameVersion, PluginSearchResponse, PluginUpdatesResponse, check_plugin_updates,
search_plugins,
};
use yaak_plugins::events::{Color, Icon, PluginContext, ShowToastRequest};
use yaak_plugins::bootstrap;
use yaak_plugins::events::PluginContext;
use yaak_plugins::install::{delete_and_uninstall, download_and_install};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::plugin_meta::get_plugin_meta;
@@ -268,62 +268,23 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
.join("index.cjs");
let dev_mode = is_dev();
let query_manager =
app_handle.state::<yaak_models::query_manager::QueryManager>().inner().clone();
// Create plugin manager asynchronously
let app_handle_clone = app_handle.clone();
tauri::async_runtime::block_on(async move {
let manager = PluginManager::new(
let manager = bootstrap::create_and_initialize_manager(
vendored_plugin_dir,
installed_plugin_dir,
node_bin_path,
plugin_runtime_main,
&query_manager,
&PluginContext::new_empty(),
dev_mode,
)
.await;
// Initialize all plugins after manager is created
let bundled_dirs = manager
.list_bundled_plugin_dirs()
.await
.expect("Failed to list bundled plugins");
// Ensure all bundled plugins make it into the database
let db = app_handle_clone.db();
for dir in &bundled_dirs {
if db.get_plugin_by_directory(dir).is_none() {
db.upsert_plugin(
&Plugin {
directory: dir.clone(),
enabled: true,
url: None,
..Default::default()
},
&UpdateSource::Background,
)
.expect("Failed to upsert bundled plugin");
}
}
// Get all plugins from database and initialize
let plugins = db.list_plugins().expect("Failed to list plugins from database");
drop(db); // Explicitly drop the connection before await
let errors =
manager.initialize_all_plugins(plugins, &PluginContext::new_empty()).await;
// Show toast for any failed plugins
for (plugin_dir, error_msg) in errors {
let plugin_name = plugin_dir.split('/').last().unwrap_or(&plugin_dir);
let toast = ShowToastRequest {
message: format!("Failed to start plugin '{}': {}", plugin_name, error_msg),
color: Some(Color::Danger),
icon: Some(Icon::AlertTriangle),
timeout: Some(10000),
};
if let Err(emit_err) = app_handle_clone.emit("show_toast", toast) {
error!("Failed to emit toast for plugin error: {emit_err:?}");
}
}
.await
.expect("Failed to initialize plugins");
app_handle_clone.manage(manager);
});

View File

@@ -55,6 +55,7 @@ mod tests {
let mut out = Vec::new();
super::collect_any_types(json, &mut out);
out.sort();
assert_eq!(out, vec!["foo.bar", "mount_source.MountSourceRBDVolume"]);
}
}

View File

@@ -17,6 +17,7 @@ sea-query = { version = "0.32.1", features = ["with-chrono", "attr"] }
sea-query-rusqlite = { version = "0.7.0", features = ["with-chrono"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
schemars = { workspace = true }
sha2 = { workspace = true }
thiserror = { workspace = true }
ts-rs = { workspace = true, features = ["chrono-impl", "serde-json-impl"] }

View File

@@ -6,6 +6,7 @@ use crate::models::HttpRequestIden::{
use crate::util::{UpdateSource, generate_prefixed_id};
use chrono::{NaiveDateTime, Utc};
use rusqlite::Row;
use schemars::JsonSchema;
use sea_query::Order::Desc;
use sea_query::{IntoColumnRef, IntoIden, IntoTableRef, Order, SimpleExpr, enum_def};
use serde::{Deserialize, Deserializer, Serialize};
@@ -824,7 +825,7 @@ impl UpsertModelInfo for Folder {
}
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, JsonSchema, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
pub struct HttpRequestHeader {
@@ -837,7 +838,7 @@ pub struct HttpRequestHeader {
pub id: Option<String>,
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, JsonSchema, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
pub struct HttpUrlParameter {
@@ -850,7 +851,7 @@ pub struct HttpUrlParameter {
pub id: Option<String>,
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, JsonSchema, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
#[enum_def(table_name = "http_requests")]
@@ -1095,7 +1096,7 @@ impl Default for WebsocketMessageType {
}
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, JsonSchema, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
#[enum_def(table_name = "websocket_requests")]
@@ -1704,7 +1705,7 @@ impl UpsertModelInfo for GraphQlIntrospection {
}
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, JsonSchema, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
#[enum_def(table_name = "grpc_requests")]

View File

@@ -1,7 +1,7 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{GrpcRequest, GrpcRequestIden, HttpRequestHeader};
use crate::models::{Folder, FolderIden, GrpcRequest, GrpcRequestIden, HttpRequestHeader};
use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
@@ -15,6 +15,20 @@ impl<'a> DbContext<'a> {
self.find_many(GrpcRequestIden::WorkspaceId, workspace_id, None)
}
/// Recursively collect every gRPC request under `folder_id`, including requests
/// in nested sub-folders.
///
/// Ordering: contents of sub-folders (depth-first) come before the requests
/// that live directly in this folder.
pub fn list_grpc_requests_for_folder_recursive(
&self,
folder_id: &str,
) -> Result<Vec<GrpcRequest>> {
let mut children = Vec::new();
// Descend into each direct sub-folder first.
for folder in self.find_many::<Folder>(FolderIden::FolderId, folder_id, None)? {
children.extend(self.list_grpc_requests_for_folder_recursive(&folder.id)?);
}
// Then append the requests stored directly in this folder.
for request in self.find_many::<GrpcRequest>(GrpcRequestIden::FolderId, folder_id, None)? {
children.push(request);
}
Ok(children)
}
pub fn delete_grpc_request(
&self,
m: &GrpcRequest,

View File

@@ -1,7 +1,9 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{HttpRequestHeader, WebsocketRequest, WebsocketRequestIden};
use crate::models::{
Folder, FolderIden, HttpRequestHeader, WebsocketRequest, WebsocketRequestIden,
};
use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
@@ -15,6 +17,22 @@ impl<'a> DbContext<'a> {
self.find_many(WebsocketRequestIden::WorkspaceId, workspace_id, None)
}
/// Recursively collect every WebSocket request under `folder_id`, including
/// requests in nested sub-folders.
///
/// Ordering: contents of sub-folders (depth-first) come before the requests
/// that live directly in this folder.
pub fn list_websocket_requests_for_folder_recursive(
&self,
folder_id: &str,
) -> Result<Vec<WebsocketRequest>> {
let mut children = Vec::new();
// Descend into each direct sub-folder first.
for folder in self.find_many::<Folder>(FolderIden::FolderId, folder_id, None)? {
children.extend(self.list_websocket_requests_for_folder_recursive(&folder.id)?);
}
// Then append the requests stored directly in this folder.
for request in
self.find_many::<WebsocketRequest>(WebsocketRequestIden::FolderId, folder_id, None)?
{
children.push(request);
}
Ok(children)
}
pub fn delete_websocket_request(
&self,
websocket_request: &WebsocketRequest,

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,66 @@
use crate::error::{Error, Result};
use crate::events::PluginContext;
use crate::manager::PluginManager;
use std::path::PathBuf;
use std::sync::Arc;
use yaak_models::models::Plugin;
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
/// Create a plugin manager and initialize all registered plugins.
///
/// This performs:
/// 1. Plugin runtime startup (`PluginManager::new`)
/// 2. Bundled plugin registration in DB (if missing)
/// 3. Plugin initialization from DB
pub async fn create_and_initialize_manager(
vendored_plugin_dir: PathBuf,
installed_plugin_dir: PathBuf,
node_bin_path: PathBuf,
plugin_runtime_main: PathBuf,
query_manager: &QueryManager,
plugin_context: &PluginContext,
dev_mode: bool,
) -> Result<Arc<PluginManager>> {
let plugin_manager = Arc::new(
PluginManager::new(
vendored_plugin_dir,
installed_plugin_dir,
node_bin_path,
plugin_runtime_main,
dev_mode,
)
.await,
);
let bundled_dirs = plugin_manager.list_bundled_plugin_dirs().await?;
let db = query_manager.connect();
for dir in bundled_dirs {
if db.get_plugin_by_directory(&dir).is_none() {
db.upsert_plugin(
&Plugin {
directory: dir,
enabled: true,
url: None,
..Default::default()
},
&UpdateSource::Background,
)?;
}
}
let plugins = db.list_plugins()?;
drop(db);
let init_errors = plugin_manager.initialize_all_plugins(plugins, plugin_context).await;
if !init_errors.is_empty() {
let joined = init_errors
.into_iter()
.map(|(dir, err)| format!("{dir}: {err}"))
.collect::<Vec<_>>()
.join("; ");
return Err(Error::PluginErr(format!("Failed to initialize plugin(s): {joined}")));
}
Ok(plugin_manager)
}

View File

@@ -163,8 +163,8 @@ pub enum InternalEventPayload {
WindowInfoRequest(WindowInfoRequest),
WindowInfoResponse(WindowInfoResponse),
ListWorkspacesRequest(ListWorkspacesRequest),
ListWorkspacesResponse(ListWorkspacesResponse),
ListOpenWorkspacesRequest(ListOpenWorkspacesRequest),
ListOpenWorkspacesResponse(ListOpenWorkspacesResponse),
GetHttpRequestByIdRequest(GetHttpRequestByIdRequest),
GetHttpRequestByIdResponse(GetHttpRequestByIdResponse),
@@ -631,12 +631,12 @@ pub struct WindowInfoResponse {
#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_events.ts")]
pub struct ListWorkspacesRequest {}
pub struct ListOpenWorkspacesRequest {}
#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_events.ts")]
pub struct ListWorkspacesResponse {
pub struct ListOpenWorkspacesResponse {
pub workspaces: Vec<WorkspaceInfo>,
}

View File

@@ -7,6 +7,7 @@
//! by yaak-app's plugins_ext module.
pub mod api;
pub mod bootstrap;
mod checksum;
pub mod error;
pub mod events;

View File

@@ -17,3 +17,6 @@ yaak-models = { workspace = true }
yaak-plugins = { workspace = true }
yaak-templates = { workspace = true }
yaak-tls = { workspace = true }
[dev-dependencies]
tempfile = "3"

View File

@@ -1,4 +1,5 @@
pub mod error;
pub mod plugin_events;
pub mod render;
pub mod send;

View File

@@ -0,0 +1,416 @@
use yaak_models::query_manager::QueryManager;
use yaak_plugins::events::{
CloseWindowRequest, CopyTextRequest, DeleteKeyValueRequest, DeleteKeyValueResponse,
DeleteModelRequest, ErrorResponse, FindHttpResponsesRequest, GetCookieValueRequest,
GetHttpRequestByIdRequest, GetHttpRequestByIdResponse, GetKeyValueRequest, GetKeyValueResponse,
InternalEventPayload, ListCookieNamesRequest, ListFoldersRequest, ListFoldersResponse,
ListHttpRequestsRequest, ListHttpRequestsResponse, ListOpenWorkspacesRequest,
OpenExternalUrlRequest, OpenWindowRequest, PromptFormRequest, PromptTextRequest,
ReloadResponse, RenderGrpcRequestRequest, RenderHttpRequestRequest, SendHttpRequestRequest,
SetKeyValueRequest, ShowToastRequest, TemplateRenderRequest, UpsertModelRequest,
WindowInfoRequest,
};
/// Context supplied by a host when routing a plugin event through the shared
/// event handler.
pub struct SharedPluginEventContext<'a> {
/// Name of the plugin that emitted the event.
pub plugin_name: &'a str,
/// Workspace ID resolved by the host for this event, if any.
pub workspace_id: Option<&'a str>,
}
/// Outcome of routing a plugin event through the shared handler.
#[derive(Debug)]
pub enum GroupedPluginEvent<'a> {
/// The shared handler fully processed the event; the payload (if any) is the reply.
Handled(Option<InternalEventPayload>),
/// The event still needs host-specific handling (see `HostRequest`).
ToHandle(HostRequest<'a>),
}
/// Classification of an incoming plugin event payload: handled by shared code,
/// delegated to the host, or ignored entirely.
#[derive(Debug)]
pub enum GroupedPluginRequest<'a> {
/// Handled host-agnostically (DB-backed operations).
Shared(SharedRequest<'a>),
/// Requires host-specific capabilities.
Host(HostRequest<'a>),
/// Payload with no handling in this layer.
Ignore,
}
/// Plugin requests that can be satisfied host-agnostically (backed by the
/// shared database). Variants borrow their payload from the originating
/// `InternalEventPayload`.
#[derive(Debug)]
pub enum SharedRequest<'a> {
GetKeyValue(&'a GetKeyValueRequest),
SetKeyValue(&'a SetKeyValueRequest),
DeleteKeyValue(&'a DeleteKeyValueRequest),
GetHttpRequestById(&'a GetHttpRequestByIdRequest),
ListFolders(&'a ListFoldersRequest),
ListHttpRequests(&'a ListHttpRequestsRequest),
}
/// Plugin requests that need host-specific capabilities (windows, clipboard,
/// template rendering, cookie jars, ...) and so cannot be handled by the
/// shared handler. Variants borrow their payload from the originating
/// `InternalEventPayload`.
#[derive(Debug)]
pub enum HostRequest<'a> {
ShowToast(&'a ShowToastRequest),
CopyText(&'a CopyTextRequest),
PromptText(&'a PromptTextRequest),
PromptForm(&'a PromptFormRequest),
FindHttpResponses(&'a FindHttpResponsesRequest),
UpsertModel(&'a UpsertModelRequest),
DeleteModel(&'a DeleteModelRequest),
RenderGrpcRequest(&'a RenderGrpcRequestRequest),
RenderHttpRequest(&'a RenderHttpRequestRequest),
TemplateRender(&'a TemplateRenderRequest),
SendHttpRequest(&'a SendHttpRequestRequest),
OpenWindow(&'a OpenWindowRequest),
CloseWindow(&'a CloseWindowRequest),
OpenExternalUrl(&'a OpenExternalUrlRequest),
ListOpenWorkspaces(&'a ListOpenWorkspacesRequest),
ListCookieNames(&'a ListCookieNamesRequest),
GetCookieValue(&'a GetCookieValueRequest),
WindowInfo(&'a WindowInfoRequest),
// Error/reload replies are routed to the host so it can log/toast them.
ErrorResponse(&'a ErrorResponse),
ReloadResponse(&'a ReloadResponse),
/// Catch-all for payloads with no dedicated variant.
OtherRequest(&'a InternalEventPayload),
}
impl HostRequest<'_> {
    /// Snake_case identifier for this variant, suitable for log and error
    /// messages. `OtherRequest` defers to the wrapped payload's own name.
    pub fn type_name(&self) -> String {
        let name = match self {
            HostRequest::ShowToast(_) => "show_toast_request",
            HostRequest::CopyText(_) => "copy_text_request",
            HostRequest::PromptText(_) => "prompt_text_request",
            HostRequest::PromptForm(_) => "prompt_form_request",
            HostRequest::FindHttpResponses(_) => "find_http_responses_request",
            HostRequest::UpsertModel(_) => "upsert_model_request",
            HostRequest::DeleteModel(_) => "delete_model_request",
            HostRequest::RenderGrpcRequest(_) => "render_grpc_request_request",
            HostRequest::RenderHttpRequest(_) => "render_http_request_request",
            HostRequest::TemplateRender(_) => "template_render_request",
            HostRequest::SendHttpRequest(_) => "send_http_request_request",
            HostRequest::OpenWindow(_) => "open_window_request",
            HostRequest::CloseWindow(_) => "close_window_request",
            HostRequest::OpenExternalUrl(_) => "open_external_url_request",
            HostRequest::ListOpenWorkspaces(_) => "list_open_workspaces_request",
            HostRequest::ListCookieNames(_) => "list_cookie_names_request",
            HostRequest::GetCookieValue(_) => "get_cookie_value_request",
            HostRequest::WindowInfo(_) => "window_info_request",
            HostRequest::ErrorResponse(_) => "error_response",
            HostRequest::ReloadResponse(_) => "reload_response",
            HostRequest::OtherRequest(payload) => return payload.type_name(),
        };
        name.to_string()
    }
}
impl<'a> From<&'a InternalEventPayload> for GroupedPluginRequest<'a> {
fn from(payload: &'a InternalEventPayload) -> Self {
match payload {
InternalEventPayload::GetKeyValueRequest(req) => {
GroupedPluginRequest::Shared(SharedRequest::GetKeyValue(req))
}
InternalEventPayload::SetKeyValueRequest(req) => {
GroupedPluginRequest::Shared(SharedRequest::SetKeyValue(req))
}
InternalEventPayload::DeleteKeyValueRequest(req) => {
GroupedPluginRequest::Shared(SharedRequest::DeleteKeyValue(req))
}
InternalEventPayload::GetHttpRequestByIdRequest(req) => {
GroupedPluginRequest::Shared(SharedRequest::GetHttpRequestById(req))
}
InternalEventPayload::ErrorResponse(resp) => {
GroupedPluginRequest::Host(HostRequest::ErrorResponse(resp))
}
InternalEventPayload::ReloadResponse(req) => {
GroupedPluginRequest::Host(HostRequest::ReloadResponse(req))
}
InternalEventPayload::ListOpenWorkspacesRequest(req) => {
GroupedPluginRequest::Host(HostRequest::ListOpenWorkspaces(req))
}
InternalEventPayload::ListFoldersRequest(req) => {
GroupedPluginRequest::Shared(SharedRequest::ListFolders(req))
}
InternalEventPayload::ListHttpRequestsRequest(req) => {
GroupedPluginRequest::Shared(SharedRequest::ListHttpRequests(req))
}
InternalEventPayload::ShowToastRequest(req) => {
GroupedPluginRequest::Host(HostRequest::ShowToast(req))
}
InternalEventPayload::CopyTextRequest(req) => {
GroupedPluginRequest::Host(HostRequest::CopyText(req))
}
InternalEventPayload::PromptTextRequest(req) => {
GroupedPluginRequest::Host(HostRequest::PromptText(req))
}
InternalEventPayload::PromptFormRequest(req) => {
GroupedPluginRequest::Host(HostRequest::PromptForm(req))
}
InternalEventPayload::FindHttpResponsesRequest(req) => {
GroupedPluginRequest::Host(HostRequest::FindHttpResponses(req))
}
InternalEventPayload::UpsertModelRequest(req) => {
GroupedPluginRequest::Host(HostRequest::UpsertModel(req))
}
InternalEventPayload::DeleteModelRequest(req) => {
GroupedPluginRequest::Host(HostRequest::DeleteModel(req))
}
InternalEventPayload::RenderGrpcRequestRequest(req) => {
GroupedPluginRequest::Host(HostRequest::RenderGrpcRequest(req))
}
InternalEventPayload::RenderHttpRequestRequest(req) => {
GroupedPluginRequest::Host(HostRequest::RenderHttpRequest(req))
}
InternalEventPayload::TemplateRenderRequest(req) => {
GroupedPluginRequest::Host(HostRequest::TemplateRender(req))
}
InternalEventPayload::SendHttpRequestRequest(req) => {
GroupedPluginRequest::Host(HostRequest::SendHttpRequest(req))
}
InternalEventPayload::OpenWindowRequest(req) => {
GroupedPluginRequest::Host(HostRequest::OpenWindow(req))
}
InternalEventPayload::CloseWindowRequest(req) => {
GroupedPluginRequest::Host(HostRequest::CloseWindow(req))
}
InternalEventPayload::OpenExternalUrlRequest(req) => {
GroupedPluginRequest::Host(HostRequest::OpenExternalUrl(req))
}
InternalEventPayload::ListCookieNamesRequest(req) => {
GroupedPluginRequest::Host(HostRequest::ListCookieNames(req))
}
InternalEventPayload::GetCookieValueRequest(req) => {
GroupedPluginRequest::Host(HostRequest::GetCookieValue(req))
}
InternalEventPayload::WindowInfoRequest(req) => {
GroupedPluginRequest::Host(HostRequest::WindowInfo(req))
}
payload if payload.type_name().ends_with("_request") => {
GroupedPluginRequest::Host(HostRequest::OtherRequest(payload))
}
_ => GroupedPluginRequest::Ignore,
}
}
}
/// Route an incoming plugin event: answer it here when the shared layer can
/// (returning `Handled(Some(reply))`), hand it back to the caller when host
/// capabilities are required (`ToHandle`), or drop it when it is not a
/// request at all (`Handled(None)`).
pub fn handle_shared_plugin_event<'a>(
    query_manager: &QueryManager,
    payload: &'a InternalEventPayload,
    context: SharedPluginEventContext<'_>,
) -> GroupedPluginEvent<'a> {
    // Classification is delegated to the `From` impl; this function only
    // decides who produces the reply.
    let grouped = GroupedPluginRequest::from(payload);
    match grouped {
        GroupedPluginRequest::Host(host_req) => GroupedPluginEvent::ToHandle(host_req),
        GroupedPluginRequest::Ignore => GroupedPluginEvent::Handled(None),
        GroupedPluginRequest::Shared(shared_req) => {
            let reply = build_shared_reply(query_manager, shared_req, context);
            GroupedPluginEvent::Handled(Some(reply))
        }
    }
}
/// Build the response payload for a request the shared layer handles itself
/// (key-value storage and model listing), reading/writing through
/// `query_manager`. Failures are reported back to the plugin as
/// `ErrorResponse` payloads rather than propagated.
fn build_shared_reply(
    query_manager: &QueryManager,
    request: SharedRequest<'_>,
    context: SharedPluginEventContext<'_>,
) -> InternalEventPayload {
    match request {
        SharedRequest::GetKeyValue(req) => {
            // Missing keys yield `value: None`; keys are namespaced per plugin.
            let value = query_manager
                .connect()
                .get_plugin_key_value(context.plugin_name, &req.key)
                .map(|v| v.value);
            InternalEventPayload::GetKeyValueResponse(GetKeyValueResponse { value })
        }
        SharedRequest::SetKeyValue(req) => {
            // NOTE(review): unlike DeleteKeyValue below, any failure from
            // set_plugin_key_value is not surfaced here — confirm the call is
            // infallible or intentionally best-effort.
            query_manager.connect().set_plugin_key_value(context.plugin_name, &req.key, &req.value);
            InternalEventPayload::SetKeyValueResponse(yaak_plugins::events::SetKeyValueResponse {})
        }
        SharedRequest::DeleteKeyValue(req) => {
            // `deleted` tells the plugin whether the key actually existed.
            match query_manager.connect().delete_plugin_key_value(context.plugin_name, &req.key) {
                Ok(deleted) => {
                    InternalEventPayload::DeleteKeyValueResponse(DeleteKeyValueResponse { deleted })
                }
                Err(err) => InternalEventPayload::ErrorResponse(ErrorResponse {
                    error: format!("Failed to delete plugin key '{}' : {err}", req.key),
                }),
            }
        }
        SharedRequest::GetHttpRequestById(req) => {
            // Lookup errors and not-found both collapse to `None` here.
            let http_request = query_manager.connect().get_http_request(&req.id).ok();
            InternalEventPayload::GetHttpRequestByIdResponse(GetHttpRequestByIdResponse {
                http_request,
            })
        }
        SharedRequest::ListFolders(_) => {
            // Folder listing is scoped to a workspace, so a missing workspace id
            // in the context is a plugin error.
            let Some(workspace_id) = context.workspace_id else {
                return InternalEventPayload::ErrorResponse(ErrorResponse {
                    error: "workspace_id is required for list_folders_request".to_string(),
                });
            };
            let folders = match query_manager.connect().list_folders(workspace_id) {
                Ok(folders) => folders,
                Err(err) => {
                    return InternalEventPayload::ErrorResponse(ErrorResponse {
                        error: format!("Failed to list folders: {err}"),
                    });
                }
            };
            InternalEventPayload::ListFoldersResponse(ListFoldersResponse { folders })
        }
        SharedRequest::ListHttpRequests(req) => {
            // Folder scope (recursive) takes precedence; otherwise list the
            // whole workspace, which then requires a workspace id in context.
            let http_requests = if let Some(folder_id) = req.folder_id.as_deref() {
                match query_manager.connect().list_http_requests_for_folder_recursive(folder_id) {
                    Ok(http_requests) => http_requests,
                    Err(err) => {
                        return InternalEventPayload::ErrorResponse(ErrorResponse {
                            error: format!("Failed to list HTTP requests for folder: {err}"),
                        });
                    }
                }
            } else {
                let Some(workspace_id) = context.workspace_id else {
                    return InternalEventPayload::ErrorResponse(ErrorResponse {
                        error:
                            "workspace_id is required for list_http_requests_request without folder_id"
                                .to_string(),
                    });
                };
                match query_manager.connect().list_http_requests(workspace_id) {
                    Ok(http_requests) => http_requests,
                    Err(err) => {
                        return InternalEventPayload::ErrorResponse(ErrorResponse {
                            error: format!("Failed to list HTTP requests: {err}"),
                        });
                    }
                }
            };
            InternalEventPayload::ListHttpRequestsResponse(ListHttpRequestsResponse {
                http_requests,
            })
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use yaak_models::models::{Folder, HttpRequest, Workspace};
    use yaak_models::util::UpdateSource;

    /// Create a standalone DB seeded with one workspace ("wk_test") containing
    /// one folder ("fl_test"), which contains one HTTP request ("rq_test").
    // NOTE(review): `temp_dir` is dropped (and its directory removed) when this
    // function returns — this relies on QueryManager keeping the DB usable
    // after the path disappears; confirm against init_standalone's behavior.
    fn seed_query_manager() -> QueryManager {
        let temp_dir = tempfile::TempDir::new().expect("Failed to create temp dir");
        let db_path = temp_dir.path().join("db.sqlite");
        let blob_path = temp_dir.path().join("blobs.sqlite");
        let (query_manager, _blob_manager, _rx) =
            yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize DB");
        query_manager
            .connect()
            .upsert_workspace(
                &Workspace {
                    id: "wk_test".to_string(),
                    name: "Workspace".to_string(),
                    ..Default::default()
                },
                &UpdateSource::Sync,
            )
            .expect("Failed to seed workspace");
        query_manager
            .connect()
            .upsert_folder(
                &Folder {
                    id: "fl_test".to_string(),
                    workspace_id: "wk_test".to_string(),
                    name: "Folder".to_string(),
                    ..Default::default()
                },
                &UpdateSource::Sync,
            )
            .expect("Failed to seed folder");
        query_manager
            .connect()
            .upsert_http_request(
                &HttpRequest {
                    id: "rq_test".to_string(),
                    workspace_id: "wk_test".to_string(),
                    folder_id: Some("fl_test".to_string()),
                    name: "Request".to_string(),
                    method: "GET".to_string(),
                    url: "https://example.com".to_string(),
                    ..Default::default()
                },
                &UpdateSource::Sync,
            )
            .expect("Failed to seed request");
        query_manager
    }

    /// A workspace-wide request listing without a workspace id in context must
    /// be answered with an ErrorResponse, not an empty list.
    #[test]
    fn list_requests_requires_workspace_when_folder_missing() {
        let query_manager = seed_query_manager();
        let payload = InternalEventPayload::ListHttpRequestsRequest(
            yaak_plugins::events::ListHttpRequestsRequest { folder_id: None },
        );
        let result = handle_shared_plugin_event(
            &query_manager,
            &payload,
            SharedPluginEventContext { plugin_name: "@yaak/test", workspace_id: None },
        );
        assert!(matches!(
            result,
            GroupedPluginEvent::Handled(Some(InternalEventPayload::ErrorResponse(_)))
        ));
    }

    /// Both listing modes (by workspace id, and by folder id without a
    /// workspace id) should find the single seeded request.
    #[test]
    fn list_requests_by_workspace_and_folder() {
        let query_manager = seed_query_manager();
        let by_workspace_payload = InternalEventPayload::ListHttpRequestsRequest(
            yaak_plugins::events::ListHttpRequestsRequest { folder_id: None },
        );
        let by_workspace = handle_shared_plugin_event(
            &query_manager,
            &by_workspace_payload,
            SharedPluginEventContext { plugin_name: "@yaak/test", workspace_id: Some("wk_test") },
        );
        match by_workspace {
            GroupedPluginEvent::Handled(Some(InternalEventPayload::ListHttpRequestsResponse(
                resp,
            ))) => {
                assert_eq!(resp.http_requests.len(), 1);
            }
            other => panic!("unexpected workspace response: {other:?}"),
        }
        // Folder-scoped listing must not require a workspace id.
        let by_folder_payload = InternalEventPayload::ListHttpRequestsRequest(
            yaak_plugins::events::ListHttpRequestsRequest {
                folder_id: Some("fl_test".to_string()),
            },
        );
        let by_folder = handle_shared_plugin_event(
            &query_manager,
            &by_folder_payload,
            SharedPluginEventContext { plugin_name: "@yaak/test", workspace_id: None },
        );
        match by_folder {
            GroupedPluginEvent::Handled(Some(InternalEventPayload::ListHttpRequestsResponse(
                resp,
            ))) => {
                assert_eq!(resp.http_requests.len(), 1);
            }
            other => panic!("unexpected folder response: {other:?}"),
        }
    }

    /// Host-only requests (here: WindowInfo) must be classified as `ToHandle`
    /// with the inner request passed through intact.
    #[test]
    fn host_request_classification_works() {
        let query_manager = seed_query_manager();
        let payload = InternalEventPayload::WindowInfoRequest(WindowInfoRequest {
            label: "main".to_string(),
        });
        let result = handle_shared_plugin_event(
            &query_manager,
            &payload,
            SharedPluginEventContext { plugin_name: "@yaak/test", workspace_id: None },
        );
        match result {
            GroupedPluginEvent::ToHandle(HostRequest::WindowInfo(req)) => {
                assert_eq!(req.label, "main")
            }
            other => panic!("unexpected host classification: {other:?}"),
        }
    }
}

7
npm/README.md Normal file
View File

@@ -0,0 +1,7 @@
# Yaak CLI NPM Packages
The Rust `yaak` CLI binary is published to NPM with a meta package (`@yaakapp/cli`) and
platform-specific optional dependency packages. The package exposes both `yaak` and `yaakcli`
commands for compatibility.
This follows the same strategy previously used in the standalone `yaak-cli` repo.

View File

View File

@@ -0,0 +1,10 @@
{
"name": "@yaakapp/cli-darwin-arm64",
"version": "0.0.1",
"repository": {
"type": "git",
"url": "git+https://github.com/mountain-loop/yaak.git"
},
"os": ["darwin"],
"cpu": ["arm64"]
}

View File

View File

@@ -0,0 +1,10 @@
{
"name": "@yaakapp/cli-darwin-x64",
"version": "0.0.1",
"repository": {
"type": "git",
"url": "git+https://github.com/mountain-loop/yaak.git"
},
"os": ["darwin"],
"cpu": ["x64"]
}

View File

View File

@@ -0,0 +1,10 @@
{
"name": "@yaakapp/cli-linux-arm64",
"version": "0.0.1",
"repository": {
"type": "git",
"url": "git+https://github.com/mountain-loop/yaak.git"
},
"os": ["linux"],
"cpu": ["arm64"]
}

View File

View File

@@ -0,0 +1,10 @@
{
"name": "@yaakapp/cli-linux-x64",
"version": "0.0.1",
"repository": {
"type": "git",
"url": "git+https://github.com/mountain-loop/yaak.git"
},
"os": ["linux"],
"cpu": ["x64"]
}

View File

View File

@@ -0,0 +1,10 @@
{
"name": "@yaakapp/cli-win32-arm64",
"version": "0.0.1",
"repository": {
"type": "git",
"url": "git+https://github.com/mountain-loop/yaak.git"
},
"os": ["win32"],
"cpu": ["arm64"]
}

View File

View File

@@ -0,0 +1,10 @@
{
"name": "@yaakapp/cli-win32-x64",
"version": "0.0.1",
"repository": {
"type": "git",
"url": "git+https://github.com/mountain-loop/yaak.git"
},
"os": ["win32"],
"cpu": ["x64"]
}

2
npm/cli/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
yaak
yaak.exe

30
npm/cli/bin/cli.js Executable file
View File

@@ -0,0 +1,30 @@
#!/usr/bin/env node
// Launcher for the `yaak` / `yaakcli` bin entries: resolves the platform
// binary and re-runs it with this process's arguments and stdio.
const path = require("path");
const childProcess = require("child_process");
const { BINARY_NAME, PLATFORM_SPECIFIC_PACKAGE_NAME } = require("../common");

// Prefer the binary shipped by the platform-specific optional dependency;
// fall back to the copy install.js downloads into the package root.
function getBinaryPath() {
  try {
    if (!PLATFORM_SPECIFIC_PACKAGE_NAME) {
      throw new Error("unsupported platform");
    }
    return require.resolve(`${PLATFORM_SPECIFIC_PACKAGE_NAME}/bin/${BINARY_NAME}`);
  } catch (_) {
    return path.join(__dirname, "..", BINARY_NAME);
  }
}

// Forward all CLI args and share this process's stdio with the binary.
const result = childProcess.spawnSync(getBinaryPath(), process.argv.slice(2), {
  stdio: "inherit"
});
if (result.error) {
  // Spawn itself failed (e.g. binary missing or not executable).
  throw result.error;
}
if (result.signal) {
  // Child died from a signal: re-raise it on ourselves so callers observe
  // the same termination signal.
  process.kill(process.pid, result.signal);
}
// `status` is null when killed by a signal; exit 1 in that fallback case.
process.exit(result.status ?? 1);

20
npm/cli/common.js Normal file
View File

@@ -0,0 +1,20 @@
// Shared constants for the @yaakapp/cli meta package and its scripts.

// Maps `${process.platform}_${process.arch}` to the npm package that ships the
// prebuilt CLI binary for that target.
const BINARY_DISTRIBUTION_PACKAGES = {
  darwin_arm64: "@yaakapp/cli-darwin-arm64",
  darwin_x64: "@yaakapp/cli-darwin-x64",
  linux_arm64: "@yaakapp/cli-linux-arm64",
  linux_x64: "@yaakapp/cli-linux-x64",
  win32_x64: "@yaakapp/cli-win32-x64",
  win32_arm64: "@yaakapp/cli-win32-arm64"
};
// Platform packages are published in lockstep with this package, so the meta
// package's own version doubles as the binary version to download.
const BINARY_DISTRIBUTION_VERSION = require("./package.json").version;
const BINARY_NAME = process.platform === "win32" ? "yaak.exe" : "yaak";
// Undefined when running on a platform/arch combination we don't publish for.
const PLATFORM_SPECIFIC_PACKAGE_NAME =
  BINARY_DISTRIBUTION_PACKAGES[`${process.platform}_${process.arch}`];

module.exports = {
  BINARY_DISTRIBUTION_PACKAGES,
  BINARY_DISTRIBUTION_VERSION,
  BINARY_NAME,
  PLATFORM_SPECIFIC_PACKAGE_NAME
};

20
npm/cli/index.js Normal file
View File

@@ -0,0 +1,20 @@
// Programmatic entry point for @yaakapp/cli: exposes runBinary() so other
// tooling can invoke the CLI without shelling out to the bin script.
const path = require("path");
const childProcess = require("child_process");
const { PLATFORM_SPECIFIC_PACKAGE_NAME, BINARY_NAME } = require("./common");

// Prefer the binary from the platform-specific optional dependency; fall back
// to the copy install.js places next to this file.
function getBinaryPath() {
  try {
    if (!PLATFORM_SPECIFIC_PACKAGE_NAME) {
      throw new Error("unsupported platform");
    }
    return require.resolve(`${PLATFORM_SPECIFIC_PACKAGE_NAME}/bin/${BINARY_NAME}`);
  } catch (_) {
    return path.join(__dirname, BINARY_NAME);
  }
}

// Run the CLI synchronously with the given args, sharing this process's stdio.
// execFileSync throws if the binary exits with a non-zero status.
module.exports.runBinary = function runBinary(...args) {
  childProcess.execFileSync(getBinaryPath(), args, {
    stdio: "inherit"
  });
};

97
npm/cli/install.js Normal file
View File

@@ -0,0 +1,97 @@
const fs = require("node:fs");
const path = require("node:path");
const zlib = require("node:zlib");
const https = require("node:https");
const {
BINARY_DISTRIBUTION_VERSION,
BINARY_NAME,
PLATFORM_SPECIFIC_PACKAGE_NAME
} = require("./common");
const fallbackBinaryPath = path.join(__dirname, BINARY_NAME);
// GET `url` over HTTPS and resolve with the full response body as a Buffer.
// Follows 3xx redirects (via the Location header) recursively; rejects on any
// other non-2xx status or on a connection error.
function makeRequest(url) {
  return new Promise((resolve, reject) => {
    https
      .get(url, (response) => {
        if (response.statusCode >= 200 && response.statusCode < 300) {
          // Buffer the whole body in memory (binaries are modest in size).
          const chunks = [];
          response.on("data", (chunk) => chunks.push(chunk));
          response.on("end", () => resolve(Buffer.concat(chunks)));
        } else if (
          response.statusCode >= 300 &&
          response.statusCode < 400 &&
          response.headers.location
        ) {
          // Follow redirect; no explicit depth limit here.
          makeRequest(response.headers.location).then(resolve, reject);
        } else {
          reject(
            new Error(
              `npm responded with status code ${response.statusCode} when downloading package ${url}`
            )
          );
        }
      })
      .on("error", (error) => reject(error));
  });
}
// Pull a single file's bytes out of an uncompressed tar archive by walking the
// 512-byte records: each header block carries the entry name (offset 0, 100
// bytes) and the octal size (offset 124, 12 bytes), followed by the file data
// padded to the next 512-byte boundary. Returns null when no entry matches.
function extractFileFromTarball(tarballBuffer, filepath) {
  for (let cursor = 0; cursor < tarballBuffer.length; ) {
    const header = tarballBuffer.subarray(cursor, cursor + 512);
    cursor += 512;
    // Header fields are NUL-padded ASCII; strip from the first NUL onward.
    const entryName = header.toString("utf-8", 0, 100).replace(/\0.*/g, "");
    const entrySize = parseInt(header.toString("utf-8", 124, 136).replace(/\0.*/g, ""), 8);
    if (entryName === filepath) {
      return tarballBuffer.subarray(cursor, cursor + entrySize);
    }
    // Skip the data section, rounded up to a whole 512-byte block.
    cursor = (cursor + entrySize + 511) & ~511;
  }
  return null;
}
// Fallback installer: download the platform package's tarball straight from
// the npm registry, extract `package/bin/<binary>` from it, and write the
// binary (mode 755) next to this script. Throws on unsupported platforms,
// download failures, or a tarball missing the expected entry.
async function downloadBinaryFromNpm() {
  if (!PLATFORM_SPECIFIC_PACKAGE_NAME) {
    throw new Error(`Unsupported platform: ${process.platform}/${process.arch}`);
  }
  // npm tarball URL convention: /<scoped-name>/-/<name-without-scope>-<version>.tgz
  const packageNameWithoutScope = PLATFORM_SPECIFIC_PACKAGE_NAME.split("/")[1];
  const tarballUrl = `https://registry.npmjs.org/${PLATFORM_SPECIFIC_PACKAGE_NAME}/-/${packageNameWithoutScope}-${BINARY_DISTRIBUTION_VERSION}.tgz`;
  const tarballDownloadBuffer = await makeRequest(tarballUrl);
  // .tgz → gunzip to raw tar before scanning for the binary entry.
  const tarballBuffer = zlib.unzipSync(tarballDownloadBuffer);
  const binary = extractFileFromTarball(tarballBuffer, `package/bin/${BINARY_NAME}`);
  if (!binary) {
    throw new Error(`Could not find package/bin/${BINARY_NAME} in tarball`);
  }
  fs.writeFileSync(fallbackBinaryPath, binary);
  // Make the downloaded binary executable (no-op semantics on Windows).
  fs.chmodSync(fallbackBinaryPath, "755");
}
// True when the platform-specific optional dependency was installed and its
// binary is resolvable; false on unsupported platforms or when npm skipped
// the optional dependency (e.g. --no-optional installs).
function isPlatformSpecificPackageInstalled() {
  try {
    if (!PLATFORM_SPECIFIC_PACKAGE_NAME) {
      return false;
    }
    require.resolve(`${PLATFORM_SPECIFIC_PACKAGE_NAME}/bin/${BINARY_NAME}`);
    return true;
  } catch (_) {
    return false;
  }
}
// postinstall entry point: only download from the registry when the optional
// platform package did not provide the binary. Failures set a non-zero exit
// code rather than throwing, so npm reports the postinstall failure cleanly.
if (!isPlatformSpecificPackageInstalled()) {
  console.log("Platform package missing. Downloading Yaak CLI binary from npm...");
  downloadBinaryFromNpm().catch((err) => {
    console.error("Failed to install Yaak CLI binary:", err);
    process.exitCode = 1;
  });
} else {
  console.log("Platform package present. Using bundled Yaak CLI binary.");
}

25
npm/cli/package.json Normal file
View File

@@ -0,0 +1,25 @@
{
"name": "@yaakapp/cli",
"version": "0.0.1",
"main": "./index.js",
"repository": {
"type": "git",
"url": "git+https://github.com/mountain-loop/yaak.git"
},
"scripts": {
"postinstall": "node ./install.js",
"prepublishOnly": "node ./prepublish.js"
},
"bin": {
"yaak": "bin/cli.js",
"yaakcli": "bin/cli.js"
},
"optionalDependencies": {
"@yaakapp/cli-darwin-x64": "0.0.1",
"@yaakapp/cli-darwin-arm64": "0.0.1",
"@yaakapp/cli-linux-arm64": "0.0.1",
"@yaakapp/cli-linux-x64": "0.0.1",
"@yaakapp/cli-win32-x64": "0.0.1",
"@yaakapp/cli-win32-arm64": "0.0.1"
}
}

5
npm/cli/prepublish.js Normal file
View File

@@ -0,0 +1,5 @@
// prepublishOnly hook: copy the repository README (two levels up from
// npm/cli/) into this package so npm shows it on the @yaakapp/cli page.
const fs = require("node:fs");
const path = require("node:path");

const readme = path.join(__dirname, "..", "..", "README.md");
fs.copyFileSync(readme, path.join(__dirname, "README.md"));

77
npm/prepare-publish.js Normal file
View File

@@ -0,0 +1,77 @@
// Release-prep script for the @yaakapp/cli npm packages: copies the built CLI
// binaries from dist/ into each platform package's bin/ directory and stamps
// the release version (from $YAAK_CLI_VERSION) into every package.json,
// including the meta package's optionalDependencies pins.
const { chmodSync, copyFileSync, existsSync, mkdirSync, readFileSync, writeFileSync } = require("node:fs");
const { dirname, join } = require("node:path");

// Accept both "1.2.3" and "v1.2.3" (tag names) in YAAK_CLI_VERSION.
const version = process.env.YAAK_CLI_VERSION?.replace(/^v/, "");
if (!version) {
  console.error("YAAK_CLI_VERSION is not set");
  process.exit(1);
}

const packages = [
  "cli",
  "cli-darwin-arm64",
  "cli-darwin-x64",
  "cli-linux-arm64",
  "cli-linux-x64",
  "cli-win32-arm64",
  "cli-win32-x64"
];

const binaries = [
  {
    src: join(__dirname, "dist", "cli-darwin-arm64", "yaak"),
    dest: join(__dirname, "cli-darwin-arm64", "bin", "yaak")
  },
  {
    src: join(__dirname, "dist", "cli-darwin-x64", "yaak"),
    dest: join(__dirname, "cli-darwin-x64", "bin", "yaak")
  },
  {
    src: join(__dirname, "dist", "cli-linux-arm64", "yaak"),
    dest: join(__dirname, "cli-linux-arm64", "bin", "yaak")
  },
  {
    src: join(__dirname, "dist", "cli-linux-x64", "yaak"),
    dest: join(__dirname, "cli-linux-x64", "bin", "yaak")
  },
  {
    src: join(__dirname, "dist", "cli-win32-arm64", "yaak.exe"),
    dest: join(__dirname, "cli-win32-arm64", "bin", "yaak.exe")
  },
  {
    src: join(__dirname, "dist", "cli-win32-x64", "yaak.exe"),
    dest: join(__dirname, "cli-win32-x64", "bin", "yaak.exe")
  }
];

for (const { src, dest } of binaries) {
  if (!existsSync(src)) {
    console.error(`Missing binary artifact: ${src}`);
    process.exit(1);
  }
  // The platform packages only track package.json in git and git cannot track
  // empty directories, so bin/ won't exist on a fresh checkout — create it
  // first or copyFileSync throws ENOENT.
  mkdirSync(dirname(dest), { recursive: true });
  copyFileSync(src, dest);
  if (!dest.endsWith(".exe")) {
    // Unix binaries must be executable when packed into the tarball.
    chmodSync(dest, 0o755);
  }
}

for (const pkg of packages) {
  const filepath = join(__dirname, pkg, "package.json");
  const json = JSON.parse(readFileSync(filepath, "utf-8"));
  json.version = version;
  if (json.name === "@yaakapp/cli") {
    // Pin the meta package's optional deps to the exact same release so
    // installs always fetch matching binaries.
    json.optionalDependencies = {
      "@yaakapp/cli-darwin-x64": version,
      "@yaakapp/cli-darwin-arm64": version,
      "@yaakapp/cli-linux-arm64": version,
      "@yaakapp/cli-linux-x64": version,
      "@yaakapp/cli-win32-x64": version,
      "@yaakapp/cli-win32-arm64": version
    };
  }
  writeFileSync(filepath, `${JSON.stringify(json, null, 2)}\n`);
}
console.log(`Prepared @yaakapp/cli npm packages for ${version}`);

73
package-lock.json generated
View File

@@ -73,7 +73,7 @@
"devDependencies": {
"@biomejs/biome": "^2.3.13",
"@tauri-apps/cli": "^2.9.6",
"@yaakapp/cli": "^0.3.4",
"@yaakapp/cli": "^0.4.0-beta.2",
"dotenv-cli": "^11.0.0",
"husky": "^9.1.7",
"nodejs-file-downloader": "^4.13.0",
@@ -4326,27 +4326,28 @@
"link": true
},
"node_modules/@yaakapp/cli": {
"version": "0.3.4",
"resolved": "https://registry.npmjs.org/@yaakapp/cli/-/cli-0.3.4.tgz",
"integrity": "sha512-bSSL3noEfyoPC0M+bj34jbBZbB+gwYLCHL9cf6BYHgkRQKlHFpvN6z8M2jQZljb+CTQdHK0NzosmwHLpjMmAVA==",
"version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli/-/cli-0.4.0-beta.2.tgz",
"integrity": "sha512-UXPxTS9oWVCIr4rShC7HjcAX+gSmw/BQ5F1Xp3Rub3vY/G7+513JJsc1HhLGVZqFfOVRSMEKRxtF9/9okSyiHg==",
"dev": true,
"hasInstallScript": true,
"bin": {
"yaak": "bin/cli.js",
"yaakcli": "bin/cli.js"
},
"optionalDependencies": {
"@yaakapp/cli-darwin-arm64": "0.3.4",
"@yaakapp/cli-darwin-x64": "0.3.4",
"@yaakapp/cli-linux-arm64": "0.3.4",
"@yaakapp/cli-linux-x64": "0.3.4",
"@yaakapp/cli-win32-arm64": "0.3.4",
"@yaakapp/cli-win32-x64": "0.3.4"
"@yaakapp/cli-darwin-arm64": "0.4.0-beta.2",
"@yaakapp/cli-darwin-x64": "0.4.0-beta.2",
"@yaakapp/cli-linux-arm64": "0.4.0-beta.2",
"@yaakapp/cli-linux-x64": "0.4.0-beta.2",
"@yaakapp/cli-win32-arm64": "0.4.0-beta.2",
"@yaakapp/cli-win32-x64": "0.4.0-beta.2"
}
},
"node_modules/@yaakapp/cli-darwin-arm64": {
"version": "0.3.4",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-darwin-arm64/-/cli-darwin-arm64-0.3.4.tgz",
"integrity": "sha512-iTohEO7XSVZwSvTgEQE9my3wGyWtTl1q8yfol7hHwVFTX7G8Geh8X2j2vVokHhj7J9OZL9jtYQWIsM1ekOHSEQ==",
"version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-darwin-arm64/-/cli-darwin-arm64-0.4.0-beta.2.tgz",
"integrity": "sha512-mqkyH5tIPRLs9JumP9ZmzjB5gIwmOL1yCDoJ1qVU8DIJ7mwlcQaPGYTK98pVdBcKOjofVakBTcpol9P8rBv4qw==",
"cpu": [
"arm64"
],
@@ -4357,9 +4358,9 @@
]
},
"node_modules/@yaakapp/cli-darwin-x64": {
"version": "0.3.4",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-darwin-x64/-/cli-darwin-x64-0.3.4.tgz",
"integrity": "sha512-gz7IcjFGKA0cCAum1Aq8kmVg7erYYSrZ9pliDw0NZyObjrBysJcsDXLodEU437u0pihtdCfoLsq3rsYYs8uwCA==",
"version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-darwin-x64/-/cli-darwin-x64-0.4.0-beta.2.tgz",
"integrity": "sha512-QI/H2yUF8CkJq+cnRthoUWWTEJPH4QPA78FYcGjFRhvBaj1m2G/GlCA5NkTXm/fvIjNkQEODSihXrhU+zoSSCw==",
"cpu": [
"x64"
],
@@ -4370,9 +4371,9 @@
]
},
"node_modules/@yaakapp/cli-linux-arm64": {
"version": "0.3.4",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-linux-arm64/-/cli-linux-arm64-0.3.4.tgz",
"integrity": "sha512-Yiwz8PBkXngmr0lTMW1pgy+F/kUISkzvqofdoBseXTrS/GDxoW3ILnG3If30LuIyWWPgqpuU+qKMtbVDzuncPQ==",
"version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-linux-arm64/-/cli-linux-arm64-0.4.0-beta.2.tgz",
"integrity": "sha512-nvAp97LkgRpqVHyMwDdpkzlKOWG2kJXezCLRZaRWaEpbnNuviSF+0yzCuFGZRHEEspj7B0TiM+sKGkpvjNlweA==",
"cpu": [
"arm64"
],
@@ -4383,9 +4384,9 @@
]
},
"node_modules/@yaakapp/cli-linux-x64": {
"version": "0.3.4",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-linux-x64/-/cli-linux-x64-0.3.4.tgz",
"integrity": "sha512-j7/r18UYNlFChDVU5N5ye3mmL+OR9Uu3LY72JxW+s/SyV69Bo8Griii75Wt19z/jj2ES8pxD+4IJq56VF3wJ7w==",
"version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-linux-x64/-/cli-linux-x64-0.4.0-beta.2.tgz",
"integrity": "sha512-9/qAMNrtE9glxih3XWGfFssIJpQ4mHNUTuWYKroc0aZZUrunnCw3tX1tQtFDxy0QRIZcGlBeBRtgxuuBd2fYbg==",
"cpu": [
"x64"
],
@@ -4396,9 +4397,9 @@
]
},
"node_modules/@yaakapp/cli-win32-arm64": {
"version": "0.3.4",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-win32-arm64/-/cli-win32-arm64-0.3.4.tgz",
"integrity": "sha512-OUSKOKrSnzrTAGW0c+2ZCwA4yhgw/bA+gyeTvpf7cELVuB0qooGkEcJ3lM7fPMKmUbFU0r+K/Ggq1QMUr7cJLQ==",
"version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-win32-arm64/-/cli-win32-arm64-0.4.0-beta.2.tgz",
"integrity": "sha512-eM1zL+hl0y3NBLxWO90y9VyaFsAf0HAsECBWvhKhvEdd6KG4K1XzpXrC30cHQBGePIrCa/az8eSuvTde0Z2C/g==",
"cpu": [
"arm64"
],
@@ -4409,9 +4410,9 @@
]
},
"node_modules/@yaakapp/cli-win32-x64": {
"version": "0.3.4",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-win32-x64/-/cli-win32-x64-0.3.4.tgz",
"integrity": "sha512-sVYnW1rROLbzFUCyeZ++ibN+8gJS7FdPnBRHIE0KORfeI4e7Gw/aMUji2qpSZ1gt3DrAU95DDNjBkDvGBAgqag==",
"version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-win32-x64/-/cli-win32-x64-0.4.0-beta.2.tgz",
"integrity": "sha512-ySdiK0h216EqURkM5KZoqbPTgbIX4eNK/IgrKwSazxRb369HOZYQ8X68as+VRxEL4NCMmWlQNdbBDuf+apg/mg==",
"cpu": [
"x64"
],
@@ -7984,9 +7985,9 @@
}
},
"node_modules/hono": {
"version": "4.11.7",
"resolved": "https://registry.npmjs.org/hono/-/hono-4.11.7.tgz",
"integrity": "sha512-l7qMiNee7t82bH3SeyUCt9UF15EVmaBvsppY2zQtrbIhl/yzBTny+YUxsVjSjQ6gaqaeVtZmGocom8TzBlA4Yw==",
"version": "4.11.10",
"resolved": "https://registry.npmjs.org/hono/-/hono-4.11.10.tgz",
"integrity": "sha512-kyWP5PAiMooEvGrA9jcD3IXF7ATu8+o7B3KCbPXid5se52NPqnOpM/r9qeW2heMnOekF4kqR1fXJqCYeCLKrZg==",
"license": "MIT",
"engines": {
"node": ">=16.9.0"
@@ -16019,7 +16020,7 @@
"@hono/mcp": "^0.2.3",
"@hono/node-server": "^1.19.7",
"@modelcontextprotocol/sdk": "^1.26.0",
"hono": "^4.11.7",
"hono": "^4.11.10",
"zod": "^3.25.76"
},
"devDependencies": {
@@ -16087,7 +16088,13 @@
},
"plugins/auth-oauth2": {
"name": "@yaak/auth-oauth2",
"version": "0.1.0"
"version": "0.1.0",
"dependencies": {
"jsonwebtoken": "^9.0.2"
},
"devDependencies": {
"@types/jsonwebtoken": "^9.0.7"
}
},
"plugins/filter-jsonpath": {
"name": "@yaak/filter-jsonpath",

View File

@@ -98,7 +98,7 @@
"devDependencies": {
"@biomejs/biome": "^2.3.13",
"@tauri-apps/cli": "^2.9.6",
"@yaakapp/cli": "^0.3.4",
"@yaakapp/cli": "^0.4.0-beta.2",
"dotenv-cli": "^11.0.0",
"husky": "^9.1.7",
"nodejs-file-downloader": "^4.13.0",

View File

@@ -27,7 +27,6 @@
"build:copy-types": "run-p build:copy-types:*",
"build:copy-types:root": "cpy --flat ../../crates/yaak-plugins/bindings/*.ts ./src/bindings",
"build:copy-types:next": "cpy --flat ../../crates/yaak-plugins/bindings/serde_json/*.ts ./src/bindings/serde_json",
"publish": "npm publish",
"prepublishOnly": "npm run build"
},
"dependencies": {

File diff suppressed because one or more lines are too long

View File

@@ -33,7 +33,7 @@ import type {
ListFoldersResponse,
ListHttpRequestsRequest,
ListHttpRequestsResponse,
ListWorkspacesResponse,
ListOpenWorkspacesResponse,
PluginContext,
PromptFormResponse,
PromptTextResponse,
@@ -942,9 +942,9 @@ export class PluginInstance {
workspace: {
list: async () => {
const payload = {
type: 'list_workspaces_request',
type: 'list_open_workspaces_request',
} as InternalEventPayload;
const response = await this.#sendForReply<ListWorkspacesResponse>(context, payload);
const response = await this.#sendForReply<ListOpenWorkspacesResponse>(context, payload);
return response.workspaces.map((w) => {
// Internal workspace info includes label field not in public API
type WorkspaceInfoInternal = typeof w & { label?: string };

View File

@@ -29,6 +29,7 @@ const modules = [
// Render a faker output as template text: strings pass through untouched,
// Dates serialize as ISO-8601 (so they don't pick up JSON quotes), and any
// other value falls back to its JSON representation.
function normalizeResult(result: unknown): string {
  if (typeof result === 'string') {
    return result;
  }
  return result instanceof Date ? result.toISOString() : JSON.stringify(result);
}

View File

@@ -9,4 +9,18 @@ describe('template-function-faker', () => {
// accidental additions, removals, or renames across faker upgrades.
expect(names).toMatchSnapshot();
});
it('renders date results as unquoted ISO strings', async () => {
const { plugin } = await import('../src/index');
const fn = plugin.templateFunctions?.find((fn) => fn.name === 'faker.date.future');
expect(fn?.onRender).toBeTypeOf('function');
const result = await fn!.onRender!(
{} as Parameters<NonNullable<typeof fn.onRender>>[0],
{ values: {} } as Parameters<NonNullable<typeof fn.onRender>>[1],
);
expect(result).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
});
});

View File

@@ -18,7 +18,7 @@
"@hono/mcp": "^0.2.3",
"@hono/node-server": "^1.19.7",
"@modelcontextprotocol/sdk": "^1.26.0",
"hono": "^4.11.7",
"hono": "^4.11.10",
"zod": "^3.25.76"
},
"devDependencies": {

View File

@@ -11,7 +11,8 @@
"version": "0.1.0",
"scripts": {
"build": "yaakcli build",
"dev": "yaakcli dev"
"dev": "yaakcli dev",
"test": "vitest --run tests"
},
"dependencies": {
"httpntlm": "^1.8.13"

View File

@@ -2,6 +2,16 @@ import type { PluginDefinition } from '@yaakapp/api';
import { ntlm } from 'httpntlm';
// Find the NTLM type-2 challenge among the response's WWW-Authenticate
// headers. Servers may send multiple headers or pack several schemes into one
// comma-separated value (e.g. "Negotiate, NTLM <token>"), so every candidate
// scheme is collected and the first NTLM entry that carries a token wins.
// Returns null when no NTLM challenge is present.
function extractNtlmChallenge(headers: Array<{ name: string; value: string }>): string | null {
  const candidates: string[] = [];
  for (const header of headers) {
    if (header.name.toLowerCase() !== 'www-authenticate') continue;
    for (const part of header.value.split(',')) {
      const trimmed = part.trim();
      if (trimmed) candidates.push(trimmed);
    }
  }
  const challenge = candidates.find((candidate) => /^NTLM\s+\S+/i.test(candidate));
  return challenge ?? null;
}
export const plugin: PluginDefinition = {
authentication: {
name: 'windows',
@@ -68,15 +78,12 @@ export const plugin: PluginDefinition = {
},
});
const wwwAuthenticateHeader = negotiateResponse.headers.find(
(h) => h.name.toLowerCase() === 'www-authenticate',
);
if (!wwwAuthenticateHeader?.value) {
throw new Error('Unable to find www-authenticate response header for NTLM');
const ntlmChallenge = extractNtlmChallenge(negotiateResponse.headers);
if (ntlmChallenge == null) {
throw new Error('Unable to find NTLM challenge in WWW-Authenticate response headers');
}
const type2 = ntlm.parseType2Message(wwwAuthenticateHeader.value, (err: Error | null) => {
const type2 = ntlm.parseType2Message(ntlmChallenge, (err: Error | null) => {
if (err != null) throw err;
});
const type3 = ntlm.createType3Message(type2, options);

View File

@@ -0,0 +1,84 @@
import type { Context } from '@yaakapp/api';
import { beforeEach, describe, expect, test, vi } from 'vitest';
const ntlmMock = vi.hoisted(() => ({
createType1Message: vi.fn(),
parseType2Message: vi.fn(),
createType3Message: vi.fn(),
}));
vi.mock('httpntlm', () => ({ ntlm: ntlmMock }));
import { plugin } from '../src';
// Covers the NTLM challenge-extraction paths of the auth handshake; the
// `httpntlm` module is mocked (see hoisted mock above this block) so no real
// NTLM message crypto runs.
describe('auth-ntlm', () => {
  beforeEach(() => {
    // Fresh mocks per test with canned type-1/type-3 messages.
    ntlmMock.createType1Message.mockReset();
    ntlmMock.parseType2Message.mockReset();
    ntlmMock.createType3Message.mockReset();
    ntlmMock.createType1Message.mockReturnValue('NTLM TYPE1');
    ntlmMock.parseType2Message.mockReturnValue({} as any);
    ntlmMock.createType3Message.mockReturnValue('NTLM TYPE3');
  });

  test('uses NTLM challenge when Negotiate and NTLM headers are separate', async () => {
    // Server sends two WWW-Authenticate headers; the NTLM one must be picked.
    const send = vi.fn().mockResolvedValue({
      headers: [
        { name: 'WWW-Authenticate', value: 'Negotiate' },
        { name: 'WWW-Authenticate', value: 'NTLM TlRMTVNTUAACAAAAAA==' },
      ],
    });
    const ctx = { httpRequest: { send } } as unknown as Context;
    const result = await plugin.authentication?.onApply(ctx, {
      values: {},
      headers: [],
      url: 'https://example.local/resource',
      method: 'GET',
      contextId: 'ctx',
    });
    expect(ntlmMock.parseType2Message).toHaveBeenCalledWith(
      'NTLM TlRMTVNTUAACAAAAAA==',
      expect.any(Function),
    );
    expect(result).toEqual({ setHeaders: [{ name: 'Authorization', value: 'NTLM TYPE3' }] });
  });

  test('uses NTLM challenge when auth schemes are comma-separated in one header', async () => {
    // Regression case: "Negotiate, NTLM <token>" in a single header value.
    const send = vi.fn().mockResolvedValue({
      headers: [{ name: 'www-authenticate', value: 'Negotiate, NTLM TlRMTVNTUAACAAAAAA==' }],
    });
    const ctx = { httpRequest: { send } } as unknown as Context;
    await plugin.authentication?.onApply(ctx, {
      values: {},
      headers: [],
      url: 'https://example.local/resource',
      method: 'GET',
      contextId: 'ctx',
    });
    expect(ntlmMock.parseType2Message).toHaveBeenCalledWith(
      'NTLM TlRMTVNTUAACAAAAAA==',
      expect.any(Function),
    );
  });

  test('throws a clear error when NTLM challenge is missing', async () => {
    // Negotiate-only responses must produce the dedicated error message.
    const send = vi.fn().mockResolvedValue({
      headers: [{ name: 'WWW-Authenticate', value: 'Negotiate' }],
    });
    const ctx = { httpRequest: { send } } as unknown as Context;
    await expect(
      plugin.authentication?.onApply(ctx, {
        values: {},
        headers: [],
        url: 'https://example.local/resource',
        method: 'GET',
        contextId: 'ctx',
      }),
    ).rejects.toThrow('Unable to find NTLM challenge in WWW-Authenticate response headers');
  });
});

View File

@@ -1,4 +1,4 @@
const { readdirSync, cpSync, existsSync } = require('node:fs');
const { readdirSync, cpSync, existsSync, mkdirSync } = require('node:fs');
const path = require('node:path');
const pluginsDir = path.join(__dirname, '..', 'plugins');
@@ -24,6 +24,7 @@ for (const name of readdirSync(pluginsDir)) {
continue;
}
const destDir = path.join(__dirname, '../crates-tauri/yaak-app/vendored/plugins/', name);
mkdirSync(destDir, { recursive: true });
console.log(`Copying ${name} to ${destDir}`);
cpSync(path.join(dir, 'package.json'), path.join(destDir, 'package.json'));
cpSync(path.join(dir, 'build'), path.join(destDir, 'build'), { recursive: true });