Compare commits


2 Commits

Author SHA1 Message Date
Gregory Schier 0a52032988 "Merge branch 'main' into omnara/premium-deviator" 2026-01-09 20:23:20 -08:00
Gregory Schier 4b7497a908 "feat: implement layered settings system for HTTP requests and folders" 2026-01-09 20:22:53 -08:00
    Add support for settings overrides at folder and HTTP request levels. Introduces nullable settings columns to database tables and implements resolution logic to merge workspace, folder, and request-level settings with proper precedence.
249 changed files with 3580 additions and 11893 deletions

View File

@@ -1,46 +1,35 @@
---
description: Review a PR in a new worktree
allowed-tools: Bash(git worktree:*), Bash(gh pr:*), Bash(git branch:*)
allowed-tools: Bash(git worktree:*), Bash(gh pr:*)
---
Check out a GitHub pull request for review.
Review a GitHub pull request in a new git worktree.
## Usage
```
/check-out-pr <PR_NUMBER>
/review-pr <PR_NUMBER>
```
## What to do
1. If no PR number is provided, list all open pull requests and ask the user to select one
1. List all open pull requests and ask the user to select one
2. Get PR information using `gh pr view <PR_NUMBER> --json number,headRefName`
3. **Ask the user** whether they want to:
- **A) Check out in current directory** — simple `gh pr checkout <PR_NUMBER>`
- **B) Create a new worktree** — isolated copy at `../yaak-worktrees/pr-<PR_NUMBER>`
4. Follow the appropriate path below
## Option A: Check out in current directory
1. Run `gh pr checkout <PR_NUMBER>`
2. Inform the user which branch they're now on
## Option B: Create a new worktree
1. Create a new worktree at `../yaak-worktrees/pr-<PR_NUMBER>` using `git worktree add` with a timeout of at least 300000ms (5 minutes) since the post-checkout hook runs a bootstrap script
2. Checkout the PR branch in the new worktree using `gh pr checkout <PR_NUMBER>`
3. The post-checkout hook will automatically:
3. Extract the branch name from the PR
4. Create a new worktree at `../yaak-worktrees/pr-<PR_NUMBER>` using `git worktree add` with a timeout of at least 300000ms (5 minutes) since the post-checkout hook runs a bootstrap script
5. Checkout the PR branch in the new worktree using `gh pr checkout <PR_NUMBER>`
6. The post-checkout hook will automatically:
- Create `.env.local` with unique ports
- Copy editor config folders
- Run `npm install && npm run bootstrap`
4. Inform the user:
7. Inform the user:
- Where the worktree was created
- What ports were assigned
- How to access it (cd command)
- How to run the dev server
- How to remove the worktree when done
### Example worktree output
## Example Output
```
Created worktree for PR #123 at ../yaak-worktrees/pr-123

View File

@@ -37,13 +37,3 @@ The skill generates markdown-formatted release notes following this structure:
**IMPORTANT**: Always add blank lines around the markdown code fence and output the markdown code block last
**IMPORTANT**: PRs by `@gschier` should not mention the @username
## After Generating Release Notes
After outputting the release notes, ask the user if they would like to create a draft GitHub release with these notes. If they confirm, create the release using:
```bash
gh release create <tag> --draft --prerelease --title "Release <version>" --notes '<release notes>'
```
**IMPORTANT**: The release title format is "Release XXXX" where XXXX is the version WITHOUT the `v` prefix. For example, tag `v2026.2.1-beta.1` gets title "Release 2026.2.1-beta.1".

View File

@@ -1,52 +0,0 @@
name: Update Flathub
on:
release:
types: [published]
permissions:
contents: read
jobs:
update-flathub:
name: Update Flathub manifest
runs-on: ubuntu-latest
# Only run for stable releases (skip betas/pre-releases)
if: ${{ !github.event.release.prerelease }}
steps:
- name: Checkout app repo
uses: actions/checkout@v4
- name: Checkout Flathub repo
uses: actions/checkout@v4
with:
repository: flathub/app.yaak.Yaak
token: ${{ secrets.FLATHUB_TOKEN }}
path: flathub-repo
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: "22"
- name: Install source generators
run: |
pip install flatpak-node-generator tomlkit aiohttp
git clone --depth 1 https://github.com/flatpak/flatpak-builder-tools flatpak/flatpak-builder-tools
- name: Run update-manifest.sh
run: bash flatpak/update-manifest.sh "${{ github.event.release.tag_name }}" flathub-repo
- name: Commit and push to Flathub
working-directory: flathub-repo
run: |
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
git add -A
git diff --cached --quiet && echo "No changes to commit" && exit 0
git commit -m "Update to ${{ github.event.release.tag_name }}"
git push

View File

@@ -1,7 +1,7 @@
name: Generate Artifacts
on:
push:
tags: [v*]
tags: [ v* ]
jobs:
build-artifacts:
@@ -13,37 +13,37 @@ jobs:
fail-fast: false
matrix:
include:
- platform: "macos-latest" # for Arm-based Macs (M1 and above).
args: "--target aarch64-apple-darwin"
yaak_arch: "arm64"
os: "macos"
targets: "aarch64-apple-darwin"
- platform: "macos-latest" # for Intel-based Macs.
args: "--target x86_64-apple-darwin"
yaak_arch: "x64"
os: "macos"
targets: "x86_64-apple-darwin"
- platform: "ubuntu-22.04"
args: ""
yaak_arch: "x64"
os: "ubuntu"
targets: ""
- platform: "ubuntu-22.04-arm"
args: ""
yaak_arch: "arm64"
os: "ubuntu"
targets: ""
- platform: "windows-latest"
args: ""
yaak_arch: "x64"
os: "windows"
targets: ""
- platform: 'macos-latest' # for Arm-based Macs (M1 and above).
args: '--target aarch64-apple-darwin'
yaak_arch: 'arm64'
os: 'macos'
targets: 'aarch64-apple-darwin'
- platform: 'macos-latest' # for Intel-based Macs.
args: '--target x86_64-apple-darwin'
yaak_arch: 'x64'
os: 'macos'
targets: 'x86_64-apple-darwin'
- platform: 'ubuntu-22.04'
args: ''
yaak_arch: 'x64'
os: 'ubuntu'
targets: ''
- platform: 'ubuntu-22.04-arm'
args: ''
yaak_arch: 'arm64'
os: 'ubuntu'
targets: ''
- platform: 'windows-latest'
args: ''
yaak_arch: 'x64'
os: 'windows'
targets: ''
# Windows ARM64
- platform: "windows-latest"
args: "--target aarch64-pc-windows-msvc"
yaak_arch: "arm64"
os: "windows"
targets: "aarch64-pc-windows-msvc"
- platform: 'windows-latest'
args: '--target aarch64-pc-windows-msvc'
yaak_arch: 'arm64'
os: 'windows'
targets: 'aarch64-pc-windows-msvc'
runs-on: ${{ matrix.platform }}
timeout-minutes: 40
steps:
@@ -88,9 +88,6 @@ jobs:
& $exe --version
- run: npm ci
- run: npm run bootstrap
env:
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
- run: npm run lint
- name: Run JS Tests
run: npm test
@@ -102,29 +99,6 @@ jobs:
env:
YAAK_VERSION: ${{ github.ref_name }}
- name: Sign vendored binaries (macOS only)
if: matrix.os == 'macos'
env:
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
KEYCHAIN_PASSWORD: ${{ secrets.KEYCHAIN_PASSWORD }}
run: |
# Create keychain
KEYCHAIN_PATH=$RUNNER_TEMP/app-signing.keychain-db
security create-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
security set-keychain-settings -lut 21600 $KEYCHAIN_PATH
security unlock-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
# Import certificate
echo "$APPLE_CERTIFICATE" | base64 --decode > certificate.p12
security import certificate.p12 -P "$APPLE_CERTIFICATE_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
security list-keychain -d user -s $KEYCHAIN_PATH
# Sign vendored binaries with hardened runtime and their specific entitlements
codesign --force --options runtime --entitlements crates-tauri/yaak-app/macos/entitlements.yaakprotoc.plist --sign "$APPLE_SIGNING_IDENTITY" crates-tauri/yaak-app/vendored/protoc/yaakprotoc || true
codesign --force --options runtime --entitlements crates-tauri/yaak-app/macos/entitlements.yaaknode.plist --sign "$APPLE_SIGNING_IDENTITY" crates-tauri/yaak-app/vendored/node/yaaknode || true
- uses: tauri-apps/tauri-action@v0
env:
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
@@ -147,9 +121,9 @@ jobs:
AZURE_CLIENT_SECRET: ${{ matrix.os == 'windows' && secrets.AZURE_CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ matrix.os == 'windows' && secrets.AZURE_TENANT_ID }}
with:
tagName: "v__VERSION__"
releaseName: "Release __VERSION__"
releaseBody: "[Changelog __VERSION__](https://yaak.app/blog/__VERSION__)"
tagName: 'v__VERSION__'
releaseName: 'Release __VERSION__'
releaseBody: '[Changelog __VERSION__](https://yaak.app/blog/__VERSION__)'
releaseDraft: true
prerelease: true
args: "${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json"
args: '${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json'

.gitignore (vendored), 10 lines changed
View File

@@ -44,13 +44,3 @@ crates-tauri/yaak-app/tauri.worktree.conf.json
# Tauri auto-generated permission files
**/permissions/autogenerated
**/permissions/schemas
# Flatpak build artifacts
flatpak-repo/
.flatpak-builder/
flatpak/flatpak-builder-tools/
flatpak/cargo-sources.json
flatpak/node-sources.json
# Local Codex desktop env state
.codex/environments/environment.toml

Cargo.lock (generated), 458 lines changed

File diff suppressed because it is too large.

View File

@@ -15,7 +15,6 @@ members = [
"crates/yaak-templates",
"crates/yaak-tls",
"crates/yaak-ws",
"crates/yaak-api",
# CLI crates
"crates-cli/yaak-cli",
# Tauri-specific crates
@@ -59,7 +58,6 @@ yaak-sync = { path = "crates/yaak-sync" }
yaak-templates = { path = "crates/yaak-templates" }
yaak-tls = { path = "crates/yaak-tls" }
yaak-ws = { path = "crates/yaak-ws" }
yaak-api = { path = "crates/yaak-api" }
# Internal crates - Tauri-specific
yaak-fonts = { path = "crates-tauri/yaak-fonts" }

View File

@@ -1,6 +1,6 @@
<p align="center">
<a href="https://github.com/JamesIves/github-sponsors-readme-action">
<img width="200px" src="https://github.com/mountain-loop/yaak/raw/main/crates-tauri/yaak-app/icons/icon.png">
<img width="200px" src="https://github.com/mountain-loop/yaak/raw/main/src-tauri/icons/icon.png">
</a>
</p>
@@ -22,7 +22,7 @@
<!-- sponsors-premium --><a href="https://github.com/MVST-Solutions"><img src="https:&#x2F;&#x2F;github.com&#x2F;MVST-Solutions.png" width="80px" alt="User avatar: MVST-Solutions" /></a>&nbsp;&nbsp;<a href="https://github.com/dharsanb"><img src="https:&#x2F;&#x2F;github.com&#x2F;dharsanb.png" width="80px" alt="User avatar: dharsanb" /></a>&nbsp;&nbsp;<a href="https://github.com/railwayapp"><img src="https:&#x2F;&#x2F;github.com&#x2F;railwayapp.png" width="80px" alt="User avatar: railwayapp" /></a>&nbsp;&nbsp;<a href="https://github.com/caseyamcl"><img src="https:&#x2F;&#x2F;github.com&#x2F;caseyamcl.png" width="80px" alt="User avatar: caseyamcl" /></a>&nbsp;&nbsp;<a href="https://github.com/bytebase"><img src="https:&#x2F;&#x2F;github.com&#x2F;bytebase.png" width="80px" alt="User avatar: bytebase" /></a>&nbsp;&nbsp;<a href="https://github.com/"><img src="https:&#x2F;&#x2F;raw.githubusercontent.com&#x2F;JamesIves&#x2F;github-sponsors-readme-action&#x2F;dev&#x2F;.github&#x2F;assets&#x2F;placeholder.png" width="80px" alt="User avatar: " /></a>&nbsp;&nbsp;<!-- sponsors-premium -->
</p>
<p align="center">
<!-- sponsors-base --><a href="https://github.com/seanwash"><img src="https:&#x2F;&#x2F;github.com&#x2F;seanwash.png" width="50px" alt="User avatar: seanwash" /></a>&nbsp;&nbsp;<a href="https://github.com/jerath"><img src="https:&#x2F;&#x2F;github.com&#x2F;jerath.png" width="50px" alt="User avatar: jerath" /></a>&nbsp;&nbsp;<a href="https://github.com/itsa-sh"><img src="https:&#x2F;&#x2F;github.com&#x2F;itsa-sh.png" width="50px" alt="User avatar: itsa-sh" /></a>&nbsp;&nbsp;<a href="https://github.com/dmmulroy"><img src="https:&#x2F;&#x2F;github.com&#x2F;dmmulroy.png" width="50px" alt="User avatar: dmmulroy" /></a>&nbsp;&nbsp;<a href="https://github.com/timcole"><img src="https:&#x2F;&#x2F;github.com&#x2F;timcole.png" width="50px" alt="User avatar: timcole" /></a>&nbsp;&nbsp;<a href="https://github.com/VLZH"><img src="https:&#x2F;&#x2F;github.com&#x2F;VLZH.png" width="50px" alt="User avatar: VLZH" /></a>&nbsp;&nbsp;<a href="https://github.com/terasaka2k"><img src="https:&#x2F;&#x2F;github.com&#x2F;terasaka2k.png" width="50px" alt="User avatar: terasaka2k" /></a>&nbsp;&nbsp;<a href="https://github.com/andriyor"><img src="https:&#x2F;&#x2F;github.com&#x2F;andriyor.png" width="50px" alt="User avatar: andriyor" /></a>&nbsp;&nbsp;<a href="https://github.com/majudhu"><img src="https:&#x2F;&#x2F;github.com&#x2F;majudhu.png" width="50px" alt="User avatar: majudhu" /></a>&nbsp;&nbsp;<a href="https://github.com/axelrindle"><img src="https:&#x2F;&#x2F;github.com&#x2F;axelrindle.png" width="50px" alt="User avatar: axelrindle" /></a>&nbsp;&nbsp;<a href="https://github.com/jirizverina"><img src="https:&#x2F;&#x2F;github.com&#x2F;jirizverina.png" width="50px" alt="User avatar: jirizverina" /></a>&nbsp;&nbsp;<a href="https://github.com/chip-well"><img src="https:&#x2F;&#x2F;github.com&#x2F;chip-well.png" width="50px" alt="User avatar: chip-well" /></a>&nbsp;&nbsp;<a href="https://github.com/GRAYAH"><img src="https:&#x2F;&#x2F;github.com&#x2F;GRAYAH.png" width="50px" alt="User avatar: GRAYAH" /></a>&nbsp;&nbsp;<a href="https://github.com/flashblaze"><img src="https:&#x2F;&#x2F;github.com&#x2F;flashblaze.png" width="50px" alt="User avatar: flashblaze" /></a>&nbsp;&nbsp;<!-- sponsors-base -->
<!-- sponsors-base --><a href="https://github.com/seanwash"><img src="https:&#x2F;&#x2F;github.com&#x2F;seanwash.png" width="50px" alt="User avatar: seanwash" /></a>&nbsp;&nbsp;<a href="https://github.com/jerath"><img src="https:&#x2F;&#x2F;github.com&#x2F;jerath.png" width="50px" alt="User avatar: jerath" /></a>&nbsp;&nbsp;<a href="https://github.com/itsa-sh"><img src="https:&#x2F;&#x2F;github.com&#x2F;itsa-sh.png" width="50px" alt="User avatar: itsa-sh" /></a>&nbsp;&nbsp;<a href="https://github.com/dmmulroy"><img src="https:&#x2F;&#x2F;github.com&#x2F;dmmulroy.png" width="50px" alt="User avatar: dmmulroy" /></a>&nbsp;&nbsp;<a href="https://github.com/timcole"><img src="https:&#x2F;&#x2F;github.com&#x2F;timcole.png" width="50px" alt="User avatar: timcole" /></a>&nbsp;&nbsp;<a href="https://github.com/VLZH"><img src="https:&#x2F;&#x2F;github.com&#x2F;VLZH.png" width="50px" alt="User avatar: VLZH" /></a>&nbsp;&nbsp;<a href="https://github.com/terasaka2k"><img src="https:&#x2F;&#x2F;github.com&#x2F;terasaka2k.png" width="50px" alt="User avatar: terasaka2k" /></a>&nbsp;&nbsp;<a href="https://github.com/andriyor"><img src="https:&#x2F;&#x2F;github.com&#x2F;andriyor.png" width="50px" alt="User avatar: andriyor" /></a>&nbsp;&nbsp;<a href="https://github.com/majudhu"><img src="https:&#x2F;&#x2F;github.com&#x2F;majudhu.png" width="50px" alt="User avatar: majudhu" /></a>&nbsp;&nbsp;<a href="https://github.com/axelrindle"><img src="https:&#x2F;&#x2F;github.com&#x2F;axelrindle.png" width="50px" alt="User avatar: axelrindle" /></a>&nbsp;&nbsp;<a href="https://github.com/jirizverina"><img src="https:&#x2F;&#x2F;github.com&#x2F;jirizverina.png" width="50px" alt="User avatar: jirizverina" /></a>&nbsp;&nbsp;<a href="https://github.com/chip-well"><img src="https:&#x2F;&#x2F;github.com&#x2F;chip-well.png" width="50px" alt="User avatar: chip-well" /></a>&nbsp;&nbsp;<a href="https://github.com/GRAYAH"><img src="https:&#x2F;&#x2F;github.com&#x2F;GRAYAH.png" width="50px" alt="User avatar: GRAYAH" /></a>&nbsp;&nbsp;<!-- sponsors-base -->
</p>
![Yaak API Client](https://yaak.app/static/screenshot.png)
@@ -64,7 +64,7 @@ visit [`DEVELOPMENT.md`](DEVELOPMENT.md) for tips on setting up your environment
## Useful Resources
- [Feedback and Bug Reports](https://feedback.yaak.app)
- [Documentation](https://yaak.app/docs)
- [Documentation](https://feedback.yaak.app/help)
- [Yaak vs Postman](https://yaak.app/alternatives/postman)
- [Yaak vs Bruno](https://yaak.app/alternatives/bruno)
- [Yaak vs Insomnia](https://yaak.app/alternatives/insomnia)

View File

@@ -47,8 +47,7 @@
"!src-web/vite.config.ts",
"!src-web/routeTree.gen.ts",
"!packages/plugin-runtime-types/lib",
"!**/bindings",
"!flatpak"
"!**/bindings"
]
}
}

View File

@@ -13,7 +13,6 @@ clap = { version = "4", features = ["derive"] }
dirs = "6"
env_logger = "0.11"
log = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
yaak-crypto = { workspace = true }
@@ -21,8 +20,3 @@ yaak-http = { workspace = true }
yaak-models = { workspace = true }
yaak-plugins = { workspace = true }
yaak-templates = { workspace = true }
[dev-dependencies]
assert_cmd = "2"
predicates = "3"
tempfile = "3"

View File

@@ -1,340 +0,0 @@
# CLI Command Architecture Plan
## Goal
Redesign the yaak-cli command structure to use a resource-oriented `<resource> <action>`
pattern that scales well, is discoverable, and supports both human and LLM workflows.
## Status Snapshot
Current branch state:
- Modular CLI structure with command modules and shared `CliContext`
- Resource/action hierarchy in place for:
- `workspace list|show|create|update|delete`
- `request list|show|create|update|send|delete`
- `folder list|show|create|update|delete`
- `environment list|show|create|update|delete`
- Top-level `send` exists as a request-send shortcut (flexible request/folder/workspace resolution not yet implemented)
- Legacy `get` command removed
- JSON create/update flow implemented (`--json` and positional JSON shorthand)
- No `request schema` command yet
Progress checklist:
- [x] Phase 1 complete
- [x] Phase 2 complete
- [x] Phase 3 complete
- [ ] Phase 4 complete
- [ ] Phase 5 complete
- [ ] Phase 6 complete
## Command Architecture
### Design Principles
- **Resource-oriented**: top-level commands are nouns, subcommands are verbs
- **Polymorphic requests**: `request` covers HTTP, gRPC, and WebSocket — the CLI
resolves the type via `get_any_request` and adapts behavior accordingly
- **Simple creation, full-fidelity via JSON**: human-friendly flags for basic creation,
`--json` for full control (targeted at LLM and scripting workflows)
- **Runtime schema introspection**: `request schema` outputs JSON Schema for the request
models, with dynamic auth fields populated from loaded plugins at runtime
- **Destructive actions require confirmation**: `delete` commands prompt for user
confirmation before proceeding. Can be bypassed with `--yes` / `-y` for scripting
### Commands
```
# Top-level shortcut
yaakcli send <id> [-e <env_id>] # id can be a request, folder, or workspace
# Resource commands
yaakcli workspace list
yaakcli workspace show <id>
yaakcli workspace create --name <name>
yaakcli workspace create --json '{"name": "My Workspace"}'
yaakcli workspace create '{"name": "My Workspace"}' # positional JSON shorthand
yaakcli workspace update --json '{"id": "wk_abc", "name": "New Name"}'
yaakcli workspace delete <id>
yaakcli request list <workspace_id>
yaakcli request show <id>
yaakcli request create <workspace_id> --name <name> --url <url> [--method GET]
yaakcli request create --json '{"workspaceId": "wk_abc", "url": "..."}'
yaakcli request update --json '{"id": "rq_abc", "url": "https://new.com"}'
yaakcli request send <id> [-e <env_id>]
yaakcli request delete <id>
yaakcli request schema <http|grpc|websocket>
yaakcli folder list <workspace_id>
yaakcli folder show <id>
yaakcli folder create <workspace_id> --name <name>
yaakcli folder create --json '{"workspaceId": "wk_abc", "name": "Auth"}'
yaakcli folder update --json '{"id": "fl_abc", "name": "New Name"}'
yaakcli folder delete <id>
yaakcli environment list <workspace_id>
yaakcli environment show <id>
yaakcli environment create <workspace_id> --name <name>
yaakcli environment create --json '{"workspaceId": "wk_abc", "name": "Production"}'
yaakcli environment update --json '{"id": "ev_abc", ...}'
yaakcli environment delete <id>
```
### `send` — Top-Level Shortcut
`yaakcli send <id>` is a convenience alias that accepts any sendable ID. It tries
each type in order via DB lookups (short-circuiting on first match):
1. Request (HTTP, gRPC, or WebSocket via `get_any_request`)
2. Folder (sends all requests in the folder)
3. Workspace (sends all requests in the workspace)
ID prefixes exist (e.g. `rq_`, `fl_`, `wk_`) but are not relied upon — resolution
is purely by DB lookup.
`request send <id>` is the same but restricted to request IDs only.
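A minimal, self-contained sketch of that resolution chain (all names here are assumed; the stub lookups match on ID prefixes only so the example runs standalone, whereas the real CLI resolves purely by DB lookup):
```rust
// Hypothetical sketch: resolve a sendable ID by trying each lookup in order.
enum Sendable {
    Request(String),
    Folder(String),
    Workspace(String),
}
// Stubs standing in for the real DB accessors (get_any_request, get_folder,
// get_workspace). Prefix matching here is a demo shortcut only.
fn get_any_request(id: &str) -> Option<String> { id.starts_with("rq_").then(|| id.to_string()) }
fn get_folder(id: &str) -> Option<String> { id.starts_with("fl_").then(|| id.to_string()) }
fn get_workspace(id: &str) -> Option<String> { id.starts_with("wk_").then(|| id.to_string()) }
fn resolve(id: &str) -> Option<Sendable> {
    // Short-circuits on the first match, mirroring the order above.
    get_any_request(id).map(Sendable::Request)
        .or_else(|| get_folder(id).map(Sendable::Folder))
        .or_else(|| get_workspace(id).map(Sendable::Workspace))
}
```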
### Request Send — Polymorphic Behavior
`send` means "execute this request" regardless of protocol:
- **HTTP**: send request, print response, exit
- **gRPC**: invoke the method; for streaming, stream output to stdout until done/Ctrl+C
- **WebSocket**: connect, stream messages to stdout until closed/Ctrl+C
### `request schema` — Runtime JSON Schema
Outputs a JSON Schema describing the full request shape, including dynamic fields:
1. Generate base schema from `schemars::JsonSchema` derive on the Rust model structs
2. Load plugins, collect auth strategy definitions and their form inputs
3. Merge plugin-defined auth fields into the `authentication` property as a `oneOf`
4. Output the combined schema as JSON
This lets an LLM call `schema`, read the shape, and construct valid JSON for
`create --json` or `update --json`.
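A minimal sketch of step 1 using `schemars` (added in Phase 4); the struct below is an illustrative stub rather than the real `yaak-models` type, and the plugin-merge steps are omitted:
```rust
use schemars::{schema_for, JsonSchema};
// Illustrative stand-in for the real HttpRequest model.
#[derive(JsonSchema)]
struct HttpRequest {
    url: String,
    method: String,
}
fn main() {
    // Generate the base schema; plugin auth fields would be merged in afterward.
    let schema = schema_for!(HttpRequest);
    println!("{}", serde_json::to_string_pretty(&schema).unwrap());
}
```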
## Implementation Steps
### Phase 1: Restructure commands (no new functionality)
Refactor `main.rs` into the new resource/action pattern using clap subcommand nesting.
Existing behavior stays the same, just reorganized. Remove the `get` command.
1. Create module structure: `commands/workspace.rs`, `commands/request.rs`, etc.
2. Define nested clap enums:
```rust
enum Commands {
Send(SendArgs),
Workspace(WorkspaceArgs),
Request(RequestArgs),
Folder(FolderArgs),
Environment(EnvironmentArgs),
}
```
3. Move existing `Workspaces` logic into `workspace list`
4. Move existing `Requests` logic into `request list`
5. Move existing `Send` logic into `request send`
6. Move existing `Create` logic into `request create`
7. Delete the `Get` command entirely
8. Extract shared setup (DB init, plugin init, encryption) into a reusable context struct
### Phase 2: Add missing CRUD commands
Status: complete
1. `workspace show <id>`
2. `workspace create --name <name>` (and `--json`)
3. `workspace update --json`
4. `workspace delete <id>`
5. `request show <id>` (JSON output of the full request model)
6. `request delete <id>`
7. `folder list <workspace_id>`
8. `folder show <id>`
9. `folder create <workspace_id> --name <name>` (and `--json`)
10. `folder update --json`
11. `folder delete <id>`
12. `environment list <workspace_id>`
13. `environment show <id>`
14. `environment create <workspace_id> --name <name>` (and `--json`)
15. `environment update --json`
16. `environment delete <id>`
### Phase 3: JSON input for create/update
Both commands accept JSON via `--json <string>` or as a positional argument (detected
by leading `{`). They follow the same upsert pattern as the plugin API.
- **`create --json`**: JSON must include `workspaceId`. Must NOT include `id` (or
use empty string `""`). Deserializes into the model with defaults for missing fields,
then upserts (insert).
- **`update --json`**: JSON must include `id`. Performs a fetch-merge-upsert:
1. Fetch the existing model from DB
2. Serialize it to `serde_json::Value`
3. Deep-merge the user's partial JSON on top (JSON Merge Patch / RFC 7386 semantics)
4. Deserialize back into the typed model
5. Upsert (update)
This matches how the MCP server plugin already does it (fetch existing, spread, override),
but the CLI handles the merge server-side so callers don't have to.
Setting a field to `null` removes it (for `Option<T>` fields), per RFC 7386.
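A tiny runnable demonstration of those semantics via the `json-patch` crate mentioned below (assuming its RFC 7386 `merge` helper):
```rust
use serde_json::json;
fn main() {
    let mut existing = json!({"id": "rq_abc", "name": "Users", "description": "old"});
    // Partial patch: rename, and clear `description` with null.
    let patch = json!({"name": "Users v2", "description": null});
    json_patch::merge(&mut existing, &patch);
    assert_eq!(existing, json!({"id": "rq_abc", "name": "Users v2"}));
}
```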
Implementation:
1. Add `--json` flag and positional JSON detection to `create` commands
2. Add `update` commands with required `--json` flag
3. Implement JSON merge utility (or use `json-patch` crate)
### Phase 4: Runtime schema generation
1. Add `schemars` dependency to `yaak-models`
2. Derive `JsonSchema` on `HttpRequest`, `GrpcRequest`, `WebsocketRequest`, and their
nested types (`HttpRequestHeader`, `HttpUrlParameter`, etc.)
3. Implement `request schema` command:
- Generate base schema from schemars
- Query plugins for auth strategy form inputs
- Convert plugin form inputs into JSON Schema properties
- Merge into the `authentication` field
- Print to stdout
### Phase 5: Polymorphic send
1. Update `request send` to use `get_any_request` to resolve the request type
2. Match on `AnyRequest` variant and dispatch to the appropriate sender:
- `AnyRequest::HttpRequest` — existing HTTP send logic
- `AnyRequest::GrpcRequest` — gRPC invoke (future implementation)
- `AnyRequest::WebsocketRequest` — WebSocket connect (future implementation)
3. gRPC and WebSocket send can initially return "not yet implemented" errors
### Phase 6: Top-level `send` and folder/workspace send
1. Add top-level `yaakcli send <id>` command
2. Resolve ID by trying DB lookups in order: any_request → folder → workspace
3. For folder: list all requests in folder, send each
4. For workspace: list all requests in workspace, send each
5. Add execution options: `--sequential` (default), `--parallel`, `--fail-fast` (see the sketch below)
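A hypothetical sketch of step 5's sequential fan-out with `--fail-fast` semantics; `send_one` is a stub for the real per-request send:
```rust
fn send_all(ids: &[&str], fail_fast: bool) -> (usize, usize) {
    let (mut ok, mut failed) = (0, 0);
    for id in ids {
        match send_one(id) {
            Ok(()) => ok += 1,
            Err(e) => {
                eprintln!("{id} failed: {e}");
                failed += 1;
                if fail_fast {
                    break; // stop on first failure when requested
                }
            }
        }
    }
    (ok, failed) // caller exits non-zero when failed > 0
}
fn send_one(id: &str) -> Result<(), String> {
    // Stub: the real implementation dispatches to the protocol-specific sender.
    if id.is_empty() { Err("empty id".into()) } else { Ok(()) }
}
```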
## Execution Plan (PR Slices)
### PR 1: Command tree refactor + compatibility aliases
Scope:
1. Introduce `commands/` modules and a `CliContext` for shared setup
2. Add new clap hierarchy (`workspace`, `request`, `folder`, `environment`)
3. Route existing behavior into:
- `workspace list`
- `request list <workspace_id>`
- `request send <id>`
- `request create <workspace_id> ...`
4. Keep compatibility aliases temporarily:
- `workspaces` -> `workspace list`
- `requests <workspace_id>` -> `request list <workspace_id>`
- `create ...` -> `request create ...`
5. Remove `get` and update help text
Acceptance criteria:
- `yaakcli --help` shows noun/verb structure
- Existing list/send/create workflows still work
- No behavior change in HTTP send output format
### PR 2: CRUD surface area
Scope:
1. Implement `show/create/update/delete` for `workspace`, `request`, `folder`, `environment`
2. Ensure delete commands require confirmation by default (`--yes` bypass)
3. Normalize output format for list/show/create/update/delete responses
Acceptance criteria:
- Every command listed in the "Commands" section parses and executes
- Delete commands are safe by default in interactive terminals
- `--yes` supports non-interactive scripts
### PR 3: JSON input + merge patch semantics
Scope:
1. Add shared parser for `--json` and positional JSON shorthand
2. Add `create --json` and `update --json` for all mutable resources
3. Implement server-side RFC 7386 merge patch behavior
4. Add guardrails:
- `create --json`: reject non-empty `id`
- `update --json`: require `id`
Acceptance criteria:
- Partial `update --json` only modifies provided keys
- `null` clears optional values
- Invalid JSON and missing required fields return actionable errors
### PR 4: `request schema` and plugin auth integration
Scope:
1. Add `schemars` to `yaak-models` and derive `JsonSchema` for request models
2. Implement `request schema <http|grpc|websocket>`
3. Merge plugin auth form inputs into `authentication` schema at runtime
Acceptance criteria:
- Command prints valid JSON schema
- Schema reflects installed auth providers at runtime
- No panic when plugins fail to initialize (degrade gracefully)
### PR 5: Polymorphic request send
Scope:
1. Replace request resolution in `request send` with `get_any_request`
2. Dispatch by request type
3. Keep HTTP fully functional
4. Return explicit NYI errors for gRPC/WebSocket until implemented
Acceptance criteria:
- HTTP behavior remains unchanged
- gRPC/WebSocket IDs are recognized and return an explicit not-yet-implemented status
### PR 6: Top-level `send` + bulk execution
Scope:
1. Add top-level `send <id>` for request/folder/workspace IDs
2. Implement folder/workspace fan-out execution
3. Add execution controls: `--sequential`, `--parallel`, `--fail-fast`
Acceptance criteria:
- Correct ID dispatch order: request -> folder -> workspace
- Deterministic summary output (success/failure counts)
- Non-zero exit code when any request fails (unless explicitly configured otherwise)
## Validation Matrix
1. CLI parsing tests for every command path (including aliases while retained)
2. Integration tests against temp SQLite DB for CRUD flows
3. Snapshot tests for output text where scripting compatibility matters
4. Manual smoke tests:
- Send HTTP request with template/rendered vars
- JSON create/update for each resource
- Delete confirmation and `--yes`
- Top-level `send` on request/folder/workspace
## Open Questions
1. Should compatibility aliases (`workspaces`, `requests`, `create`) be removed immediately or after one release cycle?
2. For bulk `send`, should default behavior stop on first failure or continue and summarize?
3. Should command output default to human-readable text with an optional `--format json`, or return JSON by default for `show`/`list`?
4. For `request schema`, should plugin-derived auth fields be namespaced by plugin ID to avoid collisions?
## Crate Changes
- **yaak-cli**: restructure into modules, new clap hierarchy
- **yaak-models**: add `schemars` dependency, derive `JsonSchema` on model structs
(current derives: `Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS`)

View File

@@ -1,87 +0,0 @@
# yaak-cli
Command-line interface for Yaak.
## Command Overview
Current top-level commands:
```text
yaakcli send <request_id>
yaakcli workspace list
yaakcli workspace show <workspace_id>
yaakcli workspace create --name <name>
yaakcli workspace create --json '{"name":"My Workspace"}'
yaakcli workspace create '{"name":"My Workspace"}'
yaakcli workspace update --json '{"id":"wk_abc","description":"Updated"}'
yaakcli workspace delete <workspace_id> [--yes]
yaakcli request list <workspace_id>
yaakcli request show <request_id>
yaakcli request send <request_id>
yaakcli request create <workspace_id> --name <name> --url <url> [--method GET]
yaakcli request create --json '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
yaakcli request create '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
yaakcli request delete <request_id> [--yes]
yaakcli folder list <workspace_id>
yaakcli folder show <folder_id>
yaakcli folder create <workspace_id> --name <name>
yaakcli folder create --json '{"workspaceId":"wk_abc","name":"Auth"}'
yaakcli folder create '{"workspaceId":"wk_abc","name":"Auth"}'
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
yaakcli folder delete <folder_id> [--yes]
yaakcli environment list <workspace_id>
yaakcli environment show <environment_id>
yaakcli environment create <workspace_id> --name <name>
yaakcli environment create --json '{"workspaceId":"wk_abc","name":"Production"}'
yaakcli environment create '{"workspaceId":"wk_abc","name":"Production"}'
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
yaakcli environment delete <environment_id> [--yes]
```
Global options:
- `--data-dir <path>`: use a custom data directory
- `-e, --environment <id>`: environment to use during request rendering/sending
- `-v, --verbose`: verbose logging and send output
Notes:
- `send` is currently a shortcut for sending an HTTP request by ID.
- `delete` commands prompt for confirmation unless `--yes` is provided.
- In non-interactive mode, `delete` commands require `--yes`.
- `create` and `update` commands support `--json` and positional JSON shorthand.
- `update` uses JSON Merge Patch semantics (RFC 7386) for partial updates.
## Examples
```bash
yaakcli workspace list
yaakcli workspace create --name "My Workspace"
yaakcli workspace show wk_abc
yaakcli workspace update --json '{"id":"wk_abc","description":"Team workspace"}'
yaakcli request list wk_abc
yaakcli request show rq_abc
yaakcli request create wk_abc --name "Users" --url "https://api.example.com/users"
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
yaakcli request send rq_abc -e ev_abc
yaakcli request delete rq_abc --yes
yaakcli folder create wk_abc --name "Auth"
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
yaakcli environment create wk_abc --name "Production"
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
```
## Roadmap
Planned command expansion (request schema and polymorphic send) is tracked in `PLAN.md`.
When command behavior changes, update this README and verify with:
```bash
cargo run -q -p yaak-cli -- --help
cargo run -q -p yaak-cli -- request --help
cargo run -q -p yaak-cli -- workspace --help
cargo run -q -p yaak-cli -- folder --help
cargo run -q -p yaak-cli -- environment --help
```

View File

@@ -1,282 +0,0 @@
use clap::{Args, Parser, Subcommand};
use std::path::PathBuf;
#[derive(Parser)]
#[command(name = "yaakcli")]
#[command(about = "Yaak CLI - API client from the command line")]
pub struct Cli {
/// Use a custom data directory
#[arg(long, global = true)]
pub data_dir: Option<PathBuf>,
/// Environment ID to use for variable substitution
#[arg(long, short, global = true)]
pub environment: Option<String>,
/// Enable verbose logging
#[arg(long, short, global = true)]
pub verbose: bool,
#[command(subcommand)]
pub command: Commands,
}
#[derive(Subcommand)]
pub enum Commands {
/// Send an HTTP request by ID
Send(SendArgs),
/// Workspace commands
Workspace(WorkspaceArgs),
/// Request commands
Request(RequestArgs),
/// Folder commands
Folder(FolderArgs),
/// Environment commands
Environment(EnvironmentArgs),
}
#[derive(Args)]
pub struct SendArgs {
/// Request ID
pub request_id: String,
}
#[derive(Args)]
pub struct WorkspaceArgs {
#[command(subcommand)]
pub command: WorkspaceCommands,
}
#[derive(Subcommand)]
pub enum WorkspaceCommands {
/// List all workspaces
List,
/// Show a workspace as JSON
Show {
/// Workspace ID
workspace_id: String,
},
/// Create a workspace
Create {
/// Workspace name
#[arg(short, long)]
name: Option<String>,
/// JSON payload
#[arg(long, conflicts_with = "json_input")]
json: Option<String>,
/// JSON payload shorthand
#[arg(value_name = "JSON", conflicts_with = "json")]
json_input: Option<String>,
},
/// Update a workspace
Update {
/// JSON payload
#[arg(long, conflicts_with = "json_input")]
json: Option<String>,
/// JSON payload shorthand
#[arg(value_name = "JSON", conflicts_with = "json")]
json_input: Option<String>,
},
/// Delete a workspace
Delete {
/// Workspace ID
workspace_id: String,
/// Skip confirmation prompt
#[arg(short, long)]
yes: bool,
},
}
#[derive(Args)]
pub struct RequestArgs {
#[command(subcommand)]
pub command: RequestCommands,
}
#[derive(Subcommand)]
pub enum RequestCommands {
/// List requests in a workspace
List {
/// Workspace ID
workspace_id: String,
},
/// Show a request as JSON
Show {
/// Request ID
request_id: String,
},
/// Send an HTTP request by ID
Send {
/// Request ID
request_id: String,
},
/// Create a new HTTP request
Create {
/// Workspace ID (or positional JSON payload shorthand)
workspace_id: Option<String>,
/// Request name
#[arg(short, long)]
name: Option<String>,
/// HTTP method
#[arg(short, long)]
method: Option<String>,
/// URL
#[arg(short, long)]
url: Option<String>,
/// JSON payload
#[arg(long)]
json: Option<String>,
},
/// Update an HTTP request
Update {
/// JSON payload
#[arg(long, conflicts_with = "json_input")]
json: Option<String>,
/// JSON payload shorthand
#[arg(value_name = "JSON", conflicts_with = "json")]
json_input: Option<String>,
},
/// Delete a request
Delete {
/// Request ID
request_id: String,
/// Skip confirmation prompt
#[arg(short, long)]
yes: bool,
},
}
#[derive(Args)]
pub struct FolderArgs {
#[command(subcommand)]
pub command: FolderCommands,
}
#[derive(Subcommand)]
pub enum FolderCommands {
/// List folders in a workspace
List {
/// Workspace ID
workspace_id: String,
},
/// Show a folder as JSON
Show {
/// Folder ID
folder_id: String,
},
/// Create a folder
Create {
/// Workspace ID (or positional JSON payload shorthand)
workspace_id: Option<String>,
/// Folder name
#[arg(short, long)]
name: Option<String>,
/// JSON payload
#[arg(long)]
json: Option<String>,
},
/// Update a folder
Update {
/// JSON payload
#[arg(long, conflicts_with = "json_input")]
json: Option<String>,
/// JSON payload shorthand
#[arg(value_name = "JSON", conflicts_with = "json")]
json_input: Option<String>,
},
/// Delete a folder
Delete {
/// Folder ID
folder_id: String,
/// Skip confirmation prompt
#[arg(short, long)]
yes: bool,
},
}
#[derive(Args)]
pub struct EnvironmentArgs {
#[command(subcommand)]
pub command: EnvironmentCommands,
}
#[derive(Subcommand)]
pub enum EnvironmentCommands {
/// List environments in a workspace
List {
/// Workspace ID
workspace_id: String,
},
/// Show an environment as JSON
Show {
/// Environment ID
environment_id: String,
},
/// Create an environment
Create {
/// Workspace ID (or positional JSON payload shorthand)
workspace_id: Option<String>,
/// Environment name
#[arg(short, long)]
name: Option<String>,
/// JSON payload
#[arg(long)]
json: Option<String>,
},
/// Update an environment
Update {
/// JSON payload
#[arg(long, conflicts_with = "json_input")]
json: Option<String>,
/// JSON payload shorthand
#[arg(value_name = "JSON", conflicts_with = "json")]
json_input: Option<String>,
},
/// Delete an environment
Delete {
/// Environment ID
environment_id: String,
/// Skip confirmation prompt
#[arg(short, long)]
yes: bool,
},
}

View File

@@ -1,16 +0,0 @@
use std::io::{self, IsTerminal, Write};
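/// Prompt for a y/N confirmation on stdin; exits with code 1 when stdin is
/// not a TTY, since non-interactive deletes must pass --yes.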
pub fn confirm_delete(resource_name: &str, resource_id: &str) -> bool {
if !io::stdin().is_terminal() {
eprintln!("Refusing to delete in non-interactive mode without --yes");
std::process::exit(1);
}
print!("Delete {resource_name} {resource_id}? [y/N]: ");
io::stdout().flush().expect("Failed to flush stdout");
let mut input = String::new();
io::stdin().read_line(&mut input).expect("Failed to read confirmation");
matches!(input.trim().to_lowercase().as_str(), "y" | "yes")
}

View File

@@ -1,134 +0,0 @@
use crate::cli::{EnvironmentArgs, EnvironmentCommands};
use crate::commands::confirm::confirm_delete;
use crate::commands::json::{
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
validate_create_id,
};
use crate::context::CliContext;
use yaak_models::models::Environment;
use yaak_models::util::UpdateSource;
pub fn run(ctx: &CliContext, args: EnvironmentArgs) {
match args.command {
EnvironmentCommands::List { workspace_id } => list(ctx, &workspace_id),
EnvironmentCommands::Show { environment_id } => show(ctx, &environment_id),
EnvironmentCommands::Create { workspace_id, name, json } => {
create(ctx, workspace_id, name, json)
}
EnvironmentCommands::Update { json, json_input } => update(ctx, json, json_input),
EnvironmentCommands::Delete { environment_id, yes } => delete(ctx, &environment_id, yes),
}
}
fn list(ctx: &CliContext, workspace_id: &str) {
let environments =
ctx.db().list_environments_ensure_base(workspace_id).expect("Failed to list environments");
if environments.is_empty() {
println!("No environments found in workspace {}", workspace_id);
} else {
for environment in environments {
println!("{} - {} ({})", environment.id, environment.name, environment.parent_model);
}
}
}
fn show(ctx: &CliContext, environment_id: &str) {
let environment = ctx.db().get_environment(environment_id).expect("Failed to get environment");
let output =
serde_json::to_string_pretty(&environment).expect("Failed to serialize environment");
println!("{output}");
}
fn create(
ctx: &CliContext,
workspace_id: Option<String>,
name: Option<String>,
json: Option<String>,
) {
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
panic!("environment create cannot combine workspace_id with --json payload");
}
let payload = parse_optional_json(
json,
workspace_id.clone().filter(|v| is_json_shorthand(v)),
"environment create",
);
if let Some(payload) = payload {
if name.is_some() {
panic!("environment create cannot combine --name with JSON payload");
}
validate_create_id(&payload, "environment");
let mut environment: Environment =
serde_json::from_value(payload).expect("Failed to parse environment create JSON");
if environment.workspace_id.is_empty() {
panic!("environment create JSON requires non-empty \"workspaceId\"");
}
if environment.parent_model.is_empty() {
environment.parent_model = "environment".to_string();
}
let created = ctx
.db()
.upsert_environment(&environment, &UpdateSource::Sync)
.expect("Failed to create environment");
println!("Created environment: {}", created.id);
return;
}
let workspace_id = workspace_id.unwrap_or_else(|| {
panic!("environment create requires workspace_id unless JSON payload is provided")
});
let name = name.unwrap_or_else(|| {
panic!("environment create requires --name unless JSON payload is provided")
});
let environment = Environment {
workspace_id,
name,
parent_model: "environment".to_string(),
..Default::default()
};
let created = ctx
.db()
.upsert_environment(&environment, &UpdateSource::Sync)
.expect("Failed to create environment");
println!("Created environment: {}", created.id);
}
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) {
let patch = parse_required_json(json, json_input, "environment update");
let id = require_id(&patch, "environment update");
let existing = ctx.db().get_environment(&id).expect("Failed to get environment for update");
let updated = apply_merge_patch(&existing, &patch, &id, "environment update");
let saved = ctx
.db()
.upsert_environment(&updated, &UpdateSource::Sync)
.expect("Failed to update environment");
println!("Updated environment: {}", saved.id);
}
fn delete(ctx: &CliContext, environment_id: &str, yes: bool) {
if !yes && !confirm_delete("environment", environment_id) {
println!("Aborted");
return;
}
let deleted = ctx
.db()
.delete_environment_by_id(environment_id, &UpdateSource::Sync)
.expect("Failed to delete environment");
println!("Deleted environment: {}", deleted.id);
}

View File

@@ -1,115 +0,0 @@
use crate::cli::{FolderArgs, FolderCommands};
use crate::commands::confirm::confirm_delete;
use crate::commands::json::{
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
validate_create_id,
};
use crate::context::CliContext;
use yaak_models::models::Folder;
use yaak_models::util::UpdateSource;
pub fn run(ctx: &CliContext, args: FolderArgs) {
match args.command {
FolderCommands::List { workspace_id } => list(ctx, &workspace_id),
FolderCommands::Show { folder_id } => show(ctx, &folder_id),
FolderCommands::Create { workspace_id, name, json } => {
create(ctx, workspace_id, name, json)
}
FolderCommands::Update { json, json_input } => update(ctx, json, json_input),
FolderCommands::Delete { folder_id, yes } => delete(ctx, &folder_id, yes),
}
}
fn list(ctx: &CliContext, workspace_id: &str) {
let folders = ctx.db().list_folders(workspace_id).expect("Failed to list folders");
if folders.is_empty() {
println!("No folders found in workspace {}", workspace_id);
} else {
for folder in folders {
println!("{} - {}", folder.id, folder.name);
}
}
}
fn show(ctx: &CliContext, folder_id: &str) {
let folder = ctx.db().get_folder(folder_id).expect("Failed to get folder");
let output = serde_json::to_string_pretty(&folder).expect("Failed to serialize folder");
println!("{output}");
}
fn create(
ctx: &CliContext,
workspace_id: Option<String>,
name: Option<String>,
json: Option<String>,
) {
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
panic!("folder create cannot combine workspace_id with --json payload");
}
let payload = parse_optional_json(
json,
workspace_id.clone().filter(|v| is_json_shorthand(v)),
"folder create",
);
if let Some(payload) = payload {
if name.is_some() {
panic!("folder create cannot combine --name with JSON payload");
}
validate_create_id(&payload, "folder");
let folder: Folder =
serde_json::from_value(payload).expect("Failed to parse folder create JSON");
if folder.workspace_id.is_empty() {
panic!("folder create JSON requires non-empty \"workspaceId\"");
}
let created =
ctx.db().upsert_folder(&folder, &UpdateSource::Sync).expect("Failed to create folder");
println!("Created folder: {}", created.id);
return;
}
let workspace_id = workspace_id.unwrap_or_else(|| {
panic!("folder create requires workspace_id unless JSON payload is provided")
});
let name = name
.unwrap_or_else(|| panic!("folder create requires --name unless JSON payload is provided"));
let folder = Folder { workspace_id, name, ..Default::default() };
let created =
ctx.db().upsert_folder(&folder, &UpdateSource::Sync).expect("Failed to create folder");
println!("Created folder: {}", created.id);
}
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) {
let patch = parse_required_json(json, json_input, "folder update");
let id = require_id(&patch, "folder update");
let existing = ctx.db().get_folder(&id).expect("Failed to get folder for update");
let updated = apply_merge_patch(&existing, &patch, &id, "folder update");
let saved =
ctx.db().upsert_folder(&updated, &UpdateSource::Sync).expect("Failed to update folder");
println!("Updated folder: {}", saved.id);
}
fn delete(ctx: &CliContext, folder_id: &str, yes: bool) {
if !yes && !confirm_delete("folder", folder_id) {
println!("Aborted");
return;
}
let deleted = ctx
.db()
.delete_folder_by_id(folder_id, &UpdateSource::Sync)
.expect("Failed to delete folder");
println!("Deleted folder: {}", deleted.id);
}

View File

@@ -1,108 +0,0 @@
use serde::Serialize;
use serde::de::DeserializeOwned;
use serde_json::{Map, Value};
pub fn is_json_shorthand(input: &str) -> bool {
input.trim_start().starts_with('{')
}
pub fn parse_json_object(raw: &str, context: &str) -> Value {
let value: Value = serde_json::from_str(raw)
.unwrap_or_else(|error| panic!("Invalid JSON for {context}: {error}"));
if !value.is_object() {
panic!("JSON payload for {context} must be an object");
}
value
}
pub fn parse_optional_json(
json_flag: Option<String>,
json_shorthand: Option<String>,
context: &str,
) -> Option<Value> {
match (json_flag, json_shorthand) {
(Some(_), Some(_)) => {
panic!("Cannot provide both --json and positional JSON for {context}")
}
(Some(raw), None) => Some(parse_json_object(&raw, context)),
(None, Some(raw)) => Some(parse_json_object(&raw, context)),
(None, None) => None,
}
}
pub fn parse_required_json(
json_flag: Option<String>,
json_shorthand: Option<String>,
context: &str,
) -> Value {
parse_optional_json(json_flag, json_shorthand, context).unwrap_or_else(|| {
panic!("Missing JSON payload for {context}. Use --json or positional JSON")
})
}
pub fn require_id(payload: &Value, context: &str) -> String {
payload
.get("id")
.and_then(|value| value.as_str())
.filter(|value| !value.is_empty())
.map(|value| value.to_string())
.unwrap_or_else(|| panic!("{context} requires a non-empty \"id\" field"))
}
pub fn validate_create_id(payload: &Value, context: &str) {
let Some(id_value) = payload.get("id") else {
return;
};
match id_value {
Value::String(id) if id.is_empty() => {}
_ => panic!("{context} create JSON must omit \"id\" or set it to an empty string"),
}
}
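/// Apply an RFC 7386 merge patch to `existing`, re-assert the canonical `id`,
/// then deserialize back into the typed model.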
pub fn apply_merge_patch<T>(existing: &T, patch: &Value, id: &str, context: &str) -> T
where
T: Serialize + DeserializeOwned,
{
let mut base = serde_json::to_value(existing).unwrap_or_else(|error| {
panic!("Failed to serialize existing model for {context}: {error}")
});
merge_patch(&mut base, patch);
let Some(base_object) = base.as_object_mut() else {
panic!("Merged payload for {context} must be an object");
};
base_object.insert("id".to_string(), Value::String(id.to_string()));
serde_json::from_value(base).unwrap_or_else(|error| {
panic!("Failed to deserialize merged payload for {context}: {error}")
})
}
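/// Recursive JSON Merge Patch (RFC 7386): objects merge key-by-key, `null`
/// removes a key, and any non-object patch value replaces the target wholesale.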
fn merge_patch(target: &mut Value, patch: &Value) {
match patch {
Value::Object(patch_map) => {
if !target.is_object() {
*target = Value::Object(Map::new());
}
let target_map =
target.as_object_mut().expect("merge_patch target expected to be object");
for (key, patch_value) in patch_map {
if patch_value.is_null() {
target_map.remove(key);
continue;
}
let target_entry = target_map.entry(key.clone()).or_insert(Value::Null);
merge_patch(target_entry, patch_value);
}
}
_ => {
*target = patch.clone();
}
}
}

View File

@@ -1,7 +0,0 @@
pub mod confirm;
pub mod environment;
pub mod folder;
pub mod json;
pub mod request;
pub mod send;
pub mod workspace;

View File

@@ -1,338 +0,0 @@
use crate::cli::{RequestArgs, RequestCommands};
use crate::commands::confirm::confirm_delete;
use crate::commands::json::{
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
validate_create_id,
};
use crate::context::CliContext;
use log::info;
use serde_json::Value;
use std::collections::BTreeMap;
use tokio::sync::mpsc;
use yaak_http::path_placeholders::apply_path_placeholders;
use yaak_http::sender::{HttpSender, ReqwestSender};
use yaak_http::types::{SendableHttpRequest, SendableHttpRequestOptions};
use yaak_models::models::{Environment, HttpRequest, HttpRequestHeader, HttpUrlParameter};
use yaak_models::render::make_vars_hashmap;
use yaak_models::util::UpdateSource;
use yaak_plugins::events::{PluginContext, RenderPurpose};
use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_templates::{RenderOptions, parse_and_render, render_json_value_raw};
pub async fn run(
ctx: &CliContext,
args: RequestArgs,
environment: Option<&str>,
verbose: bool,
) -> i32 {
match args.command {
RequestCommands::List { workspace_id } => {
list(ctx, &workspace_id);
0
}
RequestCommands::Show { request_id } => {
show(ctx, &request_id);
0
}
RequestCommands::Send { request_id } => {
match send_request_by_id(ctx, &request_id, environment, verbose).await {
Ok(()) => 0,
Err(error) => {
eprintln!("Error: {error}");
1
}
}
}
RequestCommands::Create { workspace_id, name, method, url, json } => {
create(ctx, workspace_id, name, method, url, json);
0
}
RequestCommands::Update { json, json_input } => {
update(ctx, json, json_input);
0
}
RequestCommands::Delete { request_id, yes } => {
delete(ctx, &request_id, yes);
0
}
}
}
fn list(ctx: &CliContext, workspace_id: &str) {
let requests = ctx.db().list_http_requests(workspace_id).expect("Failed to list requests");
if requests.is_empty() {
println!("No requests found in workspace {}", workspace_id);
} else {
for request in requests {
println!("{} - {} {}", request.id, request.method, request.name);
}
}
}
fn create(
ctx: &CliContext,
workspace_id: Option<String>,
name: Option<String>,
method: Option<String>,
url: Option<String>,
json: Option<String>,
) {
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
panic!("request create cannot combine workspace_id with --json payload");
}
let payload = parse_optional_json(
json,
workspace_id.clone().filter(|v| is_json_shorthand(v)),
"request create",
);
if let Some(payload) = payload {
if name.is_some() || method.is_some() || url.is_some() {
panic!("request create cannot combine simple flags with JSON payload");
}
validate_create_id(&payload, "request");
let request: HttpRequest =
serde_json::from_value(payload).expect("Failed to parse request create JSON");
if request.workspace_id.is_empty() {
panic!("request create JSON requires non-empty \"workspaceId\"");
}
let created = ctx
.db()
.upsert_http_request(&request, &UpdateSource::Sync)
.expect("Failed to create request");
println!("Created request: {}", created.id);
return;
}
let workspace_id = workspace_id.unwrap_or_else(|| {
panic!("request create requires workspace_id unless JSON payload is provided")
});
let name = name.unwrap_or_else(|| {
panic!("request create requires --name unless JSON payload is provided")
});
let url = url
.unwrap_or_else(|| panic!("request create requires --url unless JSON payload is provided"));
let method = method.unwrap_or_else(|| "GET".to_string());
let request = HttpRequest {
workspace_id,
name,
method: method.to_uppercase(),
url,
..Default::default()
};
let created = ctx
.db()
.upsert_http_request(&request, &UpdateSource::Sync)
.expect("Failed to create request");
println!("Created request: {}", created.id);
}
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) {
let patch = parse_required_json(json, json_input, "request update");
let id = require_id(&patch, "request update");
let existing = ctx.db().get_http_request(&id).expect("Failed to get request for update");
let updated = apply_merge_patch(&existing, &patch, &id, "request update");
let saved = ctx
.db()
.upsert_http_request(&updated, &UpdateSource::Sync)
.expect("Failed to update request");
println!("Updated request: {}", saved.id);
}
fn show(ctx: &CliContext, request_id: &str) {
let request = ctx.db().get_http_request(request_id).expect("Failed to get request");
let output = serde_json::to_string_pretty(&request).expect("Failed to serialize request");
println!("{output}");
}
fn delete(ctx: &CliContext, request_id: &str, yes: bool) {
if !yes && !confirm_delete("request", request_id) {
println!("Aborted");
return;
}
let deleted = ctx
.db()
.delete_http_request_by_id(request_id, &UpdateSource::Sync)
.expect("Failed to delete request");
println!("Deleted request: {}", deleted.id);
}
/// Send a request by ID and print response in the same format as legacy `send`.
pub async fn send_request_by_id(
ctx: &CliContext,
request_id: &str,
environment: Option<&str>,
verbose: bool,
) -> Result<(), String> {
let request =
ctx.db().get_http_request(request_id).map_err(|e| format!("Failed to get request: {e}"))?;
let environment_chain = ctx
.db()
.resolve_environments(&request.workspace_id, request.folder_id.as_deref(), environment)
.map_err(|e| format!("Failed to resolve environments: {e}"))?;
let plugin_context = PluginContext::new(None, Some(request.workspace_id.clone()));
let template_callback = PluginTemplateCallback::new(
ctx.plugin_manager(),
ctx.encryption_manager.clone(),
&plugin_context,
RenderPurpose::Send,
);
let rendered_request = render_http_request(
&request,
environment_chain,
&template_callback,
&RenderOptions::throw(),
)
.await
.map_err(|e| format!("Failed to render request templates: {e}"))?;
if verbose {
println!("> {} {}", rendered_request.method, rendered_request.url);
}
let sendable = SendableHttpRequest::from_http_request(
&rendered_request,
SendableHttpRequestOptions::default(),
)
.await
.map_err(|e| format!("Failed to build request: {e}"))?;
let (event_tx, mut event_rx) = mpsc::channel(100);
let verbose_handle = if verbose {
Some(tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
println!("{}", event);
}
}))
} else {
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
None
};
let sender = ReqwestSender::new().map_err(|e| format!("Failed to create HTTP client: {e}"))?;
let response = sender
.send(sendable, event_tx)
.await
.map_err(|e| format!("Failed to send request: {e}"))?;
if let Some(handle) = verbose_handle {
let _ = handle.await;
}
if verbose {
println!();
}
println!("HTTP {} {}", response.status, response.status_reason.as_deref().unwrap_or(""));
if verbose {
for (name, value) in &response.headers {
println!("{}: {}", name, value);
}
println!();
}
let (body, _stats) =
response.text().await.map_err(|e| format!("Failed to read response body: {e}"))?;
println!("{}", body);
Ok(())
}
/// Render an HTTP request with template variables and plugin functions.
async fn render_http_request(
request: &HttpRequest,
environment_chain: Vec<Environment>,
callback: &PluginTemplateCallback,
options: &RenderOptions,
) -> yaak_templates::error::Result<HttpRequest> {
let vars = &make_vars_hashmap(environment_chain);
let mut url_parameters = Vec::new();
for parameter in request.url_parameters.clone() {
if !parameter.enabled {
continue;
}
url_parameters.push(HttpUrlParameter {
enabled: parameter.enabled,
name: parse_and_render(parameter.name.as_str(), vars, callback, options).await?,
value: parse_and_render(parameter.value.as_str(), vars, callback, options).await?,
id: parameter.id,
})
}
let mut headers = Vec::new();
for header in request.headers.clone() {
if !header.enabled {
continue;
}
headers.push(HttpRequestHeader {
enabled: header.enabled,
name: parse_and_render(header.name.as_str(), vars, callback, options).await?,
value: parse_and_render(header.value.as_str(), vars, callback, options).await?,
id: header.id,
})
}
let mut body = BTreeMap::new();
for (key, value) in request.body.clone() {
body.insert(key, render_json_value_raw(value, vars, callback, options).await?);
}
let authentication = {
let mut disabled = false;
let mut auth = BTreeMap::new();
match request.authentication.get("disabled") {
Some(Value::Bool(true)) => {
disabled = true;
}
Some(Value::String(template)) => {
disabled = parse_and_render(template.as_str(), vars, callback, options)
.await
.unwrap_or_default()
.is_empty();
info!(
"Rendering authentication.disabled as a template: {disabled} from \"{template}\""
);
}
_ => {}
}
if disabled {
auth.insert("disabled".to_string(), Value::Bool(true));
} else {
for (key, value) in request.authentication.clone() {
if key == "disabled" {
auth.insert(key, Value::Bool(false));
} else {
auth.insert(key, render_json_value_raw(value, vars, callback, options).await?);
}
}
}
auth
};
let url = parse_and_render(request.url.clone().as_str(), vars, callback, options).await?;
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..request.to_owned() })
}
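// Hedged sketch (illustrative, not part of this change) of the
// `authentication.disabled` rule above: a literal boolean disables auth
// outright, while a string value is treated as a template whose *empty*
// rendering means "disabled". The helper below takes the rendered result
// directly instead of calling `parse_and_render`.
#[cfg(test)]
mod auth_disabled_example {
    use serde_json::Value;

    fn is_disabled(raw: Option<&Value>, rendered: &str) -> bool {
        match raw {
            Some(Value::Bool(true)) => true,
            Some(Value::String(_)) => rendered.is_empty(),
            _ => false,
        }
    }

    #[test]
    fn string_template_disables_only_when_rendered_empty() {
        let template = Value::String("${[ auth_off ]}".into());
        assert!(is_disabled(Some(&template), ""));
        assert!(!is_disabled(Some(&template), "anything"));
        assert!(is_disabled(Some(&Value::Bool(true)), "ignored"));
    }
}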

View File

@@ -1,18 +0,0 @@
use crate::cli::SendArgs;
use crate::commands::request;
use crate::context::CliContext;
pub async fn run(
ctx: &CliContext,
args: SendArgs,
environment: Option<&str>,
verbose: bool,
) -> i32 {
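    // Map the send result to a process exit code: 0 on success, 1 on any failure.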
match request::send_request_by_id(ctx, &args.request_id, environment, verbose).await {
Ok(()) => 0,
Err(error) => {
eprintln!("Error: {error}");
1
}
}
}

View File

@@ -1,100 +0,0 @@
use crate::cli::{WorkspaceArgs, WorkspaceCommands};
use crate::commands::confirm::confirm_delete;
use crate::commands::json::{
apply_merge_patch, parse_optional_json, parse_required_json, require_id, validate_create_id,
};
use crate::context::CliContext;
use yaak_models::models::Workspace;
use yaak_models::util::UpdateSource;
pub fn run(ctx: &CliContext, args: WorkspaceArgs) {
match args.command {
WorkspaceCommands::List => list(ctx),
WorkspaceCommands::Show { workspace_id } => show(ctx, &workspace_id),
WorkspaceCommands::Create { name, json, json_input } => create(ctx, name, json, json_input),
WorkspaceCommands::Update { json, json_input } => update(ctx, json, json_input),
WorkspaceCommands::Delete { workspace_id, yes } => delete(ctx, &workspace_id, yes),
}
}
fn list(ctx: &CliContext) {
let workspaces = ctx.db().list_workspaces().expect("Failed to list workspaces");
if workspaces.is_empty() {
println!("No workspaces found");
} else {
for workspace in workspaces {
println!("{} - {}", workspace.id, workspace.name);
}
}
}
fn show(ctx: &CliContext, workspace_id: &str) {
let workspace = ctx.db().get_workspace(workspace_id).expect("Failed to get workspace");
let output = serde_json::to_string_pretty(&workspace).expect("Failed to serialize workspace");
println!("{output}");
}
fn create(
ctx: &CliContext,
name: Option<String>,
json: Option<String>,
json_input: Option<String>,
) {
let payload = parse_optional_json(json, json_input, "workspace create");
if let Some(payload) = payload {
if name.is_some() {
panic!("workspace create cannot combine --name with JSON payload");
}
validate_create_id(&payload, "workspace");
let workspace: Workspace =
serde_json::from_value(payload).expect("Failed to parse workspace create JSON");
let created = ctx
.db()
.upsert_workspace(&workspace, &UpdateSource::Sync)
.expect("Failed to create workspace");
println!("Created workspace: {}", created.id);
return;
}
let name = name.unwrap_or_else(|| {
panic!("workspace create requires --name unless JSON payload is provided")
});
let workspace = Workspace { name, ..Default::default() };
let created = ctx
.db()
.upsert_workspace(&workspace, &UpdateSource::Sync)
.expect("Failed to create workspace");
println!("Created workspace: {}", created.id);
}
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) {
let patch = parse_required_json(json, json_input, "workspace update");
let id = require_id(&patch, "workspace update");
let existing = ctx.db().get_workspace(&id).expect("Failed to get workspace for update");
let updated = apply_merge_patch(&existing, &patch, &id, "workspace update");
let saved = ctx
.db()
.upsert_workspace(&updated, &UpdateSource::Sync)
.expect("Failed to update workspace");
println!("Updated workspace: {}", saved.id);
}
fn delete(ctx: &CliContext, workspace_id: &str, yes: bool) {
if !yes && !confirm_delete("workspace", workspace_id) {
println!("Aborted");
return;
}
let deleted = ctx
.db()
.delete_workspace_by_id(workspace_id, &UpdateSource::Sync)
.expect("Failed to delete workspace");
println!("Deleted workspace: {}", deleted.id);
}
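// Hedged sketch (illustrative only) of the RFC 7396-style merge that the
// `update` path relies on, written against plain serde_json. The real
// `apply_merge_patch` helper lives in `commands::json` and may differ in detail.
#[cfg(test)]
mod merge_patch_example {
    use serde_json::{Value, json};

    fn merge(base: &mut Value, patch: &Value) {
        match (base, patch) {
            (Value::Object(base_map), Value::Object(patch_map)) => {
                for (key, patch_value) in patch_map {
                    if patch_value.is_null() {
                        // A null in a merge patch deletes the field
                        base_map.remove(key);
                    } else {
                        merge(base_map.entry(key.clone()).or_insert(Value::Null), patch_value);
                    }
                }
            }
            (base_slot, patch_value) => *base_slot = patch_value.clone(),
        }
    }

    #[test]
    fn patch_overwrites_and_preserves_fields() {
        let mut workspace = json!({"id": "wk_1", "name": "Old", "description": ""});
        merge(&mut workspace, &json!({"description": "Updated via JSON"}));
        assert_eq!(workspace["name"], "Old");
        assert_eq!(workspace["description"], "Updated via JSON");
    }
}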

View File

@@ -1,82 +0,0 @@
use std::path::PathBuf;
use std::sync::Arc;
use yaak_crypto::manager::EncryptionManager;
use yaak_models::db_context::DbContext;
use yaak_models::query_manager::QueryManager;
use yaak_plugins::events::PluginContext;
use yaak_plugins::manager::PluginManager;
pub struct CliContext {
query_manager: QueryManager,
pub encryption_manager: Arc<EncryptionManager>,
plugin_manager: Option<Arc<PluginManager>>,
}
impl CliContext {
pub async fn initialize(data_dir: PathBuf, app_id: &str, with_plugins: bool) -> Self {
let db_path = data_dir.join("db.sqlite");
let blob_path = data_dir.join("blobs.sqlite");
let (query_manager, _blob_manager, _rx) =
yaak_models::init_standalone(&db_path, &blob_path)
.expect("Failed to initialize database");
let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));
let plugin_manager = if with_plugins {
let vendored_plugin_dir = data_dir.join("vendored-plugins");
let installed_plugin_dir = data_dir.join("installed-plugins");
let node_bin_path = PathBuf::from("node");
let plugin_runtime_main =
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
});
let plugin_manager = Arc::new(
PluginManager::new(
vendored_plugin_dir,
installed_plugin_dir,
node_bin_path,
plugin_runtime_main,
false,
)
.await,
);
let plugins = query_manager.connect().list_plugins().unwrap_or_default();
if !plugins.is_empty() {
let errors = plugin_manager
.initialize_all_plugins(plugins, &PluginContext::new_empty())
.await;
for (plugin_dir, error_msg) in errors {
eprintln!(
"Warning: Failed to initialize plugin '{}': {}",
plugin_dir, error_msg
);
}
}
Some(plugin_manager)
} else {
None
};
Self { query_manager, encryption_manager, plugin_manager }
}
pub fn db(&self) -> DbContext<'_> {
self.query_manager.connect()
}
pub fn plugin_manager(&self) -> Arc<PluginManager> {
self.plugin_manager.clone().expect("Plugin manager was not initialized for this command")
}
pub async fn shutdown(&self) {
if let Some(plugin_manager) = &self.plugin_manager {
plugin_manager.terminate().await;
}
}
}
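// Hedged usage sketch (not part of this change): read-only access without the
// plugin runtime. The data directory path here is hypothetical.
#[allow(dead_code)]
async fn example_usage() {
    let data_dir = PathBuf::from("/tmp/yaak-example");
    let ctx = CliContext::initialize(data_dir, "app.yaak.desktop.dev", false).await;
    for workspace in ctx.db().list_workspaces().expect("Failed to list workspaces") {
        println!("{} - {}", workspace.id, workspace.name);
    }
    ctx.shutdown().await;
}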

View File

@@ -1,57 +1,448 @@
mod cli;
mod commands;
mod context;
use clap::{Parser, Subcommand};
use log::info;
use serde_json::Value;
use std::collections::BTreeMap;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::mpsc;
use yaak_crypto::manager::EncryptionManager;
use yaak_http::path_placeholders::apply_path_placeholders;
use yaak_http::sender::{HttpSender, ReqwestSender};
use yaak_http::types::{SendableHttpRequest, SendableHttpRequestOptions};
use yaak_models::models::{HttpRequest, HttpRequestHeader, HttpUrlParameter};
use yaak_models::render::make_vars_hashmap;
use yaak_models::util::UpdateSource;
use yaak_plugins::events::{PluginContext, RenderPurpose};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_templates::{parse_and_render, render_json_value_raw, RenderOptions};
use clap::Parser;
use cli::{Cli, Commands, RequestCommands};
use context::CliContext;
#[derive(Parser)]
#[command(name = "yaakcli")]
#[command(about = "Yaak CLI - API client from the command line")]
struct Cli {
/// Use a custom data directory
#[arg(long, global = true)]
data_dir: Option<PathBuf>,
/// Environment ID to use for variable substitution
#[arg(long, short, global = true)]
environment: Option<String>,
/// Enable verbose logging
#[arg(long, short, global = true)]
verbose: bool,
#[command(subcommand)]
command: Commands,
}
#[derive(Subcommand)]
enum Commands {
/// List all workspaces
Workspaces,
/// List requests in a workspace
Requests {
/// Workspace ID
workspace_id: String,
},
/// Send an HTTP request by ID
Send {
/// Request ID
request_id: String,
},
/// Send a GET request to a URL
Get {
/// URL to request
url: String,
},
/// Create a new HTTP request
Create {
/// Workspace ID
workspace_id: String,
/// Request name
#[arg(short, long)]
name: String,
/// HTTP method
#[arg(short, long, default_value = "GET")]
method: String,
/// URL
#[arg(short, long)]
url: String,
},
}
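// Hedged sketch (illustrative only): how clap maps argv onto the derive
// structs above. `Parser::try_parse_from` is available on any
// `#[derive(Parser)]` type.
#[cfg(test)]
mod cli_parse_example {
    use super::*;

    #[test]
    fn parses_send_with_global_flags() {
        let cli = Cli::try_parse_from(["yaakcli", "--verbose", "send", "rq_123"]).unwrap();
        assert!(cli.verbose);
        assert!(matches!(cli.command, Commands::Send { request_id } if request_id == "rq_123"));
    }
}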
/// Render an HTTP request with template variables and plugin functions
async fn render_http_request(
r: &HttpRequest,
environment_chain: Vec<yaak_models::models::Environment>,
cb: &PluginTemplateCallback,
opt: &RenderOptions,
) -> yaak_templates::error::Result<HttpRequest> {
let vars = &make_vars_hashmap(environment_chain);
let mut url_parameters = Vec::new();
for p in r.url_parameters.clone() {
if !p.enabled {
continue;
}
url_parameters.push(HttpUrlParameter {
enabled: p.enabled,
name: parse_and_render(p.name.as_str(), vars, cb, opt).await?,
value: parse_and_render(p.value.as_str(), vars, cb, opt).await?,
id: p.id,
})
}
let mut headers = Vec::new();
for p in r.headers.clone() {
if !p.enabled {
continue;
}
headers.push(HttpRequestHeader {
enabled: p.enabled,
name: parse_and_render(p.name.as_str(), vars, cb, opt).await?,
value: parse_and_render(p.value.as_str(), vars, cb, opt).await?,
id: p.id,
})
}
let mut body = BTreeMap::new();
for (k, v) in r.body.clone() {
body.insert(k, render_json_value_raw(v, vars, cb, opt).await?);
}
let authentication = {
let mut disabled = false;
let mut auth = BTreeMap::new();
match r.authentication.get("disabled") {
Some(Value::Bool(true)) => {
disabled = true;
}
Some(Value::String(tmpl)) => {
disabled = parse_and_render(tmpl.as_str(), vars, cb, opt)
.await
.unwrap_or_default()
.is_empty();
info!(
"Rendering authentication.disabled as a template: {disabled} from \"{tmpl}\""
);
}
_ => {}
}
if disabled {
auth.insert("disabled".to_string(), Value::Bool(true));
} else {
for (k, v) in r.authentication.clone() {
if k == "disabled" {
auth.insert(k, Value::Bool(false));
} else {
auth.insert(k, render_json_value_raw(v, vars, cb, opt).await?);
}
}
}
auth
};
let url = parse_and_render(r.url.clone().as_str(), vars, cb, opt).await?;
// Apply path placeholders (e.g., /users/:id -> /users/123)
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
Ok(HttpRequest {
url,
url_parameters,
headers,
body,
authentication,
..r.to_owned()
})
}
#[tokio::main]
async fn main() {
let Cli { data_dir, environment, verbose, command } = Cli::parse();
let cli = Cli::parse();
if verbose {
// Initialize logging
if cli.verbose {
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
}
let app_id = if cfg!(debug_assertions) { "app.yaak.desktop.dev" } else { "app.yaak.desktop" };
let data_dir = data_dir.unwrap_or_else(|| {
dirs::data_dir().expect("Could not determine data directory").join(app_id)
});
let needs_plugins = matches!(
&command,
Commands::Send(_)
| Commands::Request(cli::RequestArgs { command: RequestCommands::Send { .. } })
);
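    // Only the send paths need the Node plugin runtime (template functions may
    // call into plugins); plain CRUD commands skip that startup cost.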
let context = CliContext::initialize(data_dir, app_id, needs_plugins).await;
let exit_code = match command {
Commands::Send(args) => {
commands::send::run(&context, args, environment.as_deref(), verbose).await
}
Commands::Workspace(args) => {
commands::workspace::run(&context, args);
0
}
Commands::Request(args) => {
commands::request::run(&context, args, environment.as_deref(), verbose).await
}
Commands::Folder(args) => {
commands::folder::run(&context, args);
0
}
Commands::Environment(args) => {
commands::environment::run(&context, args);
0
}
// Use the same app_id for both data directory and keyring
let app_id = if cfg!(debug_assertions) {
"app.yaak.desktop.dev"
} else {
"app.yaak.desktop"
};
context.shutdown().await;
let data_dir = cli.data_dir.unwrap_or_else(|| {
dirs::data_dir()
.expect("Could not determine data directory")
.join(app_id)
});
if exit_code != 0 {
std::process::exit(exit_code);
let db_path = data_dir.join("db.sqlite");
let blob_path = data_dir.join("blobs.sqlite");
let (query_manager, _blob_manager, _rx) =
yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize database");
let db = query_manager.connect();
// Initialize encryption manager for secure() template function
// Use the same app_id as the Tauri app for keyring access
let encryption_manager = Arc::new(
EncryptionManager::new(query_manager.clone(), app_id),
);
// Initialize plugin manager for template functions
let vendored_plugin_dir = data_dir.join("vendored-plugins");
let installed_plugin_dir = data_dir.join("installed-plugins");
// Use system node for CLI (must be in PATH)
let node_bin_path = PathBuf::from("node");
// Find the plugin runtime - check YAAK_PLUGIN_RUNTIME env var, then fallback to development path
let plugin_runtime_main = std::env::var("YAAK_PLUGIN_RUNTIME")
.map(PathBuf::from)
.unwrap_or_else(|_| {
// Development fallback: look relative to crate root
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
});
// Create plugin manager (plugins may not be available in CLI context)
let plugin_manager = Arc::new(
PluginManager::new(
vendored_plugin_dir,
installed_plugin_dir,
node_bin_path,
plugin_runtime_main,
false,
)
.await,
);
// Initialize plugins from database
let plugins = db.list_plugins().unwrap_or_default();
if !plugins.is_empty() {
let errors = plugin_manager
.initialize_all_plugins(plugins, &PluginContext::new_empty())
.await;
for (plugin_dir, error_msg) in errors {
eprintln!(
"Warning: Failed to initialize plugin '{}': {}",
plugin_dir, error_msg
);
}
}
match cli.command {
Commands::Workspaces => {
let workspaces = db.list_workspaces().expect("Failed to list workspaces");
if workspaces.is_empty() {
println!("No workspaces found");
} else {
for ws in workspaces {
println!("{} - {}", ws.id, ws.name);
}
}
}
Commands::Requests { workspace_id } => {
let requests = db
.list_http_requests(&workspace_id)
.expect("Failed to list requests");
if requests.is_empty() {
println!("No requests found in workspace {}", workspace_id);
} else {
for req in requests {
println!("{} - {} {}", req.id, req.method, req.name);
}
}
}
Commands::Send { request_id } => {
let request = db
.get_http_request(&request_id)
.expect("Failed to get request");
// Resolve environment chain for variable substitution
let environment_chain = db
.resolve_environments(
&request.workspace_id,
request.folder_id.as_deref(),
cli.environment.as_deref(),
)
.unwrap_or_default();
// Create template callback with plugin support
let plugin_context = PluginContext::new(None, Some(request.workspace_id.clone()));
let template_callback = PluginTemplateCallback::new(
plugin_manager.clone(),
encryption_manager.clone(),
&plugin_context,
RenderPurpose::Send,
);
// Render templates in the request
let rendered_request = render_http_request(
&request,
environment_chain,
&template_callback,
&RenderOptions::throw(),
)
.await
.expect("Failed to render request templates");
if cli.verbose {
println!("> {} {}", rendered_request.method, rendered_request.url);
}
// Convert to sendable request
let sendable = SendableHttpRequest::from_http_request(
&rendered_request,
SendableHttpRequestOptions::default(),
)
.await
.expect("Failed to build request");
// Create event channel for progress
let (event_tx, mut event_rx) = mpsc::channel(100);
// Spawn task to print events if verbose
let verbose = cli.verbose;
let verbose_handle = if verbose {
Some(tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
println!("{}", event);
}
}))
} else {
// Drain events silently
tokio::spawn(async move {
while event_rx.recv().await.is_some() {}
});
None
};
// Send the request
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
let response = sender
.send(sendable, event_tx)
.await
.expect("Failed to send request");
// Wait for event handler to finish
if let Some(handle) = verbose_handle {
let _ = handle.await;
}
// Print response
if verbose {
println!();
}
println!(
"HTTP {} {}",
response.status,
response.status_reason.as_deref().unwrap_or("")
);
if verbose {
for (name, value) in &response.headers {
println!("{}: {}", name, value);
}
println!();
}
// Print body
let (body, _stats) = response.text().await.expect("Failed to read response body");
println!("{}", body);
}
Commands::Get { url } => {
if cli.verbose {
println!("> GET {}", url);
}
// Build a simple GET request
let sendable = SendableHttpRequest {
url: url.clone(),
method: "GET".to_string(),
headers: vec![],
body: None,
options: SendableHttpRequestOptions::default(),
};
// Create event channel for progress
let (event_tx, mut event_rx) = mpsc::channel(100);
// Spawn task to print events if verbose
let verbose = cli.verbose;
let verbose_handle = if verbose {
Some(tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
println!("{}", event);
}
}))
} else {
tokio::spawn(async move {
while event_rx.recv().await.is_some() {}
});
None
};
// Send the request
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
let response = sender
.send(sendable, event_tx)
.await
.expect("Failed to send request");
if let Some(handle) = verbose_handle {
let _ = handle.await;
}
// Print response
if verbose {
println!();
}
println!(
"HTTP {} {}",
response.status,
response.status_reason.as_deref().unwrap_or("")
);
if verbose {
for (name, value) in &response.headers {
println!("{}: {}", name, value);
}
println!();
}
// Print body
let (body, _stats) = response.text().await.expect("Failed to read response body");
println!("{}", body);
}
Commands::Create {
workspace_id,
name,
method,
url,
} => {
let request = HttpRequest {
workspace_id,
name,
method: method.to_uppercase(),
url,
..Default::default()
};
let created = db
.upsert_http_request(&request, &UpdateSource::Sync)
.expect("Failed to create request");
println!("Created request: {}", created.id);
}
}
// Terminate plugin manager gracefully
plugin_manager.terminate().await;
}

View File

@@ -1,60 +0,0 @@
#![allow(dead_code)]
use assert_cmd::Command;
use assert_cmd::cargo::cargo_bin_cmd;
use std::path::Path;
use yaak_models::models::{HttpRequest, Workspace};
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
pub fn cli_cmd(data_dir: &Path) -> Command {
let mut cmd = cargo_bin_cmd!("yaakcli");
cmd.arg("--data-dir").arg(data_dir);
cmd
}
pub fn parse_created_id(stdout: &[u8], label: &str) -> String {
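    // Create commands print "Created <model>: <id>"; take everything after ": ".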
String::from_utf8_lossy(stdout)
.trim()
.split_once(": ")
.map(|(_, id)| id.to_string())
.unwrap_or_else(|| panic!("Expected id in '{label}' output"))
}
pub fn query_manager(data_dir: &Path) -> QueryManager {
let db_path = data_dir.join("db.sqlite");
let blob_path = data_dir.join("blobs.sqlite");
let (query_manager, _blob_manager, _rx) =
yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize DB");
query_manager
}
pub fn seed_workspace(data_dir: &Path, workspace_id: &str) {
let workspace = Workspace {
id: workspace_id.to_string(),
name: "Seed Workspace".to_string(),
description: "Seeded for integration tests".to_string(),
..Default::default()
};
query_manager(data_dir)
.connect()
.upsert_workspace(&workspace, &UpdateSource::Sync)
.expect("Failed to seed workspace");
}
pub fn seed_request(data_dir: &Path, workspace_id: &str, request_id: &str) {
let request = HttpRequest {
id: request_id.to_string(),
workspace_id: workspace_id.to_string(),
name: "Seeded Request".to_string(),
method: "GET".to_string(),
url: "https://example.com".to_string(),
..Default::default()
};
query_manager(data_dir)
.connect()
.upsert_http_request(&request, &UpdateSource::Sync)
.expect("Failed to seed request");
}

View File

@@ -1,80 +0,0 @@
mod common;
use common::{cli_cmd, parse_created_id, query_manager, seed_workspace};
use predicates::str::contains;
use tempfile::TempDir;
#[test]
fn create_list_show_delete_round_trip() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
seed_workspace(data_dir, "wk_test");
cli_cmd(data_dir)
.args(["environment", "list", "wk_test"])
.assert()
.success()
.stdout(contains("Global Variables"));
let create_assert = cli_cmd(data_dir)
.args(["environment", "create", "wk_test", "--name", "Production"])
.assert()
.success();
let environment_id = parse_created_id(&create_assert.get_output().stdout, "environment create");
cli_cmd(data_dir)
.args(["environment", "list", "wk_test"])
.assert()
.success()
.stdout(contains(&environment_id))
.stdout(contains("Production"));
cli_cmd(data_dir)
.args(["environment", "show", &environment_id])
.assert()
.success()
.stdout(contains(format!("\"id\": \"{environment_id}\"")))
.stdout(contains("\"parentModel\": \"environment\""));
cli_cmd(data_dir)
.args(["environment", "delete", &environment_id, "--yes"])
.assert()
.success()
.stdout(contains(format!("Deleted environment: {environment_id}")));
assert!(query_manager(data_dir).connect().get_environment(&environment_id).is_err());
}
#[test]
fn json_create_and_update_merge_patch_round_trip() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
seed_workspace(data_dir, "wk_test");
let create_assert = cli_cmd(data_dir)
.args([
"environment",
"create",
r#"{"workspaceId":"wk_test","name":"Json Environment"}"#,
])
.assert()
.success();
let environment_id = parse_created_id(&create_assert.get_output().stdout, "environment create");
cli_cmd(data_dir)
.args([
"environment",
"update",
&format!(r##"{{"id":"{}","color":"#00ff00"}}"##, environment_id),
])
.assert()
.success()
.stdout(contains(format!("Updated environment: {environment_id}")));
cli_cmd(data_dir)
.args(["environment", "show", &environment_id])
.assert()
.success()
.stdout(contains("\"name\": \"Json Environment\""))
.stdout(contains("\"color\": \"#00ff00\""));
}

View File

@@ -1,74 +0,0 @@
mod common;
use common::{cli_cmd, parse_created_id, query_manager, seed_workspace};
use predicates::str::contains;
use tempfile::TempDir;
#[test]
fn create_list_show_delete_round_trip() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
seed_workspace(data_dir, "wk_test");
let create_assert = cli_cmd(data_dir)
.args(["folder", "create", "wk_test", "--name", "Auth"])
.assert()
.success();
let folder_id = parse_created_id(&create_assert.get_output().stdout, "folder create");
cli_cmd(data_dir)
.args(["folder", "list", "wk_test"])
.assert()
.success()
.stdout(contains(&folder_id))
.stdout(contains("Auth"));
cli_cmd(data_dir)
.args(["folder", "show", &folder_id])
.assert()
.success()
.stdout(contains(format!("\"id\": \"{folder_id}\"")))
.stdout(contains("\"workspaceId\": \"wk_test\""));
cli_cmd(data_dir)
.args(["folder", "delete", &folder_id, "--yes"])
.assert()
.success()
.stdout(contains(format!("Deleted folder: {folder_id}")));
assert!(query_manager(data_dir).connect().get_folder(&folder_id).is_err());
}
#[test]
fn json_create_and_update_merge_patch_round_trip() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
seed_workspace(data_dir, "wk_test");
let create_assert = cli_cmd(data_dir)
.args([
"folder",
"create",
r#"{"workspaceId":"wk_test","name":"Json Folder"}"#,
])
.assert()
.success();
let folder_id = parse_created_id(&create_assert.get_output().stdout, "folder create");
cli_cmd(data_dir)
.args([
"folder",
"update",
&format!(r#"{{"id":"{}","description":"Folder Description"}}"#, folder_id),
])
.assert()
.success()
.stdout(contains(format!("Updated folder: {folder_id}")));
cli_cmd(data_dir)
.args(["folder", "show", &folder_id])
.assert()
.success()
.stdout(contains("\"name\": \"Json Folder\""))
.stdout(contains("\"description\": \"Folder Description\""));
}

View File

@@ -1,107 +0,0 @@
mod common;
use common::{cli_cmd, parse_created_id, query_manager, seed_request, seed_workspace};
use predicates::str::contains;
use tempfile::TempDir;
#[test]
fn show_and_delete_yes_round_trip() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
seed_workspace(data_dir, "wk_test");
let create_assert = cli_cmd(data_dir)
.args([
"request",
"create",
"wk_test",
"--name",
"Smoke Test",
"--url",
"https://example.com",
])
.assert()
.success();
let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");
cli_cmd(data_dir)
.args(["request", "show", &request_id])
.assert()
.success()
.stdout(contains(format!("\"id\": \"{request_id}\"")))
.stdout(contains("\"workspaceId\": \"wk_test\""));
cli_cmd(data_dir)
.args(["request", "delete", &request_id, "--yes"])
.assert()
.success()
.stdout(contains(format!("Deleted request: {request_id}")));
assert!(query_manager(data_dir).connect().get_http_request(&request_id).is_err());
}
#[test]
fn delete_without_yes_fails_in_non_interactive_mode() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
seed_workspace(data_dir, "wk_test");
seed_request(data_dir, "wk_test", "rq_seed_delete_noninteractive");
cli_cmd(data_dir)
.args(["request", "delete", "rq_seed_delete_noninteractive"])
.assert()
.failure()
.code(1)
.stderr(contains("Refusing to delete in non-interactive mode without --yes"));
assert!(
query_manager(data_dir).connect().get_http_request("rq_seed_delete_noninteractive").is_ok()
);
}
#[test]
fn json_create_and_update_merge_patch_round_trip() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
seed_workspace(data_dir, "wk_test");
let create_assert = cli_cmd(data_dir)
.args([
"request",
"create",
r#"{"workspaceId":"wk_test","name":"Json Request","url":"https://example.com"}"#,
])
.assert()
.success();
let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");
cli_cmd(data_dir)
.args([
"request",
"update",
&format!(r#"{{"id":"{}","name":"Renamed Request"}}"#, request_id),
])
.assert()
.success()
.stdout(contains(format!("Updated request: {request_id}")));
cli_cmd(data_dir)
.args(["request", "show", &request_id])
.assert()
.success()
.stdout(contains("\"name\": \"Renamed Request\""))
.stdout(contains("\"url\": \"https://example.com\""));
}
#[test]
fn update_requires_id_in_json_payload() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
cli_cmd(data_dir)
.args(["request", "update", r#"{"name":"No ID"}"#])
.assert()
.failure()
.stderr(contains("request update requires a non-empty \"id\" field"));
}

View File

@@ -1,59 +0,0 @@
mod common;
use common::{cli_cmd, parse_created_id, query_manager};
use predicates::str::contains;
use tempfile::TempDir;
#[test]
fn create_show_delete_round_trip() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
let create_assert =
cli_cmd(data_dir).args(["workspace", "create", "--name", "WS One"]).assert().success();
let workspace_id = parse_created_id(&create_assert.get_output().stdout, "workspace create");
cli_cmd(data_dir)
.args(["workspace", "show", &workspace_id])
.assert()
.success()
.stdout(contains(format!("\"id\": \"{workspace_id}\"")))
.stdout(contains("\"name\": \"WS One\""));
cli_cmd(data_dir)
.args(["workspace", "delete", &workspace_id, "--yes"])
.assert()
.success()
.stdout(contains(format!("Deleted workspace: {workspace_id}")));
assert!(query_manager(data_dir).connect().get_workspace(&workspace_id).is_err());
}
#[test]
fn json_create_and_update_merge_patch_round_trip() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
let create_assert = cli_cmd(data_dir)
.args(["workspace", "create", r#"{"name":"Json Workspace"}"#])
.assert()
.success();
let workspace_id = parse_created_id(&create_assert.get_output().stdout, "workspace create");
cli_cmd(data_dir)
.args([
"workspace",
"update",
&format!(r#"{{"id":"{}","description":"Updated via JSON"}}"#, workspace_id),
])
.assert()
.success()
.stdout(contains(format!("Updated workspace: {workspace_id}")));
cli_cmd(data_dir)
.args(["workspace", "show", &workspace_id])
.assert()
.success()
.stdout(contains("\"name\": \"Json Workspace\""))
.stdout(contains("\"description\": \"Updated via JSON\""));
}

View File

@@ -57,7 +57,6 @@ url = "2"
tokio-util = { version = "0.7", features = ["codec"] }
ts-rs = { workspace = true }
uuid = "1.12.1"
yaak-api = { workspace = true }
yaak-common = { workspace = true }
yaak-tauri-utils = { workspace = true }
yaak-core = { workspace = true }

View File

@@ -2,6 +2,14 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<!-- Enable for NodeJS execution -->
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<!-- Allow loading 1Password's dylib (signed with different Team ID) -->
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
<!-- Re-enable for sandboxing. Currently disabled because the auto-updater doesn't work with sandboxing. -->
<!-- <key>com.apple.security.app-sandbox</key> <true/>-->
<!-- <key>com.apple.security.files.user-selected.read-write</key> <true/>-->

View File

@@ -1,13 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<!-- Enable for NodeJS/V8 JIT compiler -->
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<!-- Allow loading plugins signed with different Team IDs (e.g., 1Password) -->
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
</dict>
</plist>

View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
</dict>
</plist>

View File

@@ -1,10 +1,9 @@
use crate::PluginContextExt;
use crate::error::Result;
use crate::PluginContextExt;
use std::sync::Arc;
use tauri::{AppHandle, Manager, Runtime, State, WebviewWindow, command};
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
use yaak_crypto::manager::EncryptionManager;
use yaak_models::models::HttpRequestHeader;
use yaak_models::queries::workspaces::default_headers;
use yaak_plugins::events::GetThemesResponse;
use yaak_plugins::manager::PluginManager;
use yaak_plugins::native_template_functions::{
@@ -22,6 +21,20 @@ impl<'a, R: Runtime, M: Manager<R>> EncryptionManagerExt<'a, R> for M {
}
}
#[command]
pub(crate) async fn cmd_show_workspace_key<R: Runtime>(
window: WebviewWindow<R>,
workspace_id: &str,
) -> Result<()> {
let key = window.crypto().reveal_workspace_key(workspace_id)?;
window
.dialog()
.message(format!("Your workspace key is \n\n{}", key))
.kind(MessageDialogKind::Info)
.show(|_v| {});
Ok(())
}
#[command]
pub(crate) async fn cmd_decrypt_template<R: Runtime>(
window: WebviewWindow<R>,
@@ -41,12 +54,7 @@ pub(crate) async fn cmd_secure_template<R: Runtime>(
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let plugin_context = window.plugin_context();
Ok(encrypt_secure_template_function(
plugin_manager,
encryption_manager,
&plugin_context,
template,
)?)
Ok(encrypt_secure_template_function(plugin_manager, encryption_manager, &plugin_context, template)?)
}
#[command]
@@ -84,17 +92,3 @@ pub(crate) async fn cmd_set_workspace_key<R: Runtime>(
window.crypto().set_human_key(workspace_id, key)?;
Ok(())
}
#[command]
pub(crate) async fn cmd_disable_encryption<R: Runtime>(
window: WebviewWindow<R>,
workspace_id: &str,
) -> Result<()> {
window.crypto().disable_encryption(workspace_id)?;
Ok(())
}
#[command]
pub(crate) fn cmd_default_headers() -> Vec<HttpRequestHeader> {
default_headers()
}

View File

@@ -36,7 +36,7 @@ pub enum Error {
PluginError(#[from] yaak_plugins::error::Error),
#[error(transparent)]
ApiError(#[from] yaak_api::Error),
TauriUtilsError(#[from] yaak_tauri_utils::error::Error),
#[error(transparent)]
ClipboardError(#[from] tauri_plugin_clipboard_manager::Error),

View File

@@ -6,47 +6,33 @@ use crate::error::Result;
use std::path::{Path, PathBuf};
use tauri::command;
use yaak_git::{
BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult,
PushResult, git_add, git_add_credential, git_add_remote, git_checkout_branch, git_clone,
git_commit, git_create_branch, git_delete_branch, git_delete_remote_branch, git_fetch_all,
git_init, git_log, git_merge_branch, git_pull, git_pull_force_reset, git_pull_merge, git_push,
git_remotes, git_rename_branch, git_reset_changes, git_rm_remote, git_status, git_unstage,
GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult,
git_add, git_add_credential, git_add_remote, git_checkout_branch, git_commit,
git_create_branch, git_delete_branch, git_fetch_all, git_init, git_log,
git_merge_branch, git_pull, git_push, git_remotes, git_rm_remote, git_status,
git_unstage,
};
// NOTE: All of these commands are async to prevent blocking work from locking up the UI
#[command]
pub async fn cmd_git_checkout(dir: &Path, branch: &str, force: bool) -> Result<String> {
Ok(git_checkout_branch(dir, branch, force).await?)
Ok(git_checkout_branch(dir, branch, force)?)
}
#[command]
pub async fn cmd_git_branch(dir: &Path, branch: &str, base: Option<&str>) -> Result<()> {
Ok(git_create_branch(dir, branch, base).await?)
pub async fn cmd_git_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_create_branch(dir, branch)?)
}
#[command]
pub async fn cmd_git_delete_branch(
dir: &Path,
branch: &str,
force: Option<bool>,
) -> Result<BranchDeleteResult> {
Ok(git_delete_branch(dir, branch, force.unwrap_or(false)).await?)
pub async fn cmd_git_delete_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_delete_branch(dir, branch)?)
}
#[command]
pub async fn cmd_git_delete_remote_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_delete_remote_branch(dir, branch).await?)
}
#[command]
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_merge_branch(dir, branch).await?)
}
#[command]
pub async fn cmd_git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
Ok(git_rename_branch(dir, old_name, new_name).await?)
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str, force: bool) -> Result<()> {
Ok(git_merge_branch(dir, branch, force)?)
}
#[command]
@@ -64,43 +50,24 @@ pub async fn cmd_git_initialize(dir: &Path) -> Result<()> {
Ok(git_init(dir)?)
}
#[command]
pub async fn cmd_git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
Ok(git_clone(url, dir).await?)
}
#[command]
pub async fn cmd_git_commit(dir: &Path, message: &str) -> Result<()> {
Ok(git_commit(dir, message).await?)
Ok(git_commit(dir, message)?)
}
#[command]
pub async fn cmd_git_fetch_all(dir: &Path) -> Result<()> {
Ok(git_fetch_all(dir).await?)
Ok(git_fetch_all(dir)?)
}
#[command]
pub async fn cmd_git_push(dir: &Path) -> Result<PushResult> {
Ok(git_push(dir).await?)
Ok(git_push(dir)?)
}
#[command]
pub async fn cmd_git_pull(dir: &Path) -> Result<PullResult> {
Ok(git_pull(dir).await?)
}
#[command]
pub async fn cmd_git_pull_force_reset(
dir: &Path,
remote: &str,
branch: &str,
) -> Result<PullResult> {
Ok(git_pull_force_reset(dir, remote, branch).await?)
}
#[command]
pub async fn cmd_git_pull_merge(dir: &Path, remote: &str, branch: &str) -> Result<PullResult> {
Ok(git_pull_merge(dir, remote, branch).await?)
Ok(git_pull(dir)?)
}
#[command]
@@ -119,18 +86,14 @@ pub async fn cmd_git_unstage(dir: &Path, rela_paths: Vec<PathBuf>) -> Result<()>
Ok(())
}
#[command]
pub async fn cmd_git_reset_changes(dir: &Path) -> Result<()> {
Ok(git_reset_changes(dir).await?)
}
#[command]
pub async fn cmd_git_add_credential(
dir: &Path,
remote_url: &str,
username: &str,
password: &str,
) -> Result<()> {
Ok(git_add_credential(remote_url, username, password).await?)
Ok(git_add_credential(dir, remote_url, username, password).await?)
}
#[command]

View File

@@ -1,12 +1,12 @@
use std::collections::BTreeMap;
use crate::PluginContextExt;
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use KeyAndValueRef::{Ascii, Binary};
use tauri::{Manager, Runtime, WebviewWindow};
use yaak_grpc::{KeyAndValueRef, MetadataMap};
use yaak_models::models::GrpcRequest;
use crate::models_ext::QueryManagerExt;
use yaak_plugins::events::{CallHttpAuthenticationRequest, HttpHeader};
use yaak_plugins::manager::PluginManager;

View File

@@ -1,8 +1,8 @@
use crate::models_ext::QueryManagerExt;
use chrono::{NaiveDateTime, Utc};
use log::debug;
use std::sync::OnceLock;
use tauri::{AppHandle, Runtime};
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
const NAMESPACE: &str = "analytics";

View File

@@ -1,13 +1,9 @@
use crate::PluginContextExt;
use crate::error::Error::GenericError;
use crate::error::Result;
use crate::models_ext::BlobManagerExt;
use crate::models_ext::QueryManagerExt;
use crate::render::render_http_request;
use log::{debug, warn};
use std::pin::Pin;
use std::sync::Arc;
use std::sync::atomic::{AtomicI32, Ordering};
use std::time::{Duration, Instant};
use tauri::{AppHandle, Manager, Runtime, WebviewWindow};
use tokio::fs::{File, create_dir_all};
@@ -19,19 +15,22 @@ use yaak_http::client::{
HttpConnectionOptions, HttpConnectionProxySetting, HttpConnectionProxySettingAuth,
};
use yaak_http::cookies::CookieStore;
use yaak_http::manager::{CachedClient, HttpConnectionManager};
use yaak_http::manager::HttpConnectionManager;
use yaak_http::sender::ReqwestSender;
use yaak_http::tee_reader::TeeReader;
use yaak_http::transaction::HttpTransaction;
use yaak_http::types::{
SendableBody, SendableHttpRequest, SendableHttpRequestOptions, append_query_params,
};
use crate::models_ext::BlobManagerExt;
use yaak_models::blob_manager::BodyChunk;
use yaak_models::models::{
CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseHeader,
HttpResponseState, ProxySetting, ProxySettingAuth,
};
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
use crate::PluginContextExt;
use yaak_plugins::events::{
CallHttpAuthenticationRequest, HttpHeader, PluginContext, RenderPurpose,
};
@@ -174,28 +173,19 @@ async fn send_http_request_inner<R: Runtime>(
let environment_id = environment.map(|e| e.id);
let workspace = window.db().get_workspace(workspace_id)?;
let (resolved, auth_context_id) = resolve_http_request(window, unrendered_request)?;
let cb = PluginTemplateCallback::new(
plugin_manager.clone(),
encryption_manager.clone(),
&plugin_context,
RenderPurpose::Send,
);
let cb = PluginTemplateCallback::new(plugin_manager.clone(), encryption_manager.clone(), &plugin_context, RenderPurpose::Send);
let env_chain =
window.db().resolve_environments(&workspace.id, folder_id, environment_id.as_deref())?;
let mut cancel_rx = cancelled_rx.clone();
let render_options = RenderOptions::throw();
let request = tokio::select! {
result = render_http_request(&resolved, env_chain, &cb, &render_options) => result?,
_ = cancel_rx.changed() => {
return Err(GenericError("Request canceled".to_string()));
}
};
let request = render_http_request(&resolved, env_chain, &cb, &RenderOptions::throw()).await?;
// Resolve inherited settings for this request
let resolved_settings = window.db().resolve_settings_for_http_request(&resolved)?;
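    // Assumed precedence: request-level overrides folder-level, which overrides
    // workspace defaults; unset levels fall through to the next one up.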
// Build the sendable request using the new SendableHttpRequest type
let options = SendableHttpRequestOptions {
follow_redirects: workspace.setting_follow_redirects,
timeout: if workspace.setting_request_timeout > 0 {
Some(Duration::from_millis(workspace.setting_request_timeout.unsigned_abs() as u64))
follow_redirects: resolved_settings.follow_redirects,
timeout: if resolved_settings.request_timeout > 0 {
Some(Duration::from_millis(resolved_settings.request_timeout.unsigned_abs() as u64))
} else {
None
},
@@ -241,36 +231,29 @@ async fn send_http_request_inner<R: Runtime>(
None => None,
};
let cached_client = connection_manager
let client = connection_manager
.get_client(&HttpConnectionOptions {
id: plugin_context.id.clone(),
validate_certificates: workspace.setting_validate_certificates,
validate_certificates: resolved_settings.validate_certificates,
proxy: proxy_setting,
client_certificate,
dns_overrides: workspace.setting_dns_overrides.clone(),
})
.await?;
// Apply authentication to the request, racing against cancellation since
// auth plugins (e.g. OAuth2) can block indefinitely waiting for user action.
let mut cancel_rx = cancelled_rx.clone();
tokio::select! {
result = apply_authentication(
&window,
&mut sendable_request,
&request,
auth_context_id,
&plugin_manager,
plugin_context,
) => result?,
_ = cancel_rx.changed() => {
return Err(GenericError("Request canceled".to_string()));
}
};
// Apply authentication to the request
apply_authentication(
&window,
&mut sendable_request,
&request,
auth_context_id,
&plugin_manager,
plugin_context,
)
.await?;
let cookie_store = maybe_cookie_store.as_ref().map(|(cs, _)| cs.clone());
let result = execute_transaction(
cached_client,
client,
sendable_request,
response_ctx,
cancelled_rx.clone(),
@@ -330,7 +313,7 @@ pub fn resolve_http_request<R: Runtime>(
}
async fn execute_transaction<R: Runtime>(
cached_client: CachedClient,
client: reqwest::Client,
mut sendable_request: SendableHttpRequest,
response_ctx: &mut ResponseContext<R>,
mut cancelled_rx: Receiver<bool>,
@@ -341,10 +324,7 @@ async fn execute_transaction<R: Runtime>(
let workspace_id = response_ctx.response().workspace_id.clone();
let is_persisted = response_ctx.is_persisted();
// Keep a reference to the resolver for DNS timing events
let resolver = cached_client.resolver.clone();
let sender = ReqwestSender::with_client(cached_client.client);
let sender = ReqwestSender::with_client(client);
let transaction = match cookie_store {
Some(cs) => HttpTransaction::with_cookie_store(sender, cs),
None => HttpTransaction::new(sender),
@@ -369,39 +349,21 @@ async fn execute_transaction<R: Runtime>(
let (event_tx, mut event_rx) =
tokio::sync::mpsc::channel::<yaak_http::sender::HttpResponseEvent>(100);
// Set the event sender on the DNS resolver so it can emit DNS timing events
resolver.set_event_sender(Some(event_tx.clone())).await;
// Shared state to capture DNS timing from the event processing task
let dns_elapsed = Arc::new(AtomicI32::new(0));
// Write events to DB in a task (only for persisted responses)
if is_persisted {
let response_id = response_id.clone();
let app_handle = app_handle.clone();
let update_source = response_ctx.update_source.clone();
let workspace_id = workspace_id.clone();
let dns_elapsed = dns_elapsed.clone();
tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
// Capture DNS timing when we see a DNS event
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
}
let db_event = HttpResponseEvent::new(&response_id, &workspace_id, event.into());
let _ = app_handle.db().upsert_http_response_event(&db_event, &update_source);
}
});
} else {
// For ephemeral responses, just drain the events but still capture DNS timing
let dns_elapsed = dns_elapsed.clone();
tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
}
}
});
// For ephemeral responses, just drain the events
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
};
// Capture request body as it's sent (only for persisted responses)
@@ -414,7 +376,7 @@ async fn execute_transaction<R: Runtime>(
sendable_request.body = Some(SendableBody::Bytes(bytes));
None
}
Some(SendableBody::Stream { data: stream, content_length }) => {
Some(SendableBody::Stream(stream)) => {
// Wrap stream with TeeReader to capture data as it's read
// Use unbounded channel to ensure all data is captured without blocking the HTTP request
let (body_chunk_tx, body_chunk_rx) = tokio::sync::mpsc::unbounded_channel::<Vec<u8>>();
@@ -448,7 +410,7 @@ async fn execute_transaction<R: Runtime>(
None
};
sendable_request.body = Some(SendableBody::Stream { data: pinned, content_length });
sendable_request.body = Some(SendableBody::Stream(pinned));
handle
}
None => {
@@ -569,14 +531,10 @@ async fn execute_transaction<R: Runtime>(
// Final update with closed state and accurate byte count
response_ctx.update(|r| {
r.elapsed = start.elapsed().as_millis() as i32;
r.elapsed_dns = dns_elapsed.load(Ordering::SeqCst);
r.content_length = Some(written_bytes as i32);
r.state = HttpResponseState::Closed;
})?;
// Clear the event sender from the resolver since this request is done
resolver.set_event_sender(None).await;
Ok((response_ctx.response().clone(), maybe_blob_write_handle))
}

View File

@@ -1,17 +1,17 @@
use crate::PluginContextExt;
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use log::info;
use std::collections::BTreeMap;
use std::fs::read_to_string;
use tauri::{Manager, Runtime, WebviewWindow};
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use yaak_core::WorkspaceContext;
use yaak_models::models::{
Environment, Folder, GrpcRequest, HttpRequest, WebsocketRequest, Workspace,
};
use yaak_models::util::{BatchUpsertResult, UpdateSource, maybe_gen_id, maybe_gen_id_opt};
use yaak_plugins::manager::PluginManager;
use yaak_tauri_utils::window::WorkspaceWindowTrait;
pub(crate) async fn import_data<R: Runtime>(
window: &WebviewWindow<R>,

View File

@@ -7,7 +7,7 @@ use crate::http_request::{resolve_http_request, send_http_request};
use crate::import::import_data;
use crate::models_ext::{BlobManagerExt, QueryManagerExt};
use crate::notifications::YaakNotifier;
use crate::render::{render_grpc_request, render_json_value, render_template};
use crate::render::{render_grpc_request, render_template};
use crate::updates::{UpdateMode, UpdateTrigger, YaakUpdater};
use crate::uri_scheme::handle_deep_link;
use error::Result as YaakResult;
@@ -37,8 +37,8 @@ use yaak_grpc::{Code, ServiceDefinition, serialize_message};
use yaak_mac_window::AppHandleMacWindowExt;
use yaak_models::models::{
AnyModel, CookieJar, Environment, GrpcConnection, GrpcConnectionState, GrpcEvent,
GrpcEventType, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseState, Plugin,
Workspace, WorkspaceMeta,
GrpcEventType, GrpcRequest, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseState,
Plugin, Workspace, WorkspaceMeta,
};
use yaak_models::util::{BatchUpsertResult, UpdateSource, get_workspace_export_resources};
use yaak_plugins::events::{
@@ -101,7 +101,6 @@ struct AppMetaData {
app_data_dir: String,
app_log_dir: String,
vendored_plugin_dir: String,
default_project_dir: String,
feature_updater: bool,
feature_license: bool,
}
@@ -112,7 +111,6 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
let app_log_dir = app_handle.path().app_log_dir()?;
let vendored_plugin_dir =
app_handle.path().resolve("vendored/plugins", BaseDirectory::Resource)?;
let default_project_dir = app_handle.path().home_dir()?.join("YaakProjects");
Ok(AppMetaData {
is_dev: is_dev(),
version: app_handle.package_info().version.to_string(),
@@ -120,7 +118,6 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
app_data_dir: app_data_dir.to_string_lossy().to_string(),
app_log_dir: app_log_dir.to_string_lossy().to_string(),
vendored_plugin_dir: vendored_plugin_dir.to_string_lossy().to_string(),
default_project_dir: default_project_dir.to_string_lossy().to_string(),
feature_license: cfg!(feature = "license"),
feature_updater: cfg!(feature = "updater"),
})
@@ -192,6 +189,7 @@ async fn cmd_grpc_reflect<R: Runtime>(
request_id: &str,
environment_id: Option<&str>,
proto_files: Vec<String>,
skip_cache: Option<bool>,
window: WebviewWindow<R>,
app_handle: AppHandle<R>,
grpc_handle: State<'_, Mutex<GrpcHandle>>,
@@ -226,21 +224,18 @@ async fn cmd_grpc_reflect<R: Runtime>(
let settings = window.db().get_settings();
let client_certificate =
find_client_certificate(req.url.as_str(), &settings.client_certificates);
let proto_files: Vec<PathBuf> =
proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect();
// Always invalidate cached pool when this command is called, to force re-reflection
let mut handle = grpc_handle.lock().await;
handle.invalidate_pool(&req.id, &uri, &proto_files);
Ok(handle
Ok(grpc_handle
.lock()
.await
.services(
&req.id,
&uri,
&proto_files,
&proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect(),
&metadata,
workspace.setting_validate_certificates,
workspace.setting_validate_certificates.unwrap_or(true),
client_certificate,
skip_cache.unwrap_or(false),
)
.await
.map_err(|e| GenericError(e.to_string()))?)
@@ -332,7 +327,7 @@ async fn cmd_grpc_go<R: Runtime>(
uri.as_str(),
&proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect(),
&metadata,
workspace.setting_validate_certificates,
workspace.setting_validate_certificates.unwrap_or(true),
client_cert.clone(),
)
.await;
@@ -365,8 +360,10 @@ async fn cmd_grpc_go<R: Runtime>(
let cb = {
let cancelled_rx = cancelled_rx.clone();
let app_handle = app_handle.clone();
let environment_chain = environment_chain.clone();
let window = window.clone();
let base_msg = base_msg.clone();
let plugin_manager = plugin_manager.clone();
let encryption_manager = encryption_manager.clone();
@@ -388,12 +385,14 @@ async fn cmd_grpc_go<R: Runtime>(
match serde_json::from_str::<IncomingMsg>(ev.payload()) {
Ok(IncomingMsg::Message(msg)) => {
let window = window.clone();
let app_handle = app_handle.clone();
let base_msg = base_msg.clone();
let environment_chain = environment_chain.clone();
let plugin_manager = plugin_manager.clone();
let encryption_manager = encryption_manager.clone();
let msg = block_in_place(|| {
tauri::async_runtime::block_on(async {
let result = render_template(
render_template(
msg.as_str(),
environment_chain,
&PluginTemplateCallback::new(
@@ -407,11 +406,24 @@ async fn cmd_grpc_go<R: Runtime>(
),
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await;
result.expect("Failed to render template")
.await
.expect("Failed to render template")
})
});
in_msg_tx.try_send(msg.clone()).unwrap();
tauri::async_runtime::spawn(async move {
app_handle
.db()
.upsert_grpc_event(
&GrpcEvent {
content: msg,
event_type: GrpcEventType::ClientMessage,
..base_msg.clone()
},
&UpdateSource::from_window_label(window.label()),
)
.unwrap();
});
}
Ok(IncomingMsg::Commit) => {
maybe_in_msg_tx.take();
@@ -458,48 +470,12 @@ async fn cmd_grpc_go<R: Runtime>(
)?;
async move {
// Create callback for streaming methods that handles both success and error
let on_message = {
let app_handle = app_handle.clone();
let base_event = base_event.clone();
let window_label = window.label().to_string();
move |result: std::result::Result<String, String>| match result {
Ok(msg) => {
let _ = app_handle.db().upsert_grpc_event(
&GrpcEvent {
content: msg,
event_type: GrpcEventType::ClientMessage,
..base_event.clone()
},
&UpdateSource::from_window_label(&window_label),
);
}
Err(error) => {
let _ = app_handle.db().upsert_grpc_event(
&GrpcEvent {
content: format!("Failed to send message: {}", error),
event_type: GrpcEventType::Error,
..base_event.clone()
},
&UpdateSource::from_window_label(&window_label),
);
}
}
};
let (maybe_stream, maybe_msg) =
match (method_desc.is_client_streaming(), method_desc.is_server_streaming()) {
(true, true) => (
Some(
connection
.streaming(
&service,
&method,
in_msg_stream,
&metadata,
client_cert,
on_message.clone(),
)
.streaming(&service, &method, in_msg_stream, &metadata, client_cert)
.await,
),
None,
@@ -514,7 +490,6 @@ async fn cmd_grpc_go<R: Runtime>(
in_msg_stream,
&metadata,
client_cert,
on_message.clone(),
)
.await,
),
@@ -1060,55 +1035,14 @@ async fn cmd_get_http_authentication_summaries<R: Runtime>(
#[tauri::command]
async fn cmd_get_http_authentication_config<R: Runtime>(
window: WebviewWindow<R>,
app_handle: AppHandle<R>,
plugin_manager: State<'_, PluginManager>,
encryption_manager: State<'_, EncryptionManager>,
auth_name: &str,
values: HashMap<String, JsonPrimitive>,
model: AnyModel,
environment_id: Option<&str>,
_environment_id: Option<&str>,
) -> YaakResult<GetHttpAuthenticationConfigResponse> {
// Extract workspace_id and folder_id from the model to resolve the environment chain
let (workspace_id, folder_id) = match &model {
AnyModel::HttpRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::GrpcRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::WebsocketRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::Folder(f) => (f.workspace_id.clone(), f.folder_id.clone()),
AnyModel::Workspace(w) => (w.id.clone(), None),
_ => return Err(GenericError("Unsupported model type for authentication config".into())),
};
// Resolve environment chain and render the values for token lookup
let environment_chain = app_handle.db().resolve_environments(
&workspace_id,
folder_id.as_deref(),
environment_id,
)?;
let plugin_manager_arc = Arc::new((*plugin_manager).clone());
let encryption_manager_arc = Arc::new((*encryption_manager).clone());
let cb = PluginTemplateCallback::new(
plugin_manager_arc,
encryption_manager_arc,
&window.plugin_context(),
RenderPurpose::Preview,
);
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
let values_json: serde_json::Value = serde_json::to_value(&values)?;
let rendered_json =
render_json_value(values_json, environment_chain, &cb, &RenderOptions::return_empty())
.await?;
// Convert back to HashMap<String, JsonPrimitive>
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
Ok(plugin_manager
.get_http_authentication_config(
&window.plugin_context(),
auth_name,
rendered_values,
model.id(),
)
.get_http_authentication_config(&window.plugin_context(), auth_name, values, model.id())
.await?)
}
@@ -1155,54 +1089,19 @@ async fn cmd_call_grpc_request_action<R: Runtime>(
#[tauri::command]
async fn cmd_call_http_authentication_action<R: Runtime>(
window: WebviewWindow<R>,
app_handle: AppHandle<R>,
plugin_manager: State<'_, PluginManager>,
encryption_manager: State<'_, EncryptionManager>,
auth_name: &str,
action_index: i32,
values: HashMap<String, JsonPrimitive>,
model: AnyModel,
environment_id: Option<&str>,
_environment_id: Option<&str>,
) -> YaakResult<()> {
// Extract workspace_id and folder_id from the model to resolve the environment chain
let (workspace_id, folder_id) = match &model {
AnyModel::HttpRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::GrpcRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::WebsocketRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::Folder(f) => (f.workspace_id.clone(), f.folder_id.clone()),
AnyModel::Workspace(w) => (w.id.clone(), None),
_ => return Err(GenericError("Unsupported model type for authentication action".into())),
};
// Resolve environment chain and render the values
let environment_chain = app_handle.db().resolve_environments(
&workspace_id,
folder_id.as_deref(),
environment_id,
)?;
let plugin_manager_arc = Arc::new((*plugin_manager).clone());
let encryption_manager_arc = Arc::new((*encryption_manager).clone());
let cb = PluginTemplateCallback::new(
plugin_manager_arc,
encryption_manager_arc,
&window.plugin_context(),
RenderPurpose::Send,
);
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
let values_json: serde_json::Value = serde_json::to_value(&values)?;
let rendered_json =
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;
// Convert back to HashMap<String, JsonPrimitive>
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
Ok(plugin_manager
.call_http_authentication_action(
&window.plugin_context(),
auth_name,
action_index,
rendered_values,
values,
&model.id(),
)
.await?)
@@ -1272,6 +1171,35 @@ async fn cmd_save_response<R: Runtime>(
Ok(())
}
#[tauri::command]
async fn cmd_send_folder<R: Runtime>(
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
environment_id: Option<String>,
cookie_jar_id: Option<String>,
folder_id: &str,
) -> YaakResult<()> {
let requests = app_handle.db().list_http_requests_for_folder_recursive(folder_id)?;
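    // Each request is sent on its own task, so sends run concurrently and this
    // command returns without waiting for any responses.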
for request in requests {
let app_handle = app_handle.clone();
let window = window.clone();
let environment_id = environment_id.clone();
let cookie_jar_id = cookie_jar_id.clone();
tokio::spawn(async move {
let _ = cmd_send_http_request(
app_handle,
window,
environment_id.as_deref(),
cookie_jar_id.as_deref(),
request,
)
.await;
});
}
Ok(())
}
#[tauri::command]
async fn cmd_send_http_request<R: Runtime>(
app_handle: AppHandle<R>,
@@ -1368,6 +1296,27 @@ async fn cmd_install_plugin<R: Runtime>(
Ok(plugin)
}
#[tauri::command]
async fn cmd_create_grpc_request<R: Runtime>(
workspace_id: &str,
name: &str,
sort_priority: f64,
folder_id: Option<&str>,
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
) -> YaakResult<GrpcRequest> {
Ok(app_handle.db().upsert_grpc_request(
&GrpcRequest {
workspace_id: workspace_id.to_string(),
name: name.to_string(),
folder_id: folder_id.map(|s| s.to_string()),
sort_priority,
..Default::default()
},
&UpdateSource::from_window_label(window.label()),
)?)
}
#[tauri::command]
async fn cmd_reload_plugins<R: Runtime>(
app_handle: AppHandle<R>,
@@ -1630,6 +1579,7 @@ pub fn run() {
cmd_call_folder_action,
cmd_call_grpc_request_action,
cmd_check_for_updates,
cmd_create_grpc_request,
cmd_curl_to_request,
cmd_delete_all_grpc_connections,
cmd_delete_all_http_responses,
@@ -1663,6 +1613,7 @@ pub fn run() {
cmd_save_response,
cmd_send_ephemeral_request,
cmd_send_http_request,
cmd_send_folder,
cmd_template_function_config,
cmd_template_function_summaries,
cmd_template_tokens_to_string,
@@ -1670,13 +1621,12 @@ pub fn run() {
//
// Migrated commands
crate::commands::cmd_decrypt_template,
crate::commands::cmd_default_headers,
crate::commands::cmd_disable_encryption,
crate::commands::cmd_enable_encryption,
crate::commands::cmd_get_themes,
crate::commands::cmd_reveal_workspace_key,
crate::commands::cmd_secure_template,
crate::commands::cmd_set_workspace_key,
crate::commands::cmd_show_workspace_key,
//
// Models commands
models_ext::models_delete,
@@ -1699,36 +1649,30 @@ pub fn run() {
git_ext::cmd_git_checkout,
git_ext::cmd_git_branch,
git_ext::cmd_git_delete_branch,
git_ext::cmd_git_delete_remote_branch,
git_ext::cmd_git_merge_branch,
git_ext::cmd_git_rename_branch,
git_ext::cmd_git_status,
git_ext::cmd_git_log,
git_ext::cmd_git_initialize,
git_ext::cmd_git_clone,
git_ext::cmd_git_commit,
git_ext::cmd_git_fetch_all,
git_ext::cmd_git_push,
git_ext::cmd_git_pull,
git_ext::cmd_git_pull_force_reset,
git_ext::cmd_git_pull_merge,
git_ext::cmd_git_add,
git_ext::cmd_git_unstage,
git_ext::cmd_git_reset_changes,
git_ext::cmd_git_add_credential,
git_ext::cmd_git_remotes,
git_ext::cmd_git_add_remote,
git_ext::cmd_git_rm_remote,
//
// Plugin commands
plugins_ext::cmd_plugins_search,
plugins_ext::cmd_plugins_install,
plugins_ext::cmd_plugins_uninstall,
plugins_ext::cmd_plugins_updates,
plugins_ext::cmd_plugins_update_all,
//
// WebSocket commands
ws_ext::cmd_ws_upsert_request,
ws_ext::cmd_ws_duplicate_request,
ws_ext::cmd_ws_delete_request,
ws_ext::cmd_ws_delete_connection,
ws_ext::cmd_ws_delete_connections,
ws_ext::cmd_ws_list_events,
ws_ext::cmd_ws_list_requests,
ws_ext::cmd_ws_list_connections,
ws_ext::cmd_ws_send,
ws_ext::cmd_ws_close,
ws_ext::cmd_ws_connect,

View File

@@ -3,9 +3,6 @@
//! This module provides the Tauri plugin initialization and extension traits
//! that allow accessing QueryManager and BlobManager from Tauri's Manager types.
use chrono::Utc;
use log::error;
use std::time::Duration;
use tauri::plugin::TauriPlugin;
use tauri::{Emitter, Manager, Runtime, State};
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
@@ -16,74 +13,6 @@ use yaak_models::models::{AnyModel, GraphQlIntrospection, GrpcEvent, Settings, W
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
const MODEL_CHANGES_RETENTION_HOURS: i64 = 1;
const MODEL_CHANGES_POLL_INTERVAL_MS: u64 = 250;
const MODEL_CHANGES_POLL_BATCH_SIZE: usize = 200;
struct ModelChangeCursor {
created_at: String,
id: i64,
}
impl ModelChangeCursor {
fn from_launch_time() -> Self {
Self {
created_at: Utc::now().naive_utc().format("%Y-%m-%d %H:%M:%S%.3f").to_string(),
id: 0,
}
}
}
fn drain_model_changes_batch<R: Runtime>(
query_manager: &QueryManager,
app_handle: &tauri::AppHandle<R>,
cursor: &mut ModelChangeCursor,
) -> bool {
let changes = match query_manager.connect().list_model_changes_since(
&cursor.created_at,
cursor.id,
MODEL_CHANGES_POLL_BATCH_SIZE,
) {
Ok(changes) => changes,
Err(err) => {
error!("Failed to poll model_changes rows: {err:?}");
return false;
}
};
if changes.is_empty() {
return false;
}
let fetched_count = changes.len();
for change in changes {
cursor.created_at = change.created_at;
cursor.id = change.id;
// Local window-originated writes are forwarded immediately from the
// in-memory model event channel.
if matches!(change.payload.update_source, UpdateSource::Window { .. }) {
continue;
}
if let Err(err) = app_handle.emit("model_write", change.payload) {
error!("Failed to emit model_write event: {err:?}");
}
}
fetched_count == MODEL_CHANGES_POLL_BATCH_SIZE
}
async fn run_model_change_poller<R: Runtime>(
query_manager: QueryManager,
app_handle: tauri::AppHandle<R>,
mut cursor: ModelChangeCursor,
) {
loop {
while drain_model_changes_batch(&query_manager, &app_handle, &mut cursor) {}
tokio::time::sleep(Duration::from_millis(MODEL_CHANGES_POLL_INTERVAL_MS)).await;
}
}
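The poller above advances a `(created_at, id)` cursor so each batch resumes strictly after the last row seen, even when multiple rows share a timestamp. A hedged sketch of the keyset query that `list_model_changes_since` might run, assuming a SQLite `model_changes` table and the rusqlite crate (the real query lives in yaak-models and may differ):

```rust
use rusqlite::{Connection, Result, params};

// Hypothetical schema: model_changes(id INTEGER, created_at TEXT, payload TEXT).
fn list_model_changes_since(
    conn: &Connection,
    created_at: &str,
    id: i64,
    limit: usize,
) -> Result<Vec<(String, i64, String)>> {
    // Row-value comparison gives keyset pagination over the compound cursor.
    let mut stmt = conn.prepare(
        "SELECT created_at, id, payload FROM model_changes
         WHERE (created_at, id) > (?1, ?2)
         ORDER BY created_at, id
         LIMIT ?3",
    )?;
    let rows = stmt.query_map(params![created_at, id, limit as i64], |row| {
        Ok((row.get(0)?, row.get(1)?, row.get(2)?))
    })?;
    rows.collect()
}
```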
/// Extension trait for accessing the QueryManager from Tauri Manager types.
pub trait QueryManagerExt<'a, R> {
fn db_manager(&'a self) -> State<'a, QueryManager>;
@@ -333,37 +262,14 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
}
};
let db = query_manager.connect();
if let Err(err) = db.prune_model_changes_older_than_hours(MODEL_CHANGES_RETENTION_HOURS)
{
error!("Failed to prune model_changes rows on startup: {err:?}");
}
// Only stream writes that happen after this app launch.
let cursor = ModelChangeCursor::from_launch_time();
let poll_query_manager = query_manager.clone();
app_handle.manage(query_manager);
app_handle.manage(blob_manager);
// Poll model_changes so all writers (including external CLI processes) update the UI.
let app_handle_poll = app_handle.clone();
let query_manager = poll_query_manager;
tauri::async_runtime::spawn(async move {
run_model_change_poller(query_manager, app_handle_poll, cursor).await;
});
// Fast path for local app writes initiated by frontend windows. This keeps the
// current sync-model UX snappy, while DB polling handles external writers (CLI).
let app_handle_local = app_handle.clone();
// Forward model change events to the frontend
let app_handle = app_handle.clone();
tauri::async_runtime::spawn(async move {
for payload in rx {
if !matches!(payload.update_source, UpdateSource::Window { .. }) {
continue;
}
if let Err(err) = app_handle_local.emit("model_write", payload) {
error!("Failed to emit local model_write event: {err:?}");
}
app_handle.emit("model_write", payload).unwrap();
}
});

View File

@@ -1,6 +1,5 @@
use crate::error::Result;
use crate::history::get_or_upsert_launch_info;
use crate::models_ext::QueryManagerExt;
use chrono::{DateTime, Utc};
use log::{debug, info};
use reqwest::Method;
@@ -8,8 +7,9 @@ use serde::{Deserialize, Serialize};
use std::time::Instant;
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
use ts_rs::TS;
use yaak_api::yaak_api_client;
use yaak_common::platform::get_os_str;
use yaak_tauri_utils::api_client::yaak_api_client;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
// Check for updates every hour
@@ -101,8 +101,7 @@ impl YaakNotifier {
let license_check = "disabled".to_string();
let launch_info = get_or_upsert_launch_info(app_handle);
let app_version = app_handle.package_info().version.to_string();
let req = yaak_api_client(&app_version)?
let req = yaak_api_client(app_handle)?
.request(Method::GET, "https://notify.yaak.app/notifications")
.query(&[
("version", &launch_info.current_version),

View File

@@ -1,7 +1,5 @@
use crate::error::Result;
use crate::http_request::send_http_request_with_context;
use crate::models_ext::BlobManagerExt;
use crate::models_ext::QueryManagerExt;
use crate::render::{render_grpc_request, render_http_request, render_json_value};
use crate::window::{CreateWindowConfig, create_window};
use crate::{
@@ -12,12 +10,15 @@ use chrono::Utc;
use cookie::Cookie;
use log::error;
use std::sync::Arc;
use tauri::{AppHandle, Emitter, Listener, Manager, Runtime};
use tauri::{AppHandle, Emitter, Manager, Runtime};
use tauri_plugin_clipboard_manager::ClipboardExt;
use tauri_plugin_opener::OpenerExt;
use yaak_crypto::manager::EncryptionManager;
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use crate::models_ext::BlobManagerExt;
use yaak_models::models::{AnyModel, HttpResponse, Plugin};
use yaak_models::queries::any_request::AnyRequest;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
use yaak_plugins::error::Error::PluginErr;
use yaak_plugins::events::{
@@ -31,7 +32,6 @@ use yaak_plugins::events::{
use yaak_plugins::manager::PluginManager;
use yaak_plugins::plugin_handle::PluginHandle;
use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use yaak_templates::{RenderErrorBehavior, RenderOptions};
pub(crate) async fn handle_plugin_event<R: Runtime>(
@@ -57,58 +57,6 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
Ok(call_frontend(&window, event).await)
}
InternalEventPayload::PromptFormRequest(_) => {
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
if event.reply_id.is_some() {
// Follow-up update from plugin runtime with resolved inputs — forward to frontend
window.emit_to(window.label(), "plugin_event", event.clone())?;
Ok(None)
} else {
// Initial request — set up bidirectional communication
window.emit_to(window.label(), "plugin_event", event.clone()).unwrap();
let event_id = event.id.clone();
let plugin_handle = plugin_handle.clone();
let plugin_context = plugin_context.clone();
let window = window.clone();
// Spawn async task to handle bidirectional form communication
tauri::async_runtime::spawn(async move {
let (tx, mut rx) = tokio::sync::mpsc::channel::<InternalEvent>(128);
// Listen for replies from the frontend
let listener_id = window.listen(event_id, move |ev: tauri::Event| {
let resp: InternalEvent = serde_json::from_str(ev.payload()).unwrap();
let _ = tx.try_send(resp);
});
// Forward each reply to the plugin runtime
while let Some(resp) = rx.recv().await {
let is_done = matches!(
&resp.payload,
InternalEventPayload::PromptFormResponse(r) if r.done.unwrap_or(false)
);
let event_to_send = plugin_handle.build_event_to_send(
&plugin_context,
&resp.payload,
Some(resp.reply_id.unwrap_or_default()),
);
if let Err(e) = plugin_handle.send(&event_to_send).await {
log::warn!("Failed to forward form response to plugin: {:?}", e);
}
if is_done {
break;
}
}
window.unlisten(listener_id);
});
Ok(None)
}
}
InternalEventPayload::FindHttpResponsesRequest(req) => {
let http_responses = app_handle
.db()
@@ -218,12 +166,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let grpc_request =
render_grpc_request(&req.grpc_request, environment_chain, &cb, &opt).await?;
@@ -244,12 +187,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let opt = &RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let http_request =
render_http_request(&req.http_request, environment_chain, &cb, &opt).await?;
@@ -280,12 +218,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let data = render_json_value(req.data, environment_chain, &cb, &opt).await?;
Ok(Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data })))

View File

@@ -17,11 +17,10 @@ use tauri::path::BaseDirectory;
use tauri::plugin::{Builder, TauriPlugin};
use tauri::{
AppHandle, Emitter, Manager, RunEvent, Runtime, State, WebviewWindow, WindowEvent, command,
is_dev,
generate_handler, is_dev,
};
use tokio::sync::Mutex;
use ts_rs::TS;
use yaak_api::yaak_api_client;
use yaak_models::models::Plugin;
use yaak_models::util::UpdateSource;
use yaak_plugins::api::{
@@ -32,6 +31,7 @@ use yaak_plugins::events::{Color, Icon, PluginContext, ShowToastRequest};
use yaak_plugins::install::{delete_and_uninstall, download_and_install};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::plugin_meta::get_plugin_meta;
use yaak_tauri_utils::api_client::yaak_api_client;
static EXITING: AtomicBool = AtomicBool::new(false);
@@ -72,8 +72,7 @@ impl PluginUpdater {
info!("Checking for plugin updates");
let app_version = window.app_handle().package_info().version.to_string();
let http_client = yaak_api_client(&app_version)?;
let http_client = yaak_api_client(window.app_handle())?;
let plugins = window.app_handle().db().list_plugins()?;
let updates = check_plugin_updates(&http_client, plugins.clone()).await?;
@@ -133,24 +132,22 @@ impl PluginUpdater {
// ============================================================================
#[command]
pub async fn cmd_plugins_search<R: Runtime>(
pub(crate) async fn cmd_plugins_search<R: Runtime>(
app_handle: AppHandle<R>,
query: &str,
) -> Result<PluginSearchResponse> {
let app_version = app_handle.package_info().version.to_string();
let http_client = yaak_api_client(&app_version)?;
let http_client = yaak_api_client(&app_handle)?;
Ok(search_plugins(&http_client, query).await?)
}
#[command]
pub async fn cmd_plugins_install<R: Runtime>(
pub(crate) async fn cmd_plugins_install<R: Runtime>(
window: WebviewWindow<R>,
name: &str,
version: Option<String>,
) -> Result<()> {
let plugin_manager = Arc::new((*window.state::<PluginManager>()).clone());
let app_version = window.app_handle().package_info().version.to_string();
let http_client = yaak_api_client(&app_version)?;
let http_client = yaak_api_client(window.app_handle())?;
let query_manager = window.state::<yaak_models::query_manager::QueryManager>();
let plugin_context = window.plugin_context();
download_and_install(
@@ -166,7 +163,7 @@ pub async fn cmd_plugins_install<R: Runtime>(
}
#[command]
pub async fn cmd_plugins_uninstall<R: Runtime>(
pub(crate) async fn cmd_plugins_uninstall<R: Runtime>(
plugin_id: &str,
window: WebviewWindow<R>,
) -> Result<Plugin> {
@@ -177,21 +174,19 @@ pub async fn cmd_plugins_uninstall<R: Runtime>(
}
#[command]
pub async fn cmd_plugins_updates<R: Runtime>(
pub(crate) async fn cmd_plugins_updates<R: Runtime>(
app_handle: AppHandle<R>,
) -> Result<PluginUpdatesResponse> {
let app_version = app_handle.package_info().version.to_string();
let http_client = yaak_api_client(&app_version)?;
let http_client = yaak_api_client(&app_handle)?;
let plugins = app_handle.db().list_plugins()?;
Ok(check_plugin_updates(&http_client, plugins).await?)
}
#[command]
pub async fn cmd_plugins_update_all<R: Runtime>(
pub(crate) async fn cmd_plugins_update_all<R: Runtime>(
window: WebviewWindow<R>,
) -> Result<Vec<PluginNameVersion>> {
let app_version = window.app_handle().package_info().version.to_string();
let http_client = yaak_api_client(&app_version)?;
let http_client = yaak_api_client(window.app_handle())?;
let plugins = window.db().list_plugins()?;
// Get list of available updates (already filtered to only registry plugins)
@@ -238,6 +233,13 @@ pub async fn cmd_plugins_update_all<R: Runtime>(
pub fn init<R: Runtime>() -> TauriPlugin<R> {
Builder::new("yaak-plugins")
.invoke_handler(generate_handler![
cmd_plugins_search,
cmd_plugins_install,
cmd_plugins_uninstall,
cmd_plugins_updates,
cmd_plugins_update_all
])
.setup(|app_handle, _| {
// Resolve paths for plugin manager
let vendored_plugin_dir = app_handle

View File

@@ -38,9 +38,6 @@ pub async fn render_grpc_request<T: TemplateCallback>(
let mut metadata = Vec::new();
for p in r.metadata.clone() {
if !p.enabled {
continue;
}
metadata.push(HttpRequestHeader {
enabled: p.enabled,
name: parse_and_render(p.name.as_str(), vars, cb, &opt).await?,
@@ -122,7 +119,6 @@ pub async fn render_http_request<T: TemplateCallback>(
let mut body = BTreeMap::new();
for (k, v) in r.body.clone() {
let v = if k == "form" { strip_disabled_form_entries(v) } else { v };
body.insert(k, render_json_value_raw(v, vars, cb, &opt).await?);
}
@@ -165,71 +161,3 @@ pub async fn render_http_request<T: TemplateCallback>(
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
}
/// Strip disabled entries from a JSON array of form objects.
fn strip_disabled_form_entries(v: Value) -> Value {
match v {
Value::Array(items) => Value::Array(
items
.into_iter()
.filter(|item| item.get("enabled").and_then(|e| e.as_bool()).unwrap_or(true))
.collect(),
),
v => v,
}
}
#[cfg(test)]
mod tests {
use super::*;
use serde_json::json;
#[test]
fn test_strip_disabled_form_entries() {
let input = json!([
{"enabled": true, "name": "foo", "value": "bar"},
{"enabled": false, "name": "disabled", "value": "gone"},
{"enabled": true, "name": "baz", "value": "qux"},
]);
let result = strip_disabled_form_entries(input);
assert_eq!(
result,
json!([
{"enabled": true, "name": "foo", "value": "bar"},
{"enabled": true, "name": "baz", "value": "qux"},
])
);
}
#[test]
fn test_strip_disabled_form_entries_all_disabled() {
let input = json!([
{"enabled": false, "name": "a", "value": "b"},
{"enabled": false, "name": "c", "value": "d"},
]);
let result = strip_disabled_form_entries(input);
assert_eq!(result, json!([]));
}
#[test]
fn test_strip_disabled_form_entries_missing_enabled_defaults_to_kept() {
let input = json!([
{"name": "no_enabled_field", "value": "kept"},
{"enabled": false, "name": "disabled", "value": "gone"},
]);
let result = strip_disabled_form_entries(input);
assert_eq!(
result,
json!([
{"name": "no_enabled_field", "value": "kept"},
])
);
}
#[test]
fn test_strip_disabled_form_entries_non_array_passthrough() {
let input = json!("just a string");
let result = strip_disabled_form_entries(input.clone());
assert_eq!(result, input);
}
}

View File

@@ -3,7 +3,6 @@ use std::path::PathBuf;
use std::time::{Duration, Instant};
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use log::{debug, error, info, warn};
use serde::{Deserialize, Serialize};
use tauri::{Emitter, Listener, Manager, Runtime, WebviewWindow};
@@ -12,12 +11,10 @@ use tauri_plugin_updater::{Update, UpdaterExt};
use tokio::task::block_in_place;
use tokio::time::sleep;
use ts_rs::TS;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::generate_id;
use yaak_plugins::manager::PluginManager;
use url::Url;
use yaak_api::get_system_proxy_url;
use crate::error::Error::GenericError;
use crate::is_dev;
@@ -90,13 +87,8 @@ impl YaakUpdater {
info!("Checking for updates mode={} autodl={}", mode, auto_download);
let w = window.clone();
let mut updater_builder = w.updater_builder();
if let Some(proxy_url) = get_system_proxy_url() {
if let Ok(url) = Url::parse(&proxy_url) {
updater_builder = updater_builder.proxy(url);
}
}
let update_check_result = updater_builder
let update_check_result = w
.updater_builder()
.on_before_exit(move || {
// Kill plugin manager before exit or NSIS installer will fail to replace sidecar
// while it's running.

View File

@@ -1,14 +1,14 @@
use crate::PluginContextExt;
use crate::error::Result;
use crate::import::import_data;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use log::{info, warn};
use std::collections::HashMap;
use std::fs;
use std::sync::Arc;
use tauri::{AppHandle, Emitter, Manager, Runtime, Url};
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogKind};
use yaak_api::yaak_api_client;
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_models::util::generate_id;
use yaak_plugins::events::{Color, ShowToastRequest};
use yaak_plugins::install::download_and_install;
@@ -46,8 +46,7 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
let plugin_manager = Arc::new((*window.state::<PluginManager>()).clone());
let query_manager = app_handle.db_manager();
let app_version = app_handle.package_info().version.to_string();
let http_client = yaak_api_client(&app_version)?;
let http_client = yaak_api_client(app_handle)?;
let plugin_context = window.plugin_context();
let pv = download_and_install(
plugin_manager,
@@ -56,8 +55,7 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
&plugin_context,
name,
version,
)
.await?;
).await?;
app_handle.emit(
"show_toast",
ShowToastRequest {
@@ -87,8 +85,7 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
return Ok(());
}
let app_version = app_handle.package_info().version.to_string();
let resp = yaak_api_client(&app_version)?.get(file_url).send().await?;
let resp = yaak_api_client(app_handle)?.get(file_url).send().await?;
let json = resp.bytes().await?;
let p = app_handle
.path()

View File

@@ -1,5 +1,4 @@
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::window_menu::app_menu;
use log::{info, warn};
use rand::random;
@@ -9,6 +8,7 @@ use tauri::{
};
use tauri_plugin_opener::OpenerExt;
use tokio::sync::mpsc;
use crate::models_ext::QueryManagerExt;
const DEFAULT_WINDOW_WIDTH: f64 = 1100.0;
const DEFAULT_WINDOW_HEIGHT: f64 = 600.0;
@@ -162,16 +162,11 @@ pub(crate) fn create_window<R: Runtime>(
"dev.reset_size" => webview_window
.set_size(LogicalSize::new(DEFAULT_WINDOW_WIDTH, DEFAULT_WINDOW_HEIGHT))
.unwrap(),
"dev.reset_size_16x9" => {
"dev.reset_size_record" => {
let width = webview_window.outer_size().unwrap().width;
let height = width * 9 / 16;
webview_window.set_size(PhysicalSize::new(width, height)).unwrap()
}
"dev.reset_size_16x10" => {
let width = webview_window.outer_size().unwrap().width;
let height = width * 10 / 16;
webview_window.set_size(PhysicalSize::new(width, height)).unwrap()
}
"dev.refresh" => webview_window.eval("location.reload()").unwrap(),
"dev.generate_theme_css" => {
w.emit("generate_theme_css", true).unwrap();

View File

@@ -153,11 +153,9 @@ pub fn app_menu<R: Runtime>(app_handle: &AppHandle<R>) -> tauri::Result<Menu<R>>
.build(app_handle)?,
&MenuItemBuilder::with_id("dev.reset_size".to_string(), "Reset Size")
.build(app_handle)?,
&MenuItemBuilder::with_id("dev.reset_size_16x9".to_string(), "Resize to 16x9")
.build(app_handle)?,
&MenuItemBuilder::with_id(
"dev.reset_size_16x10".to_string(),
"Resize to 16x10",
"dev.reset_size_record".to_string(),
"Reset Size 16x9",
)
.build(app_handle)?,
&MenuItemBuilder::with_id(

View File

@@ -1,9 +1,9 @@
//! WebSocket Tauri command wrappers
//! These wrap the core yaak-ws functionality for Tauri IPC.
use crate::PluginContextExt;
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use http::HeaderMap;
use log::{debug, info, warn};
use std::str::FromStr;
@@ -28,6 +28,53 @@ use yaak_templates::{RenderErrorBehavior, RenderOptions};
use yaak_tls::find_client_certificate;
use yaak_ws::{WebsocketManager, render_websocket_request};
#[command]
pub async fn cmd_ws_upsert_request<R: Runtime>(
request: WebsocketRequest,
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
) -> Result<WebsocketRequest> {
Ok(app_handle
.db()
.upsert_websocket_request(&request, &UpdateSource::from_window_label(window.label()))?)
}
#[command]
pub async fn cmd_ws_duplicate_request<R: Runtime>(
request_id: &str,
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
) -> Result<WebsocketRequest> {
let db = app_handle.db();
let request = db.get_websocket_request(request_id)?;
Ok(db.duplicate_websocket_request(&request, &UpdateSource::from_window_label(window.label()))?)
}
#[command]
pub async fn cmd_ws_delete_request<R: Runtime>(
request_id: &str,
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
) -> Result<WebsocketRequest> {
Ok(app_handle
.db()
.delete_websocket_request_by_id(request_id, &UpdateSource::from_window_label(window.label()))?)
}
#[command]
pub async fn cmd_ws_delete_connection<R: Runtime>(
connection_id: &str,
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
) -> Result<WebsocketConnection> {
Ok(app_handle
.db()
.delete_websocket_connection_by_id(
connection_id,
&UpdateSource::from_window_label(window.label()),
)?)
}
#[command]
pub async fn cmd_ws_delete_connections<R: Runtime>(
request_id: &str,
@@ -40,6 +87,30 @@ pub async fn cmd_ws_delete_connections<R: Runtime>(
)?)
}
#[command]
pub async fn cmd_ws_list_events<R: Runtime>(
connection_id: &str,
app_handle: AppHandle<R>,
) -> Result<Vec<WebsocketEvent>> {
Ok(app_handle.db().list_websocket_events(connection_id)?)
}
#[command]
pub async fn cmd_ws_list_requests<R: Runtime>(
workspace_id: &str,
app_handle: AppHandle<R>,
) -> Result<Vec<WebsocketRequest>> {
Ok(app_handle.db().list_websocket_requests(workspace_id)?)
}
#[command]
pub async fn cmd_ws_list_connections<R: Runtime>(
workspace_id: &str,
app_handle: AppHandle<R>,
) -> Result<Vec<WebsocketConnection>> {
Ok(app_handle.db().list_websocket_connections(workspace_id)?)
}
#[command]
pub async fn cmd_ws_send<R: Runtime>(
connection_id: &str,
@@ -225,10 +296,8 @@ pub async fn cmd_ws_connect<R: Runtime>(
)
.await?;
for header in plugin_result.set_headers.unwrap_or_default() {
match (
http::HeaderName::from_str(&header.name),
HeaderValue::from_str(&header.value),
) {
match (http::HeaderName::from_str(&header.name), HeaderValue::from_str(&header.value))
{
(Ok(name), Ok(value)) => {
headers.insert(name, value);
}
@@ -286,7 +355,7 @@ pub async fn cmd_ws_connect<R: Runtime>(
url.as_str(),
headers,
receive_tx,
workspace.setting_validate_certificates,
workspace.setting_validate_certificates.unwrap_or(true),
client_cert,
)
.await
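The `workspace.setting_validate_certificates.unwrap_or(true)` call above shows the nullable-settings pattern: `None` means the workspace does not override the built-in default. A tiny illustrative sketch (field names hypothetical):

```rust
// Hypothetical field names; only the Option-with-default pattern is the point.
struct WorkspaceSettingsSketch {
    validate_certificates: Option<bool>,
}

fn effective_validate_certificates(s: &WorkspaceSettingsSketch) -> bool {
    // None = "no override here"; the secure default is to validate.
    s.validate_certificates.unwrap_or(true)
}
```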

View File

@@ -44,8 +44,8 @@
"vendored/protoc/include",
"vendored/plugins",
"vendored/plugin-runtime",
"vendored/node/yaaknode*",
"vendored/protoc/yaakprotoc*"
"vendored/node/yaaknode",
"vendored/protoc/yaakprotoc"
]
}
}

View File

@@ -1,6 +1,9 @@
{
"build": {
"features": ["updater", "license"]
"features": [
"updater",
"license"
]
},
"app": {
"security": {
@@ -8,8 +11,12 @@
"default",
{
"identifier": "release",
"windows": ["*"],
"permissions": ["yaak-license:default"]
"windows": [
"*"
],
"permissions": [
"yaak-license:default"
]
}
]
}
@@ -32,7 +39,14 @@
"createUpdaterArtifacts": true,
"longDescription": "A cross-platform desktop app for interacting with REST, GraphQL, and gRPC",
"shortDescription": "Play with APIs, intuitively",
"targets": ["app", "appimage", "deb", "dmg", "nsis", "rpm"],
"targets": [
"app",
"appimage",
"deb",
"dmg",
"nsis",
"rpm"
],
"macOS": {
"minimumSystemVersion": "13.0",
"exceptionDomain": "",
@@ -44,16 +58,10 @@
},
"linux": {
"deb": {
"desktopTemplate": "./template.desktop",
"files": {
"/usr/share/metainfo/app.yaak.Yaak.metainfo.xml": "../../flatpak/app.yaak.Yaak.metainfo.xml"
}
"desktopTemplate": "./template.desktop"
},
"rpm": {
"desktopTemplate": "./template.desktop",
"files": {
"/usr/share/metainfo/app.yaak.Yaak.metainfo.xml": "../../flatpak/app.yaak.Yaak.metainfo.xml"
}
"desktopTemplate": "./template.desktop"
}
}
}

View File

@@ -16,7 +16,7 @@ thiserror = { workspace = true }
ts-rs = { workspace = true }
yaak-common = { workspace = true }
yaak-models = { workspace = true }
yaak-api = { workspace = true }
yaak-tauri-utils = { workspace = true }
[build-dependencies]
tauri-plugin = { workspace = true, features = ["build"] }

View File

@@ -16,7 +16,7 @@ pub enum Error {
ModelError(#[from] yaak_models::error::Error),
#[error(transparent)]
ApiError(#[from] yaak_api::Error),
TauriUtilsError(#[from] yaak_tauri_utils::error::Error),
#[error("Internal server error")]
ServerError,

View File

@@ -7,8 +7,8 @@ use std::ops::Add;
use std::time::Duration;
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow, is_dev};
use ts_rs::TS;
use yaak_api::yaak_api_client;
use yaak_common::platform::get_os_str;
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_models::db_context::DbContext;
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
@@ -118,12 +118,11 @@ pub async fn activate_license<R: Runtime>(
license_key: &str,
) -> Result<()> {
info!("Activating license {}", license_key);
let app_version = window.app_handle().package_info().version.to_string();
let client = yaak_api_client(&app_version)?;
let client = reqwest::Client::new();
let payload = ActivateLicenseRequestPayload {
license_key: license_key.to_string(),
app_platform: get_os_str().to_string(),
app_version,
app_version: window.app_handle().package_info().version.to_string(),
};
let response = client.post(build_url("/licenses/activate")).json(&payload).send().await?;
@@ -156,11 +155,12 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
let app_handle = window.app_handle();
let activation_id = get_activation_id(app_handle).await;
let app_version = window.app_handle().package_info().version.to_string();
let client = yaak_api_client(&app_version)?;
let client = reqwest::Client::new();
let path = format!("/licenses/activations/{}/deactivate", activation_id);
let payload =
DeactivateLicenseRequestPayload { app_platform: get_os_str().to_string(), app_version };
let payload = DeactivateLicenseRequestPayload {
app_platform: get_os_str().to_string(),
app_version: window.app_handle().package_info().version.to_string(),
};
let response = client.post(build_url(&path)).json(&payload).send().await?;
if response.status().is_client_error() {
@@ -186,9 +186,10 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
}
pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<LicenseCheckStatus> {
let app_version = window.app_handle().package_info().version.to_string();
let payload =
CheckActivationRequestPayload { app_platform: get_os_str().to_string(), app_version };
let payload = CheckActivationRequestPayload {
app_platform: get_os_str().to_string(),
app_version: window.package_info().version.to_string(),
};
let activation_id = get_activation_id(window.app_handle()).await;
let settings = window.db().get_settings();
@@ -203,7 +204,7 @@ pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<Lice
(true, _) => {
info!("Checking license activation");
// A license has been activated, so let's check the license server
let client = yaak_api_client(&payload.app_version)?;
let client = yaak_api_client(window.app_handle())?;
let path = format!("/licenses/activations/{activation_id}/check-v2");
let response = client.post(build_url(&path)).json(&payload).send().await?;

View File

@@ -6,4 +6,8 @@ publish = false
[dependencies]
tauri = { workspace = true }
reqwest = { workspace = true, features = ["gzip"] }
thiserror = { workspace = true }
serde = { workspace = true, features = ["derive"] }
regex = "1.11.0"
yaak-common = { workspace = true }

View File

@@ -0,0 +1,24 @@
use crate::error::Result;
use reqwest::Client;
use std::time::Duration;
use tauri::http::{HeaderMap, HeaderValue};
use tauri::{AppHandle, Runtime};
use yaak_common::platform::{get_ua_arch, get_ua_platform};
pub fn yaak_api_client<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Client> {
let platform = get_ua_platform();
let version = app_handle.package_info().version.clone();
let arch = get_ua_arch();
let ua = format!("Yaak/{version} ({platform}; {arch})");
let mut default_headers = HeaderMap::new();
default_headers.insert("Accept", HeaderValue::from_str("application/json").unwrap());
let client = reqwest::ClientBuilder::new()
.timeout(Duration::from_secs(20))
.default_headers(default_headers)
.gzip(true)
.user_agent(ua)
.build()?;
Ok(client)
}
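For context, a hypothetical caller of the new `yaak_api_client`; the command itself is illustrative and not part of this change, though the notifications endpoint appears elsewhere in this diff:

```rust
use tauri::{AppHandle, Runtime};
use yaak_tauri_utils::api_client::yaak_api_client;

// Hypothetical command showing the client in use.
#[tauri::command]
async fn cmd_fetch_notifications<R: Runtime>(app_handle: AppHandle<R>) -> Result<String, String> {
    let client = yaak_api_client(&app_handle).map_err(|e| e.to_string())?;
    let resp = client
        .get("https://notify.yaak.app/notifications")
        .send()
        .await
        .map_err(|e| e.to_string())?;
    resp.text().await.map_err(|e| e.to_string())
}
```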

View File

@@ -0,0 +1,19 @@
use serde::{Serialize, Serializer};
use thiserror::Error;
#[derive(Error, Debug)]
pub enum Error {
#[error(transparent)]
ReqwestError(#[from] reqwest::Error),
}
impl Serialize for Error {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(self.to_string().as_ref())
}
}
pub type Result<T> = std::result::Result<T, Error>;
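Tauri commands can only return error types that implement Serialize, which is why the impl above flattens every error to its Display string before crossing the IPC boundary. A hypothetical command using this alias:

```rust
// Hypothetical command; any error it returns reaches the frontend as a plain
// string via the Serialize impl above.
#[tauri::command]
async fn cmd_example() -> crate::error::Result<String> {
    Ok("ok".to_string())
}
```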

View File

@@ -1 +1,3 @@
pub mod api_client;
pub mod error;
pub mod window;

View File

@@ -1,12 +0,0 @@
[package]
name = "yaak-api"
version = "0.1.0"
edition = "2024"
publish = false
[dependencies]
log = { workspace = true }
reqwest = { workspace = true, features = ["gzip"] }
sysproxy = "0.3"
thiserror = { workspace = true }
yaak-common = { workspace = true }

View File

@@ -1,9 +0,0 @@
use thiserror::Error;
#[derive(Error, Debug)]
pub enum Error {
#[error(transparent)]
ReqwestError(#[from] reqwest::Error),
}
pub type Result<T> = std::result::Result<T, Error>;

View File

@@ -1,70 +0,0 @@
mod error;
pub use error::{Error, Result};
use log::{debug, warn};
use reqwest::Client;
use reqwest::header::{HeaderMap, HeaderValue};
use std::time::Duration;
use yaak_common::platform::{get_ua_arch, get_ua_platform};
/// Build a reqwest Client configured for Yaak's own API calls.
///
/// Includes a custom User-Agent, JSON accept header, 20s timeout, gzip,
/// and automatic OS-level proxy detection via sysproxy.
pub fn yaak_api_client(version: &str) -> Result<Client> {
let platform = get_ua_platform();
let arch = get_ua_arch();
let ua = format!("Yaak/{version} ({platform}; {arch})");
let mut default_headers = HeaderMap::new();
default_headers.insert("Accept", HeaderValue::from_str("application/json").unwrap());
let mut builder = reqwest::ClientBuilder::new()
.timeout(Duration::from_secs(20))
.default_headers(default_headers)
.gzip(true)
.user_agent(ua);
if let Some(sys) = get_enabled_system_proxy() {
let proxy_url = format!("http://{}:{}", sys.host, sys.port);
match reqwest::Proxy::all(&proxy_url) {
Ok(p) => {
let p = if !sys.bypass.is_empty() {
p.no_proxy(reqwest::NoProxy::from_string(&sys.bypass))
} else {
p
};
builder = builder.proxy(p);
}
Err(e) => {
warn!("Failed to configure system proxy: {e}");
}
}
}
Ok(builder.build()?)
}
/// Returns the system proxy URL if one is enabled, e.g. `http://host:port`.
pub fn get_system_proxy_url() -> Option<String> {
let sys = get_enabled_system_proxy()?;
Some(format!("http://{}:{}", sys.host, sys.port))
}
fn get_enabled_system_proxy() -> Option<sysproxy::Sysproxy> {
match sysproxy::Sysproxy::get_system_proxy() {
Ok(sys) if sys.enable => {
debug!("Detected system proxy: http://{}:{}", sys.host, sys.port);
Some(sys)
}
Ok(_) => {
debug!("System proxy detected but not enabled");
None
}
Err(e) => {
debug!("Could not detect system proxy: {e}");
None
}
}
}

View File

@@ -6,4 +6,3 @@ publish = false
[dependencies]
serde_json = { workspace = true }
tokio = { workspace = true, features = ["process"] }

View File

@@ -1,16 +0,0 @@
use std::ffi::OsStr;
#[cfg(target_os = "windows")]
const CREATE_NO_WINDOW: u32 = 0x0800_0000;
/// Creates a new `tokio::process::Command` that won't spawn a console window on Windows.
pub fn new_xplatform_command<S: AsRef<OsStr>>(program: S) -> tokio::process::Command {
#[allow(unused_mut)]
let mut cmd = tokio::process::Command::new(program);
#[cfg(target_os = "windows")]
{
use std::os::windows::process::CommandExt;
cmd.creation_flags(CREATE_NO_WINDOW);
}
cmd
}

View File

@@ -1,3 +1,2 @@
pub mod command;
pub mod platform;
pub mod serde;

View File

@@ -11,7 +11,3 @@ export function revealWorkspaceKey(workspaceId: string) {
export function setWorkspaceKey(args: { workspaceId: string; key: string }) {
return invoke<void>('cmd_set_workspace_key', args);
}
export function disableEncryption(workspaceId: string) {
return invoke<void>('cmd_disable_encryption', { workspaceId });
}

View File

@@ -115,35 +115,6 @@ impl EncryptionManager {
self.set_workspace_key(workspace_id, &wkey)
}
pub fn disable_encryption(&self, workspace_id: &str) -> Result<()> {
info!("Disabling encryption for {workspace_id}");
self.query_manager.with_tx::<(), Error>(|tx| {
let workspace = tx.get_workspace(workspace_id)?;
let workspace_meta = tx.get_or_create_workspace_meta(workspace_id)?;
// Clear encryption challenge on workspace
tx.upsert_workspace(
&Workspace { encryption_key_challenge: None, ..workspace },
&UpdateSource::Background,
)?;
// Clear encryption key on workspace meta
tx.upsert_workspace_meta(
&WorkspaceMeta { encryption_key: None, ..workspace_meta },
&UpdateSource::Background,
)?;
Ok(())
})?;
// Remove from cache
let mut cache = self.cached_workspace_keys.lock().unwrap();
cache.remove(workspace_id);
Ok(())
}
fn get_workspace_key(&self, workspace_id: &str) -> Result<WorkspaceKey> {
{
let cache = self.cached_workspace_keys.lock().unwrap();

View File

@@ -6,15 +6,13 @@ publish = false
[dependencies]
chrono = { workspace = true, features = ["serde"] }
git2 = { version = "0.20.4", features = ["vendored-libgit2", "vendored-openssl"] }
git2 = { version = "0.20.0", features = ["vendored-libgit2", "vendored-openssl"] }
log = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
serde_yaml = "0.9.34"
thiserror = { workspace = true }
tokio = { workspace = true, features = ["io-util"] }
ts-rs = { workspace = true, features = ["chrono-impl", "serde-json-impl"] }
url = "2"
yaak-common = { workspace = true }
yaak-models = { workspace = true }
yaak-sync = { workspace = true }

View File

@@ -1,10 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SyncModel } from "./gen_models";
export type BranchDeleteResult = { "type": "success", message: string, } | { "type": "not_fully_merged" };
export type CloneResult = { "type": "success" } | { "type": "cancelled" } | { "type": "needs_credentials", url: string, error: string | null, };
export type GitAuthor = { name: string | null, email: string | null, };
export type GitCommit = { author: GitAuthor, when: string, message: string | null, };
@@ -15,8 +11,8 @@ export type GitStatus = "untracked" | "conflict" | "current" | "modified" | "rem
export type GitStatusEntry = { relaPath: string, status: GitStatus, staged: boolean, prev: SyncModel | null, next: SyncModel | null, };
export type GitStatusSummary = { path: string, headRef: string | null, headRefShorthand: string | null, entries: Array<GitStatusEntry>, origins: Array<string>, localBranches: Array<string>, remoteBranches: Array<string>, ahead: number, behind: number, };
export type GitStatusSummary = { path: string, headRef: string | null, headRefShorthand: string | null, entries: Array<GitStatusEntry>, origins: Array<string>, localBranches: Array<string>, remoteBranches: Array<string>, };
export type PullResult = { "type": "success", message: string, } | { "type": "up_to_date" } | { "type": "needs_credentials", url: string, error: string | null, } | { "type": "diverged", remote: string, branch: string, } | { "type": "uncommitted_changes" };
export type PullResult = { "type": "success", message: string, } | { "type": "up_to_date" } | { "type": "needs_credentials", url: string, error: string | null, };
export type PushResult = { "type": "success", message: string, } | { "type": "up_to_date" } | { "type": "needs_credentials", url: string, error: string | null, };

View File

@@ -1,7 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null, variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };
@@ -20,4 +18,4 @@ export type SyncModel = { "type": "workspace" } & Workspace | { "type": "environ
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };

View File

@@ -3,59 +3,40 @@ import { invoke } from '@tauri-apps/api/core';
import { createFastMutation } from '@yaakapp/app/hooks/useFastMutation';
import { queryClient } from '@yaakapp/app/lib/queryClient';
import { useMemo } from 'react';
import { BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
import { showToast } from '@yaakapp/app/lib/toast';
import { GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
export * from './bindings/gen_git';
export * from './bindings/gen_models';
export interface GitCredentials {
username: string;
password: string;
}
export type DivergedStrategy = 'force_reset' | 'merge' | 'cancel';
export type UncommittedChangesStrategy = 'reset' | 'cancel';
export interface GitCallbacks {
addRemote: () => Promise<GitRemote | null>;
promptCredentials: (
result: Extract<PushResult, { type: 'needs_credentials' }>,
) => Promise<GitCredentials | null>;
promptDiverged: (
result: Extract<PullResult, { type: 'diverged' }>,
) => Promise<DivergedStrategy>;
promptUncommittedChanges: () => Promise<UncommittedChangesStrategy>;
forceSync: () => Promise<void>;
}
const onSuccess = () => queryClient.invalidateQueries({ queryKey: ['git'] });
export function useGit(dir: string, callbacks: GitCallbacks, refreshKey?: string) {
export function useGit(dir: string, callbacks: GitCallbacks) {
const mutations = useMemo(() => gitMutations(dir, callbacks), [dir, callbacks]);
const fetchAll = useQuery<void, string>({
queryKey: ['git', 'fetch_all', dir, refreshKey],
queryFn: () => invoke('cmd_git_fetch_all', { dir }),
refetchInterval: 10 * 60_000,
});
return [
{
remotes: useQuery<GitRemote[], string>({
queryKey: ['git', 'remotes', dir, refreshKey],
queryKey: ['git', 'remotes', dir],
queryFn: () => getRemotes(dir),
placeholderData: (prev) => prev,
}),
log: useQuery<GitCommit[], string>({
queryKey: ['git', 'log', dir, refreshKey],
queryKey: ['git', 'log', dir],
queryFn: () => invoke('cmd_git_log', { dir }),
placeholderData: (prev) => prev,
}),
status: useQuery<GitStatusSummary, string>({
refetchOnMount: true,
queryKey: ['git', 'status', dir, refreshKey, fetchAll.dataUpdatedAt],
queryKey: ['git', 'status', dir],
queryFn: () => invoke('cmd_git_status', { dir }),
placeholderData: (prev) => prev,
}),
},
mutations,
@@ -78,6 +59,7 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
if (creds == null) throw new Error('Canceled');
await invoke('cmd_git_add_credential', {
dir,
remoteUrl: result.url,
username: creds.username,
password: creds.password,
@@ -87,15 +69,6 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
return invoke<PushResult>('cmd_git_push', { dir });
};
const handleError = (err: unknown) => {
showToast({
id: `${err}`,
message: `${err}`,
color: 'danger',
timeout: 5000,
});
}
return {
init: createFastMutation<void, string, void>({
mutationKey: ['git', 'init'],
@@ -117,31 +90,21 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
mutationFn: (args) => invoke('cmd_git_rm_remote', { dir, ...args }),
onSuccess,
}),
createBranch: createFastMutation<void, string, { branch: string; base?: string }>({
branch: createFastMutation<void, string, { branch: string }>({
mutationKey: ['git', 'branch', dir],
mutationFn: (args) => invoke('cmd_git_branch', { dir, ...args }),
onSuccess,
}),
mergeBranch: createFastMutation<void, string, { branch: string }>({
mergeBranch: createFastMutation<void, string, { branch: string; force: boolean }>({
mutationKey: ['git', 'merge', dir],
mutationFn: (args) => invoke('cmd_git_merge_branch', { dir, ...args }),
onSuccess,
}),
deleteBranch: createFastMutation<BranchDeleteResult, string, { branch: string, force?: boolean }>({
deleteBranch: createFastMutation<void, string, { branch: string }>({
mutationKey: ['git', 'delete-branch', dir],
mutationFn: (args) => invoke('cmd_git_delete_branch', { dir, ...args }),
onSuccess,
}),
deleteRemoteBranch: createFastMutation<void, string, { branch: string }>({
mutationKey: ['git', 'delete-remote-branch', dir],
mutationFn: (args) => invoke('cmd_git_delete_remote_branch', { dir, ...args }),
onSuccess,
}),
renameBranch: createFastMutation<void, string, { oldName: string, newName: string }>({
mutationKey: ['git', 'rename-branch', dir],
mutationFn: (args) => invoke('cmd_git_rename_branch', { dir, ...args }),
onSuccess,
}),
checkout: createFastMutation<string, string, { branch: string; force: boolean }>({
mutationKey: ['git', 'checkout', dir],
mutationFn: (args) => invoke('cmd_git_checkout', { dir, ...args }),
@@ -160,7 +123,11 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
},
onSuccess,
}),
fetchAll: createFastMutation<string, string, void>({
mutationKey: ['git', 'checkout', dir],
mutationFn: () => invoke('cmd_git_fetch_all', { dir }),
onSuccess,
}),
push: createFastMutation<PushResult, string, void>({
mutationKey: ['git', 'push', dir],
mutationFn: push,
@@ -170,51 +137,21 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
mutationKey: ['git', 'pull', dir],
async mutationFn() {
const result = await invoke<PullResult>('cmd_git_pull', { dir });
if (result.type !== 'needs_credentials') return result;
if (result.type === 'needs_credentials') {
const creds = await callbacks.promptCredentials(result);
if (creds == null) throw new Error('Canceled');
// Needs credentials, prompt for them
const creds = await callbacks.promptCredentials(result);
if (creds == null) throw new Error('Canceled');
await invoke('cmd_git_add_credential', {
remoteUrl: result.url,
username: creds.username,
password: creds.password,
});
await invoke('cmd_git_add_credential', {
dir,
remoteUrl: result.url,
username: creds.username,
password: creds.password,
});
// Pull again after credentials
return invoke<PullResult>('cmd_git_pull', { dir });
}
if (result.type === 'uncommitted_changes') {
callbacks.promptUncommittedChanges().then(async (strategy) => {
if (strategy === 'cancel') return;
await invoke('cmd_git_reset_changes', { dir });
return invoke<PullResult>('cmd_git_pull', { dir });
}).then(async () => { onSuccess(); await callbacks.forceSync(); }, handleError);
}
if (result.type === 'diverged') {
callbacks.promptDiverged(result).then((strategy) => {
if (strategy === 'cancel') return;
if (strategy === 'force_reset') {
return invoke<PullResult>('cmd_git_pull_force_reset', {
dir,
remote: result.remote,
branch: result.branch,
});
}
return invoke<PullResult>('cmd_git_pull_merge', {
dir,
remote: result.remote,
branch: result.branch,
});
}).then(async () => { onSuccess(); await callbacks.forceSync(); }, handleError);
}
return result;
// Pull again
return invoke<PullResult>('cmd_git_pull', { dir });
},
onSuccess,
}),
@@ -223,39 +160,9 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
mutationFn: (args) => invoke('cmd_git_unstage', { dir, ...args }),
onSuccess,
}),
resetChanges: createFastMutation<void, string, void>({
mutationKey: ['git', 'reset-changes', dir],
mutationFn: () => invoke('cmd_git_reset_changes', { dir }),
onSuccess,
}),
} as const;
};
async function getRemotes(dir: string) {
return invoke<GitRemote[]>('cmd_git_remotes', { dir });
}
/**
* Clone a git repository, prompting for credentials if needed.
*/
export async function gitClone(
url: string,
dir: string,
promptCredentials: (args: { url: string; error: string | null }) => Promise<GitCredentials | null>,
): Promise<CloneResult> {
const result = await invoke<CloneResult>('cmd_git_clone', { url, dir });
if (result.type !== 'needs_credentials') return result;
// Prompt for credentials
const creds = await promptCredentials({ url: result.url, error: result.error });
if (creds == null) return { type: 'cancelled' };
// Store credentials and retry
await invoke('cmd_git_add_credential', {
remoteUrl: result.url,
username: creds.username,
password: creds.password,
});
return invoke<CloneResult>('cmd_git_clone', { url, dir });
}

View File

@@ -1,30 +1,38 @@
use crate::error::Error::GitNotFound;
use crate::error::Result;
use std::path::Path;
use std::process::Stdio;
use tokio::process::Command;
use yaak_common::command::new_xplatform_command;
use std::process::{Command, Stdio};
/// Create a git command that runs in the specified directory
pub(crate) async fn new_binary_command(dir: &Path) -> Result<Command> {
let mut cmd = new_binary_command_global().await?;
cmd.arg("-C").arg(dir);
Ok(cmd)
}
use crate::error::Error::GitNotFound;
#[cfg(target_os = "windows")]
use std::os::windows::process::CommandExt;
/// Create a git command without a specific directory (for global operations)
pub(crate) async fn new_binary_command_global() -> Result<Command> {
#[cfg(target_os = "windows")]
const CREATE_NO_WINDOW: u32 = 0x0800_0000;
pub(crate) fn new_binary_command(dir: &Path) -> Result<Command> {
// 1. Probe that `git` exists and is runnable
let mut probe = new_xplatform_command("git");
let mut probe = Command::new("git");
probe.arg("--version").stdin(Stdio::null()).stdout(Stdio::null()).stderr(Stdio::null());
let status = probe.status().await.map_err(|_| GitNotFound)?;
#[cfg(target_os = "windows")]
{
probe.creation_flags(CREATE_NO_WINDOW);
}
let status = probe.status().map_err(|_| GitNotFound)?;
if !status.success() {
return Err(GitNotFound);
}
// 2. Build the reusable git command
let cmd = new_xplatform_command("git");
let mut cmd = Command::new("git");
cmd.arg("-C").arg(dir);
#[cfg(target_os = "windows")]
{
cmd.creation_flags(CREATE_NO_WINDOW);
}
Ok(cmd)
}
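A hypothetical caller of the probe-then-build helper, using the async (tokio) variant shown above; it assumes the crate's error type converts from std::io::Error the same way git_commit below relies on:

```rust
use std::path::Path;

// Illustrative only: run `git status --porcelain` in a repository directory.
async fn git_status_porcelain(dir: &Path) -> crate::error::Result<String> {
    let out = new_binary_command(dir).await?.args(["status", "--porcelain"]).output().await?;
    Ok(String::from_utf8_lossy(&out.stdout).into_owned())
}
```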

View File

@@ -1,153 +1,99 @@
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use crate::merge::do_merge;
use crate::repository::open_repo;
use crate::util::{bytes_to_string, get_branch_by_name, get_current_branch};
use git2::BranchType;
use git2::build::CheckoutBuilder;
use log::info;
use std::path::Path;
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "gen_git.ts")]
pub enum BranchDeleteResult {
Success { message: String },
NotFullyMerged,
}
pub fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
if branch_name.starts_with("origin/") {
return git_checkout_remote_branch(dir, branch_name, force);
}
pub async fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
let branch_name = branch_name.trim_start_matches("origin/");
let repo = open_repo(dir)?;
let branch = get_branch_by_name(&repo, branch_name)?;
let branch_ref = branch.into_reference();
let branch_tree = branch_ref.peel_to_tree()?;
let mut args = vec!["checkout"];
let mut options = CheckoutBuilder::default();
if force {
args.push("--force");
options.force();
}
args.push(branch_name);
let out = new_binary_command(dir)
.await?
.args(&args)
.output()
.await
.map_err(|e| GenericError(format!("failed to run git checkout: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() {
return Err(GenericError(format!("Failed to checkout: {}", combined.trim())));
}
repo.checkout_tree(branch_tree.as_object(), Some(&mut options))?;
repo.set_head(branch_ref.name().unwrap())?;
Ok(branch_name.to_string())
}
pub async fn git_create_branch(dir: &Path, name: &str, base: Option<&str>) -> Result<()> {
let mut cmd = new_binary_command(dir).await?;
cmd.arg("branch").arg(name);
if let Some(base_branch) = base {
cmd.arg(base_branch);
}
pub(crate) fn git_checkout_remote_branch(
dir: &Path,
branch_name: &str,
force: bool,
) -> Result<String> {
let branch_name = branch_name.trim_start_matches("origin/");
let repo = open_repo(dir)?;
let out =
cmd.output().await.map_err(|e| GenericError(format!("failed to run git branch: {e}")))?;
let refname = format!("refs/remotes/origin/{}", branch_name);
let remote_ref = repo.find_reference(&refname)?;
let commit = remote_ref.peel_to_commit()?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
let mut new_branch = repo.branch(branch_name, &commit, false)?;
let upstream_name = format!("origin/{}", branch_name);
new_branch.set_upstream(Some(&upstream_name))?;
if !out.status.success() {
return Err(GenericError(format!("Failed to create branch: {}", combined.trim())));
}
Ok(())
git_checkout_branch(dir, branch_name, force)
}
pub async fn git_delete_branch(dir: &Path, name: &str, force: bool) -> Result<BranchDeleteResult> {
let mut cmd = new_binary_command(dir).await?;
let out =
if force { cmd.args(["branch", "-D", name]) } else { cmd.args(["branch", "-d", name]) }
.output()
.await
.map_err(|e| GenericError(format!("failed to run git branch -d: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() && stderr.to_lowercase().contains("not fully merged") {
return Ok(BranchDeleteResult::NotFullyMerged);
}
if !out.status.success() {
return Err(GenericError(format!("Failed to delete branch: {}", combined.trim())));
}
Ok(BranchDeleteResult::Success { message: combined })
}
pub async fn git_merge_branch(dir: &Path, name: &str) -> Result<()> {
let out = new_binary_command(dir)
.await?
.args(["merge", name])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git merge: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() {
// Check for merge conflicts
if combined.to_lowercase().contains("conflict") {
return Err(GenericError(
"Merge conflicts detected. Please resolve them manually.".to_string(),
));
pub fn git_create_branch(dir: &Path, name: &str) -> Result<()> {
let repo = open_repo(dir)?;
let head = match repo.head() {
Ok(h) => h,
Err(e) if e.code() == git2::ErrorCode::UnbornBranch => {
let msg = "Cannot create branch when there are no commits";
return Err(GenericError(msg.into()));
}
return Err(GenericError(format!("Failed to merge: {}", combined.trim())));
}
Err(e) => return Err(e.into()),
};
let head = head.peel_to_commit()?;
repo.branch(name, &head, false)?;
Ok(())
}
pub async fn git_delete_remote_branch(dir: &Path, name: &str) -> Result<()> {
// Remote branch names come in as "origin/branch-name", extract the branch name
let branch_name = name.trim_start_matches("origin/");
pub fn git_delete_branch(dir: &Path, name: &str) -> Result<()> {
let repo = open_repo(dir)?;
let mut branch = get_branch_by_name(&repo, name)?;
let out = new_binary_command(dir)
.await?
.args(["push", "origin", "--delete", branch_name])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git push --delete: {e}")))?;
if branch.is_head() {
info!("Deleting head branch");
let branches = repo.branches(Some(BranchType::Local))?;
let other_branch = branches.into_iter().filter_map(|b| b.ok()).find(|b| !b.0.is_head());
let other_branch = match other_branch {
None => return Err(GenericError("Cannot delete only branch".into())),
Some(b) => bytes_to_string(b.0.name_bytes()?)?,
};
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() {
return Err(GenericError(format!("Failed to delete remote branch: {}", combined.trim())));
git_checkout_branch(dir, &other_branch, true)?;
}
branch.delete()?;
Ok(())
}
pub async fn git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
let out = new_binary_command(dir)
.await?
.args(["branch", "-m", old_name, new_name])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git branch -m: {e}")))?;
pub fn git_merge_branch(dir: &Path, name: &str, _force: bool) -> Result<()> {
let repo = open_repo(dir)?;
let local_branch = get_current_branch(&repo)?.unwrap();
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
let commit_to_merge = get_branch_by_name(&repo, name)?.into_reference();
let commit_to_merge = repo.reference_to_annotated_commit(&commit_to_merge)?;
if !out.status.success() {
return Err(GenericError(format!("Failed to rename branch: {}", combined.trim())));
}
do_merge(&repo, &local_branch, &commit_to_merge)?;
Ok(())
}

View File

@@ -1,53 +0,0 @@
use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use log::info;
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;
use ts_rs::TS;
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "gen_git.ts")]
pub enum CloneResult {
Success,
Cancelled,
NeedsCredentials { url: String, error: Option<String> },
}
pub async fn git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
let parent = dir.parent().ok_or_else(|| GenericError("Invalid clone directory".to_string()))?;
fs::create_dir_all(parent)
.map_err(|e| GenericError(format!("Failed to create directory: {e}")))?;
let mut cmd = new_binary_command(parent).await?;
cmd.args(["clone", url]).arg(dir).env("GIT_TERMINAL_PROMPT", "0");
let out =
cmd.output().await.map_err(|e| GenericError(format!("failed to run git clone: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
let combined_lower = combined.to_lowercase();
info!("Cloned status={}: {combined}", out.status);
if !out.status.success() {
// Check for credentials error
if combined_lower.contains("could not read") {
return Ok(CloneResult::NeedsCredentials { url: url.to_string(), error: None });
}
if combined_lower.contains("unable to access")
|| combined_lower.contains("authentication failed")
{
return Ok(CloneResult::NeedsCredentials {
url: url.to_string(),
error: Some(combined.to_string()),
});
}
return Err(GenericError(format!("Failed to clone: {}", combined.trim())));
}
Ok(CloneResult::Success)
}
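
The two `NeedsCredentials` cases are what allow a prompt-and-retry flow around cloning. A rough sketch against the async signatures in this file, where `prompt_for_credentials` is a hypothetical UI helper returning a username/password pair:

```
loop {
    match git_clone(url, dir).await? {
        CloneResult::Success => break,
        CloneResult::Cancelled => return Ok(()),
        CloneResult::NeedsCredentials { url, error } => {
            if let Some(err) = error {
                log::warn!("Clone failed: {err}");
            }
            // prompt_for_credentials is hypothetical, not part of this crate
            let (username, password) = prompt_for_credentials(&url)?;
            git_add_credential(&url, &username, &password).await?;
            // Loop around and retry the clone with the stored credential
        }
    }
}
```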

View File

@@ -3,9 +3,8 @@ use crate::error::Error::GenericError;
use log::info;
use std::path::Path;
pub async fn git_commit(dir: &Path, message: &str) -> crate::error::Result<()> {
let out =
new_binary_command(dir).await?.args(["commit", "--message", message]).output().await?;
pub fn git_commit(dir: &Path, message: &str) -> crate::error::Result<()> {
let out = new_binary_command(dir)?.args(["commit", "--message", message]).output()?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);

View File

@@ -1,19 +1,24 @@
use crate::binary::new_binary_command_global;
use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use std::io::Write;
use std::path::Path;
use std::process::Stdio;
use tokio::io::AsyncWriteExt;
use url::Url;
pub async fn git_add_credential(remote_url: &str, username: &str, password: &str) -> Result<()> {
pub async fn git_add_credential(
dir: &Path,
remote_url: &str,
username: &str,
password: &str,
) -> Result<()> {
let url = Url::parse(remote_url)
.map_err(|e| GenericError(format!("Failed to parse remote url {remote_url}: {e:?}")))?;
let protocol = url.scheme();
let host = url.host_str().unwrap();
let path = Some(url.path());
let mut child = new_binary_command_global()
.await?
let mut child = new_binary_command(dir)?
.args(["credential", "approve"])
.stdin(Stdio::piped())
.stdout(Stdio::null())
@@ -21,21 +26,19 @@ pub async fn git_add_credential(remote_url: &str, username: &str, password: &str
{
let stdin = child.stdin.as_mut().unwrap();
stdin.write_all(format!("protocol={}\n", protocol).as_bytes()).await?;
stdin.write_all(format!("host={}\n", host).as_bytes()).await?;
writeln!(stdin, "protocol={}", protocol)?;
writeln!(stdin, "host={}", host)?;
if let Some(path) = path {
if !path.is_empty() {
stdin
.write_all(format!("path={}\n", path.trim_start_matches('/')).as_bytes())
.await?;
writeln!(stdin, "path={}", path.trim_start_matches('/'))?;
}
}
stdin.write_all(format!("username={}\n", username).as_bytes()).await?;
stdin.write_all(format!("password={}\n", password).as_bytes()).await?;
stdin.write_all(b"\n").await?; // blank line terminator
writeln!(stdin, "username={}", username)?;
writeln!(stdin, "password={}", password)?;
writeln!(stdin)?; // blank line terminator
}
let status = child.wait().await?;
let status = child.wait()?;
if !status.success() {
return Err(GenericError("Failed to approve git credential".to_string()));
}
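
For reference, the payload written to `git credential approve` is git's standard key=value credential format terminated by a blank line; the values below are illustrative:

```
protocol=https
host=github.com
path=owner/repo.git
username=alice
password=s3cret

```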

View File

@@ -3,12 +3,10 @@ use crate::error::Error::GenericError;
use crate::error::Result;
use std::path::Path;
pub async fn git_fetch_all(dir: &Path) -> Result<()> {
let out = new_binary_command(dir)
.await?
pub fn git_fetch_all(dir: &Path) -> Result<()> {
let out = new_binary_command(dir)?
.args(["fetch", "--all", "--prune", "--tags"])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);

View File

@@ -1,38 +1,31 @@
mod add;
mod binary;
mod branch;
mod clone;
mod commit;
mod credential;
pub mod error;
mod fetch;
mod init;
mod log;
mod merge;
mod pull;
mod push;
mod remotes;
mod repository;
mod reset;
mod status;
mod unstage;
mod util;
// Re-export all git functions for external use
pub use add::git_add;
pub use branch::{
BranchDeleteResult, git_checkout_branch, git_create_branch, git_delete_branch,
git_delete_remote_branch, git_merge_branch, git_rename_branch,
};
pub use clone::{CloneResult, git_clone};
pub use branch::{git_checkout_branch, git_create_branch, git_delete_branch, git_merge_branch};
pub use commit::git_commit;
pub use credential::git_add_credential;
pub use fetch::git_fetch_all;
pub use init::git_init;
pub use log::{GitCommit, git_log};
pub use pull::{PullResult, git_pull, git_pull_force_reset, git_pull_merge};
pub use pull::{PullResult, git_pull};
pub use push::{PushResult, git_push};
pub use remotes::{GitRemote, git_add_remote, git_remotes, git_rm_remote};
pub use reset::git_reset_changes;
pub use status::{GitStatusSummary, git_status};
pub use unstage::git_unstage;

View File

@@ -0,0 +1,135 @@
use crate::error::Error::MergeConflicts;
use crate::util::bytes_to_string;
use git2::{AnnotatedCommit, Branch, IndexEntry, Reference, Repository};
use log::{debug, info};
pub(crate) fn do_merge(
repo: &Repository,
local_branch: &Branch,
commit_to_merge: &AnnotatedCommit,
) -> crate::error::Result<()> {
debug!("Merging remote branches");
let analysis = repo.merge_analysis(&[&commit_to_merge])?;
if analysis.0.is_fast_forward() {
let refname = bytes_to_string(local_branch.get().name_bytes())?;
match repo.find_reference(&refname) {
Ok(mut r) => {
merge_fast_forward(repo, &mut r, &commit_to_merge)?;
}
Err(_) => {
// The branch doesn't exist, so set the reference to the commit directly. Usually
// this is because you are pulling into an empty repository.
repo.reference(
&refname,
commit_to_merge.id(),
true,
&format!("Setting {} to {}", refname, commit_to_merge.id()),
)?;
repo.set_head(&refname)?;
repo.checkout_head(Some(
git2::build::CheckoutBuilder::default()
.allow_conflicts(true)
.conflict_style_merge(true)
.force(),
))?;
}
};
} else if analysis.0.is_normal() {
let head_commit = repo.reference_to_annotated_commit(&repo.head()?)?;
merge_normal(repo, &head_commit, commit_to_merge)?;
} else {
debug!("Skipping merge. Nothing to do")
}
Ok(())
}
pub(crate) fn merge_fast_forward(
repo: &Repository,
local_reference: &mut Reference,
remote_commit: &AnnotatedCommit,
) -> crate::error::Result<()> {
info!("Performing fast forward");
let name = match local_reference.name() {
Some(s) => s.to_string(),
None => String::from_utf8_lossy(local_reference.name_bytes()).to_string(),
};
let msg = format!("Fast-Forward: Setting {} to id: {}", name, remote_commit.id());
local_reference.set_target(remote_commit.id(), &msg)?;
repo.set_head(&name)?;
repo.checkout_head(Some(
git2::build::CheckoutBuilder::default()
// For some reason, the force is required to make the working directory actually get
// updated. I suspect we should be adding some logic to handle dirty working directory
// states, but this is just an example so maybe not.
.force(),
))?;
Ok(())
}
pub(crate) fn merge_normal(
repo: &Repository,
local: &AnnotatedCommit,
remote: &AnnotatedCommit,
) -> crate::error::Result<()> {
info!("Performing normal merge");
let local_tree = repo.find_commit(local.id())?.tree()?;
let remote_tree = repo.find_commit(remote.id())?.tree()?;
let ancestor = repo.find_commit(repo.merge_base(local.id(), remote.id())?)?.tree()?;
let mut idx = repo.merge_trees(&ancestor, &local_tree, &remote_tree, None)?;
if idx.has_conflicts() {
let conflicts = idx.conflicts()?;
for conflict in conflicts {
if let Ok(conflict) = conflict {
print_conflict(&conflict);
}
}
return Err(MergeConflicts);
}
let result_tree = repo.find_tree(idx.write_tree_to(repo)?)?;
// now create the merge commit
let msg = format!("Merge: {} into {}", remote.id(), local.id());
let sig = repo.signature()?;
let local_commit = repo.find_commit(local.id())?;
let remote_commit = repo.find_commit(remote.id())?;
// Do our merge commit and set current branch head to that commit.
let _merge_commit = repo.commit(
Some("HEAD"),
&sig,
&sig,
&msg,
&result_tree,
&[&local_commit, &remote_commit],
)?;
// Set working tree to match head.
repo.checkout_head(None)?;
Ok(())
}
fn print_conflict(conflict: &git2::IndexConflict) {
let ancestor = conflict.ancestor.as_ref().map(path_from_index_entry);
let ours = conflict.our.as_ref().map(path_from_index_entry);
let theirs = conflict.their.as_ref().map(path_from_index_entry);
println!("Conflict detected:");
if let Some(path) = ancestor {
println!(" Common ancestor: {:?}", path);
}
if let Some(path) = ours {
println!(" Ours: {:?}", path);
}
if let Some(path) = theirs {
println!(" Theirs: {:?}", path);
}
}
fn path_from_index_entry(entry: &IndexEntry) -> String {
String::from_utf8_lossy(entry.path.as_slice()).into_owned()
}

View File

@@ -15,159 +15,49 @@ pub enum PullResult {
Success { message: String },
UpToDate,
NeedsCredentials { url: String, error: Option<String> },
Diverged { remote: String, branch: String },
UncommittedChanges,
}
fn has_uncommitted_changes(dir: &Path) -> Result<bool> {
pub fn git_pull(dir: &Path) -> Result<PullResult> {
let repo = open_repo(dir)?;
let mut opts = git2::StatusOptions::new();
opts.include_ignored(false).include_untracked(false);
let statuses = repo.statuses(Some(&mut opts))?;
Ok(statuses.iter().any(|e| e.status() != git2::Status::CURRENT))
}
let branch_name = get_current_branch_name(&repo)?;
let remote = get_default_remote_in_repo(&repo)?;
let remote_name = remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?;
let remote_url = remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?;
pub async fn git_pull(dir: &Path) -> Result<PullResult> {
if has_uncommitted_changes(dir)? {
return Ok(PullResult::UncommittedChanges);
}
// Extract all git2 data before any await points (git2 types are not Send)
let (branch_name, remote_name, remote_url) = {
let repo = open_repo(dir)?;
let branch_name = get_current_branch_name(&repo)?;
let remote = get_default_remote_in_repo(&repo)?;
let remote_name =
remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?.to_string();
let remote_url =
remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?.to_string();
(branch_name, remote_name, remote_url)
};
// Step 1: fetch the specific branch
// NOTE: We use fetch + merge instead of `git pull` to avoid conflicts with
// global git config (e.g. pull.ff=only) and the background fetch --all.
let fetch_out = new_binary_command(dir)
.await?
.args(["fetch", &remote_name, &branch_name])
let out = new_binary_command(dir)?
.args(["pull", &remote_name, &branch_name])
.env("GIT_TERMINAL_PROMPT", "0")
.output()
.await
.map_err(|e| GenericError(format!("failed to run git fetch: {e}")))?;
.map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
let fetch_stdout = String::from_utf8_lossy(&fetch_out.stdout);
let fetch_stderr = String::from_utf8_lossy(&fetch_out.stderr);
let fetch_combined = format!("{fetch_stdout}{fetch_stderr}");
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = stdout + stderr;
info!("Fetched status={} {fetch_combined}", fetch_out.status);
info!("Pulled status={} {combined}", out.status);
if fetch_combined.to_lowercase().contains("could not read") {
if combined.to_lowercase().contains("could not read") {
return Ok(PullResult::NeedsCredentials { url: remote_url.to_string(), error: None });
}
if fetch_combined.to_lowercase().contains("unable to access") {
if combined.to_lowercase().contains("unable to access") {
return Ok(PullResult::NeedsCredentials {
url: remote_url.to_string(),
error: Some(fetch_combined.to_string()),
error: Some(combined.to_string()),
});
}
if !fetch_out.status.success() {
return Err(GenericError(format!("Failed to fetch: {fetch_combined}")));
if !out.status.success() {
return Err(GenericError(format!("Failed to pull {combined}")));
}
// Step 2: merge the fetched branch
let ref_name = format!("{}/{}", remote_name, branch_name);
let merge_out = new_binary_command(dir)
.await?
.args(["merge", "--ff-only", &ref_name])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git merge: {e}")))?;
let merge_stdout = String::from_utf8_lossy(&merge_out.stdout);
let merge_stderr = String::from_utf8_lossy(&merge_out.stderr);
let merge_combined = format!("{merge_stdout}{merge_stderr}");
info!("Merged status={} {merge_combined}", merge_out.status);
if !merge_out.status.success() {
let merge_lower = merge_combined.to_lowercase();
if merge_lower.contains("cannot fast-forward")
|| merge_lower.contains("not possible to fast-forward")
|| merge_lower.contains("diverged")
{
return Ok(PullResult::Diverged { remote: remote_name, branch: branch_name });
}
return Err(GenericError(format!("Failed to merge: {merge_combined}")));
}
if merge_combined.to_lowercase().contains("up to date") {
if combined.to_lowercase().contains("up to date") {
return Ok(PullResult::UpToDate);
}
Ok(PullResult::Success { message: format!("Pulled from {}/{}", remote_name, branch_name) })
}
pub async fn git_pull_force_reset(dir: &Path, remote: &str, branch: &str) -> Result<PullResult> {
// Step 1: fetch the remote
let fetch_out = new_binary_command(dir)
.await?
.args(["fetch", remote])
.env("GIT_TERMINAL_PROMPT", "0")
.output()
.await
.map_err(|e| GenericError(format!("failed to run git fetch: {e}")))?;
if !fetch_out.status.success() {
let stderr = String::from_utf8_lossy(&fetch_out.stderr);
return Err(GenericError(format!("Failed to fetch: {stderr}")));
}
// Step 2: reset --hard to remote/branch
let ref_name = format!("{}/{}", remote, branch);
let reset_out = new_binary_command(dir)
.await?
.args(["reset", "--hard", &ref_name])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git reset: {e}")))?;
if !reset_out.status.success() {
let stderr = String::from_utf8_lossy(&reset_out.stderr);
return Err(GenericError(format!("Failed to reset: {}", stderr.trim())));
}
Ok(PullResult::Success { message: format!("Reset to {}/{}", remote, branch) })
}
pub async fn git_pull_merge(dir: &Path, remote: &str, branch: &str) -> Result<PullResult> {
let out = new_binary_command(dir)
.await?
.args(["pull", "--no-rebase", remote, branch])
.env("GIT_TERMINAL_PROMPT", "0")
.output()
.await
.map_err(|e| GenericError(format!("failed to run git pull --no-rebase: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
info!("Pull merge status={} {combined}", out.status);
if !out.status.success() {
if combined.to_lowercase().contains("conflict") {
return Err(GenericError(
"Merge conflicts detected. Please resolve them manually.".to_string(),
));
}
return Err(GenericError(format!("Failed to merge pull: {}", combined.trim())));
}
Ok(PullResult::Success { message: format!("Merged from {}/{}", remote, branch) })
}
// pub(crate) fn git_pull_old(dir: &Path) -> Result<PullResult> {
// let repo = open_repo(dir)?;
//
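
Taken together, the removed variants let a caller route every pull outcome, including divergence, to one of the two recovery paths above. A minimal dispatch sketch, where `prompt_for_credentials` and `user_prefers_merge` are hypothetical UI helpers:

```
match git_pull(dir).await? {
    PullResult::Success { message } => log::info!("{message}"),
    PullResult::UpToDate => log::info!("Already up to date"),
    PullResult::UncommittedChanges => log::warn!("Commit or stash changes before pulling"),
    PullResult::NeedsCredentials { url, .. } => prompt_for_credentials(&url)?,
    PullResult::Diverged { remote, branch } => {
        // Histories diverged; let the user choose between merging and resetting
        if user_prefers_merge() {
            git_pull_merge(dir, &remote, &branch).await?;
        } else {
            git_pull_force_reset(dir, &remote, &branch).await?;
        }
    }
}
```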

View File

@@ -17,25 +17,17 @@ pub enum PushResult {
NeedsCredentials { url: String, error: Option<String> },
}
pub async fn git_push(dir: &Path) -> Result<PushResult> {
// Extract all git2 data before any await points (git2 types are not Send)
let (branch_name, remote_name, remote_url) = {
let repo = open_repo(dir)?;
let branch_name = get_current_branch_name(&repo)?;
let remote = get_default_remote_for_push_in_repo(&repo)?;
let remote_name =
remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?.to_string();
let remote_url =
remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?.to_string();
(branch_name, remote_name, remote_url)
};
pub fn git_push(dir: &Path) -> Result<PushResult> {
let repo = open_repo(dir)?;
let branch_name = get_current_branch_name(&repo)?;
let remote = get_default_remote_for_push_in_repo(&repo)?;
let remote_name = remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?;
let remote_url = remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?;
let out = new_binary_command(dir)
.await?
let out = new_binary_command(dir)?
.args(["push", &remote_name, &branch_name])
.env("GIT_TERMINAL_PROMPT", "0")
.output()
.await
.map_err(|e| GenericError(format!("failed to run git push: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);

View File

@@ -1,20 +0,0 @@
use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use std::path::Path;
pub async fn git_reset_changes(dir: &Path) -> Result<()> {
let out = new_binary_command(dir)
.await?
.args(["reset", "--hard", "HEAD"])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git reset: {e}")))?;
if !out.status.success() {
let stderr = String::from_utf8_lossy(&out.stderr);
return Err(GenericError(format!("Failed to reset: {}", stderr.trim())));
}
Ok(())
}

View File

@@ -18,8 +18,6 @@ pub struct GitStatusSummary {
pub origins: Vec<String>,
pub local_branches: Vec<String>,
pub remote_branches: Vec<String>,
pub ahead: u32,
pub behind: u32,
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
@@ -162,18 +160,6 @@ pub fn git_status(dir: &Path) -> crate::error::Result<GitStatusSummary> {
let local_branches = local_branch_names(&repo)?;
let remote_branches = remote_branch_names(&repo)?;
// Compute ahead/behind relative to remote tracking branch
let (ahead, behind) = (|| -> Option<(usize, usize)> {
let head = repo.head().ok()?;
let local_oid = head.target()?;
let branch_name = head.shorthand()?;
let upstream_ref =
repo.find_branch(&format!("origin/{branch_name}"), git2::BranchType::Remote).ok()?;
let upstream_oid = upstream_ref.get().target()?;
repo.graph_ahead_behind(local_oid, upstream_oid).ok()
})()
.unwrap_or((0, 0));
Ok(GitStatusSummary {
entries,
origins,
@@ -182,7 +168,5 @@ pub fn git_status(dir: &Path) -> crate::error::Result<GitStatusSummary> {
head_ref_shorthand,
local_branches,
remote_branches,
ahead: ahead as u32,
behind: behind as u32,
})
}

View File

@@ -47,6 +47,10 @@ pub(crate) fn remote_branch_names(repo: &Repository) -> Result<Vec<String>> {
Ok(branches)
}
pub(crate) fn get_branch_by_name<'s>(repo: &'s Repository, name: &str) -> Result<Branch<'s>> {
Ok(repo.find_branch(name, BranchType::Local)?)
}
pub(crate) fn bytes_to_string(bytes: &[u8]) -> Result<String> {
Ok(String::from_utf8(bytes.to_vec())?)
}

View File

@@ -22,6 +22,5 @@ tokio-stream = "0.1.14"
tonic = { version = "0.12.3", default-features = false, features = ["transport"] }
tonic-reflection = "0.12.3"
uuid = { version = "1.7.0", features = ["v4"] }
yaak-common = { workspace = true }
yaak-tls = { workspace = true }
thiserror = "2.0.17"

View File

@@ -115,18 +115,14 @@ impl GrpcConnection {
Ok(client.unary(req, path, codec).await?)
}
pub async fn streaming<F>(
pub async fn streaming(
&self,
service: &str,
method: &str,
stream: ReceiverStream<String>,
metadata: &BTreeMap<String, String>,
client_cert: Option<ClientCertificateConfig>,
on_message: F,
) -> Result<Response<Streaming<DynamicMessage>>>
where
F: Fn(std::result::Result<String, String>) + Send + Sync + Clone + 'static,
{
) -> Result<Response<Streaming<DynamicMessage>>> {
let method = &self.method(&service, &method).await?;
let mapped_stream = {
let input_message = method.input();
@@ -135,39 +131,31 @@ impl GrpcConnection {
let md = metadata.clone();
let use_reflection = self.use_reflection.clone();
let client_cert = client_cert.clone();
stream
.then(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
let on_message = on_message.clone();
let json_clone = json.clone();
async move {
if use_reflection {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
}
stream.filter_map(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
tokio::runtime::Handle::current().block_on(async move {
if use_reflection {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
}
let mut de = Deserializer::from_str(&json);
match DynamicMessage::deserialize(input_message, &mut de) {
Ok(m) => {
on_message(Ok(json_clone));
Some(m)
}
Err(e) => {
warn!("Failed to deserialize message: {e}");
on_message(Err(e.to_string()));
None
}
}
let mut de = Deserializer::from_str(&json);
match DynamicMessage::deserialize(input_message, &mut de) {
Ok(m) => Some(m),
Err(e) => {
warn!("Failed to deserialize message: {e}");
None
}
}
})
.filter_map(|x| x)
})
};
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
@@ -181,18 +169,14 @@ impl GrpcConnection {
Ok(client.streaming(req, path, codec).await?)
}
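
The removed `on_message` hook reported each outgoing message's serialization result back to the caller. Wiring it looked roughly like this; the service and method names are made up:

```
// Sketch: the closure receives Ok(json) for each message that serialized,
// and Err(msg) for each one that failed and was dropped from the stream.
let response = connection
    .streaming(
        "example.Greeter",
        "StreamGreetings",
        ReceiverStream::new(rx),
        &metadata,
        None,
        |result| match result {
            Ok(json) => log::debug!("sent: {json}"),
            Err(err) => log::warn!("dropped message: {err}"),
        },
    )
    .await?;
```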
pub async fn client_streaming<F>(
pub async fn client_streaming(
&self,
service: &str,
method: &str,
stream: ReceiverStream<String>,
metadata: &BTreeMap<String, String>,
client_cert: Option<ClientCertificateConfig>,
on_message: F,
) -> Result<Response<DynamicMessage>>
where
F: Fn(std::result::Result<String, String>) + Send + Sync + Clone + 'static,
{
) -> Result<Response<DynamicMessage>> {
let method = &self.method(&service, &method).await?;
let mapped_stream = {
let input_message = method.input();
@@ -201,39 +185,31 @@ impl GrpcConnection {
let md = metadata.clone();
let use_reflection = self.use_reflection.clone();
let client_cert = client_cert.clone();
stream
.then(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
let on_message = on_message.clone();
let json_clone = json.clone();
async move {
if use_reflection {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
}
stream.filter_map(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
tokio::runtime::Handle::current().block_on(async move {
if use_reflection {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
}
let mut de = Deserializer::from_str(&json);
match DynamicMessage::deserialize(input_message, &mut de) {
Ok(m) => {
on_message(Ok(json_clone));
Some(m)
}
Err(e) => {
warn!("Failed to deserialize message: {e}");
on_message(Err(e.to_string()));
None
}
}
let mut de = Deserializer::from_str(&json);
match DynamicMessage::deserialize(input_message, &mut de) {
Ok(m) => Some(m),
Err(e) => {
warn!("Failed to deserialize message: {e}");
None
}
}
})
.filter_map(|x| x)
})
};
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
@@ -340,9 +316,10 @@ impl GrpcHandle {
metadata: &BTreeMap<String, String>,
validate_certificates: bool,
client_cert: Option<ClientCertificateConfig>,
skip_cache: bool,
) -> Result<Vec<ServiceDefinition>> {
// Ensure we have a pool; reflect only if missing
if self.get_pool(id, uri, proto_files).is_none() {
if skip_cache || self.get_pool(id, uri, proto_files).is_none() {
info!("Reflecting gRPC services for {} at {}", id, uri);
self.reflect(id, uri, proto_files, metadata, validate_certificates, client_cert)
.await?;

View File

@@ -16,12 +16,12 @@ use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use tokio::fs;
use tokio::process::Command;
use tokio::sync::RwLock;
use tonic::codegen::http::uri::PathAndQuery;
use tonic::transport::Uri;
use tonic_reflection::pb::v1::server_reflection_request::MessageRequest;
use tonic_reflection::pb::v1::server_reflection_response::MessageResponse;
use yaak_common::command::new_xplatform_command;
use yaak_tls::ClientCertificateConfig;
pub async fn fill_pool_from_files(
@@ -91,11 +91,11 @@ pub async fn fill_pool_from_files(
info!("Invoking protoc with {}", args.join(" "));
let mut cmd = new_xplatform_command(&config.protoc_bin_path);
cmd.args(&args);
let out =
cmd.output().await.map_err(|e| GenericError(format!("Failed to run protoc: {}", e)))?;
let out = Command::new(&config.protoc_bin_path)
.args(&args)
.output()
.await
.map_err(|e| GenericError(format!("Failed to run protoc: {}", e)))?;
if !out.status.success() {
return Err(GenericError(format!(

View File

@@ -8,11 +8,10 @@ publish = false
async-compression = { version = "0.4", features = ["tokio", "gzip", "deflate", "brotli", "zstd"] }
async-trait = "0.1"
brotli = "7"
bytes = "1.11.1"
bytes = "1.5.0"
cookie = "0.18.1"
flate2 = "1"
futures-util = "0.3"
http-body = "1"
url = "2"
zstd = "0.13"
hyper-util = { version = "0.1.17", default-features = false, features = ["client-legacy"] }

View File

@@ -2,8 +2,6 @@ use crate::dns::LocalhostResolver;
use crate::error::Result;
use log::{debug, info, warn};
use reqwest::{Client, Proxy, redirect};
use std::sync::Arc;
use yaak_models::models::DnsOverride;
use yaak_tls::{ClientCertificateConfig, get_tls_config};
#[derive(Clone)]
@@ -30,14 +28,10 @@ pub struct HttpConnectionOptions {
pub validate_certificates: bool,
pub proxy: HttpConnectionProxySetting,
pub client_certificate: Option<ClientCertificateConfig>,
pub dns_overrides: Vec<DnsOverride>,
}
impl HttpConnectionOptions {
/// Build a reqwest Client and return it along with the DNS resolver.
/// The resolver is returned separately so it can be configured per-request
/// to emit DNS timing events to the appropriate channel.
pub(crate) fn build_client(&self) -> Result<(Client, Arc<LocalhostResolver>)> {
pub(crate) fn build_client(&self) -> Result<Client> {
let mut client = Client::builder()
.connection_verbose(true)
.redirect(redirect::Policy::none())
@@ -46,19 +40,15 @@ impl HttpConnectionOptions {
.no_brotli()
.no_deflate()
.referer(false)
.tls_info(true)
// Disable connection pooling to ensure DNS resolution happens on each request
// This is needed so we can emit DNS timing events for each request
.pool_max_idle_per_host(0);
.tls_info(true);
// Configure TLS with optional client certificate
let config =
get_tls_config(self.validate_certificates, true, self.client_certificate.clone())?;
client = client.use_preconfigured_tls(config);
// Configure DNS resolver - keep a reference to configure per-request
let resolver = LocalhostResolver::new(self.dns_overrides.clone());
client = client.dns_resolver(resolver.clone());
// Configure DNS resolver
client = client.dns_resolver(LocalhostResolver::new());
// Configure proxy
match self.proxy.clone() {
@@ -79,7 +69,7 @@ impl HttpConnectionOptions {
self.client_certificate.is_some()
);
Ok((client.build()?, resolver))
Ok(client.build()?)
}
}

View File

@@ -1,185 +1,53 @@
use crate::sender::HttpResponseEvent;
use hyper_util::client::legacy::connect::dns::{
GaiResolver as HyperGaiResolver, Name as HyperName,
};
use log::info;
use reqwest::dns::{Addrs, Name, Resolve, Resolving};
use std::collections::HashMap;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr};
use std::str::FromStr;
use std::sync::Arc;
use std::time::Instant;
use tokio::sync::{RwLock, mpsc};
use tower_service::Service;
use yaak_models::models::DnsOverride;
/// Stores resolved addresses for a hostname override
#[derive(Clone)]
pub struct ResolvedOverride {
pub ipv4: Vec<Ipv4Addr>,
pub ipv6: Vec<Ipv6Addr>,
}
#[derive(Clone)]
pub struct LocalhostResolver {
fallback: HyperGaiResolver,
event_tx: Arc<RwLock<Option<mpsc::Sender<HttpResponseEvent>>>>,
overrides: Arc<HashMap<String, ResolvedOverride>>,
}
impl LocalhostResolver {
pub fn new(dns_overrides: Vec<DnsOverride>) -> Arc<Self> {
pub fn new() -> Arc<Self> {
let resolver = HyperGaiResolver::new();
// Pre-parse DNS overrides into a lookup map
let mut overrides = HashMap::new();
for o in dns_overrides {
if !o.enabled {
continue;
}
let hostname = o.hostname.to_lowercase();
let ipv4: Vec<Ipv4Addr> =
o.ipv4.iter().filter_map(|s| s.parse::<Ipv4Addr>().ok()).collect();
let ipv6: Vec<Ipv6Addr> =
o.ipv6.iter().filter_map(|s| s.parse::<Ipv6Addr>().ok()).collect();
// Only add if at least one address is valid
if !ipv4.is_empty() || !ipv6.is_empty() {
overrides.insert(hostname, ResolvedOverride { ipv4, ipv6 });
}
}
Arc::new(Self {
fallback: resolver,
event_tx: Arc::new(RwLock::new(None)),
overrides: Arc::new(overrides),
})
}
/// Set the event sender for the current request.
/// This should be called before each request to direct DNS events
/// to the appropriate channel.
pub async fn set_event_sender(&self, tx: Option<mpsc::Sender<HttpResponseEvent>>) {
let mut guard = self.event_tx.write().await;
*guard = tx;
Arc::new(Self { fallback: resolver })
}
}
impl Resolve for LocalhostResolver {
fn resolve(&self, name: Name) -> Resolving {
let host = name.as_str().to_lowercase();
let event_tx = self.event_tx.clone();
let overrides = self.overrides.clone();
info!("DNS resolve called for: {}", host);
// Check for DNS override first
if let Some(resolved) = overrides.get(&host) {
log::debug!("DNS override found for: {}", host);
let hostname = host.clone();
let mut addrs: Vec<SocketAddr> = Vec::new();
// Add IPv4 addresses
for ip in &resolved.ipv4 {
addrs.push(SocketAddr::new(IpAddr::V4(*ip), 0));
}
// Add IPv6 addresses
for ip in &resolved.ipv6 {
addrs.push(SocketAddr::new(IpAddr::V6(*ip), 0));
}
let addresses: Vec<String> = addrs.iter().map(|a| a.ip().to_string()).collect();
return Box::pin(async move {
// Emit DNS event for override
let guard = event_tx.read().await;
if let Some(tx) = guard.as_ref() {
let _ = tx
.send(HttpResponseEvent::DnsResolved {
hostname,
addresses,
duration: 0,
overridden: true,
})
.await;
}
Ok::<Addrs, Box<dyn std::error::Error + Send + Sync>>(Box::new(addrs.into_iter()))
});
}
// Check for .localhost suffix
let is_localhost = host.ends_with(".localhost");
if is_localhost {
let hostname = host.clone();
// Port 0 is fine; reqwest replaces it with the URL's explicit
// port or the scheme's default (80/443, etc.).
// port or the scheme's default (80/443, etc.).
// (See docs note below.)
let addrs: Vec<SocketAddr> = vec![
SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),
SocketAddr::new(IpAddr::V6(Ipv6Addr::LOCALHOST), 0),
];
let addresses: Vec<String> = addrs.iter().map(|a| a.ip().to_string()).collect();
return Box::pin(async move {
// Emit DNS event for localhost resolution
let guard = event_tx.read().await;
if let Some(tx) = guard.as_ref() {
let _ = tx
.send(HttpResponseEvent::DnsResolved {
hostname,
addresses,
duration: 0,
overridden: false,
})
.await;
}
Ok::<Addrs, Box<dyn std::error::Error + Send + Sync>>(Box::new(addrs.into_iter()))
});
}
// Fall back to system DNS
let mut fallback = self.fallback.clone();
let name_str = name.as_str().to_string();
let hostname = host.clone();
Box::pin(async move {
let start = Instant::now();
let result = match HyperName::from_str(&name_str) {
Ok(n) => fallback.call(n).await,
Err(e) => return Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>),
};
let duration = start.elapsed().as_millis() as u64;
match result {
Ok(addrs) => {
// Collect addresses for event emission
let addr_vec: Vec<SocketAddr> = addrs.collect();
let addresses: Vec<String> =
addr_vec.iter().map(|a| a.ip().to_string()).collect();
// Emit DNS event
let guard = event_tx.read().await;
if let Some(tx) = guard.as_ref() {
let _ = tx
.send(HttpResponseEvent::DnsResolved {
hostname,
addresses,
duration,
overridden: false,
})
.await;
}
Ok(Box::new(addr_vec.into_iter()) as Addrs)
}
Err(err) => Err(Box::new(err) as Box<dyn std::error::Error + Send + Sync>),
match HyperName::from_str(&name_str) {
Ok(n) => fallback
.call(n)
.await
.map(|addrs| Box::new(addrs) as Addrs)
.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync>),
Err(e) => Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>),
}
})
}
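
Constructing the resolver is just a list of `DnsOverride` values; hostnames are stored lowercased, and entries with no valid address (or `enabled: false`) are skipped during the pre-parse. A small sketch with made-up addresses, field shapes following the `DnsOverride` model:

```
let resolver = LocalhostResolver::new(vec![DnsOverride {
    hostname: "API.Internal".to_string(), // stored as "api.internal"
    ipv4: vec!["10.0.0.7".to_string()],
    ipv6: vec![],
    enabled: true,
}]);
```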

View File

@@ -1,5 +1,4 @@
use crate::client::HttpConnectionOptions;
use crate::dns::LocalhostResolver;
use crate::error::Result;
use log::info;
use reqwest::Client;
@@ -8,15 +7,8 @@ use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;
/// A cached HTTP client along with its DNS resolver.
/// The resolver is needed to set the event sender per-request.
pub struct CachedClient {
pub client: Client,
pub resolver: Arc<LocalhostResolver>,
}
pub struct HttpConnectionManager {
connections: Arc<RwLock<BTreeMap<String, (CachedClient, Instant)>>>,
connections: Arc<RwLock<BTreeMap<String, (Client, Instant)>>>,
ttl: Duration,
}
@@ -28,26 +20,21 @@ impl HttpConnectionManager {
}
}
pub async fn get_client(&self, opt: &HttpConnectionOptions) -> Result<CachedClient> {
pub async fn get_client(&self, opt: &HttpConnectionOptions) -> Result<Client> {
let mut connections = self.connections.write().await;
let id = opt.id.clone();
// Clean old connections
connections.retain(|_, (_, last_used)| last_used.elapsed() <= self.ttl);
if let Some((cached, last_used)) = connections.get_mut(&id) {
if let Some((c, last_used)) = connections.get_mut(&id) {
info!("Re-using HTTP client {id}");
*last_used = Instant::now();
return Ok(CachedClient {
client: cached.client.clone(),
resolver: cached.resolver.clone(),
});
return Ok(c.clone());
}
let (client, resolver) = opt.build_client()?;
let cached = CachedClient { client: client.clone(), resolver: resolver.clone() };
connections.insert(id.into(), (cached, Instant::now()));
Ok(CachedClient { client, resolver })
let c = opt.build_client()?;
connections.insert(id.into(), (c.clone(), Instant::now()));
Ok(c)
}
}

View File

@@ -2,9 +2,7 @@ use crate::decompress::{ContentEncoding, streaming_decoder};
use crate::error::{Error, Result};
use crate::types::{SendableBody, SendableHttpRequest};
use async_trait::async_trait;
use bytes::Bytes;
use futures_util::StreamExt;
use http_body::{Body as HttpBody, Frame, SizeHint};
use reqwest::{Client, Method, Version};
use std::fmt::Display;
use std::pin::Pin;
@@ -33,14 +31,7 @@ pub enum HttpResponseEvent {
},
SendUrl {
method: String,
scheme: String,
username: String,
password: String,
host: String,
port: u16,
path: String,
query: String,
fragment: String,
},
ReceiveUrl {
version: Version,
@@ -54,12 +45,6 @@ pub enum HttpResponseEvent {
ChunkReceived {
bytes: usize,
},
DnsResolved {
hostname: String,
addresses: Vec<String>,
duration: u64,
overridden: bool,
},
}
impl Display for HttpResponseEvent {
@@ -74,32 +59,7 @@ impl Display for HttpResponseEvent {
};
write!(f, "* Redirect {} -> {} ({})", status, url, behavior_str)
}
HttpResponseEvent::SendUrl {
method,
scheme,
username,
password,
host,
port,
path,
query,
fragment,
} => {
let auth_str = if username.is_empty() && password.is_empty() {
String::new()
} else {
format!("{}:{}@", username, password)
};
let query_str =
if query.is_empty() { String::new() } else { format!("?{}", query) };
let fragment_str =
if fragment.is_empty() { String::new() } else { format!("#{}", fragment) };
write!(
f,
"> {} {}://{}{}:{}{}{}{}",
method, scheme, auth_str, host, port, path, query_str, fragment_str
)
}
HttpResponseEvent::SendUrl { method, path } => write!(f, "> {} {}", method, path),
HttpResponseEvent::ReceiveUrl { version, status } => {
write!(f, "< {} {}", version_to_str(version), status)
}
@@ -107,19 +67,6 @@ impl Display for HttpResponseEvent {
HttpResponseEvent::HeaderDown(name, value) => write!(f, "< {}: {}", name, value),
HttpResponseEvent::ChunkSent { bytes } => write!(f, "> [{} bytes sent]", bytes),
HttpResponseEvent::ChunkReceived { bytes } => write!(f, "< [{} bytes received]", bytes),
HttpResponseEvent::DnsResolved { hostname, addresses, duration, overridden } => {
if *overridden {
write!(f, "* DNS override {} -> {}", hostname, addresses.join(", "))
} else {
write!(
f,
"* DNS resolved {} to {} ({}ms)",
hostname,
addresses.join(", "),
duration
)
}
}
}
}
}
@@ -138,19 +85,7 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
RedirectBehavior::DropBody => "drop_body".to_string(),
},
},
HttpResponseEvent::SendUrl {
method,
scheme,
username,
password,
host,
port,
path,
query,
fragment,
} => {
D::SendUrl { method, scheme, username, password, host, port, path, query, fragment }
}
HttpResponseEvent::SendUrl { method, path } => D::SendUrl { method, path },
HttpResponseEvent::ReceiveUrl { version, status } => {
D::ReceiveUrl { version: format!("{:?}", version), status }
}
@@ -158,9 +93,6 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
HttpResponseEvent::HeaderDown(name, value) => D::HeaderDown { name, value },
HttpResponseEvent::ChunkSent { bytes } => D::ChunkSent { bytes },
HttpResponseEvent::ChunkReceived { bytes } => D::ChunkReceived { bytes },
HttpResponseEvent::DnsResolved { hostname, addresses, duration, overridden } => {
D::DnsResolved { hostname, addresses, duration, overridden }
}
}
}
}
@@ -422,9 +354,6 @@ impl HttpSender for ReqwestSender {
// Add headers
for header in request.headers {
if header.0.is_empty() {
continue;
}
req_builder = req_builder.header(&header.0, &header.1);
}
@@ -441,16 +370,10 @@ impl HttpSender for ReqwestSender {
Some(SendableBody::Bytes(bytes)) => {
req_builder = req_builder.body(bytes);
}
Some(SendableBody::Stream { data, content_length }) => {
// Convert AsyncRead stream to reqwest Body. If content length is
// known, wrap with a SizedBody so hyper can set Content-Length
// automatically (for both HTTP/1.1 and HTTP/2).
let stream = tokio_util::io::ReaderStream::new(data);
let body = if let Some(len) = content_length {
reqwest::Body::wrap(SizedBody::new(stream, len))
} else {
reqwest::Body::wrap_stream(stream)
};
Some(SendableBody::Stream(stream)) => {
// Convert AsyncRead stream to reqwest Body
let stream = tokio_util::io::ReaderStream::new(stream);
let body = reqwest::Body::wrap_stream(stream);
req_builder = req_builder.body(body);
}
}
@@ -467,15 +390,8 @@ impl HttpSender for ReqwestSender {
));
send_event(HttpResponseEvent::SendUrl {
method: sendable_req.method().to_string(),
scheme: sendable_req.url().scheme().to_string(),
username: sendable_req.url().username().to_string(),
password: sendable_req.url().password().unwrap_or_default().to_string(),
host: sendable_req.url().host_str().unwrap_or_default().to_string(),
port: sendable_req.url().port_or_known_default().unwrap_or(0),
path: sendable_req.url().path().to_string(),
query: sendable_req.url().query().unwrap_or_default().to_string(),
fragment: sendable_req.url().fragment().unwrap_or_default().to_string(),
method: sendable_req.method().to_string(),
});
let mut request_headers = Vec::new();
@@ -554,54 +470,6 @@ impl HttpSender for ReqwestSender {
}
}
/// A wrapper around a byte stream that reports a known content length via
/// `size_hint()`. This lets hyper set the `Content-Length` header
/// automatically based on the body size, without us having to add it as an
/// explicit header — which can cause duplicate `Content-Length` headers and
/// break HTTP/2.
struct SizedBody<S> {
stream: std::sync::Mutex<S>,
remaining: u64,
}
impl<S> SizedBody<S> {
fn new(stream: S, content_length: u64) -> Self {
Self { stream: std::sync::Mutex::new(stream), remaining: content_length }
}
}
impl<S> HttpBody for SizedBody<S>
where
S: futures_util::Stream<Item = std::result::Result<Bytes, std::io::Error>>
+ Send
+ Unpin
+ 'static,
{
type Data = Bytes;
type Error = std::io::Error;
fn poll_frame(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<std::result::Result<Frame<Self::Data>, Self::Error>>> {
let this = self.get_mut();
let mut stream = this.stream.lock().unwrap();
match stream.poll_next_unpin(cx) {
Poll::Ready(Some(Ok(chunk))) => {
this.remaining = this.remaining.saturating_sub(chunk.len() as u64);
Poll::Ready(Some(Ok(Frame::data(chunk))))
}
Poll::Ready(Some(Err(e))) => Poll::Ready(Some(Err(e))),
Poll::Ready(None) => Poll::Ready(None),
Poll::Pending => Poll::Pending,
}
}
fn size_hint(&self) -> SizeHint {
SizeHint::with_exact(self.remaining)
}
}
fn version_to_str(version: &Version) -> String {
match *version {
Version::HTTP_09 => "HTTP/0.9".to_string(),

View File

@@ -168,7 +168,6 @@ impl<S: HttpSender> HttpTransaction<S> {
response.drain().await?;
// Update the request URL
let previous_url = current_url.clone();
current_url = if location.starts_with("http://") || location.starts_with("https://") {
// Absolute URL
location
@@ -182,8 +181,6 @@ impl<S: HttpSender> HttpTransaction<S> {
format!("{}/{}", base_path, location)
};
Self::remove_sensitive_headers(&mut current_headers, &previous_url, &current_url);
// Determine redirect behavior based on status code and method
let behavior = if status == 303 {
// 303 See Other always changes to GET
@@ -223,33 +220,6 @@ impl<S: HttpSender> HttpTransaction<S> {
}
}
/// Remove sensitive headers when redirecting to a different host.
/// This matches reqwest's `remove_sensitive_headers()` behavior and prevents
/// credentials from being forwarded to third-party servers (e.g., an
/// Authorization header carried along when an API redirects to an S3 bucket).
fn remove_sensitive_headers(
headers: &mut Vec<(String, String)>,
previous_url: &str,
next_url: &str,
) {
let previous_host = Url::parse(previous_url).ok().and_then(|u| {
u.host_str().map(|h| format!("{}:{}", h, u.port_or_known_default().unwrap_or(0)))
});
let next_host = Url::parse(next_url).ok().and_then(|u| {
u.host_str().map(|h| format!("{}:{}", h, u.port_or_known_default().unwrap_or(0)))
});
if previous_host != next_host {
headers.retain(|h| {
let name_lower = h.0.to_lowercase();
name_lower != "authorization"
&& name_lower != "cookie"
&& name_lower != "cookie2"
&& name_lower != "proxy-authorization"
&& name_lower != "www-authenticate"
});
}
}
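
Because the comparison key includes the resolved port, a scheme change on the same hostname (443 vs 80) also counts as a different host here. A within-crate test sketch of that edge case:

```
let mut headers = vec![("Authorization".to_string(), "Bearer t".to_string())];
HttpTransaction::<MockSender>::remove_sensitive_headers(
    &mut headers,
    "https://api.example.com/a",
    "http://api.example.com/b",
);
// "api.example.com:443" != "api.example.com:80", so the header is stripped
assert!(headers.is_empty());
```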
/// Check if a status code indicates a redirect
fn is_redirect(status: u16) -> bool {
matches!(status, 301 | 302 | 303 | 307 | 308)
@@ -299,20 +269,9 @@ mod tests {
use tokio::io::AsyncRead;
use tokio::sync::Mutex;
/// Captured request metadata for test assertions
#[derive(Debug, Clone)]
#[allow(dead_code)]
struct CapturedRequest {
url: String,
method: String,
headers: Vec<(String, String)>,
}
/// Mock sender for testing
struct MockSender {
responses: Arc<Mutex<Vec<MockResponse>>>,
/// Captured requests for assertions
captured_requests: Arc<Mutex<Vec<CapturedRequest>>>,
}
struct MockResponse {
@@ -323,10 +282,7 @@ mod tests {
impl MockSender {
fn new(responses: Vec<MockResponse>) -> Self {
Self {
responses: Arc::new(Mutex::new(responses)),
captured_requests: Arc::new(Mutex::new(Vec::new())),
}
Self { responses: Arc::new(Mutex::new(responses)) }
}
}
@@ -334,16 +290,9 @@ mod tests {
impl HttpSender for MockSender {
async fn send(
&self,
request: SendableHttpRequest,
_request: SendableHttpRequest,
_event_tx: mpsc::Sender<HttpResponseEvent>,
) -> Result<HttpResponse> {
// Capture the request metadata for later assertions
self.captured_requests.lock().await.push(CapturedRequest {
url: request.url.clone(),
method: request.method.clone(),
headers: request.headers.clone(),
});
let mut responses = self.responses.lock().await;
if responses.is_empty() {
Err(crate::error::Error::RequestError("No more mock responses".to_string()))
@@ -393,8 +342,7 @@ mod tests {
#[tokio::test]
async fn test_transaction_single_redirect() {
let redirect_headers =
vec![("Location".to_string(), "https://example.com/new".to_string())];
let redirect_headers = vec![("Location".to_string(), "https://example.com/new".to_string())];
let responses = vec![
MockResponse { status: 302, headers: redirect_headers, body: vec![] },
@@ -425,8 +373,7 @@ mod tests {
#[tokio::test]
async fn test_transaction_max_redirects_exceeded() {
let redirect_headers =
vec![("Location".to_string(), "https://example.com/loop".to_string())];
let redirect_headers = vec![("Location".to_string(), "https://example.com/loop".to_string())];
// Create more redirects than allowed
let responses: Vec<MockResponse> = (0..12)
@@ -578,8 +525,7 @@ mod tests {
_request: SendableHttpRequest,
_event_tx: mpsc::Sender<HttpResponseEvent>,
) -> Result<HttpResponse> {
let headers =
vec![("set-cookie".to_string(), "session=xyz789; Path=/".to_string())];
let headers = vec![("set-cookie".to_string(), "session=xyz789; Path=/".to_string())];
let body_stream: Pin<Box<dyn AsyncRead + Send>> =
Box::pin(std::io::Cursor::new(vec![]));
@@ -638,10 +584,7 @@ mod tests {
let headers = vec![
("set-cookie".to_string(), "session=abc123; Path=/".to_string()),
("set-cookie".to_string(), "user_id=42; Path=/".to_string()),
(
"set-cookie".to_string(),
"preferences=dark; Path=/; Max-Age=86400".to_string(),
),
("set-cookie".to_string(), "preferences=dark; Path=/; Max-Age=86400".to_string()),
];
let body_stream: Pin<Box<dyn AsyncRead + Send>> =
@@ -777,116 +720,4 @@ mod tests {
assert!(result.is_ok());
assert_eq!(request_count.load(Ordering::SeqCst), 2);
}
#[tokio::test]
async fn test_cross_origin_redirect_strips_auth_headers() {
// Redirect from api.example.com -> s3.amazonaws.com should strip Authorization
let responses = vec![
MockResponse {
status: 302,
headers: vec![(
"Location".to_string(),
"https://s3.amazonaws.com/bucket/file.pdf".to_string(),
)],
body: vec![],
},
MockResponse { status: 200, headers: Vec::new(), body: b"PDF content".to_vec() },
];
let sender = MockSender::new(responses);
let captured = sender.captured_requests.clone();
let transaction = HttpTransaction::new(sender);
let request = SendableHttpRequest {
url: "https://api.example.com/download".to_string(),
method: "GET".to_string(),
headers: vec![
("Authorization".to_string(), "Basic dXNlcjpwYXNz".to_string()),
("Accept".to_string(), "application/pdf".to_string()),
],
options: crate::types::SendableHttpRequestOptions {
follow_redirects: true,
..Default::default()
},
..Default::default()
};
let (_tx, rx) = tokio::sync::watch::channel(false);
let (event_tx, _event_rx) = mpsc::channel(100);
let result = transaction.execute_with_cancellation(request, rx, event_tx).await.unwrap();
assert_eq!(result.status, 200);
let requests = captured.lock().await;
assert_eq!(requests.len(), 2);
// First request should have the Authorization header
assert!(
requests[0].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("authorization")),
"First request should have Authorization header"
);
// Second request (to different host) should NOT have the Authorization header
assert!(
!requests[1].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("authorization")),
"Redirected request to different host should NOT have Authorization header"
);
// Non-sensitive headers should still be present
assert!(
requests[1].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("accept")),
"Non-sensitive headers should be preserved across cross-origin redirects"
);
}
#[tokio::test]
async fn test_same_origin_redirect_preserves_auth_headers() {
// Redirect within the same host should keep Authorization
let responses = vec![
MockResponse {
status: 302,
headers: vec![(
"Location".to_string(),
"https://api.example.com/v2/download".to_string(),
)],
body: vec![],
},
MockResponse { status: 200, headers: Vec::new(), body: b"OK".to_vec() },
];
let sender = MockSender::new(responses);
let captured = sender.captured_requests.clone();
let transaction = HttpTransaction::new(sender);
let request = SendableHttpRequest {
url: "https://api.example.com/v1/download".to_string(),
method: "GET".to_string(),
headers: vec![
("Authorization".to_string(), "Bearer token123".to_string()),
("Accept".to_string(), "application/json".to_string()),
],
options: crate::types::SendableHttpRequestOptions {
follow_redirects: true,
..Default::default()
},
..Default::default()
};
let (_tx, rx) = tokio::sync::watch::channel(false);
let (event_tx, _event_rx) = mpsc::channel(100);
let result = transaction.execute_with_cancellation(request, rx, event_tx).await.unwrap();
assert_eq!(result.status, 200);
let requests = captured.lock().await;
assert_eq!(requests.len(), 2);
// Both requests should have the Authorization header (same host)
assert!(
requests[0].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("authorization")),
"First request should have Authorization header"
);
assert!(
requests[1].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("authorization")),
"Redirected request to same host should preserve Authorization header"
);
}
}

View File

@@ -16,13 +16,7 @@ pub(crate) const MULTIPART_BOUNDARY: &str = "------YaakFormBoundary";
pub enum SendableBody {
Bytes(Bytes),
Stream {
data: Pin<Box<dyn AsyncRead + Send + 'static>>,
/// Known content length for the stream, if available. This is used by
/// the sender to set the body size hint so that hyper can set
/// Content-Length automatically for both HTTP/1.1 and HTTP/2.
content_length: Option<u64>,
},
Stream(Pin<Box<dyn AsyncRead + Send + 'static>>),
}
enum SendableBodyWithMeta {
@@ -37,9 +31,7 @@ impl From<SendableBodyWithMeta> for SendableBody {
fn from(value: SendableBodyWithMeta) -> Self {
match value {
SendableBodyWithMeta::Bytes(b) => SendableBody::Bytes(b),
SendableBodyWithMeta::Stream { data, content_length } => {
SendableBody::Stream { data, content_length: content_length.map(|l| l as u64) }
}
SendableBodyWithMeta::Stream { data, .. } => SendableBody::Stream(data),
}
}
}
@@ -194,11 +186,23 @@ async fn build_body(
}
}
// NOTE: Content-Length is NOT set as an explicit header here. Instead, the
// body's content length is carried via SendableBody::Stream { content_length }
// and used by the sender to set the body size hint. This lets hyper handle
// Content-Length automatically for both HTTP/1.1 and HTTP/2, avoiding the
// duplicate Content-Length that breaks HTTP/2 servers.
// Check if Transfer-Encoding: chunked is already set
let has_chunked_encoding = headers.iter().any(|h| {
h.0.to_lowercase() == "transfer-encoding" && h.1.to_lowercase().contains("chunked")
});
// Add a Content-Length header only if chunked encoding is not being used
if !has_chunked_encoding {
let content_length = match body {
Some(SendableBodyWithMeta::Bytes(ref bytes)) => Some(bytes.len()),
Some(SendableBodyWithMeta::Stream { content_length, .. }) => content_length,
None => None,
};
if let Some(cl) = content_length {
headers.push(("Content-Length".to_string(), cl.to_string()));
}
}
Ok((body.map(|b| b.into()), headers))
}
@@ -924,27 +928,7 @@ mod tests {
}
#[tokio::test]
async fn test_no_content_length_header_added_by_build_body() -> Result<()> {
let mut body = BTreeMap::new();
body.insert("text".to_string(), json!("Hello, World!"));
let headers = vec![];
let (_, result_headers) =
build_body("POST", &Some("text/plain".to_string()), &body, headers).await?;
// Content-Length should NOT be set as an explicit header. Instead, the
// sender uses the body's size_hint to let hyper set it automatically,
// which works correctly for both HTTP/1.1 and HTTP/2.
let has_content_length =
result_headers.iter().any(|h| h.0.to_lowercase() == "content-length");
assert!(!has_content_length, "Content-Length should not be set as an explicit header");
Ok(())
}
#[tokio::test]
async fn test_chunked_encoding_header_preserved() -> Result<()> {
async fn test_no_content_length_with_chunked_encoding() -> Result<()> {
let mut body = BTreeMap::new();
body.insert("text".to_string(), json!("Hello, World!"));
@@ -954,6 +938,11 @@ mod tests {
let (_, result_headers) =
build_body("POST", &Some("text/plain".to_string()), &body, headers).await?;
// Verify that Content-Length is NOT present when Transfer-Encoding: chunked is set
let has_content_length =
result_headers.iter().any(|h| h.0.to_lowercase() == "content-length");
assert!(!has_content_length, "Content-Length should not be present with chunked encoding");
// Verify that the Transfer-Encoding header is still present
let has_chunked = result_headers.iter().any(|h| {
h.0.to_lowercase() == "transfer-encoding" && h.1.to_lowercase().contains("chunked")
@@ -962,4 +951,31 @@ mod tests {
Ok(())
}
#[tokio::test]
async fn test_content_length_without_chunked_encoding() -> Result<()> {
let mut body = BTreeMap::new();
body.insert("text".to_string(), json!("Hello, World!"));
// Headers without Transfer-Encoding: chunked
let headers = vec![];
let (_, result_headers) =
build_body("POST", &Some("text/plain".to_string()), &body, headers).await?;
// Verify that Content-Length IS present when Transfer-Encoding: chunked is NOT set
let content_length_header =
result_headers.iter().find(|h| h.0.to_lowercase() == "content-length");
assert!(
content_length_header.is_some(),
"Content-Length should be present without chunked encoding"
);
assert_eq!(
content_length_header.unwrap().1,
"13",
"Content-Length should match the body size"
);
Ok(())
}
}

View File

@@ -12,8 +12,6 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
export type EncryptedKey = { encryptedKey: string, };
@@ -40,7 +38,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
@@ -49,7 +47,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
* The `From` impl is in yaak-http to avoid circular dependencies.
*/
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, scheme: string, username: string, password: string, host: string, port: number, path: string, query: string, fragment: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
export type HttpResponseHeader = { name: string, value: string, };
@@ -93,6 +91,6 @@ export type WebsocketMessageType = "text" | "binary";
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };

View File

@@ -206,34 +206,6 @@ export function replaceModelsInStore<
  });
}

export function mergeModelsInStore<
  M extends AnyModel['model'],
  T extends Extract<AnyModel, { model: M }>,
>(model: M, models: T[], filter?: (model: T) => boolean) {
  mustStore().set(modelStoreDataAtom, (prev: ModelStoreData) => {
    const existingModels = { ...prev[model] } as Record<string, T>;

    // Merge in new models first
    for (const m of models) {
      existingModels[m.id] = m;
    }

    // Then filter out unwanted models
    if (filter) {
      for (const [id, m] of Object.entries(existingModels)) {
        if (!filter(m)) {
          delete existingModels[id];
        }
      }
    }

    return {
      ...prev,
      [model]: existingModels,
    };
  });
}

function shouldIgnoreModel({ model, updateSource }: ModelPayload) {
  // Never ignore updates from non-user sources
  if (updateSource.type !== 'window') {

View File

@@ -0,0 +1,9 @@
-- Add nullable settings columns to folders (NULL = inherit from parent)
ALTER TABLE folders ADD COLUMN setting_request_timeout INTEGER DEFAULT NULL;
ALTER TABLE folders ADD COLUMN setting_validate_certificates BOOLEAN DEFAULT NULL;
ALTER TABLE folders ADD COLUMN setting_follow_redirects BOOLEAN DEFAULT NULL;

-- Add nullable settings columns to http_requests (NULL = inherit from parent)
ALTER TABLE http_requests ADD COLUMN setting_request_timeout INTEGER DEFAULT NULL;
ALTER TABLE http_requests ADD COLUMN setting_validate_certificates BOOLEAN DEFAULT NULL;
ALTER TABLE http_requests ADD COLUMN setting_follow_redirects BOOLEAN DEFAULT NULL;
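Since NULL means "inherit", resolving the effective value for a request walks request → enclosing folders → workspace. A minimal sketch of that precedence in Rust (names and signature are illustrative only; the actual resolution logic in the codebase may differ):

// Illustrative sketch: pick the first explicitly-set value, innermost scope first.
fn resolve_setting<T: Copy>(
    request_value: Option<T>,    // from http_requests.setting_* (NULL = None)
    folder_values: &[Option<T>], // enclosing folders, ordered innermost first
    workspace_value: T,          // workspace always holds a concrete default
) -> T {
    request_value
        .or_else(|| folder_values.iter().copied().flatten().next())
        .unwrap_or(workspace_value)
}

// Example: request unset, nearest folder sets a 30s timeout, workspace default is 60s:
// resolve_setting(None, &[Some(30), None], 60) == 30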

View File

@@ -1,2 +0,0 @@
-- Add DNS resolution timing to http_responses
ALTER TABLE http_responses ADD COLUMN elapsed_dns INTEGER DEFAULT 0 NOT NULL;

View File

@@ -1,2 +0,0 @@
-- Add DNS overrides setting to workspaces
ALTER TABLE workspaces ADD COLUMN setting_dns_overrides TEXT DEFAULT '[]' NOT NULL;

View File

@@ -1,12 +0,0 @@
-- Filter out headers that match the hardcoded defaults (User-Agent: yaak, Accept: */*),
-- keeping any other custom headers the user may have added.
UPDATE workspaces
SET headers = (
    SELECT json_group_array(json(value))
    FROM json_each(headers)
    WHERE NOT (
        (LOWER(json_extract(value, '$.name')) = 'user-agent' AND json_extract(value, '$.value') = 'yaak')
        OR (LOWER(json_extract(value, '$.name')) = 'accept' AND json_extract(value, '$.value') = '*/*')
    )
)
WHERE json_array_length(headers) > 0;

Some files were not shown because too many files have changed in this diff.