mirror of
https://github.com/mountain-loop/yaak.git
synced 2026-02-22 23:57:57 +01:00
Compare commits
73 Commits
actions-sy
...
cli-improv
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0702864a11 | ||
|
|
487e66faa4 | ||
|
|
f71a3ea8fe | ||
|
|
39fc9e81cd | ||
|
|
a4f96fca11 | ||
|
|
1f588d0498 | ||
|
|
4573edc1e1 | ||
|
|
5a184c1b83 | ||
|
|
7b73401dcf | ||
|
|
8571440d84 | ||
|
|
bc37a5d666 | ||
|
|
a80f2ccf9a | ||
|
|
1eaf276b75 | ||
|
|
e9559dfdfa | ||
|
|
4c2e7b8609 | ||
|
|
e638cecf07 | ||
|
|
076058da4f | ||
|
|
f1bc4aa146 | ||
|
|
773c4a24a5 | ||
|
|
6cc659e5c4 | ||
|
|
e1580210dc | ||
|
|
0a4ffde319 | ||
|
|
cc4d598af3 | ||
|
|
f5d11cb6d3 | ||
|
|
65e91aec6b | ||
|
|
ae943a5fd2 | ||
|
|
9e1a11de0b | ||
|
|
52732e12ec | ||
|
|
1127d7e3fa | ||
|
|
7d4d228236 | ||
|
|
565e053ee8 | ||
|
|
26aba6034f | ||
|
|
9a1d613034 | ||
|
|
3e4de7d3c4 | ||
|
|
b64b5ec0f8 | ||
|
|
510d1c7d17 | ||
|
|
ed13a62269 | ||
|
|
935d613959 | ||
|
|
adeaaccc45 | ||
|
|
d253093333 | ||
|
|
f265b7a572 | ||
|
|
68b2ff016f | ||
|
|
a1c6295810 | ||
|
|
76ee3fa61b | ||
|
|
7fef35ce0a | ||
|
|
654af09951 | ||
|
|
484dcfade0 | ||
|
|
fda18c5434 | ||
|
|
a8176d6e9e | ||
|
|
957d8d9d46 | ||
|
|
5f18bf25e2 | ||
|
|
66942eaf2c | ||
|
|
38796b1833 | ||
|
|
49ffa6fc45 | ||
|
|
1f56ba2eb6 | ||
|
|
f98a70ecb4 | ||
|
|
2984eb40c9 | ||
|
|
cc5d4742f0 | ||
|
|
5b8e4b98a0 | ||
|
|
8637c90a21 | ||
|
|
b88c5e71a0 | ||
|
|
1899d512ab | ||
|
|
7c31718f5e | ||
|
|
8f1463e5d0 | ||
|
|
0dc8807808 | ||
|
|
f24a159b8a | ||
|
|
0b91d3aaff | ||
|
|
431dc1c896 | ||
|
|
bc8277b56b | ||
|
|
0afed185d9 | ||
|
|
55cee00601 | ||
|
|
b41a8e04cb | ||
|
|
eff4519d91 |
@@ -1,35 +1,46 @@
|
|||||||
---
|
---
|
||||||
description: Review a PR in a new worktree
|
description: Review a PR in a new worktree
|
||||||
allowed-tools: Bash(git worktree:*), Bash(gh pr:*)
|
allowed-tools: Bash(git worktree:*), Bash(gh pr:*), Bash(git branch:*)
|
||||||
---
|
---
|
||||||
|
|
||||||
Review a GitHub pull request in a new git worktree.
|
Check out a GitHub pull request for review.
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
```
|
```
|
||||||
/review-pr <PR_NUMBER>
|
/check-out-pr <PR_NUMBER>
|
||||||
```
|
```
|
||||||
|
|
||||||
## What to do
|
## What to do
|
||||||
|
|
||||||
1. List all open pull requests and ask the user to select one
|
1. If no PR number is provided, list all open pull requests and ask the user to select one
|
||||||
2. Get PR information using `gh pr view <PR_NUMBER> --json number,headRefName`
|
2. Get PR information using `gh pr view <PR_NUMBER> --json number,headRefName`
|
||||||
3. Extract the branch name from the PR
|
3. **Ask the user** whether they want to:
|
||||||
4. Create a new worktree at `../yaak-worktrees/pr-<PR_NUMBER>` using `git worktree add` with a timeout of at least 300000ms (5 minutes) since the post-checkout hook runs a bootstrap script
|
- **A) Check out in current directory** — simple `gh pr checkout <PR_NUMBER>`
|
||||||
5. Checkout the PR branch in the new worktree using `gh pr checkout <PR_NUMBER>`
|
- **B) Create a new worktree** — isolated copy at `../yaak-worktrees/pr-<PR_NUMBER>`
|
||||||
6. The post-checkout hook will automatically:
|
4. Follow the appropriate path below
|
||||||
|
|
||||||
|
## Option A: Check out in current directory
|
||||||
|
|
||||||
|
1. Run `gh pr checkout <PR_NUMBER>`
|
||||||
|
2. Inform the user which branch they're now on
|
||||||
|
|
||||||
|
## Option B: Create a new worktree
|
||||||
|
|
||||||
|
1. Create a new worktree at `../yaak-worktrees/pr-<PR_NUMBER>` using `git worktree add` with a timeout of at least 300000ms (5 minutes) since the post-checkout hook runs a bootstrap script
|
||||||
|
2. Checkout the PR branch in the new worktree using `gh pr checkout <PR_NUMBER>`
|
||||||
|
3. The post-checkout hook will automatically:
|
||||||
- Create `.env.local` with unique ports
|
- Create `.env.local` with unique ports
|
||||||
- Copy editor config folders
|
- Copy editor config folders
|
||||||
- Run `npm install && npm run bootstrap`
|
- Run `npm install && npm run bootstrap`
|
||||||
7. Inform the user:
|
4. Inform the user:
|
||||||
- Where the worktree was created
|
- Where the worktree was created
|
||||||
- What ports were assigned
|
- What ports were assigned
|
||||||
- How to access it (cd command)
|
- How to access it (cd command)
|
||||||
- How to run the dev server
|
- How to run the dev server
|
||||||
- How to remove the worktree when done
|
- How to remove the worktree when done
|
||||||
|
|
||||||
## Example Output
|
### Example worktree output
|
||||||
|
|
||||||
```
|
```
|
||||||
Created worktree for PR #123 at ../yaak-worktrees/pr-123
|
Created worktree for PR #123 at ../yaak-worktrees/pr-123
|
||||||
|
|||||||
@@ -43,5 +43,7 @@ The skill generates markdown-formatted release notes following this structure:
|
|||||||
After outputting the release notes, ask the user if they would like to create a draft GitHub release with these notes. If they confirm, create the release using:
|
After outputting the release notes, ask the user if they would like to create a draft GitHub release with these notes. If they confirm, create the release using:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gh release create <tag> --draft --prerelease --title "<tag>" --notes '<release notes>'
|
gh release create <tag> --draft --prerelease --title "Release <version>" --notes '<release notes>'
|
||||||
```
|
```
|
||||||
|
|
||||||
|
**IMPORTANT**: The release title format is "Release XXXX" where XXXX is the version WITHOUT the `v` prefix. For example, tag `v2026.2.1-beta.1` gets title "Release 2026.2.1-beta.1".
|
||||||
|
|||||||
46
.codex/skills/release-check-out-pr/SKILL.md
Normal file
46
.codex/skills/release-check-out-pr/SKILL.md
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
---
|
||||||
|
name: release-check-out-pr
|
||||||
|
description: Check out a GitHub pull request for review in this repo, either in the current directory or in a new isolated worktree at ../yaak-worktrees/pr-<PR_NUMBER>. Use when asked to run or replace the old Claude check-out-pr command.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Check Out PR
|
||||||
|
|
||||||
|
Check out a PR by number and let the user choose between current-directory checkout and isolated worktree checkout.
|
||||||
|
|
||||||
|
## Workflow
|
||||||
|
|
||||||
|
1. Confirm `gh` CLI is available.
|
||||||
|
2. If no PR number is provided, list open PRs (`gh pr list`) and ask the user to choose one.
|
||||||
|
3. Read PR metadata:
|
||||||
|
- `gh pr view <PR_NUMBER> --json number,headRefName`
|
||||||
|
4. Ask the user to choose:
|
||||||
|
- Option A: check out in the current directory
|
||||||
|
- Option B: create a new worktree at `../yaak-worktrees/pr-<PR_NUMBER>`
|
||||||
|
|
||||||
|
## Option A: Current Directory
|
||||||
|
|
||||||
|
1. Run:
|
||||||
|
- `gh pr checkout <PR_NUMBER>`
|
||||||
|
2. Report the checked-out branch.
|
||||||
|
|
||||||
|
## Option B: New Worktree
|
||||||
|
|
||||||
|
1. Use path:
|
||||||
|
- `../yaak-worktrees/pr-<PR_NUMBER>`
|
||||||
|
2. Create the worktree with a timeout of at least 5 minutes because checkout hooks run bootstrap.
|
||||||
|
3. In the new worktree, run:
|
||||||
|
- `gh pr checkout <PR_NUMBER>`
|
||||||
|
4. Report:
|
||||||
|
- Worktree path
|
||||||
|
- Assigned ports from `.env.local` if present
|
||||||
|
- How to start work:
|
||||||
|
- `cd ../yaak-worktrees/pr-<PR_NUMBER>`
|
||||||
|
- `npm run app-dev`
|
||||||
|
- How to remove when done:
|
||||||
|
- `git worktree remove ../yaak-worktrees/pr-<PR_NUMBER>`
|
||||||
|
|
||||||
|
## Error Handling
|
||||||
|
|
||||||
|
- If PR does not exist, show a clear error.
|
||||||
|
- If worktree already exists, ask whether to reuse it or remove/recreate it.
|
||||||
|
- If `gh` is missing, instruct the user to install/authenticate it.
|
||||||
48
.codex/skills/release-generate-release-notes/SKILL.md
Normal file
48
.codex/skills/release-generate-release-notes/SKILL.md
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
---
|
||||||
|
name: release-generate-release-notes
|
||||||
|
description: Generate Yaak release notes from git history and PR metadata, including feedback links and full changelog compare links. Use when asked to run or replace the old Claude generate-release-notes command.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Generate Release Notes
|
||||||
|
|
||||||
|
Generate formatted markdown release notes for a Yaak tag.
|
||||||
|
|
||||||
|
## Workflow
|
||||||
|
|
||||||
|
1. Determine target tag.
|
||||||
|
2. Determine previous comparable tag:
|
||||||
|
- Beta tag: compare against previous beta (if the root version is the same) or stable tag.
|
||||||
|
- Stable tag: compare against previous stable tag.
|
||||||
|
3. Collect commits in range:
|
||||||
|
- `git log --oneline <prev_tag>..<target_tag>`
|
||||||
|
4. For linked PRs, fetch metadata:
|
||||||
|
- `gh pr view <PR_NUMBER> --json number,title,body,author,url`
|
||||||
|
5. Extract useful details:
|
||||||
|
- Feedback URLs (`feedback.yaak.app`)
|
||||||
|
- Plugin install links or other notable context
|
||||||
|
6. Format notes using Yaak style:
|
||||||
|
- Changelog badge at top
|
||||||
|
- Bulleted items with PR links where available
|
||||||
|
- Feedback links where available
|
||||||
|
- Full changelog compare link at bottom
|
||||||
|
|
||||||
|
## Formatting Rules
|
||||||
|
|
||||||
|
- Wrap final notes in a markdown code fence.
|
||||||
|
- Keep a blank line before and after the code fence.
|
||||||
|
- Output the markdown code block last.
|
||||||
|
- Do not append `by @gschier` for PRs authored by `@gschier`.
|
||||||
|
|
||||||
|
## Release Creation Prompt
|
||||||
|
|
||||||
|
After producing notes, ask whether to create a draft GitHub release.
|
||||||
|
|
||||||
|
If confirmed and release does not yet exist, run:
|
||||||
|
|
||||||
|
`gh release create <tag> --draft --prerelease --title "Release <version_without_v>" --notes '<release notes>'`
|
||||||
|
|
||||||
|
If a draft release for the tag already exists, update it instead:
|
||||||
|
|
||||||
|
`gh release edit <tag> --title "Release <version_without_v>" --notes-file <path_to_notes>`
|
||||||
|
|
||||||
|
Use title format `Release <version_without_v>`, e.g. `v2026.2.1-beta.1` -> `Release 2026.2.1-beta.1`.
|
||||||
37
.codex/skills/worktree-management/SKILL.md
Normal file
37
.codex/skills/worktree-management/SKILL.md
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
---
|
||||||
|
name: worktree-management
|
||||||
|
description: Manage Yaak git worktrees using the standard ../yaak-worktrees/<NAME> layout, including creation, removal, and expected automatic setup behavior and port assignments.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Worktree Management
|
||||||
|
|
||||||
|
Use the Yaak-standard worktree path layout and lifecycle commands.
|
||||||
|
|
||||||
|
## Path Convention
|
||||||
|
|
||||||
|
Always create worktrees under:
|
||||||
|
|
||||||
|
`../yaak-worktrees/<NAME>`
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
- `git worktree add ../yaak-worktrees/feature-auth`
|
||||||
|
- `git worktree add ../yaak-worktrees/bugfix-login`
|
||||||
|
- `git worktree add ../yaak-worktrees/refactor-api`
|
||||||
|
|
||||||
|
## Automatic Setup After Checkout
|
||||||
|
|
||||||
|
Project git hooks automatically:
|
||||||
|
1. Create `.env.local` with unique `YAAK_DEV_PORT` and `YAAK_PLUGIN_MCP_SERVER_PORT`
|
||||||
|
2. Copy gitignored editor config folders
|
||||||
|
3. Run `npm install && npm run bootstrap`
|
||||||
|
|
||||||
|
## Remove Worktree
|
||||||
|
|
||||||
|
`git worktree remove ../yaak-worktrees/<NAME>`
|
||||||
|
|
||||||
|
## Port Pattern
|
||||||
|
|
||||||
|
- Main worktree: Vite `1420`, MCP `64343`
|
||||||
|
- First extra worktree: `1421`, `64344`
|
||||||
|
- Second extra worktree: `1422`, `64345`
|
||||||
|
- Continue incrementally for additional worktrees
|
||||||
18
.github/pull_request_template.md
vendored
Normal file
18
.github/pull_request_template.md
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
## Summary
|
||||||
|
|
||||||
|
<!-- Describe the bug and the fix in 1-3 sentences. -->
|
||||||
|
|
||||||
|
## Submission
|
||||||
|
|
||||||
|
- [ ] This PR is a bug fix or small-scope improvement.
|
||||||
|
- [ ] If this PR is not a bug fix or small-scope improvement, I linked an approved feedback item below.
|
||||||
|
- [ ] I have read and followed [`CONTRIBUTING.md`](CONTRIBUTING.md).
|
||||||
|
- [ ] I tested this change locally.
|
||||||
|
- [ ] I added or updated tests when reasonable.
|
||||||
|
|
||||||
|
Approved feedback item (required if not a bug fix or small-scope improvement):
|
||||||
|
<!-- https://yaak.app/feedback/... -->
|
||||||
|
|
||||||
|
## Related
|
||||||
|
|
||||||
|
<!-- Link related issues, discussions, or feedback items. -->
|
||||||
52
.github/workflows/flathub.yml
vendored
Normal file
52
.github/workflows/flathub.yml
vendored
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
name: Update Flathub
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [published]
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
update-flathub:
|
||||||
|
name: Update Flathub manifest
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
# Only run for stable releases (skip betas/pre-releases)
|
||||||
|
if: ${{ !github.event.release.prerelease }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout app repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Checkout Flathub repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
repository: flathub/app.yaak.Yaak
|
||||||
|
token: ${{ secrets.FLATHUB_TOKEN }}
|
||||||
|
path: flathub-repo
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "3.12"
|
||||||
|
|
||||||
|
- name: Set up Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: "22"
|
||||||
|
|
||||||
|
- name: Install source generators
|
||||||
|
run: |
|
||||||
|
pip install flatpak-node-generator tomlkit aiohttp
|
||||||
|
git clone --depth 1 https://github.com/flatpak/flatpak-builder-tools flatpak/flatpak-builder-tools
|
||||||
|
|
||||||
|
- name: Run update-manifest.sh
|
||||||
|
run: bash flatpak/update-manifest.sh "${{ github.event.release.tag_name }}" flathub-repo
|
||||||
|
|
||||||
|
- name: Commit and push to Flathub
|
||||||
|
working-directory: flathub-repo
|
||||||
|
run: |
|
||||||
|
git config user.name "github-actions[bot]"
|
||||||
|
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||||
|
git add -A
|
||||||
|
git diff --cached --quiet && echo "No changes to commit" && exit 0
|
||||||
|
git commit -m "Update to ${{ github.event.release.tag_name }}"
|
||||||
|
git push
|
||||||
24
.github/workflows/release.yml
vendored
24
.github/workflows/release.yml
vendored
@@ -153,3 +153,27 @@ jobs:
|
|||||||
releaseDraft: true
|
releaseDraft: true
|
||||||
prerelease: true
|
prerelease: true
|
||||||
args: "${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json"
|
args: "${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json"
|
||||||
|
|
||||||
|
# Build a per-machine NSIS installer for enterprise deployment (PDQ, SCCM, Intune)
|
||||||
|
- name: Build and upload machine-wide installer (Windows only)
|
||||||
|
if: matrix.os == 'windows'
|
||||||
|
shell: pwsh
|
||||||
|
env:
|
||||||
|
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
|
||||||
|
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
|
||||||
|
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
|
||||||
|
run: |
|
||||||
|
Get-ChildItem -Recurse -Path target -File -Filter "*.exe.sig" | Remove-Item -Force
|
||||||
|
npx tauri bundle ${{ matrix.args }} --bundles nsis --config ./crates-tauri/yaak-app/tauri.release.conf.json --config '{"bundle":{"createUpdaterArtifacts":true,"windows":{"nsis":{"installMode":"perMachine"}}}}'
|
||||||
|
$setup = Get-ChildItem -Recurse -Path target -Filter "*setup*.exe" | Select-Object -First 1
|
||||||
|
$setupSig = "$($setup.FullName).sig"
|
||||||
|
$dest = $setup.FullName -replace '-setup\.exe$', '-setup-machine.exe'
|
||||||
|
$destSig = "$dest.sig"
|
||||||
|
Copy-Item $setup.FullName $dest
|
||||||
|
Copy-Item $setupSig $destSig
|
||||||
|
gh release upload "${{ github.ref_name }}" "$dest" --clobber
|
||||||
|
gh release upload "${{ github.ref_name }}" "$destSig" --clobber
|
||||||
|
|||||||
10
.gitignore
vendored
10
.gitignore
vendored
@@ -44,3 +44,13 @@ crates-tauri/yaak-app/tauri.worktree.conf.json
|
|||||||
# Tauri auto-generated permission files
|
# Tauri auto-generated permission files
|
||||||
**/permissions/autogenerated
|
**/permissions/autogenerated
|
||||||
**/permissions/schemas
|
**/permissions/schemas
|
||||||
|
|
||||||
|
# Flatpak build artifacts
|
||||||
|
flatpak-repo/
|
||||||
|
.flatpak-builder/
|
||||||
|
flatpak/flatpak-builder-tools/
|
||||||
|
flatpak/cargo-sources.json
|
||||||
|
flatpak/node-sources.json
|
||||||
|
|
||||||
|
# Local Codex desktop env state
|
||||||
|
.codex/environments/environment.toml
|
||||||
|
|||||||
16
CONTRIBUTING.md
Normal file
16
CONTRIBUTING.md
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
# Contributing to Yaak
|
||||||
|
|
||||||
|
Yaak accepts community pull requests for:
|
||||||
|
|
||||||
|
- Bug fixes
|
||||||
|
- Small-scope improvements directly tied to existing behavior
|
||||||
|
|
||||||
|
Pull requests that introduce broad new features, major redesigns, or large refactors are out of scope unless explicitly approved first.
|
||||||
|
|
||||||
|
## Approval for Non-Bugfix Changes
|
||||||
|
|
||||||
|
If your PR is not a bug fix or small-scope improvement, include a link to the approved [feedback item](https://yaak.app/feedback) where contribution approval was explicitly stated.
|
||||||
|
|
||||||
|
## Development Setup
|
||||||
|
|
||||||
|
For local setup and development workflows, see [`DEVELOPMENT.md`](DEVELOPMENT.md).
|
||||||
499
Cargo.lock
generated
499
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,9 +1,8 @@
|
|||||||
[workspace]
|
[workspace]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
members = [
|
members = [
|
||||||
|
"crates/yaak",
|
||||||
# Shared crates (no Tauri dependency)
|
# Shared crates (no Tauri dependency)
|
||||||
"crates/yaak-actions",
|
|
||||||
"crates/yaak-actions-builtin",
|
|
||||||
"crates/yaak-core",
|
"crates/yaak-core",
|
||||||
"crates/yaak-common",
|
"crates/yaak-common",
|
||||||
"crates/yaak-crypto",
|
"crates/yaak-crypto",
|
||||||
@@ -17,6 +16,7 @@ members = [
|
|||||||
"crates/yaak-templates",
|
"crates/yaak-templates",
|
||||||
"crates/yaak-tls",
|
"crates/yaak-tls",
|
||||||
"crates/yaak-ws",
|
"crates/yaak-ws",
|
||||||
|
"crates/yaak-api",
|
||||||
# CLI crates
|
# CLI crates
|
||||||
"crates-cli/yaak-cli",
|
"crates-cli/yaak-cli",
|
||||||
# Tauri-specific crates
|
# Tauri-specific crates
|
||||||
@@ -35,6 +35,7 @@ log = "0.4.29"
|
|||||||
reqwest = "0.12.20"
|
reqwest = "0.12.20"
|
||||||
rustls = { version = "0.23.34", default-features = false }
|
rustls = { version = "0.23.34", default-features = false }
|
||||||
rustls-platform-verifier = "0.6.2"
|
rustls-platform-verifier = "0.6.2"
|
||||||
|
schemars = { version = "0.8.22", features = ["chrono"] }
|
||||||
serde = "1.0.228"
|
serde = "1.0.228"
|
||||||
serde_json = "1.0.145"
|
serde_json = "1.0.145"
|
||||||
sha2 = "0.10.9"
|
sha2 = "0.10.9"
|
||||||
@@ -47,9 +48,8 @@ tokio = "1.48.0"
|
|||||||
ts-rs = "11.1.0"
|
ts-rs = "11.1.0"
|
||||||
|
|
||||||
# Internal crates - shared
|
# Internal crates - shared
|
||||||
yaak-actions = { path = "crates/yaak-actions" }
|
|
||||||
yaak-actions-builtin = { path = "crates/yaak-actions-builtin" }
|
|
||||||
yaak-core = { path = "crates/yaak-core" }
|
yaak-core = { path = "crates/yaak-core" }
|
||||||
|
yaak = { path = "crates/yaak" }
|
||||||
yaak-common = { path = "crates/yaak-common" }
|
yaak-common = { path = "crates/yaak-common" }
|
||||||
yaak-crypto = { path = "crates/yaak-crypto" }
|
yaak-crypto = { path = "crates/yaak-crypto" }
|
||||||
yaak-git = { path = "crates/yaak-git" }
|
yaak-git = { path = "crates/yaak-git" }
|
||||||
@@ -62,6 +62,7 @@ yaak-sync = { path = "crates/yaak-sync" }
|
|||||||
yaak-templates = { path = "crates/yaak-templates" }
|
yaak-templates = { path = "crates/yaak-templates" }
|
||||||
yaak-tls = { path = "crates/yaak-tls" }
|
yaak-tls = { path = "crates/yaak-tls" }
|
||||||
yaak-ws = { path = "crates/yaak-ws" }
|
yaak-ws = { path = "crates/yaak-ws" }
|
||||||
|
yaak-api = { path = "crates/yaak-api" }
|
||||||
|
|
||||||
# Internal crates - Tauri-specific
|
# Internal crates - Tauri-specific
|
||||||
yaak-fonts = { path = "crates-tauri/yaak-fonts" }
|
yaak-fonts = { path = "crates-tauri/yaak-fonts" }
|
||||||
|
|||||||
@@ -22,7 +22,7 @@
|
|||||||
<!-- sponsors-premium --><a href="https://github.com/MVST-Solutions"><img src="https://github.com/MVST-Solutions.png" width="80px" alt="User avatar: MVST-Solutions" /></a> <a href="https://github.com/dharsanb"><img src="https://github.com/dharsanb.png" width="80px" alt="User avatar: dharsanb" /></a> <a href="https://github.com/railwayapp"><img src="https://github.com/railwayapp.png" width="80px" alt="User avatar: railwayapp" /></a> <a href="https://github.com/caseyamcl"><img src="https://github.com/caseyamcl.png" width="80px" alt="User avatar: caseyamcl" /></a> <a href="https://github.com/bytebase"><img src="https://github.com/bytebase.png" width="80px" alt="User avatar: bytebase" /></a> <a href="https://github.com/"><img src="https://raw.githubusercontent.com/JamesIves/github-sponsors-readme-action/dev/.github/assets/placeholder.png" width="80px" alt="User avatar: " /></a> <!-- sponsors-premium -->
|
<!-- sponsors-premium --><a href="https://github.com/MVST-Solutions"><img src="https://github.com/MVST-Solutions.png" width="80px" alt="User avatar: MVST-Solutions" /></a> <a href="https://github.com/dharsanb"><img src="https://github.com/dharsanb.png" width="80px" alt="User avatar: dharsanb" /></a> <a href="https://github.com/railwayapp"><img src="https://github.com/railwayapp.png" width="80px" alt="User avatar: railwayapp" /></a> <a href="https://github.com/caseyamcl"><img src="https://github.com/caseyamcl.png" width="80px" alt="User avatar: caseyamcl" /></a> <a href="https://github.com/bytebase"><img src="https://github.com/bytebase.png" width="80px" alt="User avatar: bytebase" /></a> <a href="https://github.com/"><img src="https://raw.githubusercontent.com/JamesIves/github-sponsors-readme-action/dev/.github/assets/placeholder.png" width="80px" alt="User avatar: " /></a> <!-- sponsors-premium -->
|
||||||
</p>
|
</p>
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<!-- sponsors-base --><a href="https://github.com/seanwash"><img src="https://github.com/seanwash.png" width="50px" alt="User avatar: seanwash" /></a> <a href="https://github.com/jerath"><img src="https://github.com/jerath.png" width="50px" alt="User avatar: jerath" /></a> <a href="https://github.com/itsa-sh"><img src="https://github.com/itsa-sh.png" width="50px" alt="User avatar: itsa-sh" /></a> <a href="https://github.com/dmmulroy"><img src="https://github.com/dmmulroy.png" width="50px" alt="User avatar: dmmulroy" /></a> <a href="https://github.com/timcole"><img src="https://github.com/timcole.png" width="50px" alt="User avatar: timcole" /></a> <a href="https://github.com/VLZH"><img src="https://github.com/VLZH.png" width="50px" alt="User avatar: VLZH" /></a> <a href="https://github.com/terasaka2k"><img src="https://github.com/terasaka2k.png" width="50px" alt="User avatar: terasaka2k" /></a> <a href="https://github.com/andriyor"><img src="https://github.com/andriyor.png" width="50px" alt="User avatar: andriyor" /></a> <a href="https://github.com/majudhu"><img src="https://github.com/majudhu.png" width="50px" alt="User avatar: majudhu" /></a> <a href="https://github.com/axelrindle"><img src="https://github.com/axelrindle.png" width="50px" alt="User avatar: axelrindle" /></a> <a href="https://github.com/jirizverina"><img src="https://github.com/jirizverina.png" width="50px" alt="User avatar: jirizverina" /></a> <a href="https://github.com/chip-well"><img src="https://github.com/chip-well.png" width="50px" alt="User avatar: chip-well" /></a> <a href="https://github.com/GRAYAH"><img src="https://github.com/GRAYAH.png" width="50px" alt="User avatar: GRAYAH" /></a> <!-- sponsors-base -->
|
<!-- sponsors-base --><a href="https://github.com/seanwash"><img src="https://github.com/seanwash.png" width="50px" alt="User avatar: seanwash" /></a> <a href="https://github.com/jerath"><img src="https://github.com/jerath.png" width="50px" alt="User avatar: jerath" /></a> <a href="https://github.com/itsa-sh"><img src="https://github.com/itsa-sh.png" width="50px" alt="User avatar: itsa-sh" /></a> <a href="https://github.com/dmmulroy"><img src="https://github.com/dmmulroy.png" width="50px" alt="User avatar: dmmulroy" /></a> <a href="https://github.com/timcole"><img src="https://github.com/timcole.png" width="50px" alt="User avatar: timcole" /></a> <a href="https://github.com/VLZH"><img src="https://github.com/VLZH.png" width="50px" alt="User avatar: VLZH" /></a> <a href="https://github.com/terasaka2k"><img src="https://github.com/terasaka2k.png" width="50px" alt="User avatar: terasaka2k" /></a> <a href="https://github.com/andriyor"><img src="https://github.com/andriyor.png" width="50px" alt="User avatar: andriyor" /></a> <a href="https://github.com/majudhu"><img src="https://github.com/majudhu.png" width="50px" alt="User avatar: majudhu" /></a> <a href="https://github.com/axelrindle"><img src="https://github.com/axelrindle.png" width="50px" alt="User avatar: axelrindle" /></a> <a href="https://github.com/jirizverina"><img src="https://github.com/jirizverina.png" width="50px" alt="User avatar: jirizverina" /></a> <a href="https://github.com/chip-well"><img src="https://github.com/chip-well.png" width="50px" alt="User avatar: chip-well" /></a> <a href="https://github.com/GRAYAH"><img src="https://github.com/GRAYAH.png" width="50px" alt="User avatar: GRAYAH" /></a> <a href="https://github.com/flashblaze"><img src="https://github.com/flashblaze.png" width="50px" alt="User avatar: flashblaze" /></a> <!-- sponsors-base -->
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||

|

|
||||||
@@ -58,8 +58,10 @@ Built with [Tauri](https://tauri.app), Rust, and React, it’s fast, lightweight
|
|||||||
|
|
||||||
## Contribution Policy
|
## Contribution Policy
|
||||||
|
|
||||||
Yaak is open source but only accepting contributions for bug fixes. To get started,
|
> [!IMPORTANT]
|
||||||
visit [`DEVELOPMENT.md`](DEVELOPMENT.md) for tips on setting up your environment.
|
> Community PRs are currently limited to bug fixes and small-scope improvements.
|
||||||
|
> If your PR is out of scope, link an approved feedback item from [yaak.app/feedback](https://yaak.app/feedback).
|
||||||
|
> See [`CONTRIBUTING.md`](CONTRIBUTING.md) for policy details and [`DEVELOPMENT.md`](DEVELOPMENT.md) for local setup.
|
||||||
|
|
||||||
## Useful Resources
|
## Useful Resources
|
||||||
|
|
||||||
|
|||||||
@@ -47,7 +47,8 @@
|
|||||||
"!src-web/vite.config.ts",
|
"!src-web/vite.config.ts",
|
||||||
"!src-web/routeTree.gen.ts",
|
"!src-web/routeTree.gen.ts",
|
||||||
"!packages/plugin-runtime-types/lib",
|
"!packages/plugin-runtime-types/lib",
|
||||||
"!**/bindings"
|
"!**/bindings",
|
||||||
|
"!flatpak"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,13 +12,20 @@ path = "src/main.rs"
|
|||||||
clap = { version = "4", features = ["derive"] }
|
clap = { version = "4", features = ["derive"] }
|
||||||
dirs = "6"
|
dirs = "6"
|
||||||
env_logger = "0.11"
|
env_logger = "0.11"
|
||||||
|
futures = "0.3"
|
||||||
log = { workspace = true }
|
log = { workspace = true }
|
||||||
|
schemars = { workspace = true }
|
||||||
|
serde = { workspace = true }
|
||||||
serde_json = { workspace = true }
|
serde_json = { workspace = true }
|
||||||
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
|
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
|
||||||
yaak-actions = { workspace = true }
|
yaak = { workspace = true }
|
||||||
yaak-actions-builtin = { workspace = true }
|
|
||||||
yaak-crypto = { workspace = true }
|
yaak-crypto = { workspace = true }
|
||||||
yaak-http = { workspace = true }
|
yaak-http = { workspace = true }
|
||||||
yaak-models = { workspace = true }
|
yaak-models = { workspace = true }
|
||||||
yaak-plugins = { workspace = true }
|
yaak-plugins = { workspace = true }
|
||||||
yaak-templates = { workspace = true }
|
yaak-templates = { workspace = true }
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
assert_cmd = "2"
|
||||||
|
predicates = "3"
|
||||||
|
tempfile = "3"
|
||||||
|
|||||||
87
crates-cli/yaak-cli/README.md
Normal file
87
crates-cli/yaak-cli/README.md
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
# yaak-cli
|
||||||
|
|
||||||
|
Command-line interface for Yaak.
|
||||||
|
|
||||||
|
## Command Overview
|
||||||
|
|
||||||
|
Current top-level commands:
|
||||||
|
|
||||||
|
```text
|
||||||
|
yaakcli send <request_id>
|
||||||
|
yaakcli workspace list
|
||||||
|
yaakcli workspace show <workspace_id>
|
||||||
|
yaakcli workspace create --name <name>
|
||||||
|
yaakcli workspace create --json '{"name":"My Workspace"}'
|
||||||
|
yaakcli workspace create '{"name":"My Workspace"}'
|
||||||
|
yaakcli workspace update --json '{"id":"wk_abc","description":"Updated"}'
|
||||||
|
yaakcli workspace delete <workspace_id> [--yes]
|
||||||
|
yaakcli request list <workspace_id>
|
||||||
|
yaakcli request show <request_id>
|
||||||
|
yaakcli request send <request_id>
|
||||||
|
yaakcli request create <workspace_id> --name <name> --url <url> [--method GET]
|
||||||
|
yaakcli request create --json '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
|
||||||
|
yaakcli request create '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
|
||||||
|
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
|
||||||
|
yaakcli request delete <request_id> [--yes]
|
||||||
|
yaakcli folder list <workspace_id>
|
||||||
|
yaakcli folder show <folder_id>
|
||||||
|
yaakcli folder create <workspace_id> --name <name>
|
||||||
|
yaakcli folder create --json '{"workspaceId":"wk_abc","name":"Auth"}'
|
||||||
|
yaakcli folder create '{"workspaceId":"wk_abc","name":"Auth"}'
|
||||||
|
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
|
||||||
|
yaakcli folder delete <folder_id> [--yes]
|
||||||
|
yaakcli environment list <workspace_id>
|
||||||
|
yaakcli environment show <environment_id>
|
||||||
|
yaakcli environment create <workspace_id> --name <name>
|
||||||
|
yaakcli environment create --json '{"workspaceId":"wk_abc","name":"Production"}'
|
||||||
|
yaakcli environment create '{"workspaceId":"wk_abc","name":"Production"}'
|
||||||
|
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
|
||||||
|
yaakcli environment delete <environment_id> [--yes]
|
||||||
|
```
|
||||||
|
|
||||||
|
Global options:
|
||||||
|
|
||||||
|
- `--data-dir <path>`: use a custom data directory
|
||||||
|
- `-e, --environment <id>`: environment to use during request rendering/sending
|
||||||
|
- `-v, --verbose`: verbose logging and send output
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
|
||||||
|
- `send` is currently a shortcut for sending an HTTP request ID.
|
||||||
|
- `delete` commands prompt for confirmation unless `--yes` is provided.
|
||||||
|
- In non-interactive mode, `delete` commands require `--yes`.
|
||||||
|
- `create` and `update` commands support `--json` and positional JSON shorthand.
|
||||||
|
- `update` uses JSON Merge Patch semantics (RFC 7386) for partial updates.
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
yaakcli workspace list
|
||||||
|
yaakcli workspace create --name "My Workspace"
|
||||||
|
yaakcli workspace show wk_abc
|
||||||
|
yaakcli workspace update --json '{"id":"wk_abc","description":"Team workspace"}'
|
||||||
|
yaakcli request list wk_abc
|
||||||
|
yaakcli request show rq_abc
|
||||||
|
yaakcli request create wk_abc --name "Users" --url "https://api.example.com/users"
|
||||||
|
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
|
||||||
|
yaakcli request send rq_abc -e ev_abc
|
||||||
|
yaakcli request delete rq_abc --yes
|
||||||
|
yaakcli folder create wk_abc --name "Auth"
|
||||||
|
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
|
||||||
|
yaakcli environment create wk_abc --name "Production"
|
||||||
|
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Roadmap
|
||||||
|
|
||||||
|
Planned command expansion (request schema and polymorphic send) is tracked in `PLAN.md`.
|
||||||
|
|
||||||
|
When command behavior changes, update this README and verify with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cargo run -q -p yaak-cli -- --help
|
||||||
|
cargo run -q -p yaak-cli -- request --help
|
||||||
|
cargo run -q -p yaak-cli -- workspace --help
|
||||||
|
cargo run -q -p yaak-cli -- folder --help
|
||||||
|
cargo run -q -p yaak-cli -- environment --help
|
||||||
|
```
|
||||||
307
crates-cli/yaak-cli/src/cli.rs
Normal file
307
crates-cli/yaak-cli/src/cli.rs
Normal file
@@ -0,0 +1,307 @@
|
|||||||
|
use clap::{Args, Parser, Subcommand, ValueEnum};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
// Top-level argument parser for the `yaakcli` binary.
// NOTE: the `///` doc comments below double as clap help text, so they are
// part of the CLI's runtime output; plain `//` comments are used for notes.
#[derive(Parser)]
#[command(name = "yaakcli")]
#[command(about = "Yaak CLI - API client from the command line")]
pub struct Cli {
    /// Use a custom data directory
    #[arg(long, global = true)]
    pub data_dir: Option<PathBuf>,

    /// Environment ID to use for variable substitution
    #[arg(long, short, global = true)]
    pub environment: Option<String>,

    /// Enable verbose logging
    #[arg(long, short, global = true)]
    pub verbose: bool,

    // The three flags above are `global`, so they may be placed before or
    // after any subcommand. A subcommand is required.
    #[command(subcommand)]
    pub command: Commands,
}

// One variant per top-level subcommand; each wraps its own argument struct
// (dispatched by the corresponding module under `commands/`).
#[derive(Subcommand)]
pub enum Commands {
    /// Send a request, folder, or workspace by ID
    Send(SendArgs),

    /// Workspace commands
    Workspace(WorkspaceArgs),

    /// Request commands
    Request(RequestArgs),

    /// Folder commands
    Folder(FolderArgs),

    /// Environment commands
    Environment(EnvironmentArgs),
}
|
||||||
|
|
||||||
|
// Arguments for the top-level `send` command.
// `--sequential` and `--parallel` are mutually exclusive; `--fail-fast` only
// applies to sequential execution, so it also conflicts with `--parallel`.
#[derive(Args)]
pub struct SendArgs {
    /// Request, folder, or workspace ID
    pub id: String,

    /// Execute requests sequentially (default)
    #[arg(long, conflicts_with = "parallel")]
    pub sequential: bool,

    /// Execute requests in parallel
    #[arg(long, conflicts_with = "sequential")]
    pub parallel: bool,

    /// Stop on first request failure when sending folders/workspaces
    #[arg(long, conflicts_with = "parallel")]
    pub fail_fast: bool,
}
|
||||||
|
|
||||||
|
// Wrapper so `workspace` takes its own nested subcommand.
#[derive(Args)]
pub struct WorkspaceArgs {
    #[command(subcommand)]
    pub command: WorkspaceCommands,
}

// Subcommands under `yaakcli workspace`.
// Create/Update accept the payload either via `--json` or as a positional
// shorthand; the two forms are mutually exclusive.
#[derive(Subcommand)]
pub enum WorkspaceCommands {
    /// List all workspaces
    List,

    /// Show a workspace as JSON
    Show {
        /// Workspace ID
        workspace_id: String,
    },

    /// Create a workspace
    Create {
        /// Workspace name
        #[arg(short, long)]
        name: Option<String>,

        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },

    /// Update a workspace
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },

    /// Delete a workspace
    Delete {
        /// Workspace ID
        workspace_id: String,

        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}
|
||||||
|
|
||||||
|
// Wrapper so `request` takes its own nested subcommand.
#[derive(Args)]
pub struct RequestArgs {
    #[command(subcommand)]
    pub command: RequestCommands,
}

// Subcommands under `yaakcli request`.
#[derive(Subcommand)]
pub enum RequestCommands {
    /// List requests in a workspace
    List {
        /// Workspace ID
        workspace_id: String,
    },

    /// Show a request as JSON
    Show {
        /// Request ID
        request_id: String,
    },

    /// Send a request by ID
    Send {
        /// Request ID
        request_id: String,
    },

    /// Output JSON schema for request create/update payloads
    Schema {
        #[arg(value_enum)]
        request_type: RequestSchemaType,
    },

    /// Create a new HTTP request
    Create {
        // The positional value is treated as a JSON payload when it parses as
        // JSON shorthand (see `commands/request.rs::create`).
        /// Workspace ID (or positional JSON payload shorthand)
        workspace_id: Option<String>,

        /// Request name
        #[arg(short, long)]
        name: Option<String>,

        /// HTTP method
        #[arg(short, long)]
        method: Option<String>,

        /// URL
        #[arg(short, long)]
        url: Option<String>,

        /// JSON payload
        #[arg(long)]
        json: Option<String>,
    },

    /// Update an HTTP request
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },

    /// Delete a request
    Delete {
        /// Request ID
        request_id: String,

        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}

// Which request model's JSON schema to emit via `request schema`.
#[derive(Clone, Copy, Debug, ValueEnum)]
pub enum RequestSchemaType {
    Http,
    Grpc,
    Websocket,
}
|
||||||
|
|
||||||
|
// Wrapper so `folder` takes its own nested subcommand.
#[derive(Args)]
pub struct FolderArgs {
    #[command(subcommand)]
    pub command: FolderCommands,
}

// Subcommands under `yaakcli folder`.
#[derive(Subcommand)]
pub enum FolderCommands {
    /// List folders in a workspace
    List {
        /// Workspace ID
        workspace_id: String,
    },

    /// Show a folder as JSON
    Show {
        /// Folder ID
        folder_id: String,
    },

    /// Create a folder
    Create {
        // The positional value doubles as a JSON payload when it looks like
        // JSON shorthand (see `commands/folder.rs::create`).
        /// Workspace ID (or positional JSON payload shorthand)
        workspace_id: Option<String>,

        /// Folder name
        #[arg(short, long)]
        name: Option<String>,

        /// JSON payload
        #[arg(long)]
        json: Option<String>,
    },

    /// Update a folder
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },

    /// Delete a folder
    Delete {
        /// Folder ID
        folder_id: String,

        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}
|
||||||
|
|
||||||
|
// Wrapper so `environment` takes its own nested subcommand.
#[derive(Args)]
pub struct EnvironmentArgs {
    #[command(subcommand)]
    pub command: EnvironmentCommands,
}

// Subcommands under `yaakcli environment`.
#[derive(Subcommand)]
pub enum EnvironmentCommands {
    /// List environments in a workspace
    List {
        /// Workspace ID
        workspace_id: String,
    },

    /// Show an environment as JSON
    Show {
        /// Environment ID
        environment_id: String,
    },

    /// Create an environment
    Create {
        // The positional value doubles as a JSON payload when it looks like
        // JSON shorthand (see `commands/environment.rs::create`).
        /// Workspace ID (or positional JSON payload shorthand)
        workspace_id: Option<String>,

        /// Environment name
        #[arg(short, long)]
        name: Option<String>,

        /// JSON payload
        #[arg(long)]
        json: Option<String>,
    },

    /// Update an environment
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },

    /// Delete an environment
    Delete {
        /// Environment ID
        environment_id: String,

        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}
|
||||||
159
crates-cli/yaak-cli/src/commands/environment.rs
Normal file
159
crates-cli/yaak-cli/src/commands/environment.rs
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
use crate::cli::{EnvironmentArgs, EnvironmentCommands};
|
||||||
|
use crate::context::CliContext;
|
||||||
|
use crate::utils::confirm::confirm_delete;
|
||||||
|
use crate::utils::json::{
|
||||||
|
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
|
||||||
|
validate_create_id,
|
||||||
|
};
|
||||||
|
use yaak_models::models::Environment;
|
||||||
|
use yaak_models::util::UpdateSource;
|
||||||
|
|
||||||
|
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||||
|
|
||||||
|
pub fn run(ctx: &CliContext, args: EnvironmentArgs) -> i32 {
|
||||||
|
let result = match args.command {
|
||||||
|
EnvironmentCommands::List { workspace_id } => list(ctx, &workspace_id),
|
||||||
|
EnvironmentCommands::Show { environment_id } => show(ctx, &environment_id),
|
||||||
|
EnvironmentCommands::Create { workspace_id, name, json } => {
|
||||||
|
create(ctx, workspace_id, name, json)
|
||||||
|
}
|
||||||
|
EnvironmentCommands::Update { json, json_input } => update(ctx, json, json_input),
|
||||||
|
EnvironmentCommands::Delete { environment_id, yes } => delete(ctx, &environment_id, yes),
|
||||||
|
};
|
||||||
|
|
||||||
|
match result {
|
||||||
|
Ok(()) => 0,
|
||||||
|
Err(error) => {
|
||||||
|
eprintln!("Error: {error}");
|
||||||
|
1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Print one line per environment in the workspace: `id - name (parent_model)`.
fn list(ctx: &CliContext, workspace_id: &str) -> CommandResult {
    let environments = ctx
        .db()
        .list_environments_ensure_base(workspace_id)
        .map_err(|e| format!("Failed to list environments: {e}"))?;

    // Empty workspaces get a friendly notice instead of no output at all.
    if environments.is_empty() {
        println!("No environments found in workspace {}", workspace_id);
        return Ok(());
    }

    for environment in environments {
        println!("{} - {} ({})", environment.id, environment.name, environment.parent_model);
    }
    Ok(())
}
|
||||||
|
|
||||||
|
fn show(ctx: &CliContext, environment_id: &str) -> CommandResult {
|
||||||
|
let environment = ctx
|
||||||
|
.db()
|
||||||
|
.get_environment(environment_id)
|
||||||
|
.map_err(|e| format!("Failed to get environment: {e}"))?;
|
||||||
|
let output = serde_json::to_string_pretty(&environment)
|
||||||
|
.map_err(|e| format!("Failed to serialize environment: {e}"))?;
|
||||||
|
println!("{output}");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create(
|
||||||
|
ctx: &CliContext,
|
||||||
|
workspace_id: Option<String>,
|
||||||
|
name: Option<String>,
|
||||||
|
json: Option<String>,
|
||||||
|
) -> CommandResult {
|
||||||
|
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
|
||||||
|
return Err(
|
||||||
|
"environment create cannot combine workspace_id with --json payload".to_string()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let payload = parse_optional_json(
|
||||||
|
json,
|
||||||
|
workspace_id.clone().filter(|v| is_json_shorthand(v)),
|
||||||
|
"environment create",
|
||||||
|
)?;
|
||||||
|
|
||||||
|
if let Some(payload) = payload {
|
||||||
|
if name.is_some() {
|
||||||
|
return Err("environment create cannot combine --name with JSON payload".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_create_id(&payload, "environment")?;
|
||||||
|
let mut environment: Environment = serde_json::from_value(payload)
|
||||||
|
.map_err(|e| format!("Failed to parse environment create JSON: {e}"))?;
|
||||||
|
|
||||||
|
if environment.workspace_id.is_empty() {
|
||||||
|
return Err("environment create JSON requires non-empty \"workspaceId\"".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
if environment.parent_model.is_empty() {
|
||||||
|
environment.parent_model = "environment".to_string();
|
||||||
|
}
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_environment(&environment, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create environment: {e}"))?;
|
||||||
|
|
||||||
|
println!("Created environment: {}", created.id);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let workspace_id = workspace_id.ok_or_else(|| {
|
||||||
|
"environment create requires workspace_id unless JSON payload is provided".to_string()
|
||||||
|
})?;
|
||||||
|
let name = name.ok_or_else(|| {
|
||||||
|
"environment create requires --name unless JSON payload is provided".to_string()
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let environment = Environment {
|
||||||
|
workspace_id,
|
||||||
|
name,
|
||||||
|
parent_model: "environment".to_string(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_environment(&environment, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create environment: {e}"))?;
|
||||||
|
|
||||||
|
println!("Created environment: {}", created.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) -> CommandResult {
|
||||||
|
let patch = parse_required_json(json, json_input, "environment update")?;
|
||||||
|
let id = require_id(&patch, "environment update")?;
|
||||||
|
|
||||||
|
let existing = ctx
|
||||||
|
.db()
|
||||||
|
.get_environment(&id)
|
||||||
|
.map_err(|e| format!("Failed to get environment for update: {e}"))?;
|
||||||
|
let updated = apply_merge_patch(&existing, &patch, &id, "environment update")?;
|
||||||
|
|
||||||
|
let saved = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_environment(&updated, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to update environment: {e}"))?;
|
||||||
|
|
||||||
|
println!("Updated environment: {}", saved.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn delete(ctx: &CliContext, environment_id: &str, yes: bool) -> CommandResult {
|
||||||
|
if !yes && !confirm_delete("environment", environment_id) {
|
||||||
|
println!("Aborted");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let deleted = ctx
|
||||||
|
.db()
|
||||||
|
.delete_environment_by_id(environment_id, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to delete environment: {e}"))?;
|
||||||
|
|
||||||
|
println!("Deleted environment: {}", deleted.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
141
crates-cli/yaak-cli/src/commands/folder.rs
Normal file
141
crates-cli/yaak-cli/src/commands/folder.rs
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
use crate::cli::{FolderArgs, FolderCommands};
|
||||||
|
use crate::context::CliContext;
|
||||||
|
use crate::utils::confirm::confirm_delete;
|
||||||
|
use crate::utils::json::{
|
||||||
|
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
|
||||||
|
validate_create_id,
|
||||||
|
};
|
||||||
|
use yaak_models::models::Folder;
|
||||||
|
use yaak_models::util::UpdateSource;
|
||||||
|
|
||||||
|
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||||
|
|
||||||
|
pub fn run(ctx: &CliContext, args: FolderArgs) -> i32 {
|
||||||
|
let result = match args.command {
|
||||||
|
FolderCommands::List { workspace_id } => list(ctx, &workspace_id),
|
||||||
|
FolderCommands::Show { folder_id } => show(ctx, &folder_id),
|
||||||
|
FolderCommands::Create { workspace_id, name, json } => {
|
||||||
|
create(ctx, workspace_id, name, json)
|
||||||
|
}
|
||||||
|
FolderCommands::Update { json, json_input } => update(ctx, json, json_input),
|
||||||
|
FolderCommands::Delete { folder_id, yes } => delete(ctx, &folder_id, yes),
|
||||||
|
};
|
||||||
|
|
||||||
|
match result {
|
||||||
|
Ok(()) => 0,
|
||||||
|
Err(error) => {
|
||||||
|
eprintln!("Error: {error}");
|
||||||
|
1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Print one line per folder in the workspace: `id - name`.
fn list(ctx: &CliContext, workspace_id: &str) -> CommandResult {
    let folders =
        ctx.db().list_folders(workspace_id).map_err(|e| format!("Failed to list folders: {e}"))?;

    // Empty workspaces get a friendly notice instead of no output at all.
    if folders.is_empty() {
        println!("No folders found in workspace {}", workspace_id);
        return Ok(());
    }

    for folder in folders {
        println!("{} - {}", folder.id, folder.name);
    }
    Ok(())
}
|
||||||
|
|
||||||
|
fn show(ctx: &CliContext, folder_id: &str) -> CommandResult {
|
||||||
|
let folder =
|
||||||
|
ctx.db().get_folder(folder_id).map_err(|e| format!("Failed to get folder: {e}"))?;
|
||||||
|
let output = serde_json::to_string_pretty(&folder)
|
||||||
|
.map_err(|e| format!("Failed to serialize folder: {e}"))?;
|
||||||
|
println!("{output}");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create(
|
||||||
|
ctx: &CliContext,
|
||||||
|
workspace_id: Option<String>,
|
||||||
|
name: Option<String>,
|
||||||
|
json: Option<String>,
|
||||||
|
) -> CommandResult {
|
||||||
|
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
|
||||||
|
return Err("folder create cannot combine workspace_id with --json payload".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
let payload = parse_optional_json(
|
||||||
|
json,
|
||||||
|
workspace_id.clone().filter(|v| is_json_shorthand(v)),
|
||||||
|
"folder create",
|
||||||
|
)?;
|
||||||
|
|
||||||
|
if let Some(payload) = payload {
|
||||||
|
if name.is_some() {
|
||||||
|
return Err("folder create cannot combine --name with JSON payload".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_create_id(&payload, "folder")?;
|
||||||
|
let folder: Folder = serde_json::from_value(payload)
|
||||||
|
.map_err(|e| format!("Failed to parse folder create JSON: {e}"))?;
|
||||||
|
|
||||||
|
if folder.workspace_id.is_empty() {
|
||||||
|
return Err("folder create JSON requires non-empty \"workspaceId\"".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_folder(&folder, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create folder: {e}"))?;
|
||||||
|
|
||||||
|
println!("Created folder: {}", created.id);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let workspace_id = workspace_id.ok_or_else(|| {
|
||||||
|
"folder create requires workspace_id unless JSON payload is provided".to_string()
|
||||||
|
})?;
|
||||||
|
let name = name.ok_or_else(|| {
|
||||||
|
"folder create requires --name unless JSON payload is provided".to_string()
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let folder = Folder { workspace_id, name, ..Default::default() };
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_folder(&folder, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create folder: {e}"))?;
|
||||||
|
|
||||||
|
println!("Created folder: {}", created.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) -> CommandResult {
|
||||||
|
let patch = parse_required_json(json, json_input, "folder update")?;
|
||||||
|
let id = require_id(&patch, "folder update")?;
|
||||||
|
|
||||||
|
let existing =
|
||||||
|
ctx.db().get_folder(&id).map_err(|e| format!("Failed to get folder for update: {e}"))?;
|
||||||
|
let updated = apply_merge_patch(&existing, &patch, &id, "folder update")?;
|
||||||
|
|
||||||
|
let saved = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_folder(&updated, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to update folder: {e}"))?;
|
||||||
|
|
||||||
|
println!("Updated folder: {}", saved.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn delete(ctx: &CliContext, folder_id: &str, yes: bool) -> CommandResult {
|
||||||
|
if !yes && !confirm_delete("folder", folder_id) {
|
||||||
|
println!("Aborted");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let deleted = ctx
|
||||||
|
.db()
|
||||||
|
.delete_folder_by_id(folder_id, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to delete folder: {e}"))?;
|
||||||
|
|
||||||
|
println!("Deleted folder: {}", deleted.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
5
crates-cli/yaak-cli/src/commands/mod.rs
Normal file
5
crates-cli/yaak-cli/src/commands/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
//! CLI subcommand implementations, one module per top-level command.

pub mod environment;
pub mod folder;
pub mod request;
pub mod send;
pub mod workspace;
|
||||||
485
crates-cli/yaak-cli/src/commands/request.rs
Normal file
485
crates-cli/yaak-cli/src/commands/request.rs
Normal file
@@ -0,0 +1,485 @@
|
|||||||
|
use crate::cli::{RequestArgs, RequestCommands, RequestSchemaType};
|
||||||
|
use crate::context::CliContext;
|
||||||
|
use crate::utils::confirm::confirm_delete;
|
||||||
|
use crate::utils::json::{
|
||||||
|
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
|
||||||
|
validate_create_id,
|
||||||
|
};
|
||||||
|
use schemars::schema_for;
|
||||||
|
use serde_json::{Map, Value, json};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
use yaak::send::{SendHttpRequestByIdWithPluginsParams, send_http_request_by_id_with_plugins};
|
||||||
|
use yaak_models::models::{GrpcRequest, HttpRequest, WebsocketRequest};
|
||||||
|
use yaak_models::queries::any_request::AnyRequest;
|
||||||
|
use yaak_models::util::UpdateSource;
|
||||||
|
use yaak_plugins::events::{FormInput, FormInputBase, JsonPrimitive, PluginContext};
|
||||||
|
|
||||||
|
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||||
|
|
||||||
|
pub async fn run(
|
||||||
|
ctx: &CliContext,
|
||||||
|
args: RequestArgs,
|
||||||
|
environment: Option<&str>,
|
||||||
|
verbose: bool,
|
||||||
|
) -> i32 {
|
||||||
|
let result = match args.command {
|
||||||
|
RequestCommands::List { workspace_id } => list(ctx, &workspace_id),
|
||||||
|
RequestCommands::Show { request_id } => show(ctx, &request_id),
|
||||||
|
RequestCommands::Send { request_id } => {
|
||||||
|
return match send_request_by_id(ctx, &request_id, environment, verbose).await {
|
||||||
|
Ok(()) => 0,
|
||||||
|
Err(error) => {
|
||||||
|
eprintln!("Error: {error}");
|
||||||
|
1
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
RequestCommands::Schema { request_type } => {
|
||||||
|
return match schema(ctx, request_type).await {
|
||||||
|
Ok(()) => 0,
|
||||||
|
Err(error) => {
|
||||||
|
eprintln!("Error: {error}");
|
||||||
|
1
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
RequestCommands::Create { workspace_id, name, method, url, json } => {
|
||||||
|
create(ctx, workspace_id, name, method, url, json)
|
||||||
|
}
|
||||||
|
RequestCommands::Update { json, json_input } => update(ctx, json, json_input),
|
||||||
|
RequestCommands::Delete { request_id, yes } => delete(ctx, &request_id, yes),
|
||||||
|
};
|
||||||
|
|
||||||
|
match result {
|
||||||
|
Ok(()) => 0,
|
||||||
|
Err(error) => {
|
||||||
|
eprintln!("Error: {error}");
|
||||||
|
1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Print one line per HTTP request in the workspace: `id - METHOD name`.
fn list(ctx: &CliContext, workspace_id: &str) -> CommandResult {
    let requests = ctx
        .db()
        .list_http_requests(workspace_id)
        .map_err(|e| format!("Failed to list requests: {e}"))?;

    // Empty workspaces get a friendly notice instead of no output at all.
    if requests.is_empty() {
        println!("No requests found in workspace {}", workspace_id);
        return Ok(());
    }

    for request in requests {
        println!("{} - {} {}", request.id, request.method, request.name);
    }
    Ok(())
}
|
||||||
|
|
||||||
|
/// Print the JSON Schema for the selected request model, enriched
/// (best-effort) with authentication variants contributed by plugins.
async fn schema(ctx: &CliContext, request_type: RequestSchemaType) -> CommandResult {
    // Base schema is derived from the model type via schemars.
    let mut schema = match request_type {
        RequestSchemaType::Http => serde_json::to_value(schema_for!(HttpRequest))
            .map_err(|e| format!("Failed to serialize HTTP request schema: {e}"))?,
        RequestSchemaType::Grpc => serde_json::to_value(schema_for!(GrpcRequest))
            .map_err(|e| format!("Failed to serialize gRPC request schema: {e}"))?,
        RequestSchemaType::Websocket => serde_json::to_value(schema_for!(WebsocketRequest))
            .map_err(|e| format!("Failed to serialize WebSocket request schema: {e}"))?,
    };

    // Plugin enrichment is advisory: on failure we degrade to the bare schema
    // with a warning rather than failing the whole command.
    if let Err(error) = merge_auth_schema_from_plugins(ctx, &mut schema).await {
        eprintln!("Warning: Failed to enrich authentication schema from plugins: {error}");
    }

    let output = serde_json::to_string_pretty(&schema)
        .map_err(|e| format!("Failed to format schema JSON: {e}"))?;
    println!("{output}");
    Ok(())
}
|
||||||
|
|
||||||
|
/// Replace `schema.properties.authentication` with a `oneOf` that adds one
/// variant per plugin-provided HTTP authentication strategy.
///
/// Best-effort throughout: a strategy whose config cannot be loaded is
/// skipped with a warning, and a schema without the expected
/// `properties.authentication` shape is left untouched. Only the initial
/// summary listing can fail the whole call.
async fn merge_auth_schema_from_plugins(
    ctx: &CliContext,
    schema: &mut Value,
) -> Result<(), String> {
    // NOTE(review): an empty plugin context is used here — presumably fine
    // for schema introspection, which needs no workspace state; confirm.
    let plugin_context = PluginContext::new_empty();
    let plugin_manager = ctx.plugin_manager();
    let summaries = plugin_manager
        .get_http_authentication_summaries(&plugin_context)
        .await
        .map_err(|e| e.to_string())?;

    let mut auth_variants = Vec::new();
    for (_, summary) in summaries {
        // Ask the plugin for its form definition with no prior values; the
        // label identifies the caller to the plugin system.
        let config = match plugin_manager
            .get_http_authentication_config(
                &plugin_context,
                &summary.name,
                HashMap::<String, JsonPrimitive>::new(),
                "yaakcli_request_schema",
            )
            .await
        {
            Ok(config) => config,
            Err(error) => {
                // Skip just this strategy; the rest can still be merged.
                eprintln!(
                    "Warning: Failed to load auth config for strategy '{}': {}",
                    summary.name, error
                );
                continue;
            }
        };

        auth_variants.push(auth_variant_schema(&summary.name, &summary.label, &config.args));
    }

    // Bail out quietly if the schema does not have the expected shape.
    let Some(properties) = schema.get_mut("properties").and_then(Value::as_object_mut) else {
        return Ok(());
    };

    let Some(auth_schema) = properties.get_mut("authentication") else {
        return Ok(());
    };

    // Keep the original schema as the first alternative.
    if !auth_variants.is_empty() {
        let mut one_of = vec![auth_schema.clone()];
        one_of.extend(auth_variants);
        *auth_schema = json!({ "oneOf": one_of });
    }

    Ok(())
}
|
||||||
|
|
||||||
|
fn auth_variant_schema(auth_name: &str, auth_label: &str, args: &[FormInput]) -> Value {
|
||||||
|
let mut properties = Map::new();
|
||||||
|
let mut required = Vec::new();
|
||||||
|
for input in args {
|
||||||
|
add_input_schema(input, &mut properties, &mut required);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut schema = json!({
|
||||||
|
"title": auth_label,
|
||||||
|
"description": format!("Authentication values for strategy '{}'", auth_name),
|
||||||
|
"type": "object",
|
||||||
|
"properties": properties,
|
||||||
|
"additionalProperties": true
|
||||||
|
});
|
||||||
|
|
||||||
|
if !required.is_empty() {
|
||||||
|
schema["required"] = json!(required);
|
||||||
|
}
|
||||||
|
|
||||||
|
schema
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_input_schema(
|
||||||
|
input: &FormInput,
|
||||||
|
properties: &mut Map<String, Value>,
|
||||||
|
required: &mut Vec<String>,
|
||||||
|
) {
|
||||||
|
match input {
|
||||||
|
FormInput::Text(v) => add_base_schema(
|
||||||
|
&v.base,
|
||||||
|
json!({
|
||||||
|
"type": "string",
|
||||||
|
"writeOnly": v.password.unwrap_or(false),
|
||||||
|
}),
|
||||||
|
properties,
|
||||||
|
required,
|
||||||
|
),
|
||||||
|
FormInput::Editor(v) => add_base_schema(
|
||||||
|
&v.base,
|
||||||
|
json!({
|
||||||
|
"type": "string",
|
||||||
|
"x-editorLanguage": v.language.clone(),
|
||||||
|
}),
|
||||||
|
properties,
|
||||||
|
required,
|
||||||
|
),
|
||||||
|
FormInput::Select(v) => {
|
||||||
|
let options: Vec<Value> =
|
||||||
|
v.options.iter().map(|o| Value::String(o.value.clone())).collect();
|
||||||
|
add_base_schema(
|
||||||
|
&v.base,
|
||||||
|
json!({
|
||||||
|
"type": "string",
|
||||||
|
"enum": options,
|
||||||
|
}),
|
||||||
|
properties,
|
||||||
|
required,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
FormInput::Checkbox(v) => {
|
||||||
|
add_base_schema(&v.base, json!({ "type": "boolean" }), properties, required);
|
||||||
|
}
|
||||||
|
FormInput::File(v) => {
|
||||||
|
if v.multiple.unwrap_or(false) {
|
||||||
|
add_base_schema(
|
||||||
|
&v.base,
|
||||||
|
json!({
|
||||||
|
"type": "array",
|
||||||
|
"items": { "type": "string" },
|
||||||
|
}),
|
||||||
|
properties,
|
||||||
|
required,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
add_base_schema(&v.base, json!({ "type": "string" }), properties, required);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FormInput::HttpRequest(v) => {
|
||||||
|
add_base_schema(&v.base, json!({ "type": "string" }), properties, required);
|
||||||
|
}
|
||||||
|
FormInput::KeyValue(v) => {
|
||||||
|
add_base_schema(
|
||||||
|
&v.base,
|
||||||
|
json!({
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": true,
|
||||||
|
}),
|
||||||
|
properties,
|
||||||
|
required,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
FormInput::Accordion(v) => {
|
||||||
|
if let Some(children) = &v.inputs {
|
||||||
|
for child in children {
|
||||||
|
add_input_schema(child, properties, required);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FormInput::HStack(v) => {
|
||||||
|
if let Some(children) = &v.inputs {
|
||||||
|
for child in children {
|
||||||
|
add_input_schema(child, properties, required);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FormInput::Banner(v) => {
|
||||||
|
if let Some(children) = &v.inputs {
|
||||||
|
for child in children {
|
||||||
|
add_input_schema(child, properties, required);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FormInput::Markdown(_) => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_base_schema(
|
||||||
|
base: &FormInputBase,
|
||||||
|
mut schema: Value,
|
||||||
|
properties: &mut Map<String, Value>,
|
||||||
|
required: &mut Vec<String>,
|
||||||
|
) {
|
||||||
|
if base.hidden.unwrap_or(false) || base.name.trim().is_empty() {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(description) = &base.description {
|
||||||
|
schema["description"] = Value::String(description.clone());
|
||||||
|
}
|
||||||
|
if let Some(label) = &base.label {
|
||||||
|
schema["title"] = Value::String(label.clone());
|
||||||
|
}
|
||||||
|
if let Some(default_value) = &base.default_value {
|
||||||
|
schema["default"] = Value::String(default_value.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
let name = base.name.clone();
|
||||||
|
properties.insert(name.clone(), schema);
|
||||||
|
if !base.optional.unwrap_or(false) {
|
||||||
|
required.push(name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create(
|
||||||
|
ctx: &CliContext,
|
||||||
|
workspace_id: Option<String>,
|
||||||
|
name: Option<String>,
|
||||||
|
method: Option<String>,
|
||||||
|
url: Option<String>,
|
||||||
|
json: Option<String>,
|
||||||
|
) -> CommandResult {
|
||||||
|
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
|
||||||
|
return Err("request create cannot combine workspace_id with --json payload".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
let payload = parse_optional_json(
|
||||||
|
json,
|
||||||
|
workspace_id.clone().filter(|v| is_json_shorthand(v)),
|
||||||
|
"request create",
|
||||||
|
)?;
|
||||||
|
|
||||||
|
if let Some(payload) = payload {
|
||||||
|
if name.is_some() || method.is_some() || url.is_some() {
|
||||||
|
return Err("request create cannot combine simple flags with JSON payload".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_create_id(&payload, "request")?;
|
||||||
|
let request: HttpRequest = serde_json::from_value(payload)
|
||||||
|
.map_err(|e| format!("Failed to parse request create JSON: {e}"))?;
|
||||||
|
|
||||||
|
if request.workspace_id.is_empty() {
|
||||||
|
return Err("request create JSON requires non-empty \"workspaceId\"".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create request: {e}"))?;
|
||||||
|
|
||||||
|
println!("Created request: {}", created.id);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let workspace_id = workspace_id.ok_or_else(|| {
|
||||||
|
"request create requires workspace_id unless JSON payload is provided".to_string()
|
||||||
|
})?;
|
||||||
|
let name = name.unwrap_or_default();
|
||||||
|
let url = url.unwrap_or_default();
|
||||||
|
let method = method.unwrap_or_else(|| "GET".to_string());
|
||||||
|
|
||||||
|
let request = HttpRequest {
|
||||||
|
workspace_id,
|
||||||
|
name,
|
||||||
|
method: method.to_uppercase(),
|
||||||
|
url,
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create request: {e}"))?;
|
||||||
|
|
||||||
|
println!("Created request: {}", created.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) -> CommandResult {
|
||||||
|
let patch = parse_required_json(json, json_input, "request update")?;
|
||||||
|
let id = require_id(&patch, "request update")?;
|
||||||
|
|
||||||
|
let existing = ctx
|
||||||
|
.db()
|
||||||
|
.get_http_request(&id)
|
||||||
|
.map_err(|e| format!("Failed to get request for update: {e}"))?;
|
||||||
|
let updated = apply_merge_patch(&existing, &patch, &id, "request update")?;
|
||||||
|
|
||||||
|
let saved = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_http_request(&updated, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to update request: {e}"))?;
|
||||||
|
|
||||||
|
println!("Updated request: {}", saved.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn show(ctx: &CliContext, request_id: &str) -> CommandResult {
|
||||||
|
let request =
|
||||||
|
ctx.db().get_http_request(request_id).map_err(|e| format!("Failed to get request: {e}"))?;
|
||||||
|
let output = serde_json::to_string_pretty(&request)
|
||||||
|
.map_err(|e| format!("Failed to serialize request: {e}"))?;
|
||||||
|
println!("{output}");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn delete(ctx: &CliContext, request_id: &str, yes: bool) -> CommandResult {
|
||||||
|
if !yes && !confirm_delete("request", request_id) {
|
||||||
|
println!("Aborted");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let deleted = ctx
|
||||||
|
.db()
|
||||||
|
.delete_http_request_by_id(request_id, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to delete request: {e}"))?;
|
||||||
|
println!("Deleted request: {}", deleted.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Send a request by ID and print response in the same format as legacy `send`.
|
||||||
|
pub async fn send_request_by_id(
|
||||||
|
ctx: &CliContext,
|
||||||
|
request_id: &str,
|
||||||
|
environment: Option<&str>,
|
||||||
|
verbose: bool,
|
||||||
|
) -> Result<(), String> {
|
||||||
|
let request =
|
||||||
|
ctx.db().get_any_request(request_id).map_err(|e| format!("Failed to get request: {e}"))?;
|
||||||
|
match request {
|
||||||
|
AnyRequest::HttpRequest(http_request) => {
|
||||||
|
send_http_request_by_id(
|
||||||
|
ctx,
|
||||||
|
&http_request.id,
|
||||||
|
&http_request.workspace_id,
|
||||||
|
environment,
|
||||||
|
verbose,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
AnyRequest::GrpcRequest(_) => {
|
||||||
|
Err("gRPC request send is not implemented yet in yaak-cli".to_string())
|
||||||
|
}
|
||||||
|
AnyRequest::WebsocketRequest(_) => {
|
||||||
|
Err("WebSocket request send is not implemented yet in yaak-cli".to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn send_http_request_by_id(
|
||||||
|
ctx: &CliContext,
|
||||||
|
request_id: &str,
|
||||||
|
workspace_id: &str,
|
||||||
|
environment: Option<&str>,
|
||||||
|
verbose: bool,
|
||||||
|
) -> Result<(), String> {
|
||||||
|
let plugin_context = PluginContext::new(None, Some(workspace_id.to_string()));
|
||||||
|
|
||||||
|
let (event_tx, mut event_rx) = mpsc::channel(100);
|
||||||
|
let event_handle = tokio::spawn(async move {
|
||||||
|
while let Some(event) = event_rx.recv().await {
|
||||||
|
if verbose {
|
||||||
|
println!("{}", event);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
let response_dir = ctx.data_dir().join("responses");
|
||||||
|
|
||||||
|
let result = send_http_request_by_id_with_plugins(SendHttpRequestByIdWithPluginsParams {
|
||||||
|
query_manager: ctx.query_manager(),
|
||||||
|
blob_manager: ctx.blob_manager(),
|
||||||
|
request_id,
|
||||||
|
environment_id: environment,
|
||||||
|
update_source: UpdateSource::Sync,
|
||||||
|
cookie_jar_id: None,
|
||||||
|
response_dir: &response_dir,
|
||||||
|
emit_events_to: Some(event_tx),
|
||||||
|
plugin_manager: ctx.plugin_manager(),
|
||||||
|
encryption_manager: ctx.encryption_manager.clone(),
|
||||||
|
plugin_context: &plugin_context,
|
||||||
|
cancelled_rx: None,
|
||||||
|
connection_manager: None,
|
||||||
|
})
|
||||||
|
.await;
|
||||||
|
|
||||||
|
let _ = event_handle.await;
|
||||||
|
let result = result.map_err(|e| e.to_string())?;
|
||||||
|
|
||||||
|
if verbose {
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
println!(
|
||||||
|
"HTTP {} {}",
|
||||||
|
result.response.status,
|
||||||
|
result.response.status_reason.as_deref().unwrap_or("")
|
||||||
|
);
|
||||||
|
if verbose {
|
||||||
|
for header in &result.response.headers {
|
||||||
|
println!("{}: {}", header.name, header.value);
|
||||||
|
}
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
let body = String::from_utf8(result.response_body)
|
||||||
|
.map_err(|e| format!("Failed to read response body: {e}"))?;
|
||||||
|
println!("{}", body);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
184
crates-cli/yaak-cli/src/commands/send.rs
Normal file
184
crates-cli/yaak-cli/src/commands/send.rs
Normal file
@@ -0,0 +1,184 @@
|
|||||||
|
use crate::cli::SendArgs;
|
||||||
|
use crate::commands::request;
|
||||||
|
use crate::context::CliContext;
|
||||||
|
use futures::future::join_all;
|
||||||
|
|
||||||
|
enum ExecutionMode {
|
||||||
|
Sequential,
|
||||||
|
Parallel,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn run(
|
||||||
|
ctx: &CliContext,
|
||||||
|
args: SendArgs,
|
||||||
|
environment: Option<&str>,
|
||||||
|
verbose: bool,
|
||||||
|
) -> i32 {
|
||||||
|
match send_target(ctx, args, environment, verbose).await {
|
||||||
|
Ok(()) => 0,
|
||||||
|
Err(error) => {
|
||||||
|
eprintln!("Error: {error}");
|
||||||
|
1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn send_target(
|
||||||
|
ctx: &CliContext,
|
||||||
|
args: SendArgs,
|
||||||
|
environment: Option<&str>,
|
||||||
|
verbose: bool,
|
||||||
|
) -> Result<(), String> {
|
||||||
|
let mode = if args.parallel { ExecutionMode::Parallel } else { ExecutionMode::Sequential };
|
||||||
|
|
||||||
|
if ctx.db().get_any_request(&args.id).is_ok() {
|
||||||
|
return request::send_request_by_id(ctx, &args.id, environment, verbose).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
if ctx.db().get_folder(&args.id).is_ok() {
|
||||||
|
let request_ids = collect_folder_request_ids(ctx, &args.id)?;
|
||||||
|
if request_ids.is_empty() {
|
||||||
|
println!("No requests found in folder {}", args.id);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
return send_many(ctx, request_ids, mode, args.fail_fast, environment, verbose).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
if ctx.db().get_workspace(&args.id).is_ok() {
|
||||||
|
let request_ids = collect_workspace_request_ids(ctx, &args.id)?;
|
||||||
|
if request_ids.is_empty() {
|
||||||
|
println!("No requests found in workspace {}", args.id);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
return send_many(ctx, request_ids, mode, args.fail_fast, environment, verbose).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(format!("Could not resolve ID '{}' as request, folder, or workspace", args.id))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn collect_folder_request_ids(ctx: &CliContext, folder_id: &str) -> Result<Vec<String>, String> {
|
||||||
|
let mut ids = Vec::new();
|
||||||
|
|
||||||
|
let mut http_ids = ctx
|
||||||
|
.db()
|
||||||
|
.list_http_requests_for_folder_recursive(folder_id)
|
||||||
|
.map_err(|e| format!("Failed to list HTTP requests in folder: {e}"))?
|
||||||
|
.into_iter()
|
||||||
|
.map(|r| r.id)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
ids.append(&mut http_ids);
|
||||||
|
|
||||||
|
let mut grpc_ids = ctx
|
||||||
|
.db()
|
||||||
|
.list_grpc_requests_for_folder_recursive(folder_id)
|
||||||
|
.map_err(|e| format!("Failed to list gRPC requests in folder: {e}"))?
|
||||||
|
.into_iter()
|
||||||
|
.map(|r| r.id)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
ids.append(&mut grpc_ids);
|
||||||
|
|
||||||
|
let mut websocket_ids = ctx
|
||||||
|
.db()
|
||||||
|
.list_websocket_requests_for_folder_recursive(folder_id)
|
||||||
|
.map_err(|e| format!("Failed to list WebSocket requests in folder: {e}"))?
|
||||||
|
.into_iter()
|
||||||
|
.map(|r| r.id)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
ids.append(&mut websocket_ids);
|
||||||
|
|
||||||
|
Ok(ids)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn collect_workspace_request_ids(
|
||||||
|
ctx: &CliContext,
|
||||||
|
workspace_id: &str,
|
||||||
|
) -> Result<Vec<String>, String> {
|
||||||
|
let mut ids = Vec::new();
|
||||||
|
|
||||||
|
let mut http_ids = ctx
|
||||||
|
.db()
|
||||||
|
.list_http_requests(workspace_id)
|
||||||
|
.map_err(|e| format!("Failed to list HTTP requests in workspace: {e}"))?
|
||||||
|
.into_iter()
|
||||||
|
.map(|r| r.id)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
ids.append(&mut http_ids);
|
||||||
|
|
||||||
|
let mut grpc_ids = ctx
|
||||||
|
.db()
|
||||||
|
.list_grpc_requests(workspace_id)
|
||||||
|
.map_err(|e| format!("Failed to list gRPC requests in workspace: {e}"))?
|
||||||
|
.into_iter()
|
||||||
|
.map(|r| r.id)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
ids.append(&mut grpc_ids);
|
||||||
|
|
||||||
|
let mut websocket_ids = ctx
|
||||||
|
.db()
|
||||||
|
.list_websocket_requests(workspace_id)
|
||||||
|
.map_err(|e| format!("Failed to list WebSocket requests in workspace: {e}"))?
|
||||||
|
.into_iter()
|
||||||
|
.map(|r| r.id)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
ids.append(&mut websocket_ids);
|
||||||
|
|
||||||
|
Ok(ids)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn send_many(
|
||||||
|
ctx: &CliContext,
|
||||||
|
request_ids: Vec<String>,
|
||||||
|
mode: ExecutionMode,
|
||||||
|
fail_fast: bool,
|
||||||
|
environment: Option<&str>,
|
||||||
|
verbose: bool,
|
||||||
|
) -> Result<(), String> {
|
||||||
|
let mut success_count = 0usize;
|
||||||
|
let mut failures: Vec<(String, String)> = Vec::new();
|
||||||
|
|
||||||
|
match mode {
|
||||||
|
ExecutionMode::Sequential => {
|
||||||
|
for request_id in request_ids {
|
||||||
|
match request::send_request_by_id(ctx, &request_id, environment, verbose).await {
|
||||||
|
Ok(()) => success_count += 1,
|
||||||
|
Err(error) => {
|
||||||
|
failures.push((request_id, error));
|
||||||
|
if fail_fast {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ExecutionMode::Parallel => {
|
||||||
|
let tasks = request_ids
|
||||||
|
.iter()
|
||||||
|
.map(|request_id| async move {
|
||||||
|
(
|
||||||
|
request_id.clone(),
|
||||||
|
request::send_request_by_id(ctx, request_id, environment, verbose).await,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
for (request_id, result) in join_all(tasks).await {
|
||||||
|
match result {
|
||||||
|
Ok(()) => success_count += 1,
|
||||||
|
Err(error) => failures.push((request_id, error)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let failure_count = failures.len();
|
||||||
|
println!("Send summary: {success_count} succeeded, {failure_count} failed");
|
||||||
|
|
||||||
|
if failure_count == 0 {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
for (request_id, error) in failures {
|
||||||
|
eprintln!(" {}: {}", request_id, error);
|
||||||
|
}
|
||||||
|
Err("One or more requests failed".to_string())
|
||||||
|
}
|
||||||
123
crates-cli/yaak-cli/src/commands/workspace.rs
Normal file
123
crates-cli/yaak-cli/src/commands/workspace.rs
Normal file
@@ -0,0 +1,123 @@
|
|||||||
|
use crate::cli::{WorkspaceArgs, WorkspaceCommands};
|
||||||
|
use crate::context::CliContext;
|
||||||
|
use crate::utils::confirm::confirm_delete;
|
||||||
|
use crate::utils::json::{
|
||||||
|
apply_merge_patch, parse_optional_json, parse_required_json, require_id, validate_create_id,
|
||||||
|
};
|
||||||
|
use yaak_models::models::Workspace;
|
||||||
|
use yaak_models::util::UpdateSource;
|
||||||
|
|
||||||
|
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||||
|
|
||||||
|
pub fn run(ctx: &CliContext, args: WorkspaceArgs) -> i32 {
|
||||||
|
let result = match args.command {
|
||||||
|
WorkspaceCommands::List => list(ctx),
|
||||||
|
WorkspaceCommands::Show { workspace_id } => show(ctx, &workspace_id),
|
||||||
|
WorkspaceCommands::Create { name, json, json_input } => create(ctx, name, json, json_input),
|
||||||
|
WorkspaceCommands::Update { json, json_input } => update(ctx, json, json_input),
|
||||||
|
WorkspaceCommands::Delete { workspace_id, yes } => delete(ctx, &workspace_id, yes),
|
||||||
|
};
|
||||||
|
|
||||||
|
match result {
|
||||||
|
Ok(()) => 0,
|
||||||
|
Err(error) => {
|
||||||
|
eprintln!("Error: {error}");
|
||||||
|
1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn list(ctx: &CliContext) -> CommandResult {
|
||||||
|
let workspaces =
|
||||||
|
ctx.db().list_workspaces().map_err(|e| format!("Failed to list workspaces: {e}"))?;
|
||||||
|
if workspaces.is_empty() {
|
||||||
|
println!("No workspaces found");
|
||||||
|
} else {
|
||||||
|
for workspace in workspaces {
|
||||||
|
println!("{} - {}", workspace.id, workspace.name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn show(ctx: &CliContext, workspace_id: &str) -> CommandResult {
|
||||||
|
let workspace = ctx
|
||||||
|
.db()
|
||||||
|
.get_workspace(workspace_id)
|
||||||
|
.map_err(|e| format!("Failed to get workspace: {e}"))?;
|
||||||
|
let output = serde_json::to_string_pretty(&workspace)
|
||||||
|
.map_err(|e| format!("Failed to serialize workspace: {e}"))?;
|
||||||
|
println!("{output}");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create(
|
||||||
|
ctx: &CliContext,
|
||||||
|
name: Option<String>,
|
||||||
|
json: Option<String>,
|
||||||
|
json_input: Option<String>,
|
||||||
|
) -> CommandResult {
|
||||||
|
let payload = parse_optional_json(json, json_input, "workspace create")?;
|
||||||
|
|
||||||
|
if let Some(payload) = payload {
|
||||||
|
if name.is_some() {
|
||||||
|
return Err("workspace create cannot combine --name with JSON payload".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
validate_create_id(&payload, "workspace")?;
|
||||||
|
let workspace: Workspace = serde_json::from_value(payload)
|
||||||
|
.map_err(|e| format!("Failed to parse workspace create JSON: {e}"))?;
|
||||||
|
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_workspace(&workspace, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create workspace: {e}"))?;
|
||||||
|
println!("Created workspace: {}", created.id);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let name = name.ok_or_else(|| {
|
||||||
|
"workspace create requires --name unless JSON payload is provided".to_string()
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let workspace = Workspace { name, ..Default::default() };
|
||||||
|
let created = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_workspace(&workspace, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to create workspace: {e}"))?;
|
||||||
|
println!("Created workspace: {}", created.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) -> CommandResult {
|
||||||
|
let patch = parse_required_json(json, json_input, "workspace update")?;
|
||||||
|
let id = require_id(&patch, "workspace update")?;
|
||||||
|
|
||||||
|
let existing = ctx
|
||||||
|
.db()
|
||||||
|
.get_workspace(&id)
|
||||||
|
.map_err(|e| format!("Failed to get workspace for update: {e}"))?;
|
||||||
|
let updated = apply_merge_patch(&existing, &patch, &id, "workspace update")?;
|
||||||
|
|
||||||
|
let saved = ctx
|
||||||
|
.db()
|
||||||
|
.upsert_workspace(&updated, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to update workspace: {e}"))?;
|
||||||
|
|
||||||
|
println!("Updated workspace: {}", saved.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn delete(ctx: &CliContext, workspace_id: &str, yes: bool) -> CommandResult {
|
||||||
|
if !yes && !confirm_delete("workspace", workspace_id) {
|
||||||
|
println!("Aborted");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let deleted = ctx
|
||||||
|
.db()
|
||||||
|
.delete_workspace_by_id(workspace_id, &UpdateSource::Sync)
|
||||||
|
.map_err(|e| format!("Failed to delete workspace: {e}"))?;
|
||||||
|
println!("Deleted workspace: {}", deleted.id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
115
crates-cli/yaak-cli/src/context.rs
Normal file
115
crates-cli/yaak-cli/src/context.rs
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
use crate::plugin_events::CliPluginEventBridge;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::sync::Arc;
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
use yaak_crypto::manager::EncryptionManager;
|
||||||
|
use yaak_models::blob_manager::BlobManager;
|
||||||
|
use yaak_models::db_context::DbContext;
|
||||||
|
use yaak_models::query_manager::QueryManager;
|
||||||
|
use yaak_plugins::events::PluginContext;
|
||||||
|
use yaak_plugins::manager::PluginManager;
|
||||||
|
|
||||||
|
pub struct CliContext {
|
||||||
|
data_dir: PathBuf,
|
||||||
|
query_manager: QueryManager,
|
||||||
|
blob_manager: BlobManager,
|
||||||
|
pub encryption_manager: Arc<EncryptionManager>,
|
||||||
|
plugin_manager: Option<Arc<PluginManager>>,
|
||||||
|
plugin_event_bridge: Mutex<Option<CliPluginEventBridge>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CliContext {
|
||||||
|
pub async fn initialize(data_dir: PathBuf, app_id: &str, with_plugins: bool) -> Self {
|
||||||
|
let db_path = data_dir.join("db.sqlite");
|
||||||
|
let blob_path = data_dir.join("blobs.sqlite");
|
||||||
|
|
||||||
|
let (query_manager, blob_manager, _rx) = yaak_models::init_standalone(&db_path, &blob_path)
|
||||||
|
.expect("Failed to initialize database");
|
||||||
|
|
||||||
|
let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));
|
||||||
|
|
||||||
|
let plugin_manager = if with_plugins {
|
||||||
|
let vendored_plugin_dir = data_dir.join("vendored-plugins");
|
||||||
|
let installed_plugin_dir = data_dir.join("installed-plugins");
|
||||||
|
let node_bin_path = PathBuf::from("node");
|
||||||
|
|
||||||
|
let plugin_runtime_main =
|
||||||
|
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
|
||||||
|
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
||||||
|
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
|
||||||
|
});
|
||||||
|
|
||||||
|
let plugin_manager = Arc::new(
|
||||||
|
PluginManager::new(
|
||||||
|
vendored_plugin_dir,
|
||||||
|
installed_plugin_dir,
|
||||||
|
node_bin_path,
|
||||||
|
plugin_runtime_main,
|
||||||
|
false,
|
||||||
|
)
|
||||||
|
.await,
|
||||||
|
);
|
||||||
|
|
||||||
|
let plugins = query_manager.connect().list_plugins().unwrap_or_default();
|
||||||
|
if !plugins.is_empty() {
|
||||||
|
let errors = plugin_manager
|
||||||
|
.initialize_all_plugins(plugins, &PluginContext::new_empty())
|
||||||
|
.await;
|
||||||
|
for (plugin_dir, error_msg) in errors {
|
||||||
|
eprintln!(
|
||||||
|
"Warning: Failed to initialize plugin '{}': {}",
|
||||||
|
plugin_dir, error_msg
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(plugin_manager)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
let plugin_event_bridge = if let Some(plugin_manager) = &plugin_manager {
|
||||||
|
Some(CliPluginEventBridge::start(plugin_manager.clone(), query_manager.clone()).await)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
Self {
|
||||||
|
data_dir,
|
||||||
|
query_manager,
|
||||||
|
blob_manager,
|
||||||
|
encryption_manager,
|
||||||
|
plugin_manager,
|
||||||
|
plugin_event_bridge: Mutex::new(plugin_event_bridge),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn data_dir(&self) -> &Path {
|
||||||
|
&self.data_dir
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn db(&self) -> DbContext<'_> {
|
||||||
|
self.query_manager.connect()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn query_manager(&self) -> &QueryManager {
|
||||||
|
&self.query_manager
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn blob_manager(&self) -> &BlobManager {
|
||||||
|
&self.blob_manager
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn plugin_manager(&self) -> Arc<PluginManager> {
|
||||||
|
self.plugin_manager.clone().expect("Plugin manager was not initialized for this command")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn shutdown(&self) {
|
||||||
|
if let Some(plugin_manager) = &self.plugin_manager {
|
||||||
|
if let Some(plugin_event_bridge) = self.plugin_event_bridge.lock().await.take() {
|
||||||
|
plugin_event_bridge.shutdown(plugin_manager).await;
|
||||||
|
}
|
||||||
|
plugin_manager.terminate().await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,290 +1,52 @@
|
|||||||
use clap::{Parser, Subcommand};
|
mod cli;
|
||||||
use std::path::PathBuf;
|
mod commands;
|
||||||
use std::sync::Arc;
|
mod context;
|
||||||
use tokio::sync::mpsc;
|
mod plugin_events;
|
||||||
use yaak_http::sender::{HttpSender, ReqwestSender};
|
mod utils;
|
||||||
use yaak_http::types::{SendableHttpRequest, SendableHttpRequestOptions};
|
|
||||||
use yaak_models::models::HttpRequest;
|
|
||||||
use yaak_models::util::UpdateSource;
|
|
||||||
use yaak_plugins::events::PluginContext;
|
|
||||||
use yaak_plugins::manager::PluginManager;
|
|
||||||
|
|
||||||
#[derive(Parser)]
|
use clap::Parser;
|
||||||
#[command(name = "yaakcli")]
|
use cli::{Cli, Commands, RequestCommands};
|
||||||
#[command(about = "Yaak CLI - API client from the command line")]
|
use context::CliContext;
|
||||||
struct Cli {
|
|
||||||
/// Use a custom data directory
|
|
||||||
#[arg(long, global = true)]
|
|
||||||
data_dir: Option<PathBuf>,
|
|
||||||
|
|
||||||
/// Environment ID to use for variable substitution
|
|
||||||
#[arg(long, short, global = true)]
|
|
||||||
environment: Option<String>,
|
|
||||||
|
|
||||||
/// Enable verbose logging
|
|
||||||
#[arg(long, short, global = true)]
|
|
||||||
verbose: bool,
|
|
||||||
|
|
||||||
#[command(subcommand)]
|
|
||||||
command: Commands,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Subcommand)]
|
|
||||||
enum Commands {
|
|
||||||
/// List all workspaces
|
|
||||||
Workspaces,
|
|
||||||
/// List requests in a workspace
|
|
||||||
Requests {
|
|
||||||
/// Workspace ID
|
|
||||||
workspace_id: String,
|
|
||||||
},
|
|
||||||
/// Send an HTTP request by ID
|
|
||||||
Send {
|
|
||||||
/// Request ID
|
|
||||||
request_id: String,
|
|
||||||
},
|
|
||||||
/// Send a GET request to a URL
|
|
||||||
Get {
|
|
||||||
/// URL to request
|
|
||||||
url: String,
|
|
||||||
},
|
|
||||||
/// Create a new HTTP request
|
|
||||||
Create {
|
|
||||||
/// Workspace ID
|
|
||||||
workspace_id: String,
|
|
||||||
/// Request name
|
|
||||||
#[arg(short, long)]
|
|
||||||
name: String,
|
|
||||||
/// HTTP method
|
|
||||||
#[arg(short, long, default_value = "GET")]
|
|
||||||
method: String,
|
|
||||||
/// URL
|
|
||||||
#[arg(short, long)]
|
|
||||||
url: String,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() {
|
async fn main() {
|
||||||
let cli = Cli::parse();
|
let Cli { data_dir, environment, verbose, command } = Cli::parse();
|
||||||
|
|
||||||
// Initialize logging
|
if verbose {
|
||||||
if cli.verbose {
|
|
||||||
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
|
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
|
||||||
}
|
}
|
||||||
|
|
||||||
// Use the same app_id for both data directory and keyring
|
|
||||||
let app_id = if cfg!(debug_assertions) { "app.yaak.desktop.dev" } else { "app.yaak.desktop" };
|
let app_id = if cfg!(debug_assertions) { "app.yaak.desktop.dev" } else { "app.yaak.desktop" };
|
||||||
|
|
||||||
let data_dir = cli.data_dir.unwrap_or_else(|| {
|
let data_dir = data_dir.unwrap_or_else(|| {
|
||||||
dirs::data_dir().expect("Could not determine data directory").join(app_id)
|
dirs::data_dir().expect("Could not determine data directory").join(app_id)
|
||||||
});
|
});
|
||||||
|
|
||||||
let db_path = data_dir.join("db.sqlite");
|
let needs_plugins = matches!(
|
||||||
let blob_path = data_dir.join("blobs.sqlite");
|
&command,
|
||||||
|
Commands::Send(_)
|
||||||
let (query_manager, _blob_manager, _rx) =
|
| Commands::Request(cli::RequestArgs {
|
||||||
yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize database");
|
command: RequestCommands::Send { .. } | RequestCommands::Schema { .. },
|
||||||
|
})
|
||||||
let db = query_manager.connect();
|
|
||||||
|
|
||||||
// Initialize plugin manager for template functions
|
|
||||||
let vendored_plugin_dir = data_dir.join("vendored-plugins");
|
|
||||||
let installed_plugin_dir = data_dir.join("installed-plugins");
|
|
||||||
|
|
||||||
// Use system node for CLI (must be in PATH)
|
|
||||||
let node_bin_path = PathBuf::from("node");
|
|
||||||
|
|
||||||
// Find the plugin runtime - check YAAK_PLUGIN_RUNTIME env var, then fallback to development path
|
|
||||||
let plugin_runtime_main =
|
|
||||||
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
|
|
||||||
// Development fallback: look relative to crate root
|
|
||||||
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
|
||||||
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
|
|
||||||
});
|
|
||||||
|
|
||||||
// Create plugin manager (plugins may not be available in CLI context)
|
|
||||||
let plugin_manager = Arc::new(
|
|
||||||
PluginManager::new(
|
|
||||||
vendored_plugin_dir.clone(),
|
|
||||||
installed_plugin_dir.clone(),
|
|
||||||
node_bin_path.clone(),
|
|
||||||
plugin_runtime_main,
|
|
||||||
false,
|
|
||||||
)
|
|
||||||
.await,
|
|
||||||
);
|
);
|
||||||
|
|
||||||
// Initialize plugins from database
|
let context = CliContext::initialize(data_dir, app_id, needs_plugins).await;
|
||||||
let plugins = db.list_plugins().unwrap_or_default();
|
|
||||||
if !plugins.is_empty() {
|
let exit_code = match command {
|
||||||
let errors =
|
Commands::Send(args) => {
|
||||||
plugin_manager.initialize_all_plugins(plugins, &PluginContext::new_empty()).await;
|
commands::send::run(&context, args, environment.as_deref(), verbose).await
|
||||||
for (plugin_dir, error_msg) in errors {
|
|
||||||
eprintln!("Warning: Failed to initialize plugin '{}': {}", plugin_dir, error_msg);
|
|
||||||
}
|
}
|
||||||
|
Commands::Workspace(args) => commands::workspace::run(&context, args),
|
||||||
|
Commands::Request(args) => {
|
||||||
|
commands::request::run(&context, args, environment.as_deref(), verbose).await
|
||||||
|
}
|
||||||
|
Commands::Folder(args) => commands::folder::run(&context, args),
|
||||||
|
Commands::Environment(args) => commands::environment::run(&context, args),
|
||||||
|
};
|
||||||
|
|
||||||
|
context.shutdown().await;
|
||||||
|
|
||||||
|
if exit_code != 0 {
|
||||||
|
std::process::exit(exit_code);
|
||||||
}
|
}
|
||||||
|
|
||||||
match cli.command {
|
|
||||||
Commands::Workspaces => {
|
|
||||||
let workspaces = db.list_workspaces().expect("Failed to list workspaces");
|
|
||||||
if workspaces.is_empty() {
|
|
||||||
println!("No workspaces found");
|
|
||||||
} else {
|
|
||||||
for ws in workspaces {
|
|
||||||
println!("{} - {}", ws.id, ws.name);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Commands::Requests { workspace_id } => {
|
|
||||||
let requests = db.list_http_requests(&workspace_id).expect("Failed to list requests");
|
|
||||||
if requests.is_empty() {
|
|
||||||
println!("No requests found in workspace {}", workspace_id);
|
|
||||||
} else {
|
|
||||||
for req in requests {
|
|
||||||
println!("{} - {} {}", req.id, req.method, req.name);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Commands::Send { request_id } => {
|
|
||||||
use yaak_actions::{
|
|
||||||
ActionExecutor, ActionId, ActionParams, ActionResult, ActionTarget, CurrentContext,
|
|
||||||
};
|
|
||||||
use yaak_actions_builtin::{BuiltinActionDependencies, register_http_actions};
|
|
||||||
|
|
||||||
// Create dependencies
|
|
||||||
let deps = BuiltinActionDependencies::new_standalone(
|
|
||||||
&db_path,
|
|
||||||
&blob_path,
|
|
||||||
&app_id,
|
|
||||||
vendored_plugin_dir.clone(),
|
|
||||||
installed_plugin_dir.clone(),
|
|
||||||
node_bin_path.clone(),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.expect("Failed to initialize dependencies");
|
|
||||||
|
|
||||||
// Create executor and register actions
|
|
||||||
let executor = ActionExecutor::new();
|
|
||||||
executor.register_builtin_groups().await.expect("Failed to register groups");
|
|
||||||
register_http_actions(&executor, &deps).await.expect("Failed to register HTTP actions");
|
|
||||||
|
|
||||||
// Prepare context
|
|
||||||
let context = CurrentContext {
|
|
||||||
target: Some(ActionTarget::HttpRequest { id: request_id.clone() }),
|
|
||||||
environment_id: cli.environment.clone(),
|
|
||||||
workspace_id: None,
|
|
||||||
has_window: false,
|
|
||||||
can_prompt: false,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Prepare params
|
|
||||||
let params = ActionParams {
|
|
||||||
data: serde_json::json!({
|
|
||||||
"render": true,
|
|
||||||
"follow_redirects": false,
|
|
||||||
"timeout_ms": 30000,
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Invoke action
|
|
||||||
let action_id = ActionId::builtin("http", "send-request");
|
|
||||||
let result = executor.invoke(&action_id, context, params).await.expect("Action failed");
|
|
||||||
|
|
||||||
// Handle result
|
|
||||||
match result {
|
|
||||||
ActionResult::Success { data, message } => {
|
|
||||||
if let Some(msg) = message {
|
|
||||||
println!("{}", msg);
|
|
||||||
}
|
|
||||||
if let Some(data) = data {
|
|
||||||
println!("{}", serde_json::to_string_pretty(&data).unwrap());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ActionResult::RequiresInput { .. } => {
|
|
||||||
eprintln!("Action requires input (not supported in CLI)");
|
|
||||||
}
|
|
||||||
ActionResult::Cancelled => {
|
|
||||||
eprintln!("Action cancelled");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Commands::Get { url } => {
|
|
||||||
if cli.verbose {
|
|
||||||
println!("> GET {}", url);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build a simple GET request
|
|
||||||
let sendable = SendableHttpRequest {
|
|
||||||
url: url.clone(),
|
|
||||||
method: "GET".to_string(),
|
|
||||||
headers: vec![],
|
|
||||||
body: None,
|
|
||||||
options: SendableHttpRequestOptions::default(),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Create event channel for progress
|
|
||||||
let (event_tx, mut event_rx) = mpsc::channel(100);
|
|
||||||
|
|
||||||
// Spawn task to print events if verbose
|
|
||||||
let verbose = cli.verbose;
|
|
||||||
let verbose_handle = if verbose {
|
|
||||||
Some(tokio::spawn(async move {
|
|
||||||
while let Some(event) = event_rx.recv().await {
|
|
||||||
println!("{}", event);
|
|
||||||
}
|
|
||||||
}))
|
|
||||||
} else {
|
|
||||||
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
// Send the request
|
|
||||||
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
|
|
||||||
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
|
|
||||||
|
|
||||||
if let Some(handle) = verbose_handle {
|
|
||||||
let _ = handle.await;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Print response
|
|
||||||
if verbose {
|
|
||||||
println!();
|
|
||||||
}
|
|
||||||
println!(
|
|
||||||
"HTTP {} {}",
|
|
||||||
response.status,
|
|
||||||
response.status_reason.as_deref().unwrap_or("")
|
|
||||||
);
|
|
||||||
|
|
||||||
if verbose {
|
|
||||||
for (name, value) in &response.headers {
|
|
||||||
println!("{}: {}", name, value);
|
|
||||||
}
|
|
||||||
println!();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Print body
|
|
||||||
let (body, _stats) = response.text().await.expect("Failed to read response body");
|
|
||||||
println!("{}", body);
|
|
||||||
}
|
|
||||||
Commands::Create { workspace_id, name, method, url } => {
|
|
||||||
let request = HttpRequest {
|
|
||||||
workspace_id,
|
|
||||||
name,
|
|
||||||
method: method.to_uppercase(),
|
|
||||||
url,
|
|
||||||
..Default::default()
|
|
||||||
};
|
|
||||||
|
|
||||||
let created = db
|
|
||||||
.upsert_http_request(&request, &UpdateSource::Sync)
|
|
||||||
.expect("Failed to create request");
|
|
||||||
|
|
||||||
println!("Created request: {}", created.id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Terminate plugin manager gracefully
|
|
||||||
plugin_manager.terminate().await;
|
|
||||||
}
|
}
|
||||||
|
|||||||
212
crates-cli/yaak-cli/src/plugin_events.rs
Normal file
212
crates-cli/yaak-cli/src/plugin_events.rs
Normal file
@@ -0,0 +1,212 @@
|
|||||||
|
use std::sync::Arc;
|
||||||
|
use tokio::task::JoinHandle;
|
||||||
|
use yaak::plugin_events::{
|
||||||
|
GroupedPluginEvent, HostRequest, SharedPluginEventContext, handle_shared_plugin_event,
|
||||||
|
};
|
||||||
|
use yaak_models::query_manager::QueryManager;
|
||||||
|
use yaak_plugins::events::{
|
||||||
|
EmptyPayload, ErrorResponse, InternalEvent, InternalEventPayload, ListOpenWorkspacesResponse,
|
||||||
|
WorkspaceInfo,
|
||||||
|
};
|
||||||
|
use yaak_plugins::manager::PluginManager;
|
||||||
|
|
||||||
|
pub struct CliPluginEventBridge {
|
||||||
|
rx_id: String,
|
||||||
|
task: JoinHandle<()>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CliPluginEventBridge {
|
||||||
|
pub async fn start(plugin_manager: Arc<PluginManager>, query_manager: QueryManager) -> Self {
|
||||||
|
let (rx_id, mut rx) = plugin_manager.subscribe("cli").await;
|
||||||
|
let rx_id_for_task = rx_id.clone();
|
||||||
|
let pm = plugin_manager.clone();
|
||||||
|
|
||||||
|
let task = tokio::spawn(async move {
|
||||||
|
while let Some(event) = rx.recv().await {
|
||||||
|
// Events with reply IDs are replies to app-originated requests.
|
||||||
|
if event.reply_id.is_some() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let Some(plugin_handle) = pm.get_plugin_by_ref_id(&event.plugin_ref_id).await
|
||||||
|
else {
|
||||||
|
eprintln!(
|
||||||
|
"Warning: Ignoring plugin event with unknown plugin ref '{}'",
|
||||||
|
event.plugin_ref_id
|
||||||
|
);
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
|
||||||
|
let plugin_name = plugin_handle.info().name;
|
||||||
|
let Some(reply_payload) = build_plugin_reply(&query_manager, &event, &plugin_name)
|
||||||
|
else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Err(err) = pm.reply(&event, &reply_payload).await {
|
||||||
|
eprintln!("Warning: Failed replying to plugin event: {err}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pm.unsubscribe(&rx_id_for_task).await;
|
||||||
|
});
|
||||||
|
|
||||||
|
Self { rx_id, task }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn shutdown(self, plugin_manager: &PluginManager) {
|
||||||
|
plugin_manager.unsubscribe(&self.rx_id).await;
|
||||||
|
self.task.abort();
|
||||||
|
let _ = self.task.await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn build_plugin_reply(
|
||||||
|
query_manager: &QueryManager,
|
||||||
|
event: &InternalEvent,
|
||||||
|
plugin_name: &str,
|
||||||
|
) -> Option<InternalEventPayload> {
|
||||||
|
match handle_shared_plugin_event(
|
||||||
|
query_manager,
|
||||||
|
&event.payload,
|
||||||
|
SharedPluginEventContext {
|
||||||
|
plugin_name,
|
||||||
|
workspace_id: event.context.workspace_id.as_deref(),
|
||||||
|
},
|
||||||
|
) {
|
||||||
|
GroupedPluginEvent::Handled(payload) => payload,
|
||||||
|
GroupedPluginEvent::ToHandle(host_request) => match host_request {
|
||||||
|
HostRequest::ErrorResponse(resp) => {
|
||||||
|
eprintln!("[plugin:{}] error: {}", plugin_name, resp.error);
|
||||||
|
None
|
||||||
|
}
|
||||||
|
HostRequest::ReloadResponse(_) => None,
|
||||||
|
HostRequest::ShowToast(req) => {
|
||||||
|
eprintln!("[plugin:{}] {}", plugin_name, req.message);
|
||||||
|
Some(InternalEventPayload::ShowToastResponse(EmptyPayload {}))
|
||||||
|
}
|
||||||
|
HostRequest::ListOpenWorkspaces(_) => {
|
||||||
|
let workspaces = match query_manager.connect().list_workspaces() {
|
||||||
|
Ok(workspaces) => workspaces
|
||||||
|
.into_iter()
|
||||||
|
.map(|w| WorkspaceInfo { id: w.id.clone(), name: w.name, label: w.id })
|
||||||
|
.collect(),
|
||||||
|
Err(err) => {
|
||||||
|
return Some(InternalEventPayload::ErrorResponse(ErrorResponse {
|
||||||
|
error: format!("Failed to list workspaces in CLI: {err}"),
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Some(InternalEventPayload::ListOpenWorkspacesResponse(ListOpenWorkspacesResponse {
|
||||||
|
workspaces,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
req => Some(InternalEventPayload::ErrorResponse(ErrorResponse {
|
||||||
|
error: format!("Unsupported plugin request in CLI: {}", req.type_name()),
|
||||||
|
})),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;
    use yaak_plugins::events::{GetKeyValueRequest, PluginContext, WindowInfoRequest};

    /// Stand up an isolated on-disk DB; the returned TempDir keeps it alive.
    fn query_manager_for_test() -> (QueryManager, TempDir) {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let (query_manager, _blob_manager, _rx) = yaak_models::init_standalone(
            &temp_dir.path().join("db.sqlite"),
            &temp_dir.path().join("blobs.sqlite"),
        )
        .expect("Failed to initialize DB");
        (query_manager, temp_dir)
    }

    /// Wrap a payload in a minimal plugin-originated event.
    fn event(payload: InternalEventPayload) -> InternalEvent {
        InternalEvent {
            id: "evt_1".to_string(),
            plugin_ref_id: "plugin_ref_1".to_string(),
            plugin_name: "@yaak/test-plugin".to_string(),
            reply_id: None,
            context: PluginContext::new_empty(),
            payload,
        }
    }

    #[test]
    fn key_value_requests_round_trip() {
        let (query_manager, _temp_dir) = query_manager_for_test();
        let plugin_name = "@yaak/test-plugin";

        // Helper: issue a GetKeyValue request for `key`.
        let get = |key: &str| {
            build_plugin_reply(
                &query_manager,
                &event(InternalEventPayload::GetKeyValueRequest(GetKeyValueRequest {
                    key: key.to_string(),
                })),
                plugin_name,
            )
        };

        // Reading an unset key yields a None value.
        match get("missing") {
            Some(InternalEventPayload::GetKeyValueResponse(r)) => assert_eq!(r.value, None),
            other => panic!("unexpected payload for missing get: {other:?}"),
        }

        let set = build_plugin_reply(
            &query_manager,
            &event(InternalEventPayload::SetKeyValueRequest(
                yaak_plugins::events::SetKeyValueRequest {
                    key: "token".to_string(),
                    value: "{\"access_token\":\"abc\"}".to_string(),
                },
            )),
            plugin_name,
        );
        assert!(matches!(set, Some(InternalEventPayload::SetKeyValueResponse(_))));

        // The stored value reads back verbatim.
        match get("token") {
            Some(InternalEventPayload::GetKeyValueResponse(r)) => {
                assert_eq!(r.value, Some("{\"access_token\":\"abc\"}".to_string()))
            }
            other => panic!("unexpected payload for present get: {other:?}"),
        }

        let delete = build_plugin_reply(
            &query_manager,
            &event(InternalEventPayload::DeleteKeyValueRequest(
                yaak_plugins::events::DeleteKeyValueRequest { key: "token".to_string() },
            )),
            plugin_name,
        );
        match delete {
            Some(InternalEventPayload::DeleteKeyValueResponse(r)) => assert!(r.deleted),
            other => panic!("unexpected payload for delete: {other:?}"),
        }
    }

    #[test]
    fn unsupported_request_gets_error_reply() {
        let (query_manager, _temp_dir) = query_manager_for_test();

        // Window-info requests make no sense without a GUI; the CLI must
        // reply with an error naming the offending request type.
        let payload = build_plugin_reply(
            &query_manager,
            &event(InternalEventPayload::WindowInfoRequest(WindowInfoRequest {
                label: "main".to_string(),
            })),
            "@yaak/test-plugin",
        );

        match payload {
            Some(InternalEventPayload::ErrorResponse(err)) => {
                assert!(err.error.contains("Unsupported plugin request in CLI"));
                assert!(err.error.contains("window_info_request"));
            }
            other => panic!("unexpected payload for unsupported request: {other:?}"),
        }
    }
}
|
||||||
16
crates-cli/yaak-cli/src/utils/confirm.rs
Normal file
16
crates-cli/yaak-cli/src/utils/confirm.rs
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
use std::io::{self, IsTerminal, Write};
|
||||||
|
|
||||||
|
/// Interactively confirm deletion of a resource.
///
/// When stdin is not a TTY there is no way to prompt, so the process exits
/// with status 1 (callers should pass `--yes` in non-interactive runs).
/// Returns `true` only for a "y"/"yes" answer, case-insensitively.
pub fn confirm_delete(resource_name: &str, resource_id: &str) -> bool {
    let stdin = io::stdin();
    if !stdin.is_terminal() {
        eprintln!("Refusing to delete in non-interactive mode without --yes");
        std::process::exit(1);
    }

    print!("Delete {resource_name} {resource_id}? [y/N]: ");
    // Flush so the prompt appears before we block on input.
    io::stdout().flush().expect("Failed to flush stdout");

    let mut answer = String::new();
    stdin.read_line(&mut answer).expect("Failed to read confirmation");

    let answer = answer.trim().to_lowercase();
    answer == "y" || answer == "yes"
}
|
||||||
107
crates-cli/yaak-cli/src/utils/json.rs
Normal file
107
crates-cli/yaak-cli/src/utils/json.rs
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
use serde::Serialize;
|
||||||
|
use serde::de::DeserializeOwned;
|
||||||
|
use serde_json::{Map, Value};
|
||||||
|
|
||||||
|
type JsonResult<T> = std::result::Result<T, String>;
|
||||||
|
|
||||||
|
/// Heuristic: treat an argument as inline JSON when, ignoring leading
/// whitespace, it begins with an object brace.
pub fn is_json_shorthand(input: &str) -> bool {
    matches!(input.trim_start().as_bytes().first(), Some(b'{'))
}
|
||||||
|
|
||||||
|
pub fn parse_json_object(raw: &str, context: &str) -> JsonResult<Value> {
|
||||||
|
let value: Value = serde_json::from_str(raw)
|
||||||
|
.map_err(|error| format!("Invalid JSON for {context}: {error}"))?;
|
||||||
|
|
||||||
|
if !value.is_object() {
|
||||||
|
return Err(format!("JSON payload for {context} must be an object"));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse_optional_json(
|
||||||
|
json_flag: Option<String>,
|
||||||
|
json_shorthand: Option<String>,
|
||||||
|
context: &str,
|
||||||
|
) -> JsonResult<Option<Value>> {
|
||||||
|
match (json_flag, json_shorthand) {
|
||||||
|
(Some(_), Some(_)) => {
|
||||||
|
Err(format!("Cannot provide both --json and positional JSON for {context}"))
|
||||||
|
}
|
||||||
|
(Some(raw), None) => parse_json_object(&raw, context).map(Some),
|
||||||
|
(None, Some(raw)) => parse_json_object(&raw, context).map(Some),
|
||||||
|
(None, None) => Ok(None),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse_required_json(
|
||||||
|
json_flag: Option<String>,
|
||||||
|
json_shorthand: Option<String>,
|
||||||
|
context: &str,
|
||||||
|
) -> JsonResult<Value> {
|
||||||
|
parse_optional_json(json_flag, json_shorthand, context)?
|
||||||
|
.ok_or_else(|| format!("Missing JSON payload for {context}. Use --json or positional JSON"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn require_id(payload: &Value, context: &str) -> JsonResult<String> {
|
||||||
|
payload
|
||||||
|
.get("id")
|
||||||
|
.and_then(|value| value.as_str())
|
||||||
|
.filter(|value| !value.is_empty())
|
||||||
|
.map(|value| value.to_string())
|
||||||
|
.ok_or_else(|| format!("{context} requires a non-empty \"id\" field"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn validate_create_id(payload: &Value, context: &str) -> JsonResult<()> {
|
||||||
|
let Some(id_value) = payload.get("id") else {
|
||||||
|
return Ok(());
|
||||||
|
};
|
||||||
|
|
||||||
|
match id_value {
|
||||||
|
Value::String(id) if id.is_empty() => Ok(()),
|
||||||
|
_ => Err(format!("{context} create JSON must omit \"id\" or set it to an empty string")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn apply_merge_patch<T>(existing: &T, patch: &Value, id: &str, context: &str) -> JsonResult<T>
|
||||||
|
where
|
||||||
|
T: Serialize + DeserializeOwned,
|
||||||
|
{
|
||||||
|
let mut base = serde_json::to_value(existing)
|
||||||
|
.map_err(|error| format!("Failed to serialize existing model for {context}: {error}"))?;
|
||||||
|
merge_patch(&mut base, patch);
|
||||||
|
|
||||||
|
let Some(base_object) = base.as_object_mut() else {
|
||||||
|
return Err(format!("Merged payload for {context} must be an object"));
|
||||||
|
};
|
||||||
|
base_object.insert("id".to_string(), Value::String(id.to_string()));
|
||||||
|
|
||||||
|
serde_json::from_value(base)
|
||||||
|
.map_err(|error| format!("Failed to deserialize merged payload for {context}: {error}"))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn merge_patch(target: &mut Value, patch: &Value) {
|
||||||
|
match patch {
|
||||||
|
Value::Object(patch_map) => {
|
||||||
|
if !target.is_object() {
|
||||||
|
*target = Value::Object(Map::new());
|
||||||
|
}
|
||||||
|
|
||||||
|
let target_map =
|
||||||
|
target.as_object_mut().expect("merge_patch target expected to be object");
|
||||||
|
|
||||||
|
for (key, patch_value) in patch_map {
|
||||||
|
if patch_value.is_null() {
|
||||||
|
target_map.remove(key);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let target_entry = target_map.entry(key.clone()).or_insert(Value::Null);
|
||||||
|
merge_patch(target_entry, patch_value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
*target = patch.clone();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
2
crates-cli/yaak-cli/src/utils/mod.rs
Normal file
2
crates-cli/yaak-cli/src/utils/mod.rs
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
pub mod confirm;
|
||||||
|
pub mod json;
|
||||||
42
crates-cli/yaak-cli/tests/common/http_server.rs
Normal file
42
crates-cli/yaak-cli/tests/common/http_server.rs
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
use std::io::{Read, Write};
|
||||||
|
use std::net::TcpListener;
|
||||||
|
use std::thread;
|
||||||
|
|
||||||
|
/// Minimal one-shot HTTP server for integration tests.
///
/// Binds an ephemeral localhost port, serves exactly one request with a
/// fixed 200 response, and joins its worker thread on drop.
pub struct TestHttpServer {
    // Full URL (http://127.0.0.1:<port>/test) clients should hit.
    pub url: String,
    // Worker thread serving the single connection; joined in Drop.
    handle: Option<thread::JoinHandle<()>>,
}

impl TestHttpServer {
    /// Start a server that answers its first connection with `200 OK` and
    /// the given plain-text `body`, then shuts down.
    pub fn spawn_ok(body: &'static str) -> Self {
        let listener = TcpListener::bind("127.0.0.1:0").expect("Failed to bind test HTTP server");
        let addr = listener.local_addr().expect("Failed to get local addr");
        let payload = body.as_bytes().to_vec();

        let handle = thread::spawn(move || {
            let Ok((mut stream, _)) = listener.accept() else { return };

            // Drain up to 4 KiB of the request; its contents are ignored.
            let mut scratch = [0u8; 4096];
            let _ = stream.read(&mut scratch);

            let head = format!(
                "HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\nContent-Length: {}\r\nConnection: close\r\n\r\n",
                payload.len()
            );
            let _ = stream.write_all(head.as_bytes());
            let _ = stream.write_all(&payload);
            let _ = stream.flush();
        });

        Self { url: format!("http://{addr}/test"), handle: Some(handle) }
    }
}

impl Drop for TestHttpServer {
    fn drop(&mut self) {
        // Wait for the worker so a test can't outlive a half-written response.
        if let Some(handle) = self.handle.take() {
            let _ = handle.join();
        }
    }
}
|
||||||
106
crates-cli/yaak-cli/tests/common/mod.rs
Normal file
106
crates-cli/yaak-cli/tests/common/mod.rs
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
#![allow(dead_code)]
|
||||||
|
|
||||||
|
pub mod http_server;
|
||||||
|
|
||||||
|
use assert_cmd::Command;
|
||||||
|
use assert_cmd::cargo::cargo_bin_cmd;
|
||||||
|
use std::path::Path;
|
||||||
|
use yaak_models::models::{Folder, GrpcRequest, HttpRequest, WebsocketRequest, Workspace};
|
||||||
|
use yaak_models::query_manager::QueryManager;
|
||||||
|
use yaak_models::util::UpdateSource;
|
||||||
|
|
||||||
|
pub fn cli_cmd(data_dir: &Path) -> Command {
|
||||||
|
let mut cmd = cargo_bin_cmd!("yaakcli");
|
||||||
|
cmd.arg("--data-dir").arg(data_dir);
|
||||||
|
cmd
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract the id from CLI output shaped like "Created <thing>: <id>".
///
/// Panics (failing the calling test) when the output has no ": " separator;
/// `label` names the command in the panic message.
pub fn parse_created_id(stdout: &[u8], label: &str) -> String {
    let text = String::from_utf8_lossy(stdout);
    match text.trim().split_once(": ") {
        Some((_, id)) => id.to_string(),
        None => panic!("Expected id in '{label}' output"),
    }
}
|
||||||
|
|
||||||
|
pub fn query_manager(data_dir: &Path) -> QueryManager {
|
||||||
|
let db_path = data_dir.join("db.sqlite");
|
||||||
|
let blob_path = data_dir.join("blobs.sqlite");
|
||||||
|
let (query_manager, _blob_manager, _rx) =
|
||||||
|
yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize DB");
|
||||||
|
query_manager
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn seed_workspace(data_dir: &Path, workspace_id: &str) {
|
||||||
|
let workspace = Workspace {
|
||||||
|
id: workspace_id.to_string(),
|
||||||
|
name: "Seed Workspace".to_string(),
|
||||||
|
description: "Seeded for integration tests".to_string(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
query_manager(data_dir)
|
||||||
|
.connect()
|
||||||
|
.upsert_workspace(&workspace, &UpdateSource::Sync)
|
||||||
|
.expect("Failed to seed workspace");
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn seed_request(data_dir: &Path, workspace_id: &str, request_id: &str) {
|
||||||
|
let request = HttpRequest {
|
||||||
|
id: request_id.to_string(),
|
||||||
|
workspace_id: workspace_id.to_string(),
|
||||||
|
name: "Seeded Request".to_string(),
|
||||||
|
method: "GET".to_string(),
|
||||||
|
url: "https://example.com".to_string(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
query_manager(data_dir)
|
||||||
|
.connect()
|
||||||
|
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||||
|
.expect("Failed to seed request");
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn seed_folder(data_dir: &Path, workspace_id: &str, folder_id: &str) {
|
||||||
|
let folder = Folder {
|
||||||
|
id: folder_id.to_string(),
|
||||||
|
workspace_id: workspace_id.to_string(),
|
||||||
|
name: "Seed Folder".to_string(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
query_manager(data_dir)
|
||||||
|
.connect()
|
||||||
|
.upsert_folder(&folder, &UpdateSource::Sync)
|
||||||
|
.expect("Failed to seed folder");
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn seed_grpc_request(data_dir: &Path, workspace_id: &str, request_id: &str) {
|
||||||
|
let request = GrpcRequest {
|
||||||
|
id: request_id.to_string(),
|
||||||
|
workspace_id: workspace_id.to_string(),
|
||||||
|
name: "Seeded gRPC Request".to_string(),
|
||||||
|
url: "https://example.com".to_string(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
query_manager(data_dir)
|
||||||
|
.connect()
|
||||||
|
.upsert_grpc_request(&request, &UpdateSource::Sync)
|
||||||
|
.expect("Failed to seed gRPC request");
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn seed_websocket_request(data_dir: &Path, workspace_id: &str, request_id: &str) {
|
||||||
|
let request = WebsocketRequest {
|
||||||
|
id: request_id.to_string(),
|
||||||
|
workspace_id: workspace_id.to_string(),
|
||||||
|
name: "Seeded WebSocket Request".to_string(),
|
||||||
|
url: "wss://example.com/socket".to_string(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
query_manager(data_dir)
|
||||||
|
.connect()
|
||||||
|
.upsert_websocket_request(&request, &UpdateSource::Sync)
|
||||||
|
.expect("Failed to seed WebSocket request");
|
||||||
|
}
|
||||||
80
crates-cli/yaak-cli/tests/environment_commands.rs
Normal file
80
crates-cli/yaak-cli/tests/environment_commands.rs
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
mod common;
|
||||||
|
|
||||||
|
use common::{cli_cmd, parse_created_id, query_manager, seed_workspace};
|
||||||
|
use predicates::str::contains;
|
||||||
|
use tempfile::TempDir;
|
||||||
|
|
||||||
|
#[test]
fn create_list_show_delete_round_trip() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");

    // Before creating anything, only the built-in global environment exists.
    cli_cmd(dir)
        .args(["environment", "list", "wk_test"])
        .assert()
        .success()
        .stdout(contains("Global Variables"));

    let created = cli_cmd(dir)
        .args(["environment", "create", "wk_test", "--name", "Production"])
        .assert()
        .success();
    let environment_id = parse_created_id(&created.get_output().stdout, "environment create");

    cli_cmd(dir)
        .args(["environment", "list", "wk_test"])
        .assert()
        .success()
        .stdout(contains(&environment_id))
        .stdout(contains("Production"));

    cli_cmd(dir)
        .args(["environment", "show", &environment_id])
        .assert()
        .success()
        .stdout(contains(format!("\"id\": \"{environment_id}\"")))
        .stdout(contains("\"parentModel\": \"environment\""));

    cli_cmd(dir)
        .args(["environment", "delete", &environment_id, "--yes"])
        .assert()
        .success()
        .stdout(contains(format!("Deleted environment: {environment_id}")));

    // Deleting via the CLI removes the row from the database too.
    assert!(query_manager(dir).connect().get_environment(&environment_id).is_err());
}

#[test]
fn json_create_and_update_merge_patch_round_trip() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");

    let created = cli_cmd(dir)
        .args([
            "environment",
            "create",
            r#"{"workspaceId":"wk_test","name":"Json Environment"}"#,
        ])
        .assert()
        .success();
    let environment_id = parse_created_id(&created.get_output().stdout, "environment create");

    // Merge-patch update: only the color changes; the name must survive.
    cli_cmd(dir)
        .args([
            "environment",
            "update",
            &format!(r##"{{"id":"{}","color":"#00ff00"}}"##, environment_id),
        ])
        .assert()
        .success()
        .stdout(contains(format!("Updated environment: {environment_id}")));

    cli_cmd(dir)
        .args(["environment", "show", &environment_id])
        .assert()
        .success()
        .stdout(contains("\"name\": \"Json Environment\""))
        .stdout(contains("\"color\": \"#00ff00\""));
}
|
||||||
74
crates-cli/yaak-cli/tests/folder_commands.rs
Normal file
74
crates-cli/yaak-cli/tests/folder_commands.rs
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
mod common;
|
||||||
|
|
||||||
|
use common::{cli_cmd, parse_created_id, query_manager, seed_workspace};
|
||||||
|
use predicates::str::contains;
|
||||||
|
use tempfile::TempDir;
|
||||||
|
|
||||||
|
#[test]
fn create_list_show_delete_round_trip() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");

    let created = cli_cmd(dir)
        .args(["folder", "create", "wk_test", "--name", "Auth"])
        .assert()
        .success();
    let folder_id = parse_created_id(&created.get_output().stdout, "folder create");

    cli_cmd(dir)
        .args(["folder", "list", "wk_test"])
        .assert()
        .success()
        .stdout(contains(&folder_id))
        .stdout(contains("Auth"));

    cli_cmd(dir)
        .args(["folder", "show", &folder_id])
        .assert()
        .success()
        .stdout(contains(format!("\"id\": \"{folder_id}\"")))
        .stdout(contains("\"workspaceId\": \"wk_test\""));

    cli_cmd(dir)
        .args(["folder", "delete", &folder_id, "--yes"])
        .assert()
        .success()
        .stdout(contains(format!("Deleted folder: {folder_id}")));

    // Deleting via the CLI removes the row from the database too.
    assert!(query_manager(dir).connect().get_folder(&folder_id).is_err());
}

#[test]
fn json_create_and_update_merge_patch_round_trip() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");

    let created = cli_cmd(dir)
        .args(["folder", "create", r#"{"workspaceId":"wk_test","name":"Json Folder"}"#])
        .assert()
        .success();
    let folder_id = parse_created_id(&created.get_output().stdout, "folder create");

    // Merge-patch update: a description is added; the name must survive.
    cli_cmd(dir)
        .args([
            "folder",
            "update",
            &format!(r#"{{"id":"{}","description":"Folder Description"}}"#, folder_id),
        ])
        .assert()
        .success()
        .stdout(contains(format!("Updated folder: {folder_id}")));

    cli_cmd(dir)
        .args(["folder", "show", &folder_id])
        .assert()
        .success()
        .stdout(contains("\"name\": \"Json Folder\""))
        .stdout(contains("\"description\": \"Folder Description\""));
}
|
||||||
224
crates-cli/yaak-cli/tests/request_commands.rs
Normal file
224
crates-cli/yaak-cli/tests/request_commands.rs
Normal file
@@ -0,0 +1,224 @@
|
|||||||
|
mod common;
|
||||||
|
|
||||||
|
use common::http_server::TestHttpServer;
|
||||||
|
use common::{
|
||||||
|
cli_cmd, parse_created_id, query_manager, seed_grpc_request, seed_request,
|
||||||
|
seed_websocket_request, seed_workspace,
|
||||||
|
};
|
||||||
|
use predicates::str::contains;
|
||||||
|
use tempfile::TempDir;
|
||||||
|
use yaak_models::models::HttpResponseState;
|
||||||
|
|
||||||
|
#[test]
fn show_and_delete_yes_round_trip() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");

    let created = cli_cmd(dir)
        .args([
            "request",
            "create",
            "wk_test",
            "--name",
            "Smoke Test",
            "--url",
            "https://example.com",
        ])
        .assert()
        .success();
    let request_id = parse_created_id(&created.get_output().stdout, "request create");

    cli_cmd(dir)
        .args(["request", "show", &request_id])
        .assert()
        .success()
        .stdout(contains(format!("\"id\": \"{request_id}\"")))
        .stdout(contains("\"workspaceId\": \"wk_test\""));

    cli_cmd(dir)
        .args(["request", "delete", &request_id, "--yes"])
        .assert()
        .success()
        .stdout(contains(format!("Deleted request: {request_id}")));

    // Deleting via the CLI removes the row from the database too.
    assert!(query_manager(dir).connect().get_http_request(&request_id).is_err());
}

#[test]
fn delete_without_yes_fails_in_non_interactive_mode() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");
    seed_request(dir, "wk_test", "rq_seed_delete_noninteractive");

    cli_cmd(dir)
        .args(["request", "delete", "rq_seed_delete_noninteractive"])
        .assert()
        .failure()
        .code(1)
        .stderr(contains("Refusing to delete in non-interactive mode without --yes"));

    // The refusal must leave the seeded request untouched.
    assert!(
        query_manager(dir).connect().get_http_request("rq_seed_delete_noninteractive").is_ok()
    );
}

#[test]
fn json_create_and_update_merge_patch_round_trip() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");

    let created = cli_cmd(dir)
        .args([
            "request",
            "create",
            r#"{"workspaceId":"wk_test","name":"Json Request","url":"https://example.com"}"#,
        ])
        .assert()
        .success();
    let request_id = parse_created_id(&created.get_output().stdout, "request create");

    cli_cmd(dir)
        .args([
            "request",
            "update",
            &format!(r#"{{"id":"{}","name":"Renamed Request"}}"#, request_id),
        ])
        .assert()
        .success()
        .stdout(contains(format!("Updated request: {request_id}")));

    // The merge patch renamed the request but kept the original URL.
    cli_cmd(dir)
        .args(["request", "show", &request_id])
        .assert()
        .success()
        .stdout(contains("\"name\": \"Renamed Request\""))
        .stdout(contains("\"url\": \"https://example.com\""));
}

#[test]
fn update_requires_id_in_json_payload() {
    let tmp = TempDir::new().expect("Failed to create temp dir");

    cli_cmd(tmp.path())
        .args(["request", "update", r#"{"name":"No ID"}"#])
        .assert()
        .failure()
        .stderr(contains("request update requires a non-empty \"id\" field"));
}

#[test]
fn create_allows_workspace_only_with_empty_defaults() {
    let tmp = TempDir::new().expect("Failed to create temp dir");
    let dir = tmp.path();
    seed_workspace(dir, "wk_test");

    let created = cli_cmd(dir).args(["request", "create", "wk_test"]).assert().success();
    let request_id = parse_created_id(&created.get_output().stdout, "request create");

    // Everything except the workspace id and method falls back to defaults.
    let request = query_manager(dir)
        .connect()
        .get_http_request(&request_id)
        .expect("Failed to load created request");
    assert_eq!(request.workspace_id, "wk_test");
    assert_eq!(request.method, "GET");
    assert_eq!(request.name, "");
    assert_eq!(request.url, "");
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn request_send_persists_response_body_and_events() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
|
||||||
|
let server = TestHttpServer::spawn_ok("hello from integration test");
|
||||||
|
|
||||||
|
let create_assert = cli_cmd(data_dir)
|
||||||
|
.args([
|
||||||
|
"request",
|
||||||
|
"create",
|
||||||
|
"wk_test",
|
||||||
|
"--name",
|
||||||
|
"Send Test",
|
||||||
|
"--url",
|
||||||
|
&server.url,
|
||||||
|
])
|
||||||
|
.assert()
|
||||||
|
.success();
|
||||||
|
let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["request", "send", &request_id])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains("HTTP 200 OK"))
|
||||||
|
.stdout(contains("hello from integration test"));
|
||||||
|
|
||||||
|
let qm = query_manager(data_dir);
|
||||||
|
let db = qm.connect();
|
||||||
|
let responses =
|
||||||
|
db.list_http_responses_for_request(&request_id, None).expect("Failed to load responses");
|
||||||
|
assert_eq!(responses.len(), 1, "expected exactly one persisted response");
|
||||||
|
|
||||||
|
let response = &responses[0];
|
||||||
|
assert_eq!(response.status, 200);
|
||||||
|
assert!(matches!(response.state, HttpResponseState::Closed));
|
||||||
|
assert!(response.error.is_none());
|
||||||
|
|
||||||
|
let body_path =
|
||||||
|
response.body_path.as_ref().expect("expected persisted response body path").to_string();
|
||||||
|
let body = std::fs::read_to_string(&body_path).expect("Failed to read response body file");
|
||||||
|
assert_eq!(body, "hello from integration test");
|
||||||
|
|
||||||
|
let events =
|
||||||
|
db.list_http_response_events(&response.id).expect("Failed to load response events");
|
||||||
|
assert!(!events.is_empty(), "expected at least one persisted response event");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn request_schema_http_outputs_json_schema() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["request", "schema", "http"])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains("\"type\": \"object\""))
|
||||||
|
.stdout(contains("\"authentication\""));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn request_send_grpc_returns_explicit_nyi_error() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
seed_grpc_request(data_dir, "wk_test", "gr_seed_nyi");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["request", "send", "gr_seed_nyi"])
|
||||||
|
.assert()
|
||||||
|
.failure()
|
||||||
|
.code(1)
|
||||||
|
.stderr(contains("gRPC request send is not implemented yet in yaak-cli"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn request_send_websocket_returns_explicit_nyi_error() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
seed_websocket_request(data_dir, "wk_test", "wr_seed_nyi");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["request", "send", "wr_seed_nyi"])
|
||||||
|
.assert()
|
||||||
|
.failure()
|
||||||
|
.code(1)
|
||||||
|
.stderr(contains("WebSocket request send is not implemented yet in yaak-cli"));
|
||||||
|
}
|
||||||
81
crates-cli/yaak-cli/tests/send_commands.rs
Normal file
81
crates-cli/yaak-cli/tests/send_commands.rs
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
mod common;
|
||||||
|
|
||||||
|
use common::http_server::TestHttpServer;
|
||||||
|
use common::{cli_cmd, query_manager, seed_folder, seed_workspace};
|
||||||
|
use predicates::str::contains;
|
||||||
|
use tempfile::TempDir;
|
||||||
|
use yaak_models::models::HttpRequest;
|
||||||
|
use yaak_models::util::UpdateSource;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn top_level_send_workspace_sends_http_requests_and_prints_summary() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
|
||||||
|
let server = TestHttpServer::spawn_ok("workspace bulk send");
|
||||||
|
let request = HttpRequest {
|
||||||
|
id: "rq_workspace_send".to_string(),
|
||||||
|
workspace_id: "wk_test".to_string(),
|
||||||
|
name: "Workspace Send".to_string(),
|
||||||
|
method: "GET".to_string(),
|
||||||
|
url: server.url.clone(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
query_manager(data_dir)
|
||||||
|
.connect()
|
||||||
|
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||||
|
.expect("Failed to seed workspace request");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["send", "wk_test"])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains("HTTP 200 OK"))
|
||||||
|
.stdout(contains("workspace bulk send"))
|
||||||
|
.stdout(contains("Send summary: 1 succeeded, 0 failed"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn top_level_send_folder_sends_http_requests_and_prints_summary() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
seed_workspace(data_dir, "wk_test");
|
||||||
|
seed_folder(data_dir, "wk_test", "fl_test");
|
||||||
|
|
||||||
|
let server = TestHttpServer::spawn_ok("folder bulk send");
|
||||||
|
let request = HttpRequest {
|
||||||
|
id: "rq_folder_send".to_string(),
|
||||||
|
workspace_id: "wk_test".to_string(),
|
||||||
|
folder_id: Some("fl_test".to_string()),
|
||||||
|
name: "Folder Send".to_string(),
|
||||||
|
method: "GET".to_string(),
|
||||||
|
url: server.url.clone(),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
query_manager(data_dir)
|
||||||
|
.connect()
|
||||||
|
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||||
|
.expect("Failed to seed folder request");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["send", "fl_test"])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains("HTTP 200 OK"))
|
||||||
|
.stdout(contains("folder bulk send"))
|
||||||
|
.stdout(contains("Send summary: 1 succeeded, 0 failed"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn top_level_send_unknown_id_fails_with_clear_error() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["send", "does_not_exist"])
|
||||||
|
.assert()
|
||||||
|
.failure()
|
||||||
|
.code(1)
|
||||||
|
.stderr(contains("Could not resolve ID 'does_not_exist' as request, folder, or workspace"));
|
||||||
|
}
|
||||||
59
crates-cli/yaak-cli/tests/workspace_commands.rs
Normal file
59
crates-cli/yaak-cli/tests/workspace_commands.rs
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
mod common;
|
||||||
|
|
||||||
|
use common::{cli_cmd, parse_created_id, query_manager};
|
||||||
|
use predicates::str::contains;
|
||||||
|
use tempfile::TempDir;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn create_show_delete_round_trip() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
|
||||||
|
let create_assert =
|
||||||
|
cli_cmd(data_dir).args(["workspace", "create", "--name", "WS One"]).assert().success();
|
||||||
|
let workspace_id = parse_created_id(&create_assert.get_output().stdout, "workspace create");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["workspace", "show", &workspace_id])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("\"id\": \"{workspace_id}\"")))
|
||||||
|
.stdout(contains("\"name\": \"WS One\""));
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["workspace", "delete", &workspace_id, "--yes"])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("Deleted workspace: {workspace_id}")));
|
||||||
|
|
||||||
|
assert!(query_manager(data_dir).connect().get_workspace(&workspace_id).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn json_create_and_update_merge_patch_round_trip() {
|
||||||
|
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||||
|
let data_dir = temp_dir.path();
|
||||||
|
|
||||||
|
let create_assert = cli_cmd(data_dir)
|
||||||
|
.args(["workspace", "create", r#"{"name":"Json Workspace"}"#])
|
||||||
|
.assert()
|
||||||
|
.success();
|
||||||
|
let workspace_id = parse_created_id(&create_assert.get_output().stdout, "workspace create");
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args([
|
||||||
|
"workspace",
|
||||||
|
"update",
|
||||||
|
&format!(r#"{{"id":"{}","description":"Updated via JSON"}}"#, workspace_id),
|
||||||
|
])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains(format!("Updated workspace: {workspace_id}")));
|
||||||
|
|
||||||
|
cli_cmd(data_dir)
|
||||||
|
.args(["workspace", "show", &workspace_id])
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(contains("\"name\": \"Json Workspace\""))
|
||||||
|
.stdout(contains("\"description\": \"Updated via JSON\""));
|
||||||
|
}
|
||||||
@@ -57,9 +57,11 @@ url = "2"
|
|||||||
tokio-util = { version = "0.7", features = ["codec"] }
|
tokio-util = { version = "0.7", features = ["codec"] }
|
||||||
ts-rs = { workspace = true }
|
ts-rs = { workspace = true }
|
||||||
uuid = "1.12.1"
|
uuid = "1.12.1"
|
||||||
|
yaak-api = { workspace = true }
|
||||||
yaak-common = { workspace = true }
|
yaak-common = { workspace = true }
|
||||||
yaak-tauri-utils = { workspace = true }
|
yaak-tauri-utils = { workspace = true }
|
||||||
yaak-core = { workspace = true }
|
yaak-core = { workspace = true }
|
||||||
|
yaak = { workspace = true }
|
||||||
yaak-crypto = { workspace = true }
|
yaak-crypto = { workspace = true }
|
||||||
yaak-fonts = { workspace = true }
|
yaak-fonts = { workspace = true }
|
||||||
yaak-git = { workspace = true }
|
yaak-git = { workspace = true }
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ use crate::PluginContextExt;
|
|||||||
use crate::error::Result;
|
use crate::error::Result;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use tauri::{AppHandle, Manager, Runtime, State, WebviewWindow, command};
|
use tauri::{AppHandle, Manager, Runtime, State, WebviewWindow, command};
|
||||||
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
|
|
||||||
use yaak_crypto::manager::EncryptionManager;
|
use yaak_crypto::manager::EncryptionManager;
|
||||||
use yaak_models::models::HttpRequestHeader;
|
use yaak_models::models::HttpRequestHeader;
|
||||||
use yaak_models::queries::workspaces::default_headers;
|
use yaak_models::queries::workspaces::default_headers;
|
||||||
@@ -23,20 +22,6 @@ impl<'a, R: Runtime, M: Manager<R>> EncryptionManagerExt<'a, R> for M {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[command]
|
|
||||||
pub(crate) async fn cmd_show_workspace_key<R: Runtime>(
|
|
||||||
window: WebviewWindow<R>,
|
|
||||||
workspace_id: &str,
|
|
||||||
) -> Result<()> {
|
|
||||||
let key = window.crypto().reveal_workspace_key(workspace_id)?;
|
|
||||||
window
|
|
||||||
.dialog()
|
|
||||||
.message(format!("Your workspace key is \n\n{}", key))
|
|
||||||
.kind(MessageDialogKind::Info)
|
|
||||||
.show(|_v| {});
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[command]
|
#[command]
|
||||||
pub(crate) async fn cmd_decrypt_template<R: Runtime>(
|
pub(crate) async fn cmd_decrypt_template<R: Runtime>(
|
||||||
window: WebviewWindow<R>,
|
window: WebviewWindow<R>,
|
||||||
|
|||||||
@@ -36,7 +36,7 @@ pub enum Error {
|
|||||||
PluginError(#[from] yaak_plugins::error::Error),
|
PluginError(#[from] yaak_plugins::error::Error),
|
||||||
|
|
||||||
#[error(transparent)]
|
#[error(transparent)]
|
||||||
TauriUtilsError(#[from] yaak_tauri_utils::error::Error),
|
ApiError(#[from] yaak_api::Error),
|
||||||
|
|
||||||
#[error(transparent)]
|
#[error(transparent)]
|
||||||
ClipboardError(#[from] tauri_plugin_clipboard_manager::Error),
|
ClipboardError(#[from] tauri_plugin_clipboard_manager::Error),
|
||||||
|
|||||||
@@ -9,8 +9,8 @@ use yaak_git::{
|
|||||||
BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult,
|
BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult,
|
||||||
PushResult, git_add, git_add_credential, git_add_remote, git_checkout_branch, git_clone,
|
PushResult, git_add, git_add_credential, git_add_remote, git_checkout_branch, git_clone,
|
||||||
git_commit, git_create_branch, git_delete_branch, git_delete_remote_branch, git_fetch_all,
|
git_commit, git_create_branch, git_delete_branch, git_delete_remote_branch, git_fetch_all,
|
||||||
git_init, git_log, git_merge_branch, git_pull, git_push, git_remotes, git_rename_branch,
|
git_init, git_log, git_merge_branch, git_pull, git_pull_force_reset, git_pull_merge, git_push,
|
||||||
git_rm_remote, git_status, git_unstage,
|
git_remotes, git_rename_branch, git_reset_changes, git_rm_remote, git_status, git_unstage,
|
||||||
};
|
};
|
||||||
|
|
||||||
// NOTE: All of these commands are async to prevent blocking work from locking up the UI
|
// NOTE: All of these commands are async to prevent blocking work from locking up the UI
|
||||||
@@ -89,6 +89,20 @@ pub async fn cmd_git_pull(dir: &Path) -> Result<PullResult> {
|
|||||||
Ok(git_pull(dir).await?)
|
Ok(git_pull(dir).await?)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[command]
|
||||||
|
pub async fn cmd_git_pull_force_reset(
|
||||||
|
dir: &Path,
|
||||||
|
remote: &str,
|
||||||
|
branch: &str,
|
||||||
|
) -> Result<PullResult> {
|
||||||
|
Ok(git_pull_force_reset(dir, remote, branch).await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[command]
|
||||||
|
pub async fn cmd_git_pull_merge(dir: &Path, remote: &str, branch: &str) -> Result<PullResult> {
|
||||||
|
Ok(git_pull_merge(dir, remote, branch).await?)
|
||||||
|
}
|
||||||
|
|
||||||
#[command]
|
#[command]
|
||||||
pub async fn cmd_git_add(dir: &Path, rela_paths: Vec<PathBuf>) -> Result<()> {
|
pub async fn cmd_git_add(dir: &Path, rela_paths: Vec<PathBuf>) -> Result<()> {
|
||||||
for path in rela_paths {
|
for path in rela_paths {
|
||||||
@@ -105,6 +119,11 @@ pub async fn cmd_git_unstage(dir: &Path, rela_paths: Vec<PathBuf>) -> Result<()>
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[command]
|
||||||
|
pub async fn cmd_git_reset_changes(dir: &Path) -> Result<()> {
|
||||||
|
Ok(git_reset_changes(dir).await?)
|
||||||
|
}
|
||||||
|
|
||||||
#[command]
|
#[command]
|
||||||
pub async fn cmd_git_add_credential(
|
pub async fn cmd_git_add_credential(
|
||||||
remote_url: &str,
|
remote_url: &str,
|
||||||
|
|||||||
@@ -3,45 +3,18 @@ use crate::error::Error::GenericError;
|
|||||||
use crate::error::Result;
|
use crate::error::Result;
|
||||||
use crate::models_ext::BlobManagerExt;
|
use crate::models_ext::BlobManagerExt;
|
||||||
use crate::models_ext::QueryManagerExt;
|
use crate::models_ext::QueryManagerExt;
|
||||||
use crate::render::render_http_request;
|
use log::warn;
|
||||||
use log::{debug, warn};
|
|
||||||
use std::pin::Pin;
|
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::sync::atomic::{AtomicI32, Ordering};
|
use std::time::Instant;
|
||||||
use std::time::{Duration, Instant};
|
|
||||||
use tauri::{AppHandle, Manager, Runtime, WebviewWindow};
|
use tauri::{AppHandle, Manager, Runtime, WebviewWindow};
|
||||||
use tokio::fs::{File, create_dir_all};
|
|
||||||
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWriteExt};
|
|
||||||
use tokio::sync::watch::Receiver;
|
use tokio::sync::watch::Receiver;
|
||||||
use tokio_util::bytes::Bytes;
|
use yaak::send::{SendHttpRequestWithPluginsParams, send_http_request_with_plugins};
|
||||||
use yaak_crypto::manager::EncryptionManager;
|
use yaak_crypto::manager::EncryptionManager;
|
||||||
use yaak_http::client::{
|
use yaak_http::manager::HttpConnectionManager;
|
||||||
HttpConnectionOptions, HttpConnectionProxySetting, HttpConnectionProxySettingAuth,
|
use yaak_models::models::{CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseState};
|
||||||
};
|
|
||||||
use yaak_http::cookies::CookieStore;
|
|
||||||
use yaak_http::manager::{CachedClient, HttpConnectionManager};
|
|
||||||
use yaak_http::sender::ReqwestSender;
|
|
||||||
use yaak_http::tee_reader::TeeReader;
|
|
||||||
use yaak_http::transaction::HttpTransaction;
|
|
||||||
use yaak_http::types::{
|
|
||||||
SendableBody, SendableHttpRequest, SendableHttpRequestOptions, append_query_params,
|
|
||||||
};
|
|
||||||
use yaak_models::blob_manager::BodyChunk;
|
|
||||||
use yaak_models::models::{
|
|
||||||
CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseHeader,
|
|
||||||
HttpResponseState, ProxySetting, ProxySettingAuth,
|
|
||||||
};
|
|
||||||
use yaak_models::util::UpdateSource;
|
use yaak_models::util::UpdateSource;
|
||||||
use yaak_plugins::events::{
|
use yaak_plugins::events::PluginContext;
|
||||||
CallHttpAuthenticationRequest, HttpHeader, PluginContext, RenderPurpose,
|
|
||||||
};
|
|
||||||
use yaak_plugins::manager::PluginManager;
|
use yaak_plugins::manager::PluginManager;
|
||||||
use yaak_plugins::template_callback::PluginTemplateCallback;
|
|
||||||
use yaak_templates::RenderOptions;
|
|
||||||
use yaak_tls::find_client_certificate;
|
|
||||||
|
|
||||||
/// Chunk size for storing request bodies (1MB)
|
|
||||||
const REQUEST_BODY_CHUNK_SIZE: usize = 1024 * 1024;
|
|
||||||
|
|
||||||
/// Context for managing response state during HTTP transactions.
|
/// Context for managing response state during HTTP transactions.
|
||||||
/// Handles both persisted responses (stored in DB) and ephemeral responses (in-memory only).
|
/// Handles both persisted responses (stored in DB) and ephemeral responses (in-memory only).
|
||||||
@@ -168,135 +141,30 @@ async fn send_http_request_inner<R: Runtime>(
|
|||||||
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
|
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
|
||||||
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
|
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
|
||||||
let connection_manager = app_handle.state::<HttpConnectionManager>();
|
let connection_manager = app_handle.state::<HttpConnectionManager>();
|
||||||
let settings = window.db().get_settings();
|
|
||||||
let workspace_id = &unrendered_request.workspace_id;
|
|
||||||
let folder_id = unrendered_request.folder_id.as_deref();
|
|
||||||
let environment_id = environment.map(|e| e.id);
|
let environment_id = environment.map(|e| e.id);
|
||||||
let workspace = window.db().get_workspace(workspace_id)?;
|
let cookie_jar_id = cookie_jar.as_ref().map(|jar| jar.id.clone());
|
||||||
let (resolved, auth_context_id) = resolve_http_request(window, unrendered_request)?;
|
|
||||||
let cb = PluginTemplateCallback::new(
|
|
||||||
plugin_manager.clone(),
|
|
||||||
encryption_manager.clone(),
|
|
||||||
&plugin_context,
|
|
||||||
RenderPurpose::Send,
|
|
||||||
);
|
|
||||||
let env_chain =
|
|
||||||
window.db().resolve_environments(&workspace.id, folder_id, environment_id.as_deref())?;
|
|
||||||
let request = render_http_request(&resolved, env_chain, &cb, &RenderOptions::throw()).await?;
|
|
||||||
|
|
||||||
// Build the sendable request using the new SendableHttpRequest type
|
let response_dir = app_handle.path().app_data_dir()?.join("responses");
|
||||||
let options = SendableHttpRequestOptions {
|
let result = send_http_request_with_plugins(SendHttpRequestWithPluginsParams {
|
||||||
follow_redirects: workspace.setting_follow_redirects,
|
query_manager: app_handle.db_manager().inner(),
|
||||||
timeout: if workspace.setting_request_timeout > 0 {
|
blob_manager: app_handle.blob_manager().inner(),
|
||||||
Some(Duration::from_millis(workspace.setting_request_timeout.unsigned_abs() as u64))
|
request: unrendered_request.clone(),
|
||||||
} else {
|
environment_id: environment_id.as_deref(),
|
||||||
None
|
update_source: response_ctx.update_source.clone(),
|
||||||
},
|
cookie_jar_id,
|
||||||
};
|
response_dir: &response_dir,
|
||||||
let mut sendable_request = SendableHttpRequest::from_http_request(&request, options).await?;
|
emit_events_to: None,
|
||||||
|
existing_response: Some(response_ctx.response().clone()),
|
||||||
debug!("Sending request to {} {}", sendable_request.method, sendable_request.url);
|
plugin_manager,
|
||||||
|
encryption_manager,
|
||||||
let proxy_setting = match settings.proxy {
|
|
||||||
None => HttpConnectionProxySetting::System,
|
|
||||||
Some(ProxySetting::Disabled) => HttpConnectionProxySetting::Disabled,
|
|
||||||
Some(ProxySetting::Enabled { http, https, auth, bypass, disabled }) => {
|
|
||||||
if disabled {
|
|
||||||
HttpConnectionProxySetting::System
|
|
||||||
} else {
|
|
||||||
HttpConnectionProxySetting::Enabled {
|
|
||||||
http,
|
|
||||||
https,
|
|
||||||
bypass,
|
|
||||||
auth: match auth {
|
|
||||||
None => None,
|
|
||||||
Some(ProxySettingAuth { user, password }) => {
|
|
||||||
Some(HttpConnectionProxySettingAuth { user, password })
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let client_certificate =
|
|
||||||
find_client_certificate(&sendable_request.url, &settings.client_certificates);
|
|
||||||
|
|
||||||
// Create cookie store if a cookie jar is specified
|
|
||||||
let maybe_cookie_store = match cookie_jar.clone() {
|
|
||||||
Some(CookieJar { id, .. }) => {
|
|
||||||
// NOTE: We need to refetch the cookie jar because a chained request might have
|
|
||||||
// updated cookies when we rendered the request.
|
|
||||||
let cj = window.db().get_cookie_jar(&id)?;
|
|
||||||
let cookie_store = CookieStore::from_cookies(cj.cookies.clone());
|
|
||||||
Some((cookie_store, cj))
|
|
||||||
}
|
|
||||||
None => None,
|
|
||||||
};
|
|
||||||
|
|
||||||
let cached_client = connection_manager
|
|
||||||
.get_client(&HttpConnectionOptions {
|
|
||||||
id: plugin_context.id.clone(),
|
|
||||||
validate_certificates: workspace.setting_validate_certificates,
|
|
||||||
proxy: proxy_setting,
|
|
||||||
client_certificate,
|
|
||||||
dns_overrides: workspace.setting_dns_overrides.clone(),
|
|
||||||
})
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
// Apply authentication to the request
|
|
||||||
apply_authentication(
|
|
||||||
&window,
|
|
||||||
&mut sendable_request,
|
|
||||||
&request,
|
|
||||||
auth_context_id,
|
|
||||||
&plugin_manager,
|
|
||||||
plugin_context,
|
plugin_context,
|
||||||
)
|
cancelled_rx: Some(cancelled_rx.clone()),
|
||||||
.await?;
|
connection_manager: Some(connection_manager.inner()),
|
||||||
|
})
|
||||||
|
.await
|
||||||
|
.map_err(|e| GenericError(e.to_string()))?;
|
||||||
|
|
||||||
let cookie_store = maybe_cookie_store.as_ref().map(|(cs, _)| cs.clone());
|
Ok(result.response)
|
||||||
let result = execute_transaction(
|
|
||||||
cached_client,
|
|
||||||
sendable_request,
|
|
||||||
response_ctx,
|
|
||||||
cancelled_rx.clone(),
|
|
||||||
cookie_store,
|
|
||||||
)
|
|
||||||
.await;
|
|
||||||
|
|
||||||
// Wait for blob writing to complete and check for errors
|
|
||||||
let final_result = match result {
|
|
||||||
Ok((response, maybe_blob_write_handle)) => {
|
|
||||||
// Check if blob writing failed
|
|
||||||
if let Some(handle) = maybe_blob_write_handle {
|
|
||||||
if let Ok(Err(e)) = handle.await {
|
|
||||||
// Update response with the storage error
|
|
||||||
let _ = response_ctx.update(|r| {
|
|
||||||
let error_msg =
|
|
||||||
format!("Request succeeded but failed to store request body: {}", e);
|
|
||||||
r.error = Some(match &r.error {
|
|
||||||
Some(existing) => format!("{}; {}", existing, error_msg),
|
|
||||||
None => error_msg,
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(response)
|
|
||||||
}
|
|
||||||
Err(e) => Err(e),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Persist cookies back to the database after the request completes
|
|
||||||
if let Some((cookie_store, mut cj)) = maybe_cookie_store {
|
|
||||||
let cookies = cookie_store.get_all_cookies();
|
|
||||||
cj.cookies = cookies;
|
|
||||||
if let Err(e) = window.db().upsert_cookie_jar(&cj, &UpdateSource::Background) {
|
|
||||||
warn!("Failed to persist cookies to database: {}", e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
final_result
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn resolve_http_request<R: Runtime>(
|
pub fn resolve_http_request<R: Runtime>(
|
||||||
@@ -315,395 +183,3 @@ pub fn resolve_http_request<R: Runtime>(
|
|||||||
|
|
||||||
Ok((new_request, authentication_context_id))
|
Ok((new_request, authentication_context_id))
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn execute_transaction<R: Runtime>(
|
|
||||||
cached_client: CachedClient,
|
|
||||||
mut sendable_request: SendableHttpRequest,
|
|
||||||
response_ctx: &mut ResponseContext<R>,
|
|
||||||
mut cancelled_rx: Receiver<bool>,
|
|
||||||
cookie_store: Option<CookieStore>,
|
|
||||||
) -> Result<(HttpResponse, Option<tauri::async_runtime::JoinHandle<Result<()>>>)> {
|
|
||||||
let app_handle = &response_ctx.app_handle.clone();
|
|
||||||
let response_id = response_ctx.response().id.clone();
|
|
||||||
let workspace_id = response_ctx.response().workspace_id.clone();
|
|
||||||
let is_persisted = response_ctx.is_persisted();
|
|
||||||
|
|
||||||
// Keep a reference to the resolver for DNS timing events
|
|
||||||
let resolver = cached_client.resolver.clone();
|
|
||||||
|
|
||||||
let sender = ReqwestSender::with_client(cached_client.client);
|
|
||||||
let transaction = match cookie_store {
|
|
||||||
Some(cs) => HttpTransaction::with_cookie_store(sender, cs),
|
|
||||||
None => HttpTransaction::new(sender),
|
|
||||||
};
|
|
||||||
let start = Instant::now();
|
|
||||||
|
|
||||||
// Capture request headers before sending
|
|
||||||
let request_headers: Vec<HttpResponseHeader> = sendable_request
|
|
||||||
.headers
|
|
||||||
.iter()
|
|
||||||
.map(|(name, value)| HttpResponseHeader { name: name.clone(), value: value.clone() })
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
// Update response with headers info
|
|
||||||
response_ctx.update(|r| {
|
|
||||||
r.url = sendable_request.url.clone();
|
|
||||||
r.request_headers = request_headers;
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// Create bounded channel for receiving events and spawn a task to store them in DB
|
|
||||||
// Buffer size of 100 events provides back pressure if DB writes are slow
|
|
||||||
let (event_tx, mut event_rx) =
|
|
||||||
tokio::sync::mpsc::channel::<yaak_http::sender::HttpResponseEvent>(100);
|
|
||||||
|
|
||||||
// Set the event sender on the DNS resolver so it can emit DNS timing events
|
|
||||||
resolver.set_event_sender(Some(event_tx.clone())).await;
|
|
||||||
|
|
||||||
// Shared state to capture DNS timing from the event processing task
|
|
||||||
let dns_elapsed = Arc::new(AtomicI32::new(0));
|
|
||||||
|
|
||||||
// Write events to DB in a task (only for persisted responses)
|
|
||||||
if is_persisted {
|
|
||||||
let response_id = response_id.clone();
|
|
||||||
let app_handle = app_handle.clone();
|
|
||||||
let update_source = response_ctx.update_source.clone();
|
|
||||||
let workspace_id = workspace_id.clone();
|
|
||||||
let dns_elapsed = dns_elapsed.clone();
|
|
||||||
tokio::spawn(async move {
|
|
||||||
while let Some(event) = event_rx.recv().await {
|
|
||||||
// Capture DNS timing when we see a DNS event
|
|
||||||
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
|
|
||||||
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
|
|
||||||
}
|
|
||||||
let db_event = HttpResponseEvent::new(&response_id, &workspace_id, event.into());
|
|
||||||
let _ = app_handle.db().upsert_http_response_event(&db_event, &update_source);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
// For ephemeral responses, just drain the events but still capture DNS timing
|
|
||||||
let dns_elapsed = dns_elapsed.clone();
|
|
||||||
tokio::spawn(async move {
|
|
||||||
while let Some(event) = event_rx.recv().await {
|
|
||||||
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
|
|
||||||
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
// Capture request body as it's sent (only for persisted responses)
|
|
||||||
let body_id = format!("{}.request", response_id);
|
|
||||||
let maybe_blob_write_handle = match sendable_request.body {
|
|
||||||
Some(SendableBody::Bytes(bytes)) => {
|
|
||||||
if is_persisted {
|
|
||||||
write_bytes_to_db_sync(response_ctx, &body_id, bytes.clone())?;
|
|
||||||
}
|
|
||||||
sendable_request.body = Some(SendableBody::Bytes(bytes));
|
|
||||||
None
|
|
||||||
}
|
|
||||||
Some(SendableBody::Stream(stream)) => {
|
|
||||||
// Wrap stream with TeeReader to capture data as it's read
|
|
||||||
// Use unbounded channel to ensure all data is captured without blocking the HTTP request
|
|
||||||
let (body_chunk_tx, body_chunk_rx) = tokio::sync::mpsc::unbounded_channel::<Vec<u8>>();
|
|
||||||
let tee_reader = TeeReader::new(stream, body_chunk_tx);
|
|
||||||
let pinned: Pin<Box<dyn AsyncRead + Send + 'static>> = Box::pin(tee_reader);
|
|
||||||
|
|
||||||
let handle = if is_persisted {
|
|
||||||
// Spawn task to write request body chunks to blob DB
|
|
||||||
let app_handle = app_handle.clone();
|
|
||||||
let response_id = response_id.clone();
|
|
||||||
let workspace_id = workspace_id.clone();
|
|
||||||
let body_id = body_id.clone();
|
|
||||||
let update_source = response_ctx.update_source.clone();
|
|
||||||
Some(tauri::async_runtime::spawn(async move {
|
|
||||||
write_stream_chunks_to_db(
|
|
||||||
app_handle,
|
|
||||||
&body_id,
|
|
||||||
&workspace_id,
|
|
||||||
&response_id,
|
|
||||||
&update_source,
|
|
||||||
body_chunk_rx,
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
}))
|
|
||||||
} else {
|
|
||||||
// For ephemeral responses, just drain the body chunks
|
|
||||||
tauri::async_runtime::spawn(async move {
|
|
||||||
let mut rx = body_chunk_rx;
|
|
||||||
while rx.recv().await.is_some() {}
|
|
||||||
});
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
sendable_request.body = Some(SendableBody::Stream(pinned));
|
|
||||||
handle
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
sendable_request.body = None;
|
|
||||||
None
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Execute the transaction with cancellation support
|
|
||||||
// This returns the response with headers, but body is not yet consumed
|
|
||||||
// Events (headers, settings, chunks) are sent through the channel
|
|
||||||
let mut http_response = transaction
|
|
||||||
.execute_with_cancellation(sendable_request, cancelled_rx.clone(), event_tx)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
// Prepare the response path before consuming the body
|
|
||||||
let body_path = if response_id.is_empty() {
|
|
||||||
// Ephemeral responses: use OS temp directory for automatic cleanup
|
|
||||||
let temp_dir = std::env::temp_dir().join("yaak-ephemeral-responses");
|
|
||||||
create_dir_all(&temp_dir).await?;
|
|
||||||
temp_dir.join(uuid::Uuid::new_v4().to_string())
|
|
||||||
} else {
|
|
||||||
// Persisted responses: use app data directory
|
|
||||||
let dir = app_handle.path().app_data_dir()?;
|
|
||||||
let base_dir = dir.join("responses");
|
|
||||||
create_dir_all(&base_dir).await?;
|
|
||||||
base_dir.join(&response_id)
|
|
||||||
};
|
|
||||||
|
|
||||||
// Extract metadata before consuming the body (headers are available immediately)
|
|
||||||
// Url might change, so update again
|
|
||||||
response_ctx.update(|r| {
|
|
||||||
r.body_path = Some(body_path.to_string_lossy().to_string());
|
|
||||||
r.elapsed_headers = start.elapsed().as_millis() as i32;
|
|
||||||
r.status = http_response.status as i32;
|
|
||||||
r.status_reason = http_response.status_reason.clone();
|
|
||||||
r.url = http_response.url.clone();
|
|
||||||
r.remote_addr = http_response.remote_addr.clone();
|
|
||||||
r.version = http_response.version.clone();
|
|
||||||
r.headers = http_response
|
|
||||||
.headers
|
|
||||||
.iter()
|
|
||||||
.map(|(name, value)| HttpResponseHeader { name: name.clone(), value: value.clone() })
|
|
||||||
.collect();
|
|
||||||
r.content_length = http_response.content_length.map(|l| l as i32);
|
|
||||||
r.state = HttpResponseState::Connected;
|
|
||||||
r.request_headers = http_response
|
|
||||||
.request_headers
|
|
||||||
.iter()
|
|
||||||
.map(|(n, v)| HttpResponseHeader { name: n.clone(), value: v.clone() })
|
|
||||||
.collect();
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// Get the body stream for manual consumption
|
|
||||||
let mut body_stream = http_response.into_body_stream()?;
|
|
||||||
|
|
||||||
// Open file for writing
|
|
||||||
let mut file = File::options()
|
|
||||||
.create(true)
|
|
||||||
.truncate(true)
|
|
||||||
.write(true)
|
|
||||||
.open(&body_path)
|
|
||||||
.await
|
|
||||||
.map_err(|e| GenericError(format!("Failed to open file: {}", e)))?;
|
|
||||||
|
|
||||||
// Stream body to file, with throttled DB updates to avoid excessive writes
|
|
||||||
let mut written_bytes: usize = 0;
|
|
||||||
let mut last_update_time = start;
|
|
||||||
let mut buf = [0u8; 8192];
|
|
||||||
|
|
||||||
// Throttle settings: update DB at most every 100ms
|
|
||||||
const UPDATE_INTERVAL_MS: u128 = 100;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
// Check for cancellation. If we already have headers/body, just close cleanly without error
|
|
||||||
if *cancelled_rx.borrow() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use select! to race between reading and cancellation, so cancellation is immediate
|
|
||||||
let read_result = tokio::select! {
|
|
||||||
biased;
|
|
||||||
_ = cancelled_rx.changed() => {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
result = body_stream.read(&mut buf) => result,
|
|
||||||
};
|
|
||||||
|
|
||||||
match read_result {
|
|
||||||
Ok(0) => break, // EOF
|
|
||||||
Ok(n) => {
|
|
||||||
file.write_all(&buf[..n])
|
|
||||||
.await
|
|
||||||
.map_err(|e| GenericError(format!("Failed to write to file: {}", e)))?;
|
|
||||||
file.flush()
|
|
||||||
.await
|
|
||||||
.map_err(|e| GenericError(format!("Failed to flush file: {}", e)))?;
|
|
||||||
written_bytes += n;
|
|
||||||
|
|
||||||
// Throttle DB updates: only update if enough time has passed
|
|
||||||
let now = Instant::now();
|
|
||||||
let elapsed_since_update = now.duration_since(last_update_time).as_millis();
|
|
||||||
|
|
||||||
if elapsed_since_update >= UPDATE_INTERVAL_MS {
|
|
||||||
response_ctx.update(|r| {
|
|
||||||
r.elapsed = start.elapsed().as_millis() as i32;
|
|
||||||
r.content_length = Some(written_bytes as i32);
|
|
||||||
})?;
|
|
||||||
last_update_time = now;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
return Err(GenericError(format!("Failed to read response body: {}", e)));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Final update with closed state and accurate byte count
|
|
||||||
response_ctx.update(|r| {
|
|
||||||
r.elapsed = start.elapsed().as_millis() as i32;
|
|
||||||
r.elapsed_dns = dns_elapsed.load(Ordering::SeqCst);
|
|
||||||
r.content_length = Some(written_bytes as i32);
|
|
||||||
r.state = HttpResponseState::Closed;
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// Clear the event sender from the resolver since this request is done
|
|
||||||
resolver.set_event_sender(None).await;
|
|
||||||
|
|
||||||
Ok((response_ctx.response().clone(), maybe_blob_write_handle))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn write_bytes_to_db_sync<R: Runtime>(
|
|
||||||
response_ctx: &mut ResponseContext<R>,
|
|
||||||
body_id: &str,
|
|
||||||
data: Bytes,
|
|
||||||
) -> Result<()> {
|
|
||||||
if data.is_empty() {
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Write in chunks if data is large
|
|
||||||
let mut offset = 0;
|
|
||||||
let mut chunk_index = 0;
|
|
||||||
while offset < data.len() {
|
|
||||||
let end = std::cmp::min(offset + REQUEST_BODY_CHUNK_SIZE, data.len());
|
|
||||||
let chunk_data = data.slice(offset..end).to_vec();
|
|
||||||
let chunk = BodyChunk::new(body_id, chunk_index, chunk_data);
|
|
||||||
response_ctx.app_handle.blobs().insert_chunk(&chunk)?;
|
|
||||||
offset = end;
|
|
||||||
chunk_index += 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update the response with the total request body size
|
|
||||||
response_ctx.update(|r| {
|
|
||||||
r.request_content_length = Some(data.len() as i32);
|
|
||||||
})?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn write_stream_chunks_to_db<R: Runtime>(
|
|
||||||
app_handle: AppHandle<R>,
|
|
||||||
body_id: &str,
|
|
||||||
workspace_id: &str,
|
|
||||||
response_id: &str,
|
|
||||||
update_source: &UpdateSource,
|
|
||||||
mut rx: tokio::sync::mpsc::UnboundedReceiver<Vec<u8>>,
|
|
||||||
) -> Result<()> {
|
|
||||||
let mut buffer = Vec::with_capacity(REQUEST_BODY_CHUNK_SIZE);
|
|
||||||
let mut chunk_index = 0;
|
|
||||||
let mut total_bytes: usize = 0;
|
|
||||||
|
|
||||||
while let Some(data) = rx.recv().await {
|
|
||||||
total_bytes += data.len();
|
|
||||||
buffer.extend_from_slice(&data);
|
|
||||||
|
|
||||||
// Flush when buffer reaches chunk size
|
|
||||||
while buffer.len() >= REQUEST_BODY_CHUNK_SIZE {
|
|
||||||
debug!("Writing chunk {chunk_index} to DB");
|
|
||||||
let chunk_data: Vec<u8> = buffer.drain(..REQUEST_BODY_CHUNK_SIZE).collect();
|
|
||||||
let chunk = BodyChunk::new(body_id, chunk_index, chunk_data);
|
|
||||||
app_handle.blobs().insert_chunk(&chunk)?;
|
|
||||||
app_handle.db().upsert_http_response_event(
|
|
||||||
&HttpResponseEvent::new(
|
|
||||||
response_id,
|
|
||||||
workspace_id,
|
|
||||||
yaak_http::sender::HttpResponseEvent::ChunkSent {
|
|
||||||
bytes: REQUEST_BODY_CHUNK_SIZE,
|
|
||||||
}
|
|
||||||
.into(),
|
|
||||||
),
|
|
||||||
update_source,
|
|
||||||
)?;
|
|
||||||
chunk_index += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Flush remaining data
|
|
||||||
if !buffer.is_empty() {
|
|
||||||
let chunk = BodyChunk::new(body_id, chunk_index, buffer);
|
|
||||||
debug!("Flushing remaining data {chunk_index} {}", chunk.data.len());
|
|
||||||
app_handle.blobs().insert_chunk(&chunk)?;
|
|
||||||
app_handle.db().upsert_http_response_event(
|
|
||||||
&HttpResponseEvent::new(
|
|
||||||
response_id,
|
|
||||||
workspace_id,
|
|
||||||
yaak_http::sender::HttpResponseEvent::ChunkSent { bytes: chunk.data.len() }.into(),
|
|
||||||
),
|
|
||||||
update_source,
|
|
||||||
)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update the response with the total request body size
|
|
||||||
app_handle.with_tx(|tx| {
|
|
||||||
debug!("Updating final body length {total_bytes}");
|
|
||||||
if let Ok(mut response) = tx.get_http_response(&response_id) {
|
|
||||||
response.request_content_length = Some(total_bytes as i32);
|
|
||||||
tx.update_http_response_if_id(&response, update_source)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn apply_authentication<R: Runtime>(
|
|
||||||
_window: &WebviewWindow<R>,
|
|
||||||
sendable_request: &mut SendableHttpRequest,
|
|
||||||
request: &HttpRequest,
|
|
||||||
auth_context_id: String,
|
|
||||||
plugin_manager: &PluginManager,
|
|
||||||
plugin_context: &PluginContext,
|
|
||||||
) -> Result<()> {
|
|
||||||
match &request.authentication_type {
|
|
||||||
None => {
|
|
||||||
// No authentication found. Not even inherited
|
|
||||||
}
|
|
||||||
Some(authentication_type) if authentication_type == "none" => {
|
|
||||||
// Explicitly no authentication
|
|
||||||
}
|
|
||||||
Some(authentication_type) => {
|
|
||||||
let req = CallHttpAuthenticationRequest {
|
|
||||||
context_id: format!("{:x}", md5::compute(auth_context_id)),
|
|
||||||
values: serde_json::from_value(serde_json::to_value(&request.authentication)?)?,
|
|
||||||
url: sendable_request.url.clone(),
|
|
||||||
method: sendable_request.method.clone(),
|
|
||||||
headers: sendable_request
|
|
||||||
.headers
|
|
||||||
.iter()
|
|
||||||
.map(|(name, value)| HttpHeader {
|
|
||||||
name: name.to_string(),
|
|
||||||
value: value.to_string(),
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
};
|
|
||||||
let plugin_result = plugin_manager
|
|
||||||
.call_http_authentication(plugin_context, &authentication_type, req)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
for header in plugin_result.set_headers.unwrap_or_default() {
|
|
||||||
sendable_request.insert_header((header.name, header.value));
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(params) = plugin_result.set_query_parameters {
|
|
||||||
let params = params.into_iter().map(|p| (p.name, p.value)).collect::<Vec<_>>();
|
|
||||||
sendable_request.url = append_query_params(&sendable_request.url, params);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -37,8 +37,8 @@ use yaak_grpc::{Code, ServiceDefinition, serialize_message};
|
|||||||
use yaak_mac_window::AppHandleMacWindowExt;
|
use yaak_mac_window::AppHandleMacWindowExt;
|
||||||
use yaak_models::models::{
|
use yaak_models::models::{
|
||||||
AnyModel, CookieJar, Environment, GrpcConnection, GrpcConnectionState, GrpcEvent,
|
AnyModel, CookieJar, Environment, GrpcConnection, GrpcConnectionState, GrpcEvent,
|
||||||
GrpcEventType, GrpcRequest, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseState,
|
GrpcEventType, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseState, Plugin,
|
||||||
Plugin, Workspace, WorkspaceMeta,
|
Workspace, WorkspaceMeta,
|
||||||
};
|
};
|
||||||
use yaak_models::util::{BatchUpsertResult, UpdateSource, get_workspace_export_resources};
|
use yaak_models::util::{BatchUpsertResult, UpdateSource, get_workspace_export_resources};
|
||||||
use yaak_plugins::events::{
|
use yaak_plugins::events::{
|
||||||
@@ -1096,7 +1096,8 @@ async fn cmd_get_http_authentication_config<R: Runtime>(
|
|||||||
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
|
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
|
||||||
let values_json: serde_json::Value = serde_json::to_value(&values)?;
|
let values_json: serde_json::Value = serde_json::to_value(&values)?;
|
||||||
let rendered_json =
|
let rendered_json =
|
||||||
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;
|
render_json_value(values_json, environment_chain, &cb, &RenderOptions::return_empty())
|
||||||
|
.await?;
|
||||||
|
|
||||||
// Convert back to HashMap<String, JsonPrimitive>
|
// Convert back to HashMap<String, JsonPrimitive>
|
||||||
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
|
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
|
||||||
@@ -1271,35 +1272,6 @@ async fn cmd_save_response<R: Runtime>(
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tauri::command]
|
|
||||||
async fn cmd_send_folder<R: Runtime>(
|
|
||||||
app_handle: AppHandle<R>,
|
|
||||||
window: WebviewWindow<R>,
|
|
||||||
environment_id: Option<String>,
|
|
||||||
cookie_jar_id: Option<String>,
|
|
||||||
folder_id: &str,
|
|
||||||
) -> YaakResult<()> {
|
|
||||||
let requests = app_handle.db().list_http_requests_for_folder_recursive(folder_id)?;
|
|
||||||
for request in requests {
|
|
||||||
let app_handle = app_handle.clone();
|
|
||||||
let window = window.clone();
|
|
||||||
let environment_id = environment_id.clone();
|
|
||||||
let cookie_jar_id = cookie_jar_id.clone();
|
|
||||||
tokio::spawn(async move {
|
|
||||||
let _ = cmd_send_http_request(
|
|
||||||
app_handle,
|
|
||||||
window,
|
|
||||||
environment_id.as_deref(),
|
|
||||||
cookie_jar_id.as_deref(),
|
|
||||||
request,
|
|
||||||
)
|
|
||||||
.await;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tauri::command]
|
#[tauri::command]
|
||||||
async fn cmd_send_http_request<R: Runtime>(
|
async fn cmd_send_http_request<R: Runtime>(
|
||||||
app_handle: AppHandle<R>,
|
app_handle: AppHandle<R>,
|
||||||
@@ -1396,27 +1368,6 @@ async fn cmd_install_plugin<R: Runtime>(
|
|||||||
Ok(plugin)
|
Ok(plugin)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tauri::command]
|
|
||||||
async fn cmd_create_grpc_request<R: Runtime>(
|
|
||||||
workspace_id: &str,
|
|
||||||
name: &str,
|
|
||||||
sort_priority: f64,
|
|
||||||
folder_id: Option<&str>,
|
|
||||||
app_handle: AppHandle<R>,
|
|
||||||
window: WebviewWindow<R>,
|
|
||||||
) -> YaakResult<GrpcRequest> {
|
|
||||||
Ok(app_handle.db().upsert_grpc_request(
|
|
||||||
&GrpcRequest {
|
|
||||||
workspace_id: workspace_id.to_string(),
|
|
||||||
name: name.to_string(),
|
|
||||||
folder_id: folder_id.map(|s| s.to_string()),
|
|
||||||
sort_priority,
|
|
||||||
..Default::default()
|
|
||||||
},
|
|
||||||
&UpdateSource::from_window_label(window.label()),
|
|
||||||
)?)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tauri::command]
|
#[tauri::command]
|
||||||
async fn cmd_reload_plugins<R: Runtime>(
|
async fn cmd_reload_plugins<R: Runtime>(
|
||||||
app_handle: AppHandle<R>,
|
app_handle: AppHandle<R>,
|
||||||
@@ -1679,7 +1630,6 @@ pub fn run() {
|
|||||||
cmd_call_folder_action,
|
cmd_call_folder_action,
|
||||||
cmd_call_grpc_request_action,
|
cmd_call_grpc_request_action,
|
||||||
cmd_check_for_updates,
|
cmd_check_for_updates,
|
||||||
cmd_create_grpc_request,
|
|
||||||
cmd_curl_to_request,
|
cmd_curl_to_request,
|
||||||
cmd_delete_all_grpc_connections,
|
cmd_delete_all_grpc_connections,
|
||||||
cmd_delete_all_http_responses,
|
cmd_delete_all_http_responses,
|
||||||
@@ -1713,7 +1663,6 @@ pub fn run() {
|
|||||||
cmd_save_response,
|
cmd_save_response,
|
||||||
cmd_send_ephemeral_request,
|
cmd_send_ephemeral_request,
|
||||||
cmd_send_http_request,
|
cmd_send_http_request,
|
||||||
cmd_send_folder,
|
|
||||||
cmd_template_function_config,
|
cmd_template_function_config,
|
||||||
cmd_template_function_summaries,
|
cmd_template_function_summaries,
|
||||||
cmd_template_tokens_to_string,
|
cmd_template_tokens_to_string,
|
||||||
@@ -1728,7 +1677,6 @@ pub fn run() {
|
|||||||
crate::commands::cmd_reveal_workspace_key,
|
crate::commands::cmd_reveal_workspace_key,
|
||||||
crate::commands::cmd_secure_template,
|
crate::commands::cmd_secure_template,
|
||||||
crate::commands::cmd_set_workspace_key,
|
crate::commands::cmd_set_workspace_key,
|
||||||
crate::commands::cmd_show_workspace_key,
|
|
||||||
//
|
//
|
||||||
// Models commands
|
// Models commands
|
||||||
models_ext::models_delete,
|
models_ext::models_delete,
|
||||||
@@ -1762,8 +1710,11 @@ pub fn run() {
|
|||||||
git_ext::cmd_git_fetch_all,
|
git_ext::cmd_git_fetch_all,
|
||||||
git_ext::cmd_git_push,
|
git_ext::cmd_git_push,
|
||||||
git_ext::cmd_git_pull,
|
git_ext::cmd_git_pull,
|
||||||
|
git_ext::cmd_git_pull_force_reset,
|
||||||
|
git_ext::cmd_git_pull_merge,
|
||||||
git_ext::cmd_git_add,
|
git_ext::cmd_git_add,
|
||||||
git_ext::cmd_git_unstage,
|
git_ext::cmd_git_unstage,
|
||||||
|
git_ext::cmd_git_reset_changes,
|
||||||
git_ext::cmd_git_add_credential,
|
git_ext::cmd_git_add_credential,
|
||||||
git_ext::cmd_git_remotes,
|
git_ext::cmd_git_remotes,
|
||||||
git_ext::cmd_git_add_remote,
|
git_ext::cmd_git_add_remote,
|
||||||
@@ -1777,14 +1728,7 @@ pub fn run() {
|
|||||||
plugins_ext::cmd_plugins_update_all,
|
plugins_ext::cmd_plugins_update_all,
|
||||||
//
|
//
|
||||||
// WebSocket commands
|
// WebSocket commands
|
||||||
ws_ext::cmd_ws_upsert_request,
|
|
||||||
ws_ext::cmd_ws_duplicate_request,
|
|
||||||
ws_ext::cmd_ws_delete_request,
|
|
||||||
ws_ext::cmd_ws_delete_connection,
|
|
||||||
ws_ext::cmd_ws_delete_connections,
|
ws_ext::cmd_ws_delete_connections,
|
||||||
ws_ext::cmd_ws_list_events,
|
|
||||||
ws_ext::cmd_ws_list_requests,
|
|
||||||
ws_ext::cmd_ws_list_connections,
|
|
||||||
ws_ext::cmd_ws_send,
|
ws_ext::cmd_ws_send,
|
||||||
ws_ext::cmd_ws_close,
|
ws_ext::cmd_ws_close,
|
||||||
ws_ext::cmd_ws_connect,
|
ws_ext::cmd_ws_connect,
|
||||||
|
|||||||
@@ -3,6 +3,9 @@
|
|||||||
//! This module provides the Tauri plugin initialization and extension traits
|
//! This module provides the Tauri plugin initialization and extension traits
|
||||||
//! that allow accessing QueryManager and BlobManager from Tauri's Manager types.
|
//! that allow accessing QueryManager and BlobManager from Tauri's Manager types.
|
||||||
|
|
||||||
|
use chrono::Utc;
|
||||||
|
use log::error;
|
||||||
|
use std::time::Duration;
|
||||||
use tauri::plugin::TauriPlugin;
|
use tauri::plugin::TauriPlugin;
|
||||||
use tauri::{Emitter, Manager, Runtime, State};
|
use tauri::{Emitter, Manager, Runtime, State};
|
||||||
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
|
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
|
||||||
@@ -13,6 +16,74 @@ use yaak_models::models::{AnyModel, GraphQlIntrospection, GrpcEvent, Settings, W
|
|||||||
use yaak_models::query_manager::QueryManager;
|
use yaak_models::query_manager::QueryManager;
|
||||||
use yaak_models::util::UpdateSource;
|
use yaak_models::util::UpdateSource;
|
||||||
|
|
||||||
|
const MODEL_CHANGES_RETENTION_HOURS: i64 = 1;
|
||||||
|
const MODEL_CHANGES_POLL_INTERVAL_MS: u64 = 1000;
|
||||||
|
const MODEL_CHANGES_POLL_BATCH_SIZE: usize = 200;
|
||||||
|
|
||||||
|
struct ModelChangeCursor {
|
||||||
|
created_at: String,
|
||||||
|
id: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ModelChangeCursor {
|
||||||
|
fn from_launch_time() -> Self {
|
||||||
|
Self {
|
||||||
|
created_at: Utc::now().naive_utc().format("%Y-%m-%d %H:%M:%S%.3f").to_string(),
|
||||||
|
id: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn drain_model_changes_batch<R: Runtime>(
|
||||||
|
query_manager: &QueryManager,
|
||||||
|
app_handle: &tauri::AppHandle<R>,
|
||||||
|
cursor: &mut ModelChangeCursor,
|
||||||
|
) -> bool {
|
||||||
|
let changes = match query_manager.connect().list_model_changes_since(
|
||||||
|
&cursor.created_at,
|
||||||
|
cursor.id,
|
||||||
|
MODEL_CHANGES_POLL_BATCH_SIZE,
|
||||||
|
) {
|
||||||
|
Ok(changes) => changes,
|
||||||
|
Err(err) => {
|
||||||
|
error!("Failed to poll model_changes rows: {err:?}");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if changes.is_empty() {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
let fetched_count = changes.len();
|
||||||
|
for change in changes {
|
||||||
|
cursor.created_at = change.created_at;
|
||||||
|
cursor.id = change.id;
|
||||||
|
|
||||||
|
// Local window-originated writes are forwarded immediately from the
|
||||||
|
// in-memory model event channel.
|
||||||
|
if matches!(change.payload.update_source, UpdateSource::Window { .. }) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if let Err(err) = app_handle.emit("model_write", change.payload) {
|
||||||
|
error!("Failed to emit model_write event: {err:?}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fetched_count == MODEL_CHANGES_POLL_BATCH_SIZE
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn run_model_change_poller<R: Runtime>(
|
||||||
|
query_manager: QueryManager,
|
||||||
|
app_handle: tauri::AppHandle<R>,
|
||||||
|
mut cursor: ModelChangeCursor,
|
||||||
|
) {
|
||||||
|
loop {
|
||||||
|
while drain_model_changes_batch(&query_manager, &app_handle, &mut cursor) {}
|
||||||
|
tokio::time::sleep(Duration::from_millis(MODEL_CHANGES_POLL_INTERVAL_MS)).await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Extension trait for accessing the QueryManager from Tauri Manager types.
|
/// Extension trait for accessing the QueryManager from Tauri Manager types.
|
||||||
pub trait QueryManagerExt<'a, R> {
|
pub trait QueryManagerExt<'a, R> {
|
||||||
fn db_manager(&'a self) -> State<'a, QueryManager>;
|
fn db_manager(&'a self) -> State<'a, QueryManager>;
|
||||||
@@ -262,14 +333,37 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let db = query_manager.connect();
|
||||||
|
if let Err(err) = db.prune_model_changes_older_than_hours(MODEL_CHANGES_RETENTION_HOURS)
|
||||||
|
{
|
||||||
|
error!("Failed to prune model_changes rows on startup: {err:?}");
|
||||||
|
}
|
||||||
|
// Only stream writes that happen after this app launch.
|
||||||
|
let cursor = ModelChangeCursor::from_launch_time();
|
||||||
|
|
||||||
|
let poll_query_manager = query_manager.clone();
|
||||||
|
|
||||||
app_handle.manage(query_manager);
|
app_handle.manage(query_manager);
|
||||||
app_handle.manage(blob_manager);
|
app_handle.manage(blob_manager);
|
||||||
|
|
||||||
// Forward model change events to the frontend
|
// Poll model_changes so all writers (including external CLI processes) update the UI.
|
||||||
let app_handle = app_handle.clone();
|
let app_handle_poll = app_handle.clone();
|
||||||
|
let query_manager = poll_query_manager;
|
||||||
|
tauri::async_runtime::spawn(async move {
|
||||||
|
run_model_change_poller(query_manager, app_handle_poll, cursor).await;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Fast path for local app writes initiated by frontend windows. This keeps the
|
||||||
|
// current sync-model UX snappy, while DB polling handles external writers (CLI).
|
||||||
|
let app_handle_local = app_handle.clone();
|
||||||
tauri::async_runtime::spawn(async move {
|
tauri::async_runtime::spawn(async move {
|
||||||
for payload in rx {
|
for payload in rx {
|
||||||
app_handle.emit("model_write", payload).unwrap();
|
if !matches!(payload.update_source, UpdateSource::Window { .. }) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if let Err(err) = app_handle_local.emit("model_write", payload) {
|
||||||
|
error!("Failed to emit local model_write event: {err:?}");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -8,9 +8,9 @@ use serde::{Deserialize, Serialize};
|
|||||||
use std::time::Instant;
|
use std::time::Instant;
|
||||||
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
|
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
|
||||||
use ts_rs::TS;
|
use ts_rs::TS;
|
||||||
|
use yaak_api::yaak_api_client;
|
||||||
use yaak_common::platform::get_os_str;
|
use yaak_common::platform::get_os_str;
|
||||||
use yaak_models::util::UpdateSource;
|
use yaak_models::util::UpdateSource;
|
||||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
|
||||||
|
|
||||||
// Check for updates every hour
|
// Check for updates every hour
|
||||||
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;
|
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;
|
||||||
@@ -101,7 +101,8 @@ impl YaakNotifier {
|
|||||||
let license_check = "disabled".to_string();
|
let license_check = "disabled".to_string();
|
||||||
|
|
||||||
let launch_info = get_or_upsert_launch_info(app_handle);
|
let launch_info = get_or_upsert_launch_info(app_handle);
|
||||||
let req = yaak_api_client(app_handle)?
|
let app_version = app_handle.package_info().version.to_string();
|
||||||
|
let req = yaak_api_client(&app_version)?
|
||||||
.request(Method::GET, "https://notify.yaak.app/notifications")
|
.request(Method::GET, "https://notify.yaak.app/notifications")
|
||||||
.query(&[
|
.query(&[
|
||||||
("version", &launch_info.current_version),
|
("version", &launch_info.current_version),
|
||||||
|
|||||||
@@ -12,21 +12,23 @@ use chrono::Utc;
|
|||||||
use cookie::Cookie;
|
use cookie::Cookie;
|
||||||
use log::error;
|
use log::error;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use tauri::{AppHandle, Emitter, Manager, Runtime};
|
use tauri::{AppHandle, Emitter, Listener, Manager, Runtime};
|
||||||
use tauri_plugin_clipboard_manager::ClipboardExt;
|
use tauri_plugin_clipboard_manager::ClipboardExt;
|
||||||
use tauri_plugin_opener::OpenerExt;
|
use tauri_plugin_opener::OpenerExt;
|
||||||
|
use yaak::plugin_events::{
|
||||||
|
GroupedPluginEvent, HostRequest, SharedPluginEventContext, handle_shared_plugin_event,
|
||||||
|
};
|
||||||
use yaak_crypto::manager::EncryptionManager;
|
use yaak_crypto::manager::EncryptionManager;
|
||||||
use yaak_models::models::{AnyModel, HttpResponse, Plugin};
|
use yaak_models::models::{AnyModel, HttpResponse, Plugin};
|
||||||
use yaak_models::queries::any_request::AnyRequest;
|
use yaak_models::queries::any_request::AnyRequest;
|
||||||
use yaak_models::util::UpdateSource;
|
use yaak_models::util::UpdateSource;
|
||||||
use yaak_plugins::error::Error::PluginErr;
|
use yaak_plugins::error::Error::PluginErr;
|
||||||
use yaak_plugins::events::{
|
use yaak_plugins::events::{
|
||||||
Color, DeleteKeyValueResponse, EmptyPayload, ErrorResponse, FindHttpResponsesResponse,
|
Color, EmptyPayload, ErrorResponse, FindHttpResponsesResponse, GetCookieValueResponse, Icon,
|
||||||
GetCookieValueResponse, GetHttpRequestByIdResponse, GetKeyValueResponse, Icon, InternalEvent,
|
InternalEvent, InternalEventPayload, ListCookieNamesResponse, ListOpenWorkspacesResponse,
|
||||||
InternalEventPayload, ListCookieNamesResponse, ListHttpRequestsResponse,
|
RenderGrpcRequestResponse, RenderHttpRequestResponse, SendHttpRequestResponse,
|
||||||
ListWorkspacesResponse, RenderGrpcRequestResponse, RenderHttpRequestResponse,
|
ShowToastRequest, TemplateRenderResponse, WindowInfoResponse, WindowNavigateEvent,
|
||||||
SendHttpRequestResponse, SetKeyValueResponse, ShowToastRequest, TemplateRenderResponse,
|
WorkspaceInfo,
|
||||||
WindowInfoResponse, WindowNavigateEvent, WorkspaceInfo,
|
|
||||||
};
|
};
|
||||||
use yaak_plugins::manager::PluginManager;
|
use yaak_plugins::manager::PluginManager;
|
||||||
use yaak_plugins::plugin_handle::PluginHandle;
|
use yaak_plugins::plugin_handle::PluginHandle;
|
||||||
@@ -41,27 +43,154 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
) -> Result<Option<InternalEventPayload>> {
|
) -> Result<Option<InternalEventPayload>> {
|
||||||
// log::debug!("Got event to app {event:?}");
|
// log::debug!("Got event to app {event:?}");
|
||||||
let plugin_context = event.context.to_owned();
|
let plugin_context = event.context.to_owned();
|
||||||
match event.clone().payload {
|
let plugin_name = plugin_handle.info().name;
|
||||||
InternalEventPayload::CopyTextRequest(req) => {
|
let fallback_workspace_id = plugin_context.workspace_id.clone().or_else(|| {
|
||||||
|
plugin_context
|
||||||
|
.label
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|label| app_handle.get_webview_window(label))
|
||||||
|
.and_then(|window| workspace_from_window(&window).map(|workspace| workspace.id))
|
||||||
|
});
|
||||||
|
|
||||||
|
match handle_shared_plugin_event(
|
||||||
|
app_handle.db_manager().inner(),
|
||||||
|
&event.payload,
|
||||||
|
SharedPluginEventContext {
|
||||||
|
plugin_name: &plugin_name,
|
||||||
|
workspace_id: fallback_workspace_id.as_deref(),
|
||||||
|
},
|
||||||
|
) {
|
||||||
|
GroupedPluginEvent::Handled(payload) => Ok(payload),
|
||||||
|
GroupedPluginEvent::ToHandle(host_request) => {
|
||||||
|
handle_host_plugin_request(
|
||||||
|
app_handle,
|
||||||
|
event,
|
||||||
|
plugin_handle,
|
||||||
|
&plugin_context,
|
||||||
|
host_request,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle_host_plugin_request<R: Runtime>(
|
||||||
|
app_handle: &AppHandle<R>,
|
||||||
|
event: &InternalEvent,
|
||||||
|
plugin_handle: &PluginHandle,
|
||||||
|
plugin_context: &yaak_plugins::events::PluginContext,
|
||||||
|
host_request: HostRequest<'_>,
|
||||||
|
) -> Result<Option<InternalEventPayload>> {
|
||||||
|
match host_request {
|
||||||
|
HostRequest::ErrorResponse(resp) => {
|
||||||
|
error!("Plugin error: {}: {:?}", resp.error, resp);
|
||||||
|
let toast_event = plugin_handle.build_event_to_send(
|
||||||
|
plugin_context,
|
||||||
|
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
|
||||||
|
message: format!(
|
||||||
|
"Plugin error from {}: {}",
|
||||||
|
plugin_handle.info().name,
|
||||||
|
resp.error
|
||||||
|
),
|
||||||
|
color: Some(Color::Danger),
|
||||||
|
timeout: Some(30000),
|
||||||
|
..Default::default()
|
||||||
|
}),
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
Box::pin(handle_plugin_event(app_handle, &toast_event, plugin_handle)).await
|
||||||
|
}
|
||||||
|
HostRequest::ReloadResponse(req) => {
|
||||||
|
let plugins = app_handle.db().list_plugins()?;
|
||||||
|
for plugin in plugins {
|
||||||
|
if plugin.directory != plugin_handle.dir {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let new_plugin = Plugin { updated_at: Utc::now().naive_utc(), ..plugin };
|
||||||
|
app_handle.db().upsert_plugin(&new_plugin, &UpdateSource::Plugin)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
if !req.silent {
|
||||||
|
let info = plugin_handle.info();
|
||||||
|
let toast_event = plugin_handle.build_event_to_send(
|
||||||
|
plugin_context,
|
||||||
|
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
|
||||||
|
message: format!("Reloaded plugin {}@{}", info.name, info.version),
|
||||||
|
icon: Some(Icon::Info),
|
||||||
|
timeout: Some(3000),
|
||||||
|
..Default::default()
|
||||||
|
}),
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
Box::pin(handle_plugin_event(app_handle, &toast_event, plugin_handle)).await
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
HostRequest::CopyText(req) => {
|
||||||
app_handle.clipboard().write_text(req.text.as_str())?;
|
app_handle.clipboard().write_text(req.text.as_str())?;
|
||||||
Ok(Some(InternalEventPayload::CopyTextResponse(EmptyPayload {})))
|
Ok(Some(InternalEventPayload::CopyTextResponse(EmptyPayload {})))
|
||||||
}
|
}
|
||||||
InternalEventPayload::ShowToastRequest(req) => {
|
HostRequest::ShowToast(req) => {
|
||||||
match plugin_context.label {
|
match &plugin_context.label {
|
||||||
Some(label) => app_handle.emit_to(label, "show_toast", req)?,
|
Some(label) => app_handle.emit_to(label, "show_toast", req)?,
|
||||||
None => app_handle.emit("show_toast", req)?,
|
None => app_handle.emit("show_toast", req)?,
|
||||||
};
|
};
|
||||||
Ok(Some(InternalEventPayload::ShowToastResponse(EmptyPayload {})))
|
Ok(Some(InternalEventPayload::ShowToastResponse(EmptyPayload {})))
|
||||||
}
|
}
|
||||||
InternalEventPayload::PromptTextRequest(_) => {
|
HostRequest::PromptText(_) => {
|
||||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||||
Ok(call_frontend(&window, event).await)
|
Ok(call_frontend(&window, event).await)
|
||||||
}
|
}
|
||||||
InternalEventPayload::PromptFormRequest(_) => {
|
HostRequest::PromptForm(_) => {
|
||||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||||
Ok(call_frontend(&window, event).await)
|
if event.reply_id.is_some() {
|
||||||
|
window.emit_to(window.label(), "plugin_event", event.clone())?;
|
||||||
|
Ok(None)
|
||||||
|
} else {
|
||||||
|
window.emit_to(window.label(), "plugin_event", event.clone()).unwrap();
|
||||||
|
|
||||||
|
let event_id = event.id.clone();
|
||||||
|
let plugin_handle = plugin_handle.clone();
|
||||||
|
let plugin_context = plugin_context.clone();
|
||||||
|
let window = window.clone();
|
||||||
|
|
||||||
|
tauri::async_runtime::spawn(async move {
|
||||||
|
let (tx, mut rx) = tokio::sync::mpsc::channel::<InternalEvent>(128);
|
||||||
|
|
||||||
|
let listener_id = window.listen(event_id, move |ev: tauri::Event| {
|
||||||
|
let resp: InternalEvent = serde_json::from_str(ev.payload()).unwrap();
|
||||||
|
let _ = tx.try_send(resp);
|
||||||
|
});
|
||||||
|
|
||||||
|
while let Some(resp) = rx.recv().await {
|
||||||
|
let is_done = matches!(
|
||||||
|
&resp.payload,
|
||||||
|
InternalEventPayload::PromptFormResponse(r) if r.done.unwrap_or(false)
|
||||||
|
);
|
||||||
|
|
||||||
|
let event_to_send = plugin_handle.build_event_to_send(
|
||||||
|
&plugin_context,
|
||||||
|
&resp.payload,
|
||||||
|
Some(resp.reply_id.unwrap_or_default()),
|
||||||
|
);
|
||||||
|
if let Err(e) = plugin_handle.send(&event_to_send).await {
|
||||||
|
log::warn!("Failed to forward form response to plugin: {:?}", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
if is_done {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
window.unlisten(listener_id);
|
||||||
|
});
|
||||||
|
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
InternalEventPayload::FindHttpResponsesRequest(req) => {
|
HostRequest::FindHttpResponses(req) => {
|
||||||
let http_responses = app_handle
|
let http_responses = app_handle
|
||||||
.db()
|
.db()
|
||||||
.list_http_responses_for_request(&req.request_id, req.limit.map(|l| l as u64))
|
.list_http_responses_for_request(&req.request_id, req.limit.map(|l| l as u64))
|
||||||
@@ -70,32 +199,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
http_responses,
|
http_responses,
|
||||||
})))
|
})))
|
||||||
}
|
}
|
||||||
InternalEventPayload::ListHttpRequestsRequest(req) => {
|
HostRequest::UpsertModel(req) => {
|
||||||
let w = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
|
||||||
let workspace = workspace_from_window(&w)
|
|
||||||
.ok_or(PluginErr("Failed to get workspace from window".into()))?;
|
|
||||||
|
|
||||||
let http_requests = if let Some(folder_id) = req.folder_id {
|
|
||||||
app_handle.db().list_http_requests_for_folder_recursive(&folder_id)?
|
|
||||||
} else {
|
|
||||||
app_handle.db().list_http_requests(&workspace.id)?
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(Some(InternalEventPayload::ListHttpRequestsResponse(ListHttpRequestsResponse {
|
|
||||||
http_requests,
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
InternalEventPayload::ListFoldersRequest(_req) => {
|
|
||||||
let w = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
|
||||||
let workspace = workspace_from_window(&w)
|
|
||||||
.ok_or(PluginErr("Failed to get workspace from window".into()))?;
|
|
||||||
let folders = app_handle.db().list_folders(&workspace.id)?;
|
|
||||||
|
|
||||||
Ok(Some(InternalEventPayload::ListFoldersResponse(
|
|
||||||
yaak_plugins::events::ListFoldersResponse { folders },
|
|
||||||
)))
|
|
||||||
}
|
|
||||||
InternalEventPayload::UpsertModelRequest(req) => {
|
|
||||||
use AnyModel::*;
|
use AnyModel::*;
|
||||||
let model = match &req.model {
|
let model = match &req.model {
|
||||||
HttpRequest(m) => {
|
HttpRequest(m) => {
|
||||||
@@ -123,7 +227,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
yaak_plugins::events::UpsertModelResponse { model },
|
yaak_plugins::events::UpsertModelResponse { model },
|
||||||
)))
|
)))
|
||||||
}
|
}
|
||||||
InternalEventPayload::DeleteModelRequest(req) => {
|
HostRequest::DeleteModel(req) => {
|
||||||
let model = match req.model.as_str() {
|
let model = match req.model.as_str() {
|
||||||
"http_request" => AnyModel::HttpRequest(
|
"http_request" => AnyModel::HttpRequest(
|
||||||
app_handle.db().delete_http_request_by_id(&req.id, &UpdateSource::Plugin)?,
|
app_handle.db().delete_http_request_by_id(&req.id, &UpdateSource::Plugin)?,
|
||||||
@@ -151,14 +255,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
yaak_plugins::events::DeleteModelResponse { model },
|
yaak_plugins::events::DeleteModelResponse { model },
|
||||||
)))
|
)))
|
||||||
}
|
}
|
||||||
InternalEventPayload::GetHttpRequestByIdRequest(req) => {
|
HostRequest::RenderGrpcRequest(req) => {
|
||||||
let http_request = app_handle.db().get_http_request(&req.id).ok();
|
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||||
Ok(Some(InternalEventPayload::GetHttpRequestByIdResponse(GetHttpRequestByIdResponse {
|
|
||||||
http_request,
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
InternalEventPayload::RenderGrpcRequestRequest(req) => {
|
|
||||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
|
||||||
|
|
||||||
let workspace =
|
let workspace =
|
||||||
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
||||||
@@ -173,8 +271,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
let cb = PluginTemplateCallback::new(
|
let cb = PluginTemplateCallback::new(
|
||||||
plugin_manager,
|
plugin_manager,
|
||||||
encryption_manager,
|
encryption_manager,
|
||||||
&plugin_context,
|
plugin_context,
|
||||||
req.purpose,
|
req.purpose.clone(),
|
||||||
);
|
);
|
||||||
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
|
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
|
||||||
let grpc_request =
|
let grpc_request =
|
||||||
@@ -183,8 +281,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
grpc_request,
|
grpc_request,
|
||||||
})))
|
})))
|
||||||
}
|
}
|
||||||
InternalEventPayload::RenderHttpRequestRequest(req) => {
|
HostRequest::RenderHttpRequest(req) => {
|
||||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||||
|
|
||||||
let workspace =
|
let workspace =
|
||||||
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
||||||
@@ -199,18 +297,18 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
let cb = PluginTemplateCallback::new(
|
let cb = PluginTemplateCallback::new(
|
||||||
plugin_manager,
|
plugin_manager,
|
||||||
encryption_manager,
|
encryption_manager,
|
||||||
&plugin_context,
|
plugin_context,
|
||||||
req.purpose,
|
req.purpose.clone(),
|
||||||
);
|
);
|
||||||
let opt = &RenderOptions { error_behavior: RenderErrorBehavior::Throw };
|
let opt = &RenderOptions { error_behavior: RenderErrorBehavior::Throw };
|
||||||
let http_request =
|
let http_request =
|
||||||
render_http_request(&req.http_request, environment_chain, &cb, &opt).await?;
|
render_http_request(&req.http_request, environment_chain, &cb, opt).await?;
|
||||||
Ok(Some(InternalEventPayload::RenderHttpRequestResponse(RenderHttpRequestResponse {
|
Ok(Some(InternalEventPayload::RenderHttpRequestResponse(RenderHttpRequestResponse {
|
||||||
http_request,
|
http_request,
|
||||||
})))
|
})))
|
||||||
}
|
}
|
||||||
InternalEventPayload::TemplateRenderRequest(req) => {
|
HostRequest::TemplateRender(req) => {
|
||||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||||
|
|
||||||
let workspace =
|
let workspace =
|
||||||
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
||||||
@@ -235,65 +333,16 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
let cb = PluginTemplateCallback::new(
|
let cb = PluginTemplateCallback::new(
|
||||||
plugin_manager,
|
plugin_manager,
|
||||||
encryption_manager,
|
encryption_manager,
|
||||||
&plugin_context,
|
plugin_context,
|
||||||
req.purpose,
|
req.purpose.clone(),
|
||||||
);
|
);
|
||||||
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
|
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
|
||||||
let data = render_json_value(req.data, environment_chain, &cb, &opt).await?;
|
let data = render_json_value(req.data.clone(), environment_chain, &cb, &opt).await?;
|
||||||
Ok(Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data })))
|
Ok(Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data })))
|
||||||
}
|
}
|
||||||
InternalEventPayload::ErrorResponse(resp) => {
|
HostRequest::SendHttpRequest(req) => {
|
||||||
error!("Plugin error: {}: {:?}", resp.error, resp);
|
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||||
let toast_event = plugin_handle.build_event_to_send(
|
let mut http_request = req.http_request.clone();
|
||||||
&plugin_context,
|
|
||||||
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
|
|
||||||
message: format!(
|
|
||||||
"Plugin error from {}: {}",
|
|
||||||
plugin_handle.info().name,
|
|
||||||
resp.error
|
|
||||||
),
|
|
||||||
color: Some(Color::Danger),
|
|
||||||
timeout: Some(30000),
|
|
||||||
..Default::default()
|
|
||||||
}),
|
|
||||||
None,
|
|
||||||
);
|
|
||||||
Box::pin(handle_plugin_event(app_handle, &toast_event, plugin_handle)).await
|
|
||||||
}
|
|
||||||
InternalEventPayload::ReloadResponse(req) => {
|
|
||||||
let plugins = app_handle.db().list_plugins()?;
|
|
||||||
for plugin in plugins {
|
|
||||||
if plugin.directory != plugin_handle.dir {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let new_plugin = Plugin {
|
|
||||||
updated_at: Utc::now().naive_utc(), // TODO: Add reloaded_at field to use instead
|
|
||||||
..plugin
|
|
||||||
};
|
|
||||||
app_handle.db().upsert_plugin(&new_plugin, &UpdateSource::Plugin)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if !req.silent {
|
|
||||||
let info = plugin_handle.info();
|
|
||||||
let toast_event = plugin_handle.build_event_to_send(
|
|
||||||
&plugin_context,
|
|
||||||
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
|
|
||||||
message: format!("Reloaded plugin {}@{}", info.name, info.version),
|
|
||||||
icon: Some(Icon::Info),
|
|
||||||
timeout: Some(3000),
|
|
||||||
..Default::default()
|
|
||||||
}),
|
|
||||||
None,
|
|
||||||
);
|
|
||||||
Box::pin(handle_plugin_event(app_handle, &toast_event, plugin_handle)).await
|
|
||||||
} else {
|
|
||||||
Ok(None)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
InternalEventPayload::SendHttpRequestRequest(req) => {
|
|
||||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
|
||||||
let mut http_request = req.http_request;
|
|
||||||
let workspace =
|
let workspace =
|
||||||
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
||||||
let cookie_jar = cookie_jar_from_window(&window);
|
let cookie_jar = cookie_jar_from_window(&window);
|
||||||
@@ -324,8 +373,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
&http_response,
|
&http_response,
|
||||||
environment,
|
environment,
|
||||||
cookie_jar,
|
cookie_jar,
|
||||||
&mut tokio::sync::watch::channel(false).1, // No-op cancel channel
|
&mut tokio::sync::watch::channel(false).1,
|
||||||
&plugin_context,
|
plugin_context,
|
||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
@@ -333,7 +382,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
http_response,
|
http_response,
|
||||||
})))
|
})))
|
||||||
}
|
}
|
||||||
InternalEventPayload::OpenWindowRequest(req) => {
|
HostRequest::OpenWindow(req) => {
|
||||||
let (navigation_tx, mut navigation_rx) = tokio::sync::mpsc::channel(128);
|
let (navigation_tx, mut navigation_rx) = tokio::sync::mpsc::channel(128);
|
||||||
let (close_tx, mut close_rx) = tokio::sync::mpsc::channel(128);
|
let (close_tx, mut close_rx) = tokio::sync::mpsc::channel(128);
|
||||||
let win_config = CreateWindowConfig {
|
let win_config = CreateWindowConfig {
|
||||||
@@ -348,7 +397,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
};
|
};
|
||||||
if let Err(e) = create_window(app_handle, win_config) {
|
if let Err(e) = create_window(app_handle, win_config) {
|
||||||
let error_event = plugin_handle.build_event_to_send(
|
let error_event = plugin_handle.build_event_to_send(
|
||||||
&plugin_context,
|
plugin_context,
|
||||||
&InternalEventPayload::ErrorResponse(ErrorResponse {
|
&InternalEventPayload::ErrorResponse(ErrorResponse {
|
||||||
error: format!("Failed to create window: {:?}", e),
|
error: format!("Failed to create window: {:?}", e),
|
||||||
}),
|
}),
|
||||||
@@ -366,7 +415,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
while let Some(url) = navigation_rx.recv().await {
|
while let Some(url) = navigation_rx.recv().await {
|
||||||
let url = url.to_string();
|
let url = url.to_string();
|
||||||
let event_to_send = plugin_handle.build_event_to_send(
|
let event_to_send = plugin_handle.build_event_to_send(
|
||||||
&plugin_context, // NOTE: Sending existing context on purpose here
|
&plugin_context,
|
||||||
&InternalEventPayload::WindowNavigateEvent(WindowNavigateEvent { url }),
|
&InternalEventPayload::WindowNavigateEvent(WindowNavigateEvent { url }),
|
||||||
Some(event_id.clone()),
|
Some(event_id.clone()),
|
||||||
);
|
);
|
||||||
@@ -380,7 +429,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
let plugin_handle = plugin_handle.clone();
|
let plugin_handle = plugin_handle.clone();
|
||||||
let plugin_context = plugin_context.clone();
|
let plugin_context = plugin_context.clone();
|
||||||
tauri::async_runtime::spawn(async move {
|
tauri::async_runtime::spawn(async move {
|
||||||
while let Some(_) = close_rx.recv().await {
|
while close_rx.recv().await.is_some() {
|
||||||
let event_to_send = plugin_handle.build_event_to_send(
|
let event_to_send = plugin_handle.build_event_to_send(
|
||||||
&plugin_context,
|
&plugin_context,
|
||||||
&InternalEventPayload::WindowCloseEvent,
|
&InternalEventPayload::WindowCloseEvent,
|
||||||
@@ -393,35 +442,33 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
|
|
||||||
Ok(None)
|
Ok(None)
|
||||||
}
|
}
|
||||||
InternalEventPayload::CloseWindowRequest(req) => {
|
HostRequest::CloseWindow(req) => {
|
||||||
if let Some(window) = app_handle.webview_windows().get(&req.label) {
|
if let Some(window) = app_handle.webview_windows().get(&req.label) {
|
||||||
window.close()?;
|
window.close()?;
|
||||||
}
|
}
|
||||||
Ok(None)
|
Ok(None)
|
||||||
}
|
}
|
||||||
InternalEventPayload::OpenExternalUrlRequest(req) => {
|
HostRequest::OpenExternalUrl(req) => {
|
||||||
app_handle.opener().open_url(&req.url, None::<&str>)?;
|
app_handle.opener().open_url(&req.url, None::<&str>)?;
|
||||||
Ok(Some(InternalEventPayload::OpenExternalUrlResponse(EmptyPayload {})))
|
Ok(Some(InternalEventPayload::OpenExternalUrlResponse(EmptyPayload {})))
|
||||||
}
|
}
|
||||||
InternalEventPayload::SetKeyValueRequest(req) => {
|
HostRequest::ListOpenWorkspaces(_) => {
|
||||||
let name = plugin_handle.info().name;
|
let mut workspaces = Vec::new();
|
||||||
app_handle.db().set_plugin_key_value(&name, &req.key, &req.value);
|
for (_, window) in app_handle.webview_windows() {
|
||||||
Ok(Some(InternalEventPayload::SetKeyValueResponse(SetKeyValueResponse {})))
|
if let Some(workspace) = workspace_from_window(&window) {
|
||||||
}
|
workspaces.push(WorkspaceInfo {
|
||||||
InternalEventPayload::GetKeyValueRequest(req) => {
|
id: workspace.id.clone(),
|
||||||
let name = plugin_handle.info().name;
|
name: workspace.name.clone(),
|
||||||
let value = app_handle.db().get_plugin_key_value(&name, &req.key).map(|v| v.value);
|
label: window.label().to_string(),
|
||||||
Ok(Some(InternalEventPayload::GetKeyValueResponse(GetKeyValueResponse { value })))
|
});
|
||||||
}
|
}
|
||||||
InternalEventPayload::DeleteKeyValueRequest(req) => {
|
}
|
||||||
let name = plugin_handle.info().name;
|
Ok(Some(InternalEventPayload::ListOpenWorkspacesResponse(ListOpenWorkspacesResponse {
|
||||||
let deleted = app_handle.db().delete_plugin_key_value(&name, &req.key)?;
|
workspaces,
|
||||||
Ok(Some(InternalEventPayload::DeleteKeyValueResponse(DeleteKeyValueResponse {
|
|
||||||
deleted,
|
|
||||||
})))
|
})))
|
||||||
}
|
}
|
||||||
InternalEventPayload::ListCookieNamesRequest(_req) => {
|
HostRequest::ListCookieNames(_) => {
|
||||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||||
let names = match cookie_jar_from_window(&window) {
|
let names = match cookie_jar_from_window(&window) {
|
||||||
None => Vec::new(),
|
None => Vec::new(),
|
||||||
Some(j) => j
|
Some(j) => j
|
||||||
@@ -434,8 +481,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
names,
|
names,
|
||||||
})))
|
})))
|
||||||
}
|
}
|
||||||
InternalEventPayload::GetCookieValueRequest(req) => {
|
HostRequest::GetCookieValue(req) => {
|
||||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||||
let value = match cookie_jar_from_window(&window) {
|
let value = match cookie_jar_from_window(&window) {
|
||||||
None => None,
|
None => None,
|
||||||
Some(j) => j.cookies.into_iter().find_map(|c| match Cookie::parse(c.raw_cookie) {
|
Some(j) => j.cookies.into_iter().find_map(|c| match Cookie::parse(c.raw_cookie) {
|
||||||
@@ -447,12 +494,11 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
};
|
};
|
||||||
Ok(Some(InternalEventPayload::GetCookieValueResponse(GetCookieValueResponse { value })))
|
Ok(Some(InternalEventPayload::GetCookieValueResponse(GetCookieValueResponse { value })))
|
||||||
}
|
}
|
||||||
InternalEventPayload::WindowInfoRequest(req) => {
|
HostRequest::WindowInfo(req) => {
|
||||||
let w = app_handle
|
let w = app_handle
|
||||||
.get_webview_window(&req.label)
|
.get_webview_window(&req.label)
|
||||||
.ok_or(PluginErr(format!("Failed to find window for {}", req.label)))?;
|
.ok_or(PluginErr(format!("Failed to find window for {}", req.label)))?;
|
||||||
|
|
||||||
// Actually look up the data so we never return an invalid ID
|
|
||||||
let environment_id = environment_from_window(&w).map(|m| m.id);
|
let environment_id = environment_from_window(&w).map(|m| m.id);
|
||||||
let workspace_id = workspace_from_window(&w).map(|m| m.id);
|
let workspace_id = workspace_from_window(&w).map(|m| m.id);
|
||||||
let request_id =
|
let request_id =
|
||||||
@@ -470,25 +516,13 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
|||||||
environment_id,
|
environment_id,
|
||||||
})))
|
})))
|
||||||
}
|
}
|
||||||
|
HostRequest::OtherRequest(req) => {
|
||||||
InternalEventPayload::ListWorkspacesRequest(_) => {
|
Ok(Some(InternalEventPayload::ErrorResponse(ErrorResponse {
|
||||||
let mut workspaces = Vec::new();
|
error: format!(
|
||||||
|
"Unsupported plugin request in app host handler: {}",
|
||||||
for (_, window) in app_handle.webview_windows() {
|
req.type_name()
|
||||||
if let Some(workspace) = workspace_from_window(&window) {
|
),
|
||||||
workspaces.push(WorkspaceInfo {
|
|
||||||
id: workspace.id.clone(),
|
|
||||||
name: workspace.name.clone(),
|
|
||||||
label: window.label().to_string(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Some(InternalEventPayload::ListWorkspacesResponse(ListWorkspacesResponse {
|
|
||||||
workspaces,
|
|
||||||
})))
|
})))
|
||||||
}
|
}
|
||||||
|
|
||||||
_ => Ok(None),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ use tauri::{
|
|||||||
};
|
};
|
||||||
use tokio::sync::Mutex;
|
use tokio::sync::Mutex;
|
||||||
use ts_rs::TS;
|
use ts_rs::TS;
|
||||||
|
use yaak_api::yaak_api_client;
|
||||||
use yaak_models::models::Plugin;
|
use yaak_models::models::Plugin;
|
||||||
use yaak_models::util::UpdateSource;
|
use yaak_models::util::UpdateSource;
|
||||||
use yaak_plugins::api::{
|
use yaak_plugins::api::{
|
||||||
@@ -31,7 +32,6 @@ use yaak_plugins::events::{Color, Icon, PluginContext, ShowToastRequest};
|
|||||||
use yaak_plugins::install::{delete_and_uninstall, download_and_install};
|
use yaak_plugins::install::{delete_and_uninstall, download_and_install};
|
||||||
use yaak_plugins::manager::PluginManager;
|
use yaak_plugins::manager::PluginManager;
|
||||||
use yaak_plugins::plugin_meta::get_plugin_meta;
|
use yaak_plugins::plugin_meta::get_plugin_meta;
|
||||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
|
||||||
|
|
||||||
static EXITING: AtomicBool = AtomicBool::new(false);
|
static EXITING: AtomicBool = AtomicBool::new(false);
|
||||||
|
|
||||||
@@ -72,7 +72,8 @@ impl PluginUpdater {
|
|||||||
|
|
||||||
info!("Checking for plugin updates");
|
info!("Checking for plugin updates");
|
||||||
|
|
||||||
let http_client = yaak_api_client(window.app_handle())?;
|
let app_version = window.app_handle().package_info().version.to_string();
|
||||||
|
let http_client = yaak_api_client(&app_version)?;
|
||||||
let plugins = window.app_handle().db().list_plugins()?;
|
let plugins = window.app_handle().db().list_plugins()?;
|
||||||
let updates = check_plugin_updates(&http_client, plugins.clone()).await?;
|
let updates = check_plugin_updates(&http_client, plugins.clone()).await?;
|
||||||
|
|
||||||
@@ -136,7 +137,8 @@ pub async fn cmd_plugins_search<R: Runtime>(
|
|||||||
app_handle: AppHandle<R>,
|
app_handle: AppHandle<R>,
|
||||||
query: &str,
|
query: &str,
|
||||||
) -> Result<PluginSearchResponse> {
|
) -> Result<PluginSearchResponse> {
|
||||||
let http_client = yaak_api_client(&app_handle)?;
|
let app_version = app_handle.package_info().version.to_string();
|
||||||
|
let http_client = yaak_api_client(&app_version)?;
|
||||||
Ok(search_plugins(&http_client, query).await?)
|
Ok(search_plugins(&http_client, query).await?)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -147,7 +149,8 @@ pub async fn cmd_plugins_install<R: Runtime>(
|
|||||||
version: Option<String>,
|
version: Option<String>,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
let plugin_manager = Arc::new((*window.state::<PluginManager>()).clone());
|
let plugin_manager = Arc::new((*window.state::<PluginManager>()).clone());
|
||||||
let http_client = yaak_api_client(window.app_handle())?;
|
let app_version = window.app_handle().package_info().version.to_string();
|
||||||
|
let http_client = yaak_api_client(&app_version)?;
|
||||||
let query_manager = window.state::<yaak_models::query_manager::QueryManager>();
|
let query_manager = window.state::<yaak_models::query_manager::QueryManager>();
|
||||||
let plugin_context = window.plugin_context();
|
let plugin_context = window.plugin_context();
|
||||||
download_and_install(
|
download_and_install(
|
||||||
@@ -177,7 +180,8 @@ pub async fn cmd_plugins_uninstall<R: Runtime>(
|
|||||||
pub async fn cmd_plugins_updates<R: Runtime>(
|
pub async fn cmd_plugins_updates<R: Runtime>(
|
||||||
app_handle: AppHandle<R>,
|
app_handle: AppHandle<R>,
|
||||||
) -> Result<PluginUpdatesResponse> {
|
) -> Result<PluginUpdatesResponse> {
|
||||||
let http_client = yaak_api_client(&app_handle)?;
|
let app_version = app_handle.package_info().version.to_string();
|
||||||
|
let http_client = yaak_api_client(&app_version)?;
|
||||||
let plugins = app_handle.db().list_plugins()?;
|
let plugins = app_handle.db().list_plugins()?;
|
||||||
Ok(check_plugin_updates(&http_client, plugins).await?)
|
Ok(check_plugin_updates(&http_client, plugins).await?)
|
||||||
}
|
}
|
||||||
@@ -186,7 +190,8 @@ pub async fn cmd_plugins_updates<R: Runtime>(
|
|||||||
pub async fn cmd_plugins_update_all<R: Runtime>(
|
pub async fn cmd_plugins_update_all<R: Runtime>(
|
||||||
window: WebviewWindow<R>,
|
window: WebviewWindow<R>,
|
||||||
) -> Result<Vec<PluginNameVersion>> {
|
) -> Result<Vec<PluginNameVersion>> {
|
||||||
let http_client = yaak_api_client(window.app_handle())?;
|
let app_version = window.app_handle().package_info().version.to_string();
|
||||||
|
let http_client = yaak_api_client(&app_version)?;
|
||||||
let plugins = window.db().list_plugins()?;
|
let plugins = window.db().list_plugins()?;
|
||||||
|
|
||||||
// Get list of available updates (already filtered to only registry plugins)
|
// Get list of available updates (already filtered to only registry plugins)
|
||||||
|
|||||||
@@ -1,10 +1,8 @@
|
|||||||
use log::info;
|
use log::info;
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
use yaak_http::path_placeholders::apply_path_placeholders;
|
pub use yaak::render::render_http_request;
|
||||||
use yaak_models::models::{
|
use yaak_models::models::{Environment, GrpcRequest, HttpRequestHeader};
|
||||||
Environment, GrpcRequest, HttpRequest, HttpRequestHeader, HttpUrlParameter,
|
|
||||||
};
|
|
||||||
use yaak_models::render::make_vars_hashmap;
|
use yaak_models::render::make_vars_hashmap;
|
||||||
use yaak_templates::{RenderOptions, TemplateCallback, parse_and_render, render_json_value_raw};
|
use yaak_templates::{RenderOptions, TemplateCallback, parse_and_render, render_json_value_raw};
|
||||||
|
|
||||||
@@ -38,6 +36,9 @@ pub async fn render_grpc_request<T: TemplateCallback>(
|
|||||||
|
|
||||||
let mut metadata = Vec::new();
|
let mut metadata = Vec::new();
|
||||||
for p in r.metadata.clone() {
|
for p in r.metadata.clone() {
|
||||||
|
if !p.enabled {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
metadata.push(HttpRequestHeader {
|
metadata.push(HttpRequestHeader {
|
||||||
enabled: p.enabled,
|
enabled: p.enabled,
|
||||||
name: parse_and_render(p.name.as_str(), vars, cb, &opt).await?,
|
name: parse_and_render(p.name.as_str(), vars, cb, &opt).await?,
|
||||||
@@ -82,82 +83,3 @@ pub async fn render_grpc_request<T: TemplateCallback>(
|
|||||||
|
|
||||||
Ok(GrpcRequest { url, metadata, authentication, ..r.to_owned() })
|
Ok(GrpcRequest { url, metadata, authentication, ..r.to_owned() })
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn render_http_request<T: TemplateCallback>(
|
|
||||||
r: &HttpRequest,
|
|
||||||
environment_chain: Vec<Environment>,
|
|
||||||
cb: &T,
|
|
||||||
opt: &RenderOptions,
|
|
||||||
) -> yaak_templates::error::Result<HttpRequest> {
|
|
||||||
let vars = &make_vars_hashmap(environment_chain);
|
|
||||||
|
|
||||||
let mut url_parameters = Vec::new();
|
|
||||||
for p in r.url_parameters.clone() {
|
|
||||||
if !p.enabled {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
url_parameters.push(HttpUrlParameter {
|
|
||||||
enabled: p.enabled,
|
|
||||||
name: parse_and_render(p.name.as_str(), vars, cb, &opt).await?,
|
|
||||||
value: parse_and_render(p.value.as_str(), vars, cb, &opt).await?,
|
|
||||||
id: p.id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut headers = Vec::new();
|
|
||||||
for p in r.headers.clone() {
|
|
||||||
if !p.enabled {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
headers.push(HttpRequestHeader {
|
|
||||||
enabled: p.enabled,
|
|
||||||
name: parse_and_render(p.name.as_str(), vars, cb, &opt).await?,
|
|
||||||
value: parse_and_render(p.value.as_str(), vars, cb, &opt).await?,
|
|
||||||
id: p.id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut body = BTreeMap::new();
|
|
||||||
for (k, v) in r.body.clone() {
|
|
||||||
body.insert(k, render_json_value_raw(v, vars, cb, &opt).await?);
|
|
||||||
}
|
|
||||||
|
|
||||||
let authentication = {
|
|
||||||
let mut disabled = false;
|
|
||||||
let mut auth = BTreeMap::new();
|
|
||||||
match r.authentication.get("disabled") {
|
|
||||||
Some(Value::Bool(true)) => {
|
|
||||||
disabled = true;
|
|
||||||
}
|
|
||||||
Some(Value::String(tmpl)) => {
|
|
||||||
disabled = parse_and_render(tmpl.as_str(), vars, cb, &opt)
|
|
||||||
.await
|
|
||||||
.unwrap_or_default()
|
|
||||||
.is_empty();
|
|
||||||
info!(
|
|
||||||
"Rendering authentication.disabled as a template: {disabled} from \"{tmpl}\""
|
|
||||||
);
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
if disabled {
|
|
||||||
auth.insert("disabled".to_string(), Value::Bool(true));
|
|
||||||
} else {
|
|
||||||
for (k, v) in r.authentication.clone() {
|
|
||||||
if k == "disabled" {
|
|
||||||
auth.insert(k, Value::Bool(false));
|
|
||||||
} else {
|
|
||||||
auth.insert(k, render_json_value_raw(v, vars, cb, &opt).await?);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
auth
|
|
||||||
};
|
|
||||||
|
|
||||||
let url = parse_and_render(r.url.clone().as_str(), vars, cb, &opt).await?;
|
|
||||||
|
|
||||||
// This doesn't fit perfectly with the concept of "rendering" but it kind of does
|
|
||||||
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
|
|
||||||
|
|
||||||
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -15,6 +15,9 @@ use ts_rs::TS;
|
|||||||
use yaak_models::util::generate_id;
|
use yaak_models::util::generate_id;
|
||||||
use yaak_plugins::manager::PluginManager;
|
use yaak_plugins::manager::PluginManager;
|
||||||
|
|
||||||
|
use url::Url;
|
||||||
|
use yaak_api::get_system_proxy_url;
|
||||||
|
|
||||||
use crate::error::Error::GenericError;
|
use crate::error::Error::GenericError;
|
||||||
use crate::is_dev;
|
use crate::is_dev;
|
||||||
|
|
||||||
@@ -87,8 +90,13 @@ impl YaakUpdater {
|
|||||||
info!("Checking for updates mode={} autodl={}", mode, auto_download);
|
info!("Checking for updates mode={} autodl={}", mode, auto_download);
|
||||||
|
|
||||||
let w = window.clone();
|
let w = window.clone();
|
||||||
let update_check_result = w
|
let mut updater_builder = w.updater_builder();
|
||||||
.updater_builder()
|
if let Some(proxy_url) = get_system_proxy_url() {
|
||||||
|
if let Ok(url) = Url::parse(&proxy_url) {
|
||||||
|
updater_builder = updater_builder.proxy(url);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let update_check_result = updater_builder
|
||||||
.on_before_exit(move || {
|
.on_before_exit(move || {
|
||||||
// Kill plugin manager before exit or NSIS installer will fail to replace sidecar
|
// Kill plugin manager before exit or NSIS installer will fail to replace sidecar
|
||||||
// while it's running.
|
// while it's running.
|
||||||
@@ -111,6 +119,7 @@ impl YaakUpdater {
|
|||||||
UpdateTrigger::User => "user",
|
UpdateTrigger::User => "user",
|
||||||
},
|
},
|
||||||
)?
|
)?
|
||||||
|
.header("X-Install-Mode", detect_install_mode().unwrap_or("unknown"))?
|
||||||
.build()?
|
.build()?
|
||||||
.check()
|
.check()
|
||||||
.await;
|
.await;
|
||||||
@@ -353,6 +362,22 @@ pub async fn download_update_idempotent<R: Runtime>(
|
|||||||
Ok(dl_path)
|
Ok(dl_path)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Detect the installer type so the update server can serve the correct artifact.
|
||||||
|
fn detect_install_mode() -> Option<&'static str> {
|
||||||
|
#[cfg(target_os = "windows")]
|
||||||
|
{
|
||||||
|
if let Ok(exe) = std::env::current_exe() {
|
||||||
|
let path = exe.to_string_lossy().to_lowercase();
|
||||||
|
if path.starts_with(r"c:\program files") {
|
||||||
|
return Some("nsis-machine");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return Some("nsis");
|
||||||
|
}
|
||||||
|
#[allow(unreachable_code)]
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
pub async fn install_update_maybe_download<R: Runtime>(
|
pub async fn install_update_maybe_download<R: Runtime>(
|
||||||
window: &WebviewWindow<R>,
|
window: &WebviewWindow<R>,
|
||||||
update: &Update,
|
update: &Update,
|
||||||
|
|||||||
@@ -8,11 +8,11 @@ use std::fs;
|
|||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use tauri::{AppHandle, Emitter, Manager, Runtime, Url};
|
use tauri::{AppHandle, Emitter, Manager, Runtime, Url};
|
||||||
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogKind};
|
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogKind};
|
||||||
|
use yaak_api::yaak_api_client;
|
||||||
use yaak_models::util::generate_id;
|
use yaak_models::util::generate_id;
|
||||||
use yaak_plugins::events::{Color, ShowToastRequest};
|
use yaak_plugins::events::{Color, ShowToastRequest};
|
||||||
use yaak_plugins::install::download_and_install;
|
use yaak_plugins::install::download_and_install;
|
||||||
use yaak_plugins::manager::PluginManager;
|
use yaak_plugins::manager::PluginManager;
|
||||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
|
||||||
|
|
||||||
pub(crate) async fn handle_deep_link<R: Runtime>(
|
pub(crate) async fn handle_deep_link<R: Runtime>(
|
||||||
app_handle: &AppHandle<R>,
|
app_handle: &AppHandle<R>,
|
||||||
@@ -46,7 +46,8 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
|
|||||||
|
|
||||||
let plugin_manager = Arc::new((*window.state::<PluginManager>()).clone());
|
let plugin_manager = Arc::new((*window.state::<PluginManager>()).clone());
|
||||||
let query_manager = app_handle.db_manager();
|
let query_manager = app_handle.db_manager();
|
||||||
let http_client = yaak_api_client(app_handle)?;
|
let app_version = app_handle.package_info().version.to_string();
|
||||||
|
let http_client = yaak_api_client(&app_version)?;
|
||||||
let plugin_context = window.plugin_context();
|
let plugin_context = window.plugin_context();
|
||||||
let pv = download_and_install(
|
let pv = download_and_install(
|
||||||
plugin_manager,
|
plugin_manager,
|
||||||
@@ -86,7 +87,8 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
|
|||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
let resp = yaak_api_client(app_handle)?.get(file_url).send().await?;
|
let app_version = app_handle.package_info().version.to_string();
|
||||||
|
let resp = yaak_api_client(&app_version)?.get(file_url).send().await?;
|
||||||
let json = resp.bytes().await?;
|
let json = resp.bytes().await?;
|
||||||
let p = app_handle
|
let p = app_handle
|
||||||
.path()
|
.path()
|
||||||
|
|||||||
@@ -162,11 +162,16 @@ pub(crate) fn create_window<R: Runtime>(
|
|||||||
"dev.reset_size" => webview_window
|
"dev.reset_size" => webview_window
|
||||||
.set_size(LogicalSize::new(DEFAULT_WINDOW_WIDTH, DEFAULT_WINDOW_HEIGHT))
|
.set_size(LogicalSize::new(DEFAULT_WINDOW_WIDTH, DEFAULT_WINDOW_HEIGHT))
|
||||||
.unwrap(),
|
.unwrap(),
|
||||||
"dev.reset_size_record" => {
|
"dev.reset_size_16x9" => {
|
||||||
let width = webview_window.outer_size().unwrap().width;
|
let width = webview_window.outer_size().unwrap().width;
|
||||||
let height = width * 9 / 16;
|
let height = width * 9 / 16;
|
||||||
webview_window.set_size(PhysicalSize::new(width, height)).unwrap()
|
webview_window.set_size(PhysicalSize::new(width, height)).unwrap()
|
||||||
}
|
}
|
||||||
|
"dev.reset_size_16x10" => {
|
||||||
|
let width = webview_window.outer_size().unwrap().width;
|
||||||
|
let height = width * 10 / 16;
|
||||||
|
webview_window.set_size(PhysicalSize::new(width, height)).unwrap()
|
||||||
|
}
|
||||||
"dev.refresh" => webview_window.eval("location.reload()").unwrap(),
|
"dev.refresh" => webview_window.eval("location.reload()").unwrap(),
|
||||||
"dev.generate_theme_css" => {
|
"dev.generate_theme_css" => {
|
||||||
w.emit("generate_theme_css", true).unwrap();
|
w.emit("generate_theme_css", true).unwrap();
|
||||||
|
|||||||
@@ -153,9 +153,11 @@ pub fn app_menu<R: Runtime>(app_handle: &AppHandle<R>) -> tauri::Result<Menu<R>>
|
|||||||
.build(app_handle)?,
|
.build(app_handle)?,
|
||||||
&MenuItemBuilder::with_id("dev.reset_size".to_string(), "Reset Size")
|
&MenuItemBuilder::with_id("dev.reset_size".to_string(), "Reset Size")
|
||||||
.build(app_handle)?,
|
.build(app_handle)?,
|
||||||
|
&MenuItemBuilder::with_id("dev.reset_size_16x9".to_string(), "Resize to 16x9")
|
||||||
|
.build(app_handle)?,
|
||||||
&MenuItemBuilder::with_id(
|
&MenuItemBuilder::with_id(
|
||||||
"dev.reset_size_record".to_string(),
|
"dev.reset_size_16x10".to_string(),
|
||||||
"Reset Size 16x9",
|
"Resize to 16x10",
|
||||||
)
|
)
|
||||||
.build(app_handle)?,
|
.build(app_handle)?,
|
||||||
&MenuItemBuilder::with_id(
|
&MenuItemBuilder::with_id(
|
||||||
|
|||||||
@@ -28,52 +28,6 @@ use yaak_templates::{RenderErrorBehavior, RenderOptions};
|
|||||||
use yaak_tls::find_client_certificate;
|
use yaak_tls::find_client_certificate;
|
||||||
use yaak_ws::{WebsocketManager, render_websocket_request};
|
use yaak_ws::{WebsocketManager, render_websocket_request};
|
||||||
|
|
||||||
#[command]
|
|
||||||
pub async fn cmd_ws_upsert_request<R: Runtime>(
|
|
||||||
request: WebsocketRequest,
|
|
||||||
app_handle: AppHandle<R>,
|
|
||||||
window: WebviewWindow<R>,
|
|
||||||
) -> Result<WebsocketRequest> {
|
|
||||||
Ok(app_handle
|
|
||||||
.db()
|
|
||||||
.upsert_websocket_request(&request, &UpdateSource::from_window_label(window.label()))?)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[command]
|
|
||||||
pub async fn cmd_ws_duplicate_request<R: Runtime>(
|
|
||||||
request_id: &str,
|
|
||||||
app_handle: AppHandle<R>,
|
|
||||||
window: WebviewWindow<R>,
|
|
||||||
) -> Result<WebsocketRequest> {
|
|
||||||
let db = app_handle.db();
|
|
||||||
let request = db.get_websocket_request(request_id)?;
|
|
||||||
Ok(db.duplicate_websocket_request(&request, &UpdateSource::from_window_label(window.label()))?)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[command]
|
|
||||||
pub async fn cmd_ws_delete_request<R: Runtime>(
|
|
||||||
request_id: &str,
|
|
||||||
app_handle: AppHandle<R>,
|
|
||||||
window: WebviewWindow<R>,
|
|
||||||
) -> Result<WebsocketRequest> {
|
|
||||||
Ok(app_handle.db().delete_websocket_request_by_id(
|
|
||||||
request_id,
|
|
||||||
&UpdateSource::from_window_label(window.label()),
|
|
||||||
)?)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[command]
|
|
||||||
pub async fn cmd_ws_delete_connection<R: Runtime>(
|
|
||||||
connection_id: &str,
|
|
||||||
app_handle: AppHandle<R>,
|
|
||||||
window: WebviewWindow<R>,
|
|
||||||
) -> Result<WebsocketConnection> {
|
|
||||||
Ok(app_handle.db().delete_websocket_connection_by_id(
|
|
||||||
connection_id,
|
|
||||||
&UpdateSource::from_window_label(window.label()),
|
|
||||||
)?)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[command]
|
#[command]
|
||||||
pub async fn cmd_ws_delete_connections<R: Runtime>(
|
pub async fn cmd_ws_delete_connections<R: Runtime>(
|
||||||
request_id: &str,
|
request_id: &str,
|
||||||
@@ -86,30 +40,6 @@ pub async fn cmd_ws_delete_connections<R: Runtime>(
|
|||||||
)?)
|
)?)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[command]
|
|
||||||
pub async fn cmd_ws_list_events<R: Runtime>(
|
|
||||||
connection_id: &str,
|
|
||||||
app_handle: AppHandle<R>,
|
|
||||||
) -> Result<Vec<WebsocketEvent>> {
|
|
||||||
Ok(app_handle.db().list_websocket_events(connection_id)?)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[command]
|
|
||||||
pub async fn cmd_ws_list_requests<R: Runtime>(
|
|
||||||
workspace_id: &str,
|
|
||||||
app_handle: AppHandle<R>,
|
|
||||||
) -> Result<Vec<WebsocketRequest>> {
|
|
||||||
Ok(app_handle.db().list_websocket_requests(workspace_id)?)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[command]
|
|
||||||
pub async fn cmd_ws_list_connections<R: Runtime>(
|
|
||||||
workspace_id: &str,
|
|
||||||
app_handle: AppHandle<R>,
|
|
||||||
) -> Result<Vec<WebsocketConnection>> {
|
|
||||||
Ok(app_handle.db().list_websocket_connections(workspace_id)?)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[command]
|
#[command]
|
||||||
pub async fn cmd_ws_send<R: Runtime>(
|
pub async fn cmd_ws_send<R: Runtime>(
|
||||||
connection_id: &str,
|
connection_id: &str,
|
||||||
|
|||||||
@@ -1,9 +1,6 @@
|
|||||||
{
|
{
|
||||||
"build": {
|
"build": {
|
||||||
"features": [
|
"features": ["updater", "license"]
|
||||||
"updater",
|
|
||||||
"license"
|
|
||||||
]
|
|
||||||
},
|
},
|
||||||
"app": {
|
"app": {
|
||||||
"security": {
|
"security": {
|
||||||
@@ -11,12 +8,8 @@
|
|||||||
"default",
|
"default",
|
||||||
{
|
{
|
||||||
"identifier": "release",
|
"identifier": "release",
|
||||||
"windows": [
|
"windows": ["*"],
|
||||||
"*"
|
"permissions": ["yaak-license:default"]
|
||||||
],
|
|
||||||
"permissions": [
|
|
||||||
"yaak-license:default"
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
@@ -39,14 +32,7 @@
|
|||||||
"createUpdaterArtifacts": true,
|
"createUpdaterArtifacts": true,
|
||||||
"longDescription": "A cross-platform desktop app for interacting with REST, GraphQL, and gRPC",
|
"longDescription": "A cross-platform desktop app for interacting with REST, GraphQL, and gRPC",
|
||||||
"shortDescription": "Play with APIs, intuitively",
|
"shortDescription": "Play with APIs, intuitively",
|
||||||
"targets": [
|
"targets": ["app", "appimage", "deb", "dmg", "nsis", "rpm"],
|
||||||
"app",
|
|
||||||
"appimage",
|
|
||||||
"deb",
|
|
||||||
"dmg",
|
|
||||||
"nsis",
|
|
||||||
"rpm"
|
|
||||||
],
|
|
||||||
"macOS": {
|
"macOS": {
|
||||||
"minimumSystemVersion": "13.0",
|
"minimumSystemVersion": "13.0",
|
||||||
"exceptionDomain": "",
|
"exceptionDomain": "",
|
||||||
@@ -58,10 +44,16 @@
|
|||||||
},
|
},
|
||||||
"linux": {
|
"linux": {
|
||||||
"deb": {
|
"deb": {
|
||||||
"desktopTemplate": "./template.desktop"
|
"desktopTemplate": "./template.desktop",
|
||||||
|
"files": {
|
||||||
|
"/usr/share/metainfo/app.yaak.Yaak.metainfo.xml": "../../flatpak/app.yaak.Yaak.metainfo.xml"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"rpm": {
|
"rpm": {
|
||||||
"desktopTemplate": "./template.desktop"
|
"desktopTemplate": "./template.desktop",
|
||||||
|
"files": {
|
||||||
|
"/usr/share/metainfo/app.yaak.Yaak.metainfo.xml": "../../flatpak/app.yaak.Yaak.metainfo.xml"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ thiserror = { workspace = true }
|
|||||||
ts-rs = { workspace = true }
|
ts-rs = { workspace = true }
|
||||||
yaak-common = { workspace = true }
|
yaak-common = { workspace = true }
|
||||||
yaak-models = { workspace = true }
|
yaak-models = { workspace = true }
|
||||||
yaak-tauri-utils = { workspace = true }
|
yaak-api = { workspace = true }
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
tauri-plugin = { workspace = true, features = ["build"] }
|
tauri-plugin = { workspace = true, features = ["build"] }
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ pub enum Error {
|
|||||||
ModelError(#[from] yaak_models::error::Error),
|
ModelError(#[from] yaak_models::error::Error),
|
||||||
|
|
||||||
#[error(transparent)]
|
#[error(transparent)]
|
||||||
TauriUtilsError(#[from] yaak_tauri_utils::error::Error),
|
ApiError(#[from] yaak_api::Error),
|
||||||
|
|
||||||
#[error("Internal server error")]
|
#[error("Internal server error")]
|
||||||
ServerError,
|
ServerError,
|
||||||
|
|||||||
@@ -7,11 +7,11 @@ use std::ops::Add;
|
|||||||
use std::time::Duration;
|
use std::time::Duration;
|
||||||
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow, is_dev};
|
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow, is_dev};
|
||||||
use ts_rs::TS;
|
use ts_rs::TS;
|
||||||
|
use yaak_api::yaak_api_client;
|
||||||
use yaak_common::platform::get_os_str;
|
use yaak_common::platform::get_os_str;
|
||||||
use yaak_models::db_context::DbContext;
|
use yaak_models::db_context::DbContext;
|
||||||
use yaak_models::query_manager::QueryManager;
|
use yaak_models::query_manager::QueryManager;
|
||||||
use yaak_models::util::UpdateSource;
|
use yaak_models::util::UpdateSource;
|
||||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
|
||||||
|
|
||||||
/// Extension trait for accessing the QueryManager from Tauri Manager types.
|
/// Extension trait for accessing the QueryManager from Tauri Manager types.
|
||||||
/// This is needed temporarily until all crates are refactored to not use Tauri.
|
/// This is needed temporarily until all crates are refactored to not use Tauri.
|
||||||
@@ -118,11 +118,12 @@ pub async fn activate_license<R: Runtime>(
|
|||||||
license_key: &str,
|
license_key: &str,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
info!("Activating license {}", license_key);
|
info!("Activating license {}", license_key);
|
||||||
let client = reqwest::Client::new();
|
let app_version = window.app_handle().package_info().version.to_string();
|
||||||
|
let client = yaak_api_client(&app_version)?;
|
||||||
let payload = ActivateLicenseRequestPayload {
|
let payload = ActivateLicenseRequestPayload {
|
||||||
license_key: license_key.to_string(),
|
license_key: license_key.to_string(),
|
||||||
app_platform: get_os_str().to_string(),
|
app_platform: get_os_str().to_string(),
|
||||||
app_version: window.app_handle().package_info().version.to_string(),
|
app_version,
|
||||||
};
|
};
|
||||||
let response = client.post(build_url("/licenses/activate")).json(&payload).send().await?;
|
let response = client.post(build_url("/licenses/activate")).json(&payload).send().await?;
|
||||||
|
|
||||||
@@ -155,12 +156,11 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
|
|||||||
let app_handle = window.app_handle();
|
let app_handle = window.app_handle();
|
||||||
let activation_id = get_activation_id(app_handle).await;
|
let activation_id = get_activation_id(app_handle).await;
|
||||||
|
|
||||||
let client = reqwest::Client::new();
|
let app_version = window.app_handle().package_info().version.to_string();
|
||||||
|
let client = yaak_api_client(&app_version)?;
|
||||||
let path = format!("/licenses/activations/{}/deactivate", activation_id);
|
let path = format!("/licenses/activations/{}/deactivate", activation_id);
|
||||||
let payload = DeactivateLicenseRequestPayload {
|
let payload =
|
||||||
app_platform: get_os_str().to_string(),
|
DeactivateLicenseRequestPayload { app_platform: get_os_str().to_string(), app_version };
|
||||||
app_version: window.app_handle().package_info().version.to_string(),
|
|
||||||
};
|
|
||||||
let response = client.post(build_url(&path)).json(&payload).send().await?;
|
let response = client.post(build_url(&path)).json(&payload).send().await?;
|
||||||
|
|
||||||
if response.status().is_client_error() {
|
if response.status().is_client_error() {
|
||||||
@@ -186,10 +186,9 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<LicenseCheckStatus> {
|
pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<LicenseCheckStatus> {
|
||||||
let payload = CheckActivationRequestPayload {
|
let app_version = window.app_handle().package_info().version.to_string();
|
||||||
app_platform: get_os_str().to_string(),
|
let payload =
|
||||||
app_version: window.package_info().version.to_string(),
|
CheckActivationRequestPayload { app_platform: get_os_str().to_string(), app_version };
|
||||||
};
|
|
||||||
let activation_id = get_activation_id(window.app_handle()).await;
|
let activation_id = get_activation_id(window.app_handle()).await;
|
||||||
|
|
||||||
let settings = window.db().get_settings();
|
let settings = window.db().get_settings();
|
||||||
@@ -204,7 +203,7 @@ pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<Lice
|
|||||||
(true, _) => {
|
(true, _) => {
|
||||||
info!("Checking license activation");
|
info!("Checking license activation");
|
||||||
// A license has been activated, so let's check the license server
|
// A license has been activated, so let's check the license server
|
||||||
let client = yaak_api_client(window.app_handle())?;
|
let client = yaak_api_client(&payload.app_version)?;
|
||||||
let path = format!("/licenses/activations/{activation_id}/check-v2");
|
let path = format!("/licenses/activations/{activation_id}/check-v2");
|
||||||
let response = client.post(build_url(&path)).json(&payload).send().await?;
|
let response = client.post(build_url(&path)).json(&payload).send().await?;
|
||||||
|
|
||||||
|
|||||||
@@ -6,8 +6,4 @@ publish = false
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
tauri = { workspace = true }
|
tauri = { workspace = true }
|
||||||
reqwest = { workspace = true, features = ["gzip"] }
|
|
||||||
thiserror = { workspace = true }
|
|
||||||
serde = { workspace = true, features = ["derive"] }
|
|
||||||
regex = "1.11.0"
|
regex = "1.11.0"
|
||||||
yaak-common = { workspace = true }
|
|
||||||
|
|||||||
@@ -1,24 +0,0 @@
|
|||||||
use crate::error::Result;
|
|
||||||
use reqwest::Client;
|
|
||||||
use std::time::Duration;
|
|
||||||
use tauri::http::{HeaderMap, HeaderValue};
|
|
||||||
use tauri::{AppHandle, Runtime};
|
|
||||||
use yaak_common::platform::{get_ua_arch, get_ua_platform};
|
|
||||||
|
|
||||||
pub fn yaak_api_client<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Client> {
|
|
||||||
let platform = get_ua_platform();
|
|
||||||
let version = app_handle.package_info().version.clone();
|
|
||||||
let arch = get_ua_arch();
|
|
||||||
let ua = format!("Yaak/{version} ({platform}; {arch})");
|
|
||||||
let mut default_headers = HeaderMap::new();
|
|
||||||
default_headers.insert("Accept", HeaderValue::from_str("application/json").unwrap());
|
|
||||||
|
|
||||||
let client = reqwest::ClientBuilder::new()
|
|
||||||
.timeout(Duration::from_secs(20))
|
|
||||||
.default_headers(default_headers)
|
|
||||||
.gzip(true)
|
|
||||||
.user_agent(ua)
|
|
||||||
.build()?;
|
|
||||||
|
|
||||||
Ok(client)
|
|
||||||
}
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
use serde::{Serialize, Serializer};
|
|
||||||
use thiserror::Error;
|
|
||||||
|
|
||||||
#[derive(Error, Debug)]
|
|
||||||
pub enum Error {
|
|
||||||
#[error(transparent)]
|
|
||||||
ReqwestError(#[from] reqwest::Error),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Serialize for Error {
|
|
||||||
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
|
|
||||||
where
|
|
||||||
S: Serializer,
|
|
||||||
{
|
|
||||||
serializer.serialize_str(self.to_string().as_ref())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub type Result<T> = std::result::Result<T, Error>;
|
|
||||||
@@ -1,3 +1 @@
|
|||||||
pub mod api_client;
|
|
||||||
pub mod error;
|
|
||||||
pub mod window;
|
pub mod window;
|
||||||
|
|||||||
@@ -1,88 +0,0 @@
|
|||||||
//! Dependency injection for built-in actions.
|
|
||||||
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
use std::sync::Arc;
|
|
||||||
use yaak_crypto::manager::EncryptionManager;
|
|
||||||
use yaak_models::query_manager::QueryManager;
|
|
||||||
use yaak_plugins::events::PluginContext;
|
|
||||||
use yaak_plugins::manager::PluginManager;
|
|
||||||
|
|
||||||
/// Dependencies needed by built-in action implementations.
|
|
||||||
///
|
|
||||||
/// This struct bundles all the dependencies that action handlers need,
|
|
||||||
/// providing a clean way to initialize them in different contexts
|
|
||||||
/// (CLI, Tauri app, MCP server, etc.).
|
|
||||||
pub struct BuiltinActionDependencies {
|
|
||||||
pub query_manager: Arc<QueryManager>,
|
|
||||||
pub plugin_manager: Arc<PluginManager>,
|
|
||||||
pub encryption_manager: Arc<EncryptionManager>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BuiltinActionDependencies {
|
|
||||||
/// Create dependencies for standalone usage (CLI, MCP server, etc.)
|
|
||||||
///
|
|
||||||
/// This initializes all the necessary managers following the same pattern
|
|
||||||
/// as the yaak-cli implementation.
|
|
||||||
pub async fn new_standalone(
|
|
||||||
db_path: &Path,
|
|
||||||
blob_path: &Path,
|
|
||||||
app_id: &str,
|
|
||||||
plugin_vendored_dir: PathBuf,
|
|
||||||
plugin_installed_dir: PathBuf,
|
|
||||||
node_path: PathBuf,
|
|
||||||
) -> Result<Self, Box<dyn std::error::Error>> {
|
|
||||||
// Initialize database
|
|
||||||
let (query_manager, _, _) = yaak_models::init_standalone(db_path, blob_path)?;
|
|
||||||
|
|
||||||
// Initialize encryption manager (takes QueryManager by value)
|
|
||||||
let encryption_manager = Arc::new(EncryptionManager::new(
|
|
||||||
query_manager.clone(),
|
|
||||||
app_id.to_string(),
|
|
||||||
));
|
|
||||||
|
|
||||||
let query_manager = Arc::new(query_manager);
|
|
||||||
|
|
||||||
// Find plugin runtime
|
|
||||||
let plugin_runtime_main = std::env::var("YAAK_PLUGIN_RUNTIME")
|
|
||||||
.map(PathBuf::from)
|
|
||||||
.unwrap_or_else(|_| {
|
|
||||||
// Development fallback
|
|
||||||
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
|
||||||
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
|
|
||||||
});
|
|
||||||
|
|
||||||
// Initialize plugin manager
|
|
||||||
let plugin_manager = Arc::new(
|
|
||||||
PluginManager::new(
|
|
||||||
plugin_vendored_dir,
|
|
||||||
plugin_installed_dir,
|
|
||||||
node_path,
|
|
||||||
plugin_runtime_main,
|
|
||||||
false, // not sandboxed in CLI
|
|
||||||
)
|
|
||||||
.await,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Initialize plugins from database
|
|
||||||
let db = query_manager.connect();
|
|
||||||
let plugins = db.list_plugins().unwrap_or_default();
|
|
||||||
if !plugins.is_empty() {
|
|
||||||
let errors = plugin_manager
|
|
||||||
.initialize_all_plugins(plugins, &PluginContext::new_empty())
|
|
||||||
.await;
|
|
||||||
for (plugin_dir, error_msg) in errors {
|
|
||||||
log::warn!(
|
|
||||||
"Failed to initialize plugin '{}': {}",
|
|
||||||
plugin_dir,
|
|
||||||
error_msg
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Self {
|
|
||||||
query_manager,
|
|
||||||
plugin_manager,
|
|
||||||
encryption_manager,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
//! HTTP action implementations.
|
|
||||||
|
|
||||||
pub mod send;
|
|
||||||
|
|
||||||
use crate::BuiltinActionDependencies;
|
|
||||||
use yaak_actions::{ActionError, ActionExecutor, ActionSource};
|
|
||||||
|
|
||||||
/// Register all HTTP-related actions with the executor.
|
|
||||||
pub async fn register_http_actions(
|
|
||||||
executor: &ActionExecutor,
|
|
||||||
deps: &BuiltinActionDependencies,
|
|
||||||
) -> Result<(), ActionError> {
|
|
||||||
let handler = send::HttpSendActionHandler {
|
|
||||||
query_manager: deps.query_manager.clone(),
|
|
||||||
plugin_manager: deps.plugin_manager.clone(),
|
|
||||||
encryption_manager: deps.encryption_manager.clone(),
|
|
||||||
};
|
|
||||||
|
|
||||||
executor
|
|
||||||
.register(send::metadata(), ActionSource::Builtin, handler)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
@@ -1,293 +0,0 @@
|
|||||||
//! HTTP send action implementation.
|
|
||||||
|
|
||||||
use std::collections::BTreeMap;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use serde_json::{json, Value};
|
|
||||||
use tokio::sync::mpsc;
|
|
||||||
use yaak_actions::{
|
|
||||||
ActionError, ActionGroupId, ActionHandler, ActionId, ActionMetadata,
|
|
||||||
ActionParams, ActionResult, ActionScope, CurrentContext,
|
|
||||||
RequiredContext,
|
|
||||||
};
|
|
||||||
use yaak_crypto::manager::EncryptionManager;
|
|
||||||
use yaak_http::path_placeholders::apply_path_placeholders;
|
|
||||||
use yaak_http::sender::{HttpSender, ReqwestSender};
|
|
||||||
use yaak_http::types::{SendableHttpRequest, SendableHttpRequestOptions};
|
|
||||||
use yaak_models::models::{HttpRequest, HttpRequestHeader, HttpUrlParameter};
|
|
||||||
use yaak_models::query_manager::QueryManager;
|
|
||||||
use yaak_models::render::make_vars_hashmap;
|
|
||||||
use yaak_plugins::events::{PluginContext, RenderPurpose};
|
|
||||||
use yaak_plugins::manager::PluginManager;
|
|
||||||
use yaak_plugins::template_callback::PluginTemplateCallback;
|
|
||||||
use yaak_templates::{parse_and_render, render_json_value_raw, RenderOptions};
|
|
||||||
|
|
||||||
/// Handler for HTTP send action.
|
|
||||||
pub struct HttpSendActionHandler {
|
|
||||||
pub query_manager: Arc<QueryManager>,
|
|
||||||
pub plugin_manager: Arc<PluginManager>,
|
|
||||||
pub encryption_manager: Arc<EncryptionManager>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Metadata for the HTTP send action.
|
|
||||||
pub fn metadata() -> ActionMetadata {
|
|
||||||
ActionMetadata {
|
|
||||||
id: ActionId::builtin("http", "send-request"),
|
|
||||||
label: "Send HTTP Request".to_string(),
|
|
||||||
description: Some("Execute an HTTP request and return the response".to_string()),
|
|
||||||
icon: Some("play".to_string()),
|
|
||||||
scope: ActionScope::HttpRequest,
|
|
||||||
keyboard_shortcut: None,
|
|
||||||
requires_selection: true,
|
|
||||||
enabled_condition: None,
|
|
||||||
group_id: Some(ActionGroupId::builtin("send")),
|
|
||||||
order: 10,
|
|
||||||
required_context: RequiredContext::requires_target(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ActionHandler for HttpSendActionHandler {
|
|
||||||
fn handle(
|
|
||||||
&self,
|
|
||||||
context: CurrentContext,
|
|
||||||
params: ActionParams,
|
|
||||||
) -> std::pin::Pin<
|
|
||||||
Box<dyn std::future::Future<Output = Result<ActionResult, ActionError>> + Send + 'static>,
|
|
||||||
> {
|
|
||||||
let query_manager = self.query_manager.clone();
|
|
||||||
let plugin_manager = self.plugin_manager.clone();
|
|
||||||
let encryption_manager = self.encryption_manager.clone();
|
|
||||||
|
|
||||||
Box::pin(async move {
|
|
||||||
// Extract request_id from context
|
|
||||||
let request_id = context
|
|
||||||
.target
|
|
||||||
.as_ref()
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ActionError::ContextMissing {
|
|
||||||
missing_fields: vec!["target".to_string()],
|
|
||||||
}
|
|
||||||
})?
|
|
||||||
.id()
|
|
||||||
.ok_or_else(|| {
|
|
||||||
ActionError::ContextMissing {
|
|
||||||
missing_fields: vec!["target.id".to_string()],
|
|
||||||
}
|
|
||||||
})?
|
|
||||||
.to_string();
|
|
||||||
|
|
||||||
// Fetch request and environment from database (synchronous)
|
|
||||||
let (request, environment_chain) = {
|
|
||||||
let db = query_manager.connect();
|
|
||||||
|
|
||||||
// Fetch HTTP request from database
|
|
||||||
let request = db.get_http_request(&request_id).map_err(|e| {
|
|
||||||
ActionError::Internal(format!("Failed to fetch request {}: {}", request_id, e))
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// Resolve environment chain for variable substitution
|
|
||||||
let environment_chain = if let Some(env_id) = &context.environment_id {
|
|
||||||
db.resolve_environments(
|
|
||||||
&request.workspace_id,
|
|
||||||
request.folder_id.as_deref(),
|
|
||||||
Some(env_id),
|
|
||||||
)
|
|
||||||
.unwrap_or_default()
|
|
||||||
} else {
|
|
||||||
db.resolve_environments(
|
|
||||||
&request.workspace_id,
|
|
||||||
request.folder_id.as_deref(),
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.unwrap_or_default()
|
|
||||||
};
|
|
||||||
|
|
||||||
(request, environment_chain)
|
|
||||||
}; // db is dropped here
|
|
||||||
|
|
||||||
// Create template callback with plugin support
|
|
||||||
let plugin_context = PluginContext::new(None, Some(request.workspace_id.clone()));
|
|
||||||
let template_callback = PluginTemplateCallback::new(
|
|
||||||
plugin_manager,
|
|
||||||
encryption_manager,
|
|
||||||
&plugin_context,
|
|
||||||
RenderPurpose::Send,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Render templates in the request
|
|
||||||
let rendered_request = render_http_request(
|
|
||||||
&request,
|
|
||||||
environment_chain,
|
|
||||||
&template_callback,
|
|
||||||
&RenderOptions::throw(),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.map_err(|e| ActionError::Internal(format!("Failed to render request: {}", e)))?;
|
|
||||||
|
|
||||||
// Build sendable request
|
|
||||||
let options = SendableHttpRequestOptions {
|
|
||||||
timeout: params
|
|
||||||
.data
|
|
||||||
.get("timeout_ms")
|
|
||||||
.and_then(|v| v.as_u64())
|
|
||||||
.map(|ms| std::time::Duration::from_millis(ms)),
|
|
||||||
follow_redirects: params
|
|
||||||
.data
|
|
||||||
.get("follow_redirects")
|
|
||||||
.and_then(|v| v.as_bool())
|
|
||||||
.unwrap_or(false),
|
|
||||||
};
|
|
||||||
|
|
||||||
let sendable = SendableHttpRequest::from_http_request(&rendered_request, options)
|
|
||||||
.await
|
|
||||||
.map_err(|e| ActionError::Internal(format!("Failed to build request: {}", e)))?;
|
|
||||||
|
|
||||||
// Create event channel
|
|
||||||
let (event_tx, mut event_rx) = mpsc::channel(100);
|
|
||||||
|
|
||||||
// Spawn task to drain events
|
|
||||||
let _event_handle = tokio::spawn(async move {
|
|
||||||
while event_rx.recv().await.is_some() {
|
|
||||||
// For now, just drain events
|
|
||||||
// In the future, we could log them or emit them to UI
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Send the request
|
|
||||||
let sender = ReqwestSender::new()
|
|
||||||
.map_err(|e| ActionError::Internal(format!("Failed to create HTTP client: {}", e)))?;
|
|
||||||
let response = sender
|
|
||||||
.send(sendable, event_tx)
|
|
||||||
.await
|
|
||||||
.map_err(|e| ActionError::Internal(format!("Failed to send request: {}", e)))?;
|
|
||||||
|
|
||||||
// Consume response body
|
|
||||||
let status = response.status;
|
|
||||||
let status_reason = response.status_reason.clone();
|
|
||||||
let headers = response.headers.clone();
|
|
||||||
let url = response.url.clone();
|
|
||||||
|
|
||||||
let (body_text, stats) = response
|
|
||||||
.text()
|
|
||||||
.await
|
|
||||||
.map_err(|e| ActionError::Internal(format!("Failed to read response body: {}", e)))?;
|
|
||||||
|
|
||||||
// Return success result with response data
|
|
||||||
Ok(ActionResult::Success {
|
|
||||||
data: Some(json!({
|
|
||||||
"status": status,
|
|
||||||
"statusReason": status_reason,
|
|
||||||
"headers": headers,
|
|
||||||
"body": body_text,
|
|
||||||
"contentLength": stats.size_decompressed,
|
|
||||||
"url": url,
|
|
||||||
})),
|
|
||||||
message: Some(format!("HTTP {}", status)),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Helper function to render templates in an HTTP request.
|
|
||||||
/// Copied from yaak-cli implementation.
|
|
||||||
async fn render_http_request(
|
|
||||||
r: &HttpRequest,
|
|
||||||
environment_chain: Vec<yaak_models::models::Environment>,
|
|
||||||
cb: &PluginTemplateCallback,
|
|
||||||
opt: &RenderOptions,
|
|
||||||
) -> Result<HttpRequest, String> {
|
|
||||||
let vars = &make_vars_hashmap(environment_chain);
|
|
||||||
|
|
||||||
let mut url_parameters = Vec::new();
|
|
||||||
for p in r.url_parameters.clone() {
|
|
||||||
if !p.enabled {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
url_parameters.push(HttpUrlParameter {
|
|
||||||
enabled: p.enabled,
|
|
||||||
name: parse_and_render(p.name.as_str(), vars, cb, opt)
|
|
||||||
.await
|
|
||||||
.map_err(|e| e.to_string())?,
|
|
||||||
value: parse_and_render(p.value.as_str(), vars, cb, opt)
|
|
||||||
.await
|
|
||||||
.map_err(|e| e.to_string())?,
|
|
||||||
id: p.id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut headers = Vec::new();
|
|
||||||
for p in r.headers.clone() {
|
|
||||||
if !p.enabled {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
headers.push(HttpRequestHeader {
|
|
||||||
enabled: p.enabled,
|
|
||||||
name: parse_and_render(p.name.as_str(), vars, cb, opt)
|
|
||||||
.await
|
|
||||||
.map_err(|e| e.to_string())?,
|
|
||||||
value: parse_and_render(p.value.as_str(), vars, cb, opt)
|
|
||||||
.await
|
|
||||||
.map_err(|e| e.to_string())?,
|
|
||||||
id: p.id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut body = BTreeMap::new();
|
|
||||||
for (k, v) in r.body.clone() {
|
|
||||||
body.insert(
|
|
||||||
k,
|
|
||||||
render_json_value_raw(v, vars, cb, opt)
|
|
||||||
.await
|
|
||||||
.map_err(|e| e.to_string())?,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
let authentication = {
|
|
||||||
let mut disabled = false;
|
|
||||||
let mut auth = BTreeMap::new();
|
|
||||||
match r.authentication.get("disabled") {
|
|
||||||
Some(Value::Bool(true)) => {
|
|
||||||
disabled = true;
|
|
||||||
}
|
|
||||||
Some(Value::String(tmpl)) => {
|
|
||||||
disabled = parse_and_render(tmpl.as_str(), vars, cb, opt)
|
|
||||||
.await
|
|
||||||
.unwrap_or_default()
|
|
||||||
.is_empty();
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
if disabled {
|
|
||||||
auth.insert("disabled".to_string(), Value::Bool(true));
|
|
||||||
} else {
|
|
||||||
for (k, v) in r.authentication.clone() {
|
|
||||||
if k == "disabled" {
|
|
||||||
auth.insert(k, Value::Bool(false));
|
|
||||||
} else {
|
|
||||||
auth.insert(
|
|
||||||
k,
|
|
||||||
render_json_value_raw(v, vars, cb, opt)
|
|
||||||
.await
|
|
||||||
.map_err(|e| e.to_string())?,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
auth
|
|
||||||
};
|
|
||||||
|
|
||||||
let url = parse_and_render(r.url.clone().as_str(), vars, cb, opt)
|
|
||||||
.await
|
|
||||||
.map_err(|e| e.to_string())?;
|
|
||||||
|
|
||||||
// Apply path placeholders (e.g., /users/:id -> /users/123)
|
|
||||||
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
|
|
||||||
|
|
||||||
Ok(HttpRequest {
|
|
||||||
url,
|
|
||||||
url_parameters,
|
|
||||||
headers,
|
|
||||||
body,
|
|
||||||
authentication,
|
|
||||||
..r.to_owned()
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
//! Built-in action implementations for Yaak.
|
|
||||||
//!
|
|
||||||
//! This crate provides concrete implementations of built-in actions using
|
|
||||||
//! the yaak-actions framework. It depends on domain-specific crates like
|
|
||||||
//! yaak-http, yaak-models, yaak-plugins, etc.
|
|
||||||
|
|
||||||
pub mod dependencies;
|
|
||||||
pub mod http;
|
|
||||||
|
|
||||||
pub use dependencies::BuiltinActionDependencies;
|
|
||||||
pub use http::register_http_actions;
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "yaak-actions"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2021"
|
|
||||||
description = "Centralized action system for Yaak"
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
serde = { workspace = true, features = ["derive"] }
|
|
||||||
serde_json = { workspace = true }
|
|
||||||
thiserror = { workspace = true }
|
|
||||||
tokio = { workspace = true, features = ["sync"] }
|
|
||||||
ts-rs = { workspace = true }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
|
|
||||||
14
crates/yaak-actions/bindings/ActionAvailability.ts
generated
14
crates/yaak-actions/bindings/ActionAvailability.ts
generated
@@ -1,14 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Availability status for an action.
|
|
||||||
*/
|
|
||||||
export type ActionAvailability = { "status": "available" } | { "status": "available-with-prompt",
|
|
||||||
/**
|
|
||||||
* Fields that will require prompting.
|
|
||||||
*/
|
|
||||||
prompt_fields: Array<string>, } | { "status": "unavailable",
|
|
||||||
/**
|
|
||||||
* Fields that are missing.
|
|
||||||
*/
|
|
||||||
missing_fields: Array<string>, } | { "status": "not-found" };
|
|
||||||
13
crates/yaak-actions/bindings/ActionError.ts
generated
13
crates/yaak-actions/bindings/ActionError.ts
generated
@@ -1,13 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
import type { ActionGroupId } from "./ActionGroupId";
|
|
||||||
import type { ActionId } from "./ActionId";
|
|
||||||
import type { ActionScope } from "./ActionScope";
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Errors that can occur during action operations.
|
|
||||||
*/
|
|
||||||
export type ActionError = { "type": "not-found" } & ActionId | { "type": "disabled", action_id: ActionId, reason: string, } | { "type": "invalid-scope", expected: ActionScope, actual: ActionScope, } | { "type": "timeout" } & ActionId | { "type": "plugin-error" } & string | { "type": "validation-error" } & string | { "type": "permission-denied" } & string | { "type": "cancelled" } | { "type": "internal" } & string | { "type": "context-missing",
|
|
||||||
/**
|
|
||||||
* The context fields that are missing.
|
|
||||||
*/
|
|
||||||
missing_fields: Array<string>, } | { "type": "group-not-found" } & ActionGroupId | { "type": "group-already-exists" } & ActionGroupId;
|
|
||||||
10
crates/yaak-actions/bindings/ActionGroupId.ts
generated
10
crates/yaak-actions/bindings/ActionGroupId.ts
generated
@@ -1,10 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Unique identifier for an action group.
|
|
||||||
*
|
|
||||||
* Format: `namespace:group-name`
|
|
||||||
* - Built-in: `yaak:export`
|
|
||||||
* - Plugin: `plugin.my-plugin:utilities`
|
|
||||||
*/
|
|
||||||
export type ActionGroupId = string;
|
|
||||||
32
crates/yaak-actions/bindings/ActionGroupMetadata.ts
generated
32
crates/yaak-actions/bindings/ActionGroupMetadata.ts
generated
@@ -1,32 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
import type { ActionGroupId } from "./ActionGroupId";
|
|
||||||
import type { ActionScope } from "./ActionScope";
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Metadata about an action group.
|
|
||||||
*/
|
|
||||||
export type ActionGroupMetadata = {
|
|
||||||
/**
|
|
||||||
* Unique identifier for this group.
|
|
||||||
*/
|
|
||||||
id: ActionGroupId,
|
|
||||||
/**
|
|
||||||
* Display name for the group.
|
|
||||||
*/
|
|
||||||
name: string,
|
|
||||||
/**
|
|
||||||
* Optional description of the group's purpose.
|
|
||||||
*/
|
|
||||||
description: string | null,
|
|
||||||
/**
|
|
||||||
* Icon to display for the group.
|
|
||||||
*/
|
|
||||||
icon: string | null,
|
|
||||||
/**
|
|
||||||
* Sort order for displaying groups (lower = earlier).
|
|
||||||
*/
|
|
||||||
order: number,
|
|
||||||
/**
|
|
||||||
* Optional scope restriction (if set, group only appears in this scope).
|
|
||||||
*/
|
|
||||||
scope: ActionScope | null, };
|
|
||||||
18
crates/yaak-actions/bindings/ActionGroupSource.ts
generated
18
crates/yaak-actions/bindings/ActionGroupSource.ts
generated
@@ -1,18 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Where an action group was registered from.
|
|
||||||
*/
|
|
||||||
export type ActionGroupSource = { "type": "builtin" } | { "type": "plugin",
|
|
||||||
/**
|
|
||||||
* Plugin reference ID.
|
|
||||||
*/
|
|
||||||
ref_id: string,
|
|
||||||
/**
|
|
||||||
* Plugin name.
|
|
||||||
*/
|
|
||||||
name: string, } | { "type": "dynamic",
|
|
||||||
/**
|
|
||||||
* Source identifier.
|
|
||||||
*/
|
|
||||||
source_id: string, };
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
import type { ActionGroupMetadata } from "./ActionGroupMetadata";
|
|
||||||
import type { ActionMetadata } from "./ActionMetadata";
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A group with its actions for UI rendering.
|
|
||||||
*/
|
|
||||||
export type ActionGroupWithActions = {
|
|
||||||
/**
|
|
||||||
* Group metadata.
|
|
||||||
*/
|
|
||||||
group: ActionGroupMetadata,
|
|
||||||
/**
|
|
||||||
* Actions in this group.
|
|
||||||
*/
|
|
||||||
actions: Array<ActionMetadata>, };
|
|
||||||
10
crates/yaak-actions/bindings/ActionId.ts
generated
10
crates/yaak-actions/bindings/ActionId.ts
generated
@@ -1,10 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Unique identifier for an action.
|
|
||||||
*
|
|
||||||
* Format: `namespace:category:name`
|
|
||||||
* - Built-in: `yaak:http-request:send`
|
|
||||||
* - Plugin: `plugin.copy-curl:http-request:copy`
|
|
||||||
*/
|
|
||||||
export type ActionId = string;
|
|
||||||
54
crates/yaak-actions/bindings/ActionMetadata.ts
generated
54
crates/yaak-actions/bindings/ActionMetadata.ts
generated
@@ -1,54 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
import type { ActionGroupId } from "./ActionGroupId";
|
|
||||||
import type { ActionId } from "./ActionId";
|
|
||||||
import type { ActionScope } from "./ActionScope";
|
|
||||||
import type { RequiredContext } from "./RequiredContext";
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Metadata about an action for discovery.
|
|
||||||
*/
|
|
||||||
export type ActionMetadata = {
|
|
||||||
/**
|
|
||||||
* Unique identifier for this action.
|
|
||||||
*/
|
|
||||||
id: ActionId,
|
|
||||||
/**
|
|
||||||
* Display label for the action.
|
|
||||||
*/
|
|
||||||
label: string,
|
|
||||||
/**
|
|
||||||
* Optional description of what the action does.
|
|
||||||
*/
|
|
||||||
description: string | null,
|
|
||||||
/**
|
|
||||||
* Icon name to display.
|
|
||||||
*/
|
|
||||||
icon: string | null,
|
|
||||||
/**
|
|
||||||
* The scope this action applies to.
|
|
||||||
*/
|
|
||||||
scope: ActionScope,
|
|
||||||
/**
|
|
||||||
* Keyboard shortcut (e.g., "Cmd+Enter").
|
|
||||||
*/
|
|
||||||
keyboardShortcut: string | null,
|
|
||||||
/**
|
|
||||||
* Whether the action requires a selection/target.
|
|
||||||
*/
|
|
||||||
requiresSelection: boolean,
|
|
||||||
/**
|
|
||||||
* Optional condition expression for when action is enabled.
|
|
||||||
*/
|
|
||||||
enabledCondition: string | null,
|
|
||||||
/**
|
|
||||||
* Optional group this action belongs to.
|
|
||||||
*/
|
|
||||||
groupId: ActionGroupId | null,
|
|
||||||
/**
|
|
||||||
* Sort order within a group (lower = earlier).
|
|
||||||
*/
|
|
||||||
order: number,
|
|
||||||
/**
|
|
||||||
* Context requirements for this action.
|
|
||||||
*/
|
|
||||||
requiredContext: RequiredContext, };
|
|
||||||
10
crates/yaak-actions/bindings/ActionParams.ts
generated
10
crates/yaak-actions/bindings/ActionParams.ts
generated
@@ -1,10 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Parameters passed to action handlers.
|
|
||||||
*/
|
|
||||||
export type ActionParams = {
|
|
||||||
/**
|
|
||||||
* Arbitrary JSON parameters.
|
|
||||||
*/
|
|
||||||
data: unknown, };
|
|
||||||
23
crates/yaak-actions/bindings/ActionResult.ts
generated
23
crates/yaak-actions/bindings/ActionResult.ts
generated
@@ -1,23 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
import type { InputPrompt } from "./InputPrompt";
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Result of action execution.
|
|
||||||
*/
|
|
||||||
export type ActionResult = { "type": "success",
|
|
||||||
/**
|
|
||||||
* Optional data to return.
|
|
||||||
*/
|
|
||||||
data: unknown,
|
|
||||||
/**
|
|
||||||
* Optional message to display.
|
|
||||||
*/
|
|
||||||
message: string | null, } | { "type": "requires-input",
|
|
||||||
/**
|
|
||||||
* Prompt to show user.
|
|
||||||
*/
|
|
||||||
prompt: InputPrompt,
|
|
||||||
/**
|
|
||||||
* Continuation token.
|
|
||||||
*/
|
|
||||||
continuation_id: string, } | { "type": "cancelled" };
|
|
||||||
6
crates/yaak-actions/bindings/ActionScope.ts
generated
6
crates/yaak-actions/bindings/ActionScope.ts
generated
@@ -1,6 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The scope in which an action can be invoked.
|
|
||||||
*/
|
|
||||||
export type ActionScope = "global" | "http-request" | "websocket-request" | "grpc-request" | "workspace" | "folder" | "environment" | "cookie-jar";
|
|
||||||
18
crates/yaak-actions/bindings/ActionSource.ts
generated
18
crates/yaak-actions/bindings/ActionSource.ts
generated
@@ -1,18 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Where an action was registered from.
|
|
||||||
*/
|
|
||||||
export type ActionSource = { "type": "builtin" } | { "type": "plugin",
|
|
||||||
/**
|
|
||||||
* Plugin reference ID.
|
|
||||||
*/
|
|
||||||
ref_id: string,
|
|
||||||
/**
|
|
||||||
* Plugin name.
|
|
||||||
*/
|
|
||||||
name: string, } | { "type": "dynamic",
|
|
||||||
/**
|
|
||||||
* Source identifier.
|
|
||||||
*/
|
|
||||||
source_id: string, };
|
|
||||||
6
crates/yaak-actions/bindings/ActionTarget.ts
generated
6
crates/yaak-actions/bindings/ActionTarget.ts
generated
@@ -1,6 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The target entity for an action.
|
|
||||||
*/
|
|
||||||
export type ActionTarget = { "type": "none" } | { "type": "http-request", id: string, } | { "type": "websocket-request", id: string, } | { "type": "grpc-request", id: string, } | { "type": "workspace", id: string, } | { "type": "folder", id: string, } | { "type": "environment", id: string, } | { "type": "multiple", targets: Array<ActionTarget>, };
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
|
|
||||||
/**
|
|
||||||
* How strictly a context field is required.
|
|
||||||
*/
|
|
||||||
export type ContextRequirement = "not-required" | "optional" | "required" | "required-with-prompt";
|
|
||||||
27
crates/yaak-actions/bindings/CurrentContext.ts
generated
27
crates/yaak-actions/bindings/CurrentContext.ts
generated
@@ -1,27 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
import type { ActionTarget } from "./ActionTarget";
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Current context state from the application.
|
|
||||||
*/
|
|
||||||
export type CurrentContext = {
|
|
||||||
/**
|
|
||||||
* Current workspace ID (if any).
|
|
||||||
*/
|
|
||||||
workspaceId: string | null,
|
|
||||||
/**
|
|
||||||
* Current environment ID (if any).
|
|
||||||
*/
|
|
||||||
environmentId: string | null,
|
|
||||||
/**
|
|
||||||
* Currently selected target (if any).
|
|
||||||
*/
|
|
||||||
target: ActionTarget | null,
|
|
||||||
/**
|
|
||||||
* Whether a window context is available.
|
|
||||||
*/
|
|
||||||
hasWindow: boolean,
|
|
||||||
/**
|
|
||||||
* Whether the context provider can prompt for missing fields.
|
|
||||||
*/
|
|
||||||
canPrompt: boolean, };
|
|
||||||
7
crates/yaak-actions/bindings/InputPrompt.ts
generated
7
crates/yaak-actions/bindings/InputPrompt.ts
generated
@@ -1,7 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
import type { SelectOption } from "./SelectOption";
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A prompt for user input.
|
|
||||||
*/
|
|
||||||
export type InputPrompt = { "type": "text", label: string, placeholder: string | null, default_value: string | null, } | { "type": "select", label: string, options: Array<SelectOption>, } | { "type": "confirm", label: string, };
|
|
||||||
23
crates/yaak-actions/bindings/RequiredContext.ts
generated
23
crates/yaak-actions/bindings/RequiredContext.ts
generated
@@ -1,23 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
import type { ContextRequirement } from "./ContextRequirement";
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Specifies what context fields an action requires.
|
|
||||||
*/
|
|
||||||
export type RequiredContext = {
|
|
||||||
/**
|
|
||||||
* Action requires a workspace to be active.
|
|
||||||
*/
|
|
||||||
workspace: ContextRequirement,
|
|
||||||
/**
|
|
||||||
* Action requires an environment to be selected.
|
|
||||||
*/
|
|
||||||
environment: ContextRequirement,
|
|
||||||
/**
|
|
||||||
* Action requires a specific target entity (request, folder, etc.).
|
|
||||||
*/
|
|
||||||
target: ContextRequirement,
|
|
||||||
/**
|
|
||||||
* Action requires a window context (for UI operations).
|
|
||||||
*/
|
|
||||||
window: ContextRequirement, };
|
|
||||||
6
crates/yaak-actions/bindings/SelectOption.ts
generated
6
crates/yaak-actions/bindings/SelectOption.ts
generated
@@ -1,6 +0,0 @@
|
|||||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
|
||||||
|
|
||||||
/**
|
|
||||||
* An option in a select prompt.
|
|
||||||
*/
|
|
||||||
export type SelectOption = { label: string, value: string, };
|
|
||||||
@@ -1,331 +0,0 @@
|
|||||||
//! Action context types and context-aware filtering.
|
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use ts_rs::TS;
|
|
||||||
|
|
||||||
use crate::ActionScope;
|
|
||||||
|
|
||||||
/// Specifies what context fields an action requires.
|
|
||||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
#[serde(rename_all = "camelCase")]
|
|
||||||
pub struct RequiredContext {
|
|
||||||
/// Action requires a workspace to be active.
|
|
||||||
#[serde(default)]
|
|
||||||
pub workspace: ContextRequirement,
|
|
||||||
|
|
||||||
/// Action requires an environment to be selected.
|
|
||||||
#[serde(default)]
|
|
||||||
pub environment: ContextRequirement,
|
|
||||||
|
|
||||||
/// Action requires a specific target entity (request, folder, etc.).
|
|
||||||
#[serde(default)]
|
|
||||||
pub target: ContextRequirement,
|
|
||||||
|
|
||||||
/// Action requires a window context (for UI operations).
|
|
||||||
#[serde(default)]
|
|
||||||
pub window: ContextRequirement,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl RequiredContext {
|
|
||||||
/// Action requires a target entity.
|
|
||||||
pub fn requires_target() -> Self {
|
|
||||||
Self {
|
|
||||||
target: ContextRequirement::Required,
|
|
||||||
..Default::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Action requires workspace and target.
|
|
||||||
pub fn requires_workspace_and_target() -> Self {
|
|
||||||
Self {
|
|
||||||
workspace: ContextRequirement::Required,
|
|
||||||
target: ContextRequirement::Required,
|
|
||||||
..Default::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Action works globally, no specific context needed.
|
|
||||||
pub fn global() -> Self {
|
|
||||||
Self::default()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Action requires target with prompt if missing.
|
|
||||||
pub fn requires_target_with_prompt() -> Self {
|
|
||||||
Self {
|
|
||||||
target: ContextRequirement::RequiredWithPrompt,
|
|
||||||
..Default::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Action requires environment with prompt if missing.
|
|
||||||
pub fn requires_environment_with_prompt() -> Self {
|
|
||||||
Self {
|
|
||||||
environment: ContextRequirement::RequiredWithPrompt,
|
|
||||||
..Default::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// How strictly a context field is required.
|
|
||||||
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
#[serde(rename_all = "kebab-case")]
|
|
||||||
pub enum ContextRequirement {
|
|
||||||
/// Field is not needed.
|
|
||||||
#[default]
|
|
||||||
NotRequired,
|
|
||||||
|
|
||||||
/// Field is optional but will be used if available.
|
|
||||||
Optional,
|
|
||||||
|
|
||||||
/// Field must be present; action will fail without it.
|
|
||||||
Required,
|
|
||||||
|
|
||||||
/// Field must be present; prompt user to select if missing.
|
|
||||||
RequiredWithPrompt,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Current context state from the application.
|
|
||||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
#[serde(rename_all = "camelCase")]
|
|
||||||
pub struct CurrentContext {
|
|
||||||
/// Current workspace ID (if any).
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub workspace_id: Option<String>,
|
|
||||||
|
|
||||||
/// Current environment ID (if any).
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub environment_id: Option<String>,
|
|
||||||
|
|
||||||
/// Currently selected target (if any).
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub target: Option<ActionTarget>,
|
|
||||||
|
|
||||||
/// Whether a window context is available.
|
|
||||||
#[serde(default)]
|
|
||||||
pub has_window: bool,
|
|
||||||
|
|
||||||
/// Whether the context provider can prompt for missing fields.
|
|
||||||
#[serde(default)]
|
|
||||||
pub can_prompt: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The target entity for an action.
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
#[serde(tag = "type", rename_all = "kebab-case")]
|
|
||||||
pub enum ActionTarget {
|
|
||||||
/// No target.
|
|
||||||
None,
|
|
||||||
/// HTTP request target.
|
|
||||||
HttpRequest { id: String },
|
|
||||||
/// WebSocket request target.
|
|
||||||
WebsocketRequest { id: String },
|
|
||||||
/// gRPC request target.
|
|
||||||
GrpcRequest { id: String },
|
|
||||||
/// Workspace target.
|
|
||||||
Workspace { id: String },
|
|
||||||
/// Folder target.
|
|
||||||
Folder { id: String },
|
|
||||||
/// Environment target.
|
|
||||||
Environment { id: String },
|
|
||||||
/// Multiple targets.
|
|
||||||
Multiple { targets: Vec<ActionTarget> },
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ActionTarget {
|
|
||||||
/// Get the scope this target corresponds to.
|
|
||||||
pub fn scope(&self) -> Option<ActionScope> {
|
|
||||||
match self {
|
|
||||||
Self::None => None,
|
|
||||||
Self::HttpRequest { .. } => Some(ActionScope::HttpRequest),
|
|
||||||
Self::WebsocketRequest { .. } => Some(ActionScope::WebsocketRequest),
|
|
||||||
Self::GrpcRequest { .. } => Some(ActionScope::GrpcRequest),
|
|
||||||
Self::Workspace { .. } => Some(ActionScope::Workspace),
|
|
||||||
Self::Folder { .. } => Some(ActionScope::Folder),
|
|
||||||
Self::Environment { .. } => Some(ActionScope::Environment),
|
|
||||||
Self::Multiple { .. } => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the ID of the target (if single target).
|
|
||||||
pub fn id(&self) -> Option<&str> {
|
|
||||||
match self {
|
|
||||||
Self::HttpRequest { id }
|
|
||||||
| Self::WebsocketRequest { id }
|
|
||||||
| Self::GrpcRequest { id }
|
|
||||||
| Self::Workspace { id }
|
|
||||||
| Self::Folder { id }
|
|
||||||
| Self::Environment { id } => Some(id),
|
|
||||||
Self::None | Self::Multiple { .. } => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Availability status for an action.
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
#[serde(tag = "status", rename_all = "kebab-case")]
|
|
||||||
pub enum ActionAvailability {
|
|
||||||
/// Action is ready to execute.
|
|
||||||
Available,
|
|
||||||
|
|
||||||
/// Action can execute but will prompt for missing context.
|
|
||||||
AvailableWithPrompt {
|
|
||||||
/// Fields that will require prompting.
|
|
||||||
prompt_fields: Vec<String>,
|
|
||||||
},
|
|
||||||
|
|
||||||
/// Action cannot execute due to missing context.
|
|
||||||
Unavailable {
|
|
||||||
/// Fields that are missing.
|
|
||||||
missing_fields: Vec<String>,
|
|
||||||
},
|
|
||||||
|
|
||||||
/// Action not found in registry.
|
|
||||||
NotFound,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ActionAvailability {
|
|
||||||
/// Check if the action is available (possibly with prompts).
|
|
||||||
pub fn is_available(&self) -> bool {
|
|
||||||
matches!(self, Self::Available | Self::AvailableWithPrompt { .. })
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Check if the action is immediately available without prompts.
|
|
||||||
pub fn is_immediately_available(&self) -> bool {
|
|
||||||
matches!(self, Self::Available)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Check if required context is satisfied by current context.
|
|
||||||
pub fn check_context_availability(
|
|
||||||
required: &RequiredContext,
|
|
||||||
current: &CurrentContext,
|
|
||||||
) -> ActionAvailability {
|
|
||||||
let mut missing_fields = Vec::new();
|
|
||||||
let mut prompt_fields = Vec::new();
|
|
||||||
|
|
||||||
// Check workspace
|
|
||||||
check_field(
|
|
||||||
"workspace",
|
|
||||||
current.workspace_id.is_some(),
|
|
||||||
&required.workspace,
|
|
||||||
current.can_prompt,
|
|
||||||
&mut missing_fields,
|
|
||||||
&mut prompt_fields,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Check environment
|
|
||||||
check_field(
|
|
||||||
"environment",
|
|
||||||
current.environment_id.is_some(),
|
|
||||||
&required.environment,
|
|
||||||
current.can_prompt,
|
|
||||||
&mut missing_fields,
|
|
||||||
&mut prompt_fields,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Check target
|
|
||||||
check_field(
|
|
||||||
"target",
|
|
||||||
current.target.is_some(),
|
|
||||||
&required.target,
|
|
||||||
current.can_prompt,
|
|
||||||
&mut missing_fields,
|
|
||||||
&mut prompt_fields,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Check window
|
|
||||||
check_field(
|
|
||||||
"window",
|
|
||||||
current.has_window,
|
|
||||||
&required.window,
|
|
||||||
false, // Can't prompt for window
|
|
||||||
&mut missing_fields,
|
|
||||||
&mut prompt_fields,
|
|
||||||
);
|
|
||||||
|
|
||||||
if !missing_fields.is_empty() {
|
|
||||||
ActionAvailability::Unavailable { missing_fields }
|
|
||||||
} else if !prompt_fields.is_empty() {
|
|
||||||
ActionAvailability::AvailableWithPrompt { prompt_fields }
|
|
||||||
} else {
|
|
||||||
ActionAvailability::Available
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn check_field(
|
|
||||||
name: &str,
|
|
||||||
has_value: bool,
|
|
||||||
requirement: &ContextRequirement,
|
|
||||||
can_prompt: bool,
|
|
||||||
missing: &mut Vec<String>,
|
|
||||||
promptable: &mut Vec<String>,
|
|
||||||
) {
|
|
||||||
match requirement {
|
|
||||||
ContextRequirement::NotRequired | ContextRequirement::Optional => {}
|
|
||||||
ContextRequirement::Required => {
|
|
||||||
if !has_value {
|
|
||||||
missing.push(name.to_string());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ContextRequirement::RequiredWithPrompt => {
|
|
||||||
if !has_value {
|
|
||||||
if can_prompt {
|
|
||||||
promptable.push(name.to_string());
|
|
||||||
} else {
|
|
||||||
missing.push(name.to_string());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_context_available() {
|
|
||||||
let required = RequiredContext::requires_target();
|
|
||||||
let current = CurrentContext {
|
|
||||||
target: Some(ActionTarget::HttpRequest {
|
|
||||||
id: "123".to_string(),
|
|
||||||
}),
|
|
||||||
..Default::default()
|
|
||||||
};
|
|
||||||
|
|
||||||
let availability = check_context_availability(&required, ¤t);
|
|
||||||
assert!(matches!(availability, ActionAvailability::Available));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_context_missing() {
|
|
||||||
let required = RequiredContext::requires_target();
|
|
||||||
let current = CurrentContext::default();
|
|
||||||
|
|
||||||
let availability = check_context_availability(&required, ¤t);
|
|
||||||
assert!(matches!(
|
|
||||||
availability,
|
|
||||||
ActionAvailability::Unavailable { missing_fields } if missing_fields == vec!["target"]
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_context_promptable() {
|
|
||||||
let required = RequiredContext::requires_target_with_prompt();
|
|
||||||
let current = CurrentContext {
|
|
||||||
can_prompt: true,
|
|
||||||
..Default::default()
|
|
||||||
};
|
|
||||||
|
|
||||||
let availability = check_context_availability(&required, ¤t);
|
|
||||||
assert!(matches!(
|
|
||||||
availability,
|
|
||||||
ActionAvailability::AvailableWithPrompt { prompt_fields } if prompt_fields == vec!["target"]
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,131 +0,0 @@
|
|||||||
//! Error types for the action system.
|
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use thiserror::Error;
|
|
||||||
use ts_rs::TS;
|
|
||||||
|
|
||||||
use crate::{ActionGroupId, ActionId};
|
|
||||||
|
|
||||||
/// Errors that can occur during action operations.
|
|
||||||
#[derive(Debug, Error, Clone, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
#[serde(tag = "type", rename_all = "kebab-case")]
|
|
||||||
pub enum ActionError {
|
|
||||||
/// Action not found in registry.
|
|
||||||
#[error("Action not found: {0}")]
|
|
||||||
NotFound(ActionId),
|
|
||||||
|
|
||||||
/// Action is disabled in current context.
|
|
||||||
#[error("Action is disabled: {action_id} - {reason}")]
|
|
||||||
Disabled { action_id: ActionId, reason: String },
|
|
||||||
|
|
||||||
/// Invalid scope for the action.
|
|
||||||
#[error("Invalid scope: expected {expected:?}, got {actual:?}")]
|
|
||||||
InvalidScope {
|
|
||||||
expected: crate::ActionScope,
|
|
||||||
actual: crate::ActionScope,
|
|
||||||
},
|
|
||||||
|
|
||||||
/// Action execution timed out.
|
|
||||||
#[error("Action timed out: {0}")]
|
|
||||||
Timeout(ActionId),
|
|
||||||
|
|
||||||
/// Error from plugin execution.
|
|
||||||
#[error("Plugin error: {0}")]
|
|
||||||
PluginError(String),
|
|
||||||
|
|
||||||
/// Validation error in action parameters.
|
|
||||||
#[error("Validation error: {0}")]
|
|
||||||
ValidationError(String),
|
|
||||||
|
|
||||||
/// Permission denied for action.
|
|
||||||
#[error("Permission denied: {0}")]
|
|
||||||
PermissionDenied(String),
|
|
||||||
|
|
||||||
/// Action was cancelled by user.
|
|
||||||
#[error("Action cancelled by user")]
|
|
||||||
Cancelled,
|
|
||||||
|
|
||||||
/// Internal error.
|
|
||||||
#[error("Internal error: {0}")]
|
|
||||||
Internal(String),
|
|
||||||
|
|
||||||
/// Required context is missing.
|
|
||||||
#[error("Required context missing: {missing_fields:?}")]
|
|
||||||
ContextMissing {
|
|
||||||
/// The context fields that are missing.
|
|
||||||
missing_fields: Vec<String>,
|
|
||||||
},
|
|
||||||
|
|
||||||
/// Action group not found.
|
|
||||||
#[error("Group not found: {0}")]
|
|
||||||
GroupNotFound(ActionGroupId),
|
|
||||||
|
|
||||||
/// Action group already exists.
|
|
||||||
#[error("Group already exists: {0}")]
|
|
||||||
GroupAlreadyExists(ActionGroupId),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ActionError {
|
|
||||||
/// Get a user-friendly error message.
|
|
||||||
pub fn user_message(&self) -> String {
|
|
||||||
match self {
|
|
||||||
Self::NotFound(id) => format!("Action '{}' is not available", id),
|
|
||||||
Self::Disabled { reason, .. } => reason.clone(),
|
|
||||||
Self::InvalidScope { expected, actual } => {
|
|
||||||
format!("Action requires {:?} scope, but got {:?}", expected, actual)
|
|
||||||
}
|
|
||||||
Self::Timeout(_) => "The operation took too long and was cancelled".into(),
|
|
||||||
Self::PluginError(msg) => format!("Plugin error: {}", msg),
|
|
||||||
Self::ValidationError(msg) => format!("Invalid input: {}", msg),
|
|
||||||
Self::PermissionDenied(resource) => format!("Permission denied for {}", resource),
|
|
||||||
Self::Cancelled => "Operation was cancelled".into(),
|
|
||||||
Self::Internal(_) => "An unexpected error occurred".into(),
|
|
||||||
Self::ContextMissing { missing_fields } => {
|
|
||||||
format!("Missing required context: {}", missing_fields.join(", "))
|
|
||||||
}
|
|
||||||
Self::GroupNotFound(id) => format!("Action group '{}' not found", id),
|
|
||||||
Self::GroupAlreadyExists(id) => format!("Action group '{}' already exists", id),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Whether this error should be reported to telemetry.
|
|
||||||
pub fn is_reportable(&self) -> bool {
|
|
||||||
matches!(self, Self::Internal(_) | Self::PluginError(_))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Whether this error can potentially be resolved by user interaction.
|
|
||||||
pub fn is_promptable(&self) -> bool {
|
|
||||||
matches!(self, Self::ContextMissing { .. })
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Whether this is a user-initiated cancellation.
|
|
||||||
pub fn is_cancelled(&self) -> bool {
|
|
||||||
matches!(self, Self::Cancelled)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;

    // Exercises user_message rendering plus the is_promptable / is_cancelled
    // classification helpers across representative variants.
    #[test]
    fn test_error_messages() {
        // Missing-context errors list the missing fields and are resolvable
        // by prompting the user for them.
        let err = ActionError::ContextMissing {
            missing_fields: vec!["workspace".into()],
        };
        assert_eq!(err.user_message(), "Missing required context: workspace");
        assert!(err.is_promptable());
        assert!(!err.is_cancelled());

        // Cancellation is terminal: it is not promptable.
        let cancelled = ActionError::Cancelled;
        assert!(cancelled.is_cancelled());
        assert!(!cancelled.is_promptable());

        // NotFound renders the fully-qualified `yaak:<ns>:<name>` action ID.
        let not_found = ActionError::NotFound(ActionId::builtin("test", "action"));
        assert_eq!(
            not_found.user_message(),
            "Action 'yaak:test:action' is not available"
        );
    }
}
|
|
||||||
@@ -1,606 +0,0 @@
|
|||||||
//! Action executor - central hub for action registration and invocation.
|
|
||||||
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use tokio::sync::RwLock;
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
check_context_availability, ActionAvailability, ActionError, ActionGroupId,
|
|
||||||
ActionGroupMetadata, ActionGroupSource, ActionGroupWithActions, ActionHandler, ActionId,
|
|
||||||
ActionMetadata, ActionParams, ActionResult, ActionScope, ActionSource, CurrentContext,
|
|
||||||
RegisteredActionGroup,
|
|
||||||
};
|
|
||||||
|
|
||||||
/// Options for listing actions.
///
/// All filters are optional and combined with AND semantics: an action must
/// match every filter that is set. The default value applies no filters.
#[derive(Clone, Debug, Default)]
pub struct ListActionsOptions {
    /// Filter by scope (exact match on the action's scope).
    pub scope: Option<ActionScope>,
    /// Filter by group (only actions assigned to this group).
    pub group_id: Option<ActionGroupId>,
    /// Search term for label/description (case-insensitive substring match).
    pub search: Option<String>,
}
|
|
||||||
|
|
||||||
/// A registered action with its handler.
///
/// Pairing metadata with a handler at registration time guarantees that every
/// action stored in the executor is invokable by construction.
struct RegisteredAction {
    /// Action metadata.
    metadata: ActionMetadata,
    /// Where the action was registered from.
    source: ActionSource,
    /// The handler for this action. Stored behind `Arc` so it can be cloned
    /// out of the registry and executed without holding the registry lock.
    handler: Arc<dyn ActionHandler>,
}
|
|
||||||
|
|
||||||
/// Central hub for action registration and invocation.
///
/// The executor owns all action metadata and handlers, ensuring every
/// registered action has a handler by construction.
///
/// All maps are guarded by independent `tokio::sync::RwLock`s; methods take
/// them for the shortest possible scope so reads stay concurrent.
pub struct ActionExecutor {
    /// All registered actions indexed by ID.
    actions: RwLock<HashMap<ActionId, RegisteredAction>>,

    /// Actions indexed by scope for efficient filtering.
    /// Kept in sync with `actions` by register/unregister.
    scope_index: RwLock<HashMap<ActionScope, Vec<ActionId>>>,

    /// All registered groups indexed by ID.
    groups: RwLock<HashMap<ActionGroupId, RegisteredActionGroup>>,
}
|
|
||||||
|
|
||||||
impl Default for ActionExecutor {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self::new()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ActionExecutor {
|
|
||||||
/// Create a new empty executor.
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
actions: RwLock::new(HashMap::new()),
|
|
||||||
scope_index: RwLock::new(HashMap::new()),
|
|
||||||
groups: RwLock::new(HashMap::new()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─────────────────────────────────────────────────────────────────────────
|
|
||||||
// Action Registration
|
|
||||||
// ─────────────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
/// Register an action with its handler.
|
|
||||||
///
|
|
||||||
/// Every action must have a handler - this is enforced by the API.
|
|
||||||
pub async fn register<H: ActionHandler + 'static>(
|
|
||||||
&self,
|
|
||||||
metadata: ActionMetadata,
|
|
||||||
source: ActionSource,
|
|
||||||
handler: H,
|
|
||||||
) -> Result<ActionId, ActionError> {
|
|
||||||
let id = metadata.id.clone();
|
|
||||||
let scope = metadata.scope.clone();
|
|
||||||
|
|
||||||
let action = RegisteredAction {
|
|
||||||
metadata,
|
|
||||||
source,
|
|
||||||
handler: Arc::new(handler),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Insert action
|
|
||||||
{
|
|
||||||
let mut actions = self.actions.write().await;
|
|
||||||
actions.insert(id.clone(), action);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update scope index
|
|
||||||
{
|
|
||||||
let mut index = self.scope_index.write().await;
|
|
||||||
index.entry(scope).or_default().push(id.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(id)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Unregister an action.
|
|
||||||
pub async fn unregister(&self, id: &ActionId) -> Result<(), ActionError> {
|
|
||||||
let mut actions = self.actions.write().await;
|
|
||||||
|
|
||||||
let action = actions
|
|
||||||
.remove(id)
|
|
||||||
.ok_or_else(|| ActionError::NotFound(id.clone()))?;
|
|
||||||
|
|
||||||
// Update scope index
|
|
||||||
{
|
|
||||||
let mut index = self.scope_index.write().await;
|
|
||||||
if let Some(ids) = index.get_mut(&action.metadata.scope) {
|
|
||||||
ids.retain(|i| i != id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Remove from group if assigned
|
|
||||||
if let Some(group_id) = &action.metadata.group_id {
|
|
||||||
let mut groups = self.groups.write().await;
|
|
||||||
if let Some(group) = groups.get_mut(group_id) {
|
|
||||||
group.action_ids.retain(|i| i != id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Unregister all actions from a specific source.
|
|
||||||
pub async fn unregister_source(&self, source_id: &str) -> Vec<ActionId> {
|
|
||||||
let actions_to_remove: Vec<ActionId> = {
|
|
||||||
let actions = self.actions.read().await;
|
|
||||||
actions
|
|
||||||
.iter()
|
|
||||||
.filter(|(_, a)| match &a.source {
|
|
||||||
ActionSource::Plugin { ref_id, .. } => ref_id == source_id,
|
|
||||||
ActionSource::Dynamic {
|
|
||||||
source_id: sid, ..
|
|
||||||
} => sid == source_id,
|
|
||||||
ActionSource::Builtin => false,
|
|
||||||
})
|
|
||||||
.map(|(id, _)| id.clone())
|
|
||||||
.collect()
|
|
||||||
};
|
|
||||||
|
|
||||||
for id in &actions_to_remove {
|
|
||||||
let _ = self.unregister(id).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
actions_to_remove
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─────────────────────────────────────────────────────────────────────────
|
|
||||||
// Action Invocation
|
|
||||||
// ─────────────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
/// Invoke an action with the given context and parameters.
|
|
||||||
///
|
|
||||||
/// This will:
|
|
||||||
/// 1. Look up the action metadata
|
|
||||||
/// 2. Check context availability
|
|
||||||
/// 3. Execute the handler
|
|
||||||
pub async fn invoke(
|
|
||||||
&self,
|
|
||||||
action_id: &ActionId,
|
|
||||||
context: CurrentContext,
|
|
||||||
params: ActionParams,
|
|
||||||
) -> Result<ActionResult, ActionError> {
|
|
||||||
// Get action and handler
|
|
||||||
let (metadata, handler) = {
|
|
||||||
let actions = self.actions.read().await;
|
|
||||||
let action = actions
|
|
||||||
.get(action_id)
|
|
||||||
.ok_or_else(|| ActionError::NotFound(action_id.clone()))?;
|
|
||||||
(action.metadata.clone(), action.handler.clone())
|
|
||||||
};
|
|
||||||
|
|
||||||
// Check context availability
|
|
||||||
let availability = check_context_availability(&metadata.required_context, &context);
|
|
||||||
|
|
||||||
match availability {
|
|
||||||
ActionAvailability::Available | ActionAvailability::AvailableWithPrompt { .. } => {
|
|
||||||
// Context is satisfied, proceed with execution
|
|
||||||
}
|
|
||||||
ActionAvailability::Unavailable { missing_fields } => {
|
|
||||||
return Err(ActionError::ContextMissing { missing_fields });
|
|
||||||
}
|
|
||||||
ActionAvailability::NotFound => {
|
|
||||||
return Err(ActionError::NotFound(action_id.clone()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Execute handler
|
|
||||||
handler.handle(context, params).await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Invoke an action, skipping context validation.
|
|
||||||
///
|
|
||||||
/// Use this when you've already validated the context externally.
|
|
||||||
pub async fn invoke_unchecked(
|
|
||||||
&self,
|
|
||||||
action_id: &ActionId,
|
|
||||||
context: CurrentContext,
|
|
||||||
params: ActionParams,
|
|
||||||
) -> Result<ActionResult, ActionError> {
|
|
||||||
// Get handler
|
|
||||||
let handler = {
|
|
||||||
let actions = self.actions.read().await;
|
|
||||||
let action = actions
|
|
||||||
.get(action_id)
|
|
||||||
.ok_or_else(|| ActionError::NotFound(action_id.clone()))?;
|
|
||||||
action.handler.clone()
|
|
||||||
};
|
|
||||||
|
|
||||||
// Execute handler
|
|
||||||
handler.handle(context, params).await
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─────────────────────────────────────────────────────────────────────────
|
|
||||||
// Action Queries
|
|
||||||
// ─────────────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
/// Get action metadata by ID.
|
|
||||||
pub async fn get(&self, id: &ActionId) -> Option<ActionMetadata> {
|
|
||||||
let actions = self.actions.read().await;
|
|
||||||
actions.get(id).map(|a| a.metadata.clone())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// List all actions, optionally filtered.
|
|
||||||
pub async fn list(&self, options: ListActionsOptions) -> Vec<ActionMetadata> {
|
|
||||||
let actions = self.actions.read().await;
|
|
||||||
|
|
||||||
let mut result: Vec<_> = actions
|
|
||||||
.values()
|
|
||||||
.filter(|a| {
|
|
||||||
// Scope filter
|
|
||||||
if let Some(scope) = &options.scope {
|
|
||||||
if &a.metadata.scope != scope {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Group filter
|
|
||||||
if let Some(group_id) = &options.group_id {
|
|
||||||
if a.metadata.group_id.as_ref() != Some(group_id) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Search filter
|
|
||||||
if let Some(search) = &options.search {
|
|
||||||
let search = search.to_lowercase();
|
|
||||||
let matches_label = a.metadata.label.to_lowercase().contains(&search);
|
|
||||||
let matches_desc = a
|
|
||||||
.metadata
|
|
||||||
.description
|
|
||||||
.as_ref()
|
|
||||||
.map(|d| d.to_lowercase().contains(&search))
|
|
||||||
.unwrap_or(false);
|
|
||||||
if !matches_label && !matches_desc {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
true
|
|
||||||
})
|
|
||||||
.map(|a| a.metadata.clone())
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
// Sort by order then label
|
|
||||||
result.sort_by(|a, b| a.order.cmp(&b.order).then_with(|| a.label.cmp(&b.label)));
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
/// List actions available in the given context.
|
|
||||||
pub async fn list_available(
|
|
||||||
&self,
|
|
||||||
context: &CurrentContext,
|
|
||||||
options: ListActionsOptions,
|
|
||||||
) -> Vec<(ActionMetadata, ActionAvailability)> {
|
|
||||||
let all_actions = self.list(options).await;
|
|
||||||
|
|
||||||
all_actions
|
|
||||||
.into_iter()
|
|
||||||
.map(|action| {
|
|
||||||
let availability =
|
|
||||||
check_context_availability(&action.required_context, context);
|
|
||||||
(action, availability)
|
|
||||||
})
|
|
||||||
.filter(|(_, availability)| availability.is_available())
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get availability status for a specific action.
|
|
||||||
pub async fn get_availability(
|
|
||||||
&self,
|
|
||||||
id: &ActionId,
|
|
||||||
context: &CurrentContext,
|
|
||||||
) -> ActionAvailability {
|
|
||||||
let actions = self.actions.read().await;
|
|
||||||
|
|
||||||
match actions.get(id) {
|
|
||||||
Some(action) => {
|
|
||||||
check_context_availability(&action.metadata.required_context, context)
|
|
||||||
}
|
|
||||||
None => ActionAvailability::NotFound,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─────────────────────────────────────────────────────────────────────────
|
|
||||||
// Group Registration
|
|
||||||
// ─────────────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
/// Register an action group.
|
|
||||||
pub async fn register_group(
|
|
||||||
&self,
|
|
||||||
metadata: ActionGroupMetadata,
|
|
||||||
source: ActionGroupSource,
|
|
||||||
) -> Result<ActionGroupId, ActionError> {
|
|
||||||
let id = metadata.id.clone();
|
|
||||||
|
|
||||||
let mut groups = self.groups.write().await;
|
|
||||||
if groups.contains_key(&id) {
|
|
||||||
return Err(ActionError::GroupAlreadyExists(id));
|
|
||||||
}
|
|
||||||
|
|
||||||
groups.insert(
|
|
||||||
id.clone(),
|
|
||||||
RegisteredActionGroup {
|
|
||||||
metadata,
|
|
||||||
action_ids: Vec::new(),
|
|
||||||
source,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(id)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Unregister a group (does not unregister its actions).
|
|
||||||
pub async fn unregister_group(&self, id: &ActionGroupId) -> Result<(), ActionError> {
|
|
||||||
let mut groups = self.groups.write().await;
|
|
||||||
groups
|
|
||||||
.remove(id)
|
|
||||||
.ok_or_else(|| ActionError::GroupNotFound(id.clone()))?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add an action to a group.
|
|
||||||
pub async fn add_to_group(
|
|
||||||
&self,
|
|
||||||
action_id: &ActionId,
|
|
||||||
group_id: &ActionGroupId,
|
|
||||||
) -> Result<(), ActionError> {
|
|
||||||
// Update action's group_id
|
|
||||||
{
|
|
||||||
let mut actions = self.actions.write().await;
|
|
||||||
let action = actions
|
|
||||||
.get_mut(action_id)
|
|
||||||
.ok_or_else(|| ActionError::NotFound(action_id.clone()))?;
|
|
||||||
action.metadata.group_id = Some(group_id.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add to group's action list
|
|
||||||
{
|
|
||||||
let mut groups = self.groups.write().await;
|
|
||||||
let group = groups
|
|
||||||
.get_mut(group_id)
|
|
||||||
.ok_or_else(|| ActionError::GroupNotFound(group_id.clone()))?;
|
|
||||||
|
|
||||||
if !group.action_ids.contains(action_id) {
|
|
||||||
group.action_ids.push(action_id.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─────────────────────────────────────────────────────────────────────────
|
|
||||||
// Group Queries
|
|
||||||
// ─────────────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
/// Get a group by ID.
|
|
||||||
pub async fn get_group(&self, id: &ActionGroupId) -> Option<ActionGroupMetadata> {
|
|
||||||
let groups = self.groups.read().await;
|
|
||||||
groups.get(id).map(|g| g.metadata.clone())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// List all groups, optionally filtered by scope.
|
|
||||||
pub async fn list_groups(&self, scope: Option<ActionScope>) -> Vec<ActionGroupMetadata> {
|
|
||||||
let groups = self.groups.read().await;
|
|
||||||
|
|
||||||
let mut result: Vec<_> = groups
|
|
||||||
.values()
|
|
||||||
.filter(|g| {
|
|
||||||
scope.as_ref().map_or(true, |s| {
|
|
||||||
g.metadata.scope.as_ref().map_or(true, |gs| gs == s)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.map(|g| g.metadata.clone())
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
result.sort_by_key(|g| g.order);
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
/// List all actions in a specific group.
|
|
||||||
pub async fn list_by_group(&self, group_id: &ActionGroupId) -> Vec<ActionMetadata> {
|
|
||||||
let groups = self.groups.read().await;
|
|
||||||
let actions = self.actions.read().await;
|
|
||||||
|
|
||||||
groups
|
|
||||||
.get(group_id)
|
|
||||||
.map(|group| {
|
|
||||||
let mut result: Vec<_> = group
|
|
||||||
.action_ids
|
|
||||||
.iter()
|
|
||||||
.filter_map(|id| actions.get(id).map(|a| a.metadata.clone()))
|
|
||||||
.collect();
|
|
||||||
result.sort_by_key(|a| a.order);
|
|
||||||
result
|
|
||||||
})
|
|
||||||
.unwrap_or_default()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get actions organized by their groups.
|
|
||||||
pub async fn list_grouped(&self, scope: Option<ActionScope>) -> Vec<ActionGroupWithActions> {
|
|
||||||
let group_list = self.list_groups(scope).await;
|
|
||||||
let mut result = Vec::new();
|
|
||||||
|
|
||||||
for group in group_list {
|
|
||||||
let actions = self.list_by_group(&group.id).await;
|
|
||||||
result.push(ActionGroupWithActions { group, actions });
|
|
||||||
}
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─────────────────────────────────────────────────────────────────────────
|
|
||||||
// Built-in Registration
|
|
||||||
// ─────────────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
/// Register all built-in groups.
|
|
||||||
pub async fn register_builtin_groups(&self) -> Result<(), ActionError> {
|
|
||||||
for group in crate::groups::builtin::all() {
|
|
||||||
self.register_group(group, ActionGroupSource::Builtin).await?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{handler_fn, RequiredContext};

    // Builds an executor pre-loaded with a single global "echo" action that
    // returns its "message" param back as the result message.
    async fn create_test_executor() -> ActionExecutor {
        let executor = ActionExecutor::new();
        executor
            .register(
                ActionMetadata {
                    id: ActionId::builtin("test", "echo"),
                    label: "Echo".to_string(),
                    description: None,
                    icon: None,
                    scope: ActionScope::Global,
                    keyboard_shortcut: None,
                    requires_selection: false,
                    enabled_condition: None,
                    group_id: None,
                    order: 0,
                    required_context: RequiredContext::default(),
                },
                ActionSource::Builtin,
                handler_fn(|_ctx, params| async move {
                    // Missing "message" falls back to the empty string.
                    let msg: String = params.get("message").unwrap_or_default();
                    Ok(ActionResult::with_message(msg))
                }),
            )
            .await
            .unwrap();
        executor
    }

    // Round-trip: register an action, invoke it, and check the handler's
    // result carries the param back.
    #[tokio::test]
    async fn test_register_and_invoke() {
        let executor = create_test_executor().await;
        let action_id = ActionId::builtin("test", "echo");

        let params = ActionParams::from_json(serde_json::json!({
            "message": "Hello, World!"
        }));

        let result = executor
            .invoke(&action_id, CurrentContext::default(), params)
            .await
            .unwrap();

        match result {
            ActionResult::Success { message, .. } => {
                assert_eq!(message, Some("Hello, World!".to_string()));
            }
            _ => panic!("Expected Success result"),
        }
    }

    // Invoking an ID that was never registered must fail with NotFound.
    #[tokio::test]
    async fn test_invoke_not_found() {
        let executor = ActionExecutor::new();
        let action_id = ActionId::builtin("test", "unknown");

        let result = executor
            .invoke(&action_id, CurrentContext::default(), ActionParams::empty())
            .await;

        assert!(matches!(result, Err(ActionError::NotFound(_))));
    }

    // The scope filter in list() must partition actions by their scope.
    #[tokio::test]
    async fn test_list_by_scope() {
        let executor = ActionExecutor::new();

        // One Global-scoped action...
        executor
            .register(
                ActionMetadata {
                    id: ActionId::builtin("global", "one"),
                    label: "Global One".to_string(),
                    description: None,
                    icon: None,
                    scope: ActionScope::Global,
                    keyboard_shortcut: None,
                    requires_selection: false,
                    enabled_condition: None,
                    group_id: None,
                    order: 0,
                    required_context: RequiredContext::default(),
                },
                ActionSource::Builtin,
                handler_fn(|_ctx, _params| async move { Ok(ActionResult::ok()) }),
            )
            .await
            .unwrap();

        // ...and one HttpRequest-scoped action.
        executor
            .register(
                ActionMetadata {
                    id: ActionId::builtin("http", "one"),
                    label: "HTTP One".to_string(),
                    description: None,
                    icon: None,
                    scope: ActionScope::HttpRequest,
                    keyboard_shortcut: None,
                    requires_selection: false,
                    enabled_condition: None,
                    group_id: None,
                    order: 0,
                    required_context: RequiredContext::default(),
                },
                ActionSource::Builtin,
                handler_fn(|_ctx, _params| async move { Ok(ActionResult::ok()) }),
            )
            .await
            .unwrap();

        let global_actions = executor
            .list(ListActionsOptions {
                scope: Some(ActionScope::Global),
                ..Default::default()
            })
            .await;
        assert_eq!(global_actions.len(), 1);

        let http_actions = executor
            .list(ListActionsOptions {
                scope: Some(ActionScope::HttpRequest),
                ..Default::default()
            })
            .await;
        assert_eq!(http_actions.len(), 1);
    }

    // Built-in group registration exposes the expected well-known groups.
    #[tokio::test]
    async fn test_groups() {
        let executor = ActionExecutor::new();
        executor.register_builtin_groups().await.unwrap();

        let groups = executor.list_groups(None).await;
        assert!(!groups.is_empty());

        let export_group = executor.get_group(&ActionGroupId::builtin("export")).await;
        assert!(export_group.is_some());
        assert_eq!(export_group.unwrap().name, "Export");
    }

    // Unregistering removes the action from lookup entirely.
    #[tokio::test]
    async fn test_unregister() {
        let executor = create_test_executor().await;
        let action_id = ActionId::builtin("test", "echo");

        assert!(executor.get(&action_id).await.is_some());

        executor.unregister(&action_id).await.unwrap();
        assert!(executor.get(&action_id).await.is_none());
    }
}
|
|
||||||
@@ -1,208 +0,0 @@
|
|||||||
//! Action group types and management.
|
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use ts_rs::TS;
|
|
||||||
|
|
||||||
use crate::{ActionId, ActionMetadata, ActionScope};
|
|
||||||
|
|
||||||
/// Unique identifier for an action group.
///
/// Format: `namespace:group-name`
/// - Built-in: `yaak:export`
/// - Plugin: `plugin.my-plugin:utilities`
///
/// A thin newtype over `String`; prefer the constructors on the `impl`
/// (`builtin`, `plugin`, `new`) over building the string by hand so the
/// namespace convention stays consistent.
#[derive(Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct ActionGroupId(pub String);
|
|
||||||
|
|
||||||
impl ActionGroupId {
|
|
||||||
/// Create a namespaced group ID.
|
|
||||||
pub fn new(namespace: &str, name: &str) -> Self {
|
|
||||||
Self(format!("{}:{}", namespace, name))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create ID for built-in groups.
|
|
||||||
pub fn builtin(name: &str) -> Self {
|
|
||||||
Self::new("yaak", name)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create ID for plugin groups.
|
|
||||||
pub fn plugin(plugin_ref_id: &str, name: &str) -> Self {
|
|
||||||
Self::new(&format!("plugin.{}", plugin_ref_id), name)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the raw string value.
|
|
||||||
pub fn as_str(&self) -> &str {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for ActionGroupId {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
write!(f, "{}", self.0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Metadata about an action group.
///
/// Serialized with camelCase keys for the TypeScript frontend (see the
/// `ts_rs` export).
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct ActionGroupMetadata {
    /// Unique identifier for this group.
    pub id: ActionGroupId,

    /// Display name for the group.
    pub name: String,

    /// Optional description of the group's purpose.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,

    /// Icon to display for the group.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub icon: Option<String>,

    /// Sort order for displaying groups (lower = earlier). Defaults to 0
    /// when absent from serialized input.
    #[serde(default)]
    pub order: i32,

    /// Optional scope restriction (if set, group only appears in this scope;
    /// if `None`, the group matches any scope filter).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub scope: Option<ActionScope>,
}
|
|
||||||
|
|
||||||
/// Where an action group was registered from.
///
/// Serialized as an internally-tagged enum (`"type"` discriminator,
/// kebab-case variant names) for the frontend.
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
#[ts(export)]
#[serde(tag = "type", rename_all = "kebab-case")]
pub enum ActionGroupSource {
    /// Built into Yaak core.
    Builtin,
    /// Registered by a plugin.
    Plugin {
        /// Plugin reference ID.
        ref_id: String,
        /// Plugin name.
        name: String,
    },
    /// Registered at runtime (e.g. via an API rather than a plugin).
    Dynamic {
        /// Source identifier used to bulk-unregister later.
        source_id: String,
    },
}
|
|
||||||
|
|
||||||
/// A registered action group with its actions.
///
/// Internal bookkeeping type (not serialized): pairs the public metadata
/// with membership and provenance.
#[derive(Clone, Debug)]
pub struct RegisteredActionGroup {
    /// Group metadata.
    pub metadata: ActionGroupMetadata,

    /// IDs of actions in this group (ordered by action's order field).
    pub action_ids: Vec<ActionId>,

    /// Where the group was registered from.
    pub source: ActionGroupSource,
}
|
|
||||||
|
|
||||||
/// A group with its actions for UI rendering.
///
/// A denormalized view produced by `ActionExecutor::list_grouped`, combining
/// a group's metadata with the metadata of its member actions.
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct ActionGroupWithActions {
    /// Group metadata.
    pub group: ActionGroupMetadata,

    /// Actions in this group.
    pub actions: Vec<ActionMetadata>,
}
|
|
||||||
|
|
||||||
/// Built-in action group definitions.
|
|
||||||
pub mod builtin {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
/// Export group - export and copy actions.
|
|
||||||
pub fn export() -> ActionGroupMetadata {
|
|
||||||
ActionGroupMetadata {
|
|
||||||
id: ActionGroupId::builtin("export"),
|
|
||||||
name: "Export".into(),
|
|
||||||
description: Some("Export and copy actions".into()),
|
|
||||||
icon: Some("download".into()),
|
|
||||||
order: 100,
|
|
||||||
scope: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Code generation group.
|
|
||||||
pub fn code_generation() -> ActionGroupMetadata {
|
|
||||||
ActionGroupMetadata {
|
|
||||||
id: ActionGroupId::builtin("code-generation"),
|
|
||||||
name: "Code Generation".into(),
|
|
||||||
description: Some("Generate code snippets from requests".into()),
|
|
||||||
icon: Some("code".into()),
|
|
||||||
order: 200,
|
|
||||||
scope: Some(ActionScope::HttpRequest),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Send group - request sending actions.
|
|
||||||
pub fn send() -> ActionGroupMetadata {
|
|
||||||
ActionGroupMetadata {
|
|
||||||
id: ActionGroupId::builtin("send"),
|
|
||||||
name: "Send".into(),
|
|
||||||
description: Some("Actions for sending requests".into()),
|
|
||||||
icon: Some("play".into()),
|
|
||||||
order: 50,
|
|
||||||
scope: Some(ActionScope::HttpRequest),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Import group.
|
|
||||||
pub fn import() -> ActionGroupMetadata {
|
|
||||||
ActionGroupMetadata {
|
|
||||||
id: ActionGroupId::builtin("import"),
|
|
||||||
name: "Import".into(),
|
|
||||||
description: Some("Import data from files".into()),
|
|
||||||
icon: Some("upload".into()),
|
|
||||||
order: 150,
|
|
||||||
scope: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Workspace management group.
|
|
||||||
pub fn workspace() -> ActionGroupMetadata {
|
|
||||||
ActionGroupMetadata {
|
|
||||||
id: ActionGroupId::builtin("workspace"),
|
|
||||||
name: "Workspace".into(),
|
|
||||||
description: Some("Workspace management actions".into()),
|
|
||||||
icon: Some("folder".into()),
|
|
||||||
order: 300,
|
|
||||||
scope: Some(ActionScope::Workspace),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get all built-in group definitions.
|
|
||||||
pub fn all() -> Vec<ActionGroupMetadata> {
|
|
||||||
vec![send(), export(), import(), code_generation(), workspace()]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;

    // Constructors must produce the documented `namespace:name` format.
    #[test]
    fn test_group_id_creation() {
        let id = ActionGroupId::builtin("export");
        assert_eq!(id.as_str(), "yaak:export");

        let plugin_id = ActionGroupId::plugin("my-plugin", "utilities");
        assert_eq!(plugin_id.as_str(), "plugin.my-plugin:utilities");
    }

    // The built-in set is non-empty and includes the export group.
    #[test]
    fn test_builtin_groups() {
        let groups = builtin::all();
        assert!(!groups.is_empty());
        assert!(groups.iter().any(|g| g.id == ActionGroupId::builtin("export")));
    }
}
|
|
||||||
@@ -1,103 +0,0 @@
|
|||||||
//! Action handler types and execution.
|
|
||||||
|
|
||||||
use std::future::Future;
|
|
||||||
use std::pin::Pin;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use crate::{ActionError, ActionParams, ActionResult, CurrentContext};
|
|
||||||
|
|
||||||
/// A boxed future for async action handlers.
///
/// Type-erases the handler's concrete future so heterogeneous handlers can
/// be stored behind `dyn ActionHandler`. `Send` is required because handlers
/// run on the tokio multi-threaded runtime.
pub type BoxFuture<'a, T> = Pin<Box<dyn Future<Output = T> + Send + 'a>>;

/// Function signature for action handlers.
///
/// A shared, clonable (`Arc`) callable from (context, params) to a boxed
/// result future; usable where a bare `dyn ActionHandler` is inconvenient.
pub type ActionHandlerFn = Arc<
    dyn Fn(CurrentContext, ActionParams) -> BoxFuture<'static, Result<ActionResult, ActionError>>
        + Send
        + Sync,
>;
|
|
||||||
|
|
||||||
/// Trait for types that can handle action invocations.
///
/// Object-safe on purpose: the executor stores handlers as
/// `Arc<dyn ActionHandler>`. Prefer [`handler_fn`] to implement this from a
/// closure rather than writing an impl by hand.
pub trait ActionHandler: Send + Sync {
    /// Execute the action with the given context and parameters.
    ///
    /// Takes `context` and `params` by value; the returned future is
    /// `'static` and `Send` so it can be awaited off the registry lock.
    fn handle(
        &self,
        context: CurrentContext,
        params: ActionParams,
    ) -> BoxFuture<'static, Result<ActionResult, ActionError>>;
}
|
|
||||||
|
|
||||||
/// Wrapper to create an ActionHandler from a function.
///
/// Newtype adapter so any `Fn(CurrentContext, ActionParams) -> impl Future`
/// can be used where a `dyn ActionHandler` is expected. Construct via
/// [`handler_fn`].
pub struct FnHandler<F>(pub F);

// Blanket impl: the closure's future is boxed/pinned to satisfy the
// object-safe `BoxFuture` return type of the trait.
impl<F, Fut> ActionHandler for FnHandler<F>
where
    F: Fn(CurrentContext, ActionParams) -> Fut + Send + Sync,
    Fut: Future<Output = Result<ActionResult, ActionError>> + Send + 'static,
{
    fn handle(
        &self,
        context: CurrentContext,
        params: ActionParams,
    ) -> BoxFuture<'static, Result<ActionResult, ActionError>> {
        Box::pin((self.0)(context, params))
    }
}
|
|
||||||
|
|
||||||
/// Create an action handler from an async function.
///
/// Convenience constructor for [`FnHandler`]; the bounds mirror its
/// `ActionHandler` blanket impl so the result is always usable as a handler.
///
/// # Example
/// ```ignore
/// let handler = handler_fn(|ctx, params| async move {
///     Ok(ActionResult::ok())
/// });
/// ```
pub fn handler_fn<F, Fut>(f: F) -> FnHandler<F>
where
    F: Fn(CurrentContext, ActionParams) -> Fut + Send + Sync,
    Fut: Future<Output = Result<ActionResult, ActionError>> + Send + 'static,
{
    FnHandler(f)
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_handler_fn() {
|
|
||||||
let handler = handler_fn(|_ctx, _params| async move { Ok(ActionResult::ok()) });
|
|
||||||
|
|
||||||
let result = handler
|
|
||||||
.handle(CurrentContext::default(), ActionParams::empty())
|
|
||||||
.await;
|
|
||||||
|
|
||||||
assert!(result.is_ok());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_handler_with_params() {
|
|
||||||
let handler = handler_fn(|_ctx, params| async move {
|
|
||||||
let name: Option<String> = params.get("name");
|
|
||||||
Ok(ActionResult::with_message(format!(
|
|
||||||
"Hello, {}!",
|
|
||||||
name.unwrap_or_else(|| "World".to_string())
|
|
||||||
)))
|
|
||||||
});
|
|
||||||
|
|
||||||
let params = ActionParams::from_json(serde_json::json!({
|
|
||||||
"name": "Yaak"
|
|
||||||
}));
|
|
||||||
|
|
||||||
let result = handler
|
|
||||||
.handle(CurrentContext::default(), params)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
match result {
|
|
||||||
ActionResult::Success { message, .. } => {
|
|
||||||
assert_eq!(message, Some("Hello, Yaak!".to_string()));
|
|
||||||
}
|
|
||||||
_ => panic!("Expected Success result"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
//! Centralized action system for Yaak.
|
|
||||||
//!
|
|
||||||
//! This crate provides a unified hub for registering and invoking actions
|
|
||||||
//! across all entry points: plugins, Tauri desktop app, CLI, deep links, and MCP server.
|
|
||||||
|
|
||||||
mod context;
|
|
||||||
mod error;
|
|
||||||
mod executor;
|
|
||||||
mod groups;
|
|
||||||
mod handler;
|
|
||||||
mod types;
|
|
||||||
|
|
||||||
pub use context::*;
|
|
||||||
pub use error::*;
|
|
||||||
pub use executor::*;
|
|
||||||
pub use groups::*;
|
|
||||||
pub use handler::*;
|
|
||||||
pub use types::*;
|
|
||||||
@@ -1,273 +0,0 @@
|
|||||||
//! Core types for the action system.
|
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use ts_rs::TS;
|
|
||||||
|
|
||||||
use crate::{ActionGroupId, RequiredContext};
|
|
||||||
|
|
||||||
/// Unique identifier for an action.
|
|
||||||
///
|
|
||||||
/// Format: `namespace:category:name`
|
|
||||||
/// - Built-in: `yaak:http-request:send`
|
|
||||||
/// - Plugin: `plugin.copy-curl:http-request:copy`
|
|
||||||
#[derive(Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
pub struct ActionId(pub String);
|
|
||||||
|
|
||||||
impl ActionId {
|
|
||||||
/// Create a namespaced action ID.
|
|
||||||
pub fn new(namespace: &str, category: &str, name: &str) -> Self {
|
|
||||||
Self(format!("{}:{}:{}", namespace, category, name))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create ID for built-in actions.
|
|
||||||
pub fn builtin(category: &str, name: &str) -> Self {
|
|
||||||
Self::new("yaak", category, name)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create ID for plugin actions.
|
|
||||||
pub fn plugin(plugin_ref_id: &str, category: &str, name: &str) -> Self {
|
|
||||||
Self::new(&format!("plugin.{}", plugin_ref_id), category, name)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the raw string value.
|
|
||||||
pub fn as_str(&self) -> &str {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for ActionId {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
write!(f, "{}", self.0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The scope in which an action can be invoked.
|
|
||||||
#[derive(Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
#[serde(rename_all = "kebab-case")]
|
|
||||||
pub enum ActionScope {
|
|
||||||
/// Global actions available everywhere.
|
|
||||||
Global,
|
|
||||||
/// Actions on HTTP requests.
|
|
||||||
HttpRequest,
|
|
||||||
/// Actions on WebSocket requests.
|
|
||||||
WebsocketRequest,
|
|
||||||
/// Actions on gRPC requests.
|
|
||||||
GrpcRequest,
|
|
||||||
/// Actions on workspaces.
|
|
||||||
Workspace,
|
|
||||||
/// Actions on folders.
|
|
||||||
Folder,
|
|
||||||
/// Actions on environments.
|
|
||||||
Environment,
|
|
||||||
/// Actions on cookie jars.
|
|
||||||
CookieJar,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Metadata about an action for discovery.
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
#[serde(rename_all = "camelCase")]
|
|
||||||
pub struct ActionMetadata {
|
|
||||||
/// Unique identifier for this action.
|
|
||||||
pub id: ActionId,
|
|
||||||
|
|
||||||
/// Display label for the action.
|
|
||||||
pub label: String,
|
|
||||||
|
|
||||||
/// Optional description of what the action does.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub description: Option<String>,
|
|
||||||
|
|
||||||
/// Icon name to display.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub icon: Option<String>,
|
|
||||||
|
|
||||||
/// The scope this action applies to.
|
|
||||||
pub scope: ActionScope,
|
|
||||||
|
|
||||||
/// Keyboard shortcut (e.g., "Cmd+Enter").
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub keyboard_shortcut: Option<String>,
|
|
||||||
|
|
||||||
/// Whether the action requires a selection/target.
|
|
||||||
#[serde(default)]
|
|
||||||
pub requires_selection: bool,
|
|
||||||
|
|
||||||
/// Optional condition expression for when action is enabled.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub enabled_condition: Option<String>,
|
|
||||||
|
|
||||||
/// Optional group this action belongs to.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub group_id: Option<ActionGroupId>,
|
|
||||||
|
|
||||||
/// Sort order within a group (lower = earlier).
|
|
||||||
#[serde(default)]
|
|
||||||
pub order: i32,
|
|
||||||
|
|
||||||
/// Context requirements for this action.
|
|
||||||
#[serde(default)]
|
|
||||||
pub required_context: RequiredContext,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Where an action was registered from.
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
#[serde(tag = "type", rename_all = "kebab-case")]
|
|
||||||
pub enum ActionSource {
|
|
||||||
/// Built into Yaak core.
|
|
||||||
Builtin,
|
|
||||||
/// Registered by a plugin.
|
|
||||||
Plugin {
|
|
||||||
/// Plugin reference ID.
|
|
||||||
ref_id: String,
|
|
||||||
/// Plugin name.
|
|
||||||
name: String,
|
|
||||||
},
|
|
||||||
/// Registered at runtime (e.g., by MCP tools).
|
|
||||||
Dynamic {
|
|
||||||
/// Source identifier.
|
|
||||||
source_id: String,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Parameters passed to action handlers.
|
|
||||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
pub struct ActionParams {
|
|
||||||
/// Arbitrary JSON parameters.
|
|
||||||
#[serde(default)]
|
|
||||||
#[ts(type = "unknown")]
|
|
||||||
pub data: serde_json::Value,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ActionParams {
|
|
||||||
/// Create empty params.
|
|
||||||
pub fn empty() -> Self {
|
|
||||||
Self {
|
|
||||||
data: serde_json::Value::Null,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create params from a JSON value.
|
|
||||||
pub fn from_json(data: serde_json::Value) -> Self {
|
|
||||||
Self { data }
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get a typed value from the params.
|
|
||||||
pub fn get<T: serde::de::DeserializeOwned>(&self, key: &str) -> Option<T> {
|
|
||||||
self.data
|
|
||||||
.get(key)
|
|
||||||
.and_then(|v| serde_json::from_value(v.clone()).ok())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Result of action execution.
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
#[serde(tag = "type", rename_all = "kebab-case")]
|
|
||||||
pub enum ActionResult {
|
|
||||||
/// Action completed successfully.
|
|
||||||
Success {
|
|
||||||
/// Optional data to return.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
#[ts(type = "unknown")]
|
|
||||||
data: Option<serde_json::Value>,
|
|
||||||
/// Optional message to display.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
message: Option<String>,
|
|
||||||
},
|
|
||||||
|
|
||||||
/// Action requires user input to continue.
|
|
||||||
RequiresInput {
|
|
||||||
/// Prompt to show user.
|
|
||||||
prompt: InputPrompt,
|
|
||||||
/// Continuation token.
|
|
||||||
continuation_id: String,
|
|
||||||
},
|
|
||||||
|
|
||||||
/// Action was cancelled by the user.
|
|
||||||
Cancelled,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ActionResult {
|
|
||||||
/// Create a success result with no data.
|
|
||||||
pub fn ok() -> Self {
|
|
||||||
Self::Success {
|
|
||||||
data: None,
|
|
||||||
message: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a success result with a message.
|
|
||||||
pub fn with_message(message: impl Into<String>) -> Self {
|
|
||||||
Self::Success {
|
|
||||||
data: None,
|
|
||||||
message: Some(message.into()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a success result with data.
|
|
||||||
pub fn with_data(data: serde_json::Value) -> Self {
|
|
||||||
Self::Success {
|
|
||||||
data: Some(data),
|
|
||||||
message: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A prompt for user input.
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
#[serde(tag = "type", rename_all = "kebab-case")]
|
|
||||||
pub enum InputPrompt {
|
|
||||||
/// Text input prompt.
|
|
||||||
Text {
|
|
||||||
label: String,
|
|
||||||
placeholder: Option<String>,
|
|
||||||
default_value: Option<String>,
|
|
||||||
},
|
|
||||||
/// Selection prompt.
|
|
||||||
Select {
|
|
||||||
label: String,
|
|
||||||
options: Vec<SelectOption>,
|
|
||||||
},
|
|
||||||
/// Confirmation prompt.
|
|
||||||
Confirm { label: String },
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An option in a select prompt.
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
pub struct SelectOption {
|
|
||||||
pub label: String,
|
|
||||||
pub value: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_action_id_creation() {
|
|
||||||
let id = ActionId::builtin("http-request", "send");
|
|
||||||
assert_eq!(id.as_str(), "yaak:http-request:send");
|
|
||||||
|
|
||||||
let plugin_id = ActionId::plugin("copy-curl", "http-request", "copy");
|
|
||||||
assert_eq!(plugin_id.as_str(), "plugin.copy-curl:http-request:copy");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_action_params() {
|
|
||||||
let params = ActionParams::from_json(serde_json::json!({
|
|
||||||
"name": "test",
|
|
||||||
"count": 42
|
|
||||||
}));
|
|
||||||
|
|
||||||
assert_eq!(params.get::<String>("name"), Some("test".to_string()));
|
|
||||||
assert_eq!(params.get::<i32>("count"), Some(42));
|
|
||||||
assert_eq!(params.get::<String>("missing"), None);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
12
crates/yaak-api/Cargo.toml
Normal file
12
crates/yaak-api/Cargo.toml
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
[package]
|
||||||
|
name = "yaak-api"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2024"
|
||||||
|
publish = false
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
log = { workspace = true }
|
||||||
|
reqwest = { workspace = true, features = ["gzip"] }
|
||||||
|
sysproxy = "0.3"
|
||||||
|
thiserror = { workspace = true }
|
||||||
|
yaak-common = { workspace = true }
|
||||||
9
crates/yaak-api/src/error.rs
Normal file
9
crates/yaak-api/src/error.rs
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
use thiserror::Error;
|
||||||
|
|
||||||
|
#[derive(Error, Debug)]
|
||||||
|
pub enum Error {
|
||||||
|
#[error(transparent)]
|
||||||
|
ReqwestError(#[from] reqwest::Error),
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type Result<T> = std::result::Result<T, Error>;
|
||||||
70
crates/yaak-api/src/lib.rs
Normal file
70
crates/yaak-api/src/lib.rs
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
mod error;
|
||||||
|
|
||||||
|
pub use error::{Error, Result};
|
||||||
|
|
||||||
|
use log::{debug, warn};
|
||||||
|
use reqwest::Client;
|
||||||
|
use reqwest::header::{HeaderMap, HeaderValue};
|
||||||
|
use std::time::Duration;
|
||||||
|
use yaak_common::platform::{get_ua_arch, get_ua_platform};
|
||||||
|
|
||||||
|
/// Build a reqwest Client configured for Yaak's own API calls.
|
||||||
|
///
|
||||||
|
/// Includes a custom User-Agent, JSON accept header, 20s timeout, gzip,
|
||||||
|
/// and automatic OS-level proxy detection via sysproxy.
|
||||||
|
pub fn yaak_api_client(version: &str) -> Result<Client> {
|
||||||
|
let platform = get_ua_platform();
|
||||||
|
let arch = get_ua_arch();
|
||||||
|
let ua = format!("Yaak/{version} ({platform}; {arch})");
|
||||||
|
|
||||||
|
let mut default_headers = HeaderMap::new();
|
||||||
|
default_headers.insert("Accept", HeaderValue::from_str("application/json").unwrap());
|
||||||
|
|
||||||
|
let mut builder = reqwest::ClientBuilder::new()
|
||||||
|
.timeout(Duration::from_secs(20))
|
||||||
|
.default_headers(default_headers)
|
||||||
|
.gzip(true)
|
||||||
|
.user_agent(ua);
|
||||||
|
|
||||||
|
if let Some(sys) = get_enabled_system_proxy() {
|
||||||
|
let proxy_url = format!("http://{}:{}", sys.host, sys.port);
|
||||||
|
match reqwest::Proxy::all(&proxy_url) {
|
||||||
|
Ok(p) => {
|
||||||
|
let p = if !sys.bypass.is_empty() {
|
||||||
|
p.no_proxy(reqwest::NoProxy::from_string(&sys.bypass))
|
||||||
|
} else {
|
||||||
|
p
|
||||||
|
};
|
||||||
|
builder = builder.proxy(p);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn!("Failed to configure system proxy: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(builder.build()?)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the system proxy URL if one is enabled, e.g. `http://host:port`.
|
||||||
|
pub fn get_system_proxy_url() -> Option<String> {
|
||||||
|
let sys = get_enabled_system_proxy()?;
|
||||||
|
Some(format!("http://{}:{}", sys.host, sys.port))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_enabled_system_proxy() -> Option<sysproxy::Sysproxy> {
|
||||||
|
match sysproxy::Sysproxy::get_system_proxy() {
|
||||||
|
Ok(sys) if sys.enable => {
|
||||||
|
debug!("Detected system proxy: http://{}:{}", sys.host, sys.port);
|
||||||
|
Some(sys)
|
||||||
|
}
|
||||||
|
Ok(_) => {
|
||||||
|
debug!("System proxy detected but not enabled");
|
||||||
|
None
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
debug!("Could not detect system proxy: {e}");
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -6,7 +6,7 @@ publish = false
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
chrono = { workspace = true, features = ["serde"] }
|
chrono = { workspace = true, features = ["serde"] }
|
||||||
git2 = { version = "0.20.0", features = ["vendored-libgit2", "vendored-openssl"] }
|
git2 = { version = "0.20.4", features = ["vendored-libgit2", "vendored-openssl"] }
|
||||||
log = { workspace = true }
|
log = { workspace = true }
|
||||||
serde = { workspace = true, features = ["derive"] }
|
serde = { workspace = true, features = ["derive"] }
|
||||||
serde_json = { workspace = true }
|
serde_json = { workspace = true }
|
||||||
|
|||||||
4
crates/yaak-git/bindings/gen_git.ts
generated
4
crates/yaak-git/bindings/gen_git.ts
generated
@@ -15,8 +15,8 @@ export type GitStatus = "untracked" | "conflict" | "current" | "modified" | "rem
|
|||||||
|
|
||||||
export type GitStatusEntry = { relaPath: string, status: GitStatus, staged: boolean, prev: SyncModel | null, next: SyncModel | null, };
|
export type GitStatusEntry = { relaPath: string, status: GitStatus, staged: boolean, prev: SyncModel | null, next: SyncModel | null, };
|
||||||
|
|
||||||
export type GitStatusSummary = { path: string, headRef: string | null, headRefShorthand: string | null, entries: Array<GitStatusEntry>, origins: Array<string>, localBranches: Array<string>, remoteBranches: Array<string>, };
|
export type GitStatusSummary = { path: string, headRef: string | null, headRefShorthand: string | null, entries: Array<GitStatusEntry>, origins: Array<string>, localBranches: Array<string>, remoteBranches: Array<string>, ahead: number, behind: number, };
|
||||||
|
|
||||||
export type PullResult = { "type": "success", message: string, } | { "type": "up_to_date" } | { "type": "needs_credentials", url: string, error: string | null, };
|
export type PullResult = { "type": "success", message: string, } | { "type": "up_to_date" } | { "type": "needs_credentials", url: string, error: string | null, } | { "type": "diverged", remote: string, branch: string, } | { "type": "uncommitted_changes" };
|
||||||
|
|
||||||
export type PushResult = { "type": "success", message: string, } | { "type": "up_to_date" } | { "type": "needs_credentials", url: string, error: string | null, };
|
export type PushResult = { "type": "success", message: string, } | { "type": "up_to_date" } | { "type": "needs_credentials", url: string, error: string | null, };
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { createFastMutation } from '@yaakapp/app/hooks/useFastMutation';
|
|||||||
import { queryClient } from '@yaakapp/app/lib/queryClient';
|
import { queryClient } from '@yaakapp/app/lib/queryClient';
|
||||||
import { useMemo } from 'react';
|
import { useMemo } from 'react';
|
||||||
import { BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
|
import { BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
|
||||||
|
import { showToast } from '@yaakapp/app/lib/toast';
|
||||||
|
|
||||||
export * from './bindings/gen_git';
|
export * from './bindings/gen_git';
|
||||||
export * from './bindings/gen_models';
|
export * from './bindings/gen_models';
|
||||||
@@ -13,31 +14,48 @@ export interface GitCredentials {
|
|||||||
password: string;
|
password: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export type DivergedStrategy = 'force_reset' | 'merge' | 'cancel';
|
||||||
|
|
||||||
|
export type UncommittedChangesStrategy = 'reset' | 'cancel';
|
||||||
|
|
||||||
export interface GitCallbacks {
|
export interface GitCallbacks {
|
||||||
addRemote: () => Promise<GitRemote | null>;
|
addRemote: () => Promise<GitRemote | null>;
|
||||||
promptCredentials: (
|
promptCredentials: (
|
||||||
result: Extract<PushResult, { type: 'needs_credentials' }>,
|
result: Extract<PushResult, { type: 'needs_credentials' }>,
|
||||||
) => Promise<GitCredentials | null>;
|
) => Promise<GitCredentials | null>;
|
||||||
|
promptDiverged: (
|
||||||
|
result: Extract<PullResult, { type: 'diverged' }>,
|
||||||
|
) => Promise<DivergedStrategy>;
|
||||||
|
promptUncommittedChanges: () => Promise<UncommittedChangesStrategy>;
|
||||||
|
forceSync: () => Promise<void>;
|
||||||
}
|
}
|
||||||
|
|
||||||
const onSuccess = () => queryClient.invalidateQueries({ queryKey: ['git'] });
|
const onSuccess = () => queryClient.invalidateQueries({ queryKey: ['git'] });
|
||||||
|
|
||||||
export function useGit(dir: string, callbacks: GitCallbacks) {
|
export function useGit(dir: string, callbacks: GitCallbacks, refreshKey?: string) {
|
||||||
const mutations = useMemo(() => gitMutations(dir, callbacks), [dir, callbacks]);
|
const mutations = useMemo(() => gitMutations(dir, callbacks), [dir, callbacks]);
|
||||||
|
const fetchAll = useQuery<void, string>({
|
||||||
|
queryKey: ['git', 'fetch_all', dir, refreshKey],
|
||||||
|
queryFn: () => invoke('cmd_git_fetch_all', { dir }),
|
||||||
|
refetchInterval: 10 * 60_000,
|
||||||
|
});
|
||||||
return [
|
return [
|
||||||
{
|
{
|
||||||
remotes: useQuery<GitRemote[], string>({
|
remotes: useQuery<GitRemote[], string>({
|
||||||
queryKey: ['git', 'remotes', dir],
|
queryKey: ['git', 'remotes', dir, refreshKey],
|
||||||
queryFn: () => getRemotes(dir),
|
queryFn: () => getRemotes(dir),
|
||||||
|
placeholderData: (prev) => prev,
|
||||||
}),
|
}),
|
||||||
log: useQuery<GitCommit[], string>({
|
log: useQuery<GitCommit[], string>({
|
||||||
queryKey: ['git', 'log', dir],
|
queryKey: ['git', 'log', dir, refreshKey],
|
||||||
queryFn: () => invoke('cmd_git_log', { dir }),
|
queryFn: () => invoke('cmd_git_log', { dir }),
|
||||||
|
placeholderData: (prev) => prev,
|
||||||
}),
|
}),
|
||||||
status: useQuery<GitStatusSummary, string>({
|
status: useQuery<GitStatusSummary, string>({
|
||||||
refetchOnMount: true,
|
refetchOnMount: true,
|
||||||
queryKey: ['git', 'status', dir],
|
queryKey: ['git', 'status', dir, refreshKey, fetchAll.dataUpdatedAt],
|
||||||
queryFn: () => invoke('cmd_git_status', { dir }),
|
queryFn: () => invoke('cmd_git_status', { dir }),
|
||||||
|
placeholderData: (prev) => prev,
|
||||||
}),
|
}),
|
||||||
},
|
},
|
||||||
mutations,
|
mutations,
|
||||||
@@ -69,6 +87,15 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
|||||||
return invoke<PushResult>('cmd_git_push', { dir });
|
return invoke<PushResult>('cmd_git_push', { dir });
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const handleError = (err: unknown) => {
|
||||||
|
showToast({
|
||||||
|
id: `${err}`,
|
||||||
|
message: `${err}`,
|
||||||
|
color: 'danger',
|
||||||
|
timeout: 5000,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
init: createFastMutation<void, string, void>({
|
init: createFastMutation<void, string, void>({
|
||||||
mutationKey: ['git', 'init'],
|
mutationKey: ['git', 'init'],
|
||||||
@@ -133,11 +160,7 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
|||||||
},
|
},
|
||||||
onSuccess,
|
onSuccess,
|
||||||
}),
|
}),
|
||||||
fetchAll: createFastMutation<string, string, void>({
|
|
||||||
mutationKey: ['git', 'checkout', dir],
|
|
||||||
mutationFn: () => invoke('cmd_git_fetch_all', { dir }),
|
|
||||||
onSuccess,
|
|
||||||
}),
|
|
||||||
push: createFastMutation<PushResult, string, void>({
|
push: createFastMutation<PushResult, string, void>({
|
||||||
mutationKey: ['git', 'push', dir],
|
mutationKey: ['git', 'push', dir],
|
||||||
mutationFn: push,
|
mutationFn: push,
|
||||||
@@ -147,20 +170,51 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
|||||||
mutationKey: ['git', 'pull', dir],
|
mutationKey: ['git', 'pull', dir],
|
||||||
async mutationFn() {
|
async mutationFn() {
|
||||||
const result = await invoke<PullResult>('cmd_git_pull', { dir });
|
const result = await invoke<PullResult>('cmd_git_pull', { dir });
|
||||||
if (result.type !== 'needs_credentials') return result;
|
|
||||||
|
|
||||||
// Needs credentials, prompt for them
|
if (result.type === 'needs_credentials') {
|
||||||
const creds = await callbacks.promptCredentials(result);
|
const creds = await callbacks.promptCredentials(result);
|
||||||
if (creds == null) throw new Error('Canceled');
|
if (creds == null) throw new Error('Canceled');
|
||||||
|
|
||||||
await invoke('cmd_git_add_credential', {
|
await invoke('cmd_git_add_credential', {
|
||||||
remoteUrl: result.url,
|
remoteUrl: result.url,
|
||||||
username: creds.username,
|
username: creds.username,
|
||||||
password: creds.password,
|
password: creds.password,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Pull again
|
// Pull again after credentials
|
||||||
return invoke<PullResult>('cmd_git_pull', { dir });
|
return invoke<PullResult>('cmd_git_pull', { dir });
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.type === 'uncommitted_changes') {
|
||||||
|
callbacks.promptUncommittedChanges().then(async (strategy) => {
|
||||||
|
if (strategy === 'cancel') return;
|
||||||
|
|
||||||
|
await invoke('cmd_git_reset_changes', { dir });
|
||||||
|
return invoke<PullResult>('cmd_git_pull', { dir });
|
||||||
|
}).then(async () => { onSuccess(); await callbacks.forceSync(); }, handleError);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.type === 'diverged') {
|
||||||
|
callbacks.promptDiverged(result).then((strategy) => {
|
||||||
|
if (strategy === 'cancel') return;
|
||||||
|
|
||||||
|
if (strategy === 'force_reset') {
|
||||||
|
return invoke<PullResult>('cmd_git_pull_force_reset', {
|
||||||
|
dir,
|
||||||
|
remote: result.remote,
|
||||||
|
branch: result.branch,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return invoke<PullResult>('cmd_git_pull_merge', {
|
||||||
|
dir,
|
||||||
|
remote: result.remote,
|
||||||
|
branch: result.branch,
|
||||||
|
});
|
||||||
|
}).then(async () => { onSuccess(); await callbacks.forceSync(); }, handleError);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
},
|
},
|
||||||
onSuccess,
|
onSuccess,
|
||||||
}),
|
}),
|
||||||
@@ -169,6 +223,11 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
|||||||
mutationFn: (args) => invoke('cmd_git_unstage', { dir, ...args }),
|
mutationFn: (args) => invoke('cmd_git_unstage', { dir, ...args }),
|
||||||
onSuccess,
|
onSuccess,
|
||||||
}),
|
}),
|
||||||
|
resetChanges: createFastMutation<void, string, void>({
|
||||||
|
mutationKey: ['git', 'reset-changes', dir],
|
||||||
|
mutationFn: () => invoke('cmd_git_reset_changes', { dir }),
|
||||||
|
onSuccess,
|
||||||
|
}),
|
||||||
} as const;
|
} as const;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -13,6 +13,7 @@ mod pull;
|
|||||||
mod push;
|
mod push;
|
||||||
mod remotes;
|
mod remotes;
|
||||||
mod repository;
|
mod repository;
|
||||||
|
mod reset;
|
||||||
mod status;
|
mod status;
|
||||||
mod unstage;
|
mod unstage;
|
||||||
mod util;
|
mod util;
|
||||||
@@ -29,8 +30,9 @@ pub use credential::git_add_credential;
|
|||||||
pub use fetch::git_fetch_all;
|
pub use fetch::git_fetch_all;
|
||||||
pub use init::git_init;
|
pub use init::git_init;
|
||||||
pub use log::{GitCommit, git_log};
|
pub use log::{GitCommit, git_log};
|
||||||
pub use pull::{PullResult, git_pull};
|
pub use pull::{PullResult, git_pull, git_pull_force_reset, git_pull_merge};
|
||||||
pub use push::{PushResult, git_push};
|
pub use push::{PushResult, git_push};
|
||||||
pub use remotes::{GitRemote, git_add_remote, git_remotes, git_rm_remote};
|
pub use remotes::{GitRemote, git_add_remote, git_remotes, git_rm_remote};
|
||||||
|
pub use reset::git_reset_changes;
|
||||||
pub use status::{GitStatusSummary, git_status};
|
pub use status::{GitStatusSummary, git_status};
|
||||||
pub use unstage::git_unstage;
|
pub use unstage::git_unstage;
|
||||||
|
|||||||
@@ -15,9 +15,23 @@ pub enum PullResult {
|
|||||||
Success { message: String },
|
Success { message: String },
|
||||||
UpToDate,
|
UpToDate,
|
||||||
NeedsCredentials { url: String, error: Option<String> },
|
NeedsCredentials { url: String, error: Option<String> },
|
||||||
|
Diverged { remote: String, branch: String },
|
||||||
|
UncommittedChanges,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_uncommitted_changes(dir: &Path) -> Result<bool> {
|
||||||
|
let repo = open_repo(dir)?;
|
||||||
|
let mut opts = git2::StatusOptions::new();
|
||||||
|
opts.include_ignored(false).include_untracked(false);
|
||||||
|
let statuses = repo.statuses(Some(&mut opts))?;
|
||||||
|
Ok(statuses.iter().any(|e| e.status() != git2::Status::CURRENT))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn git_pull(dir: &Path) -> Result<PullResult> {
|
pub async fn git_pull(dir: &Path) -> Result<PullResult> {
|
||||||
|
if has_uncommitted_changes(dir)? {
|
||||||
|
return Ok(PullResult::UncommittedChanges);
|
||||||
|
}
|
||||||
|
|
||||||
// Extract all git2 data before any await points (git2 types are not Send)
|
// Extract all git2 data before any await points (git2 types are not Send)
|
||||||
let (branch_name, remote_name, remote_url) = {
|
let (branch_name, remote_name, remote_url) = {
|
||||||
let repo = open_repo(dir)?;
|
let repo = open_repo(dir)?;
|
||||||
@@ -30,42 +44,130 @@ pub async fn git_pull(dir: &Path) -> Result<PullResult> {
|
|||||||
(branch_name, remote_name, remote_url)
|
(branch_name, remote_name, remote_url)
|
||||||
};
|
};
|
||||||
|
|
||||||
let out = new_binary_command(dir)
|
// Step 1: fetch the specific branch
|
||||||
|
// NOTE: We use fetch + merge instead of `git pull` to avoid conflicts with
|
||||||
|
// global git config (e.g. pull.ff=only) and the background fetch --all.
|
||||||
|
let fetch_out = new_binary_command(dir)
|
||||||
.await?
|
.await?
|
||||||
.args(["pull", &remote_name, &branch_name])
|
.args(["fetch", &remote_name, &branch_name])
|
||||||
.env("GIT_TERMINAL_PROMPT", "0")
|
.env("GIT_TERMINAL_PROMPT", "0")
|
||||||
.output()
|
.output()
|
||||||
.await
|
.await
|
||||||
.map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
|
.map_err(|e| GenericError(format!("failed to run git fetch: {e}")))?;
|
||||||
|
|
||||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
let fetch_stdout = String::from_utf8_lossy(&fetch_out.stdout);
|
||||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
let fetch_stderr = String::from_utf8_lossy(&fetch_out.stderr);
|
||||||
let combined = stdout + stderr;
|
let fetch_combined = format!("{fetch_stdout}{fetch_stderr}");
|
||||||
|
|
||||||
info!("Pulled status={} {combined}", out.status);
|
info!("Fetched status={} {fetch_combined}", fetch_out.status);
|
||||||
|
|
||||||
if combined.to_lowercase().contains("could not read") {
|
if fetch_combined.to_lowercase().contains("could not read") {
|
||||||
return Ok(PullResult::NeedsCredentials { url: remote_url.to_string(), error: None });
|
return Ok(PullResult::NeedsCredentials { url: remote_url.to_string(), error: None });
|
||||||
}
|
}
|
||||||
|
|
||||||
if combined.to_lowercase().contains("unable to access") {
|
if fetch_combined.to_lowercase().contains("unable to access") {
|
||||||
return Ok(PullResult::NeedsCredentials {
|
return Ok(PullResult::NeedsCredentials {
|
||||||
url: remote_url.to_string(),
|
url: remote_url.to_string(),
|
||||||
error: Some(combined.to_string()),
|
error: Some(fetch_combined.to_string()),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
if !out.status.success() {
|
if !fetch_out.status.success() {
|
||||||
return Err(GenericError(format!("Failed to pull {combined}")));
|
return Err(GenericError(format!("Failed to fetch: {fetch_combined}")));
|
||||||
}
|
}
|
||||||
|
|
||||||
if combined.to_lowercase().contains("up to date") {
|
// Step 2: merge the fetched branch
|
||||||
|
let ref_name = format!("{}/{}", remote_name, branch_name);
|
||||||
|
let merge_out = new_binary_command(dir)
|
||||||
|
.await?
|
||||||
|
.args(["merge", "--ff-only", &ref_name])
|
||||||
|
.output()
|
||||||
|
.await
|
||||||
|
.map_err(|e| GenericError(format!("failed to run git merge: {e}")))?;
|
||||||
|
|
||||||
|
let merge_stdout = String::from_utf8_lossy(&merge_out.stdout);
|
||||||
|
let merge_stderr = String::from_utf8_lossy(&merge_out.stderr);
|
||||||
|
let merge_combined = format!("{merge_stdout}{merge_stderr}");
|
||||||
|
|
||||||
|
info!("Merged status={} {merge_combined}", merge_out.status);
|
||||||
|
|
||||||
|
if !merge_out.status.success() {
|
||||||
|
let merge_lower = merge_combined.to_lowercase();
|
||||||
|
if merge_lower.contains("cannot fast-forward")
|
||||||
|
|| merge_lower.contains("not possible to fast-forward")
|
||||||
|
|| merge_lower.contains("diverged")
|
||||||
|
{
|
||||||
|
return Ok(PullResult::Diverged { remote: remote_name, branch: branch_name });
|
||||||
|
}
|
||||||
|
return Err(GenericError(format!("Failed to merge: {merge_combined}")));
|
||||||
|
}
|
||||||
|
|
||||||
|
if merge_combined.to_lowercase().contains("up to date") {
|
||||||
return Ok(PullResult::UpToDate);
|
return Ok(PullResult::UpToDate);
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(PullResult::Success { message: format!("Pulled from {}/{}", remote_name, branch_name) })
|
Ok(PullResult::Success { message: format!("Pulled from {}/{}", remote_name, branch_name) })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub async fn git_pull_force_reset(dir: &Path, remote: &str, branch: &str) -> Result<PullResult> {
|
||||||
|
// Step 1: fetch the remote
|
||||||
|
let fetch_out = new_binary_command(dir)
|
||||||
|
.await?
|
||||||
|
.args(["fetch", remote])
|
||||||
|
.env("GIT_TERMINAL_PROMPT", "0")
|
||||||
|
.output()
|
||||||
|
.await
|
||||||
|
.map_err(|e| GenericError(format!("failed to run git fetch: {e}")))?;
|
||||||
|
|
||||||
|
if !fetch_out.status.success() {
|
||||||
|
let stderr = String::from_utf8_lossy(&fetch_out.stderr);
|
||||||
|
return Err(GenericError(format!("Failed to fetch: {stderr}")));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2: reset --hard to remote/branch
|
||||||
|
let ref_name = format!("{}/{}", remote, branch);
|
||||||
|
let reset_out = new_binary_command(dir)
|
||||||
|
.await?
|
||||||
|
.args(["reset", "--hard", &ref_name])
|
||||||
|
.output()
|
||||||
|
.await
|
||||||
|
.map_err(|e| GenericError(format!("failed to run git reset: {e}")))?;
|
||||||
|
|
||||||
|
if !reset_out.status.success() {
|
||||||
|
let stderr = String::from_utf8_lossy(&reset_out.stderr);
|
||||||
|
return Err(GenericError(format!("Failed to reset: {}", stderr.trim())));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(PullResult::Success { message: format!("Reset to {}/{}", remote, branch) })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn git_pull_merge(dir: &Path, remote: &str, branch: &str) -> Result<PullResult> {
|
||||||
|
let out = new_binary_command(dir)
|
||||||
|
.await?
|
||||||
|
.args(["pull", "--no-rebase", remote, branch])
|
||||||
|
.env("GIT_TERMINAL_PROMPT", "0")
|
||||||
|
.output()
|
||||||
|
.await
|
||||||
|
.map_err(|e| GenericError(format!("failed to run git pull --no-rebase: {e}")))?;
|
||||||
|
|
||||||
|
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||||
|
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||||
|
let combined = format!("{}{}", stdout, stderr);
|
||||||
|
|
||||||
|
info!("Pull merge status={} {combined}", out.status);
|
||||||
|
|
||||||
|
if !out.status.success() {
|
||||||
|
if combined.to_lowercase().contains("conflict") {
|
||||||
|
return Err(GenericError(
|
||||||
|
"Merge conflicts detected. Please resolve them manually.".to_string(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
return Err(GenericError(format!("Failed to merge pull: {}", combined.trim())));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(PullResult::Success { message: format!("Merged from {}/{}", remote, branch) })
|
||||||
|
}
|
||||||
|
|
||||||
// pub(crate) fn git_pull_old(dir: &Path) -> Result<PullResult> {
|
// pub(crate) fn git_pull_old(dir: &Path) -> Result<PullResult> {
|
||||||
// let repo = open_repo(dir)?;
|
// let repo = open_repo(dir)?;
|
||||||
//
|
//
|
||||||
|
|||||||
20
crates/yaak-git/src/reset.rs
Normal file
20
crates/yaak-git/src/reset.rs
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
use crate::binary::new_binary_command;
|
||||||
|
use crate::error::Error::GenericError;
|
||||||
|
use crate::error::Result;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
pub async fn git_reset_changes(dir: &Path) -> Result<()> {
|
||||||
|
let out = new_binary_command(dir)
|
||||||
|
.await?
|
||||||
|
.args(["reset", "--hard", "HEAD"])
|
||||||
|
.output()
|
||||||
|
.await
|
||||||
|
.map_err(|e| GenericError(format!("failed to run git reset: {e}")))?;
|
||||||
|
|
||||||
|
if !out.status.success() {
|
||||||
|
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||||
|
return Err(GenericError(format!("Failed to reset: {}", stderr.trim())));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
@@ -18,6 +18,8 @@ pub struct GitStatusSummary {
|
|||||||
pub origins: Vec<String>,
|
pub origins: Vec<String>,
|
||||||
pub local_branches: Vec<String>,
|
pub local_branches: Vec<String>,
|
||||||
pub remote_branches: Vec<String>,
|
pub remote_branches: Vec<String>,
|
||||||
|
pub ahead: u32,
|
||||||
|
pub behind: u32,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
|
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
|
||||||
@@ -160,6 +162,18 @@ pub fn git_status(dir: &Path) -> crate::error::Result<GitStatusSummary> {
|
|||||||
let local_branches = local_branch_names(&repo)?;
|
let local_branches = local_branch_names(&repo)?;
|
||||||
let remote_branches = remote_branch_names(&repo)?;
|
let remote_branches = remote_branch_names(&repo)?;
|
||||||
|
|
||||||
|
// Compute ahead/behind relative to remote tracking branch
|
||||||
|
let (ahead, behind) = (|| -> Option<(usize, usize)> {
|
||||||
|
let head = repo.head().ok()?;
|
||||||
|
let local_oid = head.target()?;
|
||||||
|
let branch_name = head.shorthand()?;
|
||||||
|
let upstream_ref =
|
||||||
|
repo.find_branch(&format!("origin/{branch_name}"), git2::BranchType::Remote).ok()?;
|
||||||
|
let upstream_oid = upstream_ref.get().target()?;
|
||||||
|
repo.graph_ahead_behind(local_oid, upstream_oid).ok()
|
||||||
|
})()
|
||||||
|
.unwrap_or((0, 0));
|
||||||
|
|
||||||
Ok(GitStatusSummary {
|
Ok(GitStatusSummary {
|
||||||
entries,
|
entries,
|
||||||
origins,
|
origins,
|
||||||
@@ -168,5 +182,7 @@ pub fn git_status(dir: &Path) -> crate::error::Result<GitStatusSummary> {
|
|||||||
head_ref_shorthand,
|
head_ref_shorthand,
|
||||||
local_branches,
|
local_branches,
|
||||||
remote_branches,
|
remote_branches,
|
||||||
|
ahead: ahead as u32,
|
||||||
|
behind: behind as u32,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user