Mirror of https://github.com/mountain-loop/yaak.git
Synced 2026-02-16 23:57:48 +01:00

Compare commits: 22 commits, v2026.2.1- ... cli-comman
| Author | SHA1 | Date |
|---|---|---|
| | 9856383566 | |
| | b75e9479e6 | |
| | 0d1d8d4afa | |
| | 5888e69956 | |
| | e48a0894de | |
| | ea0b083d25 | |
| | f6c20283f0 | |
| | 0d57f91ca4 | |
| | 0a4ffde319 | |
| | 570676dffb | |
| | 0bf24c0dc1 | |
| | 6a23f0a5ee | |
| | 91e0660a7a | |
| | 26e145942a | |
| | cc4d598af3 | |
| | f5d11cb6d3 | |
| | 65e91aec6b | |
| | ae943a5fd2 | |
| | 9e1a11de0b | |
| | 52732e12ec | |
| | 1127d7e3fa | |
| | 8023603ebe | |
@@ -1,35 +1,46 @@
---
description: Review a PR in a new worktree
allowed-tools: Bash(git worktree:*), Bash(gh pr:*)
allowed-tools: Bash(git worktree:*), Bash(gh pr:*), Bash(git branch:*)
---

Review a GitHub pull request in a new git worktree.
Check out a GitHub pull request for review.

## Usage

```
/review-pr <PR_NUMBER>
/check-out-pr <PR_NUMBER>
```

## What to do

1. List all open pull requests and ask the user to select one
1. If no PR number is provided, list all open pull requests and ask the user to select one
2. Get PR information using `gh pr view <PR_NUMBER> --json number,headRefName`
3. Extract the branch name from the PR
4. Create a new worktree at `../yaak-worktrees/pr-<PR_NUMBER>` using `git worktree add` with a timeout of at least 300000ms (5 minutes) since the post-checkout hook runs a bootstrap script
5. Checkout the PR branch in the new worktree using `gh pr checkout <PR_NUMBER>`
6. The post-checkout hook will automatically:
3. **Ask the user** whether they want to:
   - **A) Check out in current directory** — simple `gh pr checkout <PR_NUMBER>`
   - **B) Create a new worktree** — isolated copy at `../yaak-worktrees/pr-<PR_NUMBER>`
4. Follow the appropriate path below

## Option A: Check out in current directory

1. Run `gh pr checkout <PR_NUMBER>`
2. Inform the user which branch they're now on

## Option B: Create a new worktree

1. Create a new worktree at `../yaak-worktrees/pr-<PR_NUMBER>` using `git worktree add` with a timeout of at least 300000ms (5 minutes) since the post-checkout hook runs a bootstrap script
2. Checkout the PR branch in the new worktree using `gh pr checkout <PR_NUMBER>`
3. The post-checkout hook will automatically:
   - Create `.env.local` with unique ports
   - Copy editor config folders
   - Run `npm install && npm run bootstrap`
7. Inform the user:
4. Inform the user:
   - Where the worktree was created
   - What ports were assigned
   - How to access it (cd command)
   - How to run the dev server
   - How to remove the worktree when done

## Example Output
### Example worktree output

```
Created worktree for PR #123 at ../yaak-worktrees/pr-123
@@ -43,5 +43,7 @@ The skill generates markdown-formatted release notes following this structure:
After outputting the release notes, ask the user if they would like to create a draft GitHub release with these notes. If they confirm, create the release using:

```bash
gh release create <tag> --draft --prerelease --title "<tag>" --notes '<release notes>'
gh release create <tag> --draft --prerelease --title "Release <version>" --notes '<release notes>'
```

**IMPORTANT**: The release title format is "Release XXXX" where XXXX is the version WITHOUT the `v` prefix. For example, tag `v2026.2.1-beta.1` gets title "Release 2026.2.1-beta.1".
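The mapping from tag to title is mechanical: strip a leading `v` and prefix the rest with `Release `. A minimal Rust sketch of that rule, purely illustrative (the function name is not part of any existing release tooling):

```rust
/// Derive the draft-release title from a tag per the rule above, e.g.
/// "v2026.2.1-beta.1" -> "Release 2026.2.1-beta.1".
fn release_title(tag: &str) -> String {
    format!("Release {}", tag.strip_prefix('v').unwrap_or(tag))
}
```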

.gitignore (vendored, 3 lines changed)
@@ -51,3 +51,6 @@ flatpak-repo/
flatpak/flatpak-builder-tools/
flatpak/cargo-sources.json
flatpak/node-sources.json

# Local Codex desktop env state
.codex/environments/environment.toml

Cargo.lock (generated, 417 lines changed)
@@ -52,6 +52,15 @@ dependencies = [
|
||||
"zerocopy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "0.6.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "81ce3d38065e618af2d7b77e10c5ad9a069859b4be3c2250f674af3840d9c8a5"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "1.1.3"
|
||||
@@ -90,7 +99,7 @@ checksum = "f6f39be698127218cca460cb624878c9aa4e2b47dba3b277963d2bf00bad263b"
|
||||
dependencies = [
|
||||
"android_log-sys",
|
||||
"env_filter",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -175,7 +184,7 @@ checksum = "c1df21f715862ede32a0c525ce2ca4d52626bb0007f8c18b87a384503ac33e70"
|
||||
dependencies = [
|
||||
"clipboard-win",
|
||||
"image",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"objc2 0.6.1",
|
||||
"objc2-app-kit",
|
||||
"objc2-core-foundation",
|
||||
@@ -212,6 +221,21 @@ dependencies = [
|
||||
"zbus",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "assert_cmd"
|
||||
version = "2.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c5bcfa8749ac45dd12cb11055aeeb6b27a3895560d60d71e3c23bf979e60514"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"bstr",
|
||||
"libc",
|
||||
"predicates",
|
||||
"predicates-core",
|
||||
"predicates-tree",
|
||||
"wait-timeout",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-broadcast"
|
||||
version = "0.7.2"
|
||||
@@ -630,6 +654,17 @@ dependencies = [
|
||||
"alloc-stdlib",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bstr"
|
||||
version = "1.12.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"regex-automata",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bumpalo"
|
||||
version = "3.18.1"
|
||||
@@ -999,7 +1034,7 @@ version = "2.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"lazy_static 1.5.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
@@ -1357,6 +1392,12 @@ dependencies = [
|
||||
"cipher",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "difflib"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8"
|
||||
|
||||
[[package]]
|
||||
name = "digest"
|
||||
version = "0.10.7"
|
||||
@@ -1519,7 +1560,7 @@ dependencies = [
|
||||
"rustc_version",
|
||||
"toml 0.8.23",
|
||||
"vswhom",
|
||||
"winreg",
|
||||
"winreg 0.55.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1570,8 +1611,8 @@ version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0"
|
||||
dependencies = [
|
||||
"log",
|
||||
"regex",
|
||||
"log 0.4.29",
|
||||
"regex 1.11.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1584,7 +1625,7 @@ dependencies = [
|
||||
"anstyle",
|
||||
"env_filter",
|
||||
"jiff",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1645,7 +1686,7 @@ name = "eventsource-client"
|
||||
version = "0.14.0"
|
||||
source = "git+https://github.com/yaakapp/rust-eventsource-client#60e0e3ac5038149c4778dc4979b09b152214f9a8"
|
||||
dependencies = [
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"pin-project",
|
||||
]
|
||||
|
||||
@@ -1693,7 +1734,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4316185f709b23713e41e3195f90edef7fb00c3ed4adc79769cf09cc762a3b29"
|
||||
dependencies = [
|
||||
"colored",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1702,7 +1743,7 @@ version = "0.3.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "38e2275cc4e4fc009b0669731a1e5ab7ebf11f469eaede2bab9309a5b4d6057f"
|
||||
dependencies = [
|
||||
"memoffset",
|
||||
"memoffset 0.9.1",
|
||||
"rustc_version",
|
||||
]
|
||||
|
||||
@@ -1735,6 +1776,15 @@ dependencies = [
|
||||
"miniz_oxide",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "float-cmp"
|
||||
version = "0.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8"
|
||||
dependencies = [
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fnv"
|
||||
version = "1.0.7"
|
||||
@@ -2143,7 +2193,7 @@ dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"libc",
|
||||
"libgit2-sys",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"openssl-probe",
|
||||
"openssl-sys",
|
||||
"url",
|
||||
@@ -2284,6 +2334,21 @@ dependencies = [
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "handlebars"
|
||||
version = "0.29.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fb04af2006ea09d985fef82b81e0eb25337e51b691c76403332378a53d521edc"
|
||||
dependencies = [
|
||||
"lazy_static 0.2.11",
|
||||
"log 0.3.9",
|
||||
"pest",
|
||||
"quick-error",
|
||||
"regex 0.2.11",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.12.3"
|
||||
@@ -2356,7 +2421,7 @@ version = "0.29.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3b7410cae13cbc75623c98ac4cbfd1f0bedddf3227afc24f370cf0f50a44a11c"
|
||||
dependencies = [
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"mac",
|
||||
"markup5ever",
|
||||
"match_token",
|
||||
@@ -2517,7 +2582,7 @@ dependencies = [
|
||||
"core-foundation-sys",
|
||||
"iana-time-zone-haiku",
|
||||
"js-sys",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"wasm-bindgen",
|
||||
"windows-core",
|
||||
]
|
||||
@@ -2758,6 +2823,22 @@ dependencies = [
|
||||
"generic-array",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "interfaces"
|
||||
version = "0.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4ec8f50a973916cac3da5057c986db05cd3346f38c78e9bc24f64cc9f6a3978f"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"cc",
|
||||
"handlebars",
|
||||
"lazy_static 1.5.0",
|
||||
"libc",
|
||||
"nix 0.23.2",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ipnet"
|
||||
version = "2.11.0"
|
||||
@@ -2844,7 +2925,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e67e8da4c49d6d9909fe03361f9b620f58898859f5c7aded68351e85e71ecf50"
|
||||
dependencies = [
|
||||
"jiff-static",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"portable-atomic",
|
||||
"portable-atomic-util",
|
||||
"serde_core",
|
||||
@@ -2871,7 +2952,7 @@ dependencies = [
|
||||
"cfg-if",
|
||||
"combine",
|
||||
"jni-sys",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"thiserror 1.0.69",
|
||||
"walkdir",
|
||||
"windows-sys 0.45.0",
|
||||
@@ -2950,7 +3031,7 @@ checksum = "eebcc3aff044e5944a8fbaf69eb277d11986064cba30c468730e8b9909fb551c"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"dbus-secret-service",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"security-framework 2.11.1",
|
||||
"security-framework 3.5.1",
|
||||
"windows-sys 0.60.2",
|
||||
@@ -2989,6 +3070,12 @@ dependencies = [
|
||||
"selectors",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
version = "0.2.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73"
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
version = "1.5.0"
|
||||
@@ -3005,7 +3092,7 @@ dependencies = [
|
||||
"gtk",
|
||||
"gtk-sys",
|
||||
"libappindicator-sys",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3163,6 +3250,15 @@ dependencies = [
|
||||
"scopeguard",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b"
|
||||
dependencies = [
|
||||
"log 0.4.29",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.29"
|
||||
@@ -3199,7 +3295,7 @@ version = "0.14.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c7a7213d12e1864c0f002f52c2923d4556935a43dec5e71355c2760e0f6e7a18"
|
||||
dependencies = [
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"phf 0.11.3",
|
||||
"phf_codegen 0.11.3",
|
||||
"string_cache",
|
||||
@@ -3248,6 +3344,15 @@ version = "2.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
|
||||
|
||||
[[package]]
|
||||
name = "memoffset"
|
||||
version = "0.6.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "memoffset"
|
||||
version = "0.9.1"
|
||||
@@ -3302,7 +3407,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"wasi 0.11.0+wasi-snapshot-preview1",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
@@ -3344,7 +3449,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"openssl",
|
||||
"openssl-probe",
|
||||
"openssl-sys",
|
||||
@@ -3362,7 +3467,7 @@ checksum = "c3f42e7bbe13d351b6bead8286a43aac9534b82bd3cc43e47037f012ebfd62d4"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"jni-sys",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"ndk-sys",
|
||||
"num_enum",
|
||||
"raw-window-handle",
|
||||
@@ -3390,6 +3495,19 @@ version = "1.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086"
|
||||
|
||||
[[package]]
|
||||
name = "nix"
|
||||
version = "0.23.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8f3790c00a0150112de0f4cd161e3d7fc4b2d8a5542ffc35f099a2562aecb35c"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"cc",
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"memoffset 0.6.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nix"
|
||||
version = "0.30.1"
|
||||
@@ -3400,7 +3518,7 @@ dependencies = [
|
||||
"cfg-if",
|
||||
"cfg_aliases",
|
||||
"libc",
|
||||
"memoffset",
|
||||
"memoffset 0.9.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3419,6 +3537,12 @@ dependencies = [
|
||||
"minimal-lexical",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "normalize-line-endings"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
|
||||
|
||||
[[package]]
|
||||
name = "notify"
|
||||
version = "8.0.0"
|
||||
@@ -3431,7 +3555,7 @@ dependencies = [
|
||||
"inotify",
|
||||
"kqueue",
|
||||
"libc",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"mio",
|
||||
"notify-types",
|
||||
"walkdir",
|
||||
@@ -3872,7 +3996,7 @@ version = "3.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "41fc863e2ca13dc2d5c34fb22ea4a588248ac14db929616ba65c45f21744b1e9"
|
||||
dependencies = [
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"serde",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
@@ -3912,7 +4036,7 @@ dependencies = [
|
||||
"des",
|
||||
"getrandom 0.2.16",
|
||||
"hmac",
|
||||
"lazy_static",
|
||||
"lazy_static 1.5.0",
|
||||
"rc2",
|
||||
"sha1",
|
||||
"yasna",
|
||||
@@ -4000,6 +4124,12 @@ version = "2.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
|
||||
|
||||
[[package]]
|
||||
name = "pest"
|
||||
version = "0.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8"
|
||||
|
||||
[[package]]
|
||||
name = "petgraph"
|
||||
version = "0.6.5"
|
||||
@@ -4290,6 +4420,36 @@ version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
|
||||
|
||||
[[package]]
|
||||
name = "predicates"
|
||||
version = "3.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ada8f2932f28a27ee7b70dd6c1c39ea0675c55a36879ab92f3a715eaa1e63cfe"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"difflib",
|
||||
"float-cmp",
|
||||
"normalize-line-endings",
|
||||
"predicates-core",
|
||||
"regex 1.11.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "predicates-core"
|
||||
version = "1.0.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cad38746f3166b4031b1a0d39ad9f954dd291e7854fcc0eed52ee41a0b50d144"
|
||||
|
||||
[[package]]
|
||||
name = "predicates-tree"
|
||||
version = "1.0.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d0de1b847b39c8131db0467e9df1ff60e6d0562ab8e9a16e568ad0fdb372e2f2"
|
||||
dependencies = [
|
||||
"predicates-core",
|
||||
"termtree",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-crate"
|
||||
version = "1.3.1"
|
||||
@@ -4435,6 +4595,12 @@ dependencies = [
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quick-error"
|
||||
version = "1.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
|
||||
|
||||
[[package]]
|
||||
name = "quick-xml"
|
||||
version = "0.32.0"
|
||||
@@ -4529,7 +4695,7 @@ version = "0.8.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93"
|
||||
dependencies = [
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"parking_lot",
|
||||
"scheduled-thread-pool",
|
||||
]
|
||||
@@ -4696,16 +4862,29 @@ dependencies = [
|
||||
"thiserror 2.0.17",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "0.2.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9329abc99e39129fcceabd24cf5d85b4671ef7c29c50e972bc5afe32438ec384"
|
||||
dependencies = [
|
||||
"aho-corasick 0.6.10",
|
||||
"memchr",
|
||||
"regex-syntax 0.5.6",
|
||||
"thread_local",
|
||||
"utf8-ranges",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"aho-corasick 1.1.3",
|
||||
"memchr",
|
||||
"regex-automata",
|
||||
"regex-syntax",
|
||||
"regex-syntax 0.8.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4714,9 +4893,18 @@ version = "0.4.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"aho-corasick 1.1.3",
|
||||
"memchr",
|
||||
"regex-syntax",
|
||||
"regex-syntax 0.8.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.5.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7d707a4fa2637f2dca2ef9fd02225ec7661fe01a53623c1e6515b6916511f7a7"
|
||||
dependencies = [
|
||||
"ucd-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4755,7 +4943,7 @@ dependencies = [
|
||||
"hyper-tls",
|
||||
"hyper-util",
|
||||
"js-sys",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"mime",
|
||||
"mime_guess",
|
||||
"native-tls",
|
||||
@@ -4796,7 +4984,7 @@ dependencies = [
|
||||
"gobject-sys",
|
||||
"gtk-sys",
|
||||
"js-sys",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"objc2 0.6.1",
|
||||
"objc2-app-kit",
|
||||
"objc2-core-foundation",
|
||||
@@ -4988,7 +5176,7 @@ dependencies = [
|
||||
"core-foundation 0.10.1",
|
||||
"core-foundation-sys",
|
||||
"jni",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"once_cell",
|
||||
"rustls",
|
||||
"rustls-native-certs",
|
||||
@@ -5176,7 +5364,7 @@ dependencies = [
|
||||
"cssparser",
|
||||
"derive_more",
|
||||
"fxhash",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"phf 0.8.0",
|
||||
"phf_codegen 0.8.0",
|
||||
"precomputed-hash",
|
||||
@@ -5544,7 +5732,7 @@ dependencies = [
|
||||
"core-graphics 0.24.0",
|
||||
"foreign-types 0.5.0",
|
||||
"js-sys",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"objc2 0.5.2",
|
||||
"objc2-foundation 0.2.2",
|
||||
"objc2-quartz-core 0.2.2",
|
||||
@@ -5692,6 +5880,18 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sysproxy"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9707a79d3b95683aa5a9521e698ffd878b8fb289727c25a69157fb85d529ffff"
|
||||
dependencies = [
|
||||
"interfaces",
|
||||
"thiserror 1.0.69",
|
||||
"winapi",
|
||||
"winreg 0.10.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "system-configuration"
|
||||
version = "0.6.1"
|
||||
@@ -5744,9 +5944,9 @@ dependencies = [
|
||||
"gdkx11-sys",
|
||||
"gtk",
|
||||
"jni",
|
||||
"lazy_static",
|
||||
"lazy_static 1.5.0",
|
||||
"libc",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"ndk",
|
||||
"ndk-context",
|
||||
"ndk-sys",
|
||||
@@ -5820,7 +6020,7 @@ dependencies = [
|
||||
"http-range",
|
||||
"jni",
|
||||
"libc",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"mime",
|
||||
"muda",
|
||||
"objc2 0.6.1",
|
||||
@@ -5939,7 +6139,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "206dc20af4ed210748ba945c2774e60fd0acd52b9a73a028402caf809e9b6ecf"
|
||||
dependencies = [
|
||||
"arboard",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tauri",
|
||||
@@ -5974,7 +6174,7 @@ version = "2.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "313f8138692ddc4a2127c4c9607d616a46f5c042e77b3722450866da0aad2f19"
|
||||
dependencies = [
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"raw-window-handle",
|
||||
"rfd",
|
||||
"serde",
|
||||
@@ -6017,7 +6217,7 @@ dependencies = [
|
||||
"android_logger",
|
||||
"byte-unit",
|
||||
"fern",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"objc2 0.6.1",
|
||||
"objc2-foundation 0.3.1",
|
||||
"serde",
|
||||
@@ -6059,7 +6259,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d8f08346c8deb39e96f86973da0e2d76cbb933d7ac9b750f6dc4daf955a6f997"
|
||||
dependencies = [
|
||||
"gethostname 1.0.2",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"os_info",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -6077,10 +6277,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c374b6db45f2a8a304f0273a15080d98c70cde86178855fc24653ba657a1144c"
|
||||
dependencies = [
|
||||
"encoding_rs",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"open",
|
||||
"os_pipe",
|
||||
"regex",
|
||||
"regex 1.11.1",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -6119,7 +6319,7 @@ dependencies = [
|
||||
"futures-util",
|
||||
"http",
|
||||
"infer",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"minisign-verify",
|
||||
"osakit",
|
||||
"percent-encoding",
|
||||
@@ -6146,7 +6346,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "73736611e14142408d15353e21e3cca2f12a3cfb523ad0ce85999b6d2ef1a704"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tauri",
|
||||
@@ -6188,7 +6388,7 @@ dependencies = [
|
||||
"gtk",
|
||||
"http",
|
||||
"jni",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"objc2 0.6.1",
|
||||
"objc2-app-kit",
|
||||
"objc2-foundation 0.3.1",
|
||||
@@ -6223,12 +6423,12 @@ dependencies = [
|
||||
"infer",
|
||||
"json-patch",
|
||||
"kuchikiki",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"memchr",
|
||||
"phf 0.11.3",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"regex",
|
||||
"regex 1.11.1",
|
||||
"schemars",
|
||||
"semver",
|
||||
"serde",
|
||||
@@ -6288,6 +6488,12 @@ dependencies = [
|
||||
"winapi-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "termtree"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683"
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.69"
|
||||
@@ -6328,6 +6534,15 @@ dependencies = [
|
||||
"syn 2.0.101",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thread_local"
|
||||
version = "0.3.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
|
||||
dependencies = [
|
||||
"lazy_static 1.5.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tiff"
|
||||
version = "0.9.1"
|
||||
@@ -6472,7 +6687,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084"
|
||||
dependencies = [
|
||||
"futures-util",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"rustls",
|
||||
"rustls-native-certs",
|
||||
"rustls-pki-types",
|
||||
@@ -6816,7 +7031,7 @@ dependencies = [
|
||||
"data-encoding",
|
||||
"http",
|
||||
"httparse",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"rand 0.9.1",
|
||||
"rustls",
|
||||
"rustls-pki-types",
|
||||
@@ -6837,13 +7052,19 @@ version = "1.18.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f"
|
||||
|
||||
[[package]]
|
||||
name = "ucd-util"
|
||||
version = "0.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "abd2fc5d32b590614af8b0a20d837f32eca055edd0bbead59a9cfe80858be003"
|
||||
|
||||
[[package]]
|
||||
name = "uds_windows"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "89daebc3e6fd160ac4aa9fc8b3bf71e1f74fbf92367ae71fb83a037e8bf164b9"
|
||||
dependencies = [
|
||||
"memoffset",
|
||||
"memoffset 0.9.1",
|
||||
"tempfile",
|
||||
"winapi",
|
||||
]
|
||||
@@ -6953,7 +7174,7 @@ version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "70acd30e3aa1450bc2eece896ce2ad0d178e9c079493819301573dae3c37ba6d"
|
||||
dependencies = [
|
||||
"regex",
|
||||
"regex 1.11.1",
|
||||
"serde",
|
||||
"unic-ucd-ident",
|
||||
"url",
|
||||
@@ -6965,6 +7186,12 @@ version = "0.7.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
|
||||
|
||||
[[package]]
|
||||
name = "utf8-ranges"
|
||||
version = "1.0.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7fcfc827f90e53a02eaef5e535ee14266c1d569214c6aa70133a624d8a3164ba"
|
||||
|
||||
[[package]]
|
||||
name = "utf8-width"
|
||||
version = "0.1.7"
|
||||
@@ -7040,6 +7267,15 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wait-timeout"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "walkdir"
|
||||
version = "2.5.0"
|
||||
@@ -7101,7 +7337,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.101",
|
||||
@@ -7848,6 +8084,15 @@ dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winreg"
|
||||
version = "0.10.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d"
|
||||
dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winreg"
|
||||
version = "0.55.0"
|
||||
@@ -7874,7 +8119,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e5ff8d0e60065f549fafd9d6cb626203ea64a798186c80d8e7df4f8af56baeb"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"os_pipe",
|
||||
"rustix 0.38.44",
|
||||
"tempfile",
|
||||
@@ -7994,6 +8239,17 @@ dependencies = [
|
||||
"rustix 1.0.7",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yaak-api"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"log 0.4.29",
|
||||
"reqwest",
|
||||
"sysproxy",
|
||||
"thiserror 2.0.17",
|
||||
"yaak-common",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yaak-app"
|
||||
version = "0.0.0"
|
||||
@@ -8003,7 +8259,7 @@ dependencies = [
|
||||
"cookie",
|
||||
"eventsource-client",
|
||||
"http",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"md5 0.8.0",
|
||||
"mime_guess",
|
||||
"openssl-sys",
|
||||
@@ -8034,6 +8290,7 @@ dependencies = [
|
||||
"ts-rs",
|
||||
"url",
|
||||
"uuid",
|
||||
"yaak-api",
|
||||
"yaak-common",
|
||||
"yaak-core",
|
||||
"yaak-crypto",
|
||||
@@ -8057,11 +8314,15 @@ dependencies = [
|
||||
name = "yaak-cli"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"assert_cmd",
|
||||
"clap",
|
||||
"dirs",
|
||||
"env_logger",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"predicates",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"yaak-crypto",
|
||||
"yaak-http",
|
||||
@@ -8093,7 +8354,7 @@ dependencies = [
|
||||
"base64 0.22.1",
|
||||
"chacha20poly1305",
|
||||
"keyring",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"serde",
|
||||
"thiserror 2.0.17",
|
||||
"yaak-models",
|
||||
@@ -8117,7 +8378,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"git2",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_yaml",
|
||||
@@ -8139,7 +8400,7 @@ dependencies = [
|
||||
"dunce",
|
||||
"hyper-rustls",
|
||||
"hyper-util",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"md5 0.7.0",
|
||||
"prost",
|
||||
"prost-reflect",
|
||||
@@ -8169,9 +8430,9 @@ dependencies = [
|
||||
"futures-util",
|
||||
"http-body",
|
||||
"hyper-util",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"mime_guess",
|
||||
"regex",
|
||||
"regex 1.11.1",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -8192,7 +8453,7 @@ name = "yaak-license"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -8200,9 +8461,9 @@ dependencies = [
|
||||
"tauri-plugin",
|
||||
"thiserror 2.0.17",
|
||||
"ts-rs",
|
||||
"yaak-api",
|
||||
"yaak-common",
|
||||
"yaak-models",
|
||||
"yaak-tauri-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -8211,7 +8472,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"cocoa",
|
||||
"csscolorparser",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"objc",
|
||||
"rand 0.9.1",
|
||||
"tauri",
|
||||
@@ -8225,7 +8486,7 @@ dependencies = [
|
||||
"chrono",
|
||||
"hex",
|
||||
"include_dir",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"nanoid",
|
||||
"r2d2",
|
||||
"r2d2_sqlite",
|
||||
@@ -8250,11 +8511,11 @@ dependencies = [
|
||||
"futures-util",
|
||||
"hex",
|
||||
"keyring",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"md5 0.7.0",
|
||||
"path-slash",
|
||||
"rand 0.9.1",
|
||||
"regex",
|
||||
"regex 1.11.1",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -8284,7 +8545,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"hex",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"notify",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -8301,12 +8562,8 @@ dependencies = [
|
||||
name = "yaak-tauri-utils"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"regex",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"regex 1.11.1",
|
||||
"tauri",
|
||||
"thiserror 2.0.17",
|
||||
"yaak-common",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -8314,7 +8571,7 @@ name = "yaak-templates"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"serde",
|
||||
"serde-wasm-bindgen",
|
||||
"serde_json",
|
||||
@@ -8328,7 +8585,7 @@ dependencies = [
|
||||
name = "yaak-tls"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"p12",
|
||||
"rustls",
|
||||
"rustls-pemfile",
|
||||
@@ -8345,7 +8602,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"futures-util",
|
||||
"http",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"md5 0.8.0",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -8409,7 +8666,7 @@ dependencies = [
|
||||
"futures-core",
|
||||
"futures-lite",
|
||||
"hex",
|
||||
"nix",
|
||||
"nix 0.30.1",
|
||||
"ordered-stream",
|
||||
"serde",
|
||||
"serde_repr",
|
||||
@@ -8576,7 +8833,7 @@ version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "aed5f10c571472911e37d8f7601a8dfba52b4f7f73a344015291b82ab292faf6"
|
||||
dependencies = [
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"thiserror 2.0.17",
|
||||
"zip",
|
||||
]
|
||||
@@ -8595,7 +8852,7 @@ checksum = "edfc5ee405f504cd4984ecc6f14d02d55cfda60fa4b689434ef4102aae150cd7"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"crc32fast",
|
||||
"log",
|
||||
"log 0.4.29",
|
||||
"simd-adler32",
|
||||
]
|
||||
|
||||
|
||||

@@ -15,6 +15,7 @@ members = [
  "crates/yaak-templates",
  "crates/yaak-tls",
  "crates/yaak-ws",
  "crates/yaak-api",
  # CLI crates
  "crates-cli/yaak-cli",
  # Tauri-specific crates
@@ -58,6 +59,7 @@ yaak-sync = { path = "crates/yaak-sync" }
yaak-templates = { path = "crates/yaak-templates" }
yaak-tls = { path = "crates/yaak-tls" }
yaak-ws = { path = "crates/yaak-ws" }
yaak-api = { path = "crates/yaak-api" }

# Internal crates - Tauri-specific
yaak-fonts = { path = "crates-tauri/yaak-fonts" }

@@ -47,7 +47,8 @@
    "!src-web/vite.config.ts",
    "!src-web/routeTree.gen.ts",
    "!packages/plugin-runtime-types/lib",
    "!**/bindings"
    "!**/bindings",
    "!flatpak"
  ]
}
}

@@ -13,6 +13,7 @@ clap = { version = "4", features = ["derive"] }
dirs = "6"
env_logger = "0.11"
log = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
yaak-crypto = { workspace = true }
@@ -20,3 +21,8 @@ yaak-http = { workspace = true }
yaak-models = { workspace = true }
yaak-plugins = { workspace = true }
yaak-templates = { workspace = true }

[dev-dependencies]
assert_cmd = "2"
predicates = "3"
tempfile = "3"

crates-cli/yaak-cli/PLAN.md (new file, 340 lines)
@@ -0,0 +1,340 @@
# CLI Command Architecture Plan

## Goal

Redesign the yaak-cli command structure to use a resource-oriented `<resource> <action>`
pattern that scales well, is discoverable, and supports both human and LLM workflows.

## Status Snapshot

Current branch state:

- Modular CLI structure with command modules and shared `CliContext`
- Resource/action hierarchy in place for:
  - `workspace list|show|create|update|delete`
  - `request list|show|create|update|send|delete`
  - `folder list|show|create|update|delete`
  - `environment list|show|create|update|delete`
- Top-level `send` exists as a request-send shortcut (not yet flexible request/folder/workspace resolution)
- Legacy `get` command removed
- JSON create/update flow implemented (`--json` and positional JSON shorthand)
- No `request schema` command yet

Progress checklist:

- [x] Phase 1 complete
- [x] Phase 2 complete
- [x] Phase 3 complete
- [ ] Phase 4 complete
- [ ] Phase 5 complete
- [ ] Phase 6 complete

## Command Architecture

### Design Principles

- **Resource-oriented**: top-level commands are nouns, subcommands are verbs
- **Polymorphic requests**: `request` covers HTTP, gRPC, and WebSocket — the CLI
  resolves the type via `get_any_request` and adapts behavior accordingly
- **Simple creation, full-fidelity via JSON**: human-friendly flags for basic creation,
  `--json` for full control (targeted at LLM and scripting workflows)
- **Runtime schema introspection**: `request schema` outputs JSON Schema for the request
  models, with dynamic auth fields populated from loaded plugins at runtime
- **Destructive actions require confirmation**: `delete` commands prompt for user
  confirmation before proceeding. Can be bypassed with `--yes` / `-y` for scripting

### Commands

```
# Top-level shortcut
yaakcli send <id> [-e <env_id>]   # id can be a request, folder, or workspace

# Resource commands
yaakcli workspace list
yaakcli workspace show <id>
yaakcli workspace create --name <name>
yaakcli workspace create --json '{"name": "My Workspace"}'
yaakcli workspace create '{"name": "My Workspace"}'   # positional JSON shorthand
yaakcli workspace update --json '{"id": "wk_abc", "name": "New Name"}'
yaakcli workspace delete <id>

yaakcli request list <workspace_id>
yaakcli request show <id>
yaakcli request create <workspace_id> --name <name> --url <url> [--method GET]
yaakcli request create --json '{"workspaceId": "wk_abc", "url": "..."}'
yaakcli request update --json '{"id": "rq_abc", "url": "https://new.com"}'
yaakcli request send <id> [-e <env_id>]
yaakcli request delete <id>
yaakcli request schema <http|grpc|websocket>

yaakcli folder list <workspace_id>
yaakcli folder show <id>
yaakcli folder create <workspace_id> --name <name>
yaakcli folder create --json '{"workspaceId": "wk_abc", "name": "Auth"}'
yaakcli folder update --json '{"id": "fl_abc", "name": "New Name"}'
yaakcli folder delete <id>

yaakcli environment list <workspace_id>
yaakcli environment show <id>
yaakcli environment create <workspace_id> --name <name>
yaakcli environment create --json '{"workspaceId": "wk_abc", "name": "Production"}'
yaakcli environment update --json '{"id": "ev_abc", ...}'
yaakcli environment delete <id>

```

### `send` — Top-Level Shortcut

`yaakcli send <id>` is a convenience alias that accepts any sendable ID. It tries
each type in order via DB lookups (short-circuiting on first match):

1. Request (HTTP, gRPC, or WebSocket via `get_any_request`)
2. Folder (sends all requests in the folder)
3. Workspace (sends all requests in the workspace)

ID prefixes exist (e.g. `rq_`, `fl_`, `wk_`) but are not relied upon — resolution
is purely by DB lookup.

`request send <id>` is the same but restricted to request IDs only.
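A minimal Rust sketch of that resolution order, assuming hypothetical lookup helpers on the CLI context (`get_any_request` is the only name taken from this plan; `get_folder`, `get_workspace`, and the `Sendable` enum are placeholders, not existing yaak-models API):

```rust
/// Placeholder result type for the three sendable resource kinds.
enum Sendable {
    Request(AnyRequest),
    Folder(Folder),
    Workspace(Workspace),
}

/// Try request, then folder, then workspace, short-circuiting on the first hit.
async fn resolve_sendable(ctx: &CliContext, id: &str) -> anyhow::Result<Sendable> {
    if let Some(request) = ctx.get_any_request(id).await? {
        return Ok(Sendable::Request(request));
    }
    if let Some(folder) = ctx.get_folder(id).await? {
        return Ok(Sendable::Folder(folder));
    }
    if let Some(workspace) = ctx.get_workspace(id).await? {
        return Ok(Sendable::Workspace(workspace));
    }
    anyhow::bail!("no request, folder, or workspace found for ID '{id}'")
}
```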

### Request Send — Polymorphic Behavior

`send` means "execute this request" regardless of protocol:

- **HTTP**: send request, print response, exit
- **gRPC**: invoke the method; for streaming, stream output to stdout until done/Ctrl+C
- **WebSocket**: connect, stream messages to stdout until closed/Ctrl+C

### `request schema` — Runtime JSON Schema

Outputs a JSON Schema describing the full request shape, including dynamic fields:

1. Generate base schema from `schemars::JsonSchema` derive on the Rust model structs
2. Load plugins, collect auth strategy definitions and their form inputs
3. Merge plugin-defined auth fields into the `authentication` property as a `oneOf`
4. Output the combined schema as JSON

This lets an LLM call `schema`, read the shape, and construct valid JSON for
`create --json` or `update --json`.
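A rough sketch of that flow using `schemars` and `serde_json`, on a stand-in struct rather than the real `HttpRequest` model (the plugin-derived auth schemas are passed in pre-built here, and the generated schema is handled as a plain `serde_json::Value` because the exact `schemars` types differ between versions):

```rust
use schemars::{schema_for, JsonSchema};
use serde_json::{json, Value};

/// Stand-in for the real request model; the actual derive would live in yaak-models.
#[derive(JsonSchema)]
struct ExampleHttpRequest {
    url: String,
    method: String,
}

/// Steps 1 and 3 above: base schema from the derive, then merge in the auth
/// strategies reported by loaded plugins as a `oneOf` under `authentication`.
fn request_schema_with_plugin_auth(plugin_auth_schemas: Vec<Value>) -> Value {
    let mut schema = serde_json::to_value(schema_for!(ExampleHttpRequest)).unwrap();
    schema["properties"]["authentication"] = json!({ "oneOf": plugin_auth_schemas });
    schema // step 4: caller prints this as JSON
}
```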

## Implementation Steps

### Phase 1: Restructure commands (no new functionality)

Refactor `main.rs` into the new resource/action pattern using clap subcommand nesting.
Existing behavior stays the same, just reorganized. Remove the `get` command.

1. Create module structure: `commands/workspace.rs`, `commands/request.rs`, etc.
2. Define nested clap enums:
   ```rust
   enum Commands {
       Send(SendArgs),
       Workspace(WorkspaceArgs),
       Request(RequestArgs),
       Folder(FolderArgs),
       Environment(EnvironmentArgs),
   }
   ```
3. Move existing `Workspaces` logic into `workspace list`
4. Move existing `Requests` logic into `request list`
5. Move existing `Send` logic into `request send`
6. Move existing `Create` logic into `request create`
7. Delete the `Get` command entirely
8. Extract shared setup (DB init, plugin init, encryption) into a reusable context struct

### Phase 2: Add missing CRUD commands

Status: complete

1. `workspace show <id>`
2. `workspace create --name <name>` (and `--json`)
3. `workspace update --json`
4. `workspace delete <id>`
5. `request show <id>` (JSON output of the full request model)
6. `request delete <id>`
7. `folder list <workspace_id>`
8. `folder show <id>`
9. `folder create <workspace_id> --name <name>` (and `--json`)
10. `folder update --json`
11. `folder delete <id>`
12. `environment list <workspace_id>`
13. `environment show <id>`
14. `environment create <workspace_id> --name <name>` (and `--json`)
15. `environment update --json`
16. `environment delete <id>`

### Phase 3: JSON input for create/update

Both commands accept JSON via `--json <string>` or as a positional argument (detected
by leading `{`). They follow the same upsert pattern as the plugin API.

- **`create --json`**: JSON must include `workspaceId`. Must NOT include `id` (or
  use empty string `""`). Deserializes into the model with defaults for missing fields,
  then upserts (insert).
- **`update --json`**: JSON must include `id`. Performs a fetch-merge-upsert:
  1. Fetch the existing model from DB
  2. Serialize it to `serde_json::Value`
  3. Deep-merge the user's partial JSON on top (JSON Merge Patch / RFC 7386 semantics)
  4. Deserialize back into the typed model
  5. Upsert (update)

This matches how the MCP server plugin already does it (fetch existing, spread, override),
but the CLI handles the merge server-side so callers don't have to.

Setting a field to `null` removes it (for `Option<T>` fields), per RFC 7386.
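For reference, RFC 7386 merge semantics fit in a few lines of `serde_json`; this is a self-contained sketch of the merge step only (step 3 above), not the actual CLI code, which may simply use the `json-patch` crate mentioned in the implementation list below:

```rust
use serde_json::Value;

/// JSON Merge Patch (RFC 7386): objects merge recursively, `null` deletes a key,
/// and any non-object patch value replaces the target outright.
fn merge_patch(target: &mut Value, patch: &Value) {
    match patch {
        Value::Object(patch_map) => {
            if !target.is_object() {
                *target = Value::Object(serde_json::Map::new());
            }
            let target_map = target.as_object_mut().unwrap();
            for (key, patch_value) in patch_map {
                if patch_value.is_null() {
                    target_map.remove(key); // `null` clears the field
                } else {
                    merge_patch(
                        target_map.entry(key.clone()).or_insert(Value::Null),
                        patch_value,
                    );
                }
            }
        }
        _ => *target = patch.clone(),
    }
}
```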

Implementation:
1. Add `--json` flag and positional JSON detection to `create` commands
2. Add `update` commands with required `--json` flag
3. Implement JSON merge utility (or use `json-patch` crate)

### Phase 4: Runtime schema generation

1. Add `schemars` dependency to `yaak-models`
2. Derive `JsonSchema` on `HttpRequest`, `GrpcRequest`, `WebsocketRequest`, and their
   nested types (`HttpRequestHeader`, `HttpUrlParameter`, etc.)
3. Implement `request schema` command:
   - Generate base schema from schemars
   - Query plugins for auth strategy form inputs
   - Convert plugin form inputs into JSON Schema properties
   - Merge into the `authentication` field
   - Print to stdout

### Phase 5: Polymorphic send

1. Update `request send` to use `get_any_request` to resolve the request type
2. Match on `AnyRequest` variant and dispatch to the appropriate sender:
   - `AnyRequest::HttpRequest` — existing HTTP send logic
   - `AnyRequest::GrpcRequest` — gRPC invoke (future implementation)
   - `AnyRequest::WebsocketRequest` — WebSocket connect (future implementation)
3. gRPC and WebSocket send can initially return "not yet implemented" errors
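A sketch of that dispatch, using the `AnyRequest` variants named above (`send_http`, the context type, and the `Option`-returning lookup are placeholders; the gRPC/WebSocket arms just surface the interim NYI errors):

```rust
// Illustrative only; not the actual yaak-cli send path.
async fn send_request(ctx: &CliContext, id: &str) -> anyhow::Result<()> {
    let Some(request) = ctx.get_any_request(id).await? else {
        anyhow::bail!("no request found with ID '{id}'");
    };
    match request {
        AnyRequest::HttpRequest(r) => send_http(ctx, &r).await,
        AnyRequest::GrpcRequest(_) => anyhow::bail!("gRPC send is not yet implemented"),
        AnyRequest::WebsocketRequest(_) => anyhow::bail!("WebSocket send is not yet implemented"),
    }
}
```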

### Phase 6: Top-level `send` and folder/workspace send

1. Add top-level `yaakcli send <id>` command
2. Resolve ID by trying DB lookups in order: any_request → folder → workspace
3. For folder: list all requests in folder, send each
4. For workspace: list all requests in workspace, send each
5. Add execution options: `--sequential` (default), `--parallel`, `--fail-fast`
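A rough sketch of the sequential fan-out with `--fail-fast` and the success/failure counts the summary (and exit code) would be built from; `send_request` and `CliContext` are the placeholders from the earlier sketches:

```rust
/// Send each request in turn, stopping early when --fail-fast is set.
/// Returns (succeeded, failed) so the caller can print a summary and
/// choose a non-zero exit code when anything failed.
async fn send_batch(ctx: &CliContext, ids: &[String], fail_fast: bool) -> (usize, usize) {
    let (mut ok, mut failed) = (0usize, 0usize);
    for id in ids {
        match send_request(ctx, id).await {
            Ok(()) => ok += 1,
            Err(err) => {
                eprintln!("{id}: {err}");
                failed += 1;
                if fail_fast {
                    break; // stop on first failure
                }
            }
        }
    }
    (ok, failed)
}
```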

## Execution Plan (PR Slices)

### PR 1: Command tree refactor + compatibility aliases

Scope:

1. Introduce `commands/` modules and a `CliContext` for shared setup
2. Add new clap hierarchy (`workspace`, `request`, `folder`, `environment`)
3. Route existing behavior into:
   - `workspace list`
   - `request list <workspace_id>`
   - `request send <id>`
   - `request create <workspace_id> ...`
4. Keep compatibility aliases temporarily:
   - `workspaces` -> `workspace list`
   - `requests <workspace_id>` -> `request list <workspace_id>`
   - `create ...` -> `request create ...`
5. Remove `get` and update help text

Acceptance criteria:

- `yaakcli --help` shows noun/verb structure
- Existing list/send/create workflows still work
- No behavior change in HTTP send output format

### PR 2: CRUD surface area

Scope:

1. Implement `show/create/update/delete` for `workspace`, `request`, `folder`, `environment`
2. Ensure delete commands require confirmation by default (`--yes` bypass)
3. Normalize output format for list/show/create/update/delete responses

Acceptance criteria:

- Every command listed in the "Commands" section parses and executes
- Delete commands are safe by default in interactive terminals
- `--yes` supports non-interactive scripts

### PR 3: JSON input + merge patch semantics

Scope:

1. Add shared parser for `--json` and positional JSON shorthand
2. Add `create --json` and `update --json` for all mutable resources
3. Implement server-side RFC 7386 merge patch behavior
4. Add guardrails:
   - `create --json`: reject non-empty `id`
   - `update --json`: require `id`

Acceptance criteria:

- Partial `update --json` only modifies provided keys
- `null` clears optional values
- Invalid JSON and missing required fields return actionable errors

### PR 4: `request schema` and plugin auth integration

Scope:

1. Add `schemars` to `yaak-models` and derive `JsonSchema` for request models
2. Implement `request schema <http|grpc|websocket>`
3. Merge plugin auth form inputs into `authentication` schema at runtime

Acceptance criteria:

- Command prints valid JSON schema
- Schema reflects installed auth providers at runtime
- No panic when plugins fail to initialize (degrade gracefully)

### PR 5: Polymorphic request send

Scope:

1. Replace request resolution in `request send` with `get_any_request`
2. Dispatch by request type
3. Keep HTTP fully functional
4. Return explicit NYI errors for gRPC/WebSocket until implemented

Acceptance criteria:

- HTTP behavior remains unchanged
- gRPC/WebSocket IDs are recognized and return explicit status

### PR 6: Top-level `send` + bulk execution

Scope:

1. Add top-level `send <id>` for request/folder/workspace IDs
2. Implement folder/workspace fan-out execution
3. Add execution controls: `--sequential`, `--parallel`, `--fail-fast`

Acceptance criteria:

- Correct ID dispatch order: request -> folder -> workspace
- Deterministic summary output (success/failure counts)
- Non-zero exit code when any request fails (unless explicitly configured otherwise)

## Validation Matrix

1. CLI parsing tests for every command path (including aliases while retained)
2. Integration tests against temp SQLite DB for CRUD flows
3. Snapshot tests for output text where scripting compatibility matters
4. Manual smoke tests:
   - Send HTTP request with template/rendered vars
   - JSON create/update for each resource
   - Delete confirmation and `--yes`
   - Top-level `send` on request/folder/workspace

## Open Questions

1. Should compatibility aliases (`workspaces`, `requests`, `create`) be removed immediately or after one release cycle?
2. For bulk `send`, should default behavior stop on first failure or continue and summarize?
3. Should command output default to human-readable text with an optional `--format json`, or return JSON by default for `show`/`list`?
4. For `request schema`, should plugin-derived auth fields be namespaced by plugin ID to avoid collisions?

## Crate Changes

- **yaak-cli**: restructure into modules, new clap hierarchy
- **yaak-models**: add `schemars` dependency, derive `JsonSchema` on model structs
  (current derives: `Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS`)

crates-cli/yaak-cli/README.md (new file, 87 lines)
@@ -0,0 +1,87 @@
# yaak-cli

Command-line interface for Yaak.

## Command Overview

Current top-level commands:

```text
yaakcli send <request_id>
yaakcli workspace list
yaakcli workspace show <workspace_id>
yaakcli workspace create --name <name>
yaakcli workspace create --json '{"name":"My Workspace"}'
yaakcli workspace create '{"name":"My Workspace"}'
yaakcli workspace update --json '{"id":"wk_abc","description":"Updated"}'
yaakcli workspace delete <workspace_id> [--yes]
yaakcli request list <workspace_id>
yaakcli request show <request_id>
yaakcli request send <request_id>
yaakcli request create <workspace_id> --name <name> --url <url> [--method GET]
yaakcli request create --json '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
yaakcli request create '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
yaakcli request delete <request_id> [--yes]
yaakcli folder list <workspace_id>
yaakcli folder show <folder_id>
yaakcli folder create <workspace_id> --name <name>
yaakcli folder create --json '{"workspaceId":"wk_abc","name":"Auth"}'
yaakcli folder create '{"workspaceId":"wk_abc","name":"Auth"}'
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
yaakcli folder delete <folder_id> [--yes]
yaakcli environment list <workspace_id>
yaakcli environment show <environment_id>
yaakcli environment create <workspace_id> --name <name>
yaakcli environment create --json '{"workspaceId":"wk_abc","name":"Production"}'
yaakcli environment create '{"workspaceId":"wk_abc","name":"Production"}'
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
yaakcli environment delete <environment_id> [--yes]
```

Global options:

- `--data-dir <path>`: use a custom data directory
- `-e, --environment <id>`: environment to use during request rendering/sending
- `-v, --verbose`: verbose logging and send output

Notes:

- `send` is currently a shortcut for sending an HTTP request ID.
- `delete` commands prompt for confirmation unless `--yes` is provided.
- In non-interactive mode, `delete` commands require `--yes`.
- `create` and `update` commands support `--json` and positional JSON shorthand.
- `update` uses JSON Merge Patch semantics (RFC 7386) for partial updates.

## Examples

```bash
yaakcli workspace list
yaakcli workspace create --name "My Workspace"
yaakcli workspace show wk_abc
yaakcli workspace update --json '{"id":"wk_abc","description":"Team workspace"}'
yaakcli request list wk_abc
yaakcli request show rq_abc
yaakcli request create wk_abc --name "Users" --url "https://api.example.com/users"
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
yaakcli request send rq_abc -e ev_abc
yaakcli request delete rq_abc --yes
yaakcli folder create wk_abc --name "Auth"
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
yaakcli environment create wk_abc --name "Production"
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
```

## Roadmap

Planned command expansion (request schema and polymorphic send) is tracked in `PLAN.md`.

When command behavior changes, update this README and verify with:

```bash
cargo run -q -p yaak-cli -- --help
cargo run -q -p yaak-cli -- request --help
cargo run -q -p yaak-cli -- workspace --help
cargo run -q -p yaak-cli -- folder --help
cargo run -q -p yaak-cli -- environment --help
```

crates-cli/yaak-cli/src/cli.rs (new file, 282 lines)
@@ -0,0 +1,282 @@
use clap::{Args, Parser, Subcommand};
use std::path::PathBuf;

#[derive(Parser)]
#[command(name = "yaakcli")]
#[command(about = "Yaak CLI - API client from the command line")]
pub struct Cli {
    /// Use a custom data directory
    #[arg(long, global = true)]
    pub data_dir: Option<PathBuf>,

    /// Environment ID to use for variable substitution
    #[arg(long, short, global = true)]
    pub environment: Option<String>,

    /// Enable verbose logging
    #[arg(long, short, global = true)]
    pub verbose: bool,

    #[command(subcommand)]
    pub command: Commands,
}

#[derive(Subcommand)]
pub enum Commands {
    /// Send an HTTP request by ID
    Send(SendArgs),

    /// Workspace commands
    Workspace(WorkspaceArgs),

    /// Request commands
    Request(RequestArgs),

    /// Folder commands
    Folder(FolderArgs),

    /// Environment commands
    Environment(EnvironmentArgs),
}

#[derive(Args)]
pub struct SendArgs {
    /// Request ID
    pub request_id: String,
}

#[derive(Args)]
pub struct WorkspaceArgs {
    #[command(subcommand)]
    pub command: WorkspaceCommands,
}

#[derive(Subcommand)]
pub enum WorkspaceCommands {
    /// List all workspaces
    List,

    /// Show a workspace as JSON
    Show {
        /// Workspace ID
        workspace_id: String,
    },

    /// Create a workspace
    Create {
        /// Workspace name
        #[arg(short, long)]
        name: Option<String>,

        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },

    /// Update a workspace
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },

    /// Delete a workspace
    Delete {
        /// Workspace ID
        workspace_id: String,

        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}

#[derive(Args)]
pub struct RequestArgs {
    #[command(subcommand)]
    pub command: RequestCommands,
}

#[derive(Subcommand)]
pub enum RequestCommands {
    /// List requests in a workspace
    List {
        /// Workspace ID
        workspace_id: String,
    },

    /// Show a request as JSON
    Show {
        /// Request ID
        request_id: String,
    },

    /// Send an HTTP request by ID
    Send {
        /// Request ID
        request_id: String,
    },

    /// Create a new HTTP request
    Create {
        /// Workspace ID (or positional JSON payload shorthand)
        workspace_id: Option<String>,

        /// Request name
        #[arg(short, long)]
        name: Option<String>,

        /// HTTP method
        #[arg(short, long)]
        method: Option<String>,

        /// URL
        #[arg(short, long)]
        url: Option<String>,

        /// JSON payload
        #[arg(long)]
        json: Option<String>,
    },

    /// Update an HTTP request
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
        json_input: Option<String>,
    },

    /// Delete a request
    Delete {
        /// Request ID
        request_id: String,

        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}

#[derive(Args)]
pub struct FolderArgs {
    #[command(subcommand)]
    pub command: FolderCommands,
}

#[derive(Subcommand)]
pub enum FolderCommands {
    /// List folders in a workspace
    List {
        /// Workspace ID
        workspace_id: String,
    },

    /// Show a folder as JSON
    Show {
        /// Folder ID
        folder_id: String,
    },

    /// Create a folder
    Create {
        /// Workspace ID (or positional JSON payload shorthand)
        workspace_id: Option<String>,

        /// Folder name
        #[arg(short, long)]
        name: Option<String>,

        /// JSON payload
        #[arg(long)]
        json: Option<String>,
    },

    /// Update a folder
    Update {
        /// JSON payload
        #[arg(long, conflicts_with = "json_input")]
        json: Option<String>,

        /// JSON payload shorthand
        #[arg(value_name = "JSON", conflicts_with = "json")]
|
||||
json_input: Option<String>,
|
||||
},
|
||||
|
||||
/// Delete a folder
|
||||
Delete {
|
||||
/// Folder ID
|
||||
folder_id: String,
|
||||
|
||||
/// Skip confirmation prompt
|
||||
#[arg(short, long)]
|
||||
yes: bool,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct EnvironmentArgs {
|
||||
#[command(subcommand)]
|
||||
pub command: EnvironmentCommands,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
pub enum EnvironmentCommands {
|
||||
/// List environments in a workspace
|
||||
List {
|
||||
/// Workspace ID
|
||||
workspace_id: String,
|
||||
},
|
||||
|
||||
/// Show an environment as JSON
|
||||
Show {
|
||||
/// Environment ID
|
||||
environment_id: String,
|
||||
},
|
||||
|
||||
/// Create an environment
|
||||
Create {
|
||||
/// Workspace ID (or positional JSON payload shorthand)
|
||||
workspace_id: Option<String>,
|
||||
|
||||
/// Environment name
|
||||
#[arg(short, long)]
|
||||
name: Option<String>,
|
||||
|
||||
/// JSON payload
|
||||
#[arg(long)]
|
||||
json: Option<String>,
|
||||
},
|
||||
|
||||
/// Update an environment
|
||||
Update {
|
||||
/// JSON payload
|
||||
#[arg(long, conflicts_with = "json_input")]
|
||||
json: Option<String>,
|
||||
|
||||
/// JSON payload shorthand
|
||||
#[arg(value_name = "JSON", conflicts_with = "json")]
|
||||
json_input: Option<String>,
|
||||
},
|
||||
|
||||
/// Delete an environment
|
||||
Delete {
|
||||
/// Environment ID
|
||||
environment_id: String,
|
||||
|
||||
/// Skip confirmation prompt
|
||||
#[arg(short, long)]
|
||||
yes: bool,
|
||||
},
|
||||
}
|
||||
crates-cli/yaak-cli/src/commands/confirm.rs (new file)
@@ -0,0 +1,16 @@
use std::io::{self, IsTerminal, Write};

/// Ask the user to confirm a destructive action. In non-interactive sessions
/// (stdin is not a terminal) the prompt is skipped and the process exits with
/// an error, so scripted callers must pass `--yes`.
pub fn confirm_delete(resource_name: &str, resource_id: &str) -> bool {
    if !io::stdin().is_terminal() {
        eprintln!("Refusing to delete in non-interactive mode without --yes");
        std::process::exit(1);
    }

    print!("Delete {resource_name} {resource_id}? [y/N]: ");
    io::stdout().flush().expect("Failed to flush stdout");

    let mut input = String::new();
    io::stdin().read_line(&mut input).expect("Failed to read confirmation");

    matches!(input.trim().to_lowercase().as_str(), "y" | "yes")
}
|
||||
crates-cli/yaak-cli/src/commands/environment.rs (new file)
@@ -0,0 +1,134 @@
use crate::cli::{EnvironmentArgs, EnvironmentCommands};
|
||||
use crate::commands::confirm::confirm_delete;
|
||||
use crate::commands::json::{
|
||||
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
|
||||
validate_create_id,
|
||||
};
|
||||
use crate::context::CliContext;
|
||||
use yaak_models::models::Environment;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
pub fn run(ctx: &CliContext, args: EnvironmentArgs) {
|
||||
match args.command {
|
||||
EnvironmentCommands::List { workspace_id } => list(ctx, &workspace_id),
|
||||
EnvironmentCommands::Show { environment_id } => show(ctx, &environment_id),
|
||||
EnvironmentCommands::Create { workspace_id, name, json } => {
|
||||
create(ctx, workspace_id, name, json)
|
||||
}
|
||||
EnvironmentCommands::Update { json, json_input } => update(ctx, json, json_input),
|
||||
EnvironmentCommands::Delete { environment_id, yes } => delete(ctx, &environment_id, yes),
|
||||
}
|
||||
}
|
||||
|
||||
fn list(ctx: &CliContext, workspace_id: &str) {
|
||||
let environments =
|
||||
ctx.db().list_environments_ensure_base(workspace_id).expect("Failed to list environments");
|
||||
|
||||
if environments.is_empty() {
|
||||
println!("No environments found in workspace {}", workspace_id);
|
||||
} else {
|
||||
for environment in environments {
|
||||
println!("{} - {} ({})", environment.id, environment.name, environment.parent_model);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn show(ctx: &CliContext, environment_id: &str) {
|
||||
let environment = ctx.db().get_environment(environment_id).expect("Failed to get environment");
|
||||
let output =
|
||||
serde_json::to_string_pretty(&environment).expect("Failed to serialize environment");
|
||||
println!("{output}");
|
||||
}
|
||||
|
||||
fn create(
|
||||
ctx: &CliContext,
|
||||
workspace_id: Option<String>,
|
||||
name: Option<String>,
|
||||
json: Option<String>,
|
||||
) {
|
||||
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
|
||||
panic!("environment create cannot combine workspace_id with --json payload");
|
||||
}
|
||||
|
||||
let payload = parse_optional_json(
|
||||
json,
|
||||
workspace_id.clone().filter(|v| is_json_shorthand(v)),
|
||||
"environment create",
|
||||
);
|
||||
|
||||
if let Some(payload) = payload {
|
||||
if name.is_some() {
|
||||
panic!("environment create cannot combine --name with JSON payload");
|
||||
}
|
||||
|
||||
validate_create_id(&payload, "environment");
|
||||
let mut environment: Environment =
|
||||
serde_json::from_value(payload).expect("Failed to parse environment create JSON");
|
||||
|
||||
if environment.workspace_id.is_empty() {
|
||||
panic!("environment create JSON requires non-empty \"workspaceId\"");
|
||||
}
|
||||
|
||||
if environment.parent_model.is_empty() {
|
||||
environment.parent_model = "environment".to_string();
|
||||
}
|
||||
|
||||
let created = ctx
|
||||
.db()
|
||||
.upsert_environment(&environment, &UpdateSource::Sync)
|
||||
.expect("Failed to create environment");
|
||||
|
||||
println!("Created environment: {}", created.id);
|
||||
return;
|
||||
}
|
||||
|
||||
let workspace_id = workspace_id.unwrap_or_else(|| {
|
||||
panic!("environment create requires workspace_id unless JSON payload is provided")
|
||||
});
|
||||
let name = name.unwrap_or_else(|| {
|
||||
panic!("environment create requires --name unless JSON payload is provided")
|
||||
});
|
||||
|
||||
let environment = Environment {
|
||||
workspace_id,
|
||||
name,
|
||||
parent_model: "environment".to_string(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let created = ctx
|
||||
.db()
|
||||
.upsert_environment(&environment, &UpdateSource::Sync)
|
||||
.expect("Failed to create environment");
|
||||
|
||||
println!("Created environment: {}", created.id);
|
||||
}
|
||||
|
||||
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) {
|
||||
let patch = parse_required_json(json, json_input, "environment update");
|
||||
let id = require_id(&patch, "environment update");
|
||||
|
||||
let existing = ctx.db().get_environment(&id).expect("Failed to get environment for update");
|
||||
let updated = apply_merge_patch(&existing, &patch, &id, "environment update");
|
||||
|
||||
let saved = ctx
|
||||
.db()
|
||||
.upsert_environment(&updated, &UpdateSource::Sync)
|
||||
.expect("Failed to update environment");
|
||||
|
||||
println!("Updated environment: {}", saved.id);
|
||||
}
|
||||
|
||||
fn delete(ctx: &CliContext, environment_id: &str, yes: bool) {
|
||||
if !yes && !confirm_delete("environment", environment_id) {
|
||||
println!("Aborted");
|
||||
return;
|
||||
}
|
||||
|
||||
let deleted = ctx
|
||||
.db()
|
||||
.delete_environment_by_id(environment_id, &UpdateSource::Sync)
|
||||
.expect("Failed to delete environment");
|
||||
|
||||
println!("Deleted environment: {}", deleted.id);
|
||||
}
|
||||
crates-cli/yaak-cli/src/commands/folder.rs (new file)
@@ -0,0 +1,115 @@
use crate::cli::{FolderArgs, FolderCommands};
|
||||
use crate::commands::confirm::confirm_delete;
|
||||
use crate::commands::json::{
|
||||
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
|
||||
validate_create_id,
|
||||
};
|
||||
use crate::context::CliContext;
|
||||
use yaak_models::models::Folder;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
pub fn run(ctx: &CliContext, args: FolderArgs) {
|
||||
match args.command {
|
||||
FolderCommands::List { workspace_id } => list(ctx, &workspace_id),
|
||||
FolderCommands::Show { folder_id } => show(ctx, &folder_id),
|
||||
FolderCommands::Create { workspace_id, name, json } => {
|
||||
create(ctx, workspace_id, name, json)
|
||||
}
|
||||
FolderCommands::Update { json, json_input } => update(ctx, json, json_input),
|
||||
FolderCommands::Delete { folder_id, yes } => delete(ctx, &folder_id, yes),
|
||||
}
|
||||
}
|
||||
|
||||
fn list(ctx: &CliContext, workspace_id: &str) {
|
||||
let folders = ctx.db().list_folders(workspace_id).expect("Failed to list folders");
|
||||
if folders.is_empty() {
|
||||
println!("No folders found in workspace {}", workspace_id);
|
||||
} else {
|
||||
for folder in folders {
|
||||
println!("{} - {}", folder.id, folder.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn show(ctx: &CliContext, folder_id: &str) {
|
||||
let folder = ctx.db().get_folder(folder_id).expect("Failed to get folder");
|
||||
let output = serde_json::to_string_pretty(&folder).expect("Failed to serialize folder");
|
||||
println!("{output}");
|
||||
}
|
||||
|
||||
fn create(
|
||||
ctx: &CliContext,
|
||||
workspace_id: Option<String>,
|
||||
name: Option<String>,
|
||||
json: Option<String>,
|
||||
) {
|
||||
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
|
||||
panic!("folder create cannot combine workspace_id with --json payload");
|
||||
}
|
||||
|
||||
let payload = parse_optional_json(
|
||||
json,
|
||||
workspace_id.clone().filter(|v| is_json_shorthand(v)),
|
||||
"folder create",
|
||||
);
|
||||
|
||||
if let Some(payload) = payload {
|
||||
if name.is_some() {
|
||||
panic!("folder create cannot combine --name with JSON payload");
|
||||
}
|
||||
|
||||
validate_create_id(&payload, "folder");
|
||||
let folder: Folder =
|
||||
serde_json::from_value(payload).expect("Failed to parse folder create JSON");
|
||||
|
||||
if folder.workspace_id.is_empty() {
|
||||
panic!("folder create JSON requires non-empty \"workspaceId\"");
|
||||
}
|
||||
|
||||
let created =
|
||||
ctx.db().upsert_folder(&folder, &UpdateSource::Sync).expect("Failed to create folder");
|
||||
|
||||
println!("Created folder: {}", created.id);
|
||||
return;
|
||||
}
|
||||
|
||||
let workspace_id = workspace_id.unwrap_or_else(|| {
|
||||
panic!("folder create requires workspace_id unless JSON payload is provided")
|
||||
});
|
||||
let name = name
|
||||
.unwrap_or_else(|| panic!("folder create requires --name unless JSON payload is provided"));
|
||||
|
||||
let folder = Folder { workspace_id, name, ..Default::default() };
|
||||
|
||||
let created =
|
||||
ctx.db().upsert_folder(&folder, &UpdateSource::Sync).expect("Failed to create folder");
|
||||
|
||||
println!("Created folder: {}", created.id);
|
||||
}
|
||||
|
||||
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) {
|
||||
let patch = parse_required_json(json, json_input, "folder update");
|
||||
let id = require_id(&patch, "folder update");
|
||||
|
||||
let existing = ctx.db().get_folder(&id).expect("Failed to get folder for update");
|
||||
let updated = apply_merge_patch(&existing, &patch, &id, "folder update");
|
||||
|
||||
let saved =
|
||||
ctx.db().upsert_folder(&updated, &UpdateSource::Sync).expect("Failed to update folder");
|
||||
|
||||
println!("Updated folder: {}", saved.id);
|
||||
}
|
||||
|
||||
fn delete(ctx: &CliContext, folder_id: &str, yes: bool) {
|
||||
if !yes && !confirm_delete("folder", folder_id) {
|
||||
println!("Aborted");
|
||||
return;
|
||||
}
|
||||
|
||||
let deleted = ctx
|
||||
.db()
|
||||
.delete_folder_by_id(folder_id, &UpdateSource::Sync)
|
||||
.expect("Failed to delete folder");
|
||||
|
||||
println!("Deleted folder: {}", deleted.id);
|
||||
}
|
||||
crates-cli/yaak-cli/src/commands/json.rs (new file)
@@ -0,0 +1,108 @@
use serde::Serialize;
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde_json::{Map, Value};
|
||||
|
||||
pub fn is_json_shorthand(input: &str) -> bool {
|
||||
input.trim_start().starts_with('{')
|
||||
}
|
||||
|
||||
pub fn parse_json_object(raw: &str, context: &str) -> Value {
|
||||
let value: Value = serde_json::from_str(raw)
|
||||
.unwrap_or_else(|error| panic!("Invalid JSON for {context}: {error}"));
|
||||
|
||||
if !value.is_object() {
|
||||
panic!("JSON payload for {context} must be an object");
|
||||
}
|
||||
|
||||
value
|
||||
}
|
||||
|
||||
pub fn parse_optional_json(
|
||||
json_flag: Option<String>,
|
||||
json_shorthand: Option<String>,
|
||||
context: &str,
|
||||
) -> Option<Value> {
|
||||
match (json_flag, json_shorthand) {
|
||||
(Some(_), Some(_)) => {
|
||||
panic!("Cannot provide both --json and positional JSON for {context}")
|
||||
}
|
||||
(Some(raw), None) => Some(parse_json_object(&raw, context)),
|
||||
(None, Some(raw)) => Some(parse_json_object(&raw, context)),
|
||||
(None, None) => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_required_json(
|
||||
json_flag: Option<String>,
|
||||
json_shorthand: Option<String>,
|
||||
context: &str,
|
||||
) -> Value {
|
||||
parse_optional_json(json_flag, json_shorthand, context).unwrap_or_else(|| {
|
||||
panic!("Missing JSON payload for {context}. Use --json or positional JSON")
|
||||
})
|
||||
}
|
||||
|
||||
pub fn require_id(payload: &Value, context: &str) -> String {
|
||||
payload
|
||||
.get("id")
|
||||
.and_then(|value| value.as_str())
|
||||
.filter(|value| !value.is_empty())
|
||||
.map(|value| value.to_string())
|
||||
.unwrap_or_else(|| panic!("{context} requires a non-empty \"id\" field"))
|
||||
}
|
||||
|
||||
pub fn validate_create_id(payload: &Value, context: &str) {
|
||||
let Some(id_value) = payload.get("id") else {
|
||||
return;
|
||||
};
|
||||
|
||||
match id_value {
|
||||
Value::String(id) if id.is_empty() => {}
|
||||
_ => panic!("{context} create JSON must omit \"id\" or set it to an empty string"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn apply_merge_patch<T>(existing: &T, patch: &Value, id: &str, context: &str) -> T
|
||||
where
|
||||
T: Serialize + DeserializeOwned,
|
||||
{
|
||||
let mut base = serde_json::to_value(existing).unwrap_or_else(|error| {
|
||||
panic!("Failed to serialize existing model for {context}: {error}")
|
||||
});
|
||||
merge_patch(&mut base, patch);
|
||||
|
||||
let Some(base_object) = base.as_object_mut() else {
|
||||
panic!("Merged payload for {context} must be an object");
|
||||
};
|
||||
base_object.insert("id".to_string(), Value::String(id.to_string()));
|
||||
|
||||
serde_json::from_value(base).unwrap_or_else(|error| {
|
||||
panic!("Failed to deserialize merged payload for {context}: {error}")
|
||||
})
|
||||
}
|
||||
|
||||
fn merge_patch(target: &mut Value, patch: &Value) {
|
||||
match patch {
|
||||
Value::Object(patch_map) => {
|
||||
if !target.is_object() {
|
||||
*target = Value::Object(Map::new());
|
||||
}
|
||||
|
||||
let target_map =
|
||||
target.as_object_mut().expect("merge_patch target expected to be object");
|
||||
|
||||
for (key, patch_value) in patch_map {
|
||||
if patch_value.is_null() {
|
||||
target_map.remove(key);
|
||||
continue;
|
||||
}
|
||||
|
||||
let target_entry = target_map.entry(key.clone()).or_insert(Value::Null);
|
||||
merge_patch(target_entry, patch_value);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
*target = patch.clone();
|
||||
}
|
||||
}
|
||||
}
|
||||
crates-cli/yaak-cli/src/commands/mod.rs (new file)
@@ -0,0 +1,7 @@
pub mod confirm;
pub mod environment;
pub mod folder;
pub mod json;
pub mod request;
pub mod send;
pub mod workspace;
|
||||
crates-cli/yaak-cli/src/commands/request.rs (new file)
@@ -0,0 +1,338 @@
use crate::cli::{RequestArgs, RequestCommands};
|
||||
use crate::commands::confirm::confirm_delete;
|
||||
use crate::commands::json::{
|
||||
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
|
||||
validate_create_id,
|
||||
};
|
||||
use crate::context::CliContext;
|
||||
use log::info;
|
||||
use serde_json::Value;
|
||||
use std::collections::BTreeMap;
|
||||
use tokio::sync::mpsc;
|
||||
use yaak_http::path_placeholders::apply_path_placeholders;
|
||||
use yaak_http::sender::{HttpSender, ReqwestSender};
|
||||
use yaak_http::types::{SendableHttpRequest, SendableHttpRequestOptions};
|
||||
use yaak_models::models::{Environment, HttpRequest, HttpRequestHeader, HttpUrlParameter};
|
||||
use yaak_models::render::make_vars_hashmap;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_plugins::events::{PluginContext, RenderPurpose};
|
||||
use yaak_plugins::template_callback::PluginTemplateCallback;
|
||||
use yaak_templates::{RenderOptions, parse_and_render, render_json_value_raw};
|
||||
|
||||
pub async fn run(
|
||||
ctx: &CliContext,
|
||||
args: RequestArgs,
|
||||
environment: Option<&str>,
|
||||
verbose: bool,
|
||||
) -> i32 {
|
||||
match args.command {
|
||||
RequestCommands::List { workspace_id } => {
|
||||
list(ctx, &workspace_id);
|
||||
0
|
||||
}
|
||||
RequestCommands::Show { request_id } => {
|
||||
show(ctx, &request_id);
|
||||
0
|
||||
}
|
||||
RequestCommands::Send { request_id } => {
|
||||
match send_request_by_id(ctx, &request_id, environment, verbose).await {
|
||||
Ok(()) => 0,
|
||||
Err(error) => {
|
||||
eprintln!("Error: {error}");
|
||||
1
|
||||
}
|
||||
}
|
||||
}
|
||||
RequestCommands::Create { workspace_id, name, method, url, json } => {
|
||||
create(ctx, workspace_id, name, method, url, json);
|
||||
0
|
||||
}
|
||||
RequestCommands::Update { json, json_input } => {
|
||||
update(ctx, json, json_input);
|
||||
0
|
||||
}
|
||||
RequestCommands::Delete { request_id, yes } => {
|
||||
delete(ctx, &request_id, yes);
|
||||
0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn list(ctx: &CliContext, workspace_id: &str) {
|
||||
let requests = ctx.db().list_http_requests(workspace_id).expect("Failed to list requests");
|
||||
if requests.is_empty() {
|
||||
println!("No requests found in workspace {}", workspace_id);
|
||||
} else {
|
||||
for request in requests {
|
||||
println!("{} - {} {}", request.id, request.method, request.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn create(
|
||||
ctx: &CliContext,
|
||||
workspace_id: Option<String>,
|
||||
name: Option<String>,
|
||||
method: Option<String>,
|
||||
url: Option<String>,
|
||||
json: Option<String>,
|
||||
) {
|
||||
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
|
||||
panic!("request create cannot combine workspace_id with --json payload");
|
||||
}
|
||||
|
||||
let payload = parse_optional_json(
|
||||
json,
|
||||
workspace_id.clone().filter(|v| is_json_shorthand(v)),
|
||||
"request create",
|
||||
);
|
||||
|
||||
if let Some(payload) = payload {
|
||||
if name.is_some() || method.is_some() || url.is_some() {
|
||||
panic!("request create cannot combine simple flags with JSON payload");
|
||||
}
|
||||
|
||||
validate_create_id(&payload, "request");
|
||||
let request: HttpRequest =
|
||||
serde_json::from_value(payload).expect("Failed to parse request create JSON");
|
||||
|
||||
if request.workspace_id.is_empty() {
|
||||
panic!("request create JSON requires non-empty \"workspaceId\"");
|
||||
}
|
||||
|
||||
let created = ctx
|
||||
.db()
|
||||
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||
.expect("Failed to create request");
|
||||
|
||||
println!("Created request: {}", created.id);
|
||||
return;
|
||||
}
|
||||
|
||||
let workspace_id = workspace_id.unwrap_or_else(|| {
|
||||
panic!("request create requires workspace_id unless JSON payload is provided")
|
||||
});
|
||||
let name = name.unwrap_or_else(|| {
|
||||
panic!("request create requires --name unless JSON payload is provided")
|
||||
});
|
||||
let url = url
|
||||
.unwrap_or_else(|| panic!("request create requires --url unless JSON payload is provided"));
|
||||
let method = method.unwrap_or_else(|| "GET".to_string());
|
||||
|
||||
let request = HttpRequest {
|
||||
workspace_id,
|
||||
name,
|
||||
method: method.to_uppercase(),
|
||||
url,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let created = ctx
|
||||
.db()
|
||||
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||
.expect("Failed to create request");
|
||||
|
||||
println!("Created request: {}", created.id);
|
||||
}
|
||||
|
||||
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) {
|
||||
let patch = parse_required_json(json, json_input, "request update");
|
||||
let id = require_id(&patch, "request update");
|
||||
|
||||
let existing = ctx.db().get_http_request(&id).expect("Failed to get request for update");
|
||||
let updated = apply_merge_patch(&existing, &patch, &id, "request update");
|
||||
|
||||
let saved = ctx
|
||||
.db()
|
||||
.upsert_http_request(&updated, &UpdateSource::Sync)
|
||||
.expect("Failed to update request");
|
||||
|
||||
println!("Updated request: {}", saved.id);
|
||||
}
|
||||
|
||||
fn show(ctx: &CliContext, request_id: &str) {
|
||||
let request = ctx.db().get_http_request(request_id).expect("Failed to get request");
|
||||
let output = serde_json::to_string_pretty(&request).expect("Failed to serialize request");
|
||||
println!("{output}");
|
||||
}
|
||||
|
||||
fn delete(ctx: &CliContext, request_id: &str, yes: bool) {
|
||||
if !yes && !confirm_delete("request", request_id) {
|
||||
println!("Aborted");
|
||||
return;
|
||||
}
|
||||
|
||||
let deleted = ctx
|
||||
.db()
|
||||
.delete_http_request_by_id(request_id, &UpdateSource::Sync)
|
||||
.expect("Failed to delete request");
|
||||
println!("Deleted request: {}", deleted.id);
|
||||
}
|
||||
|
||||
/// Send a request by ID and print response in the same format as legacy `send`.
|
||||
pub async fn send_request_by_id(
|
||||
ctx: &CliContext,
|
||||
request_id: &str,
|
||||
environment: Option<&str>,
|
||||
verbose: bool,
|
||||
) -> Result<(), String> {
|
||||
let request =
|
||||
ctx.db().get_http_request(request_id).map_err(|e| format!("Failed to get request: {e}"))?;
|
||||
|
||||
let environment_chain = ctx
|
||||
.db()
|
||||
.resolve_environments(&request.workspace_id, request.folder_id.as_deref(), environment)
|
||||
.map_err(|e| format!("Failed to resolve environments: {e}"))?;
|
||||
|
||||
let plugin_context = PluginContext::new(None, Some(request.workspace_id.clone()));
|
||||
let template_callback = PluginTemplateCallback::new(
|
||||
ctx.plugin_manager(),
|
||||
ctx.encryption_manager.clone(),
|
||||
&plugin_context,
|
||||
RenderPurpose::Send,
|
||||
);
|
||||
|
||||
let rendered_request = render_http_request(
|
||||
&request,
|
||||
environment_chain,
|
||||
&template_callback,
|
||||
&RenderOptions::throw(),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to render request templates: {e}"))?;
|
||||
|
||||
if verbose {
|
||||
println!("> {} {}", rendered_request.method, rendered_request.url);
|
||||
}
|
||||
|
||||
let sendable = SendableHttpRequest::from_http_request(
|
||||
&rendered_request,
|
||||
SendableHttpRequestOptions::default(),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to build request: {e}"))?;
|
||||
|
||||
let (event_tx, mut event_rx) = mpsc::channel(100);
|
||||
|
||||
let verbose_handle = if verbose {
|
||||
Some(tokio::spawn(async move {
|
||||
while let Some(event) = event_rx.recv().await {
|
||||
println!("{}", event);
|
||||
}
|
||||
}))
|
||||
} else {
|
||||
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
|
||||
None
|
||||
};
|
||||
|
||||
let sender = ReqwestSender::new().map_err(|e| format!("Failed to create HTTP client: {e}"))?;
|
||||
let response = sender
|
||||
.send(sendable, event_tx)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to send request: {e}"))?;
|
||||
|
||||
if let Some(handle) = verbose_handle {
|
||||
let _ = handle.await;
|
||||
}
|
||||
|
||||
if verbose {
|
||||
println!();
|
||||
}
|
||||
println!("HTTP {} {}", response.status, response.status_reason.as_deref().unwrap_or(""));
|
||||
|
||||
if verbose {
|
||||
for (name, value) in &response.headers {
|
||||
println!("{}: {}", name, value);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
let (body, _stats) =
|
||||
response.text().await.map_err(|e| format!("Failed to read response body: {e}"))?;
|
||||
println!("{}", body);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Render an HTTP request with template variables and plugin functions.
|
||||
async fn render_http_request(
|
||||
request: &HttpRequest,
|
||||
environment_chain: Vec<Environment>,
|
||||
callback: &PluginTemplateCallback,
|
||||
options: &RenderOptions,
|
||||
) -> yaak_templates::error::Result<HttpRequest> {
|
||||
let vars = &make_vars_hashmap(environment_chain);
|
||||
|
||||
let mut url_parameters = Vec::new();
|
||||
for parameter in request.url_parameters.clone() {
|
||||
if !parameter.enabled {
|
||||
continue;
|
||||
}
|
||||
|
||||
url_parameters.push(HttpUrlParameter {
|
||||
enabled: parameter.enabled,
|
||||
name: parse_and_render(parameter.name.as_str(), vars, callback, options).await?,
|
||||
value: parse_and_render(parameter.value.as_str(), vars, callback, options).await?,
|
||||
id: parameter.id,
|
||||
})
|
||||
}
|
||||
|
||||
let mut headers = Vec::new();
|
||||
for header in request.headers.clone() {
|
||||
if !header.enabled {
|
||||
continue;
|
||||
}
|
||||
|
||||
headers.push(HttpRequestHeader {
|
||||
enabled: header.enabled,
|
||||
name: parse_and_render(header.name.as_str(), vars, callback, options).await?,
|
||||
value: parse_and_render(header.value.as_str(), vars, callback, options).await?,
|
||||
id: header.id,
|
||||
})
|
||||
}
|
||||
|
||||
let mut body = BTreeMap::new();
|
||||
for (key, value) in request.body.clone() {
|
||||
body.insert(key, render_json_value_raw(value, vars, callback, options).await?);
|
||||
}
|
||||
|
||||
let authentication = {
|
||||
let mut disabled = false;
|
||||
let mut auth = BTreeMap::new();
|
||||
|
||||
match request.authentication.get("disabled") {
|
||||
Some(Value::Bool(true)) => {
|
||||
disabled = true;
|
||||
}
|
||||
Some(Value::String(template)) => {
|
||||
disabled = parse_and_render(template.as_str(), vars, callback, options)
|
||||
.await
|
||||
.unwrap_or_default()
|
||||
.is_empty();
|
||||
info!(
|
||||
"Rendering authentication.disabled as a template: {disabled} from \"{template}\""
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
if disabled {
|
||||
auth.insert("disabled".to_string(), Value::Bool(true));
|
||||
} else {
|
||||
for (key, value) in request.authentication.clone() {
|
||||
if key == "disabled" {
|
||||
auth.insert(key, Value::Bool(false));
|
||||
} else {
|
||||
auth.insert(key, render_json_value_raw(value, vars, callback, options).await?);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
auth
|
||||
};
|
||||
|
||||
let url = parse_and_render(request.url.clone().as_str(), vars, callback, options).await?;
|
||||
|
||||
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
|
||||
|
||||
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..request.to_owned() })
|
||||
}
|
||||
crates-cli/yaak-cli/src/commands/send.rs (new file)
@@ -0,0 +1,18 @@
use crate::cli::SendArgs;
use crate::commands::request;
use crate::context::CliContext;

/// Top-level `send` command: a thin wrapper that delegates to `request send`
/// and converts any error into a non-zero exit code.
pub async fn run(
    ctx: &CliContext,
    args: SendArgs,
    environment: Option<&str>,
    verbose: bool,
) -> i32 {
    match request::send_request_by_id(ctx, &args.request_id, environment, verbose).await {
        Ok(()) => 0,
        Err(error) => {
            eprintln!("Error: {error}");
            1
        }
    }
}
|
||||
crates-cli/yaak-cli/src/commands/workspace.rs (new file)
@@ -0,0 +1,100 @@
use crate::cli::{WorkspaceArgs, WorkspaceCommands};
|
||||
use crate::commands::confirm::confirm_delete;
|
||||
use crate::commands::json::{
|
||||
apply_merge_patch, parse_optional_json, parse_required_json, require_id, validate_create_id,
|
||||
};
|
||||
use crate::context::CliContext;
|
||||
use yaak_models::models::Workspace;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
pub fn run(ctx: &CliContext, args: WorkspaceArgs) {
|
||||
match args.command {
|
||||
WorkspaceCommands::List => list(ctx),
|
||||
WorkspaceCommands::Show { workspace_id } => show(ctx, &workspace_id),
|
||||
WorkspaceCommands::Create { name, json, json_input } => create(ctx, name, json, json_input),
|
||||
WorkspaceCommands::Update { json, json_input } => update(ctx, json, json_input),
|
||||
WorkspaceCommands::Delete { workspace_id, yes } => delete(ctx, &workspace_id, yes),
|
||||
}
|
||||
}
|
||||
|
||||
fn list(ctx: &CliContext) {
|
||||
let workspaces = ctx.db().list_workspaces().expect("Failed to list workspaces");
|
||||
if workspaces.is_empty() {
|
||||
println!("No workspaces found");
|
||||
} else {
|
||||
for workspace in workspaces {
|
||||
println!("{} - {}", workspace.id, workspace.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn show(ctx: &CliContext, workspace_id: &str) {
|
||||
let workspace = ctx.db().get_workspace(workspace_id).expect("Failed to get workspace");
|
||||
let output = serde_json::to_string_pretty(&workspace).expect("Failed to serialize workspace");
|
||||
println!("{output}");
|
||||
}
|
||||
|
||||
fn create(
|
||||
ctx: &CliContext,
|
||||
name: Option<String>,
|
||||
json: Option<String>,
|
||||
json_input: Option<String>,
|
||||
) {
|
||||
let payload = parse_optional_json(json, json_input, "workspace create");
|
||||
|
||||
if let Some(payload) = payload {
|
||||
if name.is_some() {
|
||||
panic!("workspace create cannot combine --name with JSON payload");
|
||||
}
|
||||
|
||||
validate_create_id(&payload, "workspace");
|
||||
let workspace: Workspace =
|
||||
serde_json::from_value(payload).expect("Failed to parse workspace create JSON");
|
||||
|
||||
let created = ctx
|
||||
.db()
|
||||
.upsert_workspace(&workspace, &UpdateSource::Sync)
|
||||
.expect("Failed to create workspace");
|
||||
println!("Created workspace: {}", created.id);
|
||||
return;
|
||||
}
|
||||
|
||||
let name = name.unwrap_or_else(|| {
|
||||
panic!("workspace create requires --name unless JSON payload is provided")
|
||||
});
|
||||
|
||||
let workspace = Workspace { name, ..Default::default() };
|
||||
let created = ctx
|
||||
.db()
|
||||
.upsert_workspace(&workspace, &UpdateSource::Sync)
|
||||
.expect("Failed to create workspace");
|
||||
println!("Created workspace: {}", created.id);
|
||||
}
|
||||
|
||||
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) {
|
||||
let patch = parse_required_json(json, json_input, "workspace update");
|
||||
let id = require_id(&patch, "workspace update");
|
||||
|
||||
let existing = ctx.db().get_workspace(&id).expect("Failed to get workspace for update");
|
||||
let updated = apply_merge_patch(&existing, &patch, &id, "workspace update");
|
||||
|
||||
let saved = ctx
|
||||
.db()
|
||||
.upsert_workspace(&updated, &UpdateSource::Sync)
|
||||
.expect("Failed to update workspace");
|
||||
|
||||
println!("Updated workspace: {}", saved.id);
|
||||
}
|
||||
|
||||
fn delete(ctx: &CliContext, workspace_id: &str, yes: bool) {
|
||||
if !yes && !confirm_delete("workspace", workspace_id) {
|
||||
println!("Aborted");
|
||||
return;
|
||||
}
|
||||
|
||||
let deleted = ctx
|
||||
.db()
|
||||
.delete_workspace_by_id(workspace_id, &UpdateSource::Sync)
|
||||
.expect("Failed to delete workspace");
|
||||
println!("Deleted workspace: {}", deleted.id);
|
||||
}
|
||||
crates-cli/yaak-cli/src/context.rs (new file)
@@ -0,0 +1,82 @@
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use yaak_crypto::manager::EncryptionManager;
|
||||
use yaak_models::db_context::DbContext;
|
||||
use yaak_models::query_manager::QueryManager;
|
||||
use yaak_plugins::events::PluginContext;
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
|
||||
pub struct CliContext {
|
||||
query_manager: QueryManager,
|
||||
pub encryption_manager: Arc<EncryptionManager>,
|
||||
plugin_manager: Option<Arc<PluginManager>>,
|
||||
}
|
||||
|
||||
impl CliContext {
|
||||
pub async fn initialize(data_dir: PathBuf, app_id: &str, with_plugins: bool) -> Self {
|
||||
let db_path = data_dir.join("db.sqlite");
|
||||
let blob_path = data_dir.join("blobs.sqlite");
|
||||
|
||||
let (query_manager, _blob_manager, _rx) =
|
||||
yaak_models::init_standalone(&db_path, &blob_path)
|
||||
.expect("Failed to initialize database");
|
||||
|
||||
let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));
|
||||
|
||||
let plugin_manager = if with_plugins {
|
||||
let vendored_plugin_dir = data_dir.join("vendored-plugins");
|
||||
let installed_plugin_dir = data_dir.join("installed-plugins");
|
||||
let node_bin_path = PathBuf::from("node");
|
||||
|
||||
let plugin_runtime_main =
|
||||
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
|
||||
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
||||
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
|
||||
});
|
||||
|
||||
let plugin_manager = Arc::new(
|
||||
PluginManager::new(
|
||||
vendored_plugin_dir,
|
||||
installed_plugin_dir,
|
||||
node_bin_path,
|
||||
plugin_runtime_main,
|
||||
false,
|
||||
)
|
||||
.await,
|
||||
);
|
||||
|
||||
let plugins = query_manager.connect().list_plugins().unwrap_or_default();
|
||||
if !plugins.is_empty() {
|
||||
let errors = plugin_manager
|
||||
.initialize_all_plugins(plugins, &PluginContext::new_empty())
|
||||
.await;
|
||||
for (plugin_dir, error_msg) in errors {
|
||||
eprintln!(
|
||||
"Warning: Failed to initialize plugin '{}': {}",
|
||||
plugin_dir, error_msg
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Some(plugin_manager)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Self { query_manager, encryption_manager, plugin_manager }
|
||||
}
|
||||
|
||||
pub fn db(&self) -> DbContext<'_> {
|
||||
self.query_manager.connect()
|
||||
}
|
||||
|
||||
pub fn plugin_manager(&self) -> Arc<PluginManager> {
|
||||
self.plugin_manager.clone().expect("Plugin manager was not initialized for this command")
|
||||
}
|
||||
|
||||
pub async fn shutdown(&self) {
|
||||
if let Some(plugin_manager) = &self.plugin_manager {
|
||||
plugin_manager.terminate().await;
|
||||
}
|
||||
}
|
||||
}
|
||||
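
As a usage sketch for the plugin bootstrap above (the runtime path below is a placeholder, not a documented install location): `request send` and the top-level `send` command are the only commands that start the plugin runtime, which requires `node` on `PATH` and can be pointed at an explicit bundle via `YAAK_PLUGIN_RUNTIME`:

```bash
# Override the vendored development fallback with an explicit runtime bundle.
export YAAK_PLUGIN_RUNTIME=/path/to/plugin-runtime/index.cjs
yaakcli request send rq_abc -e ev_abc --verbose
```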
@@ -1,409 +1,57 @@
|
||||
use clap::{Parser, Subcommand};
|
||||
use log::info;
|
||||
use serde_json::Value;
|
||||
use std::collections::BTreeMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::mpsc;
|
||||
use yaak_crypto::manager::EncryptionManager;
|
||||
use yaak_http::path_placeholders::apply_path_placeholders;
|
||||
use yaak_http::sender::{HttpSender, ReqwestSender};
|
||||
use yaak_http::types::{SendableHttpRequest, SendableHttpRequestOptions};
|
||||
use yaak_models::models::{HttpRequest, HttpRequestHeader, HttpUrlParameter};
|
||||
use yaak_models::render::make_vars_hashmap;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_plugins::events::{PluginContext, RenderPurpose};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_plugins::template_callback::PluginTemplateCallback;
|
||||
use yaak_templates::{RenderOptions, parse_and_render, render_json_value_raw};
|
||||
mod cli;
|
||||
mod commands;
|
||||
mod context;
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(name = "yaakcli")]
|
||||
#[command(about = "Yaak CLI - API client from the command line")]
|
||||
struct Cli {
|
||||
/// Use a custom data directory
|
||||
#[arg(long, global = true)]
|
||||
data_dir: Option<PathBuf>,
|
||||
|
||||
/// Environment ID to use for variable substitution
|
||||
#[arg(long, short, global = true)]
|
||||
environment: Option<String>,
|
||||
|
||||
/// Enable verbose logging
|
||||
#[arg(long, short, global = true)]
|
||||
verbose: bool,
|
||||
|
||||
#[command(subcommand)]
|
||||
command: Commands,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
enum Commands {
|
||||
/// List all workspaces
|
||||
Workspaces,
|
||||
/// List requests in a workspace
|
||||
Requests {
|
||||
/// Workspace ID
|
||||
workspace_id: String,
|
||||
},
|
||||
/// Send an HTTP request by ID
|
||||
Send {
|
||||
/// Request ID
|
||||
request_id: String,
|
||||
},
|
||||
/// Send a GET request to a URL
|
||||
Get {
|
||||
/// URL to request
|
||||
url: String,
|
||||
},
|
||||
/// Create a new HTTP request
|
||||
Create {
|
||||
/// Workspace ID
|
||||
workspace_id: String,
|
||||
/// Request name
|
||||
#[arg(short, long)]
|
||||
name: String,
|
||||
/// HTTP method
|
||||
#[arg(short, long, default_value = "GET")]
|
||||
method: String,
|
||||
/// URL
|
||||
#[arg(short, long)]
|
||||
url: String,
|
||||
},
|
||||
}
|
||||
|
||||
/// Render an HTTP request with template variables and plugin functions
|
||||
async fn render_http_request(
|
||||
r: &HttpRequest,
|
||||
environment_chain: Vec<yaak_models::models::Environment>,
|
||||
cb: &PluginTemplateCallback,
|
||||
opt: &RenderOptions,
|
||||
) -> yaak_templates::error::Result<HttpRequest> {
|
||||
let vars = &make_vars_hashmap(environment_chain);
|
||||
|
||||
let mut url_parameters = Vec::new();
|
||||
for p in r.url_parameters.clone() {
|
||||
if !p.enabled {
|
||||
continue;
|
||||
}
|
||||
url_parameters.push(HttpUrlParameter {
|
||||
enabled: p.enabled,
|
||||
name: parse_and_render(p.name.as_str(), vars, cb, opt).await?,
|
||||
value: parse_and_render(p.value.as_str(), vars, cb, opt).await?,
|
||||
id: p.id,
|
||||
})
|
||||
}
|
||||
|
||||
let mut headers = Vec::new();
|
||||
for p in r.headers.clone() {
|
||||
if !p.enabled {
|
||||
continue;
|
||||
}
|
||||
headers.push(HttpRequestHeader {
|
||||
enabled: p.enabled,
|
||||
name: parse_and_render(p.name.as_str(), vars, cb, opt).await?,
|
||||
value: parse_and_render(p.value.as_str(), vars, cb, opt).await?,
|
||||
id: p.id,
|
||||
})
|
||||
}
|
||||
|
||||
let mut body = BTreeMap::new();
|
||||
for (k, v) in r.body.clone() {
|
||||
body.insert(k, render_json_value_raw(v, vars, cb, opt).await?);
|
||||
}
|
||||
|
||||
let authentication = {
|
||||
let mut disabled = false;
|
||||
let mut auth = BTreeMap::new();
|
||||
match r.authentication.get("disabled") {
|
||||
Some(Value::Bool(true)) => {
|
||||
disabled = true;
|
||||
}
|
||||
Some(Value::String(tmpl)) => {
|
||||
disabled = parse_and_render(tmpl.as_str(), vars, cb, opt)
|
||||
.await
|
||||
.unwrap_or_default()
|
||||
.is_empty();
|
||||
info!(
|
||||
"Rendering authentication.disabled as a template: {disabled} from \"{tmpl}\""
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
if disabled {
|
||||
auth.insert("disabled".to_string(), Value::Bool(true));
|
||||
} else {
|
||||
for (k, v) in r.authentication.clone() {
|
||||
if k == "disabled" {
|
||||
auth.insert(k, Value::Bool(false));
|
||||
} else {
|
||||
auth.insert(k, render_json_value_raw(v, vars, cb, opt).await?);
|
||||
}
|
||||
}
|
||||
}
|
||||
auth
|
||||
};
|
||||
|
||||
let url = parse_and_render(r.url.clone().as_str(), vars, cb, opt).await?;
|
||||
|
||||
// Apply path placeholders (e.g., /users/:id -> /users/123)
|
||||
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
|
||||
|
||||
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
|
||||
}
|
||||
use clap::Parser;
|
||||
use cli::{Cli, Commands, RequestCommands};
|
||||
use context::CliContext;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
let cli = Cli::parse();
|
||||
let Cli { data_dir, environment, verbose, command } = Cli::parse();
|
||||
|
||||
// Initialize logging
|
||||
if cli.verbose {
|
||||
if verbose {
|
||||
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
|
||||
}
|
||||
|
||||
// Use the same app_id for both data directory and keyring
|
||||
let app_id = if cfg!(debug_assertions) { "app.yaak.desktop.dev" } else { "app.yaak.desktop" };
|
||||
|
||||
let data_dir = cli.data_dir.unwrap_or_else(|| {
|
||||
let data_dir = data_dir.unwrap_or_else(|| {
|
||||
dirs::data_dir().expect("Could not determine data directory").join(app_id)
|
||||
});
|
||||
|
||||
let db_path = data_dir.join("db.sqlite");
|
||||
let blob_path = data_dir.join("blobs.sqlite");
|
||||
|
||||
let (query_manager, _blob_manager, _rx) =
|
||||
yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize database");
|
||||
|
||||
let db = query_manager.connect();
|
||||
|
||||
// Initialize encryption manager for secure() template function
|
||||
// Use the same app_id as the Tauri app for keyring access
|
||||
let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));
|
||||
|
||||
// Initialize plugin manager for template functions
|
||||
let vendored_plugin_dir = data_dir.join("vendored-plugins");
|
||||
let installed_plugin_dir = data_dir.join("installed-plugins");
|
||||
|
||||
// Use system node for CLI (must be in PATH)
|
||||
let node_bin_path = PathBuf::from("node");
|
||||
|
||||
// Find the plugin runtime - check YAAK_PLUGIN_RUNTIME env var, then fallback to development path
|
||||
let plugin_runtime_main =
|
||||
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
|
||||
// Development fallback: look relative to crate root
|
||||
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
||||
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
|
||||
});
|
||||
|
||||
// Create plugin manager (plugins may not be available in CLI context)
|
||||
let plugin_manager = Arc::new(
|
||||
PluginManager::new(
|
||||
vendored_plugin_dir,
|
||||
installed_plugin_dir,
|
||||
node_bin_path,
|
||||
plugin_runtime_main,
|
||||
false,
|
||||
)
|
||||
.await,
|
||||
let needs_plugins = matches!(
|
||||
&command,
|
||||
Commands::Send(_)
|
||||
| Commands::Request(cli::RequestArgs { command: RequestCommands::Send { .. } })
|
||||
);
|
||||
|
||||
// Initialize plugins from database
|
||||
let plugins = db.list_plugins().unwrap_or_default();
|
||||
if !plugins.is_empty() {
|
||||
let errors =
|
||||
plugin_manager.initialize_all_plugins(plugins, &PluginContext::new_empty()).await;
|
||||
for (plugin_dir, error_msg) in errors {
|
||||
eprintln!("Warning: Failed to initialize plugin '{}': {}", plugin_dir, error_msg);
|
||||
let context = CliContext::initialize(data_dir, app_id, needs_plugins).await;
|
||||
|
||||
let exit_code = match command {
|
||||
Commands::Send(args) => {
|
||||
commands::send::run(&context, args, environment.as_deref(), verbose).await
|
||||
}
|
||||
Commands::Workspace(args) => {
|
||||
commands::workspace::run(&context, args);
|
||||
0
|
||||
}
|
||||
Commands::Request(args) => {
|
||||
commands::request::run(&context, args, environment.as_deref(), verbose).await
|
||||
}
|
||||
Commands::Folder(args) => {
|
||||
commands::folder::run(&context, args);
|
||||
0
|
||||
}
|
||||
Commands::Environment(args) => {
|
||||
commands::environment::run(&context, args);
|
||||
0
|
||||
}
|
||||
};
|
||||
|
||||
context.shutdown().await;
|
||||
|
||||
if exit_code != 0 {
|
||||
std::process::exit(exit_code);
|
||||
}
|
||||
|
||||
match cli.command {
|
||||
Commands::Workspaces => {
|
||||
let workspaces = db.list_workspaces().expect("Failed to list workspaces");
|
||||
if workspaces.is_empty() {
|
||||
println!("No workspaces found");
|
||||
} else {
|
||||
for ws in workspaces {
|
||||
println!("{} - {}", ws.id, ws.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
Commands::Requests { workspace_id } => {
|
||||
let requests = db.list_http_requests(&workspace_id).expect("Failed to list requests");
|
||||
if requests.is_empty() {
|
||||
println!("No requests found in workspace {}", workspace_id);
|
||||
} else {
|
||||
for req in requests {
|
||||
println!("{} - {} {}", req.id, req.method, req.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
Commands::Send { request_id } => {
|
||||
let request = db.get_http_request(&request_id).expect("Failed to get request");
|
||||
|
||||
// Resolve environment chain for variable substitution
|
||||
let environment_chain = db
|
||||
.resolve_environments(
|
||||
&request.workspace_id,
|
||||
request.folder_id.as_deref(),
|
||||
cli.environment.as_deref(),
|
||||
)
|
||||
.unwrap_or_default();
|
||||
|
||||
// Create template callback with plugin support
|
||||
let plugin_context = PluginContext::new(None, Some(request.workspace_id.clone()));
|
||||
let template_callback = PluginTemplateCallback::new(
|
||||
plugin_manager.clone(),
|
||||
encryption_manager.clone(),
|
||||
&plugin_context,
|
||||
RenderPurpose::Send,
|
||||
);
|
||||
|
||||
// Render templates in the request
|
||||
let rendered_request = render_http_request(
|
||||
&request,
|
||||
environment_chain,
|
||||
&template_callback,
|
||||
&RenderOptions::throw(),
|
||||
)
|
||||
.await
|
||||
.expect("Failed to render request templates");
|
||||
|
||||
if cli.verbose {
|
||||
println!("> {} {}", rendered_request.method, rendered_request.url);
|
||||
}
|
||||
|
||||
// Convert to sendable request
|
||||
let sendable = SendableHttpRequest::from_http_request(
|
||||
&rendered_request,
|
||||
SendableHttpRequestOptions::default(),
|
||||
)
|
||||
.await
|
||||
.expect("Failed to build request");
|
||||
|
||||
// Create event channel for progress
|
||||
let (event_tx, mut event_rx) = mpsc::channel(100);
|
||||
|
||||
// Spawn task to print events if verbose
|
||||
let verbose = cli.verbose;
|
||||
let verbose_handle = if verbose {
|
||||
Some(tokio::spawn(async move {
|
||||
while let Some(event) = event_rx.recv().await {
|
||||
println!("{}", event);
|
||||
}
|
||||
}))
|
||||
} else {
|
||||
// Drain events silently
|
||||
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
|
||||
None
|
||||
};
|
||||
|
||||
// Send the request
|
||||
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
|
||||
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
|
||||
|
||||
// Wait for event handler to finish
|
||||
if let Some(handle) = verbose_handle {
|
||||
let _ = handle.await;
|
||||
}
|
||||
|
||||
// Print response
|
||||
if verbose {
|
||||
println!();
|
||||
}
|
||||
println!(
|
||||
"HTTP {} {}",
|
||||
response.status,
|
||||
response.status_reason.as_deref().unwrap_or("")
|
||||
);
|
||||
|
||||
if verbose {
|
||||
for (name, value) in &response.headers {
|
||||
println!("{}: {}", name, value);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
// Print body
|
||||
let (body, _stats) = response.text().await.expect("Failed to read response body");
|
||||
println!("{}", body);
|
||||
}
|
||||
Commands::Get { url } => {
|
||||
if cli.verbose {
|
||||
println!("> GET {}", url);
|
||||
}
|
||||
|
||||
// Build a simple GET request
|
||||
let sendable = SendableHttpRequest {
|
||||
url: url.clone(),
|
||||
method: "GET".to_string(),
|
||||
headers: vec![],
|
||||
body: None,
|
||||
options: SendableHttpRequestOptions::default(),
|
||||
};
|
||||
|
||||
// Create event channel for progress
|
||||
let (event_tx, mut event_rx) = mpsc::channel(100);
|
||||
|
||||
// Spawn task to print events if verbose
|
||||
let verbose = cli.verbose;
|
||||
let verbose_handle = if verbose {
|
||||
Some(tokio::spawn(async move {
|
||||
while let Some(event) = event_rx.recv().await {
|
||||
println!("{}", event);
|
||||
}
|
||||
}))
|
||||
} else {
|
||||
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
|
||||
None
|
||||
};
|
||||
|
||||
// Send the request
|
||||
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
|
||||
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
|
||||
|
||||
if let Some(handle) = verbose_handle {
|
||||
let _ = handle.await;
|
||||
}
|
||||
|
||||
// Print response
|
||||
if verbose {
|
||||
println!();
|
||||
}
|
||||
println!(
|
||||
"HTTP {} {}",
|
||||
response.status,
|
||||
response.status_reason.as_deref().unwrap_or("")
|
||||
);
|
||||
|
||||
if verbose {
|
||||
for (name, value) in &response.headers {
|
||||
println!("{}: {}", name, value);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
// Print body
|
||||
let (body, _stats) = response.text().await.expect("Failed to read response body");
|
||||
println!("{}", body);
|
||||
}
|
||||
Commands::Create { workspace_id, name, method, url } => {
|
||||
let request = HttpRequest {
|
||||
workspace_id,
|
||||
name,
|
||||
method: method.to_uppercase(),
|
||||
url,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let created = db
|
||||
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||
.expect("Failed to create request");
|
||||
|
||||
println!("Created request: {}", created.id);
|
||||
}
|
||||
}
|
||||
|
||||
// Terminate plugin manager gracefully
|
||||
plugin_manager.terminate().await;
|
||||
}
|
||||
|
||||
crates-cli/yaak-cli/tests/common/mod.rs (new file)
@@ -0,0 +1,60 @@
#![allow(dead_code)]
|
||||
|
||||
use assert_cmd::Command;
|
||||
use assert_cmd::cargo::cargo_bin_cmd;
|
||||
use std::path::Path;
|
||||
use yaak_models::models::{HttpRequest, Workspace};
|
||||
use yaak_models::query_manager::QueryManager;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
pub fn cli_cmd(data_dir: &Path) -> Command {
|
||||
let mut cmd = cargo_bin_cmd!("yaakcli");
|
||||
cmd.arg("--data-dir").arg(data_dir);
|
||||
cmd
|
||||
}
|
||||
|
||||
pub fn parse_created_id(stdout: &[u8], label: &str) -> String {
|
||||
String::from_utf8_lossy(stdout)
|
||||
.trim()
|
||||
.split_once(": ")
|
||||
.map(|(_, id)| id.to_string())
|
||||
.unwrap_or_else(|| panic!("Expected id in '{label}' output"))
|
||||
}
|
||||
|
||||
pub fn query_manager(data_dir: &Path) -> QueryManager {
|
||||
let db_path = data_dir.join("db.sqlite");
|
||||
let blob_path = data_dir.join("blobs.sqlite");
|
||||
let (query_manager, _blob_manager, _rx) =
|
||||
yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize DB");
|
||||
query_manager
|
||||
}
|
||||
|
||||
pub fn seed_workspace(data_dir: &Path, workspace_id: &str) {
|
||||
let workspace = Workspace {
|
||||
id: workspace_id.to_string(),
|
||||
name: "Seed Workspace".to_string(),
|
||||
description: "Seeded for integration tests".to_string(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
query_manager(data_dir)
|
||||
.connect()
|
||||
.upsert_workspace(&workspace, &UpdateSource::Sync)
|
||||
.expect("Failed to seed workspace");
|
||||
}
|
||||
|
||||
pub fn seed_request(data_dir: &Path, workspace_id: &str, request_id: &str) {
|
||||
let request = HttpRequest {
|
||||
id: request_id.to_string(),
|
||||
workspace_id: workspace_id.to_string(),
|
||||
name: "Seeded Request".to_string(),
|
||||
method: "GET".to_string(),
|
||||
url: "https://example.com".to_string(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
query_manager(data_dir)
|
||||
.connect()
|
||||
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||
.expect("Failed to seed request");
|
||||
}
|
||||
crates-cli/yaak-cli/tests/environment_commands.rs (new file)
@@ -0,0 +1,80 @@
mod common;
|
||||
|
||||
use common::{cli_cmd, parse_created_id, query_manager, seed_workspace};
|
||||
use predicates::str::contains;
|
||||
use tempfile::TempDir;
|
||||
|
||||
#[test]
|
||||
fn create_list_show_delete_round_trip() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["environment", "list", "wk_test"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("Global Variables"));
|
||||
|
||||
let create_assert = cli_cmd(data_dir)
|
||||
.args(["environment", "create", "wk_test", "--name", "Production"])
|
||||
.assert()
|
||||
.success();
|
||||
let environment_id = parse_created_id(&create_assert.get_output().stdout, "environment create");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["environment", "list", "wk_test"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(&environment_id))
|
||||
.stdout(contains("Production"));
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["environment", "show", &environment_id])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("\"id\": \"{environment_id}\"")))
|
||||
.stdout(contains("\"parentModel\": \"environment\""));
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["environment", "delete", &environment_id, "--yes"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("Deleted environment: {environment_id}")));
|
||||
|
||||
assert!(query_manager(data_dir).connect().get_environment(&environment_id).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn json_create_and_update_merge_patch_round_trip() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
|
||||
let create_assert = cli_cmd(data_dir)
|
||||
.args([
|
||||
"environment",
|
||||
"create",
|
||||
r#"{"workspaceId":"wk_test","name":"Json Environment"}"#,
|
||||
])
|
||||
.assert()
|
||||
.success();
|
||||
let environment_id = parse_created_id(&create_assert.get_output().stdout, "environment create");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args([
|
||||
"environment",
|
||||
"update",
|
||||
&format!(r##"{{"id":"{}","color":"#00ff00"}}"##, environment_id),
|
||||
])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("Updated environment: {environment_id}")));
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["environment", "show", &environment_id])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("\"name\": \"Json Environment\""))
|
||||
.stdout(contains("\"color\": \"#00ff00\""));
|
||||
}
|
||||
crates-cli/yaak-cli/tests/folder_commands.rs (new file, 74 lines)
@@ -0,0 +1,74 @@
mod common;

use common::{cli_cmd, parse_created_id, query_manager, seed_workspace};
use predicates::str::contains;
use tempfile::TempDir;

#[test]
fn create_list_show_delete_round_trip() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();
    seed_workspace(data_dir, "wk_test");

    let create_assert = cli_cmd(data_dir)
        .args(["folder", "create", "wk_test", "--name", "Auth"])
        .assert()
        .success();
    let folder_id = parse_created_id(&create_assert.get_output().stdout, "folder create");

    cli_cmd(data_dir)
        .args(["folder", "list", "wk_test"])
        .assert()
        .success()
        .stdout(contains(&folder_id))
        .stdout(contains("Auth"));

    cli_cmd(data_dir)
        .args(["folder", "show", &folder_id])
        .assert()
        .success()
        .stdout(contains(format!("\"id\": \"{folder_id}\"")))
        .stdout(contains("\"workspaceId\": \"wk_test\""));

    cli_cmd(data_dir)
        .args(["folder", "delete", &folder_id, "--yes"])
        .assert()
        .success()
        .stdout(contains(format!("Deleted folder: {folder_id}")));

    assert!(query_manager(data_dir).connect().get_folder(&folder_id).is_err());
}

#[test]
fn json_create_and_update_merge_patch_round_trip() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();
    seed_workspace(data_dir, "wk_test");

    let create_assert = cli_cmd(data_dir)
        .args([
            "folder",
            "create",
            r#"{"workspaceId":"wk_test","name":"Json Folder"}"#,
        ])
        .assert()
        .success();
    let folder_id = parse_created_id(&create_assert.get_output().stdout, "folder create");

    cli_cmd(data_dir)
        .args([
            "folder",
            "update",
            &format!(r#"{{"id":"{}","description":"Folder Description"}}"#, folder_id),
        ])
        .assert()
        .success()
        .stdout(contains(format!("Updated folder: {folder_id}")));

    cli_cmd(data_dir)
        .args(["folder", "show", &folder_id])
        .assert()
        .success()
        .stdout(contains("\"name\": \"Json Folder\""))
        .stdout(contains("\"description\": \"Folder Description\""));
}
crates-cli/yaak-cli/tests/request_commands.rs (new file, 107 lines)
@@ -0,0 +1,107 @@
mod common;

use common::{cli_cmd, parse_created_id, query_manager, seed_request, seed_workspace};
use predicates::str::contains;
use tempfile::TempDir;

#[test]
fn show_and_delete_yes_round_trip() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();
    seed_workspace(data_dir, "wk_test");

    let create_assert = cli_cmd(data_dir)
        .args([
            "request",
            "create",
            "wk_test",
            "--name",
            "Smoke Test",
            "--url",
            "https://example.com",
        ])
        .assert()
        .success();

    let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");

    cli_cmd(data_dir)
        .args(["request", "show", &request_id])
        .assert()
        .success()
        .stdout(contains(format!("\"id\": \"{request_id}\"")))
        .stdout(contains("\"workspaceId\": \"wk_test\""));

    cli_cmd(data_dir)
        .args(["request", "delete", &request_id, "--yes"])
        .assert()
        .success()
        .stdout(contains(format!("Deleted request: {request_id}")));

    assert!(query_manager(data_dir).connect().get_http_request(&request_id).is_err());
}

#[test]
fn delete_without_yes_fails_in_non_interactive_mode() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();
    seed_workspace(data_dir, "wk_test");
    seed_request(data_dir, "wk_test", "rq_seed_delete_noninteractive");

    cli_cmd(data_dir)
        .args(["request", "delete", "rq_seed_delete_noninteractive"])
        .assert()
        .failure()
        .code(1)
        .stderr(contains("Refusing to delete in non-interactive mode without --yes"));

    assert!(
        query_manager(data_dir).connect().get_http_request("rq_seed_delete_noninteractive").is_ok()
    );
}

#[test]
fn json_create_and_update_merge_patch_round_trip() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();
    seed_workspace(data_dir, "wk_test");

    let create_assert = cli_cmd(data_dir)
        .args([
            "request",
            "create",
            r#"{"workspaceId":"wk_test","name":"Json Request","url":"https://example.com"}"#,
        ])
        .assert()
        .success();
    let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");

    cli_cmd(data_dir)
        .args([
            "request",
            "update",
            &format!(r#"{{"id":"{}","name":"Renamed Request"}}"#, request_id),
        ])
        .assert()
        .success()
        .stdout(contains(format!("Updated request: {request_id}")));

    cli_cmd(data_dir)
        .args(["request", "show", &request_id])
        .assert()
        .success()
        .stdout(contains("\"name\": \"Renamed Request\""))
        .stdout(contains("\"url\": \"https://example.com\""));
}

#[test]
fn update_requires_id_in_json_payload() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();

    cli_cmd(data_dir)
        .args(["request", "update", r#"{"name":"No ID"}"#])
        .assert()
        .failure()
        .stderr(contains("request update requires a non-empty \"id\" field"));
}
crates-cli/yaak-cli/tests/workspace_commands.rs (new file, 59 lines)
@@ -0,0 +1,59 @@
mod common;

use common::{cli_cmd, parse_created_id, query_manager};
use predicates::str::contains;
use tempfile::TempDir;

#[test]
fn create_show_delete_round_trip() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();

    let create_assert =
        cli_cmd(data_dir).args(["workspace", "create", "--name", "WS One"]).assert().success();
    let workspace_id = parse_created_id(&create_assert.get_output().stdout, "workspace create");

    cli_cmd(data_dir)
        .args(["workspace", "show", &workspace_id])
        .assert()
        .success()
        .stdout(contains(format!("\"id\": \"{workspace_id}\"")))
        .stdout(contains("\"name\": \"WS One\""));

    cli_cmd(data_dir)
        .args(["workspace", "delete", &workspace_id, "--yes"])
        .assert()
        .success()
        .stdout(contains(format!("Deleted workspace: {workspace_id}")));

    assert!(query_manager(data_dir).connect().get_workspace(&workspace_id).is_err());
}

#[test]
fn json_create_and_update_merge_patch_round_trip() {
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();

    let create_assert = cli_cmd(data_dir)
        .args(["workspace", "create", r#"{"name":"Json Workspace"}"#])
        .assert()
        .success();
    let workspace_id = parse_created_id(&create_assert.get_output().stdout, "workspace create");

    cli_cmd(data_dir)
        .args([
            "workspace",
            "update",
            &format!(r#"{{"id":"{}","description":"Updated via JSON"}}"#, workspace_id),
        ])
        .assert()
        .success()
        .stdout(contains(format!("Updated workspace: {workspace_id}")));

    cli_cmd(data_dir)
        .args(["workspace", "show", &workspace_id])
        .assert()
        .success()
        .stdout(contains("\"name\": \"Json Workspace\""))
        .stdout(contains("\"description\": \"Updated via JSON\""));
}
@@ -57,6 +57,7 @@ url = "2"
tokio-util = { version = "0.7", features = ["codec"] }
ts-rs = { workspace = true }
uuid = "1.12.1"
yaak-api = { workspace = true }
yaak-common = { workspace = true }
yaak-tauri-utils = { workspace = true }
yaak-core = { workspace = true }

@@ -36,7 +36,7 @@ pub enum Error {
    PluginError(#[from] yaak_plugins::error::Error),

    #[error(transparent)]
    TauriUtilsError(#[from] yaak_tauri_utils::error::Error),
    ApiError(#[from] yaak_api::Error),

    #[error(transparent)]
    ClipboardError(#[from] tauri_plugin_clipboard_manager::Error),

@@ -1095,13 +1095,9 @@ async fn cmd_get_http_authentication_config<R: Runtime>(

    // Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
    let values_json: serde_json::Value = serde_json::to_value(&values)?;
    let rendered_json = render_json_value(
        values_json,
        environment_chain,
        &cb,
        &RenderOptions::return_empty(),
    )
    .await?;
    let rendered_json =
        render_json_value(values_json, environment_chain, &cb, &RenderOptions::return_empty())
            .await?;

    // Convert back to HashMap<String, JsonPrimitive>
    let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;

@@ -3,6 +3,9 @@
//! This module provides the Tauri plugin initialization and extension traits
//! that allow accessing QueryManager and BlobManager from Tauri's Manager types.

use chrono::Utc;
use log::error;
use std::time::Duration;
use tauri::plugin::TauriPlugin;
use tauri::{Emitter, Manager, Runtime, State};
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
@@ -13,6 +16,74 @@ use yaak_models::models::{AnyModel, GraphQlIntrospection, GrpcEvent, Settings, W
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;

const MODEL_CHANGES_RETENTION_HOURS: i64 = 1;
const MODEL_CHANGES_POLL_INTERVAL_MS: u64 = 250;
const MODEL_CHANGES_POLL_BATCH_SIZE: usize = 200;

struct ModelChangeCursor {
    created_at: String,
    id: i64,
}

impl ModelChangeCursor {
    fn from_launch_time() -> Self {
        Self {
            created_at: Utc::now().naive_utc().format("%Y-%m-%d %H:%M:%S%.3f").to_string(),
            id: 0,
        }
    }
}

fn drain_model_changes_batch<R: Runtime>(
    query_manager: &QueryManager,
    app_handle: &tauri::AppHandle<R>,
    cursor: &mut ModelChangeCursor,
) -> bool {
    let changes = match query_manager.connect().list_model_changes_since(
        &cursor.created_at,
        cursor.id,
        MODEL_CHANGES_POLL_BATCH_SIZE,
    ) {
        Ok(changes) => changes,
        Err(err) => {
            error!("Failed to poll model_changes rows: {err:?}");
            return false;
        }
    };

    if changes.is_empty() {
        return false;
    }

    let fetched_count = changes.len();
    for change in changes {
        cursor.created_at = change.created_at;
        cursor.id = change.id;

        // Local window-originated writes are forwarded immediately from the
        // in-memory model event channel.
        if matches!(change.payload.update_source, UpdateSource::Window { .. }) {
            continue;
        }
        if let Err(err) = app_handle.emit("model_write", change.payload) {
            error!("Failed to emit model_write event: {err:?}");
        }
    }

    fetched_count == MODEL_CHANGES_POLL_BATCH_SIZE
}

async fn run_model_change_poller<R: Runtime>(
    query_manager: QueryManager,
    app_handle: tauri::AppHandle<R>,
    mut cursor: ModelChangeCursor,
) {
    loop {
        while drain_model_changes_batch(&query_manager, &app_handle, &mut cursor) {}
        tokio::time::sleep(Duration::from_millis(MODEL_CHANGES_POLL_INTERVAL_MS)).await;
    }
}

/// Extension trait for accessing the QueryManager from Tauri Manager types.
pub trait QueryManagerExt<'a, R> {
    fn db_manager(&'a self) -> State<'a, QueryManager>;
@@ -262,14 +333,37 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
            }
        };

        let db = query_manager.connect();
        if let Err(err) = db.prune_model_changes_older_than_hours(MODEL_CHANGES_RETENTION_HOURS)
        {
            error!("Failed to prune model_changes rows on startup: {err:?}");
        }
        // Only stream writes that happen after this app launch.
        let cursor = ModelChangeCursor::from_launch_time();

        let poll_query_manager = query_manager.clone();

        app_handle.manage(query_manager);
        app_handle.manage(blob_manager);

        // Forward model change events to the frontend
        let app_handle = app_handle.clone();
        // Poll model_changes so all writers (including external CLI processes) update the UI.
        let app_handle_poll = app_handle.clone();
        let query_manager = poll_query_manager;
        tauri::async_runtime::spawn(async move {
            run_model_change_poller(query_manager, app_handle_poll, cursor).await;
        });

        // Fast path for local app writes initiated by frontend windows. This keeps the
        // current sync-model UX snappy, while DB polling handles external writers (CLI).
        let app_handle_local = app_handle.clone();
        tauri::async_runtime::spawn(async move {
            for payload in rx {
                app_handle.emit("model_write", payload).unwrap();
                if !matches!(payload.update_source, UpdateSource::Window { .. }) {
                    continue;
                }
                if let Err(err) = app_handle_local.emit("model_write", payload) {
                    error!("Failed to emit local model_write event: {err:?}");
                }
            }
        });

@@ -8,9 +8,9 @@ use serde::{Deserialize, Serialize};
use std::time::Instant;
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
use ts_rs::TS;
use yaak_api::yaak_api_client;
use yaak_common::platform::get_os_str;
use yaak_models::util::UpdateSource;
use yaak_tauri_utils::api_client::yaak_api_client;

// Check for updates every hour
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;
@@ -101,7 +101,8 @@ impl YaakNotifier {
        let license_check = "disabled".to_string();

        let launch_info = get_or_upsert_launch_info(app_handle);
        let req = yaak_api_client(app_handle)?
        let app_version = app_handle.package_info().version.to_string();
        let req = yaak_api_client(&app_version)?
            .request(Method::GET, "https://notify.yaak.app/notifications")
            .query(&[
                ("version", &launch_info.current_version),

@@ -21,6 +21,7 @@ use tauri::{
};
use tokio::sync::Mutex;
use ts_rs::TS;
use yaak_api::yaak_api_client;
use yaak_models::models::Plugin;
use yaak_models::util::UpdateSource;
use yaak_plugins::api::{
@@ -31,7 +32,6 @@ use yaak_plugins::events::{Color, Icon, PluginContext, ShowToastRequest};
use yaak_plugins::install::{delete_and_uninstall, download_and_install};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::plugin_meta::get_plugin_meta;
use yaak_tauri_utils::api_client::yaak_api_client;

static EXITING: AtomicBool = AtomicBool::new(false);

@@ -72,7 +72,8 @@ impl PluginUpdater {

        info!("Checking for plugin updates");

        let http_client = yaak_api_client(window.app_handle())?;
        let app_version = window.app_handle().package_info().version.to_string();
        let http_client = yaak_api_client(&app_version)?;
        let plugins = window.app_handle().db().list_plugins()?;
        let updates = check_plugin_updates(&http_client, plugins.clone()).await?;

@@ -136,7 +137,8 @@ pub async fn cmd_plugins_search<R: Runtime>(
    app_handle: AppHandle<R>,
    query: &str,
) -> Result<PluginSearchResponse> {
    let http_client = yaak_api_client(&app_handle)?;
    let app_version = app_handle.package_info().version.to_string();
    let http_client = yaak_api_client(&app_version)?;
    Ok(search_plugins(&http_client, query).await?)
}

@@ -147,7 +149,8 @@ pub async fn cmd_plugins_install<R: Runtime>(
    version: Option<String>,
) -> Result<()> {
    let plugin_manager = Arc::new((*window.state::<PluginManager>()).clone());
    let http_client = yaak_api_client(window.app_handle())?;
    let app_version = window.app_handle().package_info().version.to_string();
    let http_client = yaak_api_client(&app_version)?;
    let query_manager = window.state::<yaak_models::query_manager::QueryManager>();
    let plugin_context = window.plugin_context();
    download_and_install(
@@ -177,7 +180,8 @@ pub async fn cmd_plugins_uninstall<R: Runtime>(
pub async fn cmd_plugins_updates<R: Runtime>(
    app_handle: AppHandle<R>,
) -> Result<PluginUpdatesResponse> {
    let http_client = yaak_api_client(&app_handle)?;
    let app_version = app_handle.package_info().version.to_string();
    let http_client = yaak_api_client(&app_version)?;
    let plugins = app_handle.db().list_plugins()?;
    Ok(check_plugin_updates(&http_client, plugins).await?)
}
@@ -186,7 +190,8 @@ pub async fn cmd_plugins_updates<R: Runtime>(
pub async fn cmd_plugins_update_all<R: Runtime>(
    window: WebviewWindow<R>,
) -> Result<Vec<PluginNameVersion>> {
    let http_client = yaak_api_client(window.app_handle())?;
    let app_version = window.app_handle().package_info().version.to_string();
    let http_client = yaak_api_client(&app_version)?;
    let plugins = window.db().list_plugins()?;

    // Get list of available updates (already filtered to only registry plugins)

@@ -15,6 +15,9 @@ use ts_rs::TS;
use yaak_models::util::generate_id;
use yaak_plugins::manager::PluginManager;

use url::Url;
use yaak_api::get_system_proxy_url;

use crate::error::Error::GenericError;
use crate::is_dev;

@@ -87,8 +90,13 @@ impl YaakUpdater {
        info!("Checking for updates mode={} autodl={}", mode, auto_download);

        let w = window.clone();
        let update_check_result = w
            .updater_builder()
        let mut updater_builder = w.updater_builder();
        if let Some(proxy_url) = get_system_proxy_url() {
            if let Ok(url) = Url::parse(&proxy_url) {
                updater_builder = updater_builder.proxy(url);
            }
        }
        let update_check_result = updater_builder
            .on_before_exit(move || {
                // Kill plugin manager before exit or NSIS installer will fail to replace sidecar
                // while it's running.

@@ -8,11 +8,11 @@ use std::fs;
use std::sync::Arc;
use tauri::{AppHandle, Emitter, Manager, Runtime, Url};
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogKind};
use yaak_api::yaak_api_client;
use yaak_models::util::generate_id;
use yaak_plugins::events::{Color, ShowToastRequest};
use yaak_plugins::install::download_and_install;
use yaak_plugins::manager::PluginManager;
use yaak_tauri_utils::api_client::yaak_api_client;

pub(crate) async fn handle_deep_link<R: Runtime>(
    app_handle: &AppHandle<R>,
@@ -46,7 +46,8 @@ pub(crate) async fn handle_deep_link<R: Runtime>(

    let plugin_manager = Arc::new((*window.state::<PluginManager>()).clone());
    let query_manager = app_handle.db_manager();
    let http_client = yaak_api_client(app_handle)?;
    let app_version = app_handle.package_info().version.to_string();
    let http_client = yaak_api_client(&app_version)?;
    let plugin_context = window.plugin_context();
    let pv = download_and_install(
        plugin_manager,
@@ -86,7 +87,8 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
        return Ok(());
    }

    let resp = yaak_api_client(app_handle)?.get(file_url).send().await?;
    let app_version = app_handle.package_info().version.to_string();
    let resp = yaak_api_client(&app_version)?.get(file_url).send().await?;
    let json = resp.bytes().await?;
    let p = app_handle
        .path()

@@ -153,11 +153,8 @@ pub fn app_menu<R: Runtime>(app_handle: &AppHandle<R>) -> tauri::Result<Menu<R>>
            .build(app_handle)?,
        &MenuItemBuilder::with_id("dev.reset_size".to_string(), "Reset Size")
            .build(app_handle)?,
        &MenuItemBuilder::with_id(
            "dev.reset_size_16x9".to_string(),
            "Resize to 16x9",
        )
        .build(app_handle)?,
        &MenuItemBuilder::with_id("dev.reset_size_16x9".to_string(), "Resize to 16x9")
            .build(app_handle)?,
        &MenuItemBuilder::with_id(
            "dev.reset_size_16x10".to_string(),
            "Resize to 16x10",

@@ -16,7 +16,7 @@ thiserror = { workspace = true }
ts-rs = { workspace = true }
yaak-common = { workspace = true }
yaak-models = { workspace = true }
yaak-tauri-utils = { workspace = true }
yaak-api = { workspace = true }

[build-dependencies]
tauri-plugin = { workspace = true, features = ["build"] }

@@ -16,7 +16,7 @@ pub enum Error {
    ModelError(#[from] yaak_models::error::Error),

    #[error(transparent)]
    TauriUtilsError(#[from] yaak_tauri_utils::error::Error),
    ApiError(#[from] yaak_api::Error),

    #[error("Internal server error")]
    ServerError,

@@ -7,11 +7,11 @@ use std::ops::Add;
use std::time::Duration;
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow, is_dev};
use ts_rs::TS;
use yaak_api::yaak_api_client;
use yaak_common::platform::get_os_str;
use yaak_models::db_context::DbContext;
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
use yaak_tauri_utils::api_client::yaak_api_client;

/// Extension trait for accessing the QueryManager from Tauri Manager types.
/// This is needed temporarily until all crates are refactored to not use Tauri.
@@ -118,11 +118,12 @@ pub async fn activate_license<R: Runtime>(
    license_key: &str,
) -> Result<()> {
    info!("Activating license {}", license_key);
    let client = reqwest::Client::new();
    let app_version = window.app_handle().package_info().version.to_string();
    let client = yaak_api_client(&app_version)?;
    let payload = ActivateLicenseRequestPayload {
        license_key: license_key.to_string(),
        app_platform: get_os_str().to_string(),
        app_version: window.app_handle().package_info().version.to_string(),
        app_version,
    };
    let response = client.post(build_url("/licenses/activate")).json(&payload).send().await?;

@@ -155,12 +156,11 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
    let app_handle = window.app_handle();
    let activation_id = get_activation_id(app_handle).await;

    let client = reqwest::Client::new();
    let app_version = window.app_handle().package_info().version.to_string();
    let client = yaak_api_client(&app_version)?;
    let path = format!("/licenses/activations/{}/deactivate", activation_id);
    let payload = DeactivateLicenseRequestPayload {
        app_platform: get_os_str().to_string(),
        app_version: window.app_handle().package_info().version.to_string(),
    };
    let payload =
        DeactivateLicenseRequestPayload { app_platform: get_os_str().to_string(), app_version };
    let response = client.post(build_url(&path)).json(&payload).send().await?;

    if response.status().is_client_error() {
@@ -186,10 +186,9 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
}

pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<LicenseCheckStatus> {
    let payload = CheckActivationRequestPayload {
        app_platform: get_os_str().to_string(),
        app_version: window.package_info().version.to_string(),
    };
    let app_version = window.app_handle().package_info().version.to_string();
    let payload =
        CheckActivationRequestPayload { app_platform: get_os_str().to_string(), app_version };
    let activation_id = get_activation_id(window.app_handle()).await;

    let settings = window.db().get_settings();
@@ -204,7 +203,7 @@ pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<Lice
        (true, _) => {
            info!("Checking license activation");
            // A license has been activated, so let's check the license server
            let client = yaak_api_client(window.app_handle())?;
            let client = yaak_api_client(&payload.app_version)?;
            let path = format!("/licenses/activations/{activation_id}/check-v2");
            let response = client.post(build_url(&path)).json(&payload).send().await?;

@@ -6,8 +6,4 @@ publish = false

[dependencies]
tauri = { workspace = true }
reqwest = { workspace = true, features = ["gzip"] }
thiserror = { workspace = true }
serde = { workspace = true, features = ["derive"] }
regex = "1.11.0"
yaak-common = { workspace = true }

@@ -1,24 +0,0 @@
use crate::error::Result;
use reqwest::Client;
use std::time::Duration;
use tauri::http::{HeaderMap, HeaderValue};
use tauri::{AppHandle, Runtime};
use yaak_common::platform::{get_ua_arch, get_ua_platform};

pub fn yaak_api_client<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Client> {
    let platform = get_ua_platform();
    let version = app_handle.package_info().version.clone();
    let arch = get_ua_arch();
    let ua = format!("Yaak/{version} ({platform}; {arch})");
    let mut default_headers = HeaderMap::new();
    default_headers.insert("Accept", HeaderValue::from_str("application/json").unwrap());

    let client = reqwest::ClientBuilder::new()
        .timeout(Duration::from_secs(20))
        .default_headers(default_headers)
        .gzip(true)
        .user_agent(ua)
        .build()?;

    Ok(client)
}
@@ -1,19 +0,0 @@
use serde::{Serialize, Serializer};
use thiserror::Error;

#[derive(Error, Debug)]
pub enum Error {
    #[error(transparent)]
    ReqwestError(#[from] reqwest::Error),
}

impl Serialize for Error {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.to_string().as_ref())
    }
}

pub type Result<T> = std::result::Result<T, Error>;
@@ -1,3 +1 @@
pub mod api_client;
pub mod error;
pub mod window;

crates/yaak-api/Cargo.toml (new file, 12 lines)
@@ -0,0 +1,12 @@
[package]
name = "yaak-api"
version = "0.1.0"
edition = "2024"
publish = false

[dependencies]
log = { workspace = true }
reqwest = { workspace = true, features = ["gzip"] }
sysproxy = "0.3"
thiserror = { workspace = true }
yaak-common = { workspace = true }
crates/yaak-api/src/error.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
use thiserror::Error;

#[derive(Error, Debug)]
pub enum Error {
    #[error(transparent)]
    ReqwestError(#[from] reqwest::Error),
}

pub type Result<T> = std::result::Result<T, Error>;
crates/yaak-api/src/lib.rs (new file, 70 lines)
@@ -0,0 +1,70 @@
mod error;

pub use error::{Error, Result};

use log::{debug, warn};
use reqwest::Client;
use reqwest::header::{HeaderMap, HeaderValue};
use std::time::Duration;
use yaak_common::platform::{get_ua_arch, get_ua_platform};

/// Build a reqwest Client configured for Yaak's own API calls.
///
/// Includes a custom User-Agent, JSON accept header, 20s timeout, gzip,
/// and automatic OS-level proxy detection via sysproxy.
pub fn yaak_api_client(version: &str) -> Result<Client> {
    let platform = get_ua_platform();
    let arch = get_ua_arch();
    let ua = format!("Yaak/{version} ({platform}; {arch})");

    let mut default_headers = HeaderMap::new();
    default_headers.insert("Accept", HeaderValue::from_str("application/json").unwrap());

    let mut builder = reqwest::ClientBuilder::new()
        .timeout(Duration::from_secs(20))
        .default_headers(default_headers)
        .gzip(true)
        .user_agent(ua);

    if let Some(sys) = get_enabled_system_proxy() {
        let proxy_url = format!("http://{}:{}", sys.host, sys.port);
        match reqwest::Proxy::all(&proxy_url) {
            Ok(p) => {
                let p = if !sys.bypass.is_empty() {
                    p.no_proxy(reqwest::NoProxy::from_string(&sys.bypass))
                } else {
                    p
                };
                builder = builder.proxy(p);
            }
            Err(e) => {
                warn!("Failed to configure system proxy: {e}");
            }
        }
    }

    Ok(builder.build()?)
}

/// Returns the system proxy URL if one is enabled, e.g. `http://host:port`.
pub fn get_system_proxy_url() -> Option<String> {
    let sys = get_enabled_system_proxy()?;
    Some(format!("http://{}:{}", sys.host, sys.port))
}

fn get_enabled_system_proxy() -> Option<sysproxy::Sysproxy> {
    match sysproxy::Sysproxy::get_system_proxy() {
        Ok(sys) if sys.enable => {
            debug!("Detected system proxy: http://{}:{}", sys.host, sys.port);
            Some(sys)
        }
        Ok(_) => {
            debug!("System proxy detected but not enabled");
            None
        }
        Err(e) => {
            debug!("Could not detect system proxy: {e}");
            None
        }
    }
}
@@ -44,43 +44,65 @@ pub async fn git_pull(dir: &Path) -> Result<PullResult> {
        (branch_name, remote_name, remote_url)
    };

    let out = new_binary_command(dir)
    // Step 1: fetch the specific branch
    // NOTE: We use fetch + merge instead of `git pull` to avoid conflicts with
    // global git config (e.g. pull.ff=only) and the background fetch --all.
    let fetch_out = new_binary_command(dir)
        .await?
        .args(["pull", &remote_name, &branch_name])
        .args(["fetch", &remote_name, &branch_name])
        .env("GIT_TERMINAL_PROMPT", "0")
        .output()
        .await
        .map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
        .map_err(|e| GenericError(format!("failed to run git fetch: {e}")))?;

    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    let combined = stdout + stderr;
    let fetch_stdout = String::from_utf8_lossy(&fetch_out.stdout);
    let fetch_stderr = String::from_utf8_lossy(&fetch_out.stderr);
    let fetch_combined = format!("{fetch_stdout}{fetch_stderr}");

    info!("Pulled status={} {combined}", out.status);
    info!("Fetched status={} {fetch_combined}", fetch_out.status);

    if combined.to_lowercase().contains("could not read") {
    if fetch_combined.to_lowercase().contains("could not read") {
        return Ok(PullResult::NeedsCredentials { url: remote_url.to_string(), error: None });
    }

    if combined.to_lowercase().contains("unable to access") {
    if fetch_combined.to_lowercase().contains("unable to access") {
        return Ok(PullResult::NeedsCredentials {
            url: remote_url.to_string(),
            error: Some(combined.to_string()),
            error: Some(fetch_combined.to_string()),
        });
    }

    if !out.status.success() {
        let combined_lower = combined.to_lowercase();
        if combined_lower.contains("cannot fast-forward")
            || combined_lower.contains("not possible to fast-forward")
            || combined_lower.contains("diverged")
    if !fetch_out.status.success() {
        return Err(GenericError(format!("Failed to fetch: {fetch_combined}")));
    }

    // Step 2: merge the fetched branch
    let ref_name = format!("{}/{}", remote_name, branch_name);
    let merge_out = new_binary_command(dir)
        .await?
        .args(["merge", "--ff-only", &ref_name])
        .output()
        .await
        .map_err(|e| GenericError(format!("failed to run git merge: {e}")))?;

    let merge_stdout = String::from_utf8_lossy(&merge_out.stdout);
    let merge_stderr = String::from_utf8_lossy(&merge_out.stderr);
    let merge_combined = format!("{merge_stdout}{merge_stderr}");

    info!("Merged status={} {merge_combined}", merge_out.status);

    if !merge_out.status.success() {
        let merge_lower = merge_combined.to_lowercase();
        if merge_lower.contains("cannot fast-forward")
            || merge_lower.contains("not possible to fast-forward")
            || merge_lower.contains("diverged")
        {
            return Ok(PullResult::Diverged { remote: remote_name, branch: branch_name });
        }
        return Err(GenericError(format!("Failed to pull {combined}")));
        return Err(GenericError(format!("Failed to merge: {merge_combined}")));
    }

    if combined.to_lowercase().contains("up to date") {
    if merge_combined.to_lowercase().contains("up to date") {
        return Ok(PullResult::UpToDate);
    }

@@ -74,15 +74,31 @@ impl Display for HttpResponseEvent {
            };
            write!(f, "* Redirect {} -> {} ({})", status, url, behavior_str)
        }
        HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
        HttpResponseEvent::SendUrl {
            method,
            scheme,
            username,
            password,
            host,
            port,
            path,
            query,
            fragment,
        } => {
            let auth_str = if username.is_empty() && password.is_empty() {
                String::new()
            } else {
                format!("{}:{}@", username, password)
            };
            let query_str = if query.is_empty() { String::new() } else { format!("?{}", query) };
            let fragment_str = if fragment.is_empty() { String::new() } else { format!("#{}", fragment) };
            write!(f, "> {} {}://{}{}:{}{}{}{}", method, scheme, auth_str, host, port, path, query_str, fragment_str)
            let query_str =
                if query.is_empty() { String::new() } else { format!("?{}", query) };
            let fragment_str =
                if fragment.is_empty() { String::new() } else { format!("#{}", fragment) };
            write!(
                f,
                "> {} {}://{}{}:{}{}{}{}",
                method, scheme, auth_str, host, port, path, query_str, fragment_str
            )
        }
        HttpResponseEvent::ReceiveUrl { version, status } => {
            write!(f, "< {} {}", version_to_str(version), status)
@@ -122,7 +138,17 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
                    RedirectBehavior::DropBody => "drop_body".to_string(),
                },
            },
            HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
            HttpResponseEvent::SendUrl {
                method,
                scheme,
                username,
                password,
                host,
                port,
                path,
                query,
                fragment,
            } => {
                D::SendUrl { method, scheme, username, password, host, port, path, query, fragment }
            }
            HttpResponseEvent::ReceiveUrl { version, status } => {
@@ -546,7 +572,10 @@ impl<S> SizedBody<S> {

impl<S> HttpBody for SizedBody<S>
where
    S: futures_util::Stream<Item = std::result::Result<Bytes, std::io::Error>> + Send + Unpin + 'static,
    S: futures_util::Stream<Item = std::result::Result<Bytes, std::io::Error>>
        + Send
        + Unpin
        + 'static,
{
    type Data = Bytes;
    type Error = std::io::Error;

@@ -37,10 +37,9 @@ impl From<SendableBodyWithMeta> for SendableBody {
    fn from(value: SendableBodyWithMeta) -> Self {
        match value {
            SendableBodyWithMeta::Bytes(b) => SendableBody::Bytes(b),
            SendableBodyWithMeta::Stream { data, content_length } => SendableBody::Stream {
                data,
                content_length: content_length.map(|l| l as u64),
            },
            SendableBodyWithMeta::Stream { data, content_length } => {
                SendableBody::Stream { data, content_length: content_length.map(|l| l as u64) }
            }
        }
    }
}

@@ -0,0 +1,12 @@
CREATE TABLE model_changes
(
    id            INTEGER PRIMARY KEY AUTOINCREMENT,
    model         TEXT NOT NULL,
    model_id      TEXT NOT NULL,
    change        TEXT NOT NULL,
    update_source TEXT NOT NULL,
    payload       TEXT NOT NULL,
    created_at    DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL
);

CREATE INDEX idx_model_changes_created_at ON model_changes (created_at);
@@ -3,8 +3,7 @@ use crate::error::Error::ModelNotFound;
use crate::error::Result;
use crate::models::{AnyModel, UpsertModelInfo};
use crate::util::{ModelChangeEvent, ModelPayload, UpdateSource};
use log::error;
use rusqlite::OptionalExtension;
use rusqlite::{OptionalExtension, params};
use sea_query::{
    Asterisk, Expr, Func, IntoColumnRef, IntoIden, IntoTableRef, OnConflict, Query, SimpleExpr,
    SqliteQueryBuilder,
@@ -14,7 +13,7 @@ use std::fmt::Debug;
use std::sync::mpsc;

pub struct DbContext<'a> {
    pub(crate) events_tx: mpsc::Sender<ModelPayload>,
    pub(crate) _events_tx: mpsc::Sender<ModelPayload>,
    pub(crate) conn: ConnectionOrTx<'a>,
}

@@ -180,9 +179,8 @@ impl<'a> DbContext<'a> {
            change: ModelChangeEvent::Upsert { created },
        };

        if let Err(e) = self.events_tx.send(payload.clone()) {
            error!("Failed to send model change {source:?}: {e:?}");
        }
        self.record_model_change(&payload)?;
        let _ = self._events_tx.send(payload);

        Ok(m)
    }
@@ -203,9 +201,31 @@
            change: ModelChangeEvent::Delete,
        };

        if let Err(e) = self.events_tx.send(payload) {
            error!("Failed to send model change {source:?}: {e:?}");
        }
        self.record_model_change(&payload)?;
        let _ = self._events_tx.send(payload);

        Ok(m.clone())
    }

    fn record_model_change(&self, payload: &ModelPayload) -> Result<()> {
        let payload_json = serde_json::to_string(payload)?;
        let source_json = serde_json::to_string(&payload.update_source)?;
        let change_json = serde_json::to_string(&payload.change)?;

        self.conn.resolve().execute(
            r#"
                INSERT INTO model_changes (model, model_id, change, update_source, payload)
                VALUES (?1, ?2, ?3, ?4, ?5)
            "#,
            params![
                payload.model.model(),
                payload.model.id(),
                change_json,
                source_json,
                payload_json,
            ],
        )?;

        Ok(())
    }
}

@@ -2347,6 +2347,15 @@ macro_rules! define_any_model {
                )*
            }
        }

        #[inline]
        pub fn model(&self) -> &str {
            match self {
                $(
                    AnyModel::$type(inner) => &inner.model,
                )*
            }
        }
    }

    $(
@@ -2400,30 +2409,29 @@ impl<'de> Deserialize<'de> for AnyModel {
    {
        let value = Value::deserialize(deserializer)?;
        let model = value.as_object().unwrap();
        use AnyModel::*;
        use serde_json::from_value as fv;

        let model = match model.get("model") {
            Some(m) if m == "cookie_jar" => AnyModel::CookieJar(fv(value).unwrap()),
            Some(m) if m == "environment" => AnyModel::Environment(fv(value).unwrap()),
            Some(m) if m == "folder" => AnyModel::Folder(fv(value).unwrap()),
            Some(m) if m == "graphql_introspection" => {
                AnyModel::GraphQlIntrospection(fv(value).unwrap())
            }
            Some(m) if m == "grpc_connection" => AnyModel::GrpcConnection(fv(value).unwrap()),
            Some(m) if m == "grpc_event" => AnyModel::GrpcEvent(fv(value).unwrap()),
            Some(m) if m == "grpc_request" => AnyModel::GrpcRequest(fv(value).unwrap()),
            Some(m) if m == "http_request" => AnyModel::HttpRequest(fv(value).unwrap()),
            Some(m) if m == "http_response" => AnyModel::HttpResponse(fv(value).unwrap()),
            Some(m) if m == "key_value" => AnyModel::KeyValue(fv(value).unwrap()),
            Some(m) if m == "plugin" => AnyModel::Plugin(fv(value).unwrap()),
            Some(m) if m == "settings" => AnyModel::Settings(fv(value).unwrap()),
            Some(m) if m == "websocket_connection" => {
                AnyModel::WebsocketConnection(fv(value).unwrap())
            }
            Some(m) if m == "websocket_event" => AnyModel::WebsocketEvent(fv(value).unwrap()),
            Some(m) if m == "websocket_request" => AnyModel::WebsocketRequest(fv(value).unwrap()),
            Some(m) if m == "workspace" => AnyModel::Workspace(fv(value).unwrap()),
            Some(m) if m == "workspace_meta" => AnyModel::WorkspaceMeta(fv(value).unwrap()),
            Some(m) if m == "cookie_jar" => CookieJar(fv(value).unwrap()),
            Some(m) if m == "environment" => Environment(fv(value).unwrap()),
            Some(m) if m == "folder" => Folder(fv(value).unwrap()),
            Some(m) if m == "graphql_introspection" => GraphQlIntrospection(fv(value).unwrap()),
            Some(m) if m == "grpc_connection" => GrpcConnection(fv(value).unwrap()),
            Some(m) if m == "grpc_event" => GrpcEvent(fv(value).unwrap()),
            Some(m) if m == "grpc_request" => GrpcRequest(fv(value).unwrap()),
            Some(m) if m == "http_request" => HttpRequest(fv(value).unwrap()),
            Some(m) if m == "http_response" => HttpResponse(fv(value).unwrap()),
            Some(m) if m == "http_response_event" => HttpResponseEvent(fv(value).unwrap()),
            Some(m) if m == "key_value" => KeyValue(fv(value).unwrap()),
            Some(m) if m == "plugin" => Plugin(fv(value).unwrap()),
            Some(m) if m == "settings" => Settings(fv(value).unwrap()),
            Some(m) if m == "sync_state" => SyncState(fv(value).unwrap()),
            Some(m) if m == "websocket_connection" => WebsocketConnection(fv(value).unwrap()),
            Some(m) if m == "websocket_event" => WebsocketEvent(fv(value).unwrap()),
            Some(m) if m == "websocket_request" => WebsocketRequest(fv(value).unwrap()),
            Some(m) if m == "workspace" => Workspace(fv(value).unwrap()),
            Some(m) if m == "workspace_meta" => WorkspaceMeta(fv(value).unwrap()),
            Some(m) => {
                return Err(serde::de::Error::custom(format!(
                    "Failed to deserialize AnyModel {}",

@@ -11,6 +11,7 @@ mod http_requests;
mod http_response_events;
mod http_responses;
mod key_values;
mod model_changes;
mod plugin_key_values;
mod plugins;
mod settings;
@@ -20,6 +21,7 @@ mod websocket_events;
mod websocket_requests;
mod workspace_metas;
pub mod workspaces;
pub use model_changes::PersistedModelChange;

const MAX_HISTORY_ITEMS: usize = 20;

crates/yaak-models/src/queries/model_changes.rs (new file, 289 lines)
@@ -0,0 +1,289 @@
use crate::db_context::DbContext;
use crate::error::Result;
use crate::util::ModelPayload;
use rusqlite::params;
use rusqlite::types::Type;

#[derive(Debug, Clone)]
pub struct PersistedModelChange {
    pub id: i64,
    pub created_at: String,
    pub payload: ModelPayload,
}

impl<'a> DbContext<'a> {
    pub fn list_model_changes_after(
        &self,
        after_id: i64,
        limit: usize,
    ) -> Result<Vec<PersistedModelChange>> {
        let mut stmt = self.conn.prepare(
            r#"
                SELECT id, created_at, payload
                FROM model_changes
                WHERE id > ?1
                ORDER BY id ASC
                LIMIT ?2
            "#,
        )?;

        let items = stmt.query_map(params![after_id, limit as i64], |row| {
            let id: i64 = row.get(0)?;
            let created_at: String = row.get(1)?;
            let payload_raw: String = row.get(2)?;
            let payload = serde_json::from_str::<ModelPayload>(&payload_raw).map_err(|e| {
                rusqlite::Error::FromSqlConversionFailure(2, Type::Text, Box::new(e))
            })?;
            Ok(PersistedModelChange { id, created_at, payload })
        })?;

        Ok(items.collect::<std::result::Result<Vec<_>, rusqlite::Error>>()?)
    }

    pub fn list_model_changes_since(
        &self,
        since_created_at: &str,
        since_id: i64,
        limit: usize,
    ) -> Result<Vec<PersistedModelChange>> {
        let mut stmt = self.conn.prepare(
            r#"
                SELECT id, created_at, payload
                FROM model_changes
                WHERE created_at > ?1
                   OR (created_at = ?1 AND id > ?2)
                ORDER BY created_at ASC, id ASC
                LIMIT ?3
            "#,
        )?;

        let items = stmt.query_map(params![since_created_at, since_id, limit as i64], |row| {
            let id: i64 = row.get(0)?;
            let created_at: String = row.get(1)?;
            let payload_raw: String = row.get(2)?;
            let payload = serde_json::from_str::<ModelPayload>(&payload_raw).map_err(|e| {
                rusqlite::Error::FromSqlConversionFailure(2, Type::Text, Box::new(e))
            })?;
            Ok(PersistedModelChange { id, created_at, payload })
        })?;

        Ok(items.collect::<std::result::Result<Vec<_>, rusqlite::Error>>()?)
    }

    pub fn prune_model_changes_older_than_days(&self, days: i64) -> Result<usize> {
        let offset = format!("-{days} days");
        Ok(self.conn.resolve().execute(
            r#"
                DELETE FROM model_changes
                WHERE created_at < STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', ?1)
            "#,
            params![offset],
        )?)
    }

    pub fn prune_model_changes_older_than_hours(&self, hours: i64) -> Result<usize> {
        let offset = format!("-{hours} hours");
        Ok(self.conn.resolve().execute(
            r#"
                DELETE FROM model_changes
                WHERE created_at < STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', ?1)
            "#,
            params![offset],
        )?)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::init_in_memory;
    use crate::models::Workspace;
    use crate::util::{ModelChangeEvent, UpdateSource};
    use serde_json::json;

    #[test]
    fn records_model_changes_for_upsert_and_delete() {
        let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
        let db = query_manager.connect();

        let workspace = db
            .upsert_workspace(
                &Workspace {
                    name: "Changes Test".to_string(),
                    setting_follow_redirects: true,
                    setting_validate_certificates: true,
                    ..Default::default()
                },
                &UpdateSource::Sync,
            )
            .expect("Failed to upsert workspace");

        let created_changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
        assert_eq!(created_changes.len(), 1);
        assert_eq!(created_changes[0].payload.model.id(), workspace.id);
        assert_eq!(created_changes[0].payload.model.model(), "workspace");
        assert!(matches!(
            created_changes[0].payload.change,
            ModelChangeEvent::Upsert { created: true }
        ));
        assert!(matches!(created_changes[0].payload.update_source, UpdateSource::Sync));

        db.delete_workspace_by_id(&workspace.id, &UpdateSource::Sync)
            .expect("Failed to delete workspace");

        let all_changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
        assert_eq!(all_changes.len(), 2);
        assert!(matches!(all_changes[1].payload.change, ModelChangeEvent::Delete));
        assert!(all_changes[1].id > all_changes[0].id);

        let changes_after_first = db
            .list_model_changes_after(all_changes[0].id, 10)
            .expect("Failed to list changes after cursor");
        assert_eq!(changes_after_first.len(), 1);
        assert!(matches!(changes_after_first[0].payload.change, ModelChangeEvent::Delete));
    }

    #[test]
    fn prunes_old_model_changes() {
        let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
        let db = query_manager.connect();

        db.upsert_workspace(
            &Workspace {
                name: "Prune Test".to_string(),
                setting_follow_redirects: true,
                setting_validate_certificates: true,
                ..Default::default()
            },
            &UpdateSource::Sync,
        )
        .expect("Failed to upsert workspace");

        let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
        assert_eq!(changes.len(), 1);

        db.conn
            .resolve()
            .execute(
                "UPDATE model_changes SET created_at = '2000-01-01 00:00:00.000' WHERE id = ?1",
                params![changes[0].id],
            )
            .expect("Failed to age model change row");

        let pruned =
            db.prune_model_changes_older_than_days(30).expect("Failed to prune model changes");
        assert_eq!(pruned, 1);
        assert!(db.list_model_changes_after(0, 10).expect("Failed to list changes").is_empty());
    }

    #[test]
    fn list_model_changes_since_uses_timestamp_with_id_tiebreaker() {
        let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
        let db = query_manager.connect();

        let workspace = db
            .upsert_workspace(
                &Workspace {
                    name: "Cursor Test".to_string(),
                    setting_follow_redirects: true,
                    setting_validate_certificates: true,
                    ..Default::default()
                },
                &UpdateSource::Sync,
            )
            .expect("Failed to upsert workspace");
        db.delete_workspace_by_id(&workspace.id, &UpdateSource::Sync)
            .expect("Failed to delete workspace");

        let all = db.list_model_changes_after(0, 10).expect("Failed to list changes");
        assert_eq!(all.len(), 2);

        let fixed_ts = "2026-02-16 00:00:00.000";
        db.conn
            .resolve()
            .execute("UPDATE model_changes SET created_at = ?1", params![fixed_ts])
            .expect("Failed to normalize timestamps");

        let after_first =
            db.list_model_changes_since(fixed_ts, all[0].id, 10).expect("Failed to query cursor");
        assert_eq!(after_first.len(), 1);
        assert_eq!(after_first[0].id, all[1].id);
    }

    #[test]
    fn prunes_old_model_changes_by_hours() {
        let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
        let db = query_manager.connect();

        db.upsert_workspace(
            &Workspace {
                name: "Prune Hour Test".to_string(),
                setting_follow_redirects: true,
                setting_validate_certificates: true,
                ..Default::default()
            },
            &UpdateSource::Sync,
        )
        .expect("Failed to upsert workspace");

        let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
        assert_eq!(changes.len(), 1);

        db.conn
            .resolve()
            .execute(
                "UPDATE model_changes SET created_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', '-2 hours') WHERE id = ?1",
                params![changes[0].id],
            )
            .expect("Failed to age model change row");

        let pruned =
            db.prune_model_changes_older_than_hours(1).expect("Failed to prune model changes");
        assert_eq!(pruned, 1);
    }

    #[test]
    fn list_model_changes_deserializes_http_response_event_payload() {
        let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
        let db = query_manager.connect();

        let payload = json!({
            "model": {
                "model": "http_response_event",
                "id": "re_test",
                "createdAt": "2026-02-16T21:01:34.809162",
                "updatedAt": "2026-02-16T21:01:34.809163",
                "workspaceId": "wk_test",
                "responseId": "rs_test",
                "event": {
                    "type": "info",
                    "message": "hello"
                }
            },
            "updateSource": { "type": "sync" },
            "change": { "type": "upsert", "created": false }
        });

        db.conn
            .resolve()
            .execute(
                r#"
                    INSERT INTO model_changes (model, model_id, change, update_source, payload)
                    VALUES (?1, ?2, ?3, ?4, ?5)
                "#,
                params![
                    "http_response_event",
                    "re_test",
                    r#"{"type":"upsert","created":false}"#,
                    r#"{"type":"sync"}"#,
                    payload.to_string(),
                ],
            )
            .expect("Failed to insert model change row");

        let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
        assert_eq!(changes.len(), 1);
        assert_eq!(changes[0].payload.model.model(), "http_response_event");
        assert_eq!(changes[0].payload.model.id(), "re_test");
    }
}
@@ -25,7 +25,7 @@ impl QueryManager {
            .expect("Failed to gain lock on DB")
            .get()
            .expect("Failed to get a new DB connection from the pool");
        DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) }
        DbContext { _events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) }
    }

    pub fn with_conn<F, T>(&self, func: F) -> T
@@ -39,8 +39,10 @@ impl QueryManager {
            .get()
            .expect("Failed to get new DB connection from the pool");

        let db_context =
            DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) };
        let db_context = DbContext {
            _events_tx: self.events_tx.clone(),
            conn: ConnectionOrTx::Connection(conn),
        };

        func(&db_context)
    }
@@ -62,8 +64,10 @@ impl QueryManager {
            .transaction_with_behavior(TransactionBehavior::Immediate)
            .expect("Failed to start DB transaction");

        let db_context =
            DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Transaction(&tx) };
        let db_context = DbContext {
            _events_tx: self.events_tx.clone(),
            conn: ConnectionOrTx::Transaction(&tx),
        };

        match func(&db_context) {
            Ok(val) => {

@@ -68,7 +68,9 @@ pub async fn start_nodejs_plugin_runtime(
    // Handle kill signal
    let mut kill_rx = kill_rx.clone();
    tokio::spawn(async move {
        kill_rx.wait_for(|b| *b == true).await.expect("Kill channel errored");
        if kill_rx.wait_for(|b| *b == true).await.is_err() {
            warn!("Kill channel closed before explicit shutdown; terminating plugin runtime");
        }
        info!("Killing plugin runtime");
        if let Err(e) = child.kill().await {
            warn!("Failed to kill plugin runtime: {e}");

@@ -13,5 +13,11 @@
    "build": "yaakcli build",
    "dev": "yaakcli dev",
    "test": "vitest --run tests"
  },
  "dependencies": {
    "jsonwebtoken": "^9.0.2"
  },
  "devDependencies": {
    "@types/jsonwebtoken": "^9.0.7"
  }
}
@@ -4,26 +4,16 @@ import type { AccessTokenRawResponse } from './store';

export async function fetchAccessToken(
  ctx: Context,
  {
    accessTokenUrl,
    scope,
    audience,
    params,
    grantType,
    credentialsInBody,
    clientId,
    clientSecret,
  }: {
  args: {
    clientId: string;
    clientSecret: string;
    grantType: string;
    accessTokenUrl: string;
    scope: string | null;
    audience: string | null;
    credentialsInBody: boolean;
    params: HttpUrlParameter[];
  },
  } & ({ clientAssertion: string } | { clientSecret: string; credentialsInBody: boolean }),
): Promise<AccessTokenRawResponse> {
  const { clientId, grantType, accessTokenUrl, scope, audience, params } = args;
  console.log('[oauth2] Getting access token', accessTokenUrl);
  const httpRequest: Partial<HttpRequest> = {
    method: 'POST',
@@ -34,7 +24,10 @@ export async function fetchAccessToken(
    },
    headers: [
      { name: 'User-Agent', value: 'yaak' },
      { name: 'Accept', value: 'application/x-www-form-urlencoded, application/json' },
      {
        name: 'Accept',
        value: 'application/x-www-form-urlencoded, application/json',
      },
      { name: 'Content-Type', value: 'application/x-www-form-urlencoded' },
    ],
  };
@@ -42,11 +35,24 @@ export async function fetchAccessToken(
  if (scope) httpRequest.body?.form.push({ name: 'scope', value: scope });
  if (audience) httpRequest.body?.form.push({ name: 'audience', value: audience });

  if (credentialsInBody) {
  if ('clientAssertion' in args) {
    httpRequest.body?.form.push({ name: 'client_id', value: clientId });
    httpRequest.body?.form.push({ name: 'client_secret', value: clientSecret });
    httpRequest.body?.form.push({
      name: 'client_assertion_type',
      value: 'urn:ietf:params:oauth:client-assertion-type:jwt-bearer',
    });
    httpRequest.body?.form.push({
      name: 'client_assertion',
      value: args.clientAssertion,
    });
  } else if (args.credentialsInBody) {
    httpRequest.body?.form.push({ name: 'client_id', value: clientId });
    httpRequest.body?.form.push({
      name: 'client_secret',
      value: args.clientSecret,
    });
  } else {
    const value = `Basic ${Buffer.from(`${clientId}:${clientSecret}`).toString('base64')}`;
    const value = `Basic ${Buffer.from(`${clientId}:${args.clientSecret}`).toString('base64')}`;
    httpRequest.headers?.push({ name: 'Authorization', value });
  }
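The second parameter is now an intersection of the shared fields with a discriminated union, and the body branches on `'clientAssertion' in args`. A minimal standalone sketch of that narrowing pattern (the `describeAuth` helper below is purely illustrative and not part of the plugin):

```typescript
// Illustration only: mirrors the union shape used by fetchAccessToken above.
type ClientAuth =
  | { clientAssertion: string }
  | { clientSecret: string; credentialsInBody: boolean };

function describeAuth(args: { clientId: string } & ClientAuth): string {
  if ('clientAssertion' in args) {
    // TypeScript narrows `args` to the assertion variant here
    return `client_assertion for ${args.clientId}`;
  }
  // ...and to the secret variant here
  return args.credentialsInBody
    ? `client_secret in body for ${args.clientId}`
    : `Basic auth header for ${args.clientId}`;
}
```

The same check is what lets the real function read `args.clientAssertion` in one branch and `args.clientSecret` in the other without casts.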
@@ -1,9 +1,99 @@
import { createPrivateKey, randomUUID } from 'node:crypto';
import type { Context } from '@yaakapp/api';
import jwt, { type Algorithm } from 'jsonwebtoken';
import { fetchAccessToken } from '../fetchAccessToken';
import type { TokenStoreArgs } from '../store';
import { getToken, storeToken } from '../store';
import { isTokenExpired } from '../util';

export const jwtAlgorithms = [
  'HS256',
  'HS384',
  'HS512',
  'RS256',
  'RS384',
  'RS512',
  'PS256',
  'PS384',
  'PS512',
  'ES256',
  'ES384',
  'ES512',
  'none',
] as const;

export const defaultJwtAlgorithm = jwtAlgorithms[0];

/**
 * Build a signed JWT for the client_assertion parameter (RFC 7523).
 *
 * The `secret` value is auto-detected as one of:
 * - **JWK** – a JSON string containing a private-key object (has a `kty` field).
 * - **PEM** – a string whose trimmed form starts with `-----`.
 * - **HMAC secret** – anything else, used as-is for HS* algorithms.
 */
function buildClientAssertionJwt(params: {
  clientId: string;
  accessTokenUrl: string;
  secret: string;
  algorithm: Algorithm;
}): string {
  const { clientId, accessTokenUrl, secret, algorithm } = params;

  const isHmac = algorithm.startsWith('HS') || algorithm === 'none';

  // Resolve the signing key depending on format
  let signingKey: jwt.Secret;
  let kid: string | undefined;

  const trimmed = secret.trim();

  if (isHmac) {
    // HMAC algorithms use the raw secret (string or Buffer)
    signingKey = secret;
  } else if (trimmed.startsWith('{')) {
    // Looks like JSON - treat as JWK. There is surely a better way to detect JWK vs a raw secret, but this should work in most cases.
    let jwk: any;
    try {
      jwk = JSON.parse(trimmed);
    } catch {
      throw new Error('Client Assertion secret looks like JSON but is not valid');
    }

    kid = jwk?.kid;
    signingKey = createPrivateKey({ key: jwk, format: 'jwk' });
  } else if (trimmed.startsWith('-----')) {
    // PEM-encoded key
    signingKey = createPrivateKey({ key: trimmed, format: 'pem' });
  } else {
    throw new Error(
      'Client Assertion secret must be a JWK JSON object, a PEM-encoded key ' +
        '(starting with -----), or a raw secret for HMAC algorithms.',
    );
  }

  const now = Math.floor(Date.now() / 1000);
  const payload = {
    iss: clientId,
    sub: clientId,
    aud: accessTokenUrl,
    iat: now,
    exp: now + 300, // 5 minutes
    jti: randomUUID(),
  };

  // Build the JWT header; include "kid" when available
  const header: jwt.JwtHeader = { alg: algorithm, typ: 'JWT' };
  if (kid) {
    header.kid = kid;
  }

  return jwt.sign(JSON.stringify(payload), signingKey, {
    algorithm: algorithm as jwt.Algorithm,
    header,
  });
}
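The assertion built by `buildClientAssertionJwt` carries the standard RFC 7523 claim set: `iss` and `sub` are the client ID, `aud` is the token endpoint, `exp` is short-lived, and `jti` is unique. A hedged sketch of an equivalent HMAC-signed assertion using the same `jsonwebtoken` dependency (all values below are placeholders):

```typescript
import jwt from 'jsonwebtoken';

const now = Math.floor(Date.now() / 1000);
const assertion = jwt.sign(
  {
    iss: 'my-client',                            // issuer: the client_id
    sub: 'my-client',                            // subject: also the client_id
    aud: 'https://auth.example.com/oauth/token', // audience: the token endpoint
    iat: now,
    exp: now + 300,                              // expires in 5 minutes
    jti: 'example-unique-id',                    // unique token identifier
  },
  'super-secret-hmac-value',                     // placeholder HMAC secret
  { algorithm: 'HS256' },
);

console.log(jwt.decode(assertion)); // prints the claim object above
```

For PEM or JWK secrets the plugin first resolves a key object via `createPrivateKey`, as shown in the diff; only the signing key differs, not the claims.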
export async function getClientCredentials(
  ctx: Context,
  contextId: string,
@@ -14,6 +104,10 @@ export async function getClientCredentials(
    scope,
    audience,
    credentialsInBody,
    clientAssertionSecret,
    clientAssertionSecretBase64,
    clientCredentialsMethod,
    clientAssertionAlgorithm,
  }: {
    accessTokenUrl: string;
    clientId: string;
@@ -21,6 +115,10 @@ export async function getClientCredentials(
    scope: string | null;
    audience: string | null;
    credentialsInBody: boolean;
    clientAssertionSecret: string;
    clientAssertionSecretBase64: boolean;
    clientCredentialsMethod: string;
    clientAssertionAlgorithm: string;
  },
) {
  const tokenArgs: TokenStoreArgs = {
@@ -34,16 +132,38 @@ export async function getClientCredentials(
    return token;
  }

  const response = await fetchAccessToken(ctx, {
  const common: Omit<
    Parameters<typeof fetchAccessToken>[1],
    'clientAssertion' | 'clientSecret' | 'credentialsInBody'
  > = {
    grantType: 'client_credentials',
    accessTokenUrl,
    audience,
    clientId,
    clientSecret,
    scope,
    credentialsInBody,
    params: [],
  });
  };

  const fetchParams: Parameters<typeof fetchAccessToken>[1] =
    clientCredentialsMethod === 'client_assertion'
      ? {
          ...common,
          clientAssertion: buildClientAssertionJwt({
            clientId,
            algorithm: clientAssertionAlgorithm as Algorithm,
            accessTokenUrl,
            secret: clientAssertionSecretBase64
              ? Buffer.from(clientAssertionSecret, 'base64').toString('utf-8')
              : clientAssertionSecret,
          }),
        }
      : {
          ...common,
          clientSecret,
          credentialsInBody,
        };

  const response = await fetchAccessToken(ctx, fetchParams);

  return storeToken(ctx, tokenArgs, response);
}
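The `common` object above derives its type from the function's own signature via `Parameters<typeof fetchAccessToken>[1]` and narrows it with `Omit`. A generic illustration of that idiom (the `connect` function is hypothetical, used only to show the pattern):

```typescript
// Hypothetical function standing in for fetchAccessToken's second parameter.
function connect(opts: { host: string; port: number; tls: boolean }): void {
  console.log(opts);
}

// Everything except `tls`, derived from the function's own signature:
type CommonOpts = Omit<Parameters<typeof connect>[0], 'tls'>;

const common: CommonOpts = { host: 'localhost', port: 8080 };

// Spread the shared fields and add the variant-specific one at the call site.
connect({ ...common, tls: true });
```

Typing `common` this way keeps the shared fields in sync with `fetchAccessToken` if its signature changes later.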
@@ -5,6 +5,7 @@ import type {
  JsonPrimitive,
  PluginDefinition,
} from '@yaakapp/api';
import type { Algorithm } from 'jsonwebtoken';
import { DEFAULT_LOCALHOST_PORT, HOSTED_CALLBACK_URL, stopActiveServer } from './callbackServer';
import {
  type CallbackType,
@@ -14,7 +15,11 @@ import {
  PKCE_PLAIN,
  PKCE_SHA256,
} from './grants/authorizationCode';
import { getClientCredentials } from './grants/clientCredentials';
import {
  defaultJwtAlgorithm,
  getClientCredentials,
  jwtAlgorithms,
} from './grants/clientCredentials';
import { getImplicit } from './grants/implicit';
import { getPassword } from './grants/password';
import type { AccessToken, TokenStoreArgs } from './store';
@@ -97,7 +102,10 @@ export const plugin: PluginDefinition = {
      };
      const token = await getToken(ctx, tokenArgs);
      if (token == null) {
        await ctx.toast.show({ message: 'No token to copy', color: 'warning' });
        await ctx.toast.show({
          message: 'No token to copy',
          color: 'warning',
        });
      } else {
        await ctx.clipboard.copyText(token.response.access_token);
        await ctx.toast.show({
@@ -118,9 +126,15 @@ export const plugin: PluginDefinition = {
        clientId: stringArg(values, 'clientId'),
      };
      if (await deleteToken(ctx, tokenArgs)) {
        await ctx.toast.show({ message: 'Token deleted', color: 'success' });
        await ctx.toast.show({
          message: 'Token deleted',
          color: 'success',
        });
      } else {
        await ctx.toast.show({ message: 'No token to delete', color: 'warning' });
        await ctx.toast.show({
          message: 'No token to delete',
          color: 'warning',
        });
      }
    },
  },
@@ -139,6 +153,19 @@ export const plugin: PluginDefinition = {
        defaultValue: defaultGrantType,
        options: grantTypes,
      },
      {
        type: 'select',
        name: 'clientCredentialsMethod',
        label: 'Authentication Method',
        description:
          '"Client Secret" sends client_secret. \n' + '"Client Assertion" sends a signed JWT.',
        defaultValue: 'client_secret',
        options: [
          { label: 'Client Secret', value: 'client_secret' },
          { label: 'Client Assertion', value: 'client_assertion' },
        ],
        dynamic: hiddenIfNot(['client_credentials']),
      },
      {
        type: 'text',
        name: 'clientId',
@@ -151,7 +178,47 @@ export const plugin: PluginDefinition = {
        label: 'Client Secret',
        optional: true,
        password: true,
        dynamic: hiddenIfNot(['authorization_code', 'password', 'client_credentials']),
        dynamic: hiddenIfNot(
          ['authorization_code', 'password', 'client_credentials'],
          (values) => values.clientCredentialsMethod === 'client_secret',
        ),
      },
      {
        type: 'select',
        name: 'clientAssertionAlgorithm',
        label: 'JWT Algorithm',
        defaultValue: defaultJwtAlgorithm,
        options: jwtAlgorithms.map((value) => ({
          label: value === 'none' ? 'None' : value,
          value,
        })),
        dynamic: hiddenIfNot(
          ['client_credentials'],
          ({ clientCredentialsMethod }) => clientCredentialsMethod === 'client_assertion',
        ),
      },
      {
        type: 'text',
        name: 'clientAssertionSecret',
        label: 'JWT Secret',
        description:
          'Can be HMAC, PEM or JWK. Make sure you pick the correct algorithm type above.',
        password: true,
        optional: true,
        multiLine: true,
        dynamic: hiddenIfNot(
          ['client_credentials'],
          ({ clientCredentialsMethod }) => clientCredentialsMethod === 'client_assertion',
        ),
      },
      {
        type: 'checkbox',
        name: 'clientAssertionSecretBase64',
        label: 'JWT secret is base64 encoded',
        dynamic: hiddenIfNot(
          ['client_credentials'],
          ({ clientCredentialsMethod }) => clientCredentialsMethod === 'client_assertion',
        ),
      },
      {
        type: 'text',
@@ -160,7 +227,10 @@ export const plugin: PluginDefinition = {
        label: 'Authorization URL',
        dynamic: hiddenIfNot(['authorization_code', 'implicit']),
        placeholder: authorizationUrls[0],
        completionOptions: authorizationUrls.map((url) => ({ label: url, value: url })),
        completionOptions: authorizationUrls.map((url) => ({
          label: url,
          value: url,
        })),
      },
      {
        type: 'text',
@@ -169,7 +239,10 @@ export const plugin: PluginDefinition = {
        label: 'Access Token URL',
        placeholder: accessTokenUrls[0],
        dynamic: hiddenIfNot(['authorization_code', 'password', 'client_credentials']),
        completionOptions: accessTokenUrls.map((url) => ({ label: url, value: url })),
        completionOptions: accessTokenUrls.map((url) => ({
          label: url,
          value: url,
        })),
      },
      {
        type: 'banner',
@@ -186,7 +259,8 @@ export const plugin: PluginDefinition = {
      {
        type: 'text',
        name: 'redirectUri',
        label: 'Redirect URI',
        label: 'Redirect URI (can be any valid URL)',
        placeholder: 'https://mysite.example.com/oauth/callback',
        description:
          'URI the OAuth provider redirects to after authorization. Yaak intercepts this automatically in its embedded browser so any valid URI will work.',
        optional: true,
@@ -383,6 +457,11 @@ export const plugin: PluginDefinition = {
          { label: 'In Request Body', value: 'body' },
          { label: 'As Basic Authentication', value: 'basic' },
        ],
        dynamic: (_ctx: Context, { values }: GetHttpAuthenticationConfigRequest) => ({
          hidden:
            values.grantType === 'client_credentials' &&
            values.clientCredentialsMethod === 'client_assertion',
        }),
      },
    ],
  },
@@ -484,7 +563,11 @@ export const plugin: PluginDefinition = {
          ? accessTokenUrl
          : `https://${accessTokenUrl}`,
        clientId: stringArg(values, 'clientId'),
        clientAssertionAlgorithm: stringArg(values, 'clientAssertionAlgorithm') as Algorithm,
        clientSecret: stringArg(values, 'clientSecret'),
        clientCredentialsMethod: stringArg(values, 'clientCredentialsMethod'),
        clientAssertionSecret: stringArg(values, 'clientAssertionSecret'),
        clientAssertionSecretBase64: !!values.clientAssertionSecretBase64,
        scope: stringArgOrNull(values, 'scope'),
        audience: stringArgOrNull(values, 'audience'),
        credentialsInBody,
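Several of the new fields rely on `hiddenIfNot` accepting an optional predicate over the current form values; the helper itself is not part of this diff. A hedged sketch of a compatible shape, offered purely as an assumption about how it could look:

```typescript
// Assumption: this is NOT the plugin's actual helper, only a shape that would
// satisfy the call sites shown in the diff (grant-type list plus optional predicate).
type FormValues = Record<string, string | number | boolean | null | undefined>;

function hiddenIfNot(
  grantTypes: string[],
  extraPredicate?: (values: FormValues) => boolean,
) {
  return (_ctx: unknown, { values }: { values: FormValues }) => ({
    hidden:
      !grantTypes.includes(String(values.grantType)) ||
      (extraPredicate ? !extraPredicate(values) : false),
  });
}
```

Under that reading, the JWT-related fields only appear when the grant type is `client_credentials` and the authentication method is `client_assertion`.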
@@ -3,23 +3,30 @@ import type { GrpcRequest, HttpRequest, WebsocketRequest } from '@yaakapp-intern
import { MoveToWorkspaceDialog } from '../components/MoveToWorkspaceDialog';
import { activeWorkspaceIdAtom } from '../hooks/useActiveWorkspace';
import { createFastMutation } from '../hooks/useFastMutation';
import { pluralizeCount } from '../lib/pluralize';
import { showDialog } from '../lib/dialog';
import { jotaiStore } from '../lib/jotai';

export const moveToWorkspace = createFastMutation({
  mutationKey: ['move_workspace'],
  mutationFn: async (request: HttpRequest | GrpcRequest | WebsocketRequest) => {
  mutationFn: async (requests: (HttpRequest | GrpcRequest | WebsocketRequest)[]) => {
    const activeWorkspaceId = jotaiStore.get(activeWorkspaceIdAtom);
    if (activeWorkspaceId == null) return;
    if (requests.length === 0) return;

    const title =
      requests.length === 1
        ? 'Move Request'
        : `Move ${pluralizeCount('Request', requests.length)}`;

    showDialog({
      id: 'change-workspace',
      title: 'Move Workspace',
      title,
      size: 'sm',
      render: ({ hide }) => (
        <MoveToWorkspaceDialog
          onDone={hide}
          request={request}
          requests={requests}
          activeWorkspaceId={activeWorkspaceId}
        />
      ),
@@ -616,5 +616,16 @@ function KeyValueArg({

function hasVisibleInputs(inputs: FormInput[] | undefined): boolean {
  if (!inputs) return false;
  return inputs.some((i) => !i.hidden);

  for (const input of inputs) {
    if ('inputs' in input && !hasVisibleInputs(input.inputs)) {
      // Has children, but none are visible
      return false;
    }
    if (!input.hidden) {
      return true;
    }
  }

  return false;
}
@@ -2,6 +2,7 @@ import type { GrpcRequest, HttpRequest, WebsocketRequest } from '@yaakapp-intern
import { patchModel, workspacesAtom } from '@yaakapp-internal/models';
import { useAtomValue } from 'jotai';
import { useState } from 'react';
import { pluralizeCount } from '../lib/pluralize';
import { resolvedModelName } from '../lib/resolvedModelName';
import { router } from '../lib/router';
import { showToast } from '../lib/toast';
@@ -12,18 +13,21 @@ import { VStack } from './core/Stacks';

interface Props {
  activeWorkspaceId: string;
  request: HttpRequest | GrpcRequest | WebsocketRequest;
  requests: (HttpRequest | GrpcRequest | WebsocketRequest)[];
  onDone: () => void;
}

export function MoveToWorkspaceDialog({ onDone, request, activeWorkspaceId }: Props) {
export function MoveToWorkspaceDialog({ onDone, requests, activeWorkspaceId }: Props) {
  const workspaces = useAtomValue(workspacesAtom);
  const [selectedWorkspaceId, setSelectedWorkspaceId] = useState<string>(activeWorkspaceId);

  const targetWorkspace = workspaces.find((w) => w.id === selectedWorkspaceId);
  const isSameWorkspace = selectedWorkspaceId === activeWorkspaceId;

  return (
    <VStack space={4} className="mb-4">
      <Select
        label="New Workspace"
        label="Target Workspace"
        name="workspace"
        value={selectedWorkspaceId}
        onChange={setSelectedWorkspaceId}
@@ -34,27 +38,31 @@ export function MoveToWorkspaceDialog({ onDone, request, activeWorkspaceId }: Pr
      />
      <Button
        color="primary"
        disabled={selectedWorkspaceId === activeWorkspaceId}
        disabled={isSameWorkspace}
        onClick={async () => {
          const patch = {
            workspaceId: selectedWorkspaceId,
            folderId: null,
          };

          await patchModel(request, patch);
          await Promise.all(requests.map((r) => patchModel(r, patch)));

          // Hide after a moment, to give time for request to disappear
          // Hide after a moment, to give time for requests to disappear
          setTimeout(onDone, 100);
          showToast({
            id: 'workspace-moved',
            message: (
              <>
                <InlineCode>{resolvedModelName(request)}</InlineCode> moved to{' '}
                <InlineCode>
                  {workspaces.find((w) => w.id === selectedWorkspaceId)?.name ?? 'unknown'}
                </InlineCode>
              </>
            ),
            message:
              requests.length === 1 && requests[0] != null ? (
                <>
                  <InlineCode>{resolvedModelName(requests[0])}</InlineCode> moved to{' '}
                  <InlineCode>{targetWorkspace?.name ?? 'unknown'}</InlineCode>
                </>
              ) : (
                <>
                  {pluralizeCount('request', requests.length)} moved to{' '}
                  <InlineCode>{targetWorkspace?.name ?? 'unknown'}</InlineCode>
                </>
              ),
            action: ({ hide }) => (
              <Button
                size="xs"
@@ -74,7 +82,7 @@ export function MoveToWorkspaceDialog({ onDone, request, activeWorkspaceId }: Pr
            });
          }}
        >
          Move
          {requests.length === 1 ? 'Move' : `Move ${pluralizeCount('Request', requests.length)}`}
        </Button>
    </VStack>
  );
@@ -278,6 +278,7 @@ function Sidebar({ className }: { className?: string }) {
      },
    },
    'sidebar.selected.duplicate': {
      // Higher priority so this takes precedence over model.duplicate (same Meta+d binding)
      priority: 10,
      enable,
      cb: async (items: SidebarModel[]) => {
@@ -290,6 +291,18 @@ function Sidebar({ className }: { className?: string }) {
        }
      },
    },
    'sidebar.selected.move': {
      enable,
      cb: async (items: SidebarModel[]) => {
        const requests = items.filter(
          (i): i is HttpRequest | GrpcRequest | WebsocketRequest =>
            i.model === 'http_request' || i.model === 'grpc_request' || i.model === 'websocket_request'
        );
        if (requests.length > 0) {
          moveToWorkspace.mutate(requests);
        }
      },
    },
    'request.send': {
      enable,
      cb: async (items: SidebarModel[]) => {
@@ -320,6 +333,10 @@ function Sidebar({ className }: { className?: string }) {

    const workspaces = jotaiStore.get(workspacesAtom);
    const onlyHttpRequests = items.every((i) => i.model === 'http_request');
    const requestItems = items.filter(
      (i) =>
        i.model === 'http_request' || i.model === 'grpc_request' || i.model === 'websocket_request',
    );

    const initialItems: ContextMenuProps['items'] = [
      {
@@ -416,16 +433,13 @@ function Sidebar({ className }: { className?: string }) {
        onSelect: () => actions['sidebar.selected.duplicate'].cb(items),
      },
      {
        label: 'Move',
        label: items.length <= 1 ? 'Move' : `Move ${requestItems.length} Requests`,
        hotKeyAction: 'sidebar.selected.move',
        hotKeyLabelOnly: true,
        leftSlot: <Icon icon="arrow_right_circle" />,
        hidden:
          workspaces.length <= 1 ||
          items.length > 1 ||
          child.model === 'folder' ||
          child.model === 'workspace',
        hidden: workspaces.length <= 1 || requestItems.length === 0 || requestItems.length !== items.length,
        onSelect: () => {
          if (child.model === 'folder' || child.model === 'workspace') return;
          moveToWorkspace.mutate(child);
          actions['sidebar.selected.move'].cb(items);
        },
      },
      {
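The `sidebar.selected.move` handler narrows the selected items with a type-predicate filter, `(i): i is HttpRequest | GrpcRequest | WebsocketRequest => ...`, so the resulting array can be handed straight to `moveToWorkspace.mutate`. A small generic example of the same idiom (the `Shape` type is illustrative only):

```typescript
// Illustration of Array.prototype.filter with a user-defined type guard.
type Shape =
  | { kind: 'circle'; radius: number }
  | { kind: 'square'; side: number };

const shapes: Shape[] = [
  { kind: 'circle', radius: 1 },
  { kind: 'square', side: 2 },
];

// The predicate narrows the element type, so `circles` is circle-only.
const circles = shapes.filter(
  (s): s is Extract<Shape, { kind: 'circle' }> => s.kind === 'circle',
);
circles.forEach((c) => console.log(c.radius));
```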
@@ -434,11 +434,23 @@

  input {
    @apply bg-surface border-border-subtle focus:border-border-focus;
    @apply border outline-none cursor-text;
    @apply border outline-none;
  }

  label {
    @apply focus-within:text-text;
  input.cm-textfield {
    @apply cursor-text;
  }

  .cm-search label {
    @apply inline-flex items-center h-6 px-1.5 rounded-sm border border-border-subtle cursor-default text-text-subtle text-xs;

    input[type="checkbox"] {
      @apply hidden;
    }

    &:has(:checked) {
      @apply text-primary border-border;
    }
  }

  /* Hide the "All" button */
@@ -446,4 +458,31 @@
  button[name="select"] {
    @apply hidden;
  }

  /* Replace next/prev button text with chevron icons */

  .cm-search button[name="next"],
  .cm-search button[name="prev"] {
    @apply text-[0px] w-7 h-6 inline-flex items-center justify-center border border-border-subtle mr-1;
  }

  .cm-search button[name="prev"]::after,
  .cm-search button[name="next"]::after {
    @apply block w-3.5 h-3.5 bg-text;
    content: "";
  }

  .cm-search button[name="prev"]::after {
    -webkit-mask-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none' stroke='currentColor' stroke-width='2.5' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpath d='M15 18l-6-6 6-6'/%3E%3C/svg%3E");
    mask-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none' stroke='currentColor' stroke-width='2.5' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpath d='M15 18l-6-6 6-6'/%3E%3C/svg%3E");
  }

  .cm-search button[name="next"]::after {
    -webkit-mask-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none' stroke='currentColor' stroke-width='2.5' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpath d='M9 18l6-6-6-6'/%3E%3C/svg%3E");
    mask-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none' stroke='currentColor' stroke-width='2.5' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpath d='M9 18l6-6-6-6'/%3E%3C/svg%3E");
  }

  .cm-search-match-count {
    @apply text-text-subtle text-xs font-mono whitespace-nowrap px-1.5 py-0.5 self-center;
  }
}
@@ -67,6 +67,7 @@ import type { TwigCompletionOption } from './twig/completion';
import { twig } from './twig/extension';
import { pathParametersPlugin } from './twig/pathParameters';
import { url } from './url/extension';
import { searchMatchCount } from './searchMatchCount';

export const syntaxHighlightStyle = HighlightStyle.define([
  {
@@ -256,6 +257,7 @@ export const readonlyExtensions = [

export const multiLineExtensions = ({ hideGutter }: { hideGutter?: boolean }) => [
  search({ top: true }),
  searchMatchCount(),
  hideGutter
    ? []
    : [
src-web/components/core/Editor/searchMatchCount.ts (new file, 116 lines)
@@ -0,0 +1,116 @@
import { getSearchQuery, searchPanelOpen } from '@codemirror/search';
import type { Extension } from '@codemirror/state';
import { type EditorView, ViewPlugin, type ViewUpdate } from '@codemirror/view';

/**
 * A CodeMirror extension that displays the total number of search matches
 * inside the built-in search panel.
 */
export function searchMatchCount(): Extension {
  return ViewPlugin.fromClass(
    class {
      private countEl: HTMLElement | null = null;

      constructor(private view: EditorView) {
        this.updateCount();
      }

      update(update: ViewUpdate) {
        // Recompute when doc changes, search state changes, or selection moves
        const query = getSearchQuery(update.state);
        const prevQuery = getSearchQuery(update.startState);
        const open = searchPanelOpen(update.state);
        const prevOpen = searchPanelOpen(update.startState);

        if (update.docChanged || update.selectionSet || !query.eq(prevQuery) || open !== prevOpen) {
          this.updateCount();
        }
      }

      private updateCount() {
        const state = this.view.state;
        const open = searchPanelOpen(state);
        const query = getSearchQuery(state);

        if (!open) {
          this.removeCountEl();
          return;
        }

        this.ensureCountEl();

        if (!query.search) {
          if (this.countEl) {
            this.countEl.textContent = '0/0';
          }
          return;
        }

        const selection = state.selection.main;
        let count = 0;
        let currentIndex = 0;
        const MAX_COUNT = 9999;
        const cursor = query.getCursor(state);
        for (let result = cursor.next(); !result.done; result = cursor.next()) {
          count++;
          const match = result.value;
          if (match.from <= selection.from && match.to >= selection.to) {
            currentIndex = count;
          }
          if (count > MAX_COUNT) break;
        }

        if (this.countEl) {
          if (count > MAX_COUNT) {
            this.countEl.textContent = `${MAX_COUNT}+`;
          } else if (count === 0) {
            this.countEl.textContent = '0/0';
          } else if (currentIndex > 0) {
            this.countEl.textContent = `${currentIndex}/${count}`;
          } else {
            this.countEl.textContent = `0/${count}`;
          }
        }
      }

      private ensureCountEl() {
        // Find the search panel in the editor DOM
        const panel = this.view.dom.querySelector('.cm-search');
        if (!panel) {
          this.countEl = null;
          return;
        }

        if (this.countEl && this.countEl.parentElement === panel) {
          return; // Already attached
        }

        this.countEl = document.createElement('span');
        this.countEl.className = 'cm-search-match-count';

        // Reorder: insert prev button, then next button, then count after the search input
        const searchInput = panel.querySelector('input');
        const prevBtn = panel.querySelector('button[name="prev"]');
        const nextBtn = panel.querySelector('button[name="next"]');
        if (searchInput && searchInput.parentElement === panel) {
          searchInput.after(this.countEl);
          if (prevBtn) this.countEl.after(prevBtn);
          if (nextBtn && prevBtn) prevBtn.after(nextBtn);
        } else {
          panel.prepend(this.countEl);
        }
      }

      private removeCountEl() {
        if (this.countEl) {
          this.countEl.remove();
          this.countEl = null;
        }
      }

      destroy() {
        this.removeCountEl();
      }
    },
  );
}
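Outside of Yaak's `multiLineExtensions`, the new extension can be dropped into any CodeMirror 6 setup next to `search()`. A minimal standalone sketch, assuming the `@codemirror/*` packages are installed:

```typescript
import { EditorView } from '@codemirror/view';
import { search } from '@codemirror/search';
import { searchMatchCount } from './searchMatchCount';

// Hypothetical standalone editor; Yaak itself wires this up via multiLineExtensions.
const view = new EditorView({
  doc: 'one two two three',
  extensions: [search({ top: true }), searchMatchCount()],
  parent: document.body,
});
// Opening the search panel (Mod-f) now shows a "current/total" count,
// e.g. "1/2" when searching for "two".
```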
@@ -176,7 +176,11 @@ export function GitCommitDialog({ syncDir, onDone, workspace }: Props) {
  }

  if (!hasAnythingToAdd) {
    return <EmptyStateText>No changes since last commit</EmptyStateText>;
    return (
      <div className="h-full px-6 pb-4">
        <EmptyStateText>No changes since last commit</EmptyStateText>
      </div>
    );
  }

  return (
@@ -230,14 +234,14 @@ export function GitCommitDialog({ syncDir, onDone, workspace }: Props) {
        hideLabel
      />
      {commitError && <Banner color="danger">{commitError}</Banner>}
      <HStack alignItems="center">
      <HStack alignItems="center" space={2}>
        <InlineCode>{status.data?.headRefShorthand}</InlineCode>
        <HStack space={2} className="ml-auto">
          <Button
            color="secondary"
            size="sm"
            onClick={handleCreateCommit}
            disabled={!hasAddedAnything}
            disabled={!hasAddedAnything || message.trim().length === 0}
            isLoading={isPushing}
          >
            Commit
@@ -245,7 +249,7 @@ export function GitCommitDialog({ syncDir, onDone, workspace }: Props) {
          <Button
            color="primary"
            size="sm"
            disabled={!hasAddedAnything}
            disabled={!hasAddedAnything || message.trim().length === 0}
            onClick={handleCreateCommitAndPush}
            isLoading={isPushing}
          >
@@ -173,7 +173,6 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
    { type: 'separator' },
    {
      label: 'Push',
      hidden: !hasRemotes,
      leftSlot: <Icon icon="arrow_up_from_line" />,
      waitForOnSelect: true,
      async onSelect() {
@@ -192,7 +191,6 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
    },
    {
      label: 'Pull',
      hidden: !hasRemotes,
      leftSlot: <Icon icon="arrow_down_to_line" />,
      waitForOnSelect: true,
      async onSelect() {
@@ -2,13 +2,13 @@ import type { GitCallbacks } from '@yaakapp-internal/git';
import { sync } from '../../init/sync';
import { promptCredentials } from './credentials';
import { promptDivergedStrategy } from './diverged';
import { promptUncommittedChangesStrategy } from './uncommitted';
import { addGitRemote } from './showAddRemoteDialog';
import { promptUncommittedChangesStrategy } from './uncommitted';

export function gitCallbacks(dir: string): GitCallbacks {
  return {
    addRemote: async () => {
      return addGitRemote(dir);
      return addGitRemote(dir, 'origin');
    },
    promptCredentials: async ({ url, error }) => {
      const creds = await promptCredentials({ url, error });
@@ -3,12 +3,12 @@ import { gitMutations } from '@yaakapp-internal/git';
import { showPromptForm } from '../../lib/prompt-form';
import { gitCallbacks } from './callbacks';

export async function addGitRemote(dir: string): Promise<GitRemote> {
export async function addGitRemote(dir: string, defaultName?: string): Promise<GitRemote> {
  const r = await showPromptForm({
    id: 'add-remote',
    title: 'Add Remote',
    inputs: [
      { type: 'text', label: 'Name', name: 'name' },
      { type: 'text', label: 'Name', name: 'name', defaultValue: defaultName },
      { type: 'text', label: 'URL', name: 'url' },
    ],
  });
@@ -28,6 +28,7 @@ export type HotkeyAction =
  | 'sidebar.filter'
  | 'sidebar.selected.delete'
  | 'sidebar.selected.duplicate'
  | 'sidebar.selected.move'
  | 'sidebar.selected.rename'
  | 'sidebar.expand_all'
  | 'sidebar.collapse_all'
@@ -58,6 +59,7 @@ const defaultHotkeysMac: Record<HotkeyAction, string[]> = {
  'sidebar.collapse_all': ['Meta+Shift+Minus'],
  'sidebar.selected.delete': ['Delete', 'Meta+Backspace'],
  'sidebar.selected.duplicate': ['Meta+d'],
  'sidebar.selected.move': [],
  'sidebar.selected.rename': ['Enter'],
  'sidebar.focus': ['Meta+b'],
  'sidebar.context_menu': ['Control+Enter'],
@@ -87,6 +89,7 @@ const defaultHotkeysOther: Record<HotkeyAction, string[]> = {
  'sidebar.collapse_all': ['Control+Shift+Minus'],
  'sidebar.selected.delete': ['Delete', 'Control+Backspace'],
  'sidebar.selected.duplicate': ['Control+d'],
  'sidebar.selected.move': [],
  'sidebar.selected.rename': ['Enter'],
  'sidebar.focus': ['Control+b'],
  'sidebar.context_menu': ['Alt+Insert'],
@@ -141,6 +144,7 @@ const hotkeyLabels: Record<HotkeyAction, string> = {
  'sidebar.collapse_all': 'Collapse All Folders',
  'sidebar.selected.delete': 'Delete Selected Sidebar Item',
  'sidebar.selected.duplicate': 'Duplicate Selected Sidebar Item',
  'sidebar.selected.move': 'Move Selected to Workspace',
  'sidebar.selected.rename': 'Rename Selected Sidebar Item',
  'sidebar.focus': 'Focus or Toggle Sidebar',
  'sidebar.context_menu': 'Show Context Menu',