Compare commits

..

27 Commits

Author SHA1 Message Date
Gregory Schier
65e91aec6b Fix git pull conflicts with pull.ff=only and improve commit UX (#394) 2026-02-13 14:46:47 -08:00
Gregory Schier
ae943a5fd2 Fix lint: ignore flatpak in biome, fix search cursor iterator usage 2026-02-12 15:34:13 -08:00
winit
9e1a11de0b Add CodeMirror extension to display find match count in the editor. (#390) 2026-02-12 15:27:18 -08:00
Gregory Schier
52732e12ec Fix license activation and plugin requests ignoring proxy settings (#393)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-12 14:38:53 -08:00
Gregory Schier
1127d7e3fa Use consistent release title format in generate-release-notes command 2026-02-11 17:38:13 -08:00
Gregory Schier
7d4d228236 Fix HTTP/2 requests failing with duplicate Content-Length (#391)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-11 17:11:35 -08:00
Gregory Schier
565e053ee8 Fix auth tab crash when template rendering fails (#392) 2026-02-11 14:59:17 -08:00
Gregory Schier
26aba6034f Move faker plugin back to external (plugins-external/faker) 2026-02-11 10:22:20 -08:00
Gregory Schier
9a1d613034 Fix RPM bundle path validation for metainfo file 2026-02-11 08:14:16 -08:00
Gregory Schier
3e4de7d3c4 Move build scripts to Flathub repo, keep release prep scripts here 2026-02-11 07:28:56 -08:00
Gregory Schier
b64b5ec0f8 Refresh Git dropdown data on open and fetch periodically
- Add refreshKey to useGit queries so dropdown data refreshes on open
- Convert fetchAll from mutation to query with 10-minute refetch interval
- Re-run status query after fetchAll completes via dataUpdatedAt key
- Use placeholderData to keep previous data during key changes
- Remove disabled state from Push, Pull, and Commit menu items
2026-02-11 07:20:53 -08:00
Gregory Schier
510d1c7d17 Remove Flatpak manifest (lives in Flathub repo) 2026-02-11 06:32:39 -08:00
Gregory Schier
ed13a62269 Use static desktop file and clean up manifest comments 2026-02-11 06:30:09 -08:00
Gregory Schier
935d613959 Move lockfile patch to standalone script 2026-02-10 23:35:14 -08:00
Gregory Schier
adeaaccc45 Add v2026.2.0 release to metainfo, simplify CI workflow
- Metainfo is managed upstream (updated before tagging)
- CI no longer modifies metainfo; just copies manifest and sources to Flathub
- Flathub manifest installs metainfo from git source
- Permissions reverted to read-only
2026-02-10 23:29:27 -08:00
Gregory Schier
d253093333 Revert "Simplify CI: metainfo releases only accumulate in Flathub repo"
This reverts commit f265b7a572.
2026-02-10 23:26:52 -08:00
Gregory Schier
f265b7a572 Simplify CI: metainfo releases only accumulate in Flathub repo
- Remove metainfo update from update-manifest.sh
- Remove CI step that committed metainfo back to app repo
- Revert permissions back to read-only
- CI now inserts release entry directly into Flathub repo's metainfo
2026-02-10 23:26:22 -08:00
Gregory Schier
68b2ff016f CI: rewrite metainfo paths for Flathub repo 2026-02-10 23:24:09 -08:00
Gregory Schier
a1c6295810 Clean up Flatpak manifest for v2026.2.0
- Update tag to v2026.2.0
- Use SKIP_WASM_BUILD env var instead of build-time package.json patch
- Install metainfo from git source (remove temporary type: file source)
- Fix fix-lockfile.mjs to skip workspace packages
- CI: commit metainfo releases back to app repo, bump permissions to write
2026-02-10 23:19:23 -08:00
Gregory Schier
76ee3fa61b Flatpak: build from source instead of repackaging debs (#389) 2026-02-10 23:05:33 -08:00
Gregory Schier
7fef35ce0a Ship metainfo in deb, remove from Flatpak manifest 2026-02-10 15:26:40 -08:00
Gregory Schier
654af09951 Bump GNOME runtime to 49, fix corrupted arm64 SHA256 2026-02-10 15:22:51 -08:00
Gregory Schier
484dcfade0 Add Flatpak and Flathub packaging support (#388) 2026-02-10 14:38:40 -08:00
Gregory Schier
fda18c5434 Snapshot faker template function names in test
Replace the brittle count assertion (toBe(226)) with a snapshot of all
exported function names. This catches accidental additions, removals,
or renames across faker upgrades with a clear diff.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-09 10:22:03 -08:00
Gregory Schier
a8176d6e9e Skip disabled key-value entries during request rendering
Skip disabled headers, metadata, URL parameters, and form body
entries in the render phase for HTTP, gRPC, and WebSocket requests.
Previously, disabled entries were still template-rendered even though
they were filtered out later at the use site.
2026-02-09 10:17:43 -08:00
Gregory Schier
957d8d9d46 Move faker plugin from external to bundled 2026-02-09 08:43:49 -08:00
Gregory Schier
5f18bf25e2 Replace shell-quote with shlex for curl import (#387) 2026-02-09 08:22:11 -08:00
57 changed files with 1717 additions and 615 deletions

View File

@@ -43,5 +43,7 @@ The skill generates markdown-formatted release notes following this structure:
After outputting the release notes, ask the user if they would like to create a draft GitHub release with these notes. If they confirm, create the release using:
```bash
gh release create <tag> --draft --prerelease --title "<tag>" --notes '<release notes>'
gh release create <tag> --draft --prerelease --title "Release <version>" --notes '<release notes>'
```
**IMPORTANT**: The release title format is "Release XXXX" where XXXX is the version WITHOUT the `v` prefix. For example, tag `v2026.2.1-beta.1` gets title "Release 2026.2.1-beta.1".

52
.github/workflows/flathub.yml vendored Normal file
View File

@@ -0,0 +1,52 @@
name: Update Flathub
on:
release:
types: [published]
permissions:
contents: read
jobs:
update-flathub:
name: Update Flathub manifest
runs-on: ubuntu-latest
# Only run for stable releases (skip betas/pre-releases)
if: ${{ !github.event.release.prerelease }}
steps:
- name: Checkout app repo
uses: actions/checkout@v4
- name: Checkout Flathub repo
uses: actions/checkout@v4
with:
repository: flathub/app.yaak.Yaak
token: ${{ secrets.FLATHUB_TOKEN }}
path: flathub-repo
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: "22"
- name: Install source generators
run: |
pip install flatpak-node-generator tomlkit aiohttp
git clone --depth 1 https://github.com/flatpak/flatpak-builder-tools flatpak/flatpak-builder-tools
- name: Run update-manifest.sh
run: bash flatpak/update-manifest.sh "${{ github.event.release.tag_name }}" flathub-repo
- name: Commit and push to Flathub
working-directory: flathub-repo
run: |
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
git add -A
git diff --cached --quiet && echo "No changes to commit" && exit 0
git commit -m "Update to ${{ github.event.release.tag_name }}"
git push

7
.gitignore vendored
View File

@@ -44,3 +44,10 @@ crates-tauri/yaak-app/tauri.worktree.conf.json
# Tauri auto-generated permission files
**/permissions/autogenerated
**/permissions/schemas
# Flatpak build artifacts
flatpak-repo/
.flatpak-builder/
flatpak/flatpak-builder-tools/
flatpak/cargo-sources.json
flatpak/node-sources.json

322
Cargo.lock generated
View File

@@ -52,6 +52,15 @@ dependencies = [
"zerocopy",
]
[[package]]
name = "aho-corasick"
version = "0.6.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81ce3d38065e618af2d7b77e10c5ad9a069859b4be3c2250f674af3840d9c8a5"
dependencies = [
"memchr",
]
[[package]]
name = "aho-corasick"
version = "1.1.3"
@@ -90,7 +99,7 @@ checksum = "f6f39be698127218cca460cb624878c9aa4e2b47dba3b277963d2bf00bad263b"
dependencies = [
"android_log-sys",
"env_filter",
"log",
"log 0.4.29",
]
[[package]]
@@ -175,7 +184,7 @@ checksum = "c1df21f715862ede32a0c525ce2ca4d52626bb0007f8c18b87a384503ac33e70"
dependencies = [
"clipboard-win",
"image",
"log",
"log 0.4.29",
"objc2 0.6.1",
"objc2-app-kit",
"objc2-core-foundation",
@@ -999,7 +1008,7 @@ version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
dependencies = [
"lazy_static",
"lazy_static 1.5.0",
"windows-sys 0.59.0",
]
@@ -1519,7 +1528,7 @@ dependencies = [
"rustc_version",
"toml 0.8.23",
"vswhom",
"winreg",
"winreg 0.55.0",
]
[[package]]
@@ -1570,8 +1579,8 @@ version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0"
dependencies = [
"log",
"regex",
"log 0.4.29",
"regex 1.11.1",
]
[[package]]
@@ -1584,7 +1593,7 @@ dependencies = [
"anstyle",
"env_filter",
"jiff",
"log",
"log 0.4.29",
]
[[package]]
@@ -1645,7 +1654,7 @@ name = "eventsource-client"
version = "0.14.0"
source = "git+https://github.com/yaakapp/rust-eventsource-client#60e0e3ac5038149c4778dc4979b09b152214f9a8"
dependencies = [
"log",
"log 0.4.29",
"pin-project",
]
@@ -1693,7 +1702,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4316185f709b23713e41e3195f90edef7fb00c3ed4adc79769cf09cc762a3b29"
dependencies = [
"colored",
"log",
"log 0.4.29",
]
[[package]]
@@ -1702,7 +1711,7 @@ version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38e2275cc4e4fc009b0669731a1e5ab7ebf11f469eaede2bab9309a5b4d6057f"
dependencies = [
"memoffset",
"memoffset 0.9.1",
"rustc_version",
]
@@ -2143,7 +2152,7 @@ dependencies = [
"bitflags 2.9.1",
"libc",
"libgit2-sys",
"log",
"log 0.4.29",
"openssl-probe",
"openssl-sys",
"url",
@@ -2284,6 +2293,21 @@ dependencies = [
"tracing",
]
[[package]]
name = "handlebars"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb04af2006ea09d985fef82b81e0eb25337e51b691c76403332378a53d521edc"
dependencies = [
"lazy_static 0.2.11",
"log 0.3.9",
"pest",
"quick-error",
"regex 0.2.11",
"serde",
"serde_json",
]
[[package]]
name = "hashbrown"
version = "0.12.3"
@@ -2356,7 +2380,7 @@ version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b7410cae13cbc75623c98ac4cbfd1f0bedddf3227afc24f370cf0f50a44a11c"
dependencies = [
"log",
"log 0.4.29",
"mac",
"markup5ever",
"match_token",
@@ -2517,7 +2541,7 @@ dependencies = [
"core-foundation-sys",
"iana-time-zone-haiku",
"js-sys",
"log",
"log 0.4.29",
"wasm-bindgen",
"windows-core",
]
@@ -2758,6 +2782,22 @@ dependencies = [
"generic-array",
]
[[package]]
name = "interfaces"
version = "0.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec8f50a973916cac3da5057c986db05cd3346f38c78e9bc24f64cc9f6a3978f"
dependencies = [
"bitflags 1.3.2",
"cc",
"handlebars",
"lazy_static 1.5.0",
"libc",
"nix 0.23.2",
"serde",
"serde_derive",
]
[[package]]
name = "ipnet"
version = "2.11.0"
@@ -2844,7 +2884,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e67e8da4c49d6d9909fe03361f9b620f58898859f5c7aded68351e85e71ecf50"
dependencies = [
"jiff-static",
"log",
"log 0.4.29",
"portable-atomic",
"portable-atomic-util",
"serde_core",
@@ -2871,7 +2911,7 @@ dependencies = [
"cfg-if",
"combine",
"jni-sys",
"log",
"log 0.4.29",
"thiserror 1.0.69",
"walkdir",
"windows-sys 0.45.0",
@@ -2950,7 +2990,7 @@ checksum = "eebcc3aff044e5944a8fbaf69eb277d11986064cba30c468730e8b9909fb551c"
dependencies = [
"byteorder",
"dbus-secret-service",
"log",
"log 0.4.29",
"security-framework 2.11.1",
"security-framework 3.5.1",
"windows-sys 0.60.2",
@@ -2989,6 +3029,12 @@ dependencies = [
"selectors",
]
[[package]]
name = "lazy_static"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73"
[[package]]
name = "lazy_static"
version = "1.5.0"
@@ -3005,7 +3051,7 @@ dependencies = [
"gtk",
"gtk-sys",
"libappindicator-sys",
"log",
"log 0.4.29",
]
[[package]]
@@ -3163,6 +3209,15 @@ dependencies = [
"scopeguard",
]
[[package]]
name = "log"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b"
dependencies = [
"log 0.4.29",
]
[[package]]
name = "log"
version = "0.4.29"
@@ -3199,7 +3254,7 @@ version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7a7213d12e1864c0f002f52c2923d4556935a43dec5e71355c2760e0f6e7a18"
dependencies = [
"log",
"log 0.4.29",
"phf 0.11.3",
"phf_codegen 0.11.3",
"string_cache",
@@ -3248,6 +3303,15 @@ version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "memoffset"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
dependencies = [
"autocfg",
]
[[package]]
name = "memoffset"
version = "0.9.1"
@@ -3302,7 +3366,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c"
dependencies = [
"libc",
"log",
"log 0.4.29",
"wasi 0.11.0+wasi-snapshot-preview1",
"windows-sys 0.59.0",
]
@@ -3344,7 +3408,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e"
dependencies = [
"libc",
"log",
"log 0.4.29",
"openssl",
"openssl-probe",
"openssl-sys",
@@ -3362,7 +3426,7 @@ checksum = "c3f42e7bbe13d351b6bead8286a43aac9534b82bd3cc43e47037f012ebfd62d4"
dependencies = [
"bitflags 2.9.1",
"jni-sys",
"log",
"log 0.4.29",
"ndk-sys",
"num_enum",
"raw-window-handle",
@@ -3390,6 +3454,19 @@ version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086"
[[package]]
name = "nix"
version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f3790c00a0150112de0f4cd161e3d7fc4b2d8a5542ffc35f099a2562aecb35c"
dependencies = [
"bitflags 1.3.2",
"cc",
"cfg-if",
"libc",
"memoffset 0.6.5",
]
[[package]]
name = "nix"
version = "0.30.1"
@@ -3400,7 +3477,7 @@ dependencies = [
"cfg-if",
"cfg_aliases",
"libc",
"memoffset",
"memoffset 0.9.1",
]
[[package]]
@@ -3431,7 +3508,7 @@ dependencies = [
"inotify",
"kqueue",
"libc",
"log",
"log 0.4.29",
"mio",
"notify-types",
"walkdir",
@@ -3872,7 +3949,7 @@ version = "3.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41fc863e2ca13dc2d5c34fb22ea4a588248ac14db929616ba65c45f21744b1e9"
dependencies = [
"log",
"log 0.4.29",
"serde",
"windows-sys 0.52.0",
]
@@ -3912,7 +3989,7 @@ dependencies = [
"des",
"getrandom 0.2.16",
"hmac",
"lazy_static",
"lazy_static 1.5.0",
"rc2",
"sha1",
"yasna",
@@ -4000,6 +4077,12 @@ version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "pest"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8"
[[package]]
name = "petgraph"
version = "0.6.5"
@@ -4435,6 +4518,12 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "quick-error"
version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quick-xml"
version = "0.32.0"
@@ -4529,7 +4618,7 @@ version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93"
dependencies = [
"log",
"log 0.4.29",
"parking_lot",
"scheduled-thread-pool",
]
@@ -4696,16 +4785,29 @@ dependencies = [
"thiserror 2.0.17",
]
[[package]]
name = "regex"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9329abc99e39129fcceabd24cf5d85b4671ef7c29c50e972bc5afe32438ec384"
dependencies = [
"aho-corasick 0.6.10",
"memchr",
"regex-syntax 0.5.6",
"thread_local",
"utf8-ranges",
]
[[package]]
name = "regex"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [
"aho-corasick",
"aho-corasick 1.1.3",
"memchr",
"regex-automata",
"regex-syntax",
"regex-syntax 0.8.5",
]
[[package]]
@@ -4714,9 +4816,18 @@ version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
dependencies = [
"aho-corasick",
"aho-corasick 1.1.3",
"memchr",
"regex-syntax",
"regex-syntax 0.8.5",
]
[[package]]
name = "regex-syntax"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d707a4fa2637f2dca2ef9fd02225ec7661fe01a53623c1e6515b6916511f7a7"
dependencies = [
"ucd-util",
]
[[package]]
@@ -4755,7 +4866,7 @@ dependencies = [
"hyper-tls",
"hyper-util",
"js-sys",
"log",
"log 0.4.29",
"mime",
"mime_guess",
"native-tls",
@@ -4796,7 +4907,7 @@ dependencies = [
"gobject-sys",
"gtk-sys",
"js-sys",
"log",
"log 0.4.29",
"objc2 0.6.1",
"objc2-app-kit",
"objc2-core-foundation",
@@ -4988,7 +5099,7 @@ dependencies = [
"core-foundation 0.10.1",
"core-foundation-sys",
"jni",
"log",
"log 0.4.29",
"once_cell",
"rustls",
"rustls-native-certs",
@@ -5176,7 +5287,7 @@ dependencies = [
"cssparser",
"derive_more",
"fxhash",
"log",
"log 0.4.29",
"phf 0.8.0",
"phf_codegen 0.8.0",
"precomputed-hash",
@@ -5544,7 +5655,7 @@ dependencies = [
"core-graphics 0.24.0",
"foreign-types 0.5.0",
"js-sys",
"log",
"log 0.4.29",
"objc2 0.5.2",
"objc2-foundation 0.2.2",
"objc2-quartz-core 0.2.2",
@@ -5692,6 +5803,18 @@ dependencies = [
"libc",
]
[[package]]
name = "sysproxy"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9707a79d3b95683aa5a9521e698ffd878b8fb289727c25a69157fb85d529ffff"
dependencies = [
"interfaces",
"thiserror 1.0.69",
"winapi",
"winreg 0.10.1",
]
[[package]]
name = "system-configuration"
version = "0.6.1"
@@ -5744,9 +5867,9 @@ dependencies = [
"gdkx11-sys",
"gtk",
"jni",
"lazy_static",
"lazy_static 1.5.0",
"libc",
"log",
"log 0.4.29",
"ndk",
"ndk-context",
"ndk-sys",
@@ -5820,7 +5943,7 @@ dependencies = [
"http-range",
"jni",
"libc",
"log",
"log 0.4.29",
"mime",
"muda",
"objc2 0.6.1",
@@ -5939,7 +6062,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "206dc20af4ed210748ba945c2774e60fd0acd52b9a73a028402caf809e9b6ecf"
dependencies = [
"arboard",
"log",
"log 0.4.29",
"serde",
"serde_json",
"tauri",
@@ -5974,7 +6097,7 @@ version = "2.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "313f8138692ddc4a2127c4c9607d616a46f5c042e77b3722450866da0aad2f19"
dependencies = [
"log",
"log 0.4.29",
"raw-window-handle",
"rfd",
"serde",
@@ -6017,7 +6140,7 @@ dependencies = [
"android_logger",
"byte-unit",
"fern",
"log",
"log 0.4.29",
"objc2 0.6.1",
"objc2-foundation 0.3.1",
"serde",
@@ -6059,7 +6182,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8f08346c8deb39e96f86973da0e2d76cbb933d7ac9b750f6dc4daf955a6f997"
dependencies = [
"gethostname 1.0.2",
"log",
"log 0.4.29",
"os_info",
"serde",
"serde_json",
@@ -6077,10 +6200,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c374b6db45f2a8a304f0273a15080d98c70cde86178855fc24653ba657a1144c"
dependencies = [
"encoding_rs",
"log",
"log 0.4.29",
"open",
"os_pipe",
"regex",
"regex 1.11.1",
"schemars",
"serde",
"serde_json",
@@ -6119,7 +6242,7 @@ dependencies = [
"futures-util",
"http",
"infer",
"log",
"log 0.4.29",
"minisign-verify",
"osakit",
"percent-encoding",
@@ -6146,7 +6269,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73736611e14142408d15353e21e3cca2f12a3cfb523ad0ce85999b6d2ef1a704"
dependencies = [
"bitflags 2.9.1",
"log",
"log 0.4.29",
"serde",
"serde_json",
"tauri",
@@ -6188,7 +6311,7 @@ dependencies = [
"gtk",
"http",
"jni",
"log",
"log 0.4.29",
"objc2 0.6.1",
"objc2-app-kit",
"objc2-foundation 0.3.1",
@@ -6223,12 +6346,12 @@ dependencies = [
"infer",
"json-patch",
"kuchikiki",
"log",
"log 0.4.29",
"memchr",
"phf 0.11.3",
"proc-macro2",
"quote",
"regex",
"regex 1.11.1",
"schemars",
"semver",
"serde",
@@ -6328,6 +6451,15 @@ dependencies = [
"syn 2.0.101",
]
[[package]]
name = "thread_local"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
dependencies = [
"lazy_static 1.5.0",
]
[[package]]
name = "tiff"
version = "0.9.1"
@@ -6472,7 +6604,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084"
dependencies = [
"futures-util",
"log",
"log 0.4.29",
"rustls",
"rustls-native-certs",
"rustls-pki-types",
@@ -6816,7 +6948,7 @@ dependencies = [
"data-encoding",
"http",
"httparse",
"log",
"log 0.4.29",
"rand 0.9.1",
"rustls",
"rustls-pki-types",
@@ -6837,13 +6969,19 @@ version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f"
[[package]]
name = "ucd-util"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abd2fc5d32b590614af8b0a20d837f32eca055edd0bbead59a9cfe80858be003"
[[package]]
name = "uds_windows"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89daebc3e6fd160ac4aa9fc8b3bf71e1f74fbf92367ae71fb83a037e8bf164b9"
dependencies = [
"memoffset",
"memoffset 0.9.1",
"tempfile",
"winapi",
]
@@ -6953,7 +7091,7 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70acd30e3aa1450bc2eece896ce2ad0d178e9c079493819301573dae3c37ba6d"
dependencies = [
"regex",
"regex 1.11.1",
"serde",
"unic-ucd-ident",
"url",
@@ -6965,6 +7103,12 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]]
name = "utf8-ranges"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcfc827f90e53a02eaef5e535ee14266c1d569214c6aa70133a624d8a3164ba"
[[package]]
name = "utf8-width"
version = "0.1.7"
@@ -7101,7 +7245,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6"
dependencies = [
"bumpalo",
"log",
"log 0.4.29",
"proc-macro2",
"quote",
"syn 2.0.101",
@@ -7848,6 +7992,15 @@ dependencies = [
"memchr",
]
[[package]]
name = "winreg"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d"
dependencies = [
"winapi",
]
[[package]]
name = "winreg"
version = "0.55.0"
@@ -7874,7 +8027,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e5ff8d0e60065f549fafd9d6cb626203ea64a798186c80d8e7df4f8af56baeb"
dependencies = [
"libc",
"log",
"log 0.4.29",
"os_pipe",
"rustix 0.38.44",
"tempfile",
@@ -7994,6 +8147,17 @@ dependencies = [
"rustix 1.0.7",
]
[[package]]
name = "yaak-api"
version = "0.1.0"
dependencies = [
"log 0.4.29",
"reqwest",
"sysproxy",
"thiserror 2.0.17",
"yaak-common",
]
[[package]]
name = "yaak-app"
version = "0.0.0"
@@ -8003,7 +8167,7 @@ dependencies = [
"cookie",
"eventsource-client",
"http",
"log",
"log 0.4.29",
"md5 0.8.0",
"mime_guess",
"openssl-sys",
@@ -8034,6 +8198,7 @@ dependencies = [
"ts-rs",
"url",
"uuid",
"yaak-api",
"yaak-common",
"yaak-core",
"yaak-crypto",
@@ -8060,7 +8225,7 @@ dependencies = [
"clap",
"dirs",
"env_logger",
"log",
"log 0.4.29",
"serde_json",
"tokio",
"yaak-crypto",
@@ -8093,7 +8258,7 @@ dependencies = [
"base64 0.22.1",
"chacha20poly1305",
"keyring",
"log",
"log 0.4.29",
"serde",
"thiserror 2.0.17",
"yaak-models",
@@ -8117,7 +8282,7 @@ version = "0.1.0"
dependencies = [
"chrono",
"git2",
"log",
"log 0.4.29",
"serde",
"serde_json",
"serde_yaml",
@@ -8139,7 +8304,7 @@ dependencies = [
"dunce",
"hyper-rustls",
"hyper-util",
"log",
"log 0.4.29",
"md5 0.7.0",
"prost",
"prost-reflect",
@@ -8167,10 +8332,11 @@ dependencies = [
"cookie",
"flate2",
"futures-util",
"http-body",
"hyper-util",
"log",
"log 0.4.29",
"mime_guess",
"regex",
"regex 1.11.1",
"reqwest",
"serde",
"serde_json",
@@ -8191,7 +8357,7 @@ name = "yaak-license"
version = "0.1.0"
dependencies = [
"chrono",
"log",
"log 0.4.29",
"reqwest",
"serde",
"serde_json",
@@ -8199,9 +8365,9 @@ dependencies = [
"tauri-plugin",
"thiserror 2.0.17",
"ts-rs",
"yaak-api",
"yaak-common",
"yaak-models",
"yaak-tauri-utils",
]
[[package]]
@@ -8210,7 +8376,7 @@ version = "0.1.0"
dependencies = [
"cocoa",
"csscolorparser",
"log",
"log 0.4.29",
"objc",
"rand 0.9.1",
"tauri",
@@ -8224,7 +8390,7 @@ dependencies = [
"chrono",
"hex",
"include_dir",
"log",
"log 0.4.29",
"nanoid",
"r2d2",
"r2d2_sqlite",
@@ -8249,11 +8415,11 @@ dependencies = [
"futures-util",
"hex",
"keyring",
"log",
"log 0.4.29",
"md5 0.7.0",
"path-slash",
"rand 0.9.1",
"regex",
"regex 1.11.1",
"reqwest",
"serde",
"serde_json",
@@ -8283,7 +8449,7 @@ version = "0.1.0"
dependencies = [
"chrono",
"hex",
"log",
"log 0.4.29",
"notify",
"serde",
"serde_json",
@@ -8300,12 +8466,8 @@ dependencies = [
name = "yaak-tauri-utils"
version = "0.1.0"
dependencies = [
"regex",
"reqwest",
"serde",
"regex 1.11.1",
"tauri",
"thiserror 2.0.17",
"yaak-common",
]
[[package]]
@@ -8313,7 +8475,7 @@ name = "yaak-templates"
version = "0.1.0"
dependencies = [
"base64 0.22.1",
"log",
"log 0.4.29",
"serde",
"serde-wasm-bindgen",
"serde_json",
@@ -8327,7 +8489,7 @@ dependencies = [
name = "yaak-tls"
version = "0.1.0"
dependencies = [
"log",
"log 0.4.29",
"p12",
"rustls",
"rustls-pemfile",
@@ -8344,7 +8506,7 @@ version = "0.1.0"
dependencies = [
"futures-util",
"http",
"log",
"log 0.4.29",
"md5 0.8.0",
"serde",
"serde_json",
@@ -8408,7 +8570,7 @@ dependencies = [
"futures-core",
"futures-lite",
"hex",
"nix",
"nix 0.30.1",
"ordered-stream",
"serde",
"serde_repr",
@@ -8575,7 +8737,7 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aed5f10c571472911e37d8f7601a8dfba52b4f7f73a344015291b82ab292faf6"
dependencies = [
"log",
"log 0.4.29",
"thiserror 2.0.17",
"zip",
]
@@ -8594,7 +8756,7 @@ checksum = "edfc5ee405f504cd4984ecc6f14d02d55cfda60fa4b689434ef4102aae150cd7"
dependencies = [
"bumpalo",
"crc32fast",
"log",
"log 0.4.29",
"simd-adler32",
]

View File

@@ -15,6 +15,7 @@ members = [
"crates/yaak-templates",
"crates/yaak-tls",
"crates/yaak-ws",
"crates/yaak-api",
# CLI crates
"crates-cli/yaak-cli",
# Tauri-specific crates
@@ -58,6 +59,7 @@ yaak-sync = { path = "crates/yaak-sync" }
yaak-templates = { path = "crates/yaak-templates" }
yaak-tls = { path = "crates/yaak-tls" }
yaak-ws = { path = "crates/yaak-ws" }
yaak-api = { path = "crates/yaak-api" }
# Internal crates - Tauri-specific
yaak-fonts = { path = "crates-tauri/yaak-fonts" }

View File

@@ -1,27 +0,0 @@
# MCP Client Plan
## Goal
Add an MCP client mode to Yaak so users can connect to and debug MCP servers.
## Core Design
- **Protocol layer:** Implement JSONRPC framing, message IDs, and notifications as the common core.
- **Transport interface:** Define an async trait with `connect`, `send`, `receive`, and `close` methods (see the sketch after this list).
- **Transports:**
- Start with **Standard I/O** for local development.
- Reuse the existing HTTP stack for **HTTP streaming** next.
- Leave hooks for **WebSocket** support later.
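A rough sketch of the transport interface described above (trait and method names here are hypothetical, not taken from the codebase):
```rust
use async_trait::async_trait;

/// Hypothetical transport abstraction matching the plan's connect/send/receive/close shape.
#[async_trait]
pub trait McpTransport: Send {
    /// Establish the underlying connection (spawn the stdio process, open the HTTP stream, etc.).
    async fn connect(&mut self) -> std::io::Result<()>;
    /// Send one framed JSON-RPC message.
    async fn send(&mut self, message: serde_json::Value) -> std::io::Result<()>;
    /// Receive the next response or notification; `None` once the transport has closed.
    async fn receive(&mut self) -> std::io::Result<Option<serde_json::Value>>;
    /// Tear down the connection.
    async fn close(&mut self) -> std::io::Result<()>;
}
```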
## Integration
- Register MCP as a new request type alongside REST, GraphQL, gRPC, and WebSocket.
- Allow per-request transport selection (stdio or HTTP).
- Map inbound messages into a new MCP response model that feeds existing timeline and debug views.
## Testing and Dogfooding
- Convert Yaak's own MCP server to Standard I/O for local testing.
- Use it internally to validate protocol behavior and message flow.
- Add unit and integration tests for JSONRPC messaging and transport abstractions.
## Future Refinements
- Add WebSocket transport support once core paths are stable.
- Extend timelines for protocol-level visualization layered over raw transport events.
- Implement version and capability negotiation between client and server.

View File

@@ -47,7 +47,8 @@
"!src-web/vite.config.ts",
"!src-web/routeTree.gen.ts",
"!packages/plugin-runtime-types/lib",
"!**/bindings"
"!**/bindings",
"!flatpak"
]
}
}

View File

@@ -1,198 +0,0 @@
# CLI Command Architecture Plan
## Goal
Redesign the yaak-cli command structure to use a resource-oriented `<resource> <action>`
pattern that scales well, is discoverable, and supports both human and LLM workflows.
## Command Architecture
### Design Principles
- **Resource-oriented**: top-level commands are nouns, subcommands are verbs
- **Polymorphic requests**: `request` covers HTTP, gRPC, and WebSocket — the CLI
resolves the type via `get_any_request` and adapts behavior accordingly
- **Simple creation, full-fidelity via JSON**: human-friendly flags for basic creation,
`--json` for full control (targeted at LLM and scripting workflows)
- **Runtime schema introspection**: `request schema` outputs JSON Schema for the request
models, with dynamic auth fields populated from loaded plugins at runtime
- **Destructive actions require confirmation**: `delete` commands prompt for user
confirmation before proceeding. Can be bypassed with `--yes` / `-y` for scripting
### Commands
```
# Top-level shortcut
yaakcli send <id> [-e <env_id>] # id can be a request, folder, or workspace
# Resource commands
yaakcli workspace list
yaakcli workspace show <id>
yaakcli workspace create --name <name>
yaakcli workspace create --json '{"name": "My Workspace"}'
yaakcli workspace create '{"name": "My Workspace"}' # positional JSON shorthand
yaakcli workspace update --json '{"id": "wk_abc", "name": "New Name"}'
yaakcli workspace delete <id>
yaakcli request list <workspace_id>
yaakcli request show <id>
yaakcli request create <workspace_id> --name <name> --url <url> [--method GET]
yaakcli request create --json '{"workspaceId": "wk_abc", "url": "..."}'
yaakcli request update --json '{"id": "rq_abc", "url": "https://new.com"}'
yaakcli request send <id> [-e <env_id>]
yaakcli request delete <id>
yaakcli request schema <http|grpc|websocket>
yaakcli folder list <workspace_id>
yaakcli folder show <id>
yaakcli folder create <workspace_id> --name <name>
yaakcli folder create --json '{"workspaceId": "wk_abc", "name": "Auth"}'
yaakcli folder update --json '{"id": "fl_abc", "name": "New Name"}'
yaakcli folder delete <id>
yaakcli environment list <workspace_id>
yaakcli environment show <id>
yaakcli environment create <workspace_id> --name <name>
yaakcli environment create --json '{"workspaceId": "wk_abc", "name": "Production"}'
yaakcli environment update --json '{"id": "ev_abc", ...}'
yaakcli environment delete <id>
```
### `send` — Top-Level Shortcut
`yaakcli send <id>` is a convenience alias that accepts any sendable ID. It tries
each type in order via DB lookups (short-circuiting on first match):
1. Request (HTTP, gRPC, or WebSocket via `get_any_request`)
2. Folder (sends all requests in the folder)
3. Workspace (sends all requests in the workspace)
ID prefixes exist (e.g. `rq_`, `fl_`, `wk_`) but are not relied upon — resolution
is purely by DB lookup.
`request send <id>` is the same but restricted to request IDs only.
### Request Send — Polymorphic Behavior
`send` means "execute this request" regardless of protocol:
- **HTTP**: send request, print response, exit
- **gRPC**: invoke the method; for streaming, stream output to stdout until done/Ctrl+C
- **WebSocket**: connect, stream messages to stdout until closed/Ctrl+C
### `request schema` — Runtime JSON Schema
Outputs a JSON Schema describing the full request shape, including dynamic fields:
1. Generate base schema from `schemars::JsonSchema` derive on the Rust model structs
2. Load plugins, collect auth strategy definitions and their form inputs
3. Merge plugin-defined auth fields into the `authentication` property as a `oneOf`
4. Output the combined schema as JSON
This lets an LLM call `schema`, read the shape, and construct valid JSON for
`create --json` or `update --json`.
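A minimal sketch of the base-schema step using `schemars` (assuming the model derives `JsonSchema` as planned; the struct fields here are placeholders, not the real model):
```rust
use schemars::{JsonSchema, schema_for};
use serde::{Deserialize, Serialize};

// Stand-in for the real yaak-models struct once it derives JsonSchema.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct HttpRequest {
    pub url: String,
    pub method: String,
}

fn print_schema() -> serde_json::Result<()> {
    // Base schema only; plugin-defined auth fields would be merged into
    // the `authentication` property afterwards.
    let schema = schema_for!(HttpRequest);
    println!("{}", serde_json::to_string_pretty(&schema)?);
    Ok(())
}
```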
## Implementation Steps
### Phase 1: Restructure commands (no new functionality)
Refactor `main.rs` into the new resource/action pattern using clap subcommand nesting.
Existing behavior stays the same, just reorganized. Remove the `get` command.
1. Create module structure: `commands/workspace.rs`, `commands/request.rs`, etc.
2. Define nested clap enums:
```rust
enum Commands {
Send(SendArgs),
Workspace(WorkspaceArgs),
Request(RequestArgs),
Folder(FolderArgs),
Environment(EnvironmentArgs),
}
```
3. Move existing `Workspaces` logic into `workspace list`
4. Move existing `Requests` logic into `request list`
5. Move existing `Send` logic into `request send`
6. Move existing `Create` logic into `request create`
7. Delete the `Get` command entirely
8. Extract shared setup (DB init, plugin init, encryption) into a reusable context struct
### Phase 2: Add missing CRUD commands
1. `workspace show <id>`
2. `workspace create --name <name>` (and `--json`)
3. `workspace update --json`
4. `workspace delete <id>`
5. `request show <id>` (JSON output of the full request model)
6. `request delete <id>`
7. `folder list <workspace_id>`
8. `folder show <id>`
9. `folder create <workspace_id> --name <name>` (and `--json`)
10. `folder update --json`
11. `folder delete <id>`
12. `environment list <workspace_id>`
13. `environment show <id>`
14. `environment create <workspace_id> --name <name>` (and `--json`)
15. `environment update --json`
16. `environment delete <id>`
### Phase 3: JSON input for create/update
Both commands accept JSON via `--json <string>` or as a positional argument (detected
by leading `{`). They follow the same upsert pattern as the plugin API.
- **`create --json`**: JSON must include `workspaceId`. Must NOT include `id` (or
use empty string `""`). Deserializes into the model with defaults for missing fields,
then upserts (insert).
- **`update --json`**: JSON must include `id`. Performs a fetch-merge-upsert:
1. Fetch the existing model from DB
2. Serialize it to `serde_json::Value`
3. Deep-merge the user's partial JSON on top (JSON Merge Patch / RFC 7386 semantics)
4. Deserialize back into the typed model
5. Upsert (update)
This matches how the MCP server plugin already does it (fetch existing, spread, override),
but the CLI handles the merge server-side so callers don't have to.
Setting a field to `null` removes it (for `Option<T>` fields), per RFC 7386.
Implementation:
1. Add `--json` flag and positional JSON detection to `create` commands
2. Add `update` commands with required `--json` flag
3. Implement a JSON merge utility (or use the `json-patch` crate, as sketched below)
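For the merge itself, a sketch using the `json-patch` crate's RFC 7386 helper (field values are illustrative):
```rust
use serde_json::json;

fn main() {
    // Existing model fetched from the DB, serialized to a Value.
    let mut existing = json!({"id": "rq_abc", "name": "Old name", "url": "https://old.example"});
    // Partial JSON supplied by the user via --json.
    let patch = json!({"url": "https://new.example", "name": null});
    // RFC 7386 merge: null removes a field, other values overwrite, missing fields are kept.
    json_patch::merge(&mut existing, &patch);
    assert_eq!(existing, json!({"id": "rq_abc", "url": "https://new.example"}));
}
```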
### Phase 4: Runtime schema generation
1. Add `schemars` dependency to `yaak-models`
2. Derive `JsonSchema` on `HttpRequest`, `GrpcRequest`, `WebsocketRequest`, and their
nested types (`HttpRequestHeader`, `HttpUrlParameter`, etc.)
3. Implement `request schema` command:
- Generate base schema from schemars
- Query plugins for auth strategy form inputs
- Convert plugin form inputs into JSON Schema properties
- Merge into the `authentication` field
- Print to stdout
### Phase 5: Polymorphic send
1. Update `request send` to use `get_any_request` to resolve the request type
2. Match on `AnyRequest` variant and dispatch to the appropriate sender:
- `AnyRequest::HttpRequest` — existing HTTP send logic
- `AnyRequest::GrpcRequest` — gRPC invoke (future implementation)
- `AnyRequest::WebsocketRequest` — WebSocket connect (future implementation)
3. gRPC and WebSocket send can initially return "not yet implemented" errors
### Phase 6: Top-level `send` and folder/workspace send
1. Add top-level `yaakcli send <id>` command
2. Resolve ID by trying DB lookups in order: any_request → folder → workspace
3. For folder: list all requests in folder, send each
4. For workspace: list all requests in workspace, send each
5. Add execution options: `--sequential` (default), `--parallel`, `--fail-fast`
## Crate Changes
- **yaak-cli**: restructure into modules, new clap hierarchy
- **yaak-models**: add `schemars` dependency, derive `JsonSchema` on model structs
(current derives: `Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS`)

View File

@@ -57,6 +57,7 @@ url = "2"
tokio-util = { version = "0.7", features = ["codec"] }
ts-rs = { workspace = true }
uuid = "1.12.1"
yaak-api = { workspace = true }
yaak-common = { workspace = true }
yaak-tauri-utils = { workspace = true }
yaak-core = { workspace = true }

View File

@@ -36,7 +36,7 @@ pub enum Error {
PluginError(#[from] yaak_plugins::error::Error),
#[error(transparent)]
TauriUtilsError(#[from] yaak_tauri_utils::error::Error),
ApiError(#[from] yaak_api::Error),
#[error(transparent)]
ClipboardError(#[from] tauri_plugin_clipboard_manager::Error),

View File

@@ -414,7 +414,7 @@ async fn execute_transaction<R: Runtime>(
sendable_request.body = Some(SendableBody::Bytes(bytes));
None
}
Some(SendableBody::Stream(stream)) => {
Some(SendableBody::Stream { data: stream, content_length }) => {
// Wrap stream with TeeReader to capture data as it's read
// Use unbounded channel to ensure all data is captured without blocking the HTTP request
let (body_chunk_tx, body_chunk_rx) = tokio::sync::mpsc::unbounded_channel::<Vec<u8>>();
@@ -448,7 +448,7 @@ async fn execute_transaction<R: Runtime>(
None
};
sendable_request.body = Some(SendableBody::Stream(pinned));
sendable_request.body = Some(SendableBody::Stream { data: pinned, content_length });
handle
}
None => {

View File

@@ -1095,8 +1095,13 @@ async fn cmd_get_http_authentication_config<R: Runtime>(
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
let values_json: serde_json::Value = serde_json::to_value(&values)?;
let rendered_json =
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;
let rendered_json = render_json_value(
values_json,
environment_chain,
&cb,
&RenderOptions::return_empty(),
)
.await?;
// Convert back to HashMap<String, JsonPrimitive>
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;

View File

@@ -10,7 +10,7 @@ use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
use ts_rs::TS;
use yaak_common::platform::get_os_str;
use yaak_models::util::UpdateSource;
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_api::yaak_api_client;
// Check for updates every hour
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;
@@ -101,7 +101,8 @@ impl YaakNotifier {
let license_check = "disabled".to_string();
let launch_info = get_or_upsert_launch_info(app_handle);
let req = yaak_api_client(app_handle)?
let app_version = app_handle.package_info().version.to_string();
let req = yaak_api_client(&app_version)?
.request(Method::GET, "https://notify.yaak.app/notifications")
.query(&[
("version", &launch_info.current_version),

View File

@@ -31,7 +31,7 @@ use yaak_plugins::events::{Color, Icon, PluginContext, ShowToastRequest};
use yaak_plugins::install::{delete_and_uninstall, download_and_install};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::plugin_meta::get_plugin_meta;
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_api::yaak_api_client;
static EXITING: AtomicBool = AtomicBool::new(false);
@@ -72,7 +72,8 @@ impl PluginUpdater {
info!("Checking for plugin updates");
let http_client = yaak_api_client(window.app_handle())?;
let app_version = window.app_handle().package_info().version.to_string();
let http_client = yaak_api_client(&app_version)?;
let plugins = window.app_handle().db().list_plugins()?;
let updates = check_plugin_updates(&http_client, plugins.clone()).await?;
@@ -136,7 +137,8 @@ pub async fn cmd_plugins_search<R: Runtime>(
app_handle: AppHandle<R>,
query: &str,
) -> Result<PluginSearchResponse> {
let http_client = yaak_api_client(&app_handle)?;
let app_version = app_handle.package_info().version.to_string();
let http_client = yaak_api_client(&app_version)?;
Ok(search_plugins(&http_client, query).await?)
}
@@ -147,7 +149,8 @@ pub async fn cmd_plugins_install<R: Runtime>(
version: Option<String>,
) -> Result<()> {
let plugin_manager = Arc::new((*window.state::<PluginManager>()).clone());
let http_client = yaak_api_client(window.app_handle())?;
let app_version = window.app_handle().package_info().version.to_string();
let http_client = yaak_api_client(&app_version)?;
let query_manager = window.state::<yaak_models::query_manager::QueryManager>();
let plugin_context = window.plugin_context();
download_and_install(
@@ -177,7 +180,8 @@ pub async fn cmd_plugins_uninstall<R: Runtime>(
pub async fn cmd_plugins_updates<R: Runtime>(
app_handle: AppHandle<R>,
) -> Result<PluginUpdatesResponse> {
let http_client = yaak_api_client(&app_handle)?;
let app_version = app_handle.package_info().version.to_string();
let http_client = yaak_api_client(&app_version)?;
let plugins = app_handle.db().list_plugins()?;
Ok(check_plugin_updates(&http_client, plugins).await?)
}
@@ -186,7 +190,8 @@ pub async fn cmd_plugins_updates<R: Runtime>(
pub async fn cmd_plugins_update_all<R: Runtime>(
window: WebviewWindow<R>,
) -> Result<Vec<PluginNameVersion>> {
let http_client = yaak_api_client(window.app_handle())?;
let app_version = window.app_handle().package_info().version.to_string();
let http_client = yaak_api_client(&app_version)?;
let plugins = window.db().list_plugins()?;
// Get list of available updates (already filtered to only registry plugins)

View File

@@ -38,6 +38,9 @@ pub async fn render_grpc_request<T: TemplateCallback>(
let mut metadata = Vec::new();
for p in r.metadata.clone() {
if !p.enabled {
continue;
}
metadata.push(HttpRequestHeader {
enabled: p.enabled,
name: parse_and_render(p.name.as_str(), vars, cb, &opt).await?,
@@ -119,6 +122,7 @@ pub async fn render_http_request<T: TemplateCallback>(
let mut body = BTreeMap::new();
for (k, v) in r.body.clone() {
let v = if k == "form" { strip_disabled_form_entries(v) } else { v };
body.insert(k, render_json_value_raw(v, vars, cb, &opt).await?);
}
@@ -161,3 +165,71 @@ pub async fn render_http_request<T: TemplateCallback>(
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
}
/// Strip disabled entries from a JSON array of form objects.
fn strip_disabled_form_entries(v: Value) -> Value {
match v {
Value::Array(items) => Value::Array(
items
.into_iter()
.filter(|item| item.get("enabled").and_then(|e| e.as_bool()).unwrap_or(true))
.collect(),
),
v => v,
}
}
#[cfg(test)]
mod tests {
use super::*;
use serde_json::json;
#[test]
fn test_strip_disabled_form_entries() {
let input = json!([
{"enabled": true, "name": "foo", "value": "bar"},
{"enabled": false, "name": "disabled", "value": "gone"},
{"enabled": true, "name": "baz", "value": "qux"},
]);
let result = strip_disabled_form_entries(input);
assert_eq!(
result,
json!([
{"enabled": true, "name": "foo", "value": "bar"},
{"enabled": true, "name": "baz", "value": "qux"},
])
);
}
#[test]
fn test_strip_disabled_form_entries_all_disabled() {
let input = json!([
{"enabled": false, "name": "a", "value": "b"},
{"enabled": false, "name": "c", "value": "d"},
]);
let result = strip_disabled_form_entries(input);
assert_eq!(result, json!([]));
}
#[test]
fn test_strip_disabled_form_entries_missing_enabled_defaults_to_kept() {
let input = json!([
{"name": "no_enabled_field", "value": "kept"},
{"enabled": false, "name": "disabled", "value": "gone"},
]);
let result = strip_disabled_form_entries(input);
assert_eq!(
result,
json!([
{"name": "no_enabled_field", "value": "kept"},
])
);
}
#[test]
fn test_strip_disabled_form_entries_non_array_passthrough() {
let input = json!("just a string");
let result = strip_disabled_form_entries(input.clone());
assert_eq!(result, input);
}
}

View File

@@ -15,6 +15,9 @@ use ts_rs::TS;
use yaak_models::util::generate_id;
use yaak_plugins::manager::PluginManager;
use url::Url;
use yaak_api::get_system_proxy_url;
use crate::error::Error::GenericError;
use crate::is_dev;
@@ -87,8 +90,13 @@ impl YaakUpdater {
info!("Checking for updates mode={} autodl={}", mode, auto_download);
let w = window.clone();
let update_check_result = w
.updater_builder()
let mut updater_builder = w.updater_builder();
if let Some(proxy_url) = get_system_proxy_url() {
if let Ok(url) = Url::parse(&proxy_url) {
updater_builder = updater_builder.proxy(url);
}
}
let update_check_result = updater_builder
.on_before_exit(move || {
// Kill plugin manager before exit or NSIS installer will fail to replace sidecar
// while it's running.

View File

@@ -12,7 +12,7 @@ use yaak_models::util::generate_id;
use yaak_plugins::events::{Color, ShowToastRequest};
use yaak_plugins::install::download_and_install;
use yaak_plugins::manager::PluginManager;
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_api::yaak_api_client;
pub(crate) async fn handle_deep_link<R: Runtime>(
app_handle: &AppHandle<R>,
@@ -46,7 +46,8 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
let plugin_manager = Arc::new((*window.state::<PluginManager>()).clone());
let query_manager = app_handle.db_manager();
let http_client = yaak_api_client(app_handle)?;
let app_version = app_handle.package_info().version.to_string();
let http_client = yaak_api_client(&app_version)?;
let plugin_context = window.plugin_context();
let pv = download_and_install(
plugin_manager,
@@ -86,7 +87,8 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
return Ok(());
}
let resp = yaak_api_client(app_handle)?.get(file_url).send().await?;
let app_version = app_handle.package_info().version.to_string();
let resp = yaak_api_client(&app_version)?.get(file_url).send().await?;
let json = resp.bytes().await?;
let p = app_handle
.path()

View File

@@ -1,9 +1,6 @@
{
"build": {
"features": [
"updater",
"license"
]
"features": ["updater", "license"]
},
"app": {
"security": {
@@ -11,12 +8,8 @@
"default",
{
"identifier": "release",
"windows": [
"*"
],
"permissions": [
"yaak-license:default"
]
"windows": ["*"],
"permissions": ["yaak-license:default"]
}
]
}
@@ -39,14 +32,7 @@
"createUpdaterArtifacts": true,
"longDescription": "A cross-platform desktop app for interacting with REST, GraphQL, and gRPC",
"shortDescription": "Play with APIs, intuitively",
"targets": [
"app",
"appimage",
"deb",
"dmg",
"nsis",
"rpm"
],
"targets": ["app", "appimage", "deb", "dmg", "nsis", "rpm"],
"macOS": {
"minimumSystemVersion": "13.0",
"exceptionDomain": "",
@@ -58,10 +44,16 @@
},
"linux": {
"deb": {
"desktopTemplate": "./template.desktop"
"desktopTemplate": "./template.desktop",
"files": {
"/usr/share/metainfo/app.yaak.Yaak.metainfo.xml": "../../flatpak/app.yaak.Yaak.metainfo.xml"
}
},
"rpm": {
"desktopTemplate": "./template.desktop"
"desktopTemplate": "./template.desktop",
"files": {
"/usr/share/metainfo/app.yaak.Yaak.metainfo.xml": "../../flatpak/app.yaak.Yaak.metainfo.xml"
}
}
}
}

View File

@@ -16,7 +16,7 @@ thiserror = { workspace = true }
ts-rs = { workspace = true }
yaak-common = { workspace = true }
yaak-models = { workspace = true }
yaak-tauri-utils = { workspace = true }
yaak-api = { workspace = true }
[build-dependencies]
tauri-plugin = { workspace = true, features = ["build"] }

View File

@@ -16,7 +16,7 @@ pub enum Error {
ModelError(#[from] yaak_models::error::Error),
#[error(transparent)]
TauriUtilsError(#[from] yaak_tauri_utils::error::Error),
ApiError(#[from] yaak_api::Error),
#[error("Internal server error")]
ServerError,

View File

@@ -11,7 +11,7 @@ use yaak_common::platform::get_os_str;
use yaak_models::db_context::DbContext;
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_api::yaak_api_client;
/// Extension trait for accessing the QueryManager from Tauri Manager types.
/// This is needed temporarily until all crates are refactored to not use Tauri.
@@ -118,11 +118,12 @@ pub async fn activate_license<R: Runtime>(
license_key: &str,
) -> Result<()> {
info!("Activating license {}", license_key);
let client = reqwest::Client::new();
let app_version = window.app_handle().package_info().version.to_string();
let client = yaak_api_client(&app_version)?;
let payload = ActivateLicenseRequestPayload {
license_key: license_key.to_string(),
app_platform: get_os_str().to_string(),
app_version: window.app_handle().package_info().version.to_string(),
app_version,
};
let response = client.post(build_url("/licenses/activate")).json(&payload).send().await?;
@@ -155,11 +156,12 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
let app_handle = window.app_handle();
let activation_id = get_activation_id(app_handle).await;
let client = reqwest::Client::new();
let app_version = window.app_handle().package_info().version.to_string();
let client = yaak_api_client(&app_version)?;
let path = format!("/licenses/activations/{}/deactivate", activation_id);
let payload = DeactivateLicenseRequestPayload {
app_platform: get_os_str().to_string(),
app_version: window.app_handle().package_info().version.to_string(),
app_version,
};
let response = client.post(build_url(&path)).json(&payload).send().await?;
@@ -186,9 +188,10 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
}
pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<LicenseCheckStatus> {
let app_version = window.app_handle().package_info().version.to_string();
let payload = CheckActivationRequestPayload {
app_platform: get_os_str().to_string(),
app_version: window.package_info().version.to_string(),
app_version,
};
let activation_id = get_activation_id(window.app_handle()).await;
@@ -204,7 +207,7 @@ pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<Lice
(true, _) => {
info!("Checking license activation");
// A license has been activated, so let's check the license server
let client = yaak_api_client(window.app_handle())?;
let client = yaak_api_client(&payload.app_version)?;
let path = format!("/licenses/activations/{activation_id}/check-v2");
let response = client.post(build_url(&path)).json(&payload).send().await?;

View File

@@ -6,8 +6,4 @@ publish = false
[dependencies]
tauri = { workspace = true }
reqwest = { workspace = true, features = ["gzip"] }
thiserror = { workspace = true }
serde = { workspace = true, features = ["derive"] }
regex = "1.11.0"
yaak-common = { workspace = true }

View File

@@ -1,24 +0,0 @@
use crate::error::Result;
use reqwest::Client;
use std::time::Duration;
use tauri::http::{HeaderMap, HeaderValue};
use tauri::{AppHandle, Runtime};
use yaak_common::platform::{get_ua_arch, get_ua_platform};
pub fn yaak_api_client<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Client> {
let platform = get_ua_platform();
let version = app_handle.package_info().version.clone();
let arch = get_ua_arch();
let ua = format!("Yaak/{version} ({platform}; {arch})");
let mut default_headers = HeaderMap::new();
default_headers.insert("Accept", HeaderValue::from_str("application/json").unwrap());
let client = reqwest::ClientBuilder::new()
.timeout(Duration::from_secs(20))
.default_headers(default_headers)
.gzip(true)
.user_agent(ua)
.build()?;
Ok(client)
}

View File

@@ -1,19 +0,0 @@
use serde::{Serialize, Serializer};
use thiserror::Error;
#[derive(Error, Debug)]
pub enum Error {
#[error(transparent)]
ReqwestError(#[from] reqwest::Error),
}
impl Serialize for Error {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(self.to_string().as_ref())
}
}
pub type Result<T> = std::result::Result<T, Error>;

View File

@@ -1,3 +1 @@
pub mod api_client;
pub mod error;
pub mod window;

View File

@@ -0,0 +1,12 @@
[package]
name = "yaak-api"
version = "0.1.0"
edition = "2024"
publish = false
[dependencies]
log = { workspace = true }
reqwest = { workspace = true, features = ["gzip"] }
sysproxy = "0.3"
thiserror = { workspace = true }
yaak-common = { workspace = true }

View File

@@ -0,0 +1,9 @@
use thiserror::Error;
#[derive(Error, Debug)]
pub enum Error {
#[error(transparent)]
ReqwestError(#[from] reqwest::Error),
}
pub type Result<T> = std::result::Result<T, Error>;

View File

@@ -0,0 +1,70 @@
mod error;
pub use error::{Error, Result};
use log::{debug, warn};
use reqwest::Client;
use reqwest::header::{HeaderMap, HeaderValue};
use std::time::Duration;
use yaak_common::platform::{get_ua_arch, get_ua_platform};
/// Build a reqwest Client configured for Yaak's own API calls.
///
/// Includes a custom User-Agent, JSON accept header, 20s timeout, gzip,
/// and automatic OS-level proxy detection via sysproxy.
pub fn yaak_api_client(version: &str) -> Result<Client> {
let platform = get_ua_platform();
let arch = get_ua_arch();
let ua = format!("Yaak/{version} ({platform}; {arch})");
let mut default_headers = HeaderMap::new();
default_headers.insert("Accept", HeaderValue::from_str("application/json").unwrap());
let mut builder = reqwest::ClientBuilder::new()
.timeout(Duration::from_secs(20))
.default_headers(default_headers)
.gzip(true)
.user_agent(ua);
if let Some(sys) = get_enabled_system_proxy() {
let proxy_url = format!("http://{}:{}", sys.host, sys.port);
match reqwest::Proxy::all(&proxy_url) {
Ok(p) => {
let p = if !sys.bypass.is_empty() {
p.no_proxy(reqwest::NoProxy::from_string(&sys.bypass))
} else {
p
};
builder = builder.proxy(p);
}
Err(e) => {
warn!("Failed to configure system proxy: {e}");
}
}
}
Ok(builder.build()?)
}
/// Returns the system proxy URL if one is enabled, e.g. `http://host:port`.
pub fn get_system_proxy_url() -> Option<String> {
let sys = get_enabled_system_proxy()?;
Some(format!("http://{}:{}", sys.host, sys.port))
}
fn get_enabled_system_proxy() -> Option<sysproxy::Sysproxy> {
match sysproxy::Sysproxy::get_system_proxy() {
Ok(sys) if sys.enable => {
debug!("Detected system proxy: http://{}:{}", sys.host, sys.port);
Some(sys)
}
Ok(_) => {
debug!("System proxy detected but not enabled");
None
}
Err(e) => {
debug!("Could not detect system proxy: {e}");
None
}
}
}
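A quick usage sketch of the new client (error handling elided; the notifications URL mirrors the one used by the notifier earlier in this change):
```rust
use yaak_api::yaak_api_client;

async fn fetch_notifications(app_version: &str) -> yaak_api::Result<String> {
    // UA, Accept header, gzip, the 20s timeout, and system proxy detection
    // are all applied by the builder inside yaak_api_client.
    let client = yaak_api_client(app_version)?;
    let body = client
        .get("https://notify.yaak.app/notifications")
        .send()
        .await?
        .text()
        .await?;
    Ok(body)
}
```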

View File

@@ -32,22 +32,30 @@ export interface GitCallbacks {
const onSuccess = () => queryClient.invalidateQueries({ queryKey: ['git'] });
export function useGit(dir: string, callbacks: GitCallbacks) {
export function useGit(dir: string, callbacks: GitCallbacks, refreshKey?: string) {
const mutations = useMemo(() => gitMutations(dir, callbacks), [dir, callbacks]);
const fetchAll = useQuery<void, string>({
queryKey: ['git', 'fetch_all', dir, refreshKey],
queryFn: () => invoke('cmd_git_fetch_all', { dir }),
refetchInterval: 10 * 60_000,
});
return [
{
remotes: useQuery<GitRemote[], string>({
queryKey: ['git', 'remotes', dir],
queryKey: ['git', 'remotes', dir, refreshKey],
queryFn: () => getRemotes(dir),
placeholderData: (prev) => prev,
}),
log: useQuery<GitCommit[], string>({
queryKey: ['git', 'log', dir],
queryKey: ['git', 'log', dir, refreshKey],
queryFn: () => invoke('cmd_git_log', { dir }),
placeholderData: (prev) => prev,
}),
status: useQuery<GitStatusSummary, string>({
refetchOnMount: true,
queryKey: ['git', 'status', dir],
queryKey: ['git', 'status', dir, refreshKey, fetchAll.dataUpdatedAt],
queryFn: () => invoke('cmd_git_status', { dir }),
placeholderData: (prev) => prev,
}),
},
mutations,
@@ -152,10 +160,7 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
},
onSuccess,
}),
fetchAll: createFastMutation<void, string, void>({
mutationKey: ['git', 'fetch_all', dir],
mutationFn: () => invoke('cmd_git_fetch_all', { dir }),
}),
push: createFastMutation<PushResult, string, void>({
mutationKey: ['git', 'push', dir],
mutationFn: push,

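A hypothetical caller sketch showing how a dropdown might drive `refreshKey` (the `Dropdown` component and its `onOpen` prop are illustrative, not from the codebase):
```tsx
import { useState } from 'react';
// Assumes useGit and GitCallbacks are imported from the hook module above.

function GitDropdown({ dir, callbacks }: { dir: string; callbacks: GitCallbacks }) {
  const [openCount, setOpenCount] = useState(0);
  // A new refreshKey changes the query keys, so remotes/log/status refetch each time the
  // menu opens, while placeholderData keeps the previous results visible during the refetch.
  const [{ remotes, log, status }, mutations] = useGit(dir, callbacks, String(openCount));
  return (
    <Dropdown onOpen={() => setOpenCount((n) => n + 1)}>
      {/* render remotes.data / log.data / status.data and wire mutations.push, pull, commit */}
    </Dropdown>
  );
}
```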
View File

@@ -44,43 +44,65 @@ pub async fn git_pull(dir: &Path) -> Result<PullResult> {
(branch_name, remote_name, remote_url)
};
let out = new_binary_command(dir)
// Step 1: fetch the specific branch
// NOTE: We use fetch + merge instead of `git pull` to avoid conflicts with
// global git config (e.g. pull.ff=only) and the background fetch --all.
let fetch_out = new_binary_command(dir)
.await?
.args(["pull", &remote_name, &branch_name])
.args(["fetch", &remote_name, &branch_name])
.env("GIT_TERMINAL_PROMPT", "0")
.output()
.await
.map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
.map_err(|e| GenericError(format!("failed to run git fetch: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = stdout + stderr;
let fetch_stdout = String::from_utf8_lossy(&fetch_out.stdout);
let fetch_stderr = String::from_utf8_lossy(&fetch_out.stderr);
let fetch_combined = format!("{fetch_stdout}{fetch_stderr}");
info!("Pulled status={} {combined}", out.status);
info!("Fetched status={} {fetch_combined}", fetch_out.status);
if combined.to_lowercase().contains("could not read") {
if fetch_combined.to_lowercase().contains("could not read") {
return Ok(PullResult::NeedsCredentials { url: remote_url.to_string(), error: None });
}
if combined.to_lowercase().contains("unable to access") {
if fetch_combined.to_lowercase().contains("unable to access") {
return Ok(PullResult::NeedsCredentials {
url: remote_url.to_string(),
error: Some(combined.to_string()),
error: Some(fetch_combined.to_string()),
});
}
if !out.status.success() {
let combined_lower = combined.to_lowercase();
if combined_lower.contains("cannot fast-forward")
|| combined_lower.contains("not possible to fast-forward")
|| combined_lower.contains("diverged")
if !fetch_out.status.success() {
return Err(GenericError(format!("Failed to fetch: {fetch_combined}")));
}
// Step 2: merge the fetched branch
let ref_name = format!("{}/{}", remote_name, branch_name);
let merge_out = new_binary_command(dir)
.await?
.args(["merge", "--ff-only", &ref_name])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git merge: {e}")))?;
let merge_stdout = String::from_utf8_lossy(&merge_out.stdout);
let merge_stderr = String::from_utf8_lossy(&merge_out.stderr);
let merge_combined = format!("{merge_stdout}{merge_stderr}");
info!("Merged status={} {merge_combined}", merge_out.status);
if !merge_out.status.success() {
let merge_lower = merge_combined.to_lowercase();
if merge_lower.contains("cannot fast-forward")
|| merge_lower.contains("not possible to fast-forward")
|| merge_lower.contains("diverged")
{
return Ok(PullResult::Diverged { remote: remote_name, branch: branch_name });
}
return Err(GenericError(format!("Failed to pull {combined}")));
return Err(GenericError(format!("Failed to merge: {merge_combined}")));
}
if combined.to_lowercase().contains("up to date") {
if merge_combined.to_lowercase().contains("up to date") {
return Ok(PullResult::UpToDate);
}
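
The same two-step flow, sketched as the underlying git invocations (TypeScript via node:child_process, error handling trimmed; pullFastForwardOnly is an illustrative name): fetch only the target branch with terminal prompts disabled, then fast-forward-only merge the fetched ref, so a diverged history fails cleanly instead of being resolved by whatever pull.ff or pull.rebase the user has configured globally.

import { execFile } from 'node:child_process';
import { promisify } from 'node:util';

const run = promisify(execFile);

async function pullFastForwardOnly(dir: string, remote: string, branch: string): Promise<void> {
  // Step 1: fetch just this branch, never prompting for credentials on a TTY.
  await run('git', ['fetch', remote, branch], {
    cwd: dir,
    env: { ...process.env, GIT_TERMINAL_PROMPT: '0' },
  });

  // Step 2: merge the fetched ref. --ff-only refuses to create a merge commit,
  // so a diverged branch surfaces as an error the UI can turn into a prompt.
  await run('git', ['merge', '--ff-only', `${remote}/${branch}`], { cwd: dir });
}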

View File

@@ -12,6 +12,7 @@ bytes = "1.11.1"
cookie = "0.18.1"
flate2 = "1"
futures-util = "0.3"
http-body = "1"
url = "2"
zstd = "0.13"
hyper-util = { version = "0.1.17", default-features = false, features = ["client-legacy"] }

View File

@@ -2,7 +2,9 @@ use crate::decompress::{ContentEncoding, streaming_decoder};
use crate::error::{Error, Result};
use crate::types::{SendableBody, SendableHttpRequest};
use async_trait::async_trait;
use bytes::Bytes;
use futures_util::StreamExt;
use http_body::{Body as HttpBody, Frame, SizeHint};
use reqwest::{Client, Method, Version};
use std::fmt::Display;
use std::pin::Pin;
@@ -413,10 +415,16 @@ impl HttpSender for ReqwestSender {
Some(SendableBody::Bytes(bytes)) => {
req_builder = req_builder.body(bytes);
}
Some(SendableBody::Stream(stream)) => {
// Convert AsyncRead stream to reqwest Body
let stream = tokio_util::io::ReaderStream::new(stream);
let body = reqwest::Body::wrap_stream(stream);
Some(SendableBody::Stream { data, content_length }) => {
// Convert AsyncRead stream to reqwest Body. If content length is
// known, wrap with a SizedBody so hyper can set Content-Length
// automatically (for both HTTP/1.1 and HTTP/2).
let stream = tokio_util::io::ReaderStream::new(data);
let body = if let Some(len) = content_length {
reqwest::Body::wrap(SizedBody::new(stream, len))
} else {
reqwest::Body::wrap_stream(stream)
};
req_builder = req_builder.body(body);
}
}
@@ -520,6 +528,51 @@ impl HttpSender for ReqwestSender {
}
}
/// A wrapper around a byte stream that reports a known content length via
/// `size_hint()`. This lets hyper set the `Content-Length` header
/// automatically based on the body size, without us having to add it as an
/// explicit header — which can cause duplicate `Content-Length` headers and
/// break HTTP/2.
struct SizedBody<S> {
stream: std::sync::Mutex<S>,
remaining: u64,
}
impl<S> SizedBody<S> {
fn new(stream: S, content_length: u64) -> Self {
Self { stream: std::sync::Mutex::new(stream), remaining: content_length }
}
}
impl<S> HttpBody for SizedBody<S>
where
S: futures_util::Stream<Item = std::result::Result<Bytes, std::io::Error>> + Send + Unpin + 'static,
{
type Data = Bytes;
type Error = std::io::Error;
fn poll_frame(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<std::result::Result<Frame<Self::Data>, Self::Error>>> {
let this = self.get_mut();
let mut stream = this.stream.lock().unwrap();
match stream.poll_next_unpin(cx) {
Poll::Ready(Some(Ok(chunk))) => {
this.remaining = this.remaining.saturating_sub(chunk.len() as u64);
Poll::Ready(Some(Ok(Frame::data(chunk))))
}
Poll::Ready(Some(Err(e))) => Poll::Ready(Some(Err(e))),
Poll::Ready(None) => Poll::Ready(None),
Poll::Pending => Poll::Pending,
}
}
fn size_hint(&self) -> SizeHint {
SizeHint::with_exact(self.remaining)
}
}
fn version_to_str(version: &Version) -> String {
match *version {
Version::HTTP_09 => "HTTP/0.9".to_string(),

View File

@@ -16,7 +16,13 @@ pub(crate) const MULTIPART_BOUNDARY: &str = "------YaakFormBoundary";
pub enum SendableBody {
Bytes(Bytes),
Stream(Pin<Box<dyn AsyncRead + Send + 'static>>),
Stream {
data: Pin<Box<dyn AsyncRead + Send + 'static>>,
/// Known content length for the stream, if available. This is used by
/// the sender to set the body size hint so that hyper can set
/// Content-Length automatically for both HTTP/1.1 and HTTP/2.
content_length: Option<u64>,
},
}
enum SendableBodyWithMeta {
@@ -31,7 +37,10 @@ impl From<SendableBodyWithMeta> for SendableBody {
fn from(value: SendableBodyWithMeta) -> Self {
match value {
SendableBodyWithMeta::Bytes(b) => SendableBody::Bytes(b),
SendableBodyWithMeta::Stream { data, .. } => SendableBody::Stream(data),
SendableBodyWithMeta::Stream { data, content_length } => SendableBody::Stream {
data,
content_length: content_length.map(|l| l as u64),
},
}
}
}
@@ -186,23 +195,11 @@ async fn build_body(
}
}
// Check if Transfer-Encoding: chunked is already set
let has_chunked_encoding = headers.iter().any(|h| {
h.0.to_lowercase() == "transfer-encoding" && h.1.to_lowercase().contains("chunked")
});
// Add a Content-Length header only if chunked encoding is not being used
if !has_chunked_encoding {
let content_length = match body {
Some(SendableBodyWithMeta::Bytes(ref bytes)) => Some(bytes.len()),
Some(SendableBodyWithMeta::Stream { content_length, .. }) => content_length,
None => None,
};
if let Some(cl) = content_length {
headers.push(("Content-Length".to_string(), cl.to_string()));
}
}
// NOTE: Content-Length is NOT set as an explicit header here. Instead, the
// body's content length is carried via SendableBody::Stream { content_length }
// and used by the sender to set the body size hint. This lets hyper handle
// Content-Length automatically for both HTTP/1.1 and HTTP/2, avoiding the
// duplicate Content-Length that breaks HTTP/2 servers.
Ok((body.map(|b| b.into()), headers))
}
@@ -928,7 +925,27 @@ mod tests {
}
#[tokio::test]
async fn test_no_content_length_with_chunked_encoding() -> Result<()> {
async fn test_no_content_length_header_added_by_build_body() -> Result<()> {
let mut body = BTreeMap::new();
body.insert("text".to_string(), json!("Hello, World!"));
let headers = vec![];
let (_, result_headers) =
build_body("POST", &Some("text/plain".to_string()), &body, headers).await?;
// Content-Length should NOT be set as an explicit header. Instead, the
// sender uses the body's size_hint to let hyper set it automatically,
// which works correctly for both HTTP/1.1 and HTTP/2.
let has_content_length =
result_headers.iter().any(|h| h.0.to_lowercase() == "content-length");
assert!(!has_content_length, "Content-Length should not be set as an explicit header");
Ok(())
}
#[tokio::test]
async fn test_chunked_encoding_header_preserved() -> Result<()> {
let mut body = BTreeMap::new();
body.insert("text".to_string(), json!("Hello, World!"));
@@ -938,11 +955,6 @@ mod tests {
let (_, result_headers) =
build_body("POST", &Some("text/plain".to_string()), &body, headers).await?;
// Verify that Content-Length is NOT present when Transfer-Encoding: chunked is set
let has_content_length =
result_headers.iter().any(|h| h.0.to_lowercase() == "content-length");
assert!(!has_content_length, "Content-Length should not be present with chunked encoding");
// Verify that the Transfer-Encoding header is still present
let has_chunked = result_headers.iter().any(|h| {
h.0.to_lowercase() == "transfer-encoding" && h.1.to_lowercase().contains("chunked")
@@ -951,31 +963,4 @@ mod tests {
Ok(())
}
#[tokio::test]
async fn test_content_length_without_chunked_encoding() -> Result<()> {
let mut body = BTreeMap::new();
body.insert("text".to_string(), json!("Hello, World!"));
// Headers without Transfer-Encoding: chunked
let headers = vec![];
let (_, result_headers) =
build_body("POST", &Some("text/plain".to_string()), &body, headers).await?;
// Verify that Content-Length IS present when Transfer-Encoding: chunked is NOT set
let content_length_header =
result_headers.iter().find(|h| h.0.to_lowercase() == "content-length");
assert!(
content_length_header.is_some(),
"Content-Length should be present without chunked encoding"
);
assert_eq!(
content_length_header.unwrap().1,
"13",
"Content-Length should match the body size"
);
Ok(())
}
}
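
The Content-Length handling above, reduced to a TypeScript sketch (the type and function names here are illustrative, not the app's actual API): the body carries its own known length, and the sender decides the framing exactly once, so the builder can no longer emit a second Content-Length header.

// Bodies either hold bytes (length is implicit) or a stream plus an optional known length.
type SendableBody =
  | { kind: 'bytes'; bytes: Uint8Array }
  | { kind: 'stream'; data: ReadableStream<Uint8Array>; contentLength?: number };

// The request builder never adds an explicit Content-Length header; the sender
// derives framing from the body itself, so the header cannot be duplicated.
function framingHeader(body: SendableBody | undefined): [string, string] | undefined {
  if (body === undefined) return undefined;
  if (body.kind === 'bytes') return ['content-length', String(body.bytes.length)];
  if (body.contentLength !== undefined) return ['content-length', String(body.contentLength)];
  return ['transfer-encoding', 'chunked']; // unknown length: stream it chunked (HTTP/1.1)
}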

View File

@@ -0,0 +1,8 @@
const { execSync } = require('node:child_process');
if (process.env.SKIP_WASM_BUILD === '1') {
console.log('Skipping wasm-pack build (SKIP_WASM_BUILD=1)');
return;
}
execSync('wasm-pack build --target bundler', { stdio: 'inherit' });

View File

@@ -6,7 +6,7 @@
"scripts": {
"bootstrap": "npm run build",
"build": "run-s build:*",
"build:pack": "wasm-pack build --target bundler",
"build:pack": "node build-wasm.cjs",
"build:clean": "rimraf ./pkg/.gitignore"
},
"devDependencies": {

View File

@@ -81,6 +81,10 @@ impl RenderOptions {
pub fn throw() -> Self {
Self { error_behavior: RenderErrorBehavior::Throw }
}
pub fn return_empty() -> Self {
Self { error_behavior: RenderErrorBehavior::ReturnEmpty }
}
}
impl RenderErrorBehavior {

View File

@@ -16,6 +16,9 @@ pub async fn render_websocket_request<T: TemplateCallback>(
let mut url_parameters = Vec::new();
for p in r.url_parameters.clone() {
if !p.enabled {
continue;
}
url_parameters.push(HttpUrlParameter {
enabled: p.enabled,
name: parse_and_render(&p.name, vars, cb, opt).await?,
@@ -26,6 +29,9 @@ pub async fn render_websocket_request<T: TemplateCallback>(
let mut headers = Vec::new();
for p in r.headers.clone() {
if !p.enabled {
continue;
}
headers.push(HttpRequestHeader {
enabled: p.enabled,
name: parse_and_render(&p.name, vars, cb, opt).await?,

View File

@@ -0,0 +1,57 @@
<?xml version="1.0" encoding="UTF-8"?>
<component type="desktop-application">
<id>app.yaak.Yaak</id>
<name>Yaak</name>
<summary>An offline, Git friendly API Client</summary>
<developer id="app.yaak">
<name>Yaak</name>
</developer>
<metadata_license>MIT</metadata_license>
<project_license>MIT</project_license>
<url type="homepage">https://yaak.app</url>
<url type="bugtracker">https://yaak.app/feedback</url>
<url type="contact">https://yaak.app/feedback</url>
<url type="vcs-browser">https://github.com/mountain-loop/yaak</url>
<description>
<p>
A fast, privacy-first API client for REST, GraphQL, SSE, WebSocket,
and gRPC — built with Tauri, Rust, and React.
</p>
<p>Features include:</p>
<ul>
<li>REST, GraphQL, SSE, WebSocket, and gRPC support</li>
<li>Local-only data, secrets encryption, and zero telemetry</li>
<li>Git-friendly plain-text project storage</li>
<li>Environment variables and template functions</li>
<li>Request chaining and dynamic values</li>
<li>OAuth 2.0, Bearer, Basic, API Key, AWS, JWT, and NTLM authentication</li>
<li>Import from cURL, Postman, Insomnia, and OpenAPI</li>
<li>Extensible plugin system</li>
</ul>
</description>
<launchable type="desktop-id">app.yaak.Yaak.desktop</launchable>
<branding>
<color type="primary" scheme_preference="light">#8b32ff</color>
<color type="primary" scheme_preference="dark">#c293ff</color>
</branding>
<content_rating type="oars-1.1" />
<screenshots>
<screenshot type="default">
<caption>Crafting an API request</caption>
<image>https://assets.yaak.app/uploads/screenshot-BLG1w_2310x1326.png</image>
</screenshot>
</screenshots>
<releases>
<release version="2026.2.0" date="2026-02-10" />
</releases>
</component>

flatpak/fix-lockfile.mjs (new file, 75 lines)
View File

@@ -0,0 +1,75 @@
#!/usr/bin/env node
// Adds missing `resolved` and `integrity` fields to npm package-lock.json.
//
// npm sometimes omits these fields for nested dependencies inside workspace
// packages. This breaks offline installs and tools like flatpak-node-generator
// that need explicit tarball URLs for every package.
//
// Based on https://github.com/grant-dennison/npm-package-lock-add-resolved
// (MIT License, Copyright (c) 2024 Grant Dennison)
import { readFile, writeFile } from "node:fs/promises";
import { get } from "node:https";
const lockfilePath = process.argv[2] || "package-lock.json";
function fetchJson(url) {
return new Promise((resolve, reject) => {
get(url, (res) => {
let data = "";
res.on("data", (chunk) => {
data += chunk;
});
res.on("end", () => {
if (res.statusCode === 200) {
resolve(JSON.parse(data));
} else {
reject(`${url} returned ${res.statusCode} ${res.statusMessage}`);
}
});
res.on("error", reject);
}).on("error", reject);
});
}
async function fillResolved(name, p) {
const version = p.version.replace(/^.*@/, "");
console.log(`Retrieving metadata for ${name}@${version}`);
const metadataUrl = `https://registry.npmjs.com/${name}/${version}`;
const metadata = await fetchJson(metadataUrl);
p.resolved = metadata.dist.tarball;
p.integrity = metadata.dist.integrity;
}
let changesMade = false;
async function fillAllResolved(packages) {
for (const packagePath in packages) {
if (packagePath === "") continue;
if (!packagePath.includes("node_modules/")) continue;
const p = packages[packagePath];
if (p.link) continue;
if (!p.inBundle && !p.bundled && (!p.resolved || !p.integrity)) {
const packageName =
p.name ||
/^npm:(.+?)@.+$/.exec(p.version)?.[1] ||
packagePath.replace(/^.*node_modules\/(?=.+?$)/, "");
await fillResolved(packageName, p);
changesMade = true;
}
}
}
const oldContents = await readFile(lockfilePath, "utf-8");
const packageLock = JSON.parse(oldContents);
await fillAllResolved(packageLock.packages ?? []);
if (changesMade) {
const newContents = JSON.stringify(packageLock, null, 2) + "\n";
await writeFile(lockfilePath, newContents);
console.log(`Updated ${lockfilePath}`);
} else {
console.log("No changes needed.");
}
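
For context, this is the kind of entry the script repairs (the package name, path, and hash below are placeholders): a nested dependency inside a workspace package that npm wrote without resolved/integrity gains both fields, pointing at the registry tarball, so offline tooling like flatpak-node-generator can mirror it.

// Before: npm omitted the tarball URL and hash for a nested workspace dependency.
const before = {
  'packages/some-plugin/node_modules/some-dep': { version: '1.2.3', license: 'MIT' },
};

// After fix-lockfile.mjs: both fields filled from the registry's dist metadata.
const after = {
  'packages/some-plugin/node_modules/some-dep': {
    version: '1.2.3',
    license: 'MIT',
    resolved: 'https://registry.npmjs.org/some-dep/-/some-dep-1.2.3.tgz',
    integrity: 'sha512-<placeholder>',
  },
};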

flatpak/generate-sources.sh (new executable file, 48 lines)
View File

@@ -0,0 +1,48 @@
#!/usr/bin/env bash
#
# Generate offline dependency source files for Flatpak builds.
#
# Prerequisites:
# pip install flatpak-node-generator tomlkit aiohttp
# Clone https://github.com/flatpak/flatpak-builder-tools (for cargo generator)
#
# Usage:
# ./flatpak/generate-sources.sh <flathub-repo-path>
# ./flatpak/generate-sources.sh ../flathub-repo
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
if [ $# -lt 1 ]; then
echo "Usage: $0 <flathub-repo-path>"
echo "Example: $0 ../flathub-repo"
exit 1
fi
FLATHUB_REPO="$(cd "$1" && pwd)"
python3 "$SCRIPT_DIR/flatpak-builder-tools/cargo/flatpak-cargo-generator.py" \
-o "$FLATHUB_REPO/cargo-sources.json" "$REPO_ROOT/Cargo.lock"
TMPDIR=$(mktemp -d)
trap 'rm -rf "$TMPDIR"' EXIT
cp "$REPO_ROOT/package-lock.json" "$TMPDIR/package-lock.json"
cp "$REPO_ROOT/package.json" "$TMPDIR/package.json"
node "$SCRIPT_DIR/fix-lockfile.mjs" "$TMPDIR/package-lock.json"
node -e "
const fs = require('fs');
const p = process.argv[1];
const d = JSON.parse(fs.readFileSync(p, 'utf-8'));
for (const [name, info] of Object.entries(d.packages || {})) {
if (name && (info.link || !info.resolved)) delete d.packages[name];
}
fs.writeFileSync(p, JSON.stringify(d, null, 2));
" "$TMPDIR/package-lock.json"
flatpak-node-generator --no-requests-cache \
-o "$FLATHUB_REPO/node-sources.json" npm "$TMPDIR/package-lock.json"

flatpak/update-manifest.sh (new executable file, 86 lines)
View File

@@ -0,0 +1,86 @@
#!/usr/bin/env bash
#
# Update the Flathub repo for a new release.
#
# Usage:
# ./flatpak/update-manifest.sh <version-tag> <flathub-repo-path>
# ./flatpak/update-manifest.sh v2026.2.0 ../flathub-repo
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
if [ $# -lt 2 ]; then
echo "Usage: $0 <version-tag> <flathub-repo-path>"
echo "Example: $0 v2026.2.0 ../flathub-repo"
exit 1
fi
VERSION_TAG="$1"
VERSION="${VERSION_TAG#v}"
FLATHUB_REPO="$(cd "$2" && pwd)"
MANIFEST="$FLATHUB_REPO/app.yaak.Yaak.yml"
METAINFO="$SCRIPT_DIR/app.yaak.Yaak.metainfo.xml"
if [[ "$VERSION" == *-* ]]; then
echo "Skipping pre-release version '$VERSION_TAG' (only stable releases are published to Flathub)"
exit 0
fi
REPO="mountain-loop/yaak"
COMMIT=$(git ls-remote "https://github.com/$REPO.git" "refs/tags/$VERSION_TAG" | cut -f1)
if [ -z "$COMMIT" ]; then
echo "Error: Could not resolve commit for tag $VERSION_TAG"
exit 1
fi
echo "Tag: $VERSION_TAG"
echo "Commit: $COMMIT"
# Update git tag and commit in the manifest
sed -i "s|tag: v.*|tag: $VERSION_TAG|" "$MANIFEST"
sed -i "s|commit: .*|commit: $COMMIT|" "$MANIFEST"
echo "Updated manifest tag and commit."
# Regenerate offline dependency sources from the tagged lockfiles
TMPDIR=$(mktemp -d)
trap 'rm -rf "$TMPDIR"' EXIT
echo "Fetching lockfiles from $VERSION_TAG..."
curl -fsSL "https://raw.githubusercontent.com/$REPO/$VERSION_TAG/Cargo.lock" -o "$TMPDIR/Cargo.lock"
curl -fsSL "https://raw.githubusercontent.com/$REPO/$VERSION_TAG/package-lock.json" -o "$TMPDIR/package-lock.json"
curl -fsSL "https://raw.githubusercontent.com/$REPO/$VERSION_TAG/package.json" -o "$TMPDIR/package.json"
echo "Generating cargo-sources.json..."
python3 "$SCRIPT_DIR/flatpak-builder-tools/cargo/flatpak-cargo-generator.py" \
-o "$FLATHUB_REPO/cargo-sources.json" "$TMPDIR/Cargo.lock"
echo "Generating node-sources.json..."
node "$SCRIPT_DIR/fix-lockfile.mjs" "$TMPDIR/package-lock.json"
node -e "
const fs = require('fs');
const p = process.argv[1];
const d = JSON.parse(fs.readFileSync(p, 'utf-8'));
for (const [name, info] of Object.entries(d.packages || {})) {
if (name && (info.link || !info.resolved)) delete d.packages[name];
}
fs.writeFileSync(p, JSON.stringify(d, null, 2));
" "$TMPDIR/package-lock.json"
flatpak-node-generator --no-requests-cache \
-o "$FLATHUB_REPO/node-sources.json" npm "$TMPDIR/package-lock.json"
# Update metainfo with new release
TODAY=$(date +%Y-%m-%d)
sed -i "s| <releases>| <releases>\n <release version=\"$VERSION\" date=\"$TODAY\" />|" "$METAINFO"
echo "Updated metainfo with release $VERSION."
echo ""
echo "Done! Review the changes:"
echo " $MANIFEST"
echo " $METAINFO"
echo " $FLATHUB_REPO/cargo-sources.json"
echo " $FLATHUB_REPO/node-sources.json"

package-lock.json (generated, 56 changed lines)
View File

@@ -12,7 +12,7 @@
"packages/plugin-runtime",
"packages/plugin-runtime-types",
"plugins-external/mcp-server",
"plugins-external/template-function-faker",
"plugins-external/faker",
"plugins-external/httpsnippet",
"plugins/action-copy-curl",
"plugins/action-copy-grpcurl",
@@ -3922,13 +3922,6 @@
"@types/react": "*"
}
},
"node_modules/@types/shell-quote": {
"version": "1.7.5",
"resolved": "https://registry.npmjs.org/@types/shell-quote/-/shell-quote-1.7.5.tgz",
"integrity": "sha512-+UE8GAGRPbJVQDdxi16dgadcBfQ+KG2vgZhV1+3A1XmHbmwcdwhCUwIdy+d3pAGrbvgRoVSjeI9vOWyq376Yzw==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/unist": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
@@ -4160,6 +4153,10 @@
"resolved": "plugins/auth-oauth2",
"link": true
},
"node_modules/@yaak/faker": {
"resolved": "plugins-external/faker",
"link": true
},
"node_modules/@yaak/filter-jsonpath": {
"resolved": "plugins/filter-jsonpath",
"link": true
@@ -13405,6 +13402,7 @@
"version": "1.8.3",
"resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz",
"integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -13413,6 +13411,12 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/shlex": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/shlex/-/shlex-3.0.0.tgz",
"integrity": "sha512-jHPXQQk9d/QXCvJuLPYMOYWez3c43sORAgcIEoV7bFv5AJSJRAOyw5lQO12PMfd385qiLRCaDt7OtEzgrIGZUA==",
"license": "MIT"
},
"node_modules/should": {
"version": "13.2.3",
"resolved": "https://registry.npmjs.org/should/-/should-13.2.3.tgz",
@@ -15953,9 +15957,36 @@
"undici-types": "~7.16.0"
}
},
"plugins-external/faker": {
"name": "@yaak/faker",
"version": "1.1.1",
"dependencies": {
"@faker-js/faker": "^10.1.0"
},
"devDependencies": {
"@types/node": "^25.0.3",
"typescript": "^5.9.3"
}
},
"plugins-external/faker/node_modules/@faker-js/faker": {
"version": "10.3.0",
"resolved": "https://registry.npmjs.org/@faker-js/faker/-/faker-10.3.0.tgz",
"integrity": "sha512-It0Sne6P3szg7JIi6CgKbvTZoMjxBZhcv91ZrqrNuaZQfB5WoqYYbzCUOq89YR+VY8juY9M1vDWmDDa2TzfXCw==",
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/fakerjs"
}
],
"license": "MIT",
"engines": {
"node": "^20.19.0 || ^22.13.0 || ^23.5.0 || >=24.0.0",
"npm": ">=10"
}
},
"plugins-external/httpsnippet": {
"name": "@yaak/httpsnippet",
"version": "1.0.0",
"version": "1.0.3",
"dependencies": {
"@readme/httpsnippet": "^11.0.0"
},
@@ -15983,7 +16014,7 @@
},
"plugins-external/mcp-server": {
"name": "@yaak/mcp-server",
"version": "0.1.7",
"version": "0.2.1",
"dependencies": {
"@hono/mcp": "^0.2.3",
"@hono/node-server": "^1.19.7",
@@ -16080,10 +16111,7 @@
"name": "@yaak/importer-curl",
"version": "0.1.0",
"dependencies": {
"shell-quote": "^1.8.1"
},
"devDependencies": {
"@types/shell-quote": "^1.7.5"
"shlex": "^3.0.0"
}
},
"plugins/importer-insomnia": {

View File

@@ -11,7 +11,7 @@
"packages/plugin-runtime",
"packages/plugin-runtime-types",
"plugins-external/mcp-server",
"plugins-external/template-function-faker",
"plugins-external/faker",
"plugins-external/httpsnippet",
"plugins/action-copy-curl",
"plugins/action-copy-grpcurl",

View File

@@ -0,0 +1,233 @@
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
exports[`template-function-faker > exports all expected template functions 1`] = `
[
"faker.airline.aircraftType",
"faker.airline.airline",
"faker.airline.airplane",
"faker.airline.airport",
"faker.airline.flightNumber",
"faker.airline.recordLocator",
"faker.airline.seat",
"faker.animal.bear",
"faker.animal.bird",
"faker.animal.cat",
"faker.animal.cetacean",
"faker.animal.cow",
"faker.animal.crocodilia",
"faker.animal.dog",
"faker.animal.fish",
"faker.animal.horse",
"faker.animal.insect",
"faker.animal.lion",
"faker.animal.petName",
"faker.animal.rabbit",
"faker.animal.rodent",
"faker.animal.snake",
"faker.animal.type",
"faker.color.cmyk",
"faker.color.colorByCSSColorSpace",
"faker.color.cssSupportedFunction",
"faker.color.cssSupportedSpace",
"faker.color.hsl",
"faker.color.human",
"faker.color.hwb",
"faker.color.lab",
"faker.color.lch",
"faker.color.rgb",
"faker.color.space",
"faker.commerce.department",
"faker.commerce.isbn",
"faker.commerce.price",
"faker.commerce.product",
"faker.commerce.productAdjective",
"faker.commerce.productDescription",
"faker.commerce.productMaterial",
"faker.commerce.productName",
"faker.commerce.upc",
"faker.company.buzzAdjective",
"faker.company.buzzNoun",
"faker.company.buzzPhrase",
"faker.company.buzzVerb",
"faker.company.catchPhrase",
"faker.company.catchPhraseAdjective",
"faker.company.catchPhraseDescriptor",
"faker.company.catchPhraseNoun",
"faker.company.name",
"faker.database.collation",
"faker.database.column",
"faker.database.engine",
"faker.database.mongodbObjectId",
"faker.database.type",
"faker.date.anytime",
"faker.date.between",
"faker.date.betweens",
"faker.date.birthdate",
"faker.date.future",
"faker.date.month",
"faker.date.past",
"faker.date.recent",
"faker.date.soon",
"faker.date.timeZone",
"faker.date.weekday",
"faker.finance.accountName",
"faker.finance.accountNumber",
"faker.finance.amount",
"faker.finance.bic",
"faker.finance.bitcoinAddress",
"faker.finance.creditCardCVV",
"faker.finance.creditCardIssuer",
"faker.finance.creditCardNumber",
"faker.finance.currency",
"faker.finance.currencyCode",
"faker.finance.currencyName",
"faker.finance.currencyNumericCode",
"faker.finance.currencySymbol",
"faker.finance.ethereumAddress",
"faker.finance.iban",
"faker.finance.litecoinAddress",
"faker.finance.pin",
"faker.finance.routingNumber",
"faker.finance.transactionDescription",
"faker.finance.transactionType",
"faker.git.branch",
"faker.git.commitDate",
"faker.git.commitEntry",
"faker.git.commitMessage",
"faker.git.commitSha",
"faker.hacker.abbreviation",
"faker.hacker.adjective",
"faker.hacker.ingverb",
"faker.hacker.noun",
"faker.hacker.phrase",
"faker.hacker.verb",
"faker.image.avatar",
"faker.image.avatarGitHub",
"faker.image.dataUri",
"faker.image.personPortrait",
"faker.image.url",
"faker.image.urlLoremFlickr",
"faker.image.urlPicsumPhotos",
"faker.internet.displayName",
"faker.internet.domainName",
"faker.internet.domainSuffix",
"faker.internet.domainWord",
"faker.internet.email",
"faker.internet.emoji",
"faker.internet.exampleEmail",
"faker.internet.httpMethod",
"faker.internet.httpStatusCode",
"faker.internet.ip",
"faker.internet.ipv4",
"faker.internet.ipv6",
"faker.internet.jwt",
"faker.internet.jwtAlgorithm",
"faker.internet.mac",
"faker.internet.password",
"faker.internet.port",
"faker.internet.protocol",
"faker.internet.url",
"faker.internet.userAgent",
"faker.internet.username",
"faker.location.buildingNumber",
"faker.location.cardinalDirection",
"faker.location.city",
"faker.location.continent",
"faker.location.country",
"faker.location.countryCode",
"faker.location.county",
"faker.location.direction",
"faker.location.language",
"faker.location.latitude",
"faker.location.longitude",
"faker.location.nearbyGPSCoordinate",
"faker.location.ordinalDirection",
"faker.location.secondaryAddress",
"faker.location.state",
"faker.location.street",
"faker.location.streetAddress",
"faker.location.timeZone",
"faker.location.zipCode",
"faker.lorem.lines",
"faker.lorem.paragraph",
"faker.lorem.paragraphs",
"faker.lorem.sentence",
"faker.lorem.sentences",
"faker.lorem.slug",
"faker.lorem.text",
"faker.lorem.word",
"faker.lorem.words",
"faker.music.album",
"faker.music.artist",
"faker.music.genre",
"faker.music.songName",
"faker.number.bigInt",
"faker.number.binary",
"faker.number.float",
"faker.number.hex",
"faker.number.int",
"faker.number.octal",
"faker.number.romanNumeral",
"faker.person.bio",
"faker.person.firstName",
"faker.person.fullName",
"faker.person.gender",
"faker.person.jobArea",
"faker.person.jobDescriptor",
"faker.person.jobTitle",
"faker.person.jobType",
"faker.person.lastName",
"faker.person.middleName",
"faker.person.prefix",
"faker.person.sex",
"faker.person.sexType",
"faker.person.suffix",
"faker.person.zodiacSign",
"faker.phone.imei",
"faker.phone.number",
"faker.science.chemicalElement",
"faker.science.unit",
"faker.string.alpha",
"faker.string.alphanumeric",
"faker.string.binary",
"faker.string.fromCharacters",
"faker.string.hexadecimal",
"faker.string.nanoid",
"faker.string.numeric",
"faker.string.octal",
"faker.string.sample",
"faker.string.symbol",
"faker.string.ulid",
"faker.string.uuid",
"faker.system.commonFileExt",
"faker.system.commonFileName",
"faker.system.commonFileType",
"faker.system.cron",
"faker.system.directoryPath",
"faker.system.fileExt",
"faker.system.fileName",
"faker.system.filePath",
"faker.system.fileType",
"faker.system.mimeType",
"faker.system.networkInterface",
"faker.system.semver",
"faker.vehicle.bicycle",
"faker.vehicle.color",
"faker.vehicle.fuel",
"faker.vehicle.manufacturer",
"faker.vehicle.model",
"faker.vehicle.type",
"faker.vehicle.vehicle",
"faker.vehicle.vin",
"faker.vehicle.vrm",
"faker.word.adjective",
"faker.word.adverb",
"faker.word.conjunction",
"faker.word.interjection",
"faker.word.noun",
"faker.word.preposition",
"faker.word.sample",
"faker.word.verb",
"faker.word.words",
]
`;

View File

@@ -1,9 +1,12 @@
import { describe, expect, it } from 'vitest';
describe('formatDatetime', () => {
it('returns formatted current date', async () => {
// Ensure the plugin imports properly
const faker = await import('../src/index');
expect(faker.plugin.templateFunctions?.length).toBe(226);
describe('template-function-faker', () => {
it('exports all expected template functions', async () => {
const { plugin } = await import('../src/index');
const names = plugin.templateFunctions?.map((fn) => fn.name).sort() ?? [];
// Snapshot the full list of exported function names so we catch any
// accidental additions, removals, or renames across faker upgrades.
expect(names).toMatchSnapshot();
});
});

View File

@@ -10,9 +10,6 @@
"test": "vitest --run tests"
},
"dependencies": {
"shell-quote": "^1.8.1"
},
"devDependencies": {
"@types/shell-quote": "^1.7.5"
"shlex": "^3.0.0"
}
}

View File

@@ -7,8 +7,7 @@ import type {
PluginDefinition,
Workspace,
} from '@yaakapp/api';
import type { ControlOperator, ParseEntry } from 'shell-quote';
import { parse } from 'shell-quote';
import { split } from 'shlex';
type AtLeast<T, K extends keyof T> = Partial<T> & Pick<T, K>;
@@ -56,31 +55,89 @@ export const plugin: PluginDefinition = {
};
/**
* Decodes escape sequences in shell $'...' strings
* Handles Unicode escape sequences (\uXXXX) and common escape codes
* Splits raw input into individual shell command strings.
* Handles line continuations, semicolons, and newline-separated curl commands.
*/
function decodeShellString(str: string): string {
return str
.replace(/\\u([0-9a-fA-F]{4})/g, (_, hex) => String.fromCharCode(parseInt(hex, 16)))
.replace(/\\x([0-9a-fA-F]{2})/g, (_, hex) => String.fromCharCode(parseInt(hex, 16)))
.replace(/\\n/g, '\n')
.replace(/\\r/g, '\r')
.replace(/\\t/g, '\t')
.replace(/\\'/g, "'")
.replace(/\\"/g, '"')
.replace(/\\\\/g, '\\');
}
function splitCommands(rawData: string): string[] {
// Join line continuations (backslash-newline, and backslash-CRLF for Windows)
const joined = rawData.replace(/\\\r?\n/g, ' ');
/**
* Checks if a string might contain escape sequences that need decoding
* If so, decodes them; otherwise returns the string as-is
*/
function maybeDecodeEscapeSequences(str: string): string {
// Check if the string contains escape sequences that shell-quote might not handle
if (str.includes('\\u') || str.includes('\\x')) {
return decodeShellString(str);
// Count consecutive backslashes immediately before position i.
// An even count means the character at i is NOT escaped; odd means it IS escaped.
function isEscaped(i: number): boolean {
let backslashes = 0;
let j = i - 1;
while (j >= 0 && joined[j] === '\\') {
backslashes++;
j--;
}
return backslashes % 2 !== 0;
}
return str;
// Split on semicolons and newlines to separate commands
const commands: string[] = [];
let current = '';
let inSingleQuote = false;
let inDoubleQuote = false;
let inDollarQuote = false;
for (let i = 0; i < joined.length; i++) {
const ch = joined[i]!;
const next = joined[i + 1];
// Track quoting state to avoid splitting inside quoted strings
if (!inDoubleQuote && !inDollarQuote && ch === "'" && !inSingleQuote) {
inSingleQuote = true;
current += ch;
continue;
}
if (inSingleQuote && ch === "'") {
inSingleQuote = false;
current += ch;
continue;
}
if (!inSingleQuote && !inDollarQuote && ch === '"' && !inDoubleQuote) {
inDoubleQuote = true;
current += ch;
continue;
}
if (inDoubleQuote && ch === '"' && !isEscaped(i)) {
inDoubleQuote = false;
current += ch;
continue;
}
if (!inSingleQuote && !inDoubleQuote && !inDollarQuote && ch === '$' && next === "'") {
inDollarQuote = true;
current += ch + next;
i++; // Skip the opening quote
continue;
}
if (inDollarQuote && ch === "'" && !isEscaped(i)) {
inDollarQuote = false;
current += ch;
continue;
}
const inQuote = inSingleQuote || inDoubleQuote || inDollarQuote;
// Split on ;, newline, or CRLF when not inside quotes and not escaped
if (!inQuote && !isEscaped(i) && (ch === ';' || ch === '\n' || (ch === '\r' && next === '\n'))) {
if (ch === '\r') i++; // Skip the \n in \r\n
if (current.trim()) {
commands.push(current.trim());
}
current = '';
continue;
}
current += ch;
}
if (current.trim()) {
commands.push(current.trim());
}
return commands;
}
export function convertCurl(rawData: string) {
@@ -88,68 +145,17 @@ export function convertCurl(rawData: string) {
return null;
}
const commands: ParseEntry[][] = [];
const commands: string[][] = splitCommands(rawData).map((cmd) => {
const tokens = split(cmd);
// Replace non-escaped newlines with semicolons to make parsing easier
// NOTE: This is really slow in debug build but fast in release mode
const normalizedData = rawData.replace(/\ncurl/g, '; curl');
let currentCommand: ParseEntry[] = [];
const parsed = parse(normalizedData);
// Break up `-XPOST` into `-X POST`
const normalizedParseEntries = parsed.flatMap((entry) => {
if (
typeof entry === 'string' &&
entry.startsWith('-') &&
!entry.startsWith('--') &&
entry.length > 2
) {
return [entry.slice(0, 2), entry.slice(2)];
}
return entry;
});
for (const parseEntry of normalizedParseEntries) {
if (typeof parseEntry === 'string') {
if (parseEntry.startsWith('$')) {
// Handle $'...' strings from shell-quote - decode escape sequences
currentCommand.push(decodeShellString(parseEntry.slice(1)));
} else {
// Decode escape sequences that shell-quote might not handle
currentCommand.push(maybeDecodeEscapeSequences(parseEntry));
// Break up squished arguments like `-XPOST` into `-X POST`
return tokens.flatMap((token) => {
if (token.startsWith('-') && !token.startsWith('--') && token.length > 2) {
return [token.slice(0, 2), token.slice(2)];
}
continue;
}
if ('comment' in parseEntry) {
continue;
}
const { op } = parseEntry as { op: 'glob'; pattern: string } | { op: ControlOperator };
// `;` separates commands
if (op === ';') {
commands.push(currentCommand);
currentCommand = [];
continue;
}
if (op?.startsWith('$')) {
// Handle the case where literal like -H $'Header: \'Some Quoted Thing\''
const str = decodeShellString(op.slice(2, op.length - 1));
currentCommand.push(str);
continue;
}
if (op === 'glob') {
currentCommand.push((parseEntry as { op: 'glob'; pattern: string }).pattern);
}
}
commands.push(currentCommand);
return token;
});
});
const workspace: ExportResources['workspaces'][0] = {
model: 'workspace',
@@ -169,12 +175,12 @@ export function convertCurl(rawData: string) {
};
}
function importCommand(parseEntries: ParseEntry[], workspaceId: string) {
function importCommand(parseEntries: string[], workspaceId: string) {
// ~~~~~~~~~~~~~~~~~~~~~ //
// Collect all the flags //
// ~~~~~~~~~~~~~~~~~~~~~ //
const flagsByName: FlagsByName = {};
const singletons: ParseEntry[] = [];
const singletons: string[] = [];
// Start at 1 so we can skip the ^curl part
for (let i = 1; i < parseEntries.length; i++) {
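
A short usage sketch of the rewritten importer (the import path is illustrative): line continuations are folded before tokenizing, and an unquoted ';' or newline starts a new request, while a quoted or escaped one does not.

import { convertCurl } from '../src'; // path is illustrative

// Two requests: the backslash-newline continuation is folded away and the
// unquoted ';' separates the commands.
const multi = convertCurl(
  "curl https://yaak.app \\\n  -H 'Accept: application/json'; curl -X POST https://example.com",
);
console.log(multi?.resources.httpRequests.length); // 2

// One request: the ';' lives inside single quotes, so it never splits the input,
// and shlex strips the quotes when tokenizing.
const single = convertCurl("curl -d 'a;b' https://yaak.app");
console.log(single?.resources.httpRequests.length); // 1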

View File

@@ -112,9 +112,28 @@ describe('importer-curl', () => {
});
});
test('Imports with Windows CRLF line endings', () => {
expect(
convertCurl('curl \\\r\n -X POST \\\r\n https://yaak.app'),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({ url: 'https://yaak.app', method: 'POST' }),
],
},
});
});
test('Throws on malformed quotes', () => {
expect(() =>
convertCurl('curl -X POST -F "a=aaa" -F b=bbb" https://yaak.app'),
).toThrow();
});
test('Imports form data', () => {
expect(
convertCurl('curl -X POST -F "a=aaa" -F b=bbb" -F f=@filepath https://yaak.app'),
convertCurl('curl -X POST -F "a=aaa" -F b=bbb -F f=@filepath https://yaak.app'),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
@@ -476,6 +495,130 @@ describe('importer-curl', () => {
});
});
test('Imports JSON body with newlines in $quotes', () => {
expect(
convertCurl(
`curl 'https://yaak.app' -H 'Content-Type: application/json' --data-raw $'{\\n "foo": "bar",\\n "baz": "qux"\\n}' -X POST`,
),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'https://yaak.app',
method: 'POST',
headers: [{ name: 'Content-Type', value: 'application/json', enabled: true }],
bodyType: 'application/json',
body: { text: '{\n "foo": "bar",\n "baz": "qux"\n}' },
}),
],
},
});
});
test('Handles double-quoted string ending with even backslashes before semicolon', () => {
// "C:\\" has two backslashes which escape each other, so the closing " is real.
// The ; after should split into a second command.
expect(
convertCurl(
'curl -d "C:\\\\" https://yaak.app;curl https://example.com',
),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'https://yaak.app',
method: 'POST',
bodyType: 'application/x-www-form-urlencoded',
body: {
form: [{ name: 'C:\\', value: '', enabled: true }],
},
headers: [
{
name: 'Content-Type',
value: 'application/x-www-form-urlencoded',
enabled: true,
},
],
}),
baseRequest({ url: 'https://example.com' }),
],
},
});
});
test('Handles $quoted string ending with a literal backslash before semicolon', () => {
// $'C:\\\\' has two backslashes which become one literal backslash.
// The closing ' must not be misinterpreted as escaped.
// The ; after should split into a second command.
expect(
convertCurl(
"curl -d $'C:\\\\' https://yaak.app;curl https://example.com",
),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'https://yaak.app',
method: 'POST',
bodyType: 'application/x-www-form-urlencoded',
body: {
form: [{ name: 'C:\\', value: '', enabled: true }],
},
headers: [
{
name: 'Content-Type',
value: 'application/x-www-form-urlencoded',
enabled: true,
},
],
}),
baseRequest({ url: 'https://example.com' }),
],
},
});
});
test('Imports $quoted header with escaped single quotes', () => {
expect(
convertCurl(
`curl https://yaak.app -H $'X-Custom: it\\'s a test'`,
),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'https://yaak.app',
headers: [{ name: 'X-Custom', value: "it's a test", enabled: true }],
}),
],
},
});
});
test('Does not split on escaped semicolon outside quotes', () => {
// In shell, \; is a literal semicolon and should not split commands.
// This should be treated as a single curl command with the URL "https://yaak.app?a=1;b=2"
expect(
convertCurl('curl https://yaak.app?a=1\\;b=2'),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'https://yaak.app',
urlParameters: [
{ name: 'a', value: '1;b=2', enabled: true },
],
}),
],
},
});
});
test('Imports multipart form data with text-only fields from --data-raw', () => {
const curlCommand = `curl 'http://example.com/api' \
-H 'Content-Type: multipart/form-data; boundary=----FormBoundary123' \

View File

@@ -1,4 +1,6 @@
const path = require('node:path');
const crypto = require('node:crypto');
const fs = require('node:fs');
const decompress = require('decompress');
const Downloader = require('nodejs-file-downloader');
const { rmSync, cpSync, mkdirSync, existsSync } = require('node:fs');
@@ -41,6 +43,15 @@ const DST_BIN_MAP = {
[WIN_ARM]: 'yaaknode.exe',
};
const SHA256_MAP = {
[MAC_ARM]: 'b05aa3a66efe680023f930bd5af3fdbbd542794da5644ca2ad711d68cbd4dc35',
[MAC_X64]: '096081b6d6fcdd3f5ba0f5f1d44a47e83037ad2e78eada26671c252fe64dd111',
[LNX_ARM]: '0dc93ec5c798b0d347f068db6d205d03dea9a71765e6a53922b682b91265d71f',
[LNX_X64]: '58a5ff5cc8f2200e458bea22e329d5c1994aa1b111d499ca46ec2411d58239ca',
[WIN_X64]: '5355ae6d7c49eddcfde7d34ac3486820600a831bf81dc3bdca5c8db6a9bb0e76',
[WIN_ARM]: 'ce9ee4e547ebdff355beb48e309b166c24df6be0291c9eaf103ce15f3de9e5b4',
};
const key = `${process.platform}_${process.env.YAAK_TARGET_ARCH ?? process.arch}`;
const destDir = path.join(__dirname, `..`, 'crates-tauri', 'yaak-app', 'vendored', 'node');
@@ -68,6 +79,15 @@ rmSync(tmpDir, { recursive: true, force: true });
timeout: 1000 * 60 * 2,
}).download();
// Verify SHA256
const expectedHash = SHA256_MAP[key];
const fileBuffer = fs.readFileSync(filePath);
const actualHash = crypto.createHash('sha256').update(fileBuffer).digest('hex');
if (actualHash !== expectedHash) {
throw new Error(`SHA256 mismatch for ${path.basename(filePath)}\n expected: ${expectedHash}\n actual: ${actualHash}`);
}
console.log('SHA256 verified:', actualHash);
// Decompress to the same directory
await decompress(filePath, tmpDir, {});

View File

@@ -1,3 +1,5 @@
const crypto = require('node:crypto');
const fs = require('node:fs');
const decompress = require('decompress');
const Downloader = require('nodejs-file-downloader');
const path = require('node:path');
@@ -41,6 +43,15 @@ const DST_BIN_MAP = {
[WIN_ARM]: 'yaakprotoc.exe',
};
const SHA256_MAP = {
[MAC_ARM]: 'db7e66ff7f9080614d0f5505a6b0ac488cf89a15621b6a361672d1332ec2e14e',
[MAC_X64]: 'e20b5f930e886da85e7402776a4959efb1ed60c57e72794bcade765e67abaa82',
[LNX_ARM]: '6018147740548e0e0f764408c87f4cd040e6e1c1203e13aeacaf811892b604f3',
[LNX_X64]: 'f3340e28a83d1c637d8bafdeed92b9f7db6a384c26bca880a6e5217b40a4328b',
[WIN_X64]: 'd7a207fb6eec0e4b1b6613be3b7d11905375b6fd1147a071116eb8e9f24ac53b',
[WIN_ARM]: 'd7a207fb6eec0e4b1b6613be3b7d11905375b6fd1147a071116eb8e9f24ac53b',
};
const dstDir = path.join(__dirname, `..`, 'crates-tauri', 'yaak-app', 'vendored', 'protoc');
const key = `${process.platform}_${process.env.YAAK_TARGET_ARCH ?? process.arch}`;
console.log(`Vendoring protoc ${VERSION} for ${key}`);
@@ -63,6 +74,15 @@ mkdirSync(dstDir, { recursive: true });
// Download GitHub release artifact
const { filePath } = await new Downloader({ url, directory: tmpDir }).download();
// Verify SHA256
const expectedHash = SHA256_MAP[key];
const fileBuffer = fs.readFileSync(filePath);
const actualHash = crypto.createHash('sha256').update(fileBuffer).digest('hex');
if (actualHash !== expectedHash) {
throw new Error(`SHA256 mismatch for ${path.basename(filePath)}\n expected: ${expectedHash}\n actual: ${actualHash}`);
}
console.log('SHA256 verified:', actualHash);
// Decompress to the same directory
await decompress(filePath, tmpDir, {});

View File

@@ -434,11 +434,23 @@
input {
@apply bg-surface border-border-subtle focus:border-border-focus;
@apply border outline-none cursor-text;
@apply border outline-none;
}
label {
@apply focus-within:text-text;
input.cm-textfield {
@apply cursor-text;
}
.cm-search label {
@apply inline-flex items-center h-6 px-1.5 rounded-sm border border-border-subtle cursor-default text-text-subtle text-xs;
input[type="checkbox"] {
@apply hidden;
}
&:has(:checked) {
@apply text-primary border-border;
}
}
/* Hide the "All" button */
@@ -446,4 +458,31 @@
button[name="select"] {
@apply hidden;
}
/* Replace next/prev button text with chevron icons */
.cm-search button[name="next"],
.cm-search button[name="prev"] {
@apply text-[0px] w-7 h-6 inline-flex items-center justify-center border border-border-subtle mr-1;
}
.cm-search button[name="prev"]::after,
.cm-search button[name="next"]::after {
@apply block w-3.5 h-3.5 bg-text;
content: "";
}
.cm-search button[name="prev"]::after {
-webkit-mask-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none' stroke='currentColor' stroke-width='2.5' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpath d='M15 18l-6-6 6-6'/%3E%3C/svg%3E");
mask-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none' stroke='currentColor' stroke-width='2.5' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpath d='M15 18l-6-6 6-6'/%3E%3C/svg%3E");
}
.cm-search button[name="next"]::after {
-webkit-mask-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none' stroke='currentColor' stroke-width='2.5' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpath d='M9 18l6-6-6-6'/%3E%3C/svg%3E");
mask-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none' stroke='currentColor' stroke-width='2.5' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpath d='M9 18l6-6-6-6'/%3E%3C/svg%3E");
}
.cm-search-match-count {
@apply text-text-subtle text-xs font-mono whitespace-nowrap px-1.5 py-0.5 self-center;
}
}

View File

@@ -67,6 +67,7 @@ import type { TwigCompletionOption } from './twig/completion';
import { twig } from './twig/extension';
import { pathParametersPlugin } from './twig/pathParameters';
import { url } from './url/extension';
import { searchMatchCount } from './searchMatchCount';
export const syntaxHighlightStyle = HighlightStyle.define([
{
@@ -256,6 +257,7 @@ export const readonlyExtensions = [
export const multiLineExtensions = ({ hideGutter }: { hideGutter?: boolean }) => [
search({ top: true }),
searchMatchCount(),
hideGutter
? []
: [

View File

@@ -0,0 +1,116 @@
import { getSearchQuery, searchPanelOpen } from '@codemirror/search';
import type { Extension } from '@codemirror/state';
import { type EditorView, ViewPlugin, type ViewUpdate } from '@codemirror/view';
/**
* A CodeMirror extension that displays the total number of search matches
* inside the built-in search panel.
*/
export function searchMatchCount(): Extension {
return ViewPlugin.fromClass(
class {
private countEl: HTMLElement | null = null;
constructor(private view: EditorView) {
this.updateCount();
}
update(update: ViewUpdate) {
// Recompute when doc changes, search state changes, or selection moves
const query = getSearchQuery(update.state);
const prevQuery = getSearchQuery(update.startState);
const open = searchPanelOpen(update.state);
const prevOpen = searchPanelOpen(update.startState);
if (update.docChanged || update.selectionSet || !query.eq(prevQuery) || open !== prevOpen) {
this.updateCount();
}
}
private updateCount() {
const state = this.view.state;
const open = searchPanelOpen(state);
const query = getSearchQuery(state);
if (!open) {
this.removeCountEl();
return;
}
this.ensureCountEl();
if (!query.search) {
if (this.countEl) {
this.countEl.textContent = '0/0';
}
return;
}
const selection = state.selection.main;
let count = 0;
let currentIndex = 0;
const MAX_COUNT = 9999;
const cursor = query.getCursor(state);
for (let result = cursor.next(); !result.done; result = cursor.next()) {
count++;
const match = result.value;
if (match.from <= selection.from && match.to >= selection.to) {
currentIndex = count;
}
if (count > MAX_COUNT) break;
}
if (this.countEl) {
if (count > MAX_COUNT) {
this.countEl.textContent = `${MAX_COUNT}+`;
} else if (count === 0) {
this.countEl.textContent = '0/0';
} else if (currentIndex > 0) {
this.countEl.textContent = `${currentIndex}/${count}`;
} else {
this.countEl.textContent = `0/${count}`;
}
}
}
private ensureCountEl() {
// Find the search panel in the editor DOM
const panel = this.view.dom.querySelector('.cm-search');
if (!panel) {
this.countEl = null;
return;
}
if (this.countEl && this.countEl.parentElement === panel) {
return; // Already attached
}
this.countEl = document.createElement('span');
this.countEl.className = 'cm-search-match-count';
// Reorder: place the count right after the search input, followed by the prev and next buttons
const searchInput = panel.querySelector('input');
const prevBtn = panel.querySelector('button[name="prev"]');
const nextBtn = panel.querySelector('button[name="next"]');
if (searchInput && searchInput.parentElement === panel) {
searchInput.after(this.countEl);
if (prevBtn) this.countEl.after(prevBtn);
if (nextBtn && prevBtn) prevBtn.after(nextBtn);
} else {
panel.prepend(this.countEl);
}
}
private removeCountEl() {
if (this.countEl) {
this.countEl.remove();
this.countEl = null;
}
}
destroy() {
this.removeCountEl();
}
},
);
}

View File

@@ -176,7 +176,11 @@ export function GitCommitDialog({ syncDir, onDone, workspace }: Props) {
}
if (!hasAnythingToAdd) {
return <EmptyStateText>No changes since last commit</EmptyStateText>;
return (
<div className="h-full px-6 pb-4">
<EmptyStateText>No changes since last commit</EmptyStateText>
</div>
);
}
return (
@@ -230,14 +234,14 @@ export function GitCommitDialog({ syncDir, onDone, workspace }: Props) {
hideLabel
/>
{commitError && <Banner color="danger">{commitError}</Banner>}
<HStack alignItems="center">
<HStack alignItems="center" space={2}>
<InlineCode>{status.data?.headRefShorthand}</InlineCode>
<HStack space={2} className="ml-auto">
<Button
color="secondary"
size="sm"
onClick={handleCreateCommit}
disabled={!hasAddedAnything}
disabled={!hasAddedAnything || message.trim().length === 0}
isLoading={isPushing}
>
Commit
@@ -245,7 +249,7 @@ export function GitCommitDialog({ syncDir, onDone, workspace }: Props) {
<Button
color="primary"
size="sm"
disabled={!hasAddedAnything}
disabled={!hasAddedAnything || message.trim().length === 0}
onClick={handleCreateCommitAndPush}
isLoading={isPushing}
>

View File

@@ -7,6 +7,7 @@ import { forwardRef } from 'react';
import { openWorkspaceSettings } from '../../commands/openWorkspaceSettings';
import { activeWorkspaceAtom, activeWorkspaceMetaAtom } from '../../hooks/useActiveWorkspace';
import { useKeyValue } from '../../hooks/useKeyValue';
import { useRandomKey } from '../../hooks/useRandomKey';
import { sync } from '../../init/sync';
import { showConfirm, showConfirmDelete } from '../../lib/confirm';
import { showDialog } from '../../lib/dialog';
@@ -36,6 +37,7 @@ export function GitDropdown() {
function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
const workspace = useAtomValue(activeWorkspaceAtom);
const [refreshKey, regenerateKey] = useRandomKey();
const [
{ status, log },
{
@@ -43,7 +45,6 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
deleteBranch,
deleteRemoteBranch,
renameBranch,
fetchAll,
mergeBranch,
push,
pull,
@@ -51,7 +52,7 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
resetChanges,
init,
},
] = useGit(syncDir, gitCallbacks(syncDir));
] = useGit(syncDir, gitCallbacks(syncDir), refreshKey);
const localBranches = status.data?.localBranches ?? [];
const remoteBranches = status.data?.remoteBranches ?? [];
@@ -172,7 +173,6 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
{ type: 'separator' },
{
label: 'Push',
disabled: !hasRemotes || ahead === 0,
leftSlot: <Icon icon="arrow_up_from_line" />,
waitForOnSelect: true,
async onSelect() {
@@ -191,7 +191,6 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
},
{
label: 'Pull',
disabled: !hasRemotes || behind === 0,
leftSlot: <Icon icon="arrow_down_to_line" />,
waitForOnSelect: true,
async onSelect() {
@@ -210,7 +209,7 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
},
{
label: 'Commit...',
disabled: !hasChanges,
leftSlot: <Icon icon="git_commit_vertical" />,
onSelect() {
showDialog({
@@ -502,15 +501,25 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
];
return (
<Dropdown fullWidth items={items} onOpen={fetchAll.mutate}>
<Dropdown fullWidth items={items} onOpen={regenerateKey}>
<GitMenuButton>
<InlineCode className="flex items-center gap-1">
<Icon icon="git_branch" size="xs" className="opacity-50" />
{currentBranch}
</InlineCode>
<div className="flex items-center gap-1.5">
{ahead > 0 && <span className="text-xs flex items-center gap-0.5"><span className="text-primary"></span>{ahead}</span>}
{behind > 0 && <span className="text-xs flex items-center gap-0.5"><span className="text-info"></span>{behind}</span>}
{ahead > 0 && (
<span className="text-xs flex items-center gap-0.5">
<span className="text-primary"></span>
{ahead}
</span>
)}
{behind > 0 && (
<span className="text-xs flex items-center gap-0.5">
<span className="text-info"></span>
{behind}
</span>
)}
</div>
</GitMenuButton>
</Dropdown>

View File

@@ -2,13 +2,13 @@ import type { GitCallbacks } from '@yaakapp-internal/git';
import { sync } from '../../init/sync';
import { promptCredentials } from './credentials';
import { promptDivergedStrategy } from './diverged';
import { promptUncommittedChangesStrategy } from './uncommitted';
import { addGitRemote } from './showAddRemoteDialog';
import { promptUncommittedChangesStrategy } from './uncommitted';
export function gitCallbacks(dir: string): GitCallbacks {
return {
addRemote: async () => {
return addGitRemote(dir);
return addGitRemote(dir, 'origin');
},
promptCredentials: async ({ url, error }) => {
const creds = await promptCredentials({ url, error });

View File

@@ -3,12 +3,12 @@ import { gitMutations } from '@yaakapp-internal/git';
import { showPromptForm } from '../../lib/prompt-form';
import { gitCallbacks } from './callbacks';
export async function addGitRemote(dir: string): Promise<GitRemote> {
export async function addGitRemote(dir: string, defaultName?: string): Promise<GitRemote> {
const r = await showPromptForm({
id: 'add-remote',
title: 'Add Remote',
inputs: [
{ type: 'text', label: 'Name', name: 'name' },
{ type: 'text', label: 'Name', name: 'name', defaultValue: defaultName },
{ type: 'text', label: 'URL', name: 'url' },
],
});