Compare commits

...

20 Commits

Author SHA1 Message Date
Gregory Schier
26aba6034f Move faker plugin back to external (plugins-external/faker) 2026-02-11 10:22:20 -08:00
Gregory Schier
9a1d613034 Fix RPM bundle path validation for metainfo file 2026-02-11 08:14:16 -08:00
Gregory Schier
3e4de7d3c4 Move build scripts to Flathub repo, keep release prep scripts here 2026-02-11 07:28:56 -08:00
Gregory Schier
b64b5ec0f8 Refresh Git dropdown data on open and fetch periodically
- Add refreshKey to useGit queries so dropdown data refreshes on open
- Convert fetchAll from mutation to query with 10-minute refetch interval
- Re-run status query after fetchAll completes via dataUpdatedAt key
- Use placeholderData to keep previous data during key changes
- Remove disabled state from Push, Pull, and Commit menu items
2026-02-11 07:20:53 -08:00
Gregory Schier
510d1c7d17 Remove Flatpak manifest (lives in Flathub repo) 2026-02-11 06:32:39 -08:00
Gregory Schier
ed13a62269 Use static desktop file and clean up manifest comments 2026-02-11 06:30:09 -08:00
Gregory Schier
935d613959 Move lockfile patch to standalone script 2026-02-10 23:35:14 -08:00
Gregory Schier
adeaaccc45 Add v2026.2.0 release to metainfo, simplify CI workflow
- Metainfo is managed upstream (updated before tagging)
- CI no longer modifies metainfo; just copies manifest and sources to Flathub
- Flathub manifest installs metainfo from git source
- Permissions reverted to read-only
2026-02-10 23:29:27 -08:00
Gregory Schier
d253093333 Revert "Simplify CI: metainfo releases only accumulate in Flathub repo"
This reverts commit f265b7a572.
2026-02-10 23:26:52 -08:00
Gregory Schier
f265b7a572 Simplify CI: metainfo releases only accumulate in Flathub repo
- Remove metainfo update from update-manifest.sh
- Remove CI step that committed metainfo back to app repo
- Revert permissions back to read-only
- CI now inserts release entry directly into Flathub repo's metainfo
2026-02-10 23:26:22 -08:00
Gregory Schier
68b2ff016f CI: rewrite metainfo paths for Flathub repo 2026-02-10 23:24:09 -08:00
Gregory Schier
a1c6295810 Clean up Flatpak manifest for v2026.2.0
- Update tag to v2026.2.0
- Use SKIP_WASM_BUILD env var instead of build-time package.json patch
- Install metainfo from git source (remove temporary type: file source)
- Fix fix-lockfile.mjs to skip workspace packages
- CI: commit metainfo releases back to app repo, bump permissions to write
2026-02-10 23:19:23 -08:00
Gregory Schier
76ee3fa61b Flatpak: build from source instead of repackaging debs (#389) 2026-02-10 23:05:33 -08:00
Gregory Schier
7fef35ce0a Ship metainfo in deb, remove from Flatpak manifest 2026-02-10 15:26:40 -08:00
Gregory Schier
654af09951 Bump GNOME runtime to 49, fix corrupted arm64 SHA256 2026-02-10 15:22:51 -08:00
Gregory Schier
484dcfade0 Add Flatpak and Flathub packaging support (#388) 2026-02-10 14:38:40 -08:00
Gregory Schier
fda18c5434 Snapshot faker template function names in test
Replace the brittle count assertion (toBe(226)) with a snapshot of all
exported function names. This catches accidental additions, removals,
or renames across faker upgrades with a clear diff.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-09 10:22:03 -08:00
Gregory Schier
a8176d6e9e Skip disabled key-value entries during request rendering
Skip disabled headers, metadata, URL parameters, and form body
entries in the render phase for HTTP, gRPC, and WebSocket requests.
Previously, disabled entries were still template-rendered even though
they were filtered out later at the use site.
2026-02-09 10:17:43 -08:00
Gregory Schier
957d8d9d46 Move faker plugin from external to bundled 2026-02-09 08:43:49 -08:00
Gregory Schier
5f18bf25e2 Replace shell-quote with shlex for curl import (#387) 2026-02-09 08:22:11 -08:00
22 changed files with 1017 additions and 148 deletions

.github/workflows/flathub.yml vendored Normal file

@@ -0,0 +1,52 @@
name: Update Flathub

on:
  release:
    types: [published]

permissions:
  contents: read

jobs:
  update-flathub:
    name: Update Flathub manifest
    runs-on: ubuntu-latest
    # Only run for stable releases (skip betas/pre-releases)
    if: ${{ !github.event.release.prerelease }}
    steps:
      - name: Checkout app repo
        uses: actions/checkout@v4

      - name: Checkout Flathub repo
        uses: actions/checkout@v4
        with:
          repository: flathub/app.yaak.Yaak
          token: ${{ secrets.FLATHUB_TOKEN }}
          path: flathub-repo

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "22"

      - name: Install source generators
        run: |
          pip install flatpak-node-generator tomlkit aiohttp
          git clone --depth 1 https://github.com/flatpak/flatpak-builder-tools flatpak/flatpak-builder-tools

      - name: Run update-manifest.sh
        run: bash flatpak/update-manifest.sh "${{ github.event.release.tag_name }}" flathub-repo

      - name: Commit and push to Flathub
        working-directory: flathub-repo
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git add -A
          git diff --cached --quiet && echo "No changes to commit" && exit 0
          git commit -m "Update to ${{ github.event.release.tag_name }}"
          git push

.gitignore vendored

@@ -44,3 +44,10 @@ crates-tauri/yaak-app/tauri.worktree.conf.json
# Tauri auto-generated permission files
**/permissions/autogenerated
**/permissions/schemas
# Flatpak build artifacts
flatpak-repo/
.flatpak-builder/
flatpak/flatpak-builder-tools/
flatpak/cargo-sources.json
flatpak/node-sources.json


@@ -38,6 +38,9 @@ pub async fn render_grpc_request<T: TemplateCallback>(
let mut metadata = Vec::new();
for p in r.metadata.clone() {
if !p.enabled {
continue;
}
metadata.push(HttpRequestHeader {
enabled: p.enabled,
name: parse_and_render(p.name.as_str(), vars, cb, &opt).await?,
@@ -119,6 +122,7 @@ pub async fn render_http_request<T: TemplateCallback>(
let mut body = BTreeMap::new();
for (k, v) in r.body.clone() {
let v = if k == "form" { strip_disabled_form_entries(v) } else { v };
body.insert(k, render_json_value_raw(v, vars, cb, &opt).await?);
}
@@ -161,3 +165,71 @@ pub async fn render_http_request<T: TemplateCallback>(
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
}
/// Strip disabled entries from a JSON array of form objects.
fn strip_disabled_form_entries(v: Value) -> Value {
match v {
Value::Array(items) => Value::Array(
items
.into_iter()
.filter(|item| item.get("enabled").and_then(|e| e.as_bool()).unwrap_or(true))
.collect(),
),
v => v,
}
}
#[cfg(test)]
mod tests {
use super::*;
use serde_json::json;
#[test]
fn test_strip_disabled_form_entries() {
let input = json!([
{"enabled": true, "name": "foo", "value": "bar"},
{"enabled": false, "name": "disabled", "value": "gone"},
{"enabled": true, "name": "baz", "value": "qux"},
]);
let result = strip_disabled_form_entries(input);
assert_eq!(
result,
json!([
{"enabled": true, "name": "foo", "value": "bar"},
{"enabled": true, "name": "baz", "value": "qux"},
])
);
}
#[test]
fn test_strip_disabled_form_entries_all_disabled() {
let input = json!([
{"enabled": false, "name": "a", "value": "b"},
{"enabled": false, "name": "c", "value": "d"},
]);
let result = strip_disabled_form_entries(input);
assert_eq!(result, json!([]));
}
#[test]
fn test_strip_disabled_form_entries_missing_enabled_defaults_to_kept() {
let input = json!([
{"name": "no_enabled_field", "value": "kept"},
{"enabled": false, "name": "disabled", "value": "gone"},
]);
let result = strip_disabled_form_entries(input);
assert_eq!(
result,
json!([
{"name": "no_enabled_field", "value": "kept"},
])
);
}
#[test]
fn test_strip_disabled_form_entries_non_array_passthrough() {
let input = json!("just a string");
let result = strip_disabled_form_entries(input.clone());
assert_eq!(result, input);
}
}


@@ -1,9 +1,6 @@
{
"build": {
"features": [
"updater",
"license"
]
"features": ["updater", "license"]
},
"app": {
"security": {
@@ -11,12 +8,8 @@
"default",
{
"identifier": "release",
"windows": [
"*"
],
"permissions": [
"yaak-license:default"
]
"windows": ["*"],
"permissions": ["yaak-license:default"]
}
]
}
@@ -39,14 +32,7 @@
"createUpdaterArtifacts": true,
"longDescription": "A cross-platform desktop app for interacting with REST, GraphQL, and gRPC",
"shortDescription": "Play with APIs, intuitively",
"targets": [
"app",
"appimage",
"deb",
"dmg",
"nsis",
"rpm"
],
"targets": ["app", "appimage", "deb", "dmg", "nsis", "rpm"],
"macOS": {
"minimumSystemVersion": "13.0",
"exceptionDomain": "",
@@ -58,10 +44,16 @@
},
"linux": {
"deb": {
"desktopTemplate": "./template.desktop"
"desktopTemplate": "./template.desktop",
"files": {
"/usr/share/metainfo/app.yaak.Yaak.metainfo.xml": "../../flatpak/app.yaak.Yaak.metainfo.xml"
}
},
"rpm": {
"desktopTemplate": "./template.desktop"
"desktopTemplate": "./template.desktop",
"files": {
"/usr/share/metainfo/app.yaak.Yaak.metainfo.xml": "../../flatpak/app.yaak.Yaak.metainfo.xml"
}
}
}
}


@@ -32,22 +32,30 @@ export interface GitCallbacks {
const onSuccess = () => queryClient.invalidateQueries({ queryKey: ['git'] });
export function useGit(dir: string, callbacks: GitCallbacks) {
export function useGit(dir: string, callbacks: GitCallbacks, refreshKey?: string) {
const mutations = useMemo(() => gitMutations(dir, callbacks), [dir, callbacks]);
const fetchAll = useQuery<void, string>({
queryKey: ['git', 'fetch_all', dir, refreshKey],
queryFn: () => invoke('cmd_git_fetch_all', { dir }),
refetchInterval: 10 * 60_000,
});
return [
{
remotes: useQuery<GitRemote[], string>({
queryKey: ['git', 'remotes', dir],
queryKey: ['git', 'remotes', dir, refreshKey],
queryFn: () => getRemotes(dir),
placeholderData: (prev) => prev,
}),
log: useQuery<GitCommit[], string>({
queryKey: ['git', 'log', dir],
queryKey: ['git', 'log', dir, refreshKey],
queryFn: () => invoke('cmd_git_log', { dir }),
placeholderData: (prev) => prev,
}),
status: useQuery<GitStatusSummary, string>({
refetchOnMount: true,
queryKey: ['git', 'status', dir],
queryKey: ['git', 'status', dir, refreshKey, fetchAll.dataUpdatedAt],
queryFn: () => invoke('cmd_git_status', { dir }),
placeholderData: (prev) => prev,
}),
},
mutations,
@@ -152,10 +160,7 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
},
onSuccess,
}),
fetchAll: createFastMutation<void, string, void>({
mutationKey: ['git', 'fetch_all', dir],
mutationFn: () => invoke('cmd_git_fetch_all', { dir }),
}),
push: createFastMutation<PushResult, string, void>({
mutationKey: ['git', 'push', dir],
mutationFn: push,


@@ -0,0 +1,8 @@
const { execSync } = require('node:child_process');
if (process.env.SKIP_WASM_BUILD === '1') {
console.log('Skipping wasm-pack build (SKIP_WASM_BUILD=1)');
return;
}
execSync('wasm-pack build --target bundler', { stdio: 'inherit' });


@@ -6,7 +6,7 @@
"scripts": {
"bootstrap": "npm run build",
"build": "run-s build:*",
"build:pack": "wasm-pack build --target bundler",
"build:pack": "node build-wasm.cjs",
"build:clean": "rimraf ./pkg/.gitignore"
},
"devDependencies": {


@@ -16,6 +16,9 @@ pub async fn render_websocket_request<T: TemplateCallback>(
let mut url_parameters = Vec::new();
for p in r.url_parameters.clone() {
if !p.enabled {
continue;
}
url_parameters.push(HttpUrlParameter {
enabled: p.enabled,
name: parse_and_render(&p.name, vars, cb, opt).await?,
@@ -26,6 +29,9 @@ pub async fn render_websocket_request<T: TemplateCallback>(
let mut headers = Vec::new();
for p in r.headers.clone() {
if !p.enabled {
continue;
}
headers.push(HttpRequestHeader {
enabled: p.enabled,
name: parse_and_render(&p.name, vars, cb, opt).await?,


@@ -0,0 +1,57 @@
<?xml version="1.0" encoding="UTF-8"?>
<component type="desktop-application">
<id>app.yaak.Yaak</id>
<name>Yaak</name>
<summary>An offline, Git friendly API Client</summary>
<developer id="app.yaak">
<name>Yaak</name>
</developer>
<metadata_license>MIT</metadata_license>
<project_license>MIT</project_license>
<url type="homepage">https://yaak.app</url>
<url type="bugtracker">https://yaak.app/feedback</url>
<url type="contact">https://yaak.app/feedback</url>
<url type="vcs-browser">https://github.com/mountain-loop/yaak</url>
<description>
<p>
A fast, privacy-first API client for REST, GraphQL, SSE, WebSocket,
and gRPC — built with Tauri, Rust, and React.
</p>
<p>Features include:</p>
<ul>
<li>REST, GraphQL, SSE, WebSocket, and gRPC support</li>
<li>Local-only data, secrets encryption, and zero telemetry</li>
<li>Git-friendly plain-text project storage</li>
<li>Environment variables and template functions</li>
<li>Request chaining and dynamic values</li>
<li>OAuth 2.0, Bearer, Basic, API Key, AWS, JWT, and NTLM authentication</li>
<li>Import from cURL, Postman, Insomnia, and OpenAPI</li>
<li>Extensible plugin system</li>
</ul>
</description>
<launchable type="desktop-id">app.yaak.Yaak.desktop</launchable>
<branding>
<color type="primary" scheme_preference="light">#8b32ff</color>
<color type="primary" scheme_preference="dark">#c293ff</color>
</branding>
<content_rating type="oars-1.1" />
<screenshots>
<screenshot type="default">
<caption>Crafting an API request</caption>
<image>https://assets.yaak.app/uploads/screenshot-BLG1w_2310x1326.png</image>
</screenshot>
</screenshots>
<releases>
<release version="2026.2.0" date="2026-02-10" />
</releases>
</component>

flatpak/fix-lockfile.mjs Normal file

@@ -0,0 +1,75 @@
#!/usr/bin/env node
// Adds missing `resolved` and `integrity` fields to npm package-lock.json.
//
// npm sometimes omits these fields for nested dependencies inside workspace
// packages. This breaks offline installs and tools like flatpak-node-generator
// that need explicit tarball URLs for every package.
//
// Based on https://github.com/grant-dennison/npm-package-lock-add-resolved
// (MIT License, Copyright (c) 2024 Grant Dennison)
import { readFile, writeFile } from "node:fs/promises";
import { get } from "node:https";
const lockfilePath = process.argv[2] || "package-lock.json";
function fetchJson(url) {
return new Promise((resolve, reject) => {
get(url, (res) => {
let data = "";
res.on("data", (chunk) => {
data += chunk;
});
res.on("end", () => {
if (res.statusCode === 200) {
resolve(JSON.parse(data));
} else {
reject(`${url} returned ${res.statusCode} ${res.statusMessage}`);
}
});
res.on("error", reject);
}).on("error", reject);
});
}
async function fillResolved(name, p) {
const version = p.version.replace(/^.*@/, "");
console.log(`Retrieving metadata for ${name}@${version}`);
const metadataUrl = `https://registry.npmjs.com/${name}/${version}`;
const metadata = await fetchJson(metadataUrl);
p.resolved = metadata.dist.tarball;
p.integrity = metadata.dist.integrity;
}
let changesMade = false;
async function fillAllResolved(packages) {
for (const packagePath in packages) {
if (packagePath === "") continue;
if (!packagePath.includes("node_modules/")) continue;
const p = packages[packagePath];
if (p.link) continue;
if (!p.inBundle && !p.bundled && (!p.resolved || !p.integrity)) {
const packageName =
p.name ||
/^npm:(.+?)@.+$/.exec(p.version)?.[1] ||
packagePath.replace(/^.*node_modules\/(?=.+?$)/, "");
await fillResolved(packageName, p);
changesMade = true;
}
}
}
const oldContents = await readFile(lockfilePath, "utf-8");
const packageLock = JSON.parse(oldContents);
await fillAllResolved(packageLock.packages ?? []);
if (changesMade) {
const newContents = JSON.stringify(packageLock, null, 2) + "\n";
await writeFile(lockfilePath, newContents);
console.log(`Updated ${lockfilePath}`);
} else {
console.log("No changes needed.");
}
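
For a concrete picture of what this script changes, an affected lockfile entry looks roughly like the sketch below before and after a run (TypeScript object literals standing in for the JSON; the package used here is only an illustration and the integrity hash is elided):

// Before: npm omitted the tarball fields for a dependency nested under a workspace.
const before = {
  'node_modules/left-pad': { version: '1.3.0', license: 'WTFPL' },
};

// After `node flatpak/fix-lockfile.mjs package-lock.json`: the entry carries an
// explicit tarball URL and integrity hash taken from the registry's dist metadata,
// which is what offline installs and flatpak-node-generator need.
const after = {
  'node_modules/left-pad': {
    version: '1.3.0',
    license: 'WTFPL',
    resolved: 'https://registry.npmjs.org/left-pad/-/left-pad-1.3.0.tgz',
    integrity: 'sha512-…', // real value comes from metadata.dist.integrity
  },
};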

flatpak/generate-sources.sh Executable file

@@ -0,0 +1,48 @@
#!/usr/bin/env bash
#
# Generate offline dependency source files for Flatpak builds.
#
# Prerequisites:
# pip install flatpak-node-generator tomlkit aiohttp
# Clone https://github.com/flatpak/flatpak-builder-tools (for cargo generator)
#
# Usage:
# ./flatpak/generate-sources.sh <flathub-repo-path>
# ./flatpak/generate-sources.sh ../flathub-repo
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
if [ $# -lt 1 ]; then
echo "Usage: $0 <flathub-repo-path>"
echo "Example: $0 ../flathub-repo"
exit 1
fi
FLATHUB_REPO="$(cd "$1" && pwd)"
python3 "$SCRIPT_DIR/flatpak-builder-tools/cargo/flatpak-cargo-generator.py" \
-o "$FLATHUB_REPO/cargo-sources.json" "$REPO_ROOT/Cargo.lock"
TMPDIR=$(mktemp -d)
trap 'rm -rf "$TMPDIR"' EXIT
cp "$REPO_ROOT/package-lock.json" "$TMPDIR/package-lock.json"
cp "$REPO_ROOT/package.json" "$TMPDIR/package.json"
node "$SCRIPT_DIR/fix-lockfile.mjs" "$TMPDIR/package-lock.json"
node -e "
const fs = require('fs');
const p = process.argv[1];
const d = JSON.parse(fs.readFileSync(p, 'utf-8'));
for (const [name, info] of Object.entries(d.packages || {})) {
if (name && (info.link || !info.resolved)) delete d.packages[name];
}
fs.writeFileSync(p, JSON.stringify(d, null, 2));
" "$TMPDIR/package-lock.json"
flatpak-node-generator --no-requests-cache \
-o "$FLATHUB_REPO/node-sources.json" npm "$TMPDIR/package-lock.json"

flatpak/update-manifest.sh Executable file

@@ -0,0 +1,86 @@
#!/usr/bin/env bash
#
# Update the Flathub repo for a new release.
#
# Usage:
# ./flatpak/update-manifest.sh <version-tag> <flathub-repo-path>
# ./flatpak/update-manifest.sh v2026.2.0 ../flathub-repo
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
if [ $# -lt 2 ]; then
echo "Usage: $0 <version-tag> <flathub-repo-path>"
echo "Example: $0 v2026.2.0 ../flathub-repo"
exit 1
fi
VERSION_TAG="$1"
VERSION="${VERSION_TAG#v}"
FLATHUB_REPO="$(cd "$2" && pwd)"
MANIFEST="$FLATHUB_REPO/app.yaak.Yaak.yml"
METAINFO="$SCRIPT_DIR/app.yaak.Yaak.metainfo.xml"
if [[ "$VERSION" == *-* ]]; then
echo "Skipping pre-release version '$VERSION_TAG' (only stable releases are published to Flathub)"
exit 0
fi
REPO="mountain-loop/yaak"
COMMIT=$(git ls-remote "https://github.com/$REPO.git" "refs/tags/$VERSION_TAG" | cut -f1)
if [ -z "$COMMIT" ]; then
echo "Error: Could not resolve commit for tag $VERSION_TAG"
exit 1
fi
echo "Tag: $VERSION_TAG"
echo "Commit: $COMMIT"
# Update git tag and commit in the manifest
sed -i "s|tag: v.*|tag: $VERSION_TAG|" "$MANIFEST"
sed -i "s|commit: .*|commit: $COMMIT|" "$MANIFEST"
echo "Updated manifest tag and commit."
# Regenerate offline dependency sources from the tagged lockfiles
TMPDIR=$(mktemp -d)
trap 'rm -rf "$TMPDIR"' EXIT
echo "Fetching lockfiles from $VERSION_TAG..."
curl -fsSL "https://raw.githubusercontent.com/$REPO/$VERSION_TAG/Cargo.lock" -o "$TMPDIR/Cargo.lock"
curl -fsSL "https://raw.githubusercontent.com/$REPO/$VERSION_TAG/package-lock.json" -o "$TMPDIR/package-lock.json"
curl -fsSL "https://raw.githubusercontent.com/$REPO/$VERSION_TAG/package.json" -o "$TMPDIR/package.json"
echo "Generating cargo-sources.json..."
python3 "$SCRIPT_DIR/flatpak-builder-tools/cargo/flatpak-cargo-generator.py" \
-o "$FLATHUB_REPO/cargo-sources.json" "$TMPDIR/Cargo.lock"
echo "Generating node-sources.json..."
node "$SCRIPT_DIR/fix-lockfile.mjs" "$TMPDIR/package-lock.json"
node -e "
const fs = require('fs');
const p = process.argv[1];
const d = JSON.parse(fs.readFileSync(p, 'utf-8'));
for (const [name, info] of Object.entries(d.packages || {})) {
if (name && (info.link || !info.resolved)) delete d.packages[name];
}
fs.writeFileSync(p, JSON.stringify(d, null, 2));
" "$TMPDIR/package-lock.json"
flatpak-node-generator --no-requests-cache \
-o "$FLATHUB_REPO/node-sources.json" npm "$TMPDIR/package-lock.json"
# Update metainfo with new release
TODAY=$(date +%Y-%m-%d)
sed -i "s| <releases>| <releases>\n <release version=\"$VERSION\" date=\"$TODAY\" />|" "$METAINFO"
echo "Updated metainfo with release $VERSION."
echo ""
echo "Done! Review the changes:"
echo " $MANIFEST"
echo " $METAINFO"
echo " $FLATHUB_REPO/cargo-sources.json"
echo " $FLATHUB_REPO/node-sources.json"

package-lock.json generated

@@ -12,7 +12,7 @@
"packages/plugin-runtime",
"packages/plugin-runtime-types",
"plugins-external/mcp-server",
"plugins-external/template-function-faker",
"plugins-external/faker",
"plugins-external/httpsnippet",
"plugins/action-copy-curl",
"plugins/action-copy-grpcurl",
@@ -3922,13 +3922,6 @@
"@types/react": "*"
}
},
"node_modules/@types/shell-quote": {
"version": "1.7.5",
"resolved": "https://registry.npmjs.org/@types/shell-quote/-/shell-quote-1.7.5.tgz",
"integrity": "sha512-+UE8GAGRPbJVQDdxi16dgadcBfQ+KG2vgZhV1+3A1XmHbmwcdwhCUwIdy+d3pAGrbvgRoVSjeI9vOWyq376Yzw==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/unist": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
@@ -4160,6 +4153,10 @@
"resolved": "plugins/auth-oauth2",
"link": true
},
"node_modules/@yaak/faker": {
"resolved": "plugins-external/faker",
"link": true
},
"node_modules/@yaak/filter-jsonpath": {
"resolved": "plugins/filter-jsonpath",
"link": true
@@ -13405,6 +13402,7 @@
"version": "1.8.3",
"resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz",
"integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -13413,6 +13411,12 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/shlex": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/shlex/-/shlex-3.0.0.tgz",
"integrity": "sha512-jHPXQQk9d/QXCvJuLPYMOYWez3c43sORAgcIEoV7bFv5AJSJRAOyw5lQO12PMfd385qiLRCaDt7OtEzgrIGZUA==",
"license": "MIT"
},
"node_modules/should": {
"version": "13.2.3",
"resolved": "https://registry.npmjs.org/should/-/should-13.2.3.tgz",
@@ -15953,9 +15957,36 @@
"undici-types": "~7.16.0"
}
},
"plugins-external/faker": {
"name": "@yaak/faker",
"version": "1.1.1",
"dependencies": {
"@faker-js/faker": "^10.1.0"
},
"devDependencies": {
"@types/node": "^25.0.3",
"typescript": "^5.9.3"
}
},
"plugins-external/faker/node_modules/@faker-js/faker": {
"version": "10.3.0",
"resolved": "https://registry.npmjs.org/@faker-js/faker/-/faker-10.3.0.tgz",
"integrity": "sha512-It0Sne6P3szg7JIi6CgKbvTZoMjxBZhcv91ZrqrNuaZQfB5WoqYYbzCUOq89YR+VY8juY9M1vDWmDDa2TzfXCw==",
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/fakerjs"
}
],
"license": "MIT",
"engines": {
"node": "^20.19.0 || ^22.13.0 || ^23.5.0 || >=24.0.0",
"npm": ">=10"
}
},
"plugins-external/httpsnippet": {
"name": "@yaak/httpsnippet",
"version": "1.0.0",
"version": "1.0.3",
"dependencies": {
"@readme/httpsnippet": "^11.0.0"
},
@@ -15983,7 +16014,7 @@
},
"plugins-external/mcp-server": {
"name": "@yaak/mcp-server",
"version": "0.1.7",
"version": "0.2.1",
"dependencies": {
"@hono/mcp": "^0.2.3",
"@hono/node-server": "^1.19.7",
@@ -16080,10 +16111,7 @@
"name": "@yaak/importer-curl",
"version": "0.1.0",
"dependencies": {
"shell-quote": "^1.8.1"
},
"devDependencies": {
"@types/shell-quote": "^1.7.5"
"shlex": "^3.0.0"
}
},
"plugins/importer-insomnia": {


@@ -11,7 +11,7 @@
"packages/plugin-runtime",
"packages/plugin-runtime-types",
"plugins-external/mcp-server",
"plugins-external/template-function-faker",
"plugins-external/faker",
"plugins-external/httpsnippet",
"plugins/action-copy-curl",
"plugins/action-copy-grpcurl",


@@ -0,0 +1,233 @@
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
exports[`template-function-faker > exports all expected template functions 1`] = `
[
"faker.airline.aircraftType",
"faker.airline.airline",
"faker.airline.airplane",
"faker.airline.airport",
"faker.airline.flightNumber",
"faker.airline.recordLocator",
"faker.airline.seat",
"faker.animal.bear",
"faker.animal.bird",
"faker.animal.cat",
"faker.animal.cetacean",
"faker.animal.cow",
"faker.animal.crocodilia",
"faker.animal.dog",
"faker.animal.fish",
"faker.animal.horse",
"faker.animal.insect",
"faker.animal.lion",
"faker.animal.petName",
"faker.animal.rabbit",
"faker.animal.rodent",
"faker.animal.snake",
"faker.animal.type",
"faker.color.cmyk",
"faker.color.colorByCSSColorSpace",
"faker.color.cssSupportedFunction",
"faker.color.cssSupportedSpace",
"faker.color.hsl",
"faker.color.human",
"faker.color.hwb",
"faker.color.lab",
"faker.color.lch",
"faker.color.rgb",
"faker.color.space",
"faker.commerce.department",
"faker.commerce.isbn",
"faker.commerce.price",
"faker.commerce.product",
"faker.commerce.productAdjective",
"faker.commerce.productDescription",
"faker.commerce.productMaterial",
"faker.commerce.productName",
"faker.commerce.upc",
"faker.company.buzzAdjective",
"faker.company.buzzNoun",
"faker.company.buzzPhrase",
"faker.company.buzzVerb",
"faker.company.catchPhrase",
"faker.company.catchPhraseAdjective",
"faker.company.catchPhraseDescriptor",
"faker.company.catchPhraseNoun",
"faker.company.name",
"faker.database.collation",
"faker.database.column",
"faker.database.engine",
"faker.database.mongodbObjectId",
"faker.database.type",
"faker.date.anytime",
"faker.date.between",
"faker.date.betweens",
"faker.date.birthdate",
"faker.date.future",
"faker.date.month",
"faker.date.past",
"faker.date.recent",
"faker.date.soon",
"faker.date.timeZone",
"faker.date.weekday",
"faker.finance.accountName",
"faker.finance.accountNumber",
"faker.finance.amount",
"faker.finance.bic",
"faker.finance.bitcoinAddress",
"faker.finance.creditCardCVV",
"faker.finance.creditCardIssuer",
"faker.finance.creditCardNumber",
"faker.finance.currency",
"faker.finance.currencyCode",
"faker.finance.currencyName",
"faker.finance.currencyNumericCode",
"faker.finance.currencySymbol",
"faker.finance.ethereumAddress",
"faker.finance.iban",
"faker.finance.litecoinAddress",
"faker.finance.pin",
"faker.finance.routingNumber",
"faker.finance.transactionDescription",
"faker.finance.transactionType",
"faker.git.branch",
"faker.git.commitDate",
"faker.git.commitEntry",
"faker.git.commitMessage",
"faker.git.commitSha",
"faker.hacker.abbreviation",
"faker.hacker.adjective",
"faker.hacker.ingverb",
"faker.hacker.noun",
"faker.hacker.phrase",
"faker.hacker.verb",
"faker.image.avatar",
"faker.image.avatarGitHub",
"faker.image.dataUri",
"faker.image.personPortrait",
"faker.image.url",
"faker.image.urlLoremFlickr",
"faker.image.urlPicsumPhotos",
"faker.internet.displayName",
"faker.internet.domainName",
"faker.internet.domainSuffix",
"faker.internet.domainWord",
"faker.internet.email",
"faker.internet.emoji",
"faker.internet.exampleEmail",
"faker.internet.httpMethod",
"faker.internet.httpStatusCode",
"faker.internet.ip",
"faker.internet.ipv4",
"faker.internet.ipv6",
"faker.internet.jwt",
"faker.internet.jwtAlgorithm",
"faker.internet.mac",
"faker.internet.password",
"faker.internet.port",
"faker.internet.protocol",
"faker.internet.url",
"faker.internet.userAgent",
"faker.internet.username",
"faker.location.buildingNumber",
"faker.location.cardinalDirection",
"faker.location.city",
"faker.location.continent",
"faker.location.country",
"faker.location.countryCode",
"faker.location.county",
"faker.location.direction",
"faker.location.language",
"faker.location.latitude",
"faker.location.longitude",
"faker.location.nearbyGPSCoordinate",
"faker.location.ordinalDirection",
"faker.location.secondaryAddress",
"faker.location.state",
"faker.location.street",
"faker.location.streetAddress",
"faker.location.timeZone",
"faker.location.zipCode",
"faker.lorem.lines",
"faker.lorem.paragraph",
"faker.lorem.paragraphs",
"faker.lorem.sentence",
"faker.lorem.sentences",
"faker.lorem.slug",
"faker.lorem.text",
"faker.lorem.word",
"faker.lorem.words",
"faker.music.album",
"faker.music.artist",
"faker.music.genre",
"faker.music.songName",
"faker.number.bigInt",
"faker.number.binary",
"faker.number.float",
"faker.number.hex",
"faker.number.int",
"faker.number.octal",
"faker.number.romanNumeral",
"faker.person.bio",
"faker.person.firstName",
"faker.person.fullName",
"faker.person.gender",
"faker.person.jobArea",
"faker.person.jobDescriptor",
"faker.person.jobTitle",
"faker.person.jobType",
"faker.person.lastName",
"faker.person.middleName",
"faker.person.prefix",
"faker.person.sex",
"faker.person.sexType",
"faker.person.suffix",
"faker.person.zodiacSign",
"faker.phone.imei",
"faker.phone.number",
"faker.science.chemicalElement",
"faker.science.unit",
"faker.string.alpha",
"faker.string.alphanumeric",
"faker.string.binary",
"faker.string.fromCharacters",
"faker.string.hexadecimal",
"faker.string.nanoid",
"faker.string.numeric",
"faker.string.octal",
"faker.string.sample",
"faker.string.symbol",
"faker.string.ulid",
"faker.string.uuid",
"faker.system.commonFileExt",
"faker.system.commonFileName",
"faker.system.commonFileType",
"faker.system.cron",
"faker.system.directoryPath",
"faker.system.fileExt",
"faker.system.fileName",
"faker.system.filePath",
"faker.system.fileType",
"faker.system.mimeType",
"faker.system.networkInterface",
"faker.system.semver",
"faker.vehicle.bicycle",
"faker.vehicle.color",
"faker.vehicle.fuel",
"faker.vehicle.manufacturer",
"faker.vehicle.model",
"faker.vehicle.type",
"faker.vehicle.vehicle",
"faker.vehicle.vin",
"faker.vehicle.vrm",
"faker.word.adjective",
"faker.word.adverb",
"faker.word.conjunction",
"faker.word.interjection",
"faker.word.noun",
"faker.word.preposition",
"faker.word.sample",
"faker.word.verb",
"faker.word.words",
]
`;


@@ -1,9 +1,12 @@
import { describe, expect, it } from 'vitest';
describe('formatDatetime', () => {
it('returns formatted current date', async () => {
// Ensure the plugin imports properly
const faker = await import('../src/index');
expect(faker.plugin.templateFunctions?.length).toBe(226);
describe('template-function-faker', () => {
it('exports all expected template functions', async () => {
const { plugin } = await import('../src/index');
const names = plugin.templateFunctions?.map((fn) => fn.name).sort() ?? [];
// Snapshot the full list of exported function names so we catch any
// accidental additions, removals, or renames across faker upgrades.
expect(names).toMatchSnapshot();
});
});


@@ -10,9 +10,6 @@
"test": "vitest --run tests"
},
"dependencies": {
"shell-quote": "^1.8.1"
},
"devDependencies": {
"@types/shell-quote": "^1.7.5"
"shlex": "^3.0.0"
}
}
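
For context on the dependency swap above: the importer now relies on shlex's split(), which tokenizes a command string POSIX-style, instead of shell-quote's parse(). A small standalone illustration (not code from this repo) of how the next diff consumes it:

import { split } from 'shlex';

// split() keeps quoted arguments together as single tokens, including spaces.
const tokens = split("curl -XPOST -H 'Content-Type: application/json' https://yaak.app");
// roughly: ['curl', '-XPOST', '-H', 'Content-Type: application/json', 'https://yaak.app']

// The importer then expands squished single-letter flags itself (mirroring the
// flatMap in index.ts below), so '-XPOST' becomes '-X' followed by 'POST'.
const expanded = tokens.flatMap((t) =>
  t.startsWith('-') && !t.startsWith('--') && t.length > 2 ? [t.slice(0, 2), t.slice(2)] : [t],
);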


@@ -7,8 +7,7 @@ import type {
PluginDefinition,
Workspace,
} from '@yaakapp/api';
import type { ControlOperator, ParseEntry } from 'shell-quote';
import { parse } from 'shell-quote';
import { split } from 'shlex';
type AtLeast<T, K extends keyof T> = Partial<T> & Pick<T, K>;
@@ -56,31 +55,89 @@ export const plugin: PluginDefinition = {
};
/**
* Decodes escape sequences in shell $'...' strings
* Handles Unicode escape sequences (\uXXXX) and common escape codes
* Splits raw input into individual shell command strings.
* Handles line continuations, semicolons, and newline-separated curl commands.
*/
function decodeShellString(str: string): string {
return str
.replace(/\\u([0-9a-fA-F]{4})/g, (_, hex) => String.fromCharCode(parseInt(hex, 16)))
.replace(/\\x([0-9a-fA-F]{2})/g, (_, hex) => String.fromCharCode(parseInt(hex, 16)))
.replace(/\\n/g, '\n')
.replace(/\\r/g, '\r')
.replace(/\\t/g, '\t')
.replace(/\\'/g, "'")
.replace(/\\"/g, '"')
.replace(/\\\\/g, '\\');
}
function splitCommands(rawData: string): string[] {
// Join line continuations (backslash-newline, and backslash-CRLF for Windows)
const joined = rawData.replace(/\\\r?\n/g, ' ');
/**
* Checks if a string might contain escape sequences that need decoding
* If so, decodes them; otherwise returns the string as-is
*/
function maybeDecodeEscapeSequences(str: string): string {
// Check if the string contains escape sequences that shell-quote might not handle
if (str.includes('\\u') || str.includes('\\x')) {
return decodeShellString(str);
// Count consecutive backslashes immediately before position i.
// An even count means the quote at i is NOT escaped; odd means it IS escaped.
function isEscaped(i: number): boolean {
let backslashes = 0;
let j = i - 1;
while (j >= 0 && joined[j] === '\\') {
backslashes++;
j--;
}
return backslashes % 2 !== 0;
}
return str;
// Split on semicolons and newlines to separate commands
const commands: string[] = [];
let current = '';
let inSingleQuote = false;
let inDoubleQuote = false;
let inDollarQuote = false;
for (let i = 0; i < joined.length; i++) {
const ch = joined[i]!;
const next = joined[i + 1];
// Track quoting state to avoid splitting inside quoted strings
if (!inDoubleQuote && !inDollarQuote && ch === "'" && !inSingleQuote) {
inSingleQuote = true;
current += ch;
continue;
}
if (inSingleQuote && ch === "'") {
inSingleQuote = false;
current += ch;
continue;
}
if (!inSingleQuote && !inDollarQuote && ch === '"' && !inDoubleQuote) {
inDoubleQuote = true;
current += ch;
continue;
}
if (inDoubleQuote && ch === '"' && !isEscaped(i)) {
inDoubleQuote = false;
current += ch;
continue;
}
if (!inSingleQuote && !inDoubleQuote && !inDollarQuote && ch === '$' && next === "'") {
inDollarQuote = true;
current += ch + next;
i++; // Skip the opening quote
continue;
}
if (inDollarQuote && ch === "'" && !isEscaped(i)) {
inDollarQuote = false;
current += ch;
continue;
}
const inQuote = inSingleQuote || inDoubleQuote || inDollarQuote;
// Split on ;, newline, or CRLF when not inside quotes and not escaped
if (!inQuote && !isEscaped(i) && (ch === ';' || ch === '\n' || (ch === '\r' && next === '\n'))) {
if (ch === '\r') i++; // Skip the \n in \r\n
if (current.trim()) {
commands.push(current.trim());
}
current = '';
continue;
}
current += ch;
}
if (current.trim()) {
commands.push(current.trim());
}
return commands;
}
export function convertCurl(rawData: string) {
@@ -88,68 +145,17 @@ export function convertCurl(rawData: string) {
return null;
}
const commands: ParseEntry[][] = [];
const commands: string[][] = splitCommands(rawData).map((cmd) => {
const tokens = split(cmd);
// Replace non-escaped newlines with semicolons to make parsing easier
// NOTE: This is really slow in debug build but fast in release mode
const normalizedData = rawData.replace(/\ncurl/g, '; curl');
let currentCommand: ParseEntry[] = [];
const parsed = parse(normalizedData);
// Break up `-XPOST` into `-X POST`
const normalizedParseEntries = parsed.flatMap((entry) => {
if (
typeof entry === 'string' &&
entry.startsWith('-') &&
!entry.startsWith('--') &&
entry.length > 2
) {
return [entry.slice(0, 2), entry.slice(2)];
}
return entry;
});
for (const parseEntry of normalizedParseEntries) {
if (typeof parseEntry === 'string') {
if (parseEntry.startsWith('$')) {
// Handle $'...' strings from shell-quote - decode escape sequences
currentCommand.push(decodeShellString(parseEntry.slice(1)));
} else {
// Decode escape sequences that shell-quote might not handle
currentCommand.push(maybeDecodeEscapeSequences(parseEntry));
// Break up squished arguments like `-XPOST` into `-X POST`
return tokens.flatMap((token) => {
if (token.startsWith('-') && !token.startsWith('--') && token.length > 2) {
return [token.slice(0, 2), token.slice(2)];
}
continue;
}
if ('comment' in parseEntry) {
continue;
}
const { op } = parseEntry as { op: 'glob'; pattern: string } | { op: ControlOperator };
// `;` separates commands
if (op === ';') {
commands.push(currentCommand);
currentCommand = [];
continue;
}
if (op?.startsWith('$')) {
// Handle the case where literal like -H $'Header: \'Some Quoted Thing\''
const str = decodeShellString(op.slice(2, op.length - 1));
currentCommand.push(str);
continue;
}
if (op === 'glob') {
currentCommand.push((parseEntry as { op: 'glob'; pattern: string }).pattern);
}
}
commands.push(currentCommand);
return token;
});
});
const workspace: ExportResources['workspaces'][0] = {
model: 'workspace',
@@ -169,12 +175,12 @@ export function convertCurl(rawData: string) {
};
}
function importCommand(parseEntries: ParseEntry[], workspaceId: string) {
function importCommand(parseEntries: string[], workspaceId: string) {
// ~~~~~~~~~~~~~~~~~~~~~ //
// Collect all the flags //
// ~~~~~~~~~~~~~~~~~~~~~ //
const flagsByName: FlagsByName = {};
const singletons: ParseEntry[] = [];
const singletons: string[] = [];
// Start at 1 so we can skip the ^curl part
for (let i = 1; i < parseEntries.length; i++) {


@@ -112,9 +112,28 @@ describe('importer-curl', () => {
});
});
test('Imports with Windows CRLF line endings', () => {
expect(
convertCurl('curl \\\r\n -X POST \\\r\n https://yaak.app'),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({ url: 'https://yaak.app', method: 'POST' }),
],
},
});
});
test('Throws on malformed quotes', () => {
expect(() =>
convertCurl('curl -X POST -F "a=aaa" -F b=bbb" https://yaak.app'),
).toThrow();
});
test('Imports form data', () => {
expect(
convertCurl('curl -X POST -F "a=aaa" -F b=bbb" -F f=@filepath https://yaak.app'),
convertCurl('curl -X POST -F "a=aaa" -F b=bbb -F f=@filepath https://yaak.app'),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
@@ -476,6 +495,130 @@ describe('importer-curl', () => {
});
});
test('Imports JSON body with newlines in $quotes', () => {
expect(
convertCurl(
`curl 'https://yaak.app' -H 'Content-Type: application/json' --data-raw $'{\\n "foo": "bar",\\n "baz": "qux"\\n}' -X POST`,
),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'https://yaak.app',
method: 'POST',
headers: [{ name: 'Content-Type', value: 'application/json', enabled: true }],
bodyType: 'application/json',
body: { text: '{\n "foo": "bar",\n "baz": "qux"\n}' },
}),
],
},
});
});
test('Handles double-quoted string ending with even backslashes before semicolon', () => {
// "C:\\" has two backslashes which escape each other, so the closing " is real.
// The ; after should split into a second command.
expect(
convertCurl(
'curl -d "C:\\\\" https://yaak.app;curl https://example.com',
),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'https://yaak.app',
method: 'POST',
bodyType: 'application/x-www-form-urlencoded',
body: {
form: [{ name: 'C:\\', value: '', enabled: true }],
},
headers: [
{
name: 'Content-Type',
value: 'application/x-www-form-urlencoded',
enabled: true,
},
],
}),
baseRequest({ url: 'https://example.com' }),
],
},
});
});
test('Handles $quoted string ending with a literal backslash before semicolon', () => {
// $'C:\\\\' has two backslashes which become one literal backslash.
// The closing ' must not be misinterpreted as escaped.
// The ; after should split into a second command.
expect(
convertCurl(
"curl -d $'C:\\\\' https://yaak.app;curl https://example.com",
),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'https://yaak.app',
method: 'POST',
bodyType: 'application/x-www-form-urlencoded',
body: {
form: [{ name: 'C:\\', value: '', enabled: true }],
},
headers: [
{
name: 'Content-Type',
value: 'application/x-www-form-urlencoded',
enabled: true,
},
],
}),
baseRequest({ url: 'https://example.com' }),
],
},
});
});
test('Imports $quoted header with escaped single quotes', () => {
expect(
convertCurl(
`curl https://yaak.app -H $'X-Custom: it\\'s a test'`,
),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'https://yaak.app',
headers: [{ name: 'X-Custom', value: "it's a test", enabled: true }],
}),
],
},
});
});
test('Does not split on escaped semicolon outside quotes', () => {
// In shell, \; is a literal semicolon and should not split commands.
// This should be treated as a single curl command with the URL "https://yaak.app?a=1;b=2"
expect(
convertCurl('curl https://yaak.app?a=1\\;b=2'),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'https://yaak.app',
urlParameters: [
{ name: 'a', value: '1;b=2', enabled: true },
],
}),
],
},
});
});
test('Imports multipart form data with text-only fields from --data-raw', () => {
const curlCommand = `curl 'http://example.com/api' \
-H 'Content-Type: multipart/form-data; boundary=----FormBoundary123' \


@@ -1,4 +1,6 @@
const path = require('node:path');
const crypto = require('node:crypto');
const fs = require('node:fs');
const decompress = require('decompress');
const Downloader = require('nodejs-file-downloader');
const { rmSync, cpSync, mkdirSync, existsSync } = require('node:fs');
@@ -41,6 +43,15 @@ const DST_BIN_MAP = {
[WIN_ARM]: 'yaaknode.exe',
};
const SHA256_MAP = {
[MAC_ARM]: 'b05aa3a66efe680023f930bd5af3fdbbd542794da5644ca2ad711d68cbd4dc35',
[MAC_X64]: '096081b6d6fcdd3f5ba0f5f1d44a47e83037ad2e78eada26671c252fe64dd111',
[LNX_ARM]: '0dc93ec5c798b0d347f068db6d205d03dea9a71765e6a53922b682b91265d71f',
[LNX_X64]: '58a5ff5cc8f2200e458bea22e329d5c1994aa1b111d499ca46ec2411d58239ca',
[WIN_X64]: '5355ae6d7c49eddcfde7d34ac3486820600a831bf81dc3bdca5c8db6a9bb0e76',
[WIN_ARM]: 'ce9ee4e547ebdff355beb48e309b166c24df6be0291c9eaf103ce15f3de9e5b4',
};
const key = `${process.platform}_${process.env.YAAK_TARGET_ARCH ?? process.arch}`;
const destDir = path.join(__dirname, `..`, 'crates-tauri', 'yaak-app', 'vendored', 'node');
@@ -68,6 +79,15 @@ rmSync(tmpDir, { recursive: true, force: true });
timeout: 1000 * 60 * 2,
}).download();
// Verify SHA256
const expectedHash = SHA256_MAP[key];
const fileBuffer = fs.readFileSync(filePath);
const actualHash = crypto.createHash('sha256').update(fileBuffer).digest('hex');
if (actualHash !== expectedHash) {
throw new Error(`SHA256 mismatch for ${path.basename(filePath)}\n expected: ${expectedHash}\n actual: ${actualHash}`);
}
console.log('SHA256 verified:', actualHash);
// Decompress to the same directory
await decompress(filePath, tmpDir, {});


@@ -1,3 +1,5 @@
const crypto = require('node:crypto');
const fs = require('node:fs');
const decompress = require('decompress');
const Downloader = require('nodejs-file-downloader');
const path = require('node:path');
@@ -41,6 +43,15 @@ const DST_BIN_MAP = {
[WIN_ARM]: 'yaakprotoc.exe',
};
const SHA256_MAP = {
[MAC_ARM]: 'db7e66ff7f9080614d0f5505a6b0ac488cf89a15621b6a361672d1332ec2e14e',
[MAC_X64]: 'e20b5f930e886da85e7402776a4959efb1ed60c57e72794bcade765e67abaa82',
[LNX_ARM]: '6018147740548e0e0f764408c87f4cd040e6e1c1203e13aeacaf811892b604f3',
[LNX_X64]: 'f3340e28a83d1c637d8bafdeed92b9f7db6a384c26bca880a6e5217b40a4328b',
[WIN_X64]: 'd7a207fb6eec0e4b1b6613be3b7d11905375b6fd1147a071116eb8e9f24ac53b',
[WIN_ARM]: 'd7a207fb6eec0e4b1b6613be3b7d11905375b6fd1147a071116eb8e9f24ac53b',
};
const dstDir = path.join(__dirname, `..`, 'crates-tauri', 'yaak-app', 'vendored', 'protoc');
const key = `${process.platform}_${process.env.YAAK_TARGET_ARCH ?? process.arch}`;
console.log(`Vendoring protoc ${VERSION} for ${key}`);
@@ -63,6 +74,15 @@ mkdirSync(dstDir, { recursive: true });
// Download GitHub release artifact
const { filePath } = await new Downloader({ url, directory: tmpDir }).download();
// Verify SHA256
const expectedHash = SHA256_MAP[key];
const fileBuffer = fs.readFileSync(filePath);
const actualHash = crypto.createHash('sha256').update(fileBuffer).digest('hex');
if (actualHash !== expectedHash) {
throw new Error(`SHA256 mismatch for ${path.basename(filePath)}\n expected: ${expectedHash}\n actual: ${actualHash}`);
}
console.log('SHA256 verified:', actualHash);
// Decompress to the same directory
await decompress(filePath, tmpDir, {});


@@ -7,6 +7,7 @@ import { forwardRef } from 'react';
import { openWorkspaceSettings } from '../../commands/openWorkspaceSettings';
import { activeWorkspaceAtom, activeWorkspaceMetaAtom } from '../../hooks/useActiveWorkspace';
import { useKeyValue } from '../../hooks/useKeyValue';
import { useRandomKey } from '../../hooks/useRandomKey';
import { sync } from '../../init/sync';
import { showConfirm, showConfirmDelete } from '../../lib/confirm';
import { showDialog } from '../../lib/dialog';
@@ -36,6 +37,7 @@ export function GitDropdown() {
function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
const workspace = useAtomValue(activeWorkspaceAtom);
const [refreshKey, regenerateKey] = useRandomKey();
const [
{ status, log },
{
@@ -43,7 +45,6 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
deleteBranch,
deleteRemoteBranch,
renameBranch,
fetchAll,
mergeBranch,
push,
pull,
@@ -51,7 +52,7 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
resetChanges,
init,
},
] = useGit(syncDir, gitCallbacks(syncDir));
] = useGit(syncDir, gitCallbacks(syncDir), refreshKey);
const localBranches = status.data?.localBranches ?? [];
const remoteBranches = status.data?.remoteBranches ?? [];
@@ -172,7 +173,7 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
{ type: 'separator' },
{
label: 'Push',
disabled: !hasRemotes || ahead === 0,
hidden: !hasRemotes,
leftSlot: <Icon icon="arrow_up_from_line" />,
waitForOnSelect: true,
async onSelect() {
@@ -191,7 +192,7 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
},
{
label: 'Pull',
disabled: !hasRemotes || behind === 0,
hidden: !hasRemotes,
leftSlot: <Icon icon="arrow_down_to_line" />,
waitForOnSelect: true,
async onSelect() {
@@ -210,7 +211,7 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
},
{
label: 'Commit...',
disabled: !hasChanges,
leftSlot: <Icon icon="git_commit_vertical" />,
onSelect() {
showDialog({
@@ -502,15 +503,25 @@ function SyncDropdownWithSyncDir({ syncDir }: { syncDir: string }) {
];
return (
<Dropdown fullWidth items={items} onOpen={fetchAll.mutate}>
<Dropdown fullWidth items={items} onOpen={regenerateKey}>
<GitMenuButton>
<InlineCode className="flex items-center gap-1">
<Icon icon="git_branch" size="xs" className="opacity-50" />
{currentBranch}
</InlineCode>
<div className="flex items-center gap-1.5">
{ahead > 0 && <span className="text-xs flex items-center gap-0.5"><span className="text-primary"></span>{ahead}</span>}
{behind > 0 && <span className="text-xs flex items-center gap-0.5"><span className="text-info"></span>{behind}</span>}
{ahead > 0 && (
<span className="text-xs flex items-center gap-0.5">
<span className="text-primary"></span>
{ahead}
</span>
)}
{behind > 0 && (
<span className="text-xs flex items-center gap-0.5">
<span className="text-info"></span>
{behind}
</span>
)}
</div>
</GitMenuButton>
</Dropdown>
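
Taken together, the refresh behavior introduced across the useGit and GitDropdown changes reduces to the pattern sketched below. This is a simplified, self-contained version assuming @tanstack/react-query v5; useRefreshKey stands in for the repo's useRandomKey, and fetchStatus stands in for invoke('cmd_git_status', { dir }). In the real hook the 10-minute interval sits on a separate fetch_all query whose dataUpdatedAt feeds the status query key.

import { useCallback, useState } from 'react';
import { useQuery } from '@tanstack/react-query';

// Stand-in for useRandomKey(): any value that changes when the dropdown opens.
function useRefreshKey(): [number, () => void] {
  const [key, setKey] = useState(0);
  const regenerate = useCallback(() => setKey((k) => k + 1), []);
  return [key, regenerate];
}

// Hypothetical fetcher standing in for the Tauri command invocation.
async function fetchStatus(dir: string): Promise<{ dir: string; dirty: boolean }> {
  return { dir, dirty: false };
}

// Because refreshKey is part of the queryKey, every dropdown open re-runs the
// query; placeholderData keeps the previous result on screen while the new key
// loads, and refetchInterval covers the periodic background refresh.
export function useGitStatusSketch(dir: string, refreshKey: number) {
  return useQuery({
    queryKey: ['git', 'status', dir, refreshKey],
    queryFn: () => fetchStatus(dir),
    placeholderData: (prev) => prev,
    refetchInterval: 10 * 60_000,
  });
}

The dropdown then only needs to wire regenerate into onOpen, as the GitDropdown diff above does with regenerateKey.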