Compare commits


103 Commits

Author SHA1 Message Date
Gregory Schier  bdf89ac288  Fix platform check  2023-03-14 00:15:01 -07:00
Gregory Schier  debd3c8185  Some small changes  2023-03-14 00:08:03 -07:00
Gregory Schier  f81a3ae8e7  Move stuff around  2023-03-13 23:30:14 -07:00
Gregory Schier  7d4e9894c3  Refactor hooks to be easier to use  2023-03-13 23:25:41 -07:00
Gregory Schier  4bf22d8a60  Fix header editor and scroll in general  2023-03-13 19:37:36 -07:00
Gregory Schier  8be4971a23  Lazy load routes  2023-03-13 13:56:13 -07:00
Gregory Schier  359e916b73  Back to React  2023-03-13 09:50:49 -07:00
Gregory Schier  68058f3e41  Move some stuff around  2023-03-13 09:24:38 -07:00
Gregory Schier  0c6fa3e634  Fix URL bar  2023-03-13 00:13:25 -07:00
Gregory Schier  0fa25c6335  Fix ButtonLink and edit request names  2023-03-13 00:11:23 -07:00
Gregory Schier  5684479f1d  Remove old rust cache action  2023-03-12 22:48:43 -07:00
Gregory Schier  2d1603601c  Better rust cache  2023-03-12 22:47:43 -07:00
Gregory Schier  f5394b2210  Start GraphQL support  2023-03-12 22:43:25 -07:00
Gregory Schier  833db5df06  Fix artifact tag  2023-03-12 21:41:15 -07:00
Gregory Schier  525ac7e980  Remove wasm stuff  2023-03-12 21:25:31 -07:00
Gregory Schier  44a747c80a  Use tauri action  2023-03-12 21:13:08 -07:00
Gregory Schier  2056e7f40a  Fix traffic lights thingy  2023-03-12 20:47:52 -07:00
Gregory Schier  9b6c1ad364  Cache cargo bin for "install"  2023-03-12 19:10:39 -07:00
Gregory Schier  34987bcacb  Refformat  2023-03-12 19:03:27 -07:00
Gregory Schier  b62c11222a  Fix artifact upload  2023-03-12 19:01:48 -07:00
Gregory Schier  b3cee3ace3  Fix dev  2023-03-12 18:39:02 -07:00
Gregory Schier  222c054c95  Split out macos deps  2023-03-12 18:36:25 -07:00
Gregory Schier  46f18a2491  Cache workflow  2023-03-12 18:28:14 -07:00
Gregory Schier  f2ca8e2753  Add wasm-pack  2023-03-12 18:19:20 -07:00
Gregory Schier  b0d243c378  Install rsw  2023-03-12 18:14:38 -07:00
Gregory Schier  6161fb86c8  Fix artifact names  2023-03-12 18:13:00 -07:00
Gregory Schier  b09cc91fe5  Fix build command  2023-03-12 18:11:24 -07:00
Gregory Schier  ef1638cbb3  Update secrets context  2023-03-12 18:07:57 -07:00
Gregory Schier  00ef8743f2  Update workflow name  2023-03-12 18:05:45 -07:00
Gregory Schier  68222659e3  Fix workflow  2023-03-12 18:05:13 -07:00
Gregory Schier  69420a4bba  Start of auto updates  2023-03-12 18:04:11 -07:00
Gregory Schier  0161bbaeb1  Fix tabbing to tabs  2023-03-11 23:32:39 -08:00
Gregory Schier  948dbfe3cc  Fix eslint errors  2023-03-11 23:29:25 -08:00
Gregory Schier  338ba8b189  Got tab content scrolling working  2023-03-11 22:36:13 -08:00
Gregory Schier  ca4655b441  Removed some debug stuff  2023-03-10 10:43:15 -08:00
Gregory Schier  bf37499428  Refactor editor to update better  2023-03-10 10:39:23 -08:00
Gregory Schier  0b94b57e2a  Fix headers persistence and better sending  2023-03-09 13:38:17 -08:00
Gregory Schier  fc40aead98  Hook up header editor!  2023-03-09 13:07:13 -08:00
Gregory Schier  7d7f934e6a  Fix  2023-03-09 10:58:27 -08:00
Gregory Schier  d5fbf4d622  Fix blur de-select speed  2023-03-09 10:57:34 -08:00
Gregory Schier  e4f6c919dc  Fix Codemirror performance!!  2023-03-09 10:50:55 -08:00
Gregory Schier  4d806ff2b1  Switch to Preact!!!  2023-03-09 00:47:25 -08:00
Gregory Schier  bf8f12274f  Move some things around  2023-03-08 23:20:15 -08:00
Gregory Schier  f4f438d9fe  Better scrollbar color  2023-03-08 19:23:24 -08:00
Gregory Schier  2434f373be  Zoom, better sizes, color picker, sidebar footer  2023-03-08 19:22:04 -08:00
Gregory Schier  2bb2061f97  Read-only editor  2023-03-08 16:53:13 -08:00
Gregory Schier  2c011a5c2a  More theme tweaks  2023-03-08 16:37:20 -08:00
Gregory Schier  f66b0ccea1  Debounce autocomplete  2023-03-08 11:25:20 -08:00
Gregory Schier  665dd8447d  Minor theme updates again  2023-03-08 09:43:35 -08:00
Gregory Schier  1b61ce31e6  Editor tweaks  2023-03-07 23:05:33 -08:00
Gregory Schier  ef4d960698  Remove unneeded space  2023-03-07 22:58:13 -08:00
Gregory Schier  b6d557b632  Fix small view  2023-03-07 22:55:51 -08:00
Gregory Schier  b700bd356c  Minor style tweaks  2023-03-07 22:21:58 -08:00
Gregory Schier  620dd7d3ef  Lots more theme stuff  2023-03-07 21:52:21 -08:00
Gregory Schier  6575121902  Start of themes  2023-03-07 11:24:38 -08:00
Gregory Schier  7c1755a0dc  More subtle layout tweaks  2023-03-06 08:57:57 -08:00
Gregory Schier  8ad301a666  More layout fiddling and error page  2023-03-04 22:26:00 -08:00
Gregory Schier  ae24cd4939  More work on the layout  2023-03-04 21:51:17 -08:00
Gregory Schier  7152e1845e  Try new layout and a bunch of editor fixes  2023-03-04 19:06:12 -08:00
Gregory Schier  96c1dd4081  Fix autocomplete inside dialog  2023-03-03 17:03:20 -08:00
Gregory Schier  87c7b3a663  Beginnings of Header Editor  2023-03-03 13:18:57 -08:00
Gregory Schier  c1be46a539  Fix tailwind dark selector  2023-03-03 07:54:19 -08:00
Gregory Schier  4655e0018b  Fix content type in URL  2023-03-02 23:17:09 -08:00
Gregory Schier  da5ba2e3be  Add Dialog component  2023-03-02 18:46:10 -08:00
Gregory Schier  aaf95f565f  More colors  2023-03-02 17:56:53 -08:00
Gregory Schier  f32b984e77  Minor style tweaks  2023-03-02 16:16:41 -08:00
Gregory Schier  548aa4c7cd  Improved autocompletion!  2023-03-02 11:14:51 -08:00
Gregory Schier  0ccceaac77  Rename, fix autocomplete and singleline, etc...  2023-03-02 10:42:43 -08:00
Gregory Schier  70f534f1d8  Editor placeholder  2023-03-01 14:22:10 -08:00
Gregory Schier  61fe95b300  Some minor bugs  2023-03-01 14:16:02 -08:00
Gregory Schier  915e0e8613  Fix migrations for build and iframe rendering  2023-03-01 10:31:50 -08:00
Gregory Schier  aace2580da  Tweaks  2023-03-01 10:19:21 -08:00
Gregory Schier  3d36905664  Response streaming  2023-03-01 09:05:00 -08:00
Gregory Schier  0d671423da  Autocomplete, and more CM stuff!  2023-02-28 22:54:54 -08:00
Gregory Schier  aebfcb9437  Some small tweaks  2023-02-28 17:25:59 -08:00
Gregory Schier  be7ef7beb1  Better editor updating  2023-02-28 12:41:03 -08:00
Gregory Schier  d77ed0c5cc  URL highlighting with inline CM  2023-02-28 11:26:26 -08:00
Gregory Schier  e57e7bcec5  Implement request deletion  2023-02-27 15:42:06 -08:00
Gregory Schier  a637842ce4  Tauri events for request model updates  2023-02-27 13:28:50 -08:00
Gregory Schier  fc54ec49af  Split request upsert command  2023-02-27 10:00:57 -08:00
Gregory Schier  5c43d8510a  Add toggle for pretty view  2023-02-27 09:08:48 -08:00
Gregory Schier  83f84ded8d  Small tweaks  2023-02-26 15:25:55 -08:00
Gregory Schier  5658da34a2  Add variable highlighting widgets  2023-02-26 15:06:14 -08:00
Gregory Schier  38e8ef6535  Dropdown scrolling  2023-02-25 23:33:07 -08:00
Gregory Schier  8c89b06238  Show response body size  2023-02-25 23:08:19 -08:00
Gregory Schier  d85c021305  A bunch more small things  2023-02-25 23:04:31 -08:00
Gregory Schier  83bb18df03  Added react-router  2023-02-25 18:04:14 -08:00
Gregory Schier  93105a3e89  Migrations and initial data stuff  2023-02-25 16:39:18 -08:00
Gregory Schier  ba3b899115  Minor tweaks  2023-02-24 17:01:48 -08:00
Gregory Schier  fcfbc1d1da  Dummy requests in sidebar  2023-02-24 16:46:56 -08:00
Gregory Schier  72486b448c  Codemirror initial value support  2023-02-24 16:43:47 -08:00
Gregory Schier  7dea1b7870  Send request body  2023-02-24 16:09:19 -08:00
Gregory Schier  4de2c496c9  Vendor basicSetup  2023-02-24 14:51:56 -08:00
Gregory Schier  9e1393a392  Additional methods and tweaks  2023-02-24 14:10:25 -08:00
Gregory Schier  0901690ed6  Focus states  2023-02-24 12:35:13 -08:00
Gregory Schier  95303648cc  Hook up theme and clear responses  2023-02-24 12:13:30 -08:00
Gregory Schier  1dbb08c045  SQLite store in proper dir  2023-02-22 20:18:14 -08:00
Gregory Schier  00a7d9a180  Started on grid layout  2023-02-22 19:44:44 -08:00
Gregory Schier  6c549dc086  Save responses in DB  2023-02-22 18:53:44 -08:00
Gregory Schier  dc368e326a  Better URL bar  2023-02-22 16:15:25 -08:00
Gregory Schier  e42188a627  Cleaner URL bar and some improvements  2023-02-22 15:58:04 -08:00
Gregory Schier  7a6a337eff  Refactor classname usage  2023-02-21 18:03:57 -08:00
Gregory Schier  3907344884  Some minor tweaks  2023-02-21 17:56:48 -08:00
500 changed files with 14804 additions and 70735 deletions


@@ -1,37 +1,31 @@
module.exports = {
extends: [
"eslint:recommended",
"plugin:react/recommended",
"plugin:react-hooks/recommended",
"plugin:import/recommended",
"plugin:jsx-a11y/recommended",
"plugin:@typescript-eslint/recommended",
"eslint-config-prettier"
],
parser: "@typescript-eslint/parser",
parserOptions: {
project: ["./tsconfig.json"]
},
ignorePatterns: ["scripts/**/*", "plugin-runtime/**/*", "src-tauri/**/*", "plugins/**/*"],
settings: {
react: {
version: "detect"
extends: [
'eslint:recommended',
'plugin:react/recommended',
'plugin:import/recommended',
'plugin:jsx-a11y/recommended',
'plugin:@typescript-eslint/recommended',
'eslint-config-prettier',
],
ignorePatterns: ['src-tauri/**/*'],
settings: {
react: {
version: 'detect',
},
'import/resolver': {
node: {
paths: ['src-web'],
extensions: ['.js', '.jsx', '.ts', '.tsx'],
},
},
},
rules: {
"jsx-a11y/no-autofocus": "warn",
"react/react-in-jsx-scope": "off",
"@typescript-eslint/consistent-type-imports": ["error", {
prefer: "type-imports",
disallowTypeAnnotations: true,
fixStyle: "separate-type-imports",
}]
},
"import/resolver": {
node: {
paths: ["src-web"],
extensions: [".ts", ".tsx"]
}
}
},
rules: {
"jsx-a11y/no-autofocus": "off",
"react/react-in-jsx-scope": "off",
"import/no-unresolved": "off",
"@typescript-eslint/consistent-type-imports": ["error", {
prefer: "type-imports",
disallowTypeAnnotations: true,
fixStyle: "separate-type-imports"
}]
}
};

.github/workflows/artifacts.yml (new file, 49 lines)

@@ -0,0 +1,49 @@
name: Generate Artifacts
on:
  push:
    tags: [ v* ]
jobs:
  build-artifacts:
    runs-on: ${{ matrix.platform }}
    strategy:
      fail-fast: false
      matrix:
        platform: [ ubuntu-latest, macos-latest, windows-latest ]
    steps:
      - uses: actions/checkout@v3
      - uses: dtolnay/rust-toolchain@stable
      - name: Cache Rust
        uses: actions/cache@v2
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            ./src-tauri/target
          key: ${{ runner.os }}-cargo-${{ hashFiles('src-tauri/Cargo.lock') }}
      - uses: actions/setup-node@v3
        with:
          node-version: 18
          cache: 'npm'
      - name: install dependencies (ubuntu only)
        if: matrix.platform == 'ubuntu-latest'
        run: |
          sudo apt-get update
          sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf
      - name: Install dependencies
        run: npm ci
      - name: Run tests
        run: npm test
      - uses: tauri-apps/tauri-action@v0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
          TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
        with:
          tagName: v__VERSION__
          releaseName: 'App v__VERSION__'
          releaseBody: 'See the assets to download this version and install.'
          releaseDraft: true
          prerelease: false


@@ -1,18 +0,0 @@
on:
  pull_request:
    branches: [develop]
name: CI (JS)
jobs:
  test:
    name: Lint/Test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 20
      - run: npm ci
      - run: npm run lint
      - run: npm test


@@ -1,36 +0,0 @@
on:
  pull_request:
    branches: [develop]
    paths:
      - src-tauri/**
      - .github/workflows/**
name: CI (Rust)
defaults:
  run:
    working-directory: src-tauri
jobs:
  test:
    name: Check/Test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - run: |
          sudo apt-get update
          sudo apt-get install -y libwebkit2gtk-4.1-dev
      - uses: dtolnay/rust-toolchain@stable
      - uses: actions/cache@v3
        continue-on-error: false
        with:
          path: |
            ~/.cargo/bin/
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            target/
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
          restore-keys: ${{ runner.os }}-cargo-
      - run: cargo check --all
      - run: cargo test --all


@@ -1,120 +0,0 @@
name: Generate Artifacts
on:
  push:
    tags: [ v* ]
env:
  YAAK_PLUGINS_DIR: checkout/plugins
jobs:
  build-artifacts:
    permissions:
      contents: write
    name: Build
    strategy:
      fail-fast: false
      matrix:
        include:
          - platform: 'macos-latest' # for Arm-based macs (M1 and above).
            args: '--target aarch64-apple-darwin'
            yaak_arch: 'arm64'
          - platform: 'macos-latest' # for Intel-based macs.
            args: '--target x86_64-apple-darwin'
            yaak_arch: 'x64'
          - platform: 'ubuntu-22.04' # for Tauri v1, you could replace this with ubuntu-20.04.
            args: ''
            yaak_arch: 'x64'
          - platform: 'windows-latest'
            args: ''
            yaak_arch: 'x64'
    runs-on: ${{ matrix.platform }}
    steps:
      - name: Checkout yaakapp/app
        uses: actions/checkout@v4
      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: 20
      - uses: actions/setup-go@v5
        with:
          go-version: '1.22'
      - name: install Rust stable
        uses: dtolnay/rust-toolchain@stable
        with:
          # Those targets are only used on macos runners so it's in an `if` to slightly speed up windows and linux builds.
          targets: ${{ matrix.platform == 'macos-latest' && 'aarch64-apple-darwin,x86_64-apple-darwin' || '' }}
      - uses: actions/cache@v3
        continue-on-error: false
        with:
          path: |
            ~/.cargo/bin/
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            target/
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
          restore-keys: ${{ runner.os }}-cargo-
      - name: install dependencies (ubuntu only)
        if: matrix.platform == 'ubuntu-22.04' # This must match the platform value defined above.
        run: |
          sudo apt-get update
          sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
      - name: Install Node dependencies
        run: |
          npm ci
      - name: Install plugin-runtime Node dependencies
        working-directory: plugin-runtime
        run: |
          npm ci
      - name: Install Protoc for plugin-runtime
        uses: arduino/setup-protoc@v3
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
      - name: Install yaak CLI
        run: go install github.com/yaakapp/yaakcli@latest
      - name: Run lint
        run: npm run lint
      - name: Checkout yaakapp/plugins
        uses: actions/checkout@v4
        with:
          repository: yaakapp/plugins
          path: ${{ env.YAAK_PLUGINS_DIR }}
      - name: Set version
        run: npm run replace-version
        env:
          YAAK_VERSION: ${{ github.ref_name }}
      - uses: tauri-apps/tauri-action@v0
        env:
          YAAK_PLUGINS_DIR: ${{ env.YAAK_PLUGINS_DIR }}
          YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
          TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
          ENABLE_CODE_SIGNING: ${{ secrets.APPLE_CERTIFICATE }}
          APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
          APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
          APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
          APPLE_ID: ${{ secrets.APPLE_ID }}
          APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
          APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
        with:
          tagName: 'v__VERSION__'
          releaseName: 'Release __VERSION__'
          releaseBody: 'https://yaak.app/changelog/__VERSION__'
          releaseDraft: true
          prerelease: false
          args: ${{ matrix.args }}

.gitignore (7 lines changed)

@@ -22,12 +22,5 @@ dist-ssr
*.njsproj
*.sln
*.sw?
.eslintcache
*.sqlite
*.sqlite-*
.cargo
.tmp
tmp

.husky/.gitignore (1 line changed)

@@ -1 +0,0 @@
_


@@ -1,4 +0,0 @@
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"
npx lint-staged

.nvmrc (2 lines changed)

@@ -1 +1 @@
20
18


@@ -1,4 +1,3 @@
node_modules/
dist/
out/
.prettierrc.cjs


@@ -3,10 +3,11 @@
<package-json value="$PROJECT_DIR$/package.json" />
<command value="run" />
<scripts>
<script value="start" />
<script value="tauri-dev" />
</scripts>
<node-interpreter value="project" />
<envs>
<env name="DATABASE_URL" value="sqlite://$USER_HOME$/Library/Application%20Support/co.schier.yaak/db.sqlite" />
</envs>
<method v="2" />
</configuration>


@@ -1,5 +0,0 @@
{
"name": "yaak-dev",
"adapter": "sqlite3",
"filename": "src-tauri/db.sqlite"
}


@@ -1,13 +0,0 @@
.PHONY: sqlx-prepare, dev, migrate, build
sqlx-prepare:
	cd src-tauri && cargo sqlx prepare --database-url 'sqlite://db.sqlite'
dev:
	npm run tauri-dev
migrate:
	cd src-tauri && cargo sqlx migrate run --database-url 'sqlite://db.sqlite?mode=rw'
build:
	./node_modules/.bin/tauri build


@@ -1,16 +1,3 @@
# Yaak Network Toolkit
# Tauri REST Client
The most fun you'll ever have working with APIs.
## Common Commands
```sh
# Start dev app
npm run tauri-dev
# Migration commands
cd src-tauri
cargo sqlx migrate add ${MIGRATION_NAME}
cargo sqlx migrate run --database-url 'sqlite://db.sqlite?mode=rw'
cargo sqlx prepare --database-url 'sqlite://db.sqlite'
```
It's a REST client, yo.

Binary files changed (diffs not shown); two removed images of 31 KiB and 356 KiB.


@@ -1,31 +1,17 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
<title>Yaak App</title>
<script src="http://localhost:8097"></script>
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Yaak App</title>
<!-- <script src="http://localhost:8097"></script>-->
</head>
<!-- Certain elements like webview (and maybe <select>?) will use background
color depending on document background color-->
<style>
html, body {
background-color: white;
}
@media (prefers-color-scheme: dark) {
html, body {
background-color: #1b1a29;
}
}
</style>
</head>
<body class="text-base">
<div id="root"></div>
<div id="cm-portal" class="cm-portal"></div>
<div id="react-portal"></div>
<div id="radix-portal" class="cm-portal"></div>
<script type="module" src="/src-web/main.tsx"></script>
</body>
<body>
<div id="root"></div>
<div id="cm-portal" class="cm-portal" style="pointer-events: auto"></div>
<div id="radix-portal" class="cm-portal"></div>
<script type="module" src="/src-web/main.tsx"></script>
</body>
</html>

package-lock.json (generated, 16741 lines changed)

File diff suppressed because it is too large


@@ -1,30 +1,23 @@
{
"name": "yaak-app",
"name": "tauri-app",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"start": "npm run tauri-dev:desktop",
"tauri-dev:desktop": "tauri dev --no-watch --config ./src-tauri/tauri-dev.conf.json",
"tauri-dev:ios": "tauri ios dev --force-ip-prompt --config ./src-tauri/tauri-dev.conf.json",
"tauri-build": "tauri build",
"tauri": "tauri",
"dev:js": "vite dev",
"tauri-dev": "tauri dev",
"tauri-build": "npm run build:icon && tauri build",
"build": "npm run build:frontend",
"dev": "vite dev",
"lint": "tsc && eslint . --ext .ts,.tsx",
"build:icon:release": "tauri icon design/icon.png --output ./src-tauri/icons/release",
"build:icon:dev": "tauri icon design/icon-dev.png --output ./src-tauri/icons/dev",
"build": "run-p build:*",
"build:js": "vite build",
"build:vendor-protoc": "node scripts/vendor-protoc.cjs",
"build:vendor-plugins": "node scripts/vendor-plugins.cjs",
"build:vendor-node": "node scripts/vendor-node.cjs",
"build:plugin-runtime": "npm run --prefix plugin-runtime build",
"prepare": "husky install",
"replace-version": "node scripts/replace-version.cjs"
"build:icon": "tauri icon src-tauri/icons/icon.png",
"build:frontend": "vite build",
"test": "vitest",
"coverage": "vitest run --coverage"
},
"dependencies": {
"@codemirror/commands": "^6.2.1",
"@codemirror/lang-javascript": "^6.2.2",
"@codemirror/lang-html": "^6.4.2",
"@codemirror/lang-javascript": "^6.1.4",
"@codemirror/lang-json": "^6.0.1",
"@codemirror/lang-xml": "^6.0.2",
"@codemirror/language": "^6.6.0",
@@ -32,83 +25,51 @@
"@lezer/generator": "^1.2.2",
"@lezer/highlight": "^1.1.3",
"@lezer/lr": "^1.3.3",
"@react-hook/resize-observer": "^1.2.6",
"@tailwindcss/container-queries": "^0.1.0",
"@tanstack/react-query": "^5.45.1",
"@tauri-apps/api": "^2.0.0-beta.15",
"@tauri-apps/plugin-clipboard-manager": "^2.1.0-beta.5",
"@tauri-apps/plugin-dialog": "^2.0.0-beta.7",
"@tauri-apps/plugin-fs": "^2.0.0-beta.7",
"@tauri-apps/plugin-os": "^2.0.0-beta.7",
"@tauri-apps/plugin-shell": "^2.0.0-beta.8",
"buffer": "^6.0.3",
"@radix-ui/react-dialog": "^1.0.2",
"@radix-ui/react-dropdown-menu": "^2.0.2",
"@radix-ui/react-icons": "^1.2.0",
"@radix-ui/react-popover": "1.0.3",
"@radix-ui/react-scroll-area": "^1.0.2",
"@radix-ui/react-separator": "^1.0.1",
"@radix-ui/react-tabs": "^1.0.3",
"@tanstack/react-query": "^4.24.10",
"@tauri-apps/api": "^1.2.0",
"@vitejs/plugin-react": "^3.1.0",
"classnames": "^2.3.2",
"cm6-graphql": "^0.0.9",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-router-dom": "^6.8.1",
"cm6-graphql": "^0.0.4-canary-b30a2325.0",
"codemirror": "^6.0.1",
"codemirror-json-schema": "^0.6.1",
"date-fns": "^3.3.1",
"fast-fuzzy": "^1.12.0",
"focus-trap-react": "^10.1.1",
"format-graphql": "^1.4.0",
"framer-motion": "^9.0.4",
"lucide-react": "^0.309.0",
"mime": "^4.0.1",
"papaparse": "^5.4.1",
"parse-color": "^1.0.0",
"react": "^18.2.0",
"react-dnd": "^16.0.1",
"react-dnd-html5-backend": "^16.0.1",
"react-dom": "^18.2.0",
"react-helmet-async": "^1.3.0",
"react-pdf": "^9.0.0",
"react-router-dom": "^6.8.1",
"react-use": "^17.4.0",
"slugify": "^1.6.6",
"tauri-plugin-log-api": "github:tauri-apps/tauri-plugin-log#v2",
"uuid": "^9.0.0",
"xml-formatter": "^3.6.2"
"react-use": "^17.4.0"
},
"devDependencies": {
"@tailwindcss/nesting": "^0.0.0-insiders.565cd3e",
"@tanstack/react-query-devtools": "^5.45.1",
"@tauri-apps/cli": "^2.0.0-beta.22",
"@tauri-apps/cli": "^1.2.2",
"@types/node": "^18.7.10",
"@types/papaparse": "^5.3.7",
"@types/parse-color": "^1.0.1",
"@types/parse-json": "^4.0.0",
"@types/react": "^18.0.31",
"@types/react-dom": "^18.0.11",
"@types/uuid": "^9.0.1",
"@typescript-eslint/eslint-plugin": "^7.0.2",
"@typescript-eslint/parser": "^7.0.2",
"@vitejs/plugin-react": "^4.2.1",
"@types/react-dom": "^18.0.6",
"@typescript-eslint/eslint-plugin": "^5.52.0",
"@typescript-eslint/parser": "^5.52.0",
"autoprefixer": "^10.4.13",
"decompress": "^4.2.1",
"concurrently": "^7.6.0",
"eslint": "^8.34.0",
"eslint-config-prettier": "^8.6.0",
"eslint-plugin-import": "^2.27.5",
"eslint-plugin-jsx-a11y": "^6.7.1",
"eslint-plugin-react": "^7.32.2",
"eslint-plugin-react-hooks": "^4.6.0",
"husky": "^8.0.3",
"internal-ip": "^8.0.0",
"lint-staged": "^15.0.2",
"nodejs-file-downloader": "^4.13.0",
"npm-run-all": "^4.1.5",
"postcss": "^8.4.21",
"postcss-nesting": "^11.2.1",
"prettier": "^2.8.4",
"react-devtools": "^4.27.2",
"rimraf": "^5.0.7",
"tailwindcss": "^3.2.7",
"typescript": "^5.4.5",
"vite": "^5.0.0",
"vite-plugin-static-copy": "^1.0.5",
"vite-plugin-svgr": "^4.2.0",
"vite-plugin-top-level-await": "^1.4.1"
},
"lint-staged": {
"*.{ts,tsx}": "eslint --cache --fix",
"*.{js,css,md}": "prettier --write"
"typescript": "^4.6.4",
"vite": "^4.0.0",
"vite-plugin-top-level-await": "^1.2.4",
"vitest": "^0.29.2"
}
}


@@ -1,3 +0,0 @@
build
node_modules
*.blob


@@ -1,5 +0,0 @@
{
"watch": ["src"],
"ext": "ts",
"exec": "node -r ts-node/register ./src/index.ts"
}

File diff suppressed because it is too large


@@ -1,23 +0,0 @@
{
"name": "@yaak/plugin-runtime",
"scripts": {
"dev": "nodemon",
"build": "run-p build:*",
"build:main": "esbuild src/index.ts --bundle --platform=node --outfile=build/index.cjs",
"build:worker": "esbuild src/index.worker.ts --bundle --platform=node --outfile=build/index.worker.cjs",
"build:proto": "grpc_tools_node_protoc --ts_proto_out=src/gen --ts_proto_opt=outputServices=nice-grpc,outputServices=generic-definitions,useExactTypes=false --proto_path=../proto ../proto/plugins/*.proto"
},
"dependencies": {
"long": "^5.2.3",
"nice-grpc": "^2.1.9",
"protobufjs": "^7.3.2"
},
"devDependencies": {
"grpc-tools": "^1.12.4",
"nodemon": "^3.1.4",
"npm-run-all": "^4.1.5",
"ts-node": "^10.9.2",
"ts-proto": "^1.180.0",
"typescript": "^5.5.2"
}
}


@@ -1,92 +0,0 @@
import { randomUUID } from 'node:crypto';
import path from 'node:path';
import { Worker } from 'node:worker_threads';
import { PluginInfo } from './plugins';
export interface ParentToWorkerEvent<T = any> {
callbackId: string;
name: string;
payload: T;
}
export type WorkerToParentSuccessEvent<T> = {
callbackId: string;
payload: T;
};
export type WorkerToParentErrorEvent = {
callbackId: string;
error: string;
};
export type WorkerToParentEvent<T = any> = WorkerToParentErrorEvent | WorkerToParentSuccessEvent<T>;
export class PluginHandle {
readonly pluginDir: string;
readonly #worker: Worker;
constructor(pluginDir: string) {
this.pluginDir = pluginDir;
const workerPath = path.join(__dirname, 'index.worker.cjs');
this.#worker = new Worker(workerPath, {
workerData: {
pluginDir: this.pluginDir,
},
});
this.#worker.on('error', this.#handleError.bind(this));
this.#worker.on('exit', this.#handleExit.bind(this));
}
async getInfo(): Promise<PluginInfo> {
return this.#callPlugin('info', null);
}
async runResponseFilter({ filter, body }: { filter: string; body: string }): Promise<string> {
return this.#callPlugin('run-filter', { filter, body });
}
async runExport(request: any): Promise<string> {
return this.#callPlugin('run-export', request);
}
async runImport(data: string): Promise<string> {
const result = await this.#callPlugin('run-import', data);
// Plugin returns object, but we convert to string
return JSON.stringify(result, null, 2);
}
#callPlugin<P, R>(name: string, payload: P): Promise<R> {
const callbackId = `cb_${randomUUID().replaceAll('-', '')}`;
return new Promise((resolve, reject) => {
const cb = (e: WorkerToParentEvent<R>) => {
if (e.callbackId !== callbackId) return;
if ('error' in e) {
reject(e.error);
} else {
resolve(e.payload as R);
}
this.#worker.removeListener('message', cb);
};
this.#worker.addListener('message', cb);
this.#worker.postMessage({ callbackId, name, payload });
});
}
async #handleError(err: Error) {
console.error('Plugin errored', this.pluginDir, err);
}
async #handleExit(code: number) {
if (code === 0) {
console.log('Plugin exited successfully', this.pluginDir);
} else {
console.log('Plugin exited with error', code, this.pluginDir);
}
}
}


@@ -1,44 +0,0 @@
import { PluginHandle } from './PluginHandle';
import { loadPlugins, PluginInfo } from './plugins';
export class PluginManager {
#handles: PluginHandle[] | null = null;
static #instance: PluginManager | null = null;
public static instance(): PluginManager {
if (PluginManager.#instance == null) {
PluginManager.#instance = new PluginManager();
PluginManager.#instance.plugins(); // Trigger workers to boot, as it takes a few seconds
}
return PluginManager.#instance;
}
async plugins(): Promise<PluginHandle[]> {
this.#handles = this.#handles ?? loadPlugins();
return this.#handles;
}
async #pluginsWithInfo(): Promise<{ plugin: PluginHandle; info: PluginInfo }[]> {
const plugins = await this.plugins();
return Promise.all(plugins.map(async (plugin) => ({ plugin, info: await plugin.getInfo() })));
}
async pluginsWith(capability: PluginInfo['capabilities'][0]): Promise<PluginHandle[]> {
return (await this.#pluginsWithInfo())
.filter((v) => v.info.capabilities.includes(capability))
.map((v) => v.plugin);
}
async plugin(name: string): Promise<PluginHandle | null> {
return (await this.#pluginsWithInfo()).find((v) => v.info.name === name)?.plugin ?? null;
}
async pluginOrThrow(name: string): Promise<PluginHandle> {
const plugin = await this.plugin(name);
if (plugin == null) {
throw new Error(`Failed to find plugin by ${name}`);
}
return plugin;
}
}


@@ -1,432 +0,0 @@
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v1.180.0
// protoc v3.19.1
// source: plugins/runtime.proto
/* eslint-disable */
import { type CallContext, type CallOptions } from "nice-grpc-common";
import * as _m0 from "protobufjs/minimal";
export const protobufPackage = "yaak.plugins.runtime";
export interface PluginInfo {
plugin: string;
}
export interface HookResponse {
info: PluginInfo | undefined;
data: string;
}
export interface HookImportRequest {
data: string;
}
export interface HookResponseFilterRequest {
filter: string;
body: string;
contentType: string;
}
export interface HookExportRequest {
request: string;
}
function createBasePluginInfo(): PluginInfo {
return { plugin: "" };
}
export const PluginInfo = {
encode(message: PluginInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.plugin !== "") {
writer.uint32(10).string(message.plugin);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): PluginInfo {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBasePluginInfo();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag !== 10) {
break;
}
message.plugin = reader.string();
continue;
}
if ((tag & 7) === 4 || tag === 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): PluginInfo {
return { plugin: isSet(object.plugin) ? globalThis.String(object.plugin) : "" };
},
toJSON(message: PluginInfo): unknown {
const obj: any = {};
if (message.plugin !== "") {
obj.plugin = message.plugin;
}
return obj;
},
create(base?: DeepPartial<PluginInfo>): PluginInfo {
return PluginInfo.fromPartial(base ?? {});
},
fromPartial(object: DeepPartial<PluginInfo>): PluginInfo {
const message = createBasePluginInfo();
message.plugin = object.plugin ?? "";
return message;
},
};
function createBaseHookResponse(): HookResponse {
return { info: undefined, data: "" };
}
export const HookResponse = {
encode(message: HookResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.info !== undefined) {
PluginInfo.encode(message.info, writer.uint32(10).fork()).ldelim();
}
if (message.data !== "") {
writer.uint32(18).string(message.data);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): HookResponse {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseHookResponse();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag !== 10) {
break;
}
message.info = PluginInfo.decode(reader, reader.uint32());
continue;
case 2:
if (tag !== 18) {
break;
}
message.data = reader.string();
continue;
}
if ((tag & 7) === 4 || tag === 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): HookResponse {
return {
info: isSet(object.info) ? PluginInfo.fromJSON(object.info) : undefined,
data: isSet(object.data) ? globalThis.String(object.data) : "",
};
},
toJSON(message: HookResponse): unknown {
const obj: any = {};
if (message.info !== undefined) {
obj.info = PluginInfo.toJSON(message.info);
}
if (message.data !== "") {
obj.data = message.data;
}
return obj;
},
create(base?: DeepPartial<HookResponse>): HookResponse {
return HookResponse.fromPartial(base ?? {});
},
fromPartial(object: DeepPartial<HookResponse>): HookResponse {
const message = createBaseHookResponse();
message.info = (object.info !== undefined && object.info !== null)
? PluginInfo.fromPartial(object.info)
: undefined;
message.data = object.data ?? "";
return message;
},
};
function createBaseHookImportRequest(): HookImportRequest {
return { data: "" };
}
export const HookImportRequest = {
encode(message: HookImportRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.data !== "") {
writer.uint32(10).string(message.data);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): HookImportRequest {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseHookImportRequest();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag !== 10) {
break;
}
message.data = reader.string();
continue;
}
if ((tag & 7) === 4 || tag === 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): HookImportRequest {
return { data: isSet(object.data) ? globalThis.String(object.data) : "" };
},
toJSON(message: HookImportRequest): unknown {
const obj: any = {};
if (message.data !== "") {
obj.data = message.data;
}
return obj;
},
create(base?: DeepPartial<HookImportRequest>): HookImportRequest {
return HookImportRequest.fromPartial(base ?? {});
},
fromPartial(object: DeepPartial<HookImportRequest>): HookImportRequest {
const message = createBaseHookImportRequest();
message.data = object.data ?? "";
return message;
},
};
function createBaseHookResponseFilterRequest(): HookResponseFilterRequest {
return { filter: "", body: "", contentType: "" };
}
export const HookResponseFilterRequest = {
encode(message: HookResponseFilterRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.filter !== "") {
writer.uint32(10).string(message.filter);
}
if (message.body !== "") {
writer.uint32(18).string(message.body);
}
if (message.contentType !== "") {
writer.uint32(26).string(message.contentType);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): HookResponseFilterRequest {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseHookResponseFilterRequest();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag !== 10) {
break;
}
message.filter = reader.string();
continue;
case 2:
if (tag !== 18) {
break;
}
message.body = reader.string();
continue;
case 3:
if (tag !== 26) {
break;
}
message.contentType = reader.string();
continue;
}
if ((tag & 7) === 4 || tag === 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): HookResponseFilterRequest {
return {
filter: isSet(object.filter) ? globalThis.String(object.filter) : "",
body: isSet(object.body) ? globalThis.String(object.body) : "",
contentType: isSet(object.contentType) ? globalThis.String(object.contentType) : "",
};
},
toJSON(message: HookResponseFilterRequest): unknown {
const obj: any = {};
if (message.filter !== "") {
obj.filter = message.filter;
}
if (message.body !== "") {
obj.body = message.body;
}
if (message.contentType !== "") {
obj.contentType = message.contentType;
}
return obj;
},
create(base?: DeepPartial<HookResponseFilterRequest>): HookResponseFilterRequest {
return HookResponseFilterRequest.fromPartial(base ?? {});
},
fromPartial(object: DeepPartial<HookResponseFilterRequest>): HookResponseFilterRequest {
const message = createBaseHookResponseFilterRequest();
message.filter = object.filter ?? "";
message.body = object.body ?? "";
message.contentType = object.contentType ?? "";
return message;
},
};
function createBaseHookExportRequest(): HookExportRequest {
return { request: "" };
}
export const HookExportRequest = {
encode(message: HookExportRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.request !== "") {
writer.uint32(10).string(message.request);
}
return writer;
},
decode(input: _m0.Reader | Uint8Array, length?: number): HookExportRequest {
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
let end = length === undefined ? reader.len : reader.pos + length;
const message = createBaseHookExportRequest();
while (reader.pos < end) {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
if (tag !== 10) {
break;
}
message.request = reader.string();
continue;
}
if ((tag & 7) === 4 || tag === 0) {
break;
}
reader.skipType(tag & 7);
}
return message;
},
fromJSON(object: any): HookExportRequest {
return { request: isSet(object.request) ? globalThis.String(object.request) : "" };
},
toJSON(message: HookExportRequest): unknown {
const obj: any = {};
if (message.request !== "") {
obj.request = message.request;
}
return obj;
},
create(base?: DeepPartial<HookExportRequest>): HookExportRequest {
return HookExportRequest.fromPartial(base ?? {});
},
fromPartial(object: DeepPartial<HookExportRequest>): HookExportRequest {
const message = createBaseHookExportRequest();
message.request = object.request ?? "";
return message;
},
};
export type PluginRuntimeDefinition = typeof PluginRuntimeDefinition;
export const PluginRuntimeDefinition = {
name: "PluginRuntime",
fullName: "yaak.plugins.runtime.PluginRuntime",
methods: {
hookImport: {
name: "hookImport",
requestType: HookImportRequest,
requestStream: false,
responseType: HookResponse,
responseStream: false,
options: {},
},
hookExport: {
name: "hookExport",
requestType: HookExportRequest,
requestStream: false,
responseType: HookResponse,
responseStream: false,
options: {},
},
hookResponseFilter: {
name: "hookResponseFilter",
requestType: HookResponseFilterRequest,
requestStream: false,
responseType: HookResponse,
responseStream: false,
options: {},
},
},
} as const;
export interface PluginRuntimeServiceImplementation<CallContextExt = {}> {
hookImport(request: HookImportRequest, context: CallContext & CallContextExt): Promise<DeepPartial<HookResponse>>;
hookExport(request: HookExportRequest, context: CallContext & CallContextExt): Promise<DeepPartial<HookResponse>>;
hookResponseFilter(
request: HookResponseFilterRequest,
context: CallContext & CallContextExt,
): Promise<DeepPartial<HookResponse>>;
}
export interface PluginRuntimeClient<CallOptionsExt = {}> {
hookImport(request: DeepPartial<HookImportRequest>, options?: CallOptions & CallOptionsExt): Promise<HookResponse>;
hookExport(request: DeepPartial<HookExportRequest>, options?: CallOptions & CallOptionsExt): Promise<HookResponse>;
hookResponseFilter(
request: DeepPartial<HookResponseFilterRequest>,
options?: CallOptions & CallOptionsExt,
): Promise<HookResponse>;
}
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
export type DeepPartial<T> = T extends Builtin ? T
: T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
: T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
: T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
: Partial<T>;
function isSet(value: any): boolean {
return value !== null && value !== undefined;
}


@@ -1,87 +0,0 @@
import { isAbortError } from 'abort-controller-x';
import { createServer, ServerError, ServerMiddlewareCall, Status } from 'nice-grpc';
import { CallContext } from 'nice-grpc-common';
import * as fs from 'node:fs';
import {
DeepPartial,
HookExportRequest,
HookImportRequest,
HookResponse,
HookResponseFilterRequest,
PluginRuntimeDefinition,
PluginRuntimeServiceImplementation,
} from './gen/plugins/runtime';
import { PluginManager } from './PluginManager';
class PluginRuntimeService implements PluginRuntimeServiceImplementation {
#manager: PluginManager;
constructor() {
this.#manager = PluginManager.instance();
}
async hookExport(request: HookExportRequest): Promise<DeepPartial<HookResponse>> {
const plugin = await this.#manager.pluginOrThrow('exporter-curl');
const data = await plugin.runExport(JSON.parse(request.request));
const info = { plugin: (await plugin.getInfo()).name };
return { info, data };
}
async hookImport(request: HookImportRequest): Promise<DeepPartial<HookResponse>> {
const plugins = await this.#manager.pluginsWith('import');
for (const p of plugins) {
const data = await p.runImport(request.data);
if (data != null && data !== 'null') {
const info = { plugin: (await p.getInfo()).name };
return { info, data };
}
}
throw new ServerError(Status.UNKNOWN, 'No importers found for data');
}
async hookResponseFilter(request: HookResponseFilterRequest): Promise<DeepPartial<HookResponse>> {
const pluginName = request.contentType.includes('json') ? 'filter-jsonpath' : 'filter-xpath';
const plugin = await this.#manager.pluginOrThrow(pluginName);
const data = await plugin.runResponseFilter(request);
const info = { plugin: (await plugin.getInfo()).name };
return { info, data };
}
}
let server = createServer();
async function* errorHandlingMiddleware<Request, Response>(
call: ServerMiddlewareCall<Request, Response>,
context: CallContext,
) {
try {
return yield* call.next(call.request, context);
} catch (error: unknown) {
if (error instanceof ServerError || isAbortError(error)) {
throw error;
}
let details = String(error);
if (process.env.NODE_ENV === 'development') {
// @ts-ignore
details += `: ${error.stack}`;
}
throw new ServerError(Status.UNKNOWN, details);
}
}
server = server.use(errorHandlingMiddleware);
server.add(PluginRuntimeDefinition, new PluginRuntimeService());
// Start on random port if YAAK_GRPC_PORT_FILE_PATH is set, or :4000
const addr = process.env.YAAK_GRPC_PORT_FILE_PATH ? 'localhost:0' : 'localhost:4000';
server.listen(addr).then((port) => {
console.log('gRPC server listening on', `http://localhost:${port}`);
if (process.env.YAAK_GRPC_PORT_FILE_PATH) {
console.log('Wrote port file to', process.env.YAAK_GRPC_PORT_FILE_PATH);
fs.writeFileSync(process.env.YAAK_GRPC_PORT_FILE_PATH, JSON.stringify({ port }, null, 2));
}
});


@@ -1,76 +0,0 @@
import { readFileSync } from 'node:fs';
import path from 'node:path';
import { parentPort, workerData } from 'node:worker_threads';
import { ParentToWorkerEvent } from './PluginHandle';
import { PluginInfo } from './plugins';
new Promise<void>(async (resolve, reject) => {
const { pluginDir } = workerData;
const pathMod = path.join(pluginDir, 'build/index.js');
const pathPkg = path.join(pluginDir, 'package.json');
let pkg: { [x: string]: any };
try {
pkg = JSON.parse(readFileSync(pathPkg, 'utf8'));
} catch (err) {
// TODO: Do something better here
reject(err);
return;
}
const mod = (await import(`file://${pathMod}`)).default ?? {};
const info: PluginInfo = {
capabilities: [],
name: pkg['name'] ?? 'n/a',
dir: pluginDir,
};
if (typeof mod['pluginHookImport'] === 'function') {
info.capabilities.push('import');
}
if (typeof mod['pluginHookExport'] === 'function') {
info.capabilities.push('export');
}
if (typeof mod['pluginHookResponseFilter'] === 'function') {
info.capabilities.push('filter');
}
console.log('Loaded plugin', info.name, info.capabilities, info.dir);
function reply<T>(originalMsg: ParentToWorkerEvent, payload: T) {
parentPort!.postMessage({ payload, callbackId: originalMsg.callbackId });
}
function replyErr(originalMsg: ParentToWorkerEvent, error: unknown) {
parentPort!.postMessage({
error: String(error),
callbackId: originalMsg.callbackId,
});
}
parentPort!.on('message', async (msg: ParentToWorkerEvent) => {
try {
const ctx = { todo: 'implement me' };
if (msg.name === 'run-import') {
reply(msg, await mod.pluginHookImport(ctx, msg.payload));
} else if (msg.name === 'run-filter') {
reply(msg, await mod.pluginHookResponseFilter(ctx, msg.payload));
} else if (msg.name === 'run-export') {
reply(msg, await mod.pluginHookExport(ctx, msg.payload));
} else if (msg.name === 'info') {
reply(msg, info);
} else {
console.log('Unknown message', msg);
}
} catch (err: unknown) {
replyErr(msg, err);
}
});
resolve();
}).catch((err) => {
console.log('failed to boot plugin', err);
});


@@ -1,18 +0,0 @@
import * as fs from 'node:fs';
import path from 'node:path';
import { PluginHandle } from './PluginHandle';
export interface PluginInfo {
name: string;
dir: string;
capabilities: ('import' | 'export' | 'filter')[];
}
export function loadPlugins(): PluginHandle[] {
const pluginsDir = process.env.YAAK_PLUGINS_DIR;
if (!pluginsDir) throw new Error('YAAK_PLUGINS_DIR is not set');
console.log('Loading plugins from', pluginsDir);
const pluginDirs = fs.readdirSync(pluginsDir).map((p) => path.join(pluginsDir, p));
return pluginDirs.map((pluginDir) => new PluginHandle(pluginDir));
}


@@ -1,25 +0,0 @@
{
"compilerOptions": {
"module": "node16",
"strict": true,
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"target": "es2021",
"lib": ["es2021"],
"noImplicitAny": false,
"moduleResolution": "node",
"sourceMap": true,
"outDir": "dist",
"baseUrl": ".",
"skipLibCheck": true,
"paths": {
"*": [
"node_modules/*",
"src/types/*"
]
}
},
"include": [
"src/**/*"
]
}


@@ -1,32 +0,0 @@
syntax = "proto3";
package yaak.plugins.runtime;
service PluginRuntime {
rpc hookImport (HookImportRequest) returns (HookResponse);
rpc hookExport (HookExportRequest) returns (HookResponse);
rpc hookResponseFilter (HookResponseFilterRequest) returns (HookResponse);
}
message PluginInfo {
string plugin = 1;
}
message HookResponse {
PluginInfo info = 1;
string data = 2;
}
message HookImportRequest {
string data = 1;
}
message HookResponseFilterRequest {
string filter = 1;
string body = 2;
string contentType = 3;
}
message HookExportRequest {
string request = 1;
}


@@ -1 +0,0 @@
edition = "2018"


@@ -1,15 +0,0 @@
const path = require('path');
const fs = require('fs');
const version = process.env.YAAK_VERSION?.replace('v', '');
if (!version) {
throw new Error('YAAK_VERSION environment variable not set')
}
const tauriConfigPath = path.join(__dirname, '../src-tauri/tauri.conf.json');
const tauriConfig = JSON.parse(fs.readFileSync(tauriConfigPath, 'utf8'));
tauriConfig.version = version;
console.log('Writing version ' + version + ' to ' + tauriConfigPath)
fs.writeFileSync(tauriConfigPath, JSON.stringify(tauriConfig, null, 2));


@@ -1,69 +0,0 @@
const path = require('node:path');
const decompress = require('decompress');
const Downloader = require("nodejs-file-downloader");
const {rmSync, cpSync, mkdirSync, existsSync} = require("node:fs");
const {execSync} = require("node:child_process");
const NODE_VERSION = 'v22.5.1';
// `${process.platform}_${process.arch}`
const MAC_ARM = 'darwin_arm64';
const MAC_X64 = 'darwin_x64';
const LNX_X64 = 'linux_x64';
const WIN_X64 = 'win32_x64';
const URL_MAP = {
[MAC_ARM]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-darwin-arm64.tar.gz`,
[MAC_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-darwin-x64.tar.gz`,
[LNX_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-linux-x64.tar.gz`,
[WIN_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-win-x64.zip`,
};
const SRC_BIN_MAP = {
[MAC_ARM]: `node-${NODE_VERSION}-darwin-arm64/bin/node`,
[MAC_X64]: `node-${NODE_VERSION}-darwin-x64/bin/node`,
[LNX_X64]: `node-${NODE_VERSION}-linux-x64/bin/node`,
[WIN_X64]: `node-${NODE_VERSION}-win-x64/node.exe`,
};
const DST_BIN_MAP = {
darwin_arm64: 'yaaknode-aarch64-apple-darwin',
darwin_x64: 'yaaknode-x86_64-apple-darwin',
linux_x64: 'yaaknode-x86_64-unknown-linux-gnu',
win32_x64: 'yaaknode-x86_64-pc-windows-msvc.exe',
};
const key = `${process.platform}_${process.env.YAAK_TARGET_ARCH ?? process.arch}`;
const destDir = path.join(__dirname, `..`, 'src-tauri', 'vendored', 'node');
const binDest = path.join(destDir, DST_BIN_MAP[key]);
console.log(`Vendoring NodeJS ${NODE_VERSION} for ${key}`);
if (existsSync(binDest) && execSync(`${binDest} --version`).toString('utf-8').trim() === NODE_VERSION) {
console.log("NodeJS already vendored");
return;
}
rmSync(destDir, {recursive: true, force: true});
mkdirSync(destDir, {recursive: true});
(async function () {
const url = URL_MAP[key];
const tmpDir = path.join(__dirname, 'tmp', Date.now().toString());
// Download GitHub release artifact
const {filePath} = await new Downloader({url, directory: tmpDir,}).download();
// Decompress to the same directory
await decompress(filePath, tmpDir, {});
// Copy binary
const binSrc = path.join(tmpDir, SRC_BIN_MAP[key]);
cpSync(binSrc, binDest);
rmSync(tmpDir, {recursive: true, force: true});
console.log("Downloaded NodeJS to", binDest);
})().catch(err => {
console.log('Script failed:', err);
process.exit(1);
});


@@ -1,25 +0,0 @@
const {readdirSync, cpSync} = require("node:fs");
const path = require("node:path");
const {execSync} = require("node:child_process");
const pluginsDir = process.env.YAAK_PLUGINS_DIR;
if (!pluginsDir) {
console.log("YAAK_PLUGINS_DIR is not set");
process.exit(1);
}
console.log('Installing Yaak plugins dependencies', pluginsDir);
execSync('npm ci', {cwd: pluginsDir});
console.log('Building Yaak plugins', pluginsDir);
execSync('npm run build', {cwd: pluginsDir});
console.log('Copying Yaak plugins to', pluginsDir);
const pluginsRoot = path.join(pluginsDir, 'plugins');
for (const name of readdirSync(pluginsRoot)) {
const dir = path.join(pluginsRoot, name);
if (name.startsWith('.')) continue;
const destDir = path.join(__dirname, '../src-tauri/vendored/plugins/', name);
console.log(`Copying ${name} to ${destDir}`);
cpSync(path.join(dir, 'package.json'), path.join(destDir, 'package.json'));
cpSync(path.join(dir, 'build/index.js'), path.join(destDir, 'build/index.js'));
}


@@ -1,62 +0,0 @@
const decompress = require('decompress');
const Downloader = require("nodejs-file-downloader");
const path = require("node:path");
const {rmSync, mkdirSync, cpSync} = require("node:fs");
// `${process.platform}_${process.arch}`
const MAC_ARM = 'darwin_arm64';
const MAC_X64 = 'darwin_x64';
const LNX_X64 = 'linux_x64';
const WIN_X64 = 'win32_x64';
const URL_MAP = {
[MAC_ARM]: 'https://github.com/protocolbuffers/protobuf/releases/download/v27.2/protoc-27.2-osx-aarch_64.zip',
[MAC_X64]: 'https://github.com/protocolbuffers/protobuf/releases/download/v27.2/protoc-27.2-osx-x86_64.zip',
[LNX_X64]: 'https://github.com/protocolbuffers/protobuf/releases/download/v27.2/protoc-27.2-linux-x86_64.zip',
[WIN_X64]: 'https://github.com/protocolbuffers/protobuf/releases/download/v27.2/protoc-27.2-win64.zip',
};
const SRC_BIN_MAP = {
[MAC_ARM]: 'bin/protoc',
[MAC_X64]: 'bin/protoc',
[LNX_X64]: 'bin/protoc',
[WIN_X64]: 'bin/protoc.exe',
};
const DST_BIN_MAP = {
[MAC_ARM]: 'yaakprotoc-aarch64-apple-darwin',
[MAC_X64]: 'yaakprotoc-x86_64-apple-darwin',
[LNX_X64]: 'yaakprotoc-x86_64-unknown-linux-gnu',
[WIN_X64]: 'yaakprotoc-x86_64-pc-windows-msvc.exe',
};
const dstDir = path.join(__dirname, `..`, 'src-tauri', 'vendored', 'protoc');
rmSync(dstDir, {recursive: true, force: true});
mkdirSync(dstDir, {recursive: true});
(async function () {
const key = `${process.platform}_${process.env.YAAK_TARGET_ARCH ?? process.arch}`;
console.log("Vendoring protoc binary for", key);
const url = URL_MAP[key];
const tmpDir = path.join(__dirname, 'tmp', Date.now().toString());
// Download GitHub release artifact
const {filePath} = await new Downloader({url, directory: tmpDir,}).download();
// Decompress to the same directory
await decompress(filePath, tmpDir, {});
// Copy binary
const binSrc = path.join(tmpDir, SRC_BIN_MAP[key]);
const binDst = path.join(dstDir, DST_BIN_MAP[key]);
cpSync(binSrc, binDst);
// Copy other files
const includeSrc = path.join(tmpDir, 'include');
const includeDst = path.join(dstDir, 'include');
cpSync(includeSrc, includeDst, {recursive: true});
rmSync(tmpDir, {recursive: true, force: true});
console.log("Downloaded protoc to", binDst);
})().catch(err => console.log('Script failed:', err));


@@ -2,4 +2,3 @@
# will have compiled files and executables
/target/
vendored


@@ -1,50 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT model, created_at, updated_at, namespace, key, value\n FROM key_values\n WHERE namespace = ? AND key = ?\n ",
"describe": {
"columns": [
{
"name": "model",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 1,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 2,
"type_info": "Datetime"
},
{
"name": "namespace",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "key",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "value",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
false,
false,
false,
false,
false
]
},
"hash": "06aaf8f4a17566f1d25da2a60f0baf4b5fc28c3cf0c001a84e25edf9eab3c7e3"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n DELETE FROM http_responses\n WHERE id = ?\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "07d1a1c7b4f3d9625a766e60fd57bb779b71dae30e5bbce34885a911a5a42428"
}


@@ -1,116 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, request_id, updated_at, created_at, url, status,\n status_reason, content_length, body_path, elapsed, elapsed_headers, error,\n version, remote_addr,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpResponseHeader>>\"\n FROM http_responses\n WHERE id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "request_id",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "updated_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "created_at",
"ordinal": 5,
"type_info": "Datetime"
},
{
"name": "url",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "status",
"ordinal": 7,
"type_info": "Int64"
},
{
"name": "status_reason",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "content_length",
"ordinal": 9,
"type_info": "Int64"
},
{
"name": "body_path",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "elapsed",
"ordinal": 11,
"type_info": "Int64"
},
{
"name": "elapsed_headers",
"ordinal": 12,
"type_info": "Int64"
},
{
"name": "error",
"ordinal": 13,
"type_info": "Text"
},
{
"name": "version",
"ordinal": 14,
"type_info": "Text"
},
{
"name": "remote_addr",
"ordinal": 15,
"type_info": "Text"
},
{
"name": "headers!: sqlx::types::Json<Vec<HttpResponseHeader>>",
"ordinal": 16,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
true,
true,
true,
false,
false,
true,
true,
true,
false
]
},
"hash": "0fa6b56f8c996d14908a56928674b4b35af5fa36f63dc48b9b66ee6dfde78976"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO workspaces (\n id, name, description, variables, setting_request_timeout,\n setting_follow_redirects, setting_validate_certificates\n )\n VALUES (?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n description = excluded.description,\n variables = excluded.variables,\n setting_request_timeout = excluded.setting_request_timeout,\n setting_follow_redirects = excluded.setting_follow_redirects,\n setting_validate_certificates = excluded.setting_validate_certificates\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 7
},
"nullable": []
},
"hash": "12b265491d1ebba19e1ce8a660e458ffbcd8c0850aef16ba1f70e358623ac66a"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO environments (\n id, workspace_id, name, variables\n )\n VALUES (?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n variables = excluded.variables\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "13cb883199e81966174e6fda9c252bf7213fe01b5346266c0a89dc0ac89eda64"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO grpc_events (\n id, workspace_id, request_id, connection_id, content, event_type, metadata,\n status, error\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n content = excluded.content,\n event_type = excluded.event_type,\n metadata = excluded.metadata,\n status = excluded.status,\n error = excluded.error\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 9
},
"nullable": []
},
"hash": "14930955e8a914e292dfbebfce2ea43cc41c1d517386ed816c16d436bf626bf3"
}


@@ -1,104 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, folder_id, created_at, updated_at, name, sort_priority,\n url, service, method, message, authentication_type,\n authentication AS \"authentication!: Json<HashMap<String, JsonValue>>\",\n metadata AS \"metadata!: sqlx::types::Json<Vec<GrpcMetadataEntry>>\"\n FROM grpc_requests\n WHERE workspace_id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "folder_id",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 5,
"type_info": "Datetime"
},
{
"name": "name",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "sort_priority",
"ordinal": 7,
"type_info": "Float"
},
{
"name": "url",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "service",
"ordinal": 9,
"type_info": "Text"
},
{
"name": "method",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "message",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "authentication_type",
"ordinal": 12,
"type_info": "Text"
},
{
"name": "authentication!: Json<HashMap<String, JsonValue>>",
"ordinal": 13,
"type_info": "Text"
},
{
"name": "metadata!: sqlx::types::Json<Vec<GrpcMetadataEntry>>",
"ordinal": 14,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
true,
false,
false,
false,
false,
false,
true,
true,
false,
true,
false,
false
]
},
"hash": "1821c2f60b9fa4514d58eb73b23e25ad683b80b9bd0c2944063190a0d0a19ee5"
}
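The quoted aliases such as `authentication!: Json<HashMap<String, JsonValue>>` are sqlx type overrides: the trailing `!` asserts the column is non-null, and the type after the colon tells the macro how to decode it (here via sqlx's `Json` wrapper). A minimal sketch of the pattern with `query_as!`, using a hypothetical two-column projection of the SELECT above:

use std::collections::HashMap;
use serde_json::Value as JsonValue;
use sqlx::types::Json;
use sqlx::SqlitePool;

// Hypothetical row type; only `id` and `authentication` are kept.
struct GrpcRequestSummary {
    id: String,
    authentication: Json<HashMap<String, JsonValue>>,
}

async fn grpc_request_summaries(
    pool: &SqlitePool,
    workspace_id: &str,
) -> Result<Vec<GrpcRequestSummary>, sqlx::Error> {
    sqlx::query_as!(
        GrpcRequestSummary,
        r#"
        SELECT
            id,
            authentication AS "authentication!: Json<HashMap<String, JsonValue>>"
        FROM grpc_requests
        WHERE workspace_id = ?
        "#,
        workspace_id
    )
    .fetch_all(pool)
    .await
}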


@@ -1,80 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, request_id, connection_id, created_at, content, status, error,\n event_type AS \"event_type!: GrpcEventType\",\n metadata AS \"metadata!: sqlx::types::Json<HashMap<String, String>>\"\n FROM grpc_events\n WHERE connection_id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "request_id",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "connection_id",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 5,
"type_info": "Datetime"
},
{
"name": "content",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "status",
"ordinal": 7,
"type_info": "Int64"
},
{
"name": "error",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "event_type!: GrpcEventType",
"ordinal": 9,
"type_info": "Text"
},
{
"name": "metadata!: sqlx::types::Json<HashMap<String, String>>",
"ordinal": 10,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
true,
true,
false,
false
]
},
"hash": "18ada3bb42c29f1940ab2e61961d79cdd69210f3dc2076aedcadeba8e34dcb6e"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO settings (id)\n VALUES ('default')\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 0
},
"nullable": []
},
"hash": "2c181a4dc13efc52fe6a5a68291c5678a9624020df4ea744e78396f6926d5c88"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO http_responses (\n id, request_id, workspace_id, elapsed, elapsed_headers, url, status, status_reason,\n content_length, body_path, headers, version, remote_addr\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 13
},
"nullable": []
},
"hash": "2c9658a639c5e4994ae9c8ec30bd4e40a1945d640962991f879928619950ef62"
}


@@ -1,92 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, request_id, created_at, updated_at, service,\n method, elapsed, status, error, url,\n trailers AS \"trailers!: sqlx::types::Json<HashMap<String, String>>\"\n FROM grpc_connections\n WHERE request_id = ?\n ORDER BY created_at DESC\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "request_id",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 5,
"type_info": "Datetime"
},
{
"name": "service",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "method",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "elapsed",
"ordinal": 8,
"type_info": "Int64"
},
{
"name": "status",
"ordinal": 9,
"type_info": "Int64"
},
{
"name": "error",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "url",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "trailers!: sqlx::types::Json<HashMap<String, String>>",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "3e8651cca7feecc208a676dfd24c7d8775040d5287c16890056dcb474674edfb"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n DELETE FROM grpc_connections\n WHERE id = ?\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "42bc0ded60b44dab19daf6d8fc7df83d83af5d88ea0b84514fdc877a668c27cd"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n DELETE FROM http_requests\n WHERE id = ?\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "448a1d1f1866ab42c0f81fcf8eb2930bf21dfdd43ca4831bc1a198cf45ac3732"
}


@@ -1,62 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, created_at, updated_at, folder_id, name, sort_priority\n FROM folders\n WHERE workspace_id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "folder_id",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "name",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "sort_priority",
"ordinal": 7,
"type_info": "Float"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "558e72df3c6f2635c6b3d52a199f9a5f7a3d82b379ff9af36645dcfb92548fdd"
}


@@ -1,110 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, folder_id, created_at, updated_at, name, url, method,\n body_type, authentication_type, sort_priority,\n url_parameters AS \"url_parameters!: sqlx::types::Json<Vec<HttpUrlParameter>>\",\n body AS \"body!: Json<HashMap<String, JsonValue>>\",\n authentication AS \"authentication!: Json<HashMap<String, JsonValue>>\",\n headers AS \"headers!: sqlx::types::Json<Vec<HttpRequestHeader>>\"\n FROM http_requests\n WHERE id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "folder_id",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 5,
"type_info": "Datetime"
},
{
"name": "name",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "url",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "method",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "body_type",
"ordinal": 9,
"type_info": "Text"
},
{
"name": "authentication_type",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "sort_priority",
"ordinal": 11,
"type_info": "Float"
},
{
"name": "url_parameters!: sqlx::types::Json<Vec<HttpUrlParameter>>",
"ordinal": 12,
"type_info": "Text"
},
{
"name": "body!: Json<HashMap<String, JsonValue>>",
"ordinal": 13,
"type_info": "Text"
},
{
"name": "authentication!: Json<HashMap<String, JsonValue>>",
"ordinal": 14,
"type_info": "Text"
},
{
"name": "headers!: sqlx::types::Json<Vec<HttpRequestHeader>>",
"ordinal": 15,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
true,
false,
false,
false,
false,
false,
true,
true,
false,
false,
false,
false,
false
]
},
"hash": "573db23160de025e5c72efb90be7fff5e3ec4619b962d149fdd4d618fe02c680"
}


@@ -1,56 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, created_at, updated_at, name,\n variables AS \"variables!: sqlx::types::Json<Vec<EnvironmentVariable>>\"\n FROM environments\n WHERE id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "name",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "variables!: sqlx::types::Json<Vec<EnvironmentVariable>>",
"ordinal": 6,
"type_info": "Null"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
false,
false
]
},
"hash": "5765e9565a8b89c5bc2d72197e0e4a1093739e9abba69f6fe5527d453fab4db8"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO grpc_requests (\n id, name, workspace_id, folder_id, sort_priority, url, service, method, message,\n authentication_type, authentication, metadata\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n workspace_id = excluded.workspace_id,\n name = excluded.name,\n folder_id = excluded.folder_id,\n sort_priority = excluded.sort_priority,\n url = excluded.url,\n service = excluded.service,\n method = excluded.method,\n message = excluded.message,\n authentication_type = excluded.authentication_type,\n authentication = excluded.authentication,\n metadata = excluded.metadata\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 12
},
"nullable": []
},
"hash": "5af82cd333895d3d7d67a92f37b0feb338f615b88aea2bd09cb5809008c645a3"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO http_requests (\n id, workspace_id, folder_id, name, url, url_parameters, method, body, body_type,\n authentication, authentication_type, headers, sort_priority\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n workspace_id = excluded.workspace_id,\n name = excluded.name,\n folder_id = excluded.folder_id,\n method = excluded.method,\n headers = excluded.headers,\n body = excluded.body,\n body_type = excluded.body_type,\n authentication = excluded.authentication,\n authentication_type = excluded.authentication_type,\n url = excluded.url,\n url_parameters = excluded.url_parameters,\n sort_priority = excluded.sort_priority\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 13
},
"nullable": []
},
"hash": "5f2f40062abbe93e23b38876319cf16d4d2b3f8d0be32ffe7848528c725e1429"
}


@@ -1,56 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, created_at, updated_at, workspace_id, name,\n cookies AS \"cookies!: sqlx::types::Json<Vec<JsonValue>>\"\n FROM cookie_jars WHERE workspace_id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 2,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "workspace_id",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "name",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "cookies!: sqlx::types::Json<Vec<JsonValue>>",
"ordinal": 6,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
false,
false
]
},
"hash": "612efa9ac45723dc604a88f5e7e288b4055fec4ba7d9102131bd255c037fa021"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO grpc_connections (\n id, workspace_id, request_id, service, method, elapsed,\n status, error, trailers, url\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n service = excluded.service,\n method = excluded.method,\n elapsed = excluded.elapsed,\n status = excluded.status,\n error = excluded.error,\n trailers = excluded.trailers,\n url = excluded.url\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 10
},
"nullable": []
},
"hash": "66deed028199c78ed15ea2f837907887c2a2cb564d1d076dd4ebf0ecbc82e098"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n UPDATE settings SET (\n theme, appearance, theme_dark, theme_light, update_channel,\n interface_font_size, interface_scale, editor_font_size, editor_soft_wrap,\n open_workspace_new_window\n ) = (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) WHERE id = 'default';\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 10
},
"nullable": []
},
"hash": "6b5edf45a6799cd7f87c23a3c7f818ad110d58c601f694a619d9345ae9e8e11d"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n DELETE FROM workspaces\n WHERE id = ?\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "84be2b954870ab181738656ecd4d03fca2ff21012947014c79626abfce8e999b"
}


@@ -1,74 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, created_at, updated_at, name, description, setting_request_timeout,\n setting_follow_redirects, setting_validate_certificates,\n variables AS \"variables!: sqlx::types::Json<Vec<EnvironmentVariable>>\"\n FROM workspaces\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 2,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "name",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "description",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "setting_request_timeout",
"ordinal": 6,
"type_info": "Int64"
},
{
"name": "setting_follow_redirects",
"ordinal": 7,
"type_info": "Bool"
},
{
"name": "setting_validate_certificates",
"ordinal": 8,
"type_info": "Bool"
},
{
"name": "variables!: sqlx::types::Json<Vec<EnvironmentVariable>>",
"ordinal": 9,
"type_info": "Text"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
false
]
},
"hash": "8dfbae65ddec905ea3734448cc9f7029b6c78de227c6fa3a85d75d0a7f21e0e9"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO folders (\n id, workspace_id, folder_id, name, sort_priority\n )\n VALUES (?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n folder_id = excluded.folder_id,\n sort_priority = excluded.sort_priority\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 5
},
"nullable": []
},
"hash": "9238f94c688d91f42627e5b73c627c514bab4039ab5edadc79b77dfdfd63b208"
}


@@ -1,80 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, request_id, connection_id, created_at, content, status, error,\n event_type AS \"event_type!: GrpcEventType\",\n metadata AS \"metadata!: sqlx::types::Json<HashMap<String, String>>\"\n FROM grpc_events\n WHERE id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "request_id",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "connection_id",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 5,
"type_info": "Datetime"
},
{
"name": "content",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "status",
"ordinal": 7,
"type_info": "Int64"
},
{
"name": "error",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "event_type!: GrpcEventType",
"ordinal": 9,
"type_info": "Text"
},
{
"name": "metadata!: sqlx::types::Json<HashMap<String, String>>",
"ordinal": 10,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
true,
true,
false,
false
]
},
"hash": "92d8f003a8f7df692345f2d2fd2504c9222645976e3433e32e190f4ee4bf100d"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n DELETE FROM folders\n WHERE id = ?\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "93aea3881dffb70a82325263740a0bb6477e78f27991ce7456b394e84383acb6"
}


@@ -1,74 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, created_at, updated_at, name, description, setting_request_timeout,\n setting_follow_redirects, setting_validate_certificates,\n variables AS \"variables!: sqlx::types::Json<Vec<EnvironmentVariable>>\"\n FROM workspaces WHERE id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 2,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "name",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "description",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "setting_request_timeout",
"ordinal": 6,
"type_info": "Int64"
},
{
"name": "setting_follow_redirects",
"ordinal": 7,
"type_info": "Bool"
},
{
"name": "setting_validate_certificates",
"ordinal": 8,
"type_info": "Bool"
},
{
"name": "variables!: sqlx::types::Json<Vec<EnvironmentVariable>>",
"ordinal": 9,
"type_info": "Null"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
false
]
},
"hash": "9ba3f783238b77637ffded4171b2fbb5e5ad0be952a0d832448d65cc5f0effc1"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n UPDATE grpc_connections\n SET (elapsed) = (-1)\n WHERE elapsed = 0;\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 0
},
"nullable": []
},
"hash": "a690a04cd1ebe8c3dbfd0cd98ae4ef093a1696d7b7ecaf694d12e5fafd62b685"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n UPDATE http_responses\n SET (elapsed, status_reason) = (-1, 'Cancelled')\n WHERE elapsed = 0;\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 0
},
"nullable": []
},
"hash": "ac1b4ffbd98b67f0a1a74e3525387d679dd6f44c561d55c7bbea747053e53671"
}


@@ -1,116 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, request_id, updated_at, created_at, url, status,\n status_reason, content_length, body_path, elapsed, elapsed_headers, error,\n version, remote_addr,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpResponseHeader>>\"\n FROM http_responses\n WHERE request_id = ?\n ORDER BY created_at DESC\n LIMIT ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "request_id",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "updated_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "created_at",
"ordinal": 5,
"type_info": "Datetime"
},
{
"name": "url",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "status",
"ordinal": 7,
"type_info": "Int64"
},
{
"name": "status_reason",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "content_length",
"ordinal": 9,
"type_info": "Int64"
},
{
"name": "body_path",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "elapsed",
"ordinal": 11,
"type_info": "Int64"
},
{
"name": "elapsed_headers",
"ordinal": 12,
"type_info": "Int64"
},
{
"name": "error",
"ordinal": 13,
"type_info": "Text"
},
{
"name": "version",
"ordinal": 14,
"type_info": "Text"
},
{
"name": "remote_addr",
"ordinal": 15,
"type_info": "Text"
},
{
"name": "headers!: sqlx::types::Json<Vec<HttpResponseHeader>>",
"ordinal": 16,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
true,
true,
true,
false,
false,
true,
true,
true,
false
]
},
"hash": "ac38621cd947c3be9ca0d8ea73325fe35c3866d16f6482fc32c23762f112dc83"
}


@@ -1,62 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, created_at, updated_at, folder_id, name, sort_priority\n FROM folders\n WHERE id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "folder_id",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "name",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "sort_priority",
"ordinal": 7,
"type_info": "Float"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "ae98a7b35a5cb80a4bcd04faa22545deac2a5e9bfb814b60191f16b98ed49796"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n DELETE FROM environments\n WHERE id = ?\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "aeb0712785a9964d516dc8939bc54aa8206ad852e608b362d014b67a0f21b0ed"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO cookie_jars (\n id, workspace_id, name, cookies\n )\n VALUES (?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n cookies = excluded.cookies\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "b3fae40a793a6724dd2286a9ca4bc0a9c000a631ee0d751a9dc4f3e76de3d57c"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n DELETE FROM cookie_jars\n WHERE id = ?\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "b98609f65dd3a6bbd1ea8dc8bed2840a6d5d13fec1bbc0aa61ca4f60de98a09c"
}


@@ -1,56 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT id, workspace_id, model, created_at, updated_at, name,\n variables AS \"variables!: sqlx::types::Json<Vec<EnvironmentVariable>>\"\n FROM environments\n WHERE workspace_id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "name",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "variables!: sqlx::types::Json<Vec<EnvironmentVariable>>",
"ordinal": 6,
"type_info": "Null"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
false,
false
]
},
"hash": "ba2b34a77723f24f86e4c3c45274dbfec6ca130e16e592f948844c037bdc0593"
}


@@ -1,92 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, request_id, created_at, updated_at, service,\n method, elapsed, status, error, url,\n trailers AS \"trailers!: sqlx::types::Json<HashMap<String, String>>\"\n FROM grpc_connections\n WHERE id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "request_id",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 5,
"type_info": "Datetime"
},
{
"name": "service",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "method",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "elapsed",
"ordinal": 8,
"type_info": "Int64"
},
{
"name": "status",
"ordinal": 9,
"type_info": "Int64"
},
{
"name": "error",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "url",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "trailers!: sqlx::types::Json<HashMap<String, String>>",
"ordinal": 12,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
true,
false,
false
]
},
"hash": "d4b64c466624eb75e0f5bd201ebfb6a73d25eb7c9e09cb9690afdb7fef5fca8b"
}


@@ -1,116 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, request_id, updated_at, created_at, url, status,\n status_reason, content_length, body_path, elapsed, elapsed_headers, error,\n version, remote_addr,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpResponseHeader>>\"\n FROM http_responses\n WHERE workspace_id = ?\n ORDER BY created_at DESC\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "request_id",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "updated_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "created_at",
"ordinal": 5,
"type_info": "Datetime"
},
{
"name": "url",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "status",
"ordinal": 7,
"type_info": "Int64"
},
{
"name": "status_reason",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "content_length",
"ordinal": 9,
"type_info": "Int64"
},
{
"name": "body_path",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "elapsed",
"ordinal": 11,
"type_info": "Int64"
},
{
"name": "elapsed_headers",
"ordinal": 12,
"type_info": "Int64"
},
{
"name": "error",
"ordinal": 13,
"type_info": "Text"
},
{
"name": "version",
"ordinal": 14,
"type_info": "Text"
},
{
"name": "remote_addr",
"ordinal": 15,
"type_info": "Text"
},
{
"name": "headers!: sqlx::types::Json<Vec<HttpResponseHeader>>",
"ordinal": 16,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
true,
true,
true,
false,
false,
true,
true,
true,
false
]
},
"hash": "d5e087caa163a0c7bfbbadf07eccb80105501cf5baab706aa6792dfe90af8fc9"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO key_values (namespace, key, value)\n VALUES (?, ?, ?) ON CONFLICT DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n value = excluded.value\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 3
},
"nullable": []
},
"hash": "d80c09497771e3641022e73ec6c6a87e73a551f88a948a5445d754922b82b50b"
}


@@ -1,98 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, created_at, updated_at, theme, appearance,\n theme_dark, theme_light, update_channel,\n interface_font_size, interface_scale, editor_font_size, editor_soft_wrap,\n open_workspace_new_window\n FROM settings\n WHERE id = 'default'\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 2,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "theme",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "appearance",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "theme_dark",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "theme_light",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "update_channel",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "interface_font_size",
"ordinal": 9,
"type_info": "Int64"
},
{
"name": "interface_scale",
"ordinal": 10,
"type_info": "Int64"
},
{
"name": "editor_font_size",
"ordinal": 11,
"type_info": "Int64"
},
{
"name": "editor_soft_wrap",
"ordinal": 12,
"type_info": "Bool"
},
{
"name": "open_workspace_new_window",
"ordinal": 13,
"type_info": "Bool"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
true
]
},
"hash": "daa61066517df649e7c80a8ce407839ad502e8e5e43aa8c02e049865acbbae75"
}


@@ -1,104 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, folder_id, created_at, updated_at, name, sort_priority,\n url, service, method, message, authentication_type,\n authentication AS \"authentication!: Json<HashMap<String, JsonValue>>\",\n metadata AS \"metadata!: sqlx::types::Json<Vec<GrpcMetadataEntry>>\"\n FROM grpc_requests\n WHERE id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "folder_id",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 5,
"type_info": "Datetime"
},
{
"name": "name",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "sort_priority",
"ordinal": 7,
"type_info": "Float"
},
{
"name": "url",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "service",
"ordinal": 9,
"type_info": "Text"
},
{
"name": "method",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "message",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "authentication_type",
"ordinal": 12,
"type_info": "Text"
},
{
"name": "authentication!: Json<HashMap<String, JsonValue>>",
"ordinal": 13,
"type_info": "Text"
},
{
"name": "metadata!: sqlx::types::Json<Vec<GrpcMetadataEntry>>",
"ordinal": 14,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
true,
false,
false,
false,
false,
false,
true,
true,
false,
true,
false,
false
]
},
"hash": "e1cdba43bd938772631263966a9bee263923c387f4864917f36a04043bec4857"
}


@@ -1,110 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, workspace_id, folder_id, created_at, updated_at, name, url,\n url_parameters AS \"url_parameters!: sqlx::types::Json<Vec<HttpUrlParameter>>\",\n method, body_type, authentication_type, sort_priority,\n body AS \"body!: Json<HashMap<String, JsonValue>>\",\n authentication AS \"authentication!: Json<HashMap<String, JsonValue>>\",\n headers AS \"headers!: sqlx::types::Json<Vec<HttpRequestHeader>>\"\n FROM http_requests\n WHERE workspace_id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "workspace_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "folder_id",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 4,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 5,
"type_info": "Datetime"
},
{
"name": "name",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "url",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "url_parameters!: sqlx::types::Json<Vec<HttpUrlParameter>>",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "method",
"ordinal": 9,
"type_info": "Text"
},
{
"name": "body_type",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "authentication_type",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "sort_priority",
"ordinal": 12,
"type_info": "Float"
},
{
"name": "body!: Json<HashMap<String, JsonValue>>",
"ordinal": 13,
"type_info": "Text"
},
{
"name": "authentication!: Json<HashMap<String, JsonValue>>",
"ordinal": 14,
"type_info": "Text"
},
{
"name": "headers!: sqlx::types::Json<Vec<HttpRequestHeader>>",
"ordinal": 15,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
true,
false,
false,
false,
false,
false,
false,
true,
true,
false,
false,
false,
false
]
},
"hash": "e61c0dddb3e86d271cb9399faa4e4443342796cb72bdd43a821fae2994ae8e2f"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n UPDATE http_responses SET (\n elapsed, elapsed_headers, url, status, status_reason, content_length, body_path,\n error, headers, version, remote_addr, updated_at\n ) = (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP) WHERE id = ?;\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 12
},
"nullable": []
},
"hash": "e7124f5570076bfd65985744f48d8e12cf29d6d243fffdd62ade2ab70c7bddda"
}


@@ -1,56 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, model, created_at, updated_at, workspace_id, name,\n cookies AS \"cookies!: sqlx::types::Json<Vec<JsonValue>>\"\n FROM cookie_jars WHERE id = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "model",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 2,
"type_info": "Datetime"
},
{
"name": "updated_at",
"ordinal": 3,
"type_info": "Datetime"
},
{
"name": "workspace_id",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "name",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "cookies!: sqlx::types::Json<Vec<JsonValue>>",
"ordinal": 6,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
false,
false
]
},
"hash": "f5f20f3b37d932617499a0da50997edad59e4f5998b15c50ed6eae2e97064068"
}


@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "\n DELETE FROM grpc_requests\n WHERE id = ?\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "fe0652396bc30d926cf99083651c1cbd668bcf00ebe1a5f36616700c84972b39"
}

src-tauri/Cargo.lock generated

File diff suppressed because it is too large.


@@ -1,58 +1,40 @@
workspace = { members = ["grpc", "templates", "tauri-plugin-plugin-runtime"] }
[package]
name = "yaak-app"
version = "0.0.0"
description = "A network protocol testing utility app"
authors = ["Gregory Schier"]
license = "MIT"
repository = "https://github.com/gschier/yaak-app"
edition = "2021"
# Produce a library for mobile support
[lib]
name = "tauri_app_lib"
crate-type = ["staticlib", "cdylib", "lib"]
[profile.release]
strip = true # Automatically strip symbols from the binary.
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[build-dependencies]
tauri-build = { version = "2.0.0-beta", features = [] }
tauri-build = { version = "1.2", features = [] }
[target.'cfg(target_os = "macos")'.dependencies]
objc = "0.2.7"
cocoa = "0.25.0"
[target.'cfg(target_os = "linux")'.dependencies]
openssl-sys = { version = "0.9", features = ["vendored"] } # For Ubuntu installation to work
objc = { version = "0.2.7" }
cocoa = { version = "0.24.1" }
[dependencies]
grpc = { path = "./grpc" }
templates = { path = "./templates" }
plugin_runtime = { path = "tauri-plugin-plugin-runtime" }
anyhow = "1.0.86"
base64 = "0.22.0"
chrono = { version = "0.4.31", features = ["serde"] }
datetime = "0.5.2"
hex_color = "3.0.0"
http = "1"
log = "0.4.21"
rand = "0.8.5"
regex = "1.10.2"
reqwest = { version = "0.12.4", features = ["multipart", "cookies", "gzip", "brotli", "deflate", "json", "native-tls-alpn"] }
reqwest_cookie_store = "0.8.0"
serde = { version = "1.0.198", features = ["derive"] }
serde_json = { version = "1.0.116", features = ["raw_value"] }
serde_yaml = "0.9.34"
sqlx = { version = "0.7.4", features = ["sqlite", "runtime-tokio-rustls", "json", "chrono", "time"] }
tauri = { version = "2.0.0-beta", features = ["devtools", "protocol-asset"] }
tauri-plugin-clipboard-manager = "2.1.0-beta"
tauri-plugin-dialog = "2.0.0-beta"
tauri-plugin-fs = "2.0.0-beta"
tauri-plugin-log = { version = "2.0.0-beta", features = ["colored"] }
tauri-plugin-os = "2.0.0-beta"
tauri-plugin-shell = "2.0.0-beta"
tauri-plugin-updater = "2.0.0-beta"
tauri-plugin-window-state = "2.0.0-beta"
tokio = { version = "1.36.0", features = ["sync"] }
tokio-stream = "0.1.15"
uuid = "1.7.0"
thiserror = "1.0.61"
mime_guess = "2.0.5"
serde_json = { version = "1.0", features = ["raw_value"] }
serde = { version = "1.0", features = ["derive"] }
tauri = { version = "1.2", features = ["config-toml", "devtools", "shell-open", "system-tray", "updater", "window-start-dragging"] }
http = { version = "0.2.8" }
reqwest = { version = "0.11.14", features = ["json"] }
tokio = { version = "1.25.0", features = ["sync"] }
futures = { version = "0.3.26" }
deno_core = { version = "0.174.0" }
deno_ast = { version = "0.24.0", features = ["transpiling"] }
sqlx = { version = "0.6.2", features = ["sqlite", "runtime-tokio-rustls", "json", "chrono", "time", "offline"] }
uuid = { version = "1.3.0" }
rand = { version = "0.8.5" }
chrono = { version = "0.4.23", features = ["serde"] }
[features]
# by default Tauri runs in production mode
# when `tauri dev` runs it is executed with `cargo run --no-default-features` if `devPath` is a URL
default = ["custom-protocol"]
# this feature is used for production builds where `devPath` points to the filesystem
# DO NOT remove this
custom-protocol = ["tauri/custom-protocol"]


@@ -1,46 +0,0 @@
{
"$schema": "../gen/schemas/capabilities.json",
"identifier": "main",
"description": "Main permissions",
"local": true,
"windows": [
"*"
],
"permissions": [
"os:allow-os-type",
"event:allow-emit",
"clipboard-manager:allow-write-text",
"clipboard-manager:allow-read-text",
"dialog:allow-open",
"dialog:allow-save",
"event:allow-listen",
"event:allow-unlisten",
"fs:allow-read-file",
"fs:allow-read-text-file",
{
"identifier": "fs:scope",
"allow": [
{
"path": "$APPDATA"
},
{
"path": "$APPDATA/**"
}
]
},
"shell:allow-open",
"webview:allow-set-webview-zoom",
"window:allow-close",
"window:allow-is-fullscreen",
"window:allow-maximize",
"window:allow-minimize",
"window:allow-toggle-maximize",
"window:allow-set-decorations",
"window:allow-set-title",
"window:allow-start-dragging",
"window:allow-unmaximize",
"window:allow-theme",
"clipboard-manager:allow-read-text",
"clipboard-manager:allow-write-text"
]
}

File diff suppressed because one or more lines are too long


@@ -1 +0,0 @@
{"main":{"identifier":"main","description":"Main permissions","local":true,"windows":["*"],"permissions":["os:allow-os-type","event:allow-emit","clipboard-manager:allow-write-text","clipboard-manager:allow-read-text","dialog:allow-open","dialog:allow-save","event:allow-listen","event:allow-unlisten","fs:allow-read-file","fs:allow-read-text-file",{"identifier":"fs:scope","allow":[{"path":"$APPDATA"},{"path":"$APPDATA/**"}]},"shell:allow-open","webview:allow-set-webview-zoom","window:allow-close","window:allow-is-fullscreen","window:allow-maximize","window:allow-minimize","window:allow-toggle-maximize","window:allow-set-decorations","window:allow-set-title","window:allow-start-dragging","window:allow-unmaximize","window:allow-theme","clipboard-manager:allow-read-text","clipboard-manager:allow-write-text"]}}

File diff suppressed because it is too large.

File diff suppressed because it is too large.

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@@ -1,24 +0,0 @@
[package]
name = "grpc"
version = "0.1.0"
edition = "2021"
[dependencies]
tonic = "0.10.2"
prost = "0.12"
tokio = { version = "1.0", features = ["macros", "rt-multi-thread", "fs"] }
tonic-reflection = "0.10.2"
tokio-stream = "0.1.14"
prost-types = "0.12.3"
serde = { version = "1.0.196", features = ["derive"] }
serde_json = "1.0.113"
prost-reflect = { version = "0.12.0", features = ["serde", "derive"] }
log = "0.4.20"
anyhow = "1.0.79"
hyper = { version = "0.14" }
hyper-rustls = { version = "0.24.0", features = ["http2"] }
uuid = { version = "1.7.0", features = ["v4"] }
tauri = { version = "2.0.0-beta" }
tauri-plugin-shell = "2.0.0-beta"
md5 = "0.7.0"
dunce = "1.0.4"


@@ -1,52 +0,0 @@
use prost_reflect::prost::Message;
use prost_reflect::{DynamicMessage, MethodDescriptor};
use tonic::codec::{Codec, DecodeBuf, Decoder, EncodeBuf, Encoder};
use tonic::Status;
#[derive(Clone)]
pub struct DynamicCodec(MethodDescriptor);
impl DynamicCodec {
#[allow(dead_code)]
pub fn new(md: MethodDescriptor) -> Self {
Self(md)
}
}
impl Codec for DynamicCodec {
type Encode = DynamicMessage;
type Decode = DynamicMessage;
type Encoder = Self;
type Decoder = Self;
fn encoder(&mut self) -> Self::Encoder {
self.clone()
}
fn decoder(&mut self) -> Self::Decoder {
self.clone()
}
}
impl Encoder for DynamicCodec {
type Item = DynamicMessage;
type Error = Status;
fn encode(&mut self, item: Self::Item, dst: &mut EncodeBuf<'_>) -> Result<(), Self::Error> {
item.encode(dst)
.expect("buffer is too small to decode this message");
Ok(())
}
}
impl Decoder for DynamicCodec {
type Item = DynamicMessage;
type Error = Status;
fn decode(&mut self, src: &mut DecodeBuf<'_>) -> Result<Option<Self::Item>, Self::Error> {
let mut msg = DynamicMessage::new(self.0.output());
msg.merge(src)
.map_err(|err| Status::internal(err.to_string()))?;
Ok(Some(msg))
}
}


@@ -1,179 +0,0 @@
use prost_reflect::{DescriptorPool, MessageDescriptor};
use prost_types::field_descriptor_proto;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
#[derive(Default, Serialize, Deserialize)]
#[serde(default, rename_all = "camelCase")]
pub struct JsonSchemaEntry {
#[serde(skip_serializing_if = "Option::is_none")]
title: Option<String>,
#[serde(rename = "type")]
type_: JsonType,
#[serde(skip_serializing_if = "Option::is_none")]
description: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
properties: Option<HashMap<String, JsonSchemaEntry>>,
#[serde(rename = "enum", skip_serializing_if = "Option::is_none")]
enum_: Option<Vec<String>>,
/// Don't allow any other properties in the object
additional_properties: bool,
/// Set all properties to required
#[serde(skip_serializing_if = "Option::is_none")]
required: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
items: Option<Box<JsonSchemaEntry>>,
}
enum JsonType {
String,
Number,
Object,
Array,
Boolean,
Null,
_UNKNOWN,
}
impl Default for JsonType {
fn default() -> Self {
JsonType::_UNKNOWN
}
}
impl serde::Serialize for JsonType {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
match self {
JsonType::String => serializer.serialize_str("string"),
JsonType::Number => serializer.serialize_str("number"),
JsonType::Object => serializer.serialize_str("object"),
JsonType::Array => serializer.serialize_str("array"),
JsonType::Boolean => serializer.serialize_str("boolean"),
JsonType::Null => serializer.serialize_str("null"),
JsonType::_UNKNOWN => serializer.serialize_str("unknown"),
}
}
}
impl<'de> serde::Deserialize<'de> for JsonType {
fn deserialize<D>(deserializer: D) -> Result<JsonType, D::Error>
where
D: serde::Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
match s.as_str() {
"string" => Ok(JsonType::String),
"number" => Ok(JsonType::Number),
"object" => Ok(JsonType::Object),
"array" => Ok(JsonType::Array),
"boolean" => Ok(JsonType::Boolean),
"null" => Ok(JsonType::Null),
_ => Ok(JsonType::_UNKNOWN),
}
}
}
pub fn message_to_json_schema(
pool: &DescriptorPool,
message: MessageDescriptor,
) -> JsonSchemaEntry {
let mut schema = JsonSchemaEntry {
title: Some(message.name().to_string()),
type_: JsonType::Object, // Messages are objects
..Default::default()
};
let mut properties = HashMap::new();
message.fields().for_each(|f| match f.kind() {
prost_reflect::Kind::Message(m) => {
properties.insert(f.name().to_string(), message_to_json_schema(pool, m));
}
prost_reflect::Kind::Enum(e) => {
properties.insert(
f.name().to_string(),
JsonSchemaEntry {
type_: map_proto_type_to_json_type(f.field_descriptor_proto().r#type()),
enum_: Some(e.values().map(|v| v.name().to_string()).collect::<Vec<_>>()),
..Default::default()
},
);
}
_ => {
// TODO: Handle repeated label
match f.field_descriptor_proto().label() {
field_descriptor_proto::Label::Repeated => {
// TODO: Handle more complex repeated types. This just handles primitives for now
properties.insert(
f.name().to_string(),
JsonSchemaEntry {
type_: JsonType::Array,
items: Some(Box::new(JsonSchemaEntry {
type_: map_proto_type_to_json_type(
f.field_descriptor_proto().r#type(),
),
..Default::default()
})),
..Default::default()
},
);
}
_ => {
// Regular JSON field
properties.insert(
f.name().to_string(),
JsonSchemaEntry {
type_: map_proto_type_to_json_type(f.field_descriptor_proto().r#type()),
..Default::default()
},
);
}
};
}
});
schema.properties = Some(properties);
// All proto 3 fields are optional, so maybe we could
// make this a setting?
// schema.required = Some(
// message
// .fields()
// .map(|f| f.name().to_string())
// .collect::<Vec<_>>(),
// );
schema
}
fn map_proto_type_to_json_type(proto_type: field_descriptor_proto::Type) -> JsonType {
match proto_type {
field_descriptor_proto::Type::Double => JsonType::Number,
field_descriptor_proto::Type::Float => JsonType::Number,
field_descriptor_proto::Type::Int64 => JsonType::Number,
field_descriptor_proto::Type::Uint64 => JsonType::Number,
field_descriptor_proto::Type::Int32 => JsonType::Number,
field_descriptor_proto::Type::Fixed64 => JsonType::Number,
field_descriptor_proto::Type::Fixed32 => JsonType::Number,
field_descriptor_proto::Type::Bool => JsonType::Boolean,
field_descriptor_proto::Type::String => JsonType::String,
field_descriptor_proto::Type::Group => JsonType::_UNKNOWN,
field_descriptor_proto::Type::Message => JsonType::Object,
field_descriptor_proto::Type::Bytes => JsonType::String,
field_descriptor_proto::Type::Uint32 => JsonType::Number,
field_descriptor_proto::Type::Enum => JsonType::String,
field_descriptor_proto::Type::Sfixed32 => JsonType::Number,
field_descriptor_proto::Type::Sfixed64 => JsonType::Number,
field_descriptor_proto::Type::Sint32 => JsonType::Number,
field_descriptor_proto::Type::Sint64 => JsonType::Number,
}
}
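For intuition, a hypothetical message `message Pet { string name = 1; repeated int32 ids = 2; }` run through message_to_json_schema would serialize to roughly the following (property order varies because `properties` is a HashMap, and every entry carries `additionalProperties` since that field is never skipped):

{
  "title": "Pet",
  "type": "object",
  "additionalProperties": false,
  "properties": {
    "name": { "type": "string", "additionalProperties": false },
    "ids": {
      "type": "array",
      "additionalProperties": false,
      "items": { "type": "number", "additionalProperties": false }
    }
  }
}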


@@ -1,52 +0,0 @@
use prost_reflect::{DynamicMessage, MethodDescriptor, SerializeOptions};
use serde::{Deserialize, Serialize};
use serde_json::Deserializer;
mod codec;
mod json_schema;
pub mod manager;
mod proto;
pub use tonic::metadata::*;
pub use tonic::Code;
pub fn serialize_options() -> SerializeOptions {
SerializeOptions::new().skip_default_fields(false)
}
#[derive(Serialize, Deserialize, Debug, Default)]
#[serde(default, rename_all = "camelCase")]
pub struct ServiceDefinition {
pub name: String,
pub methods: Vec<MethodDefinition>,
}
#[derive(Serialize, Deserialize, Debug, Default)]
#[serde(default, rename_all = "camelCase")]
pub struct MethodDefinition {
pub name: String,
pub schema: String,
pub client_streaming: bool,
pub server_streaming: bool,
}
static SERIALIZE_OPTIONS: &'static SerializeOptions = &SerializeOptions::new()
.skip_default_fields(false)
.stringify_64_bit_integers(false);
pub fn serialize_message(msg: &DynamicMessage) -> Result<String, String> {
let mut buf = Vec::new();
let mut se = serde_json::Serializer::pretty(&mut buf);
msg.serialize_with_options(&mut se, SERIALIZE_OPTIONS)
.map_err(|e| e.to_string())?;
let s = String::from_utf8(buf).expect("serde_json to emit valid utf8");
Ok(s)
}
pub fn deserialize_message(msg: &str, method: MethodDescriptor) -> Result<DynamicMessage, String> {
let mut deserializer = Deserializer::from_str(&msg);
let req_message = DynamicMessage::deserialize(method.input(), &mut deserializer)
.map_err(|e| e.to_string())?;
deserializer.end().map_err(|e| e.to_string())?;
Ok(req_message)
}
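Together, deserialize_message and serialize_message give a JSON round-trip through a method's input type; a minimal sketch, assuming a MethodDescriptor already pulled from a DescriptorPool:

use prost_reflect::MethodDescriptor;

// Parse JSON into the method's input message, then pretty-print it back.
fn roundtrip_input(method: MethodDescriptor, json: &str) -> Result<String, String> {
    let msg = deserialize_message(json, method)?;
    serialize_message(&msg)
}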


@@ -1,304 +0,0 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::str::FromStr;
use hyper::client::HttpConnector;
use hyper::Client;
use hyper_rustls::HttpsConnector;
pub use prost_reflect::DynamicMessage;
use prost_reflect::{DescriptorPool, MethodDescriptor, ServiceDescriptor};
use serde_json::Deserializer;
use tauri::AppHandle;
use tokio_stream::wrappers::ReceiverStream;
use tonic::body::BoxBody;
use tonic::metadata::{MetadataKey, MetadataValue};
use tonic::transport::Uri;
use tonic::{IntoRequest, IntoStreamingRequest, Request, Response, Status, Streaming};
use crate::codec::DynamicCodec;
use crate::proto::{
fill_pool_from_files, fill_pool_from_reflection, get_transport, method_desc_to_path,
};
use crate::{json_schema, MethodDefinition, ServiceDefinition};
#[derive(Clone)]
pub struct GrpcConnection {
pool: DescriptorPool,
conn: Client<HttpsConnector<HttpConnector>, BoxBody>,
pub uri: Uri,
}
#[derive(Default)]
pub struct StreamError {
pub message: String,
pub status: Option<Status>,
}
impl From<String> for StreamError {
fn from(value: String) -> Self {
StreamError {
message: value.to_string(),
status: None,
}
}
}
impl From<Status> for StreamError {
fn from(s: Status) -> Self {
StreamError {
message: s.message().to_string(),
status: Some(s),
}
}
}
impl GrpcConnection {
pub fn service(&self, service: &str) -> Result<ServiceDescriptor, String> {
let service = self
.pool
.get_service_by_name(service)
.ok_or("Failed to find service")?;
Ok(service)
}
pub fn method(&self, service: &str, method: &str) -> Result<MethodDescriptor, String> {
let service = self.service(service)?;
let method = service
.methods()
.find(|m| m.name() == method)
.ok_or("Failed to find method")?;
Ok(method)
}
pub async fn unary(
&self,
service: &str,
method: &str,
message: &str,
metadata: HashMap<String, String>,
) -> Result<Response<DynamicMessage>, StreamError> {
let method = &self.method(&service, &method)?;
let input_message = method.input();
let mut deserializer = Deserializer::from_str(message);
let req_message = DynamicMessage::deserialize(input_message, &mut deserializer)
.map_err(|e| e.to_string())?;
deserializer.end().unwrap();
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
let mut req = req_message.into_request();
decorate_req(metadata, &mut req).map_err(|e| e.to_string())?;
let path = method_desc_to_path(method);
let codec = DynamicCodec::new(method.clone());
client.ready().await.unwrap();
Ok(client.unary(req, path, codec).await?)
}
pub async fn streaming(
&self,
service: &str,
method: &str,
stream: ReceiverStream<DynamicMessage>,
metadata: HashMap<String, String>,
) -> Result<Response<Streaming<DynamicMessage>>, StreamError> {
let method = &self.method(&service, &method)?;
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
let mut req = stream.into_streaming_request();
decorate_req(metadata, &mut req).map_err(|e| e.to_string())?;
let path = method_desc_to_path(method);
let codec = DynamicCodec::new(method.clone());
client.ready().await.map_err(|e| e.to_string())?;
Ok(client.streaming(req, path, codec).await?)
}
pub async fn client_streaming(
&self,
service: &str,
method: &str,
stream: ReceiverStream<DynamicMessage>,
metadata: HashMap<String, String>,
) -> Result<Response<DynamicMessage>, StreamError> {
let method = &self.method(&service, &method)?;
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
let mut req = stream.into_streaming_request();
decorate_req(metadata, &mut req).map_err(|e| e.to_string())?;
let path = method_desc_to_path(method);
let codec = DynamicCodec::new(method.clone());
client.ready().await.unwrap();
client
.client_streaming(req, path, codec)
.await
.map_err(|e| StreamError {
message: e.message().to_string(),
status: Some(e),
})
}
pub async fn server_streaming(
&self,
service: &str,
method: &str,
message: &str,
metadata: HashMap<String, String>,
) -> Result<Response<Streaming<DynamicMessage>>, StreamError> {
let method = &self.method(&service, &method)?;
let input_message = method.input();
let mut deserializer = Deserializer::from_str(message);
let req_message = DynamicMessage::deserialize(input_message, &mut deserializer)
.map_err(|e| e.to_string())?;
deserializer.end().unwrap();
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
let mut req = req_message.into_request();
decorate_req(metadata, &mut req).map_err(|e| e.to_string())?;
let path = method_desc_to_path(method);
let codec = DynamicCodec::new(method.clone());
client.ready().await.map_err(|e| e.to_string())?;
Ok(client.server_streaming(req, path, codec).await?)
}
}
pub struct GrpcHandle {
app_handle: AppHandle,
pools: HashMap<String, DescriptorPool>,
}
impl GrpcHandle {
pub fn new(app_handle: &AppHandle) -> Self {
let pools = HashMap::new();
Self {
pools,
app_handle: app_handle.clone(),
}
}
}
impl GrpcHandle {
pub async fn reflect(
&mut self,
id: &str,
uri: &str,
proto_files: &Vec<PathBuf>,
) -> Result<(), String> {
let pool = if proto_files.is_empty() {
let full_uri = uri_from_str(uri)?;
fill_pool_from_reflection(&full_uri).await
} else {
fill_pool_from_files(&self.app_handle, proto_files).await
}?;
self.pools
.insert(make_pool_key(id, uri, proto_files), pool.clone());
Ok(())
}
pub async fn services(
&mut self,
id: &str,
uri: &str,
proto_files: &Vec<PathBuf>,
) -> Result<Vec<ServiceDefinition>, String> {
// Ensure reflection is up-to-date
self.reflect(id, uri, proto_files).await?;
let pool = self
.get_pool(id, uri, proto_files)
.ok_or("Failed to get pool".to_string())?;
Ok(self.services_from_pool(&pool))
}
fn services_from_pool(&self, pool: &DescriptorPool) -> Vec<ServiceDefinition> {
pool.services()
.map(|s| {
let mut def = ServiceDefinition {
name: s.full_name().to_string(),
methods: vec![],
};
for method in s.methods() {
let input_message = method.input();
def.methods.push(MethodDefinition {
name: method.name().to_string(),
server_streaming: method.is_server_streaming(),
client_streaming: method.is_client_streaming(),
schema: serde_json::to_string_pretty(&json_schema::message_to_json_schema(
&pool,
input_message,
))
.unwrap(),
})
}
def
})
.collect::<Vec<_>>()
}
pub async fn connect(
&mut self,
id: &str,
uri: &str,
proto_files: &Vec<PathBuf>,
) -> Result<GrpcConnection, String> {
self.reflect(id, uri, proto_files).await?;
let pool = self
.get_pool(id, uri, proto_files)
.ok_or("Failed to get pool")?;
let uri = uri_from_str(uri)?;
let conn = get_transport();
let connection = GrpcConnection {
pool: pool.clone(),
conn,
uri,
};
Ok(connection)
}
fn get_pool(&self, id: &str, uri: &str, proto_files: &Vec<PathBuf>) -> Option<&DescriptorPool> {
self.pools.get(make_pool_key(id, uri, proto_files).as_str())
}
}
fn decorate_req<T>(metadata: HashMap<String, String>, req: &mut Request<T>) -> Result<(), String> {
for (k, v) in metadata {
req.metadata_mut().insert(
MetadataKey::from_str(k.as_str()).map_err(|e| e.to_string())?,
MetadataValue::from_str(v.as_str()).map_err(|e| e.to_string())?,
);
}
Ok(())
}
fn uri_from_str(uri_str: &str) -> Result<Uri, String> {
match Uri::from_str(uri_str) {
Ok(uri) => Ok(uri),
Err(err) => {
// Uri::from_str basically only returns "invalid format" so we add more context here
Err(format!("Failed to parse URL, {}", err.to_string()))
}
}
}
fn make_pool_key(id: &str, uri: &str, proto_files: &Vec<PathBuf>) -> String {
let pool_key = format!(
"{}::{}::{}",
id,
uri,
proto_files
.iter()
.map(|p| p.to_string_lossy().to_string())
.collect::<Vec<String>>()
.join(":")
);
format!("{:x}", md5::compute(pool_key))
}
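A minimal sketch of how these pieces fit together for a unary call; the address, service, and method strings are hypothetical, and error handling is condensed:

use std::collections::HashMap;
use std::path::PathBuf;

async fn demo_unary(handle: &mut GrpcHandle) -> Result<(), String> {
    let no_protos: Vec<PathBuf> = Vec::new(); // empty: fall back to server reflection
    let conn = handle
        .connect("conn_1", "http://localhost:50051", &no_protos)
        .await?;
    let resp = conn
        .unary(
            "greeter.Greeter",      // hypothetical full service name
            "SayHello",             // hypothetical method
            r#"{"name": "World"}"#, // request message as JSON
            HashMap::new(),         // no extra metadata
        )
        .await
        .map_err(|e| e.message)?;
    println!("{}", crate::serialize_message(resp.get_ref())?);
    Ok(())
}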


@@ -1,252 +0,0 @@
use std::env::temp_dir;
use std::ops::Deref;
use std::path::PathBuf;
use std::str::FromStr;
use anyhow::anyhow;
use hyper::client::HttpConnector;
use hyper::Client;
use hyper_rustls::{HttpsConnector, HttpsConnectorBuilder};
use log::{debug, warn};
use prost::Message;
use prost_reflect::{DescriptorPool, MethodDescriptor};
use prost_types::{FileDescriptorProto, FileDescriptorSet};
use tauri::path::BaseDirectory;
use tauri::{AppHandle, Manager};
use tauri_plugin_shell::ShellExt;
use tokio::fs;
use tokio_stream::StreamExt;
use tonic::body::BoxBody;
use tonic::codegen::http::uri::PathAndQuery;
use tonic::transport::Uri;
use tonic::Request;
use tonic_reflection::pb::server_reflection_client::ServerReflectionClient;
use tonic_reflection::pb::server_reflection_request::MessageRequest;
use tonic_reflection::pb::server_reflection_response::MessageResponse;
use tonic_reflection::pb::ServerReflectionRequest;
pub async fn fill_pool_from_files(
app_handle: &AppHandle,
paths: &Vec<PathBuf>,
) -> Result<DescriptorPool, String> {
let mut pool = DescriptorPool::new();
let random_file_name = format!("{}.desc", uuid::Uuid::new_v4());
let desc_path = temp_dir().join(random_file_name);
let global_import_dir = app_handle
.path()
.resolve("protoc-include", BaseDirectory::Resource)
.expect("failed to resolve protoc include directory");
// HACK: Remove UNC prefix for Windows paths
let global_import_dir = dunce::simplified(global_import_dir.as_path())
.to_string_lossy()
.to_string();
let desc_path = dunce::simplified(desc_path.as_path());
let mut args = vec![
"--include_imports".to_string(),
"--include_source_info".to_string(),
"-I".to_string(),
global_import_dir,
"-o".to_string(),
desc_path.to_string_lossy().to_string(),
];
for p in paths {
if p.as_path().exists() {
args.push(p.to_string_lossy().to_string());
} else {
continue;
}
let parent = p.as_path().parent();
if let Some(parent_path) = parent {
args.push("-I".to_string());
args.push(parent_path.to_string_lossy().to_string());
args.push("-I".to_string());
args.push(parent_path.parent().unwrap().to_string_lossy().to_string());
} else {
debug!("ignoring {:?} since it does not exist.", parent)
}
}
let out = app_handle
.shell()
.sidecar("yaakprotoc")
.expect("yaakprotoc not found")
.args(args)
.output()
.await
.expect("yaakprotoc failed to run");
if !out.status.success() {
return Err(format!(
"protoc failed with status {}: {}",
out.status.code().unwrap(),
String::from_utf8_lossy(out.stderr.as_slice())
));
}
let bytes = fs::read(desc_path).await.map_err(|e| e.to_string())?;
let fdp = FileDescriptorSet::decode(bytes.deref()).map_err(|e| e.to_string())?;
pool.add_file_descriptor_set(fdp)
.map_err(|e| e.to_string())?;
fs::remove_file(desc_path)
.await
.map_err(|e| e.to_string())?;
Ok(pool)
}
pub async fn fill_pool_from_reflection(uri: &Uri) -> Result<DescriptorPool, String> {
let mut pool = DescriptorPool::new();
let mut client = ServerReflectionClient::with_origin(get_transport(), uri.clone());
for service in list_services(&mut client).await? {
if service == "grpc.reflection.v1alpha.ServerReflection" {
continue;
}
file_descriptor_set_from_service_name(&service, &mut pool, &mut client).await;
}
Ok(pool)
}
pub fn get_transport() -> Client<HttpsConnector<HttpConnector>, BoxBody> {
let connector = HttpsConnectorBuilder::new().with_native_roots();
let connector = connector.https_or_http().enable_http2().wrap_connector({
let mut http_connector = HttpConnector::new();
http_connector.enforce_http(false);
http_connector
});
Client::builder()
.pool_max_idle_per_host(0)
.http2_only(true)
.build(connector)
}
async fn list_services(
reflect_client: &mut ServerReflectionClient<Client<HttpsConnector<HttpConnector>, BoxBody>>,
) -> Result<Vec<String>, String> {
let response =
send_reflection_request(reflect_client, MessageRequest::ListServices("".into())).await?;
let list_services_response = match response {
MessageResponse::ListServicesResponse(resp) => resp,
_ => panic!("Expected a ListServicesResponse variant"),
};
Ok(list_services_response
.service
.iter()
.map(|s| s.name.clone())
.collect::<Vec<_>>())
}
async fn file_descriptor_set_from_service_name(
service_name: &str,
pool: &mut DescriptorPool,
client: &mut ServerReflectionClient<Client<HttpsConnector<HttpConnector>, BoxBody>>,
) {
let response = match send_reflection_request(
client,
MessageRequest::FileContainingSymbol(service_name.into()),
)
.await
{
Ok(resp) => resp,
Err(e) => {
warn!(
"Error fetching file descriptor for service {}: {}",
service_name, e
);
return;
}
};
let file_descriptor_response = match response {
MessageResponse::FileDescriptorResponse(resp) => resp,
_ => panic!("Expected a FileDescriptorResponse variant"),
};
for fd in file_descriptor_response.file_descriptor_proto {
let fdp = FileDescriptorProto::decode(fd.deref()).unwrap();
// Add deps first or else we'll get an error
for dep_name in fdp.clone().dependency {
file_descriptor_set_by_filename(&dep_name, pool, client).await;
}
pool.add_file_descriptor_proto(fdp)
.expect("add file descriptor proto");
}
}
async fn file_descriptor_set_by_filename(
filename: &str,
pool: &mut DescriptorPool,
client: &mut ServerReflectionClient<Client<HttpsConnector<HttpConnector>, BoxBody>>,
) {
// We already fetched this file
if let Some(_) = pool.get_file_by_name(filename) {
return;
}
let response =
send_reflection_request(client, MessageRequest::FileByFilename(filename.into())).await;
let file_descriptor_response = match response {
Ok(MessageResponse::FileDescriptorResponse(resp)) => resp,
Ok(_) => {
panic!("Expected a FileDescriptorResponse variant")
}
Err(e) => {
warn!("Error fetching file descriptor for {}: {}", filename, e);
return;
}
};
for fd in file_descriptor_response.file_descriptor_proto {
let fdp = FileDescriptorProto::decode(fd.deref()).unwrap();
pool.add_file_descriptor_proto(fdp)
.expect("add file descriptor proto");
}
}
async fn send_reflection_request(
client: &mut ServerReflectionClient<Client<HttpsConnector<HttpConnector>, BoxBody>>,
message: MessageRequest,
) -> Result<MessageResponse, String> {
let reflection_request = ServerReflectionRequest {
host: "".into(), // Doesn't matter
message_request: Some(message),
};
let request = Request::new(tokio_stream::once(reflection_request));
client
.server_reflection_info(request)
.await
.map_err(|e| match e.code() {
tonic::Code::Unavailable => "Failed to connect to endpoint".to_string(),
tonic::Code::Unauthenticated => "Authentication failed".to_string(),
tonic::Code::DeadlineExceeded => "Deadline exceeded".to_string(),
_ => e.to_string(),
})?
.into_inner()
.next()
.await
.expect("steamed response")
.map_err(|e| e.to_string())?
.message_response
.ok_or("No reflection response".to_string())
}
pub fn method_desc_to_path(md: &MethodDescriptor) -> PathAndQuery {
let full_name = md.full_name();
let (namespace, method_name) = full_name
.rsplit_once('.')
.ok_or_else(|| anyhow!("invalid method path"))
.expect("invalid method path");
PathAndQuery::from_str(&format!("/{}/{}", namespace, method_name)).expect("invalid method path")
}
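As a usage sketch, pointing fill_pool_from_reflection at a reflection-enabled server (address hypothetical) and listing what it advertises:

use std::str::FromStr;
use tonic::transport::Uri;

async fn print_reflected_services() -> Result<(), String> {
    let uri = Uri::from_str("http://localhost:50051").map_err(|e| e.to_string())?;
    let pool = fill_pool_from_reflection(&uri).await?;
    for service in pool.services() {
        println!("{}", service.full_name());
    }
    Ok(())
}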

src-tauri/icons/128x128.png (new binary file, not shown; 15 KiB)

(a second new binary file, not shown; 39 KiB)

Some files were not shown because too many files have changed in this diff.