mirror of
https://github.com/mountain-loop/yaak.git
synced 2026-02-14 11:47:46 +01:00
Compare commits
474 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
626aacf982 | ||
|
|
d5855c45a6 | ||
|
|
793bff9f27 | ||
|
|
88ea68e72f | ||
|
|
35e40d2c55 | ||
|
|
c472b83409 | ||
|
|
52c26d235c | ||
|
|
ac54729012 | ||
|
|
0586034ef4 | ||
|
|
91790ba708 | ||
|
|
d8ab6c0b50 | ||
|
|
b600a21a2b | ||
|
|
4f9d1278f7 | ||
|
|
15aa93f5f9 | ||
|
|
c7798092d8 | ||
|
|
5560593aaa | ||
|
|
66639e651d | ||
|
|
8e42d5ccdb | ||
|
|
5c62594087 | ||
|
|
26b6c48657 | ||
|
|
0290aba982 | ||
|
|
0bafc4e4f5 | ||
|
|
9a36f94279 | ||
|
|
1d8e66179e | ||
|
|
fda6d16d8e | ||
|
|
c4737916df | ||
|
|
919465cdbb | ||
|
|
de3730fa4f | ||
|
|
aff26fdd46 | ||
|
|
3c0edf06af | ||
|
|
cb8939db88 | ||
|
|
bf4b3213c4 | ||
|
|
633d7c52c4 | ||
|
|
0401cb92aa | ||
|
|
bff6c668a0 | ||
|
|
ee87e65763 | ||
|
|
f165a0b827 | ||
|
|
f7426dc8ce | ||
|
|
6114039f7e | ||
|
|
da414debe1 | ||
|
|
11f5541558 | ||
|
|
1bc155d684 | ||
|
|
335231060e | ||
|
|
0fdf64440f | ||
|
|
a984fb33dc | ||
|
|
41b1ec96c9 | ||
|
|
df83a61d6f | ||
|
|
d289f1fd13 | ||
|
|
aea4e961aa | ||
|
|
c554b73d48 | ||
|
|
b519bff3d6 | ||
|
|
8381104302 | ||
|
|
5ef7c6a1a2 | ||
|
|
6d7a81850c | ||
|
|
6e5d5fcb95 | ||
|
|
004fef6729 | ||
|
|
0bec5a6405 | ||
|
|
60b091ff1c | ||
|
|
bee1a5cb2d | ||
|
|
bb2d3dd5b1 | ||
|
|
bf8aad04c7 | ||
|
|
4306294a72 | ||
|
|
10f3722fe3 | ||
|
|
c1af9ca44a | ||
|
|
5b230c74f0 | ||
|
|
5cebb4e61a | ||
|
|
bd9d1e2244 | ||
|
|
9bdaa05f00 | ||
|
|
750ad0c902 | ||
|
|
a9c16838e6 | ||
|
|
d5065ab6d9 | ||
|
|
9ebb3ef532 | ||
|
|
aeda72f13e | ||
|
|
83aa9041cb | ||
|
|
d51913509d | ||
|
|
5106f28ba5 | ||
|
|
0c55c6eaab | ||
|
|
b0edbd19c8 | ||
|
|
7630db79b7 | ||
|
|
55a7b82567 | ||
|
|
b5cb46918a | ||
|
|
a793ece1a5 | ||
|
|
0f6e4b641a | ||
|
|
5ac5fab0c6 | ||
|
|
8030a8a235 | ||
|
|
d98426cad3 | ||
|
|
06034a8fc4 | ||
|
|
1ee9f9bb51 | ||
|
|
4b99d1405e | ||
|
|
8480e52195 | ||
|
|
243e65a992 | ||
|
|
b82304a233 | ||
|
|
f7a4ea9735 | ||
|
|
33d1a84ecd | ||
|
|
f4a071ee05 | ||
|
|
e26ba0f9d0 | ||
|
|
b4e2a12375 | ||
|
|
5e7aacd31a | ||
|
|
00718df49e | ||
|
|
bb9025ab07 | ||
|
|
867f3908ed | ||
|
|
30e1ecac39 | ||
|
|
7eb2abe9b2 | ||
|
|
a5ac8fa035 | ||
|
|
dd705de155 | ||
|
|
b15cdec701 | ||
|
|
a99a36b5cc | ||
|
|
e0b0e3d781 | ||
|
|
98a4834d4f | ||
|
|
32b135dbaf | ||
|
|
0fc8d12a06 | ||
|
|
3c2bdab101 | ||
|
|
8b5d7ae3ed | ||
|
|
51949f4fbf | ||
|
|
6013cd2329 | ||
|
|
eba28ade48 | ||
|
|
44af1ddc8a | ||
|
|
63c0d09df8 | ||
|
|
f305633d94 | ||
|
|
13155f8591 | ||
|
|
f2ac97aa62 | ||
|
|
18eb0027a1 | ||
|
|
9e2803fcfb | ||
|
|
705e30b6e0 | ||
|
|
f1260911ea | ||
|
|
076ff63dbe | ||
|
|
899092b4d2 | ||
|
|
c2c3a28aab | ||
|
|
25c0db502e | ||
|
|
6dcbe45a53 | ||
|
|
e2b46f25ff | ||
|
|
981182be46 | ||
|
|
ad164ebd5e | ||
|
|
cacdad8826 | ||
|
|
77e5142a7c | ||
|
|
613081728d | ||
|
|
23e77dfec1 | ||
|
|
6e273ae2a3 | ||
|
|
4061094988 | ||
|
|
82b185e27f | ||
|
|
27dc261639 | ||
|
|
7e45fecf19 | ||
|
|
1a5053380b | ||
|
|
408665c62d | ||
|
|
65efee2048 | ||
|
|
3faa66a1fc | ||
|
|
9dafe4f704 | ||
|
|
356eaf1713 | ||
|
|
f8584f1537 | ||
|
|
6ad6cb34b0 | ||
|
|
32b27cd780 | ||
|
|
0344a1e8c9 | ||
|
|
0515271c12 | ||
|
|
5ae8d54ce0 | ||
|
|
33c406ce49 | ||
|
|
3b660ddbd0 | ||
|
|
3132728a27 | ||
|
|
7063128342 | ||
|
|
2187775462 | ||
|
|
18adcd1004 | ||
|
|
b0656d1e38 | ||
|
|
38e66047e0 | ||
|
|
c24f049dac | ||
|
|
53d13c8172 | ||
|
|
0727c6e437 | ||
|
|
8328d20150 | ||
|
|
afe6a3bf57 | ||
|
|
d920632cbd | ||
|
|
5c456fd4d5 | ||
|
|
38c247e350 | ||
|
|
0c8f72124a | ||
|
|
80ed6b1525 | ||
|
|
4424b3f208 | ||
|
|
2c75abce09 | ||
|
|
4e15eb197f | ||
|
|
a7544b4f8c | ||
|
|
d126aad172 | ||
|
|
acc5c0de50 | ||
|
|
3391da111d | ||
|
|
e37ce96956 | ||
|
|
c51831c975 | ||
|
|
180aa39de4 | ||
|
|
3bd780782e | ||
|
|
f9ba2f79c2 | ||
|
|
d9493de2be | ||
|
|
bc9a623742 | ||
|
|
532edbf274 | ||
|
|
1585692328 | ||
|
|
083f565b12 | ||
|
|
f7f7438c9e | ||
|
|
19934a93bb | ||
|
|
577cfe5bdc | ||
|
|
43ac6afae1 | ||
|
|
8cc11703d3 | ||
|
|
4f7a116378 | ||
|
|
513793d9ce | ||
|
|
67f32b6734 | ||
|
|
66813d67fe | ||
|
|
a38691ed53 | ||
|
|
deeefdcfbf | ||
|
|
db292511b1 | ||
|
|
1a5334c1ce | ||
|
|
11002abe39 | ||
|
|
d922dcb062 | ||
|
|
6fcaa18e86 | ||
|
|
7664c941dd | ||
|
|
6f5cb528c6 | ||
|
|
ebb78922f0 | ||
|
|
2285fe9f1c | ||
|
|
38ba8625d8 | ||
|
|
ab5681c7ad | ||
|
|
f66dcb9267 | ||
|
|
1b6cfbac77 | ||
|
|
4c27e788ea | ||
|
|
769da0b052 | ||
|
|
6b60c86300 | ||
|
|
30c1b5e8c7 | ||
|
|
10af9b6f99 | ||
|
|
aa8c066f2d | ||
|
|
b913b74449 | ||
|
|
b71adce50b | ||
|
|
0fbb44c701 | ||
|
|
de335e8637 | ||
|
|
2999f63a4c | ||
|
|
2abc5e6f0b | ||
|
|
639de4321e | ||
|
|
b3c461afdd | ||
|
|
7d154800a0 | ||
|
|
b48ed0399e | ||
|
|
c5d6e7d74a | ||
|
|
e82f915363 | ||
|
|
3128e9ce76 | ||
|
|
bc0e86757c | ||
|
|
fec99916c2 | ||
|
|
3b5d059b11 | ||
|
|
c3fe2acc8a | ||
|
|
4d002c412b | ||
|
|
46d152b5f1 | ||
|
|
25fa81ebbc | ||
|
|
7c2de3c360 | ||
|
|
3a3b187cd0 | ||
|
|
3226bbe083 | ||
|
|
a1e4e0e6c9 | ||
|
|
b3aa8b893b | ||
|
|
f057139634 | ||
|
|
71a2b11ab4 | ||
|
|
587254a0e7 | ||
|
|
9f4de66f3c | ||
|
|
b0d8908724 | ||
|
|
15c22d98c6 | ||
|
|
3105ae0edc | ||
|
|
11a89f06c1 | ||
|
|
9cbe24e740 | ||
|
|
bfbed13b8f | ||
|
|
2268de6321 | ||
|
|
dd99aa7fcd | ||
|
|
be436bb706 | ||
|
|
bd48726f44 | ||
|
|
10bea83f98 | ||
|
|
8122b4fb84 | ||
|
|
3ae57fb2d8 | ||
|
|
6dc3eecca4 | ||
|
|
9d1d732154 | ||
|
|
8a117415b7 | ||
|
|
d36623ebc9 | ||
|
|
94a3ae3696 | ||
|
|
2836a28988 | ||
|
|
946d7dc89e | ||
|
|
af6300f18b | ||
|
|
905cb4b18e | ||
|
|
305ed09547 | ||
|
|
643356bad3 | ||
|
|
e458675627 | ||
|
|
91e3853692 | ||
|
|
5f0876a136 | ||
|
|
3a38127fb4 | ||
|
|
f3b6070235 | ||
|
|
5e6e78eb9e | ||
|
|
9b66a1d1a8 | ||
|
|
e954d0d7bc | ||
|
|
dab2df7e79 | ||
|
|
bc40e22008 | ||
|
|
eef262c398 | ||
|
|
8eab6e14db | ||
|
|
ded33a110a | ||
|
|
e448a7602a | ||
|
|
4c22215ca5 | ||
|
|
4f501abb72 | ||
|
|
b2dcc38982 | ||
|
|
11b719955b | ||
|
|
d563ac63db | ||
|
|
6d826064c6 | ||
|
|
d30b9d6518 | ||
|
|
8da3364d0f | ||
|
|
07c372b7f5 | ||
|
|
7e01f38253 | ||
|
|
ba637009a7 | ||
|
|
da7388e510 | ||
|
|
3ec88fc896 | ||
|
|
1c9381b2bd | ||
|
|
06349b8d5b | ||
|
|
6dc7dc6ad2 | ||
|
|
f981a15ec3 | ||
|
|
8b648c0301 | ||
|
|
83ce09075b | ||
|
|
168dfb9f6b | ||
|
|
9b8961c23d | ||
|
|
89bca42ee6 | ||
|
|
07d2a43a17 | ||
|
|
c84f2afd09 | ||
|
|
df4dbaecc8 | ||
|
|
d9bf03cefe | ||
|
|
39223e8d89 | ||
|
|
67925e18b2 | ||
|
|
89ad65513d | ||
|
|
90166ddfa3 | ||
|
|
0981b23faf | ||
|
|
664f3b4d87 | ||
|
|
dc97b91a4e | ||
|
|
d310272d19 | ||
|
|
f1be3f01e1 | ||
|
|
c57b6e1d73 | ||
|
|
a938dc45f0 | ||
|
|
bb139744a1 | ||
|
|
3aa3e09552 | ||
|
|
74abfd21b8 | ||
|
|
e703817ba2 | ||
|
|
80dd1e457b | ||
|
|
ea9f8d3ab2 | ||
|
|
fa222bdf12 | ||
|
|
45b360dabd | ||
|
|
5923399359 | ||
|
|
f4600f3e90 | ||
|
|
f883837685 | ||
|
|
b58bc409f0 | ||
|
|
e893e539bb | ||
|
|
90294fbb5d | ||
|
|
ae65f222bc | ||
|
|
1b9813fb4c | ||
|
|
b708b5ae41 | ||
|
|
df136fa915 | ||
|
|
f8329f5b8d | ||
|
|
21141090de | ||
|
|
c0d9740a7d | ||
|
|
afcf630443 | ||
|
|
1fe2c9826a | ||
|
|
7272b80a3f | ||
|
|
92114b7368 | ||
|
|
f39d3e7eed | ||
|
|
cbe0d27a5e | ||
|
|
cd39699467 | ||
|
|
b3ea67aacf | ||
|
|
db4ed9797c | ||
|
|
1ea7d7d685 | ||
|
|
2df725b57a | ||
|
|
74e6648249 | ||
|
|
1026350d9c | ||
|
|
98fb87874d | ||
|
|
41fc3afdc1 | ||
|
|
83dbf46ba4 | ||
|
|
0b2e35bdde | ||
|
|
d90a7331c9 | ||
|
|
264e64a996 | ||
|
|
8915915c47 | ||
|
|
951ed787fa | ||
|
|
64ef6b0c22 | ||
|
|
ef18377b3c | ||
|
|
5904b6fded | ||
|
|
f4401e77bb | ||
|
|
efa5455a7b | ||
|
|
619c8d9e72 | ||
|
|
bdf89ac288 | ||
|
|
debd3c8185 | ||
|
|
f81a3ae8e7 | ||
|
|
7d4e9894c3 | ||
|
|
4bf22d8a60 | ||
|
|
8be4971a23 | ||
|
|
359e916b73 | ||
|
|
68058f3e41 | ||
|
|
0c6fa3e634 | ||
|
|
0fa25c6335 | ||
|
|
5684479f1d | ||
|
|
2d1603601c | ||
|
|
f5394b2210 | ||
|
|
833db5df06 | ||
|
|
525ac7e980 | ||
|
|
44a747c80a | ||
|
|
2056e7f40a | ||
|
|
9b6c1ad364 | ||
|
|
34987bcacb | ||
|
|
b62c11222a | ||
|
|
b3cee3ace3 | ||
|
|
222c054c95 | ||
|
|
46f18a2491 | ||
|
|
f2ca8e2753 | ||
|
|
b0d243c378 | ||
|
|
6161fb86c8 | ||
|
|
b09cc91fe5 | ||
|
|
ef1638cbb3 | ||
|
|
00ef8743f2 | ||
|
|
68222659e3 | ||
|
|
69420a4bba | ||
|
|
0161bbaeb1 | ||
|
|
948dbfe3cc | ||
|
|
338ba8b189 | ||
|
|
ca4655b441 | ||
|
|
bf37499428 | ||
|
|
0b94b57e2a | ||
|
|
fc40aead98 | ||
|
|
7d7f934e6a | ||
|
|
d5fbf4d622 | ||
|
|
e4f6c919dc | ||
|
|
4d806ff2b1 | ||
|
|
bf8f12274f | ||
|
|
f4f438d9fe | ||
|
|
2434f373be | ||
|
|
2bb2061f97 | ||
|
|
2c011a5c2a | ||
|
|
f66b0ccea1 | ||
|
|
665dd8447d | ||
|
|
1b61ce31e6 | ||
|
|
ef4d960698 | ||
|
|
b6d557b632 | ||
|
|
b700bd356c | ||
|
|
620dd7d3ef | ||
|
|
6575121902 | ||
|
|
7c1755a0dc | ||
|
|
8ad301a666 | ||
|
|
ae24cd4939 | ||
|
|
7152e1845e | ||
|
|
96c1dd4081 | ||
|
|
87c7b3a663 | ||
|
|
c1be46a539 | ||
|
|
4655e0018b | ||
|
|
da5ba2e3be | ||
|
|
aaf95f565f | ||
|
|
f32b984e77 | ||
|
|
548aa4c7cd | ||
|
|
0ccceaac77 | ||
|
|
70f534f1d8 | ||
|
|
61fe95b300 | ||
|
|
915e0e8613 | ||
|
|
aace2580da | ||
|
|
3d36905664 | ||
|
|
0d671423da | ||
|
|
aebfcb9437 | ||
|
|
be7ef7beb1 | ||
|
|
d77ed0c5cc | ||
|
|
e57e7bcec5 | ||
|
|
a637842ce4 | ||
|
|
fc54ec49af | ||
|
|
5c43d8510a | ||
|
|
83f84ded8d | ||
|
|
5658da34a2 | ||
|
|
38e8ef6535 | ||
|
|
8c89b06238 | ||
|
|
d85c021305 | ||
|
|
83bb18df03 | ||
|
|
93105a3e89 | ||
|
|
ba3b899115 | ||
|
|
fcfbc1d1da | ||
|
|
72486b448c | ||
|
|
7dea1b7870 | ||
|
|
4de2c496c9 | ||
|
|
9e1393a392 | ||
|
|
0901690ed6 | ||
|
|
95303648cc | ||
|
|
1dbb08c045 | ||
|
|
00a7d9a180 | ||
|
|
6c549dc086 | ||
|
|
dc368e326a | ||
|
|
e42188a627 | ||
|
|
7a6a337eff | ||
|
|
3907344884 |
@@ -12,7 +12,7 @@ module.exports = {
|
||||
parserOptions: {
|
||||
project: ["./tsconfig.json"]
|
||||
},
|
||||
ignorePatterns: ["scripts/**/*", "plugin-runtime/**/*", "src-tauri/**/*", "plugins/**/*"],
|
||||
ignorePatterns: ["src-tauri/**/*", "plugins/**/*"],
|
||||
settings: {
|
||||
react: {
|
||||
version: "detect"
|
||||
|
||||
71
.github/workflows/artifacts.yml
vendored
Normal file
71
.github/workflows/artifacts.yml
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
name: Generate Artifacts
|
||||
on:
|
||||
push:
|
||||
tags: [ v* ]
|
||||
|
||||
permissions: write-all
|
||||
|
||||
jobs:
|
||||
build-artifacts:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: macos-12
|
||||
target: aarch64-apple-darwin
|
||||
- os: macos-latest
|
||||
target: x86_64-apple-darwin
|
||||
- os: windows-2022
|
||||
target: x86_64-pc-windows-msvc
|
||||
- os: ubuntu-20.04
|
||||
target: x86_64-unknown-linux-gnu
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
- name: Cache Rust
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
./src-tauri/target
|
||||
key: ${{ runner.os }}-cargo-${{ hashFiles('src-tauri/Cargo.lock') }}
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 18
|
||||
cache: 'npm'
|
||||
- name: install dependencies (ubuntu only)
|
||||
if: matrix.os == 'ubuntu-20.04'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
- name: Run tests
|
||||
run: npm test
|
||||
# Pin dev version to get non-default targets
|
||||
# https://github.com/tauri-apps/tauri-action/issues/356
|
||||
- uses: tauri-apps/tauri-action@dev
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
|
||||
TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
|
||||
ENABLE_CODE_SIGNING: ${{ secrets.APPLE_CERTIFICATE }}
|
||||
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
|
||||
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
|
||||
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
|
||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
|
||||
with:
|
||||
tagName: 'v__VERSION__'
|
||||
releaseName: 'Release __VERSION__'
|
||||
releaseBody: '<!-- Release Notes -->'
|
||||
releaseDraft: true
|
||||
prerelease: false
|
||||
args: '--target ${{ matrix.target }}'
|
||||
18
.github/workflows/ci-js.yml
vendored
18
.github/workflows/ci-js.yml
vendored
@@ -1,18 +0,0 @@
|
||||
on:
|
||||
pull_request:
|
||||
branches: [develop]
|
||||
|
||||
name: CI (JS)
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Lint/Test
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- run: npm ci
|
||||
- run: npm run lint
|
||||
- run: npm test
|
||||
36
.github/workflows/ci-rust.yml
vendored
36
.github/workflows/ci-rust.yml
vendored
@@ -1,36 +0,0 @@
|
||||
on:
|
||||
pull_request:
|
||||
branches: [develop]
|
||||
paths:
|
||||
- src-tauri/**
|
||||
- .github/workflows/**
|
||||
|
||||
name: CI (Rust)
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src-tauri
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Check/Test
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
- uses: actions/cache@v3
|
||||
continue-on-error: false
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
target/
|
||||
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-cargo-
|
||||
- run: cargo check --all
|
||||
- run: cargo test --all
|
||||
120
.github/workflows/release.yml
vendored
120
.github/workflows/release.yml
vendored
@@ -1,120 +0,0 @@
|
||||
name: Generate Artifacts
|
||||
on:
|
||||
push:
|
||||
tags: [ v* ]
|
||||
|
||||
env:
|
||||
YAAK_PLUGINS_DIR: checkout/plugins
|
||||
|
||||
jobs:
|
||||
build-artifacts:
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
name: Build
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: 'macos-latest' # for Arm-based macs (M1 and above).
|
||||
args: '--target aarch64-apple-darwin'
|
||||
yaak_arch: 'arm64'
|
||||
- platform: 'macos-latest' # for Intel-based macs.
|
||||
args: '--target x86_64-apple-darwin'
|
||||
yaak_arch: 'x64'
|
||||
- platform: 'ubuntu-22.04' # for Tauri v1, you could replace this with ubuntu-20.04.
|
||||
args: ''
|
||||
yaak_arch: 'x64'
|
||||
- platform: 'windows-latest'
|
||||
args: ''
|
||||
yaak_arch: 'x64'
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- name: Checkout yaakapp/app
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.22'
|
||||
|
||||
- name: install Rust stable
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
# Those targets are only used on macos runners so it's in an `if` to slightly speed up windows and linux builds.
|
||||
targets: ${{ matrix.platform == 'macos-latest' && 'aarch64-apple-darwin,x86_64-apple-darwin' || '' }}
|
||||
|
||||
- uses: actions/cache@v3
|
||||
continue-on-error: false
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
target/
|
||||
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-cargo-
|
||||
|
||||
- name: install dependencies (ubuntu only)
|
||||
if: matrix.platform == 'ubuntu-22.04' # This must match the platform value defined above.
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
|
||||
|
||||
- name: Install Node dependencies
|
||||
run: |
|
||||
npm ci
|
||||
|
||||
- name: Install plugin-runtime Node dependencies
|
||||
working-directory: plugin-runtime
|
||||
run: |
|
||||
npm ci
|
||||
|
||||
- name: Install Protoc for plugin-runtime
|
||||
uses: arduino/setup-protoc@v3
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Install yaak CLI
|
||||
run: go install github.com/yaakapp/yaakcli@latest
|
||||
|
||||
- name: Run lint
|
||||
run: npm run lint
|
||||
|
||||
- name: Checkout yaakapp/plugins
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: yaakapp/plugins
|
||||
path: ${{ env.YAAK_PLUGINS_DIR }}
|
||||
|
||||
- name: Set version
|
||||
run: npm run replace-version
|
||||
env:
|
||||
YAAK_VERSION: ${{ github.ref_name }}
|
||||
|
||||
- uses: tauri-apps/tauri-action@v0
|
||||
env:
|
||||
YAAK_PLUGINS_DIR: ${{ env.YAAK_PLUGINS_DIR }}
|
||||
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
|
||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
|
||||
ENABLE_CODE_SIGNING: ${{ secrets.APPLE_CERTIFICATE }}
|
||||
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
|
||||
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
|
||||
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
|
||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
|
||||
with:
|
||||
tagName: 'v__VERSION__'
|
||||
releaseName: 'Release __VERSION__'
|
||||
releaseBody: 'https://yaak.app/changelog/__VERSION__'
|
||||
releaseDraft: true
|
||||
prerelease: false
|
||||
args: ${{ matrix.args }}
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -26,8 +26,3 @@ dist-ssr
|
||||
|
||||
*.sqlite
|
||||
*.sqlite-*
|
||||
|
||||
.cargo
|
||||
|
||||
.tmp
|
||||
tmp
|
||||
|
||||
20
.run/Build Desktop.run.xml
Normal file
20
.run/Build Desktop.run.xml
Normal file
@@ -0,0 +1,20 @@
|
||||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="Build Desktop" type="ShConfigurationType">
|
||||
<option name="SCRIPT_TEXT" value="npm run tauri build -- --target universal-apple-darwin" />
|
||||
<option name="INDEPENDENT_SCRIPT_PATH" value="true" />
|
||||
<option name="SCRIPT_PATH" value="" />
|
||||
<option name="SCRIPT_OPTIONS" value="" />
|
||||
<option name="INDEPENDENT_SCRIPT_WORKING_DIRECTORY" value="true" />
|
||||
<option name="SCRIPT_WORKING_DIRECTORY" value="$PROJECT_DIR$" />
|
||||
<option name="INDEPENDENT_INTERPRETER_PATH" value="true" />
|
||||
<option name="INTERPRETER_PATH" value="/bin/zsh" />
|
||||
<option name="INTERPRETER_OPTIONS" value="" />
|
||||
<option name="EXECUTE_IN_TERMINAL" value="true" />
|
||||
<option name="EXECUTE_SCRIPT_FILE" value="false" />
|
||||
<envs>
|
||||
<env name="TAURI_KEY_PASSWORD" value="fishhook-upstream-wash-assured" />
|
||||
<env name="TAURI_PRIVATE_KEY" value="dW50cnVzdGVkIGNvbW1lbnQ6IHJzaWduIGVuY3J5cHRlZCBzZWNyZXQga2V5ClJXUlRZMEl5OGxWaytTa3dIa2xXVUltQzRGUXIzd2lYQ2NpV0ZhQURSbWJWZ1NrK0tnY0FBQkFBQUFBQUFBQUFBQUlBQUFBQUV2M1VKdVRyVHpHSzhQdGc2ZVFtOVNsMU5tNEVSN280cFNrbXhncW9tdjNXaFJZUTJqUzQ5Q01zWTJWRVhaY1pGNHNjR1NFR3JmcWFRN09NdWdGMXpZVXhzejR4V3lDV1JpZHlnbW5LNS9vMFFtRlZjbUl4YjZSNzhlMmk3ait5SExYcG5QZUkxOFE9Cg==" />
|
||||
</envs>
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
</component>
|
||||
1
.tauriignore
Normal file
1
.tauriignore
Normal file
@@ -0,0 +1 @@
|
||||
plugins
|
||||
Binary file not shown.
27
index.html
27
index.html
@@ -1,27 +1,24 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8"/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Yaak App</title>
|
||||
<script src="http://localhost:8097"></script>
|
||||
|
||||
<!-- Certain elements like webview (and maybe <select>?) will use background
|
||||
color depending on document background color-->
|
||||
<!-- <script src="http://localhost:8097"></script>-->
|
||||
<style>
|
||||
html, body {
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
html, body {
|
||||
background-color: #1b1a29;
|
||||
body {
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
body {
|
||||
background-color: black;
|
||||
}
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body class="text-base">
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<div id="cm-portal" class="cm-portal"></div>
|
||||
<div id="react-portal"></div>
|
||||
|
||||
6343
package-lock.json
generated
6343
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
79
package.json
79
package.json
@@ -4,27 +4,27 @@
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"start": "npm run tauri-dev:desktop",
|
||||
"tauri-dev:desktop": "tauri dev --no-watch --config ./src-tauri/tauri-dev.conf.json",
|
||||
"tauri-dev:ios": "tauri ios dev --force-ip-prompt --config ./src-tauri/tauri-dev.conf.json",
|
||||
"start": "npm run build:plugins && npm run tauri-dev",
|
||||
"tauri-dev": "tauri dev --no-watch --config src-tauri/tauri-dev.conf.json",
|
||||
"tauri-build": "tauri build",
|
||||
"tauri": "tauri",
|
||||
"dev:js": "vite dev",
|
||||
"build": "npm run build:frontend",
|
||||
"dev": "vite dev",
|
||||
"lint": "tsc && eslint . --ext .ts,.tsx",
|
||||
"build:icon:release": "tauri icon design/icon.png --output ./src-tauri/icons/release",
|
||||
"build:icon:dev": "tauri icon design/icon-dev.png --output ./src-tauri/icons/dev",
|
||||
"build": "run-p build:*",
|
||||
"build:js": "vite build",
|
||||
"build:vendor-protoc": "node scripts/vendor-protoc.cjs",
|
||||
"build:vendor-plugins": "node scripts/vendor-plugins.cjs",
|
||||
"build:vendor-node": "node scripts/vendor-node.cjs",
|
||||
"build:plugin-runtime": "npm run --prefix plugin-runtime build",
|
||||
"prepare": "husky install",
|
||||
"replace-version": "node scripts/replace-version.cjs"
|
||||
"build:icon:release": "tauri icon design/icon.png --output src-tauri/icons/release",
|
||||
"build:icon:dev": "tauri icon design/icon-dev.png --output src-tauri/icons/dev",
|
||||
"build:frontend": "vite build",
|
||||
"build:plugins": "run-p build:plugin:importer-insomnia build:plugin:importer-postman build:plugin:importer-yaak",
|
||||
"build:plugin:importer-insomnia": "cd src-tauri/plugins/importer-insomnia && vite build",
|
||||
"build:plugin:importer-postman": "cd src-tauri/plugins/importer-postman && vite build",
|
||||
"build:plugin:importer-yaak": "cd src-tauri/plugins/importer-yaak && vite build",
|
||||
"test": "vitest",
|
||||
"coverage": "vitest run --coverage",
|
||||
"prepare": "husky install"
|
||||
},
|
||||
"dependencies": {
|
||||
"@codemirror/commands": "^6.2.1",
|
||||
"@codemirror/lang-javascript": "^6.2.2",
|
||||
"@codemirror/lang-javascript": "^6.1.4",
|
||||
"@codemirror/lang-json": "^6.0.1",
|
||||
"@codemirror/lang-xml": "^6.0.2",
|
||||
"@codemirror/language": "^6.6.0",
|
||||
@@ -32,27 +32,21 @@
|
||||
"@lezer/generator": "^1.2.2",
|
||||
"@lezer/highlight": "^1.1.3",
|
||||
"@lezer/lr": "^1.3.3",
|
||||
"@radix-ui/react-icons": "^1.2.0",
|
||||
"@react-hook/resize-observer": "^1.2.6",
|
||||
"@tailwindcss/container-queries": "^0.1.0",
|
||||
"@tanstack/react-query": "^5.45.1",
|
||||
"@tauri-apps/api": "^2.0.0-beta.15",
|
||||
"@tauri-apps/plugin-clipboard-manager": "^2.1.0-beta.5",
|
||||
"@tauri-apps/plugin-dialog": "^2.0.0-beta.7",
|
||||
"@tauri-apps/plugin-fs": "^2.0.0-beta.7",
|
||||
"@tauri-apps/plugin-os": "^2.0.0-beta.7",
|
||||
"@tauri-apps/plugin-shell": "^2.0.0-beta.8",
|
||||
"@tanstack/query-sync-storage-persister": "^4.27.1",
|
||||
"@tanstack/react-query": "^4.28.0",
|
||||
"@tanstack/react-query-devtools": "^4.28.0",
|
||||
"@tanstack/react-query-persist-client": "^4.28.0",
|
||||
"@tauri-apps/api": "^1.5.1",
|
||||
"buffer": "^6.0.3",
|
||||
"classnames": "^2.3.2",
|
||||
"cm6-graphql": "^0.0.9",
|
||||
"codemirror": "^6.0.1",
|
||||
"codemirror-json-schema": "^0.6.1",
|
||||
"date-fns": "^3.3.1",
|
||||
"fast-fuzzy": "^1.12.0",
|
||||
"focus-trap-react": "^10.1.1",
|
||||
"format-graphql": "^1.4.0",
|
||||
"framer-motion": "^9.0.4",
|
||||
"lucide-react": "^0.309.0",
|
||||
"mime": "^4.0.1",
|
||||
"papaparse": "^5.4.1",
|
||||
"parse-color": "^1.0.0",
|
||||
"react": "^18.2.0",
|
||||
@@ -60,18 +54,15 @@
|
||||
"react-dnd-html5-backend": "^16.0.1",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-helmet-async": "^1.3.0",
|
||||
"react-pdf": "^9.0.0",
|
||||
"react-router-dom": "^6.8.1",
|
||||
"react-use": "^17.4.0",
|
||||
"slugify": "^1.6.6",
|
||||
"tauri-plugin-log-api": "github:tauri-apps/tauri-plugin-log#v2",
|
||||
"uuid": "^9.0.0",
|
||||
"xml-formatter": "^3.6.2"
|
||||
"tauri-plugin-log-api": "github:tauri-apps/tauri-plugin-log#v1",
|
||||
"uuid": "^9.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tailwindcss/nesting": "^0.0.0-insiders.565cd3e",
|
||||
"@tanstack/react-query-devtools": "^5.45.1",
|
||||
"@tauri-apps/cli": "^2.0.0-beta.22",
|
||||
"@tauri-apps/cli": "^1.5.6",
|
||||
"@types/node": "^18.7.10",
|
||||
"@types/papaparse": "^5.3.7",
|
||||
"@types/parse-color": "^1.0.1",
|
||||
@@ -79,11 +70,10 @@
|
||||
"@types/react": "^18.0.31",
|
||||
"@types/react-dom": "^18.0.11",
|
||||
"@types/uuid": "^9.0.1",
|
||||
"@typescript-eslint/eslint-plugin": "^7.0.2",
|
||||
"@typescript-eslint/parser": "^7.0.2",
|
||||
"@vitejs/plugin-react": "^4.2.1",
|
||||
"@typescript-eslint/eslint-plugin": "^5.57.0",
|
||||
"@typescript-eslint/parser": "^5.57.0",
|
||||
"@vitejs/plugin-react": "^3.1.0",
|
||||
"autoprefixer": "^10.4.13",
|
||||
"decompress": "^4.2.1",
|
||||
"eslint": "^8.34.0",
|
||||
"eslint-config-prettier": "^8.6.0",
|
||||
"eslint-plugin-import": "^2.27.5",
|
||||
@@ -91,21 +81,18 @@
|
||||
"eslint-plugin-react": "^7.32.2",
|
||||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"husky": "^8.0.3",
|
||||
"internal-ip": "^8.0.0",
|
||||
"lint-staged": "^15.0.2",
|
||||
"nodejs-file-downloader": "^4.13.0",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"postcss": "^8.4.21",
|
||||
"postcss-nesting": "^11.2.1",
|
||||
"prettier": "^2.8.4",
|
||||
"react-devtools": "^4.27.2",
|
||||
"rimraf": "^5.0.7",
|
||||
"react-devtools": "^4.28.5",
|
||||
"tailwindcss": "^3.2.7",
|
||||
"typescript": "^5.4.5",
|
||||
"vite": "^5.0.0",
|
||||
"vite-plugin-static-copy": "^1.0.5",
|
||||
"vite-plugin-svgr": "^4.2.0",
|
||||
"vite-plugin-top-level-await": "^1.4.1"
|
||||
"typescript": "^5.0.2",
|
||||
"vite": "^4.0.0",
|
||||
"vite-plugin-svgr": "^2.4.0",
|
||||
"vite-plugin-top-level-await": "^1.2.4",
|
||||
"vitest": "^0.29.2"
|
||||
},
|
||||
"lint-staged": {
|
||||
"*.{ts,tsx}": "eslint --cache --fix",
|
||||
|
||||
3
plugin-runtime/.gitignore
vendored
3
plugin-runtime/.gitignore
vendored
@@ -1,3 +0,0 @@
|
||||
build
|
||||
node_modules
|
||||
*.blob
|
||||
@@ -1,5 +0,0 @@
|
||||
{
|
||||
"watch": ["src"],
|
||||
"ext": "ts",
|
||||
"exec": "node -r ts-node/register ./src/index.ts"
|
||||
}
|
||||
3004
plugin-runtime/package-lock.json
generated
3004
plugin-runtime/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,23 +0,0 @@
|
||||
{
|
||||
"name": "@yaak/plugin-runtime",
|
||||
"scripts": {
|
||||
"dev": "nodemon",
|
||||
"build": "run-p build:*",
|
||||
"build:main": "esbuild src/index.ts --bundle --platform=node --outfile=build/index.cjs",
|
||||
"build:worker": "esbuild src/index.worker.ts --bundle --platform=node --outfile=build/index.worker.cjs",
|
||||
"build:proto": "grpc_tools_node_protoc --ts_proto_out=src/gen --ts_proto_opt=outputServices=nice-grpc,outputServices=generic-definitions,useExactTypes=false --proto_path=../proto ../proto/plugins/*.proto"
|
||||
},
|
||||
"dependencies": {
|
||||
"long": "^5.2.3",
|
||||
"nice-grpc": "^2.1.9",
|
||||
"protobufjs": "^7.3.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"grpc-tools": "^1.12.4",
|
||||
"nodemon": "^3.1.4",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"ts-node": "^10.9.2",
|
||||
"ts-proto": "^1.180.0",
|
||||
"typescript": "^5.5.2"
|
||||
}
|
||||
}
|
||||
@@ -1,92 +0,0 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import path from 'node:path';
|
||||
import { Worker } from 'node:worker_threads';
|
||||
import { PluginInfo } from './plugins';
|
||||
|
||||
export interface ParentToWorkerEvent<T = any> {
|
||||
callbackId: string;
|
||||
name: string;
|
||||
payload: T;
|
||||
}
|
||||
|
||||
export type WorkerToParentSuccessEvent<T> = {
|
||||
callbackId: string;
|
||||
payload: T;
|
||||
};
|
||||
|
||||
export type WorkerToParentErrorEvent = {
|
||||
callbackId: string;
|
||||
error: string;
|
||||
};
|
||||
|
||||
export type WorkerToParentEvent<T = any> = WorkerToParentErrorEvent | WorkerToParentSuccessEvent<T>;
|
||||
|
||||
export class PluginHandle {
|
||||
readonly pluginDir: string;
|
||||
readonly #worker: Worker;
|
||||
|
||||
constructor(pluginDir: string) {
|
||||
this.pluginDir = pluginDir;
|
||||
|
||||
const workerPath = path.join(__dirname, 'index.worker.cjs');
|
||||
this.#worker = new Worker(workerPath, {
|
||||
workerData: {
|
||||
pluginDir: this.pluginDir,
|
||||
},
|
||||
});
|
||||
|
||||
this.#worker.on('error', this.#handleError.bind(this));
|
||||
this.#worker.on('exit', this.#handleExit.bind(this));
|
||||
}
|
||||
|
||||
async getInfo(): Promise<PluginInfo> {
|
||||
return this.#callPlugin('info', null);
|
||||
}
|
||||
|
||||
async runResponseFilter({ filter, body }: { filter: string; body: string }): Promise<string> {
|
||||
return this.#callPlugin('run-filter', { filter, body });
|
||||
}
|
||||
|
||||
async runExport(request: any): Promise<string> {
|
||||
return this.#callPlugin('run-export', request);
|
||||
}
|
||||
|
||||
async runImport(data: string): Promise<string> {
|
||||
const result = await this.#callPlugin('run-import', data);
|
||||
|
||||
// Plugin returns object, but we convert to string
|
||||
return JSON.stringify(result, null, 2);
|
||||
}
|
||||
|
||||
#callPlugin<P, R>(name: string, payload: P): Promise<R> {
|
||||
const callbackId = `cb_${randomUUID().replaceAll('-', '')}`;
|
||||
return new Promise((resolve, reject) => {
|
||||
const cb = (e: WorkerToParentEvent<R>) => {
|
||||
if (e.callbackId !== callbackId) return;
|
||||
|
||||
if ('error' in e) {
|
||||
reject(e.error);
|
||||
} else {
|
||||
resolve(e.payload as R);
|
||||
}
|
||||
|
||||
this.#worker.removeListener('message', cb);
|
||||
};
|
||||
|
||||
this.#worker.addListener('message', cb);
|
||||
this.#worker.postMessage({ callbackId, name, payload });
|
||||
});
|
||||
}
|
||||
|
||||
async #handleError(err: Error) {
|
||||
console.error('Plugin errored', this.pluginDir, err);
|
||||
}
|
||||
|
||||
async #handleExit(code: number) {
|
||||
if (code === 0) {
|
||||
console.log('Plugin exited successfully', this.pluginDir);
|
||||
} else {
|
||||
console.log('Plugin exited with error', code, this.pluginDir);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
import { PluginHandle } from './PluginHandle';
|
||||
import { loadPlugins, PluginInfo } from './plugins';
|
||||
|
||||
export class PluginManager {
|
||||
#handles: PluginHandle[] | null = null;
|
||||
static #instance: PluginManager | null = null;
|
||||
|
||||
public static instance(): PluginManager {
|
||||
if (PluginManager.#instance == null) {
|
||||
PluginManager.#instance = new PluginManager();
|
||||
PluginManager.#instance.plugins(); // Trigger workers to boot, as it takes a few seconds
|
||||
}
|
||||
return PluginManager.#instance;
|
||||
}
|
||||
|
||||
async plugins(): Promise<PluginHandle[]> {
|
||||
this.#handles = this.#handles ?? loadPlugins();
|
||||
return this.#handles;
|
||||
}
|
||||
|
||||
async #pluginsWithInfo(): Promise<{ plugin: PluginHandle; info: PluginInfo }[]> {
|
||||
const plugins = await this.plugins();
|
||||
return Promise.all(plugins.map(async (plugin) => ({ plugin, info: await plugin.getInfo() })));
|
||||
}
|
||||
|
||||
async pluginsWith(capability: PluginInfo['capabilities'][0]): Promise<PluginHandle[]> {
|
||||
return (await this.#pluginsWithInfo())
|
||||
.filter((v) => v.info.capabilities.includes(capability))
|
||||
.map((v) => v.plugin);
|
||||
}
|
||||
|
||||
async plugin(name: string): Promise<PluginHandle | null> {
|
||||
return (await this.#pluginsWithInfo()).find((v) => v.info.name === name)?.plugin ?? null;
|
||||
}
|
||||
|
||||
async pluginOrThrow(name: string): Promise<PluginHandle> {
|
||||
const plugin = await this.plugin(name);
|
||||
if (plugin == null) {
|
||||
throw new Error(`Failed to find plugin by ${name}`);
|
||||
}
|
||||
|
||||
return plugin;
|
||||
}
|
||||
}
|
||||
@@ -1,432 +0,0 @@
|
||||
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
|
||||
// versions:
|
||||
// protoc-gen-ts_proto v1.180.0
|
||||
// protoc v3.19.1
|
||||
// source: plugins/runtime.proto
|
||||
|
||||
/* eslint-disable */
|
||||
import { type CallContext, type CallOptions } from "nice-grpc-common";
|
||||
import * as _m0 from "protobufjs/minimal";
|
||||
|
||||
export const protobufPackage = "yaak.plugins.runtime";
|
||||
|
||||
export interface PluginInfo {
|
||||
plugin: string;
|
||||
}
|
||||
|
||||
export interface HookResponse {
|
||||
info: PluginInfo | undefined;
|
||||
data: string;
|
||||
}
|
||||
|
||||
export interface HookImportRequest {
|
||||
data: string;
|
||||
}
|
||||
|
||||
export interface HookResponseFilterRequest {
|
||||
filter: string;
|
||||
body: string;
|
||||
contentType: string;
|
||||
}
|
||||
|
||||
export interface HookExportRequest {
|
||||
request: string;
|
||||
}
|
||||
|
||||
function createBasePluginInfo(): PluginInfo {
|
||||
return { plugin: "" };
|
||||
}
|
||||
|
||||
export const PluginInfo = {
|
||||
encode(message: PluginInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
|
||||
if (message.plugin !== "") {
|
||||
writer.uint32(10).string(message.plugin);
|
||||
}
|
||||
return writer;
|
||||
},
|
||||
|
||||
decode(input: _m0.Reader | Uint8Array, length?: number): PluginInfo {
|
||||
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
|
||||
let end = length === undefined ? reader.len : reader.pos + length;
|
||||
const message = createBasePluginInfo();
|
||||
while (reader.pos < end) {
|
||||
const tag = reader.uint32();
|
||||
switch (tag >>> 3) {
|
||||
case 1:
|
||||
if (tag !== 10) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.plugin = reader.string();
|
||||
continue;
|
||||
}
|
||||
if ((tag & 7) === 4 || tag === 0) {
|
||||
break;
|
||||
}
|
||||
reader.skipType(tag & 7);
|
||||
}
|
||||
return message;
|
||||
},
|
||||
|
||||
fromJSON(object: any): PluginInfo {
|
||||
return { plugin: isSet(object.plugin) ? globalThis.String(object.plugin) : "" };
|
||||
},
|
||||
|
||||
toJSON(message: PluginInfo): unknown {
|
||||
const obj: any = {};
|
||||
if (message.plugin !== "") {
|
||||
obj.plugin = message.plugin;
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
|
||||
create(base?: DeepPartial<PluginInfo>): PluginInfo {
|
||||
return PluginInfo.fromPartial(base ?? {});
|
||||
},
|
||||
fromPartial(object: DeepPartial<PluginInfo>): PluginInfo {
|
||||
const message = createBasePluginInfo();
|
||||
message.plugin = object.plugin ?? "";
|
||||
return message;
|
||||
},
|
||||
};
|
||||
|
||||
function createBaseHookResponse(): HookResponse {
|
||||
return { info: undefined, data: "" };
|
||||
}
|
||||
|
||||
export const HookResponse = {
|
||||
encode(message: HookResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
|
||||
if (message.info !== undefined) {
|
||||
PluginInfo.encode(message.info, writer.uint32(10).fork()).ldelim();
|
||||
}
|
||||
if (message.data !== "") {
|
||||
writer.uint32(18).string(message.data);
|
||||
}
|
||||
return writer;
|
||||
},
|
||||
|
||||
decode(input: _m0.Reader | Uint8Array, length?: number): HookResponse {
|
||||
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
|
||||
let end = length === undefined ? reader.len : reader.pos + length;
|
||||
const message = createBaseHookResponse();
|
||||
while (reader.pos < end) {
|
||||
const tag = reader.uint32();
|
||||
switch (tag >>> 3) {
|
||||
case 1:
|
||||
if (tag !== 10) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.info = PluginInfo.decode(reader, reader.uint32());
|
||||
continue;
|
||||
case 2:
|
||||
if (tag !== 18) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.data = reader.string();
|
||||
continue;
|
||||
}
|
||||
if ((tag & 7) === 4 || tag === 0) {
|
||||
break;
|
||||
}
|
||||
reader.skipType(tag & 7);
|
||||
}
|
||||
return message;
|
||||
},
|
||||
|
||||
fromJSON(object: any): HookResponse {
|
||||
return {
|
||||
info: isSet(object.info) ? PluginInfo.fromJSON(object.info) : undefined,
|
||||
data: isSet(object.data) ? globalThis.String(object.data) : "",
|
||||
};
|
||||
},
|
||||
|
||||
toJSON(message: HookResponse): unknown {
|
||||
const obj: any = {};
|
||||
if (message.info !== undefined) {
|
||||
obj.info = PluginInfo.toJSON(message.info);
|
||||
}
|
||||
if (message.data !== "") {
|
||||
obj.data = message.data;
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
|
||||
create(base?: DeepPartial<HookResponse>): HookResponse {
|
||||
return HookResponse.fromPartial(base ?? {});
|
||||
},
|
||||
fromPartial(object: DeepPartial<HookResponse>): HookResponse {
|
||||
const message = createBaseHookResponse();
|
||||
message.info = (object.info !== undefined && object.info !== null)
|
||||
? PluginInfo.fromPartial(object.info)
|
||||
: undefined;
|
||||
message.data = object.data ?? "";
|
||||
return message;
|
||||
},
|
||||
};
|
||||
|
||||
function createBaseHookImportRequest(): HookImportRequest {
|
||||
return { data: "" };
|
||||
}
|
||||
|
||||
export const HookImportRequest = {
|
||||
encode(message: HookImportRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
|
||||
if (message.data !== "") {
|
||||
writer.uint32(10).string(message.data);
|
||||
}
|
||||
return writer;
|
||||
},
|
||||
|
||||
decode(input: _m0.Reader | Uint8Array, length?: number): HookImportRequest {
|
||||
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
|
||||
let end = length === undefined ? reader.len : reader.pos + length;
|
||||
const message = createBaseHookImportRequest();
|
||||
while (reader.pos < end) {
|
||||
const tag = reader.uint32();
|
||||
switch (tag >>> 3) {
|
||||
case 1:
|
||||
if (tag !== 10) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.data = reader.string();
|
||||
continue;
|
||||
}
|
||||
if ((tag & 7) === 4 || tag === 0) {
|
||||
break;
|
||||
}
|
||||
reader.skipType(tag & 7);
|
||||
}
|
||||
return message;
|
||||
},
|
||||
|
||||
fromJSON(object: any): HookImportRequest {
|
||||
return { data: isSet(object.data) ? globalThis.String(object.data) : "" };
|
||||
},
|
||||
|
||||
toJSON(message: HookImportRequest): unknown {
|
||||
const obj: any = {};
|
||||
if (message.data !== "") {
|
||||
obj.data = message.data;
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
|
||||
create(base?: DeepPartial<HookImportRequest>): HookImportRequest {
|
||||
return HookImportRequest.fromPartial(base ?? {});
|
||||
},
|
||||
fromPartial(object: DeepPartial<HookImportRequest>): HookImportRequest {
|
||||
const message = createBaseHookImportRequest();
|
||||
message.data = object.data ?? "";
|
||||
return message;
|
||||
},
|
||||
};
|
||||
|
||||
function createBaseHookResponseFilterRequest(): HookResponseFilterRequest {
|
||||
return { filter: "", body: "", contentType: "" };
|
||||
}
|
||||
|
||||
export const HookResponseFilterRequest = {
|
||||
encode(message: HookResponseFilterRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
|
||||
if (message.filter !== "") {
|
||||
writer.uint32(10).string(message.filter);
|
||||
}
|
||||
if (message.body !== "") {
|
||||
writer.uint32(18).string(message.body);
|
||||
}
|
||||
if (message.contentType !== "") {
|
||||
writer.uint32(26).string(message.contentType);
|
||||
}
|
||||
return writer;
|
||||
},
|
||||
|
||||
decode(input: _m0.Reader | Uint8Array, length?: number): HookResponseFilterRequest {
|
||||
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
|
||||
let end = length === undefined ? reader.len : reader.pos + length;
|
||||
const message = createBaseHookResponseFilterRequest();
|
||||
while (reader.pos < end) {
|
||||
const tag = reader.uint32();
|
||||
switch (tag >>> 3) {
|
||||
case 1:
|
||||
if (tag !== 10) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.filter = reader.string();
|
||||
continue;
|
||||
case 2:
|
||||
if (tag !== 18) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.body = reader.string();
|
||||
continue;
|
||||
case 3:
|
||||
if (tag !== 26) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.contentType = reader.string();
|
||||
continue;
|
||||
}
|
||||
if ((tag & 7) === 4 || tag === 0) {
|
||||
break;
|
||||
}
|
||||
reader.skipType(tag & 7);
|
||||
}
|
||||
return message;
|
||||
},
|
||||
|
||||
fromJSON(object: any): HookResponseFilterRequest {
|
||||
return {
|
||||
filter: isSet(object.filter) ? globalThis.String(object.filter) : "",
|
||||
body: isSet(object.body) ? globalThis.String(object.body) : "",
|
||||
contentType: isSet(object.contentType) ? globalThis.String(object.contentType) : "",
|
||||
};
|
||||
},
|
||||
|
||||
toJSON(message: HookResponseFilterRequest): unknown {
|
||||
const obj: any = {};
|
||||
if (message.filter !== "") {
|
||||
obj.filter = message.filter;
|
||||
}
|
||||
if (message.body !== "") {
|
||||
obj.body = message.body;
|
||||
}
|
||||
if (message.contentType !== "") {
|
||||
obj.contentType = message.contentType;
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
|
||||
create(base?: DeepPartial<HookResponseFilterRequest>): HookResponseFilterRequest {
|
||||
return HookResponseFilterRequest.fromPartial(base ?? {});
|
||||
},
|
||||
fromPartial(object: DeepPartial<HookResponseFilterRequest>): HookResponseFilterRequest {
|
||||
const message = createBaseHookResponseFilterRequest();
|
||||
message.filter = object.filter ?? "";
|
||||
message.body = object.body ?? "";
|
||||
message.contentType = object.contentType ?? "";
|
||||
return message;
|
||||
},
|
||||
};
|
||||
|
||||
function createBaseHookExportRequest(): HookExportRequest {
|
||||
return { request: "" };
|
||||
}
|
||||
|
||||
export const HookExportRequest = {
|
||||
encode(message: HookExportRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
|
||||
if (message.request !== "") {
|
||||
writer.uint32(10).string(message.request);
|
||||
}
|
||||
return writer;
|
||||
},
|
||||
|
||||
decode(input: _m0.Reader | Uint8Array, length?: number): HookExportRequest {
|
||||
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
|
||||
let end = length === undefined ? reader.len : reader.pos + length;
|
||||
const message = createBaseHookExportRequest();
|
||||
while (reader.pos < end) {
|
||||
const tag = reader.uint32();
|
||||
switch (tag >>> 3) {
|
||||
case 1:
|
||||
if (tag !== 10) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.request = reader.string();
|
||||
continue;
|
||||
}
|
||||
if ((tag & 7) === 4 || tag === 0) {
|
||||
break;
|
||||
}
|
||||
reader.skipType(tag & 7);
|
||||
}
|
||||
return message;
|
||||
},
|
||||
|
||||
fromJSON(object: any): HookExportRequest {
|
||||
return { request: isSet(object.request) ? globalThis.String(object.request) : "" };
|
||||
},
|
||||
|
||||
toJSON(message: HookExportRequest): unknown {
|
||||
const obj: any = {};
|
||||
if (message.request !== "") {
|
||||
obj.request = message.request;
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
|
||||
create(base?: DeepPartial<HookExportRequest>): HookExportRequest {
|
||||
return HookExportRequest.fromPartial(base ?? {});
|
||||
},
|
||||
fromPartial(object: DeepPartial<HookExportRequest>): HookExportRequest {
|
||||
const message = createBaseHookExportRequest();
|
||||
message.request = object.request ?? "";
|
||||
return message;
|
||||
},
|
||||
};
|
||||
|
||||
export type PluginRuntimeDefinition = typeof PluginRuntimeDefinition;
|
||||
export const PluginRuntimeDefinition = {
|
||||
name: "PluginRuntime",
|
||||
fullName: "yaak.plugins.runtime.PluginRuntime",
|
||||
methods: {
|
||||
hookImport: {
|
||||
name: "hookImport",
|
||||
requestType: HookImportRequest,
|
||||
requestStream: false,
|
||||
responseType: HookResponse,
|
||||
responseStream: false,
|
||||
options: {},
|
||||
},
|
||||
hookExport: {
|
||||
name: "hookExport",
|
||||
requestType: HookExportRequest,
|
||||
requestStream: false,
|
||||
responseType: HookResponse,
|
||||
responseStream: false,
|
||||
options: {},
|
||||
},
|
||||
hookResponseFilter: {
|
||||
name: "hookResponseFilter",
|
||||
requestType: HookResponseFilterRequest,
|
||||
requestStream: false,
|
||||
responseType: HookResponse,
|
||||
responseStream: false,
|
||||
options: {},
|
||||
},
|
||||
},
|
||||
} as const;
|
||||
|
||||
export interface PluginRuntimeServiceImplementation<CallContextExt = {}> {
|
||||
hookImport(request: HookImportRequest, context: CallContext & CallContextExt): Promise<DeepPartial<HookResponse>>;
|
||||
hookExport(request: HookExportRequest, context: CallContext & CallContextExt): Promise<DeepPartial<HookResponse>>;
|
||||
hookResponseFilter(
|
||||
request: HookResponseFilterRequest,
|
||||
context: CallContext & CallContextExt,
|
||||
): Promise<DeepPartial<HookResponse>>;
|
||||
}
|
||||
|
||||
export interface PluginRuntimeClient<CallOptionsExt = {}> {
|
||||
hookImport(request: DeepPartial<HookImportRequest>, options?: CallOptions & CallOptionsExt): Promise<HookResponse>;
|
||||
hookExport(request: DeepPartial<HookExportRequest>, options?: CallOptions & CallOptionsExt): Promise<HookResponse>;
|
||||
hookResponseFilter(
|
||||
request: DeepPartial<HookResponseFilterRequest>,
|
||||
options?: CallOptions & CallOptionsExt,
|
||||
): Promise<HookResponse>;
|
||||
}
|
||||
|
||||
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
|
||||
|
||||
export type DeepPartial<T> = T extends Builtin ? T
|
||||
: T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
|
||||
: T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
|
||||
: T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
|
||||
: Partial<T>;
|
||||
|
||||
function isSet(value: any): boolean {
|
||||
return value !== null && value !== undefined;
|
||||
}
|
||||
@@ -1,87 +0,0 @@
|
||||
import { isAbortError } from 'abort-controller-x';
|
||||
import { createServer, ServerError, ServerMiddlewareCall, Status } from 'nice-grpc';
|
||||
import { CallContext } from 'nice-grpc-common';
|
||||
import * as fs from 'node:fs';
|
||||
import {
|
||||
DeepPartial,
|
||||
HookExportRequest,
|
||||
HookImportRequest,
|
||||
HookResponse,
|
||||
HookResponseFilterRequest,
|
||||
PluginRuntimeDefinition,
|
||||
PluginRuntimeServiceImplementation,
|
||||
} from './gen/plugins/runtime';
|
||||
import { PluginManager } from './PluginManager';
|
||||
|
||||
class PluginRuntimeService implements PluginRuntimeServiceImplementation {
|
||||
#manager: PluginManager;
|
||||
|
||||
constructor() {
|
||||
this.#manager = PluginManager.instance();
|
||||
}
|
||||
|
||||
async hookExport(request: HookExportRequest): Promise<DeepPartial<HookResponse>> {
|
||||
const plugin = await this.#manager.pluginOrThrow('exporter-curl');
|
||||
const data = await plugin.runExport(JSON.parse(request.request));
|
||||
const info = { plugin: (await plugin.getInfo()).name };
|
||||
return { info, data };
|
||||
}
|
||||
|
||||
async hookImport(request: HookImportRequest): Promise<DeepPartial<HookResponse>> {
|
||||
const plugins = await this.#manager.pluginsWith('import');
|
||||
for (const p of plugins) {
|
||||
const data = await p.runImport(request.data);
|
||||
if (data != null && data !== 'null') {
|
||||
const info = { plugin: (await p.getInfo()).name };
|
||||
return { info, data };
|
||||
}
|
||||
}
|
||||
|
||||
throw new ServerError(Status.UNKNOWN, 'No importers found for data');
|
||||
}
|
||||
|
||||
async hookResponseFilter(request: HookResponseFilterRequest): Promise<DeepPartial<HookResponse>> {
|
||||
const pluginName = request.contentType.includes('json') ? 'filter-jsonpath' : 'filter-xpath';
|
||||
const plugin = await this.#manager.pluginOrThrow(pluginName);
|
||||
const data = await plugin.runResponseFilter(request);
|
||||
const info = { plugin: (await plugin.getInfo()).name };
|
||||
return { info, data };
|
||||
}
|
||||
}
|
||||
|
||||
let server = createServer();
|
||||
|
||||
async function* errorHandlingMiddleware<Request, Response>(
|
||||
call: ServerMiddlewareCall<Request, Response>,
|
||||
context: CallContext,
|
||||
) {
|
||||
try {
|
||||
return yield* call.next(call.request, context);
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof ServerError || isAbortError(error)) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
let details = String(error);
|
||||
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
// @ts-ignore
|
||||
details += `: ${error.stack}`;
|
||||
}
|
||||
|
||||
throw new ServerError(Status.UNKNOWN, details);
|
||||
}
|
||||
}
|
||||
|
||||
server = server.use(errorHandlingMiddleware);
|
||||
server.add(PluginRuntimeDefinition, new PluginRuntimeService());
|
||||
|
||||
// Start on random port if YAAK_GRPC_PORT_FILE_PATH is set, or :4000
|
||||
const addr = process.env.YAAK_GRPC_PORT_FILE_PATH ? 'localhost:0' : 'localhost:4000';
|
||||
server.listen(addr).then((port) => {
|
||||
console.log('gRPC server listening on', `http://localhost:${port}`);
|
||||
if (process.env.YAAK_GRPC_PORT_FILE_PATH) {
|
||||
console.log('Wrote port file to', process.env.YAAK_GRPC_PORT_FILE_PATH);
|
||||
fs.writeFileSync(process.env.YAAK_GRPC_PORT_FILE_PATH, JSON.stringify({ port }, null, 2));
|
||||
}
|
||||
});
|
||||
@@ -1,76 +0,0 @@
|
||||
import { readFileSync } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { parentPort, workerData } from 'node:worker_threads';
|
||||
import { ParentToWorkerEvent } from './PluginHandle';
|
||||
import { PluginInfo } from './plugins';
|
||||
|
||||
new Promise<void>(async (resolve, reject) => {
|
||||
const { pluginDir } = workerData;
|
||||
const pathMod = path.join(pluginDir, 'build/index.js');
|
||||
const pathPkg = path.join(pluginDir, 'package.json');
|
||||
|
||||
let pkg: { [x: string]: any };
|
||||
try {
|
||||
pkg = JSON.parse(readFileSync(pathPkg, 'utf8'));
|
||||
} catch (err) {
|
||||
// TODO: Do something better here
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
const mod = (await import(`file://${pathMod}`)).default ?? {};
|
||||
|
||||
const info: PluginInfo = {
|
||||
capabilities: [],
|
||||
name: pkg['name'] ?? 'n/a',
|
||||
dir: pluginDir,
|
||||
};
|
||||
|
||||
if (typeof mod['pluginHookImport'] === 'function') {
|
||||
info.capabilities.push('import');
|
||||
}
|
||||
|
||||
if (typeof mod['pluginHookExport'] === 'function') {
|
||||
info.capabilities.push('export');
|
||||
}
|
||||
|
||||
if (typeof mod['pluginHookResponseFilter'] === 'function') {
|
||||
info.capabilities.push('filter');
|
||||
}
|
||||
|
||||
console.log('Loaded plugin', info.name, info.capabilities, info.dir);
|
||||
|
||||
function reply<T>(originalMsg: ParentToWorkerEvent, payload: T) {
|
||||
parentPort!.postMessage({ payload, callbackId: originalMsg.callbackId });
|
||||
}
|
||||
|
||||
function replyErr(originalMsg: ParentToWorkerEvent, error: unknown) {
|
||||
parentPort!.postMessage({
|
||||
error: String(error),
|
||||
callbackId: originalMsg.callbackId,
|
||||
});
|
||||
}
|
||||
|
||||
parentPort!.on('message', async (msg: ParentToWorkerEvent) => {
|
||||
try {
|
||||
const ctx = { todo: 'implement me' };
|
||||
if (msg.name === 'run-import') {
|
||||
reply(msg, await mod.pluginHookImport(ctx, msg.payload));
|
||||
} else if (msg.name === 'run-filter') {
|
||||
reply(msg, await mod.pluginHookResponseFilter(ctx, msg.payload));
|
||||
} else if (msg.name === 'run-export') {
|
||||
reply(msg, await mod.pluginHookExport(ctx, msg.payload));
|
||||
} else if (msg.name === 'info') {
|
||||
reply(msg, info);
|
||||
} else {
|
||||
console.log('Unknown message', msg);
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
replyErr(msg, err);
|
||||
}
|
||||
});
|
||||
|
||||
resolve();
|
||||
}).catch((err) => {
|
||||
console.log('failed to boot plugin', err);
|
||||
});
|
||||
@@ -1,18 +0,0 @@
|
||||
import * as fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { PluginHandle } from './PluginHandle';
|
||||
|
||||
export interface PluginInfo {
|
||||
name: string;
|
||||
dir: string;
|
||||
capabilities: ('import' | 'export' | 'filter')[];
|
||||
}
|
||||
|
||||
export function loadPlugins(): PluginHandle[] {
|
||||
const pluginsDir = process.env.YAAK_PLUGINS_DIR;
|
||||
if (!pluginsDir) throw new Error('YAAK_PLUGINS_DIR is not set');
|
||||
console.log('Loading plugins from', pluginsDir);
|
||||
|
||||
const pluginDirs = fs.readdirSync(pluginsDir).map((p) => path.join(pluginsDir, p));
|
||||
return pluginDirs.map((pluginDir) => new PluginHandle(pluginDir));
|
||||
}
|
||||
@@ -1,25 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "node16",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"target": "es2021",
|
||||
"lib": ["es2021"],
|
||||
"noImplicitAny": false,
|
||||
"moduleResolution": "node",
|
||||
"sourceMap": true,
|
||||
"outDir": "dist",
|
||||
"baseUrl": ".",
|
||||
"skipLibCheck": true,
|
||||
"paths": {
|
||||
"*": [
|
||||
"node_modules/*",
|
||||
"src/types/*"
|
||||
]
|
||||
}
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
]
|
||||
}
|
||||
@@ -1,32 +0,0 @@
|
||||
syntax = "proto3";
|
||||
|
||||
package yaak.plugins.runtime;
|
||||
|
||||
service PluginRuntime {
|
||||
rpc hookImport (HookImportRequest) returns (HookResponse);
|
||||
rpc hookExport (HookExportRequest) returns (HookResponse);
|
||||
rpc hookResponseFilter (HookResponseFilterRequest) returns (HookResponse);
|
||||
}
|
||||
|
||||
message PluginInfo {
|
||||
string plugin = 1;
|
||||
}
|
||||
|
||||
message HookResponse {
|
||||
PluginInfo info = 1;
|
||||
string data = 2;
|
||||
}
|
||||
|
||||
message HookImportRequest {
|
||||
string data = 1;
|
||||
}
|
||||
|
||||
message HookResponseFilterRequest {
|
||||
string filter = 1;
|
||||
string body = 2;
|
||||
string contentType = 3;
|
||||
}
|
||||
|
||||
message HookExportRequest {
|
||||
string request = 1;
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
|
||||
const version = process.env.YAAK_VERSION?.replace('v', '');
|
||||
if (!version) {
|
||||
throw new Error('YAAK_VERSION environment variable not set')
|
||||
}
|
||||
|
||||
const tauriConfigPath = path.join(__dirname, '../src-tauri/tauri.conf.json');
|
||||
const tauriConfig = JSON.parse(fs.readFileSync(tauriConfigPath, 'utf8'));
|
||||
|
||||
tauriConfig.version = version;
|
||||
|
||||
console.log('Writing version ' + version + ' to ' + tauriConfigPath)
|
||||
fs.writeFileSync(tauriConfigPath, JSON.stringify(tauriConfig, null, 2));
|
||||
@@ -1,69 +0,0 @@
const path = require('node:path');
const decompress = require('decompress');
const Downloader = require("nodejs-file-downloader");
const {rmSync, cpSync, mkdirSync, existsSync} = require("node:fs");
const {execSync} = require("node:child_process");

const NODE_VERSION = 'v22.5.1';

// `${process.platform}_${process.arch}`
const MAC_ARM = 'darwin_arm64';
const MAC_X64 = 'darwin_x64';
const LNX_X64 = 'linux_x64';
const WIN_X64 = 'win32_x64';

const URL_MAP = {
  [MAC_ARM]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-darwin-arm64.tar.gz`,
  [MAC_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-darwin-x64.tar.gz`,
  [LNX_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-linux-x64.tar.gz`,
  [WIN_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-win-x64.zip`,
};

const SRC_BIN_MAP = {
  [MAC_ARM]: `node-${NODE_VERSION}-darwin-arm64/bin/node`,
  [MAC_X64]: `node-${NODE_VERSION}-darwin-x64/bin/node`,
  [LNX_X64]: `node-${NODE_VERSION}-linux-x64/bin/node`,
  [WIN_X64]: `node-${NODE_VERSION}-win-x64/node.exe`,
};

const DST_BIN_MAP = {
  darwin_arm64: 'yaaknode-aarch64-apple-darwin',
  darwin_x64: 'yaaknode-x86_64-apple-darwin',
  linux_x64: 'yaaknode-x86_64-unknown-linux-gnu',
  win32_x64: 'yaaknode-x86_64-pc-windows-msvc.exe',
};

const key = `${process.platform}_${process.env.YAAK_TARGET_ARCH ?? process.arch}`;

const destDir = path.join(__dirname, `..`, 'src-tauri', 'vendored', 'node');
const binDest = path.join(destDir, DST_BIN_MAP[key]);
console.log(`Vendoring NodeJS ${NODE_VERSION} for ${key}`);

if (existsSync(binDest) && execSync(`${binDest} --version`).toString('utf-8').trim() === NODE_VERSION) {
  console.log("NodeJS already vendored");
  return;
}

rmSync(destDir, {recursive: true, force: true});
mkdirSync(destDir, {recursive: true});

(async function () {
  const url = URL_MAP[key];
  const tmpDir = path.join(__dirname, 'tmp', Date.now().toString());

  // Download GitHub release artifact
  const {filePath} = await new Downloader({url, directory: tmpDir,}).download();

  // Decompress to the same directory
  await decompress(filePath, tmpDir, {});

  // Copy binary
  const binSrc = path.join(tmpDir, SRC_BIN_MAP[key]);
  cpSync(binSrc, binDest);
  rmSync(tmpDir, {recursive: true, force: true});

  console.log("Downloaded NodeJS to", binDest);
})().catch(err => {
  console.log('Script failed:', err);
  process.exit(1);
});
@@ -1,25 +0,0 @@
const {readdirSync, cpSync} = require("node:fs");
const path = require("node:path");
const {execSync} = require("node:child_process");
const pluginsDir = process.env.YAAK_PLUGINS_DIR;
if (!pluginsDir) {
  console.log("YAAK_PLUGINS_DIR is not set");
  process.exit(1);
}

console.log('Installing Yaak plugins dependencies', pluginsDir);
execSync('npm ci', {cwd: pluginsDir});
console.log('Building Yaak plugins', pluginsDir);
execSync('npm run build', {cwd: pluginsDir});

console.log('Copying Yaak plugins to', pluginsDir);

const pluginsRoot = path.join(pluginsDir, 'plugins');
for (const name of readdirSync(pluginsRoot)) {
  const dir = path.join(pluginsRoot, name);
  if (name.startsWith('.')) continue;
  const destDir = path.join(__dirname, '../src-tauri/vendored/plugins/', name);
  console.log(`Copying ${name} to ${destDir}`);
  cpSync(path.join(dir, 'package.json'), path.join(destDir, 'package.json'));
  cpSync(path.join(dir, 'build/index.js'), path.join(destDir, 'build/index.js'));
}
@@ -1,62 +0,0 @@
const decompress = require('decompress');
const Downloader = require("nodejs-file-downloader");
const path = require("node:path");
const {rmSync, mkdirSync, cpSync} = require("node:fs");

// `${process.platform}_${process.arch}`
const MAC_ARM = 'darwin_arm64';
const MAC_X64 = 'darwin_x64';
const LNX_X64 = 'linux_x64';
const WIN_X64 = 'win32_x64';

const URL_MAP = {
  [MAC_ARM]: 'https://github.com/protocolbuffers/protobuf/releases/download/v27.2/protoc-27.2-osx-aarch_64.zip',
  [MAC_X64]: 'https://github.com/protocolbuffers/protobuf/releases/download/v27.2/protoc-27.2-osx-x86_64.zip',
  [LNX_X64]: 'https://github.com/protocolbuffers/protobuf/releases/download/v27.2/protoc-27.2-linux-x86_64.zip',
  [WIN_X64]: 'https://github.com/protocolbuffers/protobuf/releases/download/v27.2/protoc-27.2-win64.zip',
};

const SRC_BIN_MAP = {
  [MAC_ARM]: 'bin/protoc',
  [MAC_X64]: 'bin/protoc',
  [LNX_X64]: 'bin/protoc',
  [WIN_X64]: 'bin/protoc.exe',
};

const DST_BIN_MAP = {
  [MAC_ARM]: 'yaakprotoc-aarch64-apple-darwin',
  [MAC_X64]: 'yaakprotoc-x86_64-apple-darwin',
  [LNX_X64]: 'yaakprotoc-x86_64-unknown-linux-gnu',
  [WIN_X64]: 'yaakprotoc-x86_64-pc-windows-msvc.exe',
};

const dstDir = path.join(__dirname, `..`, 'src-tauri', 'vendored', 'protoc');
rmSync(dstDir, {recursive: true, force: true});
mkdirSync(dstDir, {recursive: true});

(async function () {
  const key = `${process.platform}_${process.env.YAAK_TARGET_ARCH ?? process.arch}`;
  console.log("Vendoring protoc binary for", key);
  const url = URL_MAP[key];
  const tmpDir = path.join(__dirname, 'tmp', Date.now().toString());

  // Download GitHub release artifact
  const {filePath} = await new Downloader({url, directory: tmpDir,}).download();

  // Decompress to the same directory
  await decompress(filePath, tmpDir, {});

  // Copy binary
  const binSrc = path.join(tmpDir, SRC_BIN_MAP[key]);
  const binDst = path.join(dstDir, DST_BIN_MAP[key]);
  cpSync(binSrc, binDst);

  // Copy other files
  const includeSrc = path.join(tmpDir, 'include');
  const includeDst = path.join(dstDir, 'include');
  cpSync(includeSrc, includeDst, {recursive: true});

  rmSync(tmpDir, {recursive: true, force: true});

  console.log("Downloaded protoc to", binDst);
})().catch(err => console.log('Script failed:', err));
src-tauri/.gitignore
@@ -2,4 +2,3 @@
# will have compiled files and executables
/target/

vendored

src-tauri/.sqlx/query-02506ad41cc94cd937422ef1977a97174431f008a9fb4ce39667d587a858b876.json (generated, new file)
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO folders (\n id,\n workspace_id,\n folder_id,\n name,\n sort_priority\n )\n VALUES (?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n folder_id = excluded.folder_id,\n sort_priority = excluded.sort_priority\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 5
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "02506ad41cc94cd937422ef1977a97174431f008a9fb4ce39667d587a858b876"
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, request_id, updated_at, created_at, url, status,\n status_reason, content_length, body_path, elapsed, elapsed_headers, error,\n version, remote_addr,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpResponseHeader>>\"\n FROM http_responses\n WHERE request_id = ?\n ORDER BY created_at DESC\n LIMIT ?\n ",
|
||||
"query": "\n SELECT id, model, workspace_id, request_id, updated_at, created_at, url,\n status, status_reason, content_length, body_path, elapsed, error,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpResponseHeader>>\"\n FROM http_responses\n WHERE request_id = ?\n ORDER BY created_at DESC\n LIMIT ?\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
@@ -63,29 +63,14 @@
|
||||
"ordinal": 11,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "elapsed_headers",
|
||||
"ordinal": 12,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "error",
|
||||
"ordinal": 13,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "version",
|
||||
"ordinal": 14,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "remote_addr",
|
||||
"ordinal": 15,
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "headers!: sqlx::types::Json<Vec<HttpResponseHeader>>",
|
||||
"ordinal": 16,
|
||||
"ordinal": 13,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
@@ -105,12 +90,9 @@
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "ac38621cd947c3be9ca0d8ea73325fe35c3866d16f6482fc32c23762f112dc83"
|
||||
"hash": "07b0c398efd1d5f8f479652de658716a9e7faef6aba6583dd209a4f290c5edd1"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO workspaces (\n id, name, description, variables, setting_request_timeout,\n setting_follow_redirects, setting_validate_certificates\n )\n VALUES (?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n description = excluded.description,\n variables = excluded.variables,\n setting_request_timeout = excluded.setting_request_timeout,\n setting_follow_redirects = excluded.setting_follow_redirects,\n setting_validate_certificates = excluded.setting_validate_certificates\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 7
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "12b265491d1ebba19e1ce8a660e458ffbcd8c0850aef16ba1f70e358623ac66a"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO environments (\n id, workspace_id, name, variables\n )\n VALUES (?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n variables = excluded.variables\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "13cb883199e81966174e6fda9c252bf7213fe01b5346266c0a89dc0ac89eda64"
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, created_at, updated_at, folder_id, name, sort_priority\n FROM folders\n WHERE id = ?\n ",
|
||||
"query": "\n SELECT\n id,\n model,\n workspace_id,\n created_at,\n updated_at,\n folder_id,\n name,\n sort_priority\n FROM folders\n WHERE id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
@@ -58,5 +58,5 @@
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "ae98a7b35a5cb80a4bcd04faa22545deac2a5e9bfb814b60191f16b98ed49796"
|
||||
"hash": "1428d25b6aa3d6ec55742a968571fa951da0406d7bb32408883c584eae7dd53c"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO grpc_events (\n id, workspace_id, request_id, connection_id, content, event_type, metadata,\n status, error\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n content = excluded.content,\n event_type = excluded.event_type,\n metadata = excluded.metadata,\n status = excluded.status,\n error = excluded.error\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 9
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "14930955e8a914e292dfbebfce2ea43cc41c1d517386ed816c16d436bf626bf3"
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, created_at, updated_at, folder_id, name, sort_priority\n FROM folders\n WHERE workspace_id = ?\n ",
|
||||
"query": "\n SELECT\n id,\n model,\n workspace_id,\n created_at,\n updated_at,\n folder_id,\n name,\n sort_priority\n FROM folders\n WHERE workspace_id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
@@ -58,5 +58,5 @@
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "558e72df3c6f2635c6b3d52a199f9a5f7a3d82b379ff9af36645dcfb92548fdd"
|
||||
"hash": "1517b0f86c841b5f1247bd40c3a9b38ab001d846a410b6e3cd36f9e844d50ddb"
|
||||
}
|
||||
@@ -1,104 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, folder_id, created_at, updated_at, name, sort_priority,\n url, service, method, message, authentication_type,\n authentication AS \"authentication!: Json<HashMap<String, JsonValue>>\",\n metadata AS \"metadata!: sqlx::types::Json<Vec<GrpcMetadataEntry>>\"\n FROM grpc_requests\n WHERE workspace_id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "model",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "workspace_id",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "folder_id",
|
||||
"ordinal": 3,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 4,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"ordinal": 5,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "sort_priority",
|
||||
"ordinal": 7,
|
||||
"type_info": "Float"
|
||||
},
|
||||
{
|
||||
"name": "url",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "service",
|
||||
"ordinal": 9,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "method",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "message",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "authentication_type",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "authentication!: Json<HashMap<String, JsonValue>>",
|
||||
"ordinal": 13,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "metadata!: sqlx::types::Json<Vec<GrpcMetadataEntry>>",
|
||||
"ordinal": 14,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "1821c2f60b9fa4514d58eb73b23e25ad683b80b9bd0c2944063190a0d0a19ee5"
|
||||
}
|
||||
@@ -1,80 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, request_id, connection_id, created_at, content, status, error,\n event_type AS \"event_type!: GrpcEventType\",\n metadata AS \"metadata!: sqlx::types::Json<HashMap<String, String>>\"\n FROM grpc_events\n WHERE connection_id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "model",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "workspace_id",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "request_id",
|
||||
"ordinal": 3,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "connection_id",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 5,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "content",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "status",
|
||||
"ordinal": 7,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "error",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "event_type!: GrpcEventType",
|
||||
"ordinal": 9,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "metadata!: sqlx::types::Json<HashMap<String, String>>",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "18ada3bb42c29f1940ab2e61961d79cdd69210f3dc2076aedcadeba8e34dcb6e"
|
||||
}
|
||||
src-tauri/.sqlx/query-198bd086ccc87d2e6c24cb1c717f486d3ab58c0c958ede850c018fc266eade87.json (generated, new file)
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO http_responses (\n id,\n request_id,\n workspace_id,\n elapsed,\n url,\n status,\n status_reason,\n content_length,\n body_path,\n headers\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?);\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 10
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "198bd086ccc87d2e6c24cb1c717f486d3ab58c0c958ede850c018fc266eade87"
|
||||
}
|
||||
src-tauri/.sqlx/query-294cbe19f9ddd9519ace3558df4308948082ec0ce7096855aa7d8fba519b8b4f.json (generated, new file)
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n UPDATE http_responses SET (\n elapsed,\n url,\n status,\n status_reason,\n content_length,\n body_path,\n error,\n headers,\n updated_at\n ) = (?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP) WHERE id = ?;\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 9
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "294cbe19f9ddd9519ace3558df4308948082ec0ce7096855aa7d8fba519b8b4f"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO settings (id)\n VALUES ('default')\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "2c181a4dc13efc52fe6a5a68291c5678a9624020df4ea744e78396f6926d5c88"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO http_responses (\n id, request_id, workspace_id, elapsed, elapsed_headers, url, status, status_reason,\n content_length, body_path, headers, version, remote_addr\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 13
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "2c9658a639c5e4994ae9c8ec30bd4e40a1945d640962991f879928619950ef62"
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, request_id, updated_at, created_at, url, status,\n status_reason, content_length, body_path, elapsed, elapsed_headers, error,\n version, remote_addr,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpResponseHeader>>\"\n FROM http_responses\n WHERE id = ?\n ",
|
||||
"query": "\n SELECT id, model, workspace_id, request_id, updated_at, created_at, url,\n status, status_reason, content_length, body_path, elapsed, error,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpResponseHeader>>\"\n FROM http_responses\n WHERE workspace_id = ?\n ORDER BY created_at DESC\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
@@ -63,29 +63,14 @@
|
||||
"ordinal": 11,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "elapsed_headers",
|
||||
"ordinal": 12,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "error",
|
||||
"ordinal": 13,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "version",
|
||||
"ordinal": 14,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "remote_addr",
|
||||
"ordinal": 15,
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "headers!: sqlx::types::Json<Vec<HttpResponseHeader>>",
|
||||
"ordinal": 16,
|
||||
"ordinal": 13,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
@@ -105,12 +90,9 @@
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "0fa6b56f8c996d14908a56928674b4b35af5fa36f63dc48b9b66ee6dfde78976"
|
||||
"hash": "3d199d371be948211f4a50c869b307f5df60784293c52397d77a187633a406dd"
|
||||
}
|
||||
@@ -1,92 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, request_id, created_at, updated_at, service,\n method, elapsed, status, error, url,\n trailers AS \"trailers!: sqlx::types::Json<HashMap<String, String>>\"\n FROM grpc_connections\n WHERE request_id = ?\n ORDER BY created_at DESC\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "model",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "workspace_id",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "request_id",
|
||||
"ordinal": 3,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 4,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"ordinal": 5,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "service",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "method",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "elapsed",
|
||||
"ordinal": 8,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "status",
|
||||
"ordinal": 9,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "error",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "url",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "trailers!: sqlx::types::Json<HashMap<String, String>>",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "3e8651cca7feecc208a676dfd24c7d8775040d5287c16890056dcb474674edfb"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n DELETE FROM grpc_connections\n WHERE id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "42bc0ded60b44dab19daf6d8fc7df83d83af5d88ea0b84514fdc877a668c27cd"
|
||||
}
|
||||
src-tauri/.sqlx/query-4a5fd6c81401ccafac64b05cb476da92cc30919d5bdb0a0226ea5e30d5b30c0f.json (generated, new file)
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO http_requests (\n id,\n workspace_id,\n folder_id,\n name,\n url,\n url_parameters,\n method,\n body,\n body_type,\n authentication,\n authentication_type,\n headers,\n sort_priority\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n folder_id = excluded.folder_id,\n method = excluded.method,\n headers = excluded.headers,\n body = excluded.body,\n body_type = excluded.body_type,\n authentication = excluded.authentication,\n authentication_type = excluded.authentication_type,\n url = excluded.url,\n url_parameters = excluded.url_parameters,\n sort_priority = excluded.sort_priority\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 13
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "4a5fd6c81401ccafac64b05cb476da92cc30919d5bdb0a0226ea5e30d5b30c0f"
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, created_at, updated_at, workspace_id, name,\n cookies AS \"cookies!: sqlx::types::Json<Vec<JsonValue>>\"\n FROM cookie_jars WHERE id = ?\n ",
|
||||
"query": "\n SELECT id, model, created_at, updated_at, name, description,\n variables AS \"variables!: sqlx::types::Json<Vec<EnvironmentVariable>>\"\n FROM workspaces\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
@@ -24,23 +24,23 @@
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "workspace_id",
|
||||
"name": "name",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"name": "description",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "cookies!: sqlx::types::Json<Vec<JsonValue>>",
|
||||
"name": "variables!: sqlx::types::Json<Vec<EnvironmentVariable>>",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
@@ -52,5 +52,5 @@
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "f5f20f3b37d932617499a0da50997edad59e4f5998b15c50ed6eae2e97064068"
|
||||
"hash": "5588db23df7f30dc75857e05395ebbcf2384e2ac0d7cb87f76d74c6d50781d7b"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO grpc_requests (\n id, name, workspace_id, folder_id, sort_priority, url, service, method, message,\n authentication_type, authentication, metadata\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n workspace_id = excluded.workspace_id,\n name = excluded.name,\n folder_id = excluded.folder_id,\n sort_priority = excluded.sort_priority,\n url = excluded.url,\n service = excluded.service,\n method = excluded.method,\n message = excluded.message,\n authentication_type = excluded.authentication_type,\n authentication = excluded.authentication,\n metadata = excluded.metadata\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 12
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "5af82cd333895d3d7d67a92f37b0feb338f615b88aea2bd09cb5809008c645a3"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO http_requests (\n id, workspace_id, folder_id, name, url, url_parameters, method, body, body_type,\n authentication, authentication_type, headers, sort_priority\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n workspace_id = excluded.workspace_id,\n name = excluded.name,\n folder_id = excluded.folder_id,\n method = excluded.method,\n headers = excluded.headers,\n body = excluded.body,\n body_type = excluded.body_type,\n authentication = excluded.authentication,\n authentication_type = excluded.authentication_type,\n url = excluded.url,\n url_parameters = excluded.url_parameters,\n sort_priority = excluded.sort_priority\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 13
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "5f2f40062abbe93e23b38876319cf16d4d2b3f8d0be32ffe7848528c725e1429"
|
||||
}
|
||||
src-tauri/.sqlx/query-610223ad10b6e25926d486ba775a74b55625fcc4e6637d8a805d44ec3f3b9532.json (generated, new file)
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO workspaces (id, name, description, variables)\n VALUES (?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n description = excluded.description,\n variables = excluded.variables\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "610223ad10b6e25926d486ba775a74b55625fcc4e6637d8a805d44ec3f3b9532"
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, folder_id, created_at, updated_at, name, url,\n url_parameters AS \"url_parameters!: sqlx::types::Json<Vec<HttpUrlParameter>>\",\n method, body_type, authentication_type, sort_priority,\n body AS \"body!: Json<HashMap<String, JsonValue>>\",\n authentication AS \"authentication!: Json<HashMap<String, JsonValue>>\",\n headers AS \"headers!: sqlx::types::Json<Vec<HttpRequestHeader>>\"\n FROM http_requests\n WHERE workspace_id = ?\n ",
|
||||
"query": "\n SELECT\n id,\n model,\n workspace_id,\n folder_id,\n created_at,\n updated_at,\n name,\n url,\n url_parameters AS \"url_parameters!: sqlx::types::Json<Vec<HttpUrlParameter>>\",\n method,\n body AS \"body!: Json<HashMap<String, JsonValue>>\",\n body_type,\n authentication AS \"authentication!: Json<HashMap<String, JsonValue>>\",\n authentication_type,\n sort_priority,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpRequestHeader>>\"\n FROM http_requests\n WHERE id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
@@ -54,29 +54,29 @@
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "body_type",
|
||||
"name": "body!: Json<HashMap<String, JsonValue>>",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "authentication_type",
|
||||
"name": "body_type",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "sort_priority",
|
||||
"name": "authentication!: Json<HashMap<String, JsonValue>>",
|
||||
"ordinal": 12,
|
||||
"type_info": "Float"
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "body!: Json<HashMap<String, JsonValue>>",
|
||||
"name": "authentication_type",
|
||||
"ordinal": 13,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "authentication!: Json<HashMap<String, JsonValue>>",
|
||||
"name": "sort_priority",
|
||||
"ordinal": 14,
|
||||
"type_info": "Text"
|
||||
"type_info": "Float"
|
||||
},
|
||||
{
|
||||
"name": "headers!: sqlx::types::Json<Vec<HttpRequestHeader>>",
|
||||
@@ -98,13 +98,13 @@
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "e61c0dddb3e86d271cb9399faa4e4443342796cb72bdd43a821fae2994ae8e2f"
|
||||
"hash": "6483f3ffeb90e019e9078d98bb831b8e4fbedfb45751d6cd33bd42e518b634dd"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO grpc_connections (\n id, workspace_id, request_id, service, method, elapsed,\n status, error, trailers, url\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n service = excluded.service,\n method = excluded.method,\n elapsed = excluded.elapsed,\n status = excluded.status,\n error = excluded.error,\n trailers = excluded.trailers,\n url = excluded.url\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 10
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "66deed028199c78ed15ea2f837907887c2a2cb564d1d076dd4ebf0ecbc82e098"
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, request_id, created_at, updated_at, service,\n method, elapsed, status, error, url,\n trailers AS \"trailers!: sqlx::types::Json<HashMap<String, String>>\"\n FROM grpc_connections\n WHERE id = ?\n ",
|
||||
"query": "\n SELECT id, model, workspace_id, request_id, updated_at, created_at, url,\n status, status_reason, content_length, body_path, elapsed, error,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpResponseHeader>>\"\n FROM http_responses\n WHERE id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
@@ -24,48 +24,53 @@
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"name": "updated_at",
|
||||
"ordinal": 4,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"name": "created_at",
|
||||
"ordinal": 5,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "service",
|
||||
"name": "url",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "method",
|
||||
"name": "status",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "elapsed",
|
||||
"ordinal": 8,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "status",
|
||||
"name": "status_reason",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "content_length",
|
||||
"ordinal": 9,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "error",
|
||||
"name": "body_path",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "url",
|
||||
"name": "elapsed",
|
||||
"ordinal": 11,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "error",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "trailers!: sqlx::types::Json<HashMap<String, String>>",
|
||||
"ordinal": 12,
|
||||
"name": "headers!: sqlx::types::Json<Vec<HttpResponseHeader>>",
|
||||
"ordinal": 13,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
@@ -81,12 +86,13 @@
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "d4b64c466624eb75e0f5bd201ebfb6a73d25eb7c9e09cb9690afdb7fef5fca8b"
|
||||
"hash": "679a519475adeb50abf046114d3c0d1e48e103f2bb11ef47637d7f0b00ed241f"
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, created_at, updated_at, workspace_id, name,\n cookies AS \"cookies!: sqlx::types::Json<Vec<JsonValue>>\"\n FROM cookie_jars WHERE workspace_id = ?\n ",
|
||||
"query": "\n SELECT\n id,\n model,\n workspace_id,\n created_at,\n updated_at,\n name,\n variables AS \"variables!: sqlx::types::Json<Vec<EnvironmentVariable>>\"\n FROM environments\n WHERE id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
@@ -14,19 +14,19 @@
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"name": "workspace_id",
|
||||
"ordinal": 2,
|
||||
"type_info": "Datetime"
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"name": "created_at",
|
||||
"ordinal": 3,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "workspace_id",
|
||||
"name": "updated_at",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
@@ -34,9 +34,9 @@
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "cookies!: sqlx::types::Json<Vec<JsonValue>>",
|
||||
"name": "variables!: sqlx::types::Json<Vec<EnvironmentVariable>>",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
"type_info": "Null"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
@@ -52,5 +52,5 @@
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "612efa9ac45723dc604a88f5e7e288b4055fec4ba7d9102131bd255c037fa021"
|
||||
"hash": "689bcc92b914f50c14921faa796c07a256deb84c832fc3d90200b393fb159417"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n UPDATE settings SET (\n theme, appearance, theme_dark, theme_light, update_channel,\n interface_font_size, interface_scale, editor_font_size, editor_soft_wrap,\n open_workspace_new_window\n ) = (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) WHERE id = 'default';\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 10
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "6b5edf45a6799cd7f87c23a3c7f818ad110d58c601f694a619d9345ae9e8e11d"
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, folder_id, created_at, updated_at, name, url, method,\n body_type, authentication_type, sort_priority,\n url_parameters AS \"url_parameters!: sqlx::types::Json<Vec<HttpUrlParameter>>\",\n body AS \"body!: Json<HashMap<String, JsonValue>>\",\n authentication AS \"authentication!: Json<HashMap<String, JsonValue>>\",\n headers AS \"headers!: sqlx::types::Json<Vec<HttpRequestHeader>>\"\n FROM http_requests\n WHERE id = ?\n ",
|
||||
"query": "\n SELECT\n id,\n model,\n workspace_id,\n folder_id,\n created_at,\n updated_at,\n name,\n url,\n url_parameters AS \"url_parameters!: sqlx::types::Json<Vec<HttpUrlParameter>>\",\n method,\n body AS \"body!: Json<HashMap<String, JsonValue>>\",\n body_type,\n authentication AS \"authentication!: Json<HashMap<String, JsonValue>>\",\n authentication_type,\n sort_priority,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpRequestHeader>>\"\n FROM http_requests\n WHERE workspace_id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
@@ -44,40 +44,40 @@
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "method",
|
||||
"name": "url_parameters!: sqlx::types::Json<Vec<HttpUrlParameter>>",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "body_type",
|
||||
"name": "method",
|
||||
"ordinal": 9,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "authentication_type",
|
||||
"name": "body!: Json<HashMap<String, JsonValue>>",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "sort_priority",
|
||||
"name": "body_type",
|
||||
"ordinal": 11,
|
||||
"type_info": "Float"
|
||||
},
|
||||
{
|
||||
"name": "url_parameters!: sqlx::types::Json<Vec<HttpUrlParameter>>",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "body!: Json<HashMap<String, JsonValue>>",
|
||||
"ordinal": 13,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "authentication!: Json<HashMap<String, JsonValue>>",
|
||||
"ordinal": 14,
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "authentication_type",
|
||||
"ordinal": 13,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "sort_priority",
|
||||
"ordinal": 14,
|
||||
"type_info": "Float"
|
||||
},
|
||||
{
|
||||
"name": "headers!: sqlx::types::Json<Vec<HttpRequestHeader>>",
|
||||
"ordinal": 15,
|
||||
@@ -97,14 +97,14 @@
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "573db23160de025e5c72efb90be7fff5e3ec4619b962d149fdd4d618fe02c680"
|
||||
"hash": "7a7bc4df7e52ad3a987c97af8f43b46381e2cc16ba0c553713d0b6c64354eb39"
|
||||
}
|
||||
@@ -1,74 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, created_at, updated_at, name, description, setting_request_timeout,\n setting_follow_redirects, setting_validate_certificates,\n variables AS \"variables!: sqlx::types::Json<Vec<EnvironmentVariable>>\"\n FROM workspaces\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "model",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 2,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"ordinal": 3,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "setting_request_timeout",
|
||||
"ordinal": 6,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "setting_follow_redirects",
|
||||
"ordinal": 7,
|
||||
"type_info": "Bool"
|
||||
},
|
||||
{
|
||||
"name": "setting_validate_certificates",
|
||||
"ordinal": 8,
|
||||
"type_info": "Bool"
|
||||
},
|
||||
{
|
||||
"name": "variables!: sqlx::types::Json<Vec<EnvironmentVariable>>",
|
||||
"ordinal": 9,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "8dfbae65ddec905ea3734448cc9f7029b6c78de227c6fa3a85d75d0a7f21e0e9"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO folders (\n id, workspace_id, folder_id, name, sort_priority\n )\n VALUES (?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n folder_id = excluded.folder_id,\n sort_priority = excluded.sort_priority\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 5
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "9238f94c688d91f42627e5b73c627c514bab4039ab5edadc79b77dfdfd63b208"
|
||||
}
|
||||
@@ -1,80 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, request_id, connection_id, created_at, content, status, error,\n event_type AS \"event_type!: GrpcEventType\",\n metadata AS \"metadata!: sqlx::types::Json<HashMap<String, String>>\"\n FROM grpc_events\n WHERE id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "model",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "workspace_id",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "request_id",
|
||||
"ordinal": 3,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "connection_id",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 5,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "content",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "status",
|
||||
"ordinal": 7,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "error",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "event_type!: GrpcEventType",
|
||||
"ordinal": 9,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "metadata!: sqlx::types::Json<HashMap<String, String>>",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "92d8f003a8f7df692345f2d2fd2504c9222645976e3433e32e190f4ee4bf100d"
|
||||
}
|
||||
@@ -1,74 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, created_at, updated_at, name, description, setting_request_timeout,\n setting_follow_redirects, setting_validate_certificates,\n variables AS \"variables!: sqlx::types::Json<Vec<EnvironmentVariable>>\"\n FROM workspaces WHERE id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "model",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 2,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"ordinal": 3,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "setting_request_timeout",
|
||||
"ordinal": 6,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "setting_follow_redirects",
|
||||
"ordinal": 7,
|
||||
"type_info": "Bool"
|
||||
},
|
||||
{
|
||||
"name": "setting_validate_certificates",
|
||||
"ordinal": 8,
|
||||
"type_info": "Bool"
|
||||
},
|
||||
{
|
||||
"name": "variables!: sqlx::types::Json<Vec<EnvironmentVariable>>",
|
||||
"ordinal": 9,
|
||||
"type_info": "Null"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "9ba3f783238b77637ffded4171b2fbb5e5ad0be952a0d832448d65cc5f0effc1"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n UPDATE grpc_connections\n SET (elapsed) = (-1)\n WHERE elapsed = 0;\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "a690a04cd1ebe8c3dbfd0cd98ae4ef093a1696d7b7ecaf694d12e5fafd62b685"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO cookie_jars (\n id, workspace_id, name, cookies\n )\n VALUES (?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n cookies = excluded.cookies\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "b3fae40a793a6724dd2286a9ca4bc0a9c000a631ee0d751a9dc4f3e76de3d57c"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n DELETE FROM cookie_jars\n WHERE id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "b98609f65dd3a6bbd1ea8dc8bed2840a6d5d13fec1bbc0aa61ca4f60de98a09c"
|
||||
}
|
||||
@@ -1,116 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, request_id, updated_at, created_at, url, status,\n status_reason, content_length, body_path, elapsed, elapsed_headers, error,\n version, remote_addr,\n headers AS \"headers!: sqlx::types::Json<Vec<HttpResponseHeader>>\"\n FROM http_responses\n WHERE workspace_id = ?\n ORDER BY created_at DESC\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "model",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "workspace_id",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "request_id",
|
||||
"ordinal": 3,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"ordinal": 4,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 5,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "url",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "status",
|
||||
"ordinal": 7,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "status_reason",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "content_length",
|
||||
"ordinal": 9,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "body_path",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "elapsed",
|
||||
"ordinal": 11,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "elapsed_headers",
|
||||
"ordinal": 12,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "error",
|
||||
"ordinal": 13,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "version",
|
||||
"ordinal": 14,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "remote_addr",
|
||||
"ordinal": 15,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "headers!: sqlx::types::Json<Vec<HttpResponseHeader>>",
|
||||
"ordinal": 16,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "d5e087caa163a0c7bfbbadf07eccb80105501cf5baab706aa6792dfe90af8fc9"
|
||||
}
|
||||
@@ -1,98 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, created_at, updated_at, theme, appearance,\n theme_dark, theme_light, update_channel,\n interface_font_size, interface_scale, editor_font_size, editor_soft_wrap,\n open_workspace_new_window\n FROM settings\n WHERE id = 'default'\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "model",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 2,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"ordinal": 3,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "theme",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "appearance",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "theme_dark",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "theme_light",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "update_channel",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "interface_font_size",
|
||||
"ordinal": 9,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "interface_scale",
|
||||
"ordinal": 10,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "editor_font_size",
|
||||
"ordinal": 11,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "editor_soft_wrap",
|
||||
"ordinal": 12,
|
||||
"type_info": "Bool"
|
||||
},
|
||||
{
|
||||
"name": "open_workspace_new_window",
|
||||
"ordinal": 13,
|
||||
"type_info": "Bool"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "daa61066517df649e7c80a8ce407839ad502e8e5e43aa8c02e049865acbbae75"
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, created_at, updated_at, name,\n variables AS \"variables!: sqlx::types::Json<Vec<EnvironmentVariable>>\"\n FROM environments\n WHERE id = ?\n ",
|
||||
"query": "\n SELECT id, model, created_at, updated_at, name, description,\n variables AS \"variables!: sqlx::types::Json<Vec<EnvironmentVariable>>\"\n FROM workspaces WHERE id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
@@ -13,23 +13,23 @@
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "workspace_id",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 3,
|
||||
"ordinal": 2,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"ordinal": 4,
|
||||
"ordinal": 3,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
@@ -52,5 +52,5 @@
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "5765e9565a8b89c5bc2d72197e0e4a1093739e9abba69f6fe5527d453fab4db8"
|
||||
"hash": "dbe457087a7bccbca4c1d673aa8e547df04530a7f860a6ccd4e20126a7cdfa4f"
|
||||
}
|
||||
src-tauri/.sqlx/query-dcc2f405f8e29d0599d86bcde509187e9cc5fc647067eaa5c738cb24e2f081e5.json (generated, new file)
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO environments (\n id,\n workspace_id,\n name,\n variables\n )\n VALUES (?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n variables = excluded.variables\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 4
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "dcc2f405f8e29d0599d86bcde509187e9cc5fc647067eaa5c738cb24e2f081e5"
|
||||
}
|
||||
@@ -1,104 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, workspace_id, folder_id, created_at, updated_at, name, sort_priority,\n url, service, method, message, authentication_type,\n authentication AS \"authentication!: Json<HashMap<String, JsonValue>>\",\n metadata AS \"metadata!: sqlx::types::Json<Vec<GrpcMetadataEntry>>\"\n FROM grpc_requests\n WHERE id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "model",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "workspace_id",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "folder_id",
|
||||
"ordinal": 3,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 4,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"ordinal": 5,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "sort_priority",
|
||||
"ordinal": 7,
|
||||
"type_info": "Float"
|
||||
},
|
||||
{
|
||||
"name": "url",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "service",
|
||||
"ordinal": 9,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "method",
|
||||
"ordinal": 10,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "message",
|
||||
"ordinal": 11,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "authentication_type",
|
||||
"ordinal": 12,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "authentication!: Json<HashMap<String, JsonValue>>",
|
||||
"ordinal": 13,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "metadata!: sqlx::types::Json<Vec<GrpcMetadataEntry>>",
|
||||
"ordinal": 14,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "e1cdba43bd938772631263966a9bee263923c387f4864917f36a04043bec4857"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n UPDATE http_responses SET (\n elapsed, elapsed_headers, url, status, status_reason, content_length, body_path,\n error, headers, version, remote_addr, updated_at\n ) = (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP) WHERE id = ?;\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 12
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "e7124f5570076bfd65985744f48d8e12cf29d6d243fffdd62ade2ab70c7bddda"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n DELETE FROM grpc_requests\n WHERE id = ?\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "fe0652396bc30d926cf99083651c1cbd668bcf00ebe1a5f36616700c84972b39"
|
||||
}
|
||||
src-tauri/Cargo.lock (generated; diff suppressed because it is too large)
@@ -1,58 +1,58 @@
workspace = { members = ["grpc", "templates", "tauri-plugin-plugin-runtime"] }

[package]
name = "yaak-app"
version = "0.0.0"
description = "A network protocol testing utility app"
authors = ["Gregory Schier"]
license = "MIT"
repository = "https://github.com/gschier/yaak-app"
edition = "2021"

# Produce a library for mobile support
[lib]
name = "tauri_app_lib"
crate-type = ["staticlib", "cdylib", "lib"]

[profile.release]
strip = true # Automatically strip symbols from the binary.

[build-dependencies]
tauri-build = { version = "2.0.0-beta", features = [] }
tauri-build = { version = "1.2", features = [] }

[target.'cfg(target_os = "macos")'.dependencies]
objc = "0.2.7"
cocoa = "0.25.0"

[target.'cfg(target_os = "linux")'.dependencies]
openssl-sys = { version = "0.9", features = ["vendored"] } # For Ubuntu installation to work

[dependencies]
grpc = { path = "./grpc" }
templates = { path = "./templates" }
plugin_runtime = { path = "tauri-plugin-plugin-runtime" }
anyhow = "1.0.86"
base64 = "0.22.0"
chrono = { version = "0.4.31", features = ["serde"] }
datetime = "0.5.2"
hex_color = "3.0.0"
http = "1"
log = "0.4.21"
base64 = "0.21.0"
boa_engine = "0.17.3"
boa_runtime = "0.17.3"
chrono = { version = "0.4.23", features = ["serde"] }
futures = "0.3.26"
http = "0.2.8"
rand = "0.8.5"
regex = "1.10.2"
reqwest = { version = "0.12.4", features = ["multipart", "cookies", "gzip", "brotli", "deflate", "json", "native-tls-alpn"] }
reqwest_cookie_store = "0.8.0"
serde = { version = "1.0.198", features = ["derive"] }
serde_json = { version = "1.0.116", features = ["raw_value"] }
serde_yaml = "0.9.34"
sqlx = { version = "0.7.4", features = ["sqlite", "runtime-tokio-rustls", "json", "chrono", "time"] }
tauri = { version = "2.0.0-beta", features = ["devtools", "protocol-asset"] }
tauri-plugin-clipboard-manager = "2.1.0-beta"
tauri-plugin-dialog = "2.0.0-beta"
tauri-plugin-fs = "2.0.0-beta"
tauri-plugin-log = { version = "2.0.0-beta", features = ["colored"] }
tauri-plugin-os = "2.0.0-beta"
tauri-plugin-shell = "2.0.0-beta"
tauri-plugin-updater = "2.0.0-beta"
tauri-plugin-window-state = "2.0.0-beta"
tokio = { version = "1.36.0", features = ["sync"] }
tokio-stream = "0.1.15"
uuid = "1.7.0"
thiserror = "1.0.61"
mime_guess = "2.0.5"
reqwest = { version = "0.11.14", features = ["json", "multipart"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["raw_value"] }
sqlx = { version = "0.7.2", features = ["sqlite", "runtime-tokio-rustls", "json", "chrono", "time"] }
tauri = { version = "1.3", features = [
  "config-toml",
  "devtools",
  "fs-read-file",
  "os-all",
  "protocol-asset",
  "shell-open",
  "updater",
  "window-start-dragging",
  "dialog-open",
  "dialog-save",
  "window-set-title",
] }
tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
tauri-plugin-log = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1", features = ["colored"] }
tokio = { version = "1.25.0", features = ["sync"] }
uuid = "1.3.0"
log = "0.4.20"
datetime = "0.5.2"

[features]
# by default Tauri runs in production mode
# when `tauri dev` runs it is executed with `cargo run --no-default-features` if `devPath` is an URL
default = ["custom-protocol"]
# this feature is used used for production builds where `devPath` points to the filesystem
# DO NOT remove this
custom-protocol = ["tauri/custom-protocol"]

@@ -1,46 +0,0 @@
{
  "$schema": "../gen/schemas/capabilities.json",
  "identifier": "main",
  "description": "Main permissions",
  "local": true,
  "windows": ["*"],
  "permissions": [
    "os:allow-os-type",
    "event:allow-emit",
    "clipboard-manager:allow-write-text",
    "clipboard-manager:allow-read-text",
    "dialog:allow-open",
    "dialog:allow-save",
    "event:allow-listen",
    "event:allow-unlisten",
    "fs:allow-read-file",
    "fs:allow-read-text-file",
    {
      "identifier": "fs:scope",
      "allow": [{ "path": "$APPDATA" }, { "path": "$APPDATA/**" }]
    },
    "shell:allow-open",
    "webview:allow-set-webview-zoom",
    "window:allow-close",
    "window:allow-is-fullscreen",
    "window:allow-maximize",
    "window:allow-minimize",
    "window:allow-toggle-maximize",
    "window:allow-set-decorations",
    "window:allow-set-title",
    "window:allow-start-dragging",
    "window:allow-unmaximize",
    "window:allow-theme",
    "clipboard-manager:allow-read-text",
    "clipboard-manager:allow-write-text"
  ]
}
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
{"main":{"identifier":"main","description":"Main permissions","local":true,"windows":["*"],"permissions":["os:allow-os-type","event:allow-emit","clipboard-manager:allow-write-text","clipboard-manager:allow-read-text","dialog:allow-open","dialog:allow-save","event:allow-listen","event:allow-unlisten","fs:allow-read-file","fs:allow-read-text-file",{"identifier":"fs:scope","allow":[{"path":"$APPDATA"},{"path":"$APPDATA/**"}]},"shell:allow-open","webview:allow-set-webview-zoom","window:allow-close","window:allow-is-fullscreen","window:allow-maximize","window:allow-minimize","window:allow-toggle-maximize","window:allow-set-decorations","window:allow-set-title","window:allow-start-dragging","window:allow-unmaximize","window:allow-theme","clipboard-manager:allow-read-text","clipboard-manager:allow-write-text"]}}
File diff suppressed because it is too large (4 files).
@@ -1,24 +0,0 @@
[package]
name = "grpc"
version = "0.1.0"
edition = "2021"

[dependencies]
tonic = "0.10.2"
prost = "0.12"
tokio = { version = "1.0", features = ["macros", "rt-multi-thread", "fs"] }
tonic-reflection = "0.10.2"
tokio-stream = "0.1.14"
prost-types = "0.12.3"
serde = { version = "1.0.196", features = ["derive"] }
serde_json = "1.0.113"
prost-reflect = { version = "0.12.0", features = ["serde", "derive"] }
log = "0.4.20"
anyhow = "1.0.79"
hyper = { version = "0.14" }
hyper-rustls = { version = "0.24.0", features = ["http2"] }
uuid = { version = "1.7.0", features = ["v4"] }
tauri = { version = "2.0.0-beta" }
tauri-plugin-shell = "2.0.0-beta"
md5 = "0.7.0"
dunce = "1.0.4"
@@ -1,52 +0,0 @@
use prost_reflect::prost::Message;
use prost_reflect::{DynamicMessage, MethodDescriptor};
use tonic::codec::{Codec, DecodeBuf, Decoder, EncodeBuf, Encoder};
use tonic::Status;

#[derive(Clone)]
pub struct DynamicCodec(MethodDescriptor);

impl DynamicCodec {
    #[allow(dead_code)]
    pub fn new(md: MethodDescriptor) -> Self {
        Self(md)
    }
}

impl Codec for DynamicCodec {
    type Encode = DynamicMessage;
    type Decode = DynamicMessage;
    type Encoder = Self;
    type Decoder = Self;

    fn encoder(&mut self) -> Self::Encoder {
        self.clone()
    }

    fn decoder(&mut self) -> Self::Decoder {
        self.clone()
    }
}

impl Encoder for DynamicCodec {
    type Item = DynamicMessage;
    type Error = Status;

    fn encode(&mut self, item: Self::Item, dst: &mut EncodeBuf<'_>) -> Result<(), Self::Error> {
        item.encode(dst)
            .expect("buffer is too small to decode this message");
        Ok(())
    }
}

impl Decoder for DynamicCodec {
    type Item = DynamicMessage;
    type Error = Status;

    fn decode(&mut self, src: &mut DecodeBuf<'_>) -> Result<Option<Self::Item>, Self::Error> {
        let mut msg = DynamicMessage::new(self.0.output());
        msg.merge(src)
            .map_err(|err| Status::internal(err.to_string()))?;
        Ok(Some(msg))
    }
}
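For orientation, a sketch of where a codec like this gets used: tonic's generic client takes the codec plus a request path for each call. This mirrors GrpcConnection::unary further down in this diff; the helper below is illustrative, not part of the diff.

// Sketch: handing DynamicCodec to tonic's generic client for a unary call.
// method_desc_to_path is the helper defined in proto.rs later in this diff.
use tonic::IntoRequest;

async fn call_unary(
    conn: hyper::Client<hyper_rustls::HttpsConnector<hyper::client::HttpConnector>, tonic::body::BoxBody>,
    uri: tonic::transport::Uri,
    md: prost_reflect::MethodDescriptor,
    req: prost_reflect::DynamicMessage,
) -> Result<prost_reflect::DynamicMessage, tonic::Status> {
    let mut client = tonic::client::Grpc::with_origin(conn, uri);
    client.ready().await.map_err(|e| tonic::Status::unknown(e.to_string()))?;
    let path = method_desc_to_path(&md);
    let codec = DynamicCodec::new(md);
    Ok(client.unary(req.into_request(), path, codec).await?.into_inner())
}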
@@ -1,179 +0,0 @@
use prost_reflect::{DescriptorPool, MessageDescriptor};
use prost_types::field_descriptor_proto;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[derive(Default, Serialize, Deserialize)]
#[serde(default, rename_all = "camelCase")]
pub struct JsonSchemaEntry {
    #[serde(skip_serializing_if = "Option::is_none")]
    title: Option<String>,

    #[serde(rename = "type")]
    type_: JsonType,

    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,

    #[serde(skip_serializing_if = "Option::is_none")]
    properties: Option<HashMap<String, JsonSchemaEntry>>,

    #[serde(rename = "enum", skip_serializing_if = "Option::is_none")]
    enum_: Option<Vec<String>>,

    /// Don't allow any other properties in the object
    additional_properties: bool,

    /// Set all properties to required
    #[serde(skip_serializing_if = "Option::is_none")]
    required: Option<Vec<String>>,

    #[serde(skip_serializing_if = "Option::is_none")]
    items: Option<Box<JsonSchemaEntry>>,
}

enum JsonType {
    String,
    Number,
    Object,
    Array,
    Boolean,
    Null,
    _UNKNOWN,
}

impl Default for JsonType {
    fn default() -> Self {
        JsonType::_UNKNOWN
    }
}

impl serde::Serialize for JsonType {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            JsonType::String => serializer.serialize_str("string"),
            JsonType::Number => serializer.serialize_str("number"),
            JsonType::Object => serializer.serialize_str("object"),
            JsonType::Array => serializer.serialize_str("array"),
            JsonType::Boolean => serializer.serialize_str("boolean"),
            JsonType::Null => serializer.serialize_str("null"),
            JsonType::_UNKNOWN => serializer.serialize_str("unknown"),
        }
    }
}

impl<'de> serde::Deserialize<'de> for JsonType {
    fn deserialize<D>(deserializer: D) -> Result<JsonType, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        match s.as_str() {
            "string" => Ok(JsonType::String),
            "number" => Ok(JsonType::Number),
            "object" => Ok(JsonType::Object),
            "array" => Ok(JsonType::Array),
            "boolean" => Ok(JsonType::Boolean),
            "null" => Ok(JsonType::Null),
            _ => Ok(JsonType::_UNKNOWN),
        }
    }
}

pub fn message_to_json_schema(
    pool: &DescriptorPool,
    message: MessageDescriptor,
) -> JsonSchemaEntry {
    let mut schema = JsonSchemaEntry {
        title: Some(message.name().to_string()),
        type_: JsonType::Object, // Messages are objects
        ..Default::default()
    };

    let mut properties = HashMap::new();
    message.fields().for_each(|f| match f.kind() {
        prost_reflect::Kind::Message(m) => {
            properties.insert(f.name().to_string(), message_to_json_schema(pool, m));
        }
        prost_reflect::Kind::Enum(e) => {
            properties.insert(
                f.name().to_string(),
                JsonSchemaEntry {
                    type_: map_proto_type_to_json_type(f.field_descriptor_proto().r#type()),
                    enum_: Some(e.values().map(|v| v.name().to_string()).collect::<Vec<_>>()),
                    ..Default::default()
                },
            );
        }
        _ => {
            // TODO: Handle repeated label
            match f.field_descriptor_proto().label() {
                field_descriptor_proto::Label::Repeated => {
                    // TODO: Handle more complex repeated types. This just handles primitives for now
                    properties.insert(
                        f.name().to_string(),
                        JsonSchemaEntry {
                            type_: JsonType::Array,
                            items: Some(Box::new(JsonSchemaEntry {
                                type_: map_proto_type_to_json_type(
                                    f.field_descriptor_proto().r#type(),
                                ),
                                ..Default::default()
                            })),
                            ..Default::default()
                        },
                    );
                }
                _ => {
                    // Regular JSON field
                    properties.insert(
                        f.name().to_string(),
                        JsonSchemaEntry {
                            type_: map_proto_type_to_json_type(f.field_descriptor_proto().r#type()),
                            ..Default::default()
                        },
                    );
                }
            };
        }
    });

    schema.properties = Some(properties);

    // All proto 3 fields are optional, so maybe we could
    // make this a setting?
    // schema.required = Some(
    //     message
    //         .fields()
    //         .map(|f| f.name().to_string())
    //         .collect::<Vec<_>>(),
    // );

    schema
}

fn map_proto_type_to_json_type(proto_type: field_descriptor_proto::Type) -> JsonType {
    match proto_type {
        field_descriptor_proto::Type::Double => JsonType::Number,
        field_descriptor_proto::Type::Float => JsonType::Number,
        field_descriptor_proto::Type::Int64 => JsonType::Number,
        field_descriptor_proto::Type::Uint64 => JsonType::Number,
        field_descriptor_proto::Type::Int32 => JsonType::Number,
        field_descriptor_proto::Type::Fixed64 => JsonType::Number,
        field_descriptor_proto::Type::Fixed32 => JsonType::Number,
        field_descriptor_proto::Type::Bool => JsonType::Boolean,
        field_descriptor_proto::Type::String => JsonType::String,
        field_descriptor_proto::Type::Group => JsonType::_UNKNOWN,
        field_descriptor_proto::Type::Message => JsonType::Object,
        field_descriptor_proto::Type::Bytes => JsonType::String,
        field_descriptor_proto::Type::Uint32 => JsonType::Number,
        field_descriptor_proto::Type::Enum => JsonType::String,
        field_descriptor_proto::Type::Sfixed32 => JsonType::Number,
        field_descriptor_proto::Type::Sfixed64 => JsonType::Number,
        field_descriptor_proto::Type::Sint32 => JsonType::Number,
        field_descriptor_proto::Type::Sint64 => JsonType::Number,
    }
}
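As a concrete reference for what this module emits, a sketch of turning a message descriptor into the schema string stored on MethodDefinition; the message name and field layout mentioned in the comment are hypothetical, not taken from this diff.

// Sketch: schema string for a message, as done in GrpcHandle::services_from_pool.
fn schema_for(pool: &prost_reflect::DescriptorPool, msg_name: &str) -> Option<String> {
    let msg = pool.get_message_by_name(msg_name)?;
    let schema = message_to_json_schema(pool, msg);
    // For a hypothetical message with a string `name` field and a repeated int32
    // `ids` field, the result serializes roughly to:
    //   {"title":"HelloRequest","type":"object",
    //    "properties":{"name":{"type":"string",...},
    //                  "ids":{"type":"array","items":{"type":"number",...},...}},
    //    "additionalProperties":false}
    serde_json::to_string(&schema).ok()
}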
@@ -1,52 +0,0 @@
use prost_reflect::{DynamicMessage, MethodDescriptor, SerializeOptions};
use serde::{Deserialize, Serialize};
use serde_json::Deserializer;

mod codec;
mod json_schema;
pub mod manager;
mod proto;

pub use tonic::metadata::*;
pub use tonic::Code;

pub fn serialize_options() -> SerializeOptions {
    SerializeOptions::new().skip_default_fields(false)
}

#[derive(Serialize, Deserialize, Debug, Default)]
#[serde(default, rename_all = "camelCase")]
pub struct ServiceDefinition {
    pub name: String,
    pub methods: Vec<MethodDefinition>,
}

#[derive(Serialize, Deserialize, Debug, Default)]
#[serde(default, rename_all = "camelCase")]
pub struct MethodDefinition {
    pub name: String,
    pub schema: String,
    pub client_streaming: bool,
    pub server_streaming: bool,
}

static SERIALIZE_OPTIONS: &'static SerializeOptions = &SerializeOptions::new()
    .skip_default_fields(false)
    .stringify_64_bit_integers(false);

pub fn serialize_message(msg: &DynamicMessage) -> Result<String, String> {
    let mut buf = Vec::new();
    let mut se = serde_json::Serializer::pretty(&mut buf);
    msg.serialize_with_options(&mut se, SERIALIZE_OPTIONS)
        .map_err(|e| e.to_string())?;
    let s = String::from_utf8(buf).expect("serde_json to emit valid utf8");
    Ok(s)
}

pub fn deserialize_message(msg: &str, method: MethodDescriptor) -> Result<DynamicMessage, String> {
    let mut deserializer = Deserializer::from_str(&msg);
    let req_message = DynamicMessage::deserialize(method.input(), &mut deserializer)
        .map_err(|e| e.to_string())?;
    deserializer.end().map_err(|e| e.to_string())?;
    Ok(req_message)
}
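A small sketch of the round trip these two helpers provide; the method descriptor comes from wherever the caller looked it up, for example GrpcConnection::method further down.

// Sketch: JSON string -> DynamicMessage (typed by method.input()) -> pretty JSON.
fn round_trip(method: prost_reflect::MethodDescriptor, json: &str) -> Result<String, String> {
    let msg = deserialize_message(json, method)?;
    serialize_message(&msg)
}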
@@ -1,304 +0,0 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::str::FromStr;

use hyper::client::HttpConnector;
use hyper::Client;
use hyper_rustls::HttpsConnector;
pub use prost_reflect::DynamicMessage;
use prost_reflect::{DescriptorPool, MethodDescriptor, ServiceDescriptor};
use serde_json::Deserializer;
use tauri::AppHandle;
use tokio_stream::wrappers::ReceiverStream;
use tonic::body::BoxBody;
use tonic::metadata::{MetadataKey, MetadataValue};
use tonic::transport::Uri;
use tonic::{IntoRequest, IntoStreamingRequest, Request, Response, Status, Streaming};

use crate::codec::DynamicCodec;
use crate::proto::{
    fill_pool_from_files, fill_pool_from_reflection, get_transport, method_desc_to_path,
};
use crate::{json_schema, MethodDefinition, ServiceDefinition};

#[derive(Clone)]
pub struct GrpcConnection {
    pool: DescriptorPool,
    conn: Client<HttpsConnector<HttpConnector>, BoxBody>,
    pub uri: Uri,
}

#[derive(Default)]
pub struct StreamError {
    pub message: String,
    pub status: Option<Status>,
}

impl From<String> for StreamError {
    fn from(value: String) -> Self {
        StreamError {
            message: value.to_string(),
            status: None,
        }
    }
}

impl From<Status> for StreamError {
    fn from(s: Status) -> Self {
        StreamError {
            message: s.message().to_string(),
            status: Some(s),
        }
    }
}

impl GrpcConnection {
    pub fn service(&self, service: &str) -> Result<ServiceDescriptor, String> {
        let service = self
            .pool
            .get_service_by_name(service)
            .ok_or("Failed to find service")?;
        Ok(service)
    }

    pub fn method(&self, service: &str, method: &str) -> Result<MethodDescriptor, String> {
        let service = self.service(service)?;
        let method = service
            .methods()
            .find(|m| m.name() == method)
            .ok_or("Failed to find method")?;
        Ok(method)
    }

    pub async fn unary(
        &self,
        service: &str,
        method: &str,
        message: &str,
        metadata: HashMap<String, String>,
    ) -> Result<Response<DynamicMessage>, StreamError> {
        let method = &self.method(&service, &method)?;
        let input_message = method.input();

        let mut deserializer = Deserializer::from_str(message);
        let req_message = DynamicMessage::deserialize(input_message, &mut deserializer)
            .map_err(|e| e.to_string())?;
        deserializer.end().unwrap();

        let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());

        let mut req = req_message.into_request();
        decorate_req(metadata, &mut req).map_err(|e| e.to_string())?;

        let path = method_desc_to_path(method);
        let codec = DynamicCodec::new(method.clone());
        client.ready().await.unwrap();

        Ok(client.unary(req, path, codec).await?)
    }

    pub async fn streaming(
        &self,
        service: &str,
        method: &str,
        stream: ReceiverStream<DynamicMessage>,
        metadata: HashMap<String, String>,
    ) -> Result<Response<Streaming<DynamicMessage>>, StreamError> {
        let method = &self.method(&service, &method)?;
        let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());

        let mut req = stream.into_streaming_request();

        decorate_req(metadata, &mut req).map_err(|e| e.to_string())?;

        let path = method_desc_to_path(method);
        let codec = DynamicCodec::new(method.clone());
        client.ready().await.map_err(|e| e.to_string())?;
        Ok(client.streaming(req, path, codec).await?)
    }

    pub async fn client_streaming(
        &self,
        service: &str,
        method: &str,
        stream: ReceiverStream<DynamicMessage>,
        metadata: HashMap<String, String>,
    ) -> Result<Response<DynamicMessage>, StreamError> {
        let method = &self.method(&service, &method)?;
        let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
        let mut req = stream.into_streaming_request();
        decorate_req(metadata, &mut req).map_err(|e| e.to_string())?;

        let path = method_desc_to_path(method);
        let codec = DynamicCodec::new(method.clone());
        client.ready().await.unwrap();
        client
            .client_streaming(req, path, codec)
            .await
            .map_err(|e| StreamError {
                message: e.message().to_string(),
                status: Some(e),
            })
    }

    pub async fn server_streaming(
        &self,
        service: &str,
        method: &str,
        message: &str,
        metadata: HashMap<String, String>,
    ) -> Result<Response<Streaming<DynamicMessage>>, StreamError> {
        let method = &self.method(&service, &method)?;
        let input_message = method.input();

        let mut deserializer = Deserializer::from_str(message);
        let req_message = DynamicMessage::deserialize(input_message, &mut deserializer)
            .map_err(|e| e.to_string())?;
        deserializer.end().unwrap();

        let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());

        let mut req = req_message.into_request();
        decorate_req(metadata, &mut req).map_err(|e| e.to_string())?;

        let path = method_desc_to_path(method);
        let codec = DynamicCodec::new(method.clone());
        client.ready().await.map_err(|e| e.to_string())?;
        Ok(client.server_streaming(req, path, codec).await?)
    }
}

pub struct GrpcHandle {
    app_handle: AppHandle,
    pools: HashMap<String, DescriptorPool>,
}

impl GrpcHandle {
    pub fn new(app_handle: &AppHandle) -> Self {
        let pools = HashMap::new();
        Self {
            pools,
            app_handle: app_handle.clone(),
        }
    }
}

impl GrpcHandle {
    pub async fn reflect(
        &mut self,
        id: &str,
        uri: &str,
        proto_files: &Vec<PathBuf>,
    ) -> Result<(), String> {
        let pool = if proto_files.is_empty() {
            let full_uri = uri_from_str(uri)?;
            fill_pool_from_reflection(&full_uri).await
        } else {
            fill_pool_from_files(&self.app_handle, proto_files).await
        }?;

        self.pools
            .insert(make_pool_key(id, uri, proto_files), pool.clone());
        Ok(())
    }

    pub async fn services(
        &mut self,
        id: &str,
        uri: &str,
        proto_files: &Vec<PathBuf>,
    ) -> Result<Vec<ServiceDefinition>, String> {
        // Ensure reflection is up-to-date
        self.reflect(id, uri, proto_files).await?;

        let pool = self
            .get_pool(id, uri, proto_files)
            .ok_or("Failed to get pool".to_string())?;
        Ok(self.services_from_pool(&pool))
    }

    fn services_from_pool(&self, pool: &DescriptorPool) -> Vec<ServiceDefinition> {
        pool.services()
            .map(|s| {
                let mut def = ServiceDefinition {
                    name: s.full_name().to_string(),
                    methods: vec![],
                };
                for method in s.methods() {
                    let input_message = method.input();
                    def.methods.push(MethodDefinition {
                        name: method.name().to_string(),
                        server_streaming: method.is_server_streaming(),
                        client_streaming: method.is_client_streaming(),
                        schema: serde_json::to_string_pretty(&json_schema::message_to_json_schema(
                            &pool,
                            input_message,
                        ))
                        .unwrap(),
                    })
                }
                def
            })
            .collect::<Vec<_>>()
    }

    pub async fn connect(
        &mut self,
        id: &str,
        uri: &str,
        proto_files: &Vec<PathBuf>,
    ) -> Result<GrpcConnection, String> {
        self.reflect(id, uri, proto_files).await?;
        let pool = self
            .get_pool(id, uri, proto_files)
            .ok_or("Failed to get pool")?;

        let uri = uri_from_str(uri)?;
        let conn = get_transport();
        let connection = GrpcConnection {
            pool: pool.clone(),
            conn,
            uri,
        };
        Ok(connection)
    }

    fn get_pool(&self, id: &str, uri: &str, proto_files: &Vec<PathBuf>) -> Option<&DescriptorPool> {
        self.pools.get(make_pool_key(id, uri, proto_files).as_str())
    }
}

fn decorate_req<T>(metadata: HashMap<String, String>, req: &mut Request<T>) -> Result<(), String> {
    for (k, v) in metadata {
        req.metadata_mut().insert(
            MetadataKey::from_str(k.as_str()).map_err(|e| e.to_string())?,
            MetadataValue::from_str(v.as_str()).map_err(|e| e.to_string())?,
        );
    }
    Ok(())
}

fn uri_from_str(uri_str: &str) -> Result<Uri, String> {
    match Uri::from_str(uri_str) {
        Ok(uri) => Ok(uri),
        Err(err) => {
            // Uri::from_str basically only returns "invalid format" so we add more context here
            Err(format!("Failed to parse URL, {}", err.to_string()))
        }
    }
}

fn make_pool_key(id: &str, uri: &str, proto_files: &Vec<PathBuf>) -> String {
    let pool_key = format!(
        "{}::{}::{}",
        id,
        uri,
        proto_files
            .iter()
            .map(|p| p.to_string_lossy().to_string())
            .collect::<Vec<String>>()
            .join(":")
    );

    format!("{:x}", md5::compute(pool_key))
}
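Putting the manager together end-to-end, a hedged sketch of how a caller (for example a Tauri command elsewhere in the app) might drive it for a unary call; the request id, the empty proto-file list, and the metadata map are illustrative assumptions, not code from this diff.

// Sketch only: wiring GrpcHandle -> GrpcConnection -> unary, mirroring the API above.
async fn send_unary(
    app_handle: &tauri::AppHandle,
    url: &str,
    service: &str,
    method: &str,
    body_json: &str,
) -> Result<String, String> {
    let mut handle = GrpcHandle::new(app_handle);
    // No .proto files supplied, so connect() falls back to server reflection.
    let conn = handle.connect("req-id", url, &vec![]).await?;
    let resp = conn
        .unary(service, method, body_json, std::collections::HashMap::new())
        .await
        .map_err(|e| e.message)?;
    // serialize_message comes from lib.rs above.
    serialize_message(resp.get_ref())
}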
@@ -1,252 +0,0 @@
use std::env::temp_dir;
use std::ops::Deref;
use std::path::PathBuf;
use std::str::FromStr;

use anyhow::anyhow;
use hyper::client::HttpConnector;
use hyper::Client;
use hyper_rustls::{HttpsConnector, HttpsConnectorBuilder};
use log::{debug, warn};
use prost::Message;
use prost_reflect::{DescriptorPool, MethodDescriptor};
use prost_types::{FileDescriptorProto, FileDescriptorSet};
use tauri::path::BaseDirectory;
use tauri::{AppHandle, Manager};
use tauri_plugin_shell::ShellExt;
use tokio::fs;
use tokio_stream::StreamExt;
use tonic::body::BoxBody;
use tonic::codegen::http::uri::PathAndQuery;
use tonic::transport::Uri;
use tonic::Request;
use tonic_reflection::pb::server_reflection_client::ServerReflectionClient;
use tonic_reflection::pb::server_reflection_request::MessageRequest;
use tonic_reflection::pb::server_reflection_response::MessageResponse;
use tonic_reflection::pb::ServerReflectionRequest;

pub async fn fill_pool_from_files(
    app_handle: &AppHandle,
    paths: &Vec<PathBuf>,
) -> Result<DescriptorPool, String> {
    let mut pool = DescriptorPool::new();
    let random_file_name = format!("{}.desc", uuid::Uuid::new_v4());
    let desc_path = temp_dir().join(random_file_name);
    let global_import_dir = app_handle
        .path()
        .resolve("protoc-include", BaseDirectory::Resource)
        .expect("failed to resolve protoc include directory");

    // HACK: Remove UNC prefix for Windows paths
    let global_import_dir = dunce::simplified(global_import_dir.as_path())
        .to_string_lossy()
        .to_string();
    let desc_path = dunce::simplified(desc_path.as_path());

    let mut args = vec![
        "--include_imports".to_string(),
        "--include_source_info".to_string(),
        "-I".to_string(),
        global_import_dir,
        "-o".to_string(),
        desc_path.to_string_lossy().to_string(),
    ];

    for p in paths {
        if p.as_path().exists() {
            args.push(p.to_string_lossy().to_string());
        } else {
            continue;
        }

        let parent = p.as_path().parent();
        if let Some(parent_path) = parent {
            args.push("-I".to_string());
            args.push(parent_path.to_string_lossy().to_string());
            args.push("-I".to_string());
            args.push(parent_path.parent().unwrap().to_string_lossy().to_string());
        } else {
            debug!("ignoring {:?} since it does not exist.", parent)
        }
    }

    let out = app_handle
        .shell()
        .sidecar("yaakprotoc")
        .expect("yaakprotoc not found")
        .args(args)
        .output()
        .await
        .expect("yaakprotoc failed to run");

    if !out.status.success() {
        return Err(format!(
            "protoc failed with status {}: {}",
            out.status.code().unwrap(),
            String::from_utf8_lossy(out.stderr.as_slice())
        ));
    }

    let bytes = fs::read(desc_path).await.map_err(|e| e.to_string())?;
    let fdp = FileDescriptorSet::decode(bytes.deref()).map_err(|e| e.to_string())?;
    pool.add_file_descriptor_set(fdp)
        .map_err(|e| e.to_string())?;

    fs::remove_file(desc_path)
        .await
        .map_err(|e| e.to_string())?;

    Ok(pool)
}

pub async fn fill_pool_from_reflection(uri: &Uri) -> Result<DescriptorPool, String> {
    let mut pool = DescriptorPool::new();
    let mut client = ServerReflectionClient::with_origin(get_transport(), uri.clone());

    for service in list_services(&mut client).await? {
        if service == "grpc.reflection.v1alpha.ServerReflection" {
            continue;
        }
        file_descriptor_set_from_service_name(&service, &mut pool, &mut client).await;
    }

    Ok(pool)
}

pub fn get_transport() -> Client<HttpsConnector<HttpConnector>, BoxBody> {
    let connector = HttpsConnectorBuilder::new().with_native_roots();
    let connector = connector.https_or_http().enable_http2().wrap_connector({
        let mut http_connector = HttpConnector::new();
        http_connector.enforce_http(false);
        http_connector
    });
    Client::builder()
        .pool_max_idle_per_host(0)
        .http2_only(true)
        .build(connector)
}

async fn list_services(
    reflect_client: &mut ServerReflectionClient<Client<HttpsConnector<HttpConnector>, BoxBody>>,
) -> Result<Vec<String>, String> {
    let response =
        send_reflection_request(reflect_client, MessageRequest::ListServices("".into())).await?;

    let list_services_response = match response {
        MessageResponse::ListServicesResponse(resp) => resp,
        _ => panic!("Expected a ListServicesResponse variant"),
    };

    Ok(list_services_response
        .service
        .iter()
        .map(|s| s.name.clone())
        .collect::<Vec<_>>())
}

async fn file_descriptor_set_from_service_name(
    service_name: &str,
    pool: &mut DescriptorPool,
    client: &mut ServerReflectionClient<Client<HttpsConnector<HttpConnector>, BoxBody>>,
) {
    let response = match send_reflection_request(
        client,
        MessageRequest::FileContainingSymbol(service_name.into()),
    )
    .await
    {
        Ok(resp) => resp,
        Err(e) => {
            warn!(
                "Error fetching file descriptor for service {}: {}",
                service_name, e
            );
            return;
        }
    };

    let file_descriptor_response = match response {
        MessageResponse::FileDescriptorResponse(resp) => resp,
        _ => panic!("Expected a FileDescriptorResponse variant"),
    };

    for fd in file_descriptor_response.file_descriptor_proto {
        let fdp = FileDescriptorProto::decode(fd.deref()).unwrap();

        // Add deps first or else we'll get an error
        for dep_name in fdp.clone().dependency {
            file_descriptor_set_by_filename(&dep_name, pool, client).await;
        }

        pool.add_file_descriptor_proto(fdp)
            .expect("add file descriptor proto");
    }
}

async fn file_descriptor_set_by_filename(
    filename: &str,
    pool: &mut DescriptorPool,
    client: &mut ServerReflectionClient<Client<HttpsConnector<HttpConnector>, BoxBody>>,
) {
    // We already fetched this file
    if let Some(_) = pool.get_file_by_name(filename) {
        return;
    }

    let response =
        send_reflection_request(client, MessageRequest::FileByFilename(filename.into())).await;
    let file_descriptor_response = match response {
        Ok(MessageResponse::FileDescriptorResponse(resp)) => resp,
        Ok(_) => {
            panic!("Expected a FileDescriptorResponse variant")
        }
        Err(e) => {
            warn!("Error fetching file descriptor for {}: {}", filename, e);
            return;
        }
    };

    for fd in file_descriptor_response.file_descriptor_proto {
        let fdp = FileDescriptorProto::decode(fd.deref()).unwrap();
        pool.add_file_descriptor_proto(fdp)
            .expect("add file descriptor proto");
    }
}

async fn send_reflection_request(
    client: &mut ServerReflectionClient<Client<HttpsConnector<HttpConnector>, BoxBody>>,
    message: MessageRequest,
) -> Result<MessageResponse, String> {
    let reflection_request = ServerReflectionRequest {
        host: "".into(), // Doesn't matter
        message_request: Some(message),
    };

    let request = Request::new(tokio_stream::once(reflection_request));

    client
        .server_reflection_info(request)
        .await
        .map_err(|e| match e.code() {
            tonic::Code::Unavailable => "Failed to connect to endpoint".to_string(),
            tonic::Code::Unauthenticated => "Authentication failed".to_string(),
            tonic::Code::DeadlineExceeded => "Deadline exceeded".to_string(),
            _ => e.to_string(),
        })?
        .into_inner()
        .next()
        .await
        .expect("steamed response")
        .map_err(|e| e.to_string())?
        .message_response
        .ok_or("No reflection response".to_string())
}

pub fn method_desc_to_path(md: &MethodDescriptor) -> PathAndQuery {
    let full_name = md.full_name();
    let (namespace, method_name) = full_name
        .rsplit_once('.')
        .ok_or_else(|| anyhow!("invalid method path"))
        .expect("invalid method path");
    PathAndQuery::from_str(&format!("/{}/{}", namespace, method_name)).expect("invalid method path")
}
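The path helper at the end is what turns a descriptor's fully-qualified name into the gRPC :path pseudo-header. A tiny sketch of the expected shape, using a hypothetical method name not taken from this diff:

// Sketch: what method_desc_to_path produces. For a method whose full name is
// "yaak.demo.Greeter.SayHello" (hypothetical), the full name is split on the
// last '.' and the result is "/yaak.demo.Greeter/SayHello".
fn example_path(md: &prost_reflect::MethodDescriptor) -> String {
    method_desc_to_path(md).as_str().to_string()
}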
@@ -1,14 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <!-- Enable for v8 execution -->
    <key>com.apple.security.cs.allow-unsigned-executable-memory</key>
    <true/>

    <!-- Re-enable for sandboxing. Currently disabled because auto-updater doesn't work with sandboxing.-->
    <!-- <key>com.apple.security.app-sandbox</key> <true/>-->
    <!-- <key>com.apple.security.files.user-selected.read-write</key> <true/>-->
    <!-- <key>com.apple.security.network.client</key> <true/>-->
</dict>
<dict>
    <!-- Re-enable for sandboxing. Currently disabled because auto-updater doesn't work with sandboxing.-->
    <!-- <key>com.apple.security.app-sandbox</key> <true/>-->
    <!-- <key>com.apple.security.files.user-selected.read-write</key> <true/>-->
    <!-- <key>com.apple.security.network.client</key> <true/>-->
</dict>
</plist>
@@ -1,13 +0,0 @@
CREATE TABLE settings
(
    id                    TEXT NOT NULL PRIMARY KEY,
    model                 TEXT DEFAULT 'settings' NOT NULL,
    created_at            DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    updated_at            DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    follow_redirects      BOOLEAN DEFAULT TRUE NOT NULL,
    validate_certificates BOOLEAN DEFAULT TRUE NOT NULL,
    request_timeout       INTEGER DEFAULT 0 NOT NULL,
    theme                 TEXT DEFAULT 'default' NOT NULL,
    appearance            TEXT DEFAULT 'system' NOT NULL
);
@@ -1,9 +0,0 @@
-- Add existing request-related settings to workspace
ALTER TABLE workspaces ADD COLUMN setting_request_timeout INTEGER DEFAULT '0' NOT NULL;
ALTER TABLE workspaces ADD COLUMN setting_validate_certificates BOOLEAN DEFAULT TRUE NOT NULL;
ALTER TABLE workspaces ADD COLUMN setting_follow_redirects BOOLEAN DEFAULT TRUE NOT NULL;

-- Remove old settings that used to be global
ALTER TABLE settings DROP COLUMN request_timeout;
ALTER TABLE settings DROP COLUMN follow_redirects;
ALTER TABLE settings DROP COLUMN validate_certificates;
@@ -1 +0,0 @@
ALTER TABLE settings ADD COLUMN update_channel TEXT DEFAULT 'stable' NOT NULL;
@@ -1,10 +0,0 @@
CREATE TABLE cookie_jars
(
    id           TEXT NOT NULL PRIMARY KEY,
    model        TEXT DEFAULT 'cookie_jar' NOT NULL,
    created_at   DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    updated_at   DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    name         TEXT NOT NULL,
    cookies      TEXT DEFAULT '[]' NOT NULL,
    workspace_id TEXT NOT NULL
);
@@ -1,3 +0,0 @@
ALTER TABLE http_responses ADD COLUMN elapsed_headers INTEGER NOT NULL DEFAULT 0;
ALTER TABLE http_responses ADD COLUMN remote_addr TEXT;
ALTER TABLE http_responses ADD COLUMN version TEXT;
@@ -1,68 +0,0 @@
CREATE TABLE grpc_requests
(
    id                  TEXT NOT NULL PRIMARY KEY,
    model               TEXT DEFAULT 'grpc_request' NOT NULL,
    workspace_id        TEXT NOT NULL REFERENCES workspaces ON DELETE CASCADE,
    folder_id           TEXT NULL REFERENCES folders ON DELETE CASCADE,
    created_at          DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
    updated_at          DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
    name                TEXT NOT NULL,
    sort_priority       REAL NOT NULL,
    url                 TEXT NOT NULL,
    service             TEXT NULL,
    method              TEXT NULL,
    message             TEXT NOT NULL,
    authentication      TEXT DEFAULT '{}' NOT NULL,
    authentication_type TEXT NULL,
    metadata            TEXT DEFAULT '[]' NOT NULL
);

CREATE TABLE grpc_connections
(
    id           TEXT NOT NULL PRIMARY KEY,
    model        TEXT DEFAULT 'grpc_connection' NOT NULL,
    workspace_id TEXT NOT NULL REFERENCES workspaces ON DELETE CASCADE,
    request_id   TEXT NOT NULL REFERENCES grpc_requests ON DELETE CASCADE,
    created_at   DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
    updated_at   DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
    url          TEXT NOT NULL,
    service      TEXT NOT NULL,
    method       TEXT NOT NULL,
    status       INTEGER DEFAULT -1 NOT NULL,
    error        TEXT NULL,
    elapsed      INTEGER DEFAULT 0 NOT NULL,
    trailers     TEXT DEFAULT '{}' NOT NULL
);

CREATE TABLE grpc_events
(
    id            TEXT NOT NULL PRIMARY KEY,
    model         TEXT DEFAULT 'grpc_event' NOT NULL,
    workspace_id  TEXT NOT NULL REFERENCES workspaces ON DELETE CASCADE,
    request_id    TEXT NOT NULL REFERENCES grpc_requests ON DELETE CASCADE,
    connection_id TEXT NOT NULL REFERENCES grpc_connections ON DELETE CASCADE,
    created_at    DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
    updated_at    DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
    metadata      TEXT DEFAULT '{}' NOT NULL,
    event_type    TEXT NOT NULL,
    status        INTEGER NULL,
    error         TEXT NULL,
    content       TEXT NOT NULL
);
@@ -1,4 +0,0 @@
ALTER TABLE settings ADD COLUMN theme_dark TEXT DEFAULT 'yaak-dark' NOT NULL;
ALTER TABLE settings ADD COLUMN theme_light TEXT DEFAULT 'yaak-light' NOT NULL;
@@ -1,4 +0,0 @@
ALTER TABLE settings ADD COLUMN interface_font_size INTEGER DEFAULT 15 NOT NULL;
ALTER TABLE settings ADD COLUMN interface_scale INTEGER DEFAULT 1 NOT NULL;
ALTER TABLE settings ADD COLUMN editor_font_size INTEGER DEFAULT 13 NOT NULL;
ALTER TABLE settings ADD COLUMN editor_soft_wrap BOOLEAN DEFAULT 1 NOT NULL;
@@ -1 +0,0 @@
ALTER TABLE settings ADD COLUMN open_workspace_new_window BOOLEAN NULL DEFAULT NULL;
src-tauri/plugins/importer-insomnia/out/index.js (new file, 134 lines):
@@ -0,0 +1,134 @@
function S(e, t) {
  return console.log("IMPORTING Environment", e._id, e.name, JSON.stringify(e, null, 2)), {
    id: e._id,
    createdAt: new Date(e.created ?? Date.now()).toISOString().replace("Z", ""),
    updatedAt: new Date(e.updated ?? Date.now()).toISOString().replace("Z", ""),
    workspaceId: t,
    model: "environment",
    name: e.name,
    variables: Object.entries(e.data).map(([n, a]) => ({
      enabled: !0,
      name: n,
      value: `${a}`
    }))
  };
}
function I(e) {
  return m(e) && e._type === "workspace";
}
function y(e) {
  return m(e) && e._type === "request_group";
}
function g(e) {
  return m(e) && e._type === "request";
}
function f(e) {
  return m(e) && e._type === "environment";
}
function m(e) {
  return Object.prototype.toString.call(e) === "[object Object]";
}
function w(e) {
  return Object.prototype.toString.call(e) === "[object String]";
}
function O(e) {
  return Object.entries(e).map(([t, n]) => ({
    enabled: !0,
    name: t,
    value: `${n}`
  }));
}
function l(e) {
  return w(e) ? e.replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, "${[$2]}") : e;
}
function h(e, t, n = 0) {
  var c, o;
  console.log("IMPORTING REQUEST", e._id, e.name, JSON.stringify(e, null, 2));
  let a = null, r = null;
  ((c = e.body) == null ? void 0 : c.mimeType) === "application/graphql" ? (a = "graphql", r = l(e.body.text)) : ((o = e.body) == null ? void 0 : o.mimeType) === "application/json" && (a = "application/json", r = l(e.body.text));
  let i = null, u = {};
  return e.authentication.type === "bearer" ? (i = "bearer", u = {
    token: l(e.authentication.token)
  }) : e.authentication.type === "basic" && (i = "basic", u = {
    username: l(e.authentication.username),
    password: l(e.authentication.password)
  }), {
    id: e._id,
    createdAt: new Date(e.created ?? Date.now()).toISOString().replace("Z", ""),
    updatedAt: new Date(e.updated ?? Date.now()).toISOString().replace("Z", ""),
    workspaceId: t,
    folderId: e.parentId === t ? null : e.parentId,
    model: "http_request",
    sortPriority: n,
    name: e.name,
    url: l(e.url),
    body: r,
    bodyType: a,
    authentication: u,
    authenticationType: i,
    method: e.method,
    headers: (e.headers ?? []).map(({ name: d, value: p, disabled: s }) => ({
      enabled: !s,
      name: d,
      value: p
    })).filter(({ name: d, value: p }) => d !== "" || p !== "")
  };
}
function _(e, t) {
  return console.log("IMPORTING FOLDER", e._id, e.name, JSON.stringify(e, null, 2)), {
    id: e._id,
    createdAt: new Date(e.created ?? Date.now()).toISOString().replace("Z", ""),
    updatedAt: new Date(e.updated ?? Date.now()).toISOString().replace("Z", ""),
    folderId: e.parentId === t ? null : e.parentId,
    workspaceId: t,
    model: "folder",
    name: e.name
  };
}
function b(e) {
  console.log("RUNNING INSOMNIA");
  let t;
  try {
    t = JSON.parse(e);
  } catch {
    return;
  }
  if (!m(t) || !Array.isArray(t.resources))
    return;
  const n = {
    workspaces: [],
    requests: [],
    environments: [],
    folders: []
  }, a = t.resources.filter(I);
  for (const r of a) {
    const i = t.resources.find(
      (o) => f(o) && o.parentId === r._id
    );
    n.workspaces.push({
      id: r._id,
      createdAt: new Date(a.created ?? Date.now()).toISOString().replace("Z", ""),
      updatedAt: new Date(a.updated ?? Date.now()).toISOString().replace("Z", ""),
      model: "workspace",
      name: r.name,
      variables: i ? O(i.data) : []
    });
    const u = t.resources.filter(
      (o) => f(o) && o.parentId === (i == null ? void 0 : i._id)
    );
    n.environments.push(
      ...u.map((o) => S(o, r._id))
    );
    const c = (o) => {
      const d = t.resources.filter((s) => s.parentId === o);
      let p = 0;
      for (const s of d)
        y(s) ? (n.folders.push(_(s, r._id)), c(s._id)) : g(s) && n.requests.push(h(s, r._id, p++));
    };
    c(r._id);
  }
  return n.requests = n.requests.filter(Boolean), n.environments = n.environments.filter(Boolean), n.workspaces = n.workspaces.filter(Boolean), { resources: n };
}
export {
  b as pluginHookImport
};
src-tauri/plugins/importer-insomnia/src/helpers/types.js (new file, 23 lines):
@@ -0,0 +1,23 @@
export function isWorkspace(obj) {
  return isJSObject(obj) && obj._type === 'workspace';
}

export function isRequestGroup(obj) {
  return isJSObject(obj) && obj._type === 'request_group';
}

export function isRequest(obj) {
  return isJSObject(obj) && obj._type === 'request';
}

export function isEnvironment(obj) {
  return isJSObject(obj) && obj._type === 'environment';
}

export function isJSObject(obj) {
  return Object.prototype.toString.call(obj) === '[object Object]';
}

export function isJSString(obj) {
  return Object.prototype.toString.call(obj) === '[object String]';
}
src-tauri/plugins/importer-insomnia/src/helpers/variables.js (new file, 18 lines):
@@ -0,0 +1,18 @@
import { isJSString } from './types.js';

export function parseVariables(data) {
  return Object.entries(data).map(([name, value]) => ({
    enabled: true,
    name,
    value: `${value}`,
  }));
}

/**
 * Convert Insomnia syntax to Yaak syntax
 * @param {string} variable - Text to convert
 */
export function convertSyntax(variable) {
  if (!isJSString(variable)) return variable;
  return variable.replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, '${[$2]}');
}
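The same template rewrite expressed with the regex crate already listed in the app's Cargo.toml above; this is a sketch, not code from this diff, and the inner capture is made lazy here so trailing whitespace inside the braces is dropped. For example, "{{ _.base_url }}/items" becomes "${[base_url]}/items".

// Sketch: Rust equivalent of convertSyntax() above. Note `$` must be written
// as `$$` in the replacement string for the regex crate, and the inner capture
// is lazy so "{{ _.base_url }}" maps to "${[base_url]}" without a stray space.
fn convert_syntax(input: &str) -> String {
    let re = regex::Regex::new(r"\{\{\s*(_\.)?([^}]+?)\s*\}\}").unwrap();
    re.replace_all(input, "$${[${2}]}").into_owned()
}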
@@ -0,0 +1,21 @@
/**
 * Import an Insomnia environment object.
 * @param {Object} e - The environment object to import.
 * @param workspaceId - Workspace to import into.
 */
export function importEnvironment(e, workspaceId) {
  console.log('IMPORTING Environment', e._id, e.name, JSON.stringify(e, null, 2));
  return {
    id: e._id,
    createdAt: new Date(e.created ?? Date.now()).toISOString().replace('Z', ''),
    updatedAt: new Date(e.updated ?? Date.now()).toISOString().replace('Z', ''),
    workspaceId,
    model: 'environment',
    name: e.name,
    variables: Object.entries(e.data).map(([name, value]) => ({
      enabled: true,
      name,
      value: `${value}`,
    })),
  };
}
src-tauri/plugins/importer-insomnia/src/importers/folder.js (new file, 17 lines):
@@ -0,0 +1,17 @@
/**
 * Import an Insomnia folder object.
 * @param {Object} f - The environment object to import.
 * @param workspaceId - Workspace to import into.
 */
export function importFolder(f, workspaceId) {
  console.log('IMPORTING FOLDER', f._id, f.name, JSON.stringify(f, null, 2));
  return {
    id: f._id,
    createdAt: new Date(f.created ?? Date.now()).toISOString().replace('Z', ''),
    updatedAt: new Date(f.updated ?? Date.now()).toISOString().replace('Z', ''),
    folderId: f.parentId === workspaceId ? null : f.parentId,
    workspaceId,
    model: 'folder',
    name: f.name,
  };
}
Some files were not shown because too many files have changed in this diff.