Mirror of https://github.com/mountain-loop/yaak.git (synced 2026-02-16 07:37:48 +01:00)

Compare commits
145 commits (SHA1):

041298b3f8, b400940f0e, 2e144f064d, d8b1cadae6, c2f9760d08, a4c600cb48, bc3a5e3e58, 4c3a02ac53, 1974d61aa4, 0bcb092854,
409620f533, 3e9037f70a, be82b67ed3, 432b366105, 42e70b941d, 3808215210, 763a60982a, a05679fd93, 73c366dc27, 0be7d0283b,
749df338c5, 3184c1b79e, b52bf7cd56, d962d7f94b, 21e2a67c1e, c188435524, 8a7a7ba49d, cbc40230bb, bc4c3178c9, 121fe5b3ea,
861609ddc0, e5070513ac, f5c3798df9, 469d12fede, 417a02744b, 81e78ef24c, dad9cebb9e, b3ede3d6d6, 035fe54df0, 5f8d99ba64,
84b8d130dc, 94d4227bc1, 77cdea2f9f, 8b1ca4cb47, d3b8a42180, 95f39c514a, 7cba082eb0, 3b9b320be2, 18664975a9, bb014b7c43,
9fa0650647, b8c42677ca, 2eb3c2241c, 8fb7bbfe2e, 52eba74151, e651760713, 82451a26f6, cc15f60fb6, 2f8b2a81c7, 6d4fdc91fe,
faca29c789, 1ab937aae4, 45fcea1954, 73554078d1, a42a88de7b, 14a6079176, 6c513616c0, cdf5f1b7a5, 6566857d54, 2e55a1bd6d,
e114a85c39, 92be088e6c, f1757ae427, ce885c3551, 17657a4d04, b7f62b78b1, 006284b99c, bac3968aac, e5fa044eda, 5969120140,
8801936ad2, 1d37d46130, 445c30f3a9, 5fedea38c2, d86549f492, 4c4eaba7d2, cf8f8743bb, aa75636026, 2c41b243b6, 6aea343d4f,
c300e8cbd5, 6e25c26e9f, dce1455be7, 736025b12f, cb9e9a67a3, 93c323458f, 6f8c03d8c1, afd4228fcf, d478e5a12e, 0db9ebe67d,
80ea5e6b91, cb773babe1, b9ed554aca, f42f3d0e27, 93ba5b6e5c, be11d5968e, 0828599e4f, f47d22c395, 12233cb6f6, cdce2ac53a,
f4d0371060, 787a0433cb, 60ea408e51, 0db0cdfd6c, 26371e5f6b, 6b7c144a11, 62f43ca24c, fbf4d3c11e, 7a1a0689b0, 9ead45d67a,
eb8153f409, 80de232bec, 7af8c95fea, 2db72fe6ef, d297e92a5a, 7e1da4395d, 7f8b0479e1, c8d6183456, 9d5f7784c4, 05ac836265,
af7782c93b, 2b1431d041, 9d8b7a5265, 95c12ad291, dac2cec52f, efe4eef1b7, a0e196a9e7, c6427dc724, 8ce1e22b4e, 022d725e03,
ed7fdb1b4c, e510204b8c, d31b4448df, e420a0a45e, 84ecbe0cd6
.github/FUNDING.yml (vendored, new file, 15 lines added)
@@ -0,0 +1,15 @@
# These are supported funding model platforms

github: gschier
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
polar: # Replace with a single Polar username
buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
thanks_dev: # Replace with a single thanks.dev username
custom: https://yaak.app/pricing
.github/ISSUE_TEMPLATE/bug_report.md (vendored, new file, 38 lines added)
@@ -0,0 +1,38 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Desktop (please complete the following information):**
 - OS: [e.g. iOS]
 - Browser [e.g. chrome, safari]
 - Version [e.g. 22]

**Smartphone (please complete the following information):**
 - Device: [e.g. iPhone6]
 - OS: [e.g. iOS8.1]
 - Browser [e.g. stock browser, safari]
 - Version [e.g. 22]

**Additional context**
Add any other context about the problem here.
.github/workflows/release.yml (vendored, 2 lines changed)
@@ -65,7 +65,7 @@ jobs:

      - name: install dependencies (windows only)
        if: matrix.platform == 'windows-latest'
        run: cargo install --force trusted-signing-cli
        run: cargo install --force trusted-signing-cli --version 0.5.0

      - name: Install NPM Dependencies
        run: |
@@ -60,3 +60,10 @@ Run the app to apply the migrations.
If nothing happens, try `cargo clean` and run the app again.

_Note: Development builds use a separate database location from production builds._

## Lezer Grammar Generation

```sh
# Example
lezer-generator components/core/Editor/<LANG>/<LANG>.grammar > components/core/Editor/<LANG>/<LANG>.ts
```
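The generated module can then be wired into the editor. A minimal sketch of how a lezer-generator output is typically consumed in a CodeMirror setup; the file names and the `url` language are hypothetical and not taken from this diff:

```ts
// Minimal sketch: wrapping a lezer-generator output in a CodeMirror language.
// Assumes the generated file exports `parser`, which is lezer-generator's default.
import { LRLanguage, LanguageSupport } from '@codemirror/language';
import { parser } from './url/url'; // hypothetical <LANG> = "url"

const urlLanguage = LRLanguage.define({ parser });

// Returns an extension that can be passed to a CodeMirror editor configuration.
export function url(): LanguageSupport {
  return new LanguageSupport(urlLanguage);
}
```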
README.md (58 lines changed)
@@ -3,40 +3,32 @@
Yaak is a desktop API client for interacting with REST, GraphQL, Server Sent Events (SSE), WebSocket, and gRPC
APIs. It's built using [Tauri](https://tauri.app), Rust, and ReactJS.



## Feature Overview

🪂 Import data from Postman, Insomnia, OpenAPI, Swagger, or Curl.<br/>
📤 Send requests via REST, GraphQL, Server Sent Events (SSE), WebSockets, or gRPC.<br/>
🔐 Automatically authorize requests with OAuth 2.0, JWT tokens, Basic Auth, and more.<br/>
🔎 Filter response bodies using JSONPath or XPath queries.<br/>
⛓️ Chain together multiple requests to dynamically reference values.<br/>
📂 Organize requests into workspaces and nested folders.<br/>
🧮 Use environment variables to easily switch between Prod and Dev.<br/>
🏷️ Send dynamic values like UUIDs or timestamps using template tags.<br/>
🎨 Choose from many of the included themes, or make your own.<br/>
💽 Mirror workspace data to a directory for integration with Git or Dropbox.<br/>
📜 View response history for each request.<br/>
🔌 Create your own plugins for authentication, template tags, and more!<br/>
🛜 Configure a proxy to access firewall-blocked APIs

## Feedback and Bug Reports

All feedback, bug reports, questions, and feature requests should be reported to
[feedback.yaak.app](https://feedback.yaak.app). Issues will be duplicated
in this repository if applicable.

## Community Projects

- [`yaak2postman`](https://github.com/BiteCraft/yaak2postman) CLI for converting Yaak data
exports to Postman-compatible collections


## Contribution Policy

Yaak is open source, but only accepting contributions for bug fixes. See the
[`good first issue`](https://github.com/yaakapp/app/labels/good%20first%20issue) label for
issues that are more approachable for contribution.
Yaak is open source, but only accepting contributions for bug fixes. To get started,
visit [`DEVELOPMENT.md`](DEVELOPMENT.md) for tips on setting up your environment.

To get started, visit [`DEVELOPMENT.md`](DEVELOPMENT.md) for tips on setting up your
environment.
## Feature Overview

- 🪂 Import data from Postman, Insomnia, OpenAPI, Swagger, or Curl.<br/>
- 📤 Send requests via REST, GraphQL, Server Sent Events (SSE), WebSockets, or gRPC.<br/>
- 🔐 Automatically authorize requests with OAuth 2.0, JWT tokens, Basic Auth, and more.<br/>
- 🔎 Filter response bodies using JSONPath or XPath queries.<br/>
- ⛓️ Chain together multiple requests to dynamically reference values.<br/>
- 📂 Organize requests into workspaces and nested folders.<br/>
- 🧮 Use environment variables to easily switch between Prod and Dev.<br/>
- 🛡️ Secure arbitrary text values with end-to-end encryption<br/>
- 🏷️ Send dynamic values like UUIDs or timestamps using template tags.<br/>
- 🎨 Choose from many of the included themes, or make your own.<br/>
- 💽 Mirror workspace data to a directory for integration with Git or Dropbox.<br/>
- 📜 View response history for each request.<br/>
- 🔌 Create your own plugins for authentication, template tags, and more!<br/>
- 🛜 Configure a proxy to access firewall-blocked APIs

## Useful Resources

- [Feedback and Bug Reports](https://feedback.yaak.app)
- [Documentation](https://feedback.yaak.app/help)
- [Yaak vs Postman](https://yaak.app/blog/postman-alternative)
package-lock.json (generated, 3896 lines changed)
File diff suppressed because it is too large
package.json (15 lines changed)
@@ -10,8 +10,10 @@
    "packages/plugin-runtime",
    "packages/plugin-runtime-types",
    "packages/common-lib",
    "src-tauri/yaak-license",
    "src-tauri/yaak-crypto",
    "src-tauri/yaak-git",
    "src-tauri/yaak-license",
    "src-tauri/yaak-mac-window",
    "src-tauri/yaak-models",
    "src-tauri/yaak-plugins",
    "src-tauri/yaak-sse",

@@ -35,10 +37,13 @@
    "tauri-before-build": "npm run bootstrap && npm run --workspaces --if-present build",
    "tauri-before-dev": "npm run --workspaces --if-present dev"
  },
  "dependencies": {
    "jotai": "^2.12.2"
  },
  "devDependencies": {
    "@tauri-apps/cli": "^2.2.7",
    "@typescript-eslint/eslint-plugin": "^8.18.1",
    "@typescript-eslint/parser": "^8.18.1",
    "@tauri-apps/cli": "^2.4.1",
    "@typescript-eslint/eslint-plugin": "^8.27.0",
    "@typescript-eslint/parser": "^8.27.0",
    "eslint": "^8",
    "eslint-config-prettier": "^8",
    "eslint-plugin-import": "^2.31.0",

@@ -48,6 +53,6 @@
    "nodejs-file-downloader": "^4.13.0",
    "npm-run-all": "^4.1.5",
    "prettier": "^3.4.2",
    "typescript": "^5.7.2"
    "typescript": "^5.8.2"
  }
}
@@ -1,6 +1,6 @@
{
  "name": "@yaakapp/api",
  "version": "0.4.1",
  "version": "0.5.3",
  "main": "lib/index.js",
  "typings": "./lib/index.d.ts",
  "files": [

@@ -20,8 +20,6 @@
    "@types/node": "^22.5.4"
  },
  "devDependencies": {
    "cpy-cli": "^5.0.0",
    "npm-run-all": "^4.1.5",
    "typescript": "^5.6.2"
    "cpy-cli": "^5.0.0"
  }
}
@@ -344,9 +344,9 @@ export type ImportResources = { workspaces: Array<Workspace>, environments: Arra

export type ImportResponse = { resources: ImportResources, };

export type InternalEvent = { id: string, pluginRefId: string, pluginName: string, replyId: string | null, windowContext: WindowContext, payload: InternalEventPayload, };
export type InternalEvent = { id: string, pluginRefId: string, pluginName: string, replyId: string | null, windowContext: PluginWindowContext, payload: InternalEventPayload, };
export type InternalEventPayload = { "type": "boot_request" } & BootRequest | { "type": "boot_response" } & BootResponse | { "type": "reload_request" } & EmptyPayload | { "type": "reload_response" } & EmptyPayload | { "type": "terminate_request" } | { "type": "terminate_response" } | { "type": "import_request" } & ImportRequest | { "type": "import_response" } & ImportResponse | { "type": "filter_request" } & FilterRequest | { "type": "filter_response" } & FilterResponse | { "type": "export_http_request_request" } & ExportHttpRequestRequest | { "type": "export_http_request_response" } & ExportHttpRequestResponse | { "type": "send_http_request_request" } & SendHttpRequestRequest | { "type": "send_http_request_response" } & SendHttpRequestResponse | { "type": "get_http_request_actions_request" } & EmptyPayload | { "type": "get_http_request_actions_response" } & GetHttpRequestActionsResponse | { "type": "call_http_request_action_request" } & CallHttpRequestActionRequest | { "type": "get_template_functions_request" } | { "type": "get_template_functions_response" } & GetTemplateFunctionsResponse | { "type": "call_template_function_request" } & CallTemplateFunctionRequest | { "type": "call_template_function_response" } & CallTemplateFunctionResponse | { "type": "get_http_authentication_summary_request" } & EmptyPayload | { "type": "get_http_authentication_summary_response" } & GetHttpAuthenticationSummaryResponse | { "type": "get_http_authentication_config_request" } & GetHttpAuthenticationConfigRequest | { "type": "get_http_authentication_config_response" } & GetHttpAuthenticationConfigResponse | { "type": "call_http_authentication_request" } & CallHttpAuthenticationRequest | { "type": "call_http_authentication_response" } & CallHttpAuthenticationResponse | { "type": "call_http_authentication_action_request" } & CallHttpAuthenticationActionRequest | { "type": "call_http_authentication_action_response" } & EmptyPayload | { "type": "copy_text_request" } & CopyTextRequest | { "type": "copy_text_response" } & EmptyPayload | { "type": "render_http_request_request" } & RenderHttpRequestRequest | { "type": "render_http_request_response" } & RenderHttpRequestResponse | { "type": "get_key_value_request" } & GetKeyValueRequest | { "type": "get_key_value_response" } & GetKeyValueResponse | { "type": "set_key_value_request" } & SetKeyValueRequest | { "type": "set_key_value_response" } & SetKeyValueResponse | { "type": "delete_key_value_request" } & DeleteKeyValueRequest | { "type": "delete_key_value_response" } & DeleteKeyValueResponse | { "type": "open_window_request" } & OpenWindowRequest | { "type": "window_navigate_event" } & WindowNavigateEvent | { "type": "close_window_request" } & CloseWindowRequest | { "type": "template_render_request" } & TemplateRenderRequest | { "type": "template_render_response" } & TemplateRenderResponse | { "type": "show_toast_request" } & ShowToastRequest | { "type": "show_toast_response" } & EmptyPayload | { "type": "prompt_text_request" } & PromptTextRequest | { "type": "prompt_text_response" } & PromptTextResponse | { "type": "get_http_request_by_id_request" } & GetHttpRequestByIdRequest | { "type": "get_http_request_by_id_response" } & GetHttpRequestByIdResponse | { "type": "find_http_responses_request" } & FindHttpResponsesRequest | { "type": "find_http_responses_response" } & FindHttpResponsesResponse | { "type": "empty_response" } & EmptyPayload | { "type": "error_response" } & ErrorResponse;
export type InternalEventPayload = { "type": "boot_request" } & BootRequest | { "type": "boot_response" } & BootResponse | { "type": "reload_request" } & EmptyPayload | { "type": "reload_response" } & EmptyPayload | { "type": "terminate_request" } | { "type": "terminate_response" } | { "type": "import_request" } & ImportRequest | { "type": "import_response" } & ImportResponse | { "type": "filter_request" } & FilterRequest | { "type": "filter_response" } & FilterResponse | { "type": "export_http_request_request" } & ExportHttpRequestRequest | { "type": "export_http_request_response" } & ExportHttpRequestResponse | { "type": "send_http_request_request" } & SendHttpRequestRequest | { "type": "send_http_request_response" } & SendHttpRequestResponse | { "type": "get_http_request_actions_request" } & EmptyPayload | { "type": "get_http_request_actions_response" } & GetHttpRequestActionsResponse | { "type": "call_http_request_action_request" } & CallHttpRequestActionRequest | { "type": "get_template_functions_request" } | { "type": "get_template_functions_response" } & GetTemplateFunctionsResponse | { "type": "call_template_function_request" } & CallTemplateFunctionRequest | { "type": "call_template_function_response" } & CallTemplateFunctionResponse | { "type": "get_http_authentication_summary_request" } & EmptyPayload | { "type": "get_http_authentication_summary_response" } & GetHttpAuthenticationSummaryResponse | { "type": "get_http_authentication_config_request" } & GetHttpAuthenticationConfigRequest | { "type": "get_http_authentication_config_response" } & GetHttpAuthenticationConfigResponse | { "type": "call_http_authentication_request" } & CallHttpAuthenticationRequest | { "type": "call_http_authentication_response" } & CallHttpAuthenticationResponse | { "type": "call_http_authentication_action_request" } & CallHttpAuthenticationActionRequest | { "type": "call_http_authentication_action_response" } & EmptyPayload | { "type": "copy_text_request" } & CopyTextRequest | { "type": "copy_text_response" } & EmptyPayload | { "type": "render_http_request_request" } & RenderHttpRequestRequest | { "type": "render_http_request_response" } & RenderHttpRequestResponse | { "type": "get_key_value_request" } & GetKeyValueRequest | { "type": "get_key_value_response" } & GetKeyValueResponse | { "type": "set_key_value_request" } & SetKeyValueRequest | { "type": "set_key_value_response" } & SetKeyValueResponse | { "type": "delete_key_value_request" } & DeleteKeyValueRequest | { "type": "delete_key_value_response" } & DeleteKeyValueResponse | { "type": "open_window_request" } & OpenWindowRequest | { "type": "window_navigate_event" } & WindowNavigateEvent | { "type": "window_close_event" } | { "type": "close_window_request" } & CloseWindowRequest | { "type": "template_render_request" } & TemplateRenderRequest | { "type": "template_render_response" } & TemplateRenderResponse | { "type": "show_toast_request" } & ShowToastRequest | { "type": "show_toast_response" } & EmptyPayload | { "type": "prompt_text_request" } & PromptTextRequest | { "type": "prompt_text_response" } & PromptTextResponse | { "type": "get_http_request_by_id_request" } & GetHttpRequestByIdRequest | { "type": "get_http_request_by_id_response" } & GetHttpRequestByIdResponse | { "type": "find_http_responses_request" } & FindHttpResponsesRequest | { "type": "find_http_responses_response" } & FindHttpResponsesResponse | { "type": "empty_response" } & EmptyPayload | { "type": "error_response" } & ErrorResponse;
export type JsonPrimitive = string | number | boolean | null;

@@ -354,7 +354,9 @@ export type OpenWindowRequest = { url: string,
/**
 * Label for the window. If not provided, a random one will be generated.
 */
label: string, title?: string, size?: WindowSize, };
label: string, title?: string, size?: WindowSize, dataDirKey?: string, };

export type PluginWindowContext = { "type": "none" } | { "type": "label", label: string, workspace_id: string | null, };

export type PromptTextRequest = { id: string, title: string, label: string, description?: string, defaultValue?: string, placeholder?: string,
/**
@@ -393,14 +395,17 @@ export type TemplateFunction = { name: string, description?: string,
 * Also support alternative names. This is useful for not breaking existing
 * tags when changing the `name` property
 */
aliases?: Array<string>, args: Array<FormInput>, };
aliases?: Array<string>, args: Array<TemplateFunctionArg>, };

/**
 * Similar to FormInput, but contains
 */
export type TemplateFunctionArg = FormInput;

export type TemplateRenderRequest = { data: JsonValue, purpose: RenderPurpose, };

export type TemplateRenderResponse = { data: JsonValue, };

export type WindowContext = { "type": "none" } | { "type": "label", label: string, };

export type WindowNavigateEvent = { url: string, };

export type WindowSize = { width: number, height: number, };
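The `InternalEventPayload` union above is the message format exchanged between the app and the plugin runtime; the notable addition in this hunk is the `"window_close_event"` variant. A minimal sketch, not taken from the diff, of how a consumer narrows the union on its `type` discriminant:

```ts
import type { InternalEvent, InternalEventPayload } from '@yaakapp/api';

// Narrowing on the `type` field gives access to the variant's payload fields.
function describePayload(event: InternalEvent): string {
  const payload: InternalEventPayload = event.payload;
  switch (payload.type) {
    case 'window_navigate_event':
      return `window navigated to ${payload.url}`;
    case 'window_close_event':
      return 'window closed'; // variant added in this change; it carries no extra fields
    case 'error_response':
      return `plugin error: ${payload.error}`;
    default:
      return payload.type;
  }
}
```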
@@ -1,6 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type Environment = { model: "environment", id: string, workspaceId: string, environmentId: string | null, createdAt: string, updatedAt: string, name: string, variables: Array<EnvironmentVariable>, };
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, base: boolean, variables: Array<EnvironmentVariable>, };

export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };

@@ -24,4 +24,4 @@ export type HttpUrlParameter = { enabled?: boolean, name: string, value: string,

export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };

export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, name: string, description: string, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, name: string, description: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
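For reference, an object satisfying the new `Environment` binding looks like this; a minimal sketch with placeholder values, where the visible changes are that `environmentId` is gone and the `base`/`public` flags are explicit:

```ts
import type { Environment } from '@yaakapp/api';

// Placeholder values only; the field set matches the new generated binding.
const baseEnvironment: Environment = {
  model: 'environment',
  id: 'ev_example',
  workspaceId: 'wk_example',
  createdAt: '2025-01-01T00:00:00Z',
  updatedAt: '2025-01-01T00:00:00Z',
  name: 'Global Variables',
  public: false,
  base: true,
  variables: [{ name: 'BASE_URL', value: 'https://api.example.com', enabled: true }],
};
```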
@@ -1,3 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type JsonValue = number | string | Array<JsonValue> | { [key in string]?: JsonValue };
export type JsonValue = number | string | boolean | Array<JsonValue> | { [key in string]?: JsonValue } | null;
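The widened `JsonValue` now accepts `boolean` and `null` members, so values like the following type-check against the generated binding (illustrative sketch):

```ts
import type { JsonValue } from '@yaakapp/api'; // assumes the binding is re-exported from the API package

// `enabled: true` and `note: null` only became valid with this change.
const value: JsonValue = { enabled: true, note: null, retries: 3, tags: ['a', 'b'] };
```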
@@ -3,3 +3,7 @@ export type * from './themes';

export * from './bindings/gen_models';
export * from './bindings/gen_events';

// Some extras for utility

export type { PartialImportResources } from './plugins/ImporterPlugin';
@@ -32,7 +32,10 @@ export interface Context {
  };
  window: {
    openUrl(
      args: OpenWindowRequest & { onNavigate?: (args: { url: string }) => void },
      args: OpenWindowRequest & {
        onNavigate?: (args: { url: string }) => void;
        onClose?: () => void;
      },
    ): Promise<{ close: () => void }>;
  };
  httpRequest: {
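A minimal sketch of a plugin using the extended `openUrl` signature; the URL, label, and timing below are placeholders, and the `onClose` callback is the new part:

```ts
import type { Context } from '@yaakapp/api';

// Hypothetical helper demonstrating the new onClose callback.
async function authorizeInWindow(ctx: Context) {
  const { close } = await ctx.window.openUrl({
    url: 'https://example.com/oauth/authorize', // placeholder URL
    label: 'oauth-authorization',               // placeholder label
    onNavigate: ({ url }) => console.log('navigated to', url),
    onClose: () => console.log('window closed before authorization finished'),
  });

  // The returned handle can still close the window programmatically.
  setTimeout(close, 60_000);
}
```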
@@ -1,6 +1,6 @@
import type { Context } from './Context';

export type FilterPluginResponse = { filtered: string };
type FilterPluginResponse = { filtered: string };

export type FilterPlugin = {
  name: string;
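For context, a filter plugin implementing this type might look roughly like the sketch below; the argument names (`filter`, `payload`) are inferred from how the runtime invokes `onFilter` elsewhere in this change, and the plugin name is hypothetical:

```ts
import type { PluginDefinition } from '@yaakapp/api';

// Minimal sketch: keep only response lines containing the filter text.
export const plugin: PluginDefinition = {
  filter: {
    name: 'line-grep', // hypothetical
    onFilter: async (_ctx, args) => ({
      filtered: args.payload
        .split('\n')
        .filter((line) => line.includes(args.filter))
        .join('\n'),
    }),
  },
};
```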
@@ -1,15 +1,21 @@
import { Environment, Folder, GrpcRequest, HttpRequest, Workspace } from '../bindings/gen_models';
import type { AtLeast } from '../helpers';
import { ImportResources } from '../bindings/gen_events';
import { AtLeast } from '../helpers';
import type { Context } from './Context';

type ImportPluginResponse = null | {
  resources: {
    workspaces: AtLeast<Workspace, 'name' | 'id' | 'model'>[];
    environments: AtLeast<Environment, 'name' | 'id' | 'model' | 'workspaceId'>[];
    folders: AtLeast<Folder, 'name' | 'id' | 'model' | 'workspaceId'>[];
    httpRequests: AtLeast<HttpRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
    grpcRequests: AtLeast<GrpcRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
  };
type RootFields = 'name' | 'id' | 'model';
type CommonFields = RootFields | 'workspaceId';

export type PartialImportResources = {
  workspaces: Array<AtLeast<ImportResources['workspaces'][0], RootFields>>;
  environments: Array<AtLeast<ImportResources['environments'][0], CommonFields>>;
  folders: Array<AtLeast<ImportResources['folders'][0], CommonFields>>;
  httpRequests: Array<AtLeast<ImportResources['httpRequests'][0], CommonFields>>;
  grpcRequests: Array<AtLeast<ImportResources['grpcRequests'][0], CommonFields>>;
  websocketRequests: Array<AtLeast<ImportResources['websocketRequests'][0], CommonFields>>;
};

export type ImportPluginResponse = null | {
  resources: PartialImportResources;
};

export type ImporterPlugin = {
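With `AtLeast`, an importer only has to supply the listed key fields for each resource and Yaak can fill in the rest. A minimal sketch of a `PartialImportResources` value; the ids, names, and the `http_request` model literal are placeholders or assumptions:

```ts
import type { PartialImportResources } from '@yaakapp/api';

const resources: PartialImportResources = {
  workspaces: [{ model: 'workspace', id: 'wk_example', name: 'Imported Workspace' }],
  environments: [],
  folders: [],
  httpRequests: [
    // 'http_request' is assumed to be the model literal for HttpRequest
    { model: 'http_request', id: 'rq_example', workspaceId: 'wk_example', name: 'Get user' },
  ],
  grpcRequests: [],
  websocketRequests: [],
};
```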
@@ -3,10 +3,7 @@
  "scripts": {
    "bootstrap": "npm run build",
    "build": "run-p build:*",
    "build:main": "esbuild src/index.ts --bundle --platform=node --outfile=../../src-tauri/vendored/plugin-runtime/index.cjs",
    "build:worker": "esbuild src/index.worker.ts --bundle --platform=node --outfile=../../src-tauri/vendored/plugin-runtime/index.worker.cjs",
    "build:__main": "esbuild src/index.ts --bundle --platform=node --outfile=../../src-tauri/target/debug/vendored/plugin-runtime/index.cjs",
    "build:__worker": "esbuild src/index.ts --bundle --platform=node --outfile=../../src-tauri/target/debug/vendored/plugin-runtime/index.worker.cjs"
    "build:main": "esbuild src/index.ts --bundle --platform=node --outfile=../../src-tauri/vendored/plugin-runtime/index.cjs"
  },
  "dependencies": {
    "ws": "^8.18.0"
@@ -1,14 +1,19 @@
import type { InternalEvent } from "@yaakapp/api";
import EventEmitter from "node:events";
import type { InternalEvent } from '@yaakapp/api';

export class EventChannel {
  emitter: EventEmitter = new EventEmitter();
  #listeners = new Set<(event: InternalEvent) => void>();

  emit(e: InternalEvent) {
    this.emitter.emit("__plugin_event__", e);
    for (const l of this.#listeners) {
      l(e);
    }
  }

  listen(cb: (e: InternalEvent) => void) {
    this.emitter.on("__plugin_event__", cb);
    this.#listeners.add(cb);
  }

  unlisten(cb: (e: InternalEvent) => void) {
    this.#listeners.delete(cb);
  }
}
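The rewritten `EventChannel` swaps the `EventEmitter` wrapper for a plain `Set` of callbacks, which is what makes the new `unlisten()` possible. A minimal usage sketch:

```ts
import type { InternalEvent } from '@yaakapp/api';
import { EventChannel } from './EventChannel';

const channel = new EventChannel();

// Subscribe to events flowing through the channel.
const onEvent = (e: InternalEvent) => console.log('event', e.id, e.payload.type);
channel.listen(onEvent);

// Later: remove just this listener without disturbing any others.
channel.unlisten(onEvent);
```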
@@ -1,56 +1,27 @@
import type { BootRequest, InternalEvent } from '@yaakapp/api';
import path from 'node:path';
import { Worker } from 'node:worker_threads';
import type { EventChannel } from './EventChannel';
import type { PluginWorkerData } from './index.worker';
import { PluginInstance, PluginWorkerData } from './PluginInstance';

export class PluginHandle {
  #worker: Worker;
  #instance: PluginInstance;

  constructor(
    readonly pluginRefId: string,
    readonly bootRequest: BootRequest,
    readonly events: EventChannel,
    readonly pluginToAppEvents: EventChannel,
  ) {
    this.#worker = this.#createWorker();
  }

  sendToWorker(event: InternalEvent) {
    this.#worker.postMessage(event);
  }

  async terminate() {
    await this.#worker.terminate();
  }

  #createWorker(): Worker {
    const workerPath = process.env.YAAK_WORKER_PATH ?? path.join(__dirname, 'index.worker.cjs');
    const workerData: PluginWorkerData = {
      pluginRefId: this.pluginRefId,
      bootRequest: this.bootRequest,
    };
    const worker = new Worker(workerPath, {
      workerData,
    });

    worker.on('message', (e) => this.events.emit(e));
    worker.on('error', this.#handleError.bind(this));
    worker.on('exit', this.#handleExit.bind(this));

    console.log('Created plugin worker for ', this.bootRequest.dir);

    return worker;
    this.#instance = new PluginInstance(workerData, pluginToAppEvents);
  }

  async #handleError(err: Error) {
    console.error('Plugin errored', this.bootRequest.dir, err);
  sendToWorker(event: InternalEvent) {
    this.#instance.postMessage(event);
  }

  async #handleExit(code: number) {
    if (code === 0) {
      console.log('Plugin exited successfully', this.bootRequest.dir);
    } else {
      console.log('Plugin exited with status', code, this.bootRequest.dir);
    }
  terminate() {
    this.#instance.terminate();
  }
}
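After this change `PluginHandle` no longer spawns a worker thread; it constructs a `PluginInstance` in-process and forwards events to it. A minimal sketch of how the runtime drives it; the boot request values are placeholders, and `BootRequest` may carry more fields than shown:

```ts
import type { BootRequest } from '@yaakapp/api';
import { EventChannel } from './EventChannel';
import { PluginHandle } from './PluginHandle';

// Channel that carries events emitted by plugins back toward the app.
const pluginToAppEvents = new EventChannel();
pluginToAppEvents.listen((e) => console.log('plugin -> app', e.payload.type));

// Placeholder boot request; cast because BootRequest likely has more fields.
const bootRequest = { dir: '/path/to/plugin', watch: true } as BootRequest;

const handle = new PluginHandle('ref_example', bootRequest, pluginToAppEvents);

// App-to-plugin events are forwarded into the in-process PluginInstance:
// handle.sendToWorker(someInternalEvent);

handle.terminate(); // terminate() is now synchronous
```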
packages/plugin-runtime/src/PluginInstance.ts (new file, 582 lines added)
@@ -0,0 +1,582 @@
|
||||
import { PluginWindowContext, TemplateFunctionArg } from '@yaakapp-internal/plugins';
|
||||
import type {
|
||||
BootRequest,
|
||||
Context,
|
||||
DeleteKeyValueResponse,
|
||||
FindHttpResponsesResponse,
|
||||
FormInput,
|
||||
GetHttpRequestByIdResponse,
|
||||
GetKeyValueResponse,
|
||||
HttpAuthenticationAction,
|
||||
HttpRequestAction,
|
||||
InternalEvent,
|
||||
InternalEventPayload,
|
||||
JsonPrimitive,
|
||||
PluginDefinition,
|
||||
PromptTextResponse,
|
||||
RenderHttpRequestResponse,
|
||||
SendHttpRequestResponse,
|
||||
TemplateFunction,
|
||||
TemplateRenderResponse,
|
||||
} from '@yaakapp/api';
|
||||
import console from 'node:console';
|
||||
import { readFileSync, type Stats, statSync, watch } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
// import util from 'node:util';
|
||||
import { EventChannel } from './EventChannel';
|
||||
// import { interceptStdout } from './interceptStdout';
|
||||
import { migrateTemplateFunctionSelectOptions } from './migrations';
|
||||
|
||||
export interface PluginWorkerData {
|
||||
bootRequest: BootRequest;
|
||||
pluginRefId: string;
|
||||
}
|
||||
|
||||
export class PluginInstance {
|
||||
#workerData: PluginWorkerData;
|
||||
#mod: PluginDefinition;
|
||||
#pkg: { name?: string; version?: string };
|
||||
#pluginToAppEvents: EventChannel;
|
||||
#appToPluginEvents: EventChannel;
|
||||
|
||||
constructor(workerData: PluginWorkerData, pluginEvents: EventChannel) {
|
||||
this.#workerData = workerData;
|
||||
this.#pluginToAppEvents = pluginEvents;
|
||||
this.#appToPluginEvents = new EventChannel();
|
||||
|
||||
// Forward incoming events to onMessage()
|
||||
this.#appToPluginEvents.listen(async (event) => {
|
||||
await this.#onMessage(event);
|
||||
});
|
||||
|
||||
// Reload plugin if the JS or package.json changes
|
||||
const windowContextNone: PluginWindowContext = { type: 'none' };
|
||||
const fileChangeCallback = async () => {
|
||||
this.#importModule();
|
||||
return this.#sendPayload(windowContextNone, { type: 'reload_response' }, null);
|
||||
};
|
||||
|
||||
if (this.#workerData.bootRequest.watch) {
|
||||
watchFile(this.#pathMod(), fileChangeCallback);
|
||||
watchFile(this.#pathPkg(), fileChangeCallback);
|
||||
}
|
||||
|
||||
this.#mod = {};
|
||||
this.#pkg = JSON.parse(readFileSync(this.#pathPkg(), 'utf8'));
|
||||
|
||||
// TODO: Re-implement this now that we're not using workers
|
||||
// prefixStdout(`[plugin][${this.#pkg.name}] %s`);
|
||||
|
||||
this.#importModule();
|
||||
}
|
||||
|
||||
postMessage(event: InternalEvent) {
|
||||
this.#appToPluginEvents.emit(event);
|
||||
}
|
||||
|
||||
terminate() {
|
||||
this.#unimportModule();
|
||||
}
|
||||
|
||||
async #onMessage(event: InternalEvent) {
|
||||
const ctx = this.#newCtx(event);
|
||||
|
||||
const { windowContext, payload, id: replyId } = event;
|
||||
try {
|
||||
if (payload.type === 'boot_request') {
|
||||
// console.log('Plugin initialized', pkg.name, { capabilities, enableWatch });
|
||||
const payload: InternalEventPayload = {
|
||||
type: 'boot_response',
|
||||
name: this.#pkg.name ?? 'unknown',
|
||||
version: this.#pkg.version ?? '0.0.1',
|
||||
};
|
||||
this.#sendPayload(windowContext, payload, replyId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (payload.type === 'terminate_request') {
|
||||
const payload: InternalEventPayload = {
|
||||
type: 'terminate_response',
|
||||
};
|
||||
this.#sendPayload(windowContext, payload, replyId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
payload.type === 'import_request' &&
|
||||
typeof this.#mod?.importer?.onImport === 'function'
|
||||
) {
|
||||
const reply = await this.#mod.importer.onImport(ctx, {
|
||||
text: payload.content,
|
||||
});
|
||||
if (reply != null) {
|
||||
const replyPayload: InternalEventPayload = {
|
||||
type: 'import_response',
|
||||
// deno-lint-ignore no-explicit-any
|
||||
resources: reply.resources as any,
|
||||
};
|
||||
this.#sendPayload(windowContext, replyPayload, replyId);
|
||||
return;
|
||||
} else {
|
||||
// Continue, to send back an empty reply
|
||||
}
|
||||
}
|
||||
|
||||
if (payload.type === 'filter_request' && typeof this.#mod?.filter?.onFilter === 'function') {
|
||||
const reply = await this.#mod.filter.onFilter(ctx, {
|
||||
filter: payload.filter,
|
||||
payload: payload.content,
|
||||
mimeType: payload.type,
|
||||
});
|
||||
const replyPayload: InternalEventPayload = {
|
||||
type: 'filter_response',
|
||||
content: reply.filtered,
|
||||
};
|
||||
this.#sendPayload(windowContext, replyPayload, replyId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
payload.type === 'get_http_request_actions_request' &&
|
||||
Array.isArray(this.#mod?.httpRequestActions)
|
||||
) {
|
||||
const reply: HttpRequestAction[] = this.#mod.httpRequestActions.map((a) => ({
|
||||
...a,
|
||||
// Add everything except onSelect
|
||||
onSelect: undefined,
|
||||
}));
|
||||
const replyPayload: InternalEventPayload = {
|
||||
type: 'get_http_request_actions_response',
|
||||
pluginRefId: this.#workerData.pluginRefId,
|
||||
actions: reply,
|
||||
};
|
||||
this.#sendPayload(windowContext, replyPayload, replyId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
payload.type === 'get_template_functions_request' &&
|
||||
Array.isArray(this.#mod?.templateFunctions)
|
||||
) {
|
||||
const reply: TemplateFunction[] = this.#mod.templateFunctions.map((templateFunction) => {
|
||||
return {
|
||||
...migrateTemplateFunctionSelectOptions(templateFunction),
|
||||
// Add everything except render
|
||||
onRender: undefined,
|
||||
};
|
||||
});
|
||||
const replyPayload: InternalEventPayload = {
|
||||
type: 'get_template_functions_response',
|
||||
pluginRefId: this.#workerData.pluginRefId,
|
||||
functions: reply,
|
||||
};
|
||||
this.#sendPayload(windowContext, replyPayload, replyId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (payload.type === 'get_http_authentication_summary_request' && this.#mod?.authentication) {
|
||||
const { name, shortLabel, label } = this.#mod.authentication;
|
||||
const replyPayload: InternalEventPayload = {
|
||||
type: 'get_http_authentication_summary_response',
|
||||
name,
|
||||
label,
|
||||
shortLabel,
|
||||
};
|
||||
|
||||
this.#sendPayload(windowContext, replyPayload, replyId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (payload.type === 'get_http_authentication_config_request' && this.#mod?.authentication) {
|
||||
const { args, actions } = this.#mod.authentication;
|
||||
const resolvedArgs: FormInput[] = [];
|
||||
for (let i = 0; i < args.length; i++) {
|
||||
let v = args[i];
|
||||
if ('dynamic' in v) {
|
||||
const dynamicAttrs = await v.dynamic(ctx, payload);
|
||||
const { dynamic, ...other } = v;
|
||||
resolvedArgs.push({ ...other, ...dynamicAttrs } as FormInput);
|
||||
} else {
|
||||
resolvedArgs.push(v);
|
||||
}
|
||||
}
|
||||
const resolvedActions: HttpAuthenticationAction[] = [];
|
||||
for (const { onSelect, ...action } of actions ?? []) {
|
||||
resolvedActions.push(action);
|
||||
}
|
||||
|
||||
const replyPayload: InternalEventPayload = {
|
||||
type: 'get_http_authentication_config_response',
|
||||
args: resolvedArgs,
|
||||
actions: resolvedActions,
|
||||
pluginRefId: this.#workerData.pluginRefId,
|
||||
};
|
||||
|
||||
this.#sendPayload(windowContext, replyPayload, replyId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (payload.type === 'call_http_authentication_request' && this.#mod?.authentication) {
|
||||
const auth = this.#mod.authentication;
|
||||
if (typeof auth?.onApply === 'function') {
|
||||
applyFormInputDefaults(auth.args, payload.values);
|
||||
const result = await auth.onApply(ctx, payload);
|
||||
this.#sendPayload(
|
||||
windowContext,
|
||||
{
|
||||
type: 'call_http_authentication_response',
|
||||
setHeaders: result.setHeaders,
|
||||
},
|
||||
replyId,
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
payload.type === 'call_http_authentication_action_request' &&
|
||||
this.#mod.authentication != null
|
||||
) {
|
||||
const action = this.#mod.authentication.actions?.[payload.index];
|
||||
if (typeof action?.onSelect === 'function') {
|
||||
await action.onSelect(ctx, payload.args);
|
||||
this.#sendEmpty(windowContext, replyId);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
payload.type === 'call_http_request_action_request' &&
|
||||
Array.isArray(this.#mod.httpRequestActions)
|
||||
) {
|
||||
const action = this.#mod.httpRequestActions[payload.index];
|
||||
if (typeof action?.onSelect === 'function') {
|
||||
await action.onSelect(ctx, payload.args);
|
||||
this.#sendEmpty(windowContext, replyId);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
payload.type === 'call_template_function_request' &&
|
||||
Array.isArray(this.#mod?.templateFunctions)
|
||||
) {
|
||||
const fn = this.#mod.templateFunctions.find((a) => a.name === payload.name);
|
||||
if (typeof fn?.onRender === 'function') {
|
||||
applyFormInputDefaults(fn.args, payload.args.values);
|
||||
const result = await fn.onRender(ctx, payload.args);
|
||||
this.#sendPayload(
|
||||
windowContext,
|
||||
{
|
||||
type: 'call_template_function_response',
|
||||
value: result ?? null,
|
||||
},
|
||||
replyId,
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (payload.type === 'reload_request') {
|
||||
this.#importModule();
|
||||
}
|
||||
} catch (err) {
|
||||
console.log('Plugin call threw exception', payload.type, err);
|
||||
this.#sendPayload(
|
||||
windowContext,
|
||||
{
|
||||
type: 'error_response',
|
||||
error: `${err}`,
|
||||
},
|
||||
replyId,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// No matches, so send back an empty response so the caller doesn't block forever
|
||||
this.#sendEmpty(windowContext, replyId);
|
||||
}
|
||||
|
||||
#pathMod() {
|
||||
return path.posix.join(this.#workerData.bootRequest.dir, 'build', 'index.js');
|
||||
}
|
||||
|
||||
#pathPkg() {
|
||||
return path.join(this.#workerData.bootRequest.dir, 'package.json');
|
||||
}
|
||||
|
||||
#unimportModule() {
|
||||
const id = require.resolve(this.#pathMod());
|
||||
delete require.cache[id];
|
||||
}
|
||||
|
||||
#importModule() {
|
||||
const id = require.resolve(this.#pathMod());
|
||||
delete require.cache[id];
|
||||
this.#mod = require(id).plugin;
|
||||
}
|
||||
|
||||
#buildEventToSend(
|
||||
windowContext: PluginWindowContext,
|
||||
payload: InternalEventPayload,
|
||||
replyId: string | null = null,
|
||||
): InternalEvent {
|
||||
return {
|
||||
pluginRefId: this.#workerData.pluginRefId,
|
||||
pluginName: path.basename(this.#workerData.bootRequest.dir),
|
||||
id: genId(),
|
||||
replyId,
|
||||
payload,
|
||||
windowContext,
|
||||
};
|
||||
}
|
||||
|
||||
#sendPayload(
|
||||
windowContext: PluginWindowContext,
|
||||
payload: InternalEventPayload,
|
||||
replyId: string | null,
|
||||
): string {
|
||||
const event = this.#buildEventToSend(windowContext, payload, replyId);
|
||||
this.#sendEvent(event);
|
||||
return event.id;
|
||||
}
|
||||
|
||||
#sendEvent(event: InternalEvent) {
|
||||
// if (event.payload.type !== 'empty_response') {
|
||||
// console.log('Sending event to app', this.#pkg.name, event.id, event.payload.type);
|
||||
// }
|
||||
this.#pluginToAppEvents.emit(event);
|
||||
}
|
||||
|
||||
#sendEmpty(windowContext: PluginWindowContext, replyId: string | null = null): string {
|
||||
return this.#sendPayload(windowContext, { type: 'empty_response' }, replyId);
|
||||
}
|
||||
|
||||
#sendAndWaitForReply<T extends Omit<InternalEventPayload, 'type'>>(
|
||||
windowContext: PluginWindowContext,
|
||||
payload: InternalEventPayload,
|
||||
): Promise<T> {
|
||||
// 1. Build event to send
|
||||
const eventToSend = this.#buildEventToSend(windowContext, payload, null);
|
||||
|
||||
// 2. Spawn listener in background
|
||||
const promise = new Promise<T>((resolve) => {
|
||||
const cb = (event: InternalEvent) => {
|
||||
if (event.replyId === eventToSend.id) {
|
||||
this.#appToPluginEvents.unlisten(cb); // Unlisten, now that we're done
|
||||
const { type: _, ...payload } = event.payload;
|
||||
resolve(payload as T);
|
||||
}
|
||||
};
|
||||
this.#appToPluginEvents.listen(cb);
|
||||
});
|
||||
|
||||
// 3. Send the event after we start listening (to prevent race)
|
||||
this.#sendEvent(eventToSend);
|
||||
|
||||
// 4. Return the listener promise
|
||||
return promise as unknown as Promise<T>;
|
||||
}
|
||||
|
||||
#sendAndListenForEvents(
|
||||
windowContext: PluginWindowContext,
|
||||
payload: InternalEventPayload,
|
||||
onEvent: (event: InternalEventPayload) => void,
|
||||
): void {
|
||||
// 1. Build event to send
|
||||
const eventToSend = this.#buildEventToSend(windowContext, payload, null);
|
||||
|
||||
// 2. Listen for replies in the background
|
||||
this.#appToPluginEvents.listen((event: InternalEvent) => {
|
||||
if (event.replyId === eventToSend.id) {
|
||||
onEvent(event.payload);
|
||||
}
|
||||
});
|
||||
|
||||
// 3. Send the event after we start listening (to prevent race)
|
||||
this.#sendEvent(eventToSend);
|
||||
}
|
||||
|
||||
#newCtx(event: InternalEvent): Context {
|
||||
return {
|
||||
clipboard: {
|
||||
copyText: async (text) => {
|
||||
await this.#sendAndWaitForReply(event.windowContext, {
|
||||
type: 'copy_text_request',
|
||||
text,
|
||||
});
|
||||
},
|
||||
},
|
||||
toast: {
|
||||
show: async (args) => {
|
||||
await this.#sendAndWaitForReply(event.windowContext, {
|
||||
type: 'show_toast_request',
|
||||
...args,
|
||||
});
|
||||
},
|
||||
},
|
||||
window: {
|
||||
openUrl: async ({ onNavigate, onClose, ...args }) => {
|
||||
args.label = args.label || `${Math.random()}`;
|
||||
const payload: InternalEventPayload = { type: 'open_window_request', ...args };
|
||||
const onEvent = (event: InternalEventPayload) => {
|
||||
if (event.type === 'window_navigate_event') {
|
||||
onNavigate?.(event);
|
||||
} else if (event.type === 'window_close_event') {
|
||||
onClose?.();
|
||||
}
|
||||
};
|
||||
this.#sendAndListenForEvents(event.windowContext, payload, onEvent);
|
||||
return {
|
||||
close: () => {
|
||||
const closePayload: InternalEventPayload = {
|
||||
type: 'close_window_request',
|
||||
label: args.label,
|
||||
};
|
||||
this.#sendPayload(event.windowContext, closePayload, null);
|
||||
},
|
||||
};
|
||||
},
|
||||
},
|
||||
prompt: {
|
||||
text: async (args) => {
|
||||
const reply: PromptTextResponse = await this.#sendAndWaitForReply(event.windowContext, {
|
||||
type: 'prompt_text_request',
|
||||
...args,
|
||||
});
|
||||
return reply.value;
|
||||
},
|
||||
},
|
||||
httpResponse: {
|
||||
find: async (args) => {
|
||||
const payload = {
|
||||
type: 'find_http_responses_request',
|
||||
...args,
|
||||
} as const;
|
||||
const { httpResponses } = await this.#sendAndWaitForReply<FindHttpResponsesResponse>(
|
||||
event.windowContext,
|
||||
payload,
|
||||
);
|
||||
return httpResponses;
|
||||
},
|
||||
},
|
||||
httpRequest: {
|
||||
getById: async (args) => {
|
||||
const payload = {
|
||||
type: 'get_http_request_by_id_request',
|
||||
...args,
|
||||
} as const;
|
||||
const { httpRequest } = await this.#sendAndWaitForReply<GetHttpRequestByIdResponse>(
|
||||
event.windowContext,
|
||||
payload,
|
||||
);
|
||||
return httpRequest;
|
||||
},
|
||||
send: async (args) => {
|
||||
const payload = {
|
||||
type: 'send_http_request_request',
|
||||
...args,
|
||||
} as const;
|
||||
const { httpResponse } = await this.#sendAndWaitForReply<SendHttpRequestResponse>(
|
||||
event.windowContext,
|
||||
payload,
|
||||
);
|
||||
return httpResponse;
|
||||
},
|
||||
render: async (args) => {
|
||||
const payload = {
|
||||
type: 'render_http_request_request',
|
||||
...args,
|
||||
} as const;
|
||||
const { httpRequest } = await this.#sendAndWaitForReply<RenderHttpRequestResponse>(
|
||||
event.windowContext,
|
||||
payload,
|
||||
);
|
||||
return httpRequest;
|
||||
},
|
||||
},
|
||||
templates: {
|
||||
/**
|
||||
* Invoke Yaak's template engine to render a value. If the value is a nested type
|
||||
* (eg. object), it will be recursively rendered.
|
||||
*/
|
||||
render: async (args) => {
|
||||
const payload = { type: 'template_render_request', ...args } as const;
|
||||
const result = await this.#sendAndWaitForReply<TemplateRenderResponse>(
|
||||
event.windowContext,
|
||||
payload,
|
||||
);
|
||||
return result.data;
|
||||
},
|
||||
},
|
||||
store: {
|
||||
get: async <T>(key: string) => {
|
||||
const payload = { type: 'get_key_value_request', key } as const;
|
||||
const result = await this.#sendAndWaitForReply<GetKeyValueResponse>(
|
||||
event.windowContext,
|
||||
payload,
|
||||
);
|
||||
return result.value ? (JSON.parse(result.value) as T) : undefined;
|
||||
},
|
||||
set: async <T>(key: string, value: T) => {
|
||||
const valueStr = JSON.stringify(value);
|
||||
const payload: InternalEventPayload = {
|
||||
type: 'set_key_value_request',
|
||||
key,
|
||||
value: valueStr,
|
||||
};
|
||||
await this.#sendAndWaitForReply<GetKeyValueResponse>(event.windowContext, payload);
|
||||
},
|
||||
delete: async (key: string) => {
|
||||
const payload = { type: 'delete_key_value_request', key } as const;
|
||||
const result = await this.#sendAndWaitForReply<DeleteKeyValueResponse>(
|
||||
event.windowContext,
|
||||
payload,
|
||||
);
|
||||
return result.deleted;
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function genId(len = 5): string {
|
||||
const alphabet = '01234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
|
||||
let id = '';
|
||||
for (let i = 0; i < len; i++) {
|
||||
id += alphabet[Math.floor(Math.random() * alphabet.length)];
|
||||
}
|
||||
return id;
|
||||
}
|
||||
|
||||
/** Recursively apply form input defaults to a set of values */
|
||||
function applyFormInputDefaults(
|
||||
inputs: TemplateFunctionArg[],
|
||||
values: { [p: string]: JsonPrimitive | undefined },
|
||||
) {
|
||||
for (const input of inputs) {
|
||||
if ('inputs' in input) {
|
||||
applyFormInputDefaults(input.inputs ?? [], values);
|
||||
} else if ('defaultValue' in input && values[input.name] === undefined) {
|
||||
values[input.name] = input.defaultValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const watchedFiles: Record<string, Stats> = {};
|
||||
|
||||
/**
|
||||
* Watch a file and trigger callback on change.
|
||||
*
|
||||
* We also track the stat for each file because fs.watch() will
|
||||
* trigger a "change" event when the access date changes
|
||||
*/
|
||||
function watchFile(filepath: string, cb: (filepath: string) => void) {
|
||||
watch(filepath, () => {
|
||||
const stat = statSync(filepath);
|
||||
if (stat.mtimeMs !== watchedFiles[filepath]?.mtimeMs) {
|
||||
cb(filepath);
|
||||
}
|
||||
watchedFiles[filepath] = stat;
|
||||
});
|
||||
}
|
||||
@@ -8,7 +8,7 @@ if (!port) {
  throw new Error('Plugin runtime missing PORT')
}

const events = new EventChannel();
const pluginToAppEvents = new EventChannel();
const plugins: Record<string, PluginHandle> = {};

const ws = new WebSocket(`ws://localhost:${port}`);

@@ -25,7 +25,7 @@ ws.on('error', (err: any) => console.error('Plugin runtime websocket error', err
ws.on('close', (code: number) => console.log('Plugin runtime websocket closed', code));

// Listen for incoming events from plugins
events.listen((e) => {
pluginToAppEvents.listen((e) => {
  const eventStr = JSON.stringify(e);
  ws.send(eventStr);
});

@@ -34,7 +34,7 @@ async function handleIncoming(msg: string) {
  const pluginEvent: InternalEvent = JSON.parse(msg);
  // Handle special event to bootstrap plugin
  if (pluginEvent.payload.type === 'boot_request') {
    const plugin = new PluginHandle(pluginEvent.pluginRefId, pluginEvent.payload, events);
    const plugin = new PluginHandle(pluginEvent.pluginRefId, pluginEvent.payload, pluginToAppEvents);
    plugins[pluginEvent.pluginRefId] = plugin;
  }

@@ -46,7 +46,7 @@
  }

  if (pluginEvent.payload.type === 'terminate_request') {
    await plugin.terminate();
    plugin.terminate();
    console.log('Terminated plugin worker', pluginEvent.pluginRefId);
    delete plugins[pluginEvent.pluginRefId];
  }
@@ -1,568 +0,0 @@
|
||||
// OAuth 2.0 spec -> https://datatracker.ietf.org/doc/html/rfc6749
|
||||
|
||||
import type {
|
||||
BootRequest,
|
||||
Context,
|
||||
DeleteKeyValueResponse,
|
||||
FindHttpResponsesResponse,
|
||||
FormInput,
|
||||
GetHttpRequestByIdResponse,
|
||||
GetKeyValueResponse,
|
||||
HttpAuthenticationAction,
|
||||
HttpRequestAction,
|
||||
InternalEvent,
|
||||
InternalEventPayload,
|
||||
JsonPrimitive,
|
||||
PluginDefinition,
|
||||
PromptTextResponse,
|
||||
RenderHttpRequestResponse,
|
||||
SendHttpRequestResponse,
|
||||
TemplateFunction,
|
||||
TemplateRenderResponse,
|
||||
WindowContext,
|
||||
} from '@yaakapp/api';
|
||||
import * as console from 'node:console';
|
||||
import type { Stats } from 'node:fs';
|
||||
import { readFileSync, statSync, watch } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import * as util from 'node:util';
|
||||
import { parentPort as nullableParentPort, workerData } from 'node:worker_threads';
|
||||
import { interceptStdout } from './interceptStdout';
|
||||
import { migrateTemplateFunctionSelectOptions } from './migrations';
|
||||
|
||||
if (nullableParentPort == null) {
|
||||
throw new Error('Worker does not have access to parentPort');
|
||||
}
|
||||
|
||||
const parentPort = nullableParentPort;
|
||||
|
||||
export interface PluginWorkerData {
|
||||
bootRequest: BootRequest;
|
||||
pluginRefId: string;
|
||||
}
|
||||
|
||||
function initialize(workerData: PluginWorkerData) {
|
||||
const {
|
||||
bootRequest: { dir: pluginDir, watch: enableWatch },
|
||||
pluginRefId,
|
||||
}: PluginWorkerData = workerData;
|
||||
|
||||
const pathPkg = path.join(pluginDir, 'package.json');
|
||||
const pathMod = path.posix.join(pluginDir, 'build', 'index.js');
|
||||
|
||||
const pkg = JSON.parse(readFileSync(pathPkg, 'utf8'));
|
||||
|
||||
prefixStdout(`[plugin][${pkg.name}] %s`);
|
||||
|
||||
function buildEventToSend(
|
||||
windowContext: WindowContext,
|
||||
payload: InternalEventPayload,
|
||||
replyId: string | null = null,
|
||||
): InternalEvent {
|
||||
return {
|
||||
pluginRefId,
|
||||
pluginName: path.basename(pluginDir),
|
||||
id: genId(),
|
||||
replyId,
|
||||
payload,
|
||||
windowContext,
|
||||
};
|
||||
}
|
||||
|
||||
function sendEmpty(windowContext: WindowContext, replyId: string | null = null): string {
|
||||
return sendPayload(windowContext, { type: 'empty_response' }, replyId);
|
||||
}
|
||||
|
||||
function sendPayload(
|
||||
windowContext: WindowContext,
|
||||
payload: InternalEventPayload,
|
||||
replyId: string | null,
|
||||
): string {
|
||||
const event = buildEventToSend(windowContext, payload, replyId);
|
||||
sendEvent(event);
|
||||
return event.id;
|
||||
}
|
||||
|
||||
function sendEvent(event: InternalEvent) {
|
||||
if (event.payload.type !== 'empty_response') {
|
||||
console.log('Sending event to app', event.id, event.payload.type);
|
||||
}
|
||||
parentPort.postMessage(event);
|
||||
}
|
||||
|
||||
function sendAndWaitForReply<T extends Omit<InternalEventPayload, 'type'>>(
|
||||
windowContext: WindowContext,
|
||||
payload: InternalEventPayload,
|
||||
): Promise<T> {
|
||||
// 1. Build event to send
|
||||
const eventToSend = buildEventToSend(windowContext, payload, null);
|
||||
|
||||
// 2. Spawn listener in background
|
||||
    const promise = new Promise<T>((resolve) => {
      const cb = (event: InternalEvent) => {
        if (event.replyId === eventToSend.id) {
          parentPort.off('message', cb); // Unlisten, now that we're done
          const { type: _, ...payload } = event.payload;
          resolve(payload as T);
        }
      };
      parentPort.on('message', cb);
    });

    // 3. Send the event after we start listening (to prevent race)
    sendEvent(eventToSend);

    // 4. Return the listener promise
    return promise as unknown as Promise<T>;
  }

  function sendAndListenForEvents(
    windowContext: WindowContext,
    payload: InternalEventPayload,
    onEvent: (event: InternalEventPayload) => void,
  ): void {
    // 1. Build event to send
    const eventToSend = buildEventToSend(windowContext, payload, null);

    // 2. Listen for replies in the background
    parentPort.on('message', (event: InternalEvent) => {
      if (event.replyId === eventToSend.id) {
        onEvent(event.payload);
      }
    });

    // 3. Send the event after we start listening (to prevent race)
    sendEvent(eventToSend);
  }

  // Reload plugin if the JS or package.json changes
  const windowContextNone: WindowContext = { type: 'none' };
  const fileChangeCallback = async () => {
    importModule();
    return sendPayload(windowContextNone, { type: 'reload_response' }, null);
  };

  if (enableWatch) {
    watchFile(pathMod, fileChangeCallback);
    watchFile(pathPkg, fileChangeCallback);
  }

  const newCtx = (event: InternalEvent): Context => ({
    clipboard: {
      async copyText(text) {
        await sendAndWaitForReply(event.windowContext, {
          type: 'copy_text_request',
          text,
        });
      },
    },
    toast: {
      async show(args) {
        await sendAndWaitForReply(event.windowContext, {
          type: 'show_toast_request',
          ...args,
        });
      },
    },
    window: {
      async openUrl({ onNavigate, ...args }) {
        args.label = args.label || `${Math.random()}`;
        const payload: InternalEventPayload = { type: 'open_window_request', ...args };
        const onEvent = (event: InternalEventPayload) => {
          if (event.type === 'window_navigate_event') {
            onNavigate?.(event);
          }
        };
        sendAndListenForEvents(event.windowContext, payload, onEvent);
        return {
          close: () => {
            const closePayload: InternalEventPayload = {
              type: 'close_window_request',
              label: args.label,
            };
            sendPayload(event.windowContext, closePayload, null);
          },
        };
      },
    },
    prompt: {
      async text(args) {
        const reply: PromptTextResponse = await sendAndWaitForReply(event.windowContext, {
          type: 'prompt_text_request',
          ...args,
        });
        return reply.value;
      },
    },
    httpResponse: {
      async find(args) {
        const payload = {
          type: 'find_http_responses_request',
          ...args,
        } as const;
        const { httpResponses } = await sendAndWaitForReply<FindHttpResponsesResponse>(
          event.windowContext,
          payload,
        );
        return httpResponses;
      },
    },
    httpRequest: {
      async getById(args) {
        const payload = {
          type: 'get_http_request_by_id_request',
          ...args,
        } as const;
        const { httpRequest } = await sendAndWaitForReply<GetHttpRequestByIdResponse>(
          event.windowContext,
          payload,
        );
        return httpRequest;
      },
      async send(args) {
        const payload = {
          type: 'send_http_request_request',
          ...args,
        } as const;
        const { httpResponse } = await sendAndWaitForReply<SendHttpRequestResponse>(
          event.windowContext,
          payload,
        );
        return httpResponse;
      },
      async render(args) {
        const payload = {
          type: 'render_http_request_request',
          ...args,
        } as const;
        const { httpRequest } = await sendAndWaitForReply<RenderHttpRequestResponse>(
          event.windowContext,
          payload,
        );
        return httpRequest;
      },
    },
    templates: {
      /**
       * Invoke Yaak's template engine to render a value. If the value is a nested type
       * (eg. object), it will be recursively rendered.
       */
      async render(args) {
        const payload = { type: 'template_render_request', ...args } as const;
        const result = await sendAndWaitForReply<TemplateRenderResponse>(
          event.windowContext,
          payload,
        );
        return result.data;
      },
    },
    store: {
      async get<T>(key: string) {
        const payload = { type: 'get_key_value_request', key } as const;
        const result = await sendAndWaitForReply<GetKeyValueResponse>(event.windowContext, payload);
        return result.value ? (JSON.parse(result.value) as T) : undefined;
      },
      async set<T>(key: string, value: T) {
        const valueStr = JSON.stringify(value);
        const payload: InternalEventPayload = {
          type: 'set_key_value_request',
          key,
          value: valueStr,
        };
        await sendAndWaitForReply<GetKeyValueResponse>(event.windowContext, payload);
      },
      async delete(key: string) {
        const payload = { type: 'delete_key_value_request', key } as const;
        const result = await sendAndWaitForReply<DeleteKeyValueResponse>(
          event.windowContext,
          payload,
        );
        return result.deleted;
      },
    },
  });

  let plug: PluginDefinition | null = null;

  function importModule() {
    const id = require.resolve(pathMod);
    delete require.cache[id];
    plug = require(id).plugin;
  }

  importModule();

  // Message comes into the plugin to be processed
  parentPort.on('message', async (event: InternalEvent) => {
    const ctx = newCtx(event);
    const { windowContext, payload, id: replyId } = event;
    try {
      if (payload.type === 'boot_request') {
        // console.log('Plugin initialized', pkg.name, { capabilities, enableWatch });
        const payload: InternalEventPayload = {
          type: 'boot_response',
          name: pkg.name,
          version: pkg.version,
        };
        sendPayload(windowContext, payload, replyId);
        return;
      }

      if (payload.type === 'terminate_request') {
        const payload: InternalEventPayload = {
          type: 'terminate_response',
        };
        sendPayload(windowContext, payload, replyId);
        return;
      }

      if (payload.type === 'import_request' && typeof plug?.importer?.onImport === 'function') {
        const reply = await plug.importer.onImport(ctx, {
          text: payload.content,
        });
        if (reply != null) {
          const replyPayload: InternalEventPayload = {
            type: 'import_response',
            // deno-lint-ignore no-explicit-any
            resources: reply.resources as any,
          };
          sendPayload(windowContext, replyPayload, replyId);
          return;
        } else {
          // Continue, to send back an empty reply
        }
      }

      if (payload.type === 'filter_request' && typeof plug?.filter?.onFilter === 'function') {
        const reply = await plug.filter.onFilter(ctx, {
          filter: payload.filter,
          payload: payload.content,
          mimeType: payload.type,
        });
        const replyPayload: InternalEventPayload = {
          type: 'filter_response',
          content: reply.filtered,
        };
        sendPayload(windowContext, replyPayload, replyId);
        return;
      }

      if (
        payload.type === 'get_http_request_actions_request' &&
        Array.isArray(plug?.httpRequestActions)
      ) {
        const reply: HttpRequestAction[] = plug.httpRequestActions.map((a) => ({
          ...a,
          // Add everything except onSelect
          onSelect: undefined,
        }));
        const replyPayload: InternalEventPayload = {
          type: 'get_http_request_actions_response',
          pluginRefId,
          actions: reply,
        };
        sendPayload(windowContext, replyPayload, replyId);
        return;
      }

      if (
        payload.type === 'get_template_functions_request' &&
        Array.isArray(plug?.templateFunctions)
      ) {
        const reply: TemplateFunction[] = plug.templateFunctions.map((templateFunction) => {
          return {
            ...migrateTemplateFunctionSelectOptions(templateFunction),
            // Add everything except render
            onRender: undefined,
          };
        });
        const replyPayload: InternalEventPayload = {
          type: 'get_template_functions_response',
          pluginRefId,
          functions: reply,
        };
        sendPayload(windowContext, replyPayload, replyId);
        return;
      }

      if (payload.type === 'get_http_authentication_summary_request' && plug?.authentication) {
        const { name, shortLabel, label } = plug.authentication;
        const replyPayload: InternalEventPayload = {
          type: 'get_http_authentication_summary_response',
          name,
          label,
          shortLabel,
        };

        sendPayload(windowContext, replyPayload, replyId);
        return;
      }

      if (payload.type === 'get_http_authentication_config_request' && plug?.authentication) {
        const { args, actions } = plug.authentication;
        const resolvedArgs: FormInput[] = [];
        for (let i = 0; i < args.length; i++) {
          let v = args[i];
          if ('dynamic' in v) {
            const dynamicAttrs = await v.dynamic(ctx, payload);
            const { dynamic, ...other } = v;
            resolvedArgs.push({ ...other, ...dynamicAttrs } as FormInput);
          } else {
            resolvedArgs.push(v);
          }
        }
        const resolvedActions: HttpAuthenticationAction[] = [];
        for (const { onSelect, ...action } of actions ?? []) {
          resolvedActions.push(action);
        }

        const replyPayload: InternalEventPayload = {
          type: 'get_http_authentication_config_response',
          args: resolvedArgs,
          actions: resolvedActions,
          pluginRefId,
        };

        sendPayload(windowContext, replyPayload, replyId);
        return;
      }

      if (payload.type === 'call_http_authentication_request' && plug?.authentication) {
        const auth = plug.authentication;
        if (typeof auth?.onApply === 'function') {
          applyFormInputDefaults(auth.args, payload.values);
          const result = await auth.onApply(ctx, payload);
          sendPayload(
            windowContext,
            {
              type: 'call_http_authentication_response',
              setHeaders: result.setHeaders,
            },
            replyId,
          );
          return;
        }
      }

      if (
        payload.type === 'call_http_authentication_action_request' &&
        plug?.authentication != null
      ) {
        const action = plug.authentication.actions?.[payload.index];
        if (typeof action?.onSelect === 'function') {
          await action.onSelect(ctx, payload.args);
          sendEmpty(windowContext, replyId);
          return;
        }
      }

      if (
        payload.type === 'call_http_request_action_request' &&
        Array.isArray(plug?.httpRequestActions)
      ) {
        const action = plug.httpRequestActions[payload.index];
        if (typeof action?.onSelect === 'function') {
          await action.onSelect(ctx, payload.args);
          sendEmpty(windowContext, replyId);
          return;
        }
      }

      if (
        payload.type === 'call_template_function_request' &&
        Array.isArray(plug?.templateFunctions)
      ) {
        const action = plug.templateFunctions.find((a) => a.name === payload.name);
        if (typeof action?.onRender === 'function') {
          applyFormInputDefaults(action.args, payload.args.values);
          const result = await action.onRender(ctx, payload.args);
          sendPayload(
            windowContext,
            {
              type: 'call_template_function_response',
              value: result ?? null,
            },
            replyId,
          );
          return;
        }
      }

      if (payload.type === 'reload_request') {
        importModule();
      }
    } catch (err) {
      console.log('Plugin call threw exception', payload.type, err);
      sendPayload(
        windowContext,
        {
          type: 'error_response',
          error: `${err}`,
        },
        replyId,
      );
      return;
    }

    // No matches, so send back an empty response so the caller doesn't block forever
    sendEmpty(windowContext, replyId);
  });
}

initialize(workerData);

function genId(len = 5): string {
  const alphabet = '01234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
  let id = '';
  for (let i = 0; i < len; i++) {
    id += alphabet[Math.floor(Math.random() * alphabet.length)];
  }
  return id;
}

function prefixStdout(s: string) {
  if (!s.includes('%s')) {
    throw new Error('Console prefix must contain a "%s" replacer');
  }
  interceptStdout((text: string) => {
    const lines = text.split(/\n/);
    let newText = '';
    for (let i = 0; i < lines.length; i++) {
      if (lines[i] == '') continue;
      newText += util.format(s, lines[i]) + '\n';
    }
    return newText.trimEnd();
  });
}

const watchedFiles: Record<string, Stats> = {};

/**
 * Watch a file and trigger callback on change.
 *
 * We also track the stat for each file because fs.watch() will
 * trigger a "change" event when the access date changes
 */
function watchFile(filepath: string, cb: (filepath: string) => void) {
  watch(filepath, () => {
    const stat = statSync(filepath);
    if (stat.mtimeMs !== watchedFiles[filepath]?.mtimeMs) {
      cb(filepath);
    }
    watchedFiles[filepath] = stat;
  });
}

/** Recursively apply form input defaults to a set of values */
function applyFormInputDefaults(
  inputs: FormInput[],
  values: { [p: string]: JsonPrimitive | undefined },
) {
  for (const input of inputs) {
    if ('inputs' in input) {
      applyFormInputDefaults(input.inputs ?? [], values);
    } else if ('defaultValue' in input && values[input.name] === undefined) {
      values[input.name] = input.defaultValue;
    }
  }
}
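Note: the worker above loads a plugin module with require(id).plugin and routes each *_request event to the matching hook on that object. The following is a minimal illustrative sketch of such a plugin module, not part of this diff; the @yaakapp/api import path, the FormInput field names, and the toast argument shape are assumptions.

// Hypothetical plugin module (illustrative sketch only).
// The worker's call_template_function_request branch would invoke onRender(),
// while ctx.store and ctx.toast round-trip through the *_key_value_request and
// show_toast_request events handled above.
import type { Context, PluginDefinition } from '@yaakapp/api'; // assumed import path

export const plugin: PluginDefinition = {
  templateFunctions: [
    {
      name: 'cached_uuid',
      args: [{ type: 'text', name: 'key', label: 'Cache Key', defaultValue: 'default' }],
      async onRender(ctx: Context, { values }) {
        const storeKey = `uuid.${values.key}`;
        // get() / set() serialize through the worker's store handlers above
        let id = await ctx.store.get<string>(storeKey);
        if (id == null) {
          id = crypto.randomUUID();
          await ctx.store.set(storeKey, id);
          await ctx.toast.show({ message: `Generated new UUID for ${values.key}` });
        }
        return id;
      },
    },
  ],
};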
@@ -1,4 +1,4 @@
edition = "2018"
edition = "2024"

# Widths
chain_width = 100

@@ -9,12 +9,14 @@ const NODE_VERSION = 'v22.9.0';
// `${process.platform}_${process.arch}`
const MAC_ARM = 'darwin_arm64';
const MAC_X64 = 'darwin_x64';
const LNX_ARM = 'linux_arm64';
const LNX_X64 = 'linux_x64';
const WIN_X64 = 'win32_x64';

const URL_MAP = {
  [MAC_ARM]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-darwin-arm64.tar.gz`,
  [MAC_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-darwin-x64.tar.gz`,
  [LNX_ARM]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-linux-arm64.tar.gz`,
  [LNX_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-linux-x64.tar.gz`,
  [WIN_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-win-x64.zip`,
};
@@ -22,15 +24,17 @@ const URL_MAP = {
const SRC_BIN_MAP = {
  [MAC_ARM]: `node-${NODE_VERSION}-darwin-arm64/bin/node`,
  [MAC_X64]: `node-${NODE_VERSION}-darwin-x64/bin/node`,
  [LNX_ARM]: `node-${NODE_VERSION}-linux-arm64/bin/node`,
  [LNX_X64]: `node-${NODE_VERSION}-linux-x64/bin/node`,
  [WIN_X64]: `node-${NODE_VERSION}-win-x64/node.exe`,
};

const DST_BIN_MAP = {
  darwin_arm64: 'yaaknode-aarch64-apple-darwin',
  darwin_x64: 'yaaknode-x86_64-apple-darwin',
  linux_x64: 'yaaknode-x86_64-unknown-linux-gnu',
  win32_x64: 'yaaknode-x86_64-pc-windows-msvc.exe',
  [MAC_ARM]: 'yaaknode-aarch64-apple-darwin',
  [MAC_X64]: 'yaaknode-x86_64-apple-darwin',
  [LNX_ARM]: 'yaaknode-aarch64-unknown-linux-gnu',
  [LNX_X64]: 'yaaknode-x86_64-unknown-linux-gnu',
  [WIN_X64]: 'yaaknode-x86_64-pc-windows-msvc.exe',
};

const key = `${process.platform}_${process.env.YAAK_TARGET_ARCH ?? process.arch}`;

@@ -9,12 +9,14 @@ const VERSION = '28.3';
// `${process.platform}_${process.arch}`
const MAC_ARM = 'darwin_arm64';
const MAC_X64 = 'darwin_x64';
const LNX_ARM = 'linux_arm64';
const LNX_X64 = 'linux_x64';
const WIN_X64 = 'win32_x64';

const URL_MAP = {
  [MAC_ARM]: `https://github.com/protocolbuffers/protobuf/releases/download/v${VERSION}/protoc-${VERSION}-osx-aarch_64.zip`,
  [MAC_X64]: `https://github.com/protocolbuffers/protobuf/releases/download/v${VERSION}/protoc-${VERSION}-osx-x86_64.zip`,
  [LNX_ARM]: `https://github.com/protocolbuffers/protobuf/releases/download/v${VERSION}/protoc-${VERSION}-linux-aarch_64.zip`,
  [LNX_X64]: `https://github.com/protocolbuffers/protobuf/releases/download/v${VERSION}/protoc-${VERSION}-linux-x86_64.zip`,
  [WIN_X64]: `https://github.com/protocolbuffers/protobuf/releases/download/v${VERSION}/protoc-${VERSION}-win64.zip`,
};
@@ -22,6 +24,7 @@ const URL_MAP = {
const SRC_BIN_MAP = {
  [MAC_ARM]: 'bin/protoc',
  [MAC_X64]: 'bin/protoc',
  [LNX_ARM]: 'bin/protoc',
  [LNX_X64]: 'bin/protoc',
  [WIN_X64]: 'bin/protoc.exe',
};
@@ -29,6 +32,7 @@ const SRC_BIN_MAP = {
const DST_BIN_MAP = {
  [MAC_ARM]: 'yaakprotoc-aarch64-apple-darwin',
  [MAC_X64]: 'yaakprotoc-x86_64-apple-darwin',
  [LNX_ARM]: 'yaakprotoc-aarch64-unknown-linux-gnu',
  [LNX_X64]: 'yaakprotoc-x86_64-unknown-linux-gnu',
  [WIN_X64]: 'yaakprotoc-x86_64-pc-windows-msvc.exe',
};

5 src-tauri/.gitignore (vendored)
@@ -4,3 +4,8 @@ target/

vendored/*
!vendored/plugins

gen/*

**/permissions/autogenerated
**/permissions/schemas

1556 src-tauri/Cargo.lock (generated): file diff suppressed because it is too large
@@ -1,9 +1,11 @@
|
||||
[workspace]
|
||||
members = [
|
||||
"yaak-crypto",
|
||||
"yaak-git",
|
||||
"yaak-grpc",
|
||||
"yaak-http",
|
||||
"yaak-license",
|
||||
"yaak-mac-window",
|
||||
"yaak-models",
|
||||
"yaak-plugins",
|
||||
"yaak-sse",
|
||||
@@ -15,7 +17,7 @@ members = [
|
||||
[package]
|
||||
name = "yaak-app"
|
||||
version = "0.0.0"
|
||||
edition = "2021"
|
||||
edition = "2024"
|
||||
authors = ["Gregory Schier"]
|
||||
publish = false
|
||||
|
||||
@@ -31,52 +33,48 @@ strip = true # Automatically strip symbols from the binary.
|
||||
cargo-clippy = []
|
||||
|
||||
[build-dependencies]
|
||||
tauri-build = { version = "2.0.5", features = [] }
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
objc = "0.2.7"
|
||||
cocoa = "0.26.0"
|
||||
tauri-build = { version = "2.1.1", features = [] }
|
||||
|
||||
[target.'cfg(target_os = "linux")'.dependencies]
|
||||
openssl-sys = { version = "0.9.105", features = ["vendored"] } # For Ubuntu installation to work
|
||||
|
||||
[dependencies]
|
||||
chrono = { version = "0.4.31", features = ["serde"] }
|
||||
datetime = "0.5.2"
|
||||
encoding_rs = "0.8.35"
|
||||
eventsource-client = { git = "https://github.com/yaakapp/rust-eventsource-client", version = "0.14.0" }
|
||||
hex_color = "3.0.0"
|
||||
http = { version = "1.2.0", default-features = false }
|
||||
log = "0.4.21"
|
||||
log = "0.4.27"
|
||||
md5 = "0.7.0"
|
||||
mime_guess = "2.0.5"
|
||||
rand = "0.9.0"
|
||||
regex = "1.10.2"
|
||||
reqwest = { workspace = true, features = ["multipart", "cookies", "gzip", "brotli", "deflate", "json", "rustls-tls-manual-roots-no-provider"] }
|
||||
reqwest_cookie_store = "0.8.0"
|
||||
rustls = { version = "0.23.22", default-features = false, features = ["custom-provider", "ring"] }
|
||||
rustls-platform-verifier = "0.5.0"
|
||||
rustls = { version = "0.23.25", default-features = false, features = ["custom-provider", "ring"] }
|
||||
rustls-platform-verifier = "0.5.1"
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true, features = ["raw_value"] }
|
||||
tauri = { workspace = true, features = ["devtools", "protocol-asset"] }
|
||||
tauri-plugin-clipboard-manager = "2.2.1"
|
||||
tauri-plugin-clipboard-manager = "2.2.2"
|
||||
tauri-plugin-dialog = "2.2.0"
|
||||
tauri-plugin-fs = "2.2.0"
|
||||
tauri-plugin-log = { version = "2.2.1", features = ["colored"] }
|
||||
tauri-plugin-opener = "2.2.5"
|
||||
tauri-plugin-os = "2.2.0"
|
||||
tauri-plugin-log = { version = "2.3.1", features = ["colored"] }
|
||||
tauri-plugin-opener = "2.2.6"
|
||||
tauri-plugin-os = "2.2.1"
|
||||
tauri-plugin-shell = { workspace = true }
|
||||
tauri-plugin-single-instance = "2.2.1"
|
||||
tauri-plugin-updater = "2.4.0"
|
||||
tauri-plugin-single-instance = "2.2.2"
|
||||
tauri-plugin-updater = "2.6.1"
|
||||
tauri-plugin-window-state = "2.2.1"
|
||||
tokio = { version = "1.43.0", features = ["sync"] }
|
||||
thiserror = { workspace = true }
|
||||
tokio = { workspace = true, features = ["sync"] }
|
||||
tokio-stream = "0.1.17"
|
||||
ts-rs = { workspace = true }
|
||||
uuid = "1.12.1"
|
||||
yaak-common = { workspace = true }
|
||||
yaak-crypto = { workspace = true }
|
||||
yaak-git = { path = "yaak-git" }
|
||||
yaak-grpc = { path = "yaak-grpc" }
|
||||
yaak-http = { workspace = true }
|
||||
yaak-license = { path = "yaak-license" }
|
||||
yaak-mac-window = { path = "yaak-mac-window" }
|
||||
yaak-models = { workspace = true }
|
||||
yaak-plugins = { workspace = true }
|
||||
yaak-sse = { workspace = true }
|
||||
@@ -85,17 +83,20 @@ yaak-templates = { workspace = true }
|
||||
yaak-ws = { path = "yaak-ws" }
|
||||
|
||||
[workspace.dependencies]
|
||||
reqwest = "0.12.12"
|
||||
serde = "1.0.215"
|
||||
serde_json = "1.0.132"
|
||||
tauri = "2.2.5"
|
||||
tauri-plugin = "2.0.4"
|
||||
tauri-plugin-shell = "2.2.0"
|
||||
thiserror = "2.0.3"
|
||||
ts-rs = "10.0.0"
|
||||
reqwest = "0.12.15"
|
||||
serde = "1.0.219"
|
||||
serde_json = "1.0.140"
|
||||
tauri = "2.4.1"
|
||||
tauri-plugin = "2.1.1"
|
||||
tauri-plugin-shell = "2.2.1"
|
||||
tokio = "1.44.2"
|
||||
thiserror = "2.0.12"
|
||||
ts-rs = "10.1.0"
|
||||
yaak-common = { path = "yaak-common" }
|
||||
yaak-http = { path = "yaak-http" }
|
||||
yaak-models = { path = "yaak-models" }
|
||||
yaak-plugins = { path = "yaak-plugins" }
|
||||
yaak-sync = { path = "yaak-sync" }
|
||||
yaak-sse = { path = "yaak-sse" }
|
||||
yaak-sync = { path = "yaak-sync" }
|
||||
yaak-templates = { path = "yaak-templates" }
|
||||
yaak-crypto = { path = "yaak-crypto" }
|
||||
|
||||
@@ -1,5 +0,0 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type AnalyticsAction = "cancel" | "click" | "commit" | "create" | "delete" | "delete_many" | "duplicate" | "error" | "export" | "hide" | "import" | "launch" | "launch_first" | "launch_update" | "send" | "show" | "toggle" | "update" | "upsert";

export type AnalyticsResource = "app" | "appearance" | "button" | "checkbox" | "cookie_jar" | "dialog" | "environment" | "folder" | "grpc_connection" | "grpc_event" | "grpc_request" | "http_request" | "http_response" | "key_value" | "link" | "mutation" | "plugin" | "select" | "setting" | "sidebar" | "tab" | "theme" | "websocket_connection" | "websocket_event" | "websocket_request" | "workspace";
@@ -7,6 +7,7 @@
    "*"
  ],
  "permissions": [
    "core:app:allow-identifier",
    "core:event:allow-emit",
    "core:event:allow-listen",
    "core:event:allow-unlisten",
@@ -50,8 +51,11 @@
    "opener:allow-open-url",
    "opener:allow-reveal-item-in-dir",
    "shell:allow-open",
    "yaak-license:default",
    "yaak-crypto:default",
    "yaak-git:default",
    "yaak-license:default",
    "yaak-mac-window:default",
    "yaak-models:default",
    "yaak-sync:default",
    "yaak-ws:default"
  ]

1 src-tauri/gen/schemas/acl-manifests.json (generated): file diff suppressed because one or more lines are too long
1 src-tauri/gen/schemas/capabilities.json (generated)
@@ -1 +0,0 @@
|
||||
{"main":{"identifier":"main","description":"Main permissions","local":true,"windows":["*"],"permissions":["core:event:allow-emit","core:event:allow-listen","core:event:allow-unlisten","os:allow-os-type","clipboard-manager:allow-clear","clipboard-manager:allow-write-text","clipboard-manager:allow-read-text","dialog:allow-open","dialog:allow-save","fs:allow-read-dir","fs:allow-read-file","fs:allow-read-text-file",{"identifier":"fs:scope","allow":[{"path":"$APPDATA"},{"path":"$APPDATA/**"}]},"clipboard-manager:allow-read-text","clipboard-manager:allow-write-text","core:webview:allow-set-webview-zoom","core:window:allow-close","core:window:allow-internal-toggle-maximize","core:window:allow-is-fullscreen","core:window:allow-is-maximized","core:window:allow-maximize","core:window:allow-minimize","core:window:allow-set-decorations","core:window:allow-set-title","core:window:allow-show","core:window:allow-start-dragging","core:window:allow-theme","core:window:allow-unmaximize","opener:allow-default-urls","opener:allow-open-path","opener:allow-open-url","opener:allow-reveal-item-in-dir","shell:allow-open","yaak-license:default","yaak-git:default","yaak-sync:default","yaak-ws:default"]}}
|
||||
5943 src-tauri/gen/schemas/desktop-schema.json (generated): file diff suppressed because it is too large
5643 src-tauri/gen/schemas/linux-schema.json (generated): file diff suppressed because it is too large
5943 src-tauri/gen/schemas/macOS-schema.json (generated): file diff suppressed because it is too large
5643 src-tauri/gen/schemas/windows-schema.json (generated): file diff suppressed because it is too large
@@ -0,0 +1,2 @@
ALTER TABLE settings
    ADD COLUMN hide_window_controls BOOLEAN DEFAULT FALSE NOT NULL;
43 src-tauri/migrations/20250326193143_key-value-id.sql (new file)
@@ -0,0 +1,43 @@
-- 1. Create the new table with `id` as the primary key
CREATE TABLE key_values_new
(
    id         TEXT PRIMARY KEY,
    model      TEXT     DEFAULT 'key_value' NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    deleted_at DATETIME,
    namespace  TEXT NOT NULL,
    key        TEXT NOT NULL,
    value      TEXT NOT NULL
);

-- 2. Copy data from the old table
INSERT INTO key_values_new (id, model, created_at, updated_at, deleted_at, namespace, key, value)
SELECT (
           -- This is the best way to generate a random string in SQLite, apparently
           'kv_' || SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
           SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
           SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
           SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
           SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
           SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
           SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
           SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
           SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
           SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1)
       ) AS id,
       model,
       created_at,
       updated_at,
       deleted_at,
       namespace,
       key,
       value
FROM key_values;

-- 3. Drop the old table
DROP TABLE key_values;

-- 4. Rename the new table
ALTER TABLE key_values_new
    RENAME TO key_values;
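The chained SUBSTR/RANDOM expression above simply builds a 'kv_'-prefixed ID from ten random characters of a 57-character alphabet. As a rough illustration only (not part of the migration), the same shape in TypeScript:

// Illustrative sketch: mirrors what the SQL id expression above computes per row.
const KV_ID_ALPHABET = 'abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789'; // 57 chars

function newKeyValueId(): string {
  let id = 'kv_';
  for (let i = 0; i < 10; i++) {
    id += KV_ID_ALPHABET[Math.floor(Math.random() * KV_ID_ALPHABET.length)];
  }
  return id;
}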
1 src-tauri/migrations/20250401122407_encrypted-key.sql (new file)
@@ -0,0 +1 @@
ALTER TABLE workspace_metas ADD COLUMN encryption_key TEXT NULL DEFAULT NULL;
@@ -0,0 +1 @@
ALTER TABLE workspaces ADD COLUMN encryption_key_challenge TEXT NULL;
245 src-tauri/migrations/20250424152740_remove-fks.sql (new file)
@@ -0,0 +1,245 @@
|
||||
-- NOTE: SQLite does not support dropping foreign keys, so we need to create new
|
||||
-- tables and copy data instead. To prevent cascade deletes from wrecking stuff,
|
||||
-- we start with the leaf tables and finish with the parent tables (eg. folder).
|
||||
|
||||
----------------------------
|
||||
-- Remove http request FK --
|
||||
----------------------------
|
||||
|
||||
CREATE TABLE http_requests_dg_tmp
|
||||
(
|
||||
id TEXT NOT NULL
|
||||
PRIMARY KEY,
|
||||
model TEXT DEFAULT 'http_request' NOT NULL,
|
||||
workspace_id TEXT NOT NULL
|
||||
REFERENCES workspaces
|
||||
ON DELETE CASCADE,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
deleted_at DATETIME,
|
||||
name TEXT NOT NULL,
|
||||
url TEXT NOT NULL,
|
||||
method TEXT NOT NULL,
|
||||
headers TEXT NOT NULL,
|
||||
body_type TEXT,
|
||||
sort_priority REAL DEFAULT 0 NOT NULL,
|
||||
authentication TEXT DEFAULT '{}' NOT NULL,
|
||||
authentication_type TEXT,
|
||||
folder_id TEXT,
|
||||
body TEXT DEFAULT '{}' NOT NULL,
|
||||
url_parameters TEXT DEFAULT '[]' NOT NULL,
|
||||
description TEXT DEFAULT '' NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO http_requests_dg_tmp(id, model, workspace_id, created_at, updated_at, deleted_at, name, url, method,
|
||||
headers, body_type, sort_priority, authentication, authentication_type, folder_id,
|
||||
body, url_parameters, description)
|
||||
SELECT id,
|
||||
model,
|
||||
workspace_id,
|
||||
created_at,
|
||||
updated_at,
|
||||
deleted_at,
|
||||
name,
|
||||
url,
|
||||
method,
|
||||
headers,
|
||||
body_type,
|
||||
sort_priority,
|
||||
authentication,
|
||||
authentication_type,
|
||||
folder_id,
|
||||
body,
|
||||
url_parameters,
|
||||
description
|
||||
FROM http_requests;
|
||||
|
||||
DROP TABLE http_requests;
|
||||
|
||||
ALTER TABLE http_requests_dg_tmp
|
||||
RENAME TO http_requests;
|
||||
|
||||
----------------------------
|
||||
-- Remove grpc request FK --
|
||||
----------------------------
|
||||
|
||||
CREATE TABLE grpc_requests_dg_tmp
|
||||
(
|
||||
id TEXT NOT NULL
|
||||
PRIMARY KEY,
|
||||
model TEXT DEFAULT 'grpc_request' NOT NULL,
|
||||
workspace_id TEXT NOT NULL
|
||||
REFERENCES workspaces
|
||||
ON DELETE CASCADE,
|
||||
folder_id TEXT,
|
||||
created_at DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
|
||||
updated_at DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
sort_priority REAL NOT NULL,
|
||||
url TEXT NOT NULL,
|
||||
service TEXT,
|
||||
method TEXT,
|
||||
message TEXT NOT NULL,
|
||||
authentication TEXT DEFAULT '{}' NOT NULL,
|
||||
authentication_type TEXT,
|
||||
metadata TEXT DEFAULT '[]' NOT NULL,
|
||||
description TEXT DEFAULT '' NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO grpc_requests_dg_tmp(id, model, workspace_id, folder_id, created_at, updated_at, name, sort_priority, url,
|
||||
service, method, message, authentication, authentication_type, metadata, description)
|
||||
SELECT id,
|
||||
model,
|
||||
workspace_id,
|
||||
folder_id,
|
||||
created_at,
|
||||
updated_at,
|
||||
name,
|
||||
sort_priority,
|
||||
url,
|
||||
service,
|
||||
method,
|
||||
message,
|
||||
authentication,
|
||||
authentication_type,
|
||||
metadata,
|
||||
description
|
||||
FROM grpc_requests;
|
||||
|
||||
DROP TABLE grpc_requests;
|
||||
|
||||
ALTER TABLE grpc_requests_dg_tmp
|
||||
RENAME TO grpc_requests;
|
||||
|
||||
---------------------------------
|
||||
-- Remove websocket request FK --
|
||||
---------------------------------
|
||||
|
||||
CREATE TABLE websocket_requests_dg_tmp
|
||||
(
|
||||
id TEXT NOT NULL
|
||||
PRIMARY KEY,
|
||||
model TEXT DEFAULT 'websocket_request' NOT NULL,
|
||||
workspace_id TEXT NOT NULL
|
||||
REFERENCES workspaces
|
||||
ON DELETE CASCADE,
|
||||
folder_id TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
deleted_at DATETIME,
|
||||
authentication TEXT DEFAULT '{}' NOT NULL,
|
||||
authentication_type TEXT,
|
||||
description TEXT NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
url TEXT NOT NULL,
|
||||
headers TEXT NOT NULL,
|
||||
message TEXT NOT NULL,
|
||||
sort_priority REAL NOT NULL,
|
||||
url_parameters TEXT DEFAULT '[]' NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO websocket_requests_dg_tmp(id, model, workspace_id, folder_id, created_at, updated_at, deleted_at,
|
||||
authentication, authentication_type, description, name, url, headers, message,
|
||||
sort_priority, url_parameters)
|
||||
SELECT id,
|
||||
model,
|
||||
workspace_id,
|
||||
folder_id,
|
||||
created_at,
|
||||
updated_at,
|
||||
deleted_at,
|
||||
authentication,
|
||||
authentication_type,
|
||||
description,
|
||||
name,
|
||||
url,
|
||||
headers,
|
||||
message,
|
||||
sort_priority,
|
||||
url_parameters
|
||||
FROM websocket_requests;
|
||||
|
||||
DROP TABLE websocket_requests;
|
||||
|
||||
ALTER TABLE websocket_requests_dg_tmp
|
||||
RENAME TO websocket_requests;
|
||||
|
||||
PRAGMA foreign_keys = ON;
|
||||
|
||||
---------------------------
|
||||
-- Remove environment FK --
|
||||
---------------------------
|
||||
|
||||
CREATE TABLE environments_dg_tmp
|
||||
(
|
||||
id TEXT NOT NULL
|
||||
PRIMARY KEY,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
deleted_at DATETIME,
|
||||
workspace_id TEXT NOT NULL
|
||||
REFERENCES workspaces
|
||||
ON DELETE CASCADE,
|
||||
name TEXT NOT NULL,
|
||||
variables DEFAULT '[]' NOT NULL,
|
||||
model TEXT DEFAULT 'environment',
|
||||
environment_id TEXT
|
||||
);
|
||||
|
||||
INSERT INTO environments_dg_tmp(id, created_at, updated_at, deleted_at, workspace_id, name, variables, model,
|
||||
environment_id)
|
||||
SELECT id,
|
||||
created_at,
|
||||
updated_at,
|
||||
deleted_at,
|
||||
workspace_id,
|
||||
name,
|
||||
variables,
|
||||
model,
|
||||
environment_id
|
||||
FROM environments;
|
||||
|
||||
DROP TABLE environments;
|
||||
|
||||
ALTER TABLE environments_dg_tmp
|
||||
RENAME TO environments;
|
||||
|
||||
----------------------
|
||||
-- Remove folder FK --
|
||||
----------------------
|
||||
|
||||
CREATE TABLE folders_dg_tmp
|
||||
(
|
||||
id TEXT NOT NULL
|
||||
PRIMARY KEY,
|
||||
model TEXT DEFAULT 'folder' NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
deleted_at DATETIME,
|
||||
workspace_id TEXT NOT NULL
|
||||
REFERENCES workspaces
|
||||
ON DELETE CASCADE,
|
||||
folder_id TEXT,
|
||||
name TEXT NOT NULL,
|
||||
sort_priority REAL DEFAULT 0 NOT NULL,
|
||||
description TEXT DEFAULT '' NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO folders_dg_tmp(id, model, created_at, updated_at, deleted_at, workspace_id, folder_id, name, sort_priority,
|
||||
description)
|
||||
SELECT id,
|
||||
model,
|
||||
created_at,
|
||||
updated_at,
|
||||
deleted_at,
|
||||
workspace_id,
|
||||
folder_id,
|
||||
name,
|
||||
sort_priority,
|
||||
description
|
||||
FROM folders;
|
||||
|
||||
DROP TABLE folders;
|
||||
|
||||
ALTER TABLE folders_dg_tmp
|
||||
RENAME TO folders;
|
||||
@@ -0,0 +1,11 @@
-- There used to be sync code that skipped over environments because we didn't
-- want to sync potentially insecure data. With encryption, it is now possible
-- to sync environments securely. However, there were already sync states in the
-- DB that marked environments as "Synced". Running the sync code on these envs
-- would mark them as deleted by FS (exist in SyncState but not on FS).
--
-- To undo this mess, we have this migration to delete all environment-related
-- sync states so we can sync from a clean slate.
DELETE
FROM sync_states
WHERE model_id LIKE 'ev_%';
20 src-tauri/migrations/20250508161145_public-environments.sql (new file)
@@ -0,0 +1,20 @@
-- Add a public column to represent whether an environment can be shared or exported
ALTER TABLE environments
    ADD COLUMN public BOOLEAN DEFAULT FALSE;

-- Add a base column to represent whether an environment is a base or sub environment. We used to
-- do this with environment_id, but we need a more flexible solution now that envs can be optionally
-- synced. E.g., it's now possible to only import a sub environment from a different client without
-- its base environment "parent."
ALTER TABLE environments
    ADD COLUMN base BOOLEAN DEFAULT FALSE;

-- SQLite doesn't support dynamic default values, so we update `base` based on the value of
-- environment_id.
UPDATE environments
SET base = TRUE
WHERE environment_id IS NULL;

-- Finally, we drop the old `environment_id` column that will no longer be used
ALTER TABLE environments
    DROP COLUMN environment_id;
@@ -1,247 +0,0 @@
|
||||
use std::fmt::Display;
|
||||
|
||||
use log::{debug, info};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::{json, Value};
|
||||
use tauri::{Manager, Runtime, WebviewWindow};
|
||||
|
||||
use ts_rs::TS;
|
||||
use yaak_models::queries::{
|
||||
generate_id, get_key_value_int, get_key_value_string, get_or_create_settings,
|
||||
set_key_value_int, set_key_value_string, UpdateSource,
|
||||
};
|
||||
|
||||
use crate::is_dev;
|
||||
|
||||
const NAMESPACE: &str = "analytics";
|
||||
const NUM_LAUNCHES_KEY: &str = "num_launches";
|
||||
|
||||
// serializable
|
||||
#[derive(Serialize, Deserialize, Debug, TS)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[ts(export, export_to = "analytics.ts")]
|
||||
pub enum AnalyticsResource {
|
||||
App,
|
||||
Appearance,
|
||||
Button,
|
||||
Checkbox,
|
||||
CookieJar,
|
||||
Dialog,
|
||||
Environment,
|
||||
Folder,
|
||||
GrpcConnection,
|
||||
GrpcEvent,
|
||||
GrpcRequest,
|
||||
HttpRequest,
|
||||
HttpResponse,
|
||||
KeyValue,
|
||||
Link,
|
||||
Mutation,
|
||||
Plugin,
|
||||
Select,
|
||||
Setting,
|
||||
Sidebar,
|
||||
Tab,
|
||||
Theme,
|
||||
WebsocketConnection,
|
||||
WebsocketEvent,
|
||||
WebsocketRequest,
|
||||
Workspace,
|
||||
}
|
||||
|
||||
impl AnalyticsResource {
|
||||
pub fn from_str(s: &str) -> serde_json::Result<AnalyticsResource> {
|
||||
serde_json::from_str(format!("\"{s}\"").as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for AnalyticsResource {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", serde_json::to_string(self).unwrap().replace("\"", ""))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, TS)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[ts(export, export_to = "analytics.ts")]
|
||||
pub enum AnalyticsAction {
|
||||
Cancel,
|
||||
Click,
|
||||
Commit,
|
||||
Create,
|
||||
Delete,
|
||||
DeleteMany,
|
||||
Duplicate,
|
||||
Error,
|
||||
Export,
|
||||
Hide,
|
||||
Import,
|
||||
Launch,
|
||||
LaunchFirst,
|
||||
LaunchUpdate,
|
||||
Send,
|
||||
Show,
|
||||
Toggle,
|
||||
Update,
|
||||
Upsert,
|
||||
}
|
||||
|
||||
impl AnalyticsAction {
|
||||
pub fn from_str(s: &str) -> serde_json::Result<AnalyticsAction> {
|
||||
serde_json::from_str(format!("\"{s}\"").as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for AnalyticsAction {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", serde_json::to_string(self).unwrap().replace("\"", ""))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct LaunchEventInfo {
|
||||
pub current_version: String,
|
||||
pub previous_version: String,
|
||||
pub launched_after_update: bool,
|
||||
pub num_launches: i32,
|
||||
}
|
||||
|
||||
pub async fn track_launch_event<R: Runtime>(w: &WebviewWindow<R>) -> LaunchEventInfo {
|
||||
let last_tracked_version_key = "last_tracked_version";
|
||||
|
||||
let mut info = LaunchEventInfo::default();
|
||||
|
||||
info.num_launches = get_num_launches(w).await + 1;
|
||||
info.previous_version = get_key_value_string(w, NAMESPACE, last_tracked_version_key, "").await;
|
||||
info.current_version = w.package_info().version.to_string();
|
||||
|
||||
if info.previous_version.is_empty() {
|
||||
track_event(w, AnalyticsResource::App, AnalyticsAction::LaunchFirst, None).await;
|
||||
} else {
|
||||
info.launched_after_update = info.current_version != info.previous_version;
|
||||
if info.launched_after_update {
|
||||
track_event(
|
||||
w,
|
||||
AnalyticsResource::App,
|
||||
AnalyticsAction::LaunchUpdate,
|
||||
Some(json!({ NUM_LAUNCHES_KEY: info.num_launches })),
|
||||
)
|
||||
.await;
|
||||
}
|
||||
};
|
||||
|
||||
// Track a launch event in all cases
|
||||
track_event(
|
||||
w,
|
||||
AnalyticsResource::App,
|
||||
AnalyticsAction::Launch,
|
||||
Some(json!({ NUM_LAUNCHES_KEY: info.num_launches })),
|
||||
)
|
||||
.await;
|
||||
|
||||
// Update key values
|
||||
|
||||
set_key_value_string(
|
||||
w,
|
||||
NAMESPACE,
|
||||
last_tracked_version_key,
|
||||
info.current_version.as_str(),
|
||||
&UpdateSource::Background,
|
||||
)
|
||||
.await;
|
||||
set_key_value_int(w, NAMESPACE, NUM_LAUNCHES_KEY, info.num_launches, &UpdateSource::Background)
|
||||
.await;
|
||||
|
||||
info
|
||||
}
|
||||
|
||||
pub async fn track_event<R: Runtime>(
|
||||
w: &WebviewWindow<R>,
|
||||
resource: AnalyticsResource,
|
||||
action: AnalyticsAction,
|
||||
attributes: Option<Value>,
|
||||
) {
|
||||
let id = get_id(w).await;
|
||||
let event = format!("{}.{}", resource, action);
|
||||
let attributes_json = attributes.unwrap_or("{}".to_string().into()).to_string();
|
||||
let info = w.app_handle().package_info();
|
||||
let tz = datetime::sys_timezone().unwrap_or("unknown".to_string());
|
||||
let site = match is_dev() {
|
||||
true => "site_TkHWjoXwZPq3HfhERb",
|
||||
false => "site_zOK0d7jeBy2TLxFCnZ",
|
||||
};
|
||||
let base_url = match is_dev() {
|
||||
true => "http://localhost:7194",
|
||||
false => "https://t.yaak.app",
|
||||
};
|
||||
let params = vec![
|
||||
("u", id),
|
||||
("e", event.clone()),
|
||||
("a", attributes_json.clone()),
|
||||
("id", site.to_string()),
|
||||
("v", info.version.clone().to_string()),
|
||||
("os", get_os().to_string()),
|
||||
("tz", tz),
|
||||
("xy", get_window_size(w)),
|
||||
];
|
||||
let req =
|
||||
reqwest::Client::builder().build().unwrap().get(format!("{base_url}/t/e")).query(¶ms);
|
||||
|
||||
let settings = get_or_create_settings(w).await;
|
||||
if !settings.telemetry {
|
||||
info!("Track event (disabled): {}", event);
|
||||
return;
|
||||
}
|
||||
|
||||
// Disable analytics actual sending in dev
|
||||
if is_dev() {
|
||||
debug!("Track event: {} {}", event, attributes_json);
|
||||
return;
|
||||
}
|
||||
|
||||
if let Err(e) = req.send().await {
|
||||
info!("Error sending analytics event: {}", e);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_os() -> &'static str {
|
||||
if cfg!(target_os = "windows") {
|
||||
"windows"
|
||||
} else if cfg!(target_os = "macos") {
|
||||
"macos"
|
||||
} else if cfg!(target_os = "linux") {
|
||||
"linux"
|
||||
} else {
|
||||
"unknown"
|
||||
}
|
||||
}
|
||||
|
||||
fn get_window_size<R: Runtime>(w: &WebviewWindow<R>) -> String {
|
||||
let current_monitor = match w.current_monitor() {
|
||||
Ok(Some(m)) => m,
|
||||
_ => return "unknown".to_string(),
|
||||
};
|
||||
|
||||
let scale_factor = current_monitor.scale_factor();
|
||||
let size = current_monitor.size();
|
||||
let width: f64 = size.width as f64 / scale_factor;
|
||||
let height: f64 = size.height as f64 / scale_factor;
|
||||
|
||||
format!("{}x{}", (width / 100.0).round() * 100.0, (height / 100.0).round() * 100.0)
|
||||
}
|
||||
|
||||
async fn get_id<R: Runtime>(w: &WebviewWindow<R>) -> String {
|
||||
let id = get_key_value_string(w, "analytics", "id", "").await;
|
||||
if id.is_empty() {
|
||||
let new_id = generate_id();
|
||||
set_key_value_string(w, "analytics", "id", new_id.as_str(), &UpdateSource::Background)
|
||||
.await;
|
||||
new_id
|
||||
} else {
|
||||
id
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_num_launches<R: Runtime>(w: &WebviewWindow<R>) -> i32 {
|
||||
get_key_value_int(w, NAMESPACE, NUM_LAUNCHES_KEY, 0).await
|
||||
}
|
||||
40 src-tauri/src/commands.rs (new file)
@@ -0,0 +1,40 @@
|
||||
use crate::error::Result;
|
||||
use tauri::{command, AppHandle, Manager, Runtime, WebviewWindow};
|
||||
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
|
||||
use yaak_crypto::manager::EncryptionManagerExt;
|
||||
use yaak_plugins::events::PluginWindowContext;
|
||||
use yaak_plugins::native_template_functions::{decrypt_secure_template_function, encrypt_secure_template_function};
|
||||
|
||||
#[command]
|
||||
pub(crate) async fn cmd_show_workspace_key<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
workspace_id: &str,
|
||||
) -> Result<()> {
|
||||
let key = window.crypto().reveal_workspace_key(workspace_id)?;
|
||||
window
|
||||
.dialog()
|
||||
.message(format!("Your workspace key is \n\n{}", key))
|
||||
.kind(MessageDialogKind::Info)
|
||||
.show(|_v| {});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub(crate) async fn cmd_decrypt_template<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
template: &str,
|
||||
) -> Result<String> {
|
||||
let app_handle = window.app_handle();
|
||||
let window_context = &PluginWindowContext::new(&window);
|
||||
Ok(decrypt_secure_template_function(&app_handle, window_context, template)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub(crate) async fn cmd_secure_template<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
window: WebviewWindow<R>,
|
||||
template: &str,
|
||||
) -> Result<String> {
|
||||
let window_context = &PluginWindowContext::new(&window);
|
||||
Ok(encrypt_secure_template_function(&app_handle, window_context, template)?)
|
||||
}
|
||||
62 src-tauri/src/error.rs (new file)
@@ -0,0 +1,62 @@
|
||||
use std::io;
|
||||
use serde::{Serialize, Serializer};
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum Error {
|
||||
#[error(transparent)]
|
||||
TemplateError(#[from] yaak_templates::error::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
ModelError(#[from] yaak_models::error::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
SyncError(#[from] yaak_sync::error::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
CryptoError(#[from] yaak_crypto::error::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
GitError(#[from] yaak_git::error::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
WebsocketError(#[from] yaak_ws::error::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
LicenseError(#[from] yaak_license::error::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
PluginError(#[from] yaak_plugins::error::Error),
|
||||
|
||||
#[error("Updater error: {0}")]
|
||||
UpdaterError(#[from] tauri_plugin_updater::Error),
|
||||
|
||||
#[error("JSON error: {0}")]
|
||||
JsonError(#[from] serde_json::error::Error),
|
||||
|
||||
#[error("Tauri error: {0}")]
|
||||
TauriError(#[from] tauri::Error),
|
||||
|
||||
#[error("Event source error: {0}")]
|
||||
EventSourceError(#[from] eventsource_client::Error),
|
||||
|
||||
#[error("I/O error: {0}")]
|
||||
IOError(#[from] io::Error),
|
||||
|
||||
#[error("Request error: {0}")]
|
||||
RequestError(#[from] reqwest::Error),
|
||||
|
||||
#[error("Generic error: {0}")]
|
||||
GenericError(String),
|
||||
}
|
||||
|
||||
impl Serialize for Error {
|
||||
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serializer.serialize_str(self.to_string().as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
pub type Result<T> = std::result::Result<T, Error>;
|
||||
@@ -1,10 +1,14 @@
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use crate::error::Result;
|
||||
use KeyAndValueRef::{Ascii, Binary};
|
||||
|
||||
use tauri::{Manager, Runtime, WebviewWindow};
|
||||
use yaak_grpc::{KeyAndValueRef, MetadataMap};
|
||||
use yaak_models::models::GrpcRequest;
|
||||
use yaak_plugins::events::{CallHttpAuthenticationRequest, HttpHeader};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
|
||||
pub fn metadata_to_map(metadata: MetadataMap) -> BTreeMap<String, String> {
|
||||
pub(crate) fn metadata_to_map(metadata: MetadataMap) -> BTreeMap<String, String> {
|
||||
let mut entries = BTreeMap::new();
|
||||
for r in metadata.iter() {
|
||||
match r {
|
||||
@@ -14,3 +18,48 @@ pub fn metadata_to_map(metadata: MetadataMap) -> BTreeMap<String, String> {
|
||||
}
|
||||
entries
|
||||
}
|
||||
|
||||
pub(crate) async fn build_metadata<R: Runtime>(
|
||||
window: &WebviewWindow<R>,
|
||||
request: &GrpcRequest,
|
||||
) -> Result<BTreeMap<String, String>> {
|
||||
let plugin_manager = window.state::<PluginManager>();
|
||||
let mut metadata = BTreeMap::new();
|
||||
|
||||
// Add the rest of metadata
|
||||
for h in request.clone().metadata {
|
||||
if h.name.is_empty() && h.value.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
if !h.enabled {
|
||||
continue;
|
||||
}
|
||||
|
||||
metadata.insert(h.name, h.value);
|
||||
}
|
||||
|
||||
if let Some(auth_name) = request.authentication_type.clone() {
|
||||
let auth = request.authentication.clone();
|
||||
let plugin_req = CallHttpAuthenticationRequest {
|
||||
context_id: format!("{:x}", md5::compute(request.id.clone())),
|
||||
values: serde_json::from_value(serde_json::to_value(&auth).unwrap()).unwrap(),
|
||||
method: "POST".to_string(),
|
||||
url: request.url.clone(),
|
||||
headers: metadata
|
||||
.iter()
|
||||
.map(|(name, value)| HttpHeader {
|
||||
name: name.to_string(),
|
||||
value: value.to_string(),
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
let plugin_result =
|
||||
plugin_manager.call_http_authentication(&window, &auth_name, plugin_req).await?;
|
||||
for header in plugin_result.set_headers {
|
||||
metadata.insert(header.name, header.value);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(metadata)
|
||||
}
|
||||
|
||||
60 src-tauri/src/history.rs (new file)
@@ -0,0 +1,60 @@
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use yaak_models::query_manager::QueryManagerExt;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
const NAMESPACE: &str = "analytics";
|
||||
const NUM_LAUNCHES_KEY: &str = "num_launches";
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct LaunchEventInfo {
|
||||
pub current_version: String,
|
||||
pub previous_version: String,
|
||||
pub launched_after_update: bool,
|
||||
pub num_launches: i32,
|
||||
}
|
||||
|
||||
pub async fn store_launch_history<R: Runtime>(app_handle: &AppHandle<R>) -> LaunchEventInfo {
|
||||
let last_tracked_version_key = "last_tracked_version";
|
||||
|
||||
let mut info = LaunchEventInfo::default();
|
||||
|
||||
info.num_launches = get_num_launches(app_handle).await + 1;
|
||||
info.current_version = app_handle.package_info().version.to_string();
|
||||
|
||||
app_handle
|
||||
.with_tx(|tx| {
|
||||
info.previous_version =
|
||||
tx.get_key_value_string(NAMESPACE, last_tracked_version_key, "");
|
||||
|
||||
if !info.previous_version.is_empty() {
|
||||
info.launched_after_update = info.current_version != info.previous_version;
|
||||
};
|
||||
|
||||
// Update key values
|
||||
|
||||
let source = &UpdateSource::Background;
|
||||
let version = info.current_version.as_str();
|
||||
tx.set_key_value_string(NAMESPACE, last_tracked_version_key, version, source);
|
||||
tx.set_key_value_int(NAMESPACE, NUM_LAUNCHES_KEY, info.num_launches, source);
|
||||
Ok(())
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
info
|
||||
}
|
||||
|
||||
pub fn get_os() -> &'static str {
|
||||
if cfg!(target_os = "windows") {
|
||||
"windows"
|
||||
} else if cfg!(target_os = "macos") {
|
||||
"macos"
|
||||
} else if cfg!(target_os = "linux") {
|
||||
"linux"
|
||||
} else {
|
||||
"unknown"
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_num_launches<R: Runtime>(app_handle: &AppHandle<R>) -> i32 {
|
||||
app_handle.db().get_key_value_int(NAMESPACE, NUM_LAUNCHES_KEY, 0)
|
||||
}
|
||||
@@ -1,14 +1,16 @@
|
||||
use crate::error::Error::GenericError;
|
||||
use crate::error::Result;
|
||||
use crate::render::render_http_request;
|
||||
use crate::response_err;
|
||||
use http::header::{ACCEPT, USER_AGENT};
|
||||
use http::{HeaderMap, HeaderName, HeaderValue, Uri};
|
||||
use http::{HeaderMap, HeaderName, HeaderValue};
|
||||
use log::{debug, error, warn};
|
||||
use mime_guess::Mime;
|
||||
use reqwest::redirect::Policy;
|
||||
use reqwest::{multipart, Proxy, Url};
|
||||
use reqwest::{Method, Response};
|
||||
use rustls::crypto::ring;
|
||||
use reqwest::{Proxy, Url, multipart};
|
||||
use rustls::ClientConfig;
|
||||
use rustls::crypto::ring;
|
||||
use rustls_platform_verifier::BuilderVerifierExt;
|
||||
use serde_json::Value;
|
||||
use std::collections::BTreeMap;
|
||||
@@ -18,20 +20,18 @@ use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tauri::{Manager, Runtime, WebviewWindow};
|
||||
use tokio::fs;
|
||||
use tokio::fs::{create_dir_all, File};
|
||||
use tokio::fs::{File, create_dir_all};
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::sync::watch::Receiver;
|
||||
use tokio::sync::{oneshot, Mutex};
|
||||
use tokio::sync::{Mutex, oneshot};
|
||||
use yaak_models::models::{
|
||||
Cookie, CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseHeader,
|
||||
HttpResponseState, ProxySetting, ProxySettingAuth,
|
||||
};
|
||||
use yaak_models::queries::{
|
||||
get_base_environment, get_http_response, get_or_create_settings, get_workspace,
|
||||
update_response_if_id, upsert_cookie_jar, UpdateSource,
|
||||
};
|
||||
use yaak_models::query_manager::QueryManagerExt;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_plugins::events::{
|
||||
CallHttpAuthenticationRequest, HttpHeader, RenderPurpose, WindowContext,
|
||||
CallHttpAuthenticationRequest, HttpHeader, PluginWindowContext, RenderPurpose,
|
||||
};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_plugins::template_callback::PluginTemplateCallback;
|
||||
@@ -43,27 +43,46 @@ pub async fn send_http_request<R: Runtime>(
|
||||
environment: Option<Environment>,
|
||||
cookie_jar: Option<CookieJar>,
|
||||
cancelled_rx: &mut Receiver<bool>,
|
||||
) -> Result<HttpResponse, String> {
|
||||
let plugin_manager = window.state::<PluginManager>();
|
||||
let workspace = get_workspace(window, &unrendered_request.workspace_id)
|
||||
.await
|
||||
.expect("Failed to get Workspace");
|
||||
let base_environment = get_base_environment(window, &unrendered_request.workspace_id)
|
||||
.await
|
||||
.expect("Failed to get base environment");
|
||||
let settings = get_or_create_settings(window).await;
|
||||
let cb = PluginTemplateCallback::new(
|
||||
window.app_handle(),
|
||||
&WindowContext::from_window(window),
|
||||
RenderPurpose::Send,
|
||||
);
|
||||
) -> Result<HttpResponse> {
|
||||
let app_handle = window.app_handle().clone();
let plugin_manager = app_handle.state::<PluginManager>();
let (settings, workspace) = {
let db = window.db();
let settings = db.get_settings();
let workspace = db.get_workspace(&unrendered_request.workspace_id)?;
(settings, workspace)
};
let base_environment =
app_handle.db().get_base_environment(&unrendered_request.workspace_id)?;

let response_id = og_response.id.clone();
let response = Arc::new(Mutex::new(og_response.clone()));

let request =
render_http_request(&unrendered_request, &base_environment, environment.as_ref(), &cb)
.await;
let cb = PluginTemplateCallback::new(
window.app_handle(),
&PluginWindowContext::new(window),
RenderPurpose::Send,
);
let update_source = UpdateSource::from_window(window);

let request = match render_http_request(
&unrendered_request,
&base_environment,
environment.as_ref(),
&cb,
)
.await
{
Ok(r) => r,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
));
}
};

let mut url_string = request.url;

@@ -104,7 +123,12 @@ pub async fn send_http_request<R: Runtime>(

match settings.proxy {
Some(ProxySetting::Disabled) => client_builder = client_builder.no_proxy(),
Some(ProxySetting::Enabled { http, https, auth }) => {
Some(ProxySetting::Enabled {
http,
https,
auth,
disabled,
}) if !disabled => {
debug!("Using proxy http={http} https={https}");
let mut proxy = Proxy::custom(move |url| {
let http = if http.is_empty() { None } else { Some(http.to_owned()) };
@@ -124,7 +148,7 @@ pub async fn send_http_request<R: Runtime>(

client_builder = client_builder.proxy(proxy);
}
None => {} // Nothing to do for this one, as it is the default
_ => {} // Nothing to do for this one, as it is the default
}

// Add cookie store if specified
@@ -139,10 +163,9 @@ pub async fn send_http_request<R: Runtime>(
serde_json::from_value(json_cookie).expect("Failed to deserialize cookie")
})
.map(|c| Ok(c))
.collect::<Vec<Result<_, ()>>>();
.collect::<Vec<Result<_>>>();

let store = reqwest_cookie_store::CookieStore::from_cookies(cookies, true)
.expect("Failed to create cookie store");
let store = reqwest_cookie_store::CookieStore::from_cookies(cookies, true)?;
let cookie_store = reqwest_cookie_store::CookieStoreMutex::new(store);
let cookie_store = Arc::new(cookie_store);
client_builder = client_builder.cookie_provider(Arc::clone(&cookie_store));
@@ -158,7 +181,7 @@ pub async fn send_http_request<R: Runtime>(
));
}

let client = client_builder.build().expect("Failed to build client");
let client = client_builder.build()?;

// Render query parameters
let mut query_params = Vec::new();
@@ -169,32 +192,20 @@ pub async fn send_http_request<R: Runtime>(
query_params.push((p.name, p.value));
}

let uri = match Uri::from_str(url_string.as_str()) {
let url = match Url::from_str(&url_string) {
Ok(u) => u,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
format!("Failed to parse URL \"{}\": {}", url_string, e.to_string()),
window,
)
.await);
}
};
// Yes, we're parsing both URI and URL because they could return different errors
let url = match Url::from_str(uri.to_string().as_str()) {
Ok(u) => u,
Err(e) => {
return Ok(response_err(
&*response.lock().await,
format!("Failed to parse URL \"{}\": {}", url_string, e.to_string()),
window,
)
.await);
&update_source,
));
}
};

let m = Method::from_bytes(request.method.to_uppercase().as_bytes())
.expect("Failed to create method");
let m = Method::from_str(&request.method.to_uppercase())
.map_err(|e| GenericError(e.to_string()))?;
let mut request_builder = client.request(m, url).query(&query_params);

let mut headers = HeaderMap::new();
@@ -282,7 +293,7 @@ pub async fn send_http_request<R: Runtime>(
} else if body_type == "binary" && request_body.contains_key("filePath") {
let file_path = request_body
.get("filePath")
.ok_or("filePath not set")?
.ok_or(GenericError("filePath not set".to_string()))?
.as_str()
.unwrap_or_default();

@@ -291,7 +302,12 @@ pub async fn send_http_request<R: Runtime>(
request_builder = request_builder.body(f);
}
Err(e) => {
return Ok(response_err(&*response.lock().await, e, window).await);
return Ok(response_err(
&app_handle,
&*response.lock().await,
e,
&update_source,
));
}
}
} else if body_type == "multipart/form-data" && request_body.contains_key("form") {
@@ -318,11 +334,11 @@ pub async fn send_http_request<R: Runtime>(
Ok(f) => multipart::Part::bytes(f),
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
window,
)
.await);
&update_source,
));
}
}
};
@@ -335,11 +351,11 @@ pub async fn send_http_request<R: Runtime>(
Ok(p) => p,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
format!("Invalid mime for multi-part entry {e:?}"),
window,
)
.await);
&update_source,
));
}
};
} else if !file_path.is_empty() {
@@ -351,16 +367,16 @@ pub async fn send_http_request<R: Runtime>(
Ok(p) => p,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
format!("Invalid mime for multi-part entry {e:?}"),
window,
)
.await);
&update_source,
));
}
};
}

// Set file path if not empty
// Set file path if it is not empty
if !file_path.is_empty() {
let filename = PathBuf::from(file_path)
.file_name()
@@ -383,6 +399,15 @@ pub async fn send_http_request<R: Runtime>(
} else {
warn!("Unsupported body type: {}", body_type);
}
} else {
// No body set
let method = request.method.to_ascii_lowercase();
let is_body_method = method == "post" || method == "put" || method == "patch";
// Add Content-Length for methods that commonly accept a body because some servers
// will error if they don't receive it.
if is_body_method && !headers.contains_key("content-length") {
headers.insert("Content-Length", HeaderValue::from_static("0"));
}
}

// Add headers last, because previous steps may modify them
@@ -392,7 +417,12 @@ pub async fn send_http_request<R: Runtime>(
Ok(r) => r,
Err(e) => {
warn!("Failed to build request builder {e:?}");
return Ok(response_err(&*response.lock().await, e.to_string(), window).await);
return Ok(response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
));
}
};

@@ -414,11 +444,16 @@ pub async fn send_http_request<R: Runtime>(
})
.collect(),
};
let auth_result = plugin_manager.call_http_authentication(window, &auth_name, req).await;
let auth_result = plugin_manager.call_http_authentication(&window, &auth_name, req).await;
let plugin_result = match auth_result {
Ok(r) => r,
Err(e) => {
return Ok(response_err(&*response.lock().await, e.to_string(), window).await);
return Ok(response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
));
}
};

@@ -431,7 +466,7 @@ pub async fn send_http_request<R: Runtime>(
}
}

let (resp_tx, resp_rx) = oneshot::channel::<Result<Response, reqwest::Error>>();
let (resp_tx, resp_rx) = oneshot::channel::<std::result::Result<Response, reqwest::Error>>();
let (done_tx, done_rx) = oneshot::channel::<HttpResponse>();

let start = std::time::Instant::now();
@@ -443,22 +478,26 @@ pub async fn send_http_request<R: Runtime>(
let raw_response = tokio::select! {
Ok(r) = resp_rx => r,
_ = cancelled_rx.changed() => {
debug!("Request cancelled");
return Ok(response_err(&*response.lock().await, "Request was cancelled".to_string(), window).await);
let mut r = response.lock().await;
r.elapsed_headers = start.elapsed().as_millis() as i32;
r.elapsed = start.elapsed().as_millis() as i32;
return Ok(response_err(&app_handle, &r, "Request was cancelled".to_string(), &update_source));
}
};

{
let app_handle = app_handle.clone();
let window = window.clone();
let cancelled_rx = cancelled_rx.clone();
let response_id = response_id.clone();
let response = response.clone();
let update_source = update_source.clone();
tokio::spawn(async move {
match raw_response {
Ok(mut v) => {
let content_length = v.content_length();
let response_headers = v.headers().clone();
let dir = window.app_handle().path().app_data_dir().unwrap();
let dir = app_handle.path().app_data_dir().unwrap();
let base_dir = dir.join("responses");
create_dir_all(base_dir.clone()).await.expect("Failed to create responses dir");
let body_path = if response_id.is_empty() {
@@ -471,6 +510,7 @@ pub async fn send_http_request<R: Runtime>(
let mut r = response.lock().await;
r.body_path = Some(body_path.to_str().unwrap().to_string());
r.elapsed_headers = start.elapsed().as_millis() as i32;
r.elapsed = start.elapsed().as_millis() as i32;
r.status = v.status().as_u16() as i32;
r.status_reason = v.status().canonical_reason().map(|s| s.to_string());
r.headers = response_headers
@@ -492,8 +532,9 @@ pub async fn send_http_request<R: Runtime>(
};

r.state = HttpResponseState::Connected;
update_response_if_id(&window, &r, &UpdateSource::Window)
.await
app_handle
.db()
.update_http_response_if_id(&r, &update_source)
.expect("Failed to update response after connected");
}

@@ -521,21 +562,27 @@ pub async fn send_http_request<R: Runtime>(
f.flush().await.expect("Failed to flush file");
written_bytes += bytes.len();
r.content_length = Some(written_bytes as i32);
update_response_if_id(&window, &r, &UpdateSource::Window)
.await
app_handle
.db()
.update_http_response_if_id(&r, &update_source)
.expect("Failed to update response");
}
Ok(None) => {
break;
}
Err(e) => {
response_err(&*response.lock().await, e.to_string(), &window).await;
response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
);
break;
}
}
}

// Set final content length
// Set the final content length
{
let mut r = response.lock().await;
r.content_length = match content_length {
@@ -543,8 +590,9 @@ pub async fn send_http_request<R: Runtime>(
None => Some(written_bytes as i32),
};
r.state = HttpResponseState::Closed;
update_response_if_id(&window, &r, &UpdateSource::Window)
.await
app_handle
.db()
.update_http_response_if_id(&r, &UpdateSource::from_window(&window))
.expect("Failed to update response");
};

@@ -569,8 +617,9 @@ pub async fn send_http_request<R: Runtime>(
})
.collect::<Vec<_>>();
cookie_jar.cookies = json_cookies;
if let Err(e) =
upsert_cookie_jar(&window, &cookie_jar, &UpdateSource::Window).await
if let Err(e) = app_handle
.db()
.upsert_cookie_jar(&cookie_jar, &UpdateSource::from_window(&window))
{
error!("Failed to update cookie jar: {}", e);
};
@@ -578,7 +627,12 @@ pub async fn send_http_request<R: Runtime>(
}
Err(e) => {
warn!("Failed to execute request {e}");
response_err(&*response.lock().await, format!("{e} → {e:?}"), &window).await;
response_err(
&app_handle,
&*response.lock().await,
format!("{e} → {e:?}"),
&update_source,
);
}
};

@@ -587,16 +641,20 @@ pub async fn send_http_request<R: Runtime>(
});
};

let app_handle = app_handle.clone();
Ok(tokio::select! {
Ok(r) = done_rx => r,
_ = cancelled_rx.changed() => {
match get_http_response(window, response_id.as_str()).await {
match app_handle.with_db(|c| c.get_http_response(&response_id)) {
Ok(mut r) => {
r.state = HttpResponseState::Closed;
update_response_if_id(&window, &r, &UpdateSource::Window).await.expect("Failed to update response")
r.elapsed = start.elapsed().as_millis() as i32;
r.elapsed_headers = start.elapsed().as_millis() as i32;
app_handle.db().update_http_response_if_id(&r, &UpdateSource::from_window(window))
.expect("Failed to update response")
},
_ => {
response_err(&*response.lock().await, "Ephemeral request was cancelled".to_string(), &window).await
response_err(&app_handle, &*response.lock().await, "Ephemeral request was cancelled".to_string(), &update_source)
}.clone(),
}
}

src-tauri/src/lib.rs (1785 changed lines) — file diff suppressed because it is too large
@@ -1,13 +1,16 @@
use std::time::SystemTime;

use crate::analytics::{get_num_launches, get_os};
use crate::error::Result;
use crate::history::{get_num_launches, get_os};
use chrono::{DateTime, Duration, Utc};
use log::debug;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tauri::{Emitter, Manager, Runtime, WebviewWindow};
use yaak_models::queries::{get_key_value_raw, set_key_value_raw, UpdateSource};
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
use yaak_license::{LicenseCheckStatus, check_license};
use yaak_models::query_manager::QueryManagerExt;
use yaak_models::util::UpdateSource;

// Check for updates every hour
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;
@@ -43,16 +46,23 @@ impl YaakNotifier {
}
}

pub async fn seen<R: Runtime>(&mut self, w: &WebviewWindow<R>, id: &str) -> Result<(), String> {
let mut seen = get_kv(w).await?;
pub async fn seen<R: Runtime>(&mut self, window: &WebviewWindow<R>, id: &str) -> Result<()> {
let app_handle = window.app_handle();
let mut seen = get_kv(app_handle).await?;
seen.push(id.to_string());
debug!("Marked notification as seen {}", id);
let seen_json = serde_json::to_string(&seen).map_err(|e| e.to_string())?;
set_key_value_raw(w, KV_NAMESPACE, KV_KEY, seen_json.as_str(), &UpdateSource::Window).await;
let seen_json = serde_json::to_string(&seen)?;
window.db().set_key_value_raw(
KV_NAMESPACE,
KV_KEY,
seen_json.as_str(),
&UpdateSource::from_window(window),
);
Ok(())
}

pub async fn check<R: Runtime>(&mut self, window: &WebviewWindow<R>) -> Result<(), String> {
pub async fn check<R: Runtime>(&mut self, window: &WebviewWindow<R>) -> Result<()> {
let app_handle = window.app_handle();
let ignore_check = self.last_check.elapsed().unwrap().as_secs() < MAX_UPDATE_CHECK_SECONDS;

if ignore_check {
@@ -61,22 +71,31 @@ impl YaakNotifier {

self.last_check = SystemTime::now();

let num_launches = get_num_launches(window).await;
let info = window.app_handle().package_info().clone();
let license_check = match check_license(window).await? {
LicenseCheckStatus::PersonalUse { .. } => "personal".to_string(),
LicenseCheckStatus::CommercialUse => "commercial".to_string(),
LicenseCheckStatus::InvalidLicense => "invalid_license".to_string(),
LicenseCheckStatus::Trialing { .. } => "trialing".to_string(),
};
let settings = window.db().get_settings();
let num_launches = get_num_launches(app_handle).await;
let info = app_handle.package_info().clone();
let req = reqwest::Client::default()
.request(Method::GET, "https://notify.yaak.app/notifications")
.query(&[
("version", info.version.to_string().as_str()),
("launches", num_launches.to_string().as_str()),
("platform", get_os())
("installed", settings.created_at.format("%Y-%m-%d").to_string().as_str()),
("license", &license_check),
("platform", get_os()),
]);
let resp = req.send().await.map_err(|e| e.to_string())?;
let resp = req.send().await?;
if resp.status() != 200 {
debug!("Skipping notification status code {}", resp.status());
return Ok(());
}

let result = resp.json::<Value>().await.map_err(|e| e.to_string())?;
let result = resp.json::<Value>().await?;

// Support both single and multiple notifications.
// TODO: Remove support for single after April 2025
@@ -90,23 +109,24 @@ impl YaakNotifier {

for notification in notifications {
let age = notification.timestamp.signed_duration_since(Utc::now());
let seen = get_kv(window).await?;
let seen = get_kv(app_handle).await?;
if seen.contains(&notification.id) || (age > Duration::days(2)) {
debug!("Already seen notification {}", notification.id);
return Ok(());
continue;
}
debug!("Got notification {:?}", notification);

let _ = window.emit_to(window.label(), "notification", notification.clone());
let _ = app_handle.emit_to(window.label(), "notification", notification.clone());
break; // Only show one notification
}

Ok(())
}
}

async fn get_kv<R: Runtime>(w: &WebviewWindow<R>) -> Result<Vec<String>, String> {
match get_key_value_raw(w, "notifications", "seen").await {
async fn get_kv<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Vec<String>> {
match app_handle.db().get_key_value_raw("notifications", "seen") {
None => Ok(Vec::new()),
Some(v) => serde_json::from_str(&v.value).map_err(|e| e.to_string()),
Some(v) => Ok(serde_json::from_str(&v.value)?),
}
}

@@ -1,6 +1,6 @@
|
||||
use crate::http_request::send_http_request;
|
||||
use crate::render::{render_http_request, render_json_value};
|
||||
use crate::window::{create_window, CreateWindowConfig};
|
||||
use crate::window::{CreateWindowConfig, create_window};
|
||||
use crate::{
|
||||
call_frontend, cookie_jar_from_window, environment_from_window, get_window_from_window_context,
|
||||
workspace_from_window,
|
||||
@@ -10,16 +10,13 @@ use log::warn;
|
||||
use tauri::{AppHandle, Emitter, Manager, Runtime, State};
|
||||
use tauri_plugin_clipboard_manager::ClipboardExt;
|
||||
use yaak_models::models::{HttpResponse, Plugin};
|
||||
use yaak_models::queries::{
|
||||
create_default_http_response, delete_plugin_key_value, get_base_environment, get_http_request,
|
||||
get_plugin_key_value, list_http_responses_for_request, list_plugins, set_plugin_key_value,
|
||||
upsert_plugin, UpdateSource,
|
||||
};
|
||||
use yaak_models::query_manager::QueryManagerExt;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_plugins::events::{
|
||||
Color, DeleteKeyValueResponse, EmptyPayload, FindHttpResponsesResponse,
|
||||
GetHttpRequestByIdResponse, GetKeyValueResponse, Icon, InternalEvent, InternalEventPayload,
|
||||
RenderHttpRequestResponse, SendHttpRequestResponse, SetKeyValueResponse, ShowToastRequest,
|
||||
TemplateRenderResponse, WindowContext, WindowNavigateEvent,
|
||||
PluginWindowContext, RenderHttpRequestResponse, SendHttpRequestResponse, SetKeyValueResponse,
|
||||
ShowToastRequest, TemplateRenderResponse, WindowNavigateEvent,
|
||||
};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_plugins::plugin_handle::PluginHandle;
|
||||
@@ -30,7 +27,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
event: &InternalEvent,
|
||||
plugin_handle: &PluginHandle,
|
||||
) {
|
||||
// info!("Got event to app {}", event.id);
|
||||
// debug!("Got event to app {event:?}");
|
||||
let window_context = event.window_context.to_owned();
|
||||
let response_event: Option<InternalEventPayload> = match event.clone().payload {
|
||||
InternalEventPayload::CopyTextRequest(req) => {
|
||||
@@ -42,7 +39,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
}
|
||||
InternalEventPayload::ShowToastRequest(req) => {
|
||||
match window_context {
|
||||
WindowContext::Label { label } => app_handle
|
||||
PluginWindowContext::Label { label, .. } => app_handle
|
||||
.emit_to(label, "show_toast", req)
|
||||
.expect("Failed to emit show_toast to window"),
|
||||
_ => app_handle.emit("show_toast", req).expect("Failed to emit show_toast"),
|
||||
@@ -55,19 +52,16 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
call_frontend(window, event).await
|
||||
}
|
||||
InternalEventPayload::FindHttpResponsesRequest(req) => {
|
||||
let http_responses = list_http_responses_for_request(
|
||||
app_handle,
|
||||
req.request_id.as_str(),
|
||||
req.limit.map(|l| l as i64),
|
||||
)
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
let http_responses = app_handle
|
||||
.db()
|
||||
.list_http_responses_for_request(&req.request_id, req.limit.map(|l| l as u64))
|
||||
.unwrap_or_default();
|
||||
Some(InternalEventPayload::FindHttpResponsesResponse(FindHttpResponsesResponse {
|
||||
http_responses,
|
||||
}))
|
||||
}
|
||||
InternalEventPayload::GetHttpRequestByIdRequest(req) => {
|
||||
let http_request = get_http_request(app_handle, req.id.as_str()).await.unwrap();
|
||||
let http_request = app_handle.db().get_http_request(&req.id).ok();
|
||||
Some(InternalEventPayload::GetHttpRequestByIdResponse(GetHttpRequestByIdResponse {
|
||||
http_request,
|
||||
}))
|
||||
@@ -76,12 +70,12 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
let window = get_window_from_window_context(app_handle, &window_context)
|
||||
.expect("Failed to find window for render http request");
|
||||
|
||||
let workspace = workspace_from_window(&window)
|
||||
.await
|
||||
.expect("Failed to get workspace_id from window URL");
|
||||
let environment = environment_from_window(&window).await;
|
||||
let base_environment = get_base_environment(&window, workspace.id.as_str())
|
||||
.await
|
||||
let workspace =
|
||||
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
||||
let environment = environment_from_window(&window);
|
||||
let base_environment = app_handle
|
||||
.db()
|
||||
.get_base_environment(&workspace.id)
|
||||
.expect("Failed to get base environment");
|
||||
let cb = PluginTemplateCallback::new(app_handle, &window_context, req.purpose);
|
||||
let http_request = render_http_request(
|
||||
@@ -90,7 +84,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
environment.as_ref(),
|
||||
&cb,
|
||||
)
|
||||
.await;
|
||||
.await
|
||||
.expect("Failed to render http request");
|
||||
Some(InternalEventPayload::RenderHttpRequestResponse(RenderHttpRequestResponse {
|
||||
http_request,
|
||||
}))
|
||||
@@ -99,23 +94,22 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
let window = get_window_from_window_context(app_handle, &window_context)
|
||||
.expect("Failed to find window for render");
|
||||
|
||||
let workspace = workspace_from_window(&window)
|
||||
.await
|
||||
.expect("Failed to get workspace_id from window URL");
|
||||
let environment = environment_from_window(&window).await;
|
||||
let base_environment = get_base_environment(&window, workspace.id.as_str())
|
||||
.await
|
||||
let workspace =
|
||||
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
||||
let environment = environment_from_window(&window);
|
||||
let base_environment = app_handle
|
||||
.db()
|
||||
.get_base_environment(&workspace.id)
|
||||
.expect("Failed to get base environment");
|
||||
let cb = PluginTemplateCallback::new(app_handle, &window_context, req.purpose);
|
||||
let data =
|
||||
render_json_value(req.data, &base_environment, environment.as_ref(), &cb).await;
|
||||
let data = render_json_value(req.data, &base_environment, environment.as_ref(), &cb)
|
||||
.await
|
||||
.expect("Failed to render template");
|
||||
Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data }))
|
||||
}
|
||||
InternalEventPayload::ErrorResponse(resp) => {
|
||||
let window = get_window_from_window_context(app_handle, &window_context)
|
||||
.expect("Failed to find window for plugin reload");
|
||||
let toast_event = plugin_handle.build_event_to_send(
|
||||
&WindowContext::from_window(&window),
|
||||
&window_context,
|
||||
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
|
||||
message: format!(
|
||||
"Plugin error from {}: {}",
|
||||
@@ -131,9 +125,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
None
|
||||
}
|
||||
InternalEventPayload::ReloadResponse(_) => {
|
||||
let window = get_window_from_window_context(app_handle, &window_context)
|
||||
.expect("Failed to find window for plugin reload");
|
||||
let plugins = list_plugins(app_handle).await.unwrap();
|
||||
let plugins = app_handle.db().list_plugins().unwrap();
|
||||
for plugin in plugins {
|
||||
if plugin.directory != plugin_handle.dir {
|
||||
continue;
|
||||
@@ -143,10 +135,10 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
updated_at: Utc::now().naive_utc(), // TODO: Add reloaded_at field to use instead
|
||||
..plugin
|
||||
};
|
||||
upsert_plugin(&window, new_plugin, &UpdateSource::Plugin).await.unwrap();
|
||||
app_handle.db().upsert_plugin(&new_plugin, &UpdateSource::Plugin).unwrap();
|
||||
}
|
||||
let toast_event = plugin_handle.build_event_to_send(
|
||||
&WindowContext::from_window(&window),
|
||||
&window_context,
|
||||
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
|
||||
message: format!("Reloaded plugin {}", plugin_handle.dir),
|
||||
icon: Some(Icon::Info),
|
||||
@@ -161,32 +153,35 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
let window = get_window_from_window_context(app_handle, &window_context)
|
||||
.expect("Failed to find window for sending HTTP request");
|
||||
let mut http_request = req.http_request;
|
||||
let workspace = workspace_from_window(&window)
|
||||
.await
|
||||
.expect("Failed to get workspace_id from window URL");
|
||||
let cookie_jar = cookie_jar_from_window(&window).await;
|
||||
let environment = environment_from_window(&window).await;
|
||||
let workspace =
|
||||
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
||||
let cookie_jar = cookie_jar_from_window(&window);
|
||||
let environment = environment_from_window(&window);
|
||||
|
||||
if http_request.workspace_id.is_empty() {
|
||||
http_request.workspace_id = workspace.id;
|
||||
}
|
||||
|
||||
let resp = if http_request.id.is_empty() {
|
||||
HttpResponse::new()
|
||||
let http_response = if http_request.id.is_empty() {
|
||||
HttpResponse::default()
|
||||
} else {
|
||||
create_default_http_response(
|
||||
&window,
|
||||
http_request.id.as_str(),
|
||||
&UpdateSource::Plugin,
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
window
|
||||
.db()
|
||||
.upsert_http_response(
|
||||
&HttpResponse {
|
||||
request_id: http_request.id.clone(),
|
||||
workspace_id: http_request.workspace_id.clone(),
|
||||
..Default::default()
|
||||
},
|
||||
&UpdateSource::Plugin,
|
||||
)
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
let result = send_http_request(
|
||||
&window,
|
||||
&http_request,
|
||||
&resp,
|
||||
&http_response,
|
||||
environment,
|
||||
cookie_jar,
|
||||
&mut tokio::sync::watch::channel(false).1, // No-op cancel channel
|
||||
@@ -204,32 +199,53 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
}
|
||||
InternalEventPayload::OpenWindowRequest(req) => {
|
||||
let label = req.label;
|
||||
let (tx, mut rx) = tokio::sync::mpsc::channel(128);
|
||||
let (navigation_tx, mut navigation_rx) = tokio::sync::mpsc::channel(128);
|
||||
let (close_tx, mut close_rx) = tokio::sync::mpsc::channel(128);
|
||||
let win_config = CreateWindowConfig {
|
||||
url: &req.url,
|
||||
label: &label.clone(),
|
||||
title: &req.title.unwrap_or_default(),
|
||||
navigation_tx: Some(tx),
|
||||
navigation_tx: Some(navigation_tx),
|
||||
close_tx: Some(close_tx),
|
||||
inner_size: req.size.map(|s| (s.width, s.height)),
|
||||
position: None,
|
||||
hide_titlebar: false,
|
||||
data_dir_key: req.data_dir_key,
|
||||
..Default::default()
|
||||
};
|
||||
create_window(app_handle, win_config);
|
||||
|
||||
let event_id = event.id.clone();
|
||||
let plugin_handle = plugin_handle.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
while let Some(url) = rx.recv().await {
|
||||
let label = label.clone();
|
||||
let url = url.to_string();
|
||||
let event_to_send = plugin_handle.build_event_to_send(
|
||||
&WindowContext::Label { label },
|
||||
&InternalEventPayload::WindowNavigateEvent(WindowNavigateEvent { url }),
|
||||
Some(event_id.clone()),
|
||||
);
|
||||
plugin_handle.send(&event_to_send).await.unwrap();
|
||||
}
|
||||
});
|
||||
{
|
||||
let event_id = event.id.clone();
|
||||
let plugin_handle = plugin_handle.clone();
|
||||
let window_context = window_context.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
while let Some(url) = navigation_rx.recv().await {
|
||||
let url = url.to_string();
|
||||
let event_to_send = plugin_handle.build_event_to_send(
|
||||
&window_context, // NOTE: Sending existing context on purpose here
|
||||
&InternalEventPayload::WindowNavigateEvent(WindowNavigateEvent { url }),
|
||||
Some(event_id.clone()),
|
||||
);
|
||||
plugin_handle.send(&event_to_send).await.unwrap();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let event_id = event.id.clone();
|
||||
let plugin_handle = plugin_handle.clone();
|
||||
let window_context = window_context.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
while let Some(_) = close_rx.recv().await {
|
||||
let event_to_send = plugin_handle.build_event_to_send(
|
||||
&window_context,
|
||||
&InternalEventPayload::WindowCloseEvent,
|
||||
Some(event_id.clone()),
|
||||
);
|
||||
plugin_handle.send(&event_to_send).await.unwrap();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
InternalEventPayload::CloseWindowRequest(req) => {
|
||||
@@ -240,17 +256,17 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
}
|
||||
InternalEventPayload::SetKeyValueRequest(req) => {
|
||||
let name = plugin_handle.name().await;
|
||||
set_plugin_key_value(app_handle, &name, &req.key, &req.value).await;
|
||||
app_handle.db().set_plugin_key_value(&name, &req.key, &req.value);
|
||||
Some(InternalEventPayload::SetKeyValueResponse(SetKeyValueResponse {}))
|
||||
}
|
||||
InternalEventPayload::GetKeyValueRequest(req) => {
|
||||
let name = plugin_handle.name().await;
|
||||
let value = get_plugin_key_value(app_handle, &name, &req.key).await.map(|v| v.value);
|
||||
let value = app_handle.db().get_plugin_key_value(&name, &req.key).map(|v| v.value);
|
||||
Some(InternalEventPayload::GetKeyValueResponse(GetKeyValueResponse { value }))
|
||||
}
|
||||
InternalEventPayload::DeleteKeyValueRequest(req) => {
|
||||
let name = plugin_handle.name().await;
|
||||
let deleted = delete_plugin_key_value(app_handle, &name, &req.key).await;
|
||||
let deleted = app_handle.db().delete_plugin_key_value(&name, &req.key).unwrap();
|
||||
Some(InternalEventPayload::DeleteKeyValueResponse(DeleteKeyValueResponse { deleted }))
|
||||
}
|
||||
_ => None,
|
||||
|
||||
@@ -12,7 +12,7 @@ pub async fn render_template<T: TemplateCallback>(
base_environment: &Environment,
environment: Option<&Environment>,
cb: &T,
) -> String {
) -> yaak_templates::error::Result<String> {
let vars = &make_vars_hashmap(base_environment, environment);
render(template, vars, cb).await
}
@@ -22,7 +22,7 @@ pub async fn render_json_value<T: TemplateCallback>(
base_environment: &Environment,
environment: Option<&Environment>,
cb: &T,
) -> Value {
) -> yaak_templates::error::Result<Value> {
let vars = &make_vars_hashmap(base_environment, environment);
render_json_value_raw(value, vars, cb).await
}
@@ -32,32 +32,32 @@ pub async fn render_grpc_request<T: TemplateCallback>(
base_environment: &Environment,
environment: Option<&Environment>,
cb: &T,
) -> GrpcRequest {
) -> yaak_templates::error::Result<GrpcRequest> {
let vars = &make_vars_hashmap(base_environment, environment);

let mut metadata = Vec::new();
for p in r.metadata.clone() {
metadata.push(GrpcMetadataEntry {
enabled: p.enabled,
name: render(p.name.as_str(), vars, cb).await,
value: render(p.value.as_str(), vars, cb).await,
name: render(p.name.as_str(), vars, cb).await?,
value: render(p.value.as_str(), vars, cb).await?,
id: p.id,
})
}

let mut authentication = BTreeMap::new();
for (k, v) in r.authentication.clone() {
authentication.insert(k, render_json_value_raw(v, vars, cb).await);
authentication.insert(k, render_json_value_raw(v, vars, cb).await?);
}

let url = render(r.url.as_str(), vars, cb).await;
let url = render(r.url.as_str(), vars, cb).await?;

GrpcRequest {
Ok(GrpcRequest {
url,
metadata,
authentication,
..r.to_owned()
}
})
}

pub async fn render_http_request<T: TemplateCallback>(
@@ -65,15 +65,15 @@ pub async fn render_http_request<T: TemplateCallback>(
base_environment: &Environment,
environment: Option<&Environment>,
cb: &T,
) -> HttpRequest {
) -> yaak_templates::error::Result<HttpRequest> {
let vars = &make_vars_hashmap(base_environment, environment);

let mut url_parameters = Vec::new();
for p in r.url_parameters.clone() {
url_parameters.push(HttpUrlParameter {
enabled: p.enabled,
name: render(p.name.as_str(), vars, cb).await,
value: render(p.value.as_str(), vars, cb).await,
name: render(p.name.as_str(), vars, cb).await?,
value: render(p.value.as_str(), vars, cb).await?,
id: p.id,
})
}
@@ -82,41 +82,41 @@ pub async fn render_http_request<T: TemplateCallback>(
for p in r.headers.clone() {
headers.push(HttpRequestHeader {
enabled: p.enabled,
name: render(p.name.as_str(), vars, cb).await,
value: render(p.value.as_str(), vars, cb).await,
name: render(p.name.as_str(), vars, cb).await?,
value: render(p.value.as_str(), vars, cb).await?,
id: p.id,
})
}

let mut body = BTreeMap::new();
for (k, v) in r.body.clone() {
body.insert(k, render_json_value_raw(v, vars, cb).await);
body.insert(k, render_json_value_raw(v, vars, cb).await?);
}

let mut authentication = BTreeMap::new();
for (k, v) in r.authentication.clone() {
authentication.insert(k, render_json_value_raw(v, vars, cb).await);
authentication.insert(k, render_json_value_raw(v, vars, cb).await?);
}

let url = render(r.url.clone().as_str(), vars, cb).await;
let url = render(r.url.clone().as_str(), vars, cb).await?;

// This doesn't fit perfectly with the concept of "rendering" but it kind of does
let (url, url_parameters) = apply_path_placeholders(&url, url_parameters);

HttpRequest {
Ok(HttpRequest {
url,
url_parameters,
headers,
body,
authentication,
..r.to_owned()
}
})
}

pub async fn render<T: TemplateCallback>(
template: &str,
vars: &HashMap<String, String>,
cb: &T,
) -> String {
) -> yaak_templates::error::Result<String> {
parse_and_render(template, vars, cb).await
}

@@ -1,11 +1,13 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::time::SystemTime;
|
||||
|
||||
use crate::error::Result;
|
||||
use log::info;
|
||||
use tauri::{AppHandle, Manager};
|
||||
use tauri::{Manager, Runtime, WebviewWindow};
|
||||
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons};
|
||||
use tauri_plugin_updater::UpdaterExt;
|
||||
use tokio::task::block_in_place;
|
||||
use yaak_models::query_manager::QueryManagerExt;
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
|
||||
use crate::is_dev;
|
||||
@@ -46,6 +48,11 @@ impl UpdateMode {
|
||||
}
|
||||
}
|
||||
|
||||
pub enum UpdateTrigger {
|
||||
Background,
|
||||
User,
|
||||
}
|
||||
|
||||
impl YaakUpdater {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
@@ -53,41 +60,52 @@ impl YaakUpdater {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn force_check(
|
||||
pub async fn check_now<R: Runtime>(
|
||||
&mut self,
|
||||
app_handle: &AppHandle,
|
||||
window: &WebviewWindow<R>,
|
||||
mode: UpdateMode,
|
||||
) -> Result<bool, tauri_plugin_updater::Error> {
|
||||
update_trigger: UpdateTrigger,
|
||||
) -> Result<bool> {
|
||||
let settings = window.db().get_settings();
|
||||
let update_key = format!("{:x}", md5::compute(settings.id));
|
||||
self.last_update_check = SystemTime::now();
|
||||
|
||||
info!("Checking for updates mode={}", mode);
|
||||
|
||||
let h = app_handle.clone();
|
||||
let update_check_result = app_handle
|
||||
let w = window.clone();
|
||||
let update_check_result = w
|
||||
.updater_builder()
|
||||
.on_before_exit(move || {
|
||||
// Kill plugin manager before exit or NSIS installer will fail to replace sidecar
|
||||
// while it's running.
|
||||
// NOTE: This is only called on Windows
|
||||
let h = h.clone();
|
||||
let w = w.clone();
|
||||
block_in_place(|| {
|
||||
tauri::async_runtime::block_on(async move {
|
||||
info!("Shutting down plugin manager before update");
|
||||
let plugin_manager = h.state::<PluginManager>();
|
||||
let plugin_manager = w.state::<PluginManager>();
|
||||
plugin_manager.terminate().await;
|
||||
});
|
||||
});
|
||||
})
|
||||
.header("X-Update-Mode", mode.to_string())?
|
||||
.header("X-Update-Key", update_key)?
|
||||
.header(
|
||||
"X-Update-Trigger",
|
||||
match update_trigger {
|
||||
UpdateTrigger::Background => "background",
|
||||
UpdateTrigger::User => "user",
|
||||
},
|
||||
)?
|
||||
.build()?
|
||||
.check()
|
||||
.await;
|
||||
|
||||
match update_check_result {
|
||||
Ok(Some(update)) => {
|
||||
let h = app_handle.clone();
|
||||
app_handle
|
||||
.dialog()
|
||||
let result = match update_check_result? {
|
||||
None => false,
|
||||
Some(update) => {
|
||||
let w = window.clone();
|
||||
w.dialog()
|
||||
.message(format!(
|
||||
"{} is available. Would you like to download and install it now?",
|
||||
update.version
|
||||
@@ -104,7 +122,7 @@ impl YaakUpdater {
|
||||
tauri::async_runtime::spawn(async move {
|
||||
match update.download_and_install(|_, _| {}, || {}).await {
|
||||
Ok(_) => {
|
||||
if h.dialog()
|
||||
if w.dialog()
|
||||
.message("Would you like to restart the app?")
|
||||
.title("Update Installed")
|
||||
.buttons(MessageDialogButtons::OkCancelCustom(
|
||||
@@ -113,27 +131,27 @@ impl YaakUpdater {
|
||||
))
|
||||
.blocking_show()
|
||||
{
|
||||
h.restart();
|
||||
w.app_handle().restart();
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
h.dialog()
|
||||
w.dialog()
|
||||
.message(format!("The update failed to install: {}", e));
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
Ok(true)
|
||||
true
|
||||
}
|
||||
Ok(None) => Ok(false),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
};
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
pub async fn check(
|
||||
pub async fn maybe_check<R: Runtime>(
|
||||
&mut self,
|
||||
app_handle: &AppHandle,
|
||||
window: &WebviewWindow<R>,
|
||||
mode: UpdateMode,
|
||||
) -> Result<bool, tauri_plugin_updater::Error> {
|
||||
) -> Result<bool> {
|
||||
let update_period_seconds = match mode {
|
||||
UpdateMode::Stable => MAX_UPDATE_CHECK_HOURS_STABLE,
|
||||
UpdateMode::Beta => MAX_UPDATE_CHECK_HOURS_BETA,
|
||||
@@ -150,6 +168,6 @@ impl YaakUpdater {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
self.force_check(app_handle, mode).await
|
||||
self.check_now(window, mode, UpdateTrigger::Background).await
|
||||
}
|
||||
}
|
||||
|
||||
src-tauri/src/uri_scheme.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
use log::{info, warn};
use tauri::{Manager, Runtime, UriSchemeContext};

pub(crate) fn handle_uri_scheme<R: Runtime>(
a: UriSchemeContext<R>,
req: http::Request<Vec<u8>>,
) -> http::Response<Vec<u8>> {
println!("------------- Yaak URI scheme invoked!");
let uri = req.uri();
let window = a
.app_handle()
.get_webview_window(a.webview_label())
.expect("Failed to get webview window for URI scheme event");
info!("Yaak URI scheme invoked with {uri:?} {window:?}");

let path = uri.path();
if path == "/data/import" {
warn!("TODO: import data")
} else if path == "/plugins/install" {
warn!("TODO: install plugin")
}

let msg = format!("No handler found for {path}");
tauri::http::Response::builder().status(404).body(msg.as_bytes().to_vec()).unwrap()
}
@@ -1,13 +1,22 @@
|
||||
use crate::window_menu::app_menu;
|
||||
use crate::{DEFAULT_WINDOW_HEIGHT, DEFAULT_WINDOW_WIDTH, MIN_WINDOW_HEIGHT, MIN_WINDOW_WIDTH};
|
||||
use log::{info, warn};
|
||||
use rand::random;
|
||||
use std::process::exit;
|
||||
use tauri::{
|
||||
AppHandle, Emitter, LogicalSize, Manager, Runtime, WebviewUrl, WebviewWindow,
|
||||
AppHandle, Emitter, LogicalSize, Manager, Runtime, WebviewUrl, WebviewWindow, WindowEvent,
|
||||
};
|
||||
use tauri_plugin_opener::OpenerExt;
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
const DEFAULT_WINDOW_WIDTH: f64 = 1100.0;
|
||||
const DEFAULT_WINDOW_HEIGHT: f64 = 600.0;
|
||||
|
||||
const MIN_WINDOW_WIDTH: f64 = 300.0;
|
||||
const MIN_WINDOW_HEIGHT: f64 = 300.0;
|
||||
|
||||
pub(crate) const MAIN_WINDOW_PREFIX: &str = "main_";
|
||||
const OTHER_WINDOW_PREFIX: &str = "other_";
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub(crate) struct CreateWindowConfig<'s> {
|
||||
pub url: &'s str,
|
||||
@@ -16,6 +25,8 @@ pub(crate) struct CreateWindowConfig<'s> {
|
||||
pub inner_size: Option<(f64, f64)>,
|
||||
pub position: Option<(f64, f64)>,
|
||||
pub navigation_tx: Option<mpsc::Sender<String>>,
|
||||
pub close_tx: Option<mpsc::Sender<()>>,
|
||||
pub data_dir_key: Option<String>,
|
||||
pub hide_titlebar: bool,
|
||||
}
|
||||
|
||||
@@ -41,6 +52,25 @@ pub(crate) fn create_window<R: Runtime>(
|
||||
.disable_drag_drop_handler() // Required for frontend Dnd on windows
|
||||
.min_inner_size(MIN_WINDOW_WIDTH, MIN_WINDOW_HEIGHT);
|
||||
|
||||
if let Some(key) = config.data_dir_key {
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
{
|
||||
use std::fs;
|
||||
let dir = handle.path().temp_dir().unwrap().join("yaak_sessions").join(key);
|
||||
fs::create_dir_all(dir.clone()).unwrap();
|
||||
win_builder = win_builder.data_directory(dir);
|
||||
}
|
||||
|
||||
// macOS doesn't support data dir so must use this fn instead
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let hash = md5::compute(key.as_bytes());
|
||||
let mut id = [0u8; 16];
|
||||
id.copy_from_slice(&hash[..16]); // Take the first 16 bytes of the hash
|
||||
win_builder = win_builder.data_store_identifier(id);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((w, h)) = config.inner_size {
|
||||
win_builder = win_builder.inner_size(w, h);
|
||||
} else {
|
||||
@@ -85,6 +115,18 @@ pub(crate) fn create_window<R: Runtime>(
|
||||
|
||||
let win = win_builder.build().unwrap();
|
||||
|
||||
if let Some(tx) = config.close_tx {
|
||||
win.on_window_event(move |event| match event {
|
||||
WindowEvent::CloseRequested { .. } => {
|
||||
let tx = tx.clone();
|
||||
tauri::async_runtime::block_on(async move {
|
||||
tx.send(()).await.unwrap();
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
});
|
||||
}
|
||||
|
||||
let webview_window = win.clone();
|
||||
win.on_menu_event(move |w, event| {
|
||||
if !w.is_focused().unwrap() {
|
||||
@@ -128,3 +170,95 @@ pub(crate) fn create_window<R: Runtime>(
|
||||
|
||||
win
|
||||
}
|
||||
|
||||
pub(crate) fn create_main_window(handle: &AppHandle, url: &str) -> WebviewWindow {
|
||||
let mut counter = 0;
|
||||
let label = loop {
|
||||
let label = format!("{MAIN_WINDOW_PREFIX}{counter}");
|
||||
match handle.webview_windows().get(label.as_str()) {
|
||||
None => break Some(label),
|
||||
Some(_) => counter += 1,
|
||||
}
|
||||
}
|
||||
.expect("Failed to generate label for new window");
|
||||
|
||||
let config = CreateWindowConfig {
|
||||
url,
|
||||
label: label.as_str(),
|
||||
title: "Yaak",
|
||||
inner_size: Some((DEFAULT_WINDOW_WIDTH, DEFAULT_WINDOW_HEIGHT)),
|
||||
position: Some((
|
||||
// Offset by random amount so it's easier to differentiate
|
||||
100.0 + random::<f64>() * 20.0,
|
||||
100.0 + random::<f64>() * 20.0,
|
||||
)),
|
||||
hide_titlebar: true,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
create_window(handle, config)
|
||||
}
|
||||
|
||||
pub(crate) fn create_child_window(parent_window: &WebviewWindow, url: &str, label: &str, title: &str, inner_size: (f64, f64)) -> WebviewWindow {
|
||||
let app_handle = parent_window.app_handle();
|
||||
let label = format!("{OTHER_WINDOW_PREFIX}_{label}");
|
||||
let scale_factor = parent_window.scale_factor().unwrap();
|
||||
|
||||
let current_pos = parent_window.inner_position().unwrap().to_logical::<f64>(scale_factor);
|
||||
let current_size = parent_window.inner_size().unwrap().to_logical::<f64>(scale_factor);
|
||||
|
||||
// Position the new window in the middle of the parent
|
||||
let position = (
|
||||
current_pos.x + current_size.width / 2.0 - inner_size.0 / 2.0,
|
||||
current_pos.y + current_size.height / 2.0 - inner_size.1 / 2.0,
|
||||
);
|
||||
|
||||
let config = CreateWindowConfig {
|
||||
label: label.as_str(),
|
||||
title,
|
||||
url,
|
||||
inner_size: Some(inner_size),
|
||||
position: Some(position),
|
||||
hide_titlebar: true,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let child_window = create_window(&app_handle, config);
|
||||
|
||||
// NOTE: These listeners will remain active even when the windows close. Unfortunately,
|
||||
// there's no way to unlisten to events for now, so we just have to be defensive.
|
||||
|
||||
{
|
||||
let parent_window = parent_window.clone();
|
||||
let child_window = child_window.clone();
|
||||
child_window.clone().on_window_event(move |e| match e {
|
||||
// When the new window is destroyed, bring the other up behind it
|
||||
WindowEvent::Destroyed => {
|
||||
if let Some(w) = parent_window.get_webview_window(child_window.label()) {
|
||||
w.set_focus().unwrap();
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let parent_window = parent_window.clone();
|
||||
let child_window = child_window.clone();
|
||||
parent_window.clone().on_window_event(move |e| match e {
|
||||
// When the parent window is closed, close the child
|
||||
WindowEvent::CloseRequested { .. } => child_window.destroy().unwrap(),
|
||||
// When the parent window is focused, bring the child above
|
||||
WindowEvent::Focused(focus) => {
|
||||
if *focus {
|
||||
if let Some(w) = parent_window.get_webview_window(child_window.label()) {
|
||||
w.set_focus().unwrap();
|
||||
};
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
});
|
||||
}
|
||||
|
||||
child_window
|
||||
}
|
||||
|
||||
src-tauri/vendored/plugins/auth-jwt/build/index.js (34 changed lines, generated)
@@ -1044,6 +1044,7 @@ var require_re = __commonJS({
|
||||
var re = exports2.re = [];
|
||||
var safeRe = exports2.safeRe = [];
|
||||
var src = exports2.src = [];
|
||||
var safeSrc = exports2.safeSrc = [];
|
||||
var t = exports2.t = {};
|
||||
var R = 0;
|
||||
var LETTERDASHNUMBER = "[a-zA-Z0-9-]";
|
||||
@@ -1064,6 +1065,7 @@ var require_re = __commonJS({
|
||||
debug(name, index, value);
|
||||
t[name] = index;
|
||||
src[index] = value;
|
||||
safeSrc[index] = safe;
|
||||
re[index] = new RegExp(value, isGlobal ? "g" : void 0);
|
||||
safeRe[index] = new RegExp(safe, isGlobal ? "g" : void 0);
|
||||
};
|
||||
@@ -1160,7 +1162,7 @@ var require_semver = __commonJS({
|
||||
"../../node_modules/semver/classes/semver.js"(exports2, module2) {
|
||||
var debug = require_debug();
|
||||
var { MAX_LENGTH, MAX_SAFE_INTEGER } = require_constants();
|
||||
var { safeRe: re, t } = require_re();
|
||||
var { safeRe: re, safeSrc: src, t } = require_re();
|
||||
var parseOptions = require_parse_options();
|
||||
var { compareIdentifiers } = require_identifiers();
|
||||
var SemVer = class _SemVer {
|
||||
@@ -1300,6 +1302,18 @@ var require_semver = __commonJS({
|
||||
// preminor will bump the version up to the next minor release, and immediately
|
||||
// down to pre-release. premajor and prepatch work the same way.
|
||||
inc(release, identifier, identifierBase) {
|
||||
if (release.startsWith("pre")) {
|
||||
if (!identifier && identifierBase === false) {
|
||||
throw new Error("invalid increment argument: identifier is empty");
|
||||
}
|
||||
if (identifier) {
|
||||
const r = new RegExp(`^${this.options.loose ? src[t.PRERELEASELOOSE] : src[t.PRERELEASE]}$`);
|
||||
const match = `-${identifier}`.match(r);
|
||||
if (!match || match[1] !== identifier) {
|
||||
throw new Error(`invalid identifier: ${identifier}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
switch (release) {
|
||||
case "premajor":
|
||||
this.prerelease.length = 0;
|
||||
@@ -1325,6 +1339,12 @@ var require_semver = __commonJS({
|
||||
}
|
||||
this.inc("pre", identifier, identifierBase);
|
||||
break;
|
||||
case "release":
|
||||
if (this.prerelease.length === 0) {
|
||||
throw new Error(`version ${this.raw} is not a prerelease`);
|
||||
}
|
||||
this.prerelease.length = 0;
|
||||
break;
|
||||
case "major":
|
||||
if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) {
|
||||
this.major++;
|
||||
@@ -1348,9 +1368,6 @@ var require_semver = __commonJS({
|
||||
break;
|
||||
case "pre": {
|
||||
const base = Number(identifierBase) ? 1 : 0;
|
||||
if (!identifier && identifierBase === false) {
|
||||
throw new Error("invalid increment argument: identifier is empty");
|
||||
}
|
||||
if (this.prerelease.length === 0) {
|
||||
this.prerelease = [base];
|
||||
} else {
|
||||
@@ -1485,13 +1502,12 @@ var require_diff = __commonJS({
|
||||
if (!lowVersion.patch && !lowVersion.minor) {
|
||||
return "major";
|
||||
}
|
||||
if (highVersion.patch) {
|
||||
if (lowVersion.compareMain(highVersion) === 0) {
|
||||
if (lowVersion.minor && !lowVersion.patch) {
|
||||
return "minor";
|
||||
}
|
||||
return "patch";
|
||||
}
|
||||
if (highVersion.minor) {
|
||||
return "minor";
|
||||
}
|
||||
return "major";
|
||||
}
|
||||
const prefix = highHasPre ? "pre" : "";
|
||||
if (v1.major !== v2.major) {
|
||||
|
||||
src-tauri/vendored/plugins/auth-oauth2/build/index.js (101 changed lines, generated)
@@ -102,9 +102,20 @@ async function getToken(ctx, contextId) {
|
||||
async function deleteToken(ctx, contextId) {
|
||||
return ctx.store.delete(tokenStoreKey(contextId));
|
||||
}
|
||||
async function resetDataDirKey(ctx, contextId) {
|
||||
const key = (/* @__PURE__ */ new Date()).toISOString();
|
||||
return ctx.store.set(dataDirStoreKey(contextId), key);
|
||||
}
|
||||
async function getDataDirKey(ctx, contextId) {
|
||||
const key = await ctx.store.get(dataDirStoreKey(contextId)) ?? "default";
|
||||
return `${contextId}::${key}`;
|
||||
}
|
||||
function tokenStoreKey(context_id) {
|
||||
return ["token", context_id].join("::");
|
||||
}
|
||||
function dataDirStoreKey(context_id) {
|
||||
return ["data_dir", context_id].join("::");
|
||||
}
|
||||
|
||||
// src/getOrRefreshAccessToken.ts
|
||||
async function getOrRefreshAccessToken(ctx, contextId, {
|
||||
@@ -119,7 +130,7 @@ async function getOrRefreshAccessToken(ctx, contextId, {
|
||||
if (token == null) {
|
||||
return null;
|
||||
}
|
||||
const now = Date.now() / 1e3;
|
||||
const now = Date.now();
|
||||
const isExpired = token.expiresAt && now > token.expiresAt;
|
||||
if (!isExpired && !forceRefresh) {
|
||||
return token;
|
||||
@@ -218,9 +229,16 @@ async function getAuthorizationCode(ctx, contextId, {
|
||||
return new Promise(async (resolve, reject) => {
|
||||
const authorizationUrlStr = authorizationUrl.toString();
|
||||
console.log("Authorizing", authorizationUrlStr);
|
||||
let foundCode = false;
|
||||
let { close } = await ctx.window.openUrl({
|
||||
url: authorizationUrlStr,
|
||||
label: "oauth-authorization-url",
|
||||
dataDirKey: await getDataDirKey(ctx, contextId),
|
||||
async onClose() {
|
||||
if (!foundCode) {
|
||||
reject(new Error("Authorization window closed"));
|
||||
}
|
||||
},
|
||||
async onNavigate({ url: urlStr }) {
|
||||
const url = new URL(urlStr);
|
||||
if (url.searchParams.has("error")) {
|
||||
@@ -230,6 +248,7 @@ async function getAuthorizationCode(ctx, contextId, {
|
||||
if (!code) {
|
||||
return;
|
||||
}
|
||||
foundCode = true;
|
||||
close();
|
||||
const response = await getAccessToken(ctx, {
|
||||
grantType: "authorization_code",
|
||||
@@ -428,7 +447,6 @@ var plugin = {
|
||||
actions: [
|
||||
{
|
||||
label: "Copy Current Token",
|
||||
icon: "copy",
|
||||
async onSelect(ctx, { contextId }) {
|
||||
const token = await getToken(ctx, contextId);
|
||||
if (token == null) {
|
||||
@@ -441,7 +459,6 @@ var plugin = {
|
||||
},
|
||||
{
|
||||
label: "Delete Token",
|
||||
icon: "trash",
|
||||
async onSelect(ctx, { contextId }) {
|
||||
if (await deleteToken(ctx, contextId)) {
|
||||
await ctx.toast.show({ message: "Token deleted", color: "success" });
|
||||
@@ -449,6 +466,12 @@ var plugin = {
|
||||
await ctx.toast.show({ message: "No token to delete", color: "warning" });
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
label: "Clear Window Session",
|
||||
async onSelect(ctx, { contextId }) {
|
||||
await resetDataDirKey(ctx, contextId);
|
||||
}
|
||||
}
|
||||
],
|
||||
args: [
|
||||
@@ -461,17 +484,24 @@ var plugin = {
|
||||
options: grantTypes
|
||||
},
|
||||
// Always-present fields
|
||||
{ type: "text", name: "clientId", label: "Client ID" },
|
||||
{
|
||||
type: "text",
|
||||
name: "clientId",
|
||||
label: "Client ID",
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
type: "text",
|
||||
name: "clientSecret",
|
||||
label: "Client Secret",
|
||||
optional: true,
|
||||
password: true,
|
||||
dynamic: hiddenIfNot(["authorization_code", "password", "client_credentials"])
|
||||
},
|
||||
{
|
||||
type: "text",
|
||||
name: "authorizationUrl",
|
||||
optional: true,
|
||||
label: "Authorization URL",
|
||||
dynamic: hiddenIfNot(["authorization_code", "implicit"]),
|
||||
placeholder: authorizationUrls[0],
|
||||
@@ -480,6 +510,7 @@ var plugin = {
|
||||
{
|
||||
type: "text",
|
||||
name: "accessTokenUrl",
|
||||
optional: true,
|
||||
label: "Access Token URL",
|
||||
placeholder: accessTokenUrls[0],
|
||||
dynamic: hiddenIfNot(["authorization_code", "password", "client_credentials"]),
|
||||
@@ -590,55 +621,55 @@ var plugin = {
|
||||
}
|
||||
],
|
||||
async onApply(ctx, { values, contextId }) {
|
||||
const headerPrefix = optionalString(values, "headerPrefix") ?? "";
|
||||
const grantType = requiredString(values, "grantType");
|
||||
const headerPrefix = stringArg(values, "headerPrefix");
|
||||
const grantType = stringArg(values, "grantType");
|
||||
const credentialsInBody = values.credentials === "body";
|
||||
let token;
|
||||
if (grantType === "authorization_code") {
|
||||
const authorizationUrl = requiredString(values, "authorizationUrl");
|
||||
const accessTokenUrl = requiredString(values, "accessTokenUrl");
|
||||
const authorizationUrl = stringArg(values, "authorizationUrl");
|
||||
const accessTokenUrl = stringArg(values, "accessTokenUrl");
|
||||
token = await getAuthorizationCode(ctx, contextId, {
|
||||
accessTokenUrl: accessTokenUrl.match(/^https?:\/\//) ? accessTokenUrl : `https://${accessTokenUrl}`,
|
||||
authorizationUrl: authorizationUrl.match(/^https?:\/\//) ? authorizationUrl : `https://${authorizationUrl}`,
|
||||
clientId: requiredString(values, "clientId"),
|
||||
clientSecret: requiredString(values, "clientSecret"),
|
||||
redirectUri: optionalString(values, "redirectUri"),
|
||||
scope: optionalString(values, "scope"),
|
||||
state: optionalString(values, "state"),
|
||||
clientId: stringArg(values, "clientId"),
|
||||
clientSecret: stringArg(values, "clientSecret"),
|
||||
redirectUri: stringArgOrNull(values, "redirectUri"),
|
||||
scope: stringArgOrNull(values, "scope"),
|
||||
state: stringArgOrNull(values, "state"),
|
||||
credentialsInBody,
|
||||
pkce: values.usePkce ? {
|
||||
challengeMethod: requiredString(values, "pkceChallengeMethod"),
|
||||
codeVerifier: optionalString(values, "pkceCodeVerifier")
|
||||
challengeMethod: stringArg(values, "pkceChallengeMethod"),
|
||||
codeVerifier: stringArgOrNull(values, "pkceCodeVerifier")
|
||||
} : null
|
||||
});
|
||||
} else if (grantType === "implicit") {
|
||||
const authorizationUrl = requiredString(values, "authorizationUrl");
|
||||
const authorizationUrl = stringArg(values, "authorizationUrl");
|
||||
token = await getImplicit(ctx, contextId, {
|
||||
authorizationUrl: authorizationUrl.match(/^https?:\/\//) ? authorizationUrl : `https://${authorizationUrl}`,
|
||||
clientId: requiredString(values, "clientId"),
|
||||
redirectUri: optionalString(values, "redirectUri"),
|
||||
responseType: requiredString(values, "responseType"),
|
||||
scope: optionalString(values, "scope"),
|
||||
state: optionalString(values, "state")
|
||||
clientId: stringArg(values, "clientId"),
|
||||
redirectUri: stringArgOrNull(values, "redirectUri"),
|
||||
responseType: stringArg(values, "responseType"),
|
||||
scope: stringArgOrNull(values, "scope"),
|
||||
state: stringArgOrNull(values, "state")
|
||||
});
|
||||
} else if (grantType === "client_credentials") {
|
||||
const accessTokenUrl = requiredString(values, "accessTokenUrl");
|
||||
const accessTokenUrl = stringArg(values, "accessTokenUrl");
|
||||
token = await getClientCredentials(ctx, contextId, {
|
||||
accessTokenUrl: accessTokenUrl.match(/^https?:\/\//) ? accessTokenUrl : `https://${accessTokenUrl}`,
|
||||
clientId: requiredString(values, "clientId"),
|
||||
clientSecret: requiredString(values, "clientSecret"),
|
||||
scope: optionalString(values, "scope"),
|
||||
clientId: stringArg(values, "clientId"),
|
||||
clientSecret: stringArg(values, "clientSecret"),
|
||||
scope: stringArgOrNull(values, "scope"),
|
||||
credentialsInBody
|
||||
});
|
||||
} else if (grantType === "password") {
|
||||
const accessTokenUrl = requiredString(values, "accessTokenUrl");
|
||||
const accessTokenUrl = stringArg(values, "accessTokenUrl");
|
||||
token = await getPassword(ctx, contextId, {
|
||||
accessTokenUrl: accessTokenUrl.match(/^https?:\/\//) ? accessTokenUrl : `https://${accessTokenUrl}`,
|
||||
clientId: requiredString(values, "clientId"),
|
||||
clientSecret: requiredString(values, "clientSecret"),
|
||||
username: requiredString(values, "username"),
|
||||
password: requiredString(values, "password"),
|
||||
scope: optionalString(values, "scope"),
|
||||
clientId: stringArg(values, "clientId"),
|
||||
clientSecret: stringArg(values, "clientSecret"),
|
||||
username: stringArg(values, "username"),
|
||||
password: stringArg(values, "password"),
|
||||
scope: stringArgOrNull(values, "scope"),
|
||||
credentialsInBody
|
||||
});
|
||||
} else {
|
||||
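Each grant type above normalizes bare hosts before requesting a token: an authorizationUrl or accessTokenUrl without an explicit scheme gets an https:// prefix. A minimal TypeScript sketch of that inline check; the helper name and sample values are illustrative only, not part of the plugin.

// Hypothetical helper mirroring the inline scheme check used for accessTokenUrl/authorizationUrl above.
function normalizeUrl(raw: string): string {
  // Leave explicit http:// or https:// URLs untouched; otherwise assume https.
  return /^https?:\/\//.test(raw) ? raw : `https://${raw}`;
}

normalizeUrl("auth.example.com/oauth/token"); // "https://auth.example.com/oauth/token"
normalizeUrl("http://localhost:8080/token");  // unchanged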
@@ -654,14 +685,14 @@ var plugin = {
|
||||
}
|
||||
}
|
||||
};
|
||||
function optionalString(values, name) {
|
||||
function stringArgOrNull(values, name) {
|
||||
const arg = values[name];
|
||||
if (arg == null || arg == "") return null;
|
||||
return `${arg}`;
|
||||
}
|
||||
function requiredString(values, name) {
|
||||
const arg = optionalString(values, name);
|
||||
if (!arg) throw new Error(`Missing required argument ${name}`);
|
||||
function stringArg(values, name) {
|
||||
const arg = stringArgOrNull(values, name);
|
||||
if (!arg) return "";
|
||||
return arg;
|
||||
}
|
||||
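This hunk swaps the throwing requiredString/optionalString helpers for stringArg/stringArgOrNull, which fall back to an empty string instead of raising on missing values. A typed TypeScript sketch of the two helpers as they read after the change (the Record typing is an assumption; the bundled code is untyped):

// Returns null for missing or empty values, otherwise the value coerced to a string.
function stringArgOrNull(values: Record<string, any>, name: string): string | null {
  const arg = values[name];
  if (arg == null || arg == "") return null; // loose equality, as in the bundle
  return `${arg}`;
}

// Never throws: a missing argument becomes "" rather than an error.
function stringArg(values: Record<string, any>, name: string): string {
  return stringArgOrNull(values, name) ?? "";
}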
// Annotate the CommonJS export names for ESM import in node:
|
||||
|
||||
@@ -36,6 +36,9 @@ var require_quote = __commonJS({
|
||||
"use strict";
|
||||
module2.exports = function quote(xs) {
|
||||
return xs.map(function(s) {
|
||||
if (s === "") {
|
||||
return "''";
|
||||
}
|
||||
if (s && typeof s === "object") {
|
||||
return s.op.replace(/(.)/g, "\\$1");
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -49782,7 +49782,11 @@ var require_property_base = __commonJS({
|
||||
* @returns {*|undefined}
|
||||
*/
|
||||
parent() {
|
||||
return this && this.__parent && (this.__parent.__parent || this.__parent) || void 0;
|
||||
let parent = this.__parent;
|
||||
if (parent && parent._postman_propertyIsList) {
|
||||
parent = parent.__parent || parent;
|
||||
}
|
||||
return parent || void 0;
|
||||
},
|
||||
/**
|
||||
* Accepts an object and sets it as the parent of the current property.
|
||||
@@ -77141,7 +77145,7 @@ var require_dynamic_variables = __commonJS({
|
||||
description: "A random avatar image",
|
||||
generator: () => {
|
||||
return faker.random.arrayElement([
|
||||
// eslint-disable-next-line max-len
|
||||
// eslint-disable-next-line @stylistic/js/max-len
|
||||
`https://cloudflare-ipfs.com/ipfs/Qmd3W5DuhgHirLHGVixi6V76LhCkZUz6pnFt5AJBiyvHye/avatar/${faker.datatype.number(1249)}.jpg`,
|
||||
`https://avatars.githubusercontent.com/u/${faker.datatype.number(1e8)}`
|
||||
]);
|
||||
@@ -77494,7 +77498,7 @@ var require_superstring = __commonJS({
|
||||
* @readOnly
|
||||
* @type {RegExp}
|
||||
*/
|
||||
REGEX_EXTRACT_VARS: /\{\{([^{}]*?)}}/g,
|
||||
REGEX_EXTRACT_VARS: /{{([^{}]*?)}}/g,
|
||||
/**
|
||||
* Defines the number of times the variable substitution mechanism will repeat until all tokens are resolved
|
||||
*
|
||||
@@ -77589,7 +77593,7 @@ var require_property = __commonJS({
|
||||
} else if (typeof value === "object") {
|
||||
seen.add(value);
|
||||
for (const key in value) {
|
||||
if (Object.hasOwnProperty.call(value, key)) {
|
||||
if (Object.hasOwn(value, key)) {
|
||||
_findSubstitutions(value[key], seen, result);
|
||||
}
|
||||
}
|
||||
@@ -77894,7 +77898,7 @@ var require_property_list = __commonJS({
|
||||
before > -1 ? this.members.splice(before, 0, item) : this.members.push(item);
|
||||
if ((index = item[this._postman_listIndexKey]) && (index = String(index))) {
|
||||
this._postman_listIndexCaseInsensitive && (index = index.toLowerCase());
|
||||
if (this._postman_listAllowsMultipleValues && Object.hasOwnProperty.call(this.reference, index)) {
|
||||
if (this._postman_listAllowsMultipleValues && Object.hasOwn(this.reference, index)) {
|
||||
!_2.isArray(this.reference[index]) && (this.reference[index] = [this.reference[index]]);
|
||||
this.reference[index].push(item);
|
||||
} else {
|
||||
@@ -78458,7 +78462,7 @@ var require_parser = __commonJS({
|
||||
var ReplacementTracker = require_replacement_tracker();
|
||||
var REGEX_ALL_BACKSLASHES = /\\/g;
|
||||
var REGEX_LEADING_SLASHES = /^\/+/;
|
||||
var REGEX_ALL_VARIABLES = /{{[^{}]*[.:/?#@&\]][^{}]*}}/g;
|
||||
var REGEX_ALL_VARIABLES = /{{[^{}]*}}/g;
|
||||
var HASH_SEPARATOR = "#";
|
||||
var PATH_SEPARATOR = "/";
|
||||
var PORT_SEPARATOR = ":";
|
||||
@@ -78613,6 +78617,7 @@ var require_query_param = __commonJS({
|
||||
"../../node_modules/postman-collection/lib/collection/query-param.js"(exports2, module2) {
|
||||
var _2 = require_util2().lodash;
|
||||
var Property = require_property().Property;
|
||||
var Substitutor = require_superstring().Substitutor;
|
||||
var PropertyList = require_property_list().PropertyList;
|
||||
var E = "";
|
||||
var AMPERSAND = "&";
|
||||
@@ -78623,7 +78628,7 @@ var require_query_param = __commonJS({
|
||||
var REGEX_HASH = /#/g;
|
||||
var REGEX_EQUALS = /=/g;
|
||||
var REGEX_AMPERSAND = /&/g;
|
||||
var REGEX_EXTRACT_VARS = /{{[^{}]*[&#=][^{}]*}}/g;
|
||||
var REGEX_EXTRACT_VARS = Substitutor.REGEX_EXTRACT_VARS;
|
||||
var QueryParam;
|
||||
var encodeReservedChars = function(str, encodeEquals) {
|
||||
if (!str) {
|
||||
@@ -92126,7 +92131,7 @@ var require_db3 = __commonJS({
|
||||
type: "embed",
|
||||
format: "pdf"
|
||||
},
|
||||
"application/ecmascript": {
|
||||
"text/javascript": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
@@ -92134,6 +92139,66 @@ var require_db3 = __commonJS({
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"application/ecmascript": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"application/x-ecmascript": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"application/x-javascript": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"text/ecmascript": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"text/javascript1.0": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"text/javascript1.1": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"text/javascript1.2": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"text/javascript1.3": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"text/javascript1.4": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"text/javascript1.5": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"text/jscript": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"text/livescript": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"text/x-ecmascript": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"text/x-javascript": {
|
||||
type: "text",
|
||||
format: "script"
|
||||
},
|
||||
"text/css": {
|
||||
type: "text",
|
||||
format: "stylesheet"
|
||||
},
|
||||
"application/json": {
|
||||
type: "text",
|
||||
format: "json"
|
||||
@@ -94086,7 +94151,7 @@ var require_content_info = __commonJS({
|
||||
* egHeader: inline; filename="test Response.json"
|
||||
* Reference: https://github.com/jshttp/content-disposition
|
||||
*/
|
||||
// eslint-disable-next-line max-len
|
||||
// eslint-disable-next-line @stylistic/js/max-len
|
||||
fileNameRegex: /;[ \t]*(?:filename)[ \t]*=[ \t]*("(?:[\x20!\x23-\x5b\x5d-\x7e\x80-\xff]|\\[\x20-\x7e])*"|[!#$%&'*+.0-9A-Z^_`a-z|~-]+)[ \t]*/,
|
||||
/**
|
||||
* RegExp for extracting filename* from content-disposition header
|
||||
@@ -94119,7 +94184,7 @@ var require_content_info = __commonJS({
|
||||
* egHeader: attachment;filename*=utf-8''%E4%BD%A0%E5%A5%BD.txt
|
||||
* Reference: https://github.com/jshttp/content-disposition
|
||||
*/
|
||||
// eslint-disable-next-line max-len, security/detect-unsafe-regex
|
||||
// eslint-disable-next-line @stylistic/js/max-len, security/detect-unsafe-regex
|
||||
encodedFileNameRegex: /;[ \t]*(?:filename\*)[ \t]*=[ \t]*([A-Za-z0-9!#$%&+\-^_`{}~]+)'.*'((?:%[0-9A-Fa-f]{2}|[A-Za-z0-9!#$&+.^_`|~-])+)[ \t]*/,
|
||||
/**
|
||||
* RegExp to match quoted-pair in RFC 2616
|
||||
@@ -95313,6 +95378,7 @@ var require_re = __commonJS({
|
||||
var re = exports2.re = [];
|
||||
var safeRe = exports2.safeRe = [];
|
||||
var src = exports2.src = [];
|
||||
var safeSrc = exports2.safeSrc = [];
|
||||
var t = exports2.t = {};
|
||||
var R = 0;
|
||||
var LETTERDASHNUMBER = "[a-zA-Z0-9-]";
|
||||
@@ -95333,6 +95399,7 @@ var require_re = __commonJS({
|
||||
debug(name, index, value);
|
||||
t[name] = index;
|
||||
src[index] = value;
|
||||
safeSrc[index] = safe;
|
||||
re[index] = new RegExp(value, isGlobal ? "g" : void 0);
|
||||
safeRe[index] = new RegExp(safe, isGlobal ? "g" : void 0);
|
||||
};
|
||||
@@ -95429,7 +95496,7 @@ var require_semver = __commonJS({
|
||||
"../../node_modules/semver/classes/semver.js"(exports2, module2) {
|
||||
var debug = require_debug();
|
||||
var { MAX_LENGTH, MAX_SAFE_INTEGER } = require_constants();
|
||||
var { safeRe: re, t } = require_re();
|
||||
var { safeRe: re, safeSrc: src, t } = require_re();
|
||||
var parseOptions = require_parse_options();
|
||||
var { compareIdentifiers } = require_identifiers();
|
||||
var SemVer = class _SemVer {
|
||||
@@ -95569,6 +95636,18 @@ var require_semver = __commonJS({
|
||||
// preminor will bump the version up to the next minor release, and immediately
|
||||
// down to pre-release. premajor and prepatch work the same way.
|
||||
inc(release, identifier, identifierBase) {
|
||||
if (release.startsWith("pre")) {
|
||||
if (!identifier && identifierBase === false) {
|
||||
throw new Error("invalid increment argument: identifier is empty");
|
||||
}
|
||||
if (identifier) {
|
||||
const r = new RegExp(`^${this.options.loose ? src[t.PRERELEASELOOSE] : src[t.PRERELEASE]}$`);
|
||||
const match = `-${identifier}`.match(r);
|
||||
if (!match || match[1] !== identifier) {
|
||||
throw new Error(`invalid identifier: ${identifier}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
switch (release) {
|
||||
case "premajor":
|
||||
this.prerelease.length = 0;
|
||||
@@ -95594,6 +95673,12 @@ var require_semver = __commonJS({
|
||||
}
|
||||
this.inc("pre", identifier, identifierBase);
|
||||
break;
|
||||
case "release":
|
||||
if (this.prerelease.length === 0) {
|
||||
throw new Error(`version ${this.raw} is not a prerelease`);
|
||||
}
|
||||
this.prerelease.length = 0;
|
||||
break;
|
||||
case "major":
|
||||
if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) {
|
||||
this.major++;
|
||||
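The added "release" case gives SemVer#inc a way to drop pre-release identifiers without bumping any numeric part, throwing when the version has no pre-release. A small TypeScript illustration, assuming a semver build that ships this case (the cast covers typings that may not list "release" yet):

import * as semver from "semver";

// "release" clears the pre-release identifiers and nothing else.
semver.inc("1.2.3-beta.4", "release" as semver.ReleaseType); // "1.2.3"

// A plain release version is rejected (the Error thrown above), so inc() returns null here.
semver.inc("1.2.3", "release" as semver.ReleaseType);        // null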
@@ -95617,9 +95702,6 @@ var require_semver = __commonJS({
|
||||
break;
|
||||
case "pre": {
|
||||
const base = Number(identifierBase) ? 1 : 0;
|
||||
if (!identifier && identifierBase === false) {
|
||||
throw new Error("invalid increment argument: identifier is empty");
|
||||
}
|
||||
if (this.prerelease.length === 0) {
|
||||
this.prerelease = [base];
|
||||
} else {
|
||||
@@ -95754,13 +95836,12 @@ var require_diff = __commonJS({
|
||||
if (!lowVersion.patch && !lowVersion.minor) {
|
||||
return "major";
|
||||
}
|
||||
if (highVersion.patch) {
|
||||
if (lowVersion.compareMain(highVersion) === 0) {
|
||||
if (lowVersion.minor && !lowVersion.patch) {
|
||||
return "minor";
|
||||
}
|
||||
return "patch";
|
||||
}
|
||||
if (highVersion.minor) {
|
||||
return "minor";
|
||||
}
|
||||
return "major";
|
||||
}
|
||||
const prefix = highHasPre ? "pre" : "";
|
||||
if (v12.major !== v2.major) {
|
||||
@@ -136895,8 +136976,6 @@ var require_utils3 = __commonJS({
|
||||
originalRequest.url.query = [];
|
||||
originalRequest.header = _2.get(response, "originalRequest.headers", []);
|
||||
originalRequest.body = requestItem.request.body;
|
||||
response.code = response.code.replace(/X|x/g, "0");
|
||||
response.code = response.code === "default" ? 500 : _2.toSafeInteger(response.code);
|
||||
let sdkResponse = new Response({
|
||||
name: response.name,
|
||||
code: response.code,
|
||||
@@ -137692,7 +137771,7 @@ var require_schemaUtils2 = __commonJS({
|
||||
}
|
||||
exampleKey = Object.keys(exampleObj)[0];
|
||||
example = exampleObj[exampleKey];
|
||||
if (example.$ref) {
|
||||
if (example && example.$ref) {
|
||||
example = resolveExampleData(context, example);
|
||||
}
|
||||
if (_2.get(example, "value")) {
|
||||
@@ -137881,6 +137960,65 @@ var require_schemaUtils2 = __commonJS({
|
||||
}
|
||||
return schema2;
|
||||
};
|
||||
var processSchema = (resolvedSchema) => {
|
||||
if (resolvedSchema.type === "object" && resolvedSchema.properties) {
|
||||
const schemaDetails = {
|
||||
type: resolvedSchema.type,
|
||||
properties: {},
|
||||
required: []
|
||||
}, requiredProperties = new Set(resolvedSchema.required || []);
|
||||
for (let [propName, propValue] of Object.entries(resolvedSchema.properties)) {
|
||||
if (!propValue.type) {
|
||||
continue;
|
||||
}
|
||||
const propertyDetails = {
|
||||
type: propValue.type,
|
||||
deprecated: propValue.deprecated,
|
||||
enum: propValue.enum || void 0,
|
||||
minLength: propValue.minLength,
|
||||
maxLength: propValue.maxLength,
|
||||
minimum: propValue.minimum,
|
||||
maximum: propValue.maximum,
|
||||
pattern: propValue.pattern,
|
||||
example: propValue.example,
|
||||
description: propValue.description,
|
||||
format: propValue.format
|
||||
};
|
||||
if (requiredProperties.has(propName)) {
|
||||
schemaDetails.required.push(propName);
|
||||
}
|
||||
if (propValue.properties) {
|
||||
let processedProperties = processSchema(propValue);
|
||||
propertyDetails.properties = processedProperties.properties;
|
||||
if (processedProperties.required) {
|
||||
propertyDetails.required = processedProperties.required;
|
||||
}
|
||||
} else if (propValue.type === "array" && propValue.items) {
|
||||
propertyDetails.items = processSchema(propValue.items);
|
||||
}
|
||||
schemaDetails.properties[propName] = propertyDetails;
|
||||
}
|
||||
if (schemaDetails.required && schemaDetails.required.length === 0) {
|
||||
schemaDetails.required = void 0;
|
||||
}
|
||||
return schemaDetails;
|
||||
} else if (resolvedSchema.type === "array" && resolvedSchema.items) {
|
||||
const arrayDetails = {
|
||||
type: resolvedSchema.type,
|
||||
items: processSchema(resolvedSchema.items)
|
||||
};
|
||||
if (resolvedSchema.minItems !== void 0) {
|
||||
arrayDetails.minItems = resolvedSchema.minItems;
|
||||
}
|
||||
if (resolvedSchema.maxItems !== void 0) {
|
||||
arrayDetails.maxItems = resolvedSchema.maxItems;
|
||||
}
|
||||
return arrayDetails;
|
||||
}
|
||||
return {
|
||||
type: resolvedSchema.type
|
||||
};
|
||||
};
|
||||
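processSchema above walks an already-resolved schema and keeps a flat set of type facets (type, enum, length and range bounds, pattern, example, description, format), recursing into object properties and array items. A hedged TypeScript sketch of the mapping it performs; the concrete schema is made up, the field names come from the code:

// Example input: a resolved OpenAPI object schema.
const resolved = {
  type: "object",
  required: ["id"],
  properties: {
    id: { type: "string", minLength: 1, format: "uuid" },
    tags: { type: "array", items: { type: "string" } },
  },
};

// processSchema(resolved) would produce roughly (omitted facets come back as undefined):
const extracted = {
  type: "object",
  required: ["id"],
  properties: {
    id: { type: "string", minLength: 1, format: "uuid" },
    tags: { type: "array", items: { type: "string" } },
  },
};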
var resolveSchema = (context, schema2, { stack = 0, resolveFor = CONVERSION, seenRef = {}, isResponseSchema = false } = {}) => {
|
||||
resetReadWritePropCache(context);
|
||||
let resolvedSchema = _resolveSchema(context, schema2, stack, resolveFor, seenRef);
|
||||
@@ -138227,12 +138365,16 @@ var require_schemaUtils2 = __commonJS({
|
||||
return pmExamples;
|
||||
};
|
||||
var resolveBodyData = (context, requestBodySchema, bodyType, isExampleBody = false, responseCode = null, requestBodyExamples = {}) => {
|
||||
let { parametersResolution, indentCharacter } = context.computedOptions, headerFamily = getHeaderFamily(bodyType), bodyData = "", shouldGenerateFromExample = parametersResolution === "example", isBodyTypeXML = bodyType === APP_XML || bodyType === TEXT_XML || headerFamily === HEADER_TYPE.XML, bodyKey = isExampleBody ? "response" : "request", responseExamples, example, examples;
|
||||
let { parametersResolution, indentCharacter } = context.computedOptions, headerFamily = getHeaderFamily(bodyType), bodyData = "", shouldGenerateFromExample = parametersResolution === "example", isBodyTypeXML = bodyType === APP_XML || bodyType === TEXT_XML || headerFamily === HEADER_TYPE.XML, bodyKey = isExampleBody ? "response" : "request", responseExamples, example, examples, resolvedSchemaTypes = [];
|
||||
if (_2.isEmpty(requestBodySchema)) {
|
||||
return [{ [bodyKey]: bodyData }];
|
||||
}
|
||||
if (requestBodySchema.$ref) {
|
||||
requestBodySchema = resolveSchema(context, requestBodySchema, { isResponseSchema: isExampleBody });
|
||||
requestBodySchema = resolveSchema(
|
||||
context,
|
||||
requestBodySchema,
|
||||
{ isResponseSchema: isExampleBody }
|
||||
);
|
||||
}
|
||||
if (requestBodySchema.example !== void 0) {
|
||||
const shouldResolveValueKey = _2.has(requestBodySchema.example, "value") && _2.keys(requestBodySchema.example).length <= 1;
|
||||
@@ -138243,7 +138385,11 @@ var require_schemaUtils2 = __commonJS({
|
||||
}
|
||||
examples = requestBodySchema.examples || _2.get(requestBodySchema, "schema.examples");
|
||||
requestBodySchema = requestBodySchema.schema || requestBodySchema;
|
||||
requestBodySchema = resolveSchema(context, requestBodySchema, { isResponseSchema: isExampleBody });
|
||||
requestBodySchema = resolveSchema(
|
||||
context,
|
||||
requestBodySchema,
|
||||
{ isResponseSchema: isExampleBody }
|
||||
);
|
||||
if (example === void 0 && _2.get(requestBodySchema, "example") !== void 0) {
|
||||
example = requestBodySchema.example;
|
||||
}
|
||||
@@ -138257,7 +138403,11 @@ var require_schemaUtils2 = __commonJS({
|
||||
} else if (requestBodySchema) {
|
||||
requestBodySchema = requestBodySchema.schema || requestBodySchema;
|
||||
if (requestBodySchema.$ref) {
|
||||
requestBodySchema = resolveSchema(context, requestBodySchema, { isResponseSchema: isExampleBody });
|
||||
requestBodySchema = resolveSchema(
|
||||
context,
|
||||
requestBodySchema,
|
||||
{ isResponseSchema: isExampleBody }
|
||||
);
|
||||
}
|
||||
if (isBodyTypeXML) {
|
||||
bodyData = xmlFaker(null, requestBodySchema, indentCharacter, parametersResolution);
|
||||
@@ -138285,6 +138435,10 @@ var require_schemaUtils2 = __commonJS({
|
||||
}
|
||||
}
|
||||
}
|
||||
if (context.enableTypeFetching && requestBodySchema.type !== void 0) {
|
||||
const requestBodySchemaTypes = processSchema(requestBodySchema);
|
||||
resolvedSchemaTypes.push(requestBodySchemaTypes);
|
||||
}
|
||||
if (isExampleBody && shouldGenerateFromExample && (_2.size(examples) > 1 || _2.size(requestBodyExamples) > 1)) {
|
||||
responseExamples = [{
|
||||
key: "_default",
|
||||
@@ -138305,22 +138459,37 @@ var require_schemaUtils2 = __commonJS({
|
||||
if (_2.isEmpty(matchedRequestBodyExamples)) {
|
||||
matchedRequestBodyExamples = requestBodyExamples;
|
||||
}
|
||||
return generateExamples(context, responseExamples, matchedRequestBodyExamples, requestBodySchema, isBodyTypeXML);
|
||||
const generatedBody = generateExamples(
|
||||
context,
|
||||
responseExamples,
|
||||
matchedRequestBodyExamples,
|
||||
requestBodySchema,
|
||||
isBodyTypeXML
|
||||
);
|
||||
return {
|
||||
generatedBody,
|
||||
resolvedSchemaType: resolvedSchemaTypes[0]
|
||||
};
|
||||
}
|
||||
return [{ [bodyKey]: bodyData }];
|
||||
return {
|
||||
generatedBody: [{ [bodyKey]: bodyData }],
|
||||
resolvedSchemaType: resolvedSchemaTypes[0]
|
||||
};
|
||||
};
|
||||
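Note the changed contract in this hunk: resolveBodyData used to return a bare array of body objects and now wraps it in an object that also carries the extracted type details. A TypeScript interface sketch of the new return value; this is a reading of the code above, not a published typing:

interface ResolvedBodyResult {
  // One entry per generated example, keyed by "request" or "response".
  generatedBody: Array<Record<string, unknown>>;
  // Flattened facets from processSchema; only set when enableTypeFetching is on
  // and the body schema declares a type.
  resolvedSchemaType?: unknown;
}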
var resolveUrlEncodedRequestBodyForPostmanRequest = (context, requestBodyContent) => {
|
||||
let bodyData = "", urlEncodedParams = [], requestBodyData = {
|
||||
mode: "urlencoded",
|
||||
urlencoded: urlEncodedParams
|
||||
}, resolvedBody;
|
||||
}, resolvedBody, resolvedBodyResult, resolvedSchemaTypeObject;
|
||||
if (_2.isEmpty(requestBodyContent)) {
|
||||
return requestBodyData;
|
||||
}
|
||||
if (_2.has(requestBodyContent, "schema.$ref")) {
|
||||
requestBodyContent.schema = resolveSchema(context, requestBodyContent.schema);
|
||||
}
|
||||
resolvedBody = resolveBodyData(context, requestBodyContent.schema)[0];
|
||||
resolvedBodyResult = resolveBodyData(context, requestBodyContent.schema);
|
||||
resolvedBody = resolvedBodyResult && Array.isArray(resolvedBodyResult.generatedBody) && resolvedBodyResult.generatedBody[0];
|
||||
resolvedSchemaTypeObject = resolvedBodyResult && resolvedBodyResult.resolvedSchemaType;
|
||||
resolvedBody && (bodyData = resolvedBody.request);
|
||||
const encoding = requestBodyContent.encoding || {};
|
||||
_2.forOwn(bodyData, (value, key) => {
|
||||
@@ -138342,18 +138511,21 @@ var require_schemaUtils2 = __commonJS({
|
||||
headers: [{
|
||||
key: "Content-Type",
|
||||
value: URLENCODED
|
||||
}]
|
||||
}],
|
||||
resolvedSchemaTypeObject
|
||||
};
|
||||
};
|
||||
var resolveFormDataRequestBodyForPostmanRequest = (context, requestBodyContent) => {
|
||||
let bodyData = "", formDataParams = [], encoding = {}, requestBodyData = {
|
||||
mode: "formdata",
|
||||
formdata: formDataParams
|
||||
}, resolvedBody;
|
||||
}, resolvedBody, resolvedBodyResult, resolvedSchemaTypeObject;
|
||||
if (_2.isEmpty(requestBodyContent)) {
|
||||
return requestBodyData;
|
||||
}
|
||||
resolvedBody = resolveBodyData(context, requestBodyContent.schema)[0];
|
||||
resolvedBodyResult = resolveBodyData(context, requestBodyContent.schema);
|
||||
resolvedBody = resolvedBodyResult && Array.isArray(resolvedBodyResult.generatedBody) && resolvedBodyResult.generatedBody[0];
|
||||
resolvedSchemaTypeObject = resolvedBodyResult && resolvedBodyResult.resolvedSchemaType;
|
||||
resolvedBody && (bodyData = resolvedBody.request);
|
||||
encoding = _2.get(requestBodyContent, "encoding", {});
|
||||
_2.forOwn(bodyData, (value, key) => {
|
||||
@@ -138389,7 +138561,8 @@ var require_schemaUtils2 = __commonJS({
|
||||
headers: [{
|
||||
key: "Content-Type",
|
||||
value: FORM_DATA
|
||||
}]
|
||||
}],
|
||||
resolvedSchemaTypeObject
|
||||
};
|
||||
};
|
||||
var getRawBodyType = (content) => {
|
||||
@@ -138425,14 +138598,16 @@ var require_schemaUtils2 = __commonJS({
|
||||
return bodyType;
|
||||
};
|
||||
var resolveRawModeRequestBodyForPostmanRequest = (context, requestContent) => {
|
||||
let bodyType = getRawBodyType(requestContent), bodyData, headerFamily, dataToBeReturned = {}, { concreteUtils } = context, resolvedBody;
|
||||
let bodyType = getRawBodyType(requestContent), bodyData, headerFamily, dataToBeReturned = {}, { concreteUtils } = context, resolvedBody, resolvedBodyResult, resolvedSchemaTypeObject;
|
||||
headerFamily = getHeaderFamily(bodyType);
|
||||
if (concreteUtils.isBinaryContentType(bodyType, requestContent)) {
|
||||
dataToBeReturned = {
|
||||
mode: "file"
|
||||
};
|
||||
} else {
|
||||
resolvedBody = resolveBodyData(context, requestContent[bodyType], bodyType)[0];
|
||||
resolvedBodyResult = resolveBodyData(context, requestContent[bodyType], bodyType);
|
||||
resolvedBody = resolvedBodyResult && Array.isArray(resolvedBodyResult.generatedBody) && resolvedBodyResult.generatedBody[0];
|
||||
resolvedSchemaTypeObject = resolvedBodyResult && resolvedBodyResult.resolvedSchemaType;
|
||||
resolvedBody && (bodyData = resolvedBody.request);
|
||||
if (bodyType === TEXT_XML || bodyType === APP_XML || headerFamily === HEADER_TYPE.XML) {
|
||||
bodyData = getXmlVersionContent(bodyData);
|
||||
@@ -138456,7 +138631,8 @@ var require_schemaUtils2 = __commonJS({
|
||||
headers: [{
|
||||
key: "Content-Type",
|
||||
value: bodyType
|
||||
}]
|
||||
}],
|
||||
resolvedSchemaTypeObject
|
||||
};
|
||||
};
|
||||
var resolveRequestBodyForPostmanRequest = (context, operationItem) => {
|
||||
@@ -138536,8 +138712,25 @@ var require_schemaUtils2 = __commonJS({
|
||||
});
|
||||
return reqParam;
|
||||
};
|
||||
var createProperties = (param) => {
|
||||
const { schema: schema2 } = param;
|
||||
return {
|
||||
type: schema2.type,
|
||||
format: schema2.format,
|
||||
default: schema2.default,
|
||||
required: param.required || false,
|
||||
deprecated: param.deprecated || false,
|
||||
enum: schema2.enum || void 0,
|
||||
minLength: schema2.minLength,
|
||||
maxLength: schema2.maxLength,
|
||||
minimum: schema2.minimum,
|
||||
maximum: schema2.maximum,
|
||||
pattern: schema2.pattern,
|
||||
example: schema2.example
|
||||
};
|
||||
};
|
||||
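createProperties lifts the reportable schema facets off a single OpenAPI parameter so the converter can emit parameter types alongside the generated request. A small TypeScript illustration with a made-up query parameter:

// Hypothetical parameter, as it would look after $ref resolution.
const param = {
  name: "limit",
  in: "query",
  required: false,
  schema: { type: "integer", minimum: 1, maximum: 100, default: 20, example: 25 },
};

// createProperties(param) would yield roughly (unset facets are undefined):
const properties = {
  type: "integer",
  default: 20,
  required: false,
  deprecated: false,
  minimum: 1,
  maximum: 100,
  example: 25,
};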
var resolveQueryParamsForPostmanRequest = (context, operationItem, method) => {
|
||||
const params = resolvePathItemParams(context, operationItem[method].parameters, operationItem.parameters), pmParams = [], { includeDeprecated } = context.computedOptions;
|
||||
const params = resolvePathItemParams(context, operationItem[method].parameters, operationItem.parameters), pmParams = [], queryParamTypes = [], { includeDeprecated } = context.computedOptions;
|
||||
_2.forEach(params, (param) => {
|
||||
if (!_2.isObject(param)) {
|
||||
return;
|
||||
@@ -138545,20 +138738,28 @@ var require_schemaUtils2 = __commonJS({
|
||||
if (_2.has(param, "$ref")) {
|
||||
param = resolveSchema(context, param);
|
||||
}
|
||||
if (_2.has(param.schema, "$ref")) {
|
||||
param.schema = resolveSchema(context, param.schema);
|
||||
}
|
||||
if (param.in !== QUERYPARAM || !includeDeprecated && param.deprecated) {
|
||||
return;
|
||||
}
|
||||
let paramValue = resolveValueOfParameter(context, param);
|
||||
let queryParamTypeInfo = {}, properties = {}, paramValue = resolveValueOfParameter(context, param);
|
||||
if (param && param.name && param.schema && param.schema.type) {
|
||||
properties = createProperties(param);
|
||||
queryParamTypeInfo = { keyName: param.name, properties };
|
||||
queryParamTypes.push(queryParamTypeInfo);
|
||||
}
|
||||
if (typeof paramValue === "number" || typeof paramValue === "boolean") {
|
||||
paramValue = paramValue.toString();
|
||||
}
|
||||
const deserialisedParams = serialiseParamsBasedOnStyle(context, param, paramValue);
|
||||
pmParams.push(...deserialisedParams);
|
||||
});
|
||||
return pmParams;
|
||||
return { queryParamTypes, queryParams: pmParams };
|
||||
};
|
||||
var resolvePathParamsForPostmanRequest = (context, operationItem, method) => {
|
||||
const params = resolvePathItemParams(context, operationItem[method].parameters, operationItem.parameters), pmParams = [];
|
||||
const params = resolvePathItemParams(context, operationItem[method].parameters, operationItem.parameters), pmParams = [], pathParamTypes = [];
|
||||
_2.forEach(params, (param) => {
|
||||
if (!_2.isObject(param)) {
|
||||
return;
|
||||
@@ -138566,17 +138767,25 @@ var require_schemaUtils2 = __commonJS({
|
||||
if (_2.has(param, "$ref")) {
|
||||
param = resolveSchema(context, param);
|
||||
}
|
||||
if (_2.has(param.schema, "$ref")) {
|
||||
param.schema = resolveSchema(context, param.schema);
|
||||
}
|
||||
if (param.in !== PATHPARAM) {
|
||||
return;
|
||||
}
|
||||
let paramValue = resolveValueOfParameter(context, param);
|
||||
let pathParamTypeInfo = {}, properties = {}, paramValue = resolveValueOfParameter(context, param);
|
||||
if (param && param.name && param.schema && param.schema.type) {
|
||||
properties = createProperties(param);
|
||||
pathParamTypeInfo = { keyName: param.name, properties };
|
||||
pathParamTypes.push(pathParamTypeInfo);
|
||||
}
|
||||
if (typeof paramValue === "number" || typeof paramValue === "boolean") {
|
||||
paramValue = paramValue.toString();
|
||||
}
|
||||
const deserialisedParams = serialiseParamsBasedOnStyle(context, param, paramValue);
|
||||
pmParams.push(...deserialisedParams);
|
||||
});
|
||||
return pmParams;
|
||||
return { pathParamTypes, pathParams: pmParams };
|
||||
};
|
||||
var resolveNameForPostmanReqeust = (context, operationItem, requestUrl) => {
|
||||
let reqName, { requestNameSource } = context.computedOptions;
|
||||
@@ -138598,7 +138807,7 @@ var require_schemaUtils2 = __commonJS({
|
||||
return reqName;
|
||||
};
|
||||
var resolveHeadersForPostmanRequest = (context, operationItem, method) => {
|
||||
const params = resolvePathItemParams(context, operationItem[method].parameters, operationItem.parameters), pmParams = [], { keepImplicitHeaders, includeDeprecated } = context.computedOptions;
|
||||
const params = resolvePathItemParams(context, operationItem[method].parameters, operationItem.parameters), pmParams = [], headerTypes = [], { keepImplicitHeaders, includeDeprecated } = context.computedOptions;
|
||||
_2.forEach(params, (param) => {
|
||||
if (!_2.isObject(param)) {
|
||||
return;
|
||||
@@ -138606,25 +138815,33 @@ var require_schemaUtils2 = __commonJS({
|
||||
if (_2.has(param, "$ref")) {
|
||||
param = resolveSchema(context, param);
|
||||
}
|
||||
if (_2.has(param.schema, "$ref")) {
|
||||
param.schema = resolveSchema(context, param.schema);
|
||||
}
|
||||
if (param.in !== HEADER || !includeDeprecated && param.deprecated) {
|
||||
return;
|
||||
}
|
||||
if (!keepImplicitHeaders && _2.includes(IMPLICIT_HEADERS, _2.toLower(_2.get(param, "name")))) {
|
||||
return;
|
||||
}
|
||||
let paramValue = resolveValueOfParameter(context, param);
|
||||
let headerTypeInfo = {}, properties = {}, paramValue = resolveValueOfParameter(context, param);
|
||||
if (param && param.name && param.schema && param.schema.type) {
|
||||
properties = createProperties(param);
|
||||
headerTypeInfo = { keyName: param.name, properties };
|
||||
headerTypes.push(headerTypeInfo);
|
||||
}
|
||||
if (typeof paramValue === "number" || typeof paramValue === "boolean") {
|
||||
paramValue = paramValue.toString();
|
||||
}
|
||||
const deserialisedParams = serialiseParamsBasedOnStyle(context, param, paramValue);
|
||||
pmParams.push(...deserialisedParams);
|
||||
});
|
||||
return pmParams;
|
||||
return { headerTypes, headers: pmParams };
|
||||
};
|
||||
var resolveResponseBody = (context, responseBody = {}, requestBodyExamples = {}, code = null) => {
|
||||
let responseContent, bodyType, allBodyData, headerFamily, acceptHeader, emptyResponse = [{
|
||||
body: void 0
|
||||
}];
|
||||
}], resolvedResponseBodyResult, resolvedResponseBodyTypes;
|
||||
if (_2.isEmpty(responseBody)) {
|
||||
return emptyResponse;
|
||||
}
|
||||
@@ -138637,7 +138854,16 @@ var require_schemaUtils2 = __commonJS({
|
||||
}
|
||||
bodyType = getRawBodyType(responseContent);
|
||||
headerFamily = getHeaderFamily(bodyType);
|
||||
allBodyData = resolveBodyData(context, responseContent[bodyType], bodyType, true, code, requestBodyExamples);
|
||||
resolvedResponseBodyResult = resolveBodyData(
|
||||
context,
|
||||
responseContent[bodyType],
|
||||
bodyType,
|
||||
true,
|
||||
code,
|
||||
requestBodyExamples
|
||||
);
|
||||
allBodyData = resolvedResponseBodyResult.generatedBody;
|
||||
resolvedResponseBodyTypes = resolvedResponseBodyResult.resolvedSchemaType;
|
||||
return _2.map(allBodyData, (bodyData) => {
|
||||
let requestBodyData = bodyData.request, responseBodyData = bodyData.response, exampleName = bodyData.name;
|
||||
if (bodyType === TEXT_XML || bodyType === APP_XML || headerFamily === HEADER_TYPE.XML) {
|
||||
@@ -138663,12 +138889,13 @@ var require_schemaUtils2 = __commonJS({
|
||||
}],
|
||||
name: exampleName,
|
||||
bodyType,
|
||||
acceptHeader
|
||||
acceptHeader,
|
||||
resolvedResponseBodyTypes
|
||||
};
|
||||
});
|
||||
};
|
||||
var resolveResponseHeaders = (context, responseHeaders) => {
|
||||
const headers = [], { includeDeprecated } = context.computedOptions;
|
||||
const headers = [], { includeDeprecated } = context.computedOptions, headerTypes = [];
|
||||
if (_2.has(responseHeaders, "$ref")) {
|
||||
responseHeaders = resolveSchema(context, responseHeaders, { isResponseSchema: true });
|
||||
}
|
||||
@@ -138679,14 +138906,33 @@ var require_schemaUtils2 = __commonJS({
|
||||
if (!includeDeprecated && value.deprecated) {
|
||||
return;
|
||||
}
|
||||
let headerValue = resolveValueOfParameter(context, value, { isResponseSchema: true });
|
||||
let headerValue = resolveValueOfParameter(context, value, { isResponseSchema: true }), headerTypeInfo = {}, properties = {};
|
||||
if (typeof headerValue === "number" || typeof headerValue === "boolean") {
|
||||
headerValue = headerValue.toString();
|
||||
}
|
||||
const headerData = Object.assign({}, value, { name: headerName }), serialisedHeader = serialiseParamsBasedOnStyle(context, headerData, headerValue, { isResponseSchema: true });
|
||||
headers.push(...serialisedHeader);
|
||||
if (headerData && headerData.name && headerData.schema && headerData.schema.type) {
|
||||
const { schema: schema2 } = headerData;
|
||||
properties = {
|
||||
type: schema2.type,
|
||||
format: schema2.format,
|
||||
default: schema2.default,
|
||||
required: schema2.required || false,
|
||||
deprecated: schema2.deprecated || false,
|
||||
enum: schema2.enum || void 0,
|
||||
minLength: schema2.minLength,
|
||||
maxLength: schema2.maxLength,
|
||||
minimum: schema2.minimum,
|
||||
maximum: schema2.maximum,
|
||||
pattern: schema2.pattern,
|
||||
example: schema2.example
|
||||
};
|
||||
headerTypeInfo = { keyName: headerData.name, properties };
|
||||
headerTypes.push(headerTypeInfo);
|
||||
}
|
||||
});
|
||||
return headers;
|
||||
return { resolvedHeaderTypes: headerTypes, headers };
|
||||
};
|
||||
var getPreviewLangugaForResponseBody = (bodyType) => {
|
||||
const headerFamily = getHeaderFamily(bodyType);
|
||||
@@ -138753,7 +138999,7 @@ var require_schemaUtils2 = __commonJS({
|
||||
return responseAuthHelper;
|
||||
};
|
||||
var resolveResponseForPostmanRequest = (context, operationItem, request) => {
|
||||
let responses = [], requestBodyExamples = [], requestAcceptHeader, requestBody = operationItem.requestBody, requestContent, rawBodyType, headerFamily, isBodyTypeXML;
|
||||
let responses = [], requestBodyExamples = [], requestAcceptHeader, requestBody = operationItem.requestBody, requestContent, rawBodyType, headerFamily, isBodyTypeXML, resolvedExamplesObject = {}, responseTypes = {};
|
||||
if (typeof requestBody === "object") {
|
||||
if (requestBody.$ref) {
|
||||
requestBody = resolveSchema(context, requestBody, { isResponseSchema: true });
|
||||
@@ -138789,7 +139035,15 @@ var require_schemaUtils2 = __commonJS({
|
||||
}
|
||||
}
|
||||
_2.forOwn(operationItem.responses, (responseObj, code) => {
|
||||
let responseSchema = _2.has(responseObj, "$ref") ? resolveSchema(context, responseObj, { isResponseSchema: true }) : responseObj, { includeAuthInfoInExample } = context.computedOptions, auth = request.auth, resolvedExamples = resolveResponseBody(context, responseSchema, requestBodyExamples, code) || {}, headers = resolveResponseHeaders(context, responseSchema.headers);
|
||||
let responseSchema = _2.has(responseObj, "$ref") ? resolveSchema(context, responseObj, { isResponseSchema: true }) : responseObj, { includeAuthInfoInExample } = context.computedOptions, auth = request.auth, resolvedExamples = resolveResponseBody(context, responseSchema, requestBodyExamples, code) || {}, { resolvedHeaderTypes, headers } = resolveResponseHeaders(context, responseSchema.headers), responseBodyHeaderObj;
|
||||
resolvedExamplesObject = resolvedExamples[0] && resolvedExamples[0].resolvedResponseBodyTypes;
|
||||
responseBodyHeaderObj = {
|
||||
body: JSON.stringify(resolvedExamplesObject, null, 2),
|
||||
headers: JSON.stringify(resolvedHeaderTypes, null, 2)
|
||||
};
|
||||
code = code.replace(/X|x/g, "0");
|
||||
code = code === "default" ? 500 : _2.toSafeInteger(code);
|
||||
Object.assign(responseTypes, { [code]: responseBodyHeaderObj });
|
||||
_2.forOwn(resolvedExamples, (resolvedExample = {}) => {
|
||||
let { body, contentHeader = [], bodyType, acceptHeader, name } = resolvedExample, resolvedRequestBody = _2.get(resolvedExample, "request.body"), originalRequest, response, responseAuthHelper, requestBodyObj = {}, reqHeaders = _2.clone(request.headers) || [], reqQueryParams = _2.clone(_2.get(request, "params.queryParams", []));
|
||||
_2.isArray(acceptHeader) && reqHeaders.push(...acceptHeader);
|
||||
@@ -138829,13 +139083,17 @@ var require_schemaUtils2 = __commonJS({
|
||||
responses.push(response);
|
||||
});
|
||||
});
|
||||
return { responses, acceptHeader: requestAcceptHeader };
|
||||
return {
|
||||
responses,
|
||||
acceptHeader: requestAcceptHeader,
|
||||
responseTypes
|
||||
};
|
||||
};
|
||||
module2.exports = {
|
||||
resolvePostmanRequest: function(context, operationItem, path, method) {
|
||||
context.schemaCache = context.schemaCache || {};
|
||||
context.schemaFakerCache = context.schemaFakerCache || {};
|
||||
let url = resolveUrlForPostmanRequest(path), baseUrlData = resolveBaseUrlForPostmanRequest(operationItem[method]), requestName = resolveNameForPostmanReqeust(context, operationItem[method], url), queryParams = resolveQueryParamsForPostmanRequest(context, operationItem, method), headers = resolveHeadersForPostmanRequest(context, operationItem, method), pathParams = resolvePathParamsForPostmanRequest(context, operationItem, method), { pathVariables, collectionVariables } = filterCollectionAndPathVariables(url, pathParams), requestBody = resolveRequestBodyForPostmanRequest(context, operationItem[method]), request, securitySchema = _2.get(operationItem, [method, "security"]), authHelper = generateAuthForCollectionFromOpenAPI(context.openapi, securitySchema), { alwaysInheritAuthentication } = context.computedOptions;
|
||||
let url = resolveUrlForPostmanRequest(path), baseUrlData = resolveBaseUrlForPostmanRequest(operationItem[method]), requestName = resolveNameForPostmanReqeust(context, operationItem[method], url), { queryParamTypes, queryParams } = resolveQueryParamsForPostmanRequest(context, operationItem, method), { headerTypes, headers } = resolveHeadersForPostmanRequest(context, operationItem, method), { pathParamTypes, pathParams } = resolvePathParamsForPostmanRequest(context, operationItem, method), { pathVariables, collectionVariables } = filterCollectionAndPathVariables(url, pathParams), requestBody = resolveRequestBodyForPostmanRequest(context, operationItem[method]), requestBodyTypes = requestBody && requestBody.resolvedSchemaTypeObject, request, securitySchema = _2.get(operationItem, [method, "security"]), authHelper = generateAuthForCollectionFromOpenAPI(context.openapi, securitySchema), { alwaysInheritAuthentication } = context.computedOptions, requestIdentifier, requestTypesObject = {};
|
||||
headers.push(..._2.get(requestBody, "headers", []));
|
||||
pathVariables.push(...baseUrlData.pathVariables);
|
||||
collectionVariables.push(...baseUrlData.collectionVariables);
|
||||
@@ -138853,7 +139111,21 @@ var require_schemaUtils2 = __commonJS({
|
||||
body: _2.get(requestBody, "body"),
|
||||
auth: alwaysInheritAuthentication ? void 0 : authHelper
|
||||
};
|
||||
const { responses, acceptHeader } = resolveResponseForPostmanRequest(context, operationItem[method], request);
|
||||
const requestTypes = {
|
||||
body: JSON.stringify(requestBodyTypes, null, 2),
|
||||
headers: JSON.stringify(headerTypes, null, 2),
|
||||
pathParam: JSON.stringify(pathParamTypes, null, 2),
|
||||
queryParam: JSON.stringify(queryParamTypes, null, 2)
|
||||
}, {
|
||||
responses,
|
||||
acceptHeader,
|
||||
responseTypes
|
||||
} = resolveResponseForPostmanRequest(context, operationItem[method], request);
|
||||
requestIdentifier = method + path;
|
||||
Object.assign(
|
||||
requestTypesObject,
|
||||
{ [requestIdentifier]: { request: requestTypes, response: responseTypes } }
|
||||
);
|
||||
if (!_2.isEmpty(acceptHeader)) {
|
||||
request.headers = _2.concat(request.headers, acceptHeader);
|
||||
}
|
||||
@@ -138864,7 +139136,8 @@ var require_schemaUtils2 = __commonJS({
|
||||
responses
|
||||
})
|
||||
},
|
||||
collectionVariables
|
||||
collectionVariables,
|
||||
requestTypesObject
|
||||
};
|
||||
},
|
||||
resolveResponseForPostmanRequest,
|
||||
@@ -141220,7 +141493,7 @@ var require_libV2 = __commonJS({
|
||||
convertV2: function(context, cb) {
|
||||
let collectionTree = generateSkeletonTreeFromOpenAPI(context.openapi, context.computedOptions);
|
||||
let preOrderTraversal = GraphLib.alg.preorder(collectionTree, "root:collection");
|
||||
let collection = {};
|
||||
let collection = {}, extractedTypesObject = {};
|
||||
_2.forEach(preOrderTraversal, function(nodeIdentified) {
|
||||
let node = collectionTree.node(nodeIdentified);
|
||||
switch (node.type) {
|
||||
@@ -141254,15 +141527,16 @@ var require_libV2 = __commonJS({
|
||||
break;
|
||||
}
|
||||
case "request": {
|
||||
let request = {}, collectionVariables = [], requestObject = {};
|
||||
let request = {}, collectionVariables = [], requestObject = {}, requestTypesObject = {};
|
||||
try {
|
||||
({ request, collectionVariables } = resolvePostmanRequest(
|
||||
({ request, collectionVariables, requestTypesObject } = resolvePostmanRequest(
|
||||
context,
|
||||
context.openapi.paths[node.meta.path],
|
||||
node.meta.path,
|
||||
node.meta.method
|
||||
));
|
||||
requestObject = generateRequestItemObject(request);
|
||||
extractedTypesObject = Object.assign({}, extractedTypesObject, requestTypesObject);
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
break;
|
||||
@@ -141337,6 +141611,17 @@ var require_libV2 = __commonJS({
|
||||
if (!_2.isEmpty(collection.variable)) {
|
||||
collection.variable = _2.uniqBy(collection.variable, "key");
|
||||
}
|
||||
if (context.enableTypeFetching) {
|
||||
return cb(null, {
|
||||
result: true,
|
||||
output: [{
|
||||
type: "collection",
|
||||
data: collection
|
||||
}],
|
||||
analytics: this.analytics || {},
|
||||
extractedTypes: extractedTypesObject || {}
|
||||
});
|
||||
}
|
||||
return cb(null, {
|
||||
result: true,
|
||||
output: [{
|
||||
@@ -145118,7 +145403,7 @@ var require_schemapack = __commonJS({
|
||||
var concreteUtils;
|
||||
var pathBrowserify = require_path_browserify();
|
||||
var SchemaPack = class {
|
||||
constructor(input, options = {}, moduleVersion = MODULE_VERSION.V1) {
|
||||
constructor(input, options = {}, moduleVersion = MODULE_VERSION.V1, enableTypeFetching = false) {
|
||||
if (input.type === schemaUtils.MULTI_FILE_API_TYPE_ALLOWED_VALUE && input.data && input.data[0] && input.data[0].path) {
|
||||
input = schemaUtils.mapDetectRootFilesInputToFolderInput(input);
|
||||
}
|
||||
@@ -145136,6 +145421,7 @@ var require_schemapack = __commonJS({
|
||||
actualStack: 0,
|
||||
numberOfRequests: 0
|
||||
};
|
||||
this.enableTypeFetching = enableTypeFetching;
|
||||
this.computedOptions = utils.mergeOptions(
|
||||
// predefined options
|
||||
_2.keyBy(this.definedOptions, "id"),
|
||||
@@ -145814,6 +146100,14 @@ var require_openapi_to_postmanv2 = __commonJS({
|
||||
}
|
||||
return cb(new UserError(_2.get(schema2, "validationResult.reason", DEFAULT_INVALID_ERROR)));
|
||||
},
|
||||
convertV2WithTypes: function(input, options, cb) {
|
||||
const enableTypeFetching = true;
|
||||
var schema2 = new SchemaPack(input, options, MODULE_VERSION.V2, enableTypeFetching);
|
||||
if (schema2.validated) {
|
||||
return schema2.convertV2(cb);
|
||||
}
|
||||
return cb(new UserError(_2.get(schema2, "validationResult.reason", DEFAULT_INVALID_ERROR)));
|
||||
},
|
||||
validate: function(input) {
|
||||
var schema2 = new SchemaPack(input);
|
||||
return schema2.validationResult;
|
||||
|
||||
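Taken together, the enableTypeFetching constructor flag and the new convertV2WithTypes entry point let callers receive an extractedTypes map (keyed by method + path) next to the generated collection. A hedged TypeScript usage sketch; the spec string and the key names in the comments are placeholders:

const converter = require("openapi-to-postmanv2");

const openApiSpecString = "<OpenAPI 3 YAML or JSON goes here>"; // placeholder
const input = { type: "string", data: openApiSpecString };

converter.convertV2WithTypes(input, {}, (err, result) => {
  if (err || !result.result) return console.error(err);
  const [collection] = result.output;        // the regular Postman collection
  const types = result.extractedTypes ?? {}; // e.g. types["get/pets"] -> { request, response }
  console.log(Object.keys(types));
});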
@@ -7,7 +7,7 @@
|
||||
"dev": "yaakcli dev ./src/index.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"openapi-to-postmanv2": "^4.23.1",
|
||||
"openapi-to-postmanv2": "^5.0.0",
|
||||
"yaml": "^2.4.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -69,6 +69,12 @@ function migrateImport(contents) {
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const environment of parsed.resources.environments ?? []) {
|
||||
if ("environmentId" in environment) {
|
||||
environment.base = environment.environmentId == null;
|
||||
delete environment.environmentId;
|
||||
}
|
||||
}
|
||||
return { resources: parsed.resources };
|
||||
}
|
||||
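The added loop migrates imported environments from the old environmentId field to the new base flag: an environment with no parent becomes the base environment. A small TypeScript before/after illustration with made-up data:

// Before migration (old export format):
const environments = [
  { id: "env_1", name: "Global", environmentId: null },
  { id: "env_2", name: "Production", environmentId: "env_1" },
];

// After the loop above runs:
//   { id: "env_1", name: "Global", base: true }
//   { id: "env_2", name: "Production", base: false }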
function isJSObject(obj) {
|
||||
|
||||
@@ -1,54 +0,0 @@
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
// If the importer is in node compatibility mode or this is not an ESM
|
||||
// file that has been converted to a CommonJS file using a Babel-
|
||||
// compatible transform (i.e. "__esModule" has not been set), then set
|
||||
// "default" to the CommonJS "module.exports" for node compatibility.
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
|
||||
// src/index.ts
|
||||
var src_exports = {};
|
||||
__export(src_exports, {
|
||||
plugin: () => plugin
|
||||
});
|
||||
module.exports = __toCommonJS(src_exports);
|
||||
var import_node_fs = __toESM(require("node:fs"));
|
||||
var plugin = {
|
||||
templateFunctions: [{
|
||||
name: "fs.readFile",
|
||||
args: [{ title: "Select File", type: "file", name: "path", label: "File" }],
|
||||
async onRender(_ctx, args) {
|
||||
if (!args.values.path) return null;
|
||||
try {
|
||||
return import_node_fs.default.promises.readFile(args.values.path, "utf-8");
|
||||
} catch (err) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}]
|
||||
};
|
||||
// Annotate the CommonJS export names for ESM import in node:
|
||||
0 && (module.exports = {
|
||||
plugin
|
||||
});
|
||||
@@ -1,9 +0,0 @@
|
||||
{
|
||||
"name": "@yaakapp/template-function-file",
|
||||
"private": true,
|
||||
"version": "0.0.1",
|
||||
"scripts": {
|
||||
"build": "yaakcli build ./src/index.ts",
|
||||
"dev": "yaakcli dev ./src/index.js"
|
||||
}
|
||||
}
|
||||
src-tauri/yaak-common/Cargo.toml (new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
[package]
|
||||
name = "yaak-common"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
publish = false
|
||||
|
||||
[dependencies]
|
||||
tauri = { workspace = true }
|
||||
regex = "1.11.0"
|
||||
src-tauri/yaak-common/src/lib.rs (new file, 1 line)
@@ -0,0 +1 @@
|
||||
pub mod window;
|
||||
src-tauri/yaak-common/src/window.rs (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
use regex::Regex;
|
||||
use tauri::{Runtime, WebviewWindow};
|
||||
|
||||
pub trait WorkspaceWindowTrait {
|
||||
fn workspace_id(&self) -> Option<String>;
|
||||
fn cookie_jar_id(&self) -> Option<String>;
|
||||
fn environment_id(&self) -> Option<String>;
|
||||
}
|
||||
|
||||
impl<R: Runtime> WorkspaceWindowTrait for WebviewWindow<R> {
|
||||
fn workspace_id(&self) -> Option<String> {
|
||||
let url = self.url().unwrap();
|
||||
let re = Regex::new(r"/workspaces/(?<id>\w+)").unwrap();
|
||||
match re.captures(url.as_str()) {
|
||||
None => None,
|
||||
Some(captures) => captures.name("id").map(|c| c.as_str().to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
fn cookie_jar_id(&self) -> Option<String> {
|
||||
let url = self.url().unwrap();
|
||||
let mut query_pairs = url.query_pairs();
|
||||
query_pairs.find(|(k, _v)| k == "cookie_jar_id").map(|(_k, v)| v.to_string())
|
||||
}
|
||||
|
||||
fn environment_id(&self) -> Option<String> {
|
||||
let url = self.url().unwrap();
|
||||
let mut query_pairs = url.query_pairs();
|
||||
query_pairs.find(|(k, _v)| k == "environment_id").map(|(_k, v)| v.to_string())
|
||||
}
|
||||
}
|
||||
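The new WorkspaceWindowTrait derives the workspace, cookie-jar, and environment ids purely from the window URL: the path segment after /workspaces/ plus the cookie_jar_id and environment_id query parameters. An equivalent TypeScript sketch for the webview side; the URL is hypothetical:

const url = new URL("http://localhost:1420/workspaces/wk_123?environment_id=env_9&cookie_jar_id=cj_4");

const workspaceId = url.pathname.match(/\/workspaces\/(\w+)/)?.[1] ?? null; // "wk_123"
const environmentId = url.searchParams.get("environment_id");               // "env_9"
const cookieJarId = url.searchParams.get("cookie_jar_id");                  // "cj_4"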
src-tauri/yaak-crypto/Cargo.toml (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
[package]
|
||||
name = "yaak-crypto"
|
||||
links = "yaak-crypto"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[dependencies]
|
||||
base32 = "0.5.1" # For encoding human-readable key
|
||||
base64 = "0.22.1" # For encoding in the database
|
||||
chacha20poly1305 = "0.10.1"
|
||||
keyring = { version = "4.0.0-rc.1" }
|
||||
log = "0.4.26"
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
tauri = { workspace = true }
|
||||
thiserror = "2.0.12"
|
||||
yaak-models = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
tauri-plugin = { workspace = true, features = ["build"] }
|
||||
src-tauri/yaak-crypto/build.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
const COMMANDS: &[&str] = &[
|
||||
"enable_encryption",
|
||||
"reveal_workspace_key",
|
||||
"set_workspace_key",
|
||||
];
|
||||
|
||||
fn main() {
|
||||
tauri_plugin::Builder::new(COMMANDS).build();
|
||||
}
|
||||
src-tauri/yaak-crypto/index.ts (new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
import { invoke } from '@tauri-apps/api/core';
|
||||
|
||||
export function enableEncryption(workspaceId: string) {
|
||||
return invoke<void>('plugin:yaak-crypto|enable_encryption', { workspaceId });
|
||||
}
|
||||
|
||||
export function revealWorkspaceKey(workspaceId: string) {
|
||||
return invoke<string>('plugin:yaak-crypto|reveal_workspace_key', { workspaceId });
|
||||
}
|
||||
|
||||
export function setWorkspaceKey(args: { workspaceId: string; key: string }) {
|
||||
return invoke<void>('plugin:yaak-crypto|set_workspace_key', args);
|
||||
}
|
||||
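These wrappers map one-to-one onto the yaak-crypto Tauri commands declared below. A short usage sketch from the frontend; the function bodies and ids are illustrative only:

import { enableEncryption, revealWorkspaceKey, setWorkspaceKey } from "@yaakapp-internal/crypto";

async function setUpEncryption(workspaceId: string) {
  await enableEncryption(workspaceId);                    // creates or loads the workspace key
  const humanKey = await revealWorkspaceKey(workspaceId); // human-readable backup key
  console.log("Back up this key:", humanKey);
}

async function restoreEncryption(workspaceId: string, key: string) {
  await setWorkspaceKey({ workspaceId, key });            // accepts a key from another device
}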
src-tauri/yaak-crypto/package.json (new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"name": "@yaakapp-internal/crypto",
|
||||
"private": true,
|
||||
"version": "1.0.0",
|
||||
"main": "index.ts"
|
||||
}
|
||||
src-tauri/yaak-crypto/permissions/default.toml (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
[default]
|
||||
description = "Default permissions for the plugin"
|
||||
permissions = [
|
||||
"allow-enable-encryption",
|
||||
"allow-reveal-workspace-key",
|
||||
"allow-set-workspace-key",
|
||||
]
|
||||
src-tauri/yaak-crypto/src/commands.rs (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
use crate::error::Result;
|
||||
use crate::manager::EncryptionManagerExt;
|
||||
use tauri::{command, Runtime, WebviewWindow};
|
||||
|
||||
#[command]
|
||||
pub(crate) async fn enable_encryption<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
workspace_id: &str,
|
||||
) -> Result<()> {
|
||||
window.crypto().ensure_workspace_key(workspace_id)?;
|
||||
window.crypto().reveal_workspace_key(workspace_id)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub(crate) async fn reveal_workspace_key<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
workspace_id: &str,
|
||||
) -> Result<String> {
|
||||
Ok(window.crypto().reveal_workspace_key(workspace_id)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub(crate) async fn set_workspace_key<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
workspace_id: &str,
|
||||
key: &str,
|
||||
) -> Result<()> {
|
||||
window.crypto().set_human_key(workspace_id, key)?;
|
||||
Ok(())
|
||||
}
|
||||
src-tauri/yaak-crypto/src/encryption.rs (new file, 98 lines)
@@ -0,0 +1,98 @@
|
||||
use crate::error::Error::{DecryptionError, EncryptionError, InvalidEncryptedData};
|
||||
use crate::error::Result;
|
||||
use chacha20poly1305::aead::generic_array::typenum::Unsigned;
|
||||
use chacha20poly1305::aead::{Aead, AeadCore, Key, KeyInit, OsRng};
|
||||
use chacha20poly1305::XChaCha20Poly1305;
|
||||
|
||||
const ENCRYPTION_TAG: &str = "yA4k3nC";
|
||||
const ENCRYPTION_VERSION: u8 = 1;
|
||||
|
||||
pub(crate) fn encrypt_data(data: &[u8], key: &Key<XChaCha20Poly1305>) -> Result<Vec<u8>> {
|
||||
let nonce = XChaCha20Poly1305::generate_nonce(&mut OsRng);
|
||||
let cipher = XChaCha20Poly1305::new(&key);
|
||||
let ciphered_data = cipher.encrypt(&nonce, data).map_err(|_| EncryptionError)?;
|
||||
|
||||
let mut data: Vec<u8> = Vec::new();
|
||||
data.extend_from_slice(ENCRYPTION_TAG.as_bytes()); // Tag
|
||||
data.push(ENCRYPTION_VERSION); // Version
|
||||
data.extend_from_slice(&nonce.as_slice()); // Nonce
|
||||
data.extend_from_slice(&ciphered_data); // Ciphertext
|
||||
|
||||
Ok(data)
|
||||
}
|
||||
|
||||
pub(crate) fn decrypt_data(cipher_data: &[u8], key: &Key<XChaCha20Poly1305>) -> Result<Vec<u8>> {
|
||||
// Yaak Tag + ID + Version + Nonce + ... ciphertext ...
|
||||
let (tag, rest) =
|
||||
cipher_data.split_at_checked(ENCRYPTION_TAG.len()).ok_or(InvalidEncryptedData)?;
|
||||
if tag != ENCRYPTION_TAG.as_bytes() {
|
||||
return Err(InvalidEncryptedData);
|
||||
}
|
||||
|
||||
let (version, rest) = rest.split_at_checked(1).ok_or(InvalidEncryptedData)?;
|
||||
if version[0] != ENCRYPTION_VERSION {
|
||||
return Err(InvalidEncryptedData);
|
||||
}
|
||||
|
||||
let nonce_bytes = <XChaCha20Poly1305 as AeadCore>::NonceSize::to_usize();
|
||||
let (nonce, ciphered_data) = rest.split_at_checked(nonce_bytes).ok_or(InvalidEncryptedData)?;
|
||||
|
||||
let cipher = XChaCha20Poly1305::new(&key);
|
||||
cipher.decrypt(nonce.into(), ciphered_data).map_err(|_e| DecryptionError)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::encryption::{decrypt_data, encrypt_data};
|
||||
use crate::error::Error::InvalidEncryptedData;
|
||||
use crate::error::Result;
|
||||
use chacha20poly1305::aead::OsRng;
|
||||
use chacha20poly1305::{KeyInit, XChaCha20Poly1305};
|
||||
|
||||
#[test]
|
||||
fn test_encrypt_decrypt() -> Result<()> {
|
||||
let key = XChaCha20Poly1305::generate_key(OsRng);
|
||||
let encrypted = encrypt_data("hello world".as_bytes(), &key)?;
|
||||
let decrypted = decrypt_data(encrypted.as_slice(), &key)?;
|
||||
assert_eq!(String::from_utf8(decrypted).unwrap(), "hello world");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_decrypt_empty() -> Result<()> {
|
||||
let key = XChaCha20Poly1305::generate_key(OsRng);
|
||||
let encrypted = encrypt_data(&[], &key)?;
|
||||
assert_eq!(encrypted.len(), 48);
|
||||
let decrypted = decrypt_data(encrypted.as_slice(), &key)?;
|
||||
assert_eq!(String::from_utf8(decrypted).unwrap(), "");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_decrypt_bad_version() -> Result<()> {
|
||||
let key = XChaCha20Poly1305::generate_key(OsRng);
|
||||
let mut encrypted = encrypt_data("hello world".as_bytes(), &key)?;
|
||||
encrypted[7] = 0;
|
||||
let decrypted = decrypt_data(encrypted.as_slice(), &key);
|
||||
assert!(matches!(decrypted, Err(InvalidEncryptedData)));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_decrypt_bad_tag() -> Result<()> {
|
||||
let key = XChaCha20Poly1305::generate_key(OsRng);
|
||||
let mut encrypted = encrypt_data("hello world".as_bytes(), &key)?;
|
||||
encrypted[0] = 2;
|
||||
let decrypted = decrypt_data(encrypted.as_slice(), &key);
|
||||
assert!(matches!(decrypted, Err(InvalidEncryptedData)));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_decrypt_unencrypted_data() -> Result<()> {
|
||||
let key = XChaCha20Poly1305::generate_key(OsRng);
|
||||
let decrypted = decrypt_data("123".as_bytes(), &key);
|
||||
assert!(matches!(decrypted, Err(InvalidEncryptedData)));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
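The encrypted payload layout above is tag + version + nonce + ciphertext, which is why test_decrypt_empty expects exactly 48 bytes for an empty plaintext: 7 bytes of "yA4k3nC" tag, 1 version byte, a 24-byte XChaCha20 nonce, and the 16-byte Poly1305 authentication tag. A tiny TypeScript check of that arithmetic; the constants are copied from the Rust above:

const TAG_BYTES = "yA4k3nC".length; // 7
const VERSION_BYTES = 1;
const NONCE_BYTES = 24;             // XChaCha20 nonce size
const AEAD_TAG_BYTES = 16;          // Poly1305 tag appended to every ciphertext

// 7 + 1 + 24 + 16 = 48, matching assert_eq!(encrypted.len(), 48) above.
console.log(TAG_BYTES + VERSION_BYTES + NONCE_BYTES + AEAD_TAG_BYTES); // 48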
src-tauri/yaak-crypto/src/error.rs (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
use serde::{Serialize, Serializer};
use std::io;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum Error {
    #[error(transparent)]
    DbError(#[from] yaak_models::error::Error),

    #[error("Keyring error: {0}")]
    KeyringError(#[from] keyring::Error),

    #[error("Missing workspace encryption key")]
    MissingWorkspaceKey,

    #[error("Incorrect workspace key")]
    IncorrectWorkspaceKey,

    #[error("Failed to decrypt workspace key: {0}")]
    WorkspaceKeyDecryptionError(String),

    #[error("Crypto IO error: {0}")]
    IoError(#[from] io::Error),

    #[error("Failed to encrypt data")]
    EncryptionError,

    #[error("Failed to decrypt data")]
    DecryptionError,

    #[error("Invalid encrypted data")]
    InvalidEncryptedData,

    #[error("Invalid key provided")]
    InvalidHumanKey,

    #[error("Encryption error: {0}")]
    GenericError(String),
}

impl Serialize for Error {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.to_string().as_ref())
    }
}

pub type Result<T> = std::result::Result<T, Error>;
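Because the `Serialize` impl above delegates to `Display`, a failed command surfaces these errors to the frontend as plain strings. A minimal sketch (assuming `serde_json` is available) of what that looks like:

```rust
use yaak_crypto::error::Error;

fn main() {
    // The enum serializes to its #[error("...")] message, nothing more.
    let json = serde_json::to_string(&Error::MissingWorkspaceKey).unwrap();
    assert_eq!(json, r#""Missing workspace encryption key""#);
}
```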
27  src-tauri/yaak-crypto/src/lib.rs  Normal file
@@ -0,0 +1,27 @@
extern crate core;

use crate::commands::*;
use crate::manager::EncryptionManager;
use tauri::plugin::{Builder, TauriPlugin};
use tauri::{generate_handler, Manager, Runtime};

mod commands;
pub mod encryption;
pub mod error;
pub mod manager;
mod master_key;
mod workspace_key;

pub fn init<R: Runtime>() -> TauriPlugin<R> {
    Builder::new("yaak-crypto")
        .invoke_handler(generate_handler![
            enable_encryption,
            reveal_workspace_key,
            set_workspace_key
        ])
        .setup(|app, _api| {
            app.manage(EncryptionManager::new(app.app_handle()));
            Ok(())
        })
        .build()
}
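This follows the standard Tauri plugin shape; a minimal sketch (assumed wiring, not part of this diff) of registering it from the main application builder:

```rust
fn main() {
    tauri::Builder::default()
        .plugin(yaak_crypto::init()) // registers the commands and manages EncryptionManager
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}
```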
176  src-tauri/yaak-crypto/src/manager.rs  Normal file
@@ -0,0 +1,176 @@
use crate::error::Error::{
    GenericError, IncorrectWorkspaceKey, MissingWorkspaceKey, WorkspaceKeyDecryptionError,
};
use crate::error::{Error, Result};
use crate::master_key::MasterKey;
use crate::workspace_key::WorkspaceKey;
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use log::{info, warn};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use tauri::{AppHandle, Manager, Runtime, State};
use yaak_models::models::{EncryptedKey, Workspace, WorkspaceMeta};
use yaak_models::query_manager::{QueryManager, QueryManagerExt};
use yaak_models::util::{generate_id_of_length, UpdateSource};

const KEY_USER: &str = "encryption-key";

pub trait EncryptionManagerExt<'a, R> {
    fn crypto(&'a self) -> State<'a, EncryptionManager>;
}

impl<'a, R: Runtime, M: Manager<R>> EncryptionManagerExt<'a, R> for M {
    fn crypto(&'a self) -> State<'a, EncryptionManager> {
        self.state::<EncryptionManager>()
    }
}

#[derive(Debug, Clone)]
pub struct EncryptionManager {
    cached_master_key: Arc<Mutex<Option<MasterKey>>>,
    cached_workspace_keys: Arc<Mutex<HashMap<String, WorkspaceKey>>>,
    query_manager: QueryManager,
    app_id: String,
}

impl EncryptionManager {
    pub fn new<R: Runtime>(app_handle: &AppHandle<R>) -> Self {
        Self {
            cached_master_key: Default::default(),
            cached_workspace_keys: Default::default(),
            query_manager: app_handle.db_manager().inner().clone(),
            app_id: app_handle.config().identifier.to_string(),
        }
    }

    pub fn encrypt(&self, workspace_id: &str, data: &[u8]) -> Result<Vec<u8>> {
        let workspace_secret = self.get_workspace_key(workspace_id)?;
        workspace_secret.encrypt(data)
    }

    pub fn decrypt(&self, workspace_id: &str, data: &[u8]) -> Result<Vec<u8>> {
        let workspace_secret = self.get_workspace_key(workspace_id)?;
        workspace_secret.decrypt(data)
    }

    pub fn reveal_workspace_key(&self, workspace_id: &str) -> Result<String> {
        let key = self.get_workspace_key(workspace_id)?;
        key.to_human()
    }

    pub fn set_human_key(&self, workspace_id: &str, human_key: &str) -> Result<WorkspaceMeta> {
        let wkey = WorkspaceKey::from_human(human_key)?;

        let workspace = self.query_manager.connect().get_workspace(workspace_id)?;
        let encryption_key_challenge = match workspace.encryption_key_challenge {
            None => return self.set_workspace_key(workspace_id, &wkey),
            Some(c) => c,
        };

        let encryption_key_challenge = match BASE64_STANDARD.decode(encryption_key_challenge) {
            Ok(c) => c,
            Err(_) => return Err(GenericError("Failed to decode workspace challenge".to_string())),
        };

        if let Err(_) = wkey.decrypt(encryption_key_challenge.as_slice()) {
            return Err(IncorrectWorkspaceKey);
        };

        self.set_workspace_key(workspace_id, &wkey)
    }

    pub(crate) fn set_workspace_key(
        &self,
        workspace_id: &str,
        wkey: &WorkspaceKey,
    ) -> Result<WorkspaceMeta> {
        info!("Created workspace key for {workspace_id}");

        let encrypted_key = BASE64_STANDARD.encode(self.get_master_key()?.encrypt(wkey.raw_key())?);
        let encrypted_key = EncryptedKey { encrypted_key };
        let encryption_key_challenge = wkey.encrypt(generate_id_of_length(50).as_bytes())?;
        let encryption_key_challenge = Some(BASE64_STANDARD.encode(encryption_key_challenge));

        let workspace_meta = self.query_manager.with_tx::<WorkspaceMeta, Error>(|tx| {
            let workspace = tx.get_workspace(workspace_id)?;
            let workspace_meta = tx.get_or_create_workspace_meta(workspace_id)?;
            tx.upsert_workspace(
                &Workspace {
                    encryption_key_challenge,
                    ..workspace
                },
                &UpdateSource::Background,
            )?;

            Ok(tx.upsert_workspace_meta(
                &WorkspaceMeta {
                    encryption_key: Some(encrypted_key.clone()),
                    ..workspace_meta
                },
                &UpdateSource::Background,
            )?)
        })?;

        let mut cache = self.cached_workspace_keys.lock().unwrap();
        cache.insert(workspace_id.to_string(), wkey.clone());

        Ok(workspace_meta)
    }

    pub(crate) fn ensure_workspace_key(&self, workspace_id: &str) -> Result<WorkspaceMeta> {
        let workspace_meta =
            self.query_manager.connect().get_or_create_workspace_meta(workspace_id)?;

        // Already exists
        if let Some(_) = workspace_meta.encryption_key {
            warn!("Tried to create workspace key when one already exists for {workspace_id}");
            return Ok(workspace_meta);
        }

        let wkey = WorkspaceKey::create()?;
        self.set_workspace_key(workspace_id, &wkey)
    }

    fn get_workspace_key(&self, workspace_id: &str) -> Result<WorkspaceKey> {
        {
            let cache = self.cached_workspace_keys.lock().unwrap();
            if let Some(k) = cache.get(workspace_id) {
                return Ok(k.clone());
            }
        };

        let db = self.query_manager.connect();
        let workspace_meta = db.get_or_create_workspace_meta(workspace_id)?;

        let key = match workspace_meta.encryption_key {
            None => return Err(MissingWorkspaceKey),
            Some(k) => k,
        };

        let mkey = self.get_master_key()?;
        let decoded_key = BASE64_STANDARD
            .decode(key.encrypted_key)
            .map_err(|e| WorkspaceKeyDecryptionError(e.to_string()))?;
        let raw_key = mkey
            .decrypt(decoded_key.as_slice())
            .map_err(|e| WorkspaceKeyDecryptionError(e.to_string()))?;
        info!("Got existing workspace key for {workspace_id}");
        let wkey = WorkspaceKey::from_raw_key(raw_key.as_slice());

        Ok(wkey)
    }

    fn get_master_key(&self) -> Result<MasterKey> {
        // NOTE: This locks the key for the entire function which seems wrong, but this prevents
        // concurrent access from prompting the user for a keychain password multiple times.
        let mut master_secret = self.cached_master_key.lock().unwrap();
        if let Some(k) = master_secret.as_ref() {
            return Ok(k.to_owned());
        }

        let mkey = MasterKey::get_or_create(&self.app_id, KEY_USER)?;
        *master_secret = Some(mkey.clone());
        Ok(mkey)
    }
}
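The `EncryptionManagerExt` trait above means any `tauri::Manager` (an `AppHandle`, a window, etc.) can reach the manager via `.crypto()`. A hypothetical caller, sketched under that assumption:

```rust
use tauri::{AppHandle, Runtime};
use yaak_crypto::error::Result;
use yaak_crypto::manager::EncryptionManagerExt;

// Hypothetical helper, not part of this diff: encrypt a value with the
// workspace key, which is unwrapped with the OS-keychain master key on
// first use and cached afterwards.
fn store_secret<R: Runtime>(app: &AppHandle<R>, workspace_id: &str, secret: &str) -> Result<Vec<u8>> {
    app.crypto().encrypt(workspace_id, secret.as_bytes())
}
```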
79  src-tauri/yaak-crypto/src/master_key.rs  Normal file
@@ -0,0 +1,79 @@
use crate::encryption::{decrypt_data, encrypt_data};
use crate::error::Error::GenericError;
use crate::error::Result;
use base32::Alphabet;
use chacha20poly1305::aead::{Key, KeyInit, OsRng};
use chacha20poly1305::XChaCha20Poly1305;
use keyring::{Entry, Error};
use log::info;

const HUMAN_PREFIX: &str = "YKM_";

#[derive(Debug, Clone)]
pub(crate) struct MasterKey {
    key: Key<XChaCha20Poly1305>,
}

impl MasterKey {
    pub(crate) fn get_or_create(app_id: &str, user: &str) -> Result<Self> {
        let id = format!("{app_id}.EncryptionKey");
        let entry = Entry::new(&id, user)?;

        let key = match entry.get_password() {
            Ok(encoded) => {
                let without_prefix = encoded.strip_prefix(HUMAN_PREFIX).unwrap_or(&encoded);
                let key_bytes = base32::decode(Alphabet::Crockford {}, &without_prefix)
                    .ok_or(GenericError("Failed to decode master key".to_string()))?;
                Key::<XChaCha20Poly1305>::clone_from_slice(key_bytes.as_slice())
            }
            Err(Error::NoEntry) => {
                info!("Creating new master key");
                let key = XChaCha20Poly1305::generate_key(OsRng);
                let encoded = base32::encode(Alphabet::Crockford {}, key.as_slice());
                let with_prefix = format!("{HUMAN_PREFIX}{encoded}");
                entry.set_password(&with_prefix)?;
                key
            }
            Err(e) => return Err(GenericError(e.to_string())),
        };

        Ok(Self { key })
    }

    pub(crate) fn encrypt(&self, data: &[u8]) -> Result<Vec<u8>> {
        encrypt_data(data, &self.key)
    }

    pub(crate) fn decrypt(&self, data: &[u8]) -> Result<Vec<u8>> {
        decrypt_data(data, &self.key)
    }

    #[cfg(test)]
    pub(crate) fn test_key() -> Self {
        let key: Key<XChaCha20Poly1305> = Key::<XChaCha20Poly1305>::clone_from_slice(
            "00000000000000000000000000000000".as_bytes(),
        );
        Self { key }
    }
}

#[cfg(test)]
mod tests {
    use crate::error::Result;
    use crate::master_key::MasterKey;

    #[test]
    fn test_master_key() -> Result<()> {
        // Test out the master key
        let mkey = MasterKey::test_key();
        let encrypted = mkey.encrypt("hello".as_bytes())?;
        let decrypted = mkey.decrypt(encrypted.as_slice()).unwrap();
        assert_eq!(decrypted, "hello".as_bytes().to_vec());

        let mkey = MasterKey::test_key();
        let decrypted = mkey.decrypt(encrypted.as_slice()).unwrap();
        assert_eq!(decrypted, "hello".as_bytes().to_vec());

        Ok(())
    }
}
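The master key lives only in the OS keychain, under the service `<app identifier>.EncryptionKey` and user `encryption-key`, stored as a `YKM_`-prefixed Crockford base32 string. A small sketch (assumed use of the `keyring` crate, mirroring `get_or_create` above) of reading that entry back:

```rust
use keyring::Entry;

// Hypothetical inspection helper, not part of this diff: returns the stored "YKM_..." string.
fn peek_master_key(app_id: &str) -> Result<String, keyring::Error> {
    let entry = Entry::new(&format!("{app_id}.EncryptionKey"), "encryption-key")?;
    entry.get_password()
}
```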
116  src-tauri/yaak-crypto/src/workspace_key.rs  Normal file
@@ -0,0 +1,116 @@
use crate::encryption::{decrypt_data, encrypt_data};
use crate::error::Error::InvalidHumanKey;
use crate::error::Result;
use base32::Alphabet;
use chacha20poly1305::aead::{Key, KeyInit, OsRng};
use chacha20poly1305::{KeySizeUser, XChaCha20Poly1305};

#[derive(Debug, Clone)]
pub struct WorkspaceKey {
    key: Key<XChaCha20Poly1305>,
}

const HUMAN_PREFIX: &str = "YK";

impl WorkspaceKey {
    pub(crate) fn to_human(&self) -> Result<String> {
        let encoded = base32::encode(Alphabet::Crockford {}, self.key.as_slice());
        let with_prefix = format!("{HUMAN_PREFIX}{encoded}");
        let with_separators = with_prefix
            .chars()
            .collect::<Vec<_>>()
            .chunks(6)
            .map(|chunk| chunk.iter().collect::<String>())
            .collect::<Vec<_>>()
            .join("-");
        Ok(with_separators)
    }

    #[allow(dead_code)]
    pub(crate) fn from_human(human_key: &str) -> Result<Self> {
        let without_prefix = human_key.strip_prefix(HUMAN_PREFIX).unwrap_or(human_key);
        let without_separators = without_prefix.replace("-", "");
        let key =
            base32::decode(Alphabet::Crockford {}, &without_separators).ok_or(InvalidHumanKey)?;
        if key.len() != XChaCha20Poly1305::key_size() {
            return Err(InvalidHumanKey);
        }
        Ok(Self::from_raw_key(key.as_slice()))
    }

    pub(crate) fn from_raw_key(key: &[u8]) -> Self {
        Self {
            key: Key::<XChaCha20Poly1305>::clone_from_slice(key),
        }
    }

    pub(crate) fn raw_key(&self) -> &[u8] {
        self.key.as_slice()
    }

    pub(crate) fn create() -> Result<Self> {
        let key = XChaCha20Poly1305::generate_key(OsRng);
        Ok(Self::from_raw_key(key.as_slice()))
    }

    pub(crate) fn encrypt(&self, data: &[u8]) -> Result<Vec<u8>> {
        encrypt_data(data, &self.key)
    }

    pub(crate) fn decrypt(&self, data: &[u8]) -> Result<Vec<u8>> {
        decrypt_data(data, &self.key)
    }

    #[cfg(test)]
    pub(crate) fn test_key() -> Self {
        Self::from_raw_key("f1a2d4b3c8e799af1456be3478a4c3f2".as_bytes())
    }
}

#[cfg(test)]
mod tests {
    use crate::error::Error::InvalidHumanKey;
    use crate::error::Result;
    use crate::workspace_key::WorkspaceKey;

    #[test]
    fn test_persisted_key() -> Result<()> {
        let key = WorkspaceKey::test_key();
        let encrypted = key.encrypt("hello".as_bytes())?;
        assert_eq!(key.decrypt(encrypted.as_slice())?, "hello".as_bytes());

        Ok(())
    }

    #[test]
    fn test_human_format() -> Result<()> {
        let key = WorkspaceKey::test_key();

        let encrypted = key.encrypt("hello".as_bytes())?;
        assert_eq!(key.decrypt(encrypted.as_slice())?, "hello".as_bytes());

        let human = key.to_human()?;
        assert_eq!(human, "YKCRRP-2CK46H-H36RSR-CMVKJE-B1CRRK-8D9PC9-JK6D1Q-71GK8R-SKCRS0");
        assert_eq!(
            WorkspaceKey::from_human(&human)?.decrypt(encrypted.as_slice())?,
            "hello".as_bytes()
        );

        Ok(())
    }

    #[test]
    fn test_from_human_invalid() -> Result<()> {
        assert!(matches!(
            WorkspaceKey::from_human(
                "YKCRRP-2CK46H-H36RSR-CMVKJE-B1CRRK-8D9PC9-JK6D1Q-71GK8R-SKCRS0-H3X38D",
            ),
            Err(InvalidHumanKey)
        ));

        assert!(matches!(WorkspaceKey::from_human("bad-key",), Err(InvalidHumanKey)));
        assert!(matches!(WorkspaceKey::from_human("",), Err(InvalidHumanKey)));

        Ok(())
    }
}
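`to_human`/`from_human` give the user-facing form of a workspace key: `YK` plus the Crockford base32 of the 32-byte key, re-chunked into dash-separated groups of six characters (nine groups, as seen in `test_human_format`). A crate-internal round-trip sketch; these items are `pub(crate)`, so this only compiles inside the crate:

```rust
use crate::error::Result;
use crate::workspace_key::WorkspaceKey;

fn round_trip() -> Result<()> {
    let key = WorkspaceKey::create()?;
    let human = key.to_human()?; // e.g. "YKxxxx-xxxxxx-..." (9 groups of 6 characters)
    let restored = WorkspaceKey::from_human(&human)?;
    assert_eq!(key.raw_key(), restored.raw_key());
    Ok(())
}
```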
@@ -2,15 +2,15 @@
 name = "yaak-git"
 links = "yaak-git"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 publish = false

 [dependencies]
 chrono = { version = "0.4.38", features = ["serde"] }
-git2 = { version = "0.20.0" , features = ["vendored-libgit2", "vendored-openssl"]}
+git2 = { version = "0.20.0", features = ["vendored-libgit2", "vendored-openssl"] }
 log = "0.4.22"
-serde = { version = "1.0.215", features = ["derive"] }
-serde_json = "1.0.132"
+serde = { workspace = true, features = ["derive"] }
+serde_json = { workspace = true }
 serde_yaml = "0.9.34"
 tauri = { workspace = true }
 thiserror = { workspace = true }
@@ -19,4 +19,4 @@ yaak-models = { workspace = true }
 yaak-sync = { workspace = true }

 [build-dependencies]
-tauri-plugin = { version = "2.0.3", features = ["build"] }
+tauri-plugin = { workspace = true, features = ["build"] }
@@ -1,5 +1,5 @@
 // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
-import type { SyncModel } from "./gen_models";
+import type { SyncModel } from "./gen_models.js";

 export type GitAuthor = { name: string | null, email: string | null, };
@@ -1,6 +1,6 @@
 // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

-export type Environment = { model: "environment", id: string, workspaceId: string, environmentId: string | null, createdAt: string, updatedAt: string, name: string, variables: Array<EnvironmentVariable>, };
+export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, base: boolean, variables: Array<EnvironmentVariable>, };

 export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };

@@ -20,4 +20,4 @@ export type SyncModel = { "type": "workspace" } & Workspace | { "type": "environ

 export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };

-export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, name: string, description: string, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
+export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, name: string, description: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
@@ -52,7 +52,7 @@ export function useGit(dir: string) {
       onSuccess,
     }),
     commitAndPush: useMutation<PushResult, string, { message: string }>({
-      mutationKey: ['git', 'commitpush', dir],
+      mutationKey: ['git', 'commit_push', dir],
       mutationFn: async (args) => {
         await invoke('plugin:yaak-git|commit', { dir, ...args });
         return invoke('plugin:yaak-git|push', { dir });
@@ -79,10 +79,18 @@ export function useGit(dir: string) {
       mutationFn: (args) => invoke('plugin:yaak-git|unstage', { dir, ...args }),
       onSuccess,
     }),
+    init: useGitInit(),
     },
   ] as const;
 }

-export async function gitInit(dir: string) {
-  await invoke('plugin:yaak-git|initialize', { dir });
+export function useGitInit() {
+  const queryClient = useQueryClient();
+  const onSuccess = () => queryClient.invalidateQueries({ queryKey: ['git'] });
+
+  return useMutation<void, string, { dir: string }>({
+    mutationKey: ['git', 'init'],
+    mutationFn: (args) => invoke('plugin:yaak-git|initialize', { ...args }),
+    onSuccess,
+  });
 }
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-add"
description = "Enables the add command without any pre-configured scope."
commands.allow = ["add"]

[[permission]]
identifier = "deny-add"
description = "Denies the add command without any pre-configured scope."
commands.deny = ["add"]

@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-branch"
description = "Enables the branch command without any pre-configured scope."
commands.allow = ["branch"]

[[permission]]
identifier = "deny-branch"
description = "Denies the branch command without any pre-configured scope."
commands.deny = ["branch"]

@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-checkout"
description = "Enables the checkout command without any pre-configured scope."
commands.allow = ["checkout"]

[[permission]]
identifier = "deny-checkout"
description = "Denies the checkout command without any pre-configured scope."
commands.deny = ["checkout"]

@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-checkout-remote"
description = "Enables the checkout_remote command without any pre-configured scope."
commands.allow = ["checkout_remote"]

[[permission]]
identifier = "deny-checkout-remote"
description = "Denies the checkout_remote command without any pre-configured scope."
commands.deny = ["checkout_remote"]

@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-commit"
description = "Enables the commit command without any pre-configured scope."
commands.allow = ["commit"]

[[permission]]
identifier = "deny-commit"
description = "Denies the commit command without any pre-configured scope."
commands.deny = ["commit"]

@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-delete-branch"
description = "Enables the delete_branch command without any pre-configured scope."
commands.allow = ["delete_branch"]

[[permission]]
identifier = "deny-delete-branch"
description = "Denies the delete_branch command without any pre-configured scope."
commands.deny = ["delete_branch"]

@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-fetch-all"
description = "Enables the fetch_all command without any pre-configured scope."
commands.allow = ["fetch_all"]

[[permission]]
identifier = "deny-fetch-all"
description = "Denies the fetch_all command without any pre-configured scope."
commands.deny = ["fetch_all"]

@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-initialize"
description = "Enables the initialize command without any pre-configured scope."
commands.allow = ["initialize"]

[[permission]]
identifier = "deny-initialize"
description = "Denies the initialize command without any pre-configured scope."
commands.deny = ["initialize"]

@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-log"
description = "Enables the log command without any pre-configured scope."
commands.allow = ["log"]

[[permission]]
identifier = "deny-log"
description = "Denies the log command without any pre-configured scope."
commands.deny = ["log"]

@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-merge-branch"
description = "Enables the merge_branch command without any pre-configured scope."
commands.allow = ["merge_branch"]

[[permission]]
identifier = "deny-merge-branch"
description = "Denies the merge_branch command without any pre-configured scope."
commands.deny = ["merge_branch"]

@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-pull"
description = "Enables the pull command without any pre-configured scope."
commands.allow = ["pull"]

[[permission]]
identifier = "deny-pull"
description = "Denies the pull command without any pre-configured scope."
commands.deny = ["pull"]

@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-push"
description = "Enables the push command without any pre-configured scope."
commands.allow = ["push"]

[[permission]]
identifier = "deny-push"
description = "Denies the push command without any pre-configured scope."
commands.deny = ["push"]

@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-status"
description = "Enables the status command without any pre-configured scope."
commands.allow = ["status"]

[[permission]]
identifier = "deny-status"
description = "Denies the status command without any pre-configured scope."
commands.deny = ["status"]

@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-unstage"
description = "Enables the unstage command without any pre-configured scope."
commands.allow = ["unstage"]

[[permission]]
identifier = "deny-unstage"
description = "Denies the unstage command without any pre-configured scope."
commands.deny = ["unstage"]
@@ -1,391 +0,0 @@
## Default Permission

Default permissions for the plugin

- `allow-add`
- `allow-branch`
- `allow-checkout`
- `allow-commit`
- `allow-delete-branch`
- `allow-fetch-all`
- `allow-initialize`
- `allow-log`
- `allow-merge-branch`
- `allow-pull`
- `allow-push`
- `allow-status`
- `allow-unstage`

## Permission Table

| Identifier | Description |
| --- | --- |
| `yaak-git:allow-add` | Enables the add command without any pre-configured scope. |
| `yaak-git:deny-add` | Denies the add command without any pre-configured scope. |
| `yaak-git:allow-branch` | Enables the branch command without any pre-configured scope. |
| `yaak-git:deny-branch` | Denies the branch command without any pre-configured scope. |
| `yaak-git:allow-checkout` | Enables the checkout command without any pre-configured scope. |
| `yaak-git:deny-checkout` | Denies the checkout command without any pre-configured scope. |
| `yaak-git:allow-checkout-remote` | Enables the checkout_remote command without any pre-configured scope. |
| `yaak-git:deny-checkout-remote` | Denies the checkout_remote command without any pre-configured scope. |
| `yaak-git:allow-commit` | Enables the commit command without any pre-configured scope. |
| `yaak-git:deny-commit` | Denies the commit command without any pre-configured scope. |
| `yaak-git:allow-delete-branch` | Enables the delete_branch command without any pre-configured scope. |
| `yaak-git:deny-delete-branch` | Denies the delete_branch command without any pre-configured scope. |
| `yaak-git:allow-fetch-all` | Enables the fetch_all command without any pre-configured scope. |
| `yaak-git:deny-fetch-all` | Denies the fetch_all command without any pre-configured scope. |
| `yaak-git:allow-initialize` | Enables the initialize command without any pre-configured scope. |
| `yaak-git:deny-initialize` | Denies the initialize command without any pre-configured scope. |
| `yaak-git:allow-log` | Enables the log command without any pre-configured scope. |
| `yaak-git:deny-log` | Denies the log command without any pre-configured scope. |
| `yaak-git:allow-merge-branch` | Enables the merge_branch command without any pre-configured scope. |
| `yaak-git:deny-merge-branch` | Denies the merge_branch command without any pre-configured scope. |
| `yaak-git:allow-pull` | Enables the pull command without any pre-configured scope. |
| `yaak-git:deny-pull` | Denies the pull command without any pre-configured scope. |
| `yaak-git:allow-push` | Enables the push command without any pre-configured scope. |
| `yaak-git:deny-push` | Denies the push command without any pre-configured scope. |
| `yaak-git:allow-status` | Enables the status command without any pre-configured scope. |
| `yaak-git:deny-status` | Denies the status command without any pre-configured scope. |
| `yaak-git:allow-unstage` | Enables the unstage command without any pre-configured scope. |
| `yaak-git:deny-unstage` | Denies the unstage command without any pre-configured scope. |
@@ -1,445 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "PermissionFile",
  "description": "Permission file that can define a default permission, a set of permissions or a list of inlined permissions.",
  "type": "object",
  "properties": {
    "default": {
      "description": "The default permission set for the plugin",
      "anyOf": [{ "$ref": "#/definitions/DefaultPermission" }, { "type": "null" }]
    },
    "set": {
      "description": "A list of permissions sets defined",
      "type": "array",
      "items": { "$ref": "#/definitions/PermissionSet" }
    },
    "permission": {
      "description": "A list of inlined permissions",
      "default": [],
      "type": "array",
      "items": { "$ref": "#/definitions/Permission" }
    }
  },
  "definitions": {
    "DefaultPermission": {
      "description": "The default permission set of the plugin.\n\nWorks similarly to a permission with the \"default\" identifier.",
      "type": "object",
      "required": ["permissions"],
      "properties": {
        "version": {
          "description": "The version of the permission.",
          "type": ["integer", "null"],
          "format": "uint64",
          "minimum": 1.0
        },
        "description": {
          "description": "Human-readable description of what the permission does. Tauri convention is to use <h4> headings in markdown content for Tauri documentation generation purposes.",
          "type": ["string", "null"]
        },
        "permissions": {
          "description": "All permissions this set contains.",
          "type": "array",
          "items": { "type": "string" }
        }
      }
    },
    "PermissionSet": {
      "description": "A set of direct permissions grouped together under a new name.",
      "type": "object",
      "required": ["description", "identifier", "permissions"],
      "properties": {
        "identifier": { "description": "A unique identifier for the permission.", "type": "string" },
        "description": { "description": "Human-readable description of what the permission does.", "type": "string" },
        "permissions": {
          "description": "All permissions this set contains.",
          "type": "array",
          "items": { "$ref": "#/definitions/PermissionKind" }
        }
      }
    },
    "Permission": {
      "description": "Descriptions of explicit privileges of commands.\n\nIt can enable commands to be accessible in the frontend of the application.\n\nIf the scope is defined it can be used to fine grain control the access of individual or multiple commands.",
      "type": "object",
      "required": ["identifier"],
      "properties": {
        "version": {
          "description": "The version of the permission.",
          "type": ["integer", "null"],
          "format": "uint64",
          "minimum": 1.0
        },
        "identifier": { "description": "A unique identifier for the permission.", "type": "string" },
        "description": {
          "description": "Human-readable description of what the permission does. Tauri internal convention is to use <h4> headings in markdown content for Tauri documentation generation purposes.",
          "type": ["string", "null"]
        },
        "commands": {
          "description": "Allowed or denied commands when using this permission.",
          "default": { "allow": [], "deny": [] },
          "allOf": [{ "$ref": "#/definitions/Commands" }]
        },
        "scope": {
          "description": "Allowed or denied scoped when using this permission.",
          "allOf": [{ "$ref": "#/definitions/Scopes" }]
        },
        "platforms": {
          "description": "Target platforms this permission applies. By default all platforms are affected by this permission.",
          "type": ["array", "null"],
          "items": { "$ref": "#/definitions/Target" }
        }
      }
    },
    "Commands": {
      "description": "Allowed and denied commands inside a permission.\n\nIf two commands clash inside of `allow` and `deny`, it should be denied by default.",
      "type": "object",
      "properties": {
        "allow": { "description": "Allowed command.", "default": [], "type": "array", "items": { "type": "string" } },
        "deny": { "description": "Denied command, which takes priority.", "default": [], "type": "array", "items": { "type": "string" } }
      }
    },
    "Scopes": {
      "description": "An argument for fine grained behavior control of Tauri commands.\n\nIt can be of any serde serializable type and is used to allow or prevent certain actions inside a Tauri command. The configured scope is passed to the command and will be enforced by the command implementation.\n\n## Example\n\n```json { \"allow\": [{ \"path\": \"$HOME/**\" }], \"deny\": [{ \"path\": \"$HOME/secret.txt\" }] } ```",
      "type": "object",
      "properties": {
        "allow": {
          "description": "Data that defines what is allowed by the scope.",
          "type": ["array", "null"],
          "items": { "$ref": "#/definitions/Value" }
        },
        "deny": {
          "description": "Data that defines what is denied by the scope. This should be prioritized by validation logic.",
          "type": ["array", "null"],
          "items": { "$ref": "#/definitions/Value" }
        }
      }
    },
    "Value": {
      "description": "All supported ACL values.",
      "anyOf": [
        { "description": "Represents a null JSON value.", "type": "null" },
        { "description": "Represents a [`bool`].", "type": "boolean" },
        { "description": "Represents a valid ACL [`Number`].", "allOf": [{ "$ref": "#/definitions/Number" }] },
        { "description": "Represents a [`String`].", "type": "string" },
        { "description": "Represents a list of other [`Value`]s.", "type": "array", "items": { "$ref": "#/definitions/Value" } },
        { "description": "Represents a map of [`String`] keys to [`Value`]s.", "type": "object", "additionalProperties": { "$ref": "#/definitions/Value" } }
      ]
    },
    "Number": {
      "description": "A valid ACL number.",
      "anyOf": [
        { "description": "Represents an [`i64`].", "type": "integer", "format": "int64" },
        { "description": "Represents a [`f64`].", "type": "number", "format": "double" }
      ]
    },
    "Target": {
      "description": "Platform target.",
      "oneOf": [
        { "description": "MacOS.", "type": "string", "enum": ["macOS"] },
        { "description": "Windows.", "type": "string", "enum": ["windows"] },
        { "description": "Linux.", "type": "string", "enum": ["linux"] },
        { "description": "Android.", "type": "string", "enum": ["android"] },
        { "description": "iOS.", "type": "string", "enum": ["iOS"] }
      ]
    },
    "PermissionKind": {
      "type": "string",
      "oneOf": [
        { "description": "Enables the add command without any pre-configured scope.", "type": "string", "const": "allow-add" },
        { "description": "Denies the add command without any pre-configured scope.", "type": "string", "const": "deny-add" },
        { "description": "Enables the branch command without any pre-configured scope.", "type": "string", "const": "allow-branch" },
        { "description": "Denies the branch command without any pre-configured scope.", "type": "string", "const": "deny-branch" },
        { "description": "Enables the checkout command without any pre-configured scope.", "type": "string", "const": "allow-checkout" },
        { "description": "Denies the checkout command without any pre-configured scope.", "type": "string", "const": "deny-checkout" },
        { "description": "Enables the checkout_remote command without any pre-configured scope.", "type": "string", "const": "allow-checkout-remote" },
        { "description": "Denies the checkout_remote command without any pre-configured scope.", "type": "string", "const": "deny-checkout-remote" },
        { "description": "Enables the commit command without any pre-configured scope.", "type": "string", "const": "allow-commit" },
        { "description": "Denies the commit command without any pre-configured scope.", "type": "string", "const": "deny-commit" },
        { "description": "Enables the delete_branch command without any pre-configured scope.", "type": "string", "const": "allow-delete-branch" },
        { "description": "Denies the delete_branch command without any pre-configured scope.", "type": "string", "const": "deny-delete-branch" },
        { "description": "Enables the fetch_all command without any pre-configured scope.", "type": "string", "const": "allow-fetch-all" },
        { "description": "Denies the fetch_all command without any pre-configured scope.", "type": "string", "const": "deny-fetch-all" },
        { "description": "Enables the initialize command without any pre-configured scope.", "type": "string", "const": "allow-initialize" },
        { "description": "Denies the initialize command without any pre-configured scope.", "type": "string", "const": "deny-initialize" },
        { "description": "Enables the log command without any pre-configured scope.", "type": "string", "const": "allow-log" },
        { "description": "Denies the log command without any pre-configured scope.", "type": "string", "const": "deny-log" },
        { "description": "Enables the merge_branch command without any pre-configured scope.", "type": "string", "const": "allow-merge-branch" },
        { "description": "Denies the merge_branch command without any pre-configured scope.", "type": "string", "const": "deny-merge-branch" },
        { "description": "Enables the pull command without any pre-configured scope.", "type": "string", "const": "allow-pull" },
        { "description": "Denies the pull command without any pre-configured scope.", "type": "string", "const": "deny-pull" },
        { "description": "Enables the push command without any pre-configured scope.", "type": "string", "const": "allow-push" },
        { "description": "Denies the push command without any pre-configured scope.", "type": "string", "const": "deny-push" },
        { "description": "Enables the status command without any pre-configured scope.", "type": "string", "const": "allow-status" },
        { "description": "Denies the status command without any pre-configured scope.", "type": "string", "const": "deny-status" },
        { "description": "Enables the unstage command without any pre-configured scope.", "type": "string", "const": "allow-unstage" },
        { "description": "Denies the unstage command without any pre-configured scope.", "type": "string", "const": "deny-unstage" },
        { "description": "Default permissions for the plugin", "type": "string", "const": "default" }
      ]
    }
  }
}
@@ -117,9 +117,9 @@ pub fn git_commit(dir: &Path, message: &str) -> Result<()> {
     let tree = repo.find_tree(tree_oid)?;

     // Make the signature
-    let config = git2::Config::open_default()?.snapshot()?;
-    let name = config.get_str("user.name").unwrap_or("Change Me");
-    let email = config.get_str("user.email").unwrap_or("change_me@example.com");
+    let config = repo.config()?.snapshot()?;
+    let name = config.get_str("user.name").unwrap_or("Unknown");
+    let email = config.get_str("user.email")?;
     let sig = git2::Signature::now(name, email)?;

     // Get the current HEAD commit (if it exists)
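With this change the commit signature is read from the repository's own config, and `user.email` becomes required (the call now uses `?` instead of a placeholder fallback). A hedged sketch, using git2 calls that exist for this purpose, of how a caller could seed a repo-local identity so `git_commit` succeeds:

```rust
use git2::Repository;
use std::path::Path;

// Hypothetical helper, not part of this diff.
fn set_repo_identity(dir: &Path, name: &str, email: &str) -> Result<(), git2::Error> {
    let repo = Repository::open(dir)?;
    let mut config = repo.config()?; // writes to the repo-local .git/config
    config.set_str("user.name", name)?;
    config.set_str("user.email", email)?;
    Ok(())
}
```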
Some files were not shown because too many files have changed in this diff.