Mirror of https://github.com/mountain-loop/yaak.git (synced 2026-02-13 14:17:44 +01:00)

Compare commits (120 commits)
| SHA1 |
|---|
| 041298b3f8 |
| b400940f0e |
| 2e144f064d |
| d8b1cadae6 |
| c2f9760d08 |
| a4c600cb48 |
| bc3a5e3e58 |
| 4c3a02ac53 |
| 1974d61aa4 |
| 0bcb092854 |
| 409620f533 |
| 3e9037f70a |
| be82b67ed3 |
| 432b366105 |
| 42e70b941d |
| 3808215210 |
| 763a60982a |
| a05679fd93 |
| 73c366dc27 |
| 0be7d0283b |
| 749df338c5 |
| 3184c1b79e |
| b52bf7cd56 |
| d962d7f94b |
| 21e2a67c1e |
| c188435524 |
| 8a7a7ba49d |
| cbc40230bb |
| bc4c3178c9 |
| 121fe5b3ea |
| 861609ddc0 |
| e5070513ac |
| f5c3798df9 |
| 469d12fede |
| 417a02744b |
| 81e78ef24c |
| dad9cebb9e |
| b3ede3d6d6 |
| 035fe54df0 |
| 5f8d99ba64 |
| 84b8d130dc |
| 94d4227bc1 |
| 77cdea2f9f |
| 8b1ca4cb47 |
| d3b8a42180 |
| 95f39c514a |
| 7cba082eb0 |
| 3b9b320be2 |
| 18664975a9 |
| bb014b7c43 |
| 9fa0650647 |
| b8c42677ca |
| 2eb3c2241c |
| 8fb7bbfe2e |
| 52eba74151 |
| e651760713 |
| 82451a26f6 |
| cc15f60fb6 |
| 2f8b2a81c7 |
| 6d4fdc91fe |
| faca29c789 |
| 1ab937aae4 |
| 45fcea1954 |
| 73554078d1 |
| a42a88de7b |
| 14a6079176 |
| 6c513616c0 |
| cdf5f1b7a5 |
| 6566857d54 |
| 2e55a1bd6d |
| e114a85c39 |
| 92be088e6c |
| f1757ae427 |
| ce885c3551 |
| 17657a4d04 |
| b7f62b78b1 |
| 006284b99c |
| bac3968aac |
| e5fa044eda |
| 5969120140 |
| 8801936ad2 |
| 1d37d46130 |
| 445c30f3a9 |
| 5fedea38c2 |
| d86549f492 |
| 4c4eaba7d2 |
| cf8f8743bb |
| aa75636026 |
| 2c41b243b6 |
| 6aea343d4f |
| c300e8cbd5 |
| 6e25c26e9f |
| dce1455be7 |
| 736025b12f |
| cb9e9a67a3 |
| 93c323458f |
| 6f8c03d8c1 |
| afd4228fcf |
| d478e5a12e |
| 0db9ebe67d |
| 80ea5e6b91 |
| cb773babe1 |
| b9ed554aca |
| f42f3d0e27 |
| 93ba5b6e5c |
| be11d5968e |
| 0828599e4f |
| f47d22c395 |
| 12233cb6f6 |
| cdce2ac53a |
| f4d0371060 |
| 787a0433cb |
| 60ea408e51 |
| 0db0cdfd6c |
| 26371e5f6b |
| 6b7c144a11 |
| 62f43ca24c |
| fbf4d3c11e |
| 7a1a0689b0 |
| 9ead45d67a |
.github/FUNDING.yml (vendored, new file, +15)

```
@@ -0,0 +1,15 @@
# These are supported funding model platforms

github: gschier
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
polar: # Replace with a single Polar username
buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
thanks_dev: # Replace with a single thanks.dev username
custom: https://yaak.app/pricing
```
.github/workflows/release.yml (vendored, 2 lines changed)

```
@@ -65,7 +65,7 @@ jobs:

- name: install dependencies (windows only)
  if: matrix.platform == 'windows-latest'
  run: cargo install --force trusted-signing-cli
  run: cargo install --force trusted-signing-cli --version 0.5.0

- name: Install NPM Dependencies
  run: |
```
````
@@ -60,3 +60,10 @@ Run the app to apply the migrations.
If nothing happens, try `cargo clean` and run the app again.

_Note: Development builds use a separate database location from production builds._

## Lezer Grammer Generation

```sh
# Example
lezer-generator components/core/Editor/<LANG>/<LANG>.grammar > components/core/Editor/<LANG>/<LANG>.ts
```
````
README.md (25 lines changed)

```
@@ -3,7 +3,12 @@
Yaak is a desktop API client for interacting with REST, GraphQL, Server Sent Events (SSE), WebSocket, and gRPC
APIs. It's built using [Tauri](https://tauri.app), Rust, and ReactJS.

![Yaak API Client](screenshots/screenshot.png)
![screenshot](https://github.com/user-attachments/assets/5b2fb1a8-4469-4a9d-a4fb-e42a92b1d2a4)

## Contribution Policy

Yaak is open source, but only accepting contributions for bug fixes. To get started,
visit [`DEVELOPMENT.md`](DEVELOPMENT.md) for tips on setting up your environment.

## Feature Overview

@@ -14,6 +19,7 @@ APIs. It's built using [Tauri](https://tauri.app), Rust, and ReactJS.
- ⛓️ Chain together multiple requests to dynamically reference values.<br/>
- 📂 Organize requests into workspaces and nested folders.<br/>
- 🧮 Use environment variables to easily switch between Prod and Dev.<br/>
- 🛡️ Secure arbitrary text values with end-to-end encryption<br/>
- 🏷️ Send dynamic values like UUIDs or timestamps using template tags.<br/>
- 🎨 Choose from many of the included themes, or make your own.<br/>
- 💽 Mirror workspace data to a directory for integration with Git or Dropbox.<br/>
@@ -21,17 +27,8 @@ APIs. It's built using [Tauri](https://tauri.app), Rust, and ReactJS.
- 🔌 Create your own plugins for authentication, template tags, and more!<br/>
- 🛜 Configure a proxy to access firewall-blocked APIs

## Feedback and Bug Reports
## Useful Resources

All feedback, bug reports, questions, and feature requests should be reported to
[feedback.yaak.app](https://feedback.yaak.app).

## Community Projects

- [`yaak2postman`](https://github.com/BiteCraft/yaak2postman) CLI for converting Yaak data
  exports to Postman-compatible collections

## Contribution Policy

Yaak is open source, but only accepting contributions for bug fixes. To get started,
visit [`DEVELOPMENT.md`](DEVELOPMENT.md) for tips on setting up your environment.
- [Feedback and Bug Reports](https://feedback.yaak.app)
- [Documentation](https://feedback.yaak.app/help)
- [Yaak vs Postman](https://yaak.app/blog/postman-alternative)
```
package-lock.json (generated, 3845 lines): diff suppressed because it is too large.
package.json (15 lines changed)

```
@@ -10,8 +10,10 @@
"packages/plugin-runtime",
"packages/plugin-runtime-types",
"packages/common-lib",
"src-tauri/yaak-license",
"src-tauri/yaak-crypto",
"src-tauri/yaak-git",
"src-tauri/yaak-license",
"src-tauri/yaak-mac-window",
"src-tauri/yaak-models",
"src-tauri/yaak-plugins",
"src-tauri/yaak-sse",
@@ -35,10 +37,13 @@
"tauri-before-build": "npm run bootstrap && npm run --workspaces --if-present build",
"tauri-before-dev": "npm run --workspaces --if-present dev"
},
"dependencies": {
"jotai": "^2.12.2"
},
"devDependencies": {
"@tauri-apps/cli": "^2.2.7",
"@typescript-eslint/eslint-plugin": "^8.18.1",
"@typescript-eslint/parser": "^8.18.1",
"@tauri-apps/cli": "^2.4.1",
"@typescript-eslint/eslint-plugin": "^8.27.0",
"@typescript-eslint/parser": "^8.27.0",
"eslint": "^8",
"eslint-config-prettier": "^8",
"eslint-plugin-import": "^2.31.0",
@@ -48,6 +53,6 @@
"nodejs-file-downloader": "^4.13.0",
"npm-run-all": "^4.1.5",
"prettier": "^3.4.2",
"typescript": "^5.7.2"
"typescript": "^5.8.2"
}
}
```
```
@@ -1,6 +1,6 @@
{
"name": "@yaakapp/api",
"version": "0.5.0",
"version": "0.5.3",
"main": "lib/index.js",
"typings": "./lib/index.d.ts",
"files": [
@@ -20,8 +20,6 @@
"@types/node": "^22.5.4"
},
"devDependencies": {
"cpy-cli": "^5.0.0",
"npm-run-all": "^4.1.5",
"typescript": "^5.6.2"
"cpy-cli": "^5.0.0"
}
}
```
```
@@ -344,7 +344,7 @@ export type ImportResources = { workspaces: Array<Workspace>, environments: Arra

export type ImportResponse = { resources: ImportResources, };

export type InternalEvent = { id: string, pluginRefId: string, pluginName: string, replyId: string | null, windowContext: WindowContext, payload: InternalEventPayload, };
export type InternalEvent = { id: string, pluginRefId: string, pluginName: string, replyId: string | null, windowContext: PluginWindowContext, payload: InternalEventPayload, };

export type InternalEventPayload = { "type": "boot_request" } & BootRequest | { "type": "boot_response" } & BootResponse | { "type": "reload_request" } & EmptyPayload | { "type": "reload_response" } & EmptyPayload | { "type": "terminate_request" } | { "type": "terminate_response" } | { "type": "import_request" } & ImportRequest | { "type": "import_response" } & ImportResponse | { "type": "filter_request" } & FilterRequest | { "type": "filter_response" } & FilterResponse | { "type": "export_http_request_request" } & ExportHttpRequestRequest | { "type": "export_http_request_response" } & ExportHttpRequestResponse | { "type": "send_http_request_request" } & SendHttpRequestRequest | { "type": "send_http_request_response" } & SendHttpRequestResponse | { "type": "get_http_request_actions_request" } & EmptyPayload | { "type": "get_http_request_actions_response" } & GetHttpRequestActionsResponse | { "type": "call_http_request_action_request" } & CallHttpRequestActionRequest | { "type": "get_template_functions_request" } | { "type": "get_template_functions_response" } & GetTemplateFunctionsResponse | { "type": "call_template_function_request" } & CallTemplateFunctionRequest | { "type": "call_template_function_response" } & CallTemplateFunctionResponse | { "type": "get_http_authentication_summary_request" } & EmptyPayload | { "type": "get_http_authentication_summary_response" } & GetHttpAuthenticationSummaryResponse | { "type": "get_http_authentication_config_request" } & GetHttpAuthenticationConfigRequest | { "type": "get_http_authentication_config_response" } & GetHttpAuthenticationConfigResponse | { "type": "call_http_authentication_request" } & CallHttpAuthenticationRequest | { "type": "call_http_authentication_response" } & CallHttpAuthenticationResponse | { "type": "call_http_authentication_action_request" } & CallHttpAuthenticationActionRequest | { "type": "call_http_authentication_action_response" } & EmptyPayload | { "type": "copy_text_request" } & CopyTextRequest | { "type": "copy_text_response" } & EmptyPayload | { "type": "render_http_request_request" } & RenderHttpRequestRequest | { "type": "render_http_request_response" } & RenderHttpRequestResponse | { "type": "get_key_value_request" } & GetKeyValueRequest | { "type": "get_key_value_response" } & GetKeyValueResponse | { "type": "set_key_value_request" } & SetKeyValueRequest | { "type": "set_key_value_response" } & SetKeyValueResponse | { "type": "delete_key_value_request" } & DeleteKeyValueRequest | { "type": "delete_key_value_response" } & DeleteKeyValueResponse | { "type": "open_window_request" } & OpenWindowRequest | { "type": "window_navigate_event" } & WindowNavigateEvent | { "type": "window_close_event" } | { "type": "close_window_request" } & CloseWindowRequest | { "type": "template_render_request" } & TemplateRenderRequest | { "type": "template_render_response" } & TemplateRenderResponse | { "type": "show_toast_request" } & ShowToastRequest | { "type": "show_toast_response" } & EmptyPayload | { "type": "prompt_text_request" } & PromptTextRequest | { "type": "prompt_text_response" } & PromptTextResponse | { "type": "get_http_request_by_id_request" } & GetHttpRequestByIdRequest | { "type": "get_http_request_by_id_response" } & GetHttpRequestByIdResponse | { "type": "find_http_responses_request" } & FindHttpResponsesRequest | { "type": "find_http_responses_response" } & FindHttpResponsesResponse | { "type": "empty_response" } & EmptyPayload | { "type": "error_response" } & ErrorResponse;

@@ -356,6 +356,8 @@ export type OpenWindowRequest = { url: string,
*/
label: string, title?: string, size?: WindowSize, dataDirKey?: string, };

export type PluginWindowContext = { "type": "none" } | { "type": "label", label: string, workspace_id: string | null, };

export type PromptTextRequest = { id: string, title: string, label: string, description?: string, defaultValue?: string, placeholder?: string,
/**
* Text to add to the confirmation button
@@ -393,14 +395,17 @@ export type TemplateFunction = { name: string, description?: string,
* Also support alternative names. This is useful for not breaking existing
* tags when changing the `name` property
*/
aliases?: Array<string>, args: Array<FormInput>, };
aliases?: Array<string>, args: Array<TemplateFunctionArg>, };

/**
* Similar to FormInput, but contains
*/
export type TemplateFunctionArg = FormInput;

export type TemplateRenderRequest = { data: JsonValue, purpose: RenderPurpose, };

export type TemplateRenderResponse = { data: JsonValue, };

export type WindowContext = { "type": "none" } | { "type": "label", label: string, };

export type WindowNavigateEvent = { url: string, };

export type WindowSize = { width: number, height: number, };
```
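The rename from WindowContext to PluginWindowContext also adds a `workspace_id` to the `label` variant. A minimal sketch of constructing each variant, assuming the binding is re-exported from `@yaakapp/api` as the index diff further down suggests:

```ts
import type { PluginWindowContext } from '@yaakapp/api';

// The "none" variant carries no window information.
const background: PluginWindowContext = { type: 'none' };

// The "label" variant now also records which workspace the window belongs to.
const forWindow: PluginWindowContext = {
  type: 'label',
  label: 'main', // hypothetical window label
  workspace_id: null,
};
```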
```
@@ -1,6 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type Environment = { model: "environment", id: string, workspaceId: string, environmentId: string | null, createdAt: string, updatedAt: string, name: string, variables: Array<EnvironmentVariable>, };
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, base: boolean, variables: Array<EnvironmentVariable>, };

export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };

@@ -24,4 +24,4 @@ export type HttpUrlParameter = { enabled?: boolean, name: string, value: string,

export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };

export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, name: string, description: string, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, name: string, description: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };

@@ -1,3 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type JsonValue = number | string | Array<JsonValue> | { [key in string]?: JsonValue };
export type JsonValue = number | string | boolean | Array<JsonValue> | { [key in string]?: JsonValue } | null;
```
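The regenerated JsonValue binding now admits booleans and null, matching what serde_json can actually emit. A quick sketch of values that only type-check under the new definition (import path assumed, as above):

```ts
import type { JsonValue } from '@yaakapp/api';

// `true` and `null` were rejected by the old union; every entry here
// satisfies the widened JsonValue.
const samples: JsonValue[] = [
  42,
  'text',
  true,
  null,
  [1, 'two', false],
  { nested: { ok: true } },
];
```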
```
@@ -3,3 +3,7 @@ export type * from './themes';

export * from './bindings/gen_models';
export * from './bindings/gen_events';

// Some extras for utility

export type { PartialImportResources } from './plugins/ImporterPlugin';

@@ -34,7 +34,7 @@ export interface Context {
openUrl(
args: OpenWindowRequest & {
onNavigate?: (args: { url: string }) => void;
onClose: () => void;
onClose?: () => void;
},
): Promise<{ close: () => void }>;
};
```
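Making onClose optional means callers that only care about navigation no longer need a no-op handler. A hedged usage sketch (the URL, label, and the exact location of openUrl on the plugin Context are illustrative):

```ts
// Inside a plugin handler that receives `ctx: Context`:
const { close } = await ctx.window.openUrl({
  url: 'https://example.com/oauth/authorize', // illustrative URL
  label: 'oauth',                             // illustrative window label
  onNavigate: ({ url }) => {
    // Close the window once the provider redirects back
    if (url.startsWith('https://example.com/callback')) close();
  },
  // onClose can now simply be omitted
});
```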
```
@@ -1,6 +1,6 @@
import type { Context } from './Context';

export type FilterPluginResponse = { filtered: string };
type FilterPluginResponse = { filtered: string };

export type FilterPlugin = {
name: string;
```
```
@@ -1,15 +1,21 @@
import { Environment, Folder, GrpcRequest, HttpRequest, Workspace } from '../bindings/gen_models';
import type { AtLeast } from '../helpers';
import { ImportResources } from '../bindings/gen_events';
import { AtLeast } from '../helpers';
import type { Context } from './Context';

type ImportPluginResponse = null | {
resources: {
workspaces: AtLeast<Workspace, 'name' | 'id' | 'model'>[];
environments: AtLeast<Environment, 'name' | 'id' | 'model' | 'workspaceId'>[];
folders: AtLeast<Folder, 'name' | 'id' | 'model' | 'workspaceId'>[];
httpRequests: AtLeast<HttpRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
grpcRequests: AtLeast<GrpcRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
};
type RootFields = 'name' | 'id' | 'model';
type CommonFields = RootFields | 'workspaceId';

export type PartialImportResources = {
workspaces: Array<AtLeast<ImportResources['workspaces'][0], RootFields>>;
environments: Array<AtLeast<ImportResources['environments'][0], CommonFields>>;
folders: Array<AtLeast<ImportResources['folders'][0], CommonFields>>;
httpRequests: Array<AtLeast<ImportResources['httpRequests'][0], CommonFields>>;
grpcRequests: Array<AtLeast<ImportResources['grpcRequests'][0], CommonFields>>;
websocketRequests: Array<AtLeast<ImportResources['websocketRequests'][0], CommonFields>>;
};

export type ImportPluginResponse = null | {
resources: PartialImportResources;
};

export type ImporterPlugin = {
```
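AtLeast itself is imported from '../helpers' and not shown in this diff; a common formulation, stated here as an assumption, keeps the listed keys required and relaxes the rest:

```ts
// Assumed shape of the helper from '../helpers' (not part of this diff):
type AtLeast<T, K extends keyof T> = Partial<T> & Pick<T, K>;

// With RootFields = 'name' | 'id' | 'model', an importer may return a
// workspace carrying only those three fields; everything else is optional.
const minimalWorkspace = {
  model: 'workspace' as const,
  id: 'wk_example', // hypothetical ID
  name: 'Imported Workspace',
};
```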
```
@@ -15,7 +15,6 @@ export class PluginHandle {
bootRequest: this.bootRequest,
};
this.#instance = new PluginInstance(workerData, pluginToAppEvents);
console.log('Created plugin worker for ', this.bootRequest.dir);
}

sendToWorker(event: InternalEvent) {
```
```
@@ -1,3 +1,4 @@
import { PluginWindowContext, TemplateFunctionArg } from '@yaakapp-internal/plugins';
import type {
BootRequest,
Context,
@@ -17,7 +18,6 @@ import type {
SendHttpRequestResponse,
TemplateFunction,
TemplateRenderResponse,
WindowContext,
} from '@yaakapp/api';
import console from 'node:console';
import { readFileSync, type Stats, statSync, watch } from 'node:fs';
@@ -45,12 +45,12 @@ export class PluginInstance {
this.#appToPluginEvents = new EventChannel();

// Forward incoming events to onMessage()
this.#appToPluginEvents.listen(async event => {
this.#appToPluginEvents.listen(async (event) => {
await this.#onMessage(event);
})
});

// Reload plugin if the JS or package.json changes
const windowContextNone: WindowContext = { type: 'none' };
const windowContextNone: PluginWindowContext = { type: 'none' };
const fileChangeCallback = async () => {
this.#importModule();
return this.#sendPayload(windowContextNone, { type: 'reload_response' }, null);
@@ -261,10 +261,10 @@ export class PluginInstance {
payload.type === 'call_template_function_request' &&
Array.isArray(this.#mod?.templateFunctions)
) {
const action = this.#mod.templateFunctions.find((a) => a.name === payload.name);
if (typeof action?.onRender === 'function') {
applyFormInputDefaults(action.args, payload.args.values);
const result = await action.onRender(ctx, payload.args);
const fn = this.#mod.templateFunctions.find((a) => a.name === payload.name);
if (typeof fn?.onRender === 'function') {
applyFormInputDefaults(fn.args, payload.args.values);
const result = await fn.onRender(ctx, payload.args);
this.#sendPayload(
windowContext,
{
@@ -317,7 +317,7 @@ export class PluginInstance {
}

#buildEventToSend(
windowContext: WindowContext,
windowContext: PluginWindowContext,
payload: InternalEventPayload,
replyId: string | null = null,
): InternalEvent {
@@ -332,7 +332,7 @@ export class PluginInstance {
}

#sendPayload(
windowContext: WindowContext,
windowContext: PluginWindowContext,
payload: InternalEventPayload,
replyId: string | null,
): string {
@@ -348,12 +348,12 @@ export class PluginInstance {
this.#pluginToAppEvents.emit(event);
}

#sendEmpty(windowContext: WindowContext, replyId: string | null = null): string {
#sendEmpty(windowContext: PluginWindowContext, replyId: string | null = null): string {
return this.#sendPayload(windowContext, { type: 'empty_response' }, replyId);
}

#sendAndWaitForReply<T extends Omit<InternalEventPayload, 'type'>>(
windowContext: WindowContext,
windowContext: PluginWindowContext,
payload: InternalEventPayload,
): Promise<T> {
// 1. Build event to send
@@ -379,7 +379,7 @@ export class PluginInstance {
}

#sendAndListenForEvents(
windowContext: WindowContext,
windowContext: PluginWindowContext,
payload: InternalEventPayload,
onEvent: (event: InternalEventPayload) => void,
): void {
@@ -551,7 +551,7 @@ function genId(len = 5): string {

/** Recursively apply form input defaults to a set of values */
function applyFormInputDefaults(
inputs: FormInput[],
inputs: TemplateFunctionArg[],
values: { [p: string]: JsonPrimitive | undefined },
) {
for (const input of inputs) {
@@ -580,18 +580,3 @@ function watchFile(filepath: string, cb: (filepath: string) => void) {
watchedFiles[filepath] = stat;
});
}

// function prefixStdout(s: string) {
//   if (!s.includes('%s')) {
//     throw new Error('Console prefix must contain a "%s" replacer');
//   }
//   interceptStdout((text: string) => {
//     const lines = text.split(/\n/);
//     let newText = '';
//     for (let i = 0; i < lines.length; i++) {
//       if (lines[i] == '') continue;
//       newText += util.format(s, lines[i]) + '\n';
//     }
//     return newText.trimEnd();
//   });
// }
```
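For reference, the `fn.onRender` dispatch above targets definitions exported by a plugin module. A hedged sketch of such a template function (the overall shape follows the TemplateFunction type and the onRender call in this diff, while the function name and argument are invented):

```ts
import type { PluginDefinition } from '@yaakapp/api';

export const plugin: PluginDefinition = {
  templateFunctions: [
    {
      name: 'random_digits', // invented example name
      args: [{ type: 'text', name: 'len', defaultValue: '4' }], // illustrative arg shape
      async onRender(_ctx, args) {
        // `args.values` arrives with defaults already applied by
        // applyFormInputDefaults() in the runtime above.
        const len = Number(args.values.len ?? 4);
        return Array.from({ length: len }, () => Math.floor(Math.random() * 10)).join('');
      },
    },
  ],
};
```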
@@ -1,4 +1,4 @@
|
||||
edition = "2018"
|
||||
edition = "2024"
|
||||
|
||||
# Widths
|
||||
chain_width = 100
|
||||
|
||||
```
@@ -9,12 +9,14 @@ const NODE_VERSION = 'v22.9.0';
// `${process.platform}_${process.arch}`
const MAC_ARM = 'darwin_arm64';
const MAC_X64 = 'darwin_x64';
const LNX_ARM = 'linux_arm64';
const LNX_X64 = 'linux_x64';
const WIN_X64 = 'win32_x64';

const URL_MAP = {
  [MAC_ARM]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-darwin-arm64.tar.gz`,
  [MAC_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-darwin-x64.tar.gz`,
  [LNX_ARM]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-linux-arm64.tar.gz`,
  [LNX_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-linux-x64.tar.gz`,
  [WIN_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-win-x64.zip`,
};
@@ -22,15 +24,17 @@ const URL_MAP = {
const SRC_BIN_MAP = {
  [MAC_ARM]: `node-${NODE_VERSION}-darwin-arm64/bin/node`,
  [MAC_X64]: `node-${NODE_VERSION}-darwin-x64/bin/node`,
  [LNX_ARM]: `node-${NODE_VERSION}-linux-arm64/bin/node`,
  [LNX_X64]: `node-${NODE_VERSION}-linux-x64/bin/node`,
  [WIN_X64]: `node-${NODE_VERSION}-win-x64/node.exe`,
};

const DST_BIN_MAP = {
  darwin_arm64: 'yaaknode-aarch64-apple-darwin',
  darwin_x64: 'yaaknode-x86_64-apple-darwin',
  linux_x64: 'yaaknode-x86_64-unknown-linux-gnu',
  win32_x64: 'yaaknode-x86_64-pc-windows-msvc.exe',
  [MAC_ARM]: 'yaaknode-aarch64-apple-darwin',
  [MAC_X64]: 'yaaknode-x86_64-apple-darwin',
  [LNX_ARM]: 'yaaknode-aarch64-unknown-linux-gnu',
  [LNX_X64]: 'yaaknode-x86_64-unknown-linux-gnu',
  [WIN_X64]: 'yaaknode-x86_64-pc-windows-msvc.exe',
};

const key = `${process.platform}_${process.env.YAAK_TARGET_ARCH ?? process.arch}`;
```
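Both vendor scripts resolve their download by the same platform key, computed at the bottom of the hunk above. A small sketch of the lookup with a guard for unsupported platforms (the error handling is illustrative; the real script's failure behavior is not shown in this diff):

```ts
// URL_MAP and the key computation are as in the script above.
const key = `${process.platform}_${process.env.YAAK_TARGET_ARCH ?? process.arch}`;
const url = URL_MAP[key as keyof typeof URL_MAP];
if (url == null) {
  // Fail fast on platforms the map does not cover (illustrative handling)
  throw new Error(`No vendored binary for platform key: ${key}`);
}
```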
```
@@ -9,12 +9,14 @@ const VERSION = '28.3';
// `${process.platform}_${process.arch}`
const MAC_ARM = 'darwin_arm64';
const MAC_X64 = 'darwin_x64';
const LNX_ARM = 'linux_arm64';
const LNX_X64 = 'linux_x64';
const WIN_X64 = 'win32_x64';

const URL_MAP = {
  [MAC_ARM]: `https://github.com/protocolbuffers/protobuf/releases/download/v${VERSION}/protoc-${VERSION}-osx-aarch_64.zip`,
  [MAC_X64]: `https://github.com/protocolbuffers/protobuf/releases/download/v${VERSION}/protoc-${VERSION}-osx-x86_64.zip`,
  [LNX_ARM]: `https://github.com/protocolbuffers/protobuf/releases/download/v${VERSION}/protoc-${VERSION}-linux-aarch_64.zip`,
  [LNX_X64]: `https://github.com/protocolbuffers/protobuf/releases/download/v${VERSION}/protoc-${VERSION}-linux-x86_64.zip`,
  [WIN_X64]: `https://github.com/protocolbuffers/protobuf/releases/download/v${VERSION}/protoc-${VERSION}-win64.zip`,
};
@@ -22,6 +24,7 @@ const URL_MAP = {
const SRC_BIN_MAP = {
  [MAC_ARM]: 'bin/protoc',
  [MAC_X64]: 'bin/protoc',
  [LNX_ARM]: 'bin/protoc',
  [LNX_X64]: 'bin/protoc',
  [WIN_X64]: 'bin/protoc.exe',
};
@@ -29,6 +32,7 @@ const SRC_BIN_MAP = {
const DST_BIN_MAP = {
  [MAC_ARM]: 'yaakprotoc-aarch64-apple-darwin',
  [MAC_X64]: 'yaakprotoc-x86_64-apple-darwin',
  [LNX_ARM]: 'yaakprotoc-aarch64-unknown-linux-gnu',
  [LNX_X64]: 'yaakprotoc-x86_64-unknown-linux-gnu',
  [WIN_X64]: 'yaakprotoc-x86_64-pc-windows-msvc.exe',
};
```
src-tauri/.gitignore (vendored, 5 lines changed)

```
@@ -4,3 +4,8 @@ target/

vendored/*
!vendored/plugins

gen/*

**/permissions/autogenerated
**/permissions/schemas
```
src-tauri/Cargo.lock (generated, 1506 lines): diff suppressed because it is too large.
```
@@ -1,9 +1,11 @@
[workspace]
members = [
  "yaak-crypto",
  "yaak-git",
  "yaak-grpc",
  "yaak-http",
  "yaak-license",
  "yaak-mac-window",
  "yaak-models",
  "yaak-plugins",
  "yaak-sse",
@@ -15,7 +17,7 @@ members = [
[package]
name = "yaak-app"
version = "0.0.0"
edition = "2021"
edition = "2024"
authors = ["Gregory Schier"]
publish = false

@@ -31,52 +33,48 @@ strip = true # Automatically strip symbols from the binary.
cargo-clippy = []

[build-dependencies]
tauri-build = { version = "2.0.5", features = [] }

[target.'cfg(target_os = "macos")'.dependencies]
objc = "0.2.7"
cocoa = "0.26.0"
tauri-build = { version = "2.1.1", features = [] }

[target.'cfg(target_os = "linux")'.dependencies]
openssl-sys = { version = "0.9.105", features = ["vendored"] } # For Ubuntu installation to work

[dependencies]
chrono = { version = "0.4.31", features = ["serde"] }
datetime = "0.5.2"
encoding_rs = "0.8.35"
eventsource-client = { git = "https://github.com/yaakapp/rust-eventsource-client", version = "0.14.0" }
hex_color = "3.0.0"
http = { version = "1.2.0", default-features = false }
log = "0.4.21"
log = "0.4.27"
md5 = "0.7.0"
mime_guess = "2.0.5"
rand = "0.9.0"
regex = "1.10.2"
reqwest = { workspace = true, features = ["multipart", "cookies", "gzip", "brotli", "deflate", "json", "rustls-tls-manual-roots-no-provider"] }
reqwest_cookie_store = "0.8.0"
rustls = { version = "0.23.22", default-features = false, features = ["custom-provider", "ring"] }
rustls-platform-verifier = "0.5.0"
rustls = { version = "0.23.25", default-features = false, features = ["custom-provider", "ring"] }
rustls-platform-verifier = "0.5.1"
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["raw_value"] }
tauri = { workspace = true, features = ["devtools", "protocol-asset"] }
tauri-plugin-clipboard-manager = "2.2.1"
tauri-plugin-clipboard-manager = "2.2.2"
tauri-plugin-dialog = "2.2.0"
tauri-plugin-fs = "2.2.0"
tauri-plugin-log = { version = "2.2.1", features = ["colored"] }
tauri-plugin-opener = "2.2.5"
tauri-plugin-os = "2.2.0"
tauri-plugin-log = { version = "2.3.1", features = ["colored"] }
tauri-plugin-opener = "2.2.6"
tauri-plugin-os = "2.2.1"
tauri-plugin-shell = { workspace = true }
tauri-plugin-single-instance = "2.2.1"
tauri-plugin-updater = "2.5.0"
tauri-plugin-single-instance = "2.2.2"
tauri-plugin-updater = "2.6.1"
tauri-plugin-window-state = "2.2.1"
tokio = { version = "1.43.0", features = ["sync"] }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["sync"] }
tokio-stream = "0.1.17"
ts-rs = { workspace = true }
uuid = "1.12.1"
yaak-common = { workspace = true }
yaak-crypto = { workspace = true }
yaak-git = { path = "yaak-git" }
yaak-grpc = { path = "yaak-grpc" }
yaak-http = { workspace = true }
yaak-license = { path = "yaak-license" }
yaak-mac-window = { path = "yaak-mac-window" }
yaak-models = { workspace = true }
yaak-plugins = { workspace = true }
yaak-sse = { workspace = true }
@@ -85,17 +83,20 @@ yaak-templates = { workspace = true }
yaak-ws = { path = "yaak-ws" }

[workspace.dependencies]
reqwest = "0.12.12"
serde = "1.0.215"
serde_json = "1.0.132"
tauri = "2.2.5"
tauri-plugin = "2.0.4"
tauri-plugin-shell = "2.2.0"
thiserror = "2.0.3"
ts-rs = "10.0.0"
reqwest = "0.12.15"
serde = "1.0.219"
serde_json = "1.0.140"
tauri = "2.4.1"
tauri-plugin = "2.1.1"
tauri-plugin-shell = "2.2.1"
tokio = "1.44.2"
thiserror = "2.0.12"
ts-rs = "10.1.0"
yaak-common = { path = "yaak-common" }
yaak-http = { path = "yaak-http" }
yaak-models = { path = "yaak-models" }
yaak-plugins = { path = "yaak-plugins" }
yaak-sync = { path = "yaak-sync" }
yaak-sse = { path = "yaak-sse" }
yaak-sync = { path = "yaak-sync" }
yaak-templates = { path = "yaak-templates" }
yaak-crypto = { path = "yaak-crypto" }
```
```
@@ -7,6 +7,7 @@
"*"
],
"permissions": [
"core:app:allow-identifier",
"core:event:allow-emit",
"core:event:allow-listen",
"core:event:allow-unlisten",
@@ -50,8 +51,11 @@
"opener:allow-open-url",
"opener:allow-reveal-item-in-dir",
"shell:allow-open",
"yaak-license:default",
"yaak-crypto:default",
"yaak-git:default",
"yaak-license:default",
"yaak-mac-window:default",
"yaak-models:default",
"yaak-sync:default",
"yaak-ws:default"
]
```
src-tauri/gen/schemas/acl-manifests.json (generated, 1 line): diff suppressed because one or more lines are too long.

src-tauri/gen/schemas/capabilities.json (generated, 1 line removed)

```
@@ -1 +0,0 @@
{"main":{"identifier":"main","description":"Main permissions","local":true,"windows":["*"],"permissions":["core:event:allow-emit","core:event:allow-listen","core:event:allow-unlisten","os:allow-os-type","clipboard-manager:allow-clear","clipboard-manager:allow-write-text","clipboard-manager:allow-read-text","dialog:allow-open","dialog:allow-save","fs:allow-read-dir","fs:allow-read-file","fs:allow-read-text-file",{"identifier":"fs:scope","allow":[{"path":"$APPDATA"},{"path":"$APPDATA/**"}]},"clipboard-manager:allow-read-text","clipboard-manager:allow-write-text","core:webview:allow-set-webview-zoom","core:window:allow-close","core:window:allow-internal-toggle-maximize","core:window:allow-is-fullscreen","core:window:allow-is-maximized","core:window:allow-maximize","core:window:allow-minimize","core:window:allow-set-decorations","core:window:allow-set-title","core:window:allow-show","core:window:allow-start-dragging","core:window:allow-theme","core:window:allow-unmaximize","opener:allow-default-urls","opener:allow-open-path","opener:allow-open-url","opener:allow-reveal-item-in-dir","shell:allow-open","yaak-license:default","yaak-git:default","yaak-sync:default","yaak-ws:default"]}}
```

src-tauri/gen/schemas/desktop-schema.json (generated, 5943 lines): diff suppressed because it is too large.
src-tauri/gen/schemas/linux-schema.json (generated, 5643 lines): diff suppressed because it is too large.
src-tauri/gen/schemas/macOS-schema.json (generated, 5943 lines): diff suppressed because it is too large.
src-tauri/gen/schemas/windows-schema.json (generated, 5643 lines): diff suppressed because it is too large.

```
@@ -0,0 +1,2 @@
ALTER TABLE settings
ADD COLUMN hide_window_controls BOOLEAN DEFAULT FALSE NOT NULL;
```
src-tauri/migrations/20250326193143_key-value-id.sql (new file, +43)

```
@@ -0,0 +1,43 @@
-- 1. Create the new table with `id` as the primary key
CREATE TABLE key_values_new
(
    id TEXT PRIMARY KEY,
    model TEXT DEFAULT 'key_value' NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    deleted_at DATETIME,
    namespace TEXT NOT NULL,
    key TEXT NOT NULL,
    value TEXT NOT NULL
);

-- 2. Copy data from the old table
INSERT INTO key_values_new (id, model, created_at, updated_at, deleted_at, namespace, key, value)
SELECT (
    -- This is the best way to generate a random string in SQLite, apparently
    'kv_' || SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
    SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
    SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
    SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
    SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
    SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
    SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
    SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
    SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1) ||
    SUBSTR('abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789', (ABS(RANDOM()) % 57) + 1, 1)
) AS id,
    model,
    created_at,
    updated_at,
    deleted_at,
    namespace,
    key,
    value
FROM key_values;

-- 3. Drop the old table
DROP TABLE key_values;

-- 4. Rename the new table
ALTER TABLE key_values_new
RENAME TO key_values;
```
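The SUBSTR/RANDOM chain above just builds a ten-character random suffix over a fixed 57-character alphabet. The same ID shape expressed in TypeScript, as an illustrative sketch (the alphabet string is copied from the migration; the function itself is not the app's code):

```ts
// Alphabet copied from the migration above (visually ambiguous characters omitted).
const ALPHABET = 'abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23457789';

function randomKeyValueId(len = 10): string {
  let suffix = '';
  for (let i = 0; i < len; i++) {
    // Pick one character uniformly at random, like each SUBSTR(...) term above
    suffix += ALPHABET[Math.floor(Math.random() * ALPHABET.length)];
  }
  return `kv_${suffix}`;
}
```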
src-tauri/migrations/20250401122407_encrypted-key.sql (new file, +1)

```
@@ -0,0 +1 @@
ALTER TABLE workspace_metas ADD COLUMN encryption_key TEXT NULL DEFAULT NULL;
```

```
@@ -0,0 +1 @@
ALTER TABLE workspaces ADD COLUMN encryption_key_challenge TEXT NULL;
```
src-tauri/migrations/20250424152740_remove-fks.sql (new file, +245)

```
@@ -0,0 +1,245 @@
-- NOTE: SQLite does not support dropping foreign keys, so we need to create new
-- tables and copy data instead. To prevent cascade deletes from wrecking stuff,
-- we start with the leaf tables and finish with the parent tables (eg. folder).

----------------------------
-- Remove http request FK --
----------------------------

CREATE TABLE http_requests_dg_tmp
(
    id TEXT NOT NULL
        PRIMARY KEY,
    model TEXT DEFAULT 'http_request' NOT NULL,
    workspace_id TEXT NOT NULL
        REFERENCES workspaces
            ON DELETE CASCADE,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    deleted_at DATETIME,
    name TEXT NOT NULL,
    url TEXT NOT NULL,
    method TEXT NOT NULL,
    headers TEXT NOT NULL,
    body_type TEXT,
    sort_priority REAL DEFAULT 0 NOT NULL,
    authentication TEXT DEFAULT '{}' NOT NULL,
    authentication_type TEXT,
    folder_id TEXT,
    body TEXT DEFAULT '{}' NOT NULL,
    url_parameters TEXT DEFAULT '[]' NOT NULL,
    description TEXT DEFAULT '' NOT NULL
);

INSERT INTO http_requests_dg_tmp(id, model, workspace_id, created_at, updated_at, deleted_at, name, url, method,
    headers, body_type, sort_priority, authentication, authentication_type, folder_id,
    body, url_parameters, description)
SELECT id,
    model,
    workspace_id,
    created_at,
    updated_at,
    deleted_at,
    name,
    url,
    method,
    headers,
    body_type,
    sort_priority,
    authentication,
    authentication_type,
    folder_id,
    body,
    url_parameters,
    description
FROM http_requests;

DROP TABLE http_requests;

ALTER TABLE http_requests_dg_tmp
RENAME TO http_requests;

----------------------------
-- Remove grpc request FK --
----------------------------

CREATE TABLE grpc_requests_dg_tmp
(
    id TEXT NOT NULL
        PRIMARY KEY,
    model TEXT DEFAULT 'grpc_request' NOT NULL,
    workspace_id TEXT NOT NULL
        REFERENCES workspaces
            ON DELETE CASCADE,
    folder_id TEXT,
    created_at DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
    updated_at DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
    name TEXT NOT NULL,
    sort_priority REAL NOT NULL,
    url TEXT NOT NULL,
    service TEXT,
    method TEXT,
    message TEXT NOT NULL,
    authentication TEXT DEFAULT '{}' NOT NULL,
    authentication_type TEXT,
    metadata TEXT DEFAULT '[]' NOT NULL,
    description TEXT DEFAULT '' NOT NULL
);

INSERT INTO grpc_requests_dg_tmp(id, model, workspace_id, folder_id, created_at, updated_at, name, sort_priority, url,
    service, method, message, authentication, authentication_type, metadata, description)
SELECT id,
    model,
    workspace_id,
    folder_id,
    created_at,
    updated_at,
    name,
    sort_priority,
    url,
    service,
    method,
    message,
    authentication,
    authentication_type,
    metadata,
    description
FROM grpc_requests;

DROP TABLE grpc_requests;

ALTER TABLE grpc_requests_dg_tmp
RENAME TO grpc_requests;

---------------------------------
-- Remove websocket request FK --
---------------------------------

CREATE TABLE websocket_requests_dg_tmp
(
    id TEXT NOT NULL
        PRIMARY KEY,
    model TEXT DEFAULT 'websocket_request' NOT NULL,
    workspace_id TEXT NOT NULL
        REFERENCES workspaces
            ON DELETE CASCADE,
    folder_id TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    deleted_at DATETIME,
    authentication TEXT DEFAULT '{}' NOT NULL,
    authentication_type TEXT,
    description TEXT NOT NULL,
    name TEXT NOT NULL,
    url TEXT NOT NULL,
    headers TEXT NOT NULL,
    message TEXT NOT NULL,
    sort_priority REAL NOT NULL,
    url_parameters TEXT DEFAULT '[]' NOT NULL
);

INSERT INTO websocket_requests_dg_tmp(id, model, workspace_id, folder_id, created_at, updated_at, deleted_at,
    authentication, authentication_type, description, name, url, headers, message,
    sort_priority, url_parameters)
SELECT id,
    model,
    workspace_id,
    folder_id,
    created_at,
    updated_at,
    deleted_at,
    authentication,
    authentication_type,
    description,
    name,
    url,
    headers,
    message,
    sort_priority,
    url_parameters
FROM websocket_requests;

DROP TABLE websocket_requests;

ALTER TABLE websocket_requests_dg_tmp
RENAME TO websocket_requests;

PRAGMA foreign_keys = ON;

---------------------------
-- Remove environment FK --
---------------------------

CREATE TABLE environments_dg_tmp
(
    id TEXT NOT NULL
        PRIMARY KEY,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    deleted_at DATETIME,
    workspace_id TEXT NOT NULL
        REFERENCES workspaces
            ON DELETE CASCADE,
    name TEXT NOT NULL,
    variables DEFAULT '[]' NOT NULL,
    model TEXT DEFAULT 'environment',
    environment_id TEXT
);

INSERT INTO environments_dg_tmp(id, created_at, updated_at, deleted_at, workspace_id, name, variables, model,
    environment_id)
SELECT id,
    created_at,
    updated_at,
    deleted_at,
    workspace_id,
    name,
    variables,
    model,
    environment_id
FROM environments;

DROP TABLE environments;

ALTER TABLE environments_dg_tmp
RENAME TO environments;

----------------------
-- Remove folder FK --
----------------------

CREATE TABLE folders_dg_tmp
(
    id TEXT NOT NULL
        PRIMARY KEY,
    model TEXT DEFAULT 'folder' NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
    deleted_at DATETIME,
    workspace_id TEXT NOT NULL
        REFERENCES workspaces
            ON DELETE CASCADE,
    folder_id TEXT,
    name TEXT NOT NULL,
    sort_priority REAL DEFAULT 0 NOT NULL,
    description TEXT DEFAULT '' NOT NULL
);

INSERT INTO folders_dg_tmp(id, model, created_at, updated_at, deleted_at, workspace_id, folder_id, name, sort_priority,
    description)
SELECT id,
    model,
    created_at,
    updated_at,
    deleted_at,
    workspace_id,
    folder_id,
    name,
    sort_priority,
    description
FROM folders;

DROP TABLE folders;

ALTER TABLE folders_dg_tmp
RENAME TO folders;
```
```
@@ -0,0 +1,11 @@
-- There used to be sync code that skipped over environments because we didn't
-- want to sync potentially insecure data. With encryption, it is now possible
-- to sync environments securely. However, there were already sync states in the
-- DB that marked environments as "Synced". Running the sync code on these envs
-- would mark them as deleted by FS (exist in SyncState but not on FS).
--
-- To undo this mess, we have this migration to delete all environment-related
-- sync states so we can sync from a clean slate.
DELETE
FROM sync_states
WHERE model_id LIKE 'ev_%';
```
src-tauri/migrations/20250508161145_public-environments.sql (new file, +20)

```
@@ -0,0 +1,20 @@
-- Add a public column to represent whether an environment can be shared or exported
ALTER TABLE environments
ADD COLUMN public BOOLEAN DEFAULT FALSE;

-- Add a base column to represent whether an environment is a base or sub environment. We used to
-- do this with environment_id, but we need a more flexible solution now that envs can be optionally
-- synced. E.g., it's now possible to only import a sub environment from a different client without
-- its base environment "parent."
ALTER TABLE environments
ADD COLUMN base BOOLEAN DEFAULT FALSE;

-- SQLite doesn't support dynamic default values, so we update `base` based on the value of
-- environment_id.
UPDATE environments
SET base = TRUE
WHERE environment_id IS NULL;

-- Finally, we drop the old `environment_id` column that will no longer be used
ALTER TABLE environments
DROP COLUMN environment_id;
```
src-tauri/src/commands.rs (new file, +40)

```
@@ -0,0 +1,40 @@
use crate::error::Result;
use tauri::{command, AppHandle, Manager, Runtime, WebviewWindow};
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
use yaak_crypto::manager::EncryptionManagerExt;
use yaak_plugins::events::PluginWindowContext;
use yaak_plugins::native_template_functions::{decrypt_secure_template_function, encrypt_secure_template_function};

#[command]
pub(crate) async fn cmd_show_workspace_key<R: Runtime>(
    window: WebviewWindow<R>,
    workspace_id: &str,
) -> Result<()> {
    let key = window.crypto().reveal_workspace_key(workspace_id)?;
    window
        .dialog()
        .message(format!("Your workspace key is \n\n{}", key))
        .kind(MessageDialogKind::Info)
        .show(|_v| {});
    Ok(())
}

#[command]
pub(crate) async fn cmd_decrypt_template<R: Runtime>(
    window: WebviewWindow<R>,
    template: &str,
) -> Result<String> {
    let app_handle = window.app_handle();
    let window_context = &PluginWindowContext::new(&window);
    Ok(decrypt_secure_template_function(&app_handle, window_context, template)?)
}

#[command]
pub(crate) async fn cmd_secure_template<R: Runtime>(
    app_handle: AppHandle<R>,
    window: WebviewWindow<R>,
    template: &str,
) -> Result<String> {
    let window_context = &PluginWindowContext::new(&window);
    Ok(encrypt_secure_template_function(&app_handle, window_context, template)?)
}
```
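From the webview these commands are reachable through Tauri's invoke. A hedged sketch (command and parameter names come from the Rust above; the import path assumes Tauri v2's @tauri-apps/api package):

```ts
import { invoke } from '@tauri-apps/api/core';

// Encrypt a template string, then decrypt it again.
const secured = await invoke<string>('cmd_secure_template', { template: 'my-secret' });
const plain = await invoke<string>('cmd_decrypt_template', { template: secured });

// Show the current workspace's key in a native dialog. The camelCase key maps
// to the Rust `workspace_id` parameter under Tauri's default convention.
await invoke('cmd_show_workspace_key', { workspaceId: 'wk_example' }); // hypothetical ID
```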
src-tauri/src/error.rs (new file, +62)

```
@@ -0,0 +1,62 @@
use std::io;
use serde::{Serialize, Serializer};
use thiserror::Error;

#[derive(Error, Debug)]
pub enum Error {
    #[error(transparent)]
    TemplateError(#[from] yaak_templates::error::Error),

    #[error(transparent)]
    ModelError(#[from] yaak_models::error::Error),

    #[error(transparent)]
    SyncError(#[from] yaak_sync::error::Error),

    #[error(transparent)]
    CryptoError(#[from] yaak_crypto::error::Error),

    #[error(transparent)]
    GitError(#[from] yaak_git::error::Error),

    #[error(transparent)]
    WebsocketError(#[from] yaak_ws::error::Error),

    #[error(transparent)]
    LicenseError(#[from] yaak_license::error::Error),

    #[error(transparent)]
    PluginError(#[from] yaak_plugins::error::Error),

    #[error("Updater error: {0}")]
    UpdaterError(#[from] tauri_plugin_updater::Error),

    #[error("JSON error: {0}")]
    JsonError(#[from] serde_json::error::Error),

    #[error("Tauri error: {0}")]
    TauriError(#[from] tauri::Error),

    #[error("Event source error: {0}")]
    EventSourceError(#[from] eventsource_client::Error),

    #[error("I/O error: {0}")]
    IOError(#[from] io::Error),

    #[error("Request error: {0}")]
    RequestError(#[from] reqwest::Error),

    #[error("Generic error: {0}")]
    GenericError(String),
}

impl Serialize for Error {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.to_string().as_ref())
    }
}

pub type Result<T> = std::result::Result<T, Error>;
```
```
@@ -1,10 +1,14 @@
use std::collections::BTreeMap;

use crate::error::Result;
use KeyAndValueRef::{Ascii, Binary};

use tauri::{Manager, Runtime, WebviewWindow};
use yaak_grpc::{KeyAndValueRef, MetadataMap};
use yaak_models::models::GrpcRequest;
use yaak_plugins::events::{CallHttpAuthenticationRequest, HttpHeader};
use yaak_plugins::manager::PluginManager;

pub fn metadata_to_map(metadata: MetadataMap) -> BTreeMap<String, String> {
pub(crate) fn metadata_to_map(metadata: MetadataMap) -> BTreeMap<String, String> {
let mut entries = BTreeMap::new();
for r in metadata.iter() {
match r {
@@ -14,3 +18,48 @@ pub fn metadata_to_map(metadata: MetadataMap) -> BTreeMap<String, String> {
}
entries
}

pub(crate) async fn build_metadata<R: Runtime>(
window: &WebviewWindow<R>,
request: &GrpcRequest,
) -> Result<BTreeMap<String, String>> {
let plugin_manager = window.state::<PluginManager>();
let mut metadata = BTreeMap::new();

// Add the rest of metadata
for h in request.clone().metadata {
if h.name.is_empty() && h.value.is_empty() {
continue;
}

if !h.enabled {
continue;
}

metadata.insert(h.name, h.value);
}

if let Some(auth_name) = request.authentication_type.clone() {
let auth = request.authentication.clone();
let plugin_req = CallHttpAuthenticationRequest {
context_id: format!("{:x}", md5::compute(request.id.clone())),
values: serde_json::from_value(serde_json::to_value(&auth).unwrap()).unwrap(),
method: "POST".to_string(),
url: request.url.clone(),
headers: metadata
.iter()
.map(|(name, value)| HttpHeader {
name: name.to_string(),
value: value.to_string(),
})
.collect(),
};
let plugin_result =
plugin_manager.call_http_authentication(&window, &auth_name, plugin_req).await?;
for header in plugin_result.set_headers {
metadata.insert(header.name, header.value);
}
}

Ok(metadata)
}
```
```
@@ -1,9 +1,6 @@
use tauri::{Manager, Runtime, WebviewWindow};

use yaak_models::queries::{
get_key_value_int, get_key_value_string,
set_key_value_int, set_key_value_string, UpdateSource,
};
use tauri::{AppHandle, Runtime};
use yaak_models::query_manager::QueryManagerExt;
use yaak_models::util::UpdateSource;

const NAMESPACE: &str = "analytics";
const NUM_LAUNCHES_KEY: &str = "num_launches";
@@ -16,33 +13,32 @@ pub struct LaunchEventInfo {
pub num_launches: i32,
}

pub async fn store_launch_history<R: Runtime>(w: &WebviewWindow<R>) -> LaunchEventInfo {
pub async fn store_launch_history<R: Runtime>(app_handle: &AppHandle<R>) -> LaunchEventInfo {
let last_tracked_version_key = "last_tracked_version";

let mut info = LaunchEventInfo::default();

info.num_launches = get_num_launches(w).await + 1;
info.previous_version = get_key_value_string(w, NAMESPACE, last_tracked_version_key, "").await;
info.current_version = w.package_info().version.to_string();
info.num_launches = get_num_launches(app_handle).await + 1;
info.current_version = app_handle.package_info().version.to_string();

if info.previous_version.is_empty() {
} else {
info.launched_after_update = info.current_version != info.previous_version;
};
app_handle
.with_tx(|tx| {
info.previous_version =
tx.get_key_value_string(NAMESPACE, last_tracked_version_key, "");

// Update key values
if !info.previous_version.is_empty() {
info.launched_after_update = info.current_version != info.previous_version;
};

set_key_value_string(
w,
NAMESPACE,
last_tracked_version_key,
info.current_version.as_str(),
&UpdateSource::Background,
)
.await;

set_key_value_int(w, NAMESPACE, NUM_LAUNCHES_KEY, info.num_launches, &UpdateSource::Background)
.await;
// Update key values

let source = &UpdateSource::Background;
let version = info.current_version.as_str();
tx.set_key_value_string(NAMESPACE, last_tracked_version_key, version, source);
tx.set_key_value_int(NAMESPACE, NUM_LAUNCHES_KEY, info.num_launches, source);
Ok(())
})
.unwrap();

info
}
@@ -59,6 +55,6 @@ pub fn get_os() -> &'static str {
}
}

pub async fn get_num_launches<R: Runtime>(w: &WebviewWindow<R>) -> i32 {
get_key_value_int(w, NAMESPACE, NUM_LAUNCHES_KEY, 0).await
pub async fn get_num_launches<R: Runtime>(app_handle: &AppHandle<R>) -> i32 {
app_handle.db().get_key_value_int(NAMESPACE, NUM_LAUNCHES_KEY, 0)
}
```
@@ -1,14 +1,16 @@
use crate::error::Error::GenericError;
use crate::error::Result;
use crate::render::render_http_request;
use crate::response_err;
use http::header::{ACCEPT, USER_AGENT};
use http::{HeaderMap, HeaderName, HeaderValue, Uri};
use http::{HeaderMap, HeaderName, HeaderValue};
use log::{debug, error, warn};
use mime_guess::Mime;
use reqwest::redirect::Policy;
use reqwest::{multipart, Proxy, Url};
use reqwest::{Method, Response};
use rustls::crypto::ring;
use reqwest::{Proxy, Url, multipart};
use rustls::ClientConfig;
use rustls::crypto::ring;
use rustls_platform_verifier::BuilderVerifierExt;
use serde_json::Value;
use std::collections::BTreeMap;
@@ -18,20 +20,18 @@ use std::sync::Arc;
use std::time::Duration;
use tauri::{Manager, Runtime, WebviewWindow};
use tokio::fs;
use tokio::fs::{create_dir_all, File};
use tokio::fs::{File, create_dir_all};
use tokio::io::AsyncWriteExt;
use tokio::sync::watch::Receiver;
use tokio::sync::{oneshot, Mutex};
use tokio::sync::{Mutex, oneshot};
use yaak_models::models::{
Cookie, CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseHeader,
HttpResponseState, ProxySetting, ProxySettingAuth,
};
use yaak_models::queries::{
get_base_environment, get_http_response, get_or_create_settings, get_workspace,
update_response_if_id, upsert_cookie_jar, UpdateSource,
};
use yaak_models::query_manager::QueryManagerExt;
use yaak_models::util::UpdateSource;
use yaak_plugins::events::{
CallHttpAuthenticationRequest, HttpHeader, RenderPurpose, WindowContext,
CallHttpAuthenticationRequest, HttpHeader, PluginWindowContext, RenderPurpose,
};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::template_callback::PluginTemplateCallback;
@@ -43,27 +43,46 @@ pub async fn send_http_request<R: Runtime>(
environment: Option<Environment>,
cookie_jar: Option<CookieJar>,
cancelled_rx: &mut Receiver<bool>,
) -> Result<HttpResponse, String> {
let plugin_manager = window.state::<PluginManager>();
let workspace = get_workspace(window, &unrendered_request.workspace_id)
.await
.expect("Failed to get Workspace");
let base_environment = get_base_environment(window, &unrendered_request.workspace_id)
.await
.expect("Failed to get base environment");
let settings = get_or_create_settings(window).await;
let cb = PluginTemplateCallback::new(
window.app_handle(),
&WindowContext::from_window(window),
RenderPurpose::Send,
);
) -> Result<HttpResponse> {
let app_handle = window.app_handle().clone();
let plugin_manager = app_handle.state::<PluginManager>();
let (settings, workspace) = {
let db = window.db();
let settings = db.get_settings();
let workspace = db.get_workspace(&unrendered_request.workspace_id)?;
(settings, workspace)
};
let base_environment =
app_handle.db().get_base_environment(&unrendered_request.workspace_id)?;

let response_id = og_response.id.clone();
let response = Arc::new(Mutex::new(og_response.clone()));

let request =
render_http_request(&unrendered_request, &base_environment, environment.as_ref(), &cb)
.await;
let cb = PluginTemplateCallback::new(
window.app_handle(),
&PluginWindowContext::new(window),
RenderPurpose::Send,
);
let update_source = UpdateSource::from_window(window);

let request = match render_http_request(
&unrendered_request,
&base_environment,
environment.as_ref(),
&cb,
)
.await
{
Ok(r) => r,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
));
}
};

let mut url_string = request.url;
@@ -104,7 +123,12 @@ pub async fn send_http_request<R: Runtime>(

match settings.proxy {
Some(ProxySetting::Disabled) => client_builder = client_builder.no_proxy(),
Some(ProxySetting::Enabled { http, https, auth }) => {
Some(ProxySetting::Enabled {
http,
https,
auth,
disabled,
}) if !disabled => {
debug!("Using proxy http={http} https={https}");
let mut proxy = Proxy::custom(move |url| {
let http = if http.is_empty() { None } else { Some(http.to_owned()) };
@@ -124,7 +148,7 @@ pub async fn send_http_request<R: Runtime>(

client_builder = client_builder.proxy(proxy);
}
None => {} // Nothing to do for this one, as it is the default
_ => {} // Nothing to do for this one, as it is the default
}

// Add cookie store if specified
@@ -139,10 +163,9 @@ pub async fn send_http_request<R: Runtime>(
serde_json::from_value(json_cookie).expect("Failed to deserialize cookie")
})
.map(|c| Ok(c))
.collect::<Vec<Result<_, ()>>>();
.collect::<Vec<Result<_>>>();

let store = reqwest_cookie_store::CookieStore::from_cookies(cookies, true)
.expect("Failed to create cookie store");
let store = reqwest_cookie_store::CookieStore::from_cookies(cookies, true)?;
let cookie_store = reqwest_cookie_store::CookieStoreMutex::new(store);
let cookie_store = Arc::new(cookie_store);
client_builder = client_builder.cookie_provider(Arc::clone(&cookie_store));
@@ -158,7 +181,7 @@ pub async fn send_http_request<R: Runtime>(
));
}

let client = client_builder.build().expect("Failed to build client");
let client = client_builder.build()?;

// Render query parameters
let mut query_params = Vec::new();
@@ -169,32 +192,20 @@ pub async fn send_http_request<R: Runtime>(
query_params.push((p.name, p.value));
}

let uri = match Uri::from_str(url_string.as_str()) {
let url = match Url::from_str(&url_string) {
Ok(u) => u,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
format!("Failed to parse URL \"{}\": {}", url_string, e.to_string()),
window,
)
.await);
}
};
// Yes, we're parsing both URI and URL because they could return different errors
let url = match Url::from_str(uri.to_string().as_str()) {
Ok(u) => u,
Err(e) => {
return Ok(response_err(
&*response.lock().await,
format!("Failed to parse URL \"{}\": {}", url_string, e.to_string()),
window,
)
.await);
&update_source,
));
}
};

let m = Method::from_bytes(request.method.to_uppercase().as_bytes())
.expect("Failed to create method");
let m = Method::from_str(&request.method.to_uppercase())
.map_err(|e| GenericError(e.to_string()))?;
let mut request_builder = client.request(m, url).query(&query_params);

let mut headers = HeaderMap::new();
@@ -282,7 +293,7 @@ pub async fn send_http_request<R: Runtime>(
} else if body_type == "binary" && request_body.contains_key("filePath") {
let file_path = request_body
.get("filePath")
.ok_or("filePath not set")?
.ok_or(GenericError("filePath not set".to_string()))?
.as_str()
.unwrap_or_default();

@@ -291,7 +302,12 @@ pub async fn send_http_request<R: Runtime>(
request_builder = request_builder.body(f);
}
Err(e) => {
return Ok(response_err(&*response.lock().await, e, window).await);
return Ok(response_err(
&app_handle,
&*response.lock().await,
e,
&update_source,
));
}
}
} else if body_type == "multipart/form-data" && request_body.contains_key("form") {
@@ -318,11 +334,11 @@ pub async fn send_http_request<R: Runtime>(
Ok(f) => multipart::Part::bytes(f),
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
window,
)
.await);
&update_source,
));
}
}
};
@@ -335,11 +351,11 @@ pub async fn send_http_request<R: Runtime>(
Ok(p) => p,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
format!("Invalid mime for multi-part entry {e:?}"),
window,
)
.await);
&update_source,
));
}
};
} else if !file_path.is_empty() {
@@ -351,16 +367,16 @@ pub async fn send_http_request<R: Runtime>(
Ok(p) => p,
Err(e) => {
return Ok(response_err(
&app_handle,
&*response.lock().await,
format!("Invalid mime for multi-part entry {e:?}"),
window,
)
.await);
&update_source,
));
}
};
}

// Set file path if not empty
// Set file path if it is not empty
if !file_path.is_empty() {
let filename = PathBuf::from(file_path)
.file_name()
@@ -383,6 +399,15 @@ pub async fn send_http_request<R: Runtime>(
} else {
warn!("Unsupported body type: {}", body_type);
}
} else {
// No body set
let method = request.method.to_ascii_lowercase();
let is_body_method = method == "post" || method == "put" || method == "patch";
// Add Content-Length for methods that commonly accept a body because some servers
// will error if they don't receive it.
if is_body_method && !headers.contains_key("content-length") {
headers.insert("Content-Length", HeaderValue::from_static("0"));
}
}

// Add headers last, because previous steps may modify them
@@ -392,7 +417,12 @@ pub async fn send_http_request<R: Runtime>(
Ok(r) => r,
Err(e) => {
warn!("Failed to build request builder {e:?}");
return Ok(response_err(&*response.lock().await, e.to_string(), window).await);
return Ok(response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
));
}
};
@@ -414,11 +444,16 @@ pub async fn send_http_request<R: Runtime>(
})
.collect(),
};
let auth_result = plugin_manager.call_http_authentication(window, &auth_name, req).await;
let auth_result = plugin_manager.call_http_authentication(&window, &auth_name, req).await;
let plugin_result = match auth_result {
Ok(r) => r,
Err(e) => {
return Ok(response_err(&*response.lock().await, e.to_string(), window).await);
return Ok(response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
));
}
};

@@ -431,7 +466,7 @@ pub async fn send_http_request<R: Runtime>(
}
}

let (resp_tx, resp_rx) = oneshot::channel::<Result<Response, reqwest::Error>>();
let (resp_tx, resp_rx) = oneshot::channel::<std::result::Result<Response, reqwest::Error>>();
let (done_tx, done_rx) = oneshot::channel::<HttpResponse>();

let start = std::time::Instant::now();
@@ -443,22 +478,26 @@ pub async fn send_http_request<R: Runtime>(
let raw_response = tokio::select! {
Ok(r) = resp_rx => r,
_ = cancelled_rx.changed() => {
debug!("Request cancelled");
return Ok(response_err(&*response.lock().await, "Request was cancelled".to_string(), window).await);
let mut r = response.lock().await;
r.elapsed_headers = start.elapsed().as_millis() as i32;
r.elapsed = start.elapsed().as_millis() as i32;
return Ok(response_err(&app_handle, &r, "Request was cancelled".to_string(), &update_source));
}
};

{
let app_handle = app_handle.clone();
let window = window.clone();
let cancelled_rx = cancelled_rx.clone();
let response_id = response_id.clone();
let response = response.clone();
let update_source = update_source.clone();
tokio::spawn(async move {
match raw_response {
Ok(mut v) => {
let content_length = v.content_length();
let response_headers = v.headers().clone();
let dir = window.app_handle().path().app_data_dir().unwrap();
let dir = app_handle.path().app_data_dir().unwrap();
let base_dir = dir.join("responses");
create_dir_all(base_dir.clone()).await.expect("Failed to create responses dir");
let body_path = if response_id.is_empty() {
@@ -471,6 +510,7 @@ pub async fn send_http_request<R: Runtime>(
let mut r = response.lock().await;
r.body_path = Some(body_path.to_str().unwrap().to_string());
r.elapsed_headers = start.elapsed().as_millis() as i32;
r.elapsed = start.elapsed().as_millis() as i32;
r.status = v.status().as_u16() as i32;
r.status_reason = v.status().canonical_reason().map(|s| s.to_string());
r.headers = response_headers
@@ -492,8 +532,9 @@ pub async fn send_http_request<R: Runtime>(
};

r.state = HttpResponseState::Connected;
update_response_if_id(&window, &r, &UpdateSource::Window)
.await
app_handle
.db()
.update_http_response_if_id(&r, &update_source)
.expect("Failed to update response after connected");
}

@@ -521,21 +562,27 @@ pub async fn send_http_request<R: Runtime>(
f.flush().await.expect("Failed to flush file");
written_bytes += bytes.len();
r.content_length = Some(written_bytes as i32);
update_response_if_id(&window, &r, &UpdateSource::Window)
.await
app_handle
.db()
.update_http_response_if_id(&r, &update_source)
.expect("Failed to update response");
}
Ok(None) => {
break;
}
Err(e) => {
response_err(&*response.lock().await, e.to_string(), &window).await;
response_err(
&app_handle,
&*response.lock().await,
e.to_string(),
&update_source,
);
break;
}
}
}

// Set final content length
// Set the final content length
{
let mut r = response.lock().await;
r.content_length = match content_length {
@@ -543,8 +590,9 @@ pub async fn send_http_request<R: Runtime>(
None => Some(written_bytes as i32),
};
r.state = HttpResponseState::Closed;
update_response_if_id(&window, &r, &UpdateSource::Window)
.await
app_handle
.db()
.update_http_response_if_id(&r, &UpdateSource::from_window(&window))
.expect("Failed to update response");
};

@@ -569,8 +617,9 @@ pub async fn send_http_request<R: Runtime>(
})
.collect::<Vec<_>>();
cookie_jar.cookies = json_cookies;
if let Err(e) =
upsert_cookie_jar(&window, &cookie_jar, &UpdateSource::Window).await
if let Err(e) = app_handle
.db()
.upsert_cookie_jar(&cookie_jar, &UpdateSource::from_window(&window))
{
error!("Failed to update cookie jar: {}", e);
};
@@ -578,7 +627,12 @@ pub async fn send_http_request<R: Runtime>(
}
Err(e) => {
warn!("Failed to execute request {e}");
response_err(&*response.lock().await, format!("{e} → {e:?}"), &window).await;
response_err(
&app_handle,
&*response.lock().await,
format!("{e} → {e:?}"),
&update_source,
);
}
};

@@ -587,16 +641,20 @@ pub async fn send_http_request<R: Runtime>(
});
};

let app_handle = app_handle.clone();
Ok(tokio::select! {
Ok(r) = done_rx => r,
_ = cancelled_rx.changed() => {
match get_http_response(window, response_id.as_str()).await {
match app_handle.with_db(|c| c.get_http_response(&response_id)) {
Ok(mut r) => {
r.state = HttpResponseState::Closed;
update_response_if_id(&window, &r, &UpdateSource::Window).await.expect("Failed to update response")
r.elapsed = start.elapsed().as_millis() as i32;
r.elapsed_headers = start.elapsed().as_millis() as i32;
app_handle.db().update_http_response_if_id(&r, &UpdateSource::from_window(window))
.expect("Failed to update response")
},
_ => {
response_err(&*response.lock().await, "Ephemeral request was cancelled".to_string(), &window).await
response_err(&app_handle, &*response.lock().await, "Ephemeral request was cancelled".to_string(), &update_source)
}.clone(),
}
}
src-tauri/src/lib.rs (1759 changes)
File diff suppressed because it is too large
@@ -1,13 +1,16 @@
use std::time::SystemTime;

use crate::error::Result;
use crate::history::{get_num_launches, get_os};
use chrono::{DateTime, Duration, Utc};
use log::debug;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tauri::{Emitter, Manager, Runtime, WebviewWindow};
use yaak_models::queries::{get_key_value_raw, set_key_value_raw, UpdateSource};
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
use yaak_license::{LicenseCheckStatus, check_license};
use yaak_models::query_manager::QueryManagerExt;
use yaak_models::util::UpdateSource;

// Check for updates every hour
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;
@@ -43,16 +46,23 @@ impl YaakNotifier {
}
}

pub async fn seen<R: Runtime>(&mut self, w: &WebviewWindow<R>, id: &str) -> Result<(), String> {
let mut seen = get_kv(w).await?;
pub async fn seen<R: Runtime>(&mut self, window: &WebviewWindow<R>, id: &str) -> Result<()> {
let app_handle = window.app_handle();
let mut seen = get_kv(app_handle).await?;
seen.push(id.to_string());
debug!("Marked notification as seen {}", id);
let seen_json = serde_json::to_string(&seen).map_err(|e| e.to_string())?;
set_key_value_raw(w, KV_NAMESPACE, KV_KEY, seen_json.as_str(), &UpdateSource::Window).await;
let seen_json = serde_json::to_string(&seen)?;
window.db().set_key_value_raw(
KV_NAMESPACE,
KV_KEY,
seen_json.as_str(),
&UpdateSource::from_window(window),
);
Ok(())
}

pub async fn check<R: Runtime>(&mut self, window: &WebviewWindow<R>) -> Result<(), String> {
pub async fn check<R: Runtime>(&mut self, window: &WebviewWindow<R>) -> Result<()> {
let app_handle = window.app_handle();
let ignore_check = self.last_check.elapsed().unwrap().as_secs() < MAX_UPDATE_CHECK_SECONDS;

if ignore_check {
@@ -61,22 +71,31 @@ impl YaakNotifier {

self.last_check = SystemTime::now();

let num_launches = get_num_launches(window).await;
let info = window.app_handle().package_info().clone();
let license_check = match check_license(window).await? {
LicenseCheckStatus::PersonalUse { .. } => "personal".to_string(),
LicenseCheckStatus::CommercialUse => "commercial".to_string(),
LicenseCheckStatus::InvalidLicense => "invalid_license".to_string(),
LicenseCheckStatus::Trialing { .. } => "trialing".to_string(),
};
let settings = window.db().get_settings();
let num_launches = get_num_launches(app_handle).await;
let info = app_handle.package_info().clone();
let req = reqwest::Client::default()
.request(Method::GET, "https://notify.yaak.app/notifications")
.query(&[
("version", info.version.to_string().as_str()),
("launches", num_launches.to_string().as_str()),
("platform", get_os())
("installed", settings.created_at.format("%Y-%m-%d").to_string().as_str()),
("license", &license_check),
("platform", get_os()),
]);
let resp = req.send().await.map_err(|e| e.to_string())?;
let resp = req.send().await?;
if resp.status() != 200 {
debug!("Skipping notification status code {}", resp.status());
return Ok(());
}

let result = resp.json::<Value>().await.map_err(|e| e.to_string())?;
let result = resp.json::<Value>().await?;

// Support both single and multiple notifications.
// TODO: Remove support for single after April 2025
@@ -90,23 +109,24 @@ impl YaakNotifier {

for notification in notifications {
let age = notification.timestamp.signed_duration_since(Utc::now());
let seen = get_kv(window).await?;
let seen = get_kv(app_handle).await?;
if seen.contains(&notification.id) || (age > Duration::days(2)) {
debug!("Already seen notification {}", notification.id);
return Ok(());
continue;
}
debug!("Got notification {:?}", notification);

let _ = window.emit_to(window.label(), "notification", notification.clone());
let _ = app_handle.emit_to(window.label(), "notification", notification.clone());
break; // Only show one notification
}

Ok(())
}
}

async fn get_kv<R: Runtime>(w: &WebviewWindow<R>) -> Result<Vec<String>, String> {
match get_key_value_raw(w, "notifications", "seen").await {
async fn get_kv<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Vec<String>> {
match app_handle.db().get_key_value_raw("notifications", "seen") {
None => Ok(Vec::new()),
Some(v) => serde_json::from_str(&v.value).map_err(|e| e.to_string()),
Some(v) => Ok(serde_json::from_str(&v.value)?),
}
}
@@ -1,6 +1,6 @@
use crate::http_request::send_http_request;
use crate::render::{render_http_request, render_json_value};
use crate::window::{create_window, CreateWindowConfig};
use crate::window::{CreateWindowConfig, create_window};
use crate::{
call_frontend, cookie_jar_from_window, environment_from_window, get_window_from_window_context,
workspace_from_window,
@@ -10,16 +10,13 @@ use log::warn;
use tauri::{AppHandle, Emitter, Manager, Runtime, State};
use tauri_plugin_clipboard_manager::ClipboardExt;
use yaak_models::models::{HttpResponse, Plugin};
use yaak_models::queries::{
create_default_http_response, delete_plugin_key_value, get_base_environment, get_http_request,
get_plugin_key_value, list_http_responses_for_request, list_plugins, set_plugin_key_value,
upsert_plugin, UpdateSource,
};
use yaak_models::query_manager::QueryManagerExt;
use yaak_models::util::UpdateSource;
use yaak_plugins::events::{
Color, DeleteKeyValueResponse, EmptyPayload, FindHttpResponsesResponse,
GetHttpRequestByIdResponse, GetKeyValueResponse, Icon, InternalEvent, InternalEventPayload,
RenderHttpRequestResponse, SendHttpRequestResponse, SetKeyValueResponse, ShowToastRequest,
TemplateRenderResponse, WindowContext, WindowNavigateEvent,
PluginWindowContext, RenderHttpRequestResponse, SendHttpRequestResponse, SetKeyValueResponse,
ShowToastRequest, TemplateRenderResponse, WindowNavigateEvent,
};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::plugin_handle::PluginHandle;
@@ -42,7 +39,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
}
InternalEventPayload::ShowToastRequest(req) => {
match window_context {
WindowContext::Label { label } => app_handle
PluginWindowContext::Label { label, .. } => app_handle
.emit_to(label, "show_toast", req)
.expect("Failed to emit show_toast to window"),
_ => app_handle.emit("show_toast", req).expect("Failed to emit show_toast"),
@@ -55,19 +52,16 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
call_frontend(window, event).await
}
InternalEventPayload::FindHttpResponsesRequest(req) => {
let http_responses = list_http_responses_for_request(
app_handle,
req.request_id.as_str(),
req.limit.map(|l| l as i64),
)
.await
.unwrap_or_default();
let http_responses = app_handle
.db()
.list_http_responses_for_request(&req.request_id, req.limit.map(|l| l as u64))
.unwrap_or_default();
Some(InternalEventPayload::FindHttpResponsesResponse(FindHttpResponsesResponse {
http_responses,
}))
}
InternalEventPayload::GetHttpRequestByIdRequest(req) => {
let http_request = get_http_request(app_handle, req.id.as_str()).await.unwrap();
let http_request = app_handle.db().get_http_request(&req.id).ok();
Some(InternalEventPayload::GetHttpRequestByIdResponse(GetHttpRequestByIdResponse {
http_request,
}))
@@ -76,12 +70,12 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let window = get_window_from_window_context(app_handle, &window_context)
.expect("Failed to find window for render http request");

let workspace = workspace_from_window(&window)
.await
.expect("Failed to get workspace_id from window URL");
let environment = environment_from_window(&window).await;
let base_environment = get_base_environment(&window, workspace.id.as_str())
.await
let workspace =
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
let environment = environment_from_window(&window);
let base_environment = app_handle
.db()
.get_base_environment(&workspace.id)
.expect("Failed to get base environment");
let cb = PluginTemplateCallback::new(app_handle, &window_context, req.purpose);
let http_request = render_http_request(
@@ -90,7 +84,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
environment.as_ref(),
&cb,
)
.await;
.await
.expect("Failed to render http request");
Some(InternalEventPayload::RenderHttpRequestResponse(RenderHttpRequestResponse {
http_request,
}))
@@ -99,23 +94,22 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let window = get_window_from_window_context(app_handle, &window_context)
.expect("Failed to find window for render");

let workspace = workspace_from_window(&window)
.await
.expect("Failed to get workspace_id from window URL");
let environment = environment_from_window(&window).await;
let base_environment = get_base_environment(&window, workspace.id.as_str())
.await
let workspace =
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
let environment = environment_from_window(&window);
let base_environment = app_handle
.db()
.get_base_environment(&workspace.id)
.expect("Failed to get base environment");
let cb = PluginTemplateCallback::new(app_handle, &window_context, req.purpose);
let data =
render_json_value(req.data, &base_environment, environment.as_ref(), &cb).await;
let data = render_json_value(req.data, &base_environment, environment.as_ref(), &cb)
.await
.expect("Failed to render template");
Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data }))
}
InternalEventPayload::ErrorResponse(resp) => {
let window = get_window_from_window_context(app_handle, &window_context)
.expect("Failed to find window for plugin reload");
let toast_event = plugin_handle.build_event_to_send(
&WindowContext::from_window(&window),
&window_context,
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
message: format!(
"Plugin error from {}: {}",
@@ -131,9 +125,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
None
}
InternalEventPayload::ReloadResponse(_) => {
let window = get_window_from_window_context(app_handle, &window_context)
.expect("Failed to find window for plugin reload");
let plugins = list_plugins(app_handle).await.unwrap();
let plugins = app_handle.db().list_plugins().unwrap();
for plugin in plugins {
if plugin.directory != plugin_handle.dir {
continue;
@@ -143,10 +135,10 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
updated_at: Utc::now().naive_utc(), // TODO: Add reloaded_at field to use instead
..plugin
};
upsert_plugin(&window, new_plugin, &UpdateSource::Plugin).await.unwrap();
app_handle.db().upsert_plugin(&new_plugin, &UpdateSource::Plugin).unwrap();
}
let toast_event = plugin_handle.build_event_to_send(
&WindowContext::from_window(&window),
&window_context,
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
message: format!("Reloaded plugin {}", plugin_handle.dir),
icon: Some(Icon::Info),
@@ -161,32 +153,35 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let window = get_window_from_window_context(app_handle, &window_context)
.expect("Failed to find window for sending HTTP request");
let mut http_request = req.http_request;
let workspace = workspace_from_window(&window)
.await
.expect("Failed to get workspace_id from window URL");
let cookie_jar = cookie_jar_from_window(&window).await;
let environment = environment_from_window(&window).await;
let workspace =
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
let cookie_jar = cookie_jar_from_window(&window);
let environment = environment_from_window(&window);

if http_request.workspace_id.is_empty() {
http_request.workspace_id = workspace.id;
}

let resp = if http_request.id.is_empty() {
HttpResponse::new()
let http_response = if http_request.id.is_empty() {
HttpResponse::default()
} else {
create_default_http_response(
&window,
http_request.id.as_str(),
&UpdateSource::Plugin,
)
.await
.unwrap()
window
.db()
.upsert_http_response(
&HttpResponse {
request_id: http_request.id.clone(),
workspace_id: http_request.workspace_id.clone(),
..Default::default()
},
&UpdateSource::Plugin,
)
.unwrap()
};

let result = send_http_request(
&window,
&http_request,
&resp,
&http_response,
environment,
cookie_jar,
&mut tokio::sync::watch::channel(false).1, // No-op cancel channel
@@ -221,13 +216,12 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
{
let event_id = event.id.clone();
let plugin_handle = plugin_handle.clone();
let label = label.clone();
let window_context = window_context.clone();
tauri::async_runtime::spawn(async move {
while let Some(url) = navigation_rx.recv().await {
let url = url.to_string();
let label = label.clone();
let event_to_send = plugin_handle.build_event_to_send(
&WindowContext::Label { label },
&window_context, // NOTE: Sending existing context on purpose here
&InternalEventPayload::WindowNavigateEvent(WindowNavigateEvent { url }),
Some(event_id.clone()),
);
@@ -239,12 +233,11 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
{
let event_id = event.id.clone();
let plugin_handle = plugin_handle.clone();
let label = label.clone();
let window_context = window_context.clone();
tauri::async_runtime::spawn(async move {
while let Some(_) = close_rx.recv().await {
let label = label.clone();
let event_to_send = plugin_handle.build_event_to_send(
&WindowContext::Label { label },
&window_context,
&InternalEventPayload::WindowCloseEvent,
Some(event_id.clone()),
);
@@ -263,17 +256,17 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
}
InternalEventPayload::SetKeyValueRequest(req) => {
let name = plugin_handle.name().await;
set_plugin_key_value(app_handle, &name, &req.key, &req.value).await;
app_handle.db().set_plugin_key_value(&name, &req.key, &req.value);
Some(InternalEventPayload::SetKeyValueResponse(SetKeyValueResponse {}))
}
InternalEventPayload::GetKeyValueRequest(req) => {
let name = plugin_handle.name().await;
let value = get_plugin_key_value(app_handle, &name, &req.key).await.map(|v| v.value);
let value = app_handle.db().get_plugin_key_value(&name, &req.key).map(|v| v.value);
Some(InternalEventPayload::GetKeyValueResponse(GetKeyValueResponse { value }))
}
InternalEventPayload::DeleteKeyValueRequest(req) => {
let name = plugin_handle.name().await;
let deleted = delete_plugin_key_value(app_handle, &name, &req.key).await;
let deleted = app_handle.db().delete_plugin_key_value(&name, &req.key).unwrap();
Some(InternalEventPayload::DeleteKeyValueResponse(DeleteKeyValueResponse { deleted }))
}
_ => None,
@@ -12,7 +12,7 @@ pub async fn render_template<T: TemplateCallback>(
base_environment: &Environment,
environment: Option<&Environment>,
cb: &T,
) -> String {
) -> yaak_templates::error::Result<String> {
let vars = &make_vars_hashmap(base_environment, environment);
render(template, vars, cb).await
}
@@ -22,7 +22,7 @@ pub async fn render_json_value<T: TemplateCallback>(
base_environment: &Environment,
environment: Option<&Environment>,
cb: &T,
) -> Value {
) -> yaak_templates::error::Result<Value> {
let vars = &make_vars_hashmap(base_environment, environment);
render_json_value_raw(value, vars, cb).await
}
@@ -32,32 +32,32 @@ pub async fn render_grpc_request<T: TemplateCallback>(
base_environment: &Environment,
environment: Option<&Environment>,
cb: &T,
) -> GrpcRequest {
) -> yaak_templates::error::Result<GrpcRequest> {
let vars = &make_vars_hashmap(base_environment, environment);

let mut metadata = Vec::new();
for p in r.metadata.clone() {
metadata.push(GrpcMetadataEntry {
enabled: p.enabled,
name: render(p.name.as_str(), vars, cb).await,
value: render(p.value.as_str(), vars, cb).await,
name: render(p.name.as_str(), vars, cb).await?,
value: render(p.value.as_str(), vars, cb).await?,
id: p.id,
})
}

let mut authentication = BTreeMap::new();
for (k, v) in r.authentication.clone() {
authentication.insert(k, render_json_value_raw(v, vars, cb).await);
authentication.insert(k, render_json_value_raw(v, vars, cb).await?);
}

let url = render(r.url.as_str(), vars, cb).await;
let url = render(r.url.as_str(), vars, cb).await?;

GrpcRequest {
Ok(GrpcRequest {
url,
metadata,
authentication,
..r.to_owned()
}
})
}

pub async fn render_http_request<T: TemplateCallback>(
@@ -65,15 +65,15 @@ pub async fn render_http_request<T: TemplateCallback>(
base_environment: &Environment,
environment: Option<&Environment>,
cb: &T,
) -> HttpRequest {
) -> yaak_templates::error::Result<HttpRequest> {
let vars = &make_vars_hashmap(base_environment, environment);

let mut url_parameters = Vec::new();
for p in r.url_parameters.clone() {
url_parameters.push(HttpUrlParameter {
enabled: p.enabled,
name: render(p.name.as_str(), vars, cb).await,
value: render(p.value.as_str(), vars, cb).await,
name: render(p.name.as_str(), vars, cb).await?,
value: render(p.value.as_str(), vars, cb).await?,
id: p.id,
})
}
@@ -82,41 +82,41 @@ pub async fn render_http_request<T: TemplateCallback>(
for p in r.headers.clone() {
headers.push(HttpRequestHeader {
enabled: p.enabled,
name: render(p.name.as_str(), vars, cb).await,
value: render(p.value.as_str(), vars, cb).await,
name: render(p.name.as_str(), vars, cb).await?,
value: render(p.value.as_str(), vars, cb).await?,
id: p.id,
})
}

let mut body = BTreeMap::new();
for (k, v) in r.body.clone() {
body.insert(k, render_json_value_raw(v, vars, cb).await);
body.insert(k, render_json_value_raw(v, vars, cb).await?);
}

let mut authentication = BTreeMap::new();
for (k, v) in r.authentication.clone() {
authentication.insert(k, render_json_value_raw(v, vars, cb).await);
authentication.insert(k, render_json_value_raw(v, vars, cb).await?);
}

let url = render(r.url.clone().as_str(), vars, cb).await;
let url = render(r.url.clone().as_str(), vars, cb).await?;

// This doesn't fit perfectly with the concept of "rendering" but it kind of does
let (url, url_parameters) = apply_path_placeholders(&url, url_parameters);

HttpRequest {
Ok(HttpRequest {
url,
url_parameters,
headers,
body,
authentication,
..r.to_owned()
}
})
}

pub async fn render<T: TemplateCallback>(
template: &str,
vars: &HashMap<String, String>,
cb: &T,
) -> String {
) -> yaak_templates::error::Result<String> {
parse_and_render(template, vars, cb).await
}
@@ -1,12 +1,13 @@
use std::fmt::{Display, Formatter};
use std::time::SystemTime;

use crate::error::Result;
use log::info;
use tauri::{AppHandle, Manager};
use tauri::{Manager, Runtime, WebviewWindow};
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons};
use tauri_plugin_updater::UpdaterExt;
use tokio::task::block_in_place;
use yaak_models::queries::get_or_create_settings;
use yaak_models::query_manager::QueryManagerExt;
use yaak_plugins::manager::PluginManager;

use crate::is_dev;
@@ -59,30 +60,30 @@ impl YaakUpdater {
}
}

pub async fn check_now(
pub async fn check_now<R: Runtime>(
&mut self,
app_handle: &AppHandle,
window: &WebviewWindow<R>,
mode: UpdateMode,
update_trigger: UpdateTrigger,
) -> Result<bool, tauri_plugin_updater::Error> {
let settings = get_or_create_settings(app_handle).await;
) -> Result<bool> {
let settings = window.db().get_settings();
let update_key = format!("{:x}", md5::compute(settings.id));
self.last_update_check = SystemTime::now();

info!("Checking for updates mode={}", mode);

let h = app_handle.clone();
let update_check_result = app_handle
let w = window.clone();
let update_check_result = w
.updater_builder()
.on_before_exit(move || {
// Kill plugin manager before exit or NSIS installer will fail to replace sidecar
// while it's running.
// NOTE: This is only called on Windows
let h = h.clone();
let w = w.clone();
block_in_place(|| {
tauri::async_runtime::block_on(async move {
info!("Shutting down plugin manager before update");
let plugin_manager = h.state::<PluginManager>();
let plugin_manager = w.state::<PluginManager>();
plugin_manager.terminate().await;
});
});
@@ -100,11 +101,11 @@ impl YaakUpdater {
.check()
.await;

match update_check_result {
Ok(Some(update)) => {
let h = app_handle.clone();
app_handle
.dialog()
let result = match update_check_result? {
None => false,
Some(update) => {
let w = window.clone();
w.dialog()
.message(format!(
"{} is available. Would you like to download and install it now?",
update.version
@@ -121,7 +122,7 @@ impl YaakUpdater {
tauri::async_runtime::spawn(async move {
match update.download_and_install(|_, _| {}, || {}).await {
Ok(_) => {
if h.dialog()
if w.dialog()
.message("Would you like to restart the app?")
.title("Update Installed")
.buttons(MessageDialogButtons::OkCancelCustom(
@@ -130,27 +131,27 @@ impl YaakUpdater {
))
.blocking_show()
{
h.restart();
w.app_handle().restart();
}
}
Err(e) => {
h.dialog()
w.dialog()
.message(format!("The update failed to install: {}", e));
}
}
});
});
Ok(true)
true
}
Ok(None) => Ok(false),
Err(e) => Err(e),
}
};

Ok(result)
}
pub async fn maybe_check(
pub async fn maybe_check<R: Runtime>(
&mut self,
app_handle: &AppHandle,
window: &WebviewWindow<R>,
mode: UpdateMode,
) -> Result<bool, tauri_plugin_updater::Error> {
) -> Result<bool> {
let update_period_seconds = match mode {
UpdateMode::Stable => MAX_UPDATE_CHECK_HOURS_STABLE,
UpdateMode::Beta => MAX_UPDATE_CHECK_HOURS_BETA,
@@ -167,6 +168,6 @@ impl YaakUpdater {
return Ok(false);
}

self.check_now(app_handle, mode, UpdateTrigger::Background).await
self.check_now(window, mode, UpdateTrigger::Background).await
}
}
src-tauri/src/uri_scheme.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
use log::{info, warn};
use tauri::{Manager, Runtime, UriSchemeContext};

pub(crate) fn handle_uri_scheme<R: Runtime>(
a: UriSchemeContext<R>,
req: http::Request<Vec<u8>>,
) -> http::Response<Vec<u8>> {
println!("------------- Yaak URI scheme invoked!");
let uri = req.uri();
let window = a
.app_handle()
.get_webview_window(a.webview_label())
.expect("Failed to get webview window for URI scheme event");
info!("Yaak URI scheme invoked with {uri:?} {window:?}");

let path = uri.path();
if path == "/data/import" {
warn!("TODO: import data")
} else if path == "/plugins/install" {
warn!("TODO: install plugin")
}

let msg = format!("No handler found for {path}");
tauri::http::Response::builder().status(404).body(msg.as_bytes().to_vec()).unwrap()
}
@@ -1,6 +1,6 @@
use crate::window_menu::app_menu;
use crate::{DEFAULT_WINDOW_HEIGHT, DEFAULT_WINDOW_WIDTH, MIN_WINDOW_HEIGHT, MIN_WINDOW_WIDTH};
use log::{info, warn};
use rand::random;
use std::process::exit;
use tauri::{
AppHandle, Emitter, LogicalSize, Manager, Runtime, WebviewUrl, WebviewWindow, WindowEvent,
@@ -8,6 +8,15 @@ use tauri::{
use tauri_plugin_opener::OpenerExt;
use tokio::sync::mpsc;

const DEFAULT_WINDOW_WIDTH: f64 = 1100.0;
const DEFAULT_WINDOW_HEIGHT: f64 = 600.0;

const MIN_WINDOW_WIDTH: f64 = 300.0;
const MIN_WINDOW_HEIGHT: f64 = 300.0;

pub(crate) const MAIN_WINDOW_PREFIX: &str = "main_";
const OTHER_WINDOW_PREFIX: &str = "other_";

#[derive(Default, Debug)]
pub(crate) struct CreateWindowConfig<'s> {
pub url: &'s str,
@@ -55,7 +64,10 @@ pub(crate) fn create_window<R: Runtime>(
// macOS doesn't support data dir so must use this fn instead
#[cfg(target_os = "macos")]
{
win_builder = win_builder.data_store_identifier(to_fixed_hash(&key));
let hash = md5::compute(key.as_bytes());
let mut id = [0u8; 16];
id.copy_from_slice(&hash[..16]); // Take the first 16 bytes of the hash
win_builder = win_builder.data_store_identifier(id);
}
}

@@ -159,9 +171,94 @@ pub(crate) fn create_window<R: Runtime>(
win
}

fn to_fixed_hash(s: &str) -> [u8; 16] {
let hash = md5::compute(s.as_bytes());
let mut fixed = [0u8; 16];
fixed.copy_from_slice(&hash[..16]); // Take the first 16 bytes of the hash
fixed
pub(crate) fn create_main_window(handle: &AppHandle, url: &str) -> WebviewWindow {
let mut counter = 0;
let label = loop {
let label = format!("{MAIN_WINDOW_PREFIX}{counter}");
match handle.webview_windows().get(label.as_str()) {
None => break Some(label),
Some(_) => counter += 1,
}
}
.expect("Failed to generate label for new window");

let config = CreateWindowConfig {
url,
label: label.as_str(),
title: "Yaak",
inner_size: Some((DEFAULT_WINDOW_WIDTH, DEFAULT_WINDOW_HEIGHT)),
position: Some((
// Offset by random amount so it's easier to differentiate
100.0 + random::<f64>() * 20.0,
100.0 + random::<f64>() * 20.0,
)),
hide_titlebar: true,
..Default::default()
};

create_window(handle, config)
}

pub(crate) fn create_child_window(parent_window: &WebviewWindow, url: &str, label: &str, title: &str, inner_size: (f64, f64)) -> WebviewWindow {
let app_handle = parent_window.app_handle();
let label = format!("{OTHER_WINDOW_PREFIX}_{label}");
let scale_factor = parent_window.scale_factor().unwrap();

let current_pos = parent_window.inner_position().unwrap().to_logical::<f64>(scale_factor);
let current_size = parent_window.inner_size().unwrap().to_logical::<f64>(scale_factor);

// Position the new window in the middle of the parent
let position = (
current_pos.x + current_size.width / 2.0 - inner_size.0 / 2.0,
current_pos.y + current_size.height / 2.0 - inner_size.1 / 2.0,
);

let config = CreateWindowConfig {
label: label.as_str(),
title,
url,
inner_size: Some(inner_size),
position: Some(position),
hide_titlebar: true,
..Default::default()
};

let child_window = create_window(&app_handle, config);

// NOTE: These listeners will remain active even when the windows close. Unfortunately,
// there's no way to unlisten to events for now, so we just have to be defensive.

{
let parent_window = parent_window.clone();
let child_window = child_window.clone();
child_window.clone().on_window_event(move |e| match e {
// When the new window is destroyed, bring the other up behind it
WindowEvent::Destroyed => {
if let Some(w) = parent_window.get_webview_window(child_window.label()) {
w.set_focus().unwrap();
}
}
_ => {}
});
}

{
let parent_window = parent_window.clone();
let child_window = child_window.clone();
parent_window.clone().on_window_event(move |e| match e {
// When the parent window is closed, close the child
WindowEvent::CloseRequested { .. } => child_window.destroy().unwrap(),
// When the parent window is focused, bring the child above
WindowEvent::Focused(focus) => {
if *focus {
if let Some(w) = parent_window.get_webview_window(child_window.label()) {
w.set_focus().unwrap();
};
}
}
_ => {}
});
}

child_window
}
src-tauri/vendored/plugins/auth-jwt/build/index.js (generated, 34 changes)
@@ -1044,6 +1044,7 @@ var require_re = __commonJS({
var re = exports2.re = [];
var safeRe = exports2.safeRe = [];
var src = exports2.src = [];
var safeSrc = exports2.safeSrc = [];
var t = exports2.t = {};
var R = 0;
var LETTERDASHNUMBER = "[a-zA-Z0-9-]";
@@ -1064,6 +1065,7 @@ var require_re = __commonJS({
debug(name, index, value);
t[name] = index;
src[index] = value;
safeSrc[index] = safe;
re[index] = new RegExp(value, isGlobal ? "g" : void 0);
safeRe[index] = new RegExp(safe, isGlobal ? "g" : void 0);
};
@@ -1160,7 +1162,7 @@ var require_semver = __commonJS({
"../../node_modules/semver/classes/semver.js"(exports2, module2) {
var debug = require_debug();
var { MAX_LENGTH, MAX_SAFE_INTEGER } = require_constants();
var { safeRe: re, t } = require_re();
var { safeRe: re, safeSrc: src, t } = require_re();
var parseOptions = require_parse_options();
var { compareIdentifiers } = require_identifiers();
var SemVer = class _SemVer {
@@ -1300,6 +1302,18 @@ var require_semver = __commonJS({
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release, identifier, identifierBase) {
if (release.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
if (identifier) {
const r = new RegExp(`^${this.options.loose ? src[t.PRERELEASELOOSE] : src[t.PRERELEASE]}$`);
const match = `-${identifier}`.match(r);
if (!match || match[1] !== identifier) {
throw new Error(`invalid identifier: ${identifier}`);
}
}
}
switch (release) {
case "premajor":
this.prerelease.length = 0;
@@ -1325,6 +1339,12 @@ var require_semver = __commonJS({
}
this.inc("pre", identifier, identifierBase);
break;
case "release":
if (this.prerelease.length === 0) {
throw new Error(`version ${this.raw} is not a prerelease`);
}
this.prerelease.length = 0;
break;
case "major":
if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) {
this.major++;
@@ -1348,9 +1368,6 @@ var require_semver = __commonJS({
break;
case "pre": {
const base = Number(identifierBase) ? 1 : 0;
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
if (this.prerelease.length === 0) {
this.prerelease = [base];
} else {
@@ -1485,13 +1502,12 @@ var require_diff = __commonJS({
if (!lowVersion.patch && !lowVersion.minor) {
return "major";
}
if (highVersion.patch) {
if (lowVersion.compareMain(highVersion) === 0) {
if (lowVersion.minor && !lowVersion.patch) {
return "minor";
}
return "patch";
}
if (highVersion.minor) {
return "minor";
}
return "major";
}
const prefix = highHasPre ? "pre" : "";
if (v1.major !== v2.major) {

@@ -130,7 +130,7 @@ async function getOrRefreshAccessToken(ctx, contextId, {
if (token == null) {
return null;
}
const now = Date.now() / 1e3;
const now = Date.now();
const isExpired = token.expiresAt && now > token.expiresAt;
if (!isExpired && !forceRefresh) {
return token;

@@ -36,6 +36,9 @@ var require_quote = __commonJS({
"use strict";
module2.exports = function quote(xs) {
return xs.map(function(s) {
if (s === "") {
return "''";
}
if (s && typeof s === "object") {
return s.op.replace(/(.)/g, "\\$1");
}
File diff suppressed because it is too large
@@ -49782,7 +49782,11 @@ var require_property_base = __commonJS({
|
||||
* @returns {*|undefined}
|
||||
*/
|
||||
parent() {
|
||||
return this && this.__parent && (this.__parent.__parent || this.__parent) || void 0;
|
||||
let parent = this.__parent;
|
||||
if (parent && parent._postman_propertyIsList) {
|
||||
parent = parent.__parent || parent;
|
||||
}
|
||||
return parent || void 0;
|
||||
},
|
||||
/**
|
||||
* Accepts an object and sets it as the parent of the current property.
|
||||
@@ -77141,7 +77145,7 @@ var require_dynamic_variables = __commonJS({
|
||||
description: "A random avatar image",
|
||||
generator: () => {
|
||||
return faker.random.arrayElement([
|
||||
// eslint-disable-next-line max-len
|
||||
// eslint-disable-next-line @stylistic/js/max-len
|
||||
`https://cloudflare-ipfs.com/ipfs/Qmd3W5DuhgHirLHGVixi6V76LhCkZUz6pnFt5AJBiyvHye/avatar/${faker.datatype.number(1249)}.jpg`,
|
||||
`https://avatars.githubusercontent.com/u/${faker.datatype.number(1e8)}`
|
||||
]);
|
||||
@@ -77494,7 +77498,7 @@ var require_superstring = __commonJS({
|
||||
* @readOnly
|
||||
* @type {RegExp}
|
||||
*/
|
||||
REGEX_EXTRACT_VARS: /\{\{([^{}]*?)}}/g,
|
||||
REGEX_EXTRACT_VARS: /{{([^{}]*?)}}/g,
|
||||
/**
|
||||
* Defines the number of times the variable substitution mechanism will repeat until all tokens are resolved
|
||||
*
|
||||
@@ -77589,7 +77593,7 @@ var require_property = __commonJS({
|
||||
} else if (typeof value === "object") {
|
||||
seen.add(value);
|
||||
for (const key in value) {
|
||||
if (Object.hasOwnProperty.call(value, key)) {
|
||||
if (Object.hasOwn(value, key)) {
|
||||
_findSubstitutions(value[key], seen, result);
|
||||
}
|
||||
}
|
||||
@@ -77894,7 +77898,7 @@ var require_property_list = __commonJS({
|
||||
before > -1 ? this.members.splice(before, 0, item) : this.members.push(item);
|
||||
if ((index = item[this._postman_listIndexKey]) && (index = String(index))) {
|
||||
this._postman_listIndexCaseInsensitive && (index = index.toLowerCase());
if (this._postman_listAllowsMultipleValues && Object.hasOwnProperty.call(this.reference, index)) {
if (this._postman_listAllowsMultipleValues && Object.hasOwn(this.reference, index)) {
!_2.isArray(this.reference[index]) && (this.reference[index] = [this.reference[index]]);
this.reference[index].push(item);
} else {
@@ -78458,7 +78462,7 @@ var require_parser = __commonJS({
var ReplacementTracker = require_replacement_tracker();
var REGEX_ALL_BACKSLASHES = /\\/g;
var REGEX_LEADING_SLASHES = /^\/+/;
var REGEX_ALL_VARIABLES = /{{[^{}]*[.:/?#@&\]][^{}]*}}/g;
var REGEX_ALL_VARIABLES = /{{[^{}]*}}/g;
var HASH_SEPARATOR = "#";
var PATH_SEPARATOR = "/";
var PORT_SEPARATOR = ":";
@@ -78613,6 +78617,7 @@ var require_query_param = __commonJS({
"../../node_modules/postman-collection/lib/collection/query-param.js"(exports2, module2) {
var _2 = require_util2().lodash;
var Property = require_property().Property;
var Substitutor = require_superstring().Substitutor;
var PropertyList = require_property_list().PropertyList;
var E = "";
var AMPERSAND = "&";
@@ -78623,7 +78628,7 @@ var require_query_param = __commonJS({
var REGEX_HASH = /#/g;
var REGEX_EQUALS = /=/g;
var REGEX_AMPERSAND = /&/g;
var REGEX_EXTRACT_VARS = /{{[^{}]*[&#=][^{}]*}}/g;
var REGEX_EXTRACT_VARS = Substitutor.REGEX_EXTRACT_VARS;
var QueryParam;
var encodeReservedChars = function(str, encodeEquals) {
if (!str) {
@@ -92126,7 +92131,7 @@ var require_db3 = __commonJS({
type: "embed",
format: "pdf"
},
"application/ecmascript": {
"text/javascript": {
type: "text",
format: "script"
},
@@ -92134,6 +92139,66 @@ var require_db3 = __commonJS({
type: "text",
format: "script"
},
"application/ecmascript": {
type: "text",
format: "script"
},
"application/x-ecmascript": {
type: "text",
format: "script"
},
"application/x-javascript": {
type: "text",
format: "script"
},
"text/ecmascript": {
type: "text",
format: "script"
},
"text/javascript1.0": {
type: "text",
format: "script"
},
"text/javascript1.1": {
type: "text",
format: "script"
},
"text/javascript1.2": {
type: "text",
format: "script"
},
"text/javascript1.3": {
type: "text",
format: "script"
},
"text/javascript1.4": {
type: "text",
format: "script"
},
"text/javascript1.5": {
type: "text",
format: "script"
},
"text/jscript": {
type: "text",
format: "script"
},
"text/livescript": {
type: "text",
format: "script"
},
"text/x-ecmascript": {
type: "text",
format: "script"
},
"text/x-javascript": {
type: "text",
format: "script"
},
"text/css": {
type: "text",
format: "stylesheet"
},
"application/json": {
type: "text",
format: "json"
@@ -94086,7 +94151,7 @@ var require_content_info = __commonJS({
* egHeader: inline; filename="test Response.json"
* Reference: https://github.com/jshttp/content-disposition
*/
// eslint-disable-next-line max-len
// eslint-disable-next-line @stylistic/js/max-len
fileNameRegex: /;[ \t]*(?:filename)[ \t]*=[ \t]*("(?:[\x20!\x23-\x5b\x5d-\x7e\x80-\xff]|\\[\x20-\x7e])*"|[!#$%&'*+.0-9A-Z^_`a-z|~-]+)[ \t]*/,
/**
* RegExp for extracting filename* from content-disposition header
@@ -94119,7 +94184,7 @@ var require_content_info = __commonJS({
* egHeader: attachment;filename*=utf-8''%E4%BD%A0%E5%A5%BD.txt
* Reference: https://github.com/jshttp/content-disposition
*/
// eslint-disable-next-line max-len, security/detect-unsafe-regex
// eslint-disable-next-line @stylistic/js/max-len, security/detect-unsafe-regex
encodedFileNameRegex: /;[ \t]*(?:filename\*)[ \t]*=[ \t]*([A-Za-z0-9!#$%&+\-^_`{}~]+)'.*'((?:%[0-9A-Fa-f]{2}|[A-Za-z0-9!#$&+.^_`|~-])+)[ \t]*/,
/**
* RegExp to match quoted-pair in RFC 2616
@@ -95313,6 +95378,7 @@ var require_re = __commonJS({
var re = exports2.re = [];
var safeRe = exports2.safeRe = [];
var src = exports2.src = [];
var safeSrc = exports2.safeSrc = [];
var t = exports2.t = {};
var R = 0;
var LETTERDASHNUMBER = "[a-zA-Z0-9-]";
@@ -95333,6 +95399,7 @@ var require_re = __commonJS({
debug(name, index, value);
t[name] = index;
src[index] = value;
safeSrc[index] = safe;
re[index] = new RegExp(value, isGlobal ? "g" : void 0);
safeRe[index] = new RegExp(safe, isGlobal ? "g" : void 0);
};
@@ -95429,7 +95496,7 @@ var require_semver = __commonJS({
"../../node_modules/semver/classes/semver.js"(exports2, module2) {
var debug = require_debug();
var { MAX_LENGTH, MAX_SAFE_INTEGER } = require_constants();
var { safeRe: re, t } = require_re();
var { safeRe: re, safeSrc: src, t } = require_re();
var parseOptions = require_parse_options();
var { compareIdentifiers } = require_identifiers();
var SemVer = class _SemVer {
@@ -95569,6 +95636,18 @@ var require_semver = __commonJS({
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release, identifier, identifierBase) {
if (release.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
if (identifier) {
const r = new RegExp(`^${this.options.loose ? src[t.PRERELEASELOOSE] : src[t.PRERELEASE]}$`);
const match = `-${identifier}`.match(r);
if (!match || match[1] !== identifier) {
throw new Error(`invalid identifier: ${identifier}`);
}
}
}
switch (release) {
case "premajor":
this.prerelease.length = 0;
@@ -95594,6 +95673,12 @@ var require_semver = __commonJS({
}
this.inc("pre", identifier, identifierBase);
break;
case "release":
if (this.prerelease.length === 0) {
throw new Error(`version ${this.raw} is not a prerelease`);
}
this.prerelease.length = 0;
break;
case "major":
if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) {
this.major++;
@@ -95617,9 +95702,6 @@ var require_semver = __commonJS({
break;
case "pre": {
const base = Number(identifierBase) ? 1 : 0;
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
if (this.prerelease.length === 0) {
this.prerelease = [base];
} else {
@@ -95754,13 +95836,12 @@ var require_diff = __commonJS({
if (!lowVersion.patch && !lowVersion.minor) {
return "major";
}
if (highVersion.patch) {
if (lowVersion.compareMain(highVersion) === 0) {
if (lowVersion.minor && !lowVersion.patch) {
return "minor";
}
return "patch";
}
if (highVersion.minor) {
return "minor";
}
return "major";
}
const prefix = highHasPre ? "pre" : "";
if (v12.major !== v2.major) {
@@ -136895,8 +136976,6 @@ var require_utils3 = __commonJS({
originalRequest.url.query = [];
originalRequest.header = _2.get(response, "originalRequest.headers", []);
originalRequest.body = requestItem.request.body;
response.code = response.code.replace(/X|x/g, "0");
response.code = response.code === "default" ? 500 : _2.toSafeInteger(response.code);
let sdkResponse = new Response({
name: response.name,
code: response.code,
@@ -137692,7 +137771,7 @@ var require_schemaUtils2 = __commonJS({
}
exampleKey = Object.keys(exampleObj)[0];
example = exampleObj[exampleKey];
if (example.$ref) {
if (example && example.$ref) {
example = resolveExampleData(context, example);
}
if (_2.get(example, "value")) {
@@ -137881,6 +137960,65 @@ var require_schemaUtils2 = __commonJS({
}
return schema2;
};
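// processSchema (added below) reduces a fully-resolved JSON Schema to its
// type-relevant keywords (type, enum, min/max bounds, pattern, example, etc.),
// recursing into object `properties` and array `items`.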
var processSchema = (resolvedSchema) => {
if (resolvedSchema.type === "object" && resolvedSchema.properties) {
const schemaDetails = {
type: resolvedSchema.type,
properties: {},
required: []
}, requiredProperties = new Set(resolvedSchema.required || []);
for (let [propName, propValue] of Object.entries(resolvedSchema.properties)) {
if (!propValue.type) {
continue;
}
const propertyDetails = {
type: propValue.type,
deprecated: propValue.deprecated,
enum: propValue.enum || void 0,
minLength: propValue.minLength,
maxLength: propValue.maxLength,
minimum: propValue.minimum,
maximum: propValue.maximum,
pattern: propValue.pattern,
example: propValue.example,
description: propValue.description,
format: propValue.format
};
if (requiredProperties.has(propName)) {
schemaDetails.required.push(propName);
}
if (propValue.properties) {
let processedProperties = processSchema(propValue);
propertyDetails.properties = processedProperties.properties;
if (processedProperties.required) {
propertyDetails.required = processedProperties.required;
}
} else if (propValue.type === "array" && propValue.items) {
propertyDetails.items = processSchema(propValue.items);
}
schemaDetails.properties[propName] = propertyDetails;
}
if (schemaDetails.required && schemaDetails.required.length === 0) {
schemaDetails.required = void 0;
}
return schemaDetails;
} else if (resolvedSchema.type === "array" && resolvedSchema.items) {
const arrayDetails = {
type: resolvedSchema.type,
items: processSchema(resolvedSchema.items)
};
if (resolvedSchema.minItems !== void 0) {
arrayDetails.minItems = resolvedSchema.minItems;
}
if (resolvedSchema.maxItems !== void 0) {
arrayDetails.maxItems = resolvedSchema.maxItems;
}
return arrayDetails;
}
return {
type: resolvedSchema.type
};
};
var resolveSchema = (context, schema2, { stack = 0, resolveFor = CONVERSION, seenRef = {}, isResponseSchema = false } = {}) => {
resetReadWritePropCache(context);
let resolvedSchema = _resolveSchema(context, schema2, stack, resolveFor, seenRef);
@@ -138227,12 +138365,16 @@ var require_schemaUtils2 = __commonJS({
return pmExamples;
};
var resolveBodyData = (context, requestBodySchema, bodyType, isExampleBody = false, responseCode = null, requestBodyExamples = {}) => {
let { parametersResolution, indentCharacter } = context.computedOptions, headerFamily = getHeaderFamily(bodyType), bodyData = "", shouldGenerateFromExample = parametersResolution === "example", isBodyTypeXML = bodyType === APP_XML || bodyType === TEXT_XML || headerFamily === HEADER_TYPE.XML, bodyKey = isExampleBody ? "response" : "request", responseExamples, example, examples;
let { parametersResolution, indentCharacter } = context.computedOptions, headerFamily = getHeaderFamily(bodyType), bodyData = "", shouldGenerateFromExample = parametersResolution === "example", isBodyTypeXML = bodyType === APP_XML || bodyType === TEXT_XML || headerFamily === HEADER_TYPE.XML, bodyKey = isExampleBody ? "response" : "request", responseExamples, example, examples, resolvedSchemaTypes = [];
if (_2.isEmpty(requestBodySchema)) {
return [{ [bodyKey]: bodyData }];
}
if (requestBodySchema.$ref) {
requestBodySchema = resolveSchema(context, requestBodySchema, { isResponseSchema: isExampleBody });
requestBodySchema = resolveSchema(
context,
requestBodySchema,
{ isResponseSchema: isExampleBody }
);
}
if (requestBodySchema.example !== void 0) {
const shouldResolveValueKey = _2.has(requestBodySchema.example, "value") && _2.keys(requestBodySchema.example).length <= 1;
@@ -138243,7 +138385,11 @@ var require_schemaUtils2 = __commonJS({
}
examples = requestBodySchema.examples || _2.get(requestBodySchema, "schema.examples");
requestBodySchema = requestBodySchema.schema || requestBodySchema;
requestBodySchema = resolveSchema(context, requestBodySchema, { isResponseSchema: isExampleBody });
requestBodySchema = resolveSchema(
context,
requestBodySchema,
{ isResponseSchema: isExampleBody }
);
if (example === void 0 && _2.get(requestBodySchema, "example") !== void 0) {
example = requestBodySchema.example;
}
@@ -138257,7 +138403,11 @@ var require_schemaUtils2 = __commonJS({
} else if (requestBodySchema) {
requestBodySchema = requestBodySchema.schema || requestBodySchema;
if (requestBodySchema.$ref) {
requestBodySchema = resolveSchema(context, requestBodySchema, { isResponseSchema: isExampleBody });
requestBodySchema = resolveSchema(
context,
requestBodySchema,
{ isResponseSchema: isExampleBody }
);
}
if (isBodyTypeXML) {
bodyData = xmlFaker(null, requestBodySchema, indentCharacter, parametersResolution);
@@ -138285,6 +138435,10 @@ var require_schemaUtils2 = __commonJS({
}
}
}
if (context.enableTypeFetching && requestBodySchema.type !== void 0) {
const requestBodySchemaTypes = processSchema(requestBodySchema);
resolvedSchemaTypes.push(requestBodySchemaTypes);
}
if (isExampleBody && shouldGenerateFromExample && (_2.size(examples) > 1 || _2.size(requestBodyExamples) > 1)) {
responseExamples = [{
key: "_default",
@@ -138305,22 +138459,37 @@ var require_schemaUtils2 = __commonJS({
if (_2.isEmpty(matchedRequestBodyExamples)) {
matchedRequestBodyExamples = requestBodyExamples;
}
return generateExamples(context, responseExamples, matchedRequestBodyExamples, requestBodySchema, isBodyTypeXML);
const generatedBody = generateExamples(
context,
responseExamples,
matchedRequestBodyExamples,
requestBodySchema,
isBodyTypeXML
);
return {
generatedBody,
resolvedSchemaType: resolvedSchemaTypes[0]
};
}
return [{ [bodyKey]: bodyData }];
return {
generatedBody: [{ [bodyKey]: bodyData }],
resolvedSchemaType: resolvedSchemaTypes[0]
};
};
var resolveUrlEncodedRequestBodyForPostmanRequest = (context, requestBodyContent) => {
let bodyData = "", urlEncodedParams = [], requestBodyData = {
mode: "urlencoded",
urlencoded: urlEncodedParams
}, resolvedBody;
}, resolvedBody, resolvedBodyResult, resolvedSchemaTypeObject;
if (_2.isEmpty(requestBodyContent)) {
return requestBodyData;
}
if (_2.has(requestBodyContent, "schema.$ref")) {
requestBodyContent.schema = resolveSchema(context, requestBodyContent.schema);
}
resolvedBody = resolveBodyData(context, requestBodyContent.schema)[0];
resolvedBodyResult = resolveBodyData(context, requestBodyContent.schema);
resolvedBody = resolvedBodyResult && Array.isArray(resolvedBodyResult.generatedBody) && resolvedBodyResult.generatedBody[0];
resolvedSchemaTypeObject = resolvedBodyResult && resolvedBodyResult.resolvedSchemaType;
resolvedBody && (bodyData = resolvedBody.request);
const encoding = requestBodyContent.encoding || {};
_2.forOwn(bodyData, (value, key) => {
@@ -138342,18 +138511,21 @@ var require_schemaUtils2 = __commonJS({
headers: [{
key: "Content-Type",
value: URLENCODED
}]
}],
resolvedSchemaTypeObject
};
};
var resolveFormDataRequestBodyForPostmanRequest = (context, requestBodyContent) => {
let bodyData = "", formDataParams = [], encoding = {}, requestBodyData = {
mode: "formdata",
formdata: formDataParams
}, resolvedBody;
}, resolvedBody, resolvedBodyResult, resolvedSchemaTypeObject;
if (_2.isEmpty(requestBodyContent)) {
return requestBodyData;
}
resolvedBody = resolveBodyData(context, requestBodyContent.schema)[0];
resolvedBodyResult = resolveBodyData(context, requestBodyContent.schema);
resolvedBody = resolvedBodyResult && Array.isArray(resolvedBodyResult.generatedBody) && resolvedBodyResult.generatedBody[0];
resolvedSchemaTypeObject = resolvedBodyResult && resolvedBodyResult.resolvedSchemaType;
resolvedBody && (bodyData = resolvedBody.request);
encoding = _2.get(requestBodyContent, "encoding", {});
_2.forOwn(bodyData, (value, key) => {
@@ -138389,7 +138561,8 @@ var require_schemaUtils2 = __commonJS({
headers: [{
key: "Content-Type",
value: FORM_DATA
}]
}],
resolvedSchemaTypeObject
};
};
var getRawBodyType = (content) => {
@@ -138425,14 +138598,16 @@ var require_schemaUtils2 = __commonJS({
return bodyType;
};
var resolveRawModeRequestBodyForPostmanRequest = (context, requestContent) => {
let bodyType = getRawBodyType(requestContent), bodyData, headerFamily, dataToBeReturned = {}, { concreteUtils } = context, resolvedBody;
let bodyType = getRawBodyType(requestContent), bodyData, headerFamily, dataToBeReturned = {}, { concreteUtils } = context, resolvedBody, resolvedBodyResult, resolvedSchemaTypeObject;
headerFamily = getHeaderFamily(bodyType);
if (concreteUtils.isBinaryContentType(bodyType, requestContent)) {
dataToBeReturned = {
mode: "file"
};
} else {
resolvedBody = resolveBodyData(context, requestContent[bodyType], bodyType)[0];
resolvedBodyResult = resolveBodyData(context, requestContent[bodyType], bodyType);
resolvedBody = resolvedBodyResult && Array.isArray(resolvedBodyResult.generatedBody) && resolvedBodyResult.generatedBody[0];
resolvedSchemaTypeObject = resolvedBodyResult && resolvedBodyResult.resolvedSchemaType;
resolvedBody && (bodyData = resolvedBody.request);
if (bodyType === TEXT_XML || bodyType === APP_XML || headerFamily === HEADER_TYPE.XML) {
bodyData = getXmlVersionContent(bodyData);
@@ -138456,7 +138631,8 @@ var require_schemaUtils2 = __commonJS({
headers: [{
key: "Content-Type",
value: bodyType
}]
}],
resolvedSchemaTypeObject
};
};
var resolveRequestBodyForPostmanRequest = (context, operationItem) => {
@@ -138536,8 +138712,25 @@ var require_schemaUtils2 = __commonJS({
});
return reqParam;
};
var createProperties = (param) => {
const { schema: schema2 } = param;
return {
type: schema2.type,
format: schema2.format,
default: schema2.default,
required: param.required || false,
deprecated: param.deprecated || false,
enum: schema2.enum || void 0,
minLength: schema2.minLength,
maxLength: schema2.maxLength,
minimum: schema2.minimum,
maximum: schema2.maximum,
pattern: schema2.pattern,
example: schema2.example
};
};
var resolveQueryParamsForPostmanRequest = (context, operationItem, method) => {
const params = resolvePathItemParams(context, operationItem[method].parameters, operationItem.parameters), pmParams = [], { includeDeprecated } = context.computedOptions;
const params = resolvePathItemParams(context, operationItem[method].parameters, operationItem.parameters), pmParams = [], queryParamTypes = [], { includeDeprecated } = context.computedOptions;
_2.forEach(params, (param) => {
if (!_2.isObject(param)) {
return;
@@ -138545,20 +138738,28 @@ var require_schemaUtils2 = __commonJS({
if (_2.has(param, "$ref")) {
param = resolveSchema(context, param);
}
if (_2.has(param.schema, "$ref")) {
param.schema = resolveSchema(context, param.schema);
}
if (param.in !== QUERYPARAM || !includeDeprecated && param.deprecated) {
return;
}
let paramValue = resolveValueOfParameter(context, param);
let queryParamTypeInfo = {}, properties = {}, paramValue = resolveValueOfParameter(context, param);
if (param && param.name && param.schema && param.schema.type) {
properties = createProperties(param);
queryParamTypeInfo = { keyName: param.name, properties };
queryParamTypes.push(queryParamTypeInfo);
}
if (typeof paramValue === "number" || typeof paramValue === "boolean") {
paramValue = paramValue.toString();
}
const deserialisedParams = serialiseParamsBasedOnStyle(context, param, paramValue);
pmParams.push(...deserialisedParams);
});
return pmParams;
return { queryParamTypes, queryParams: pmParams };
};
var resolvePathParamsForPostmanRequest = (context, operationItem, method) => {
const params = resolvePathItemParams(context, operationItem[method].parameters, operationItem.parameters), pmParams = [];
const params = resolvePathItemParams(context, operationItem[method].parameters, operationItem.parameters), pmParams = [], pathParamTypes = [];
_2.forEach(params, (param) => {
if (!_2.isObject(param)) {
return;
@@ -138566,17 +138767,25 @@ var require_schemaUtils2 = __commonJS({
if (_2.has(param, "$ref")) {
param = resolveSchema(context, param);
}
if (_2.has(param.schema, "$ref")) {
param.schema = resolveSchema(context, param.schema);
}
if (param.in !== PATHPARAM) {
return;
}
let paramValue = resolveValueOfParameter(context, param);
let pathParamTypeInfo = {}, properties = {}, paramValue = resolveValueOfParameter(context, param);
if (param && param.name && param.schema && param.schema.type) {
properties = createProperties(param);
pathParamTypeInfo = { keyName: param.name, properties };
pathParamTypes.push(pathParamTypeInfo);
}
if (typeof paramValue === "number" || typeof paramValue === "boolean") {
paramValue = paramValue.toString();
}
const deserialisedParams = serialiseParamsBasedOnStyle(context, param, paramValue);
pmParams.push(...deserialisedParams);
});
return pmParams;
return { pathParamTypes, pathParams: pmParams };
};
var resolveNameForPostmanReqeust = (context, operationItem, requestUrl) => {
let reqName, { requestNameSource } = context.computedOptions;
@@ -138598,7 +138807,7 @@ var require_schemaUtils2 = __commonJS({
return reqName;
};
var resolveHeadersForPostmanRequest = (context, operationItem, method) => {
const params = resolvePathItemParams(context, operationItem[method].parameters, operationItem.parameters), pmParams = [], { keepImplicitHeaders, includeDeprecated } = context.computedOptions;
const params = resolvePathItemParams(context, operationItem[method].parameters, operationItem.parameters), pmParams = [], headerTypes = [], { keepImplicitHeaders, includeDeprecated } = context.computedOptions;
_2.forEach(params, (param) => {
if (!_2.isObject(param)) {
return;
@@ -138606,25 +138815,33 @@ var require_schemaUtils2 = __commonJS({
if (_2.has(param, "$ref")) {
param = resolveSchema(context, param);
}
if (_2.has(param.schema, "$ref")) {
param.schema = resolveSchema(context, param.schema);
}
if (param.in !== HEADER || !includeDeprecated && param.deprecated) {
return;
}
if (!keepImplicitHeaders && _2.includes(IMPLICIT_HEADERS, _2.toLower(_2.get(param, "name")))) {
return;
}
let paramValue = resolveValueOfParameter(context, param);
let headerTypeInfo = {}, properties = {}, paramValue = resolveValueOfParameter(context, param);
if (param && param.name && param.schema && param.schema.type) {
properties = createProperties(param);
headerTypeInfo = { keyName: param.name, properties };
headerTypes.push(headerTypeInfo);
}
if (typeof paramValue === "number" || typeof paramValue === "boolean") {
paramValue = paramValue.toString();
}
const deserialisedParams = serialiseParamsBasedOnStyle(context, param, paramValue);
pmParams.push(...deserialisedParams);
});
return pmParams;
return { headerTypes, headers: pmParams };
};
var resolveResponseBody = (context, responseBody = {}, requestBodyExamples = {}, code = null) => {
let responseContent, bodyType, allBodyData, headerFamily, acceptHeader, emptyResponse = [{
body: void 0
}];
}], resolvedResponseBodyResult, resolvedResponseBodyTypes;
if (_2.isEmpty(responseBody)) {
return emptyResponse;
}
@@ -138637,7 +138854,16 @@ var require_schemaUtils2 = __commonJS({
}
bodyType = getRawBodyType(responseContent);
headerFamily = getHeaderFamily(bodyType);
allBodyData = resolveBodyData(context, responseContent[bodyType], bodyType, true, code, requestBodyExamples);
resolvedResponseBodyResult = resolveBodyData(
context,
responseContent[bodyType],
bodyType,
true,
code,
requestBodyExamples
);
allBodyData = resolvedResponseBodyResult.generatedBody;
resolvedResponseBodyTypes = resolvedResponseBodyResult.resolvedSchemaType;
return _2.map(allBodyData, (bodyData) => {
let requestBodyData = bodyData.request, responseBodyData = bodyData.response, exampleName = bodyData.name;
if (bodyType === TEXT_XML || bodyType === APP_XML || headerFamily === HEADER_TYPE.XML) {
@@ -138663,12 +138889,13 @@ var require_schemaUtils2 = __commonJS({
}],
name: exampleName,
bodyType,
acceptHeader
acceptHeader,
resolvedResponseBodyTypes
};
});
};
var resolveResponseHeaders = (context, responseHeaders) => {
const headers = [], { includeDeprecated } = context.computedOptions;
const headers = [], { includeDeprecated } = context.computedOptions, headerTypes = [];
if (_2.has(responseHeaders, "$ref")) {
responseHeaders = resolveSchema(context, responseHeaders, { isResponseSchema: true });
}
@@ -138679,14 +138906,33 @@ var require_schemaUtils2 = __commonJS({
if (!includeDeprecated && value.deprecated) {
return;
}
let headerValue = resolveValueOfParameter(context, value, { isResponseSchema: true });
let headerValue = resolveValueOfParameter(context, value, { isResponseSchema: true }), headerTypeInfo = {}, properties = {};
if (typeof headerValue === "number" || typeof headerValue === "boolean") {
headerValue = headerValue.toString();
}
const headerData = Object.assign({}, value, { name: headerName }), serialisedHeader = serialiseParamsBasedOnStyle(context, headerData, headerValue, { isResponseSchema: true });
headers.push(...serialisedHeader);
if (headerData && headerData.name && headerData.schema && headerData.schema.type) {
const { schema: schema2 } = headerData;
properties = {
type: schema2.type,
format: schema2.format,
default: schema2.default,
required: schema2.required || false,
deprecated: schema2.deprecated || false,
enum: schema2.enum || void 0,
minLength: schema2.minLength,
maxLength: schema2.maxLength,
minimum: schema2.minimum,
maximum: schema2.maximum,
pattern: schema2.pattern,
example: schema2.example
};
headerTypeInfo = { keyName: headerData.name, properties };
headerTypes.push(headerTypeInfo);
}
});
return headers;
return { resolvedHeaderTypes: headerTypes, headers };
};
var getPreviewLangugaForResponseBody = (bodyType) => {
const headerFamily = getHeaderFamily(bodyType);
@@ -138753,7 +138999,7 @@ var require_schemaUtils2 = __commonJS({
return responseAuthHelper;
};
var resolveResponseForPostmanRequest = (context, operationItem, request) => {
let responses = [], requestBodyExamples = [], requestAcceptHeader, requestBody = operationItem.requestBody, requestContent, rawBodyType, headerFamily, isBodyTypeXML;
let responses = [], requestBodyExamples = [], requestAcceptHeader, requestBody = operationItem.requestBody, requestContent, rawBodyType, headerFamily, isBodyTypeXML, resolvedExamplesObject = {}, responseTypes = {};
if (typeof requestBody === "object") {
if (requestBody.$ref) {
requestBody = resolveSchema(context, requestBody, { isResponseSchema: true });
@@ -138789,7 +139035,15 @@ var require_schemaUtils2 = __commonJS({
}
}
_2.forOwn(operationItem.responses, (responseObj, code) => {
let responseSchema = _2.has(responseObj, "$ref") ? resolveSchema(context, responseObj, { isResponseSchema: true }) : responseObj, { includeAuthInfoInExample } = context.computedOptions, auth = request.auth, resolvedExamples = resolveResponseBody(context, responseSchema, requestBodyExamples, code) || {}, headers = resolveResponseHeaders(context, responseSchema.headers);
let responseSchema = _2.has(responseObj, "$ref") ? resolveSchema(context, responseObj, { isResponseSchema: true }) : responseObj, { includeAuthInfoInExample } = context.computedOptions, auth = request.auth, resolvedExamples = resolveResponseBody(context, responseSchema, requestBodyExamples, code) || {}, { resolvedHeaderTypes, headers } = resolveResponseHeaders(context, responseSchema.headers), responseBodyHeaderObj;
resolvedExamplesObject = resolvedExamples[0] && resolvedExamples[0].resolvedResponseBodyTypes;
responseBodyHeaderObj = {
body: JSON.stringify(resolvedExamplesObject, null, 2),
headers: JSON.stringify(resolvedHeaderTypes, null, 2)
};
code = code.replace(/X|x/g, "0");
code = code === "default" ? 500 : _2.toSafeInteger(code);
Object.assign(responseTypes, { [code]: responseBodyHeaderObj });
_2.forOwn(resolvedExamples, (resolvedExample = {}) => {
let { body, contentHeader = [], bodyType, acceptHeader, name } = resolvedExample, resolvedRequestBody = _2.get(resolvedExample, "request.body"), originalRequest, response, responseAuthHelper, requestBodyObj = {}, reqHeaders = _2.clone(request.headers) || [], reqQueryParams = _2.clone(_2.get(request, "params.queryParams", []));
_2.isArray(acceptHeader) && reqHeaders.push(...acceptHeader);
@@ -138829,13 +139083,17 @@ var require_schemaUtils2 = __commonJS({
responses.push(response);
});
});
return { responses, acceptHeader: requestAcceptHeader };
return {
responses,
acceptHeader: requestAcceptHeader,
responseTypes
};
};
module2.exports = {
resolvePostmanRequest: function(context, operationItem, path, method) {
context.schemaCache = context.schemaCache || {};
context.schemaFakerCache = context.schemaFakerCache || {};
let url = resolveUrlForPostmanRequest(path), baseUrlData = resolveBaseUrlForPostmanRequest(operationItem[method]), requestName = resolveNameForPostmanReqeust(context, operationItem[method], url), queryParams = resolveQueryParamsForPostmanRequest(context, operationItem, method), headers = resolveHeadersForPostmanRequest(context, operationItem, method), pathParams = resolvePathParamsForPostmanRequest(context, operationItem, method), { pathVariables, collectionVariables } = filterCollectionAndPathVariables(url, pathParams), requestBody = resolveRequestBodyForPostmanRequest(context, operationItem[method]), request, securitySchema = _2.get(operationItem, [method, "security"]), authHelper = generateAuthForCollectionFromOpenAPI(context.openapi, securitySchema), { alwaysInheritAuthentication } = context.computedOptions;
let url = resolveUrlForPostmanRequest(path), baseUrlData = resolveBaseUrlForPostmanRequest(operationItem[method]), requestName = resolveNameForPostmanReqeust(context, operationItem[method], url), { queryParamTypes, queryParams } = resolveQueryParamsForPostmanRequest(context, operationItem, method), { headerTypes, headers } = resolveHeadersForPostmanRequest(context, operationItem, method), { pathParamTypes, pathParams } = resolvePathParamsForPostmanRequest(context, operationItem, method), { pathVariables, collectionVariables } = filterCollectionAndPathVariables(url, pathParams), requestBody = resolveRequestBodyForPostmanRequest(context, operationItem[method]), requestBodyTypes = requestBody && requestBody.resolvedSchemaTypeObject, request, securitySchema = _2.get(operationItem, [method, "security"]), authHelper = generateAuthForCollectionFromOpenAPI(context.openapi, securitySchema), { alwaysInheritAuthentication } = context.computedOptions, requestIdentifier, requestTypesObject = {};
headers.push(..._2.get(requestBody, "headers", []));
pathVariables.push(...baseUrlData.pathVariables);
collectionVariables.push(...baseUrlData.collectionVariables);
@@ -138853,7 +139111,21 @@ var require_schemaUtils2 = __commonJS({
body: _2.get(requestBody, "body"),
auth: alwaysInheritAuthentication ? void 0 : authHelper
};
const { responses, acceptHeader } = resolveResponseForPostmanRequest(context, operationItem[method], request);
const requestTypes = {
body: JSON.stringify(requestBodyTypes, null, 2),
headers: JSON.stringify(headerTypes, null, 2),
pathParam: JSON.stringify(pathParamTypes, null, 2),
queryParam: JSON.stringify(queryParamTypes, null, 2)
}, {
responses,
acceptHeader,
responseTypes
} = resolveResponseForPostmanRequest(context, operationItem[method], request);
requestIdentifier = method + path;
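// e.g. "get" + "/pets/{petId}" (illustrative values); this identifier keys the
// extracted request/response types collected into requestTypesObject below.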
Object.assign(
requestTypesObject,
{ [requestIdentifier]: { request: requestTypes, response: responseTypes } }
);
if (!_2.isEmpty(acceptHeader)) {
request.headers = _2.concat(request.headers, acceptHeader);
}
@@ -138864,7 +139136,8 @@ var require_schemaUtils2 = __commonJS({
responses
})
},
collectionVariables
collectionVariables,
requestTypesObject
};
},
resolveResponseForPostmanRequest,
@@ -141220,7 +141493,7 @@ var require_libV2 = __commonJS({
convertV2: function(context, cb) {
let collectionTree = generateSkeletonTreeFromOpenAPI(context.openapi, context.computedOptions);
let preOrderTraversal = GraphLib.alg.preorder(collectionTree, "root:collection");
let collection = {};
let collection = {}, extractedTypesObject = {};
_2.forEach(preOrderTraversal, function(nodeIdentified) {
let node = collectionTree.node(nodeIdentified);
switch (node.type) {
@@ -141254,15 +141527,16 @@ var require_libV2 = __commonJS({
break;
}
case "request": {
let request = {}, collectionVariables = [], requestObject = {};
let request = {}, collectionVariables = [], requestObject = {}, requestTypesObject = {};
try {
({ request, collectionVariables } = resolvePostmanRequest(
({ request, collectionVariables, requestTypesObject } = resolvePostmanRequest(
context,
context.openapi.paths[node.meta.path],
node.meta.path,
node.meta.method
));
requestObject = generateRequestItemObject(request);
extractedTypesObject = Object.assign({}, extractedTypesObject, requestTypesObject);
} catch (error) {
console.error(error);
break;
@@ -141337,6 +141611,17 @@ var require_libV2 = __commonJS({
if (!_2.isEmpty(collection.variable)) {
collection.variable = _2.uniqBy(collection.variable, "key");
}
if (context.enableTypeFetching) {
return cb(null, {
result: true,
output: [{
type: "collection",
data: collection
}],
analytics: this.analytics || {},
extractedTypes: extractedTypesObject || {}
});
}
return cb(null, {
result: true,
output: [{
@@ -145118,7 +145403,7 @@ var require_schemapack = __commonJS({
var concreteUtils;
var pathBrowserify = require_path_browserify();
var SchemaPack = class {
constructor(input, options = {}, moduleVersion = MODULE_VERSION.V1) {
constructor(input, options = {}, moduleVersion = MODULE_VERSION.V1, enableTypeFetching = false) {
if (input.type === schemaUtils.MULTI_FILE_API_TYPE_ALLOWED_VALUE && input.data && input.data[0] && input.data[0].path) {
input = schemaUtils.mapDetectRootFilesInputToFolderInput(input);
}
@@ -145136,6 +145421,7 @@ var require_schemapack = __commonJS({
actualStack: 0,
numberOfRequests: 0
};
this.enableTypeFetching = enableTypeFetching;
this.computedOptions = utils.mergeOptions(
// predefined options
_2.keyBy(this.definedOptions, "id"),
@@ -145814,6 +146100,14 @@ var require_openapi_to_postmanv2 = __commonJS({
}
return cb(new UserError(_2.get(schema2, "validationResult.reason", DEFAULT_INVALID_ERROR)));
},
convertV2WithTypes: function(input, options, cb) {
const enableTypeFetching = true;
var schema2 = new SchemaPack(input, options, MODULE_VERSION.V2, enableTypeFetching);
if (schema2.validated) {
return schema2.convertV2(cb);
}
return cb(new UserError(_2.get(schema2, "validationResult.reason", DEFAULT_INVALID_ERROR)));
},
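// Usage sketch (illustrative; follows the same callback contract as convertV2):
//   convertV2WithTypes({ type: "string", data: openapiSpec }, {}, (err, res) => {
//     // on success, res.output[0].data is the collection and res.extractedTypes
//     // maps "<method><path>" identifiers to stringified request/response types
//   });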
validate: function(input) {
var schema2 = new SchemaPack(input);
return schema2.validationResult;

@@ -7,7 +7,7 @@
"dev": "yaakcli dev ./src/index.js"
},
"dependencies": {
"openapi-to-postmanv2": "^4.23.1",
"openapi-to-postmanv2": "^5.0.0",
"yaml": "^2.4.2"
},
"devDependencies": {

@@ -69,6 +69,12 @@ function migrateImport(contents) {
}
}
}
for (const environment of parsed.resources.environments ?? []) {
if ("environmentId" in environment) {
environment.base = environment.environmentId == null;
delete environment.environmentId;
}
}
return { resources: parsed.resources };
}
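// For example (hypothetical input): { environmentId: null } migrates to
// { base: true }, and { environmentId: "env_1" } migrates to { base: false }.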
function isJSObject(obj) {

@@ -1,54 +0,0 @@
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
var src_exports = {};
__export(src_exports, {
plugin: () => plugin
});
module.exports = __toCommonJS(src_exports);
var import_node_fs = __toESM(require("node:fs"));
var plugin = {
templateFunctions: [{
name: "fs.readFile",
args: [{ title: "Select File", type: "file", name: "path", label: "File" }],
async onRender(_ctx, args) {
if (!args.values.path) return null;
try {
return import_node_fs.default.promises.readFile(args.values.path, "utf-8");
} catch (err) {
return null;
}
}
}]
};
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
plugin
});
@@ -1,9 +0,0 @@
{
"name": "@yaakapp/template-function-file",
"private": true,
"version": "0.0.1",
"scripts": {
"build": "yaakcli build ./src/index.ts",
"dev": "yaakcli dev ./src/index.js"
}
}
9 src-tauri/yaak-common/Cargo.toml Normal file
@@ -0,0 +1,9 @@
[package]
name = "yaak-common"
version = "0.1.0"
edition = "2024"
publish = false

[dependencies]
tauri = { workspace = true }
regex = "1.11.0"
1 src-tauri/yaak-common/src/lib.rs Normal file
@@ -0,0 +1 @@
pub mod window;
31 src-tauri/yaak-common/src/window.rs Normal file
@@ -0,0 +1,31 @@
use regex::Regex;
use tauri::{Runtime, WebviewWindow};

pub trait WorkspaceWindowTrait {
fn workspace_id(&self) -> Option<String>;
fn cookie_jar_id(&self) -> Option<String>;
fn environment_id(&self) -> Option<String>;
}

impl<R: Runtime> WorkspaceWindowTrait for WebviewWindow<R> {
fn workspace_id(&self) -> Option<String> {
let url = self.url().unwrap();
let re = Regex::new(r"/workspaces/(?<id>\w+)").unwrap();
match re.captures(url.as_str()) {
None => None,
Some(captures) => captures.name("id").map(|c| c.as_str().to_string()),
}
}
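// e.g. for a window URL like "https://localhost/workspaces/wk_123?environment_id=ev_1"
// (illustrative), workspace_id() returns Some("wk_123").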

fn cookie_jar_id(&self) -> Option<String> {
let url = self.url().unwrap();
let mut query_pairs = url.query_pairs();
query_pairs.find(|(k, _v)| k == "cookie_jar_id").map(|(_k, v)| v.to_string())
}

fn environment_id(&self) -> Option<String> {
let url = self.url().unwrap();
let mut query_pairs = url.query_pairs();
query_pairs.find(|(k, _v)| k == "environment_id").map(|(_k, v)| v.to_string())
}
}
20 src-tauri/yaak-crypto/Cargo.toml Normal file
@@ -0,0 +1,20 @@
[package]
name = "yaak-crypto"
links = "yaak-crypto"
version = "0.1.0"
edition = "2021"
publish = false

[dependencies]
base32 = "0.5.1" # For encoding human-readable key
base64 = "0.22.1" # For encoding in the database
chacha20poly1305 = "0.10.1"
keyring = { version = "4.0.0-rc.1" }
log = "0.4.26"
serde = { workspace = true, features = ["derive"] }
tauri = { workspace = true }
thiserror = "2.0.12"
yaak-models = { workspace = true }

[build-dependencies]
tauri-plugin = { workspace = true, features = ["build"] }
9 src-tauri/yaak-crypto/build.rs Normal file
@@ -0,0 +1,9 @@
const COMMANDS: &[&str] = &[
"enable_encryption",
"reveal_workspace_key",
"set_workspace_key",
];

fn main() {
tauri_plugin::Builder::new(COMMANDS).build();
}
13 src-tauri/yaak-crypto/index.ts Normal file
@@ -0,0 +1,13 @@
import { invoke } from '@tauri-apps/api/core';

export function enableEncryption(workspaceId: string) {
return invoke<void>('plugin:yaak-crypto|enable_encryption', { workspaceId });
}

export function revealWorkspaceKey(workspaceId: string) {
return invoke<string>('plugin:yaak-crypto|reveal_workspace_key', { workspaceId });
}

export function setWorkspaceKey(args: { workspaceId: string; key: string }) {
return invoke<void>('plugin:yaak-crypto|set_workspace_key', args);
}
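// Usage sketch from the frontend (illustrative):
//   await enableEncryption('wk_123');
//   const humanKey = await revealWorkspaceKey('wk_123');
//   await setWorkspaceKey({ workspaceId: 'wk_123', key: humanKey });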
6 src-tauri/yaak-crypto/package.json Normal file
@@ -0,0 +1,6 @@
{
"name": "@yaakapp-internal/crypto",
"private": true,
"version": "1.0.0",
"main": "index.ts"
}
7 src-tauri/yaak-crypto/permissions/default.toml Normal file
@@ -0,0 +1,7 @@
[default]
description = "Default permissions for the plugin"
permissions = [
"allow-enable-encryption",
"allow-reveal-workspace-key",
"allow-set-workspace-key",
]
31 src-tauri/yaak-crypto/src/commands.rs Normal file
@@ -0,0 +1,31 @@
use crate::error::Result;
use crate::manager::EncryptionManagerExt;
use tauri::{command, Runtime, WebviewWindow};

#[command]
pub(crate) async fn enable_encryption<R: Runtime>(
window: WebviewWindow<R>,
workspace_id: &str,
) -> Result<()> {
window.crypto().ensure_workspace_key(workspace_id)?;
window.crypto().reveal_workspace_key(workspace_id)?;
Ok(())
}

#[command]
pub(crate) async fn reveal_workspace_key<R: Runtime>(
window: WebviewWindow<R>,
workspace_id: &str,
) -> Result<String> {
Ok(window.crypto().reveal_workspace_key(workspace_id)?)
}

#[command]
pub(crate) async fn set_workspace_key<R: Runtime>(
window: WebviewWindow<R>,
workspace_id: &str,
key: &str,
) -> Result<()> {
window.crypto().set_human_key(workspace_id, key)?;
Ok(())
}
98 src-tauri/yaak-crypto/src/encryption.rs Normal file
@@ -0,0 +1,98 @@
use crate::error::Error::{DecryptionError, EncryptionError, InvalidEncryptedData};
use crate::error::Result;
use chacha20poly1305::aead::generic_array::typenum::Unsigned;
use chacha20poly1305::aead::{Aead, AeadCore, Key, KeyInit, OsRng};
use chacha20poly1305::XChaCha20Poly1305;

const ENCRYPTION_TAG: &str = "yA4k3nC";
const ENCRYPTION_VERSION: u8 = 1;
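// encrypt_data output layout: 7-byte tag, then 1-byte version, then the 24-byte
// XChaCha20 nonce, then the ciphertext (which ends with the 16-byte Poly1305 tag).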

pub(crate) fn encrypt_data(data: &[u8], key: &Key<XChaCha20Poly1305>) -> Result<Vec<u8>> {
let nonce = XChaCha20Poly1305::generate_nonce(&mut OsRng);
let cipher = XChaCha20Poly1305::new(&key);
let ciphered_data = cipher.encrypt(&nonce, data).map_err(|_| EncryptionError)?;

let mut data: Vec<u8> = Vec::new();
data.extend_from_slice(ENCRYPTION_TAG.as_bytes()); // Tag
data.push(ENCRYPTION_VERSION); // Version
data.extend_from_slice(&nonce.as_slice()); // Nonce
data.extend_from_slice(&ciphered_data); // Ciphertext

Ok(data)
}

pub(crate) fn decrypt_data(cipher_data: &[u8], key: &Key<XChaCha20Poly1305>) -> Result<Vec<u8>> {
// Layout: Tag + Version + Nonce + ... ciphertext ...
let (tag, rest) =
cipher_data.split_at_checked(ENCRYPTION_TAG.len()).ok_or(InvalidEncryptedData)?;
if tag != ENCRYPTION_TAG.as_bytes() {
return Err(InvalidEncryptedData);
}

let (version, rest) = rest.split_at_checked(1).ok_or(InvalidEncryptedData)?;
if version[0] != ENCRYPTION_VERSION {
return Err(InvalidEncryptedData);
}

let nonce_bytes = <XChaCha20Poly1305 as AeadCore>::NonceSize::to_usize();
let (nonce, ciphered_data) = rest.split_at_checked(nonce_bytes).ok_or(InvalidEncryptedData)?;

let cipher = XChaCha20Poly1305::new(&key);
cipher.decrypt(nonce.into(), ciphered_data).map_err(|_e| DecryptionError)
}

#[cfg(test)]
mod test {
use crate::encryption::{decrypt_data, encrypt_data};
use crate::error::Error::InvalidEncryptedData;
use crate::error::Result;
use chacha20poly1305::aead::OsRng;
use chacha20poly1305::{KeyInit, XChaCha20Poly1305};

#[test]
fn test_encrypt_decrypt() -> Result<()> {
let key = XChaCha20Poly1305::generate_key(OsRng);
let encrypted = encrypt_data("hello world".as_bytes(), &key)?;
let decrypted = decrypt_data(encrypted.as_slice(), &key)?;
assert_eq!(String::from_utf8(decrypted).unwrap(), "hello world");
Ok(())
}

#[test]
fn test_decrypt_empty() -> Result<()> {
let key = XChaCha20Poly1305::generate_key(OsRng);
let encrypted = encrypt_data(&[], &key)?;
assert_eq!(encrypted.len(), 48);
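// 48 = 7 (tag) + 1 (version) + 24 (nonce) + 16 (Poly1305 tag) for an empty plaintext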
let decrypted = decrypt_data(encrypted.as_slice(), &key)?;
assert_eq!(String::from_utf8(decrypted).unwrap(), "");
Ok(())
}

#[test]
fn test_decrypt_bad_version() -> Result<()> {
let key = XChaCha20Poly1305::generate_key(OsRng);
let mut encrypted = encrypt_data("hello world".as_bytes(), &key)?;
encrypted[7] = 0;
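// index 7 is the version byte; the 7-byte ENCRYPTION_TAG occupies indices 0..=6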
let decrypted = decrypt_data(encrypted.as_slice(), &key);
assert!(matches!(decrypted, Err(InvalidEncryptedData)));
Ok(())
}

#[test]
fn test_decrypt_bad_tag() -> Result<()> {
let key = XChaCha20Poly1305::generate_key(OsRng);
let mut encrypted = encrypt_data("hello world".as_bytes(), &key)?;
encrypted[0] = 2;
let decrypted = decrypt_data(encrypted.as_slice(), &key);
assert!(matches!(decrypted, Err(InvalidEncryptedData)));
Ok(())
}

#[test]
fn test_decrypt_unencrypted_data() -> Result<()> {
let key = XChaCha20Poly1305::generate_key(OsRng);
let decrypted = decrypt_data("123".as_bytes(), &key);
assert!(matches!(decrypted, Err(InvalidEncryptedData)));
Ok(())
}
}
50 src-tauri/yaak-crypto/src/error.rs Normal file
@@ -0,0 +1,50 @@
use serde::{Serialize, Serializer};
use std::io;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum Error {
#[error(transparent)]
DbError(#[from] yaak_models::error::Error),

#[error("Keyring error: {0}")]
KeyringError(#[from] keyring::Error),

#[error("Missing workspace encryption key")]
MissingWorkspaceKey,

#[error("Incorrect workspace key")]
IncorrectWorkspaceKey,

#[error("Failed to decrypt workspace key: {0}")]
WorkspaceKeyDecryptionError(String),

#[error("Crypto IO error: {0}")]
IoError(#[from] io::Error),

#[error("Failed to encrypt data")]
EncryptionError,

#[error("Failed to decrypt data")]
DecryptionError,

#[error("Invalid encrypted data")]
InvalidEncryptedData,

#[error("Invalid key provided")]
InvalidHumanKey,

#[error("Encryption error: {0}")]
GenericError(String),
}

impl Serialize for Error {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(self.to_string().as_ref())
}
}

pub type Result<T> = std::result::Result<T, Error>;
27 src-tauri/yaak-crypto/src/lib.rs Normal file
@@ -0,0 +1,27 @@
extern crate core;

use crate::commands::*;
use crate::manager::EncryptionManager;
use tauri::plugin::{Builder, TauriPlugin};
use tauri::{generate_handler, Manager, Runtime};

mod commands;
pub mod encryption;
pub mod error;
pub mod manager;
mod master_key;
mod workspace_key;

pub fn init<R: Runtime>() -> TauriPlugin<R> {
Builder::new("yaak-crypto")
.invoke_handler(generate_handler![
enable_encryption,
reveal_workspace_key,
set_workspace_key
])
.setup(|app, _api| {
app.manage(EncryptionManager::new(app.app_handle()));
Ok(())
})
.build()
}
176 src-tauri/yaak-crypto/src/manager.rs Normal file
@@ -0,0 +1,176 @@
use crate::error::Error::{
GenericError, IncorrectWorkspaceKey, MissingWorkspaceKey, WorkspaceKeyDecryptionError,
};
use crate::error::{Error, Result};
use crate::master_key::MasterKey;
use crate::workspace_key::WorkspaceKey;
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use log::{info, warn};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use tauri::{AppHandle, Manager, Runtime, State};
use yaak_models::models::{EncryptedKey, Workspace, WorkspaceMeta};
use yaak_models::query_manager::{QueryManager, QueryManagerExt};
use yaak_models::util::{generate_id_of_length, UpdateSource};

const KEY_USER: &str = "encryption-key";

pub trait EncryptionManagerExt<'a, R> {
fn crypto(&'a self) -> State<'a, EncryptionManager>;
}

impl<'a, R: Runtime, M: Manager<R>> EncryptionManagerExt<'a, R> for M {
fn crypto(&'a self) -> State<'a, EncryptionManager> {
self.state::<EncryptionManager>()
}
}

#[derive(Debug, Clone)]
pub struct EncryptionManager {
cached_master_key: Arc<Mutex<Option<MasterKey>>>,
cached_workspace_keys: Arc<Mutex<HashMap<String, WorkspaceKey>>>,
query_manager: QueryManager,
app_id: String,
}

impl EncryptionManager {
pub fn new<R: Runtime>(app_handle: &AppHandle<R>) -> Self {
Self {
cached_master_key: Default::default(),
cached_workspace_keys: Default::default(),
query_manager: app_handle.db_manager().inner().clone(),
app_id: app_handle.config().identifier.to_string(),
}
}

pub fn encrypt(&self, workspace_id: &str, data: &[u8]) -> Result<Vec<u8>> {
let workspace_secret = self.get_workspace_key(workspace_id)?;
workspace_secret.encrypt(data)
}

pub fn decrypt(&self, workspace_id: &str, data: &[u8]) -> Result<Vec<u8>> {
let workspace_secret = self.get_workspace_key(workspace_id)?;
workspace_secret.decrypt(data)
}

pub fn reveal_workspace_key(&self, workspace_id: &str) -> Result<String> {
let key = self.get_workspace_key(workspace_id)?;
key.to_human()
}

pub fn set_human_key(&self, workspace_id: &str, human_key: &str) -> Result<WorkspaceMeta> {
let wkey = WorkspaceKey::from_human(human_key)?;

let workspace = self.query_manager.connect().get_workspace(workspace_id)?;
let encryption_key_challenge = match workspace.encryption_key_challenge {
None => return self.set_workspace_key(workspace_id, &wkey),
Some(c) => c,
};

let encryption_key_challenge = match BASE64_STANDARD.decode(encryption_key_challenge) {
Ok(c) => c,
Err(_) => return Err(GenericError("Failed to decode workspace challenge".to_string())),
};
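// The stored challenge is a random value encrypted with the workspace key in
// set_workspace_key; if the supplied key can decrypt it, the key is correct.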
|
||||
|
||||
if let Err(_) = wkey.decrypt(encryption_key_challenge.as_slice()) {
|
||||
return Err(IncorrectWorkspaceKey);
|
||||
};
|
||||
|
||||
self.set_workspace_key(workspace_id, &wkey)
|
||||
}
|
||||
|
||||
pub(crate) fn set_workspace_key(
|
||||
&self,
|
||||
workspace_id: &str,
|
||||
wkey: &WorkspaceKey,
|
||||
) -> Result<WorkspaceMeta> {
|
||||
info!("Created workspace key for {workspace_id}");
|
||||
|
||||
let encrypted_key = BASE64_STANDARD.encode(self.get_master_key()?.encrypt(wkey.raw_key())?);
|
||||
let encrypted_key = EncryptedKey { encrypted_key };
|
||||
let encryption_key_challenge = wkey.encrypt(generate_id_of_length(50).as_bytes())?;
|
||||
let encryption_key_challenge = Some(BASE64_STANDARD.encode(encryption_key_challenge));
|
||||
|
||||
let workspace_meta = self.query_manager.with_tx::<WorkspaceMeta, Error>(|tx| {
|
||||
let workspace = tx.get_workspace(workspace_id)?;
|
||||
let workspace_meta = tx.get_or_create_workspace_meta(workspace_id)?;
|
||||
tx.upsert_workspace(
|
||||
&Workspace {
|
||||
encryption_key_challenge,
|
||||
..workspace
|
||||
},
|
||||
&UpdateSource::Background,
|
||||
)?;
|
||||
|
||||
Ok(tx.upsert_workspace_meta(
|
||||
&WorkspaceMeta {
|
||||
encryption_key: Some(encrypted_key.clone()),
|
||||
..workspace_meta
|
||||
},
|
||||
&UpdateSource::Background,
|
||||
)?)
|
||||
})?;
|
||||
|
||||
let mut cache = self.cached_workspace_keys.lock().unwrap();
|
||||
cache.insert(workspace_id.to_string(), wkey.clone());
|
||||
|
||||
Ok(workspace_meta)
|
||||
}
|
||||
|
||||
pub(crate) fn ensure_workspace_key(&self, workspace_id: &str) -> Result<WorkspaceMeta> {
|
||||
let workspace_meta =
|
||||
self.query_manager.connect().get_or_create_workspace_meta(workspace_id)?;
|
||||
|
||||
// Already exists
|
||||
if let Some(_) = workspace_meta.encryption_key {
|
||||
warn!("Tried to create workspace key when one already exists for {workspace_id}");
|
||||
return Ok(workspace_meta);
|
||||
}
|
||||
|
||||
let wkey = WorkspaceKey::create()?;
|
||||
self.set_workspace_key(workspace_id, &wkey)
|
||||
}
|
||||
|
||||
fn get_workspace_key(&self, workspace_id: &str) -> Result<WorkspaceKey> {
|
||||
{
|
||||
let cache = self.cached_workspace_keys.lock().unwrap();
|
||||
if let Some(k) = cache.get(workspace_id) {
|
||||
return Ok(k.clone());
|
||||
}
|
||||
};
|
||||
|
||||
let db = self.query_manager.connect();
|
||||
let workspace_meta = db.get_or_create_workspace_meta(workspace_id)?;
|
||||
|
||||
let key = match workspace_meta.encryption_key {
|
||||
None => return Err(MissingWorkspaceKey),
|
||||
Some(k) => k,
|
||||
};
|
||||
|
||||
let mkey = self.get_master_key()?;
|
||||
let decoded_key = BASE64_STANDARD
|
||||
.decode(key.encrypted_key)
|
||||
.map_err(|e| WorkspaceKeyDecryptionError(e.to_string()))?;
|
||||
let raw_key = mkey
|
||||
.decrypt(decoded_key.as_slice())
|
||||
.map_err(|e| WorkspaceKeyDecryptionError(e.to_string()))?;
|
||||
info!("Got existing workspace key for {workspace_id}");
|
||||
let wkey = WorkspaceKey::from_raw_key(raw_key.as_slice());
|
||||
|
||||
Ok(wkey)
|
||||
}
|
||||
|
||||
fn get_master_key(&self) -> Result<MasterKey> {
|
||||
// NOTE: This locks the key for the entire function which seems wrong, but this prevents
|
||||
// concurrent access from prompting the user for a keychain password multiple times.
|
||||
let mut master_secret = self.cached_master_key.lock().unwrap();
|
||||
if let Some(k) = master_secret.as_ref() {
|
||||
return Ok(k.to_owned());
|
||||
}
|
||||
|
||||
let mkey = MasterKey::get_or_create(&self.app_id, KEY_USER)?;
|
||||
*master_secret = Some(mkey.clone());
|
||||
Ok(mkey)
|
||||
}
|
||||
}
|
||||
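The manager above layers two keys: the keychain-backed master key wraps each per-workspace key, and the workspace key encrypts the actual data plus a random challenge that is later decrypted to verify a recovered key. A minimal sketch of the XChaCha20Poly1305 round trip that the `encrypt_data`/`decrypt_data` helpers build on, using the same `chacha20poly1305` crate the modules import; the explicit nonce handling here is an assumption, since those helpers are not shown in this diff:

use chacha20poly1305::aead::{Aead, AeadCore, KeyInit, OsRng};
use chacha20poly1305::XChaCha20Poly1305;

fn main() {
    // Generate a random 256-bit key, as MasterKey and WorkspaceKey both do.
    let key = XChaCha20Poly1305::generate_key(OsRng);
    let cipher = XChaCha20Poly1305::new(&key);

    // XChaCha20Poly1305 uses a 24-byte nonce, large enough to pick randomly.
    let nonce = XChaCha20Poly1305::generate_nonce(OsRng);

    let ciphertext = cipher.encrypt(&nonce, b"hello".as_ref()).expect("encrypt");
    let plaintext = cipher.decrypt(&nonce, ciphertext.as_ref()).expect("decrypt");
    assert_eq!(plaintext, b"hello");
}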
src-tauri/yaak-crypto/src/master_key.rs (new file, 79 lines)
@@ -0,0 +1,79 @@
use crate::encryption::{decrypt_data, encrypt_data};
use crate::error::Error::GenericError;
use crate::error::Result;
use base32::Alphabet;
use chacha20poly1305::aead::{Key, KeyInit, OsRng};
use chacha20poly1305::XChaCha20Poly1305;
use keyring::{Entry, Error};
use log::info;

const HUMAN_PREFIX: &str = "YKM_";

#[derive(Debug, Clone)]
pub(crate) struct MasterKey {
    key: Key<XChaCha20Poly1305>,
}

impl MasterKey {
    pub(crate) fn get_or_create(app_id: &str, user: &str) -> Result<Self> {
        let id = format!("{app_id}.EncryptionKey");
        let entry = Entry::new(&id, user)?;

        let key = match entry.get_password() {
            Ok(encoded) => {
                let without_prefix = encoded.strip_prefix(HUMAN_PREFIX).unwrap_or(&encoded);
                let key_bytes = base32::decode(Alphabet::Crockford {}, &without_prefix)
                    .ok_or(GenericError("Failed to decode master key".to_string()))?;
                Key::<XChaCha20Poly1305>::clone_from_slice(key_bytes.as_slice())
            }
            Err(Error::NoEntry) => {
                info!("Creating new master key");
                let key = XChaCha20Poly1305::generate_key(OsRng);
                let encoded = base32::encode(Alphabet::Crockford {}, key.as_slice());
                let with_prefix = format!("{HUMAN_PREFIX}{encoded}");
                entry.set_password(&with_prefix)?;
                key
            }
            Err(e) => return Err(GenericError(e.to_string())),
        };

        Ok(Self { key })
    }

    pub(crate) fn encrypt(&self, data: &[u8]) -> Result<Vec<u8>> {
        encrypt_data(data, &self.key)
    }

    pub(crate) fn decrypt(&self, data: &[u8]) -> Result<Vec<u8>> {
        decrypt_data(data, &self.key)
    }

    #[cfg(test)]
    pub(crate) fn test_key() -> Self {
        let key: Key<XChaCha20Poly1305> = Key::<XChaCha20Poly1305>::clone_from_slice(
            "00000000000000000000000000000000".as_bytes(),
        );
        Self { key }
    }
}

#[cfg(test)]
mod tests {
    use crate::error::Result;
    use crate::master_key::MasterKey;

    #[test]
    fn test_master_key() -> Result<()> {
        // Test out the master key
        let mkey = MasterKey::test_key();
        let encrypted = mkey.encrypt("hello".as_bytes())?;
        let decrypted = mkey.decrypt(encrypted.as_slice()).unwrap();
        assert_eq!(decrypted, "hello".as_bytes().to_vec());

        let mkey = MasterKey::test_key();
        let decrypted = mkey.decrypt(encrypted.as_slice()).unwrap();
        assert_eq!(decrypted, "hello".as_bytes().to_vec());

        Ok(())
    }
}
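`get_or_create` leans on the `keyring` crate's three-way result: an existing entry is decoded, `Error::NoEntry` triggers key creation, and anything else is fatal. A stripped-down sketch of that pattern with hypothetical service and user names ("app.example.EncryptionKey", "default") and a placeholder secret; the real code derives these from `app_id` and `KEY_USER`:

use keyring::{Entry, Error};

fn main() -> keyring::Result<()> {
    // Hypothetical identifiers, purely for illustration.
    let entry = Entry::new("app.example.EncryptionKey", "default")?;

    match entry.get_password() {
        Ok(secret) => println!("found existing secret ({} chars)", secret.len()),
        Err(Error::NoEntry) => {
            // First run: nothing stored yet, so create and persist a value.
            entry.set_password("YKM_PLACEHOLDER")?;
            println!("stored a new secret");
        }
        Err(e) => return Err(e),
    }
    Ok(())
}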
src-tauri/yaak-crypto/src/workspace_key.rs (new file, 116 lines)
@@ -0,0 +1,116 @@
use crate::encryption::{decrypt_data, encrypt_data};
use crate::error::Error::InvalidHumanKey;
use crate::error::Result;
use base32::Alphabet;
use chacha20poly1305::aead::{Key, KeyInit, OsRng};
use chacha20poly1305::{KeySizeUser, XChaCha20Poly1305};

#[derive(Debug, Clone)]
pub struct WorkspaceKey {
    key: Key<XChaCha20Poly1305>,
}

const HUMAN_PREFIX: &str = "YK";

impl WorkspaceKey {
    pub(crate) fn to_human(&self) -> Result<String> {
        let encoded = base32::encode(Alphabet::Crockford {}, self.key.as_slice());
        let with_prefix = format!("{HUMAN_PREFIX}{encoded}");
        let with_separators = with_prefix
            .chars()
            .collect::<Vec<_>>()
            .chunks(6)
            .map(|chunk| chunk.iter().collect::<String>())
            .collect::<Vec<_>>()
            .join("-");
        Ok(with_separators)
    }

    #[allow(dead_code)]
    pub(crate) fn from_human(human_key: &str) -> Result<Self> {
        let without_prefix = human_key.strip_prefix(HUMAN_PREFIX).unwrap_or(human_key);
        let without_separators = without_prefix.replace("-", "");
        let key =
            base32::decode(Alphabet::Crockford {}, &without_separators).ok_or(InvalidHumanKey)?;
        if key.len() != XChaCha20Poly1305::key_size() {
            return Err(InvalidHumanKey);
        }
        Ok(Self::from_raw_key(key.as_slice()))
    }

    pub(crate) fn from_raw_key(key: &[u8]) -> Self {
        Self {
            key: Key::<XChaCha20Poly1305>::clone_from_slice(key),
        }
    }

    pub(crate) fn raw_key(&self) -> &[u8] {
        self.key.as_slice()
    }

    pub(crate) fn create() -> Result<Self> {
        let key = XChaCha20Poly1305::generate_key(OsRng);
        Ok(Self::from_raw_key(key.as_slice()))
    }

    pub(crate) fn encrypt(&self, data: &[u8]) -> Result<Vec<u8>> {
        encrypt_data(data, &self.key)
    }

    pub(crate) fn decrypt(&self, data: &[u8]) -> Result<Vec<u8>> {
        decrypt_data(data, &self.key)
    }

    #[cfg(test)]
    pub(crate) fn test_key() -> Self {
        Self::from_raw_key("f1a2d4b3c8e799af1456be3478a4c3f2".as_bytes())
    }
}

#[cfg(test)]
mod tests {
    use crate::error::Error::InvalidHumanKey;
    use crate::error::Result;
    use crate::workspace_key::WorkspaceKey;

    #[test]
    fn test_persisted_key() -> Result<()> {
        let key = WorkspaceKey::test_key();
        let encrypted = key.encrypt("hello".as_bytes())?;
        assert_eq!(key.decrypt(encrypted.as_slice())?, "hello".as_bytes());

        Ok(())
    }

    #[test]
    fn test_human_format() -> Result<()> {
        let key = WorkspaceKey::test_key();

        let encrypted = key.encrypt("hello".as_bytes())?;
        assert_eq!(key.decrypt(encrypted.as_slice())?, "hello".as_bytes());

        let human = key.to_human()?;
        assert_eq!(human, "YKCRRP-2CK46H-H36RSR-CMVKJE-B1CRRK-8D9PC9-JK6D1Q-71GK8R-SKCRS0");
        assert_eq!(
            WorkspaceKey::from_human(&human)?.decrypt(encrypted.as_slice())?,
            "hello".as_bytes()
        );

        Ok(())
    }

    #[test]
    fn test_from_human_invalid() -> Result<()> {
        assert!(matches!(
            WorkspaceKey::from_human(
                "YKCRRP-2CK46H-H36RSR-CMVKJE-B1CRRK-8D9PC9-JK6D1Q-71GK8R-SKCRS0-H3X38D",
            ),
            Err(InvalidHumanKey)
        ));

        assert!(matches!(WorkspaceKey::from_human("bad-key"), Err(InvalidHumanKey)));
        assert!(matches!(WorkspaceKey::from_human(""), Err(InvalidHumanKey)));

        Ok(())
    }
}
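The human-readable form is just the "YK" prefix plus the Crockford base32 key, re-grouped into 6-character blocks. The grouping logic in `to_human` can be exercised on its own; a small self-contained sketch, where the input string is illustrative rather than a real key:

// Group a string into dash-separated 6-character chunks, as to_human() does.
fn group_by_six(s: &str) -> String {
    s.chars()
        .collect::<Vec<_>>()
        .chunks(6)
        .map(|chunk| chunk.iter().collect::<String>())
        .collect::<Vec<_>>()
        .join("-")
}

fn main() {
    assert_eq!(group_by_six("YKCRRP2CK46HH36RSR"), "YKCRRP-2CK46H-H36RSR");
    // from_human() simply reverses this: strip "YK", drop dashes, base32-decode.
}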
@@ -2,15 +2,15 @@
 name = "yaak-git"
 links = "yaak-git"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 publish = false
 
 [dependencies]
 chrono = { version = "0.4.38", features = ["serde"] }
-git2 = { version = "0.20.0" , features = ["vendored-libgit2", "vendored-openssl"]}
+git2 = { version = "0.20.0", features = ["vendored-libgit2", "vendored-openssl"] }
 log = "0.4.22"
-serde = { version = "1.0.215", features = ["derive"] }
-serde_json = "1.0.132"
+serde = { workspace = true, features = ["derive"] }
+serde_json = { workspace = true }
 serde_yaml = "0.9.34"
 tauri = { workspace = true }
 thiserror = { workspace = true }
@@ -19,4 +19,4 @@ yaak-models = { workspace = true }
 yaak-sync = { workspace = true }
 
 [build-dependencies]
-tauri-plugin = { version = "2.0.3", features = ["build"] }
+tauri-plugin = { workspace = true, features = ["build"] }
@@ -1,6 +1,6 @@
 // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
 
-export type Environment = { model: "environment", id: string, workspaceId: string, environmentId: string | null, createdAt: string, updatedAt: string, name: string, variables: Array<EnvironmentVariable>, };
+export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, base: boolean, variables: Array<EnvironmentVariable>, };
 
 export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };
 
@@ -20,4 +20,4 @@ export type SyncModel = { "type": "workspace" } & Workspace | { "type": "environ
 
 export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
 
-export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, name: string, description: string, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
+export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, name: string, description: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
@@ -52,7 +52,7 @@ export function useGit(dir: string) {
       onSuccess,
     }),
     commitAndPush: useMutation<PushResult, string, { message: string }>({
-      mutationKey: ['git', 'commitpush', dir],
+      mutationKey: ['git', 'commit_push', dir],
       mutationFn: async (args) => {
        await invoke('plugin:yaak-git|commit', { dir, ...args });
        return invoke('plugin:yaak-git|push', { dir });
@@ -79,10 +79,18 @@ export function useGit(dir: string) {
       mutationFn: (args) => invoke('plugin:yaak-git|unstage', { dir, ...args }),
       onSuccess,
     }),
+    init: useGitInit(),
   },
 ] as const;
 }
 
-export async function gitInit(dir: string) {
-  await invoke('plugin:yaak-git|initialize', { dir });
+export function useGitInit() {
+  const queryClient = useQueryClient();
+  const onSuccess = () => queryClient.invalidateQueries({ queryKey: ['git'] });
+
+  return useMutation<void, string, { dir: string }>({
+    mutationKey: ['git', 'init'],
+    mutationFn: (args) => invoke('plugin:yaak-git|initialize', { ...args }),
+    onSuccess,
+  });
 }
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-add"
description = "Enables the add command without any pre-configured scope."
commands.allow = ["add"]

[[permission]]
identifier = "deny-add"
description = "Denies the add command without any pre-configured scope."
commands.deny = ["add"]
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-branch"
description = "Enables the branch command without any pre-configured scope."
commands.allow = ["branch"]

[[permission]]
identifier = "deny-branch"
description = "Denies the branch command without any pre-configured scope."
commands.deny = ["branch"]
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-checkout"
description = "Enables the checkout command without any pre-configured scope."
commands.allow = ["checkout"]

[[permission]]
identifier = "deny-checkout"
description = "Denies the checkout command without any pre-configured scope."
commands.deny = ["checkout"]
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-checkout-remote"
description = "Enables the checkout_remote command without any pre-configured scope."
commands.allow = ["checkout_remote"]

[[permission]]
identifier = "deny-checkout-remote"
description = "Denies the checkout_remote command without any pre-configured scope."
commands.deny = ["checkout_remote"]
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-commit"
description = "Enables the commit command without any pre-configured scope."
commands.allow = ["commit"]

[[permission]]
identifier = "deny-commit"
description = "Denies the commit command without any pre-configured scope."
commands.deny = ["commit"]
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-delete-branch"
description = "Enables the delete_branch command without any pre-configured scope."
commands.allow = ["delete_branch"]

[[permission]]
identifier = "deny-delete-branch"
description = "Denies the delete_branch command without any pre-configured scope."
commands.deny = ["delete_branch"]
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-fetch-all"
description = "Enables the fetch_all command without any pre-configured scope."
commands.allow = ["fetch_all"]

[[permission]]
identifier = "deny-fetch-all"
description = "Denies the fetch_all command without any pre-configured scope."
commands.deny = ["fetch_all"]
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-initialize"
description = "Enables the initialize command without any pre-configured scope."
commands.allow = ["initialize"]

[[permission]]
identifier = "deny-initialize"
description = "Denies the initialize command without any pre-configured scope."
commands.deny = ["initialize"]
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-log"
description = "Enables the log command without any pre-configured scope."
commands.allow = ["log"]

[[permission]]
identifier = "deny-log"
description = "Denies the log command without any pre-configured scope."
commands.deny = ["log"]
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-merge-branch"
description = "Enables the merge_branch command without any pre-configured scope."
commands.allow = ["merge_branch"]

[[permission]]
identifier = "deny-merge-branch"
description = "Denies the merge_branch command without any pre-configured scope."
commands.deny = ["merge_branch"]
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-pull"
description = "Enables the pull command without any pre-configured scope."
commands.allow = ["pull"]

[[permission]]
identifier = "deny-pull"
description = "Denies the pull command without any pre-configured scope."
commands.deny = ["pull"]
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-push"
description = "Enables the push command without any pre-configured scope."
commands.allow = ["push"]

[[permission]]
identifier = "deny-push"
description = "Denies the push command without any pre-configured scope."
commands.deny = ["push"]
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-status"
description = "Enables the status command without any pre-configured scope."
commands.allow = ["status"]

[[permission]]
identifier = "deny-status"
description = "Denies the status command without any pre-configured scope."
commands.deny = ["status"]
@@ -1,13 +0,0 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-unstage"
description = "Enables the unstage command without any pre-configured scope."
commands.allow = ["unstage"]

[[permission]]
identifier = "deny-unstage"
description = "Denies the unstage command without any pre-configured scope."
commands.deny = ["unstage"]
@@ -1,391 +0,0 @@
## Default Permission

Default permissions for the plugin

- `allow-add`
- `allow-branch`
- `allow-checkout`
- `allow-commit`
- `allow-delete-branch`
- `allow-fetch-all`
- `allow-initialize`
- `allow-log`
- `allow-merge-branch`
- `allow-pull`
- `allow-push`
- `allow-status`
- `allow-unstage`

## Permission Table

| Identifier | Description |
|---|---|
| `yaak-git:allow-add` | Enables the add command without any pre-configured scope. |
| `yaak-git:deny-add` | Denies the add command without any pre-configured scope. |
| `yaak-git:allow-branch` | Enables the branch command without any pre-configured scope. |
| `yaak-git:deny-branch` | Denies the branch command without any pre-configured scope. |
| `yaak-git:allow-checkout` | Enables the checkout command without any pre-configured scope. |
| `yaak-git:deny-checkout` | Denies the checkout command without any pre-configured scope. |
| `yaak-git:allow-checkout-remote` | Enables the checkout_remote command without any pre-configured scope. |
| `yaak-git:deny-checkout-remote` | Denies the checkout_remote command without any pre-configured scope. |
| `yaak-git:allow-commit` | Enables the commit command without any pre-configured scope. |
| `yaak-git:deny-commit` | Denies the commit command without any pre-configured scope. |
| `yaak-git:allow-delete-branch` | Enables the delete_branch command without any pre-configured scope. |
| `yaak-git:deny-delete-branch` | Denies the delete_branch command without any pre-configured scope. |
| `yaak-git:allow-fetch-all` | Enables the fetch_all command without any pre-configured scope. |
| `yaak-git:deny-fetch-all` | Denies the fetch_all command without any pre-configured scope. |
| `yaak-git:allow-initialize` | Enables the initialize command without any pre-configured scope. |
| `yaak-git:deny-initialize` | Denies the initialize command without any pre-configured scope. |
| `yaak-git:allow-log` | Enables the log command without any pre-configured scope. |
| `yaak-git:deny-log` | Denies the log command without any pre-configured scope. |
| `yaak-git:allow-merge-branch` | Enables the merge_branch command without any pre-configured scope. |
| `yaak-git:deny-merge-branch` | Denies the merge_branch command without any pre-configured scope. |
| `yaak-git:allow-pull` | Enables the pull command without any pre-configured scope. |
| `yaak-git:deny-pull` | Denies the pull command without any pre-configured scope. |
| `yaak-git:allow-push` | Enables the push command without any pre-configured scope. |
| `yaak-git:deny-push` | Denies the push command without any pre-configured scope. |
| `yaak-git:allow-status` | Enables the status command without any pre-configured scope. |
| `yaak-git:deny-status` | Denies the status command without any pre-configured scope. |
| `yaak-git:allow-unstage` | Enables the unstage command without any pre-configured scope. |
| `yaak-git:deny-unstage` | Denies the unstage command without any pre-configured scope. |
@@ -1,445 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "PermissionFile",
  "description": "Permission file that can define a default permission, a set of permissions or a list of inlined permissions.",
  "type": "object",
  "properties": {
    "default": {
      "description": "The default permission set for the plugin",
      "anyOf": [{ "$ref": "#/definitions/DefaultPermission" }, { "type": "null" }]
    },
    "set": {
      "description": "A list of permissions sets defined",
      "type": "array",
      "items": { "$ref": "#/definitions/PermissionSet" }
    },
    "permission": {
      "description": "A list of inlined permissions",
      "default": [],
      "type": "array",
      "items": { "$ref": "#/definitions/Permission" }
    }
  },
  "definitions": {
    "DefaultPermission": {
      "description": "The default permission set of the plugin.\n\nWorks similarly to a permission with the \"default\" identifier.",
      "type": "object",
      "required": ["permissions"],
      "properties": {
        "version": {
          "description": "The version of the permission.",
          "type": ["integer", "null"],
          "format": "uint64",
          "minimum": 1.0
        },
        "description": {
          "description": "Human-readable description of what the permission does. Tauri convention is to use <h4> headings in markdown content for Tauri documentation generation purposes.",
          "type": ["string", "null"]
        },
        "permissions": {
          "description": "All permissions this set contains.",
          "type": "array",
          "items": { "type": "string" }
        }
      }
    },
    "PermissionSet": {
      "description": "A set of direct permissions grouped together under a new name.",
      "type": "object",
      "required": ["description", "identifier", "permissions"],
      "properties": {
        "identifier": { "description": "A unique identifier for the permission.", "type": "string" },
        "description": { "description": "Human-readable description of what the permission does.", "type": "string" },
        "permissions": {
          "description": "All permissions this set contains.",
          "type": "array",
          "items": { "$ref": "#/definitions/PermissionKind" }
        }
      }
    },
    "Permission": {
      "description": "Descriptions of explicit privileges of commands.\n\nIt can enable commands to be accessible in the frontend of the application.\n\nIf the scope is defined it can be used to fine grain control the access of individual or multiple commands.",
      "type": "object",
      "required": ["identifier"],
      "properties": {
        "version": {
          "description": "The version of the permission.",
          "type": ["integer", "null"],
          "format": "uint64",
          "minimum": 1.0
        },
        "identifier": { "description": "A unique identifier for the permission.", "type": "string" },
        "description": {
          "description": "Human-readable description of what the permission does. Tauri internal convention is to use <h4> headings in markdown content for Tauri documentation generation purposes.",
          "type": ["string", "null"]
        },
        "commands": {
          "description": "Allowed or denied commands when using this permission.",
          "default": { "allow": [], "deny": [] },
          "allOf": [{ "$ref": "#/definitions/Commands" }]
        },
        "scope": {
          "description": "Allowed or denied scoped when using this permission.",
          "allOf": [{ "$ref": "#/definitions/Scopes" }]
        },
        "platforms": {
          "description": "Target platforms this permission applies. By default all platforms are affected by this permission.",
          "type": ["array", "null"],
          "items": { "$ref": "#/definitions/Target" }
        }
      }
    },
    "Commands": {
      "description": "Allowed and denied commands inside a permission.\n\nIf two commands clash inside of `allow` and `deny`, it should be denied by default.",
      "type": "object",
      "properties": {
        "allow": {
          "description": "Allowed command.",
          "default": [],
          "type": "array",
          "items": { "type": "string" }
        },
        "deny": {
          "description": "Denied command, which takes priority.",
          "default": [],
          "type": "array",
          "items": { "type": "string" }
        }
      }
    },
    "Scopes": {
      "description": "An argument for fine grained behavior control of Tauri commands.\n\nIt can be of any serde serializable type and is used to allow or prevent certain actions inside a Tauri command. The configured scope is passed to the command and will be enforced by the command implementation.\n\n## Example\n\n```json { \"allow\": [{ \"path\": \"$HOME/**\" }], \"deny\": [{ \"path\": \"$HOME/secret.txt\" }] } ```",
      "type": "object",
      "properties": {
        "allow": {
          "description": "Data that defines what is allowed by the scope.",
          "type": ["array", "null"],
          "items": { "$ref": "#/definitions/Value" }
        },
        "deny": {
          "description": "Data that defines what is denied by the scope. This should be prioritized by validation logic.",
          "type": ["array", "null"],
          "items": { "$ref": "#/definitions/Value" }
        }
      }
    },
    "Value": {
      "description": "All supported ACL values.",
      "anyOf": [
        { "description": "Represents a null JSON value.", "type": "null" },
        { "description": "Represents a [`bool`].", "type": "boolean" },
        { "description": "Represents a valid ACL [`Number`].", "allOf": [{ "$ref": "#/definitions/Number" }] },
        { "description": "Represents a [`String`].", "type": "string" },
        { "description": "Represents a list of other [`Value`]s.", "type": "array", "items": { "$ref": "#/definitions/Value" } },
        { "description": "Represents a map of [`String`] keys to [`Value`]s.", "type": "object", "additionalProperties": { "$ref": "#/definitions/Value" } }
      ]
    },
    "Number": {
      "description": "A valid ACL number.",
      "anyOf": [
        { "description": "Represents an [`i64`].", "type": "integer", "format": "int64" },
        { "description": "Represents a [`f64`].", "type": "number", "format": "double" }
      ]
    },
    "Target": {
      "description": "Platform target.",
      "oneOf": [
        { "description": "MacOS.", "type": "string", "enum": ["macOS"] },
        { "description": "Windows.", "type": "string", "enum": ["windows"] },
        { "description": "Linux.", "type": "string", "enum": ["linux"] },
        { "description": "Android.", "type": "string", "enum": ["android"] },
        { "description": "iOS.", "type": "string", "enum": ["iOS"] }
      ]
    },
    "PermissionKind": {
      "type": "string",
      "oneOf": [
        { "description": "Enables the add command without any pre-configured scope.", "type": "string", "const": "allow-add" },
        { "description": "Denies the add command without any pre-configured scope.", "type": "string", "const": "deny-add" },
        { "description": "Enables the branch command without any pre-configured scope.", "type": "string", "const": "allow-branch" },
        { "description": "Denies the branch command without any pre-configured scope.", "type": "string", "const": "deny-branch" },
        { "description": "Enables the checkout command without any pre-configured scope.", "type": "string", "const": "allow-checkout" },
        { "description": "Denies the checkout command without any pre-configured scope.", "type": "string", "const": "deny-checkout" },
        { "description": "Enables the checkout_remote command without any pre-configured scope.", "type": "string", "const": "allow-checkout-remote" },
        { "description": "Denies the checkout_remote command without any pre-configured scope.", "type": "string", "const": "deny-checkout-remote" },
        { "description": "Enables the commit command without any pre-configured scope.", "type": "string", "const": "allow-commit" },
        { "description": "Denies the commit command without any pre-configured scope.", "type": "string", "const": "deny-commit" },
        { "description": "Enables the delete_branch command without any pre-configured scope.", "type": "string", "const": "allow-delete-branch" },
        { "description": "Denies the delete_branch command without any pre-configured scope.", "type": "string", "const": "deny-delete-branch" },
        { "description": "Enables the fetch_all command without any pre-configured scope.", "type": "string", "const": "allow-fetch-all" },
        { "description": "Denies the fetch_all command without any pre-configured scope.", "type": "string", "const": "deny-fetch-all" },
        { "description": "Enables the initialize command without any pre-configured scope.", "type": "string", "const": "allow-initialize" },
        { "description": "Denies the initialize command without any pre-configured scope.", "type": "string", "const": "deny-initialize" },
        { "description": "Enables the log command without any pre-configured scope.", "type": "string", "const": "allow-log" },
        { "description": "Denies the log command without any pre-configured scope.", "type": "string", "const": "deny-log" },
        { "description": "Enables the merge_branch command without any pre-configured scope.", "type": "string", "const": "allow-merge-branch" },
        { "description": "Denies the merge_branch command without any pre-configured scope.", "type": "string", "const": "deny-merge-branch" },
        { "description": "Enables the pull command without any pre-configured scope.", "type": "string", "const": "allow-pull" },
        { "description": "Denies the pull command without any pre-configured scope.", "type": "string", "const": "deny-pull" },
        { "description": "Enables the push command without any pre-configured scope.", "type": "string", "const": "allow-push" },
        { "description": "Denies the push command without any pre-configured scope.", "type": "string", "const": "deny-push" },
        { "description": "Enables the status command without any pre-configured scope.", "type": "string", "const": "allow-status" },
        { "description": "Denies the status command without any pre-configured scope.", "type": "string", "const": "deny-status" },
        { "description": "Enables the unstage command without any pre-configured scope.", "type": "string", "const": "allow-unstage" },
        { "description": "Denies the unstage command without any pre-configured scope.", "type": "string", "const": "deny-unstage" },
        { "description": "Default permissions for the plugin", "type": "string", "const": "default" }
      ]
    }
  }
}
@@ -117,9 +117,9 @@ pub fn git_commit(dir: &Path, message: &str) -> Result<()> {
     let tree = repo.find_tree(tree_oid)?;
 
     // Make the signature
-    let config = git2::Config::open_default()?.snapshot()?;
-    let name = config.get_str("user.name").unwrap_or("Change Me");
-    let email = config.get_str("user.email").unwrap_or("change_me@example.com");
+    let config = repo.config()?.snapshot()?;
+    let name = config.get_str("user.name").unwrap_or("Unknown");
+    let email = config.get_str("user.email")?;
     let sig = git2::Signature::now(name, email)?;
 
     // Get the current HEAD commit (if it exists)
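This change swaps `git2::Config::open_default()` for `repo.config()`, so the signature respects repository-local `user.name`/`user.email` (with global config as fallback) and now fails when no email is configured rather than inventing one. A hedged sketch of the new lookup in isolation, run against whatever repository is in the current directory:

use git2::Repository;

fn main() -> Result<(), git2::Error> {
    let repo = Repository::open(".")?;

    // Repository config: local values first, falling back to global/system ones.
    let config = repo.config()?.snapshot()?;
    let name = config.get_str("user.name").unwrap_or("Unknown");
    let email = config.get_str("user.email")?; // hard error if unset

    let sig = git2::Signature::now(name, email)?;
    println!("would commit as {}", sig);
    Ok(())
}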
@@ -8,7 +8,7 @@ use tauri::{
 mod branch;
 mod callbacks;
 mod commands;
-mod error;
+pub mod error;
 mod fetch;
 mod git;
 mod merge;
@@ -1,11 +1,11 @@
 [package]
 name = "yaak-grpc"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 publish = false
 
 [dependencies]
-anyhow = "1.0.79"
+anyhow = "1.0.97"
 async-recursion = "1.1.1"
 dunce = "1.0.4"
 hyper = "1.5.2"
@@ -18,11 +18,11 @@ md5 = "0.7.0"
 prost = "0.13.4"
 prost-reflect = { version = "0.14.4", default-features = false, features = ["serde", "derive"] }
 prost-types = "0.13.4"
-serde = { version = "1.0.196", features = ["derive"] }
-serde_json = "1.0.113"
+serde = { workspace = true, features = ["derive"] }
+serde_json = { workspace = true }
 tauri = { workspace = true }
 tauri-plugin-shell = { workspace = true }
-tokio = { version = "1.0", features = ["macros", "rt-multi-thread", "fs"] }
+tokio = { workspace = true, features = ["macros", "rt-multi-thread", "fs"] }
 tokio-stream = "0.1.14"
 tonic = { version = "0.12.3", default-features = false, features = ["transport"] }
 tonic-reflection = "0.12.3"
@@ -1,13 +1,15 @@
+use crate::manager::decorate_req;
 use crate::transport::get_transport;
 use async_recursion::async_recursion;
 use hyper_rustls::HttpsConnector;
-use hyper_util::client::legacy::connect::HttpConnector;
 use hyper_util::client::legacy::Client;
+use hyper_util::client::legacy::connect::HttpConnector;
 use log::debug;
+use std::collections::BTreeMap;
 use tokio_stream::StreamExt;
-use tonic::Request;
 use tonic::body::BoxBody;
 use tonic::transport::Uri;
+use tonic::Request;
 use tonic_reflection::pb::v1::server_reflection_request::MessageRequest;
 use tonic_reflection::pb::v1::server_reflection_response::MessageResponse;
 use tonic_reflection::pb::v1::{
@@ -44,6 +46,7 @@ impl AutoReflectionClient {
     pub async fn send_reflection_request(
         &mut self,
         message: MessageRequest,
+        metadata: &BTreeMap<String, String>,
     ) -> Result<MessageResponse, String> {
         let reflection_request = ServerReflectionRequest {
             host: "".into(), // Doesn't matter
@@ -51,7 +54,9 @@ impl AutoReflectionClient {
         };
 
         if self.use_v1alpha {
-            let request = Request::new(tokio_stream::once(to_v1alpha_request(reflection_request)));
+            let mut request = Request::new(tokio_stream::once(to_v1alpha_request(reflection_request)));
+            decorate_req(metadata, &mut request).map_err(|e| e.to_string())?;
+
             self.client_v1alpha
                 .server_reflection_info(request)
                 .await
@@ -70,7 +75,9 @@ impl AutoReflectionClient {
                 .ok_or("No reflection response".to_string())
                 .map(|resp| to_v1_msg_response(resp))
         } else {
-            let request = Request::new(tokio_stream::once(reflection_request));
+            let mut request = Request::new(tokio_stream::once(reflection_request));
+            decorate_req(metadata, &mut request).map_err(|e| e.to_string())?;
+
             let resp = self.client_v1.server_reflection_info(request).await;
             match resp {
                 Ok(r) => Ok(r),
@@ -79,7 +86,7 @@ impl AutoReflectionClient {
                     // If v1 fails, change to v1alpha and try again
                     debug!("gRPC schema reflection falling back to v1alpha");
                     self.use_v1alpha = true;
-                    return self.send_reflection_request(message).await;
+                    return self.send_reflection_request(message, metadata).await;
                 }
                 _ => Err(e),
             },
@@ -1,16 +1,256 @@
|
||||
use prost_reflect::{DescriptorPool, MessageDescriptor};
|
||||
use prost_types::field_descriptor_proto;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use prost_reflect::{DescriptorPool, FieldDescriptor, MessageDescriptor};
|
||||
use std::collections::{HashMap, HashSet, VecDeque};
|
||||
|
||||
#[derive(Default, Serialize, Deserialize)]
|
||||
pub fn message_to_json_schema(_: &DescriptorPool, root_msg: MessageDescriptor) -> JsonSchemaEntry {
|
||||
JsonSchemaGenerator::generate_json_schema(root_msg)
|
||||
}
|
||||
|
||||
struct JsonSchemaGenerator {
|
||||
msg_mapping: HashMap<String, JsonSchemaEntry>,
|
||||
}
|
||||
|
||||
impl JsonSchemaGenerator {
|
||||
pub fn new() -> Self {
|
||||
JsonSchemaGenerator {
|
||||
msg_mapping: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn generate_json_schema(msg: MessageDescriptor) -> JsonSchemaEntry {
|
||||
let generator = JsonSchemaGenerator::new();
|
||||
generator.scan_root(msg)
|
||||
}
|
||||
|
||||
fn add_message(&mut self, msg: &MessageDescriptor) {
|
||||
let name = msg.full_name().to_string();
|
||||
if self.msg_mapping.contains_key(&name) {
|
||||
return;
|
||||
}
|
||||
self.msg_mapping.insert(name.clone(), JsonSchemaEntry::object());
|
||||
}
|
||||
|
||||
pub fn scan_root(mut self, root_msg: MessageDescriptor) -> JsonSchemaEntry {
|
||||
self.init_structure(root_msg.clone());
|
||||
self.fill_properties(root_msg.clone());
|
||||
|
||||
let mut root = self.msg_mapping.remove(root_msg.full_name()).unwrap();
|
||||
|
||||
if self.msg_mapping.len() > 0 {
|
||||
root.defs = Some(self.msg_mapping);
|
||||
}
|
||||
root
|
||||
}
|
||||
|
||||
fn fill_properties(&mut self, root_msg: MessageDescriptor) {
|
||||
let root_name = root_msg.full_name().to_string();
|
||||
|
||||
let mut visited = HashSet::new();
|
||||
let mut msg_queue = VecDeque::new();
|
||||
msg_queue.push_back(root_msg);
|
||||
|
||||
while !msg_queue.is_empty() {
|
||||
let msg = msg_queue.pop_front().unwrap();
|
||||
let msg_name = msg.full_name();
|
||||
if visited.contains(msg_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
visited.insert(msg_name.to_string());
|
||||
|
||||
let entry = self.msg_mapping.get_mut(msg_name).unwrap();
|
||||
|
||||
for field in msg.fields() {
|
||||
let field_name = field.name().to_string();
|
||||
|
||||
if matches!(field.cardinality(), prost_reflect::Cardinality::Required) {
|
||||
entry.add_required(field_name.clone());
|
||||
}
|
||||
|
||||
if let Some(oneof) = field.containing_oneof() {
|
||||
for oneof_field in oneof.fields() {
|
||||
if let Some(fm) = is_message_field(&oneof_field) {
|
||||
msg_queue.push_back(fm);
|
||||
}
|
||||
entry.add_property(
|
||||
oneof_field.name().to_string(),
|
||||
field_to_type_or_ref(&root_name, oneof_field),
|
||||
);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
let (field_type, nest_msg) = {
|
||||
if let Some(fm) = is_message_field(&field) {
|
||||
if field.is_list() {
|
||||
// repeated message type
|
||||
(
|
||||
JsonSchemaEntry::array(field_to_type_or_ref(&root_name, field)),
|
||||
Some(fm),
|
||||
)
|
||||
} else if field.is_map() {
|
||||
let value_field = fm.get_field_by_name("value").unwrap();
|
||||
|
||||
if let Some(fm) = is_message_field(&value_field) {
|
||||
(
|
||||
JsonSchemaEntry::map(field_to_type_or_ref(
|
||||
&root_name,
|
||||
value_field,
|
||||
)),
|
||||
Some(fm),
|
||||
)
|
||||
} else {
|
||||
(
|
||||
JsonSchemaEntry::map(field_to_type_or_ref(
|
||||
&root_name,
|
||||
value_field,
|
||||
)),
|
||||
None,
|
||||
)
|
||||
}
|
||||
} else {
|
||||
(field_to_type_or_ref(&root_name, field), Some(fm))
|
||||
}
|
||||
} else {
|
||||
if field.is_list() {
|
||||
// repeated scalar type
|
||||
(JsonSchemaEntry::array(field_to_type_or_ref(&root_name, field)), None)
|
||||
} else {
|
||||
(field_to_type_or_ref(&root_name, field), None)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(fm) = nest_msg {
|
||||
msg_queue.push_back(fm);
|
||||
}
|
||||
|
||||
entry.add_property(field_name, field_type);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn init_structure(&mut self, root_msg: MessageDescriptor) {
|
||||
let mut visited = HashSet::new();
|
||||
let mut msg_queue = VecDeque::new();
|
||||
msg_queue.push_back(root_msg.clone());
|
||||
|
||||
// level traversal, to make sure all message type is defined before used
|
||||
while !msg_queue.is_empty() {
|
||||
let msg = msg_queue.pop_front().unwrap();
|
||||
let name = msg.full_name();
|
||||
if visited.contains(name) {
|
||||
continue;
|
||||
}
|
||||
visited.insert(name.to_string());
|
||||
self.add_message(&msg);
|
||||
|
||||
for child in msg.child_messages() {
|
||||
if child.is_map_entry() {
|
||||
// for field with map<key, value> type, there will be a child message type *Entry generated
|
||||
// just skip it
|
||||
continue;
|
||||
}
|
||||
|
||||
self.add_message(&child);
|
||||
msg_queue.push_back(child);
|
||||
}
|
||||
|
||||
for field in msg.fields() {
|
||||
if let Some(oneof) = field.containing_oneof() {
|
||||
for oneof_field in oneof.fields() {
|
||||
if let Some(fm) = is_message_field(&oneof_field) {
|
||||
self.add_message(&fm);
|
||||
msg_queue.push_back(fm);
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if field.is_map() {
|
||||
// key is always scalar type, so no need to process
|
||||
// value can be any type, so need to unpack value type
|
||||
let map_field_msg = is_message_field(&field).unwrap();
|
||||
let map_value_field = map_field_msg.get_field_by_name("value").unwrap();
|
||||
if let Some(value_fm) = is_message_field(&map_value_field) {
|
||||
self.add_message(&value_fm);
|
||||
msg_queue.push_back(value_fm);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if let Some(fm) = is_message_field(&field) {
|
||||
self.add_message(&fm);
|
||||
msg_queue.push_back(fm);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn field_to_type_or_ref(root_name: &str, field: FieldDescriptor) -> JsonSchemaEntry {
|
||||
match field.kind() {
|
||||
prost_reflect::Kind::Bool => JsonSchemaEntry::boolean(),
|
||||
prost_reflect::Kind::Double => JsonSchemaEntry::number("double"),
|
||||
prost_reflect::Kind::Float => JsonSchemaEntry::number("float"),
|
||||
prost_reflect::Kind::Int32 => JsonSchemaEntry::number("int32"),
|
||||
prost_reflect::Kind::Int64 => JsonSchemaEntry::string_with_format("int64"),
|
||||
prost_reflect::Kind::Uint32 => JsonSchemaEntry::number("int64"),
|
||||
prost_reflect::Kind::Uint64 => JsonSchemaEntry::string_with_format("uint64"),
|
||||
prost_reflect::Kind::Sint32 => JsonSchemaEntry::number("sint32"),
|
||||
prost_reflect::Kind::Sint64 => JsonSchemaEntry::string_with_format("sint64"),
|
||||
prost_reflect::Kind::Fixed32 => JsonSchemaEntry::number("int64"),
|
||||
prost_reflect::Kind::Fixed64 => JsonSchemaEntry::string_with_format("fixed64"),
|
||||
prost_reflect::Kind::Sfixed32 => JsonSchemaEntry::number("sfixed32"),
|
||||
prost_reflect::Kind::Sfixed64 => JsonSchemaEntry::string_with_format("sfixed64"),
|
||||
prost_reflect::Kind::String => JsonSchemaEntry::string(),
|
||||
prost_reflect::Kind::Bytes => JsonSchemaEntry::string_with_format("byte"),
|
||||
prost_reflect::Kind::Enum(enums) => {
|
||||
let values = enums.values().map(|v| v.name().to_string()).collect::<Vec<_>>();
|
||||
JsonSchemaEntry::enums(values)
|
||||
}
|
||||
prost_reflect::Kind::Message(fm) => {
|
||||
let field_type_full_name = fm.full_name();
|
||||
match field_type_full_name {
|
||||
// [Protocol Buffers Well-Known Types]: https://protobuf.dev/reference/protobuf/google.protobuf/
|
||||
"google.protobuf.FieldMask" => JsonSchemaEntry::string(),
|
||||
"google.protobuf.Timestamp" => JsonSchemaEntry::string_with_format("date-time"),
|
||||
"google.protobuf.Duration" => JsonSchemaEntry::string(),
|
||||
"google.protobuf.StringValue" => JsonSchemaEntry::string(),
|
||||
"google.protobuf.BytesValue" => JsonSchemaEntry::string_with_format("byte"),
|
||||
"google.protobuf.Int32Value" => JsonSchemaEntry::number("int32"),
|
||||
"google.protobuf.UInt32Value" => JsonSchemaEntry::string_with_format("int64"),
|
||||
"google.protobuf.Int64Value" => JsonSchemaEntry::string_with_format("int64"),
|
||||
"google.protobuf.UInt64Value" => JsonSchemaEntry::string_with_format("uint64"),
|
||||
"google.protobuf.FloatValue" => JsonSchemaEntry::number("float"),
|
||||
"google.protobuf.DoubleValue" => JsonSchemaEntry::number("double"),
|
||||
"google.protobuf.BoolValue" => JsonSchemaEntry::boolean(),
|
||||
"google.protobuf.Empty" => JsonSchemaEntry::default(),
|
||||
"google.protobuf.Struct" => JsonSchemaEntry::object(),
|
||||
"google.protobuf.ListValue" => JsonSchemaEntry::array(JsonSchemaEntry::default()),
|
||||
"google.protobuf.NullValue" => JsonSchemaEntry::null(),
|
||||
name @ _ if name == root_name => JsonSchemaEntry::root_reference(),
|
||||
_ => JsonSchemaEntry::reference(fm.full_name()),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn is_message_field(field: &FieldDescriptor) -> Option<MessageDescriptor> {
|
||||
match field.kind() {
|
||||
prost_reflect::Kind::Message(m) => Some(m),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, serde::Serialize)]
|
||||
#[serde(default, rename_all = "camelCase")]
|
||||
pub struct JsonSchemaEntry {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
title: Option<String>,
|
||||
|
||||
#[serde(rename = "type")]
|
||||
type_: JsonType,
|
||||
#[serde(rename = "type", skip_serializing_if = "Option::is_none")]
|
||||
type_: Option<JsonType>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
format: Option<String>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
description: Option<String>,
|
||||
@@ -21,15 +261,115 @@ pub struct JsonSchemaEntry {
|
||||
#[serde(rename = "enum", skip_serializing_if = "Option::is_none")]
|
||||
enum_: Option<Vec<String>>,
|
||||
|
||||
/// Don't allow any other properties in the object
|
||||
additional_properties: bool,
|
||||
// for map type
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
additional_properties: Option<Box<JsonSchemaEntry>>,
|
||||
|
||||
/// Set all properties to required
|
||||
// Set all properties to required
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
required: Option<Vec<String>>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
items: Option<Box<JsonSchemaEntry>>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none", rename = "$defs")]
|
||||
defs: Option<HashMap<String, JsonSchemaEntry>>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none", rename = "$ref")]
|
||||
ref_: Option<String>,
|
||||
}
|
||||
|
||||
impl JsonSchemaEntry {
|
||||
pub fn add_property(&mut self, name: String, entry: JsonSchemaEntry) {
|
||||
if self.properties.is_none() {
|
||||
self.properties = Some(HashMap::new());
|
||||
}
|
||||
self.properties.as_mut().unwrap().insert(name, entry);
|
||||
}
|
||||
|
||||
pub fn add_required(&mut self, name: String) {
|
||||
if self.required.is_none() {
|
||||
self.required = Some(Vec::new());
|
||||
}
|
||||
self.required.as_mut().unwrap().push(name);
|
||||
}
|
||||
}
|
||||
|
||||
impl JsonSchemaEntry {
|
||||
pub fn object() -> Self {
|
||||
JsonSchemaEntry {
|
||||
type_: Some(JsonType::Object),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
pub fn boolean() -> Self {
|
||||
JsonSchemaEntry {
|
||||
type_: Some(JsonType::Boolean),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
pub fn number<S: Into<String>>(format: S) -> Self {
|
||||
JsonSchemaEntry {
|
||||
type_: Some(JsonType::Number),
|
||||
format: Some(format.into()),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
pub fn string() -> Self {
|
||||
JsonSchemaEntry {
|
||||
type_: Some(JsonType::String),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn string_with_format<S: Into<String>>(format: S) -> Self {
|
||||
JsonSchemaEntry {
|
||||
type_: Some(JsonType::String),
|
||||
format: Some(format.into()),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
pub fn reference<S: AsRef<str>>(ref_: S) -> Self {
|
||||
JsonSchemaEntry {
|
||||
ref_: Some(format!("#/$defs/{}", ref_.as_ref())),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
pub fn root_reference() -> Self{
|
||||
JsonSchemaEntry {
|
||||
ref_: Some("#".to_string()),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
pub fn array(item: JsonSchemaEntry) -> Self {
|
||||
JsonSchemaEntry {
|
||||
type_: Some(JsonType::Array),
|
||||
items: Some(Box::new(item)),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
pub fn enums(enums: Vec<String>) -> Self {
|
||||
JsonSchemaEntry {
|
||||
type_: Some(JsonType::String),
|
||||
enum_: Some(enums),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn map(value_type: JsonSchemaEntry) -> Self {
|
||||
JsonSchemaEntry {
|
||||
type_: Some(JsonType::Object),
|
||||
additional_properties: Some(Box::new(value_type)),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn null() -> Self {
|
||||
JsonSchemaEntry {
|
||||
type_: Some(JsonType::Null),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum JsonType {
|
||||
@@ -49,7 +389,7 @@ impl Default for JsonType {
|
||||
}
|
||||
|
||||
impl serde::Serialize for JsonType {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
@@ -64,116 +404,3 @@ impl serde::Serialize for JsonType {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> serde::Deserialize<'de> for JsonType {
|
||||
fn deserialize<D>(deserializer: D) -> Result<JsonType, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let s = String::deserialize(deserializer)?;
|
||||
match s.as_str() {
|
||||
"string" => Ok(JsonType::String),
|
||||
"number" => Ok(JsonType::Number),
|
||||
"object" => Ok(JsonType::Object),
|
||||
"array" => Ok(JsonType::Array),
|
||||
"boolean" => Ok(JsonType::Boolean),
|
||||
"null" => Ok(JsonType::Null),
|
||||
_ => Ok(JsonType::_UNKNOWN),
|
||||
}
|
||||
}
|
||||
}
|
||||
pub fn message_to_json_schema(
    pool: &DescriptorPool,
    message: MessageDescriptor,
) -> JsonSchemaEntry {
    let mut schema = JsonSchemaEntry {
        title: Some(message.name().to_string()),
        type_: JsonType::Object, // Messages are objects
        ..Default::default()
    };

    let mut properties = HashMap::new();
    message.fields().for_each(|f| match f.kind() {
        prost_reflect::Kind::Message(m) => {
            properties.insert(f.name().to_string(), message_to_json_schema(pool, m));
        }
        prost_reflect::Kind::Enum(e) => {
            properties.insert(
                f.name().to_string(),
                JsonSchemaEntry {
                    type_: map_proto_type_to_json_type(f.field_descriptor_proto().r#type()),
                    enum_: Some(e.values().map(|v| v.name().to_string()).collect::<Vec<_>>()),
                    ..Default::default()
                },
            );
        }
        _ => {
            // TODO: Handle repeated label
            match f.field_descriptor_proto().label() {
                field_descriptor_proto::Label::Repeated => {
                    // TODO: Handle more complex repeated types. This just handles primitives for now
                    properties.insert(
                        f.name().to_string(),
                        JsonSchemaEntry {
                            type_: JsonType::Array,
                            items: Some(Box::new(JsonSchemaEntry {
                                type_: map_proto_type_to_json_type(
                                    f.field_descriptor_proto().r#type(),
                                ),
                                ..Default::default()
                            })),
                            ..Default::default()
                        },
                    );
                }
                _ => {
                    // Regular JSON field
                    properties.insert(
                        f.name().to_string(),
                        JsonSchemaEntry {
                            type_: map_proto_type_to_json_type(f.field_descriptor_proto().r#type()),
                            ..Default::default()
                        },
                    );
                }
            };
        }
    });

    schema.properties = Some(properties);

    // All proto 3 fields are optional, so maybe we could
    // make this a setting?
    // schema.required = Some(
    //     message
    //         .fields()
    //         .map(|f| f.name().to_string())
    //         .collect::<Vec<_>>(),
    // );

    schema
}
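As a sketch of what this function produces (hypothetical message and names, not from this diff): for a proto3 message with a string field and a repeated int32 field, the resulting schema serializes roughly as shown in the comments below.

// Hypothetical usage; "example.Pet" is an illustrative fully-qualified name for
//   message Pet { string name = 1; repeated int32 scores = 2; }
let pet = pool.get_message_by_name("example.Pet").expect("message in pool");
let schema = message_to_json_schema(&pool, pet);
// Serialized (e.g. with serde_json), this yields approximately:
// {
//   "title": "Pet",
//   "type": "object",
//   "properties": {
//     "name": { "type": "string" },
//     "scores": { "type": "array", "items": { "type": "number" } }
//   }
// }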
fn map_proto_type_to_json_type(proto_type: field_descriptor_proto::Type) -> JsonType {
    match proto_type {
        field_descriptor_proto::Type::Double => JsonType::Number,
        field_descriptor_proto::Type::Float => JsonType::Number,
        field_descriptor_proto::Type::Int64 => JsonType::Number,
        field_descriptor_proto::Type::Uint64 => JsonType::Number,
        field_descriptor_proto::Type::Int32 => JsonType::Number,
        field_descriptor_proto::Type::Fixed64 => JsonType::Number,
        field_descriptor_proto::Type::Fixed32 => JsonType::Number,
        field_descriptor_proto::Type::Bool => JsonType::Boolean,
        field_descriptor_proto::Type::String => JsonType::String,
        field_descriptor_proto::Type::Group => JsonType::_UNKNOWN,
        field_descriptor_proto::Type::Message => JsonType::Object,
        field_descriptor_proto::Type::Bytes => JsonType::String,
        field_descriptor_proto::Type::Uint32 => JsonType::Number,
        field_descriptor_proto::Type::Enum => JsonType::String,
        field_descriptor_proto::Type::Sfixed32 => JsonType::Number,
        field_descriptor_proto::Type::Sfixed64 => JsonType::Number,
        field_descriptor_proto::Type::Sint32 => JsonType::Number,
        field_descriptor_proto::Type::Sint64 => JsonType::Number,
    }
}
@@ -69,7 +69,7 @@ impl GrpcConnection {
        service: &str,
        method: &str,
        message: &str,
-        metadata: BTreeMap<String, String>,
+        metadata: &BTreeMap<String, String>,
    ) -> Result<Response<DynamicMessage>, StreamError> {
        let method = &self.method(&service, &method)?;
        let input_message = method.input();
@@ -96,7 +96,7 @@ impl GrpcConnection {
        service: &str,
        method: &str,
        stream: ReceiverStream<DynamicMessage>,
-        metadata: BTreeMap<String, String>,
+        metadata: &BTreeMap<String, String>,
    ) -> Result<Response<Streaming<DynamicMessage>>, StreamError> {
        let method = &self.method(&service, &method)?;
        let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
@@ -116,7 +116,7 @@ impl GrpcConnection {
        service: &str,
        method: &str,
        stream: ReceiverStream<DynamicMessage>,
-        metadata: BTreeMap<String, String>,
+        metadata: &BTreeMap<String, String>,
    ) -> Result<Response<DynamicMessage>, StreamError> {
        let method = &self.method(&service, &method)?;
        let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
@@ -137,7 +137,7 @@ impl GrpcConnection {
        service: &str,
        method: &str,
        message: &str,
-        metadata: BTreeMap<String, String>,
+        metadata: &BTreeMap<String, String>,
    ) -> Result<Response<Streaming<DynamicMessage>>, StreamError> {
        let method = &self.method(&service, &method)?;
        let input_message = method.input();
@@ -180,10 +180,11 @@ impl GrpcHandle {
        id: &str,
        uri: &str,
        proto_files: &Vec<PathBuf>,
+        metadata: &BTreeMap<String, String>,
    ) -> Result<(), String> {
        let pool = if proto_files.is_empty() {
            let full_uri = uri_from_str(uri)?;
-            fill_pool_from_reflection(&full_uri).await
+            fill_pool_from_reflection(&full_uri, metadata).await
        } else {
            fill_pool_from_files(&self.app_handle, proto_files).await
        }?;
@@ -197,9 +198,10 @@ impl GrpcHandle {
        id: &str,
        uri: &str,
        proto_files: &Vec<PathBuf>,
+        metadata: &BTreeMap<String, String>,
    ) -> Result<Vec<ServiceDefinition>, String> {
        // Ensure reflection is up-to-date
-        self.reflect(id, uri, proto_files).await?;
+        self.reflect(id, uri, proto_files, metadata).await?;

        let pool = self.get_pool(id, uri, proto_files).ok_or("Failed to get pool".to_string())?;
        Ok(self.services_from_pool(&pool))
@@ -235,8 +237,9 @@ impl GrpcHandle {
        id: &str,
        uri: &str,
        proto_files: &Vec<PathBuf>,
+        metadata: &BTreeMap<String, String>,
    ) -> Result<GrpcConnection, String> {
-        self.reflect(id, uri, proto_files).await?;
+        self.reflect(id, uri, proto_files, metadata).await?;
        let pool = self.get_pool(id, uri, proto_files).ok_or("Failed to get pool")?;

        let uri = uri_from_str(uri)?;
@@ -254,7 +257,10 @@ impl GrpcHandle {
    }
}

-fn decorate_req<T>(metadata: BTreeMap<String, String>, req: &mut Request<T>) -> Result<(), String> {
+pub(crate) fn decorate_req<T>(
+    metadata: &BTreeMap<String, String>,
+    req: &mut Request<T>,
+) -> Result<(), String> {
    for (k, v) in metadata {
        req.metadata_mut().insert(
            MetadataKey::from_str(k.as_str()).map_err(|e| e.to_string())?,
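The helper's body is truncated in this view. A hypothetical call site for the now-pub(crate) decorate_req (the payload and header values are illustrative; Request is tonic::Request):

// Hypothetical call site inside a function returning Result<_, String> (not from this diff).
let mut metadata = BTreeMap::new();
metadata.insert("authorization".to_string(), "Bearer my-token".to_string());

let mut req = tonic::Request::new(dynamic_message); // dynamic_message: a prost_reflect::DynamicMessage
decorate_req(&metadata, &mut req)?; // copies each key/value pair into the request's gRPC metadata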
@@ -1,3 +1,4 @@
+use std::collections::BTreeMap;
use std::env::temp_dir;
use std::ops::Deref;
use std::path::PathBuf;
@@ -28,7 +29,7 @@ pub async fn fill_pool_from_files(
    let desc_path = temp_dir().join(random_file_name);
    let global_import_dir = app_handle
        .path()
-        .resolve("vendored/protoc/protoc-include", BaseDirectory::Resource)
+        .resolve("vendored/protoc/include", BaseDirectory::Resource)
        .expect("failed to resolve protoc include directory");

    // HACK: Remove UNC prefix for Windows paths
@@ -89,11 +90,14 @@ pub async fn fill_pool_from_files(
    Ok(pool)
}

-pub async fn fill_pool_from_reflection(uri: &Uri) -> Result<DescriptorPool, String> {
+pub async fn fill_pool_from_reflection(
+    uri: &Uri,
+    metadata: &BTreeMap<String, String>,
+) -> Result<DescriptorPool, String> {
    let mut pool = DescriptorPool::new();
    let mut client = AutoReflectionClient::new(uri);

-    for service in list_services(&mut client).await? {
+    for service in list_services(&mut client, metadata).await? {
        if service == "grpc.reflection.v1alpha.ServerReflection" {
            continue;
        }
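For context, a hypothetical caller of the new fill_pool_from_reflection signature (the URI and header values are illustrative; Uri is the same http/tonic Uri as in the signature above):

// Hypothetical caller (not from this diff).
let uri = Uri::from_static("http://localhost:50051");
let mut metadata = BTreeMap::new();
metadata.insert("x-api-key".to_string(), "example-key".to_string());
let pool = fill_pool_from_reflection(&uri, &metadata).await?;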
@@ -101,14 +105,18 @@ pub async fn fill_pool_from_reflection(uri: &Uri) -> Result<DescriptorPool, String>
            // TODO: update reflection client to use v1
            continue;
        }
-        file_descriptor_set_from_service_name(&service, &mut pool, &mut client).await;
+        file_descriptor_set_from_service_name(&service, &mut pool, &mut client, metadata).await;
    }

    Ok(pool)
}

-async fn list_services(client: &mut AutoReflectionClient) -> Result<Vec<String>, String> {
-    let response = client.send_reflection_request(MessageRequest::ListServices("".into())).await?;
+async fn list_services(
+    client: &mut AutoReflectionClient,
+    metadata: &BTreeMap<String, String>,
+) -> Result<Vec<String>, String> {
+    let response =
+        client.send_reflection_request(MessageRequest::ListServices("".into()), metadata).await?;

    let list_services_response = match response {
        MessageResponse::ListServicesResponse(resp) => resp,
@@ -122,9 +130,13 @@ async fn file_descriptor_set_from_service_name(
    service_name: &str,
    pool: &mut DescriptorPool,
    client: &mut AutoReflectionClient,
+    metadata: &BTreeMap<String, String>,
) {
    let response = match client
-        .send_reflection_request(MessageRequest::FileContainingSymbol(service_name.into()))
+        .send_reflection_request(
+            MessageRequest::FileContainingSymbol(service_name.into()),
+            metadata,
+        )
        .await
    {
        Ok(resp) => resp,
@@ -139,8 +151,13 @@ async fn file_descriptor_set_from_service_name(
        _ => panic!("Expected a FileDescriptorResponse variant"),
    };

-    add_file_descriptors_to_pool(file_descriptor_response.file_descriptor_proto, pool, client)
-        .await;
+    add_file_descriptors_to_pool(
+        file_descriptor_response.file_descriptor_proto,
+        pool,
+        client,
+        metadata,
+    )
+    .await;
}

#[async_recursion]
@@ -148,6 +165,7 @@ async fn add_file_descriptors_to_pool(
    fds: Vec<Vec<u8>>,
    pool: &mut DescriptorPool,
    client: &mut AutoReflectionClient,
+    metadata: &BTreeMap<String, String>,
) {
    let mut topo_sort = topology::SimpleTopoSort::new();
    let mut fd_mapping = std::collections::HashMap::with_capacity(fds.len());
@@ -165,7 +183,7 @@ async fn add_file_descriptors_to_pool(
            if let Some(fdp) = fd_mapping.remove(&node) {
                pool.add_file_descriptor_proto(fdp).expect("add file descriptor proto");
            } else {
-                file_descriptor_set_by_filename(node.as_str(), pool, client).await;
+                file_descriptor_set_by_filename(node.as_str(), pool, client, metadata).await;
            }
        }
        Err(_) => panic!("proto file got cycle!"),
@@ -177,6 +195,7 @@ async fn file_descriptor_set_by_filename(
    filename: &str,
    pool: &mut DescriptorPool,
    client: &mut AutoReflectionClient,
+    metadata: &BTreeMap<String, String>,
) {
    // We already fetched this file
    if let Some(_) = pool.get_file_by_name(filename) {
@@ -184,7 +203,7 @@ async fn file_descriptor_set_by_filename(
    }

    let msg = MessageRequest::FileByFilename(filename.into());
-    let response = client.send_reflection_request(msg).await;
+    let response = client.send_reflection_request(msg, metadata).await;
    let file_descriptor_response = match response {
        Ok(MessageResponse::FileDescriptorResponse(resp)) => resp,
        Ok(_) => {
@@ -196,8 +215,13 @@ async fn file_descriptor_set_by_filename(
        }
    };

-    add_file_descriptors_to_pool(file_descriptor_response.file_descriptor_proto, pool, client)
-        .await;
+    add_file_descriptors_to_pool(
+        file_descriptor_response.file_descriptor_proto,
+        pool,
+        client,
+        metadata,
+    )
+    .await;
}

pub fn method_desc_to_path(md: &MethodDescriptor) -> PathAndQuery {

@@ -1,7 +1,7 @@
[package]
name = "yaak-http"
version = "0.1.0"
-edition = "2021"
+edition = "2024"
publish = false

[dependencies]

@@ -2,19 +2,19 @@
name = "yaak-license"
links = "yaak-license"
version = "0.1.0"
-edition = "2021"
+edition = "2024"
publish = false

[dependencies]
-reqwest = { workspace = true, features = ["json"] }
-serde = { version = "1.0.208", features = ["derive"] }
-ts-rs = { workspace = true }
-thiserror = { workspace = true }
-tauri = { workspace = true }
-yaak-models = { workspace = true }
chrono = "0.4.38"
-log = "0.4.22"
-serde_json = "1.0.132"
+log = "0.4.26"
+reqwest = { workspace = true, features = ["json"] }
+serde = { workspace = true, features = ["derive"] }
+serde_json = { workspace = true }
+tauri = { workspace = true }
+thiserror = { workspace = true }
+ts-rs = { workspace = true }
+yaak-models = { workspace = true }

[build-dependencies]
tauri-plugin = { workspace = true, features = ["build"] }
Some files were not shown because too many files have changed in this diff