Websockets for plugin runtime communication (#156)

This commit is contained in:
Gregory Schier
2025-01-20 10:55:53 -08:00
committed by GitHub
parent 095aaa5e92
commit b698a56549
54 changed files with 841 additions and 1185 deletions

153
src-tauri/Cargo.lock generated
View File

@@ -311,9 +311,9 @@ dependencies = [
[[package]]
name = "async-stream"
version = "0.3.5"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51"
checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
dependencies = [
"async-stream-impl",
"futures-core",
@@ -322,9 +322,9 @@ dependencies = [
[[package]]
name = "async-stream-impl"
version = "0.3.5"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
dependencies = [
"proc-macro2",
"quote",
@@ -394,9 +394,9 @@ checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"
[[package]]
name = "axum"
version = "0.7.5"
version = "0.7.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf"
checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f"
dependencies = [
"async-trait",
"axum-core",
@@ -413,17 +413,17 @@ dependencies = [
"pin-project-lite",
"rustversion",
"serde",
"sync_wrapper 1.0.1",
"tower 0.4.13",
"sync_wrapper",
"tower 0.5.2",
"tower-layer",
"tower-service",
]
[[package]]
name = "axum-core"
version = "0.4.3"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3"
checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199"
dependencies = [
"async-trait",
"bytes",
@@ -434,7 +434,7 @@ dependencies = [
"mime",
"pin-project-lite",
"rustversion",
"sync_wrapper 0.1.2",
"sync_wrapper",
"tower-layer",
"tower-service",
]
@@ -651,9 +651,9 @@ checksum = "8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495"
[[package]]
name = "bytes"
version = "1.7.1"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50"
checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
dependencies = [
"serde",
]
@@ -1142,6 +1142,12 @@ dependencies = [
"syn 2.0.87",
]
[[package]]
name = "data-encoding"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e60eed09d8c01d3cee5b7d30acb059b76614c918fa0f992e0dd6eeb10daad6f"
[[package]]
name = "datetime"
version = "0.5.2"
@@ -1549,12 +1555,6 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "fixedbitset"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flate2"
version = "1.0.35"
@@ -2290,9 +2290,9 @@ dependencies = [
[[package]]
name = "hyper-timeout"
version = "0.5.1"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793"
checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
dependencies = [
"hyper",
"hyper-util",
@@ -2933,12 +2933,6 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "multimap"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03"
[[package]]
name = "nanoid"
version = "0.4.0"
@@ -3148,7 +3142,7 @@ version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56"
dependencies = [
"proc-macro-crate 2.0.0",
"proc-macro-crate 3.2.0",
"proc-macro2",
"quote",
"syn 2.0.87",
@@ -3613,16 +3607,6 @@ version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "petgraph"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db"
dependencies = [
"fixedbitset",
"indexmap 2.3.0",
]
[[package]]
name = "phf"
version = "0.8.0"
@@ -3889,16 +3873,6 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
[[package]]
name = "prettyplease"
version = "0.2.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e"
dependencies = [
"proc-macro2",
"syn 2.0.87",
]
[[package]]
name = "proc-macro-crate"
version = "1.3.1"
@@ -3976,27 +3950,6 @@ dependencies = [
"prost-derive",
]
[[package]]
name = "prost-build"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bb182580f71dd070f88d01ce3de9f4da5021db7115d2e1c3605a754153b77c1"
dependencies = [
"bytes",
"heck 0.5.0",
"itertools",
"log",
"multimap",
"once_cell",
"petgraph",
"prettyplease",
"prost",
"prost-types",
"regex",
"syn 2.0.87",
"tempfile",
]
[[package]]
name = "prost-derive"
version = "0.13.4"
@@ -4382,7 +4335,7 @@ dependencies = [
"serde",
"serde_json",
"serde_urlencoded",
"sync_wrapper 1.0.1",
"sync_wrapper",
"system-configuration",
"tokio",
"tokio-native-tls",
@@ -4646,9 +4599,9 @@ dependencies = [
[[package]]
name = "rustversion"
version = "1.0.17"
version = "1.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6"
checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4"
[[package]]
name = "ryu"
@@ -5477,12 +5430,6 @@ dependencies = [
"syn 2.0.87",
]
[[package]]
name = "sync_wrapper"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160"
[[package]]
name = "sync_wrapper"
version = "1.0.1"
@@ -6228,6 +6175,18 @@ dependencies = [
"tokio",
]
[[package]]
name = "tokio-tungstenite"
version = "0.26.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be4bf6fecd69fcdede0ec680aaf474cdab988f9de6bc73d3758f0160e3b7025a"
dependencies = [
"futures-util",
"log",
"tokio",
"tungstenite",
]
[[package]]
name = "tokio-util"
version = "0.7.11"
@@ -6341,19 +6300,6 @@ dependencies = [
"tracing",
]
[[package]]
name = "tonic-build"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "568392c5a2bd0020723e3f387891176aabafe36fd9fcd074ad309dfa0c8eb964"
dependencies = [
"prettyplease",
"proc-macro2",
"prost-build",
"quote",
"syn 2.0.87",
]
[[package]]
name = "tonic-reflection"
version = "0.12.3"
@@ -6396,7 +6342,7 @@ dependencies = [
"futures-core",
"futures-util",
"pin-project-lite",
"sync_wrapper 1.0.1",
"sync_wrapper",
"tokio",
"tower-layer",
"tower-service",
@@ -6498,6 +6444,24 @@ dependencies = [
"termcolor",
]
[[package]]
name = "tungstenite"
version = "0.26.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413083a99c579593656008130e29255e54dcaae495be556cc26888f211648c24"
dependencies = [
"byteorder",
"bytes",
"data-encoding",
"http",
"httparse",
"log",
"rand 0.8.5",
"sha1",
"thiserror 2.0.7",
"utf-8",
]
[[package]]
name = "typeid"
version = "1.0.0"
@@ -7636,9 +7600,9 @@ name = "yaak-plugins"
version = "0.1.0"
dependencies = [
"dunce",
"futures-util",
"log",
"path-slash",
"prost",
"rand 0.8.5",
"regex",
"serde",
@@ -7647,8 +7611,7 @@ dependencies = [
"tauri-plugin-shell",
"thiserror 2.0.7",
"tokio",
"tonic",
"tonic-build",
"tokio-tungstenite",
"ts-rs",
"yaak-models",
]

View File

@@ -32,7 +32,7 @@ use tauri_plugin_clipboard_manager::ClipboardExt;
use tauri_plugin_log::fern::colors::ColoredLevelConfig;
use tauri_plugin_log::{Builder, Target, TargetKind};
use tauri_plugin_opener::OpenerExt;
use tauri_plugin_window_state::{AppHandleExt, StateFlags, WindowExt};
use tauri_plugin_window_state::{AppHandleExt, StateFlags};
use tokio::fs::read_to_string;
use tokio::sync::Mutex;
use tokio::task::block_in_place;
@@ -63,11 +63,12 @@ use yaak_models::queries::{
upsert_workspace_meta, BatchUpsertResult, UpdateSource,
};
use yaak_plugins::events::{
BootResponse, CallHttpAuthenticationRequest, CallHttpRequestActionRequest, FilterResponse,
FindHttpResponsesResponse, GetHttpAuthenticationResponse, GetHttpRequestActionsResponse,
GetHttpRequestByIdResponse, GetTemplateFunctionsResponse, HttpHeader, Icon, InternalEvent,
InternalEventPayload, PromptTextResponse, RenderHttpRequestResponse, RenderPurpose,
SendHttpRequestResponse, ShowToastRequest, TemplateRenderResponse, WindowContext,
BootResponse, CallHttpAuthenticationRequest, CallHttpRequestActionRequest, Color,
FilterResponse, FindHttpResponsesResponse, GetHttpAuthenticationResponse,
GetHttpRequestActionsResponse, GetHttpRequestByIdResponse, GetTemplateFunctionsResponse,
HttpHeader, Icon, InternalEvent, InternalEventPayload, PromptTextResponse,
RenderHttpRequestResponse, RenderPurpose, SendHttpRequestResponse, ShowToastRequest,
TemplateRenderResponse, WindowContext,
};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::plugin_handle::PluginHandle;
@@ -2268,6 +2269,21 @@ async fn handle_plugin_event<R: Runtime>(
render_json_value(req.data, &base_environment, environment.as_ref(), &cb).await;
Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data }))
}
InternalEventPayload::ErrorResponse(resp) => {
let window = get_window_from_window_context(app_handle, &window_context)
.expect("Failed to find window for plugin reload");
let toast_event = plugin_handle.build_event_to_send(
WindowContext::from_window(&window),
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
message: resp.error,
color: Some(Color::Danger),
..Default::default()
}),
None,
);
Box::pin(handle_plugin_event(app_handle, &toast_event, plugin_handle)).await;
None
}
InternalEventPayload::ReloadResponse(_) => {
let window = get_window_from_window_context(app_handle, &window_context)
.expect("Failed to find window for plugin reload");

View File

@@ -34,34 +34,34 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
var require_safe_buffer = __commonJS({
"../../node_modules/safe-buffer/index.js"(exports2, module2) {
var buffer = require("buffer");
var Buffer2 = buffer.Buffer;
var Buffer3 = buffer.Buffer;
function copyProps(src, dst) {
for (var key in src) {
dst[key] = src[key];
}
}
if (Buffer2.from && Buffer2.alloc && Buffer2.allocUnsafe && Buffer2.allocUnsafeSlow) {
if (Buffer3.from && Buffer3.alloc && Buffer3.allocUnsafe && Buffer3.allocUnsafeSlow) {
module2.exports = buffer;
} else {
copyProps(buffer, exports2);
exports2.Buffer = SafeBuffer;
}
function SafeBuffer(arg, encodingOrOffset, length) {
return Buffer2(arg, encodingOrOffset, length);
return Buffer3(arg, encodingOrOffset, length);
}
SafeBuffer.prototype = Object.create(Buffer2.prototype);
copyProps(Buffer2, SafeBuffer);
SafeBuffer.prototype = Object.create(Buffer3.prototype);
copyProps(Buffer3, SafeBuffer);
SafeBuffer.from = function(arg, encodingOrOffset, length) {
if (typeof arg === "number") {
throw new TypeError("Argument must not be a number");
}
return Buffer2(arg, encodingOrOffset, length);
return Buffer3(arg, encodingOrOffset, length);
};
SafeBuffer.alloc = function(size, fill, encoding) {
if (typeof size !== "number") {
throw new TypeError("Argument must be a number");
}
var buf = Buffer2(size);
var buf = Buffer3(size);
if (fill !== void 0) {
if (typeof encoding === "string") {
buf.fill(fill, encoding);
@@ -77,7 +77,7 @@ var require_safe_buffer = __commonJS({
if (typeof size !== "number") {
throw new TypeError("Argument must be a number");
}
return Buffer2(size);
return Buffer3(size);
};
SafeBuffer.allocUnsafeSlow = function(size) {
if (typeof size !== "number") {
@@ -91,7 +91,7 @@ var require_safe_buffer = __commonJS({
// ../../node_modules/jws/lib/data-stream.js
var require_data_stream = __commonJS({
"../../node_modules/jws/lib/data-stream.js"(exports2, module2) {
var Buffer2 = require_safe_buffer().Buffer;
var Buffer3 = require_safe_buffer().Buffer;
var Stream = require("stream");
var util = require("util");
function DataStream(data) {
@@ -99,11 +99,11 @@ var require_data_stream = __commonJS({
this.writable = true;
this.readable = true;
if (!data) {
this.buffer = Buffer2.alloc(0);
this.buffer = Buffer3.alloc(0);
return this;
}
if (typeof data.pipe === "function") {
this.buffer = Buffer2.alloc(0);
this.buffer = Buffer3.alloc(0);
data.pipe(this);
return this;
}
@@ -121,7 +121,7 @@ var require_data_stream = __commonJS({
}
util.inherits(DataStream, Stream);
DataStream.prototype.write = function write(data) {
this.buffer = Buffer2.concat([this.buffer, Buffer2.from(data)]);
this.buffer = Buffer3.concat([this.buffer, Buffer3.from(data)]);
this.emit("data", data);
};
DataStream.prototype.end = function end(data) {
@@ -140,11 +140,11 @@ var require_data_stream = __commonJS({
var require_buffer_equal_constant_time = __commonJS({
"../../node_modules/buffer-equal-constant-time/index.js"(exports2, module2) {
"use strict";
var Buffer2 = require("buffer").Buffer;
var Buffer3 = require("buffer").Buffer;
var SlowBuffer = require("buffer").SlowBuffer;
module2.exports = bufferEq;
function bufferEq(a, b) {
if (!Buffer2.isBuffer(a) || !Buffer2.isBuffer(b)) {
if (!Buffer3.isBuffer(a) || !Buffer3.isBuffer(b)) {
return false;
}
if (a.length !== b.length) {
@@ -157,14 +157,14 @@ var require_buffer_equal_constant_time = __commonJS({
return c === 0;
}
bufferEq.install = function() {
Buffer2.prototype.equal = SlowBuffer.prototype.equal = function equal(that) {
Buffer3.prototype.equal = SlowBuffer.prototype.equal = function equal(that) {
return bufferEq(this, that);
};
};
var origBufEqual = Buffer2.prototype.equal;
var origBufEqual = Buffer3.prototype.equal;
var origSlowBufEqual = SlowBuffer.prototype.equal;
bufferEq.restore = function() {
Buffer2.prototype.equal = origBufEqual;
Buffer3.prototype.equal = origBufEqual;
SlowBuffer.prototype.equal = origSlowBufEqual;
};
}
@@ -198,7 +198,7 @@ var require_param_bytes_for_alg = __commonJS({
var require_ecdsa_sig_formatter = __commonJS({
"../../node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.js"(exports2, module2) {
"use strict";
var Buffer2 = require_safe_buffer().Buffer;
var Buffer3 = require_safe_buffer().Buffer;
var getParamBytesForAlg = require_param_bytes_for_alg();
var MAX_OCTET = 128;
var CLASS_UNIVERSAL = 0;
@@ -211,10 +211,10 @@ var require_ecdsa_sig_formatter = __commonJS({
return base64.replace(/=/g, "").replace(/\+/g, "-").replace(/\//g, "_");
}
function signatureAsBuffer(signature) {
if (Buffer2.isBuffer(signature)) {
if (Buffer3.isBuffer(signature)) {
return signature;
} else if ("string" === typeof signature) {
return Buffer2.from(signature, "base64");
return Buffer3.from(signature, "base64");
}
throw new TypeError("ECDSA signature must be a Base64 string or a Buffer");
}
@@ -262,7 +262,7 @@ var require_ecdsa_sig_formatter = __commonJS({
throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain');
}
var rPadding = paramBytes - rLength, sPadding = paramBytes - sLength;
var dst = Buffer2.allocUnsafe(rPadding + rLength + sPadding + sLength);
var dst = Buffer3.allocUnsafe(rPadding + rLength + sPadding + sLength);
for (offset = 0; offset < rPadding; ++offset) {
dst[offset] = 0;
}
@@ -300,7 +300,7 @@ var require_ecdsa_sig_formatter = __commonJS({
var sLength = paramBytes - sPadding;
var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength;
var shortLength = rsBytes < MAX_OCTET;
var dst = Buffer2.allocUnsafe((shortLength ? 2 : 3) + rsBytes);
var dst = Buffer3.allocUnsafe((shortLength ? 2 : 3) + rsBytes);
var offset = 0;
dst[offset++] = ENCODED_TAG_SEQ;
if (shortLength) {
@@ -338,7 +338,7 @@ var require_ecdsa_sig_formatter = __commonJS({
var require_jwa = __commonJS({
"../../node_modules/jwa/index.js"(exports2, module2) {
var bufferEqual = require_buffer_equal_constant_time();
var Buffer2 = require_safe_buffer().Buffer;
var Buffer3 = require_safe_buffer().Buffer;
var crypto = require("crypto");
var formatEcdsa = require_ecdsa_sig_formatter();
var util = require("util");
@@ -352,7 +352,7 @@ var require_jwa = __commonJS({
MSG_INVALID_SECRET += "or a KeyObject";
}
function checkIsPublicKey(key) {
if (Buffer2.isBuffer(key)) {
if (Buffer3.isBuffer(key)) {
return;
}
if (typeof key === "string") {
@@ -375,7 +375,7 @@ var require_jwa = __commonJS({
}
}
function checkIsPrivateKey(key) {
if (Buffer2.isBuffer(key)) {
if (Buffer3.isBuffer(key)) {
return;
}
if (typeof key === "string") {
@@ -387,7 +387,7 @@ var require_jwa = __commonJS({
throw typeError(MSG_INVALID_SIGNER_KEY);
}
function checkIsSecretKey(key) {
if (Buffer2.isBuffer(key)) {
if (Buffer3.isBuffer(key)) {
return;
}
if (typeof key === "string") {
@@ -425,7 +425,7 @@ var require_jwa = __commonJS({
return new TypeError(errMsg);
}
function bufferOrString(obj) {
return Buffer2.isBuffer(obj) || typeof obj === "string";
return Buffer3.isBuffer(obj) || typeof obj === "string";
}
function normalizeInput(thing) {
if (!bufferOrString(thing))
@@ -444,7 +444,7 @@ var require_jwa = __commonJS({
function createHmacVerifier(bits) {
return function verify(thing, signature, secret) {
var computedSig = createHmacSigner(bits)(thing, secret);
return bufferEqual(Buffer2.from(signature), Buffer2.from(computedSig));
return bufferEqual(Buffer3.from(signature), Buffer3.from(computedSig));
};
}
function createKeySigner(bits) {
@@ -550,11 +550,11 @@ var require_jwa = __commonJS({
// ../../node_modules/jws/lib/tostring.js
var require_tostring = __commonJS({
"../../node_modules/jws/lib/tostring.js"(exports2, module2) {
var Buffer2 = require("buffer").Buffer;
var Buffer3 = require("buffer").Buffer;
module2.exports = function toString(obj) {
if (typeof obj === "string")
return obj;
if (typeof obj === "number" || Buffer2.isBuffer(obj))
if (typeof obj === "number" || Buffer3.isBuffer(obj))
return obj.toString();
return JSON.stringify(obj);
};
@@ -564,14 +564,14 @@ var require_tostring = __commonJS({
// ../../node_modules/jws/lib/sign-stream.js
var require_sign_stream = __commonJS({
"../../node_modules/jws/lib/sign-stream.js"(exports2, module2) {
var Buffer2 = require_safe_buffer().Buffer;
var Buffer3 = require_safe_buffer().Buffer;
var DataStream = require_data_stream();
var jwa = require_jwa();
var Stream = require("stream");
var toString = require_tostring();
var util = require("util");
function base64url(string, encoding) {
return Buffer2.from(string, encoding).toString("base64").replace(/=/g, "").replace(/\+/g, "-").replace(/\//g, "_");
return Buffer3.from(string, encoding).toString("base64").replace(/=/g, "").replace(/\+/g, "-").replace(/\//g, "_");
}
function jwsSecuredInput(header, payload, encoding) {
encoding = encoding || "utf8";
@@ -634,7 +634,7 @@ var require_sign_stream = __commonJS({
// ../../node_modules/jws/lib/verify-stream.js
var require_verify_stream = __commonJS({
"../../node_modules/jws/lib/verify-stream.js"(exports2, module2) {
var Buffer2 = require_safe_buffer().Buffer;
var Buffer3 = require_safe_buffer().Buffer;
var DataStream = require_data_stream();
var jwa = require_jwa();
var Stream = require("stream");
@@ -655,7 +655,7 @@ var require_verify_stream = __commonJS({
}
function headerFromJWS(jwsSig) {
var encodedHeader = jwsSig.split(".", 1)[0];
return safeJsonParse(Buffer2.from(encodedHeader, "base64").toString("binary"));
return safeJsonParse(Buffer3.from(encodedHeader, "base64").toString("binary"));
}
function securedInputFromJWS(jwsSig) {
return jwsSig.split(".", 2).join(".");
@@ -666,7 +666,7 @@ var require_verify_stream = __commonJS({
function payloadFromJWS(jwsSig, encoding) {
encoding = encoding || "utf8";
var payload = jwsSig.split(".")[1];
return Buffer2.from(payload, "base64").toString(encoding);
return Buffer3.from(payload, "base64").toString(encoding);
}
function isValidJws(string) {
return JWS_REGEX.test(string) && !!headerFromJWS(string);
@@ -3793,6 +3793,7 @@ __export(src_exports, {
});
module.exports = __toCommonJS(src_exports);
var import_jsonwebtoken = __toESM(require_jsonwebtoken());
var import_node_buffer = __toESM(require("node:buffer"));
var algorithms = [
"HS256",
"HS384",
@@ -3805,7 +3806,8 @@ var algorithms = [
"PS512",
"ES256",
"ES384",
"ES512"
"ES512",
"none"
];
var defaultAlgorithm = algorithms[0];
var plugin = {
@@ -3818,31 +3820,34 @@ var plugin = {
type: "select",
name: "algorithm",
label: "Algorithm",
hideLabel: true,
defaultValue: defaultAlgorithm,
options: algorithms.map((value) => ({ name: value, value }))
options: algorithms.map((value) => ({ name: value === "none" ? "None" : value, value }))
},
{
type: "text",
type: "editor",
name: "secret",
label: "Secret",
optional: true
label: "Secret or Private Key",
optional: true,
hideGutter: true
},
{
type: "checkbox",
name: "secretBase64",
label: "Secret Base64 Encoded"
label: "Secret is base64 encoded"
},
{
type: "editor",
name: "payload",
label: "Payload",
language: "json",
optional: true
defaultValue: '{\n "foo": "bar"\n}',
placeholder: "{ }"
}
],
async onApply(_ctx, args) {
const { algorithm, secret: _secret, secretBase64, payload } = args.config;
const secret = secretBase64 ? Buffer.from(`${_secret}`, "base64") : `${_secret}`;
const secret = secretBase64 ? import_node_buffer.default.from(`${_secret}`, "base64") : `${_secret}`;
const token = import_jsonwebtoken.default.sign(`${payload}`, secret, { algorithm });
const value = `Bearer ${token}`;
return { setHeaders: [{ name: "Authorization", value }] };

View File

@@ -20,8 +20,8 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
// src/index.ts
var src_exports = {};
__export(src_exports, {
plugin: () => plugin,
pluginHookExport: () => pluginHookExport
convertToCurl: () => convertToCurl,
plugin: () => plugin
});
module.exports = __toCommonJS(src_exports);
var NEWLINE = "\\\n ";
@@ -32,13 +32,13 @@ var plugin = {
icon: "copy",
async onSelect(ctx, args) {
const rendered_request = await ctx.httpRequest.render({ httpRequest: args.httpRequest, purpose: "preview" });
const data = await pluginHookExport(ctx, rendered_request);
const data = await convertToCurl(rendered_request);
ctx.clipboard.copyText(data);
ctx.toast.show({ message: "Curl copied to clipboard", icon: "copy" });
}
}]
};
async function pluginHookExport(_ctx, request) {
async function convertToCurl(request) {
const xs = ["curl"];
if (request.method) xs.push("-X", request.method);
if (request.url) xs.push(quote(request.url));
@@ -104,6 +104,6 @@ function maybeParseJSON(v, fallback) {
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
plugin,
pluginHookExport
convertToCurl,
plugin
});

View File

@@ -30,7 +30,7 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
// src/index.ts
var src_exports = {};
__export(src_exports, {
pluginHookResponseFilter: () => pluginHookResponseFilter
plugin: () => plugin
});
module.exports = __toCommonJS(src_exports);
@@ -499,12 +499,18 @@ JSONPath.prototype.safeVm = import_vm.default;
var SafeScript = import_vm.default.Script;
// src/index.ts
function pluginHookResponseFilter(_ctx, args) {
const parsed = JSON.parse(args.body);
const filtered = JSONPath({ path: args.filter, json: parsed });
return JSON.stringify(filtered, null, 2);
}
var plugin = {
filter: {
name: "JSONPath",
description: "Filter JSONPath",
onFilter(_ctx, args) {
const parsed = JSON.parse(args.payload);
const filtered = JSONPath({ path: args.filter, json: parsed });
return { filtered: JSON.stringify(filtered, null, 2) };
}
}
};
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
pluginHookResponseFilter
plugin
});

View File

@@ -8346,21 +8346,27 @@ var require_xpath = __commonJS({
// src/index.ts
var src_exports = {};
__export(src_exports, {
pluginHookResponseFilter: () => pluginHookResponseFilter
plugin: () => plugin
});
module.exports = __toCommonJS(src_exports);
var import_xmldom = __toESM(require_lib());
var import_xpath = __toESM(require_xpath());
function pluginHookResponseFilter(_ctx, { filter, body }) {
const doc = new import_xmldom.DOMParser().parseFromString(body, "text/xml");
const result = import_xpath.default.select(filter, doc, false);
if (Array.isArray(result)) {
return result.map((r) => String(r)).join("\n");
} else {
return String(result);
var plugin = {
filter: {
name: "XPath",
description: "Filter XPath",
onFilter(_ctx, args) {
const doc = new import_xmldom.DOMParser().parseFromString(args.payload, "text/xml");
const result = import_xpath.default.select(args.filter, doc, false);
if (Array.isArray(result)) {
return { filtered: result.map((r) => String(r)).join("\n") };
} else {
return { filtered: String(result) };
}
}
}
}
};
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
pluginHookResponseFilter
plugin
});

View File

@@ -260,7 +260,8 @@ var require_shell_quote = __commonJS({
// src/index.ts
var src_exports = {};
__export(src_exports, {
pluginHookImport: () => pluginHookImport
convertCurl: () => convertCurl,
plugin: () => plugin
});
module.exports = __toCommonJS(src_exports);
var import_shell_quote = __toESM(require_shell_quote());
@@ -290,7 +291,16 @@ var SUPPORTED_FLAGS = [
DATA_FLAGS
].flatMap((v) => v);
var BOOLEAN_FLAGS = ["G", "get", "digest"];
function pluginHookImport(_ctx, rawData) {
var plugin = {
importer: {
name: "cURL",
description: "Import cURL commands",
onImport(_ctx, args) {
return convertCurl(args.text);
}
}
};
function convertCurl(rawData) {
if (!rawData.match(/^\s*curl /)) {
return null;
}
@@ -570,5 +580,6 @@ function generateId(model) {
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
pluginHookImport
convertCurl,
plugin
});

View File

@@ -7208,11 +7208,21 @@ var require_dist = __commonJS({
// src/index.ts
var src_exports = {};
__export(src_exports, {
pluginHookImport: () => pluginHookImport
convertInsomnia: () => convertInsomnia,
plugin: () => plugin
});
module.exports = __toCommonJS(src_exports);
var import_yaml = __toESM(require_dist());
function pluginHookImport(ctx, contents) {
var plugin = {
importer: {
name: "Insomnia",
description: "Import Insomnia workspaces",
onImport(_ctx, args) {
return convertInsomnia(args.text);
}
}
};
function convertInsomnia(contents) {
let parsed;
try {
parsed = JSON.parse(contents);
@@ -7439,5 +7449,6 @@ function deleteUndefinedAttrs(obj) {
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
pluginHookImport
convertInsomnia,
plugin
});

View File

@@ -113483,30 +113483,30 @@ var require_json_schema_faker = __commonJS({
});
};
exports5.filter = function(plugins, method, file) {
return plugins.filter(function(plugin) {
return !!getResult(plugin, method, file);
return plugins.filter(function(plugin2) {
return !!getResult(plugin2, method, file);
});
};
exports5.sort = function(plugins) {
plugins.forEach(function(plugin) {
plugin.order = plugin.order || Number.MAX_SAFE_INTEGER;
plugins.forEach(function(plugin2) {
plugin2.order = plugin2.order || Number.MAX_SAFE_INTEGER;
});
return plugins.sort(function(a, b) {
return a.order - b.order;
});
};
exports5.run = function(plugins, method, file) {
var plugin, lastError, index = 0;
var plugin2, lastError, index = 0;
return new Promise(function(resolve2, reject) {
runNextPlugin();
function runNextPlugin() {
plugin = plugins[index++];
if (!plugin) {
plugin2 = plugins[index++];
if (!plugin2) {
return reject(lastError);
}
try {
debug(" %s", plugin.name);
var result = getResult(plugin, method, file, callback);
debug(" %s", plugin2.name);
var result = getResult(plugin2, method, file, callback);
if (result && typeof result.then === "function") {
result.then(onSuccess, onError);
} else if (result !== void 0) {
@@ -113526,7 +113526,7 @@ var require_json_schema_faker = __commonJS({
function onSuccess(result) {
debug(" success");
resolve2({
plugin,
plugin: plugin2,
result
});
}
@@ -145850,7 +145850,8 @@ var require_openapi_to_postmanv2 = __commonJS({
// src/index.ts
var src_exports = {};
__export(src_exports, {
pluginHookImport: () => pluginHookImport2
convertOpenApi: () => convertOpenApi,
plugin: () => plugin
});
module.exports = __toCommonJS(src_exports);
var import_openapi_to_postmanv2 = __toESM(require_openapi_to_postmanv2());
@@ -145859,7 +145860,7 @@ var import_openapi_to_postmanv2 = __toESM(require_openapi_to_postmanv2());
var POSTMAN_2_1_0_SCHEMA = "https://schema.getpostman.com/json/collection/v2.1.0/collection.json";
var POSTMAN_2_0_0_SCHEMA = "https://schema.getpostman.com/json/collection/v2.0.0/collection.json";
var VALID_SCHEMAS = [POSTMAN_2_0_0_SCHEMA, POSTMAN_2_1_0_SCHEMA];
function pluginHookImport(_ctx, contents) {
function convertPostman(contents) {
const root = parseJSONToRecord(contents);
if (root == null) return;
const info = toRecord(root.info);
@@ -146150,7 +146151,16 @@ function generateId(model) {
}
// src/index.ts
async function pluginHookImport2(ctx, contents) {
var plugin = {
importer: {
name: "OpenAPI",
description: "Import OpenAPI collections",
onImport(_ctx, args) {
return convertOpenApi(args.text);
}
}
};
async function convertOpenApi(contents) {
let postmanCollection;
try {
postmanCollection = await new Promise((resolve, reject) => {
@@ -146164,11 +146174,12 @@ async function pluginHookImport2(ctx, contents) {
} catch (err) {
return void 0;
}
return pluginHookImport(ctx, JSON.stringify(postmanCollection));
return convertPostman(JSON.stringify(postmanCollection));
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
pluginHookImport
convertOpenApi,
plugin
});
/*! Bundled license information:

View File

@@ -20,13 +20,23 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
// src/index.ts
var src_exports = {};
__export(src_exports, {
pluginHookImport: () => pluginHookImport
convertPostman: () => convertPostman,
plugin: () => plugin
});
module.exports = __toCommonJS(src_exports);
var POSTMAN_2_1_0_SCHEMA = "https://schema.getpostman.com/json/collection/v2.1.0/collection.json";
var POSTMAN_2_0_0_SCHEMA = "https://schema.getpostman.com/json/collection/v2.0.0/collection.json";
var VALID_SCHEMAS = [POSTMAN_2_0_0_SCHEMA, POSTMAN_2_1_0_SCHEMA];
function pluginHookImport(_ctx, contents) {
var plugin = {
importer: {
name: "Postman",
description: "Import postman collections",
onImport(_ctx, args) {
return convertPostman(args.text);
}
}
};
function convertPostman(contents) {
const root = parseJSONToRecord(contents);
if (root == null) return;
const info = toRecord(root.info);
@@ -317,5 +327,6 @@ function generateId(model) {
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
pluginHookImport
convertPostman,
plugin
});

View File

@@ -20,10 +20,20 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
// src/index.ts
var src_exports = {};
__export(src_exports, {
pluginHookImport: () => pluginHookImport
migrateImport: () => migrateImport,
plugin: () => plugin
});
module.exports = __toCommonJS(src_exports);
function pluginHookImport(_ctx, contents) {
var plugin = {
importer: {
name: "Yaak",
description: "Yaak official format",
onImport(_ctx, args) {
return migrateImport(args.text);
}
}
};
function migrateImport(contents) {
let parsed;
try {
parsed = JSON.parse(contents);
@@ -66,5 +76,6 @@ function isJSObject(obj) {
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
pluginHookImport
migrateImport,
plugin
});

View File

@@ -5,23 +5,23 @@ edition = "2021"
publish = false
[dependencies]
tonic = "0.12.3"
anyhow = "1.0.79"
async-recursion = "1.1.1"
dunce = "1.0.4"
hyper = "1.5.2"
hyper-rustls = { version = "0.27.5", default-features = false, features = ["http2", "rustls-platform-verifier"] }
hyper-util = { version = "0.1.10", features = ["client-legacy", "client"] }
log = "0.4.20"
md5 = "0.7.0"
prost = "0.13.4"
tokio = { version = "1.0", features = ["macros", "rt-multi-thread", "fs"] }
tonic-reflection = "0.12.3"
tokio-stream = "0.1.14"
prost-reflect = { version = "0.14.4", default-features = false, features = ["serde", "derive"] }
prost-types = "0.13.4"
serde = { version = "1.0.196", features = ["derive"] }
serde_json = "1.0.113"
prost-reflect = { version = "0.14.4", default-features = false, features = ["serde", "derive"] }
log = "0.4.20"
anyhow = "1.0.79"
hyper = "1.5.2"
hyper-util = { version = "0.1.10", features = ["client-legacy", "client"] }
hyper-rustls = { version = "0.27.5", default-features = false, features = ["http2", "rustls-platform-verifier"] }
uuid = { version = "1.7.0", features = ["v4"] }
tauri = { workspace = true }
tauri-plugin-shell = { workspace = true }
md5 = "0.7.0"
dunce = "1.0.4"
async-recursion = "1.1.1"
tokio = { version = "1.0", features = ["macros", "rt-multi-thread", "fs"] }
tokio-stream = "0.1.14"
tonic = { version = "0.12.3", default-features = false, features = ["transport"] }
tonic-reflection = "0.12.3"
uuid = { version = "1.7.0", features = ["v4"] }

View File

@@ -2,12 +2,12 @@ use prost_reflect::{DynamicMessage, MethodDescriptor, SerializeOptions};
use serde::{Deserialize, Serialize};
use serde_json::Deserializer;
mod client;
mod codec;
mod json_schema;
pub mod manager;
mod reflection;
mod transport;
mod client;
pub use tonic::metadata::*;
pub use tonic::Code;

View File

@@ -16,11 +16,9 @@ use tonic::transport::Uri;
use tonic::{IntoRequest, IntoStreamingRequest, Request, Response, Status, Streaming};
use crate::codec::DynamicCodec;
use crate::reflection::{
fill_pool_from_files, fill_pool_from_reflection, method_desc_to_path,
};
use crate::{json_schema, MethodDefinition, ServiceDefinition};
use crate::reflection::{fill_pool_from_files, fill_pool_from_reflection, method_desc_to_path};
use crate::transport::get_transport;
use crate::{json_schema, MethodDefinition, ServiceDefinition};
#[derive(Clone)]
pub struct GrpcConnection {

View File

@@ -1,6 +1,6 @@
use hyper_rustls::{HttpsConnector, HttpsConnectorBuilder};
use hyper_util::client::legacy::Client;
use hyper_util::client::legacy::connect::HttpConnector;
use hyper_util::client::legacy::Client;
use hyper_util::rt::TokioExecutor;
use tonic::body::BoxBody;
@@ -16,4 +16,3 @@ pub(crate) fn get_transport() -> Client<HttpsConnector<HttpConnector>, BoxBody>
.http2_only(true)
.build(connector)
}

View File

@@ -7,19 +7,16 @@ publish = false
[dependencies]
dunce = "1.0.4"
log = "0.4.21"
prost = "0.13.1"
rand = "0.8.5"
serde = { version = "1.0.198", features = ["derive"] }
serde_json = "1.0.113"
tauri = { workspace = true }
tauri-plugin-shell = { workspace = true }
tokio = { version = "1.42.0", features = ["macros", "rt-multi-thread", "process"] }
tonic = { version = "0.12.3"}
ts-rs = { workspace = true }
ts-rs = { workspace = true, features = ["import-esm"] }
thiserror = "2.0.7"
yaak-models = { workspace = true }
regex = "1.10.6"
path-slash = "0.2.1"
[build-dependencies]
tonic-build = "0.12.1"
tokio-tungstenite = "0.26.1"
futures-util = "0.3.30"

View File

@@ -1,15 +1,15 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { Environment } from "./models";
import type { Folder } from "./models";
import type { GrpcRequest } from "./models";
import type { HttpRequest } from "./models";
import type { HttpResponse } from "./models";
import type { JsonValue } from "./serde_json/JsonValue";
import type { Workspace } from "./models";
import type { Environment } from "./models.js";
import type { Folder } from "./models.js";
import type { GrpcRequest } from "./models.js";
import type { HttpRequest } from "./models.js";
import type { HttpResponse } from "./models.js";
import type { JsonValue } from "./serde_json/JsonValue.js";
import type { Workspace } from "./models.js";
export type BootRequest = { dir: string, watch: boolean, };
export type BootResponse = { name: string, version: string, capabilities: Array<string>, };
export type BootResponse = { name: string, version: string, };
export type CallHttpAuthenticationRequest = { config: { [key in string]?: JsonValue }, method: string, url: string, headers: Array<HttpHeader>, };
@@ -38,6 +38,8 @@ export type EditorLanguage = "text" | "javascript" | "json" | "html" | "xml" | "
export type EmptyPayload = {};
export type ErrorResponse = { error: string, };
export type ExportHttpRequestRequest = { httpRequest: HttpRequest, };
export type ExportHttpRequestResponse = { content: string, };
@@ -242,9 +244,9 @@ export type ImportResources = { workspaces: Array<Workspace>, environments: Arra
export type ImportResponse = { resources: ImportResources, };
export type InternalEvent = { id: string, pluginRefId: string, replyId: string | null, payload: InternalEventPayload, windowContext: WindowContext, };
export type InternalEvent = { id: string, pluginRefId: string, pluginName: string, replyId: string | null, windowContext: WindowContext, payload: InternalEventPayload, };
export type InternalEventPayload = { "type": "boot_request" } & BootRequest | { "type": "boot_response" } & BootResponse | { "type": "reload_request" } & EmptyPayload | { "type": "reload_response" } & EmptyPayload | { "type": "terminate_request" } | { "type": "terminate_response" } | { "type": "import_request" } & ImportRequest | { "type": "import_response" } & ImportResponse | { "type": "filter_request" } & FilterRequest | { "type": "filter_response" } & FilterResponse | { "type": "export_http_request_request" } & ExportHttpRequestRequest | { "type": "export_http_request_response" } & ExportHttpRequestResponse | { "type": "send_http_request_request" } & SendHttpRequestRequest | { "type": "send_http_request_response" } & SendHttpRequestResponse | { "type": "get_http_request_actions_request" } & EmptyPayload | { "type": "get_http_request_actions_response" } & GetHttpRequestActionsResponse | { "type": "call_http_request_action_request" } & CallHttpRequestActionRequest | { "type": "get_template_functions_request" } | { "type": "get_template_functions_response" } & GetTemplateFunctionsResponse | { "type": "call_template_function_request" } & CallTemplateFunctionRequest | { "type": "call_template_function_response" } & CallTemplateFunctionResponse | { "type": "get_http_authentication_request" } & EmptyPayload | { "type": "get_http_authentication_response" } & GetHttpAuthenticationResponse | { "type": "call_http_authentication_request" } & CallHttpAuthenticationRequest | { "type": "call_http_authentication_response" } & CallHttpAuthenticationResponse | { "type": "copy_text_request" } & CopyTextRequest | { "type": "render_http_request_request" } & RenderHttpRequestRequest | { "type": "render_http_request_response" } & RenderHttpRequestResponse | { "type": "template_render_request" } & TemplateRenderRequest | { "type": "template_render_response" } & TemplateRenderResponse | { "type": "show_toast_request" } & ShowToastRequest | { "type": "prompt_text_request" } & 
PromptTextRequest | { "type": "prompt_text_response" } & PromptTextResponse | { "type": "get_http_request_by_id_request" } & GetHttpRequestByIdRequest | { "type": "get_http_request_by_id_response" } & GetHttpRequestByIdResponse | { "type": "find_http_responses_request" } & FindHttpResponsesRequest | { "type": "find_http_responses_response" } & FindHttpResponsesResponse | { "type": "empty_response" } & EmptyPayload;
export type InternalEventPayload = { "type": "boot_request" } & BootRequest | { "type": "boot_response" } & BootResponse | { "type": "reload_request" } & EmptyPayload | { "type": "reload_response" } & EmptyPayload | { "type": "terminate_request" } | { "type": "terminate_response" } | { "type": "import_request" } & ImportRequest | { "type": "import_response" } & ImportResponse | { "type": "filter_request" } & FilterRequest | { "type": "filter_response" } & FilterResponse | { "type": "export_http_request_request" } & ExportHttpRequestRequest | { "type": "export_http_request_response" } & ExportHttpRequestResponse | { "type": "send_http_request_request" } & SendHttpRequestRequest | { "type": "send_http_request_response" } & SendHttpRequestResponse | { "type": "get_http_request_actions_request" } & EmptyPayload | { "type": "get_http_request_actions_response" } & GetHttpRequestActionsResponse | { "type": "call_http_request_action_request" } & CallHttpRequestActionRequest | { "type": "get_template_functions_request" } | { "type": "get_template_functions_response" } & GetTemplateFunctionsResponse | { "type": "call_template_function_request" } & CallTemplateFunctionRequest | { "type": "call_template_function_response" } & CallTemplateFunctionResponse | { "type": "get_http_authentication_request" } & EmptyPayload | { "type": "get_http_authentication_response" } & GetHttpAuthenticationResponse | { "type": "call_http_authentication_request" } & CallHttpAuthenticationRequest | { "type": "call_http_authentication_response" } & CallHttpAuthenticationResponse | { "type": "copy_text_request" } & CopyTextRequest | { "type": "render_http_request_request" } & RenderHttpRequestRequest | { "type": "render_http_request_response" } & RenderHttpRequestResponse | { "type": "template_render_request" } & TemplateRenderRequest | { "type": "template_render_response" } & TemplateRenderResponse | { "type": "show_toast_request" } & ShowToastRequest | { "type": "prompt_text_request" } & 
PromptTextRequest | { "type": "prompt_text_response" } & PromptTextResponse | { "type": "get_http_request_by_id_request" } & GetHttpRequestByIdRequest | { "type": "get_http_request_by_id_response" } & GetHttpRequestByIdResponse | { "type": "find_http_responses_request" } & FindHttpResponsesRequest | { "type": "find_http_responses_response" } & FindHttpResponsesResponse | { "type": "empty_response" } & EmptyPayload | { "type": "error_response" } & ErrorResponse;
export type PromptTextRequest = { id: string, title: string, label: string, description?: string, defaultValue?: string, placeholder?: string,
/**

View File

@@ -1,6 +0,0 @@
fn main() -> Result<(), Box<dyn std::error::Error>> {
// Compile protobuf types
tonic_build::compile_protos("../../proto/plugins/runtime.proto")?;
Ok(())
}

View File

@@ -1,4 +1,4 @@
use crate::server::plugin_runtime::EventStreamEvent;
use crate::events::InternalEvent;
use thiserror::Error;
use tokio::io;
use tokio::sync::mpsc::error::SendError;
@@ -14,18 +14,15 @@ pub enum Error {
#[error("Tauri shell error: {0}")]
TauriShellErr(#[from] tauri_plugin_shell::Error),
#[error("Grpc transport error: {0}")]
GrpcTransportErr(#[from] tonic::transport::Error),
#[error("Grpc send error: {0}")]
GrpcSendErr(#[from] SendError<tonic::Result<EventStreamEvent>>),
GrpcSendErr(#[from] SendError<InternalEvent>),
#[error("JSON error: {0}")]
JsonErr(#[from] serde_json::Error),
#[error("Plugin not found: {0}")]
PluginNotFoundErr(String),
#[error("Auth plugin not found: {0}")]
AuthPluginNotFound(String),

View File

@@ -1,4 +1,5 @@
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;
use tauri::{Runtime, WebviewWindow};
use ts_rs::TS;
@@ -11,9 +12,24 @@ use yaak_models::models::{Environment, Folder, GrpcRequest, HttpRequest, HttpRes
pub struct InternalEvent {
pub id: String,
pub plugin_ref_id: String,
pub plugin_name: String,
pub reply_id: Option<String>,
pub payload: InternalEventPayload,
pub window_context: WindowContext,
pub payload: InternalEventPayload,
}
/// Special type used to deserialize everything but the payload. This is so we can
/// catch any plugin-related type errors, since payload is sent by the plugin author
/// and all other fields are sent by Yaak first-party code.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct InternalEventRawPayload {
    pub id: String,
    pub plugin_ref_id: String,
    pub plugin_name: String,
    pub reply_id: Option<String>,
    pub window_context: WindowContext,
    // Kept as raw JSON; decoded into InternalEventPayload in a second step so a
    // malformed plugin-authored payload can be surfaced as an ErrorResponse
    // instead of failing deserialization of the whole event.
    pub payload: Value,
}
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
@@ -95,6 +111,8 @@ pub enum InternalEventPayload {
/// Returned when a plugin doesn't get run, just so the server
/// has something to listen for
EmptyResponse(EmptyPayload),
ErrorResponse(ErrorResponse),
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
@@ -102,6 +120,13 @@ pub enum InternalEventPayload {
#[ts(export, type = "{}", export_to = "events.ts")]
pub struct EmptyPayload {}
/// Payload substituted for a plugin's response when its payload fails to
/// deserialize; carries the human-readable error description.
#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
#[serde(default)]
#[ts(export, export_to = "events.ts")]
pub struct ErrorResponse {
    // Description of what went wrong, including the offending plugin's name.
    pub error: String,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "events.ts")]
@@ -116,7 +141,6 @@ pub struct BootRequest {
pub struct BootResponse {
pub name: String,
pub version: String,
pub capabilities: Vec<String>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]

View File

@@ -9,7 +9,7 @@ pub mod events;
pub mod manager;
mod nodejs;
pub mod plugin_handle;
mod server;
mod server_ws;
mod util;
pub fn init<R: Runtime>() -> TauriPlugin<R> {

View File

@@ -12,8 +12,7 @@ use crate::events::{
};
use crate::nodejs::start_nodejs_plugin_runtime;
use crate::plugin_handle::PluginHandle;
use crate::server::plugin_runtime::plugin_runtime_server::PluginRuntimeServer;
use crate::server::PluginRuntimeServerImpl;
use crate::server_ws::PluginRuntimeServerWebsocket;
use log::{info, warn};
use std::collections::HashMap;
use std::env;
@@ -25,8 +24,6 @@ use tauri::{AppHandle, Manager, Runtime, WebviewWindow};
use tokio::fs::read_dir;
use tokio::net::TcpListener;
use tokio::sync::{mpsc, Mutex};
use tonic::codegen::tokio_stream;
use tonic::transport::Server;
use yaak_models::queries::{generate_id, list_plugins};
#[derive(Clone)]
@@ -34,7 +31,7 @@ pub struct PluginManager {
subscribers: Arc<Mutex<HashMap<String, mpsc::Sender<InternalEvent>>>>,
plugins: Arc<Mutex<Vec<PluginHandle>>>,
kill_tx: tokio::sync::watch::Sender<bool>,
grpc_service: Arc<PluginRuntimeServerImpl>,
ws_service: Arc<PluginRuntimeServerWebsocket>,
}
#[derive(Clone)]
@@ -50,13 +47,13 @@ impl PluginManager {
let (client_disconnect_tx, mut client_disconnect_rx) = mpsc::channel(128);
let (client_connect_tx, mut client_connect_rx) = tokio::sync::watch::channel(false);
let grpc_service =
PluginRuntimeServerImpl::new(events_tx, client_disconnect_tx, client_connect_tx);
let ws_service =
PluginRuntimeServerWebsocket::new(events_tx, client_disconnect_tx, client_connect_tx);
let plugin_manager = PluginManager {
plugins: Arc::new(Mutex::new(Vec::new())),
subscribers: Arc::new(Mutex::new(HashMap::new())),
grpc_service: Arc::new(grpc_service.clone()),
ws_service: Arc::new(ws_service.clone()),
kill_tx: kill_server_tx,
};
@@ -79,14 +76,9 @@ impl PluginManager {
}
});
info!("Starting plugin server");
let svc = PluginRuntimeServer::new(grpc_service.to_owned())
.max_encoding_message_size(usize::MAX)
.max_decoding_message_size(usize::MAX);
let listen_addr = match option_env!("PORT") {
None => "localhost:0".to_string(),
let listen_addr = match option_env!("YAAK_PLUGIN_SERVER_PORT") {
Some(port) => format!("localhost:{port}"),
None => "localhost:0".to_string(),
};
let listener = tauri::async_runtime::block_on(async move {
TcpListener::bind(listen_addr).await.expect("Failed to bind TCP listener")
@@ -114,14 +106,9 @@ impl PluginManager {
};
// 1. Spawn server in the background
info!("Starting gRPC plugin server on {addr}");
info!("Starting plugin server on {addr}");
tauri::async_runtime::spawn(async move {
Server::builder()
.timeout(Duration::from_secs(10))
.add_service(svc)
.serve_with_incoming(tokio_stream::wrappers::TcpListenerStream::new(listener))
.await
.expect("grpc plugin runtime server failed to start");
ws_service.listen(listener).await;
});
// 2. Start Node.js runtime and initialize plugins
@@ -203,7 +190,7 @@ impl PluginManager {
watch: bool,
) -> Result<()> {
info!("Adding plugin by dir {dir}");
let maybe_tx = self.grpc_service.app_to_plugin_events_tx.lock().await;
let maybe_tx = self.ws_service.app_to_plugin_events_tx.lock().await;
let tx = match &*maybe_tx {
None => return Err(ClientNotInitializedErr),
Some(tx) => tx,
@@ -357,21 +344,23 @@ impl PluginManager {
.collect::<Vec<InternalEvent>>();
// 2. Spawn thread to subscribe to incoming events and check reply ids
let send_events_fut = {
let sub_events_fut = {
let events_to_send = events_to_send.clone();
tokio::spawn(async move {
let mut found_events = Vec::new();
while let Some(event) = rx.recv().await {
if events_to_send
let matched_sent_event = events_to_send
.iter()
.find(|e| Some(e.id.to_owned()) == event.reply_id)
.is_some()
{
.is_some();
if matched_sent_event {
found_events.push(event.clone());
};
if found_events.len() == events_to_send.len() {
let found_them_all = found_events.len() == events_to_send.len();
if found_them_all{
break;
}
}
@@ -390,7 +379,7 @@ impl PluginManager {
}
// 4. Join on the spawned thread
let events = send_events_fut.await.expect("Thread didn't succeed");
let events = sub_events_fut.await.expect("Thread didn't succeed");
// 5. Unsubscribe
self.unsubscribe(rx_id.as_str()).await;
@@ -502,7 +491,7 @@ impl PluginManager {
// Clone for mutability
let mut req = req.clone();
// Fill in default values
// Fill in default values
for arg in authentication.config.clone() {
let base = match arg {
FormInput::Text(a) => a.base,

View File

@@ -32,8 +32,8 @@ pub async fn start_nodejs_plugin_runtime<R: Runtime>(
let cmd = app
.shell()
.sidecar("yaaknode")?
.env("PORT", addr.port().to_string())
.args(&[plugin_runtime_main]);
.env("YAAK_PLUGIN_RUNTIME_PORT", addr.port().to_string())
.args(&[&plugin_runtime_main]);
let (mut child_rx, child) = cmd.spawn()?;
info!("Spawned plugin runtime");

View File

@@ -1,8 +1,8 @@
use crate::error::Result;
use crate::events::{BootResponse, InternalEvent, InternalEventPayload, WindowContext};
use crate::server::plugin_runtime::EventStreamEvent;
use crate::util::gen_id;
use log::info;
use std::path::Path;
use std::sync::Arc;
use tokio::sync::{mpsc, Mutex};
@@ -10,12 +10,12 @@ use tokio::sync::{mpsc, Mutex};
pub struct PluginHandle {
pub ref_id: String,
pub dir: String,
pub(crate) to_plugin_tx: Arc<Mutex<mpsc::Sender<tonic::Result<EventStreamEvent>>>>,
pub(crate) to_plugin_tx: Arc<Mutex<mpsc::Sender<InternalEvent>>>,
pub(crate) boot_resp: Arc<Mutex<BootResponse>>,
}
impl PluginHandle {
pub fn new(dir: &str, tx: mpsc::Sender<tonic::Result<EventStreamEvent>>) -> Self {
pub fn new(dir: &str, tx: mpsc::Sender<InternalEvent>) -> Self {
let ref_id = gen_id();
PluginHandle {
@@ -46,9 +46,11 @@ impl PluginHandle {
payload: &InternalEventPayload,
reply_id: Option<String>,
) -> InternalEvent {
let dir = Path::new(&self.dir);
InternalEvent {
id: gen_id(),
plugin_ref_id: self.ref_id.clone(),
plugin_name: dir.file_name().unwrap().to_str().unwrap().to_string(),
reply_id,
payload: payload.clone(),
window_context,
@@ -63,13 +65,7 @@ impl PluginHandle {
}
pub(crate) async fn send(&self, event: &InternalEvent) -> Result<()> {
self.to_plugin_tx
.lock()
.await
.send(Ok(EventStreamEvent {
event: serde_json::to_string(event)?,
}))
.await?;
self.to_plugin_tx.lock().await.send(event.to_owned()).await?;
Ok(())
}

View File

@@ -1,99 +0,0 @@
use log::warn;
use std::pin::Pin;
use std::sync::Arc;
use tokio::sync::{mpsc, Mutex};
use tonic::codegen::tokio_stream::wrappers::ReceiverStream;
use tonic::codegen::tokio_stream::{Stream, StreamExt};
use tonic::{Request, Response, Status, Streaming};
use crate::events::InternalEvent;
use crate::server::plugin_runtime::plugin_runtime_server::PluginRuntime;
use plugin_runtime::EventStreamEvent;
/// Generated protobuf/tonic types for the plugin runtime gRPC service.
pub mod plugin_runtime {
    tonic::include_proto!("yaak.plugins.runtime");
}

/// Boxed stream of events flowing from the app to the plugin runtime client.
type ResponseStream = Pin<Box<dyn Stream<Item = Result<EventStreamEvent, Status>> + Send>>;

/// gRPC service implementation bridging the Yaak app and the external
/// plugin runtime process.
#[derive(Clone)]
pub(crate) struct PluginRuntimeServerImpl {
    // Sender for app -> plugin events; None until a client opens the event
    // stream (it is installed in `event_stream`).
    pub(crate) app_to_plugin_events_tx:
        Arc<Mutex<Option<mpsc::Sender<tonic::Result<EventStreamEvent>>>>>,
    // Notified with `true` when the client's incoming stream ends.
    client_disconnect_tx: mpsc::Sender<bool>,
    // Watch channel flipped to `true` when a client connects.
    client_connect_tx: tokio::sync::watch::Sender<bool>,
    // Deserialized events received from the plugin runtime are forwarded here
    // for the app to handle.
    plugin_to_app_events_tx: mpsc::Sender<InternalEvent>,
}
impl PluginRuntimeServerImpl {
    /// Build a server with no connected client. The app->plugin sender stays
    /// `None` until a runtime client opens the event stream; `events_tx`
    /// receives events coming back from the plugin runtime, `disconnect_tx`
    /// is notified when the client's stream ends, and `connect_tx` is flipped
    /// to `true` when a client connects.
    pub fn new(
        events_tx: mpsc::Sender<InternalEvent>,
        disconnect_tx: mpsc::Sender<bool>,
        connect_tx: tokio::sync::watch::Sender<bool>,
    ) -> Self {
        Self {
            plugin_to_app_events_tx: events_tx,
            client_connect_tx: connect_tx,
            client_disconnect_tx: disconnect_tx,
            app_to_plugin_events_tx: Arc::new(Mutex::new(None)),
        }
    }
}
#[tonic::async_trait]
impl PluginRuntime for PluginRuntimeServerImpl {
    type EventStreamStream = ResponseStream;

    /// Bidirectional event stream between the app and the plugin runtime.
    /// Installs this connection's outbound sender on `self` (so the rest of
    /// the app can push events to the plugin), signals the connect watch
    /// channel, and spawns a task that forwards deserialized inbound events
    /// to `plugin_to_app_events_tx` until the client stream ends.
    async fn event_stream(
        &self,
        req: Request<Streaming<EventStreamEvent>>,
    ) -> tonic::Result<Response<Self::EventStreamStream>> {
        let mut in_stream = req.into_inner();
        let (to_plugin_tx, to_plugin_rx) = mpsc::channel::<tonic::Result<EventStreamEvent>>(128);

        // Replace any previous connection's sender with this connection's
        let mut app_to_plugin_events_tx = self.app_to_plugin_events_tx.lock().await;
        *app_to_plugin_events_tx = Some(to_plugin_tx);

        let plugin_to_app_events_tx = self.plugin_to_app_events_tx.clone();
        let client_disconnect_tx = self.client_disconnect_tx.clone();

        self.client_connect_tx.send(true).expect("Failed to send client ready event");

        tokio::spawn(async move {
            while let Some(result) = in_stream.next().await {
                // Received event from plugin runtime
                match result {
                    Ok(v) => {
                        // Each gRPC message carries a JSON-encoded InternalEvent
                        let event: InternalEvent = match serde_json::from_str(v.event.as_str()) {
                            Ok(pe) => pe,
                            Err(e) => {
                                warn!("Failed to deserialize event {e:?} -> {}", v.event);
                                continue;
                            }
                        };

                        // Send event to subscribers
                        // Emit event to the channel for server to handle
                        if let Err(e) = plugin_to_app_events_tx.try_send(event.clone()) {
                            warn!("Failed to send to channel. Receiver probably isn't listening: {:?}", e);
                        }
                    }
                    Err(err) => {
                        // TODO: Better error handling
                        warn!("gRPC server error {err}");
                        break;
                    }
                };
            }

            // Stream ended (client disconnected or errored) — notify listeners
            if let Err(e) = client_disconnect_tx.send(true).await {
                warn!("Failed to send killed event {:?}", e);
            }
        });

        // Write the same data that was received
        let out_stream = ReceiverStream::new(to_plugin_rx);
        Ok(Response::new(Box::pin(out_stream) as Self::EventStreamStream))
    }
}

View File

@@ -0,0 +1,132 @@
use crate::events::{ErrorResponse, InternalEvent, InternalEventPayload, InternalEventRawPayload};
use futures_util::{SinkExt, StreamExt};
use log::{error, info, warn};
use std::sync::Arc;
use tokio::net::{TcpListener, TcpStream};
use tokio::sync::{mpsc, Mutex};
use tokio_tungstenite::accept_async_with_config;
use tokio_tungstenite::tungstenite::protocol::WebSocketConfig;
use tokio_tungstenite::tungstenite::Message;
/// WebSocket server that bridges the Yaak app and the external plugin runtime
/// process (this commit replaces the previous gRPC transport).
#[derive(Clone)]
pub(crate) struct PluginRuntimeServerWebsocket {
    // Sender for app -> plugin events; None until a runtime client connects
    // (it is installed in `accept_connection`).
    pub(crate) app_to_plugin_events_tx: Arc<Mutex<Option<mpsc::Sender<InternalEvent>>>>,
    // Notified with `true` when the connected client's socket loop ends.
    client_disconnect_tx: mpsc::Sender<bool>,
    // Watch channel flipped to `true` once a client completes the handshake.
    client_connect_tx: tokio::sync::watch::Sender<bool>,
    // Events received from the plugin runtime are forwarded here for the app.
    plugin_to_app_events_tx: mpsc::Sender<InternalEvent>,
}
impl PluginRuntimeServerWebsocket {
    /// Create a server with no connected client. `events_tx` receives events
    /// coming from the plugin runtime, `disconnect_tx` is notified when a
    /// client's socket loop terminates, and `connect_tx` is flipped to `true`
    /// once a client completes the websocket handshake.
    pub fn new(
        events_tx: mpsc::Sender<InternalEvent>,
        disconnect_tx: mpsc::Sender<bool>,
        connect_tx: tokio::sync::watch::Sender<bool>,
    ) -> Self {
        PluginRuntimeServerWebsocket {
            app_to_plugin_events_tx: Arc::new(Mutex::new(None)),
            client_disconnect_tx: disconnect_tx,
            client_connect_tx: connect_tx,
            plugin_to_app_events_tx: events_tx,
        }
    }

    /// Accept plugin runtime connections forever. Each accepted TCP stream is
    /// upgraded to a websocket and handled by `accept_connection`.
    pub async fn listen(&self, listener: TcpListener) {
        while let Ok((stream, _)) = listener.accept().await {
            self.accept_connection(stream).await;
        }
    }

    /// Perform the websocket handshake on `stream`, install this connection as
    /// the active app->plugin event channel, and spawn a task that shuttles
    /// events in both directions until the socket closes.
    async fn accept_connection(&self, stream: TcpStream) {
        let (to_plugin_tx, mut to_plugin_rx) = mpsc::channel::<InternalEvent>(128);

        // Replace any previous connection's sender with this connection's
        let mut app_to_plugin_events_tx = self.app_to_plugin_events_tx.lock().await;
        *app_to_plugin_events_tx = Some(to_plugin_tx);

        let plugin_to_app_events_tx = self.plugin_to_app_events_tx.clone();
        let client_disconnect_tx = self.client_disconnect_tx.clone();
        let client_connect_tx = self.client_connect_tx.clone();

        let addr = stream.peer_addr().expect("connected streams should have a peer address");
        let conf = WebSocketConfig::default();
        let ws_stream = accept_async_with_config(stream, Some(conf))
            .await
            .expect("Error during the websocket handshake occurred");
        let (mut ws_sender, mut ws_receiver) = ws_stream.split();

        tauri::async_runtime::spawn(async move {
            client_connect_tx.send(true).expect("Failed to send client ready event");
            info!("New plugin runtime websocket connection: {}", addr);
            loop {
                tokio::select! {
                    msg = ws_receiver.next() => {
                        let msg = match msg {
                            Some(Ok(msg)) => msg,
                            Some(Err(e)) => {
                                warn!("Websocket error {e:?}");
                                continue;
                            }
                            None => break,
                        };

                        // Skip non-text messages (ping/pong/binary/close).
                        // BUG FIX: this used to `return`, which silently killed
                        // the connection task on the first control frame and
                        // skipped the disconnect notification below.
                        if !msg.is_text() {
                            continue;
                        }

                        let text = match msg.into_text() {
                            Ok(t) => t,
                            Err(e) => {
                                // Shouldn't happen after is_text(), but don't panic
                                warn!("Failed to read websocket text frame {e:?}");
                                continue;
                            }
                        };

                        // Parse everything but the payload first, so a malformed
                        // payload (authored by the plugin) can be surfaced as an
                        // ErrorResponse instead of being dropped entirely.
                        let event = match serde_json::from_str::<InternalEventRawPayload>(&text) {
                            Ok(e) => e,
                            Err(e) => {
                                error!("Failed to decode plugin event {e:?}");
                                continue;
                            }
                        };
                        let payload = serde_json::from_value::<InternalEventPayload>(event.payload)
                            .unwrap_or_else(|e| {
                                InternalEventPayload::ErrorResponse(ErrorResponse {
                                    error: format!("Plugin error from {}: {e:?}", event.plugin_name),
                                })
                            });
                        let event = InternalEvent {
                            id: event.id,
                            payload,
                            plugin_ref_id: event.plugin_ref_id,
                            plugin_name: event.plugin_name,
                            window_context: event.window_context,
                            reply_id: event.reply_id,
                        };

                        // Send event to subscribers
                        // Emit event to the channel for server to handle
                        if let Err(e) = plugin_to_app_events_tx.try_send(event) {
                            warn!("Failed to send to channel. Receiver probably isn't listening: {:?}", e);
                        }
                    }
                    event_for_plugin = to_plugin_rx.recv() => {
                        match event_for_plugin {
                            None => {
                                // All app-side senders were dropped (presumably a
                                // newer connection replaced ours — TODO confirm),
                                // so exit without signalling a disconnect.
                                error!("Plugin runtime client WS channel closed");
                                return;
                            },
                            Some(event) => {
                                match serde_json::to_string(&event) {
                                    Ok(json) => {
                                        // BUG FIX: was `unwrap()`, which panicked on a
                                        // send failure; now tear down the connection
                                        // gracefully and report the disconnect.
                                        if let Err(e) = ws_sender.send(Message::text(json)).await {
                                            warn!("Failed to send event to plugin runtime: {e:?}");
                                            break;
                                        }
                                    }
                                    // BUG FIX: was `unwrap()`; a serialization failure
                                    // now drops only the offending event.
                                    Err(e) => warn!("Failed to serialize event for plugin: {e:?}"),
                                }
                            }
                        }
                    }
                }
            }
            if let Err(e) = client_disconnect_tx.send(true).await {
                warn!("Failed to send killed event {:?}", e);
            }
        });
    }
}

View File

@@ -1,6 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SyncModel } from "./models";
import type { SyncState } from "./models";
import type { SyncModel } from "./models.js";
import type { SyncState } from "./models.js";
export type FsCandidate = { "type": "FsCandidate", model: SyncModel, relPath: string, checksum: string, };