Mirror of https://github.com/mountain-loop/yaak.git (synced 2026-01-20 02:27:21 +01:00)
Compare commits (802 commits)
.github/workflows/artifacts.yml (vendored, 72 lines changed)
@@ -1,72 +0,0 @@
name: Generate Artifacts
on:
  push:
    tags: [ v* ]

permissions: write-all

jobs:
  build-artifacts:
    name: Build
    strategy:
      fail-fast: false
      matrix:
        include:
          - os: macos-latest
            target: aarch64-apple-darwin
          - os: macos-latest
            target: x86_64-apple-darwin
          - os: windows-latest
            target: x86_64-pc-windows-msvc
          - os: ubuntu-20.04
            target: x86_64-unknown-linux-gnu

    runs-on: ${{ matrix.os }}

    steps:
      - uses: actions/checkout@v3
      - uses: dtolnay/rust-toolchain@stable
        with:
          targets: ${{ matrix.target }}
      - name: Cache Rust
        uses: actions/cache@v2
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            ./src-tauri/target
          key: ${{ runner.os }}-cargo-${{ hashFiles('src-tauri/Cargo.lock') }}
      - uses: actions/setup-node@v3
        with:
          node-version: 20
          cache: 'npm'
      - name: install dependencies (ubuntu only)
        if: matrix.os == 'ubuntu-20.04'
        run: |
          sudo apt-get update
          sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf
      - name: Install dependencies
        run: npm ci
      - name: Run tests
        run: npm test
      # Pin dev version to get non-default targets
      # https://github.com/tauri-apps/tauri-action/issues/356
      - uses: tauri-apps/tauri-action@dev
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
          TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
          ENABLE_CODE_SIGNING: ${{ secrets.APPLE_CERTIFICATE }}
          APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
          APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
          APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
          APPLE_ID: ${{ secrets.APPLE_ID }}
          APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
          APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
        with:
          tagName: 'v__VERSION__'
          releaseName: 'Release __VERSION__'
          releaseBody: 'https://yaak.app/changelog/__VERSION__'
          releaseDraft: true
          prerelease: false
          args: '--target ${{ matrix.target }}'
.github/workflows/release.yml (vendored, new file, 64 lines)
@@ -0,0 +1,64 @@
name: Generate Artifacts
on:
  push:
    tags: [ v* ]

jobs:
  build-artifacts:
    permissions:
      contents: write
    name: Build
    strategy:
      fail-fast: false
      matrix:
        include:
          - platform: 'macos-latest' # for Arm based macs (M1 and above).
            args: '--target aarch64-apple-darwin'
          - platform: 'macos-latest' # for Intel based macs.
            args: '--target x86_64-apple-darwin'
          - platform: 'ubuntu-22.04' # for Tauri v1 you could replace this with ubuntu-20.04.
            args: ''
          - platform: 'windows-latest'
            args: ''
    runs-on: ${{ matrix.platform }}
    steps:
      - uses: actions/checkout@v4
      - name: setup node
        uses: actions/setup-node@v4
        with:
          node-version: lts/*
      - name: install Rust stable
        uses: dtolnay/rust-toolchain@stable
        with:
          # Those targets are only used on macos runners so it's in an `if` to slightly speed up windows and linux builds.
          targets: ${{ matrix.platform == 'macos-latest' && 'aarch64-apple-darwin,x86_64-apple-darwin' || '' }}
      - name: install dependencies (ubuntu only)
        if: matrix.platform == 'ubuntu-22.04' # This must match the platform value defined above.
        run: |
          sudo apt-get update
          sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
      - name: Install dependencies
        run: npm ci
      - name: Run lint
        run: npm run lint
      - name: Run tests
        run: npm test
      - uses: tauri-apps/tauri-action@v0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
          TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
          ENABLE_CODE_SIGNING: ${{ secrets.APPLE_CERTIFICATE }}
          APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
          APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
          APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
          APPLE_ID: ${{ secrets.APPLE_ID }}
          APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
          APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
        with:
          tagName: 'v__VERSION__'
          releaseName: 'Release __VERSION__'
          releaseBody: 'https://yaak.app/changelog/__VERSION__'
          releaseDraft: true
          prerelease: false
          args: ${{ matrix.args }}
.gitignore (vendored, 2 lines changed)
@@ -26,3 +26,5 @@ dist-ssr
 
 *.sqlite
+*.sqlite-*
 
+.cargo
@@ -1,20 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="Build Desktop" type="ShConfigurationType">
    <option name="SCRIPT_TEXT" value="npm run tauri build -- --target universal-apple-darwin" />
    <option name="INDEPENDENT_SCRIPT_PATH" value="true" />
    <option name="SCRIPT_PATH" value="" />
    <option name="SCRIPT_OPTIONS" value="" />
    <option name="INDEPENDENT_SCRIPT_WORKING_DIRECTORY" value="true" />
    <option name="SCRIPT_WORKING_DIRECTORY" value="$PROJECT_DIR$" />
    <option name="INDEPENDENT_INTERPRETER_PATH" value="true" />
    <option name="INTERPRETER_PATH" value="/bin/zsh" />
    <option name="INTERPRETER_OPTIONS" value="" />
    <option name="EXECUTE_IN_TERMINAL" value="true" />
    <option name="EXECUTE_SCRIPT_FILE" value="false" />
    <envs>
      <env name="TAURI_KEY_PASSWORD" value="fishhook-upstream-wash-assured" />
      <env name="TAURI_PRIVATE_KEY" value="dW50cnVzdGVkIGNvbW1lbnQ6IHJzaWduIGVuY3J5cHRlZCBzZWNyZXQga2V5ClJXUlRZMEl5OGxWaytTa3dIa2xXVUltQzRGUXIzd2lYQ2NpV0ZhQURSbWJWZ1NrK0tnY0FBQkFBQUFBQUFBQUFBQUlBQUFBQUV2M1VKdVRyVHpHSzhQdGc2ZVFtOVNsMU5tNEVSN280cFNrbXhncW9tdjNXaFJZUTJqUzQ5Q01zWTJWRVhaY1pGNHNjR1NFR3JmcWFRN09NdWdGMXpZVXhzejR4V3lDV1JpZHlnbW5LNS9vMFFtRlZjbUl4YjZSNzhlMmk3ait5SExYcG5QZUkxOFE9Cg==" />
    </envs>
    <method v="2" />
  </configuration>
</component>
@@ -1 +0,0 @@
plugins
@@ -1,8 +1,8 @@
 <!DOCTYPE html>
 <html lang="en">
 <head>
-<meta charset="UTF-8" />
-<meta name="viewport" content="width=device-width, initial-scale=1.0" />
+<meta charset="UTF-8"/>
+<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
 <title>Yaak App</title>
 <!-- <script src="http://localhost:8097"></script>-->
 
@@ -15,13 +15,13 @@
 
 @media (prefers-color-scheme: dark) {
 html, body {
-background-color: black;
+background-color: #1b1a29;
 }
 }
 </style>
 </head>
 
-<body>
+<body class="text-base">
 <div id="root"></div>
-<div id="cm-portal" class="cm-portal"></div>
+<div id="react-portal"></div>
 
package-lock.json (generated, 2994 lines changed)
File diff suppressed because it is too large
package.json (32 lines changed)
@@ -4,8 +4,9 @@
 "version": "0.0.0",
 "type": "module",
 "scripts": {
-"start": "npm run build:plugins && npm run tauri-dev",
-"tauri-dev": "tauri dev --no-watch --config ./src-tauri/tauri-dev.conf.json",
+"start": "npm run tauri-dev:desktop",
+"tauri-dev:desktop": "tauri dev --no-watch --config ./src-tauri/tauri-dev.conf.json",
+"tauri-dev:ios": "tauri ios dev --force-ip-prompt --config ./src-tauri/tauri-dev.conf.json",
 "tauri-build": "tauri build",
 "tauri": "tauri",
 "build": "npm run build:frontend",
@@ -15,9 +16,11 @@
 "build:icon:dev": "tauri icon design/icon-dev.png --output ./src-tauri/icons/dev",
 "build:frontend": "vite build",
 "build:plugins": "run-p build:plugin:*",
+"build:plugin:exporter-curl": "cd plugins/exporter-curl && vite build --emptyOutDir",
 "build:plugin:importer-insomnia": "cd plugins/importer-insomnia && vite build --emptyOutDir",
 "build:plugin:importer-postman": "cd plugins/importer-postman && vite build --emptyOutDir",
 "build:plugin:importer-yaak": "cd plugins/importer-yaak && vite build --emptyOutDir",
+"build:plugin:importer-curl": "cd plugins/importer-curl && vite build --emptyOutDir",
 "build:plugin:filter-jsonpath": "cd plugins/filter-jsonpath && vite build --emptyOutDir",
 "build:plugin:filter-xpath": "cd plugins/filter-xpath && vite build --emptyOutDir",
 "test": "vitest",
@@ -26,7 +29,7 @@
 },
 "dependencies": {
 "@codemirror/commands": "^6.2.1",
-"@codemirror/lang-javascript": "^6.1.4",
+"@codemirror/lang-javascript": "^6.2.2",
 "@codemirror/lang-json": "^6.0.1",
 "@codemirror/lang-xml": "^6.0.2",
 "@codemirror/language": "^6.6.0",
@@ -36,11 +39,13 @@
 "@lezer/lr": "^1.3.3",
 "@react-hook/resize-observer": "^1.2.6",
 "@tailwindcss/container-queries": "^0.1.0",
-"@tanstack/query-sync-storage-persister": "^4.27.1",
-"@tanstack/react-query": "^4.28.0",
-"@tanstack/react-query-devtools": "^4.28.0",
-"@tanstack/react-query-persist-client": "^4.28.0",
-"@tauri-apps/api": "^1.5.3",
+"@tanstack/react-query": "^5.35.5",
+"@tauri-apps/api": ">=2.0.0-beta.0",
+"@tauri-apps/plugin-clipboard-manager": "^2.1.0-beta.1",
+"@tauri-apps/plugin-dialog": ">=2.0.0-beta.0",
+"@tauri-apps/plugin-fs": ">=2.0.0-beta.0",
+"@tauri-apps/plugin-os": ">=2.0.0-beta.0",
+"@tauri-apps/plugin-shell": ">=2.0.0-beta.0",
 "buffer": "^6.0.3",
 "classnames": "^2.3.2",
 "cm6-graphql": "^0.0.9",
@@ -51,6 +56,7 @@
 "format-graphql": "^1.4.0",
 "framer-motion": "^9.0.4",
 "lucide-react": "^0.309.0",
+"mime": "^4.0.1",
 "papaparse": "^5.4.1",
 "parse-color": "^1.0.0",
 "react": "^18.2.0",
@@ -61,13 +67,14 @@
 "react-router-dom": "^6.8.1",
 "react-use": "^17.4.0",
 "slugify": "^1.6.6",
-"tauri-plugin-log-api": "github:tauri-apps/tauri-plugin-log#v1",
+"tauri-plugin-log-api": "github:tauri-apps/tauri-plugin-log#v2",
 "uuid": "^9.0.0",
 "xml-formatter": "^3.6.2"
 },
 "devDependencies": {
 "@tailwindcss/nesting": "^0.0.0-insiders.565cd3e",
-"@tauri-apps/cli": "^1.5.10",
+"@tanstack/react-query-devtools": "^5.35.5",
+"@tauri-apps/cli": ">=2.0.0-beta.0",
 "@types/node": "^18.7.10",
 "@types/papaparse": "^5.3.7",
 "@types/parse-color": "^1.0.1",
@@ -86,6 +93,7 @@
 "eslint-plugin-react": "^7.32.2",
 "eslint-plugin-react-hooks": "^4.6.0",
 "husky": "^8.0.3",
+"internal-ip": "^8.0.0",
 "lint-staged": "^15.0.2",
 "npm-run-all": "^4.1.5",
 "postcss": "^8.4.21",
@@ -93,8 +101,8 @@
 "prettier": "^2.8.4",
 "react-devtools": "^4.27.2",
 "tailwindcss": "^3.2.7",
-"typescript": "^5.3.3",
-"vite": "^5.1.1",
+"typescript": "^5.4.5",
+"vite": "^5.0.0",
 "vite-plugin-svgr": "^4.2.0",
 "vite-plugin-top-level-await": "^1.4.1",
 "vitest": "^1.3.0"
plugins/exporter-curl/package-lock.json (generated, new file, 1544 lines)
File diff suppressed because it is too large
plugins/exporter-curl/package.json (new file, 7 lines)
@@ -0,0 +1,7 @@
{
  "name": "exporter-curl",
  "version": "0.0.1",
  "devDependencies": {
    "vitest": "^1.4.0"
  }
}
plugins/exporter-curl/src/index.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
import { HttpRequest } from '../../../src-web/lib/models';

const NEWLINE = '\\\n ';

export function pluginHookExport(request: Partial<HttpRequest>) {
  const xs = ['curl'];

  // Add method and URL all on first line
  if (request.method) xs.push('-X', request.method);
  if (request.url) xs.push(quote(request.url));

  xs.push(NEWLINE);

  // Add URL params
  for (const p of (request.urlParameters ?? []).filter(onlyEnabled)) {
    xs.push('--url-query', quote(`${p.name}=${p.value}`));
    xs.push(NEWLINE);
  }

  // Add headers
  for (const h of (request.headers ?? []).filter(onlyEnabled)) {
    xs.push('--header', quote(`${h.name}: ${h.value}`));
    xs.push(NEWLINE);
  }

  // Add form params
  if (Array.isArray(request.body?.form)) {
    const flag = request.bodyType === 'multipart/form-data' ? '--form' : '--data';
    for (const p of (request.body?.form ?? []).filter(onlyEnabled)) {
      if (p.file) {
        let v = `${p.name}=@${p.file}`;
        v += p.contentType ? `;type=${p.contentType}` : '';
        xs.push(flag, v);
      } else {
        xs.push(flag, quote(`${p.name}=${p.value}`));
      }
      xs.push(NEWLINE);
    }
  } else if (typeof request.body?.text === 'string') {
    // --data-raw $'...' to do special ANSI C quoting
    xs.push('--data-raw', `$${quote(request.body.text)}`);
    xs.push(NEWLINE);
  }

  // Add basic/digest authentication
  if (request.authenticationType === 'basic' || request.authenticationType === 'digest') {
    if (request.authenticationType === 'digest') xs.push('--digest');
    xs.push(
      '--user',
      quote(`${request.authentication?.username ?? ''}:${request.authentication?.password ?? ''}`),
    );
    xs.push(NEWLINE);
  }

  // Add bearer authentication
  if (request.authenticationType === 'bearer') {
    xs.push('--header', quote(`Authorization: Bearer ${request.authentication?.token ?? ''}`));
    xs.push(NEWLINE);
  }

  // Remove trailing newline
  if (xs[xs.length - 1] === NEWLINE) {
    xs.splice(xs.length - 1, 1);
  }

  return xs.join(' ');
}

function quote(arg: string): string {
  const escaped = arg.replace(/'/g, "\\'");
  return `'${escaped}'`;
}

function onlyEnabled(v: { name?: string; enabled?: boolean }): boolean {
  return v.enabled !== false && !!v.name;
}
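As a rough illustration (not part of the diff), here is a minimal sketch of how the new export hook above can be called from other TypeScript code; the relative import path and the request fields shown are assumptions based only on the file and tests in this change set.

// Minimal usage sketch for the exporter-curl hook, assuming it sits next to src/index.ts.
import { pluginHookExport } from './index';

const curlCommand = pluginHookExport({
  method: 'POST',
  url: 'https://yaak.app',
  headers: [{ name: 'Content-Type', value: 'application/json', enabled: true }],
  bodyType: 'application/json',
  body: { text: '{"hello":"world"}' },
});

// Produces a curl string with each flag continued on an escaped newline, e.g.
// curl -X POST 'https://yaak.app' ... --header 'Content-Type: application/json' ... --data-raw $'{"hello":"world"}'
console.log(curlCommand);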
plugins/exporter-curl/tests/index.test.ts (new file, 175 lines)
@@ -0,0 +1,175 @@
import { describe, expect, test } from 'vitest';
import { pluginHookExport } from '../src';

describe('exporter-curl', () => {
  test('Exports GET with params', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        urlParameters: [
          { name: 'a', value: 'aaa' },
          { name: 'b', value: 'bbb', enabled: true },
          { name: 'c', value: 'ccc', enabled: false },
        ],
      }),
    ).toEqual(
      [`curl 'https://yaak.app'`, `--url-query 'a=aaa'`, `--url-query 'b=bbb'`].join(` \\\n `),
    );
  });
  test('Exports POST with url form data', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        method: 'POST',
        bodyType: 'application/x-www-form-urlencoded',
        body: {
          form: [
            { name: 'a', value: 'aaa' },
            { name: 'b', value: 'bbb', enabled: true },
            { name: 'c', value: 'ccc', enabled: false },
          ],
        },
      }),
    ).toEqual(
      [`curl -X POST 'https://yaak.app'`, `--data 'a=aaa'`, `--data 'b=bbb'`].join(` \\\n `),
    );
  });

  test('Exports PUT with multipart form', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        method: 'PUT',
        bodyType: 'multipart/form-data',
        body: {
          form: [
            { name: 'a', value: 'aaa' },
            { name: 'b', value: 'bbb', enabled: true },
            { name: 'c', value: 'ccc', enabled: false },
            { name: 'f', file: '/foo/bar.png', contentType: 'image/png' },
          ],
        },
      }),
    ).toEqual(
      [
        `curl -X PUT 'https://yaak.app'`,
        `--form 'a=aaa'`,
        `--form 'b=bbb'`,
        `--form f=@/foo/bar.png;type=image/png`,
      ].join(` \\\n `),
    );
  });

  test('Exports JSON body', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        method: 'POST',
        bodyType: 'application/json',
        body: {
          text: `{"foo":"bar's"}`,
        },
        headers: [{ name: 'Content-Type', value: 'application/json' }],
      }),
    ).toEqual(
      [
        `curl -X POST 'https://yaak.app'`,
        `--header 'Content-Type: application/json'`,
        `--data-raw $'{"foo":"bar\\'s"}'`,
      ].join(` \\\n `),
    );
  });

  test('Exports multi-line JSON body', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        method: 'POST',
        bodyType: 'application/json',
        body: {
          text: `{"foo":"bar",\n"baz":"qux"}`,
        },
        headers: [{ name: 'Content-Type', value: 'application/json' }],
      }),
    ).toEqual(
      [
        `curl -X POST 'https://yaak.app'`,
        `--header 'Content-Type: application/json'`,
        `--data-raw $'{"foo":"bar",\n"baz":"qux"}'`,
      ].join(` \\\n `),
    );
  });

  test('Exports headers', () => {
    expect(
      pluginHookExport({
        headers: [
          { name: 'a', value: 'aaa' },
          { name: 'b', value: 'bbb', enabled: true },
          { name: 'c', value: 'ccc', enabled: false },
        ],
      }),
    ).toEqual([`curl`, `--header 'a: aaa'`, `--header 'b: bbb'`].join(` \\\n `));
  });

  test('Basic auth', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        authenticationType: 'basic',
        authentication: {
          username: 'user',
          password: 'pass',
        },
      }),
    ).toEqual([`curl 'https://yaak.app'`, `--user 'user:pass'`].join(` \\\n `));
  });

  test('Broken basic auth', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        authenticationType: 'basic',
        authentication: {},
      }),
    ).toEqual([`curl 'https://yaak.app'`, `--user ':'`].join(` \\\n `));
  });

  test('Digest auth', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        authenticationType: 'digest',
        authentication: {
          username: 'user',
          password: 'pass',
        },
      }),
    ).toEqual([`curl 'https://yaak.app'`, `--digest --user 'user:pass'`].join(` \\\n `));
  });

  test('Bearer auth', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        authenticationType: 'bearer',
        authentication: {
          token: 'tok',
        },
      }),
    ).toEqual([`curl 'https://yaak.app'`, `--header 'Authorization: Bearer tok'`].join(` \\\n `));
  });

  test('Broken bearer auth', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        authenticationType: 'bearer',
        authentication: {
          username: 'user',
          password: 'pass',
        },
      }),
    ).toEqual([`curl 'https://yaak.app'`, `--header 'Authorization: Bearer '`].join(` \\\n `));
  });
});
plugins/exporter-curl/vite.config.js (new file, 13 lines)
@@ -0,0 +1,13 @@
import { resolve } from 'path';
import { defineConfig } from 'vite';

export default defineConfig({
  build: {
    lib: {
      entry: resolve(__dirname, 'src/index.ts'),
      fileName: 'index',
      formats: ['es'],
    },
    outDir: resolve(__dirname, '../../src-tauri/plugins/exporter-curl'),
  },
});
plugins/importer-curl/package-lock.json (generated, new file, 1562 lines)
File diff suppressed because it is too large
plugins/importer-curl/package.json (new file, 11 lines)
@@ -0,0 +1,11 @@
{
  "name": "importer-curl",
  "version": "0.0.1",
  "dependencies": {
    "shell-quote": "^1.8.1"
  },
  "devDependencies": {
    "@types/shell-quote": "^1.7.5",
    "vitest": "^1.4.0"
  }
}
plugins/importer-curl/src/index.ts (new file, 421 lines)
@@ -0,0 +1,421 @@
import { ControlOperator, parse, ParseEntry } from 'shell-quote';
import {
  Environment,
  Folder,
  HttpRequest,
  HttpUrlParameter,
  Model,
  Workspace,
} from '../../../src-web/lib/models';

type AtLeast<T, K extends keyof T> = Partial<T> & Pick<T, K>;

interface ExportResources {
  workspaces: AtLeast<Workspace, 'name' | 'id' | 'model'>[];
  environments: AtLeast<Environment, 'name' | 'id' | 'model' | 'workspaceId'>[];
  httpRequests: AtLeast<HttpRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
  folders: AtLeast<Folder, 'name' | 'id' | 'model' | 'workspaceId'>[];
}

export const id = 'curl';
export const name = 'cURL';
export const description = 'cURL command line tool';

const DATA_FLAGS = ['d', 'data', 'data-raw', 'data-urlencode', 'data-binary', 'data-ascii'];
const SUPPORTED_ARGS = [
  ['url'], // Specify the URL explicitly
  ['user', 'u'], // Authentication
  ['digest'], // Apply auth as digest
  ['header', 'H'],
  ['cookie', 'b'],
  ['get', 'G'], // Put the post data in the URL
  ['d', 'data'], // Add url encoded data
  ['data-raw'],
  ['data-urlencode'],
  ['data-binary'],
  ['data-ascii'],
  ['form', 'F'], // Add multipart data
  ['request', 'X'], // Request method
  DATA_FLAGS,
].flatMap((v) => v);

type Pair = string | boolean;

type PairsByName = Record<string, Pair[]>;

export function pluginHookImport(rawData: string) {
  if (!rawData.match(/^\s*curl /)) {
    return null;
  }

  const commands: ParseEntry[][] = [];

  // Replace non-escaped newlines with semicolons to make parsing easier
  // NOTE: This is really slow in debug build but fast in release mode
  const normalizedData = rawData.replace(/\ncurl/g, '; curl');

  let currentCommand: ParseEntry[] = [];

  const parsed = parse(normalizedData);

  // Break up `-XPOST` into `-X POST`
  const normalizedParseEntries = parsed.flatMap((entry) => {
    if (
      typeof entry === 'string' &&
      entry.startsWith('-') &&
      !entry.startsWith('--') &&
      entry.length > 2
    ) {
      return [entry.slice(0, 2), entry.slice(2)];
    }
    return entry;
  });

  for (const parseEntry of normalizedParseEntries) {
    if (typeof parseEntry === 'string') {
      if (parseEntry.startsWith('$')) {
        currentCommand.push(parseEntry.slice(1));
      } else {
        currentCommand.push(parseEntry);
      }
      continue;
    }

    if ('comment' in parseEntry) {
      continue;
    }

    const { op } = parseEntry as { op: 'glob'; pattern: string } | { op: ControlOperator };

    // `;` separates commands
    if (op === ';') {
      commands.push(currentCommand);
      currentCommand = [];
      continue;
    }

    if (op?.startsWith('$')) {
      // Handle the case where literal like -H $'Header: \'Some Quoted Thing\''
      const str = op.slice(2, op.length - 1).replace(/\\'/g, "'");

      currentCommand.push(str);
      continue;
    }

    if (op === 'glob') {
      currentCommand.push((parseEntry as { op: 'glob'; pattern: string }).pattern);
    }
  }

  commands.push(currentCommand);

  const workspace: ExportResources['workspaces'][0] = {
    model: 'workspace',
    id: generateId('workspace'),
    name: 'Curl Import',
  };

  const requests: ExportResources['httpRequests'] = commands
    .filter((command) => command[0] === 'curl')
    .map((v) => importCommand(v, workspace.id));

  return {
    resources: {
      httpRequests: requests,
      workspaces: [workspace],
    },
  };
}

export function importCommand(parseEntries: ParseEntry[], workspaceId: string) {
  // ~~~~~~~~~~~~~~~~~~~~~ //
  // Collect all the flags //
  // ~~~~~~~~~~~~~~~~~~~~~ //
  const pairsByName: PairsByName = {};
  const singletons: ParseEntry[] = [];

  // Start at 1 so we can skip the ^curl part
  for (let i = 1; i < parseEntries.length; i++) {
    let parseEntry = parseEntries[i];
    if (typeof parseEntry === 'string') {
      parseEntry = parseEntry.trim();
    }

    if (typeof parseEntry === 'string' && parseEntry.match(/^-{1,2}[\w-]+/)) {
      const isSingleDash = parseEntry[0] === '-' && parseEntry[1] !== '-';
      let name = parseEntry.replace(/^-{1,2}/, '');

      if (!SUPPORTED_ARGS.includes(name)) {
        continue;
      }

      let value;
      const nextEntry = parseEntries[i + 1];
      if (isSingleDash && name.length > 1) {
        // Handle squished arguments like -XPOST
        value = name.slice(1);
        name = name.slice(0, 1);
      } else if (typeof nextEntry === 'string' && !nextEntry.startsWith('-')) {
        // Next arg is not a flag, so assign it as the value
        value = nextEntry;
        i++; // Skip next one
      } else {
        value = true;
      }

      pairsByName[name] = pairsByName[name] || [];
      pairsByName[name]!.push(value);
    } else if (parseEntry) {
      singletons.push(parseEntry);
    }
  }

  // ~~~~~~~~~~~~~~~~~ //
  // Build the request //
  // ~~~~~~~~~~~~~~~~~ //

  // Url & parameters

  let urlParameters: HttpUrlParameter[];
  let url: string;

  const urlArg = getPairValue(pairsByName, (singletons[0] as string) || '', ['url']);
  const [baseUrl, search] = splitOnce(urlArg, '?');
  urlParameters =
    search?.split('&').map((p) => {
      const v = splitOnce(p, '=');
      return { name: v[0] ?? '', value: v[1] ?? '', enabled: true };
    }) ?? [];

  url = baseUrl ?? urlArg;

  // Authentication
  const [username, password] = getPairValue(pairsByName, '', ['u', 'user']).split(/:(.*)$/);

  const isDigest = getPairValue(pairsByName, false, ['digest']);
  const authenticationType = username ? (isDigest ? 'digest' : 'basic') : null;
  const authentication = username
    ? {
        username: username.trim(),
        password: (password ?? '').trim(),
      }
    : {};

  // Headers
  const headers = [
    ...((pairsByName.header as string[] | undefined) || []),
    ...((pairsByName.H as string[] | undefined) || []),
  ].map((header) => {
    const [name, value] = header.split(/:(.*)$/);
    // remove final colon from header name if present
    if (!value) {
      return {
        name: (name ?? '').trim().replace(/;$/, ''),
        value: '',
        enabled: true,
      };
    }
    return {
      name: (name ?? '').trim(),
      value: value.trim(),
      enabled: true,
    };
  });

  // Cookies
  const cookieHeaderValue = [
    ...((pairsByName.cookie as string[] | undefined) || []),
    ...((pairsByName.b as string[] | undefined) || []),
  ]
    .map((str) => {
      const name = str.split('=', 1)[0];
      const value = str.replace(`${name}=`, '');
      return `${name}=${value}`;
    })
    .join('; ');

  // Convert cookie value to header
  const existingCookieHeader = headers.find((header) => header.name.toLowerCase() === 'cookie');

  if (cookieHeaderValue && existingCookieHeader) {
    // Has existing cookie header, so let's update it
    existingCookieHeader.value += `; ${cookieHeaderValue}`;
  } else if (cookieHeaderValue) {
    // No existing cookie header, so let's make a new one
    headers.push({
      name: 'Cookie',
      value: cookieHeaderValue,
      enabled: true,
    });
  }

  ///Body (Text or Blob)
  const dataParameters = pairsToDataParameters(pairsByName);
  const contentTypeHeader = headers.find((header) => header.name.toLowerCase() === 'content-type');
  const mimeType = contentTypeHeader ? contentTypeHeader.value.split(';')[0] : null;

  // Body (Multipart Form Data)
  const formDataParams = [
    ...((pairsByName.form as string[] | undefined) || []),
    ...((pairsByName.F as string[] | undefined) || []),
  ].map((str) => {
    const parts = str.split('=');
    const name = parts[0] ?? '';
    const value = parts[1] ?? '';
    const item: { name: string; value?: string; file?: string; enabled: boolean } = {
      name,
      enabled: true,
    };

    if (value.indexOf('@') === 0) {
      item.file = value.slice(1);
    } else {
      item.value = value;
    }

    return item;
  });

  // Body
  let body = {};
  let bodyType: string | null = null;
  const bodyAsGET = getPairValue(pairsByName, false, ['G', 'get']);

  if (dataParameters.length > 0 && bodyAsGET) {
    urlParameters.push(...dataParameters);
  } else if (
    dataParameters.length > 0 &&
    (mimeType == null || mimeType === 'application/x-www-form-urlencoded')
  ) {
    bodyType = mimeType ?? 'application/x-www-form-urlencoded';
    body = {
      form: dataParameters.map((parameter) => ({
        ...parameter,
        name: decodeURIComponent(parameter.name || ''),
        value: decodeURIComponent(parameter.value || ''),
      })),
    };
    headers.push({
      name: 'Content-Type',
      value: 'application/x-www-form-urlencoded',
      enabled: true,
    });
  } else if (dataParameters.length > 0) {
    bodyType =
      mimeType === 'application/json' || mimeType === 'text/xml' || mimeType === 'text/plain'
        ? mimeType
        : 'other';
    body = {
      text: dataParameters
        .map(({ name, value }) => (name && value ? `${name}=${value}` : name || value))
        .join('&'),
    };
  } else if (formDataParams.length) {
    bodyType = mimeType ?? 'multipart/form-data';
    body = {
      form: formDataParams,
    };
    if (mimeType == null) {
      headers.push({
        name: 'Content-Type',
        value: 'multipart/form-data',
        enabled: true,
      });
    }
  }

  // Method
  let method = getPairValue(pairsByName, '', ['X', 'request']).toUpperCase();

  if (method === '' && body) {
    method = 'text' in body || 'form' in body ? 'POST' : 'GET';
  }

  const request: ExportResources['httpRequests'][0] = {
    id: generateId('http_request'),
    model: 'http_request',
    workspaceId,
    name: '',
    urlParameters,
    url,
    method,
    headers,
    authentication,
    authenticationType,
    body,
    bodyType,
    folderId: null,
    sortPriority: 0,
  };

  return request;
}

const pairsToDataParameters = (keyedPairs: PairsByName) => {
  let dataParameters: {
    name: string;
    value: string;
    contentType?: string;
    filePath?: string;
    enabled?: boolean;
  }[] = [];

  for (const flagName of DATA_FLAGS) {
    const pairs = keyedPairs[flagName];

    if (!pairs || pairs.length === 0) {
      continue;
    }

    for (const p of pairs) {
      if (typeof p !== 'string') continue;

      const [name, value] = p.split('=');
      if (p.startsWith('@')) {
        // Yaak doesn't support files in url-encoded data, so
        dataParameters.push({
          name: name ?? '',
          value: '',
          filePath: p.slice(1),
          enabled: true,
        });
      } else {
        dataParameters.push({
          name: name ?? '',
          value: flagName === 'data-urlencode' ? encodeURIComponent(value ?? '') : value ?? '',
          enabled: true,
        });
      }
    }
  }

  return dataParameters;
};

const getPairValue = <T extends string | boolean>(
  pairsByName: PairsByName,
  defaultValue: T,
  names: string[],
) => {
  for (const name of names) {
    if (pairsByName[name] && pairsByName[name]!.length) {
      return pairsByName[name]![0] as T;
    }
  }

  return defaultValue;
};

function splitOnce(str: string, sep: string): string[] {
  const index = str.indexOf(sep);
  if (index > -1) {
    return [str.slice(0, index), str.slice(index + 1)];
  }
  return [str];
}

const idCount: Partial<Record<Model['model'], number>> = {};
function generateId(model: Model['model']): string {
  idCount[model] = (idCount[model] ?? -1) + 1;
  return `GENERATE_ID::${model.toUpperCase()}_${idCount[model]}`;
}
335
plugins/importer-curl/tests/index.test.ts
Normal file
335
plugins/importer-curl/tests/index.test.ts
Normal file
@@ -0,0 +1,335 @@
|
||||
import { describe, expect, test } from 'vitest';
|
||||
import { HttpRequest, Model, Workspace } from '../../../src-web/lib/models';
|
||||
import { pluginHookImport } from '../src';
|
||||
|
||||
describe('importer-curl', () => {
|
||||
test('Imports basic GET', () => {
|
||||
expect(pluginHookImport('curl https://yaak.app')).toEqual({
|
||||
resources: {
|
||||
workspaces: [baseWorkspace()],
|
||||
httpRequests: [
|
||||
baseRequest({
|
||||
url: 'https://yaak.app',
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('Explicit URL', () => {
|
||||
expect(pluginHookImport('curl --url https://yaak.app')).toEqual({
|
||||
resources: {
|
||||
workspaces: [baseWorkspace()],
|
||||
httpRequests: [
|
||||
baseRequest({
|
||||
url: 'https://yaak.app',
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('Missing URL', () => {
|
||||
expect(pluginHookImport('curl -X POST')).toEqual({
|
||||
resources: {
|
||||
workspaces: [baseWorkspace()],
|
||||
httpRequests: [
|
||||
baseRequest({
|
||||
method: 'POST',
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('URL between', () => {
|
||||
expect(pluginHookImport('curl -v https://yaak.app -X POST')).toEqual({
|
||||
resources: {
|
||||
workspaces: [baseWorkspace()],
|
||||
httpRequests: [
|
||||
baseRequest({
|
||||
url: 'https://yaak.app',
|
||||
method: 'POST',
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('Random flags', () => {
|
||||
expect(pluginHookImport('curl --random -Z -Y -S --foo https://yaak.app')).toEqual({
|
||||
resources: {
|
||||
workspaces: [baseWorkspace()],
|
||||
httpRequests: [
|
||||
baseRequest({
|
||||
url: 'https://yaak.app',
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('Imports --request method', () => {
|
||||
expect(pluginHookImport('curl --request POST https://yaak.app')).toEqual({
|
||||
resources: {
|
||||
workspaces: [baseWorkspace()],
|
||||
httpRequests: [
|
||||
baseRequest({
|
||||
url: 'https://yaak.app',
|
||||
method: 'POST',
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('Imports -XPOST method', () => {
|
||||
expect(pluginHookImport('curl -XPOST --request POST https://yaak.app')).toEqual({
|
||||
resources: {
|
||||
workspaces: [baseWorkspace()],
|
||||
httpRequests: [
|
||||
baseRequest({
|
||||
url: 'https://yaak.app',
|
||||
method: 'POST',
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('Imports multiple requests', () => {
|
||||
expect(
|
||||
pluginHookImport('curl \\\n https://yaak.app\necho "foo"\ncurl example.com;curl foo.com'),
|
||||
).toEqual({
|
||||
resources: {
|
||||
workspaces: [baseWorkspace()],
|
||||
httpRequests: [
|
||||
baseRequest({ url: 'https://yaak.app' }),
|
||||
baseRequest({ url: 'example.com' }),
|
||||
baseRequest({ url: 'foo.com' }),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('Imports form data', () => {
|
||||
expect(
|
||||
pluginHookImport('curl -X POST -F "a=aaa" -F b=bbb" -F f=@filepath https://yaak.app'),
|
||||
).toEqual({
|
||||
resources: {
|
||||
workspaces: [baseWorkspace()],
|
||||
httpRequests: [
|
||||
baseRequest({
|
||||
method: 'POST',
|
||||
url: 'https://yaak.app',
|
||||
headers: [
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'multipart/form-data',
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
bodyType: 'multipart/form-data',
|
||||
body: {
|
||||
form: [
|
||||
{ enabled: true, name: 'a', value: 'aaa' },
|
||||
{ enabled: true, name: 'b', value: 'bbb' },
|
||||
{ enabled: true, name: 'f', file: 'filepath' },
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('Imports data params as form url-encoded', () => {
|
||||
expect(pluginHookImport('curl -d a -d b -d c=ccc https://yaak.app')).toEqual({
|
||||
resources: {
|
||||
workspaces: [baseWorkspace()],
|
||||
httpRequests: [
|
||||
baseRequest({
|
||||
method: 'POST',
|
||||
url: 'https://yaak.app',
|
||||
bodyType: 'application/x-www-form-urlencoded',
|
||||
headers: [
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'application/x-www-form-urlencoded',
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
body: {
|
||||
form: [
|
||||
{ name: 'a', value: '', enabled: true },
|
||||
{ name: 'b', value: '', enabled: true },
|
||||
{ name: 'c', value: 'ccc', enabled: true },
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('Imports data params as text', () => {
|
||||
expect(
|
||||
pluginHookImport('curl -H Content-Type:text/plain -d a -d b -d c=ccc https://yaak.app'),
|
||||
).toEqual({
|
||||
resources: {
|
||||
workspaces: [baseWorkspace()],
|
||||
httpRequests: [
|
||||
baseRequest({
|
||||
method: 'POST',
|
||||
url: 'https://yaak.app',
|
||||
headers: [{ name: 'Content-Type', value: 'text/plain', enabled: true }],
|
||||
bodyType: 'text/plain',
|
||||
body: { text: 'a&b&c=ccc' },
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('Imports multi-line JSON', () => {
|
||||
expect(
|
||||
pluginHookImport(
|
||||
`curl -H Content-Type:application/json -d $'{\n "foo":"bar"\n}' https://yaak.app`,
|
||||
),
|
||||
).toEqual({
|
||||
resources: {
|
||||
workspaces: [baseWorkspace()],
|
||||
httpRequests: [
|
||||
baseRequest({
|
||||
method: 'POST',
|
||||
url: 'https://yaak.app',
|
||||
headers: [{ name: 'Content-Type', value: 'application/json', enabled: true }],
|
||||
bodyType: 'application/json',
|
||||
body: { text: '{\n "foo":"bar"\n}' },
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('Imports multiple headers', () => {
|
||||
expect(
|
||||
pluginHookImport('curl -H Foo:bar --header Name -H AAA:bbb -H :ccc https://yaak.app'),
|
||||
).toEqual({
|
||||
resources: {
|
||||
workspaces: [baseWorkspace()],
|
||||
httpRequests: [
|
||||
baseRequest({
|
||||
url: 'https://yaak.app',
|
||||
headers: [
|
||||
{ name: 'Name', value: '', enabled: true },
|
||||
{ name: 'Foo', value: 'bar', enabled: true },
|
||||
{ name: 'AAA', value: 'bbb', enabled: true },
|
||||
{ name: '', value: 'ccc', enabled: true },
|
||||
],
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
  test('Imports basic auth', () => {
    expect(pluginHookImport('curl --user user:pass https://yaak.app')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
            authenticationType: 'basic',
            authentication: {
              username: 'user',
              password: 'pass',
            },
          }),
        ],
      },
    });
  });

  test('Imports digest auth', () => {
    expect(pluginHookImport('curl --digest --user user:pass https://yaak.app')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
            authenticationType: 'digest',
            authentication: {
              username: 'user',
              password: 'pass',
            },
          }),
        ],
      },
    });
  });

  test('Imports cookie as header', () => {
    expect(pluginHookImport('curl --cookie "foo=bar" https://yaak.app')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
            headers: [{ name: 'Cookie', value: 'foo=bar', enabled: true }],
          }),
        ],
      },
    });
  });

  test('Imports query params from the URL', () => {
    expect(pluginHookImport('curl "https://yaak.app?foo=bar&baz=a%20a"')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
            urlParameters: [
              { name: 'foo', value: 'bar', enabled: true },
              { name: 'baz', value: 'a%20a', enabled: true },
            ],
          }),
        ],
      },
    });
  });
});

const idCount: Partial<Record<Model['model'], number>> = {};

function baseRequest(mergeWith: Partial<HttpRequest>) {
  idCount.http_request = (idCount.http_request ?? -1) + 1;
  return {
    id: `GENERATE_ID::HTTP_REQUEST_${idCount.http_request}`,
    model: 'http_request',
    authentication: {},
    authenticationType: null,
    body: {},
    bodyType: null,
    folderId: null,
    headers: [],
    method: 'GET',
    name: '',
    sortPriority: 0,
    url: '',
    urlParameters: [],
    workspaceId: `GENERATE_ID::WORKSPACE_${idCount.workspace}`,
    ...mergeWith,
  };
}

function baseWorkspace(mergeWith: Partial<Workspace> = {}) {
  idCount.workspace = (idCount.workspace ?? -1) + 1;
  return {
    id: `GENERATE_ID::WORKSPACE_${idCount.workspace}`,
    model: 'workspace',
    name: 'Curl Import',
    ...mergeWith,
  };
}
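Note: the two helpers above rely on evaluation order. In every expected object, workspaces is listed before httpRequests, so baseWorkspace() bumps idCount.workspace before baseRequest() reads it, and the request's workspaceId matches the workspace's id. A minimal sketch of that pairing, assuming fresh counters (illustrative only, not part of the test file):

const workspace = baseWorkspace();                        // id: 'GENERATE_ID::WORKSPACE_0'
const request = baseRequest({ url: 'https://yaak.app' }); // workspaceId: 'GENERATE_ID::WORKSPACE_0'
// request.workspaceId === workspace.id, so toEqual() lines up with the importer's generated IDs.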
13
plugins/importer-curl/vite.config.js
Normal file
@@ -0,0 +1,13 @@
import { resolve } from 'path';
import { defineConfig } from 'vite';

export default defineConfig({
  build: {
    lib: {
      entry: resolve(__dirname, 'src/index.ts'),
      fileName: 'index',
      formats: ['es'],
    },
    outDir: resolve(__dirname, '../../src-tauri/plugins/importer-curl'),
  },
});
16
plugins/importer-insomnia/package-lock.json
generated
@@ -6,7 +6,21 @@
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "importer-insomnia",
|
||||
"version": "0.0.1"
|
||||
"version": "0.0.1",
|
||||
"dependencies": {
|
||||
"yaml": "^2.4.2"
|
||||
}
|
||||
},
|
||||
"node_modules/yaml": {
|
||||
"version": "2.4.2",
|
||||
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.2.tgz",
|
||||
"integrity": "sha512-B3VqDZ+JAg1nZpaEmWtTXUlBneoGx6CPM9b0TENK6aoSu5t73dItudwdgmi6tHlIZZId4dZ9skcAQ2UbcyAeVA==",
|
||||
"bin": {
|
||||
"yaml": "bin.mjs"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
{
|
||||
"name": "importer-insomnia",
|
||||
"version": "0.0.1"
|
||||
"version": "0.0.1",
|
||||
"dependencies": {
|
||||
"yaml": "^2.4.2"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
export function isWorkspace(obj) {
|
||||
return isJSObject(obj) && obj._type === 'workspace';
|
||||
}
|
||||
|
||||
export function isRequestGroup(obj) {
|
||||
return isJSObject(obj) && obj._type === 'request_group';
|
||||
}
|
||||
|
||||
export function isRequest(obj) {
|
||||
return isJSObject(obj) && obj._type === 'request';
|
||||
}
|
||||
|
||||
export function isEnvironment(obj) {
|
||||
return isJSObject(obj) && obj._type === 'environment';
|
||||
}
|
||||
|
||||
export function isJSObject(obj) {
|
||||
return Object.prototype.toString.call(obj) === '[object Object]';
|
||||
}
|
||||
|
||||
export function isJSString(obj) {
|
||||
return Object.prototype.toString.call(obj) === '[object String]';
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
import { isJSString } from './types.js';
|
||||
|
||||
export function parseVariables(data) {
|
||||
return Object.entries(data).map(([name, value]) => ({
|
||||
enabled: true,
|
||||
name,
|
||||
value: `${value}`,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Insomnia syntax to Yaak syntax
|
||||
* @param {string} variable - Text to convert
|
||||
*/
|
||||
export function convertSyntax(variable) {
|
||||
if (!isJSString(variable)) return variable;
|
||||
return variable.replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, '${[$2]}');
|
||||
}
|
||||
@@ -1,21 +0,0 @@
|
||||
/**
|
||||
* Import an Insomnia environment object.
|
||||
* @param {Object} e - The environment object to import.
|
||||
* @param workspaceId - Workspace to import into.
|
||||
*/
|
||||
export function importEnvironment(e, workspaceId) {
|
||||
console.log('IMPORTING Environment', e._id, e.name, JSON.stringify(e, null, 2));
|
||||
return {
|
||||
id: e._id,
|
||||
createdAt: new Date(e.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(e.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
workspaceId,
|
||||
model: 'environment',
|
||||
name: e.name,
|
||||
variables: Object.entries(e.data).map(([name, value]) => ({
|
||||
enabled: true,
|
||||
name,
|
||||
value: `${value}`,
|
||||
})),
|
||||
};
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
/**
|
||||
* Import an Insomnia folder object.
|
||||
* @param {Object} f - The environment object to import.
|
||||
* @param workspaceId - Workspace to import into.
|
||||
*/
|
||||
export function importFolder(f, workspaceId) {
|
||||
console.log('IMPORTING FOLDER', f._id, f.name, JSON.stringify(f, null, 2));
|
||||
return {
|
||||
id: f._id,
|
||||
createdAt: new Date(f.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(f.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
folderId: f.parentId === workspaceId ? null : f.parentId,
|
||||
workspaceId,
|
||||
model: 'folder',
|
||||
name: f.name,
|
||||
};
|
||||
}
|
||||
@@ -1,60 +0,0 @@
|
||||
import { convertSyntax } from '../helpers/variables.js';
|
||||
|
||||
/**
|
||||
* Import an Insomnia request object.
|
||||
* @param {Object} r - The request object to import.
|
||||
* @param workspaceId - The workspace ID to use for the request.
|
||||
* @param {number} sortPriority - The sort priority to use for the request.
|
||||
*/
|
||||
export function importRequest(r, workspaceId, sortPriority = 0) {
|
||||
console.log('IMPORTING REQUEST', r._id, r.name, JSON.stringify(r, null, 2));
|
||||
|
||||
let bodyType = null;
|
||||
let body = null;
|
||||
if (r.body?.mimeType === 'application/graphql') {
|
||||
bodyType = 'graphql';
|
||||
body = convertSyntax(r.body.text);
|
||||
} else if (r.body?.mimeType === 'application/json') {
|
||||
bodyType = 'application/json';
|
||||
body = convertSyntax(r.body.text);
|
||||
}
|
||||
|
||||
let authenticationType = null;
|
||||
let authentication = {};
|
||||
if (r.authentication.type === 'bearer') {
|
||||
authenticationType = 'bearer';
|
||||
authentication = {
|
||||
token: convertSyntax(r.authentication.token),
|
||||
};
|
||||
} else if (r.authentication.type === 'basic') {
|
||||
authenticationType = 'basic';
|
||||
authentication = {
|
||||
username: convertSyntax(r.authentication.username),
|
||||
password: convertSyntax(r.authentication.password),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
id: r._id,
|
||||
createdAt: new Date(r.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(r.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
workspaceId,
|
||||
folderId: r.parentId === workspaceId ? null : r.parentId,
|
||||
model: 'http_request',
|
||||
sortPriority,
|
||||
name: r.name,
|
||||
url: convertSyntax(r.url),
|
||||
body,
|
||||
bodyType,
|
||||
authentication,
|
||||
authenticationType,
|
||||
method: r.method,
|
||||
headers: (r.headers ?? [])
|
||||
.map(({ name, value, disabled }) => ({
|
||||
enabled: !disabled,
|
||||
name,
|
||||
value,
|
||||
}))
|
||||
.filter(({ name, value }) => name !== '' || value !== ''),
|
||||
};
|
||||
}
|
||||
@@ -1,76 +0,0 @@
|
||||
import { importEnvironment } from './importers/environment.js';
|
||||
import { importRequest } from './importers/request.js';
|
||||
import {
|
||||
isEnvironment,
|
||||
isJSObject,
|
||||
isRequest,
|
||||
isRequestGroup,
|
||||
isWorkspace,
|
||||
} from './helpers/types.js';
|
||||
import { parseVariables } from './helpers/variables.js';
|
||||
import { importFolder } from './importers/folder.js';
|
||||
|
||||
export function pluginHookImport(contents) {
|
||||
console.log('RUNNING INSOMNIA');
|
||||
let parsed;
|
||||
try {
|
||||
parsed = JSON.parse(contents);
|
||||
} catch (e) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isJSObject(parsed)) return;
|
||||
if (!Array.isArray(parsed.resources)) return;
|
||||
|
||||
const resources = {
|
||||
workspaces: [],
|
||||
requests: [],
|
||||
environments: [],
|
||||
folders: [],
|
||||
};
|
||||
|
||||
// Import workspaces
|
||||
const workspacesToImport = parsed.resources.filter(isWorkspace);
|
||||
for (const workspaceToImport of workspacesToImport) {
|
||||
const baseEnvironment = parsed.resources.find(
|
||||
(r) => isEnvironment(r) && r.parentId === workspaceToImport._id,
|
||||
);
|
||||
resources.workspaces.push({
|
||||
id: workspaceToImport._id,
|
||||
createdAt: new Date(workspacesToImport.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(workspacesToImport.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
model: 'workspace',
|
||||
name: workspaceToImport.name,
|
||||
variables: baseEnvironment ? parseVariables(baseEnvironment.data) : [],
|
||||
});
|
||||
const environmentsToImport = parsed.resources.filter(
|
||||
(r) => isEnvironment(r) && r.parentId === baseEnvironment?._id,
|
||||
);
|
||||
resources.environments.push(
|
||||
...environmentsToImport.map((r) => importEnvironment(r, workspaceToImport._id)),
|
||||
);
|
||||
|
||||
const nextFolder = (parentId) => {
|
||||
const children = parsed.resources.filter((r) => r.parentId === parentId);
|
||||
let sortPriority = 0;
|
||||
for (const child of children) {
|
||||
if (isRequestGroup(child)) {
|
||||
resources.folders.push(importFolder(child, workspaceToImport._id));
|
||||
nextFolder(child._id);
|
||||
} else if (isRequest(child)) {
|
||||
resources.requests.push(importRequest(child, workspaceToImport._id, sortPriority++));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Import folders
|
||||
nextFolder(workspaceToImport._id);
|
||||
}
|
||||
|
||||
// Filter out any `null` values
|
||||
resources.requests = resources.requests.filter(Boolean);
|
||||
resources.environments = resources.environments.filter(Boolean);
|
||||
resources.workspaces = resources.workspaces.filter(Boolean);
|
||||
|
||||
return { resources };
|
||||
}
|
||||
278
plugins/importer-insomnia/src/index.ts
Normal file
@@ -0,0 +1,278 @@
|
||||
import {
|
||||
Environment,
|
||||
Folder,
|
||||
GrpcRequest,
|
||||
HttpRequest,
|
||||
Workspace,
|
||||
} from '../../../src-web/lib/models';
|
||||
import { parse as parseYaml } from 'yaml';
|
||||
|
||||
type AtLeast<T, K extends keyof T> = Partial<T> & Pick<T, K>;
|
||||
|
||||
export interface ExportResources {
|
||||
workspaces: AtLeast<Workspace, 'name' | 'id' | 'model'>[];
|
||||
environments: AtLeast<Environment, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
httpRequests: AtLeast<HttpRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
grpcRequests: AtLeast<GrpcRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
folders: AtLeast<Folder, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
}
|
||||
|
||||
export function pluginHookImport(contents: string) {
|
||||
let parsed: any;
|
||||
|
||||
try {
|
||||
parsed = JSON.parse(contents);
|
||||
} catch (e) {}
|
||||
|
||||
try {
|
||||
parsed = parseYaml(contents);
|
||||
} catch (e) {}
|
||||
|
||||
if (!isJSObject(parsed)) return;
|
||||
if (!Array.isArray(parsed.resources)) return;
|
||||
|
||||
const resources: ExportResources = {
|
||||
workspaces: [],
|
||||
httpRequests: [],
|
||||
grpcRequests: [],
|
||||
environments: [],
|
||||
folders: [],
|
||||
};
|
||||
|
||||
// Import workspaces
|
||||
const workspacesToImport = parsed.resources.filter(isWorkspace);
|
||||
for (const workspaceToImport of workspacesToImport) {
|
||||
const baseEnvironment = parsed.resources.find(
|
||||
(r: any) => isEnvironment(r) && r.parentId === workspaceToImport._id,
|
||||
);
|
||||
resources.workspaces.push({
|
||||
id: convertId(workspaceToImport._id),
|
||||
createdAt: new Date(workspacesToImport.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(workspacesToImport.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
model: 'workspace',
|
||||
name: workspaceToImport.name,
|
||||
variables: baseEnvironment ? parseVariables(baseEnvironment.data) : [],
|
||||
});
|
||||
const environmentsToImport = parsed.resources.filter(
|
||||
(r: any) => isEnvironment(r) && r.parentId === baseEnvironment?._id,
|
||||
);
|
||||
resources.environments.push(
|
||||
...environmentsToImport.map((r: any) => importEnvironment(r, workspaceToImport._id)),
|
||||
);
|
||||
|
||||
const nextFolder = (parentId: string) => {
|
||||
const children = parsed.resources.filter((r: any) => r.parentId === parentId);
|
||||
let sortPriority = 0;
|
||||
for (const child of children) {
|
||||
if (isRequestGroup(child)) {
|
||||
resources.folders.push(importFolder(child, workspaceToImport._id));
|
||||
nextFolder(child._id);
|
||||
} else if (isHttpRequest(child)) {
|
||||
resources.httpRequests.push(
|
||||
importHttpRequest(child, workspaceToImport._id, sortPriority++),
|
||||
);
|
||||
} else if (isGrpcRequest(child)) {
|
||||
resources.grpcRequests.push(
|
||||
importGrpcRequest(child, workspaceToImport._id, sortPriority++),
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Import folders
|
||||
nextFolder(workspaceToImport._id);
|
||||
}
|
||||
|
||||
// Filter out any `null` values
|
||||
resources.httpRequests = resources.httpRequests.filter(Boolean);
|
||||
resources.grpcRequests = resources.grpcRequests.filter(Boolean);
|
||||
resources.environments = resources.environments.filter(Boolean);
|
||||
resources.workspaces = resources.workspaces.filter(Boolean);
|
||||
|
||||
return { resources };
|
||||
}
|
||||
|
||||
function importEnvironment(e: any, workspaceId: string): ExportResources['environments'][0] {
|
||||
return {
|
||||
id: convertId(e._id),
|
||||
createdAt: new Date(e.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(e.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
workspaceId: convertId(workspaceId),
|
||||
model: 'environment',
|
||||
name: e.name,
|
||||
variables: Object.entries(e.data).map(([name, value]) => ({
|
||||
enabled: true,
|
||||
name,
|
||||
value: `${value}`,
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
function importFolder(f: any, workspaceId: string): ExportResources['folders'][0] {
|
||||
return {
|
||||
id: convertId(f._id),
|
||||
createdAt: new Date(f.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(f.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
folderId: f.parentId === workspaceId ? null : convertId(f.parentId),
|
||||
workspaceId: convertId(workspaceId),
|
||||
model: 'folder',
|
||||
name: f.name,
|
||||
};
|
||||
}
|
||||
|
||||
function importGrpcRequest(
|
||||
r: any,
|
||||
workspaceId: string,
|
||||
sortPriority = 0,
|
||||
): ExportResources['grpcRequests'][0] {
|
||||
const parts = r.protoMethodName.split('/').filter((p: any) => p !== '');
|
||||
const service = parts[0] ?? null;
|
||||
const method = parts[1] ?? null;
|
||||
|
||||
return {
|
||||
id: convertId(r._id),
|
||||
createdAt: new Date(r.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(r.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
workspaceId: convertId(workspaceId),
|
||||
folderId: r.parentId === workspaceId ? null : convertId(r.parentId),
|
||||
model: 'grpc_request',
|
||||
sortPriority,
|
||||
name: r.name,
|
||||
url: convertSyntax(r.url),
|
||||
service,
|
||||
method,
|
||||
message: r.body?.text ?? '',
|
||||
metadata: (r.metadata ?? [])
|
||||
.map((h: any) => ({
|
||||
enabled: !h.disabled,
|
||||
name: h.name ?? '',
|
||||
value: h.value ?? '',
|
||||
}))
|
||||
.filter(({ name, value }: any) => name !== '' || value !== ''),
|
||||
};
|
||||
}
|
||||
|
||||
function importHttpRequest(
|
||||
r: any,
|
||||
workspaceId: string,
|
||||
sortPriority = 0,
|
||||
): ExportResources['httpRequests'][0] {
|
||||
let bodyType = null;
|
||||
let body = {};
|
||||
if (r.body.mimeType === 'application/octet-stream') {
|
||||
bodyType = 'binary';
|
||||
body = { filePath: r.body.fileName ?? '' };
|
||||
} else if (r.body?.mimeType === 'application/x-www-form-urlencoded') {
|
||||
bodyType = 'application/x-www-form-urlencoded';
|
||||
body = {
|
||||
form: (r.body.params ?? []).map((p: any) => ({
|
||||
enabled: !p.disabled,
|
||||
name: p.name ?? '',
|
||||
value: p.value ?? '',
|
||||
})),
|
||||
};
|
||||
} else if (r.body?.mimeType === 'multipart/form-data') {
|
||||
bodyType = 'multipart/form-data';
|
||||
body = {
|
||||
form: (r.body.params ?? []).map((p: any) => ({
|
||||
enabled: !p.disabled,
|
||||
name: p.name ?? '',
|
||||
value: p.value ?? '',
|
||||
file: p.fileName ?? null,
|
||||
})),
|
||||
};
|
||||
} else if (r.body?.mimeType === 'application/graphql') {
|
||||
bodyType = 'graphql';
|
||||
body = { text: convertSyntax(r.body.text ?? '') };
|
||||
} else if (r.body?.mimeType === 'application/json') {
|
||||
bodyType = 'application/json';
|
||||
body = { text: convertSyntax(r.body.text ?? '') };
|
||||
}
|
||||
|
||||
let authenticationType = null;
|
||||
let authentication = {};
|
||||
if (r.authentication.type === 'bearer') {
|
||||
authenticationType = 'bearer';
|
||||
authentication = {
|
||||
token: convertSyntax(r.authentication.token),
|
||||
};
|
||||
} else if (r.authentication.type === 'basic') {
|
||||
authenticationType = 'basic';
|
||||
authentication = {
|
||||
username: convertSyntax(r.authentication.username),
|
||||
password: convertSyntax(r.authentication.password),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
id: convertId(r._id),
|
||||
createdAt: new Date(r.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(r.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
workspaceId: convertId(workspaceId),
|
||||
folderId: r.parentId === workspaceId ? null : convertId(r.parentId),
|
||||
model: 'http_request',
|
||||
sortPriority,
|
||||
name: r.name,
|
||||
url: convertSyntax(r.url),
|
||||
body,
|
||||
bodyType,
|
||||
authentication,
|
||||
authenticationType,
|
||||
method: r.method,
|
||||
headers: (r.headers ?? [])
|
||||
.map((h: any) => ({
|
||||
enabled: !h.disabled,
|
||||
name: h.name ?? '',
|
||||
value: h.value ?? '',
|
||||
}))
|
||||
.filter(({ name, value }: any) => name !== '' || value !== ''),
|
||||
};
|
||||
}
|
||||
|
||||
function parseVariables(data: Record<string, string>) {
|
||||
return Object.entries(data).map(([name, value]) => ({
|
||||
enabled: true,
|
||||
name,
|
||||
value: `${value}`,
|
||||
}));
|
||||
}
|
||||
|
||||
function convertSyntax(variable: string): string {
|
||||
if (!isJSString(variable)) return variable;
|
||||
return variable.replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, '${[$2]}');
|
||||
}
|
||||
|
||||
function isWorkspace(obj: any) {
|
||||
return isJSObject(obj) && obj._type === 'workspace';
|
||||
}
|
||||
|
||||
function isRequestGroup(obj: any) {
|
||||
return isJSObject(obj) && obj._type === 'request_group';
|
||||
}
|
||||
|
||||
function isHttpRequest(obj: any) {
|
||||
return isJSObject(obj) && obj._type === 'request';
|
||||
}
|
||||
|
||||
function isGrpcRequest(obj: any) {
|
||||
return isJSObject(obj) && obj._type === 'grpc_request';
|
||||
}
|
||||
|
||||
function isEnvironment(obj: any) {
|
||||
return isJSObject(obj) && obj._type === 'environment';
|
||||
}
|
||||
|
||||
function isJSObject(obj: any) {
|
||||
return Object.prototype.toString.call(obj) === '[object Object]';
|
||||
}
|
||||
|
||||
function isJSString(obj: any) {
|
||||
return Object.prototype.toString.call(obj) === '[object String]';
|
||||
}
|
||||
|
||||
function convertId(id: string): string {
|
||||
if (id.startsWith('GENERATE_ID::')) {
|
||||
return id;
|
||||
}
|
||||
return `GENERATE_ID::${id}`;
|
||||
}
|
||||
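For context, convertSyntax above is what maps Insomnia template tags onto Yaak's placeholder syntax. A rough sketch of the expected rewrites, assuming the regex shown (illustrative only; convertSyntax is module-private here):

convertSyntax('{{_.base_url}}/users');  // => '${[base_url]}/users'
convertSyntax('Bearer {{_.token}}');    // => 'Bearer ${[token]}'
convertSyntax(123 as any);              // => 123 (non-strings pass through unchanged)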
@@ -4,7 +4,7 @@ import { defineConfig } from 'vite';
|
||||
export default defineConfig({
|
||||
build: {
|
||||
lib: {
|
||||
entry: resolve(__dirname, 'src/index.js'),
|
||||
entry: resolve(__dirname, 'src/index.ts'),
|
||||
fileName: 'index',
|
||||
formats: ['es'],
|
||||
},
|
||||
|
||||
1495
plugins/importer-postman/package-lock.json
generated
File diff suppressed because it is too large
@@ -1,4 +1,7 @@
|
||||
{
|
||||
"name": "importer-postman",
|
||||
"version": "0.0.1"
|
||||
"version": "0.0.1",
|
||||
"devDependencies": {
|
||||
"vitest": "^1.4.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Environment, Folder, HttpRequest, Workspace } from '../../../src-web/lib/models';
|
||||
import { Environment, Folder, HttpRequest, Model, Workspace } from '../../../src-web/lib/models';
|
||||
|
||||
const POSTMAN_2_1_0_SCHEMA = 'https://schema.getpostman.com/json/collection/v2.1.0/collection.json';
|
||||
const POSTMAN_2_0_0_SCHEMA = 'https://schema.getpostman.com/json/collection/v2.0.0/collection.json';
|
||||
@@ -9,7 +9,7 @@ type AtLeast<T, K extends keyof T> = Partial<T> & Pick<T, K>;
|
||||
interface ExportResources {
|
||||
workspaces: AtLeast<Workspace, 'name' | 'id' | 'model'>[];
|
||||
environments: AtLeast<Environment, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
requests: AtLeast<HttpRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
httpRequests: AtLeast<HttpRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
folders: AtLeast<Folder, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
}
|
||||
|
||||
@@ -23,18 +23,25 @@ export function pluginHookImport(contents: string): { resources: ExportResources
|
||||
return;
|
||||
}
|
||||
|
||||
const globalAuth = importAuth(root.auth);
|
||||
|
||||
const exportResources: ExportResources = {
|
||||
workspaces: [],
|
||||
environments: [],
|
||||
requests: [],
|
||||
httpRequests: [],
|
||||
folders: [],
|
||||
};
|
||||
|
||||
const workspace: ExportResources['workspaces'][0] = {
|
||||
model: 'workspace',
|
||||
id: generateId('wk'),
|
||||
id: generateId('workspace'),
|
||||
name: info.name || 'Postman Import',
|
||||
description: info.description || '',
|
||||
variables:
|
||||
root.variable?.map((v: any) => ({
|
||||
name: v.key,
|
||||
value: v.value,
|
||||
})) ?? [],
|
||||
};
|
||||
exportResources.workspaces.push(workspace);
|
||||
|
||||
@@ -43,7 +50,7 @@ export function pluginHookImport(contents: string): { resources: ExportResources
|
||||
const folder: ExportResources['folders'][0] = {
|
||||
model: 'folder',
|
||||
workspaceId: workspace.id,
|
||||
id: generateId('fl'),
|
||||
id: generateId('folder'),
|
||||
name: v.name,
|
||||
folderId,
|
||||
};
|
||||
@@ -54,10 +61,11 @@ export function pluginHookImport(contents: string): { resources: ExportResources
|
||||
} else if (typeof v.name === 'string' && 'request' in v) {
|
||||
const r = toRecord(v.request);
|
||||
const bodyPatch = importBody(r.body);
|
||||
const authPatch = importAuth(r.auth);
|
||||
const request: ExportResources['requests'][0] = {
|
||||
const requestAuthPath = importAuth(r.auth);
|
||||
const authPatch = requestAuthPath.authenticationType == null ? globalAuth : requestAuthPath;
|
||||
const request: ExportResources['httpRequests'][0] = {
|
||||
model: 'http_request',
|
||||
id: generateId('rq'),
|
||||
id: generateId('http_request'),
|
||||
workspaceId: workspace.id,
|
||||
folderId,
|
||||
name: v.name,
|
||||
@@ -79,7 +87,7 @@ export function pluginHookImport(contents: string): { resources: ExportResources
|
||||
}),
|
||||
],
|
||||
};
|
||||
exportResources.requests.push(request);
|
||||
exportResources.httpRequests.push(request);
|
||||
} else {
|
||||
console.log('Unknown item', v, folderId);
|
||||
}
|
||||
@@ -105,6 +113,14 @@ function importAuth(
|
||||
password: auth.basic.password || '',
|
||||
},
|
||||
};
|
||||
} else if ('bearer' in auth) {
|
||||
return {
|
||||
headers: [],
|
||||
authenticationType: 'bearer',
|
||||
authentication: {
|
||||
token: auth.bearer.token || '',
|
||||
},
|
||||
};
|
||||
} else {
|
||||
// TODO: support other auth types
|
||||
return { headers: [], authenticationType: null, authentication: {} };
|
||||
@@ -164,6 +180,7 @@ function importBody(rawBody: any): Pick<HttpRequest, 'body' | 'bodyType' | 'head
|
||||
f.src != null
|
||||
? {
|
||||
enabled: !f.disabled,
|
||||
contentType: f.contentType ?? null,
|
||||
name: f.key ?? '',
|
||||
file: f.src ?? '',
|
||||
}
|
||||
@@ -175,6 +192,20 @@ function importBody(rawBody: any): Pick<HttpRequest, 'body' | 'bodyType' | 'head
|
||||
),
|
||||
},
|
||||
};
|
||||
} else if ('raw' in body) {
|
||||
return {
|
||||
headers: [
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: body.options?.raw?.language === 'json' ? 'application/json' : '',
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
bodyType: body.options?.raw?.language === 'json' ? 'application/json' : 'other',
|
||||
body: {
|
||||
text: body.raw ?? '',
|
||||
},
|
||||
};
|
||||
} else {
|
||||
// TODO: support other body types
|
||||
return { headers: [], bodyType: null, body: {} };
|
||||
@@ -213,11 +244,9 @@ function convertTemplateSyntax<T>(obj: T): T {
|
||||
}
|
||||
}
|
||||
|
||||
function generateId(prefix: 'wk' | 'rq' | 'fl'): string {
|
||||
const alphabet = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
|
||||
let id = `${prefix}_`;
|
||||
for (let i = 0; i < 10; i++) {
|
||||
id += alphabet[Math.floor(Math.random() * alphabet.length)];
|
||||
}
|
||||
return id;
|
||||
const idCount: Partial<Record<Model['model'], number>> = {};
|
||||
|
||||
function generateId(model: Model['model']): string {
|
||||
idCount[model] = (idCount[model] ?? -1) + 1;
|
||||
return `GENERATE_ID::${model.toUpperCase()}_${idCount[model]}`;
|
||||
}
|
||||
|
||||
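For reference, the counter-based generateId introduced above should produce deterministic placeholder IDs per model, which is what the fixture test below relies on. A small sketch of the expected sequence, assuming a fresh module state (illustrative only):

generateId('workspace');    // => 'GENERATE_ID::WORKSPACE_0'
generateId('folder');       // => 'GENERATE_ID::FOLDER_0'
generateId('folder');       // => 'GENERATE_ID::FOLDER_1'
generateId('http_request'); // => 'GENERATE_ID::HTTP_REQUEST_0'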
38
plugins/importer-postman/tests/fixtures/nested.json
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"info": {
|
||||
"_postman_id": "9e6dfada-256c-49ea-a38f-7d1b05b7ca2d",
|
||||
"name": "New Collection",
|
||||
"schema": "https://schema.getpostman.com/json/collection/v2.0.0/collection.json",
|
||||
"_exporter_id": "18798"
|
||||
},
|
||||
"item": [
|
||||
{
|
||||
"name": "Top Folder",
|
||||
"item": [
|
||||
{
|
||||
"name": "Nested Folder",
|
||||
"item": [
|
||||
{
|
||||
"name": "Request 1",
|
||||
"request": {
|
||||
"method": "GET"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Request 2",
|
||||
"request": {
|
||||
"method": "GET"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Request 3",
|
||||
"request": {
|
||||
"method": "GET"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
90
plugins/importer-postman/tests/index.test.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
import { Model } from '../../../src-web/lib/models';
|
||||
import { pluginHookImport } from '../src';
|
||||
|
||||
let originalRandom = Math.random;
|
||||
|
||||
describe('importer-postman', () => {
|
||||
beforeEach(() => {
|
||||
let i = 0;
|
||||
// Pseudo-random number generator to ensure consistent ID generation
|
||||
Math.random = vi.fn(() => ((i++ * 1000) % 133) / 100);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
Math.random = originalRandom;
|
||||
});
|
||||
|
||||
const p = path.join(__dirname, 'fixtures');
|
||||
const fixtures = fs.readdirSync(p);
|
||||
|
||||
for (const fixture of fixtures) {
|
||||
test('Imports ' + fixture, () => {
|
||||
const contents = fs.readFileSync(path.join(p, fixture), 'utf-8');
|
||||
const imported = pluginHookImport(contents);
|
||||
const folder0 = newId('folder');
|
||||
const folder1 = newId('folder');
|
||||
expect(imported).toEqual({
|
||||
resources: expect.objectContaining({
|
||||
workspaces: [
|
||||
expect.objectContaining({
|
||||
id: newId('workspace'),
|
||||
model: 'workspace',
|
||||
name: 'New Collection',
|
||||
}),
|
||||
],
|
||||
folders: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
id: folder0,
|
||||
model: 'folder',
|
||||
workspaceId: existingId('workspace'),
|
||||
name: 'Top Folder',
|
||||
}),
|
||||
expect.objectContaining({
|
||||
folderId: folder0,
|
||||
id: folder1,
|
||||
model: 'folder',
|
||||
workspaceId: existingId('workspace'),
|
||||
name: 'Nested Folder',
|
||||
}),
|
||||
]),
|
||||
httpRequests: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
id: newId('http_request'),
|
||||
model: 'http_request',
|
||||
name: 'Request 1',
|
||||
workspaceId: existingId('workspace'),
|
||||
folderId: folder1,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
id: newId('http_request'),
|
||||
model: 'http_request',
|
||||
name: 'Request 2',
|
||||
workspaceId: existingId('workspace'),
|
||||
folderId: folder0,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
id: newId('http_request'),
|
||||
model: 'http_request',
|
||||
name: 'Request 3',
|
||||
workspaceId: existingId('workspace'),
|
||||
folderId: null,
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const idCount: Partial<Record<Model['model'], number>> = {};
|
||||
function newId(model: Model['model']): string {
|
||||
idCount[model] = (idCount[model] ?? -1) + 1;
|
||||
return `GENERATE_ID::${model.toUpperCase()}_${idCount[model]}`;
|
||||
}
|
||||
|
||||
function existingId(model: Model['model']): string {
|
||||
return `GENERATE_ID::${model.toUpperCase()}_${idCount[model] ?? 0}`;
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
export function pluginHookImport(contents) {
|
||||
export function pluginHookImport(contents: string) {
|
||||
let parsed;
|
||||
try {
|
||||
parsed = JSON.parse(contents);
|
||||
@@ -10,23 +10,20 @@ export function pluginHookImport(contents) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (!('yaakSchema' in parsed)) {
|
||||
const isYaakExport = 'yaakSchema' in parsed;
|
||||
if (!isYaakExport) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Migrate v1 to v2 -- changes requests to httpRequests
|
||||
if (parsed.yaakSchema === 1) {
|
||||
if ('requests' in parsed.resources) {
|
||||
parsed.resources.httpRequests = parsed.resources.requests;
|
||||
parsed.yaakSchema = 2;
|
||||
delete parsed.resources.requests;
|
||||
}
|
||||
|
||||
if (parsed.yaakSchema === 2) {
|
||||
return { resources: parsed.resources }; // Should already be in the correct format
|
||||
}
|
||||
|
||||
return undefined;
|
||||
return { resources: parsed.resources }; // Should already be in the correct format
|
||||
}
|
||||
|
||||
export function isJSObject(obj) {
|
||||
export function isJSObject(obj: any) {
|
||||
return Object.prototype.toString.call(obj) === '[object Object]';
|
||||
}
|
||||
29
plugins/importer-yaak/tests/index.test.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { describe, expect, test } from 'vitest';
|
||||
import { pluginHookImport } from '../src';
|
||||
|
||||
describe('importer-yaak', () => {
|
||||
test('Skips invalid imports', () => {
|
||||
expect(pluginHookImport('not JSON')).toBeUndefined();
|
||||
expect(pluginHookImport('[]')).toBeUndefined();
|
||||
expect(pluginHookImport(JSON.stringify({ resources: {} }))).toBeUndefined();
|
||||
});
|
||||
|
||||
test('converts schema 1 to 2', () => {
|
||||
const imported = pluginHookImport(
|
||||
JSON.stringify({
|
||||
yaakSchema: 1,
|
||||
resources: {
|
||||
requests: [],
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
expect(imported).toEqual(
|
||||
expect.objectContaining({
|
||||
resources: {
|
||||
httpRequests: [],
|
||||
},
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -4,7 +4,7 @@ import { defineConfig } from 'vite';
|
||||
export default defineConfig({
|
||||
build: {
|
||||
lib: {
|
||||
entry: resolve(__dirname, 'src/index.js'),
|
||||
entry: resolve(__dirname, 'src/index.ts'),
|
||||
fileName: 'index',
|
||||
formats: ['es'],
|
||||
},
|
||||
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n UPDATE settings SET (\n theme, appearance, update_channel\n ) = (?, ?, ?) WHERE id = 'default';\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 3
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "48ec5fdf20f34add763c540061caa25054545503704e19f149987f99b1a0e4f0"
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, created_at, updated_at, theme, appearance, update_channel\n FROM settings\n WHERE id = 'default'\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "model",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 2,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"ordinal": 3,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "theme",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "appearance",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "update_channel",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "b32994b09ae7a06eb0f031069d327e55127a5bce60cbb499b83d1701386a23cb"
|
||||
}
|
||||
92
src-tauri/.sqlx/query-ca3485d87b060cd77c4114d2af544adf18f6f15341d9d5db40865e92a80da4e2.json
generated
Normal file
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, created_at, updated_at, theme, appearance,\n theme_dark, theme_light, update_channel,\n interface_font_size, interface_scale, editor_font_size, editor_soft_wrap\n FROM settings\n WHERE id = 'default'\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "model",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 2,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"ordinal": 3,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "theme",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "appearance",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "theme_dark",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "theme_light",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "update_channel",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "interface_font_size",
|
||||
"ordinal": 9,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "interface_scale",
|
||||
"ordinal": 10,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "editor_font_size",
|
||||
"ordinal": 11,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "editor_soft_wrap",
|
||||
"ordinal": 12,
|
||||
"type_info": "Bool"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "ca3485d87b060cd77c4114d2af544adf18f6f15341d9d5db40865e92a80da4e2"
|
||||
}
|
||||
12
src-tauri/.sqlx/query-efd8ba41ea909b18dd520c57c1d464c5ae057b720cbbedcaec1513d43535632c.json
generated
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n UPDATE settings SET (\n theme, appearance, theme_dark, theme_light, update_channel,\n interface_font_size, interface_scale, editor_font_size, editor_soft_wrap\n ) = (?, ?, ?, ?, ?, ?, ?, ?, ?) WHERE id = 'default';\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 9
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "efd8ba41ea909b18dd520c57c1d464c5ae057b720cbbedcaec1513d43535632c"
|
||||
}
|
||||
4340
src-tauri/Cargo.lock
generated
File diff suppressed because it is too large
@@ -1,18 +1,20 @@
|
||||
workspace = { members = ["grpc"] }
|
||||
|
||||
[package]
|
||||
name = "yaak-app"
|
||||
version = "0.0.0"
|
||||
description = "A network protocol testing utility app"
|
||||
authors = ["Gregory Schier"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/gschier/yaak-app"
|
||||
edition = "2021"
|
||||
|
||||
# Produce a library for mobile support
|
||||
[lib]
|
||||
name = "tauri_app_lib"
|
||||
crate-type = ["staticlib", "cdylib", "lib"]
|
||||
|
||||
[profile.release]
|
||||
strip = true # Automatically strip symbols from the binary.
|
||||
|
||||
[build-dependencies]
|
||||
tauri-build = { version = "1.5", features = [] }
|
||||
tauri-build = { version = "2.0.0-beta", features = [] }
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
objc = "0.2.7"
|
||||
@@ -22,53 +24,32 @@ cocoa = "0.25.0"
|
||||
openssl-sys = { version = "0.9", features = ["vendored"] } # For Ubuntu installation to work
|
||||
|
||||
[dependencies]
|
||||
base64 = "0.21.0"
|
||||
boa_engine = { version = "0.17.3", features = ["annex-b"] }
|
||||
boa_runtime = { version = "0.17.3" }
|
||||
base64 = "0.22.0"
|
||||
boa_engine = { version = "0.18.0", features = ["annex-b"] }
|
||||
boa_runtime = { version = "0.18.0" }
|
||||
chrono = { version = "0.4.31", features = ["serde"] }
|
||||
futures = "0.3.26"
|
||||
http = "0.2.8"
|
||||
http = "0.2.10"
|
||||
rand = "0.8.5"
|
||||
reqwest = { version = "0.11.23", features = ["multipart", "cookies", "gzip", "brotli", "deflate"] }
|
||||
cookie = { version = "0.18.0" }
|
||||
serde = { version = "1.0.195", features = ["derive"] }
|
||||
serde_json = { version = "1.0.111", features = ["raw_value"] }
|
||||
sqlx = { version = "0.7.3", features = ["sqlite", "runtime-tokio-rustls", "json", "chrono", "time"] }
|
||||
tauri = { version = "1.5.4", features = [
|
||||
"config-toml",
|
||||
"path-all",
|
||||
"devtools",
|
||||
"dialog-open",
|
||||
"dialog-save",
|
||||
"fs-read-file",
|
||||
"os-all",
|
||||
"protocol-asset",
|
||||
"shell-open",
|
||||
"shell-sidecar",
|
||||
"updater",
|
||||
"window-close",
|
||||
"window-maximize",
|
||||
"window-minimize",
|
||||
"window-set-decorations",
|
||||
"window-set-title",
|
||||
"window-start-dragging",
|
||||
"window-unmaximize",
|
||||
] }
|
||||
tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
|
||||
tauri-plugin-log = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1", features = ["colored"] }
|
||||
tokio = { version = "1.25.0", features = ["sync"] }
|
||||
uuid = "1.3.0"
|
||||
log = "0.4.20"
|
||||
reqwest = { version = "0.11.23", features = ["multipart", "cookies", "gzip", "brotli", "deflate", "json"] }
|
||||
serde = { version = "1.0.198", features = ["derive"] }
|
||||
serde_json = { version = "1.0.116", features = ["raw_value"] }
|
||||
sqlx = { version = "0.7.4", features = ["sqlite", "runtime-tokio-rustls", "json", "chrono", "time"] }
|
||||
tauri = { version = "2.0.0-beta", features = ["config-toml", "devtools", "protocol-asset"] }
|
||||
tauri-plugin-clipboard-manager = "2.1.0-beta"
|
||||
tauri-plugin-dialog = "2.0.0-beta"
|
||||
tauri-plugin-log = { version = "2.0.0-beta", features = ["colored"] }
|
||||
tauri-plugin-shell = "2.0.0-beta"
|
||||
tauri-plugin-os = "2.0.0-beta"
|
||||
tauri-plugin-updater = "2.0.0-beta"
|
||||
tauri-plugin-window-state = "2.0.0-beta"
|
||||
tauri-plugin-fs = "2.0.0-beta"
|
||||
tauri-plugin-deep-link = "2.0.0-beta"
|
||||
tokio = { version = "1.36.0", features = ["sync"] }
|
||||
uuid = "1.7.0"
|
||||
log = "0.4.21"
|
||||
datetime = "0.5.2"
|
||||
window-shadows = "0.2.2"
|
||||
reqwest_cookie_store = "0.6.0"
|
||||
grpc = { path = "./grpc" }
|
||||
tokio-stream = "0.1.14"
|
||||
|
||||
[features]
|
||||
# by default Tauri runs in production mode
|
||||
# when `tauri dev` runs it is executed with `cargo run --no-default-features` if `devPath` is an URL
|
||||
default = ["custom-protocol"]
|
||||
# this feature is used used for production builds where `devPath` points to the filesystem
|
||||
# DO NOT remove this
|
||||
custom-protocol = ["tauri/custom-protocol"]
|
||||
tokio-stream = "0.1.15"
|
||||
regex = "1.10.2"
|
||||
hex_color = "3.0.0"
|
||||
|
||||
56
src-tauri/capabilities/capabilities.json
Normal file
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"$schema": "../gen/schemas/capabilities.json",
|
||||
"identifier": "main",
|
||||
"description": "Main permissions",
|
||||
"local": true,
|
||||
"windows": [
|
||||
"*"
|
||||
],
|
||||
"permissions": [
|
||||
"os:allow-os-type",
|
||||
"event:allow-emit",
|
||||
"clipboard-manager:allow-write-text",
|
||||
"clipboard-manager:allow-read-text",
|
||||
"dialog:allow-open",
|
||||
"dialog:allow-save",
|
||||
"event:allow-listen",
|
||||
"event:allow-unlisten",
|
||||
"fs:allow-read-file",
|
||||
"fs:allow-read-text-file",
|
||||
{
|
||||
"identifier": "fs:scope",
|
||||
"allow": [
|
||||
{
|
||||
"path": "$APPDATA"
|
||||
},
|
||||
{
|
||||
"path": "$APPDATA/**"
|
||||
}
|
||||
]
|
||||
},
|
||||
"shell:allow-open",
|
||||
{
|
||||
"identifier": "shell:allow-execute",
|
||||
"allow": [
|
||||
{
|
||||
"name": "protoc",
|
||||
"sidecar": true,
|
||||
"args": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"webview:allow-set-webview-zoom",
|
||||
"window:allow-close",
|
||||
"window:allow-is-fullscreen",
|
||||
"window:allow-maximize",
|
||||
"window:allow-minimize",
|
||||
"window:allow-toggle-maximize",
|
||||
"window:allow-set-decorations",
|
||||
"window:allow-set-title",
|
||||
"window:allow-start-dragging",
|
||||
"window:allow-unmaximize",
|
||||
"window:allow-theme",
|
||||
"clipboard-manager:allow-read-text",
|
||||
"clipboard-manager:allow-write-text"
|
||||
]
|
||||
}
|
||||
1
src-tauri/gen/schemas/acl-manifests.json
Normal file
File diff suppressed because one or more lines are too long
1
src-tauri/gen/schemas/capabilities.json
Normal file
@@ -0,0 +1 @@
|
||||
{"main":{"identifier":"main","description":"Main permissions","local":true,"windows":["*"],"permissions":["os:allow-os-type","event:allow-emit","clipboard-manager:allow-write-text","clipboard-manager:allow-read-text","dialog:allow-open","dialog:allow-save","event:allow-listen","event:allow-unlisten","fs:allow-read-file","fs:allow-read-text-file",{"identifier":"fs:scope","allow":[{"path":"$APPDATA"},{"path":"$APPDATA/**"}]},"shell:allow-open",{"identifier":"shell:allow-execute","allow":[{"args":true,"name":"protoc","sidecar":true}]},"webview:allow-set-webview-zoom","window:allow-close","window:allow-is-fullscreen","window:allow-maximize","window:allow-minimize","window:allow-toggle-maximize","window:allow-set-decorations","window:allow-set-title","window:allow-start-dragging","window:allow-unmaximize","window:allow-theme","clipboard-manager:allow-read-text","clipboard-manager:allow-write-text"]}}
|
||||
6924
src-tauri/gen/schemas/desktop-schema.json
Normal file
File diff suppressed because it is too large
6924
src-tauri/gen/schemas/linux-schema.json
Normal file
File diff suppressed because it is too large
6924
src-tauri/gen/schemas/macOS-schema.json
Normal file
File diff suppressed because it is too large
6924
src-tauri/gen/schemas/windows-schema.json
Normal file
File diff suppressed because it is too large
@@ -14,10 +14,9 @@ serde = { version = "1.0.196", features = ["derive"] }
|
||||
serde_json = "1.0.113"
|
||||
prost-reflect = { version = "0.12.0", features = ["serde", "derive"] }
|
||||
log = "0.4.20"
|
||||
once_cell = { version = "1.19.0", features = [] }
|
||||
anyhow = "1.0.79"
|
||||
hyper = { version = "0.14" }
|
||||
hyper-rustls = { version = "0.24.0", features = ["http2"] }
|
||||
protoc-bin-vendored = "3.0.0"
|
||||
uuid = { version = "1.7.0", features = ["v4"] }
|
||||
tauri = { version = "1.5.4", features = ["process-command-api"]}
|
||||
tauri = { version = "2.0.0-beta" }
|
||||
tauri-plugin-shell = "2.0.0-beta"
|
||||
|
||||
@@ -8,6 +8,7 @@ use hyper_rustls::HttpsConnector;
|
||||
pub use prost_reflect::DynamicMessage;
|
||||
use prost_reflect::{DescriptorPool, MethodDescriptor, ServiceDescriptor};
|
||||
use serde_json::Deserializer;
|
||||
use tauri::AppHandle;
|
||||
use tokio_stream::wrappers::ReceiverStream;
|
||||
use tonic::body::BoxBody;
|
||||
use tonic::metadata::{MetadataKey, MetadataValue};
|
||||
@@ -166,13 +167,17 @@ impl GrpcConnection {
|
||||
}
|
||||
|
||||
pub struct GrpcHandle {
|
||||
app_handle: AppHandle,
|
||||
pools: HashMap<String, DescriptorPool>,
|
||||
}
|
||||
|
||||
impl Default for GrpcHandle {
|
||||
fn default() -> Self {
|
||||
impl GrpcHandle {
|
||||
pub fn new(app_handle: &AppHandle) -> Self {
|
||||
let pools = HashMap::new();
|
||||
Self { pools }
|
||||
Self {
|
||||
pools,
|
||||
app_handle: app_handle.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -183,7 +188,7 @@ impl GrpcHandle {
|
||||
uri: &Uri,
|
||||
paths: Vec<PathBuf>,
|
||||
) -> Result<Vec<ServiceDefinition>, String> {
|
||||
let pool = fill_pool_from_files(paths).await?;
|
||||
let pool = fill_pool_from_files(&self.app_handle, paths).await?;
|
||||
self.pools.insert(self.get_pool_key(id, uri), pool.clone());
|
||||
Ok(self.services_from_pool(&pool))
|
||||
}
|
||||
@@ -237,7 +242,7 @@ impl GrpcHandle {
|
||||
None => match proto_files.len() {
|
||||
0 => fill_pool(&uri).await?,
|
||||
_ => {
|
||||
let pool = fill_pool_from_files(proto_files).await?;
|
||||
let pool = fill_pool_from_files(&self.app_handle, proto_files).await?;
|
||||
self.pools.insert(id.to_string(), pool.clone());
|
||||
pool
|
||||
}
|
||||
|
||||
@@ -1,36 +1,48 @@
|
||||
use std::env::temp_dir;
|
||||
use std::ops::Deref;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use std::str::{from_utf8, FromStr};
|
||||
|
||||
use anyhow::anyhow;
|
||||
use hyper::Client;
|
||||
use hyper::client::HttpConnector;
|
||||
use hyper::Client;
|
||||
use hyper_rustls::{HttpsConnector, HttpsConnectorBuilder};
|
||||
use log::{debug, info, warn};
|
||||
use prost::Message;
|
||||
use prost_reflect::{DescriptorPool, MethodDescriptor};
|
||||
use prost_types::{FileDescriptorProto, FileDescriptorSet};
|
||||
use tauri::api::process::{Command, CommandEvent};
|
||||
use tauri::path::BaseDirectory;
|
||||
use tauri::{AppHandle, Manager};
|
||||
use tauri_plugin_shell::process::CommandEvent;
|
||||
use tauri_plugin_shell::ShellExt;
|
||||
use tokio::fs;
|
||||
use tokio_stream::StreamExt;
|
||||
use tonic::body::BoxBody;
|
||||
use tonic::codegen::http::uri::PathAndQuery;
|
||||
use tonic::Request;
|
||||
use tonic::transport::Uri;
|
||||
use tonic::Request;
|
||||
use tonic_reflection::pb::server_reflection_client::ServerReflectionClient;
|
||||
use tonic_reflection::pb::server_reflection_request::MessageRequest;
|
||||
use tonic_reflection::pb::server_reflection_response::MessageResponse;
|
||||
use tonic_reflection::pb::ServerReflectionRequest;
|
||||
|
||||
pub async fn fill_pool_from_files(paths: Vec<PathBuf>) -> Result<DescriptorPool, String> {
|
||||
pub async fn fill_pool_from_files(
|
||||
app_handle: &AppHandle,
|
||||
paths: Vec<PathBuf>,
|
||||
) -> Result<DescriptorPool, String> {
|
||||
let mut pool = DescriptorPool::new();
|
||||
let random_file_name = format!("{}.desc", uuid::Uuid::new_v4());
|
||||
let desc_path = temp_dir().join(random_file_name);
|
||||
let global_import_dir = app_handle
|
||||
.path()
|
||||
.resolve("protoc-vendored/include", BaseDirectory::Resource)
|
||||
.expect("failed to resolve protoc include directory");
|
||||
|
||||
let mut args = vec![
|
||||
"--include_imports".to_string(),
|
||||
"--include_source_info".to_string(),
|
||||
"-I".to_string(),
|
||||
global_import_dir.to_string_lossy().to_string(),
|
||||
"-o".to_string(),
|
||||
desc_path.to_string_lossy().to_string(),
|
||||
];
|
||||
@@ -53,7 +65,9 @@ pub async fn fill_pool_from_files(paths: Vec<PathBuf>) -> Result<DescriptorPool,
|
||||
}
|
||||
}
|
||||
|
||||
let (mut rx, _child) = Command::new_sidecar("protoc")
|
||||
let (mut rx, _child) = app_handle
|
||||
.shell()
|
||||
.sidecar("protoc")
|
||||
.expect("protoc not found")
|
||||
.args(args)
|
||||
.spawn()
|
||||
@@ -62,10 +76,16 @@ pub async fn fill_pool_from_files(paths: Vec<PathBuf>) -> Result<DescriptorPool,
|
||||
while let Some(event) = rx.recv().await {
|
||||
match event {
|
||||
CommandEvent::Stdout(line) => {
|
||||
info!("protoc stdout: {}", line);
|
||||
info!(
|
||||
"protoc stdout: {}",
|
||||
from_utf8(line.as_slice()).unwrap_or_default().to_string()
|
||||
);
|
||||
}
|
||||
CommandEvent::Stderr(line) => {
|
||||
info!("protoc stderr: {}", line);
|
||||
info!(
|
||||
"protoc stderr: {}",
|
||||
from_utf8(line.as_slice()).unwrap_or_default().to_string()
|
||||
);
|
||||
}
|
||||
CommandEvent::Error(e) => {
|
||||
return Err(e.to_string());
|
||||
@@ -76,10 +96,7 @@ pub async fn fill_pool_from_files(paths: Vec<PathBuf>) -> Result<DescriptorPool,
|
||||
// success
|
||||
}
|
||||
Some(code) => {
|
||||
return Err(format!(
|
||||
"protoc failed with exit code: {}",
|
||||
code,
|
||||
));
|
||||
return Err(format!("protoc failed with exit code: {}", code,));
|
||||
}
|
||||
None => {
|
||||
return Err("protoc failed with no exit code".to_string());
|
||||
|
||||
4
src-tauri/migrations/20240522031045_theme-settings.sql
Normal file
@@ -0,0 +1,4 @@
|
||||
ALTER TABLE settings
|
||||
ADD COLUMN theme_dark TEXT DEFAULT 'yaak-dark' NOT NULL;
|
||||
ALTER TABLE settings
|
||||
ADD COLUMN theme_light TEXT DEFAULT 'yaak-light' NOT NULL;
|
||||
4
src-tauri/migrations/20240529143147_more-settings.sql
Normal file
@@ -0,0 +1,4 @@
|
||||
ALTER TABLE settings ADD COLUMN interface_font_size INTEGER DEFAULT 15 NOT NULL;
|
||||
ALTER TABLE settings ADD COLUMN interface_scale INTEGER DEFAULT 1 NOT NULL;
|
||||
ALTER TABLE settings ADD COLUMN editor_font_size INTEGER DEFAULT 13 NOT NULL;
|
||||
ALTER TABLE settings ADD COLUMN editor_soft_wrap BOOLEAN DEFAULT 1 NOT NULL;
|
||||
36
src-tauri/plugins/exporter-curl/index.mjs
Normal file
@@ -0,0 +1,36 @@
|
||||
const o = `\\
|
||||
`;
|
||||
function d(n) {
|
||||
var h, f, r, u, l, s;
|
||||
const t = ["curl"];
|
||||
n.method && t.push("-X", n.method), n.url && t.push(i(n.url)), t.push(o);
|
||||
for (const a of (n.urlParameters ?? []).filter(p))
|
||||
t.push("--url-query", i(`${a.name}=${a.value}`)), t.push(o);
|
||||
for (const a of (n.headers ?? []).filter(p))
|
||||
t.push("--header", i(`${a.name}: ${a.value}`)), t.push(o);
|
||||
if (Array.isArray((h = n.body) == null ? void 0 : h.form)) {
|
||||
const a = n.bodyType === "multipart/form-data" ? "--form" : "--data";
|
||||
for (const e of (((f = n.body) == null ? void 0 : f.form) ?? []).filter(p)) {
|
||||
if (e.file) {
|
||||
let c = `${e.name}=@${e.file}`;
|
||||
c += e.contentType ? `;type=${e.contentType}` : "", t.push(a, c);
|
||||
} else
|
||||
t.push(a, i(`${e.name}=${e.value}`));
|
||||
t.push(o);
|
||||
}
|
||||
} else
|
||||
typeof ((r = n.body) == null ? void 0 : r.text) == "string" && (t.push("--data-raw", `$${i(n.body.text)}`), t.push(o));
|
||||
return (n.authenticationType === "basic" || n.authenticationType === "digest") && (n.authenticationType === "digest" && t.push("--digest"), t.push(
|
||||
"--user",
|
||||
i(`${((u = n.authentication) == null ? void 0 : u.username) ?? ""}:${((l = n.authentication) == null ? void 0 : l.password) ?? ""}`)
|
||||
), t.push(o)), n.authenticationType === "bearer" && (t.push("--header", i(`Authorization: Bearer ${((s = n.authentication) == null ? void 0 : s.token) ?? ""}`)), t.push(o)), t[t.length - 1] === o && t.splice(t.length - 1, 1), t.join(" ");
|
||||
}
|
||||
function i(n) {
|
||||
return `'${n.replace(/'/g, "\\'")}'`;
|
||||
}
|
||||
function p(n) {
|
||||
return n.enabled !== !1 && !!n.name;
|
||||
}
|
||||
export {
|
||||
d as pluginHookExport
|
||||
};
|
||||
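The file above is the bundled (minified) output of the curl exporter plugin. A readable sketch of what its pluginHookExport appears to do, with hypothetical helper names for illustration (not the actual source; the real bundle also inserts backslash-newline continuations between arguments):

// Build a curl command from an HttpRequest-like object (sketch, hypothetical names).
function exportCurlSketch(request: any): string {
  const parts: string[] = ['curl'];
  if (request.method) parts.push('-X', request.method);
  if (request.url) parts.push(quote(request.url));
  for (const p of (request.urlParameters ?? []).filter(isEnabled)) {
    parts.push('--url-query', quote(`${p.name}=${p.value}`));
  }
  for (const h of (request.headers ?? []).filter(isEnabled)) {
    parts.push('--header', quote(`${h.name}: ${h.value}`));
  }
  if (typeof request.body?.text === 'string') {
    parts.push('--data-raw', `$${quote(request.body.text)}`);
  }
  // Form bodies map to --form or --data per entry, basic/digest auth adds --user
  // (plus --digest), and bearer auth adds an Authorization header, as in the bundle above.
  return parts.join(' ');
}

function quote(value: string): string {
  return `'${value.replace(/'/g, "\\'")}'`;
}

function isEnabled(p: any): boolean {
  return p.enabled !== false && !!p.name;
}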
297
src-tauri/plugins/importer-curl/index.mjs
Normal file
@@ -0,0 +1,297 @@
|
||||
var j = "(?:" + [
|
||||
"\\|\\|",
|
||||
"\\&\\&",
|
||||
";;",
|
||||
"\\|\\&",
|
||||
"\\<\\(",
|
||||
"\\<\\<\\<",
|
||||
">>",
|
||||
">\\&",
|
||||
"<\\&",
|
||||
"[&;()|<>]"
|
||||
].join("|") + ")", D = new RegExp("^" + j + "$"), q = "|&;()<> \\t", M = '"((\\\\"|[^"])*?)"', Q = "'((\\\\'|[^'])*?)'", V = /^#$/, _ = "'", G = '"', U = "$", $ = "", z = 4294967296;
|
||||
for (var L = 0; L < 4; L++)
|
||||
$ += (z * Math.random()).toString(16);
|
||||
var J = new RegExp("^" + $);
|
||||
function X(n, s) {
|
||||
for (var e = s.lastIndex, t = [], c; c = s.exec(n); )
|
||||
t.push(c), s.lastIndex === c.index && (s.lastIndex += 1);
|
||||
return s.lastIndex = e, t;
|
||||
}
|
||||
function F(n, s, e) {
|
||||
var t = typeof n == "function" ? n(e) : n[e];
|
||||
return typeof t > "u" && e != "" ? t = "" : typeof t > "u" && (t = "$"), typeof t == "object" ? s + $ + JSON.stringify(t) + $ : s + t;
|
||||
}
|
||||
function K(n, s, e) {
|
||||
e || (e = {});
|
||||
var t = e.escape || "\\", c = "(\\" + t + `['"` + q + `]|[^\\s'"` + q + "])+", m = new RegExp([
|
||||
"(" + j + ")",
|
||||
// control chars
|
||||
"(" + c + "|" + M + "|" + Q + ")+"
|
||||
].join("|"), "g"), f = X(n, m);
|
||||
if (f.length === 0)
|
||||
return [];
|
||||
s || (s = {});
|
||||
var w = !1;
|
||||
return f.map(function(r) {
|
||||
var a = r[0];
|
||||
if (!a || w)
|
||||
return;
|
||||
if (D.test(a))
|
||||
return { op: a };
|
||||
var x = !1, C = !1, d = "", O = !1, i;
|
||||
function T() {
|
||||
i += 1;
|
||||
var v, p, R = a.charAt(i);
|
||||
if (R === "{") {
|
||||
if (i += 1, a.charAt(i) === "}")
|
||||
throw new Error("Bad substitution: " + a.slice(i - 2, i + 1));
|
||||
if (v = a.indexOf("}", i), v < 0)
|
||||
throw new Error("Bad substitution: " + a.slice(i));
|
||||
p = a.slice(i, v), i = v;
|
||||
} else if (/[*@#?$!_-]/.test(R))
|
||||
p = R, i += 1;
|
||||
else {
|
||||
var g = a.slice(i);
|
||||
v = g.match(/[^\w\d_]/), v ? (p = g.slice(0, v.index), i += v.index - 1) : (p = g, i = a.length);
|
||||
}
|
||||
return F(s, "", p);
|
||||
}
|
||||
for (i = 0; i < a.length; i++) {
|
||||
var u = a.charAt(i);
|
||||
if (O = O || !x && (u === "*" || u === "?"), C)
|
||||
d += u, C = !1;
|
||||
else if (x)
|
||||
u === x ? x = !1 : x == _ ? d += u : u === t ? (i += 1, u = a.charAt(i), u === G || u === t || u === U ? d += u : d += t + u) : u === U ? d += T() : d += u;
|
||||
else if (u === G || u === _)
|
||||
x = u;
|
||||
else {
|
||||
if (D.test(u))
|
||||
return { op: a };
|
||||
if (V.test(u)) {
|
||||
w = !0;
|
||||
var b = { comment: n.slice(r.index + i + 1) };
|
||||
return d.length ? [d, b] : [b];
|
||||
} else
|
||||
u === t ? C = !0 : u === U ? d += T() : d += u;
|
||||
}
|
||||
}
|
||||
return O ? { op: "glob", pattern: d } : d;
|
||||
}).reduce(function(r, a) {
|
||||
return typeof a > "u" ? r : r.concat(a);
|
||||
}, []);
|
||||
}
|
||||
var Y = function(s, e, t) {
|
||||
var c = K(s, e, t);
|
||||
return typeof e != "function" ? c : c.reduce(function(m, f) {
|
||||
if (typeof f == "object")
|
||||
return m.concat(f);
|
||||
var w = f.split(RegExp("(" + $ + ".*?" + $ + ")", "g"));
|
||||
return w.length === 1 ? m.concat(w[0]) : m.concat(w.filter(Boolean).map(function(r) {
|
||||
return J.test(r) ? JSON.parse(r.split($)[1]) : r;
|
||||
}));
|
||||
}, []);
|
||||
}, Z = Y;
|
||||
const ae = "curl", se = "cURL", ie = "cURL command line tool", H = ["d", "data", "data-raw", "data-urlencode", "data-binary", "data-ascii"], ee = [
|
||||
["url"],
|
||||
// Specify the URL explicitly
|
||||
["user", "u"],
|
||||
// Authentication
|
||||
["digest"],
|
||||
// Apply auth as digest
|
||||
["header", "H"],
|
||||
["cookie", "b"],
|
||||
["get", "G"],
|
||||
// Put the post data in the URL
|
||||
["d", "data"],
|
||||
// Add url encoded data
|
||||
["data-raw"],
|
||||
["data-urlencode"],
|
||||
["data-binary"],
|
||||
["data-ascii"],
|
||||
["form", "F"],
|
||||
// Add multipart data
|
||||
["request", "X"],
|
||||
// Request method
|
||||
H
|
||||
].flatMap((n) => n);
|
||||
function oe(n) {
|
||||
if (!n.match(/^\s*curl /))
|
||||
return null;
|
||||
const s = [], e = n.replace(/\ncurl/g, "; curl");
|
||||
let t = [];
|
||||
const m = Z(e).flatMap((r) => typeof r == "string" && r.startsWith("-") && !r.startsWith("--") && r.length > 2 ? [r.slice(0, 2), r.slice(2)] : r);
|
||||
for (const r of m) {
|
||||
if (typeof r == "string") {
|
||||
r.startsWith("$") ? t.push(r.slice(1)) : t.push(r);
|
||||
continue;
|
||||
}
|
||||
if ("comment" in r)
|
||||
continue;
|
||||
const { op: a } = r;
|
||||
if (a === ";") {
|
||||
s.push(t), t = [];
|
||||
continue;
|
||||
}
|
||||
if (a != null && a.startsWith("$")) {
|
||||
const x = a.slice(2, a.length - 1).replace(/\\'/g, "'");
|
||||
t.push(x);
|
||||
continue;
|
||||
}
|
||||
a === "glob" && t.push(r.pattern);
|
||||
}
|
||||
s.push(t);
|
||||
const f = {
|
||||
model: "workspace",
|
||||
id: N("workspace"),
|
||||
name: "Curl Import"
|
||||
};
|
||||
return {
|
||||
resources: {
|
||||
httpRequests: s.filter((r) => r[0] === "curl").map((r) => te(r, f.id)),
|
||||
workspaces: [f]
|
||||
}
|
||||
};
|
||||
}
|
||||
function te(n, s) {
|
||||
const e = {}, t = [];
|
||||
for (let o = 1; o < n.length; o++) {
|
||||
let l = n[o];
|
||||
if (typeof l == "string" && (l = l.trim()), typeof l == "string" && l.match(/^-{1,2}[\w-]+/)) {
|
||||
const E = l[0] === "-" && l[1] !== "-";
|
||||
let h = l.replace(/^-{1,2}/, "");
|
||||
if (!ee.includes(h))
|
||||
continue;
|
||||
let y;
|
||||
const S = n[o + 1];
|
||||
E && h.length > 1 ? (y = h.slice(1), h = h.slice(0, 1)) : typeof S == "string" && !S.startsWith("-") ? (y = S, o++) : y = !0, e[h] = e[h] || [], e[h].push(y);
|
||||
} else
|
||||
l && t.push(l);
|
||||
}
|
||||
let c, m;
|
||||
const f = A(e, t[0] || "", ["url"]), [w, r] = W(f, "?");
|
||||
c = (r == null ? void 0 : r.split("&").map((o) => {
|
||||
const l = W(o, "=");
|
||||
return { name: l[0] ?? "", value: l[1] ?? "", enabled: !0 };
|
||||
})) ?? [], m = w ?? f;
|
||||
const [a, x] = A(e, "", ["u", "user"]).split(/:(.*)$/), C = A(e, !1, ["digest"]), d = a ? C ? "digest" : "basic" : null, O = a ? {
|
||||
username: a.trim(),
|
||||
password: (x ?? "").trim()
|
||||
} : {}, i = [
|
||||
...e.header || [],
|
||||
...e.H || []
|
||||
].map((o) => {
|
||||
const [l, E] = o.split(/:(.*)$/);
|
||||
return E ? {
|
||||
name: (l ?? "").trim(),
|
||||
value: E.trim(),
|
||||
enabled: !0
|
||||
} : {
|
||||
name: (l ?? "").trim().replace(/;$/, ""),
|
||||
value: "",
|
||||
enabled: !0
|
||||
};
|
||||
}), T = [
|
||||
...e.cookie || [],
|
||||
...e.b || []
|
||||
].map((o) => {
|
||||
const l = o.split("=", 1)[0], E = o.replace(`${l}=`, "");
|
||||
return `${l}=${E}`;
|
||||
}).join("; "), u = i.find((o) => o.name.toLowerCase() === "cookie");
|
||||
T && u ? u.value += `; ${T}` : T && i.push({
|
||||
name: "Cookie",
|
||||
value: T,
|
||||
enabled: !0
|
||||
});
|
||||
const b = ne(e), v = i.find((o) => o.name.toLowerCase() === "content-type"), p = v ? v.value.split(";")[0] : null, R = [
|
||||
...e.form || [],
|
||||
...e.F || []
|
||||
].map((o) => {
|
||||
const l = o.split("="), E = l[0] ?? "", h = l[1] ?? "", y = {
|
||||
name: E,
|
||||
enabled: !0
|
||||
};
|
||||
return h.indexOf("@") === 0 ? y.file = h.slice(1) : y.value = h, y;
|
||||
});
|
||||
let g = {}, I = null;
|
||||
const B = A(e, !1, ["G", "get"]);
|
||||
b.length > 0 && B ? c.push(...b) : b.length > 0 && (p == null || p === "application/x-www-form-urlencoded") ? (I = p ?? "application/x-www-form-urlencoded", g = {
|
||||
form: b.map((o) => ({
|
||||
...o,
|
||||
name: decodeURIComponent(o.name || ""),
|
||||
value: decodeURIComponent(o.value || "")
|
||||
}))
|
||||
}, i.push({
|
||||
name: "Content-Type",
|
||||
value: "application/x-www-form-urlencoded",
|
||||
enabled: !0
|
||||
})) : b.length > 0 ? (I = p === "application/json" || p === "text/xml" || p === "text/plain" ? p : "other", g = {
|
||||
text: b.map(({ name: o, value: l }) => o && l ? `${o}=${l}` : o || l).join("&")
|
||||
}) : R.length && (I = p ?? "multipart/form-data", g = {
|
||||
form: R
|
||||
}, p == null && i.push({
|
||||
name: "Content-Type",
|
||||
value: "multipart/form-data",
|
||||
enabled: !0
|
||||
}));
|
||||
let P = A(e, "", ["X", "request"]).toUpperCase();
|
||||
return P === "" && g && (P = "text" in g || "form" in g ? "POST" : "GET"), {
|
||||
id: N("http_request"),
|
||||
model: "http_request",
|
||||
workspaceId: s,
|
||||
name: "",
|
||||
urlParameters: c,
|
||||
url: m,
|
||||
method: P,
|
||||
headers: i,
|
||||
authentication: O,
|
||||
authenticationType: d,
|
||||
body: g,
|
||||
bodyType: I,
|
||||
folderId: null,
|
||||
sortPriority: 0
|
||||
};
|
||||
}
|
||||
const ne = (n) => {
|
||||
let s = [];
|
||||
for (const e of H) {
|
||||
const t = n[e];
|
||||
if (!(!t || t.length === 0))
|
||||
for (const c of t) {
|
||||
if (typeof c != "string")
|
||||
continue;
|
||||
const [m, f] = c.split("=");
|
||||
c.startsWith("@") ? s.push({
|
||||
name: m ?? "",
|
||||
value: "",
|
||||
filePath: c.slice(1),
|
||||
enabled: !0
|
||||
}) : s.push({
|
||||
name: m ?? "",
|
||||
value: e === "data-urlencode" ? encodeURIComponent(f ?? "") : f ?? "",
|
||||
enabled: !0
|
||||
});
|
||||
}
|
||||
}
|
||||
return s;
|
||||
}, A = (n, s, e) => {
|
||||
for (const t of e)
|
||||
if (n[t] && n[t].length)
|
||||
return n[t][0];
|
||||
return s;
|
||||
};
|
||||
function W(n, s) {
const e = n.indexOf(s);
return e > -1 ? [n.slice(0, e), n.slice(e + 1)] : [n];
}
const k = {};
function N(n) {
return k[n] = (k[n] ?? -1) + 1, `GENERATE_ID::${n.toUpperCase()}_${k[n]}`;
}
export {
ie as description,
ae as id,
te as importCommand,
se as name,
oe as pluginHookImport
};
File diff suppressed because it is too large
@@ -1,68 +1,73 @@
|
||||
const T = "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", w = "https://schema.getpostman.com/json/collection/v2.0.0/collection.json", A = [w, T];
|
||||
function q(e) {
|
||||
const t = b(e);
|
||||
const S = "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", _ = "https://schema.getpostman.com/json/collection/v2.0.0/collection.json", O = [_, S];
|
||||
function v(e) {
|
||||
var g;
|
||||
const t = k(e);
|
||||
if (t == null)
|
||||
return;
|
||||
const n = a(t.info);
|
||||
if (!A.includes(n.schema) || !Array.isArray(t.item))
|
||||
const o = i(t.info);
|
||||
if (!O.includes(o.schema) || !Array.isArray(t.item))
|
||||
return;
|
||||
const i = {
|
||||
const u = A(t.auth), s = {
|
||||
workspaces: [],
|
||||
environments: [],
|
||||
requests: [],
|
||||
httpRequests: [],
|
||||
folders: []
|
||||
}, c = {
|
||||
}, n = {
|
||||
model: "workspace",
|
||||
id: m("wk"),
|
||||
name: n.name || "Postman Import",
|
||||
description: n.description || ""
|
||||
id: h("workspace"),
|
||||
name: o.name || "Postman Import",
|
||||
description: o.description || "",
|
||||
variables: ((g = t.variable) == null ? void 0 : g.map((r) => ({
|
||||
name: r.key,
|
||||
value: r.value
|
||||
}))) ?? []
|
||||
};
|
||||
i.workspaces.push(c);
|
||||
const f = (r, u = null) => {
|
||||
s.workspaces.push(n);
|
||||
const T = (r, p = null) => {
|
||||
if (typeof r.name == "string" && Array.isArray(r.item)) {
|
||||
const o = {
|
||||
const a = {
|
||||
model: "folder",
|
||||
workspaceId: c.id,
|
||||
id: m("fl"),
|
||||
workspaceId: n.id,
|
||||
id: h("folder"),
|
||||
name: r.name,
|
||||
folderId: u
|
||||
folderId: p
|
||||
};
|
||||
i.folders.push(o);
|
||||
for (const s of r.item)
|
||||
f(s, o.id);
|
||||
s.folders.push(a);
|
||||
for (const l of r.item)
|
||||
T(l, a.id);
|
||||
} else if (typeof r.name == "string" && "request" in r) {
|
||||
const o = a(r.request), s = k(o.body), d = S(o.auth), g = {
|
||||
const a = i(r.request), l = j(a.body), w = A(a.auth), d = w.authenticationType == null ? u : w, q = {
|
||||
model: "http_request",
|
||||
id: m("rq"),
|
||||
workspaceId: c.id,
|
||||
folderId: u,
|
||||
id: h("http_request"),
|
||||
workspaceId: n.id,
|
||||
folderId: p,
|
||||
name: r.name,
|
||||
method: o.method || "GET",
|
||||
url: typeof o.url == "string" ? o.url : a(o.url).raw,
|
||||
body: s.body,
|
||||
bodyType: s.bodyType,
|
||||
method: a.method || "GET",
|
||||
url: typeof a.url == "string" ? a.url : i(a.url).raw,
|
||||
body: l.body,
|
||||
bodyType: l.bodyType,
|
||||
authentication: d.authentication,
|
||||
authenticationType: d.authenticationType,
|
||||
headers: [
|
||||
...s.headers,
|
||||
...l.headers,
|
||||
...d.headers,
|
||||
...y(o.header).map((p) => ({
|
||||
name: p.key,
|
||||
value: p.value,
|
||||
enabled: !p.disabled
|
||||
...b(a.header).map((m) => ({
|
||||
name: m.key,
|
||||
value: m.value,
|
||||
enabled: !m.disabled
|
||||
}))
|
||||
]
|
||||
};
|
||||
i.requests.push(g);
|
||||
s.httpRequests.push(q);
|
||||
} else
|
||||
console.log("Unknown item", r, u);
|
||||
console.log("Unknown item", r, p);
|
||||
};
|
||||
for (const r of t.item)
|
||||
f(r);
|
||||
return { resources: h(i) };
|
||||
T(r);
|
||||
return { resources: f(s) };
|
||||
}
|
||||
function S(e) {
|
||||
const t = a(e);
|
||||
function A(e) {
|
||||
const t = i(e);
|
||||
return "basic" in t ? {
|
||||
headers: [],
|
||||
authenticationType: "basic",
|
||||
@@ -70,10 +75,17 @@ function S(e) {
|
||||
username: t.basic.username || "",
|
||||
password: t.basic.password || ""
|
||||
}
|
||||
} : "bearer" in t ? {
|
||||
headers: [],
|
||||
authenticationType: "bearer",
|
||||
authentication: {
|
||||
token: t.bearer.token || ""
|
||||
}
|
||||
} : { headers: [], authenticationType: null, authentication: {} };
|
||||
}
|
||||
function k(e) {
|
||||
const t = a(e);
|
||||
function j(e) {
|
||||
var o, c, u, s;
|
||||
const t = i(e);
|
||||
return "graphql" in t ? {
|
||||
headers: [
|
||||
{
|
||||
@@ -85,7 +97,7 @@ function k(e) {
|
||||
bodyType: "graphql",
|
||||
body: {
|
||||
text: JSON.stringify(
|
||||
{ query: t.graphql.query, variables: b(t.graphql.variables) },
|
||||
{ query: t.graphql.query, variables: k(t.graphql.variables) },
|
||||
null,
|
||||
2
|
||||
)
|
||||
@@ -100,7 +112,7 @@ function k(e) {
|
||||
],
|
||||
bodyType: "application/x-www-form-urlencoded",
|
||||
body: {
|
||||
form: y(t.urlencoded).map((n) => ({
|
||||
form: b(t.urlencoded).map((n) => ({
|
||||
enabled: !n.disabled,
|
||||
name: n.key ?? "",
|
||||
value: n.value ?? ""
|
||||
@@ -116,9 +128,10 @@ function k(e) {
|
||||
],
|
||||
bodyType: "multipart/form-data",
|
||||
body: {
|
||||
form: y(t.formdata).map(
|
||||
form: b(t.formdata).map(
|
||||
(n) => n.src != null ? {
|
||||
enabled: !n.disabled,
|
||||
contentType: n.contentType ?? null,
|
||||
name: n.key ?? "",
|
||||
file: n.src ?? ""
|
||||
} : {
|
||||
@@ -128,33 +141,42 @@ function k(e) {
|
||||
}
|
||||
)
|
||||
}
|
||||
} : "raw" in t ? {
|
||||
headers: [
|
||||
{
|
||||
name: "Content-Type",
|
||||
value: ((c = (o = t.options) == null ? void 0 : o.raw) == null ? void 0 : c.language) === "json" ? "application/json" : "",
|
||||
enabled: !0
|
||||
}
|
||||
],
|
||||
bodyType: ((s = (u = t.options) == null ? void 0 : u.raw) == null ? void 0 : s.language) === "json" ? "application/json" : "other",
|
||||
body: {
|
||||
text: t.raw ?? ""
|
||||
}
|
||||
} : { headers: [], bodyType: null, body: {} };
|
||||
}
|
||||
function b(e) {
|
||||
function k(e) {
|
||||
try {
|
||||
return a(JSON.parse(e));
|
||||
return i(JSON.parse(e));
|
||||
} catch {
|
||||
}
|
||||
return null;
|
||||
}
|
||||
function a(e) {
|
||||
function i(e) {
|
||||
return Object.prototype.toString.call(e) === "[object Object]" ? e : {};
|
||||
}
|
||||
function y(e) {
|
||||
function b(e) {
|
||||
return Object.prototype.toString.call(e) === "[object Array]" ? e : [];
|
||||
}
|
||||
function h(e) {
|
||||
return typeof e == "string" ? e.replace(/{{\s*(_\.)?([^}]+)\s*}}/g, "${[$2]}") : Array.isArray(e) && e != null ? e.map(h) : typeof e == "object" && e != null ? Object.fromEntries(
|
||||
Object.entries(e).map(([t, n]) => [t, h(n)])
|
||||
function f(e) {
|
||||
return typeof e == "string" ? e.replace(/{{\s*(_\.)?([^}]+)\s*}}/g, "${[$2]}") : Array.isArray(e) && e != null ? e.map(f) : typeof e == "object" && e != null ? Object.fromEntries(
|
||||
Object.entries(e).map(([t, o]) => [t, f(o)])
|
||||
) : e;
|
||||
}
|
||||
function m(e) {
|
||||
const t = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
|
||||
let n = `${e}_`;
|
||||
for (let l = 0; l < 10; l++)
|
||||
n += t[Math.floor(Math.random() * t.length)];
|
||||
return n;
|
||||
const y = {};
|
||||
function h(e) {
|
||||
return y[e] = (y[e] ?? -1) + 1, `GENERATE_ID::${e.toUpperCase()}_${y[e]}`;
|
||||
}
|
||||
export {
|
||||
q as pluginHookImport
|
||||
v as pluginHookImport
|
||||
};
|
||||
|
||||
@@ -5,8 +5,8 @@ function u(r) {
} catch {
return;
}
if (t(e) && "yaakSchema" in e && (e.yaakSchema === 1 && (e.resources.httpRequests = e.resources.requests, e.yaakSchema = 2), e.yaakSchema === 2))
return { resources: e.resources };
if (!(!t(e) || !("yaakSchema" in e)))
return "requests" in e.resources && (e.resources.httpRequests = e.resources.requests, delete e.resources.requests), { resources: e.resources };
}
function t(r) {
return Object.prototype.toString.call(r) === "[object Object]";
162
src-tauri/protoc-vendored/include/google/protobuf/any.proto
Normal file
@@ -0,0 +1,162 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option go_package = "google.golang.org/protobuf/types/known/anypb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "AnyProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
|
||||
// `Any` contains an arbitrary serialized protocol buffer message along with a
|
||||
// URL that describes the type of the serialized message.
|
||||
//
|
||||
// Protobuf library provides support to pack/unpack Any values in the form
|
||||
// of utility functions or additional generated methods of the Any type.
|
||||
//
|
||||
// Example 1: Pack and unpack a message in C++.
|
||||
//
|
||||
// Foo foo = ...;
|
||||
// Any any;
|
||||
// any.PackFrom(foo);
|
||||
// ...
|
||||
// if (any.UnpackTo(&foo)) {
|
||||
// ...
|
||||
// }
|
||||
//
|
||||
// Example 2: Pack and unpack a message in Java.
|
||||
//
|
||||
// Foo foo = ...;
|
||||
// Any any = Any.pack(foo);
|
||||
// ...
|
||||
// if (any.is(Foo.class)) {
|
||||
// foo = any.unpack(Foo.class);
|
||||
// }
|
||||
// // or ...
|
||||
// if (any.isSameTypeAs(Foo.getDefaultInstance())) {
|
||||
// foo = any.unpack(Foo.getDefaultInstance());
|
||||
// }
|
||||
//
|
||||
// Example 3: Pack and unpack a message in Python.
|
||||
//
|
||||
// foo = Foo(...)
|
||||
// any = Any()
|
||||
// any.Pack(foo)
|
||||
// ...
|
||||
// if any.Is(Foo.DESCRIPTOR):
|
||||
// any.Unpack(foo)
|
||||
// ...
|
||||
//
|
||||
// Example 4: Pack and unpack a message in Go
|
||||
//
|
||||
// foo := &pb.Foo{...}
|
||||
// any, err := anypb.New(foo)
|
||||
// if err != nil {
|
||||
// ...
|
||||
// }
|
||||
// ...
|
||||
// foo := &pb.Foo{}
|
||||
// if err := any.UnmarshalTo(foo); err != nil {
|
||||
// ...
|
||||
// }
|
||||
//
|
||||
// The pack methods provided by protobuf library will by default use
|
||||
// 'type.googleapis.com/full.type.name' as the type URL and the unpack
|
||||
// methods only use the fully qualified type name after the last '/'
|
||||
// in the type URL, for example "foo.bar.com/x/y.z" will yield type
|
||||
// name "y.z".
|
||||
//
|
||||
// JSON
|
||||
// ====
|
||||
// The JSON representation of an `Any` value uses the regular
|
||||
// representation of the deserialized, embedded message, with an
|
||||
// additional field `@type` which contains the type URL. Example:
|
||||
//
|
||||
// package google.profile;
|
||||
// message Person {
|
||||
// string first_name = 1;
|
||||
// string last_name = 2;
|
||||
// }
|
||||
//
|
||||
// {
|
||||
// "@type": "type.googleapis.com/google.profile.Person",
|
||||
// "firstName": <string>,
|
||||
// "lastName": <string>
|
||||
// }
|
||||
//
|
||||
// If the embedded message type is well-known and has a custom JSON
|
||||
// representation, that representation will be embedded adding a field
|
||||
// `value` which holds the custom JSON in addition to the `@type`
|
||||
// field. Example (for message [google.protobuf.Duration][]):
|
||||
//
|
||||
// {
|
||||
// "@type": "type.googleapis.com/google.protobuf.Duration",
|
||||
// "value": "1.212s"
|
||||
// }
|
||||
//
|
||||
message Any {
|
||||
// A URL/resource name that uniquely identifies the type of the serialized
|
||||
// protocol buffer message. This string must contain at least
|
||||
// one "/" character. The last segment of the URL's path must represent
|
||||
// the fully qualified name of the type (as in
|
||||
// `path/google.protobuf.Duration`). The name should be in a canonical form
|
||||
// (e.g., leading "." is not accepted).
|
||||
//
|
||||
// In practice, teams usually precompile into the binary all types that they
|
||||
// expect it to use in the context of Any. However, for URLs which use the
|
||||
// scheme `http`, `https`, or no scheme, one can optionally set up a type
|
||||
// server that maps type URLs to message definitions as follows:
|
||||
//
|
||||
// * If no scheme is provided, `https` is assumed.
|
||||
// * An HTTP GET on the URL must yield a [google.protobuf.Type][]
|
||||
// value in binary format, or produce an error.
|
||||
// * Applications are allowed to cache lookup results based on the
|
||||
// URL, or have them precompiled into a binary to avoid any
|
||||
// lookup. Therefore, binary compatibility needs to be preserved
|
||||
// on changes to types. (Use versioned type names to manage
|
||||
// breaking changes.)
|
||||
//
|
||||
// Note: this functionality is not currently available in the official
|
||||
// protobuf release, and it is not used for type URLs beginning with
|
||||
// type.googleapis.com. As of May 2023, there are no widely used type server
|
||||
// implementations and no plans to implement one.
|
||||
//
|
||||
// Schemes other than `http`, `https` (or the empty scheme) might be
|
||||
// used with implementation specific semantics.
|
||||
//
|
||||
string type_url = 1;
|
||||
|
||||
// Must be a valid serialized protocol buffer of the above specified type.
|
||||
bytes value = 2;
|
||||
}
|
||||
207
src-tauri/protoc-vendored/include/google/protobuf/api.proto
Normal file
@@ -0,0 +1,207 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
import "google/protobuf/source_context.proto";
|
||||
import "google/protobuf/type.proto";
|
||||
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "ApiProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
option go_package = "google.golang.org/protobuf/types/known/apipb";
|
||||
|
||||
// Api is a light-weight descriptor for an API Interface.
|
||||
//
|
||||
// Interfaces are also described as "protocol buffer services" in some contexts,
|
||||
// such as by the "service" keyword in a .proto file, but they are different
|
||||
// from API Services, which represent a concrete implementation of an interface
|
||||
// as opposed to simply a description of methods and bindings. They are also
|
||||
// sometimes simply referred to as "APIs" in other contexts, such as the name of
|
||||
// this message itself. See https://cloud.google.com/apis/design/glossary for
|
||||
// detailed terminology.
|
||||
message Api {
|
||||
// The fully qualified name of this interface, including package name
|
||||
// followed by the interface's simple name.
|
||||
string name = 1;
|
||||
|
||||
// The methods of this interface, in unspecified order.
|
||||
repeated Method methods = 2;
|
||||
|
||||
// Any metadata attached to the interface.
|
||||
repeated Option options = 3;
|
||||
|
||||
// A version string for this interface. If specified, must have the form
|
||||
// `major-version.minor-version`, as in `1.10`. If the minor version is
|
||||
// omitted, it defaults to zero. If the entire version field is empty, the
|
||||
// major version is derived from the package name, as outlined below. If the
|
||||
// field is not empty, the version in the package name will be verified to be
|
||||
// consistent with what is provided here.
|
||||
//
|
||||
// The versioning schema uses [semantic
|
||||
// versioning](http://semver.org) where the major version number
|
||||
// indicates a breaking change and the minor version an additive,
|
||||
// non-breaking change. Both version numbers are signals to users
|
||||
// what to expect from different versions, and should be carefully
|
||||
// chosen based on the product plan.
|
||||
//
|
||||
// The major version is also reflected in the package name of the
|
||||
// interface, which must end in `v<major-version>`, as in
|
||||
// `google.feature.v1`. For major versions 0 and 1, the suffix can
|
||||
// be omitted. Zero major versions must only be used for
|
||||
// experimental, non-GA interfaces.
|
||||
//
|
||||
string version = 4;
|
||||
|
||||
// Source context for the protocol buffer service represented by this
|
||||
// message.
|
||||
SourceContext source_context = 5;
|
||||
|
||||
// Included interfaces. See [Mixin][].
|
||||
repeated Mixin mixins = 6;
|
||||
|
||||
// The source syntax of the service.
|
||||
Syntax syntax = 7;
|
||||
}
|
||||
|
||||
// Method represents a method of an API interface.
|
||||
message Method {
|
||||
// The simple name of this method.
|
||||
string name = 1;
|
||||
|
||||
// A URL of the input message type.
|
||||
string request_type_url = 2;
|
||||
|
||||
// If true, the request is streamed.
|
||||
bool request_streaming = 3;
|
||||
|
||||
// The URL of the output message type.
|
||||
string response_type_url = 4;
|
||||
|
||||
// If true, the response is streamed.
|
||||
bool response_streaming = 5;
|
||||
|
||||
// Any metadata attached to the method.
|
||||
repeated Option options = 6;
|
||||
|
||||
// The source syntax of this method.
|
||||
Syntax syntax = 7;
|
||||
}
|
||||
|
||||
// Declares an API Interface to be included in this interface. The including
|
||||
// interface must redeclare all the methods from the included interface, but
|
||||
// documentation and options are inherited as follows:
|
||||
//
|
||||
// - If after comment and whitespace stripping, the documentation
|
||||
// string of the redeclared method is empty, it will be inherited
|
||||
// from the original method.
|
||||
//
|
||||
// - Each annotation belonging to the service config (http,
|
||||
// visibility) which is not set in the redeclared method will be
|
||||
// inherited.
|
||||
//
|
||||
// - If an http annotation is inherited, the path pattern will be
|
||||
// modified as follows. Any version prefix will be replaced by the
|
||||
// version of the including interface plus the [root][] path if
|
||||
// specified.
|
||||
//
|
||||
// Example of a simple mixin:
|
||||
//
|
||||
// package google.acl.v1;
|
||||
// service AccessControl {
|
||||
// // Get the underlying ACL object.
|
||||
// rpc GetAcl(GetAclRequest) returns (Acl) {
|
||||
// option (google.api.http).get = "/v1/{resource=**}:getAcl";
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// package google.storage.v2;
|
||||
// service Storage {
|
||||
// rpc GetAcl(GetAclRequest) returns (Acl);
|
||||
//
|
||||
// // Get a data record.
|
||||
// rpc GetData(GetDataRequest) returns (Data) {
|
||||
// option (google.api.http).get = "/v2/{resource=**}";
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// Example of a mixin configuration:
|
||||
//
|
||||
// apis:
|
||||
// - name: google.storage.v2.Storage
|
||||
// mixins:
|
||||
// - name: google.acl.v1.AccessControl
|
||||
//
|
||||
// The mixin construct implies that all methods in `AccessControl` are
|
||||
// also declared with same name and request/response types in
|
||||
// `Storage`. A documentation generator or annotation processor will
|
||||
// see the effective `Storage.GetAcl` method after inheriting
|
||||
// documentation and annotations as follows:
|
||||
//
|
||||
// service Storage {
|
||||
// // Get the underlying ACL object.
|
||||
// rpc GetAcl(GetAclRequest) returns (Acl) {
|
||||
// option (google.api.http).get = "/v2/{resource=**}:getAcl";
|
||||
// }
|
||||
// ...
|
||||
// }
|
||||
//
|
||||
// Note how the version in the path pattern changed from `v1` to `v2`.
|
||||
//
|
||||
// If the `root` field in the mixin is specified, it should be a
|
||||
// relative path under which inherited HTTP paths are placed. Example:
|
||||
//
|
||||
// apis:
|
||||
// - name: google.storage.v2.Storage
|
||||
// mixins:
|
||||
// - name: google.acl.v1.AccessControl
|
||||
// root: acls
|
||||
//
|
||||
// This implies the following inherited HTTP annotation:
|
||||
//
|
||||
// service Storage {
|
||||
// // Get the underlying ACL object.
|
||||
// rpc GetAcl(GetAclRequest) returns (Acl) {
|
||||
// option (google.api.http).get = "/v2/acls/{resource=**}:getAcl";
|
||||
// }
|
||||
// ...
|
||||
// }
|
||||
message Mixin {
|
||||
// The fully qualified name of the interface which is included.
|
||||
string name = 1;
|
||||
|
||||
// If non-empty specifies a path under which inherited HTTP paths
|
||||
// are rooted.
|
||||
string root = 2;
|
||||
}
|
||||
@@ -0,0 +1,168 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
//
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file or at
|
||||
// https://developers.google.com/open-source/licenses/bsd
|
||||
|
||||
// Author: kenton@google.com (Kenton Varda)
|
||||
//
|
||||
// protoc (aka the Protocol Compiler) can be extended via plugins. A plugin is
|
||||
// just a program that reads a CodeGeneratorRequest from stdin and writes a
|
||||
// CodeGeneratorResponse to stdout.
|
||||
//
|
||||
// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead
|
||||
// of dealing with the raw protocol defined here.
|
||||
//
|
||||
// A plugin executable needs only to be placed somewhere in the path. The
|
||||
// plugin should be named "protoc-gen-$NAME", and will then be used when the
|
||||
// flag "--${NAME}_out" is passed to protoc.
|
||||
|
||||
syntax = "proto2";
|
||||
|
||||
package google.protobuf.compiler;
|
||||
option java_package = "com.google.protobuf.compiler";
|
||||
option java_outer_classname = "PluginProtos";
|
||||
|
||||
option csharp_namespace = "Google.Protobuf.Compiler";
|
||||
option go_package = "google.golang.org/protobuf/types/pluginpb";
|
||||
|
||||
import "google/protobuf/descriptor.proto";
|
||||
|
||||
// The version number of protocol compiler.
|
||||
message Version {
|
||||
optional int32 major = 1;
|
||||
optional int32 minor = 2;
|
||||
optional int32 patch = 3;
|
||||
// A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
|
||||
// be empty for mainline stable releases.
|
||||
optional string suffix = 4;
|
||||
}
|
||||
|
||||
// An encoded CodeGeneratorRequest is written to the plugin's stdin.
|
||||
message CodeGeneratorRequest {
|
||||
// The .proto files that were explicitly listed on the command-line. The
|
||||
// code generator should generate code only for these files. Each file's
|
||||
// descriptor will be included in proto_file, below.
|
||||
repeated string file_to_generate = 1;
|
||||
|
||||
// The generator parameter passed on the command-line.
|
||||
optional string parameter = 2;
|
||||
|
||||
// FileDescriptorProtos for all files in files_to_generate and everything
|
||||
// they import. The files will appear in topological order, so each file
|
||||
// appears before any file that imports it.
|
||||
//
|
||||
// Note: the files listed in files_to_generate will include runtime-retention
|
||||
// options only, but all other files will include source-retention options.
|
||||
// The source_file_descriptors field below is available in case you need
|
||||
// source-retention options for files_to_generate.
|
||||
//
|
||||
// protoc guarantees that all proto_files will be written after
|
||||
// the fields above, even though this is not technically guaranteed by the
|
||||
// protobuf wire format. This theoretically could allow a plugin to stream
|
||||
// in the FileDescriptorProtos and handle them one by one rather than read
|
||||
// the entire set into memory at once. However, as of this writing, this
|
||||
// is not similarly optimized on protoc's end -- it will store all fields in
|
||||
// memory at once before sending them to the plugin.
|
||||
//
|
||||
// Type names of fields and extensions in the FileDescriptorProto are always
|
||||
// fully qualified.
|
||||
repeated FileDescriptorProto proto_file = 15;
|
||||
|
||||
// File descriptors with all options, including source-retention options.
|
||||
// These descriptors are only provided for the files listed in
|
||||
// files_to_generate.
|
||||
repeated FileDescriptorProto source_file_descriptors = 17;
|
||||
|
||||
// The version number of protocol compiler.
|
||||
optional Version compiler_version = 3;
|
||||
}
|
||||
|
||||
// The plugin writes an encoded CodeGeneratorResponse to stdout.
|
||||
message CodeGeneratorResponse {
|
||||
// Error message. If non-empty, code generation failed. The plugin process
|
||||
// should exit with status code zero even if it reports an error in this way.
|
||||
//
|
||||
// This should be used to indicate errors in .proto files which prevent the
|
||||
// code generator from generating correct code. Errors which indicate a
|
||||
// problem in protoc itself -- such as the input CodeGeneratorRequest being
|
||||
// unparseable -- should be reported by writing a message to stderr and
|
||||
// exiting with a non-zero status code.
|
||||
optional string error = 1;
|
||||
|
||||
// A bitmask of supported features that the code generator supports.
|
||||
// This is a bitwise "or" of values from the Feature enum.
|
||||
optional uint64 supported_features = 2;
|
||||
|
||||
// Sync with code_generator.h.
|
||||
enum Feature {
|
||||
FEATURE_NONE = 0;
|
||||
FEATURE_PROTO3_OPTIONAL = 1;
|
||||
FEATURE_SUPPORTS_EDITIONS = 2;
|
||||
}
|
||||
|
||||
// Represents a single generated file.
|
||||
message File {
|
||||
// The file name, relative to the output directory. The name must not
|
||||
// contain "." or ".." components and must be relative, not be absolute (so,
|
||||
// the file cannot lie outside the output directory). "/" must be used as
|
||||
// the path separator, not "\".
|
||||
//
|
||||
// If the name is omitted, the content will be appended to the previous
|
||||
// file. This allows the generator to break large files into small chunks,
|
||||
// and allows the generated text to be streamed back to protoc so that large
|
||||
// files need not reside completely in memory at one time. Note that as of
|
||||
// this writing protoc does not optimize for this -- it will read the entire
|
||||
// CodeGeneratorResponse before writing files to disk.
|
||||
optional string name = 1;
|
||||
|
||||
// If non-empty, indicates that the named file should already exist, and the
|
||||
// content here is to be inserted into that file at a defined insertion
|
||||
// point. This feature allows a code generator to extend the output
|
||||
// produced by another code generator. The original generator may provide
|
||||
// insertion points by placing special annotations in the file that look
|
||||
// like:
|
||||
// @@protoc_insertion_point(NAME)
|
||||
// The annotation can have arbitrary text before and after it on the line,
|
||||
// which allows it to be placed in a comment. NAME should be replaced with
|
||||
// an identifier naming the point -- this is what other generators will use
|
||||
// as the insertion_point. Code inserted at this point will be placed
|
||||
// immediately above the line containing the insertion point (thus multiple
|
||||
// insertions to the same point will come out in the order they were added).
|
||||
// The double-@ is intended to make it unlikely that the generated code
|
||||
// could contain things that look like insertion points by accident.
|
||||
//
|
||||
// For example, the C++ code generator places the following line in the
|
||||
// .pb.h files that it generates:
|
||||
// // @@protoc_insertion_point(namespace_scope)
|
||||
// This line appears within the scope of the file's package namespace, but
|
||||
// outside of any particular class. Another plugin can then specify the
|
||||
// insertion_point "namespace_scope" to generate additional classes or
|
||||
// other declarations that should be placed in this scope.
|
||||
//
|
||||
// Note that if the line containing the insertion point begins with
|
||||
// whitespace, the same whitespace will be added to every line of the
|
||||
// inserted text. This is useful for languages like Python, where
|
||||
// indentation matters. In these languages, the insertion point comment
|
||||
// should be indented the same amount as any inserted code will need to be
|
||||
// in order to work correctly in that context.
|
||||
//
|
||||
// The code generator that generates the initial file and the one which
|
||||
// inserts into it must both run as part of a single invocation of protoc.
|
||||
// Code generators are executed in the order in which they appear on the
|
||||
// command line.
|
||||
//
|
||||
// If |insertion_point| is present, |name| must also be present.
|
||||
optional string insertion_point = 2;
|
||||
|
||||
// The file contents.
|
||||
optional string content = 15;
|
||||
|
||||
// Information describing the file content being inserted. If an insertion
|
||||
// point is used, this information will be appropriately offset and inserted
|
||||
// into the code generation metadata for the generated files.
|
||||
optional GeneratedCodeInfo generated_code_info = 16;
|
||||
}
|
||||
repeated File file = 15;
|
||||
}
|
||||
1218
src-tauri/protoc-vendored/include/google/protobuf/descriptor.proto
Normal file
File diff suppressed because it is too large
115
src-tauri/protoc-vendored/include/google/protobuf/duration.proto
Normal file
@@ -0,0 +1,115 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option cc_enable_arenas = true;
|
||||
option go_package = "google.golang.org/protobuf/types/known/durationpb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "DurationProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
|
||||
// A Duration represents a signed, fixed-length span of time represented
|
||||
// as a count of seconds and fractions of seconds at nanosecond
|
||||
// resolution. It is independent of any calendar and concepts like "day"
|
||||
// or "month". It is related to Timestamp in that the difference between
|
||||
// two Timestamp values is a Duration and it can be added or subtracted
|
||||
// from a Timestamp. Range is approximately +-10,000 years.
|
||||
//
|
||||
// # Examples
|
||||
//
|
||||
// Example 1: Compute Duration from two Timestamps in pseudo code.
|
||||
//
|
||||
// Timestamp start = ...;
|
||||
// Timestamp end = ...;
|
||||
// Duration duration = ...;
|
||||
//
|
||||
// duration.seconds = end.seconds - start.seconds;
|
||||
// duration.nanos = end.nanos - start.nanos;
|
||||
//
|
||||
// if (duration.seconds < 0 && duration.nanos > 0) {
|
||||
// duration.seconds += 1;
|
||||
// duration.nanos -= 1000000000;
|
||||
// } else if (duration.seconds > 0 && duration.nanos < 0) {
|
||||
// duration.seconds -= 1;
|
||||
// duration.nanos += 1000000000;
|
||||
// }
|
||||
//
|
||||
// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
|
||||
//
|
||||
// Timestamp start = ...;
|
||||
// Duration duration = ...;
|
||||
// Timestamp end = ...;
|
||||
//
|
||||
// end.seconds = start.seconds + duration.seconds;
|
||||
// end.nanos = start.nanos + duration.nanos;
|
||||
//
|
||||
// if (end.nanos < 0) {
|
||||
// end.seconds -= 1;
|
||||
// end.nanos += 1000000000;
|
||||
// } else if (end.nanos >= 1000000000) {
|
||||
// end.seconds += 1;
|
||||
// end.nanos -= 1000000000;
|
||||
// }
|
||||
//
|
||||
// Example 3: Compute Duration from datetime.timedelta in Python.
|
||||
//
|
||||
// td = datetime.timedelta(days=3, minutes=10)
|
||||
// duration = Duration()
|
||||
// duration.FromTimedelta(td)
|
||||
//
|
||||
// # JSON Mapping
|
||||
//
|
||||
// In JSON format, the Duration type is encoded as a string rather than an
|
||||
// object, where the string ends in the suffix "s" (indicating seconds) and
|
||||
// is preceded by the number of seconds, with nanoseconds expressed as
|
||||
// fractional seconds. For example, 3 seconds with 0 nanoseconds should be
|
||||
// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
|
||||
// be expressed in JSON format as "3.000000001s", and 3 seconds and 1
|
||||
// microsecond should be expressed in JSON format as "3.000001s".
|
||||
//
|
||||
message Duration {
|
||||
// Signed seconds of the span of time. Must be from -315,576,000,000
|
||||
// to +315,576,000,000 inclusive. Note: these bounds are computed from:
|
||||
// 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
|
||||
int64 seconds = 1;
|
||||
|
||||
// Signed fractions of a second at nanosecond resolution of the span
|
||||
// of time. Durations less than one second are represented with a 0
|
||||
// `seconds` field and a positive or negative `nanos` field. For durations
|
||||
// of one second or more, a non-zero value for the `nanos` field must be
|
||||
// of the same sign as the `seconds` field. Must be from -999,999,999
|
||||
// to +999,999,999 inclusive.
|
||||
int32 nanos = 2;
|
||||
}
|
||||
@@ -0,0 +1,51 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option go_package = "google.golang.org/protobuf/types/known/emptypb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "EmptyProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
option cc_enable_arenas = true;
|
||||
|
||||
// A generic empty message that you can re-use to avoid defining duplicated
|
||||
// empty messages in your APIs. A typical example is to use it as the request
|
||||
// or the response type of an API method. For instance:
|
||||
//
|
||||
// service Foo {
|
||||
// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
|
||||
// }
|
||||
//
|
||||
message Empty {}
|
||||
@@ -0,0 +1,245 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "FieldMaskProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
option go_package = "google.golang.org/protobuf/types/known/fieldmaskpb";
|
||||
option cc_enable_arenas = true;
|
||||
|
||||
// `FieldMask` represents a set of symbolic field paths, for example:
|
||||
//
|
||||
// paths: "f.a"
|
||||
// paths: "f.b.d"
|
||||
//
|
||||
// Here `f` represents a field in some root message, `a` and `b`
|
||||
// fields in the message found in `f`, and `d` a field found in the
|
||||
// message in `f.b`.
|
||||
//
|
||||
// Field masks are used to specify a subset of fields that should be
|
||||
// returned by a get operation or modified by an update operation.
|
||||
// Field masks also have a custom JSON encoding (see below).
|
||||
//
|
||||
// # Field Masks in Projections
|
||||
//
|
||||
// When used in the context of a projection, a response message or
|
||||
// sub-message is filtered by the API to only contain those fields as
|
||||
// specified in the mask. For example, if the mask in the previous
|
||||
// example is applied to a response message as follows:
|
||||
//
|
||||
// f {
|
||||
// a : 22
|
||||
// b {
|
||||
// d : 1
|
||||
// x : 2
|
||||
// }
|
||||
// y : 13
|
||||
// }
|
||||
// z: 8
|
||||
//
|
||||
// The result will not contain specific values for fields x,y and z
|
||||
// (their value will be set to the default, and omitted in proto text
|
||||
// output):
|
||||
//
|
||||
//
|
||||
// f {
|
||||
// a : 22
|
||||
// b {
|
||||
// d : 1
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// A repeated field is not allowed except at the last position of a
|
||||
// paths string.
|
||||
//
|
||||
// If a FieldMask object is not present in a get operation, the
|
||||
// operation applies to all fields (as if a FieldMask of all fields
|
||||
// had been specified).
|
||||
//
|
||||
// Note that a field mask does not necessarily apply to the
|
||||
// top-level response message. In case of a REST get operation, the
|
||||
// field mask applies directly to the response, but in case of a REST
|
||||
// list operation, the mask instead applies to each individual message
|
||||
// in the returned resource list. In case of a REST custom method,
|
||||
// other definitions may be used. Where the mask applies will be
|
||||
// clearly documented together with its declaration in the API. In
|
||||
// any case, the effect on the returned resource/resources is required
|
||||
// behavior for APIs.
|
||||
//
|
||||
// # Field Masks in Update Operations
|
||||
//
|
||||
// A field mask in update operations specifies which fields of the
|
||||
// targeted resource are going to be updated. The API is required
|
||||
// to only change the values of the fields as specified in the mask
|
||||
// and leave the others untouched. If a resource is passed in to
|
||||
// describe the updated values, the API ignores the values of all
|
||||
// fields not covered by the mask.
|
||||
//
|
||||
// If a repeated field is specified for an update operation, new values will
|
||||
// be appended to the existing repeated field in the target resource. Note that
|
||||
// a repeated field is only allowed in the last position of a `paths` string.
|
||||
//
|
||||
// If a sub-message is specified in the last position of the field mask for an
|
||||
// update operation, then new value will be merged into the existing sub-message
|
||||
// in the target resource.
|
||||
//
|
||||
// For example, given the target message:
|
||||
//
|
||||
// f {
|
||||
// b {
|
||||
// d: 1
|
||||
// x: 2
|
||||
// }
|
||||
// c: [1]
|
||||
// }
|
||||
//
|
||||
// And an update message:
|
||||
//
|
||||
// f {
|
||||
// b {
|
||||
// d: 10
|
||||
// }
|
||||
// c: [2]
|
||||
// }
|
||||
//
|
||||
// then if the field mask is:
|
||||
//
|
||||
// paths: ["f.b", "f.c"]
|
||||
//
|
||||
// then the result will be:
|
||||
//
|
||||
// f {
|
||||
// b {
|
||||
// d: 10
|
||||
// x: 2
|
||||
// }
|
||||
// c: [1, 2]
|
||||
// }
|
||||
//
|
||||
// An implementation may provide options to override this default behavior for
|
||||
// repeated and message fields.
|
||||
//
|
||||
// In order to reset a field's value to the default, the field must
|
||||
// be in the mask and set to the default value in the provided resource.
|
||||
// Hence, in order to reset all fields of a resource, provide a default
|
||||
// instance of the resource and set all fields in the mask, or do
|
||||
// not provide a mask as described below.
|
||||
//
|
||||
// If a field mask is not present on update, the operation applies to
|
||||
// all fields (as if a field mask of all fields has been specified).
|
||||
// Note that in the presence of schema evolution, this may mean that
|
||||
// fields the client does not know and has therefore not filled into
|
||||
// the request will be reset to their default. If this is unwanted
|
||||
// behavior, a specific service may require a client to always specify
|
||||
// a field mask, producing an error if not.
|
||||
//
|
||||
// As with get operations, the location of the resource which
|
||||
// describes the updated values in the request message depends on the
|
||||
// operation kind. In any case, the effect of the field mask is
|
||||
// required to be honored by the API.
|
||||
//
|
||||
// ## Considerations for HTTP REST
|
||||
//
|
||||
// The HTTP kind of an update operation which uses a field mask must
|
||||
// be set to PATCH instead of PUT in order to satisfy HTTP semantics
|
||||
// (PUT must only be used for full updates).
|
||||
//
|
||||
// # JSON Encoding of Field Masks
|
||||
//
|
||||
// In JSON, a field mask is encoded as a single string where paths are
|
||||
// separated by a comma. Fields name in each path are converted
|
||||
// to/from lower-camel naming conventions.
|
||||
//
|
||||
// As an example, consider the following message declarations:
|
||||
//
|
||||
// message Profile {
|
||||
// User user = 1;
|
||||
// Photo photo = 2;
|
||||
// }
|
||||
// message User {
|
||||
// string display_name = 1;
|
||||
// string address = 2;
|
||||
// }
|
||||
//
|
||||
// In proto a field mask for `Profile` may look as such:
|
||||
//
|
||||
// mask {
|
||||
// paths: "user.display_name"
|
||||
// paths: "photo"
|
||||
// }
|
||||
//
|
||||
// In JSON, the same mask is represented as below:
|
||||
//
|
||||
// {
|
||||
// mask: "user.displayName,photo"
|
||||
// }
|
||||
//
|
||||
// # Field Masks and Oneof Fields
|
||||
//
|
||||
// Field masks treat fields in oneofs just as regular fields. Consider the
|
||||
// following message:
|
||||
//
|
||||
// message SampleMessage {
|
||||
// oneof test_oneof {
|
||||
// string name = 4;
|
||||
// SubMessage sub_message = 9;
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// The field mask can be:
|
||||
//
|
||||
// mask {
|
||||
// paths: "name"
|
||||
// }
|
||||
//
|
||||
// Or:
|
||||
//
|
||||
// mask {
|
||||
// paths: "sub_message"
|
||||
// }
|
||||
//
|
||||
// Note that oneof type names ("test_oneof" in this case) cannot be used in
|
||||
// paths.
|
||||
//
|
||||
// ## Field Mask Verification
|
||||
//
|
||||
// The implementation of any API method which has a FieldMask type field in the
|
||||
// request should verify the included field paths, and return an
|
||||
// `INVALID_ARGUMENT` error if any path is unmappable.
|
||||
message FieldMask {
  // The set of field mask paths.
  repeated string paths = 1;
}
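The JSON encoding rules described above (paths joined by commas, snake_case field names rendered in lowerCamel) are simple enough to sketch by hand. The following Rust snippet is illustrative only; it is not part of this diff and not the protobuf runtime's own codec, and the helper names are made up for the example.

    // Sketch of the JSON encoding described above: join paths with commas and
    // render each snake_case segment in lowerCamelCase. Illustrative only.
    fn field_mask_to_json(paths: &[&str]) -> String {
        paths
            .iter()
            .map(|path| {
                path.split('.')
                    .map(snake_to_lower_camel)
                    .collect::<Vec<_>>()
                    .join(".")
            })
            .collect::<Vec<_>>()
            .join(",")
    }

    fn snake_to_lower_camel(segment: &str) -> String {
        let mut out = String::new();
        let mut upper_next = false;
        for ch in segment.chars() {
            if ch == '_' {
                upper_next = true;
            } else if upper_next {
                out.extend(ch.to_uppercase());
                upper_next = false;
            } else {
                out.push(ch);
            }
        }
        out
    }

    fn main() {
        // Mirrors the `Profile` example above.
        assert_eq!(
            field_mask_to_json(&["user.display_name", "photo"]),
            "user.displayName,photo"
        );
    }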
@@ -0,0 +1,48 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";

package google.protobuf;

option java_package = "com.google.protobuf";
option java_outer_classname = "SourceContextProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
option go_package = "google.golang.org/protobuf/types/known/sourcecontextpb";

// `SourceContext` represents information about the source of a
// protobuf element, like the file in which it is defined.
message SourceContext {
  // The path-qualified name of the .proto file that contained the associated
  // protobuf element. For example: `"google/protobuf/source_context.proto"`.
  string file_name = 1;
}
@@ -0,0 +1,95 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option cc_enable_arenas = true;
|
||||
option go_package = "google.golang.org/protobuf/types/known/structpb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "StructProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
|
||||
// `Struct` represents a structured data value, consisting of fields
// which map to dynamically typed values. In some languages, `Struct`
// might be supported by a native representation. For example, in
// scripting languages like JS a struct is represented as an
// object. The details of that representation are described together
// with the proto support for the language.
//
// The JSON representation for `Struct` is JSON object.
message Struct {
  // Unordered map of dynamically typed values.
  map<string, Value> fields = 1;
}

// `Value` represents a dynamically typed value which can be either
// null, a number, a string, a boolean, a recursive struct value, or a
// list of values. A producer of value is expected to set one of these
// variants. Absence of any variant indicates an error.
//
// The JSON representation for `Value` is JSON value.
message Value {
  // The kind of value.
  oneof kind {
    // Represents a null value.
    NullValue null_value = 1;
    // Represents a double value.
    double number_value = 2;
    // Represents a string value.
    string string_value = 3;
    // Represents a boolean value.
    bool bool_value = 4;
    // Represents a structured value.
    Struct struct_value = 5;
    // Represents a repeated `Value`.
    ListValue list_value = 6;
  }
}

// `NullValue` is a singleton enumeration to represent the null value for the
// `Value` type union.
//
// The JSON representation for `NullValue` is JSON `null`.
enum NullValue {
  // Null value.
  NULL_VALUE = 0;
}

// `ListValue` is a wrapper around a repeated field of values.
//
// The JSON representation for `ListValue` is JSON array.
message ListValue {
  // Repeated field of dynamically typed values.
  repeated Value values = 1;
}
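The `Value` oneof above mirrors JSON's own value kinds one-to-one. As a rough illustration (a sketch only, using `serde_json`, which already appears elsewhere in this diff, rather than generated protobuf types):

    use serde_json::Value as JsonValue;

    // Names the `google.protobuf.Value` variant a given JSON value would use.
    // Sketch only; the real conversion lives in the protobuf runtime.
    fn value_kind(json: &JsonValue) -> &'static str {
        match json {
            JsonValue::Null => "null_value",
            JsonValue::Bool(_) => "bool_value",
            JsonValue::Number(_) => "number_value", // always a double on the proto side
            JsonValue::String(_) => "string_value",
            JsonValue::Array(_) => "list_value",
            JsonValue::Object(_) => "struct_value",
        }
    }

    fn main() {
        let doc = serde_json::json!({ "name": "yaak", "count": 3, "tags": ["http", "grpc"] });
        for (field, value) in doc.as_object().unwrap() {
            println!("{field} -> {}", value_kind(value));
        }
    }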
@@ -0,0 +1,144 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option cc_enable_arenas = true;
|
||||
option go_package = "google.golang.org/protobuf/types/known/timestamppb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "TimestampProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
|
||||
// A Timestamp represents a point in time independent of any time zone or local
|
||||
// calendar, encoded as a count of seconds and fractions of seconds at
|
||||
// nanosecond resolution. The count is relative to an epoch at UTC midnight on
|
||||
// January 1, 1970, in the proleptic Gregorian calendar which extends the
|
||||
// Gregorian calendar backwards to year one.
|
||||
//
|
||||
// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
|
||||
// second table is needed for interpretation, using a [24-hour linear
|
||||
// smear](https://developers.google.com/time/smear).
|
||||
//
|
||||
// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
|
||||
// restricting to that range, we ensure that we can convert to and from [RFC
|
||||
// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
|
||||
//
|
||||
// # Examples
|
||||
//
|
||||
// Example 1: Compute Timestamp from POSIX `time()`.
|
||||
//
|
||||
// Timestamp timestamp;
|
||||
// timestamp.set_seconds(time(NULL));
|
||||
// timestamp.set_nanos(0);
|
||||
//
|
||||
// Example 2: Compute Timestamp from POSIX `gettimeofday()`.
|
||||
//
|
||||
// struct timeval tv;
|
||||
// gettimeofday(&tv, NULL);
|
||||
//
|
||||
// Timestamp timestamp;
|
||||
// timestamp.set_seconds(tv.tv_sec);
|
||||
// timestamp.set_nanos(tv.tv_usec * 1000);
|
||||
//
|
||||
// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
|
||||
//
|
||||
// FILETIME ft;
|
||||
// GetSystemTimeAsFileTime(&ft);
|
||||
// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
|
||||
//
|
||||
// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
|
||||
// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
|
||||
// Timestamp timestamp;
|
||||
// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
|
||||
// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
|
||||
//
|
||||
// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
|
||||
//
|
||||
// long millis = System.currentTimeMillis();
|
||||
//
|
||||
// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
|
||||
// .setNanos((int) ((millis % 1000) * 1000000)).build();
|
||||
//
|
||||
// Example 5: Compute Timestamp from Java `Instant.now()`.
|
||||
//
|
||||
// Instant now = Instant.now();
|
||||
//
|
||||
// Timestamp timestamp =
|
||||
// Timestamp.newBuilder().setSeconds(now.getEpochSecond())
|
||||
// .setNanos(now.getNano()).build();
|
||||
//
|
||||
// Example 6: Compute Timestamp from current time in Python.
|
||||
//
|
||||
// timestamp = Timestamp()
|
||||
// timestamp.GetCurrentTime()
|
||||
//
|
||||
// # JSON Mapping
|
||||
//
|
||||
// In JSON format, the Timestamp type is encoded as a string in the
|
||||
// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
|
||||
// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
|
||||
// where {year} is always expressed using four digits while {month}, {day},
|
||||
// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
|
||||
// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
|
||||
// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
|
||||
// is required. A proto3 JSON serializer should always use UTC (as indicated by
|
||||
// "Z") when printing the Timestamp type and a proto3 JSON parser should be
|
||||
// able to accept both UTC and other timezones (as indicated by an offset).
|
||||
//
|
||||
// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
|
||||
// 01:30 UTC on January 15, 2017.
|
||||
//
|
||||
// In JavaScript, one can convert a Date object to this format using the
|
||||
// standard
|
||||
// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
|
||||
// method. In Python, a standard `datetime.datetime` object can be converted
|
||||
// to this format using
|
||||
// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
|
||||
// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
|
||||
// the Joda Time's [`ISODateTimeFormat.dateTime()`](
|
||||
// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()
|
||||
// ) to obtain a formatter capable of generating timestamps in this format.
|
||||
//
|
||||
message Timestamp {
  // Represents seconds of UTC time since Unix epoch
  // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
  // 9999-12-31T23:59:59Z inclusive.
  int64 seconds = 1;

  // Non-negative fractions of a second at nanosecond resolution. Negative
  // second values with fractions must still have non-negative nanos values
  // that count forward in time. Must be from 0 to 999,999,999
  // inclusive.
  int32 nanos = 2;
}
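In the same spirit as the C++, Java, and Python examples above, a Rust version of the seconds/nanos split could look like this (a sketch only, not part of this diff):

    use std::time::{SystemTime, UNIX_EPOCH};

    // Plain (seconds, nanos) pair as defined by the Timestamp message above.
    struct Ts {
        seconds: i64,
        nanos: i32,
    }

    // Mirrors Example 4: derive the pair from a millisecond count. Euclidean
    // division keeps `nanos` non-negative for pre-1970 instants, as required.
    fn from_millis(millis: i64) -> Ts {
        Ts {
            seconds: millis.div_euclid(1000),
            nanos: (millis.rem_euclid(1000) * 1_000_000) as i32,
        }
    }

    // Derive the pair from the system clock.
    fn now() -> Ts {
        let d = SystemTime::now().duration_since(UNIX_EPOCH).expect("clock before epoch");
        Ts { seconds: d.as_secs() as i64, nanos: d.subsec_nanos() as i32 }
    }

    fn main() {
        // 2017-01-15T01:30:15.01Z from the JSON-mapping example above.
        let ts = from_millis(1_484_443_815_010);
        assert_eq!((ts.seconds, ts.nanos), (1_484_443_815, 10_000_000));
        let _ = now();
    }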
193
src-tauri/protoc-vendored/include/google/protobuf/type.proto
Normal file
@@ -0,0 +1,193 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
import "google/protobuf/any.proto";
|
||||
import "google/protobuf/source_context.proto";
|
||||
|
||||
option cc_enable_arenas = true;
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "TypeProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
option go_package = "google.golang.org/protobuf/types/known/typepb";
|
||||
|
||||
// A protocol buffer message type.
|
||||
message Type {
|
||||
// The fully qualified message name.
|
||||
string name = 1;
|
||||
// The list of fields.
|
||||
repeated Field fields = 2;
|
||||
// The list of types appearing in `oneof` definitions in this type.
|
||||
repeated string oneofs = 3;
|
||||
// The protocol buffer options.
|
||||
repeated Option options = 4;
|
||||
// The source context.
|
||||
SourceContext source_context = 5;
|
||||
// The source syntax.
|
||||
Syntax syntax = 6;
|
||||
// The source edition string, only valid when syntax is SYNTAX_EDITIONS.
|
||||
string edition = 7;
|
||||
}
|
||||
|
||||
// A single field of a message type.
|
||||
message Field {
|
||||
// Basic field types.
|
||||
enum Kind {
|
||||
// Field type unknown.
|
||||
TYPE_UNKNOWN = 0;
|
||||
// Field type double.
|
||||
TYPE_DOUBLE = 1;
|
||||
// Field type float.
|
||||
TYPE_FLOAT = 2;
|
||||
// Field type int64.
|
||||
TYPE_INT64 = 3;
|
||||
// Field type uint64.
|
||||
TYPE_UINT64 = 4;
|
||||
// Field type int32.
|
||||
TYPE_INT32 = 5;
|
||||
// Field type fixed64.
|
||||
TYPE_FIXED64 = 6;
|
||||
// Field type fixed32.
|
||||
TYPE_FIXED32 = 7;
|
||||
// Field type bool.
|
||||
TYPE_BOOL = 8;
|
||||
// Field type string.
|
||||
TYPE_STRING = 9;
|
||||
// Field type group. Proto2 syntax only, and deprecated.
|
||||
TYPE_GROUP = 10;
|
||||
// Field type message.
|
||||
TYPE_MESSAGE = 11;
|
||||
// Field type bytes.
|
||||
TYPE_BYTES = 12;
|
||||
// Field type uint32.
|
||||
TYPE_UINT32 = 13;
|
||||
// Field type enum.
|
||||
TYPE_ENUM = 14;
|
||||
// Field type sfixed32.
|
||||
TYPE_SFIXED32 = 15;
|
||||
// Field type sfixed64.
|
||||
TYPE_SFIXED64 = 16;
|
||||
// Field type sint32.
|
||||
TYPE_SINT32 = 17;
|
||||
// Field type sint64.
|
||||
TYPE_SINT64 = 18;
|
||||
}
|
||||
|
||||
// Whether a field is optional, required, or repeated.
|
||||
enum Cardinality {
|
||||
// For fields with unknown cardinality.
|
||||
CARDINALITY_UNKNOWN = 0;
|
||||
// For optional fields.
|
||||
CARDINALITY_OPTIONAL = 1;
|
||||
// For required fields. Proto2 syntax only.
|
||||
CARDINALITY_REQUIRED = 2;
|
||||
// For repeated fields.
|
||||
CARDINALITY_REPEATED = 3;
|
||||
}
|
||||
|
||||
// The field type.
|
||||
Kind kind = 1;
|
||||
// The field cardinality.
|
||||
Cardinality cardinality = 2;
|
||||
// The field number.
|
||||
int32 number = 3;
|
||||
// The field name.
|
||||
string name = 4;
|
||||
// The field type URL, without the scheme, for message or enumeration
|
||||
// types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`.
|
||||
string type_url = 6;
|
||||
// The index of the field type in `Type.oneofs`, for message or enumeration
|
||||
// types. The first type has index 1; zero means the type is not in the list.
|
||||
int32 oneof_index = 7;
|
||||
// Whether to use alternative packed wire representation.
|
||||
bool packed = 8;
|
||||
// The protocol buffer options.
|
||||
repeated Option options = 9;
|
||||
// The field JSON name.
|
||||
string json_name = 10;
|
||||
// The string value of the default value of this field. Proto2 syntax only.
|
||||
string default_value = 11;
|
||||
}
|
||||
|
||||
// Enum type definition.
|
||||
message Enum {
|
||||
// Enum type name.
|
||||
string name = 1;
|
||||
// Enum value definitions.
|
||||
repeated EnumValue enumvalue = 2;
|
||||
// Protocol buffer options.
|
||||
repeated Option options = 3;
|
||||
// The source context.
|
||||
SourceContext source_context = 4;
|
||||
// The source syntax.
|
||||
Syntax syntax = 5;
|
||||
// The source edition string, only valid when syntax is SYNTAX_EDITIONS.
|
||||
string edition = 6;
|
||||
}
|
||||
|
||||
// Enum value definition.
|
||||
message EnumValue {
|
||||
// Enum value name.
|
||||
string name = 1;
|
||||
// Enum value number.
|
||||
int32 number = 2;
|
||||
// Protocol buffer options.
|
||||
repeated Option options = 3;
|
||||
}
|
||||
|
||||
// A protocol buffer option, which can be attached to a message, field,
|
||||
// enumeration, etc.
|
||||
message Option {
|
||||
// The option's name. For protobuf built-in options (options defined in
|
||||
// descriptor.proto), this is the short name. For example, `"map_entry"`.
|
||||
// For custom options, it should be the fully-qualified name. For example,
|
||||
// `"google.api.http"`.
|
||||
string name = 1;
|
||||
// The option's value packed in an Any message. If the value is a primitive,
|
||||
// the corresponding wrapper type defined in google/protobuf/wrappers.proto
|
||||
// should be used. If the value is an enum, it should be stored as an int32
|
||||
// value using the google.protobuf.Int32Value type.
|
||||
Any value = 2;
|
||||
}
|
||||
|
||||
// The syntax in which a protocol buffer element is defined.
enum Syntax {
  // Syntax `proto2`.
  SYNTAX_PROTO2 = 0;
  // Syntax `proto3`.
  SYNTAX_PROTO3 = 1;
  // Syntax `editions`.
  SYNTAX_EDITIONS = 2;
}
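One easy-to-miss detail in the `Field` message above is that `oneof_index` is 1-based, with 0 meaning the field is not part of any oneof. A tiny sketch of the lookup (illustrative only, not part of this diff):

    // Resolve a 1-based `Field.oneof_index` against `Type.oneofs`.
    // Zero means the field does not belong to a oneof.
    fn oneof_name(oneofs: &[String], oneof_index: i32) -> Option<&str> {
        if oneof_index <= 0 {
            return None;
        }
        oneofs.get((oneof_index - 1) as usize).map(|s| s.as_str())
    }

    fn main() {
        let oneofs = vec!["test_oneof".to_string()];
        assert_eq!(oneof_name(&oneofs, 0), None);
        assert_eq!(oneof_name(&oneofs, 1), Some("test_oneof"));
    }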
123
src-tauri/protoc-vendored/include/google/protobuf/wrappers.proto
Normal file
@@ -0,0 +1,123 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
//
|
||||
// Wrappers for primitive (non-message) types. These types are useful
|
||||
// for embedding primitives in the `google.protobuf.Any` type and for places
|
||||
// where we need to distinguish between the absence of a primitive
|
||||
// typed field and its default value.
|
||||
//
|
||||
// These wrappers have no meaningful use within repeated fields as they lack
|
||||
// the ability to detect presence on individual elements.
|
||||
// These wrappers have no meaningful use within a map or a oneof since
|
||||
// individual entries of a map or fields of a oneof can already detect presence.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option cc_enable_arenas = true;
|
||||
option go_package = "google.golang.org/protobuf/types/known/wrapperspb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "WrappersProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
|
||||
// Wrapper message for `double`.
|
||||
//
|
||||
// The JSON representation for `DoubleValue` is JSON number.
|
||||
message DoubleValue {
|
||||
// The double value.
|
||||
double value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `float`.
|
||||
//
|
||||
// The JSON representation for `FloatValue` is JSON number.
|
||||
message FloatValue {
|
||||
// The float value.
|
||||
float value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `int64`.
|
||||
//
|
||||
// The JSON representation for `Int64Value` is JSON string.
|
||||
message Int64Value {
|
||||
// The int64 value.
|
||||
int64 value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `uint64`.
|
||||
//
|
||||
// The JSON representation for `UInt64Value` is JSON string.
|
||||
message UInt64Value {
|
||||
// The uint64 value.
|
||||
uint64 value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `int32`.
|
||||
//
|
||||
// The JSON representation for `Int32Value` is JSON number.
|
||||
message Int32Value {
|
||||
// The int32 value.
|
||||
int32 value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `uint32`.
|
||||
//
|
||||
// The JSON representation for `UInt32Value` is JSON number.
|
||||
message UInt32Value {
|
||||
// The uint32 value.
|
||||
uint32 value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `bool`.
|
||||
//
|
||||
// The JSON representation for `BoolValue` is JSON `true` and `false`.
|
||||
message BoolValue {
|
||||
// The bool value.
|
||||
bool value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `string`.
|
||||
//
|
||||
// The JSON representation for `StringValue` is JSON string.
|
||||
message StringValue {
|
||||
// The string value.
|
||||
string value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `bytes`.
//
// The JSON representation for `BytesValue` is JSON string.
message BytesValue {
  // The bytes value.
  bytes value = 1;
}
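The wrapper messages in this file exist only to distinguish "field absent" from "field set to its default value", which is the same distinction `Option` expresses in Rust. A brief sketch (illustrative only, with `Option<i32>` standing in for `Int32Value`):

    // Illustrative only: the presence semantics the wrapper types provide.
    fn describe(count: Option<i32>) -> String {
        match count {
            None => "field absent".to_string(),           // no Int32Value at all
            Some(0) => "explicitly set to 0".to_string(), // Int32Value { value: 0 }
            Some(n) => format!("explicitly set to {n}"),
        }
    }

    fn main() {
        assert_eq!(describe(None), "field absent");
        assert_eq!(describe(Some(0)), "explicitly set to 0");
        assert_eq!(describe(Some(7)), "explicitly set to 7");
    }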
@@ -1,19 +1,25 @@
|
||||
use std::fmt::Display;
|
||||
|
||||
use log::{debug, warn};
|
||||
use log::{debug, info, warn};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use sqlx::types::JsonValue;
|
||||
use tauri::{AppHandle, Manager};
|
||||
|
||||
use crate::is_dev;
|
||||
use crate::models::{generate_id, get_key_value_int, get_key_value_string, set_key_value_int, set_key_value_string};
|
||||
use crate::models::{
|
||||
generate_id, get_key_value_int, get_key_value_string, set_key_value_int, set_key_value_string,
|
||||
};
|
||||
|
||||
const NAMESPACE: &str = "analytics";
|
||||
const NUM_LAUNCHES_KEY: &str = "num_launches";
|
||||
|
||||
// serializable
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum AnalyticsResource {
|
||||
App,
|
||||
Appearance,
|
||||
CookieJar,
|
||||
Dialog,
|
||||
Environment,
|
||||
@@ -24,7 +30,9 @@ pub enum AnalyticsResource {
|
||||
HttpRequest,
|
||||
HttpResponse,
|
||||
KeyValue,
|
||||
Setting,
|
||||
Sidebar,
|
||||
Theme,
|
||||
Workspace,
|
||||
}
|
||||
|
||||
@@ -90,20 +98,19 @@ pub struct LaunchEventInfo {
|
||||
pub num_launches: i32,
|
||||
}
|
||||
|
||||
pub async fn track_launch_event(app_handle: &AppHandle) -> LaunchEventInfo {
|
||||
let namespace = "analytics";
|
||||
pub async fn track_launch_event(app: &AppHandle) -> LaunchEventInfo {
|
||||
let last_tracked_version_key = "last_tracked_version";
|
||||
|
||||
let mut info = LaunchEventInfo::default();
|
||||
|
||||
info.num_launches = get_key_value_int(app_handle, namespace, "num_launches", 0).await + 1;
|
||||
info.num_launches = get_num_launches(app).await + 1;
|
||||
info.previous_version =
|
||||
get_key_value_string(app_handle, namespace, last_tracked_version_key, "").await;
|
||||
info.current_version = app_handle.package_info().version.to_string();
|
||||
get_key_value_string(app, NAMESPACE, last_tracked_version_key, "").await;
|
||||
info.current_version = app.package_info().version.to_string();
|
||||
|
||||
if info.previous_version.is_empty() {
|
||||
track_event(
|
||||
app_handle,
|
||||
app,
|
||||
AnalyticsResource::App,
|
||||
AnalyticsAction::LaunchFirst,
|
||||
None,
|
||||
@@ -113,10 +120,10 @@ pub async fn track_launch_event(app_handle: &AppHandle) -> LaunchEventInfo {
|
||||
info.launched_after_update = info.current_version != info.previous_version;
|
||||
if info.launched_after_update {
|
||||
track_event(
|
||||
app_handle,
|
||||
app,
|
||||
AnalyticsResource::App,
|
||||
AnalyticsAction::LaunchUpdate,
|
||||
Some(json!({ "num_launches": info.num_launches })),
|
||||
Some(json!({ NUM_LAUNCHES_KEY: info.num_launches })),
|
||||
)
|
||||
.await;
|
||||
}
|
||||
@@ -124,23 +131,23 @@ pub async fn track_launch_event(app_handle: &AppHandle) -> LaunchEventInfo {
|
||||
|
||||
// Track a launch event in all cases
|
||||
track_event(
|
||||
app_handle,
|
||||
app,
|
||||
AnalyticsResource::App,
|
||||
AnalyticsAction::Launch,
|
||||
Some(json!({ "num_launches": info.num_launches })),
|
||||
Some(json!({ NUM_LAUNCHES_KEY: info.num_launches })),
|
||||
)
|
||||
.await;
|
||||
|
||||
// Update key values
|
||||
|
||||
set_key_value_string(
|
||||
app_handle,
|
||||
namespace,
|
||||
app,
|
||||
NAMESPACE,
|
||||
last_tracked_version_key,
|
||||
info.current_version.as_str(),
|
||||
)
|
||||
.await;
|
||||
set_key_value_int(app_handle, namespace, "num_launches", info.num_launches).await;
|
||||
set_key_value_int(app, NAMESPACE, NUM_LAUNCHES_KEY, info.num_launches).await;
|
||||
|
||||
info
|
||||
}
|
||||
@@ -182,15 +189,12 @@ pub async fn track_event(
|
||||
|
||||
// Disable analytics actual sending in dev
|
||||
if is_dev() {
|
||||
debug!("track: {} {} {:?}", event, attributes_json, params);
|
||||
debug!("track: {}", event);
|
||||
return;
|
||||
}
|
||||
|
||||
if let Err(e) = req.send().await {
|
||||
warn!(
|
||||
"Error sending analytics event: {} {} {} {:?}",
|
||||
e, event, attributes_json, params,
|
||||
);
|
||||
info!("Error sending analytics event: {}", e);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -207,7 +211,7 @@ fn get_os() -> &'static str {
|
||||
}
|
||||
|
||||
fn get_window_size(app_handle: &AppHandle) -> String {
|
||||
let window = match app_handle.windows().into_values().next() {
|
||||
let window = match app_handle.webview_windows().into_values().next() {
|
||||
Some(w) => w,
|
||||
None => return "unknown".to_string(),
|
||||
};
|
||||
@@ -232,10 +236,14 @@ fn get_window_size(app_handle: &AppHandle) -> String {
|
||||
async fn get_id(app_handle: &AppHandle) -> String {
|
||||
let id = get_key_value_string(app_handle, "analytics", "id", "").await;
|
||||
if id.is_empty() {
|
||||
let new_id = generate_id(None);
|
||||
let new_id = generate_id();
|
||||
set_key_value_string(app_handle, "analytics", "id", new_id.as_str()).await;
|
||||
new_id
|
||||
} else {
|
||||
id
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_num_launches(app: &AppHandle) -> i32 {
|
||||
get_key_value_int(app, NAMESPACE, NUM_LAUNCHES_KEY, 0).await
|
||||
}
|
||||
|
||||
@@ -7,20 +7,20 @@ use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use base64::Engine;
|
||||
use http::{HeaderMap, HeaderName, HeaderValue, Method};
|
||||
use http::header::{ACCEPT, USER_AGENT};
|
||||
use http::{HeaderMap, HeaderName, HeaderValue, Method};
|
||||
use log::{error, info, warn};
|
||||
use reqwest::{multipart, Url};
|
||||
use reqwest::redirect::Policy;
|
||||
use reqwest::{multipart, Url};
|
||||
use sqlx::types::{Json, JsonValue};
|
||||
use tauri::{Manager, Window};
|
||||
use tauri::{Manager, WebviewWindow};
|
||||
use tokio::sync::oneshot;
|
||||
use tokio::sync::watch::{Receiver};
|
||||
use tokio::sync::watch::Receiver;
|
||||
|
||||
use crate::{models, render, response_err};
|
||||
|
||||
pub async fn send_http_request(
|
||||
window: &Window,
|
||||
window: &WebviewWindow,
|
||||
request: models::HttpRequest,
|
||||
response: &models::HttpResponse,
|
||||
environment: Option<models::Environment>,
|
||||
@@ -35,6 +35,7 @@ pub async fn send_http_request(
|
||||
|
||||
let mut url_string = render::render(&request.url, &workspace, environment.as_ref());
|
||||
|
||||
url_string = ensure_proto(&url_string);
|
||||
if !url_string.starts_with("http://") && !url_string.starts_with("https://") {
|
||||
url_string = format!("http://{}", url_string);
|
||||
}
|
||||
@@ -88,14 +89,24 @@ pub async fn send_http_request(
|
||||
let uri = match http::Uri::from_str(url_string.as_str()) {
|
||||
Ok(u) => u,
|
||||
Err(e) => {
|
||||
return response_err(response, e.to_string(), window).await;
|
||||
return response_err(
|
||||
response,
|
||||
format!("Failed to parse URL \"{}\": {}", url_string, e.to_string()),
|
||||
window,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
};
|
||||
// Yes, we're parsing both URI and URL because they could return different errors
|
||||
let url = match Url::from_str(uri.to_string().as_str()) {
|
||||
Ok(u) => u,
|
||||
Err(e) => {
|
||||
return response_err(response, e.to_string(), window).await;
|
||||
return response_err(
|
||||
response,
|
||||
format!("Failed to parse URL \"{}\": {}", url_string, e.to_string()),
|
||||
window,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -112,7 +123,6 @@ pub async fn send_http_request(
|
||||
// everything manually to know that).
|
||||
// if let Some(cookie_store) = maybe_cookie_store.clone() {
|
||||
// let values1 = cookie_store.get_request_values(&url);
|
||||
// println!("COOKIE VLUAES: {:?}", values1.collect::<Vec<_>>());
|
||||
// let raw_value = cookie_store.get_request_values(&url)
|
||||
// .map(|(name, value)| format!("{}={}", name, value))
|
||||
// .collect::<Vec<_>>()
|
||||
@@ -244,6 +254,21 @@ pub async fn send_http_request(
|
||||
}
|
||||
}
|
||||
request_builder = request_builder.form(&form_params);
|
||||
} else if body_type == "binary" && request_body.contains_key("filePath") {
|
||||
let file_path = request_body
|
||||
.get("filePath")
|
||||
.ok_or("filePath not set")?
|
||||
.as_str()
|
||||
.unwrap_or_default();
|
||||
|
||||
match fs::read(file_path).map_err(|e| e.to_string()) {
|
||||
Ok(f) => {
|
||||
request_builder = request_builder.body(f);
|
||||
}
|
||||
Err(e) => {
|
||||
return response_err(response, e, window).await;
|
||||
}
|
||||
}
|
||||
} else if body_type == "multipart/form-data" && request_body.contains_key("form") {
|
||||
let mut multipart_form = multipart::Form::new();
|
||||
if let Some(form_definition) = request_body.get("form") {
|
||||
@@ -253,38 +278,67 @@ pub async fn send_http_request(
|
||||
.unwrap_or(empty_bool)
|
||||
.as_bool()
|
||||
.unwrap_or(false);
|
||||
let name = p
|
||||
let name_raw = p
|
||||
.get("name")
|
||||
.unwrap_or(empty_string)
|
||||
.as_str()
|
||||
.unwrap_or_default();
|
||||
if !enabled || name.is_empty() {
|
||||
|
||||
if !enabled || name_raw.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let file = p
|
||||
let file_path = p
|
||||
.get("file")
|
||||
.unwrap_or(empty_string)
|
||||
.as_str()
|
||||
.unwrap_or_default();
|
||||
let value = p
|
||||
let value_raw = p
|
||||
.get("value")
|
||||
.unwrap_or(empty_string)
|
||||
.as_str()
|
||||
.unwrap_or_default();
|
||||
multipart_form = multipart_form.part(
|
||||
render::render(name, &workspace, environment_ref),
|
||||
match !file.is_empty() {
|
||||
true => {
|
||||
multipart::Part::bytes(fs::read(file).map_err(|e| e.to_string())?)
|
||||
|
||||
let name = render::render(name_raw, &workspace, environment_ref);
|
||||
let mut part = if file_path.is_empty() {
|
||||
multipart::Part::text(render::render(
|
||||
value_raw,
|
||||
&workspace,
|
||||
environment_ref,
|
||||
))
|
||||
} else {
|
||||
match fs::read(file_path) {
|
||||
Ok(f) => multipart::Part::bytes(f),
|
||||
Err(e) => {
|
||||
return response_err(response, e.to_string(), window).await;
|
||||
}
|
||||
false => multipart::Part::text(render::render(
|
||||
value,
|
||||
&workspace,
|
||||
environment_ref,
|
||||
)),
|
||||
},
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
let ct_raw = p
|
||||
.get("contentType")
|
||||
.unwrap_or(empty_string)
|
||||
.as_str()
|
||||
.unwrap_or_default();
|
||||
|
||||
if !ct_raw.is_empty() {
|
||||
let content_type = render::render(ct_raw, &workspace, environment_ref);
|
||||
part = part
|
||||
.mime_str(content_type.as_str())
|
||||
.map_err(|e| e.to_string())?;
|
||||
}
|
||||
|
||||
if !file_path.is_empty() {
|
||||
let filename = PathBuf::from(file_path)
|
||||
.file_name()
|
||||
.unwrap_or_default()
|
||||
.to_str()
|
||||
.unwrap_or_default()
|
||||
.to_string();
|
||||
part = part.file_name(filename);
|
||||
}
|
||||
|
||||
multipart_form = multipart_form.part(name, part);
|
||||
}
|
||||
}
|
||||
headers.remove("Content-Type"); // reqwest will add this automatically
|
||||
@@ -307,11 +361,11 @@ pub async fn send_http_request(
|
||||
let start = std::time::Instant::now();
|
||||
|
||||
let (resp_tx, resp_rx) = oneshot::channel();
|
||||
|
||||
|
||||
tokio::spawn(async move {
|
||||
let _ = resp_tx.send(client.execute(sendable_req).await);
|
||||
});
|
||||
|
||||
|
||||
let raw_response = tokio::select! {
|
||||
Ok(r) = resp_rx => {r}
|
||||
_ = cancel_rx.changed() => {
|
||||
@@ -358,7 +412,7 @@ pub async fn send_http_request(
|
||||
|
||||
{
|
||||
// Write body to FS
|
||||
let dir = window.app_handle().path_resolver().app_data_dir().unwrap();
|
||||
let dir = window.app_handle().path().app_data_dir().unwrap();
|
||||
let base_dir = dir.join("responses");
|
||||
create_dir_all(base_dir.clone()).expect("Failed to create responses dir");
|
||||
let body_path = match response.id.is_empty() {
|
||||
@@ -423,3 +477,26 @@ pub async fn send_http_request(
|
||||
Err(e) => response_err(response, e.to_string(), window).await,
|
||||
}
|
||||
}
|
||||
|
||||
fn ensure_proto(url_str: &str) -> String {
    if url_str.starts_with("http://") || url_str.starts_with("https://") {
        return url_str.to_string();
    }

    // Url::from_str will fail without a proto, so add one
    let parseable_url = format!("http://{}", url_str);
    if let Ok(u) = Url::from_str(parseable_url.as_str()) {
        match u.host() {
            Some(host) => {
                let h = host.to_string();
                // These TLDs force HTTPS
                if h.ends_with(".app") || h.ends_with(".dev") || h.ends_with(".page") {
                    return format!("https://{url_str}");
                }
            }
            None => {}
        }
    }

    format!("http://{url_str}")
}
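Based only on the function body shown above, its behaviour can be pinned down with a few assertions. This is a sketch of a unit test, not part of the diff:

    #[cfg(test)]
    mod tests {
        use super::ensure_proto;

        #[test]
        fn adds_or_keeps_a_scheme() {
            // Explicit schemes pass through untouched.
            assert_eq!(ensure_proto("https://yaak.app"), "https://yaak.app");
            // Bare hosts default to http://...
            assert_eq!(ensure_proto("example.com"), "http://example.com");
            // ...except for the TLDs the function treats as HTTPS-only
            // (.app, .dev, .page).
            assert_eq!(ensure_proto("yaak.app"), "https://yaak.app");
        }
    }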
1904
src-tauri/src/lib.rs
Normal file
File diff suppressed because it is too large
@@ -7,9 +7,38 @@ use serde::{Deserialize, Serialize};
|
||||
use sqlx::types::chrono::NaiveDateTime;
|
||||
use sqlx::types::{Json, JsonValue};
|
||||
use sqlx::{Pool, Sqlite};
|
||||
use tauri::{AppHandle, Manager, Wry};
|
||||
use tauri::{AppHandle, Manager, WebviewWindow, Wry};
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
pub enum ModelType {
    TypeCookieJar,
    TypeEnvironment,
    TypeFolder,
    TypeGrpcConnection,
    TypeGrpcEvent,
    TypeGrpcRequest,
    TypeHttpRequest,
    TypeHttpResponse,
    TypeWorkspace,
}

impl ModelType {
    pub fn id_prefix(&self) -> String {
        match self {
            ModelType::TypeCookieJar => "cj",
            ModelType::TypeEnvironment => "ev",
            ModelType::TypeFolder => "fl",
            ModelType::TypeGrpcConnection => "gc",
            ModelType::TypeGrpcEvent => "ge",
            ModelType::TypeGrpcRequest => "gr",
            ModelType::TypeHttpRequest => "rq",
            ModelType::TypeHttpResponse => "rs",
            ModelType::TypeWorkspace => "wk",
        }
        .to_string()
    }
}
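The prefixes above end up at the front of every generated model id. The body of `generate_model_id` is not shown in this hunk, so the following is only an assumed sketch of how a prefixed id could be composed; the underscore separator and random suffix are guesses used purely for illustration:

    // Assumed sketch: `generate_model_id` itself is not shown in this diff, so
    // the suffix format and the "_" separator are illustrative guesses showing
    // how `id_prefix` would be consumed.
    fn sketch_model_id(prefix: &str, random_suffix: &str) -> String {
        format!("{}_{}", prefix, random_suffix)
    }

    fn main() {
        // e.g. the workspace prefix "wk" from the table above
        assert_eq!(sketch_model_id("wk", "k3j9f2"), "wk_k3j9f2");
    }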
|
||||
fn default_true() -> bool {
|
||||
true
|
||||
}
|
||||
@@ -23,7 +52,13 @@ pub struct Settings {
|
||||
pub updated_at: NaiveDateTime,
|
||||
pub theme: String,
|
||||
pub appearance: String,
|
||||
pub theme_dark: String,
|
||||
pub theme_light: String,
|
||||
pub update_channel: String,
|
||||
pub interface_font_size: i64,
|
||||
pub interface_scale: i64,
|
||||
pub editor_font_size: i64,
|
||||
pub editor_soft_wrap: bool,
|
||||
}
|
||||
|
||||
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, Default)]
|
||||
@@ -426,9 +461,9 @@ pub async fn get_workspace(mgr: &impl Manager<Wry>, id: &str) -> Result<Workspac
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_workspace(mgr: &impl Manager<Wry>, id: &str) -> Result<Workspace, sqlx::Error> {
|
||||
let db = get_db(mgr).await;
|
||||
let workspace = get_workspace(mgr, id).await?;
|
||||
pub async fn delete_workspace(window: &WebviewWindow, id: &str) -> Result<Workspace, sqlx::Error> {
|
||||
let db = get_db(window).await;
|
||||
let workspace = get_workspace(window, id).await?;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM workspaces
|
||||
@@ -439,11 +474,11 @@ pub async fn delete_workspace(mgr: &impl Manager<Wry>, id: &str) -> Result<Works
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
for r in list_responses_by_workspace_id(mgr, id).await? {
|
||||
delete_http_response(mgr, &r.id).await?;
|
||||
for r in list_responses_by_workspace_id(window, id).await? {
|
||||
delete_http_response(window, &r.id).await?;
|
||||
}
|
||||
|
||||
emit_deleted_model(mgr, workspace)
|
||||
emit_deleted_model(window, workspace)
|
||||
}
|
||||
|
||||
pub async fn get_cookie_jar(mgr: &impl Manager<Wry>, id: &str) -> Result<CookieJar, sqlx::Error> {
|
||||
@@ -481,12 +516,9 @@ pub async fn list_cookie_jars(
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_cookie_jar(
|
||||
mgr: &impl Manager<Wry>,
|
||||
id: &str,
|
||||
) -> Result<CookieJar, sqlx::Error> {
|
||||
let cookie_jar = get_cookie_jar(mgr, id).await?;
|
||||
let db = get_db(mgr).await;
|
||||
pub async fn delete_cookie_jar(window: &WebviewWindow, id: &str) -> Result<CookieJar, sqlx::Error> {
|
||||
let cookie_jar = get_cookie_jar(window, id).await?;
|
||||
let db = get_db(window).await;
|
||||
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
@@ -498,25 +530,25 @@ pub async fn delete_cookie_jar(
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(mgr, cookie_jar)
|
||||
emit_deleted_model(window, cookie_jar)
|
||||
}
|
||||
|
||||
pub async fn duplicate_grpc_request(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
id: &str,
|
||||
) -> Result<GrpcRequest, sqlx::Error> {
|
||||
let mut request = get_grpc_request(mgr, id).await?.clone();
|
||||
let mut request = get_grpc_request(window, id).await?.clone();
|
||||
request.id = "".to_string();
|
||||
upsert_grpc_request(mgr, &request).await
|
||||
upsert_grpc_request(window, &request).await
|
||||
}
|
||||
|
||||
pub async fn upsert_grpc_request(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
request: &GrpcRequest,
|
||||
) -> Result<GrpcRequest, sqlx::Error> {
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
let id = match request.id.as_str() {
|
||||
"" => generate_id(Some("gr")),
|
||||
"" => generate_model_id(ModelType::TypeGrpcRequest),
|
||||
_ => request.id.to_string(),
|
||||
};
|
||||
let trimmed_name = request.name.trim();
|
||||
@@ -556,8 +588,8 @@ pub async fn upsert_grpc_request(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_grpc_request(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
match get_grpc_request(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -607,12 +639,12 @@ pub async fn list_grpc_requests(
|
||||
}
|
||||
|
||||
pub async fn upsert_grpc_connection(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
connection: &GrpcConnection,
|
||||
) -> Result<GrpcConnection, sqlx::Error> {
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
let id = match connection.id.as_str() {
|
||||
"" => generate_id(Some("gc")),
|
||||
"" => generate_model_id(ModelType::TypeGrpcConnection),
|
||||
_ => connection.id.to_string(),
|
||||
};
|
||||
sqlx::query!(
|
||||
@@ -646,8 +678,8 @@ pub async fn upsert_grpc_connection(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_grpc_connection(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
match get_grpc_connection(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -696,12 +728,12 @@ pub async fn list_grpc_connections(
|
||||
}
|
||||
|
||||
pub async fn upsert_grpc_event(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
event: &GrpcEvent,
|
||||
) -> Result<GrpcEvent, sqlx::Error> {
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
let id = match event.id.as_str() {
|
||||
"" => generate_id(Some("ge")),
|
||||
"" => generate_model_id(ModelType::TypeGrpcEvent),
|
||||
_ => event.id.to_string(),
|
||||
};
|
||||
sqlx::query!(
|
||||
@@ -732,8 +764,8 @@ pub async fn upsert_grpc_event(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_grpc_event(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
match get_grpc_event(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -778,16 +810,16 @@ pub async fn list_grpc_events(
|
||||
}
|
||||
|
||||
pub async fn upsert_cookie_jar(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
cookie_jar: &CookieJar,
|
||||
) -> Result<CookieJar, sqlx::Error> {
|
||||
let id = match cookie_jar.id.as_str() {
|
||||
"" => generate_id(Some("cj")),
|
||||
"" => generate_model_id(ModelType::TypeCookieJar),
|
||||
_ => cookie_jar.id.to_string(),
|
||||
};
|
||||
let trimmed_name = cookie_jar.name.trim();
|
||||
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO cookie_jars (
|
||||
@@ -807,8 +839,8 @@ pub async fn upsert_cookie_jar(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_cookie_jar(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
match get_cookie_jar(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -833,11 +865,11 @@ pub async fn list_environments(
|
||||
}
|
||||
|
||||
pub async fn delete_environment(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
id: &str,
|
||||
) -> Result<Environment, sqlx::Error> {
|
||||
let db = get_db(mgr).await;
|
||||
let env = get_environment(mgr, id).await?;
|
||||
let db = get_db(window).await;
|
||||
let env = get_environment(window, id).await?;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM environments
|
||||
@@ -848,7 +880,7 @@ pub async fn delete_environment(
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(mgr, env)
|
||||
emit_deleted_model(window, env)
|
||||
}
|
||||
|
||||
async fn get_settings(mgr: &impl Manager<Wry>) -> Result<Settings, sqlx::Error> {
|
||||
@@ -857,7 +889,9 @@ async fn get_settings(mgr: &impl Manager<Wry>) -> Result<Settings, sqlx::Error>
|
||||
Settings,
|
||||
r#"
|
||||
SELECT
|
||||
id, model, created_at, updated_at, theme, appearance, update_channel
|
||||
id, model, created_at, updated_at, theme, appearance,
|
||||
theme_dark, theme_light, update_channel,
|
||||
interface_font_size, interface_scale, editor_font_size, editor_soft_wrap
|
||||
FROM settings
|
||||
WHERE id = 'default'
|
||||
"#,
|
||||
@@ -886,39 +920,46 @@ pub async fn get_or_create_settings(mgr: &impl Manager<Wry>) -> Settings {
|
||||
}
|
||||
|
||||
pub async fn update_settings(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
settings: Settings,
|
||||
) -> Result<Settings, sqlx::Error> {
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE settings SET (
|
||||
theme, appearance, update_channel
|
||||
) = (?, ?, ?) WHERE id = 'default';
|
||||
theme, appearance, theme_dark, theme_light, update_channel,
|
||||
interface_font_size, interface_scale, editor_font_size, editor_soft_wrap
|
||||
) = (?, ?, ?, ?, ?, ?, ?, ?, ?) WHERE id = 'default';
|
||||
"#,
|
||||
settings.theme,
|
||||
settings.appearance,
|
||||
settings.update_channel
|
||||
settings.theme_dark,
|
||||
settings.theme_light,
|
||||
settings.update_channel,
|
||||
settings.interface_font_size,
|
||||
settings.interface_scale,
|
||||
settings.editor_font_size,
|
||||
settings.editor_soft_wrap,
|
||||
)
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_settings(mgr).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
match get_settings(window).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn upsert_environment(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
environment: Environment,
|
||||
) -> Result<Environment, sqlx::Error> {
|
||||
let id = match environment.id.as_str() {
|
||||
"" => generate_id(Some("ev")),
|
||||
"" => generate_model_id(ModelType::TypeEnvironment),
|
||||
_ => environment.id.to_string(),
|
||||
};
|
||||
let trimmed_name = environment.name.trim();
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO environments (
|
||||
@@ -938,8 +979,8 @@ pub async fn upsert_environment(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_environment(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
match get_environment(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -999,9 +1040,9 @@ pub async fn list_folders(
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_folder(mgr: &impl Manager<Wry>, id: &str) -> Result<Folder, sqlx::Error> {
|
||||
let folder = get_folder(mgr, id).await?;
|
||||
let db = get_db(mgr).await;
|
||||
pub async fn delete_folder(window: &WebviewWindow, id: &str) -> Result<Folder, sqlx::Error> {
|
||||
let folder = get_folder(window, id).await?;
|
||||
let db = get_db(window).await;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM folders
|
||||
@@ -1012,17 +1053,17 @@ pub async fn delete_folder(mgr: &impl Manager<Wry>, id: &str) -> Result<Folder,
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(mgr, folder)
|
||||
emit_deleted_model(window, folder)
|
||||
}
|
||||
|
||||
pub async fn upsert_folder(mgr: &impl Manager<Wry>, r: Folder) -> Result<Folder, sqlx::Error> {
|
||||
pub async fn upsert_folder(window: &WebviewWindow, r: Folder) -> Result<Folder, sqlx::Error> {
|
||||
let id = match r.id.as_str() {
|
||||
"" => generate_id(Some("fl")),
|
||||
"" => generate_model_id(ModelType::TypeFolder),
|
||||
_ => r.id.to_string(),
|
||||
};
|
||||
let trimmed_name = r.name.trim();
|
||||
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO folders (
|
||||
@@ -1044,32 +1085,32 @@ pub async fn upsert_folder(mgr: &impl Manager<Wry>, r: Folder) -> Result<Folder,
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_folder(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
match get_folder(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn duplicate_http_request(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
id: &str,
|
||||
) -> Result<HttpRequest, sqlx::Error> {
|
||||
let mut request = get_http_request(mgr, id).await?.clone();
|
||||
let mut request = get_http_request(window, id).await?.clone();
|
||||
request.id = "".to_string();
|
||||
upsert_http_request(mgr, request).await
|
||||
upsert_http_request(window, request).await
|
||||
}
|
||||
|
||||
pub async fn upsert_http_request(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
r: HttpRequest,
|
||||
) -> Result<HttpRequest, sqlx::Error> {
|
||||
let id = match r.id.as_str() {
|
||||
"" => generate_id(Some("rq")),
|
||||
"" => generate_model_id(ModelType::TypeHttpRequest),
|
||||
_ => r.id.to_string(),
|
||||
};
|
||||
let trimmed_name = r.name.trim();
|
||||
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
|
||||
sqlx::query!(
|
||||
r#"
|
||||
@@ -1109,13 +1150,13 @@ pub async fn upsert_http_request(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_http_request(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
match get_http_request(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn list_requests(
|
||||
pub async fn list_http_requests(
|
||||
mgr: &impl Manager<Wry>,
|
||||
workspace_id: &str,
|
||||
) -> Result<Vec<HttpRequest>, sqlx::Error> {
|
||||
@@ -1165,15 +1206,15 @@ pub async fn get_http_request(
|
||||
}
|
||||
|
||||
pub async fn delete_http_request(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
id: &str,
|
||||
) -> Result<HttpRequest, sqlx::Error> {
|
||||
let req = get_http_request(mgr, id).await?;
|
||||
let req = get_http_request(window, id).await?;
|
||||
|
||||
// DB deletes will cascade but this will delete the files
|
||||
delete_all_http_responses(mgr, id).await?;
|
||||
delete_all_http_responses(window, id).await?;
|
||||
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM http_requests
|
||||
@@ -1184,12 +1225,12 @@ pub async fn delete_http_request(
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(mgr, req)
|
||||
emit_deleted_model(window, req)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn create_http_response(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
request_id: &str,
|
||||
elapsed: i64,
|
||||
elapsed_headers: i64,
|
||||
@@ -1202,10 +1243,10 @@ pub async fn create_http_response(
|
||||
version: Option<&str>,
|
||||
remote_addr: Option<&str>,
|
||||
) -> Result<HttpResponse, sqlx::Error> {
|
||||
let req = get_http_request(mgr, request_id).await?;
|
||||
let id = generate_id(Some("rp"));
|
||||
let req = get_http_request(window, request_id).await?;
|
||||
let id = generate_model_id(ModelType::TypeHttpResponse);
|
||||
let headers_json = Json(headers);
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO http_responses (
|
||||
@@ -1231,14 +1272,14 @@ pub async fn create_http_response(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_http_response(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
match get_http_response(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn cancel_pending_grpc_connections(mgr: &impl Manager<Wry>) -> Result<(), sqlx::Error> {
|
||||
let db = get_db(mgr).await;
|
||||
pub async fn cancel_pending_grpc_connections(app: &AppHandle) -> Result<(), sqlx::Error> {
|
||||
let db = get_db(app).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE grpc_connections
|
||||
@@ -1251,8 +1292,8 @@ pub async fn cancel_pending_grpc_connections(mgr: &impl Manager<Wry>) -> Result<
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn cancel_pending_responses(mgr: &impl Manager<Wry>) -> Result<(), sqlx::Error> {
|
||||
let db = get_db(mgr).await;
|
||||
pub async fn cancel_pending_responses(app: &AppHandle) -> Result<(), sqlx::Error> {
|
||||
let db = get_db(app).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE http_responses
|
||||
@@ -1266,27 +1307,27 @@ pub async fn cancel_pending_responses(mgr: &impl Manager<Wry>) -> Result<(), sql
|
||||
}
|
||||
|
||||
pub async fn update_response_if_id(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
response: &HttpResponse,
|
||||
) -> Result<HttpResponse, sqlx::Error> {
|
||||
if response.id.is_empty() {
|
||||
Ok(response.clone())
|
||||
} else {
|
||||
update_response(mgr, response).await
|
||||
update_response(window, response).await
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn upsert_workspace(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
workspace: Workspace,
|
||||
) -> Result<Workspace, sqlx::Error> {
|
||||
let id = match workspace.id.as_str() {
|
||||
"" => generate_id(Some("wk")),
|
||||
"" => generate_model_id(ModelType::TypeWorkspace),
|
||||
_ => workspace.id.to_string(),
|
||||
};
|
||||
let trimmed_name = workspace.name.trim();
|
||||
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO workspaces (
|
||||
@@ -1314,17 +1355,17 @@ pub async fn upsert_workspace(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_workspace(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
match get_workspace(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn update_response(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
response: &HttpResponse,
|
||||
) -> Result<HttpResponse, sqlx::Error> {
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE http_responses SET (
|
||||
@@ -1348,8 +1389,8 @@ pub async fn update_response(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_http_response(mgr, &response.id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
match get_http_response(window, &response.id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -1427,12 +1468,12 @@ pub async fn list_responses_by_workspace_id(
|
||||
}
|
||||
|
||||
pub async fn delete_grpc_request(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
id: &str,
|
||||
) -> Result<GrpcRequest, sqlx::Error> {
|
||||
let req = get_grpc_request(mgr, id).await?;
|
||||
let req = get_grpc_request(window, id).await?;
|
||||
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM grpc_requests
|
||||
@@ -1443,16 +1484,16 @@ pub async fn delete_grpc_request(
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(mgr, req)
|
||||
emit_deleted_model(window, req)
|
||||
}
|
||||
|
||||
pub async fn delete_grpc_connection(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
id: &str,
|
||||
) -> Result<GrpcConnection, sqlx::Error> {
|
||||
let resp = get_grpc_connection(mgr, id).await?;
|
||||
let resp = get_grpc_connection(window, id).await?;
|
||||
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM grpc_connections
|
||||
@@ -1463,14 +1504,14 @@ pub async fn delete_grpc_connection(
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(mgr, resp)
|
||||
emit_deleted_model(window, resp)
|
||||
}
|
||||
|
||||
pub async fn delete_http_response(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
id: &str,
|
||||
) -> Result<HttpResponse, sqlx::Error> {
|
||||
let resp = get_http_response(mgr, id).await?;
|
||||
let resp = get_http_response(window, id).await?;
|
||||
|
||||
// Delete the body file if it exists
|
||||
if let Some(p) = resp.body_path.clone() {
|
||||
@@ -1479,7 +1520,7 @@ pub async fn delete_http_response(
|
||||
};
|
||||
}
|
||||
|
||||
let db = get_db(mgr).await;
|
||||
let db = get_db(window).await;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM http_responses
|
||||
@@ -1490,92 +1531,129 @@ pub async fn delete_http_response(
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(mgr, resp)
|
||||
emit_deleted_model(window, resp)
|
||||
}
|
||||
|
||||
pub async fn delete_all_grpc_connections(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
request_id: &str,
|
||||
) -> Result<(), sqlx::Error> {
|
||||
for r in list_grpc_connections(mgr, request_id).await? {
|
||||
delete_grpc_connection(mgr, &r.id).await?;
|
||||
for r in list_grpc_connections(window, request_id).await? {
|
||||
delete_grpc_connection(window, &r.id).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn delete_all_http_responses(
|
||||
mgr: &impl Manager<Wry>,
|
||||
window: &WebviewWindow,
|
||||
request_id: &str,
|
||||
) -> Result<(), sqlx::Error> {
|
||||
for r in list_responses(mgr, request_id, None).await? {
|
||||
delete_http_response(mgr, &r.id).await?;
|
||||
for r in list_responses(window, request_id, None).await? {
|
||||
delete_http_response(window, &r.id).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn generate_id(prefix: Option<&str>) -> String {
let id = Alphanumeric.sample_string(&mut rand::thread_rng(), 10);
match prefix {
None => id,
Some(p) => format!("{p}_{id}"),
}
pub fn generate_model_id(model: ModelType) -> String {
let id = generate_id();
format!("{}_{}", model.id_prefix(), id)
}

pub fn generate_id() -> String {
Alphanumeric.sample_string(&mut rand::thread_rng(), 10)
}
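// The change above replaces ad-hoc prefixes ("fl", "rq", "rp", "wk") passed to
// generate_id(Some(..)) with a ModelType argument. Below is a minimal, self-contained
// sketch of how ModelType::id_prefix() presumably maps variants to those prefixes; the
// real ModelType is defined elsewhere in the codebase and its variant list may differ,
// so treat the names here as assumptions drawn only from the prefixes visible in this diff.
enum ModelTypeSketch {
    TypeFolder,
    TypeHttpRequest,
    TypeHttpResponse,
    TypeWorkspace,
}

impl ModelTypeSketch {
    // Returns the short id prefix that used to be hard-coded at each call site.
    fn id_prefix(&self) -> &'static str {
        match self {
            ModelTypeSketch::TypeFolder => "fl",
            ModelTypeSketch::TypeHttpRequest => "rq",
            ModelTypeSketch::TypeHttpResponse => "rp",
            ModelTypeSketch::TypeWorkspace => "wk",
        }
    }
}

fn main() {
    // Combined with the 10-character random suffix from generate_id(), this yields
    // ids such as "fl_9K3mZqX2Lw" (the suffix here is made up for illustration).
    println!("{}_{}", ModelTypeSketch::TypeFolder.id_prefix(), "9K3mZqX2Lw");
}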
#[derive(Default, Debug, Deserialize, Serialize)]
|
||||
#[serde(default, rename_all = "camelCase")]
|
||||
pub struct WorkspaceExport {
|
||||
pub yaak_version: String,
|
||||
pub yaak_schema: i64,
|
||||
pub timestamp: NaiveDateTime,
|
||||
pub resources: WorkspaceExportResources,
|
||||
pub yaak_version: String,
|
||||
pub yaak_schema: i64,
|
||||
pub timestamp: NaiveDateTime,
|
||||
pub resources: WorkspaceExportResources,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Deserialize, Serialize)]
|
||||
#[serde(default, rename_all = "camelCase")]
|
||||
pub struct WorkspaceExportResources {
|
||||
pub workspaces: Vec<Workspace>,
|
||||
pub environments: Vec<Environment>,
|
||||
pub folders: Vec<Folder>,
|
||||
pub http_requests: Vec<HttpRequest>,
|
||||
pub grpc_requests: Vec<GrpcRequest>,
|
||||
pub workspaces: Vec<Workspace>,
|
||||
pub environments: Vec<Environment>,
|
||||
pub folders: Vec<Folder>,
|
||||
pub http_requests: Vec<HttpRequest>,
|
||||
pub grpc_requests: Vec<GrpcRequest>,
|
||||
}
|
||||
|
||||
pub async fn get_workspace_export_resources(
|
||||
app_handle: &AppHandle,
|
||||
workspace_id: &str,
|
||||
window: &WebviewWindow,
|
||||
workspace_ids: Vec<&str>,
|
||||
) -> WorkspaceExport {
|
||||
let workspace = get_workspace(app_handle, workspace_id)
|
||||
.await
|
||||
.expect("Failed to get workspace");
|
||||
return WorkspaceExport {
|
||||
let app_handle = window.app_handle();
|
||||
let mut data = WorkspaceExport {
|
||||
yaak_version: app_handle.package_info().version.clone().to_string(),
|
||||
yaak_schema: 2,
|
||||
timestamp: chrono::Utc::now().naive_utc(),
|
||||
resources: WorkspaceExportResources {
|
||||
workspaces: vec![workspace],
|
||||
environments: list_environments(app_handle, workspace_id)
|
||||
.await
|
||||
.expect("Failed to get environments"),
|
||||
folders: list_folders(app_handle, workspace_id)
|
||||
.await
|
||||
.expect("Failed to get folders"),
|
||||
http_requests: list_requests(app_handle, workspace_id)
|
||||
.await
|
||||
.expect("Failed to get requests"),
|
||||
grpc_requests: list_grpc_requests(app_handle, workspace_id)
|
||||
.await
|
||||
.expect("Failed to get grpc requests"),
|
||||
workspaces: Vec::new(),
|
||||
environments: Vec::new(),
|
||||
folders: Vec::new(),
|
||||
http_requests: Vec::new(),
|
||||
grpc_requests: Vec::new(),
|
||||
},
|
||||
};
|
||||
|
||||
for workspace_id in workspace_ids {
|
||||
data.resources.workspaces.push(
|
||||
get_workspace(window, workspace_id)
|
||||
.await
|
||||
.expect("Failed to get workspace"),
|
||||
);
|
||||
data.resources.environments.append(
|
||||
&mut list_environments(window, workspace_id)
|
||||
.await
|
||||
.expect("Failed to get environments"),
|
||||
);
|
||||
data.resources.folders.append(
|
||||
&mut list_folders(window, workspace_id)
|
||||
.await
|
||||
.expect("Failed to get folders"),
|
||||
);
|
||||
data.resources.http_requests.append(
|
||||
&mut list_http_requests(window, workspace_id)
|
||||
.await
|
||||
.expect("Failed to get http requests"),
|
||||
);
|
||||
data.resources.grpc_requests.append(
|
||||
&mut list_grpc_requests(window, workspace_id)
|
||||
.await
|
||||
.expect("Failed to get grpc requests"),
|
||||
);
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
fn emit_upserted_model<S: Serialize + Clone>(mgr: &impl Manager<Wry>, model: S) -> S {
mgr.emit_all("upserted_model", model.clone()).unwrap();
#[derive(Clone, Serialize)]
#[serde(default, rename_all = "camelCase")]
struct ModelPayload<M: Serialize + Clone> {
pub model: M,
pub window_label: String,
}

fn emit_upserted_model<M: Serialize + Clone>(window: &WebviewWindow, model: M) -> M {
let payload = ModelPayload {
model: model.clone(),
window_label: window.label().to_string(),
};

window.emit("upserted_model", payload).unwrap();
model
}

fn emit_deleted_model<S: Serialize + Clone, E>(mgr: &impl Manager<Wry>, model: S) -> Result<S, E> {
mgr.emit_all("deleted_model", model.clone()).unwrap();
fn emit_deleted_model<M: Serialize + Clone, E>(window: &WebviewWindow, model: M) -> Result<M, E> {
let payload = ModelPayload {
model: model.clone(),
window_label: window.label().to_string(),
};
window.emit("deleted_model", payload).unwrap();
Ok(model)
}
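// The new emit helpers above wrap each model in a ModelPayload that also records which
// window produced the change, so the frontend can tell its own writes apart from those
// made in other windows. The standalone sketch below only illustrates the JSON shape
// produced by #[serde(rename_all = "camelCase")]; the example model and window label
// are invented, and it assumes the serde and serde_json crates this code already uses.
use serde::Serialize;

#[derive(Clone, Serialize)]
#[serde(rename_all = "camelCase")]
struct PayloadSketch<M: Serialize + Clone> {
    model: M,
    window_label: String,
}

fn main() {
    let payload = PayloadSketch {
        model: serde_json::json!({ "id": "fl_9K3mZqX2Lw", "name": "My Folder" }),
        window_label: "main".to_string(),
    };
    // Prints: {"model":{"id":"fl_9K3mZqX2Lw","name":"My Folder"},"windowLabel":"main"}
    println!("{}", serde_json::to_string(&payload).unwrap());
}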
src-tauri/src/notifications.rs (new file, 99 lines)
@@ -0,0 +1,99 @@
|
||||
use std::time::SystemTime;
|
||||
|
||||
use chrono::{Duration, NaiveDateTime, Utc};
|
||||
use http::Method;
|
||||
use log::debug;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tauri::{AppHandle, Manager};
|
||||
|
||||
use crate::analytics::get_num_launches;
|
||||
use crate::models::{get_key_value_raw, set_key_value_raw};
|
||||
|
||||
// Check for updates every hour
|
||||
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;
|
||||
|
||||
const KV_NAMESPACE: &str = "notifications";
|
||||
const KV_KEY: &str = "seen";
|
||||
|
||||
// Create updater struct
|
||||
pub struct YaakNotifier {
|
||||
last_check: SystemTime,
|
||||
}
|
||||
|
||||
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, Default)]
|
||||
#[serde(default, rename_all = "camelCase")]
|
||||
pub struct YaakNotification {
|
||||
timestamp: NaiveDateTime,
|
||||
id: String,
|
||||
message: String,
|
||||
action: Option<YaakNotificationAction>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
#[serde(default, rename_all = "camelCase")]
|
||||
pub struct YaakNotificationAction {
|
||||
label: String,
|
||||
url: String,
|
||||
}
|
||||
|
||||
impl YaakNotifier {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
last_check: SystemTime::UNIX_EPOCH,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn seen(&mut self, app: &AppHandle, id: &str) -> Result<(), String> {
|
||||
let mut seen = get_kv(app).await?;
|
||||
seen.push(id.to_string());
|
||||
debug!("Marked notification as seen {}", id);
|
||||
let seen_json = serde_json::to_string(&seen).map_err(|e| e.to_string())?;
|
||||
set_key_value_raw(app, KV_NAMESPACE, KV_KEY, seen_json.as_str()).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn check(&mut self, app: &AppHandle) -> Result<(), String> {
|
||||
let ignore_check = self.last_check.elapsed().unwrap().as_secs() < MAX_UPDATE_CHECK_SECONDS;
|
||||
|
||||
if ignore_check {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
self.last_check = SystemTime::now();
|
||||
|
||||
let num_launches = get_num_launches(app).await;
|
||||
let info = app.package_info().clone();
|
||||
let req = reqwest::Client::default()
|
||||
.request(Method::GET, "https://notify.yaak.app/notifications")
|
||||
.query(&[
|
||||
("version", info.version.to_string()),
|
||||
("launches", num_launches.to_string()),
|
||||
]);
|
||||
let resp = req.send().await.map_err(|e| e.to_string())?;
|
||||
let notification = resp
|
||||
.json::<YaakNotification>()
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
let age = notification
|
||||
.timestamp
|
||||
.signed_duration_since(Utc::now().naive_utc());
|
||||
let seen = get_kv(app).await?;
|
||||
if seen.contains(&notification.id) || (age > Duration::days(1)) {
|
||||
debug!("Already seen notification {}", notification.id);
|
||||
return Ok(());
|
||||
}
|
||||
debug!("Got notification {:?}", notification);
|
||||
|
||||
let _ = app.emit("notification", notification.clone());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_kv(app: &AppHandle) -> Result<Vec<String>, String> {
match get_key_value_raw(app, "notifications", "seen").await {
None => Ok(Vec::new()),
Some(v) => serde_json::from_str(&v.value).map_err(|e| e.to_string()),
}
}
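// How YaakNotifier is driven is not part of this hunk; the call site below is an
// assumed sketch, not repository code. It presumes a YaakNotifier created once at
// startup, with check() awaited periodically using the AppHandle and seen() invoked
// when the frontend acknowledges a notification.
async fn poll_notifications_sketch(app: &tauri::AppHandle) {
    let mut notifier = YaakNotifier::new();
    // Emits a "notification" event to the frontend when the notify.yaak.app endpoint
    // returns a recent notification that has not been marked as seen yet.
    if let Err(e) = notifier.check(app).await {
        log::warn!("Failed to check for notifications: {}", e);
    }
}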
@@ -1,18 +1,18 @@
|
||||
use std::fs;
|
||||
use std::rc::Rc;
|
||||
|
||||
use boa_engine::{
|
||||
Context, js_string, JsNativeError, JsValue, Module, module::SimpleModuleLoader,
|
||||
property::Attribute, Source,
|
||||
};
|
||||
use boa_engine::builtins::promise::PromiseState;
|
||||
use boa_engine::module::ModuleLoader;
|
||||
use boa_engine::{
|
||||
js_string, module::SimpleModuleLoader, property::Attribute, Context, JsNativeError, JsValue,
|
||||
Module, Source,
|
||||
};
|
||||
use boa_runtime::Console;
|
||||
use log::{debug, error};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use tauri::AppHandle;
|
||||
use tauri::path::BaseDirectory;
|
||||
use tauri::{AppHandle, Manager};
|
||||
|
||||
use crate::models::{WorkspaceExportResources};
|
||||
use crate::models::{HttpRequest, WorkspaceExportResources};
|
||||
|
||||
#[derive(Default, Debug, Deserialize, Serialize)]
|
||||
pub struct FilterResult {
|
||||
@@ -47,14 +47,28 @@ pub async fn run_plugin_filter(
|
||||
Some(resources)
|
||||
}
|
||||
|
||||
pub fn run_plugin_export_curl(
|
||||
app_handle: &AppHandle,
|
||||
request: &HttpRequest,
|
||||
) -> Result<String, String> {
|
||||
let mut context = Context::default();
|
||||
let request_json = serde_json::to_value(request).map_err(|e| e.to_string())?;
|
||||
let result_json = run_plugin(
|
||||
app_handle,
|
||||
"exporter-curl",
|
||||
"pluginHookExport",
|
||||
&[JsValue::from_json(&request_json, &mut context).map_err(|e| e.to_string())?],
|
||||
);
|
||||
|
||||
let resources: String = serde_json::from_value(result_json).map_err(|e| e.to_string())?;
|
||||
Ok(resources)
|
||||
}
|
||||
|
||||
pub async fn run_plugin_import(
|
||||
app_handle: &AppHandle,
|
||||
plugin_name: &str,
|
||||
file_path: &str,
|
||||
) -> Option<ImportResult> {
|
||||
let file = fs::read_to_string(file_path)
|
||||
.unwrap_or_else(|_| panic!("Unable to read file {}", file_path));
|
||||
let file_contents = file.as_str();
|
||||
file_contents: &str,
|
||||
) -> Result<Option<ImportResult>, String> {
|
||||
let result_json = run_plugin(
|
||||
app_handle,
|
||||
plugin_name,
|
||||
@@ -63,12 +77,11 @@ pub async fn run_plugin_import(
|
||||
);
|
||||
|
||||
if result_json.is_null() {
|
||||
return None;
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let resources: ImportResult =
|
||||
serde_json::from_value(result_json).expect("failed to parse result json");
|
||||
Some(resources)
|
||||
let resources: ImportResult = serde_json::from_value(result_json).map_err(|e| e.to_string())?;
|
||||
Ok(Some(resources))
|
||||
}
|
||||
|
||||
fn run_plugin(
|
||||
@@ -78,8 +91,8 @@ fn run_plugin(
|
||||
js_args: &[JsValue],
|
||||
) -> serde_json::Value {
|
||||
let plugin_dir = app_handle
|
||||
.path_resolver()
|
||||
.resolve_resource("plugins")
|
||||
.path()
|
||||
.resolve("plugins", BaseDirectory::Resource)
|
||||
.expect("failed to resolve plugin directory resource")
|
||||
.join(plugin_name);
|
||||
let plugin_index_file = plugin_dir.join("index.mjs");
|
||||
@@ -89,12 +102,9 @@ fn run_plugin(
|
||||
plugin_dir, plugin_index_file
|
||||
);
|
||||
|
||||
// Module loader for the specific plugin
|
||||
let loader = &SimpleModuleLoader::new(plugin_dir).expect("failed to create module loader");
|
||||
let dyn_loader: &dyn ModuleLoader = loader;
|
||||
|
||||
let loader = Rc::new(SimpleModuleLoader::new(plugin_dir).unwrap());
|
||||
let context = &mut Context::builder()
|
||||
.module_loader(dyn_loader)
|
||||
.module_loader(loader.clone())
|
||||
.build()
|
||||
.expect("failed to create context");
|
||||
|
||||
@@ -108,15 +118,13 @@ fn run_plugin(
|
||||
// Insert parsed entrypoint into the module loader
|
||||
loader.insert(plugin_index_file, module.clone());
|
||||
|
||||
let promise_result = module
|
||||
.load_link_evaluate(context)
|
||||
.expect("failed to evaluate module");
|
||||
let promise_result = module.load_link_evaluate(context);
|
||||
|
||||
// Very important to push forward the job queue after queueing promises.
|
||||
context.run_jobs();
|
||||
|
||||
// Checking if the final promise didn't return an error.
|
||||
match promise_result.state().expect("failed to get promise state") {
|
||||
match promise_result.state() {
|
||||
PromiseState::Pending => {
|
||||
panic!("Promise was pending");
|
||||
}
|
||||
|
||||
@@ -1,6 +1,67 @@
|
||||
use crate::models::{Environment, Workspace};
|
||||
use std::collections::HashMap;
|
||||
use tauri::regex::Regex;
|
||||
|
||||
use regex::Regex;
|
||||
use sqlx::types::{Json, JsonValue};
|
||||
|
||||
use crate::models::{Environment, HttpRequest, HttpRequestHeader, HttpUrlParameter, Workspace};
|
||||
|
||||
pub fn render_request(r: &HttpRequest, w: &Workspace, e: Option<&Environment>) -> HttpRequest {
|
||||
let r = r.clone();
|
||||
HttpRequest {
|
||||
url: render(r.url.as_str(), w, e),
|
||||
url_parameters: Json(
|
||||
r.url_parameters
|
||||
.0
|
||||
.iter()
|
||||
.map(|p| HttpUrlParameter {
|
||||
enabled: p.enabled,
|
||||
name: render(p.name.as_str(), w, e),
|
||||
value: render(p.value.as_str(), w, e),
|
||||
})
|
||||
.collect::<Vec<HttpUrlParameter>>(),
|
||||
),
|
||||
headers: Json(
|
||||
r.headers
|
||||
.0
|
||||
.iter()
|
||||
.map(|p| HttpRequestHeader {
|
||||
enabled: p.enabled,
|
||||
name: render(p.name.as_str(), w, e),
|
||||
value: render(p.value.as_str(), w, e),
|
||||
})
|
||||
.collect::<Vec<HttpRequestHeader>>(),
|
||||
),
|
||||
body: Json(
|
||||
r.body
|
||||
.0
|
||||
.iter()
|
||||
.map(|(k, v)| {
|
||||
let v = if v.is_string() {
|
||||
render(v.as_str().unwrap(), w, e)
|
||||
} else {
|
||||
v.to_string()
|
||||
};
|
||||
(render(k, w, e), JsonValue::from(v))
|
||||
})
|
||||
.collect::<HashMap<String, JsonValue>>(),
|
||||
),
|
||||
authentication: Json(
|
||||
r.authentication
|
||||
.0
|
||||
.iter()
|
||||
.map(|(k, v)| {
|
||||
let v = if v.is_string() {
|
||||
render(v.as_str().unwrap(), w, e)
|
||||
} else {
|
||||
v.to_string()
|
||||
};
|
||||
(render(k, w, e), JsonValue::from(v))
|
||||
})
|
||||
.collect::<HashMap<String, JsonValue>>(),
|
||||
),
|
||||
..r
|
||||
}
|
||||
}
|
||||
|
||||
pub fn render(template: &str, workspace: &Workspace, environment: Option<&Environment>) -> String {
|
||||
let mut map = HashMap::new();
|
||||
@@ -24,7 +85,7 @@ pub fn render(template: &str, workspace: &Workspace, environment: Option<&Enviro
|
||||
|
||||
Regex::new(r"\$\{\[\s*([^]\s]+)\s*]}")
.expect("Failed to create regex")
.replace_all(template, |caps: &tauri::regex::Captures| {
.replace_all(template, |caps: &regex::Captures| {
let key = caps.get(1).unwrap().as_str();
map.get(key).unwrap_or(&"")
})
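// A self-contained illustration of the ${[ name ]} template syntax that render()
// resolves with the regex above. It is not the function itself: the real code builds
// the variable map from workspace and environment variables, whereas the map and the
// example URL here are invented for demonstration. It uses the same regex crate as
// the updated import at the top of this file.
use regex::Regex;
use std::collections::HashMap;

fn main() {
    let mut map = HashMap::new();
    map.insert("base_url", "https://api.example.com");

    let re = Regex::new(r"\$\{\[\s*([^]\s]+)\s*]}").expect("Failed to create regex");
    let rendered = re.replace_all("${[ base_url ]}/users", |caps: &regex::Captures| {
        // Unknown variables render as an empty string, matching unwrap_or(&"") above.
        map.get(caps.get(1).unwrap().as_str()).unwrap_or(&"").to_string()
    });
    assert_eq!(rendered, "https://api.example.com/users");
    println!("{}", rendered);
}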
src-tauri/src/tauri_plugin_mac_window.rs (new file, 450 lines)
@@ -0,0 +1,450 @@
|
||||
use hex_color::HexColor;
|
||||
use objc::{msg_send, sel, sel_impl};
|
||||
use rand::{distributions::Alphanumeric, Rng};
|
||||
use tauri::{
|
||||
plugin::{Builder, TauriPlugin},
|
||||
Manager, Runtime, Window, WindowEvent,
|
||||
};
|
||||
|
||||
const WINDOW_CONTROL_PAD_X: f64 = 13.0;
|
||||
const WINDOW_CONTROL_PAD_Y: f64 = 18.0;
|
||||
|
||||
struct UnsafeWindowHandle(*mut std::ffi::c_void);
|
||||
|
||||
unsafe impl Send for UnsafeWindowHandle {}
|
||||
|
||||
unsafe impl Sync for UnsafeWindowHandle {}
|
||||
|
||||
pub fn init<R: Runtime>() -> TauriPlugin<R> {
|
||||
Builder::new("mac_window")
|
||||
.on_window_ready(|window| {
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
setup_traffic_light_positioner(&window);
|
||||
let h = window.app_handle();
|
||||
|
||||
let window_for_theme = window.clone();
|
||||
let id1 = h.listen("yaak_bg_changed", move |ev| {
|
||||
let payload = serde_json::from_str::<&str>(ev.payload()).unwrap().trim();
|
||||
let color = HexColor::parse_rgb(payload).unwrap();
|
||||
update_window_theme(window_for_theme.clone(), color);
|
||||
});
|
||||
|
||||
let window_for_title = window.clone();
|
||||
let id2 = h.listen("yaak_title_changed", move |ev| {
|
||||
let payload = serde_json::from_str::<&str>(ev.payload()).unwrap().trim();
|
||||
update_window_title(window_for_title.clone(), payload.to_string());
|
||||
});
|
||||
|
||||
let h = h.clone();
|
||||
window.on_window_event(move |e| {
|
||||
match e {
|
||||
WindowEvent::Destroyed => {
|
||||
h.unlisten(id1);
|
||||
h.unlisten(id2);
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
});
|
||||
}
return;
})
.build()
}
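// The main.rs wiring is not shown in this hunk, so the registration below is an
// assumed sketch rather than repository code: the plugin is presumably added to the
// Tauri builder like any other, so on_window_ready runs for each window and applies
// the traffic-light positioning and the theme/title listeners above.
fn build_app_sketch() -> tauri::Builder<tauri::Wry> {
    tauri::Builder::default().plugin(crate::tauri_plugin_mac_window::init())
}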
#[cfg(target_os = "macos")]
|
||||
fn update_window_title<R: Runtime>(window: Window<R>, title: String) {
|
||||
use cocoa::{appkit::NSWindow, base::nil, foundation::NSString};
|
||||
|
||||
unsafe {
|
||||
let window_handle = UnsafeWindowHandle(window.ns_window().unwrap());
|
||||
|
||||
let window2 = window.clone();
|
||||
let label = window.label().to_string();
|
||||
let _ = window.run_on_main_thread(move || {
|
||||
let win_title = NSString::alloc(nil).init_str(&title);
|
||||
let handle = window_handle;
|
||||
NSWindow::setTitle_(handle.0 as cocoa::base::id, win_title);
|
||||
position_traffic_lights(
|
||||
UnsafeWindowHandle(window2.ns_window().expect("Failed to create window handle")),
|
||||
WINDOW_CONTROL_PAD_X,
|
||||
WINDOW_CONTROL_PAD_Y,
|
||||
label,
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
fn update_window_theme<R: Runtime>(window: Window<R>, color: HexColor) {
|
||||
use cocoa::appkit::{
|
||||
NSAppearance, NSAppearanceNameVibrantDark, NSAppearanceNameVibrantLight, NSWindow,
|
||||
};
|
||||
|
||||
let brightness = (color.r as u64 + color.g as u64 + color.b as u64) / 3;
|
||||
let label = window.label().to_string();
|
||||
|
||||
unsafe {
|
||||
let window_handle = UnsafeWindowHandle(window.ns_window().unwrap());
|
||||
let window2 = window.clone();
|
||||
let _ = window.run_on_main_thread(move || {
|
||||
let handle = window_handle;
|
||||
|
||||
let selected_appearance = if brightness >= 128 {
|
||||
NSAppearance(NSAppearanceNameVibrantLight)
|
||||
} else {
|
||||
NSAppearance(NSAppearanceNameVibrantDark)
|
||||
};
|
||||
|
||||
NSWindow::setAppearance(handle.0 as cocoa::base::id, selected_appearance);
|
||||
position_traffic_lights(
|
||||
UnsafeWindowHandle(window2.ns_window().expect("Failed to create window handle")),
|
||||
WINDOW_CONTROL_PAD_X,
|
||||
WINDOW_CONTROL_PAD_Y,
|
||||
label,
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
fn position_traffic_lights(ns_window_handle: UnsafeWindowHandle, x: f64, y: f64, label: String) {
|
||||
if label.starts_with("nested_") {
|
||||
return;
|
||||
}
|
||||
|
||||
use cocoa::appkit::{NSView, NSWindow, NSWindowButton};
|
||||
use cocoa::foundation::NSRect;
|
||||
|
||||
let ns_window = ns_window_handle.0 as cocoa::base::id;
|
||||
unsafe {
|
||||
let close = ns_window.standardWindowButton_(NSWindowButton::NSWindowCloseButton);
|
||||
let miniaturize =
|
||||
ns_window.standardWindowButton_(NSWindowButton::NSWindowMiniaturizeButton);
|
||||
let zoom = ns_window.standardWindowButton_(NSWindowButton::NSWindowZoomButton);
|
||||
|
||||
let title_bar_container_view = close.superview().superview();
|
||||
|
||||
let close_rect: NSRect = msg_send![close, frame];
|
||||
let button_height = close_rect.size.height;
|
||||
|
||||
let title_bar_frame_height = button_height + y;
|
||||
let mut title_bar_rect = NSView::frame(title_bar_container_view);
|
||||
title_bar_rect.size.height = title_bar_frame_height;
|
||||
title_bar_rect.origin.y = NSView::frame(ns_window).size.height - title_bar_frame_height;
|
||||
let _: () = msg_send![title_bar_container_view, setFrame: title_bar_rect];
|
||||
|
||||
let window_buttons = vec![close, miniaturize, zoom];
|
||||
let space_between = NSView::frame(miniaturize).origin.x - NSView::frame(close).origin.x;
|
||||
|
||||
for (i, button) in window_buttons.into_iter().enumerate() {
|
||||
let mut rect: NSRect = NSView::frame(button);
|
||||
rect.origin.x = x + (i as f64 * space_between);
|
||||
button.setFrameOrigin(rect.origin);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
#[derive(Debug)]
|
||||
struct WindowState<R: Runtime> {
|
||||
window: Window<R>,
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
pub fn setup_traffic_light_positioner<R: Runtime>(window: &Window<R>) {
|
||||
use cocoa::appkit::NSWindow;
|
||||
use cocoa::base::{id, BOOL};
|
||||
use cocoa::delegate;
|
||||
use cocoa::foundation::NSUInteger;
|
||||
use objc::runtime::{Object, Sel};
|
||||
use std::ffi::c_void;
|
||||
|
||||
position_traffic_lights(
|
||||
UnsafeWindowHandle(window.ns_window().expect("Failed to create window handle")),
|
||||
WINDOW_CONTROL_PAD_X,
|
||||
WINDOW_CONTROL_PAD_Y,
|
||||
window.label().to_string(),
|
||||
);
|
||||
|
||||
// Ensure they stay in place while resizing the window.
|
||||
fn with_window_state<R: Runtime, F: FnOnce(&mut WindowState<R>) -> T, T>(
|
||||
this: &Object,
|
||||
func: F,
|
||||
) {
|
||||
let ptr = unsafe {
|
||||
let x: *mut c_void = *this.get_ivar("app_box");
|
||||
&mut *(x as *mut WindowState<R>)
|
||||
};
|
||||
func(ptr);
|
||||
}
|
||||
|
||||
unsafe {
|
||||
let ns_win = window
|
||||
.ns_window()
|
||||
.expect("NS Window should exist to mount traffic light delegate.")
|
||||
as id;
|
||||
|
||||
let current_delegate: id = ns_win.delegate();
|
||||
|
||||
extern "C" fn on_window_should_close(this: &Object, _cmd: Sel, sender: id) -> BOOL {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
msg_send![super_del, windowShouldClose: sender]
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_will_close(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowWillClose: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_resize<R: Runtime>(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
with_window_state(&*this, |state: &mut WindowState<R>| {
|
||||
let id = state
|
||||
.window
|
||||
.ns_window()
|
||||
.expect("NS window should exist on state to handle resize")
|
||||
as id;
|
||||
|
||||
position_traffic_lights(
|
||||
UnsafeWindowHandle(id as *mut c_void),
|
||||
WINDOW_CONTROL_PAD_X,
|
||||
WINDOW_CONTROL_PAD_Y,
|
||||
state.window.label().to_string(),
|
||||
);
|
||||
});
|
||||
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidResize: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_move(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidMove: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_change_backing_properties(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidChangeBackingProperties: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_become_key(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidBecomeKey: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_resign_key(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidResignKey: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_dragging_entered(this: &Object, _cmd: Sel, notification: id) -> BOOL {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
msg_send![super_del, draggingEntered: notification]
|
||||
}
|
||||
}
|
||||
extern "C" fn on_prepare_for_drag_operation(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) -> BOOL {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
msg_send![super_del, prepareForDragOperation: notification]
|
||||
}
|
||||
}
|
||||
extern "C" fn on_perform_drag_operation(this: &Object, _cmd: Sel, sender: id) -> BOOL {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
msg_send![super_del, performDragOperation: sender]
|
||||
}
|
||||
}
|
||||
extern "C" fn on_conclude_drag_operation(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, concludeDragOperation: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_dragging_exited(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, draggingExited: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_will_use_full_screen_presentation_options(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
window: id,
|
||||
proposed_options: NSUInteger,
|
||||
) -> NSUInteger {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
msg_send![super_del, window: window willUseFullScreenPresentationOptions: proposed_options]
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_enter_full_screen<R: Runtime>(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
with_window_state(&*this, |state: &mut WindowState<R>| {
|
||||
state
|
||||
.window
|
||||
.emit("did-enter-fullscreen", ())
|
||||
.expect("Failed to emit event");
|
||||
});
|
||||
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidEnterFullScreen: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_will_enter_full_screen<R: Runtime>(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
with_window_state(&*this, |state: &mut WindowState<R>| {
|
||||
state
|
||||
.window
|
||||
.emit("will-enter-fullscreen", ())
|
||||
.expect("Failed to emit event");
|
||||
});
|
||||
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowWillEnterFullScreen: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_exit_full_screen<R: Runtime>(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
with_window_state(&*this, |state: &mut WindowState<R>| {
|
||||
state
|
||||
.window
|
||||
.emit("did-exit-fullscreen", ())
|
||||
.expect("Failed to emit event");
|
||||
|
||||
let id = state.window.ns_window().expect("Failed to emit event") as id;
|
||||
position_traffic_lights(
|
||||
UnsafeWindowHandle(id as *mut c_void),
|
||||
WINDOW_CONTROL_PAD_X,
|
||||
WINDOW_CONTROL_PAD_Y,
|
||||
state.window.label().to_string(),
|
||||
);
|
||||
});
|
||||
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidExitFullScreen: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_will_exit_full_screen<R: Runtime>(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
with_window_state(&*this, |state: &mut WindowState<R>| {
|
||||
state
|
||||
.window
|
||||
.emit("will-exit-fullscreen", ())
|
||||
.expect("Failed to emit event");
|
||||
});
|
||||
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowWillExitFullScreen: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_fail_to_enter_full_screen(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
window: id,
|
||||
) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidFailToEnterFullScreen: window];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_effective_appearance_did_change(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, effectiveAppearanceDidChange: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_effective_appearance_did_changed_on_main_thread(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![
|
||||
super_del,
|
||||
effectiveAppearanceDidChangedOnMainThread: notification
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
// Are we de-allocating this properly? (I miss safe Rust :( )
|
||||
let window_label = window.label().to_string();
|
||||
|
||||
let app_state = WindowState {
|
||||
window: window.clone(),
|
||||
};
|
||||
let app_box = Box::into_raw(Box::new(app_state)) as *mut c_void;
|
||||
let random_str: String = rand::thread_rng()
|
||||
.sample_iter(&Alphanumeric)
|
||||
.take(20)
|
||||
.map(char::from)
|
||||
.collect();
|
||||
|
||||
// We need to ensure we have a unique delegate name, otherwise we will panic while trying to create a duplicate
|
||||
// delegate with the same name.
|
||||
let delegate_name = format!("windowDelegate_{}_{}", window_label, random_str);
|
||||
|
||||
ns_win.setDelegate_(delegate!(&delegate_name, {
|
||||
window: id = ns_win,
|
||||
app_box: *mut c_void = app_box,
|
||||
toolbar: id = cocoa::base::nil,
|
||||
super_delegate: id = current_delegate,
|
||||
(windowShouldClose:) => on_window_should_close as extern fn(&Object, Sel, id) -> BOOL,
|
||||
(windowWillClose:) => on_window_will_close as extern fn(&Object, Sel, id),
|
||||
(windowDidResize:) => on_window_did_resize::<R> as extern fn(&Object, Sel, id),
|
||||
(windowDidMove:) => on_window_did_move as extern fn(&Object, Sel, id),
|
||||
(windowDidChangeBackingProperties:) => on_window_did_change_backing_properties as extern fn(&Object, Sel, id),
|
||||
(windowDidBecomeKey:) => on_window_did_become_key as extern fn(&Object, Sel, id),
|
||||
(windowDidResignKey:) => on_window_did_resign_key as extern fn(&Object, Sel, id),
|
||||
(draggingEntered:) => on_dragging_entered as extern fn(&Object, Sel, id) -> BOOL,
|
||||
(prepareForDragOperation:) => on_prepare_for_drag_operation as extern fn(&Object, Sel, id) -> BOOL,
|
||||
(performDragOperation:) => on_perform_drag_operation as extern fn(&Object, Sel, id) -> BOOL,
|
||||
(concludeDragOperation:) => on_conclude_drag_operation as extern fn(&Object, Sel, id),
|
||||
(draggingExited:) => on_dragging_exited as extern fn(&Object, Sel, id),
|
||||
(window:willUseFullScreenPresentationOptions:) => on_window_will_use_full_screen_presentation_options as extern fn(&Object, Sel, id, NSUInteger) -> NSUInteger,
|
||||
(windowDidEnterFullScreen:) => on_window_did_enter_full_screen::<R> as extern fn(&Object, Sel, id),
|
||||
(windowWillEnterFullScreen:) => on_window_will_enter_full_screen::<R> as extern fn(&Object, Sel, id),
|
||||
(windowDidExitFullScreen:) => on_window_did_exit_full_screen::<R> as extern fn(&Object, Sel, id),
|
||||
(windowWillExitFullScreen:) => on_window_will_exit_full_screen::<R> as extern fn(&Object, Sel, id),
|
||||
(windowDidFailToEnterFullScreen:) => on_window_did_fail_to_enter_full_screen as extern fn(&Object, Sel, id),
|
||||
(effectiveAppearanceDidChange:) => on_effective_appearance_did_change as extern fn(&Object, Sel, id),
|
||||
(effectiveAppearanceDidChangedOnMainThread:) => on_effective_appearance_did_changed_on_main_thread as extern fn(&Object, Sel, id)
|
||||
}))
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,10 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::time::SystemTime;
|
||||
|
||||
use log::info;
|
||||
use tauri::api::dialog;
|
||||
use tauri::{updater, AppHandle, Window};
|
||||
use tauri::AppHandle;
|
||||
use tauri_plugin_dialog::DialogExt;
|
||||
use tauri_plugin_updater::UpdaterExt;
|
||||
|
||||
use crate::is_dev;
|
||||
|
||||
@@ -17,6 +19,28 @@ pub struct YaakUpdater {
|
||||
pub enum UpdateMode {
|
||||
Stable,
|
||||
Beta,
|
||||
Alpha,
|
||||
}
|
||||
|
||||
impl Display for UpdateMode {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let s = match self {
|
||||
UpdateMode::Stable => "stable",
|
||||
UpdateMode::Beta => "beta",
|
||||
UpdateMode::Alpha => "alpha",
|
||||
};
|
||||
write!(f, "{}", s)
|
||||
}
|
||||
}
|
||||
|
||||
impl UpdateMode {
|
||||
pub fn new(mode: &str) -> UpdateMode {
|
||||
match mode {
|
||||
"beta" => UpdateMode::Beta,
|
||||
"alpha" => UpdateMode::Alpha,
|
||||
_ => UpdateMode::Stable,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl YaakUpdater {
|
||||
@@ -29,64 +53,57 @@ impl YaakUpdater {
|
||||
&mut self,
|
||||
app_handle: &AppHandle,
|
||||
mode: UpdateMode,
|
||||
) -> Result<bool, updater::Error> {
|
||||
) -> Result<bool, tauri_plugin_updater::Error> {
|
||||
self.last_update_check = SystemTime::now();
|
||||
|
||||
let update_mode = get_update_mode_str(mode);
|
||||
let enabled = !is_dev();
|
||||
info!(
|
||||
"Checking for updates mode={} enabled={}",
|
||||
update_mode, enabled
|
||||
);
|
||||
info!("Checking for updates mode={}", mode);
|
||||
|
||||
if !enabled {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
match app_handle
|
||||
.updater()
|
||||
.header("X-Update-Mode", update_mode)?
|
||||
let update_check_result = app_handle
|
||||
.updater_builder()
|
||||
.header("X-Update-Mode", mode.to_string())?
|
||||
.build()?
|
||||
.check()
|
||||
.await
|
||||
{
|
||||
Ok(update) => {
|
||||
.await;
|
||||
|
||||
match update_check_result {
|
||||
Ok(Some(update)) => {
|
||||
let h = app_handle.clone();
|
||||
dialog::ask(
|
||||
None::<&Window>,
|
||||
"Update Available",
|
||||
format!(
|
||||
app_handle
|
||||
.dialog()
|
||||
.message(format!(
|
||||
"{} is available. Would you like to download and install it now?",
|
||||
update.latest_version()
|
||||
),
|
||||
|confirmed| {
|
||||
update.version
|
||||
))
|
||||
.ok_button_label("Download")
|
||||
.cancel_button_label("Later")
|
||||
.title("Update Available")
|
||||
.show(|confirmed| {
|
||||
if !confirmed {
|
||||
return;
|
||||
}
|
||||
tauri::async_runtime::spawn(async move {
|
||||
match update.download_and_install().await {
|
||||
match update.download_and_install(|_, _| {}, || {}).await {
|
||||
Ok(_) => {
|
||||
if dialog::blocking::ask(
|
||||
None::<&Window>,
|
||||
"Update Installed",
|
||||
"Would you like to restart the app?",
|
||||
) {
|
||||
if h.dialog()
|
||||
.message("Would you like to restart the app?")
|
||||
.title("Update Installed")
|
||||
.ok_button_label("Restart")
|
||||
.cancel_button_label("Later")
|
||||
.blocking_show()
|
||||
{
|
||||
h.restart();
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
dialog::message(
|
||||
None::<&Window>,
|
||||
"Update Failed",
|
||||
format!("The update failed to install: {}", e),
|
||||
);
|
||||
h.dialog()
|
||||
.message(format!("The update failed to install: {}", e));
|
||||
}
|
||||
}
|
||||
});
|
||||
},
|
||||
);
|
||||
});
|
||||
Ok(true)
|
||||
}
|
||||
Err(updater::Error::UpToDate) => Ok(false),
|
||||
Ok(None) => Ok(false),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -94,27 +111,18 @@ impl YaakUpdater {
|
||||
&mut self,
|
||||
app_handle: &AppHandle,
|
||||
mode: UpdateMode,
|
||||
) -> Result<bool, updater::Error> {
|
||||
) -> Result<bool, tauri_plugin_updater::Error> {
|
||||
let ignore_check =
|
||||
self.last_update_check.elapsed().unwrap().as_secs() < MAX_UPDATE_CHECK_SECONDS;
|
||||
if ignore_check {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
// Don't check if dev
|
||||
if is_dev() {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
self.force_check(app_handle, mode).await
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_mode_from_str(mode: &str) -> UpdateMode {
|
||||
match mode {
|
||||
"beta" => UpdateMode::Beta,
|
||||
_ => UpdateMode::Stable,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_update_mode_str(mode: UpdateMode) -> &'static str {
|
||||
match mode {
|
||||
UpdateMode::Stable => "stable",
|
||||
UpdateMode::Beta => "beta",
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
use tauri::{Runtime, Window};
|
||||
|
||||
const TRAFFIC_LIGHT_OFFSET_X: f64 = 13.0;
|
||||
const TRAFFIC_LIGHT_OFFSET_Y: f64 = 18.0;
|
||||
|
||||
pub trait TrafficLightWindowExt {
|
||||
fn position_traffic_lights(&self);
|
||||
}
|
||||
|
||||
impl<R: Runtime> TrafficLightWindowExt for Window<R> {
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
fn position_traffic_lights(&self) {
|
||||
// No-op on other platforms
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
fn position_traffic_lights(&self) {
|
||||
use cocoa::appkit::{NSView, NSWindow, NSWindowButton};
|
||||
use cocoa::foundation::NSRect;
|
||||
|
||||
let window = self.ns_window().unwrap() as cocoa::base::id;
|
||||
|
||||
let x = TRAFFIC_LIGHT_OFFSET_X;
|
||||
let y = TRAFFIC_LIGHT_OFFSET_Y;
|
||||
|
||||
unsafe {
|
||||
let close = window.standardWindowButton_(NSWindowButton::NSWindowCloseButton);
|
||||
let miniaturize =
|
||||
window.standardWindowButton_(NSWindowButton::NSWindowMiniaturizeButton);
|
||||
let zoom = window.standardWindowButton_(NSWindowButton::NSWindowZoomButton);
|
||||
|
||||
let title_bar_container_view = close.superview().superview();
|
||||
|
||||
let close_rect: NSRect = msg_send![close, frame];
|
||||
let button_height = close_rect.size.height;
|
||||
|
||||
let title_bar_frame_height = button_height + y;
|
||||
let mut title_bar_rect = NSView::frame(title_bar_container_view);
|
||||
title_bar_rect.size.height = title_bar_frame_height;
|
||||
title_bar_rect.origin.y = NSView::frame(window).size.height - title_bar_frame_height;
|
||||
let _: () = msg_send![title_bar_container_view, setFrame: title_bar_rect];
|
||||
|
||||
let window_buttons = vec![close, miniaturize, zoom];
|
||||
let space_between = NSView::frame(miniaturize).origin.x - NSView::frame(close).origin.x;
|
||||
|
||||
for (i, button) in window_buttons.into_iter().enumerate() {
|
||||
let mut rect: NSRect = NSView::frame(button);
|
||||
rect.origin.x = x + (i as f64 * space_between);
|
||||
button.setFrameOrigin(rect.origin);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,140 +1,145 @@
|
||||
use tauri::{AboutMetadata, CustomMenuItem, Menu, MenuItem, Submenu};
|
||||
use crate::is_dev;
|
||||
use tauri::menu::{
|
||||
AboutMetadata, Menu, MenuItemBuilder, PredefinedMenuItem, Submenu, HELP_SUBMENU_ID,
|
||||
WINDOW_SUBMENU_ID,
|
||||
};
|
||||
pub use tauri::AppHandle;
|
||||
use tauri::Wry;
|
||||
|
||||
pub fn os_default(#[allow(unused)] app_name: &str) -> Menu {
|
||||
let mut menu = Menu::new();
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
menu = menu.add_submenu(Submenu::new(
|
||||
app_name,
|
||||
Menu::new()
|
||||
.add_native_item(MenuItem::About(
|
||||
app_name.to_string(),
|
||||
AboutMetadata::default(),
|
||||
))
|
||||
.add_native_item(MenuItem::Separator)
|
||||
.add_item(
|
||||
CustomMenuItem::new("toggle_settings".to_string(), "Settings")
|
||||
.accelerator("CmdOrCtrl+,"),
|
||||
)
|
||||
.add_native_item(MenuItem::Separator)
|
||||
.add_native_item(MenuItem::Services)
|
||||
.add_native_item(MenuItem::Separator)
|
||||
.add_native_item(MenuItem::Hide)
|
||||
.add_native_item(MenuItem::HideOthers)
|
||||
.add_native_item(MenuItem::ShowAll)
|
||||
.add_native_item(MenuItem::Separator)
|
||||
.add_native_item(MenuItem::Quit),
|
||||
));
|
||||
}
|
||||
pub fn app_menu(app_handle: &AppHandle) -> tauri::Result<Menu<Wry>> {
|
||||
let pkg_info = app_handle.package_info();
|
||||
let config = app_handle.config();
|
||||
let about_metadata = AboutMetadata {
|
||||
name: Some(pkg_info.name.clone()),
|
||||
version: Some(pkg_info.version.to_string()),
|
||||
copyright: config.bundle.copyright.clone(),
|
||||
authors: config.bundle.publisher.clone().map(|p| vec![p]),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let mut file_menu = Menu::new();
|
||||
file_menu = file_menu.add_native_item(MenuItem::CloseWindow);
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
{
|
||||
file_menu = file_menu.add_native_item(MenuItem::Quit);
|
||||
}
|
||||
menu = menu.add_submenu(Submenu::new("File", file_menu));
|
||||
let window_menu = Submenu::with_id_and_items(
|
||||
app_handle,
|
||||
WINDOW_SUBMENU_ID,
|
||||
"Window",
|
||||
true,
|
||||
&[
|
||||
&PredefinedMenuItem::minimize(app_handle, None)?,
|
||||
&PredefinedMenuItem::maximize(app_handle, None)?,
|
||||
#[cfg(target_os = "macos")]
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&PredefinedMenuItem::close_window(app_handle, None)?,
|
||||
],
|
||||
)?;
|
||||
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
let mut edit_menu = Menu::new();
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::Undo);
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::Redo);
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::Separator);
|
||||
}
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
{
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::Cut);
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::Copy);
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::Paste);
|
||||
}
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::SelectAll);
|
||||
}
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
{
|
||||
menu = menu.add_submenu(Submenu::new("Edit", edit_menu));
|
||||
}
|
||||
let mut view_menu = Menu::new();
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
view_menu = view_menu
|
||||
.add_native_item(MenuItem::EnterFullScreen)
|
||||
.add_native_item(MenuItem::Separator);
|
||||
}
|
||||
view_menu = view_menu
|
||||
.add_item(
|
||||
CustomMenuItem::new("zoom_reset".to_string(), "Zoom to Actual Size")
|
||||
.accelerator("CmdOrCtrl+0"),
|
||||
)
|
||||
.add_item(
|
||||
CustomMenuItem::new("zoom_in".to_string(), "Zoom In").accelerator("CmdOrCtrl+Plus"),
|
||||
)
|
||||
.add_item(
|
||||
CustomMenuItem::new("zoom_out".to_string(), "Zoom Out").accelerator("CmdOrCtrl+-"),
|
||||
);
|
||||
// .add_native_item(MenuItem::Separator)
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("toggle_sidebar".to_string(), "Toggle Sidebar")
|
||||
// .accelerator("CmdOrCtrl+b"),
|
||||
// )
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("focus_sidebar".to_string(), "Focus Sidebar")
|
||||
// .accelerator("CmdOrCtrl+1"),
|
||||
// )
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("toggle_settings".to_string(), "Toggle Settings")
|
||||
// .accelerator("CmdOrCtrl+,"),
|
||||
// )
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("focus_url".to_string(), "Focus URL").accelerator("CmdOrCtrl+l"),
|
||||
// );
|
||||
menu = menu.add_submenu(Submenu::new("View", view_menu));
|
||||
let help_menu = Submenu::with_id_and_items(
|
||||
app_handle,
|
||||
HELP_SUBMENU_ID,
|
||||
"Help",
|
||||
true,
|
||||
&[
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
&PredefinedMenuItem::about(app_handle, None, Some(about_metadata))?,
|
||||
#[cfg(target_os = "macos")]
|
||||
&MenuItemBuilder::with_id("open_feedback".to_string(), "Give Feedback")
|
||||
.build(app_handle)?,
|
||||
],
|
||||
)?;
|
||||
|
||||
let mut window_menu = Menu::new();
|
||||
window_menu = window_menu.add_native_item(MenuItem::Minimize);
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
window_menu = window_menu.add_native_item(MenuItem::Zoom);
|
||||
window_menu = window_menu.add_native_item(MenuItem::Separator);
|
||||
}
|
||||
window_menu = window_menu.add_native_item(MenuItem::CloseWindow);
|
||||
menu = menu.add_submenu(Submenu::new("Window", window_menu));
|
||||
let menu = Menu::with_items(
|
||||
app_handle,
|
||||
&[
|
||||
#[cfg(target_os = "macos")]
|
||||
&Submenu::with_items(
|
||||
app_handle,
|
||||
pkg_info.name.clone(),
|
||||
true,
|
||||
&[
|
||||
&PredefinedMenuItem::about(app_handle, None, Some(about_metadata))?,
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&MenuItemBuilder::with_id("settings".to_string(), "Settings")
|
||||
.accelerator("CmdOrCtrl+,")
|
||||
.build(app_handle)?,
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&PredefinedMenuItem::services(app_handle, None)?,
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&PredefinedMenuItem::hide(app_handle, None)?,
|
||||
&PredefinedMenuItem::hide_others(app_handle, None)?,
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&PredefinedMenuItem::quit(app_handle, None)?,
|
||||
],
|
||||
)?,
|
||||
#[cfg(not(any(
|
||||
target_os = "linux",
|
||||
target_os = "dragonfly",
|
||||
target_os = "freebsd",
|
||||
target_os = "netbsd",
|
||||
target_os = "openbsd"
|
||||
)))]
|
||||
&Submenu::with_items(
|
||||
app_handle,
|
||||
"File",
|
||||
true,
|
||||
&[
|
||||
&PredefinedMenuItem::close_window(app_handle, None)?,
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
&PredefinedMenuItem::quit(app_handle, None)?,
|
||||
],
|
||||
)?,
|
||||
&Submenu::with_items(
|
||||
app_handle,
|
||||
"Edit",
|
||||
true,
|
||||
&[
|
||||
&PredefinedMenuItem::undo(app_handle, None)?,
|
||||
&PredefinedMenuItem::redo(app_handle, None)?,
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&PredefinedMenuItem::cut(app_handle, None)?,
|
||||
&PredefinedMenuItem::copy(app_handle, None)?,
|
||||
&PredefinedMenuItem::paste(app_handle, None)?,
|
||||
&PredefinedMenuItem::select_all(app_handle, None)?,
|
||||
],
|
||||
)?,
|
||||
&Submenu::with_items(
|
||||
app_handle,
|
||||
"View",
|
||||
true,
|
||||
&[
|
||||
#[cfg(target_os = "macos")]
|
||||
&PredefinedMenuItem::fullscreen(app_handle, None)?,
|
||||
#[cfg(target_os = "macos")]
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&MenuItemBuilder::with_id("zoom_reset".to_string(), "Zoom to Actual Size")
|
||||
.accelerator("CmdOrCtrl+0")
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id("zoom_in".to_string(), "Zoom In")
|
||||
.accelerator("CmdOrCtrl+=")
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id("zoom_out".to_string(), "Zoom Out")
|
||||
.accelerator("CmdOrCtrl+-")
|
||||
.build(app_handle)?,
|
||||
],
|
||||
)?,
|
||||
&window_menu,
|
||||
&help_menu,
|
||||
#[cfg(dev)]
|
||||
&Submenu::with_items(
|
||||
app_handle,
|
||||
"Develop",
|
||||
true,
|
||||
&[
|
||||
&MenuItemBuilder::with_id("dev.refresh".to_string(), "Refresh")
|
||||
.accelerator("CmdOrCtrl+Shift+r")
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id("dev.toggle_devtools".to_string(), "Open Devtools")
|
||||
.accelerator("CmdOrCtrl+Option+i")
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id("dev.reset_size".to_string(), "Reset Size")
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id("dev.generate_theme_css".to_string(), "Generate Theme CSS")
|
||||
.build(app_handle)?,
|
||||
],
|
||||
)?,
|
||||
],
|
||||
)?;
|
||||
|
||||
// menu = menu.add_submenu(Submenu::new(
|
||||
// "Workspace",
|
||||
// Menu::new()
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("send_request".to_string(), "Send Request")
|
||||
// .accelerator("CmdOrCtrl+r"),
|
||||
// )
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("new_request".to_string(), "New Request")
|
||||
// .accelerator("CmdOrCtrl+n"),
|
||||
// )
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("duplicate_request".to_string(), "Duplicate Request")
|
||||
// .accelerator("CmdOrCtrl+d"),
|
||||
// ),
|
||||
// ));
|
||||
|
||||
if is_dev() {
|
||||
menu = menu.add_submenu(Submenu::new(
|
||||
"Developer",
|
||||
Menu::new()
|
||||
.add_item(
|
||||
CustomMenuItem::new("refresh".to_string(), "Refresh")
|
||||
.accelerator("CmdOrCtrl + Shift + r"),
|
||||
)
|
||||
.add_item(
|
||||
CustomMenuItem::new("toggle_devtools".to_string(), "Open Devtools")
|
||||
.accelerator("CmdOrCtrl + Option + i"),
|
||||
),
|
||||
));
|
||||
}
|
||||
|
||||
menu
|
||||
Ok(menu)
|
||||
}
|
||||
|
||||
@@ -1,17 +1,13 @@
|
||||
{
|
||||
"package": {
|
||||
"productName": "Daak"
|
||||
},
|
||||
"tauri": {
|
||||
"bundle": {
|
||||
"icon": [
|
||||
"icons/dev/32x32.png",
|
||||
"icons/dev/128x128.png",
|
||||
"icons/dev/128x128@2x.png",
|
||||
"icons/dev/icon.icns",
|
||||
"icons/dev/icon.ico"
|
||||
],
|
||||
"identifier": "app.yaak.desktop.dev"
|
||||
}
|
||||
"productName": "Daak",
|
||||
"identifier": "app.yaak.desktop.dev",
|
||||
"bundle": {
|
||||
"icon": [
|
||||
"icons/dev/32x32.png",
|
||||
"icons/dev/128x128.png",
|
||||
"icons/dev/128x128@2x.png",
|
||||
"icons/dev/icon.icns",
|
||||
"icons/dev/icon.ico"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,113 +1,81 @@
{
  "productName": "yaak",
  "version": "2024.5.2",
  "identifier": "app.yaak.desktop",
  "build": {
    "beforeBuildCommand": "npm run build",
    "beforeDevCommand": "npm run dev",
    "devPath": "http://localhost:1420",
    "distDir": "../dist",
    "withGlobalTauri": false
    "devUrl": "http://localhost:1420",
    "frontendDist": "../dist"
  },
  "package": {
    "productName": "Yaak",
    "version": "2024.3.0"
  "app": {
    "withGlobalTauri": false,
    "security": {
      "assetProtocol": {
        "enable": true,
        "scope": {
          "allow": [
            "$APPDATA/responses/*"
          ]
        }
      }
    }
  },
  "tauri": {
    "windows": [],
    "allowlist": {
      "all": false,
      "os": {
        "all": true
      },
      "protocol": {
        "assetScope": [
          "$APPDATA/responses/*"
        ],
        "asset": true
      },
      "fs": {
        "readFile": true,
        "scope": [
          "$RESOURCE/*",
          "$APPDATA/responses/*"
  "plugins": {
    "deep-link": {
      "mobile": [],
      "desktop": {
        "schemes": [
          "yaak"
        ]
      },
      "shell": {
        "all": false,
        "open": true,
        "sidecar": true,
        "scope": [
          { "name": "protoc", "sidecar": true,
            "args": true
          }
        ]
      },
      "window": {
        "close": true,
        "maximize": true,
        "minimize": true,
        "setDecorations": true,
        "setTitle": true,
        "startDragging": true,
        "unmaximize": true
      },
      "dialog": {
        "all": false,
        "open": true,
        "save": true
      },
      "path": {
        "all": true
      }
    },
    "bundle": {
      "active": true,
      "category": "DeveloperTool",
      "copyright": "",
      "externalBin": [
        "protoc-vendored/protoc"
      ],
      "icon": [
        "icons/release/32x32.png",
        "icons/release/128x128.png",
        "icons/release/128x128@2x.png",
        "icons/release/icon.icns",
        "icons/release/icon.ico"
      ],
      "identifier": "app.yaak.desktop",
      "longDescription": "The best cross-platform visual API client",
      "resources": [
        "migrations/*",
        "plugins/*"
      ],
      "shortDescription": "The best API client",
      "targets": [
        "deb",
        "appimage",
        "nsis",
        "app",
        "dmg",
        "updater"
      ],
      "deb": {
        "depends": []
      },
      "macOS": {
        "exceptionDomain": "",
        "entitlements": "macos/entitlements.plist",
        "frameworks": []
      },
      "windows": {
        "digestAlgorithm": "sha256",
        "timestampUrl": ""
      }
    },
    "security": {},
    "updater": {
      "active": true,
      "dialog": false,
      "endpoints": [
        "https://update.yaak.app/check/{{target}}/{{arch}}/{{current_version}}"
      ],
      "pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IEMxRDJFREQ1MjExQjdGN0IKUldSN2Z4c2gxZTNTd1FHNCtmYnFXMHVVQzhuNkJOM1cwOFBodmdLall3ckhKenpKUytHSTR1MlkK"
    }
  },
  "bundle": {
    "active": true,
    "category": "DeveloperTool",
    "externalBin": [
      "protoc-vendored/protoc"
    ],
    "icon": [
      "icons/release/32x32.png",
      "icons/release/128x128.png",
      "icons/release/128x128@2x.png",
      "icons/release/icon.icns",
      "icons/release/icon.ico"
    ],
    "longDescription": "A cross-platform desktop app for interacting with REST, GraphQL, and gRPC",
    "resources": [
      "migrations/*",
      "plugins/*",
      "protoc-vendored/include/*"
    ],
    "shortDescription": "Desktop API client",
    "targets": [
      "deb",
      "appimage",
      "nsis",
      "app",
      "dmg",
      "updater"
    ],
    "iOS": {
      "developmentTeam": "7PU3P6ELJ8"
    },
    "macOS": {
      "exceptionDomain": "",
      "entitlements": "macos/entitlements.plist",
      "frameworks": []
    },
    "windows": {
      "digestAlgorithm": "sha256",
      "timestampUrl": ""
    }
  }
}
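For reference, this migration replaces the v1 allowlist with Tauri v2 plugins, so the frontend now calls plugin modules such as @tauri-apps/plugin-dialog directly (the same open/save imports used by components later in this diff). A minimal sketch; the helper names here are illustrative, not Yaak APIs:

// Sketch only: Tauri v2 dialog plugin calls that match the config above.
// `pickBinaryBodyFile` and `pickExportPath` are illustrative names, not Yaak APIs.
import { open, save } from '@tauri-apps/plugin-dialog';

export async function pickBinaryBodyFile(): Promise<string | null> {
  const selected = await open({ title: 'Select File', multiple: false });
  if (selected == null) return null;
  // Depending on the plugin version, `open` returns a path string or a { path } object
  return typeof selected === 'string' ? selected : selected.path;
}

export async function pickExportPath(slug: string): Promise<string | null> {
  // Mirrors the save() call used by ExportDataDialog further down in this diff
  return await save({ title: 'Export Data', defaultPath: `yaak.${slug}.json` });
}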
@@ -7,7 +7,6 @@ import { HelmetProvider } from 'react-helmet-async';
import { AppRouter } from './AppRouter';

const queryClient = new QueryClient({
  logger: undefined,
  defaultOptions: {
    queries: {
      retry: false,
@@ -1,10 +1,10 @@
import { createBrowserRouter, Navigate, Outlet, RouterProvider, useParams } from 'react-router-dom';
import { createBrowserRouter, Navigate, RouterProvider, useParams } from 'react-router-dom';
import { routePaths, useAppRoutes } from '../hooks/useAppRoutes';
import { DialogProvider } from './DialogContext';
import { GlobalHooks } from './GlobalHooks';
import RouteError from './RouteError';
import Workspace from './Workspace';
import { DefaultLayout } from './DefaultLayout';
import { RedirectToLatestWorkspace } from './RedirectToLatestWorkspace';
import RouteError from './RouteError';
import { Settings } from './Settings/Settings';
import Workspace from './Workspace';

const router = createBrowserRouter([
  {
@@ -37,6 +37,12 @@ const router = createBrowserRouter([
        path: '/workspaces/:workspaceId/environments/:environmentId/requests/:requestId',
        element: <RedirectLegacyEnvironmentURLs />,
      },
      {
        path: routePaths.workspaceSettings({
          workspaceId: ':workspaceId',
        }),
        element: <Settings />,
      },
    ],
  },
]);
@@ -58,7 +64,7 @@ function RedirectLegacyEnvironmentURLs() {
  }>();
  const environmentId = rawEnvironmentId === '__default__' ? undefined : rawEnvironmentId;

  let to = '/';
  let to;
  if (workspaceId != null && requestId != null) {
    to = routes.paths.request({ workspaceId, environmentId, requestId });
  } else if (workspaceId != null) {
@@ -69,12 +75,3 @@ function RedirectLegacyEnvironmentURLs() {

  return <Navigate to={to} />;
}

function DefaultLayout() {
  return (
    <DialogProvider>
      <Outlet />
      <GlobalHooks />
    </DialogProvider>
  );
}
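Taken together, the router hunks above move the inline layout into the new DefaultLayout component and add a workspace-settings route. A rough sketch of the resulting route shape, using only the pieces visible in this hunk (the error-route placement is assumed):

// Sketch of the route tree implied by the hunks above; not the complete router.
import { createBrowserRouter } from 'react-router-dom';
import { routePaths } from '../hooks/useAppRoutes';
import { DefaultLayout } from './DefaultLayout';
import RouteError from './RouteError';
import { Settings } from './Settings/Settings';

export const router = createBrowserRouter([
  {
    element: <DefaultLayout />, // providers + <Outlet /> + GlobalHooks (see DefaultLayout.tsx below)
    errorElement: <RouteError />, // assumed placement; not shown in this hunk
    children: [
      {
        path: routePaths.workspaceSettings({ workspaceId: ':workspaceId' }),
        element: <Settings />, // route added by this change
      },
      // ...the existing workspace/request routes from the unchanged part of the file
    ],
  },
]);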
@@ -13,7 +13,7 @@ export function BasicAuth<T extends HttpRequest | GrpcRequest>({ request }: Prop
  const updateGrpcRequest = useUpdateGrpcRequest(request.id);

  return (
    <VStack className="my-2" space={2}>
    <VStack className="py-2 overflow-y-auto h-full" space={2}>
      <Input
        useTemplating
        autocompleteVariables

src-web/components/BinaryFileEditor.tsx (new file, 81 lines)
@@ -0,0 +1,81 @@
import { open } from '@tauri-apps/plugin-dialog';
import mime from 'mime';
import { useKeyValue } from '../hooks/useKeyValue';
import type { HttpRequest } from '../lib/models';
import { Banner } from './core/Banner';
import { Button } from './core/Button';
import { InlineCode } from './core/InlineCode';
import { HStack, VStack } from './core/Stacks';

type Props = {
  requestId: string;
  contentType: string | null;
  body: HttpRequest['body'];
  onChange: (body: HttpRequest['body']) => void;
  onChangeContentType: (contentType: string | null) => void;
};

export function BinaryFileEditor({
  contentType,
  body,
  onChange,
  onChangeContentType,
  requestId,
}: Props) {
  const ignoreContentType = useKeyValue<boolean>({
    namespace: 'global',
    key: ['ignore_content_type', requestId],
    fallback: false,
  });

  const handleClick = async () => {
    await ignoreContentType.set(false);
    const selected = await open({
      title: 'Select File',
      multiple: false,
    });
    if (selected == null) {
      return;
    }
    onChange({ filePath: selected.path });
  };

  const filePath = typeof body.filePath === 'string' ? body.filePath : undefined;
  const mimeType = mime.getType(filePath ?? '') ?? 'application/octet-stream';

  return (
    <VStack space={2}>
      <HStack space={2}>
        <Button variant="border" color="secondary" size="sm" onClick={handleClick}>
          Choose File
        </Button>
        <div className="text-sm font-mono truncate rtl pr-3 text-fg">
          {/* Special character to insert ltr text in rtl element without making things wonky */}
          ‎
          {filePath ?? 'Select File'}
        </div>
      </HStack>
      {filePath != null && mimeType !== contentType && !ignoreContentType.value && (
        <Banner className="mt-3 !py-5">
          <div className="mb-4 text-center">
            <div>Set Content-Type header</div>
            <InlineCode>{mimeType}</InlineCode> for current request?
          </div>
          <HStack space={1.5} justifyContent="center">
            <Button size="sm" variant="border" onClick={() => ignoreContentType.set(true)}>
              Ignore
            </Button>
            <Button
              variant="solid"
              color="primary"
              size="sm"
              onClick={() => onChangeContentType(mimeType)}
            >
              Set Header
            </Button>
          </HStack>
        </Banner>
      )}
    </VStack>
  );
}
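A hypothetical parent for the new BinaryFileEditor, showing how its callbacks could be wired to a request update; updateRequest and the header shape are assumptions for illustration, not part of this diff:

// Illustrative only: the request/header shapes here are assumed.
import type { HttpRequest } from '../lib/models';
import { BinaryFileEditor } from './BinaryFileEditor';

function BinaryBodyPane({
  request,
  updateRequest, // assumed helper that patches the request
}: {
  request: HttpRequest;
  updateRequest: (patch: Partial<HttpRequest>) => void;
}) {
  const contentType =
    request.headers.find((h) => h.name.toLowerCase() === 'content-type')?.value ?? null;

  return (
    <BinaryFileEditor
      requestId={request.id}
      body={request.body}
      contentType={contentType}
      onChange={(body) => updateRequest({ body })}
      onChangeContentType={(ct) =>
        updateRequest({
          headers: [
            ...request.headers.filter((h) => h.name.toLowerCase() !== 'content-type'),
            { name: 'Content-Type', value: ct ?? '', enabled: true },
          ],
        })
      }
    />
  );
}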
src-web/components/CommandPalette.tsx (new file, 127 lines)
@@ -0,0 +1,127 @@
import classNames from 'classnames';
import type { ReactNode } from 'react';
import { useCallback, useMemo, useState } from 'react';
import { useActiveEnvironmentId } from '../hooks/useActiveEnvironmentId';
import { useAppRoutes } from '../hooks/useAppRoutes';
import { getRecentEnvironments } from '../hooks/useRecentEnvironments';
import { useRequests } from '../hooks/useRequests';
import { useWorkspaces } from '../hooks/useWorkspaces';
import { fallbackRequestName } from '../lib/fallbackRequestName';
import { Input } from './core/Input';

export function CommandPalette({ onClose }: { onClose: () => void }) {
  const [selectedIndex, setSelectedIndex] = useState<number>(0);
  const routes = useAppRoutes();
  const activeEnvironmentId = useActiveEnvironmentId();
  const workspaces = useWorkspaces();
  const requests = useRequests();
  const [command, setCommand] = useState<string>('');

  const items = useMemo<{ label: string; onSelect: () => void; key: string }[]>(() => {
    const items = [];
    for (const r of requests) {
      items.push({
        key: `switch-request-${r.id}`,
        label: `Switch Request → ${fallbackRequestName(r)}`,
        onSelect: () => {
          return routes.navigate('request', {
            workspaceId: r.workspaceId,
            requestId: r.id,
            environmentId: activeEnvironmentId ?? undefined,
          });
        },
      });
    }
    for (const w of workspaces) {
      items.push({
        key: `switch-workspace-${w.id}`,
        label: `Switch Workspace → ${w.name}`,
        onSelect: async () => {
          const environmentId = (await getRecentEnvironments(w.id))[0];
          return routes.navigate('workspace', {
            workspaceId: w.id,
            environmentId,
          });
        },
      });
    }
    return items;
  }, [activeEnvironmentId, requests, routes, workspaces]);

  const filteredItems = useMemo(() => {
    return items.filter((v) => v.label.toLowerCase().includes(command.toLowerCase()));
  }, [command, items]);

  const handleSelectAndClose = useCallback(
    (cb: () => void) => {
      onClose();
      cb();
    },
    [onClose],
  );

  const handleKeyDown = useCallback(
    (e: KeyboardEvent) => {
      if (e.key === 'ArrowDown') {
        setSelectedIndex((prev) => prev + 1);
      } else if (e.key === 'ArrowUp') {
        setSelectedIndex((prev) => prev - 1);
      } else if (e.key === 'Enter') {
        const item = filteredItems[selectedIndex];
        if (item) {
          handleSelectAndClose(item.onSelect);
        }
      }
    },
    [filteredItems, handleSelectAndClose, selectedIndex],
  );

  return (
    <div className="h-full grid grid-rows-[auto_minmax(0,1fr)]">
      <div className="px-2 py-2 w-full">
        <Input
          hideLabel
          name="command"
          label="Command"
          placeholder="Type a command"
          defaultValue=""
          onChange={setCommand}
          onKeyDown={handleKeyDown}
        />
      </div>
      <div className="h-full px-1.5 overflow-y-auto">
        {filteredItems.map((v, i) => (
          <CommandPaletteItem
            active={i === selectedIndex}
            key={v.key}
            onClick={() => handleSelectAndClose(v.onSelect)}
          >
            {v.label}
          </CommandPaletteItem>
        ))}
      </div>
    </div>
  );
}

function CommandPaletteItem({
  children,
  active,
  onClick,
}: {
  children: ReactNode;
  active: boolean;
  onClick: () => void;
}) {
  return (
    <button
      onClick={onClick}
      className={classNames(
        'w-full h-xs flex items-center rounded px-1.5 text-fg-subtle',
        active && 'bg-background-highlight-secondary text-fg',
      )}
    >
      {children}
    </button>
  );
}
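A minimal sketch of opening the new CommandPalette from the dialog layer; useDialog is an assumed shape of the app's DialogContext (only DialogEntry's id/render fields appear in this diff):

// Hypothetical wiring: render CommandPalette inside a dialog and close it on select.
import { CommandPalette } from './CommandPalette';
import { useDialog } from './DialogContext'; // assumed hook name

export function useOpenCommandPalette() {
  const dialog = useDialog();
  return () =>
    dialog.show({
      id: 'command-palette',
      hideX: true, // assumed DialogProps passthrough (see the DialogEntry change below)
      render: ({ hide }) => <CommandPalette onClose={hide} />,
    });
}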
@@ -28,7 +28,7 @@ export const CookieDialog = function ({ cookieJarId }: Props) {

  return (
    <div className="pb-2">
      <table className="w-full text-xs mb-auto min-w-full max-w-full divide-y">
      <table className="w-full text-sm mb-auto min-w-full max-w-full divide-y divide-background-highlight">
        <thead>
          <tr>
            <th className="py-2 text-left">Domain</th>
@@ -36,13 +36,13 @@ export const CookieDialog = function ({ cookieJarId }: Props) {
            <th className="py-2 pl-4"></th>
          </tr>
        </thead>
        <tbody className="divide-y">
        <tbody className="divide-y divide-background-highlight-secondary">
          {cookieJar?.cookies.map((c) => (
            <tr key={c.domain + c.raw_cookie}>
              <td className="py-2 select-text cursor-text font-mono font-semibold max-w-0">
                {cookieDomain(c)}
              </td>
              <td className="py-2 pl-4 select-text cursor-text font-mono text-gray-700 whitespace-nowrap overflow-x-auto max-w-[200px] hide-scrollbars">
              <td className="py-2 pl-4 select-text cursor-text font-mono text-fg-subtle whitespace-nowrap overflow-x-auto max-w-[200px] hide-scrollbars">
                {c.raw_cookie}
              </td>
              <td className="max-w-0 w-10">
@@ -53,11 +53,6 @@ export const CookieDialog = function ({ cookieJarId }: Props) {
                  title="Delete"
                  className="ml-auto"
                  onClick={async () => {
                    console.log(
                      'DELETE COOKIE',
                      c,
                      cookieJar.cookies.filter((c2) => c2 !== c).length,
                    );
                    await updateCookieJar.mutateAsync({
                      ...cookieJar,
                      cookies: cookieJar.cookies.filter((c2) => c2 !== c),
@@ -2,16 +2,14 @@ import { useCreateDropdownItems } from '../hooks/useCreateDropdownItems';
import type { DropdownProps } from './core/Dropdown';
import { Dropdown } from './core/Dropdown';

interface Props {
interface Props extends Omit<DropdownProps, 'items'> {
  hideFolder?: boolean;
  children: DropdownProps['children'];
  openOnHotKeyAction?: DropdownProps['openOnHotKeyAction'];
}

export function CreateDropdown({ hideFolder, children, openOnHotKeyAction }: Props) {
export function CreateDropdown({ hideFolder, children, ...props }: Props) {
  const items = useCreateDropdownItems({ hideFolder, hideIcons: true });
  return (
    <Dropdown openOnHotKeyAction={openOnHotKeyAction} items={items}>
    <Dropdown items={items} {...props}>
      {children}
    </Dropdown>
  );

src-web/components/DefaultLayout.tsx (new file, 29 lines)
@@ -0,0 +1,29 @@
import { Outlet } from 'react-router-dom';
import { DialogProvider } from './DialogContext';
import { GlobalHooks } from './GlobalHooks';
import { ToastProvider } from './ToastContext';
import classNames from 'classnames';
import { useOsInfo } from '../hooks/useOsInfo';
import { motion } from 'framer-motion';

export function DefaultLayout() {
  const osInfo = useOsInfo();
  return (
    <DialogProvider>
      <ToastProvider>
        <motion.div
          initial={{ opacity: 0 }}
          animate={{ opacity: 1 }}
          transition={{ duration: 0.1, delay: 0.1 }}
          className={classNames(
            'w-full h-full',
            osInfo?.osType === 'linux' && 'border border-background-highlight-secondary',
          )}
        >
          <Outlet />
        </motion.div>
        <GlobalHooks />
      </ToastProvider>
    </DialogProvider>
  );
}
@@ -6,7 +6,7 @@ import { Dialog } from './core/Dialog';
type DialogEntry = {
  id: string;
  render: ({ hide }: { hide: () => void }) => React.ReactNode;
} & Pick<DialogProps, 'title' | 'description' | 'hideX' | 'className' | 'size' | 'noPadding'>;
} & Omit<DialogProps, 'onClose' | 'open' | 'children'>;

interface State {
  dialogs: DialogEntry[];

@@ -14,7 +14,7 @@ export const DropMarker = memo(
        'relative w-full h-0 overflow-visible pointer-events-none',
      )}
    >
      <div className="absolute z-50 left-2 right-2 -bottom-[0.1rem] h-[0.2rem] bg-blue-500/50 rounded-full" />
      <div className="absolute z-50 left-2 right-2 -bottom-[0.1rem] h-[0.2rem] bg-fg-primary rounded-full" />
    </div>
  );
},

@@ -1,5 +1,6 @@
import classNames from 'classnames';
import type { ReactNode } from 'react';
import React from 'react';

interface Props {
  children: ReactNode;
@@ -11,7 +12,8 @@ export function EmptyStateText({ children, className }: Props) {
    <div
      className={classNames(
        className,
        'rounded-lg border border-dashed border-highlight h-full py-2 text-gray-400 flex items-center justify-center',
        'rounded-lg border border-dashed border-background-highlight',
        'h-full py-2 text-fg-subtler flex items-center justify-center italic',
      )}
    >
      {children}

@@ -71,8 +71,8 @@ export const EnvironmentActionsDropdown = memo(function EnvironmentActionsDropdo
        size="sm"
        className={classNames(
          className,
          'text-gray-800 !px-2 truncate',
          activeEnvironment == null && 'text-opacity-disabled italic',
          'text-fg !px-2 truncate',
          activeEnvironment == null && 'text-fg-subtler italic',
        )}
        {...buttonProps}
      >

@@ -5,6 +5,7 @@ import { useActiveWorkspace } from '../hooks/useActiveWorkspace';
import { useCreateEnvironment } from '../hooks/useCreateEnvironment';
import { useDeleteEnvironment } from '../hooks/useDeleteEnvironment';
import { useEnvironments } from '../hooks/useEnvironments';
import { useKeyValue } from '../hooks/useKeyValue';
import { usePrompt } from '../hooks/usePrompt';
import { useUpdateEnvironment } from '../hooks/useUpdateEnvironment';
import { useUpdateWorkspace } from '../hooks/useUpdateWorkspace';
@@ -59,14 +60,16 @@ export const EnvironmentEditDialog = function ({ initialEnvironment }: Props) {
          <SidebarButton
            active={selectedEnvironment?.id == null}
            onClick={() => setSelectedEnvironmentId(null)}
            className="group"
            environment={null}
            rightSlot={
              <IconButton
                size="sm"
                iconSize="md"
                color="custom"
                title="Add sub environment"
                icon="plusCircle"
                iconClassName="text-gray-500 group-hover:text-gray-700"
                iconClassName="text-fg-subtler group-hover:text-fg-subtle"
                className="group"
                onClick={handleCreateEnvironment}
              />
            }
@@ -94,7 +97,7 @@ export const EnvironmentEditDialog = function ({ initialEnvironment }: Props) {
      secondSlot={() =>
        activeWorkspace != null && (
          <EnvironmentEditor
            className="pt-2 border-l border-highlight"
            className="pt-2 border-l border-background-highlight-secondary"
            environment={selectedEnvironment}
            workspace={activeWorkspace}
          />
@@ -113,6 +116,11 @@ const EnvironmentEditor = function ({
  workspace: Workspace;
  className?: string;
}) {
  const valueVisibility = useKeyValue<boolean>({
    namespace: 'global',
    key: 'environmentValueVisibility',
    fallback: true,
  });
  const environments = useEnvironments();
  const updateEnvironment = useUpdateEnvironment(environment?.id ?? null);
  const updateWorkspace = useUpdateWorkspace(workspace.id);
@@ -164,15 +172,26 @@ const EnvironmentEditor = function ({
  return (
    <VStack space={4} className={classNames(className, 'pl-4')}>
      <HStack space={2} className="justify-between">
        <Heading className="w-full flex items-center">
        <Heading className="w-full flex items-center gap-1">
          <div>{environment?.name ?? 'Global Variables'}</div>
          <IconButton
            iconClassName="text-fg-subtler"
            size="sm"
            icon={valueVisibility.value ? 'eye' : 'eyeClosed'}
            title={valueVisibility.value ? 'Hide Values' : 'Reveal Values'}
            onClick={() => {
              return valueVisibility.set((v) => !v);
            }}
          />
        </Heading>
      </HStack>
      <PairEditor
        className="pr-2"
        nameAutocomplete={nameAutocomplete}
        nameAutocompleteVariables={false}
        namePlaceholder="VAR_NAME"
        nameValidate={validateName}
        valueType={valueVisibility.value ? 'text' : 'password'}
        valueAutocompleteVariables={false}
        forceUpdateKey={environment?.id ?? workspace?.id ?? 'n/a'}
        pairs={variables}
@@ -216,8 +235,8 @@ function SidebarButton({
    <div
      className={classNames(
        className,
        'w-full grid grid-cols-[minmax(0,1fr)_auto] items-center',
        'px-1', // Padding to show focus border
        'w-full grid grid-cols-[minmax(0,1fr)_auto] items-center gap-0.5',
        'px-2', // Padding to show focus border
      )}
    >
      <Button
@@ -225,7 +244,7 @@ function SidebarButton({
        size="xs"
        className={classNames(
          'w-full',
          active ? 'text-gray-800' : 'text-gray-600 hover:text-gray-700',
          active ? 'text-fg bg-background-active' : 'text-fg-subtle hover:text-fg',
        )}
        justify="start"
        onClick={onClick}
@@ -262,7 +281,7 @@ function SidebarButton({
      },
    },
    {
      key: 'delete',
      key: 'delete-environment',
      variant: 'danger',
      label: 'Delete',
      leftSlot: <Icon icon="trash" size="sm" />,

src-web/components/ExportDataDialog.tsx (new file, 118 lines)
@@ -0,0 +1,118 @@
import { invoke } from '@tauri-apps/api/core';
import { save } from '@tauri-apps/plugin-dialog';
import { useCallback, useMemo, useState } from 'react';
import slugify from 'slugify';
import type { Workspace } from '../lib/models';
import { count } from '../lib/pluralize';
import { Button } from './core/Button';
import { Checkbox } from './core/Checkbox';
import { HStack, VStack } from './core/Stacks';

interface Props {
  onHide: () => void;
  onSuccess: (path: string) => void;
  activeWorkspace: Workspace;
  workspaces: Workspace[];
}

export function ExportDataDialog({
  onHide,
  onSuccess,
  activeWorkspace,
  workspaces: allWorkspaces,
}: Props) {
  const [selectedWorkspaces, setSelectedWorkspaces] = useState<Record<string, boolean>>({
    [activeWorkspace.id]: true,
  });

  // Put active workspace first
  const workspaces = useMemo(
    () => [activeWorkspace, ...allWorkspaces.filter((w) => w.id !== activeWorkspace.id)],
    [activeWorkspace, allWorkspaces],
  );

  const handleToggleAll = () => {
    setSelectedWorkspaces(
      allSelected ? {} : workspaces.reduce((acc, w) => ({ ...acc, [w.id]: true }), {}),
    );
  };

  const handleExport = useCallback(async () => {
    const ids = Object.keys(selectedWorkspaces).filter((k) => selectedWorkspaces[k]);
    const workspace = ids.length === 1 ? workspaces.find((w) => w.id === ids[0]) : undefined;
    const slug = workspace ? slugify(workspace.name, { lower: true }) : 'workspaces';
    const exportPath = await save({
      title: 'Export Data',
      defaultPath: `yaak.${slug}.json`,
    });
    if (exportPath == null) {
      return;
    }

    await invoke('cmd_export_data', { workspaceIds: ids, exportPath });
    onHide();
    onSuccess(exportPath);
  }, [onHide, onSuccess, selectedWorkspaces, workspaces]);

  const allSelected = workspaces.every((w) => selectedWorkspaces[w.id]);
  const numSelected = Object.values(selectedWorkspaces).filter(Boolean).length;
  const noneSelected = numSelected === 0;
  return (
    <VStack space={3} className="w-full mb-3 px-4">
      <table className="w-full mb-auto min-w-full max-w-full divide-y">
        <thead>
          <tr>
            <th className="w-6 min-w-0 py-2 text-left pl-1">
              <Checkbox
                checked={allSelected}
                indeterminate={!allSelected && !noneSelected}
                hideLabel
                title="All workspaces"
                onChange={handleToggleAll}
              />
            </th>
            <th className="py-2 text-left pl-4" onClick={handleToggleAll}>
              Workspace
            </th>
          </tr>
        </thead>
        <tbody className="divide-y">
          {workspaces.map((w) => (
            <tr key={w.id}>
              <td className="min-w-0 py-1 pl-1">
                <Checkbox
                  checked={selectedWorkspaces[w.id] ?? false}
                  title={w.name}
                  hideLabel
                  onChange={() =>
                    setSelectedWorkspaces((prev) => ({ ...prev, [w.id]: !prev[w.id] }))
                  }
                />
              </td>
              <td
                className="py-1 pl-4 text-fg whitespace-nowrap overflow-x-auto hide-scrollbars"
                onClick={() => setSelectedWorkspaces((prev) => ({ ...prev, [w.id]: !prev[w.id] }))}
              >
                {w.name} {w.id === activeWorkspace.id ? '(current workspace)' : ''}
              </td>
            </tr>
          ))}
        </tbody>
      </table>
      <HStack space={2} justifyContent="end">
        <Button className="focus" variant="border" onClick={onHide}>
          Cancel
        </Button>
        <Button
          type="submit"
          className="focus"
          color="primary"
          disabled={noneSelected}
          onClick={() => handleExport()}
        >
          Export {count('Workspace', numSelected, { omitSingle: true, noneWord: 'Nothing' })}
        </Button>
      </HStack>
    </VStack>
  );
}
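An illustrative caller for the new ExportDataDialog; only the component's props come from this diff, while the dialog plumbing and success handling are assumptions:

// Hypothetical: open ExportDataDialog and report where the export landed.
import { useActiveWorkspace } from '../hooks/useActiveWorkspace';
import { useWorkspaces } from '../hooks/useWorkspaces';
import { useDialog } from './DialogContext'; // assumed hook name
import { ExportDataDialog } from './ExportDataDialog';

export function useExportData() {
  const dialog = useDialog();
  const activeWorkspace = useActiveWorkspace();
  const workspaces = useWorkspaces();

  return () => {
    if (activeWorkspace == null) return;
    dialog.show({
      id: 'export-data',
      title: 'Export Data',
      render: ({ hide }) => (
        <ExportDataDialog
          activeWorkspace={activeWorkspace}
          workspaces={workspaces}
          onHide={hide}
          onSuccess={(path) => console.log('Exported data to', path)} // a toast in the real app, presumably
        />
      ),
    });
  };
}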
@@ -6,7 +6,7 @@ import { PairEditor } from './core/PairEditor';
type Props = {
  forceUpdateKey: string;
  body: HttpRequest['body'];
  onChange: (headers: HttpRequest['body']) => void;
  onChange: (body: HttpRequest['body']) => void;
};

export function FormMultipartEditor({ body, forceUpdateKey, onChange }: Props) {
@@ -16,6 +16,7 @@ export function FormMultipartEditor({ body, forceUpdateKey, onChange }: Props) {
        enabled: p.enabled,
        name: p.name,
        value: p.file ?? p.value,
        contentType: p.contentType,
        isFile: !!p.file,
      })),
    [body.form],
@@ -27,6 +28,7 @@ export function FormMultipartEditor({ body, forceUpdateKey, onChange }: Props) {
      form: pairs.map((p) => ({
        enabled: p.enabled,
        name: p.name,
        contentType: p.contentType,
        file: p.isFile ? p.value : undefined,
        value: p.isFile ? undefined : p.value,
      })),
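The multipart hunk above round-trips each pair between the editor rows and the stored body, routing file-backed rows through `file` instead of `value`. The same mapping in isolation, with types inferred from the fields used in the hunk (assumptions, not the app's actual model definitions):

// Field shapes inferred from the hunk above; the real model types live in lib/models.
type FormPart = {
  enabled?: boolean;
  name: string;
  value?: string;
  file?: string;
  contentType?: string;
};

type EditorPair = {
  enabled?: boolean;
  name: string;
  value?: string;
  contentType?: string;
  isFile: boolean; // true when the row points at a file path rather than a literal value
};

// body.form -> editor rows: show the file path in the value column when present
export const toEditorPairs = (form: FormPart[]): EditorPair[] =>
  form.map((p) => ({
    enabled: p.enabled,
    name: p.name,
    value: p.file ?? p.value,
    contentType: p.contentType,
    isFile: !!p.file,
  }));

// editor rows -> body.form: route the value back into `file` or `value`
export const toFormBody = (pairs: EditorPair[]): FormPart[] =>
  pairs.map((p) => ({
    enabled: p.enabled,
    name: p.name,
    contentType: p.contentType,
    file: p.isFile ? p.value : undefined,
    value: p.isFile ? undefined : p.value,
  }));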
Some files were not shown because too many files have changed in this diff.