Mirror of https://github.com/mountain-loop/yaak.git (synced 2026-02-14 22:57:47 +01:00)

Compare commits (985 commits)
[Commit table omitted: the capture lists only abbreviated commit SHA1 hashes; the author, date, and message columns were empty.]
@@ -12,7 +12,7 @@ module.exports = {
   parserOptions: {
     project: ["./tsconfig.json"]
   },
-  ignorePatterns: ["src-tauri/**/*", "plugins/**/*"],
+  ignorePatterns: ["scripts/**/*", "plugin-runtime/**/*", "src-tauri/**/*", "plugins/**/*"],
   settings: {
     react: {
       version: "detect"
72 .github/workflows/artifacts.yml (vendored)
@@ -1,72 +0,0 @@
|
||||
name: Generate Artifacts
|
||||
on:
|
||||
push:
|
||||
tags: [ v* ]
|
||||
|
||||
permissions: write-all
|
||||
|
||||
jobs:
|
||||
build-artifacts:
|
||||
name: Build
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: macos-latest
|
||||
target: aarch64-apple-darwin
|
||||
- os: macos-latest
|
||||
target: x86_64-apple-darwin
|
||||
- os: windows-latest
|
||||
target: x86_64-pc-windows-msvc
|
||||
- os: ubuntu-20.04
|
||||
target: x86_64-unknown-linux-gnu
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
- name: Cache Rust
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
./src-tauri/target
|
||||
key: ${{ runner.os }}-cargo-${{ hashFiles('src-tauri/Cargo.lock') }}
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 20
|
||||
cache: 'npm'
|
||||
- name: install dependencies (ubuntu only)
|
||||
if: matrix.os == 'ubuntu-20.04'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
- name: Run tests
|
||||
run: npm test
|
||||
# Pin dev version to get non-default targets
|
||||
# https://github.com/tauri-apps/tauri-action/issues/356
|
||||
- uses: tauri-apps/tauri-action@dev
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
|
||||
TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
|
||||
ENABLE_CODE_SIGNING: ${{ secrets.APPLE_CERTIFICATE }}
|
||||
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
|
||||
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
|
||||
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
|
||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
|
||||
with:
|
||||
tagName: 'v__VERSION__'
|
||||
releaseName: 'Release __VERSION__'
|
||||
releaseBody: 'https://yaak.app/changelog/__VERSION__'
|
||||
releaseDraft: true
|
||||
prerelease: false
|
||||
args: '--target ${{ matrix.target }}'
|
||||
18 .github/workflows/ci-js.yml (vendored, new file)
@@ -0,0 +1,18 @@
on:
  pull_request:
    branches: [develop]

name: CI (JS)

jobs:
  test:
    name: Lint/Test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 20
      - run: npm ci
      - run: npm run lint
      - run: npm test
36 .github/workflows/ci-rust.yml (vendored, new file)
@@ -0,0 +1,36 @@
on:
  pull_request:
    branches: [develop]
    paths:
      - src-tauri/**
      - .github/workflows/**

name: CI (Rust)

defaults:
  run:
    working-directory: src-tauri

jobs:
  test:
    name: Check/Test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - run: |
          sudo apt-get update
          sudo apt-get install -y libwebkit2gtk-4.1-dev
      - uses: dtolnay/rust-toolchain@stable
      - uses: actions/cache@v3
        continue-on-error: false
        with:
          path: |
            ~/.cargo/bin/
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            target/
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
          restore-keys: ${{ runner.os }}-cargo-
      - run: cargo check --all
      - run: cargo test --all
120 .github/workflows/release.yml (vendored, new file)
@@ -0,0 +1,120 @@
|
||||
name: Generate Artifacts
|
||||
on:
|
||||
push:
|
||||
tags: [ v* ]
|
||||
|
||||
env:
|
||||
YAAK_PLUGINS_DIR: checkout/plugins
|
||||
|
||||
jobs:
|
||||
build-artifacts:
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
name: Build
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: 'macos-latest' # for Arm-based macs (M1 and above).
|
||||
args: '--target aarch64-apple-darwin'
|
||||
yaak_arch: 'arm64'
|
||||
- platform: 'macos-latest' # for Intel-based macs.
|
||||
args: '--target x86_64-apple-darwin'
|
||||
yaak_arch: 'x64'
|
||||
- platform: 'ubuntu-22.04' # for Tauri v1, you could replace this with ubuntu-20.04.
|
||||
args: ''
|
||||
yaak_arch: 'x64'
|
||||
- platform: 'windows-latest'
|
||||
args: ''
|
||||
yaak_arch: 'x64'
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- name: Checkout yaakapp/app
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.22'
|
||||
|
||||
- name: install Rust stable
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
# Those targets are only used on macos runners so it's in an `if` to slightly speed up windows and linux builds.
|
||||
targets: ${{ matrix.platform == 'macos-latest' && 'aarch64-apple-darwin,x86_64-apple-darwin' || '' }}
|
||||
|
||||
- uses: actions/cache@v3
|
||||
continue-on-error: false
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
target/
|
||||
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-cargo-
|
||||
|
||||
- name: install dependencies (ubuntu only)
|
||||
if: matrix.platform == 'ubuntu-22.04' # This must match the platform value defined above.
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
|
||||
|
||||
- name: Install Node dependencies
|
||||
run: |
|
||||
npm ci
|
||||
|
||||
- name: Install plugin-runtime Node dependencies
|
||||
working-directory: plugin-runtime
|
||||
run: |
|
||||
npm ci
|
||||
|
||||
- name: Install Protoc for plugin-runtime
|
||||
uses: arduino/setup-protoc@v3
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Install yaak CLI
|
||||
run: go install github.com/yaakapp/yaakcli@latest
|
||||
|
||||
- name: Run lint
|
||||
run: npm run lint
|
||||
|
||||
- name: Checkout yaakapp/plugins
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: yaakapp/plugins
|
||||
path: ${{ env.YAAK_PLUGINS_DIR }}
|
||||
|
||||
- name: Set version
|
||||
run: npm run replace-version
|
||||
env:
|
||||
YAAK_VERSION: ${{ github.ref_name }}
|
||||
|
||||
- uses: tauri-apps/tauri-action@v0
|
||||
env:
|
||||
YAAK_PLUGINS_DIR: ${{ env.YAAK_PLUGINS_DIR }}
|
||||
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
|
||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
|
||||
ENABLE_CODE_SIGNING: ${{ secrets.APPLE_CERTIFICATE }}
|
||||
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
|
||||
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
|
||||
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
|
||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
|
||||
with:
|
||||
tagName: 'v__VERSION__'
|
||||
releaseName: 'Release __VERSION__'
|
||||
releaseBody: 'https://yaak.app/changelog/__VERSION__'
|
||||
releaseDraft: true
|
||||
prerelease: false
|
||||
args: ${{ matrix.args }}
|
||||
5 .gitignore (vendored)
@@ -26,3 +26,8 @@ dist-ssr
 
 *.sqlite
 *.sqlite-*
+
+.cargo
+
+.tmp
+tmp
@@ -1,20 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="Build Desktop" type="ShConfigurationType">
    <option name="SCRIPT_TEXT" value="npm run tauri build -- --target universal-apple-darwin" />
    <option name="INDEPENDENT_SCRIPT_PATH" value="true" />
    <option name="SCRIPT_PATH" value="" />
    <option name="SCRIPT_OPTIONS" value="" />
    <option name="INDEPENDENT_SCRIPT_WORKING_DIRECTORY" value="true" />
    <option name="SCRIPT_WORKING_DIRECTORY" value="$PROJECT_DIR$" />
    <option name="INDEPENDENT_INTERPRETER_PATH" value="true" />
    <option name="INTERPRETER_PATH" value="/bin/zsh" />
    <option name="INTERPRETER_OPTIONS" value="" />
    <option name="EXECUTE_IN_TERMINAL" value="true" />
    <option name="EXECUTE_SCRIPT_FILE" value="false" />
    <envs>
      <env name="TAURI_KEY_PASSWORD" value="fishhook-upstream-wash-assured" />
      <env name="TAURI_PRIVATE_KEY" value="dW50cnVzdGVkIGNvbW1lbnQ6IHJzaWduIGVuY3J5cHRlZCBzZWNyZXQga2V5ClJXUlRZMEl5OGxWaytTa3dIa2xXVUltQzRGUXIzd2lYQ2NpV0ZhQURSbWJWZ1NrK0tnY0FBQkFBQUFBQUFBQUFBQUlBQUFBQUV2M1VKdVRyVHpHSzhQdGc2ZVFtOVNsMU5tNEVSN280cFNrbXhncW9tdjNXaFJZUTJqUzQ5Q01zWTJWRVhaY1pGNHNjR1NFR3JmcWFRN09NdWdGMXpZVXhzejR4V3lDV1JpZHlnbW5LNS9vMFFtRlZjbUl4YjZSNzhlMmk3ait5SExYcG5QZUkxOFE9Cg==" />
    </envs>
    <method v="2" />
  </configuration>
</component>
@@ -1 +0,0 @@
plugins
Binary file not shown.
22 index.html
@@ -1,27 +1,27 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<meta charset="UTF-8"/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
|
||||
<title>Yaak App</title>
|
||||
<!-- <script src="http://localhost:8097"></script>-->
|
||||
<script src="http://localhost:8097"></script>
|
||||
|
||||
<!-- Certain elements like webview (and maybe <select>?) will use background
|
||||
color depending on document background color-->
|
||||
<style>
|
||||
html, body {
|
||||
background-color: white;
|
||||
}
|
||||
html, body {
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
html, body {
|
||||
background-color: black;
|
||||
}
|
||||
@media (prefers-color-scheme: dark) {
|
||||
html, body {
|
||||
background-color: #1b1a29;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<body class="text-base">
|
||||
<div id="root"></div>
|
||||
<div id="cm-portal" class="cm-portal"></div>
|
||||
<div id="react-portal"></div>
|
||||
|
||||
4483 package-lock.json (generated)
File diff suppressed because it is too large.
59 package.json
@@ -4,29 +4,27 @@
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"start": "npm run build:plugins && npm run tauri-dev",
|
||||
"tauri-dev": "tauri dev --no-watch --config ./src-tauri/tauri-dev.conf.json",
|
||||
"start": "npm run tauri-dev:desktop",
|
||||
"tauri-dev:desktop": "tauri dev --no-watch --config ./src-tauri/tauri-dev.conf.json",
|
||||
"tauri-dev:ios": "tauri ios dev --force-ip-prompt --config ./src-tauri/tauri-dev.conf.json",
|
||||
"tauri-build": "tauri build",
|
||||
"tauri": "tauri",
|
||||
"build": "npm run build:frontend",
|
||||
"dev": "vite dev",
|
||||
"dev:js": "vite dev",
|
||||
"lint": "tsc && eslint . --ext .ts,.tsx",
|
||||
"build:icon:release": "tauri icon design/icon.png --output ./src-tauri/icons/release",
|
||||
"build:icon:dev": "tauri icon design/icon-dev.png --output ./src-tauri/icons/dev",
|
||||
"build:frontend": "vite build",
|
||||
"build:plugins": "run-p build:plugin:*",
|
||||
"build:plugin:importer-insomnia": "cd plugins/importer-insomnia && vite build --emptyOutDir",
|
||||
"build:plugin:importer-postman": "cd plugins/importer-postman && vite build --emptyOutDir",
|
||||
"build:plugin:importer-yaak": "cd plugins/importer-yaak && vite build --emptyOutDir",
|
||||
"build:plugin:filter-jsonpath": "cd plugins/filter-jsonpath && vite build --emptyOutDir",
|
||||
"build:plugin:filter-xpath": "cd plugins/filter-xpath && vite build --emptyOutDir",
|
||||
"test": "vitest",
|
||||
"coverage": "vitest run --coverage",
|
||||
"prepare": "husky install"
|
||||
"build": "run-p build:*",
|
||||
"build:js": "vite build",
|
||||
"build:vendor-protoc": "node scripts/vendor-protoc.cjs",
|
||||
"build:vendor-plugins": "node scripts/vendor-plugins.cjs",
|
||||
"build:vendor-node": "node scripts/vendor-node.cjs",
|
||||
"build:plugin-runtime": "npm run --prefix plugin-runtime build",
|
||||
"prepare": "husky install",
|
||||
"replace-version": "node scripts/replace-version.cjs"
|
||||
},
|
||||
"dependencies": {
|
||||
"@codemirror/commands": "^6.2.1",
|
||||
"@codemirror/lang-javascript": "^6.1.4",
|
||||
"@codemirror/lang-javascript": "^6.2.2",
|
||||
"@codemirror/lang-json": "^6.0.1",
|
||||
"@codemirror/lang-xml": "^6.0.2",
|
||||
"@codemirror/language": "^6.6.0",
|
||||
@@ -36,17 +34,20 @@
|
||||
"@lezer/lr": "^1.3.3",
|
||||
"@react-hook/resize-observer": "^1.2.6",
|
||||
"@tailwindcss/container-queries": "^0.1.0",
|
||||
"@tanstack/query-sync-storage-persister": "^4.27.1",
|
||||
"@tanstack/react-query": "^4.28.0",
|
||||
"@tanstack/react-query-devtools": "^4.28.0",
|
||||
"@tanstack/react-query-persist-client": "^4.28.0",
|
||||
"@tauri-apps/api": "^1.5.3",
|
||||
"@tanstack/react-query": "^5.45.1",
|
||||
"@tauri-apps/api": "^2.0.0-beta.15",
|
||||
"@tauri-apps/plugin-clipboard-manager": "^2.1.0-beta.5",
|
||||
"@tauri-apps/plugin-dialog": "^2.0.0-beta.7",
|
||||
"@tauri-apps/plugin-fs": "^2.0.0-beta.7",
|
||||
"@tauri-apps/plugin-os": "^2.0.0-beta.7",
|
||||
"@tauri-apps/plugin-shell": "^2.0.0-beta.8",
|
||||
"buffer": "^6.0.3",
|
||||
"classnames": "^2.3.2",
|
||||
"cm6-graphql": "^0.0.9",
|
||||
"codemirror": "^6.0.1",
|
||||
"codemirror-json-schema": "^0.6.1",
|
||||
"date-fns": "^3.3.1",
|
||||
"fast-fuzzy": "^1.12.0",
|
||||
"focus-trap-react": "^10.1.1",
|
||||
"format-graphql": "^1.4.0",
|
||||
"framer-motion": "^9.0.4",
|
||||
@@ -59,16 +60,18 @@
|
||||
"react-dnd-html5-backend": "^16.0.1",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-helmet-async": "^1.3.0",
|
||||
"react-pdf": "^9.0.0",
|
||||
"react-router-dom": "^6.8.1",
|
||||
"react-use": "^17.4.0",
|
||||
"slugify": "^1.6.6",
|
||||
"tauri-plugin-log-api": "github:tauri-apps/tauri-plugin-log#v1",
|
||||
"tauri-plugin-log-api": "github:tauri-apps/tauri-plugin-log#v2",
|
||||
"uuid": "^9.0.0",
|
||||
"xml-formatter": "^3.6.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tailwindcss/nesting": "^0.0.0-insiders.565cd3e",
|
||||
"@tauri-apps/cli": "^1.5.10",
|
||||
"@tanstack/react-query-devtools": "^5.45.1",
|
||||
"@tauri-apps/cli": "^2.0.0-beta.22",
|
||||
"@types/node": "^18.7.10",
|
||||
"@types/papaparse": "^5.3.7",
|
||||
"@types/parse-color": "^1.0.1",
|
||||
@@ -80,6 +83,7 @@
|
||||
"@typescript-eslint/parser": "^7.0.2",
|
||||
"@vitejs/plugin-react": "^4.2.1",
|
||||
"autoprefixer": "^10.4.13",
|
||||
"decompress": "^4.2.1",
|
||||
"eslint": "^8.34.0",
|
||||
"eslint-config-prettier": "^8.6.0",
|
||||
"eslint-plugin-import": "^2.27.5",
|
||||
@@ -87,18 +91,21 @@
|
||||
"eslint-plugin-react": "^7.32.2",
|
||||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"husky": "^8.0.3",
|
||||
"internal-ip": "^8.0.0",
|
||||
"lint-staged": "^15.0.2",
|
||||
"nodejs-file-downloader": "^4.13.0",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"postcss": "^8.4.21",
|
||||
"postcss-nesting": "^11.2.1",
|
||||
"prettier": "^2.8.4",
|
||||
"react-devtools": "^4.27.2",
|
||||
"rimraf": "^5.0.7",
|
||||
"tailwindcss": "^3.2.7",
|
||||
"typescript": "^5.3.3",
|
||||
"vite": "^5.1.1",
|
||||
"typescript": "^5.4.5",
|
||||
"vite": "^5.0.0",
|
||||
"vite-plugin-static-copy": "^1.0.5",
|
||||
"vite-plugin-svgr": "^4.2.0",
|
||||
"vite-plugin-top-level-await": "^1.4.1",
|
||||
"vitest": "^1.3.0"
|
||||
"vite-plugin-top-level-await": "^1.4.1"
|
||||
},
|
||||
"lint-staged": {
|
||||
"*.{ts,tsx}": "eslint --cache --fix",
|
||||
|
||||
3 plugin-runtime/.gitignore (vendored, new file)
@@ -0,0 +1,3 @@
build
node_modules
*.blob
5 plugin-runtime/nodemon.json (new file)
@@ -0,0 +1,5 @@
{
  "watch": ["src"],
  "ext": "ts",
  "exec": "node -r ts-node/register ./src/index.ts"
}
3004 plugin-runtime/package-lock.json (generated, new file)
File diff suppressed because it is too large.
23 plugin-runtime/package.json (new file)
@@ -0,0 +1,23 @@
{
  "name": "@yaak/plugin-runtime",
  "scripts": {
    "dev": "nodemon",
    "build": "run-p build:*",
    "build:main": "esbuild src/index.ts --bundle --platform=node --outfile=build/index.cjs",
    "build:worker": "esbuild src/index.worker.ts --bundle --platform=node --outfile=build/index.worker.cjs",
    "build:proto": "grpc_tools_node_protoc --ts_proto_out=src/gen --ts_proto_opt=outputServices=nice-grpc,outputServices=generic-definitions,useExactTypes=false --proto_path=../proto ../proto/plugins/*.proto"
  },
  "dependencies": {
    "long": "^5.2.3",
    "nice-grpc": "^2.1.9",
    "protobufjs": "^7.3.2"
  },
  "devDependencies": {
    "grpc-tools": "^1.12.4",
    "nodemon": "^3.1.4",
    "npm-run-all": "^4.1.5",
    "ts-node": "^10.9.2",
    "ts-proto": "^1.180.0",
    "typescript": "^5.5.2"
  }
}
92 plugin-runtime/src/PluginHandle.ts (new file)
@@ -0,0 +1,92 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import path from 'node:path';
|
||||
import { Worker } from 'node:worker_threads';
|
||||
import { PluginInfo } from './plugins';
|
||||
|
||||
export interface ParentToWorkerEvent<T = any> {
|
||||
callbackId: string;
|
||||
name: string;
|
||||
payload: T;
|
||||
}
|
||||
|
||||
export type WorkerToParentSuccessEvent<T> = {
|
||||
callbackId: string;
|
||||
payload: T;
|
||||
};
|
||||
|
||||
export type WorkerToParentErrorEvent = {
|
||||
callbackId: string;
|
||||
error: string;
|
||||
};
|
||||
|
||||
export type WorkerToParentEvent<T = any> = WorkerToParentErrorEvent | WorkerToParentSuccessEvent<T>;
|
||||
|
||||
export class PluginHandle {
|
||||
readonly pluginDir: string;
|
||||
readonly #worker: Worker;
|
||||
|
||||
constructor(pluginDir: string) {
|
||||
this.pluginDir = pluginDir;
|
||||
|
||||
const workerPath = path.join(__dirname, 'index.worker.cjs');
|
||||
this.#worker = new Worker(workerPath, {
|
||||
workerData: {
|
||||
pluginDir: this.pluginDir,
|
||||
},
|
||||
});
|
||||
|
||||
this.#worker.on('error', this.#handleError.bind(this));
|
||||
this.#worker.on('exit', this.#handleExit.bind(this));
|
||||
}
|
||||
|
||||
async getInfo(): Promise<PluginInfo> {
|
||||
return this.#callPlugin('info', null);
|
||||
}
|
||||
|
||||
async runResponseFilter({ filter, body }: { filter: string; body: string }): Promise<string> {
|
||||
return this.#callPlugin('run-filter', { filter, body });
|
||||
}
|
||||
|
||||
async runExport(request: any): Promise<string> {
|
||||
return this.#callPlugin('run-export', request);
|
||||
}
|
||||
|
||||
async runImport(data: string): Promise<string> {
|
||||
const result = await this.#callPlugin('run-import', data);
|
||||
|
||||
// Plugin returns object, but we convert to string
|
||||
return JSON.stringify(result, null, 2);
|
||||
}
|
||||
|
||||
#callPlugin<P, R>(name: string, payload: P): Promise<R> {
|
||||
const callbackId = `cb_${randomUUID().replaceAll('-', '')}`;
|
||||
return new Promise((resolve, reject) => {
|
||||
const cb = (e: WorkerToParentEvent<R>) => {
|
||||
if (e.callbackId !== callbackId) return;
|
||||
|
||||
if ('error' in e) {
|
||||
reject(e.error);
|
||||
} else {
|
||||
resolve(e.payload as R);
|
||||
}
|
||||
|
||||
this.#worker.removeListener('message', cb);
|
||||
};
|
||||
|
||||
this.#worker.addListener('message', cb);
|
||||
this.#worker.postMessage({ callbackId, name, payload });
|
||||
});
|
||||
}
|
||||
|
||||
async #handleError(err: Error) {
|
||||
console.error('Plugin errored', this.pluginDir, err);
|
||||
}
|
||||
|
||||
async #handleExit(code: number) {
|
||||
if (code === 0) {
|
||||
console.log('Plugin exited successfully', this.pluginDir);
|
||||
} else {
|
||||
console.log('Plugin exited with error', code, this.pluginDir);
|
||||
}
|
||||
}
|
||||
}
|
||||
44 plugin-runtime/src/PluginManager.ts (new file)
@@ -0,0 +1,44 @@
import { PluginHandle } from './PluginHandle';
import { loadPlugins, PluginInfo } from './plugins';

export class PluginManager {
  #handles: PluginHandle[] | null = null;
  static #instance: PluginManager | null = null;

  public static instance(): PluginManager {
    if (PluginManager.#instance == null) {
      PluginManager.#instance = new PluginManager();
      PluginManager.#instance.plugins(); // Trigger workers to boot, as it takes a few seconds
    }
    return PluginManager.#instance;
  }

  async plugins(): Promise<PluginHandle[]> {
    this.#handles = this.#handles ?? loadPlugins();
    return this.#handles;
  }

  async #pluginsWithInfo(): Promise<{ plugin: PluginHandle; info: PluginInfo }[]> {
    const plugins = await this.plugins();
    return Promise.all(plugins.map(async (plugin) => ({ plugin, info: await plugin.getInfo() })));
  }

  async pluginsWith(capability: PluginInfo['capabilities'][0]): Promise<PluginHandle[]> {
    return (await this.#pluginsWithInfo())
      .filter((v) => v.info.capabilities.includes(capability))
      .map((v) => v.plugin);
  }

  async plugin(name: string): Promise<PluginHandle | null> {
    return (await this.#pluginsWithInfo()).find((v) => v.info.name === name)?.plugin ?? null;
  }

  async pluginOrThrow(name: string): Promise<PluginHandle> {
    const plugin = await this.plugin(name);
    if (plugin == null) {
      throw new Error(`Failed to find plugin by ${name}`);
    }

    return plugin;
  }
}
432 plugin-runtime/src/gen/plugins/runtime.ts (new file)
@@ -0,0 +1,432 @@
|
||||
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
|
||||
// versions:
|
||||
// protoc-gen-ts_proto v1.180.0
|
||||
// protoc v3.19.1
|
||||
// source: plugins/runtime.proto
|
||||
|
||||
/* eslint-disable */
|
||||
import { type CallContext, type CallOptions } from "nice-grpc-common";
|
||||
import * as _m0 from "protobufjs/minimal";
|
||||
|
||||
export const protobufPackage = "yaak.plugins.runtime";
|
||||
|
||||
export interface PluginInfo {
|
||||
plugin: string;
|
||||
}
|
||||
|
||||
export interface HookResponse {
|
||||
info: PluginInfo | undefined;
|
||||
data: string;
|
||||
}
|
||||
|
||||
export interface HookImportRequest {
|
||||
data: string;
|
||||
}
|
||||
|
||||
export interface HookResponseFilterRequest {
|
||||
filter: string;
|
||||
body: string;
|
||||
contentType: string;
|
||||
}
|
||||
|
||||
export interface HookExportRequest {
|
||||
request: string;
|
||||
}
|
||||
|
||||
function createBasePluginInfo(): PluginInfo {
|
||||
return { plugin: "" };
|
||||
}
|
||||
|
||||
export const PluginInfo = {
|
||||
encode(message: PluginInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
|
||||
if (message.plugin !== "") {
|
||||
writer.uint32(10).string(message.plugin);
|
||||
}
|
||||
return writer;
|
||||
},
|
||||
|
||||
decode(input: _m0.Reader | Uint8Array, length?: number): PluginInfo {
|
||||
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
|
||||
let end = length === undefined ? reader.len : reader.pos + length;
|
||||
const message = createBasePluginInfo();
|
||||
while (reader.pos < end) {
|
||||
const tag = reader.uint32();
|
||||
switch (tag >>> 3) {
|
||||
case 1:
|
||||
if (tag !== 10) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.plugin = reader.string();
|
||||
continue;
|
||||
}
|
||||
if ((tag & 7) === 4 || tag === 0) {
|
||||
break;
|
||||
}
|
||||
reader.skipType(tag & 7);
|
||||
}
|
||||
return message;
|
||||
},
|
||||
|
||||
fromJSON(object: any): PluginInfo {
|
||||
return { plugin: isSet(object.plugin) ? globalThis.String(object.plugin) : "" };
|
||||
},
|
||||
|
||||
toJSON(message: PluginInfo): unknown {
|
||||
const obj: any = {};
|
||||
if (message.plugin !== "") {
|
||||
obj.plugin = message.plugin;
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
|
||||
create(base?: DeepPartial<PluginInfo>): PluginInfo {
|
||||
return PluginInfo.fromPartial(base ?? {});
|
||||
},
|
||||
fromPartial(object: DeepPartial<PluginInfo>): PluginInfo {
|
||||
const message = createBasePluginInfo();
|
||||
message.plugin = object.plugin ?? "";
|
||||
return message;
|
||||
},
|
||||
};
|
||||
|
||||
function createBaseHookResponse(): HookResponse {
|
||||
return { info: undefined, data: "" };
|
||||
}
|
||||
|
||||
export const HookResponse = {
|
||||
encode(message: HookResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
|
||||
if (message.info !== undefined) {
|
||||
PluginInfo.encode(message.info, writer.uint32(10).fork()).ldelim();
|
||||
}
|
||||
if (message.data !== "") {
|
||||
writer.uint32(18).string(message.data);
|
||||
}
|
||||
return writer;
|
||||
},
|
||||
|
||||
decode(input: _m0.Reader | Uint8Array, length?: number): HookResponse {
|
||||
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
|
||||
let end = length === undefined ? reader.len : reader.pos + length;
|
||||
const message = createBaseHookResponse();
|
||||
while (reader.pos < end) {
|
||||
const tag = reader.uint32();
|
||||
switch (tag >>> 3) {
|
||||
case 1:
|
||||
if (tag !== 10) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.info = PluginInfo.decode(reader, reader.uint32());
|
||||
continue;
|
||||
case 2:
|
||||
if (tag !== 18) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.data = reader.string();
|
||||
continue;
|
||||
}
|
||||
if ((tag & 7) === 4 || tag === 0) {
|
||||
break;
|
||||
}
|
||||
reader.skipType(tag & 7);
|
||||
}
|
||||
return message;
|
||||
},
|
||||
|
||||
fromJSON(object: any): HookResponse {
|
||||
return {
|
||||
info: isSet(object.info) ? PluginInfo.fromJSON(object.info) : undefined,
|
||||
data: isSet(object.data) ? globalThis.String(object.data) : "",
|
||||
};
|
||||
},
|
||||
|
||||
toJSON(message: HookResponse): unknown {
|
||||
const obj: any = {};
|
||||
if (message.info !== undefined) {
|
||||
obj.info = PluginInfo.toJSON(message.info);
|
||||
}
|
||||
if (message.data !== "") {
|
||||
obj.data = message.data;
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
|
||||
create(base?: DeepPartial<HookResponse>): HookResponse {
|
||||
return HookResponse.fromPartial(base ?? {});
|
||||
},
|
||||
fromPartial(object: DeepPartial<HookResponse>): HookResponse {
|
||||
const message = createBaseHookResponse();
|
||||
message.info = (object.info !== undefined && object.info !== null)
|
||||
? PluginInfo.fromPartial(object.info)
|
||||
: undefined;
|
||||
message.data = object.data ?? "";
|
||||
return message;
|
||||
},
|
||||
};
|
||||
|
||||
function createBaseHookImportRequest(): HookImportRequest {
|
||||
return { data: "" };
|
||||
}
|
||||
|
||||
export const HookImportRequest = {
|
||||
encode(message: HookImportRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
|
||||
if (message.data !== "") {
|
||||
writer.uint32(10).string(message.data);
|
||||
}
|
||||
return writer;
|
||||
},
|
||||
|
||||
decode(input: _m0.Reader | Uint8Array, length?: number): HookImportRequest {
|
||||
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
|
||||
let end = length === undefined ? reader.len : reader.pos + length;
|
||||
const message = createBaseHookImportRequest();
|
||||
while (reader.pos < end) {
|
||||
const tag = reader.uint32();
|
||||
switch (tag >>> 3) {
|
||||
case 1:
|
||||
if (tag !== 10) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.data = reader.string();
|
||||
continue;
|
||||
}
|
||||
if ((tag & 7) === 4 || tag === 0) {
|
||||
break;
|
||||
}
|
||||
reader.skipType(tag & 7);
|
||||
}
|
||||
return message;
|
||||
},
|
||||
|
||||
fromJSON(object: any): HookImportRequest {
|
||||
return { data: isSet(object.data) ? globalThis.String(object.data) : "" };
|
||||
},
|
||||
|
||||
toJSON(message: HookImportRequest): unknown {
|
||||
const obj: any = {};
|
||||
if (message.data !== "") {
|
||||
obj.data = message.data;
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
|
||||
create(base?: DeepPartial<HookImportRequest>): HookImportRequest {
|
||||
return HookImportRequest.fromPartial(base ?? {});
|
||||
},
|
||||
fromPartial(object: DeepPartial<HookImportRequest>): HookImportRequest {
|
||||
const message = createBaseHookImportRequest();
|
||||
message.data = object.data ?? "";
|
||||
return message;
|
||||
},
|
||||
};
|
||||
|
||||
function createBaseHookResponseFilterRequest(): HookResponseFilterRequest {
|
||||
return { filter: "", body: "", contentType: "" };
|
||||
}
|
||||
|
||||
export const HookResponseFilterRequest = {
|
||||
encode(message: HookResponseFilterRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
|
||||
if (message.filter !== "") {
|
||||
writer.uint32(10).string(message.filter);
|
||||
}
|
||||
if (message.body !== "") {
|
||||
writer.uint32(18).string(message.body);
|
||||
}
|
||||
if (message.contentType !== "") {
|
||||
writer.uint32(26).string(message.contentType);
|
||||
}
|
||||
return writer;
|
||||
},
|
||||
|
||||
decode(input: _m0.Reader | Uint8Array, length?: number): HookResponseFilterRequest {
|
||||
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
|
||||
let end = length === undefined ? reader.len : reader.pos + length;
|
||||
const message = createBaseHookResponseFilterRequest();
|
||||
while (reader.pos < end) {
|
||||
const tag = reader.uint32();
|
||||
switch (tag >>> 3) {
|
||||
case 1:
|
||||
if (tag !== 10) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.filter = reader.string();
|
||||
continue;
|
||||
case 2:
|
||||
if (tag !== 18) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.body = reader.string();
|
||||
continue;
|
||||
case 3:
|
||||
if (tag !== 26) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.contentType = reader.string();
|
||||
continue;
|
||||
}
|
||||
if ((tag & 7) === 4 || tag === 0) {
|
||||
break;
|
||||
}
|
||||
reader.skipType(tag & 7);
|
||||
}
|
||||
return message;
|
||||
},
|
||||
|
||||
fromJSON(object: any): HookResponseFilterRequest {
|
||||
return {
|
||||
filter: isSet(object.filter) ? globalThis.String(object.filter) : "",
|
||||
body: isSet(object.body) ? globalThis.String(object.body) : "",
|
||||
contentType: isSet(object.contentType) ? globalThis.String(object.contentType) : "",
|
||||
};
|
||||
},
|
||||
|
||||
toJSON(message: HookResponseFilterRequest): unknown {
|
||||
const obj: any = {};
|
||||
if (message.filter !== "") {
|
||||
obj.filter = message.filter;
|
||||
}
|
||||
if (message.body !== "") {
|
||||
obj.body = message.body;
|
||||
}
|
||||
if (message.contentType !== "") {
|
||||
obj.contentType = message.contentType;
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
|
||||
create(base?: DeepPartial<HookResponseFilterRequest>): HookResponseFilterRequest {
|
||||
return HookResponseFilterRequest.fromPartial(base ?? {});
|
||||
},
|
||||
fromPartial(object: DeepPartial<HookResponseFilterRequest>): HookResponseFilterRequest {
|
||||
const message = createBaseHookResponseFilterRequest();
|
||||
message.filter = object.filter ?? "";
|
||||
message.body = object.body ?? "";
|
||||
message.contentType = object.contentType ?? "";
|
||||
return message;
|
||||
},
|
||||
};
|
||||
|
||||
function createBaseHookExportRequest(): HookExportRequest {
|
||||
return { request: "" };
|
||||
}
|
||||
|
||||
export const HookExportRequest = {
|
||||
encode(message: HookExportRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
|
||||
if (message.request !== "") {
|
||||
writer.uint32(10).string(message.request);
|
||||
}
|
||||
return writer;
|
||||
},
|
||||
|
||||
decode(input: _m0.Reader | Uint8Array, length?: number): HookExportRequest {
|
||||
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input);
|
||||
let end = length === undefined ? reader.len : reader.pos + length;
|
||||
const message = createBaseHookExportRequest();
|
||||
while (reader.pos < end) {
|
||||
const tag = reader.uint32();
|
||||
switch (tag >>> 3) {
|
||||
case 1:
|
||||
if (tag !== 10) {
|
||||
break;
|
||||
}
|
||||
|
||||
message.request = reader.string();
|
||||
continue;
|
||||
}
|
||||
if ((tag & 7) === 4 || tag === 0) {
|
||||
break;
|
||||
}
|
||||
reader.skipType(tag & 7);
|
||||
}
|
||||
return message;
|
||||
},
|
||||
|
||||
fromJSON(object: any): HookExportRequest {
|
||||
return { request: isSet(object.request) ? globalThis.String(object.request) : "" };
|
||||
},
|
||||
|
||||
toJSON(message: HookExportRequest): unknown {
|
||||
const obj: any = {};
|
||||
if (message.request !== "") {
|
||||
obj.request = message.request;
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
|
||||
create(base?: DeepPartial<HookExportRequest>): HookExportRequest {
|
||||
return HookExportRequest.fromPartial(base ?? {});
|
||||
},
|
||||
fromPartial(object: DeepPartial<HookExportRequest>): HookExportRequest {
|
||||
const message = createBaseHookExportRequest();
|
||||
message.request = object.request ?? "";
|
||||
return message;
|
||||
},
|
||||
};
|
||||
|
||||
export type PluginRuntimeDefinition = typeof PluginRuntimeDefinition;
|
||||
export const PluginRuntimeDefinition = {
|
||||
name: "PluginRuntime",
|
||||
fullName: "yaak.plugins.runtime.PluginRuntime",
|
||||
methods: {
|
||||
hookImport: {
|
||||
name: "hookImport",
|
||||
requestType: HookImportRequest,
|
||||
requestStream: false,
|
||||
responseType: HookResponse,
|
||||
responseStream: false,
|
||||
options: {},
|
||||
},
|
||||
hookExport: {
|
||||
name: "hookExport",
|
||||
requestType: HookExportRequest,
|
||||
requestStream: false,
|
||||
responseType: HookResponse,
|
||||
responseStream: false,
|
||||
options: {},
|
||||
},
|
||||
hookResponseFilter: {
|
||||
name: "hookResponseFilter",
|
||||
requestType: HookResponseFilterRequest,
|
||||
requestStream: false,
|
||||
responseType: HookResponse,
|
||||
responseStream: false,
|
||||
options: {},
|
||||
},
|
||||
},
|
||||
} as const;
|
||||
|
||||
export interface PluginRuntimeServiceImplementation<CallContextExt = {}> {
|
||||
hookImport(request: HookImportRequest, context: CallContext & CallContextExt): Promise<DeepPartial<HookResponse>>;
|
||||
hookExport(request: HookExportRequest, context: CallContext & CallContextExt): Promise<DeepPartial<HookResponse>>;
|
||||
hookResponseFilter(
|
||||
request: HookResponseFilterRequest,
|
||||
context: CallContext & CallContextExt,
|
||||
): Promise<DeepPartial<HookResponse>>;
|
||||
}
|
||||
|
||||
export interface PluginRuntimeClient<CallOptionsExt = {}> {
|
||||
hookImport(request: DeepPartial<HookImportRequest>, options?: CallOptions & CallOptionsExt): Promise<HookResponse>;
|
||||
hookExport(request: DeepPartial<HookExportRequest>, options?: CallOptions & CallOptionsExt): Promise<HookResponse>;
|
||||
hookResponseFilter(
|
||||
request: DeepPartial<HookResponseFilterRequest>,
|
||||
options?: CallOptions & CallOptionsExt,
|
||||
): Promise<HookResponse>;
|
||||
}
|
||||
|
||||
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
|
||||
|
||||
export type DeepPartial<T> = T extends Builtin ? T
|
||||
: T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
|
||||
: T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
|
||||
: T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
|
||||
: Partial<T>;
|
||||
|
||||
function isSet(value: any): boolean {
|
||||
return value !== null && value !== undefined;
|
||||
}
|
||||
87 plugin-runtime/src/index.ts (new file)
@@ -0,0 +1,87 @@
import { isAbortError } from 'abort-controller-x';
import { createServer, ServerError, ServerMiddlewareCall, Status } from 'nice-grpc';
import { CallContext } from 'nice-grpc-common';
import * as fs from 'node:fs';
import {
  DeepPartial,
  HookExportRequest,
  HookImportRequest,
  HookResponse,
  HookResponseFilterRequest,
  PluginRuntimeDefinition,
  PluginRuntimeServiceImplementation,
} from './gen/plugins/runtime';
import { PluginManager } from './PluginManager';

class PluginRuntimeService implements PluginRuntimeServiceImplementation {
  #manager: PluginManager;

  constructor() {
    this.#manager = PluginManager.instance();
  }

  async hookExport(request: HookExportRequest): Promise<DeepPartial<HookResponse>> {
    const plugin = await this.#manager.pluginOrThrow('exporter-curl');
    const data = await plugin.runExport(JSON.parse(request.request));
    const info = { plugin: (await plugin.getInfo()).name };
    return { info, data };
  }

  async hookImport(request: HookImportRequest): Promise<DeepPartial<HookResponse>> {
    const plugins = await this.#manager.pluginsWith('import');
    for (const p of plugins) {
      const data = await p.runImport(request.data);
      if (data != null && data !== 'null') {
        const info = { plugin: (await p.getInfo()).name };
        return { info, data };
      }
    }

    throw new ServerError(Status.UNKNOWN, 'No importers found for data');
  }

  async hookResponseFilter(request: HookResponseFilterRequest): Promise<DeepPartial<HookResponse>> {
    const pluginName = request.contentType.includes('json') ? 'filter-jsonpath' : 'filter-xpath';
    const plugin = await this.#manager.pluginOrThrow(pluginName);
    const data = await plugin.runResponseFilter(request);
    const info = { plugin: (await plugin.getInfo()).name };
    return { info, data };
  }
}

let server = createServer();

async function* errorHandlingMiddleware<Request, Response>(
  call: ServerMiddlewareCall<Request, Response>,
  context: CallContext,
) {
  try {
    return yield* call.next(call.request, context);
  } catch (error: unknown) {
    if (error instanceof ServerError || isAbortError(error)) {
      throw error;
    }

    let details = String(error);

    if (process.env.NODE_ENV === 'development') {
      // @ts-ignore
      details += `: ${error.stack}`;
    }

    throw new ServerError(Status.UNKNOWN, details);
  }
}

server = server.use(errorHandlingMiddleware);
server.add(PluginRuntimeDefinition, new PluginRuntimeService());

// Start on random port if YAAK_GRPC_PORT_FILE_PATH is set, or :4000
const addr = process.env.YAAK_GRPC_PORT_FILE_PATH ? 'localhost:0' : 'localhost:4000';
server.listen(addr).then((port) => {
  console.log('gRPC server listening on', `http://localhost:${port}`);
  if (process.env.YAAK_GRPC_PORT_FILE_PATH) {
    console.log('Wrote port file to', process.env.YAAK_GRPC_PORT_FILE_PATH);
    fs.writeFileSync(process.env.YAAK_GRPC_PORT_FILE_PATH, JSON.stringify({ port }, null, 2));
  }
});
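When YAAK_GRPC_PORT_FILE_PATH is set, the listen() callback above writes the randomly assigned port to that file as JSON ({ "port": <number> }). A minimal sketch of how an embedding process could read it back, illustrative only:

// Illustrative only; reads the port file written by the listen() callback above.
import * as fs from 'node:fs';

const portFile = process.env.YAAK_GRPC_PORT_FILE_PATH!;
const { port } = JSON.parse(fs.readFileSync(portFile, 'utf8'));
console.log('plugin runtime is reachable at', `localhost:${port}`);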
76  plugin-runtime/src/index.worker.ts  Normal file
@@ -0,0 +1,76 @@
import { readFileSync } from 'node:fs';
import path from 'node:path';
import { parentPort, workerData } from 'node:worker_threads';
import { ParentToWorkerEvent } from './PluginHandle';
import { PluginInfo } from './plugins';

new Promise<void>(async (resolve, reject) => {
  const { pluginDir } = workerData;
  const pathMod = path.join(pluginDir, 'build/index.js');
  const pathPkg = path.join(pluginDir, 'package.json');

  let pkg: { [x: string]: any };
  try {
    pkg = JSON.parse(readFileSync(pathPkg, 'utf8'));
  } catch (err) {
    // TODO: Do something better here
    reject(err);
    return;
  }

  const mod = (await import(`file://${pathMod}`)).default ?? {};

  const info: PluginInfo = {
    capabilities: [],
    name: pkg['name'] ?? 'n/a',
    dir: pluginDir,
  };

  if (typeof mod['pluginHookImport'] === 'function') {
    info.capabilities.push('import');
  }

  if (typeof mod['pluginHookExport'] === 'function') {
    info.capabilities.push('export');
  }

  if (typeof mod['pluginHookResponseFilter'] === 'function') {
    info.capabilities.push('filter');
  }

  console.log('Loaded plugin', info.name, info.capabilities, info.dir);

  function reply<T>(originalMsg: ParentToWorkerEvent, payload: T) {
    parentPort!.postMessage({ payload, callbackId: originalMsg.callbackId });
  }

  function replyErr(originalMsg: ParentToWorkerEvent, error: unknown) {
    parentPort!.postMessage({
      error: String(error),
      callbackId: originalMsg.callbackId,
    });
  }

  parentPort!.on('message', async (msg: ParentToWorkerEvent) => {
    try {
      const ctx = { todo: 'implement me' };
      if (msg.name === 'run-import') {
        reply(msg, await mod.pluginHookImport(ctx, msg.payload));
      } else if (msg.name === 'run-filter') {
        reply(msg, await mod.pluginHookResponseFilter(ctx, msg.payload));
      } else if (msg.name === 'run-export') {
        reply(msg, await mod.pluginHookExport(ctx, msg.payload));
      } else if (msg.name === 'info') {
        reply(msg, info);
      } else {
        console.log('Unknown message', msg);
      }
    } catch (err: unknown) {
      replyErr(msg, err);
    }
  });

  resolve();
}).catch((err) => {
  console.log('failed to boot plugin', err);
});
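The worker above decides a plugin's capabilities by probing build/index.js for the named exports pluginHookImport, pluginHookExport, and pluginHookResponseFilter. A minimal plugin module satisfying that contract might look like the following sketch; the argument and return shapes shown are assumptions based on how the hooks are invoked above and on the bundled plugins later in this diff:

// Hypothetical plugins/my-plugin/src/index.js (not part of this diff)
export function pluginHookImport(ctx, data) {
  // Return undefined when the data isn't recognized so other importers get a chance
  if (!data.trim().startsWith('{')) return undefined;
  return { resources: { httpRequests: [] } };
}

export function pluginHookExport(ctx, request) {
  // Turn a request into exported text, e.g. a curl command (shape assumed for illustration)
  return `curl -X ${request.method ?? 'GET'} '${request.url}'`;
}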
18  plugin-runtime/src/plugins.ts  Normal file
@@ -0,0 +1,18 @@
import * as fs from 'node:fs';
import path from 'node:path';
import { PluginHandle } from './PluginHandle';

export interface PluginInfo {
  name: string;
  dir: string;
  capabilities: ('import' | 'export' | 'filter')[];
}

export function loadPlugins(): PluginHandle[] {
  const pluginsDir = process.env.YAAK_PLUGINS_DIR;
  if (!pluginsDir) throw new Error('YAAK_PLUGINS_DIR is not set');
  console.log('Loading plugins from', pluginsDir);

  const pluginDirs = fs.readdirSync(pluginsDir).map((p) => path.join(pluginsDir, p));
  return pluginDirs.map((pluginDir) => new PluginHandle(pluginDir));
}
25  plugin-runtime/tsconfig.json  Normal file
@@ -0,0 +1,25 @@
{
  "compilerOptions": {
    "module": "node16",
    "strict": true,
    "esModuleInterop": true,
    "allowSyntheticDefaultImports": true,
    "target": "es2021",
    "lib": ["es2021"],
    "noImplicitAny": false,
    "moduleResolution": "node",
    "sourceMap": true,
    "outDir": "dist",
    "baseUrl": ".",
    "skipLibCheck": true,
    "paths": {
      "*": [
        "node_modules/*",
        "src/types/*"
      ]
    }
  },
  "include": [
    "src/**/*"
  ]
}
173  plugins/filter-jsonpath/package-lock.json  generated
@@ -1,173 +0,0 @@
|
||||
{
|
||||
"name": "filter-jsonpath",
|
||||
"version": "0.0.1",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "filter-jsonpath",
|
||||
"version": "0.0.1",
|
||||
"dependencies": {
|
||||
"jsonpath": "^1.1.1"
|
||||
}
|
||||
},
|
||||
"node_modules/deep-is": {
|
||||
"version": "0.1.4",
|
||||
"resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
|
||||
"integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="
|
||||
},
|
||||
"node_modules/escodegen": {
|
||||
"version": "1.14.3",
|
||||
"resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz",
|
||||
"integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==",
|
||||
"dependencies": {
|
||||
"esprima": "^4.0.1",
|
||||
"estraverse": "^4.2.0",
|
||||
"esutils": "^2.0.2",
|
||||
"optionator": "^0.8.1"
|
||||
},
|
||||
"bin": {
|
||||
"escodegen": "bin/escodegen.js",
|
||||
"esgenerate": "bin/esgenerate.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"source-map": "~0.6.1"
|
||||
}
|
||||
},
|
||||
"node_modules/escodegen/node_modules/esprima": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
|
||||
"integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
|
||||
"bin": {
|
||||
"esparse": "bin/esparse.js",
|
||||
"esvalidate": "bin/esvalidate.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/esprima": {
|
||||
"version": "1.2.2",
|
||||
"resolved": "https://registry.npmjs.org/esprima/-/esprima-1.2.2.tgz",
|
||||
"integrity": "sha512-+JpPZam9w5DuJ3Q67SqsMGtiHKENSMRVoxvArfJZK01/BfLEObtZ6orJa/MtoGNR/rfMgp5837T41PAmTwAv/A==",
|
||||
"bin": {
|
||||
"esparse": "bin/esparse.js",
|
||||
"esvalidate": "bin/esvalidate.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/estraverse": {
|
||||
"version": "4.3.0",
|
||||
"resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz",
|
||||
"integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
|
||||
"engines": {
|
||||
"node": ">=4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/esutils": {
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
|
||||
"integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/fast-levenshtein": {
|
||||
"version": "2.0.6",
|
||||
"resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
|
||||
"integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="
|
||||
},
|
||||
"node_modules/jsonpath": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/jsonpath/-/jsonpath-1.1.1.tgz",
|
||||
"integrity": "sha512-l6Cg7jRpixfbgoWgkrl77dgEj8RPvND0wMH6TwQmi9Qs4TFfS9u5cUFnbeKTwj5ga5Y3BTGGNI28k117LJ009w==",
|
||||
"dependencies": {
|
||||
"esprima": "1.2.2",
|
||||
"static-eval": "2.0.2",
|
||||
"underscore": "1.12.1"
|
||||
}
|
||||
},
|
||||
"node_modules/levn": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz",
|
||||
"integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==",
|
||||
"dependencies": {
|
||||
"prelude-ls": "~1.1.2",
|
||||
"type-check": "~0.3.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/optionator": {
|
||||
"version": "0.8.3",
|
||||
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz",
|
||||
"integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==",
|
||||
"dependencies": {
|
||||
"deep-is": "~0.1.3",
|
||||
"fast-levenshtein": "~2.0.6",
|
||||
"levn": "~0.3.0",
|
||||
"prelude-ls": "~1.1.2",
|
||||
"type-check": "~0.3.2",
|
||||
"word-wrap": "~1.2.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/prelude-ls": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz",
|
||||
"integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==",
|
||||
"engines": {
|
||||
"node": ">= 0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
|
||||
"optional": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/static-eval": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/static-eval/-/static-eval-2.0.2.tgz",
|
||||
"integrity": "sha512-N/D219Hcr2bPjLxPiV+TQE++Tsmrady7TqAJugLy7Xk1EumfDWS/f5dtBbkRCGE7wKKXuYockQoj8Rm2/pVKyg==",
|
||||
"dependencies": {
|
||||
"escodegen": "^1.8.1"
|
||||
}
|
||||
},
|
||||
"node_modules/type-check": {
|
||||
"version": "0.3.2",
|
||||
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz",
|
||||
"integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==",
|
||||
"dependencies": {
|
||||
"prelude-ls": "~1.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/underscore": {
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.12.1.tgz",
|
||||
"integrity": "sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw=="
|
||||
},
|
||||
"node_modules/word-wrap": {
|
||||
"version": "1.2.5",
|
||||
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
|
||||
"integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"name": "filter-jsonpath",
|
||||
"version": "0.0.1",
|
||||
"dependencies": {
|
||||
"jsonpath": "^1.1.1"
|
||||
}
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
import jp from 'jsonpath';
|
||||
|
||||
export function pluginHookResponseFilter(filter, text) {
|
||||
let parsed;
|
||||
try {
|
||||
parsed = JSON.parse(text);
|
||||
} catch (e) {
|
||||
return;
|
||||
}
|
||||
const filtered = jp.query(parsed, filter);
|
||||
return { filtered: JSON.stringify(filtered, null, 2) };
|
||||
}
|
||||
@@ -1,13 +0,0 @@
|
||||
import { resolve } from 'path';
|
||||
import { defineConfig } from 'vite';
|
||||
|
||||
export default defineConfig({
|
||||
build: {
|
||||
lib: {
|
||||
entry: resolve(__dirname, 'src/index.js'),
|
||||
fileName: 'index',
|
||||
formats: ['es'],
|
||||
},
|
||||
outDir: resolve(__dirname, '../../src-tauri/plugins/filter-jsonpath'),
|
||||
},
|
||||
});
|
||||
32  plugins/filter-xpath/package-lock.json  generated
@@ -1,32 +0,0 @@
|
||||
{
|
||||
"name": "filter-xpath",
|
||||
"version": "0.0.1",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "filter-xpath",
|
||||
"version": "0.0.1",
|
||||
"dependencies": {
|
||||
"@xmldom/xmldom": "^0.8.10",
|
||||
"xpath": "^0.0.34"
|
||||
}
|
||||
},
|
||||
"node_modules/@xmldom/xmldom": {
|
||||
"version": "0.8.10",
|
||||
"resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz",
|
||||
"integrity": "sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==",
|
||||
"engines": {
|
||||
"node": ">=10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/xpath": {
|
||||
"version": "0.0.34",
|
||||
"resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.34.tgz",
|
||||
"integrity": "sha512-FxF6+rkr1rNSQrhUNYrAFJpRXNzlDoMxeXN5qI84939ylEv3qqPFKa85Oxr6tDaJKqwW6KKyo2v26TSv3k6LeA==",
|
||||
"engines": {
|
||||
"node": ">=0.6.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"name": "filter-xpath",
|
||||
"version": "0.0.1",
|
||||
"dependencies": {
|
||||
"@xmldom/xmldom": "^0.8.10",
|
||||
"xpath": "^0.0.34"
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
import xpath from 'xpath';
|
||||
import { DOMParser } from '@xmldom/xmldom';
|
||||
|
||||
export function pluginHookResponseFilter(filter, text) {
|
||||
const doc = new DOMParser().parseFromString(text, 'text/xml');
|
||||
const filtered = `${xpath.select(filter, doc)}`;
|
||||
return { filtered };
|
||||
}
|
||||
@@ -1,13 +0,0 @@
|
||||
import { resolve } from 'path';
|
||||
import { defineConfig } from 'vite';
|
||||
|
||||
export default defineConfig({
|
||||
build: {
|
||||
lib: {
|
||||
entry: resolve(__dirname, 'src/index.js'),
|
||||
fileName: 'index',
|
||||
formats: ['es'],
|
||||
},
|
||||
outDir: resolve(__dirname, '../../src-tauri/plugins/filter-xpath'),
|
||||
},
|
||||
});
|
||||
12  plugins/importer-insomnia/package-lock.json  generated
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"name": "importer-insomnia",
|
||||
"version": "0.0.1",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "importer-insomnia",
|
||||
"version": "0.0.1"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"name": "importer-insomnia",
|
||||
"version": "0.0.1"
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
export function isWorkspace(obj) {
|
||||
return isJSObject(obj) && obj._type === 'workspace';
|
||||
}
|
||||
|
||||
export function isRequestGroup(obj) {
|
||||
return isJSObject(obj) && obj._type === 'request_group';
|
||||
}
|
||||
|
||||
export function isHttpRequest(obj) {
|
||||
return isJSObject(obj) && obj._type === 'request';
|
||||
}
|
||||
|
||||
export function isGrpcRequest(obj) {
|
||||
return isJSObject(obj) && obj._type === 'grpc_request';
|
||||
}
|
||||
|
||||
export function isEnvironment(obj) {
|
||||
return isJSObject(obj) && obj._type === 'environment';
|
||||
}
|
||||
|
||||
export function isJSObject(obj) {
|
||||
return Object.prototype.toString.call(obj) === '[object Object]';
|
||||
}
|
||||
|
||||
export function isJSString(obj) {
|
||||
return Object.prototype.toString.call(obj) === '[object String]';
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
import { isJSString } from './types.js';
|
||||
|
||||
export function parseVariables(data) {
|
||||
return Object.entries(data).map(([name, value]) => ({
|
||||
enabled: true,
|
||||
name,
|
||||
value: `${value}`,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Insomnia syntax to Yaak syntax
|
||||
* @param {string} variable - Text to convert
|
||||
*/
|
||||
export function convertSyntax(variable) {
|
||||
if (!isJSString(variable)) return variable;
|
||||
return variable.replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, '${[$2]}');
|
||||
}
|
||||
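For illustration, the conversion above maps Insomnia template tags to Yaak's template syntax; a couple of example calls, assuming the function is imported as-is:

// Illustrative inputs and outputs for convertSyntax() above
convertSyntax('{{_.base_url}}/users'); // -> '${[base_url]}/users'
convertSyntax(42);                     // -> 42 (non-strings pass through unchanged)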
@@ -1,21 +0,0 @@
|
||||
/**
|
||||
* Import an Insomnia environment object.
|
||||
* @param {Object} e - The environment object to import.
|
||||
* @param workspaceId - Workspace to import into.
|
||||
*/
|
||||
export function importEnvironment(e, workspaceId) {
|
||||
console.log('IMPORTING Environment', e._id, e.name, JSON.stringify(e, null, 2));
|
||||
return {
|
||||
id: e._id,
|
||||
createdAt: new Date(e.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(e.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
workspaceId,
|
||||
model: 'environment',
|
||||
name: e.name,
|
||||
variables: Object.entries(e.data).map(([name, value]) => ({
|
||||
enabled: true,
|
||||
name,
|
||||
value: `${value}`,
|
||||
})),
|
||||
};
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
/**
|
||||
* Import an Insomnia folder object.
|
||||
* @param {Object} f - The environment object to import.
|
||||
* @param workspaceId - Workspace to import into.
|
||||
*/
|
||||
export function importFolder(f, workspaceId) {
|
||||
console.log('IMPORTING FOLDER', f._id, f.name, JSON.stringify(f, null, 2));
|
||||
return {
|
||||
id: f._id,
|
||||
createdAt: new Date(f.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(f.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
folderId: f.parentId === workspaceId ? null : f.parentId,
|
||||
workspaceId,
|
||||
model: 'folder',
|
||||
name: f.name,
|
||||
};
|
||||
}
|
||||
@@ -1,37 +0,0 @@
|
||||
import { convertSyntax } from '../helpers/variables.js';
|
||||
|
||||
/**
|
||||
* Import an Insomnia GRPC request object.
|
||||
* @param {Object} r - The request object to import.
|
||||
* @param workspaceId - The workspace ID to use for the request.
|
||||
* @param {number} sortPriority - The sort priority to use for the request.
|
||||
*/
|
||||
export function importGrpcRequest(r, workspaceId, sortPriority = 0) {
|
||||
console.log('IMPORTING GRPC REQUEST', r._id, r.name, JSON.stringify(r, null, 2));
|
||||
|
||||
const parts = r.protoMethodName.split('/').filter((p) => p !== '');
|
||||
const service = parts[0] ?? null;
|
||||
const method = parts[1] ?? null;
|
||||
|
||||
return {
|
||||
id: r._id,
|
||||
createdAt: new Date(r.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(r.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
workspaceId,
|
||||
folderId: r.parentId === workspaceId ? null : r.parentId,
|
||||
model: 'grpc_request',
|
||||
sortPriority,
|
||||
name: r.name,
|
||||
url: convertSyntax(r.url),
|
||||
service,
|
||||
method,
|
||||
message: r.body?.text ?? '',
|
||||
metadata: (r.metadata ?? [])
|
||||
.map(({ name, value, disabled }) => ({
|
||||
enabled: !disabled,
|
||||
name,
|
||||
value,
|
||||
}))
|
||||
.filter(({ name, value }) => name !== '' || value !== ''),
|
||||
};
|
||||
}
|
||||
@@ -1,60 +0,0 @@
|
||||
import { convertSyntax } from '../helpers/variables.js';
|
||||
|
||||
/**
|
||||
* Import an Insomnia request object.
|
||||
* @param {Object} r - The request object to import.
|
||||
* @param workspaceId - The workspace ID to use for the request.
|
||||
* @param {number} sortPriority - The sort priority to use for the request.
|
||||
*/
|
||||
export function importHttpRequest(r, workspaceId, sortPriority = 0) {
|
||||
console.log('IMPORTING REQUEST', r._id, r.name, JSON.stringify(r, null, 2));
|
||||
|
||||
let bodyType = null;
|
||||
let body = null;
|
||||
if (r.body?.mimeType === 'application/graphql') {
|
||||
bodyType = 'graphql';
|
||||
body = convertSyntax(r.body.text);
|
||||
} else if (r.body?.mimeType === 'application/json') {
|
||||
bodyType = 'application/json';
|
||||
body = convertSyntax(r.body.text);
|
||||
}
|
||||
|
||||
let authenticationType = null;
|
||||
let authentication = {};
|
||||
if (r.authentication.type === 'bearer') {
|
||||
authenticationType = 'bearer';
|
||||
authentication = {
|
||||
token: convertSyntax(r.authentication.token),
|
||||
};
|
||||
} else if (r.authentication.type === 'basic') {
|
||||
authenticationType = 'basic';
|
||||
authentication = {
|
||||
username: convertSyntax(r.authentication.username),
|
||||
password: convertSyntax(r.authentication.password),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
id: r._id,
|
||||
createdAt: new Date(r.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(r.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
workspaceId,
|
||||
folderId: r.parentId === workspaceId ? null : r.parentId,
|
||||
model: 'http_request',
|
||||
sortPriority,
|
||||
name: r.name,
|
||||
url: convertSyntax(r.url),
|
||||
body,
|
||||
bodyType,
|
||||
authentication,
|
||||
authenticationType,
|
||||
method: r.method,
|
||||
headers: (r.headers ?? [])
|
||||
.map(({ name, value, disabled }) => ({
|
||||
enabled: !disabled,
|
||||
name,
|
||||
value,
|
||||
}))
|
||||
.filter(({ name, value }) => name !== '' || value !== ''),
|
||||
};
|
||||
}
|
||||
@@ -1,86 +0,0 @@
|
||||
import { importEnvironment } from './importers/environment';
|
||||
import { importHttpRequest } from './importers/httpRequest';
|
||||
import {
|
||||
isEnvironment,
|
||||
isJSObject,
|
||||
isHttpRequest,
|
||||
isRequestGroup,
|
||||
isWorkspace,
|
||||
isGrpcRequest,
|
||||
} from './helpers/types.js';
|
||||
import { parseVariables } from './helpers/variables.js';
|
||||
import { importFolder } from './importers/folder.js';
|
||||
import { importGrpcRequest } from './importers/grpcRequest';
|
||||
|
||||
export function pluginHookImport(contents) {
|
||||
let parsed;
|
||||
try {
|
||||
parsed = JSON.parse(contents);
|
||||
} catch (e) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isJSObject(parsed)) return;
|
||||
if (!Array.isArray(parsed.resources)) return;
|
||||
|
||||
const resources = {
|
||||
workspaces: [],
|
||||
httpRequests: [],
|
||||
grpcRequests: [],
|
||||
environments: [],
|
||||
folders: [],
|
||||
};
|
||||
|
||||
// Import workspaces
|
||||
const workspacesToImport = parsed.resources.filter(isWorkspace);
|
||||
for (const workspaceToImport of workspacesToImport) {
|
||||
const baseEnvironment = parsed.resources.find(
|
||||
(r) => isEnvironment(r) && r.parentId === workspaceToImport._id,
|
||||
);
|
||||
resources.workspaces.push({
|
||||
id: workspaceToImport._id,
|
||||
createdAt: new Date(workspaceToImport.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(workspaceToImport.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
model: 'workspace',
|
||||
name: workspaceToImport.name,
|
||||
variables: baseEnvironment ? parseVariables(baseEnvironment.data) : [],
|
||||
});
|
||||
const environmentsToImport = parsed.resources.filter(
|
||||
(r) => isEnvironment(r) && r.parentId === baseEnvironment?._id,
|
||||
);
|
||||
resources.environments.push(
|
||||
...environmentsToImport.map((r) => importEnvironment(r, workspaceToImport._id)),
|
||||
);
|
||||
|
||||
const nextFolder = (parentId) => {
|
||||
const children = parsed.resources.filter((r) => r.parentId === parentId);
|
||||
let sortPriority = 0;
|
||||
for (const child of children) {
|
||||
if (isRequestGroup(child)) {
|
||||
resources.folders.push(importFolder(child, workspaceToImport._id));
|
||||
nextFolder(child._id);
|
||||
} else if (isHttpRequest(child)) {
|
||||
resources.httpRequests.push(
|
||||
importHttpRequest(child, workspaceToImport._id, sortPriority++),
|
||||
);
|
||||
} else if (isGrpcRequest(child)) {
|
||||
console.log('GRPC', JSON.stringify(child, null, 1));
|
||||
resources.grpcRequests.push(
|
||||
importGrpcRequest(child, workspaceToImport._id, sortPriority++),
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Import folders
|
||||
nextFolder(workspaceToImport._id);
|
||||
}
|
||||
|
||||
// Filter out any `null` values
|
||||
resources.httpRequests = resources.httpRequests.filter(Boolean);
|
||||
resources.grpcRequests = resources.grpcRequests.filter(Boolean);
|
||||
resources.environments = resources.environments.filter(Boolean);
|
||||
resources.workspaces = resources.workspaces.filter(Boolean);
|
||||
|
||||
return { resources };
|
||||
}
|
||||
@@ -1,13 +0,0 @@
|
||||
import { resolve } from 'path';
|
||||
import { defineConfig } from 'vite';
|
||||
|
||||
export default defineConfig({
|
||||
build: {
|
||||
lib: {
|
||||
entry: resolve(__dirname, 'src/index.js'),
|
||||
fileName: 'index',
|
||||
formats: ['es'],
|
||||
},
|
||||
outDir: resolve(__dirname, '../../src-tauri/plugins/importer-insomnia'),
|
||||
},
|
||||
});
|
||||
1505  plugins/importer-postman/package-lock.json  generated
File diff suppressed because it is too large
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"name": "importer-postman",
|
||||
"version": "0.0.1",
|
||||
"devDependencies": {
|
||||
"vitest": "^1.4.0"
|
||||
}
|
||||
}
|
||||
@@ -1,239 +0,0 @@
|
||||
import { Environment, Folder, HttpRequest, Workspace } from '../../../src-web/lib/models';
|
||||
|
||||
const POSTMAN_2_1_0_SCHEMA = 'https://schema.getpostman.com/json/collection/v2.1.0/collection.json';
|
||||
const POSTMAN_2_0_0_SCHEMA = 'https://schema.getpostman.com/json/collection/v2.0.0/collection.json';
|
||||
const VALID_SCHEMAS = [POSTMAN_2_0_0_SCHEMA, POSTMAN_2_1_0_SCHEMA];
|
||||
|
||||
type AtLeast<T, K extends keyof T> = Partial<T> & Pick<T, K>;
|
||||
|
||||
interface ExportResources {
|
||||
workspaces: AtLeast<Workspace, 'name' | 'id' | 'model'>[];
|
||||
environments: AtLeast<Environment, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
httpRequests: AtLeast<HttpRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
folders: AtLeast<Folder, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
}
|
||||
|
||||
export function pluginHookImport(contents: string): { resources: ExportResources } | undefined {
|
||||
const root = parseJSONToRecord(contents);
|
||||
if (root == null) return;
|
||||
|
||||
const info = toRecord(root.info);
|
||||
const isValidSchema = VALID_SCHEMAS.includes(info.schema);
|
||||
if (!isValidSchema || !Array.isArray(root.item)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const globalAuth = importAuth(root.auth);
|
||||
|
||||
const exportResources: ExportResources = {
|
||||
workspaces: [],
|
||||
environments: [],
|
||||
httpRequests: [],
|
||||
folders: [],
|
||||
};
|
||||
|
||||
const workspace: ExportResources['workspaces'][0] = {
|
||||
model: 'workspace',
|
||||
id: generateId('wk'),
|
||||
name: info.name || 'Postman Import',
|
||||
description: info.description || '',
|
||||
variables:
|
||||
root.variable?.map((v: any) => ({
|
||||
name: v.key,
|
||||
value: v.value,
|
||||
})) ?? [],
|
||||
};
|
||||
exportResources.workspaces.push(workspace);
|
||||
|
||||
const importItem = (v: Record<string, any>, folderId: string | null = null) => {
|
||||
if (typeof v.name === 'string' && Array.isArray(v.item)) {
|
||||
const folder: ExportResources['folders'][0] = {
|
||||
model: 'folder',
|
||||
workspaceId: workspace.id,
|
||||
id: generateId('fl'),
|
||||
name: v.name,
|
||||
folderId,
|
||||
};
|
||||
exportResources.folders.push(folder);
|
||||
for (const child of v.item) {
|
||||
importItem(child, folder.id);
|
||||
}
|
||||
} else if (typeof v.name === 'string' && 'request' in v) {
|
||||
const r = toRecord(v.request);
|
||||
const bodyPatch = importBody(r.body);
|
||||
const requestAuthPath = importAuth(r.auth);
|
||||
const authPatch = requestAuthPath.authenticationType == null ? globalAuth : requestAuthPath;
|
||||
const request: ExportResources['httpRequests'][0] = {
|
||||
model: 'http_request',
|
||||
id: generateId('rq'),
|
||||
workspaceId: workspace.id,
|
||||
folderId,
|
||||
name: v.name,
|
||||
method: r.method || 'GET',
|
||||
url: typeof r.url === 'string' ? r.url : toRecord(r.url).raw,
|
||||
body: bodyPatch.body,
|
||||
bodyType: bodyPatch.bodyType,
|
||||
authentication: authPatch.authentication,
|
||||
authenticationType: authPatch.authenticationType,
|
||||
headers: [
|
||||
...bodyPatch.headers,
|
||||
...authPatch.headers,
|
||||
...toArray(r.header).map((h) => {
|
||||
return {
|
||||
name: h.key,
|
||||
value: h.value,
|
||||
enabled: !h.disabled,
|
||||
};
|
||||
}),
|
||||
],
|
||||
};
|
||||
exportResources.httpRequests.push(request);
|
||||
} else {
|
||||
console.log('Unknown item', v, folderId);
|
||||
}
|
||||
};
|
||||
|
||||
for (const item of root.item) {
|
||||
importItem(item);
|
||||
}
|
||||
|
||||
return { resources: convertTemplateSyntax(exportResources) };
|
||||
}
|
||||
|
||||
function importAuth(
|
||||
rawAuth: any,
|
||||
): Pick<HttpRequest, 'authentication' | 'authenticationType' | 'headers'> {
|
||||
const auth = toRecord(rawAuth);
|
||||
if ('basic' in auth) {
|
||||
return {
|
||||
headers: [],
|
||||
authenticationType: 'basic',
|
||||
authentication: {
|
||||
username: auth.basic.username || '',
|
||||
password: auth.basic.password || '',
|
||||
},
|
||||
};
|
||||
} else if ('bearer' in auth) {
|
||||
return {
|
||||
headers: [],
|
||||
authenticationType: 'bearer',
|
||||
authentication: {
|
||||
token: auth.bearer.token || '',
|
||||
},
|
||||
};
|
||||
} else {
|
||||
// TODO: support other auth types
|
||||
return { headers: [], authenticationType: null, authentication: {} };
|
||||
}
|
||||
}
|
||||
|
||||
function importBody(rawBody: any): Pick<HttpRequest, 'body' | 'bodyType' | 'headers'> {
|
||||
const body = toRecord(rawBody);
|
||||
if ('graphql' in body) {
|
||||
return {
|
||||
headers: [
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'application/json',
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
bodyType: 'graphql',
|
||||
body: {
|
||||
text: JSON.stringify(
|
||||
{ query: body.graphql.query, variables: parseJSONToRecord(body.graphql.variables) },
|
||||
null,
|
||||
2,
|
||||
),
|
||||
},
|
||||
};
|
||||
} else if ('urlencoded' in body) {
|
||||
return {
|
||||
headers: [
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'application/x-www-form-urlencoded',
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
bodyType: 'application/x-www-form-urlencoded',
|
||||
body: {
|
||||
form: toArray(body.urlencoded).map((f) => ({
|
||||
enabled: !f.disabled,
|
||||
name: f.key ?? '',
|
||||
value: f.value ?? '',
|
||||
})),
|
||||
},
|
||||
};
|
||||
} else if ('formdata' in body) {
|
||||
return {
|
||||
headers: [
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'multipart/form-data',
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
bodyType: 'multipart/form-data',
|
||||
body: {
|
||||
form: toArray(body.formdata).map((f) =>
|
||||
f.src != null
|
||||
? {
|
||||
enabled: !f.disabled,
|
||||
name: f.key ?? '',
|
||||
file: f.src ?? '',
|
||||
}
|
||||
: {
|
||||
enabled: !f.disabled,
|
||||
name: f.key ?? '',
|
||||
value: f.value ?? '',
|
||||
},
|
||||
),
|
||||
},
|
||||
};
|
||||
} else {
|
||||
// TODO: support other body types
|
||||
return { headers: [], bodyType: null, body: {} };
|
||||
}
|
||||
}
|
||||
|
||||
function parseJSONToRecord(jsonStr: string): Record<string, any> | null {
|
||||
try {
|
||||
return toRecord(JSON.parse(jsonStr));
|
||||
} catch (err) {}
|
||||
return null;
|
||||
}
|
||||
|
||||
function toRecord(value: any): Record<string, any> {
|
||||
if (Object.prototype.toString.call(value) === '[object Object]') return value;
|
||||
else return {};
|
||||
}
|
||||
|
||||
function toArray(value: any): any[] {
|
||||
if (Object.prototype.toString.call(value) === '[object Array]') return value;
|
||||
else return [];
|
||||
}
|
||||
|
||||
/** Recursively render all nested object properties */
|
||||
function convertTemplateSyntax<T>(obj: T): T {
|
||||
if (typeof obj === 'string') {
|
||||
return obj.replace(/{{\s*(_\.)?([^}]+)\s*}}/g, '${[$2]}') as T;
|
||||
} else if (Array.isArray(obj) && obj != null) {
|
||||
return obj.map(convertTemplateSyntax) as T;
|
||||
} else if (typeof obj === 'object' && obj != null) {
|
||||
return Object.fromEntries(
|
||||
Object.entries(obj).map(([k, v]) => [k, convertTemplateSyntax(v)]),
|
||||
) as T;
|
||||
} else {
|
||||
return obj;
|
||||
}
|
||||
}
|
||||
|
||||
export function generateId(prefix: 'wk' | 'rq' | 'fl'): string {
|
||||
const alphabet = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
|
||||
let id = `${prefix}_`;
|
||||
for (let i = 0; i < 10; i++) {
|
||||
id += alphabet[Math.floor(Math.random() * alphabet.length)];
|
||||
}
|
||||
return id;
|
||||
}
|
||||
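generateId() above builds a short identifier from the given prefix plus ten random alphanumeric characters; example outputs (illustrative, since actual values depend on Math.random):

// Illustrative outputs of generateId() above
generateId('wk'); // e.g. 'wk_0G3J6M9QcT'
generateId('rq'); // e.g. 'rq_4xKm2ZpQ7a'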
@@ -1,38 +0,0 @@
|
||||
{
|
||||
"info": {
|
||||
"_postman_id": "9e6dfada-256c-49ea-a38f-7d1b05b7ca2d",
|
||||
"name": "New Collection",
|
||||
"schema": "https://schema.getpostman.com/json/collection/v2.0.0/collection.json",
|
||||
"_exporter_id": "18798"
|
||||
},
|
||||
"item": [
|
||||
{
|
||||
"name": "Top Folder",
|
||||
"item": [
|
||||
{
|
||||
"name": "Nested Folder",
|
||||
"item": [
|
||||
{
|
||||
"name": "Request 1",
|
||||
"request": {
|
||||
"method": "GET"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Request 2",
|
||||
"request": {
|
||||
"method": "GET"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Request 3",
|
||||
"request": {
|
||||
"method": "GET"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,64 +0,0 @@
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
import { pluginHookImport } from '../src';
|
||||
|
||||
let originalRandom = Math.random;
|
||||
|
||||
describe('importer-postman', () => {
|
||||
beforeEach(() => {
|
||||
let i = 0;
|
||||
// Pseudo-random number generator to ensure consistent ID generation
|
||||
Math.random = vi.fn(() => ((i++ * 1000) % 133) / 100);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
Math.random = originalRandom;
|
||||
});
|
||||
|
||||
const p = path.join(__dirname, 'fixtures');
|
||||
const fixtures = fs.readdirSync(p);
|
||||
|
||||
for (const fixture of fixtures) {
|
||||
test('Imports ' + fixture, () => {
|
||||
const contents = fs.readFileSync(path.join(p, fixture), 'utf-8');
|
||||
const imported = pluginHookImport(contents);
|
||||
expect(imported).toEqual({
|
||||
resources: expect.objectContaining({
|
||||
folders: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
name: 'Top Folder',
|
||||
workspaceId: 'wk_0G3J6M9QcT',
|
||||
}),
|
||||
expect.objectContaining({
|
||||
name: 'Nested Folder',
|
||||
workspaceId: 'wk_0G3J6M9QcT',
|
||||
}),
|
||||
]),
|
||||
httpRequests: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
name: 'Request 1',
|
||||
workspaceId: 'wk_0G3J6M9QcT',
|
||||
folderId: 'fl_vundefinedyundefinedBundefinedE0H3',
|
||||
}),
|
||||
expect.objectContaining({
|
||||
name: 'Request 2',
|
||||
workspaceId: 'wk_0G3J6M9QcT',
|
||||
folderId: 'fl_fWiZlundefinedoundefinedrundefined',
|
||||
}),
|
||||
expect.objectContaining({
|
||||
name: 'Request 3',
|
||||
workspaceId: 'wk_0G3J6M9QcT',
|
||||
folderId: null,
|
||||
}),
|
||||
]),
|
||||
workspaces: [
|
||||
expect.objectContaining({
|
||||
name: 'New Collection',
|
||||
}),
|
||||
],
|
||||
}),
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
@@ -1,23 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ESNext",
|
||||
"useDefineForClassFields": true,
|
||||
"module": "ESNext",
|
||||
"lib": [
|
||||
"ESNext",
|
||||
],
|
||||
"skipLibCheck": true,
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
"noEmit": true,
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"noFallthroughCasesInSwitch": true
|
||||
},
|
||||
"include": [
|
||||
"./src"
|
||||
]
|
||||
}
|
||||
@@ -1,13 +0,0 @@
|
||||
import { resolve } from 'path';
|
||||
import { defineConfig } from 'vite';
|
||||
|
||||
export default defineConfig({
|
||||
build: {
|
||||
lib: {
|
||||
entry: resolve(__dirname, 'src/index.ts'),
|
||||
fileName: 'index',
|
||||
formats: ['es'],
|
||||
},
|
||||
outDir: resolve(__dirname, '../../src-tauri/plugins/importer-postman'),
|
||||
},
|
||||
});
|
||||
12  plugins/importer-yaak/package-lock.json  generated
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"name": "importer-yaak",
|
||||
"version": "0.0.1",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "importer-yaak",
|
||||
"version": "0.0.1"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"name": "importer-yaak",
|
||||
"version": "0.0.1"
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
export function pluginHookImport(contents: string) {
|
||||
let parsed;
|
||||
try {
|
||||
parsed = JSON.parse(contents);
|
||||
} catch (err) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (!isJSObject(parsed)) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const isYaakExport = 'yaakSchema' in parsed;
|
||||
if (!isYaakExport) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Migrate v1 to v2 -- changes requests to httpRequests
|
||||
if ('requests' in parsed.resources) {
|
||||
parsed.resources.httpRequests = parsed.resources.requests;
|
||||
delete parsed.resources.requests;
|
||||
}
|
||||
|
||||
return { resources: parsed.resources }; // Should already be in the correct format
|
||||
}
|
||||
|
||||
export function isJSObject(obj: any) {
|
||||
return Object.prototype.toString.call(obj) === '[object Object]';
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
import { describe, expect, test } from 'vitest';
|
||||
import { pluginHookImport } from '../src';
|
||||
|
||||
describe('importer-yaak', () => {
|
||||
test('Skips invalid imports', () => {
|
||||
expect(pluginHookImport('not JSON')).toBeUndefined();
|
||||
expect(pluginHookImport('[]')).toBeUndefined();
|
||||
expect(pluginHookImport(JSON.stringify({ resources: {} }))).toBeUndefined();
|
||||
});
|
||||
|
||||
test('converts schema 1 to 2', () => {
|
||||
const imported = pluginHookImport(
|
||||
JSON.stringify({
|
||||
yaakSchema: 1,
|
||||
resources: {
|
||||
requests: [],
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
expect(imported).toEqual(
|
||||
expect.objectContaining({
|
||||
resources: {
|
||||
httpRequests: [],
|
||||
},
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -1,13 +0,0 @@
|
||||
import { resolve } from 'path';
|
||||
import { defineConfig } from 'vite';
|
||||
|
||||
export default defineConfig({
|
||||
build: {
|
||||
lib: {
|
||||
entry: resolve(__dirname, 'src/index.ts'),
|
||||
fileName: 'index',
|
||||
formats: ['es'],
|
||||
},
|
||||
outDir: resolve(__dirname, '../../src-tauri/plugins/importer-yaak'),
|
||||
},
|
||||
});
|
||||
32  proto/plugins/runtime.proto  Normal file
@@ -0,0 +1,32 @@
syntax = "proto3";

package yaak.plugins.runtime;

service PluginRuntime {
  rpc hookImport (HookImportRequest) returns (HookResponse);
  rpc hookExport (HookExportRequest) returns (HookResponse);
  rpc hookResponseFilter (HookResponseFilterRequest) returns (HookResponse);
}

message PluginInfo {
  string plugin = 1;
}

message HookResponse {
  PluginInfo info = 1;
  string data = 2;
}

message HookImportRequest {
  string data = 1;
}

message HookResponseFilterRequest {
  string filter = 1;
  string body = 2;
  string contentType = 3;
}

message HookExportRequest {
  string request = 1;
}
15  scripts/replace-version.cjs  Normal file
@@ -0,0 +1,15 @@
const path = require('path');
const fs = require('fs');

const version = process.env.YAAK_VERSION?.replace('v', '');
if (!version) {
  throw new Error('YAAK_VERSION environment variable not set')
}

const tauriConfigPath = path.join(__dirname, '../src-tauri/tauri.conf.json');
const tauriConfig = JSON.parse(fs.readFileSync(tauriConfigPath, 'utf8'));

tauriConfig.version = version;

console.log('Writing version ' + version + ' to ' + tauriConfigPath)
fs.writeFileSync(tauriConfigPath, JSON.stringify(tauriConfig, null, 2));
69  scripts/vendor-node.cjs  Normal file
@@ -0,0 +1,69 @@
const path = require('node:path');
const decompress = require('decompress');
const Downloader = require("nodejs-file-downloader");
const {rmSync, cpSync, mkdirSync, existsSync} = require("node:fs");
const {execSync} = require("node:child_process");

const NODE_VERSION = 'v22.5.1';

// `${process.platform}_${process.arch}`
const MAC_ARM = 'darwin_arm64';
const MAC_X64 = 'darwin_x64';
const LNX_X64 = 'linux_x64';
const WIN_X64 = 'win32_x64';

const URL_MAP = {
  [MAC_ARM]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-darwin-arm64.tar.gz`,
  [MAC_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-darwin-x64.tar.gz`,
  [LNX_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-linux-x64.tar.gz`,
  [WIN_X64]: `https://nodejs.org/download/release/${NODE_VERSION}/node-${NODE_VERSION}-win-x64.zip`,
};

const SRC_BIN_MAP = {
  [MAC_ARM]: `node-${NODE_VERSION}-darwin-arm64/bin/node`,
  [MAC_X64]: `node-${NODE_VERSION}-darwin-x64/bin/node`,
  [LNX_X64]: `node-${NODE_VERSION}-linux-x64/bin/node`,
  [WIN_X64]: `node-${NODE_VERSION}-win-x64/node.exe`,
};

const DST_BIN_MAP = {
  darwin_arm64: 'yaaknode-aarch64-apple-darwin',
  darwin_x64: 'yaaknode-x86_64-apple-darwin',
  linux_x64: 'yaaknode-x86_64-unknown-linux-gnu',
  win32_x64: 'yaaknode-x86_64-pc-windows-msvc.exe',
};

const key = `${process.platform}_${process.env.YAAK_TARGET_ARCH ?? process.arch}`;

const destDir = path.join(__dirname, `..`, 'src-tauri', 'vendored', 'node');
const binDest = path.join(destDir, DST_BIN_MAP[key]);
console.log(`Vendoring NodeJS ${NODE_VERSION} for ${key}`);

if (existsSync(binDest) && execSync(`${binDest} --version`).toString('utf-8').trim() === NODE_VERSION) {
  console.log("NodeJS already vendored");
  return;
}

rmSync(destDir, {recursive: true, force: true});
mkdirSync(destDir, {recursive: true});

(async function () {
  const url = URL_MAP[key];
  const tmpDir = path.join(__dirname, 'tmp', Date.now().toString());

  // Download the Node release artifact
  const {filePath} = await new Downloader({url, directory: tmpDir,}).download();

  // Decompress to the same directory
  await decompress(filePath, tmpDir, {});

  // Copy binary
  const binSrc = path.join(tmpDir, SRC_BIN_MAP[key]);
  cpSync(binSrc, binDest);
  rmSync(tmpDir, {recursive: true, force: true});

  console.log("Downloaded NodeJS to", binDest);
})().catch(err => {
  console.log('Script failed:', err);
  process.exit(1);
});
25  scripts/vendor-plugins.cjs  Normal file
@@ -0,0 +1,25 @@
const {readdirSync, cpSync} = require("node:fs");
const path = require("node:path");
const {execSync} = require("node:child_process");
const pluginsDir = process.env.YAAK_PLUGINS_DIR;
if (!pluginsDir) {
  console.log("YAAK_PLUGINS_DIR is not set");
  process.exit(1);
}

console.log('Installing Yaak plugins dependencies', pluginsDir);
execSync('npm ci', {cwd: pluginsDir});
console.log('Building Yaak plugins', pluginsDir);
execSync('npm run build', {cwd: pluginsDir});

console.log('Copying Yaak plugins from', pluginsDir);

const pluginsRoot = path.join(pluginsDir, 'plugins');
for (const name of readdirSync(pluginsRoot)) {
  const dir = path.join(pluginsRoot, name);
  if (name.startsWith('.')) continue;
  const destDir = path.join(__dirname, '../src-tauri/vendored/plugins/', name);
  console.log(`Copying ${name} to ${destDir}`);
  cpSync(path.join(dir, 'package.json'), path.join(destDir, 'package.json'));
  cpSync(path.join(dir, 'build/index.js'), path.join(destDir, 'build/index.js'));
}
62  scripts/vendor-protoc.cjs  Normal file
@@ -0,0 +1,62 @@
const decompress = require('decompress');
const Downloader = require("nodejs-file-downloader");
const path = require("node:path");
const {rmSync, mkdirSync, cpSync} = require("node:fs");

// `${process.platform}_${process.arch}`
const MAC_ARM = 'darwin_arm64';
const MAC_X64 = 'darwin_x64';
const LNX_X64 = 'linux_x64';
const WIN_X64 = 'win32_x64';

const URL_MAP = {
  [MAC_ARM]: 'https://github.com/protocolbuffers/protobuf/releases/download/v27.2/protoc-27.2-osx-aarch_64.zip',
  [MAC_X64]: 'https://github.com/protocolbuffers/protobuf/releases/download/v27.2/protoc-27.2-osx-x86_64.zip',
  [LNX_X64]: 'https://github.com/protocolbuffers/protobuf/releases/download/v27.2/protoc-27.2-linux-x86_64.zip',
  [WIN_X64]: 'https://github.com/protocolbuffers/protobuf/releases/download/v27.2/protoc-27.2-win64.zip',
};

const SRC_BIN_MAP = {
  [MAC_ARM]: 'bin/protoc',
  [MAC_X64]: 'bin/protoc',
  [LNX_X64]: 'bin/protoc',
  [WIN_X64]: 'bin/protoc.exe',
};

const DST_BIN_MAP = {
  [MAC_ARM]: 'yaakprotoc-aarch64-apple-darwin',
  [MAC_X64]: 'yaakprotoc-x86_64-apple-darwin',
  [LNX_X64]: 'yaakprotoc-x86_64-unknown-linux-gnu',
  [WIN_X64]: 'yaakprotoc-x86_64-pc-windows-msvc.exe',
};

const dstDir = path.join(__dirname, `..`, 'src-tauri', 'vendored', 'protoc');
rmSync(dstDir, {recursive: true, force: true});
mkdirSync(dstDir, {recursive: true});

(async function () {
  const key = `${process.platform}_${process.env.YAAK_TARGET_ARCH ?? process.arch}`;
  console.log("Vendoring protoc binary for", key);
  const url = URL_MAP[key];
  const tmpDir = path.join(__dirname, 'tmp', Date.now().toString());

  // Download GitHub release artifact
  const {filePath} = await new Downloader({url, directory: tmpDir,}).download();

  // Decompress to the same directory
  await decompress(filePath, tmpDir, {});

  // Copy binary
  const binSrc = path.join(tmpDir, SRC_BIN_MAP[key]);
  const binDst = path.join(dstDir, DST_BIN_MAP[key]);
  cpSync(binSrc, binDst);

  // Copy other files
  const includeSrc = path.join(tmpDir, 'include');
  const includeDst = path.join(dstDir, 'include');
  cpSync(includeSrc, includeDst, {recursive: true});

  rmSync(tmpDir, {recursive: true, force: true});

  console.log("Downloaded protoc to", binDst);
})().catch(err => console.log('Script failed:', err));
1  src-tauri/.gitignore  vendored
@@ -2,3 +2,4 @@
# will have compiled files and executables
/target/

vendored
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO http_requests (\n id, workspace_id, folder_id, name, url, url_parameters, method, body, body_type,\n authentication, authentication_type, headers, sort_priority\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n folder_id = excluded.folder_id,\n method = excluded.method,\n headers = excluded.headers,\n body = excluded.body,\n body_type = excluded.body_type,\n authentication = excluded.authentication,\n authentication_type = excluded.authentication_type,\n url = excluded.url,\n url_parameters = excluded.url_parameters,\n sort_priority = excluded.sort_priority\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 13
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "11394af12419cca3be3a26dff9275514ea2a44504e3c7a568a9578c64b5713d1"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO grpc_requests (\n id, name, workspace_id, folder_id, sort_priority, url, service, method, message,\n authentication_type, authentication, metadata\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n name = excluded.name,\n folder_id = excluded.folder_id,\n sort_priority = excluded.sort_priority,\n url = excluded.url,\n service = excluded.service,\n method = excluded.method,\n message = excluded.message,\n authentication_type = excluded.authentication_type,\n authentication = excluded.authentication,\n metadata = excluded.metadata\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 12
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "467b87ad1209a4653b1dc8462d79236a655240c5b402fa9fd75c12ebd9bb6b86"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n UPDATE settings SET (\n theme, appearance, update_channel\n ) = (?, ?, ?) WHERE id = 'default';\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 3
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "48ec5fdf20f34add763c540061caa25054545503704e19f149987f99b1a0e4f0"
|
||||
}
|
||||
12  src-tauri/.sqlx/query-5af82cd333895d3d7d67a92f37b0feb338f615b88aea2bd09cb5809008c645a3.json  generated  Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO grpc_requests (\n id, name, workspace_id, folder_id, sort_priority, url, service, method, message,\n authentication_type, authentication, metadata\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n workspace_id = excluded.workspace_id,\n name = excluded.name,\n folder_id = excluded.folder_id,\n sort_priority = excluded.sort_priority,\n url = excluded.url,\n service = excluded.service,\n method = excluded.method,\n message = excluded.message,\n authentication_type = excluded.authentication_type,\n authentication = excluded.authentication,\n metadata = excluded.metadata\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 12
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "5af82cd333895d3d7d67a92f37b0feb338f615b88aea2bd09cb5809008c645a3"
|
||||
}
|
||||
12  src-tauri/.sqlx/query-5f2f40062abbe93e23b38876319cf16d4d2b3f8d0be32ffe7848528c725e1429.json  generated  Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n INSERT INTO http_requests (\n id, workspace_id, folder_id, name, url, url_parameters, method, body, body_type,\n authentication, authentication_type, headers, sort_priority\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO UPDATE SET\n updated_at = CURRENT_TIMESTAMP,\n workspace_id = excluded.workspace_id,\n name = excluded.name,\n folder_id = excluded.folder_id,\n method = excluded.method,\n headers = excluded.headers,\n body = excluded.body,\n body_type = excluded.body_type,\n authentication = excluded.authentication,\n authentication_type = excluded.authentication_type,\n url = excluded.url,\n url_parameters = excluded.url_parameters,\n sort_priority = excluded.sort_priority\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 13
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "5f2f40062abbe93e23b38876319cf16d4d2b3f8d0be32ffe7848528c725e1429"
|
||||
}
|
||||
12  src-tauri/.sqlx/query-6b5edf45a6799cd7f87c23a3c7f818ad110d58c601f694a619d9345ae9e8e11d.json  generated  Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n UPDATE settings SET (\n theme, appearance, theme_dark, theme_light, update_channel,\n interface_font_size, interface_scale, editor_font_size, editor_soft_wrap,\n open_workspace_new_window\n ) = (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) WHERE id = 'default';\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 10
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "6b5edf45a6799cd7f87c23a3c7f818ad110d58c601f694a619d9345ae9e8e11d"
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
{
  "db_name": "SQLite",
  "query": "\n SELECT\n id, model, created_at, updated_at, theme, appearance, update_channel\n FROM settings\n WHERE id = 'default'\n ",
  "describe": {
    "columns": [
      {
        "name": "id",
        "ordinal": 0,
        "type_info": "Text"
      },
      {
        "name": "model",
        "ordinal": 1,
        "type_info": "Text"
      },
      {
        "name": "created_at",
        "ordinal": 2,
        "type_info": "Datetime"
      },
      {
        "name": "updated_at",
        "ordinal": 3,
        "type_info": "Datetime"
      },
      {
        "name": "theme",
        "ordinal": 4,
        "type_info": "Text"
      },
      {
        "name": "appearance",
        "ordinal": 5,
        "type_info": "Text"
      },
      {
        "name": "update_channel",
        "ordinal": 6,
        "type_info": "Text"
      }
    ],
    "parameters": {
      "Right": 0
    },
    "nullable": [
      false,
      false,
      false,
      false,
      false,
      false,
      false
    ]
  },
  "hash": "b32994b09ae7a06eb0f031069d327e55127a5bce60cbb499b83d1701386a23cb"
}
98
src-tauri/.sqlx/query-daa61066517df649e7c80a8ce407839ad502e8e5e43aa8c02e049865acbbae75.json
generated
Normal file
@@ -0,0 +1,98 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT\n id, model, created_at, updated_at, theme, appearance,\n theme_dark, theme_light, update_channel,\n interface_font_size, interface_scale, editor_font_size, editor_soft_wrap,\n open_workspace_new_window\n FROM settings\n WHERE id = 'default'\n ",
  "describe": {
    "columns": [
      {
        "name": "id",
        "ordinal": 0,
        "type_info": "Text"
      },
      {
        "name": "model",
        "ordinal": 1,
        "type_info": "Text"
      },
      {
        "name": "created_at",
        "ordinal": 2,
        "type_info": "Datetime"
      },
      {
        "name": "updated_at",
        "ordinal": 3,
        "type_info": "Datetime"
      },
      {
        "name": "theme",
        "ordinal": 4,
        "type_info": "Text"
      },
      {
        "name": "appearance",
        "ordinal": 5,
        "type_info": "Text"
      },
      {
        "name": "theme_dark",
        "ordinal": 6,
        "type_info": "Text"
      },
      {
        "name": "theme_light",
        "ordinal": 7,
        "type_info": "Text"
      },
      {
        "name": "update_channel",
        "ordinal": 8,
        "type_info": "Text"
      },
      {
        "name": "interface_font_size",
        "ordinal": 9,
        "type_info": "Int64"
      },
      {
        "name": "interface_scale",
        "ordinal": 10,
        "type_info": "Int64"
      },
      {
        "name": "editor_font_size",
        "ordinal": 11,
        "type_info": "Int64"
      },
      {
        "name": "editor_soft_wrap",
        "ordinal": 12,
        "type_info": "Bool"
      },
      {
        "name": "open_workspace_new_window",
        "ordinal": 13,
        "type_info": "Bool"
      }
    ],
    "parameters": {
      "Right": 0
    },
    "nullable": [
      false,
      false,
      false,
      false,
      false,
      false,
      false,
      false,
      false,
      false,
      false,
      false,
      false,
      true
    ]
  },
  "hash": "daa61066517df649e7c80a8ce407839ad502e8e5e43aa8c02e049865acbbae75"
}
4768
src-tauri/Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,18 +1,20 @@
|
||||
workspace = { members = ["grpc"] }
|
||||
workspace = { members = ["grpc", "templates", "tauri-plugin-plugin-runtime"] }
|
||||
|
||||
[package]
|
||||
name = "yaak-app"
|
||||
version = "0.0.0"
|
||||
description = "A network protocol testing utility app"
|
||||
authors = ["Gregory Schier"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/gschier/yaak-app"
|
||||
edition = "2021"
|
||||
|
||||
# Produce a library for mobile support
|
||||
[lib]
|
||||
name = "tauri_app_lib"
|
||||
crate-type = ["staticlib", "cdylib", "lib"]
|
||||
|
||||
[profile.release]
|
||||
strip = true # Automatically strip symbols from the binary.
|
||||
|
||||
[build-dependencies]
|
||||
tauri-build = { version = "1.5", features = [] }
|
||||
tauri-build = { version = "2.0.0-beta", features = [] }
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
objc = "0.2.7"
|
||||
@@ -22,53 +24,35 @@ cocoa = "0.25.0"
|
||||
openssl-sys = { version = "0.9", features = ["vendored"] } # For Ubuntu installation to work
|
||||
|
||||
[dependencies]
|
||||
base64 = "0.22.0"
|
||||
boa_engine = { version = "0.17.3", features = ["annex-b"] }
|
||||
boa_runtime = { version = "0.17.3" }
|
||||
chrono = { version = "0.4.31", features = ["serde"] }
|
||||
futures = "0.3.26"
|
||||
http = "0.2.8"
|
||||
rand = "0.8.5"
|
||||
reqwest = { version = "0.11.23", features = ["multipart", "cookies", "gzip", "brotli", "deflate"] }
|
||||
cookie = { version = "0.18.0" }
|
||||
serde = { version = "1.0.195", features = ["derive"] }
|
||||
serde_json = { version = "1.0.111", features = ["raw_value"] }
|
||||
sqlx = { version = "0.7.4", features = ["sqlite", "runtime-tokio-rustls", "json", "chrono", "time"] }
|
||||
tauri = { version = "1.5.4", features = [
|
||||
"config-toml",
|
||||
"path-all",
|
||||
"devtools",
|
||||
"dialog-open",
|
||||
"dialog-save",
|
||||
"fs-read-file",
|
||||
"os-all",
|
||||
"protocol-asset",
|
||||
"shell-open",
|
||||
"shell-sidecar",
|
||||
"updater",
|
||||
"window-close",
|
||||
"window-maximize",
|
||||
"window-minimize",
|
||||
"window-set-decorations",
|
||||
"window-set-title",
|
||||
"window-start-dragging",
|
||||
"window-unmaximize",
|
||||
] }
|
||||
tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
|
||||
tauri-plugin-log = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1", features = ["colored"] }
|
||||
tokio = { version = "1.36.0", features = ["sync"] }
|
||||
uuid = "1.3.0"
|
||||
log = "0.4.20"
|
||||
datetime = "0.5.2"
|
||||
window-shadows = "0.2.2"
|
||||
reqwest_cookie_store = "0.6.0"
|
||||
grpc = { path = "./grpc" }
|
||||
templates = { path = "./templates" }
|
||||
plugin_runtime = { path = "tauri-plugin-plugin-runtime" }
|
||||
anyhow = "1.0.86"
|
||||
base64 = "0.22.0"
|
||||
chrono = { version = "0.4.31", features = ["serde"] }
|
||||
datetime = "0.5.2"
|
||||
hex_color = "3.0.0"
|
||||
http = "1"
|
||||
log = "0.4.21"
|
||||
rand = "0.8.5"
|
||||
regex = "1.10.2"
|
||||
reqwest = { version = "0.12.4", features = ["multipart", "cookies", "gzip", "brotli", "deflate", "json", "native-tls-alpn"] }
|
||||
reqwest_cookie_store = "0.8.0"
|
||||
serde = { version = "1.0.198", features = ["derive"] }
|
||||
serde_json = { version = "1.0.116", features = ["raw_value"] }
|
||||
serde_yaml = "0.9.34"
|
||||
sqlx = { version = "0.7.4", features = ["sqlite", "runtime-tokio-rustls", "json", "chrono", "time"] }
|
||||
tauri = { version = "2.0.0-beta", features = ["devtools", "protocol-asset"] }
|
||||
tauri-plugin-clipboard-manager = "2.1.0-beta"
|
||||
tauri-plugin-dialog = "2.0.0-beta"
|
||||
tauri-plugin-fs = "2.0.0-beta"
|
||||
tauri-plugin-log = { version = "2.0.0-beta", features = ["colored"] }
|
||||
tauri-plugin-os = "2.0.0-beta"
|
||||
tauri-plugin-shell = "2.0.0-beta"
|
||||
tauri-plugin-updater = "2.0.0-beta"
|
||||
tauri-plugin-window-state = "2.0.0-beta"
|
||||
tokio = { version = "1.36.0", features = ["sync"] }
|
||||
tokio-stream = "0.1.15"
|
||||
|
||||
[features]
|
||||
# by default Tauri runs in production mode
|
||||
# when `tauri dev` runs it is executed with `cargo run --no-default-features` if `devPath` is a URL
|
||||
default = ["custom-protocol"]
|
||||
# this feature is used for production builds where `devPath` points to the filesystem
|
||||
# DO NOT remove this
|
||||
custom-protocol = ["tauri/custom-protocol"]
|
||||
uuid = "1.7.0"
|
||||
thiserror = "1.0.61"
|
||||
mime_guess = "2.0.5"
|
||||
|
||||
46
src-tauri/capabilities/capabilities.json
Normal file
46
src-tauri/capabilities/capabilities.json
Normal file
@@ -0,0 +1,46 @@
|
||||
{
|
||||
"$schema": "../gen/schemas/capabilities.json",
|
||||
"identifier": "main",
|
||||
"description": "Main permissions",
|
||||
"local": true,
|
||||
"windows": [
|
||||
"*"
|
||||
],
|
||||
"permissions": [
|
||||
"os:allow-os-type",
|
||||
"event:allow-emit",
|
||||
"clipboard-manager:allow-write-text",
|
||||
"clipboard-manager:allow-read-text",
|
||||
"dialog:allow-open",
|
||||
"dialog:allow-save",
|
||||
"event:allow-listen",
|
||||
"event:allow-unlisten",
|
||||
"fs:allow-read-file",
|
||||
"fs:allow-read-text-file",
|
||||
{
|
||||
"identifier": "fs:scope",
|
||||
"allow": [
|
||||
{
|
||||
"path": "$APPDATA"
|
||||
},
|
||||
{
|
||||
"path": "$APPDATA/**"
|
||||
}
|
||||
]
|
||||
},
|
||||
"shell:allow-open",
|
||||
"webview:allow-set-webview-zoom",
|
||||
"window:allow-close",
|
||||
"window:allow-is-fullscreen",
|
||||
"window:allow-maximize",
|
||||
"window:allow-minimize",
|
||||
"window:allow-toggle-maximize",
|
||||
"window:allow-set-decorations",
|
||||
"window:allow-set-title",
|
||||
"window:allow-start-dragging",
|
||||
"window:allow-unmaximize",
|
||||
"window:allow-theme",
|
||||
"clipboard-manager:allow-read-text",
|
||||
"clipboard-manager:allow-write-text"
|
||||
]
|
||||
}
|
||||
1
src-tauri/gen/schemas/acl-manifests.json
Normal file
1
src-tauri/gen/schemas/acl-manifests.json
Normal file
File diff suppressed because one or more lines are too long
1
src-tauri/gen/schemas/capabilities.json
Normal file
1
src-tauri/gen/schemas/capabilities.json
Normal file
@@ -0,0 +1 @@
|
||||
{"main":{"identifier":"main","description":"Main permissions","local":true,"windows":["*"],"permissions":["os:allow-os-type","event:allow-emit","clipboard-manager:allow-write-text","clipboard-manager:allow-read-text","dialog:allow-open","dialog:allow-save","event:allow-listen","event:allow-unlisten","fs:allow-read-file","fs:allow-read-text-file",{"identifier":"fs:scope","allow":[{"path":"$APPDATA"},{"path":"$APPDATA/**"}]},"shell:allow-open","webview:allow-set-webview-zoom","window:allow-close","window:allow-is-fullscreen","window:allow-maximize","window:allow-minimize","window:allow-toggle-maximize","window:allow-set-decorations","window:allow-set-title","window:allow-start-dragging","window:allow-unmaximize","window:allow-theme","clipboard-manager:allow-read-text","clipboard-manager:allow-write-text"]}}
|
||||
6924
src-tauri/gen/schemas/desktop-schema.json
Normal file
6924
src-tauri/gen/schemas/desktop-schema.json
Normal file
File diff suppressed because it is too large
Load Diff
6861
src-tauri/gen/schemas/linux-schema.json
Normal file
6861
src-tauri/gen/schemas/linux-schema.json
Normal file
File diff suppressed because it is too large
Load Diff
6924
src-tauri/gen/schemas/macOS-schema.json
Normal file
6924
src-tauri/gen/schemas/macOS-schema.json
Normal file
File diff suppressed because it is too large
Load Diff
6924
src-tauri/gen/schemas/windows-schema.json
Normal file
6924
src-tauri/gen/schemas/windows-schema.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -14,10 +14,11 @@ serde = { version = "1.0.196", features = ["derive"] }
|
||||
serde_json = "1.0.113"
|
||||
prost-reflect = { version = "0.12.0", features = ["serde", "derive"] }
|
||||
log = "0.4.20"
|
||||
once_cell = { version = "1.19.0", features = [] }
|
||||
anyhow = "1.0.79"
|
||||
hyper = { version = "0.14" }
|
||||
hyper-rustls = { version = "0.24.0", features = ["http2"] }
|
||||
protoc-bin-vendored = "3.0.0"
|
||||
uuid = { version = "1.7.0", features = ["v4"] }
|
||||
tauri = { version = "1.5.4", features = ["process-command-api"]}
|
||||
tauri = { version = "2.0.0-beta" }
|
||||
tauri-plugin-shell = "2.0.0-beta"
|
||||
md5 = "0.7.0"
|
||||
dunce = "1.0.4"
|
||||
|
||||
@@ -16,7 +16,9 @@ use tonic::transport::Uri;
|
||||
use tonic::{IntoRequest, IntoStreamingRequest, Request, Response, Status, Streaming};
|
||||
|
||||
use crate::codec::DynamicCodec;
|
||||
use crate::proto::{fill_pool, fill_pool_from_files, get_transport, method_desc_to_path};
|
||||
use crate::proto::{
|
||||
fill_pool_from_files, fill_pool_from_reflection, get_transport, method_desc_to_path,
|
||||
};
|
||||
use crate::{json_schema, MethodDefinition, ServiceDefinition};
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -174,33 +176,45 @@ pub struct GrpcHandle {
|
||||
impl GrpcHandle {
|
||||
pub fn new(app_handle: &AppHandle) -> Self {
|
||||
let pools = HashMap::new();
|
||||
Self { pools, app_handle: app_handle.clone() }
|
||||
Self {
|
||||
pools,
|
||||
app_handle: app_handle.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl GrpcHandle {
|
||||
pub async fn services_from_files(
|
||||
pub async fn reflect(
|
||||
&mut self,
|
||||
id: &str,
|
||||
uri: &Uri,
|
||||
paths: Vec<PathBuf>,
|
||||
) -> Result<Vec<ServiceDefinition>, String> {
|
||||
let pool = fill_pool_from_files(&self.app_handle, paths).await?;
|
||||
self.pools.insert(self.get_pool_key(id, uri), pool.clone());
|
||||
Ok(self.services_from_pool(&pool))
|
||||
}
|
||||
pub async fn services_from_reflection(
|
||||
&mut self,
|
||||
id: &str,
|
||||
uri: &Uri,
|
||||
) -> Result<Vec<ServiceDefinition>, String> {
|
||||
let pool = fill_pool(uri).await?;
|
||||
self.pools.insert(self.get_pool_key(id, uri), pool.clone());
|
||||
Ok(self.services_from_pool(&pool))
|
||||
uri: &str,
|
||||
proto_files: &Vec<PathBuf>,
|
||||
) -> Result<(), String> {
|
||||
let pool = if proto_files.is_empty() {
|
||||
let full_uri = uri_from_str(uri)?;
|
||||
fill_pool_from_reflection(&full_uri).await
|
||||
} else {
|
||||
fill_pool_from_files(&self.app_handle, proto_files).await
|
||||
}?;
|
||||
|
||||
self.pools
|
||||
.insert(make_pool_key(id, uri, proto_files), pool.clone());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_pool_key(&self, id: &str, uri: &Uri) -> String {
|
||||
format!("{}-{}", id, uri)
|
||||
pub async fn services(
|
||||
&mut self,
|
||||
id: &str,
|
||||
uri: &str,
|
||||
proto_files: &Vec<PathBuf>,
|
||||
) -> Result<Vec<ServiceDefinition>, String> {
|
||||
// Ensure reflection is up-to-date
|
||||
self.reflect(id, uri, proto_files).await?;
|
||||
|
||||
let pool = self
|
||||
.get_pool(id, uri, proto_files)
|
||||
.ok_or("Failed to get pool".to_string())?;
|
||||
Ok(self.services_from_pool(&pool))
|
||||
}
|
||||
|
||||
fn services_from_pool(&self, pool: &DescriptorPool) -> Vec<ServiceDefinition> {
|
||||
@@ -231,25 +245,27 @@ impl GrpcHandle {
|
||||
pub async fn connect(
|
||||
&mut self,
|
||||
id: &str,
|
||||
uri: Uri,
|
||||
proto_files: Vec<PathBuf>,
|
||||
uri: &str,
|
||||
proto_files: &Vec<PathBuf>,
|
||||
) -> Result<GrpcConnection, String> {
|
||||
let pool = match self.pools.get(id) {
|
||||
Some(p) => p.clone(),
|
||||
None => match proto_files.len() {
|
||||
0 => fill_pool(&uri).await?,
|
||||
_ => {
|
||||
let pool = fill_pool_from_files(&self.app_handle, proto_files).await?;
|
||||
self.pools.insert(id.to_string(), pool.clone());
|
||||
pool
|
||||
}
|
||||
},
|
||||
};
|
||||
self.reflect(id, uri, proto_files).await?;
|
||||
let pool = self
|
||||
.get_pool(id, uri, proto_files)
|
||||
.ok_or("Failed to get pool")?;
|
||||
|
||||
let uri = uri_from_str(uri)?;
|
||||
let conn = get_transport();
|
||||
let connection = GrpcConnection { pool, conn, uri };
|
||||
let connection = GrpcConnection {
|
||||
pool: pool.clone(),
|
||||
conn,
|
||||
uri,
|
||||
};
|
||||
Ok(connection)
|
||||
}
|
||||
|
||||
fn get_pool(&self, id: &str, uri: &str, proto_files: &Vec<PathBuf>) -> Option<&DescriptorPool> {
|
||||
self.pools.get(make_pool_key(id, uri, proto_files).as_str())
|
||||
}
|
||||
}
|
||||
|
||||
fn decorate_req<T>(metadata: HashMap<String, String>, req: &mut Request<T>) -> Result<(), String> {
|
||||
@@ -261,3 +277,28 @@ fn decorate_req<T>(metadata: HashMap<String, String>, req: &mut Request<T>) -> R
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn uri_from_str(uri_str: &str) -> Result<Uri, String> {
|
||||
match Uri::from_str(uri_str) {
|
||||
Ok(uri) => Ok(uri),
|
||||
Err(err) => {
|
||||
// Uri::from_str basically only returns "invalid format" so we add more context here
|
||||
Err(format!("Failed to parse URL, {}", err.to_string()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn make_pool_key(id: &str, uri: &str, proto_files: &Vec<PathBuf>) -> String {
|
||||
let pool_key = format!(
|
||||
"{}::{}::{}",
|
||||
id,
|
||||
uri,
|
||||
proto_files
|
||||
.iter()
|
||||
.map(|p| p.to_string_lossy().to_string())
|
||||
.collect::<Vec<String>>()
|
||||
.join(":")
|
||||
);
|
||||
|
||||
format!("{:x}", md5::compute(pool_key))
|
||||
}
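Taken together, the refactor above keys every cached `DescriptorPool` by an md5 of `(id, uri, proto_files)` and funnels both the file-based and reflection-based paths through `reflect`. A rough usage sketch based only on the signatures shown in this diff; the surrounding function, variable names, and URI value are hypothetical, not code from the app.

```rust
use std::path::PathBuf;
use tauri::AppHandle;

// Hypothetical call site illustrating the new GrpcHandle API.
// An empty `proto_files` list makes `reflect` fall back to server reflection.
async fn list_and_connect(
    app_handle: &AppHandle,
    request_id: &str,
) -> Result<(), String> {
    let mut handle = GrpcHandle::new(app_handle);
    let uri = "http://localhost:50051";
    let proto_files: Vec<PathBuf> = vec![];

    // Populates (or refreshes) the cached DescriptorPool for this
    // (id, uri, proto_files) key, then reads the services back out of it.
    let services = handle.services(request_id, uri, &proto_files).await?;
    println!("found {} services", services.len());

    // Reuses the same cached pool; services() already called reflect().
    let _connection = handle.connect(request_id, uri, &proto_files).await?;
    Ok(())
}
```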
|
||||
|
||||
@@ -7,12 +7,13 @@ use anyhow::anyhow;
|
||||
use hyper::client::HttpConnector;
|
||||
use hyper::Client;
|
||||
use hyper_rustls::{HttpsConnector, HttpsConnectorBuilder};
|
||||
use log::{debug, info, warn};
|
||||
use log::{debug, warn};
|
||||
use prost::Message;
|
||||
use prost_reflect::{DescriptorPool, MethodDescriptor};
|
||||
use prost_types::{FileDescriptorProto, FileDescriptorSet};
|
||||
use tauri::api::process::{Command, CommandEvent};
|
||||
use tauri::AppHandle;
|
||||
use tauri::path::BaseDirectory;
|
||||
use tauri::{AppHandle, Manager};
|
||||
use tauri_plugin_shell::ShellExt;
|
||||
use tokio::fs;
|
||||
use tokio_stream::StreamExt;
|
||||
use tonic::body::BoxBody;
|
||||
@@ -26,21 +27,27 @@ use tonic_reflection::pb::ServerReflectionRequest;
|
||||
|
||||
pub async fn fill_pool_from_files(
|
||||
app_handle: &AppHandle,
|
||||
paths: Vec<PathBuf>,
|
||||
paths: &Vec<PathBuf>,
|
||||
) -> Result<DescriptorPool, String> {
|
||||
let mut pool = DescriptorPool::new();
|
||||
let random_file_name = format!("{}.desc", uuid::Uuid::new_v4());
|
||||
let desc_path = temp_dir().join(random_file_name);
|
||||
let global_import_dir = app_handle
|
||||
.path_resolver()
|
||||
.resolve_resource("protoc-vendored/include")
|
||||
.path()
|
||||
.resolve("protoc-include", BaseDirectory::Resource)
|
||||
.expect("failed to resolve protoc include directory");
|
||||
|
||||
// HACK: Remove UNC prefix for Windows paths
|
||||
let global_import_dir = dunce::simplified(global_import_dir.as_path())
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
let desc_path = dunce::simplified(desc_path.as_path());
|
||||
|
||||
let mut args = vec![
|
||||
"--include_imports".to_string(),
|
||||
"--include_source_info".to_string(),
|
||||
"-I".to_string(),
|
||||
global_import_dir.to_string_lossy().to_string(),
|
||||
global_import_dir,
|
||||
"-o".to_string(),
|
||||
desc_path.to_string_lossy().to_string(),
|
||||
];
|
||||
@@ -63,55 +70,36 @@ pub async fn fill_pool_from_files(
|
||||
}
|
||||
}
|
||||
|
||||
let (mut rx, _child) = Command::new_sidecar("protoc")
|
||||
.expect("protoc not found")
|
||||
let out = app_handle
|
||||
.shell()
|
||||
.sidecar("yaakprotoc")
|
||||
.expect("yaakprotoc not found")
|
||||
.args(args)
|
||||
.spawn()
|
||||
.expect("protoc failed to start");
|
||||
.output()
|
||||
.await
|
||||
.expect("yaakprotoc failed to run");
|
||||
|
||||
while let Some(event) = rx.recv().await {
|
||||
match event {
|
||||
CommandEvent::Stdout(line) => {
|
||||
info!("protoc stdout: {}", line);
|
||||
}
|
||||
CommandEvent::Stderr(line) => {
|
||||
info!("protoc stderr: {}", line);
|
||||
}
|
||||
CommandEvent::Error(e) => {
|
||||
return Err(e.to_string());
|
||||
}
|
||||
CommandEvent::Terminated(c) => {
|
||||
match c.code {
|
||||
Some(0) => {
|
||||
// success
|
||||
}
|
||||
Some(code) => {
|
||||
return Err(format!("protoc failed with exit code: {}", code,));
|
||||
}
|
||||
None => {
|
||||
return Err("protoc failed with no exit code".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
if !out.status.success() {
|
||||
return Err(format!(
|
||||
"protoc failed with status {}: {}",
|
||||
out.status.code().unwrap(),
|
||||
String::from_utf8_lossy(out.stderr.as_slice())
|
||||
));
|
||||
}
|
||||
|
||||
let bytes = fs::read(desc_path.as_path())
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
let bytes = fs::read(desc_path).await.map_err(|e| e.to_string())?;
|
||||
let fdp = FileDescriptorSet::decode(bytes.deref()).map_err(|e| e.to_string())?;
|
||||
pool.add_file_descriptor_set(fdp)
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
fs::remove_file(desc_path.as_path())
|
||||
fs::remove_file(desc_path)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
Ok(pool)
|
||||
}
|
||||
|
||||
pub async fn fill_pool(uri: &Uri) -> Result<DescriptorPool, String> {
|
||||
pub async fn fill_pool_from_reflection(uri: &Uri) -> Result<DescriptorPool, String> {
|
||||
let mut pool = DescriptorPool::new();
|
||||
let mut client = ServerReflectionClient::with_origin(get_transport(), uri.clone());
|
||||
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<!-- Re-enable for sandboxing. Currently disabled because auto-updater doesn't work with sandboxing.-->
|
||||
<!-- <key>com.apple.security.app-sandbox</key> <true/>-->
|
||||
<!-- <key>com.apple.security.files.user-selected.read-write</key> <true/>-->
|
||||
<!-- <key>com.apple.security.network.client</key> <true/>-->
|
||||
</dict>
|
||||
<dict>
|
||||
<!-- Enable for v8 execution -->
|
||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
||||
<true/>
|
||||
|
||||
<!-- Re-enable for sandboxing. Currently disabled because auto-updater doesn't work with sandboxing.-->
|
||||
<!-- <key>com.apple.security.app-sandbox</key> <true/>-->
|
||||
<!-- <key>com.apple.security.files.user-selected.read-write</key> <true/>-->
|
||||
<!-- <key>com.apple.security.network.client</key> <true/>-->
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
4
src-tauri/migrations/20240522031045_theme-settings.sql
Normal file
4
src-tauri/migrations/20240522031045_theme-settings.sql
Normal file
@@ -0,0 +1,4 @@
|
||||
ALTER TABLE settings
|
||||
ADD COLUMN theme_dark TEXT DEFAULT 'yaak-dark' NOT NULL;
|
||||
ALTER TABLE settings
|
||||
ADD COLUMN theme_light TEXT DEFAULT 'yaak-light' NOT NULL;
|
||||
4
src-tauri/migrations/20240529143147_more-settings.sql
Normal file
4
src-tauri/migrations/20240529143147_more-settings.sql
Normal file
@@ -0,0 +1,4 @@
|
||||
ALTER TABLE settings ADD COLUMN interface_font_size INTEGER DEFAULT 15 NOT NULL;
|
||||
ALTER TABLE settings ADD COLUMN interface_scale INTEGER DEFAULT 1 NOT NULL;
|
||||
ALTER TABLE settings ADD COLUMN editor_font_size INTEGER DEFAULT 13 NOT NULL;
|
||||
ALTER TABLE settings ADD COLUMN editor_soft_wrap BOOLEAN DEFAULT 1 NOT NULL;
|
||||
@@ -0,0 +1 @@
|
||||
ALTER TABLE settings ADD COLUMN open_workspace_new_window BOOLEAN NULL DEFAULT NULL;
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,165 +0,0 @@
|
||||
function g(e, n) {
|
||||
return console.log("IMPORTING Environment", e._id, e.name, JSON.stringify(e, null, 2)), {
|
||||
id: e._id,
|
||||
createdAt: new Date(e.created ?? Date.now()).toISOString().replace("Z", ""),
|
||||
updatedAt: new Date(e.updated ?? Date.now()).toISOString().replace("Z", ""),
|
||||
workspaceId: n,
|
||||
model: "environment",
|
||||
name: e.name,
|
||||
variables: Object.entries(e.data).map(([t, a]) => ({
|
||||
enabled: !0,
|
||||
name: t,
|
||||
value: `${a}`
|
||||
}))
|
||||
};
|
||||
}
|
||||
function S(e) {
|
||||
return m(e) && e._type === "workspace";
|
||||
}
|
||||
function I(e) {
|
||||
return m(e) && e._type === "request_group";
|
||||
}
|
||||
function y(e) {
|
||||
return m(e) && e._type === "request";
|
||||
}
|
||||
function h(e) {
|
||||
return m(e) && e._type === "grpc_request";
|
||||
}
|
||||
function f(e) {
|
||||
return m(e) && e._type === "environment";
|
||||
}
|
||||
function m(e) {
|
||||
return Object.prototype.toString.call(e) === "[object Object]";
|
||||
}
|
||||
function w(e) {
|
||||
return Object.prototype.toString.call(e) === "[object String]";
|
||||
}
|
||||
function O(e) {
|
||||
return Object.entries(e).map(([n, t]) => ({
|
||||
enabled: !0,
|
||||
name: n,
|
||||
value: `${t}`
|
||||
}));
|
||||
}
|
||||
function d(e) {
|
||||
return w(e) ? e.replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, "${[$2]}") : e;
|
||||
}
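The helper `d` above is what rewrites Insomnia-style template tags into Yaak's syntax, e.g. `{{ _.base_url }}` becomes `${[base_url]}`. A close analogue in Rust using the `regex` crate, as a sketch of the idea only; it is not byte-for-byte identical to the JavaScript behaviour, which can keep trailing whitespace inside the tag.

```rust
use regex::Regex;

// Sketch: convert "{{ _.var }}" / "{{ var }}" tags to "${[var]}".
fn convert_template_syntax(input: &str) -> String {
    // Optional "_." prefix, then the variable name (no braces or whitespace).
    let re = Regex::new(r"\{\{\s*(?:_\.)?([^}\s]+)\s*\}\}").unwrap();
    // "$$" is a literal '$' in the replacement string; "$1" is the capture.
    re.replace_all(input, "$${[$1]}").to_string()
}

fn main() {
    assert_eq!(
        convert_template_syntax("{{ _.base_url }}/users/{{ id }}"),
        "${[base_url]}/users/${[id]}"
    );
}
```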
|
||||
function _(e, n, t = 0) {
|
||||
var l, r;
|
||||
console.log("IMPORTING REQUEST", e._id, e.name, JSON.stringify(e, null, 2));
|
||||
let a = null, o = null;
|
||||
((l = e.body) == null ? void 0 : l.mimeType) === "application/graphql" ? (a = "graphql", o = d(e.body.text)) : ((r = e.body) == null ? void 0 : r.mimeType) === "application/json" && (a = "application/json", o = d(e.body.text));
|
||||
let s = null, p = {};
|
||||
return e.authentication.type === "bearer" ? (s = "bearer", p = {
|
||||
token: d(e.authentication.token)
|
||||
}) : e.authentication.type === "basic" && (s = "basic", p = {
|
||||
username: d(e.authentication.username),
|
||||
password: d(e.authentication.password)
|
||||
}), {
|
||||
id: e._id,
|
||||
createdAt: new Date(e.created ?? Date.now()).toISOString().replace("Z", ""),
|
||||
updatedAt: new Date(e.updated ?? Date.now()).toISOString().replace("Z", ""),
|
||||
workspaceId: n,
|
||||
folderId: e.parentId === n ? null : e.parentId,
|
||||
model: "http_request",
|
||||
sortPriority: t,
|
||||
name: e.name,
|
||||
url: d(e.url),
|
||||
body: o,
|
||||
bodyType: a,
|
||||
authentication: p,
|
||||
authenticationType: s,
|
||||
method: e.method,
|
||||
headers: (e.headers ?? []).map(({ name: u, value: c, disabled: i }) => ({
|
||||
enabled: !i,
|
||||
name: u,
|
||||
value: c
|
||||
})).filter(({ name: u, value: c }) => u !== "" || c !== "")
|
||||
};
|
||||
}
|
||||
function R(e, n) {
|
||||
return console.log("IMPORTING FOLDER", e._id, e.name, JSON.stringify(e, null, 2)), {
|
||||
id: e._id,
|
||||
createdAt: new Date(e.created ?? Date.now()).toISOString().replace("Z", ""),
|
||||
updatedAt: new Date(e.updated ?? Date.now()).toISOString().replace("Z", ""),
|
||||
folderId: e.parentId === n ? null : e.parentId,
|
||||
workspaceId: n,
|
||||
model: "folder",
|
||||
name: e.name
|
||||
};
|
||||
}
|
||||
function D(e, n, t = 0) {
|
||||
var p;
|
||||
console.log("IMPORTING GRPC REQUEST", e._id, e.name, JSON.stringify(e, null, 2));
|
||||
const a = e.protoMethodName.split("/").filter((l) => l !== ""), o = a[0] ?? null, s = a[1] ?? null;
|
||||
return {
|
||||
id: e._id,
|
||||
createdAt: new Date(e.created ?? Date.now()).toISOString().replace("Z", ""),
|
||||
updatedAt: new Date(e.updated ?? Date.now()).toISOString().replace("Z", ""),
|
||||
workspaceId: n,
|
||||
folderId: e.parentId === n ? null : e.parentId,
|
||||
model: "grpc_request",
|
||||
sortPriority: t,
|
||||
name: e.name,
|
||||
url: d(e.url),
|
||||
service: o,
|
||||
method: s,
|
||||
message: ((p = e.body) == null ? void 0 : p.text) ?? "",
|
||||
metadata: (e.metadata ?? []).map(({ name: l, value: r, disabled: u }) => ({
|
||||
enabled: !u,
|
||||
name: l,
|
||||
value: r
|
||||
})).filter(({ name: l, value: r }) => l !== "" || r !== "")
|
||||
};
|
||||
}
|
||||
function q(e) {
|
||||
let n;
|
||||
try {
|
||||
n = JSON.parse(e);
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
if (!m(n) || !Array.isArray(n.resources))
|
||||
return;
|
||||
const t = {
|
||||
workspaces: [],
|
||||
httpRequests: [],
|
||||
grpcRequests: [],
|
||||
environments: [],
|
||||
folders: []
|
||||
}, a = n.resources.filter(S);
|
||||
for (const o of a) {
|
||||
const s = n.resources.find(
|
||||
(r) => f(r) && r.parentId === o._id
|
||||
);
|
||||
t.workspaces.push({
|
||||
id: o._id,
|
||||
createdAt: new Date(a.created ?? Date.now()).toISOString().replace("Z", ""),
|
||||
updatedAt: new Date(a.updated ?? Date.now()).toISOString().replace("Z", ""),
|
||||
model: "workspace",
|
||||
name: o.name,
|
||||
variables: s ? O(s.data) : []
|
||||
});
|
||||
const p = n.resources.filter(
|
||||
(r) => f(r) && r.parentId === (s == null ? void 0 : s._id)
|
||||
);
|
||||
t.environments.push(
|
||||
...p.map((r) => g(r, o._id))
|
||||
);
|
||||
const l = (r) => {
|
||||
const u = n.resources.filter((i) => i.parentId === r);
|
||||
let c = 0;
|
||||
for (const i of u)
|
||||
I(i) ? (t.folders.push(R(i, o._id)), l(i._id)) : y(i) ? t.httpRequests.push(
|
||||
_(i, o._id, c++)
|
||||
) : h(i) && (console.log("GRPC", JSON.stringify(i, null, 1)), t.grpcRequests.push(
|
||||
D(i, o._id, c++)
|
||||
));
|
||||
};
|
||||
l(o._id);
|
||||
}
|
||||
return t.httpRequests = t.httpRequests.filter(Boolean), t.grpcRequests = t.grpcRequests.filter(Boolean), t.environments = t.environments.filter(Boolean), t.workspaces = t.workspaces.filter(Boolean), { resources: t };
|
||||
}
|
||||
export {
|
||||
q as pluginHookImport
|
||||
};
|
||||
@@ -1,172 +0,0 @@
|
||||
const q = "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", S = "https://schema.getpostman.com/json/collection/v2.0.0/collection.json", _ = [S, q];
|
||||
function v(t) {
|
||||
var b;
|
||||
const e = w(t);
|
||||
if (e == null)
|
||||
return;
|
||||
const n = o(e.info);
|
||||
if (!_.includes(n.schema) || !Array.isArray(e.item))
|
||||
return;
|
||||
const A = g(e.auth), i = {
|
||||
workspaces: [],
|
||||
environments: [],
|
||||
httpRequests: [],
|
||||
folders: []
|
||||
}, c = {
|
||||
model: "workspace",
|
||||
id: m("wk"),
|
||||
name: n.name || "Postman Import",
|
||||
description: n.description || "",
|
||||
variables: ((b = e.variable) == null ? void 0 : b.map((r) => ({
|
||||
name: r.key,
|
||||
value: r.value
|
||||
}))) ?? []
|
||||
};
|
||||
i.workspaces.push(c);
|
||||
const f = (r, u = null) => {
|
||||
if (typeof r.name == "string" && Array.isArray(r.item)) {
|
||||
const a = {
|
||||
model: "folder",
|
||||
workspaceId: c.id,
|
||||
id: m("fl"),
|
||||
name: r.name,
|
||||
folderId: u
|
||||
};
|
||||
i.folders.push(a);
|
||||
for (const s of r.item)
|
||||
f(s, a.id);
|
||||
} else if (typeof r.name == "string" && "request" in r) {
|
||||
const a = o(r.request), s = O(a.body), T = g(a.auth), d = T.authenticationType == null ? A : T, k = {
|
||||
model: "http_request",
|
||||
id: m("rq"),
|
||||
workspaceId: c.id,
|
||||
folderId: u,
|
||||
name: r.name,
|
||||
method: a.method || "GET",
|
||||
url: typeof a.url == "string" ? a.url : o(a.url).raw,
|
||||
body: s.body,
|
||||
bodyType: s.bodyType,
|
||||
authentication: d.authentication,
|
||||
authenticationType: d.authenticationType,
|
||||
headers: [
|
||||
...s.headers,
|
||||
...d.headers,
|
||||
...y(a.header).map((p) => ({
|
||||
name: p.key,
|
||||
value: p.value,
|
||||
enabled: !p.disabled
|
||||
}))
|
||||
]
|
||||
};
|
||||
i.httpRequests.push(k);
|
||||
} else
|
||||
console.log("Unknown item", r, u);
|
||||
};
|
||||
for (const r of e.item)
|
||||
f(r);
|
||||
return { resources: h(i) };
|
||||
}
|
||||
function g(t) {
|
||||
const e = o(t);
|
||||
return "basic" in e ? {
|
||||
headers: [],
|
||||
authenticationType: "basic",
|
||||
authentication: {
|
||||
username: e.basic.username || "",
|
||||
password: e.basic.password || ""
|
||||
}
|
||||
} : "bearer" in e ? {
|
||||
headers: [],
|
||||
authenticationType: "bearer",
|
||||
authentication: {
|
||||
token: e.bearer.token || ""
|
||||
}
|
||||
} : { headers: [], authenticationType: null, authentication: {} };
|
||||
}
|
||||
function O(t) {
|
||||
const e = o(t);
|
||||
return "graphql" in e ? {
|
||||
headers: [
|
||||
{
|
||||
name: "Content-Type",
|
||||
value: "application/json",
|
||||
enabled: !0
|
||||
}
|
||||
],
|
||||
bodyType: "graphql",
|
||||
body: {
|
||||
text: JSON.stringify(
|
||||
{ query: e.graphql.query, variables: w(e.graphql.variables) },
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
} : "urlencoded" in e ? {
|
||||
headers: [
|
||||
{
|
||||
name: "Content-Type",
|
||||
value: "application/x-www-form-urlencoded",
|
||||
enabled: !0
|
||||
}
|
||||
],
|
||||
bodyType: "application/x-www-form-urlencoded",
|
||||
body: {
|
||||
form: y(e.urlencoded).map((n) => ({
|
||||
enabled: !n.disabled,
|
||||
name: n.key ?? "",
|
||||
value: n.value ?? ""
|
||||
}))
|
||||
}
|
||||
} : "formdata" in e ? {
|
||||
headers: [
|
||||
{
|
||||
name: "Content-Type",
|
||||
value: "multipart/form-data",
|
||||
enabled: !0
|
||||
}
|
||||
],
|
||||
bodyType: "multipart/form-data",
|
||||
body: {
|
||||
form: y(e.formdata).map(
|
||||
(n) => n.src != null ? {
|
||||
enabled: !n.disabled,
|
||||
name: n.key ?? "",
|
||||
file: n.src ?? ""
|
||||
} : {
|
||||
enabled: !n.disabled,
|
||||
name: n.key ?? "",
|
||||
value: n.value ?? ""
|
||||
}
|
||||
)
|
||||
}
|
||||
} : { headers: [], bodyType: null, body: {} };
|
||||
}
|
||||
function w(t) {
|
||||
try {
|
||||
return o(JSON.parse(t));
|
||||
} catch {
|
||||
}
|
||||
return null;
|
||||
}
|
||||
function o(t) {
|
||||
return Object.prototype.toString.call(t) === "[object Object]" ? t : {};
|
||||
}
|
||||
function y(t) {
|
||||
return Object.prototype.toString.call(t) === "[object Array]" ? t : [];
|
||||
}
|
||||
function h(t) {
|
||||
return typeof t == "string" ? t.replace(/{{\s*(_\.)?([^}]+)\s*}}/g, "${[$2]}") : Array.isArray(t) && t != null ? t.map(h) : typeof t == "object" && t != null ? Object.fromEntries(
|
||||
Object.entries(t).map(([e, n]) => [e, h(n)])
|
||||
) : t;
|
||||
}
|
||||
function m(t) {
|
||||
const e = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
|
||||
let n = `${t}_`;
|
||||
for (let l = 0; l < 10; l++)
|
||||
n += e[Math.floor(Math.random() * e.length)];
|
||||
return n;
|
||||
}
|
||||
export {
|
||||
m as generateId,
|
||||
v as pluginHookImport
|
||||
};
|
||||
@@ -1,17 +0,0 @@
|
||||
function u(r) {
|
||||
let e;
|
||||
try {
|
||||
e = JSON.parse(r);
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
if (!(!t(e) || !("yaakSchema" in e)))
|
||||
return "requests" in e.resources && (e.resources.httpRequests = e.resources.requests, delete e.resources.requests), { resources: e.resources };
|
||||
}
|
||||
function t(r) {
|
||||
return Object.prototype.toString.call(r) === "[object Object]";
|
||||
}
|
||||
export {
|
||||
t as isJSObject,
|
||||
u as pluginHookImport
|
||||
};
|
||||
@@ -1,162 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option go_package = "google.golang.org/protobuf/types/known/anypb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "AnyProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
|
||||
// `Any` contains an arbitrary serialized protocol buffer message along with a
|
||||
// URL that describes the type of the serialized message.
|
||||
//
|
||||
// Protobuf library provides support to pack/unpack Any values in the form
|
||||
// of utility functions or additional generated methods of the Any type.
|
||||
//
|
||||
// Example 1: Pack and unpack a message in C++.
|
||||
//
|
||||
// Foo foo = ...;
|
||||
// Any any;
|
||||
// any.PackFrom(foo);
|
||||
// ...
|
||||
// if (any.UnpackTo(&foo)) {
|
||||
// ...
|
||||
// }
|
||||
//
|
||||
// Example 2: Pack and unpack a message in Java.
|
||||
//
|
||||
// Foo foo = ...;
|
||||
// Any any = Any.pack(foo);
|
||||
// ...
|
||||
// if (any.is(Foo.class)) {
|
||||
// foo = any.unpack(Foo.class);
|
||||
// }
|
||||
// // or ...
|
||||
// if (any.isSameTypeAs(Foo.getDefaultInstance())) {
|
||||
// foo = any.unpack(Foo.getDefaultInstance());
|
||||
// }
|
||||
//
|
||||
// Example 3: Pack and unpack a message in Python.
|
||||
//
|
||||
// foo = Foo(...)
|
||||
// any = Any()
|
||||
// any.Pack(foo)
|
||||
// ...
|
||||
// if any.Is(Foo.DESCRIPTOR):
|
||||
// any.Unpack(foo)
|
||||
// ...
|
||||
//
|
||||
// Example 4: Pack and unpack a message in Go
|
||||
//
|
||||
// foo := &pb.Foo{...}
|
||||
// any, err := anypb.New(foo)
|
||||
// if err != nil {
|
||||
// ...
|
||||
// }
|
||||
// ...
|
||||
// foo := &pb.Foo{}
|
||||
// if err := any.UnmarshalTo(foo); err != nil {
|
||||
// ...
|
||||
// }
|
||||
//
|
||||
// The pack methods provided by protobuf library will by default use
|
||||
// 'type.googleapis.com/full.type.name' as the type URL and the unpack
|
||||
// methods only use the fully qualified type name after the last '/'
|
||||
// in the type URL, for example "foo.bar.com/x/y.z" will yield type
|
||||
// name "y.z".
|
||||
//
|
||||
// JSON
|
||||
// ====
|
||||
// The JSON representation of an `Any` value uses the regular
|
||||
// representation of the deserialized, embedded message, with an
|
||||
// additional field `@type` which contains the type URL. Example:
|
||||
//
|
||||
// package google.profile;
|
||||
// message Person {
|
||||
// string first_name = 1;
|
||||
// string last_name = 2;
|
||||
// }
|
||||
//
|
||||
// {
|
||||
// "@type": "type.googleapis.com/google.profile.Person",
|
||||
// "firstName": <string>,
|
||||
// "lastName": <string>
|
||||
// }
|
||||
//
|
||||
// If the embedded message type is well-known and has a custom JSON
|
||||
// representation, that representation will be embedded adding a field
|
||||
// `value` which holds the custom JSON in addition to the `@type`
|
||||
// field. Example (for message [google.protobuf.Duration][]):
|
||||
//
|
||||
// {
|
||||
// "@type": "type.googleapis.com/google.protobuf.Duration",
|
||||
// "value": "1.212s"
|
||||
// }
|
||||
//
|
||||
message Any {
|
||||
// A URL/resource name that uniquely identifies the type of the serialized
|
||||
// protocol buffer message. This string must contain at least
|
||||
// one "/" character. The last segment of the URL's path must represent
|
||||
// the fully qualified name of the type (as in
|
||||
// `path/google.protobuf.Duration`). The name should be in a canonical form
|
||||
// (e.g., leading "." is not accepted).
|
||||
//
|
||||
// In practice, teams usually precompile into the binary all types that they
|
||||
// expect it to use in the context of Any. However, for URLs which use the
|
||||
// scheme `http`, `https`, or no scheme, one can optionally set up a type
|
||||
// server that maps type URLs to message definitions as follows:
|
||||
//
|
||||
// * If no scheme is provided, `https` is assumed.
|
||||
// * An HTTP GET on the URL must yield a [google.protobuf.Type][]
|
||||
// value in binary format, or produce an error.
|
||||
// * Applications are allowed to cache lookup results based on the
|
||||
// URL, or have them precompiled into a binary to avoid any
|
||||
// lookup. Therefore, binary compatibility needs to be preserved
|
||||
// on changes to types. (Use versioned type names to manage
|
||||
// breaking changes.)
|
||||
//
|
||||
// Note: this functionality is not currently available in the official
|
||||
// protobuf release, and it is not used for type URLs beginning with
|
||||
// type.googleapis.com. As of May 2023, there are no widely used type server
|
||||
// implementations and no plans to implement one.
|
||||
//
|
||||
// Schemes other than `http`, `https` (or the empty scheme) might be
|
||||
// used with implementation specific semantics.
|
||||
//
|
||||
string type_url = 1;
|
||||
|
||||
// Must be a valid serialized protocol buffer of the above specified type.
|
||||
bytes value = 2;
|
||||
}
|
||||
@@ -1,207 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
import "google/protobuf/source_context.proto";
|
||||
import "google/protobuf/type.proto";
|
||||
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "ApiProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
option go_package = "google.golang.org/protobuf/types/known/apipb";
|
||||
|
||||
// Api is a light-weight descriptor for an API Interface.
|
||||
//
|
||||
// Interfaces are also described as "protocol buffer services" in some contexts,
|
||||
// such as by the "service" keyword in a .proto file, but they are different
|
||||
// from API Services, which represent a concrete implementation of an interface
|
||||
// as opposed to simply a description of methods and bindings. They are also
|
||||
// sometimes simply referred to as "APIs" in other contexts, such as the name of
|
||||
// this message itself. See https://cloud.google.com/apis/design/glossary for
|
||||
// detailed terminology.
|
||||
message Api {
|
||||
// The fully qualified name of this interface, including package name
|
||||
// followed by the interface's simple name.
|
||||
string name = 1;
|
||||
|
||||
// The methods of this interface, in unspecified order.
|
||||
repeated Method methods = 2;
|
||||
|
||||
// Any metadata attached to the interface.
|
||||
repeated Option options = 3;
|
||||
|
||||
// A version string for this interface. If specified, must have the form
|
||||
// `major-version.minor-version`, as in `1.10`. If the minor version is
|
||||
// omitted, it defaults to zero. If the entire version field is empty, the
|
||||
// major version is derived from the package name, as outlined below. If the
|
||||
// field is not empty, the version in the package name will be verified to be
|
||||
// consistent with what is provided here.
|
||||
//
|
||||
// The versioning schema uses [semantic
|
||||
// versioning](http://semver.org) where the major version number
|
||||
// indicates a breaking change and the minor version an additive,
|
||||
// non-breaking change. Both version numbers are signals to users
|
||||
// what to expect from different versions, and should be carefully
|
||||
// chosen based on the product plan.
|
||||
//
|
||||
// The major version is also reflected in the package name of the
|
||||
// interface, which must end in `v<major-version>`, as in
|
||||
// `google.feature.v1`. For major versions 0 and 1, the suffix can
|
||||
// be omitted. Zero major versions must only be used for
|
||||
// experimental, non-GA interfaces.
|
||||
//
|
||||
string version = 4;
|
||||
|
||||
// Source context for the protocol buffer service represented by this
|
||||
// message.
|
||||
SourceContext source_context = 5;
|
||||
|
||||
// Included interfaces. See [Mixin][].
|
||||
repeated Mixin mixins = 6;
|
||||
|
||||
// The source syntax of the service.
|
||||
Syntax syntax = 7;
|
||||
}
|
||||
|
||||
// Method represents a method of an API interface.
|
||||
message Method {
|
||||
// The simple name of this method.
|
||||
string name = 1;
|
||||
|
||||
// A URL of the input message type.
|
||||
string request_type_url = 2;
|
||||
|
||||
// If true, the request is streamed.
|
||||
bool request_streaming = 3;
|
||||
|
||||
// The URL of the output message type.
|
||||
string response_type_url = 4;
|
||||
|
||||
// If true, the response is streamed.
|
||||
bool response_streaming = 5;
|
||||
|
||||
// Any metadata attached to the method.
|
||||
repeated Option options = 6;
|
||||
|
||||
// The source syntax of this method.
|
||||
Syntax syntax = 7;
|
||||
}
|
||||
|
||||
// Declares an API Interface to be included in this interface. The including
|
||||
// interface must redeclare all the methods from the included interface, but
|
||||
// documentation and options are inherited as follows:
|
||||
//
|
||||
// - If after comment and whitespace stripping, the documentation
|
||||
// string of the redeclared method is empty, it will be inherited
|
||||
// from the original method.
|
||||
//
|
||||
// - Each annotation belonging to the service config (http,
|
||||
// visibility) which is not set in the redeclared method will be
|
||||
// inherited.
|
||||
//
|
||||
// - If an http annotation is inherited, the path pattern will be
|
||||
// modified as follows. Any version prefix will be replaced by the
|
||||
// version of the including interface plus the [root][] path if
|
||||
// specified.
|
||||
//
|
||||
// Example of a simple mixin:
|
||||
//
|
||||
// package google.acl.v1;
|
||||
// service AccessControl {
|
||||
// // Get the underlying ACL object.
|
||||
// rpc GetAcl(GetAclRequest) returns (Acl) {
|
||||
// option (google.api.http).get = "/v1/{resource=**}:getAcl";
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// package google.storage.v2;
|
||||
// service Storage {
|
||||
// rpc GetAcl(GetAclRequest) returns (Acl);
|
||||
//
|
||||
// // Get a data record.
|
||||
// rpc GetData(GetDataRequest) returns (Data) {
|
||||
// option (google.api.http).get = "/v2/{resource=**}";
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// Example of a mixin configuration:
|
||||
//
|
||||
// apis:
|
||||
// - name: google.storage.v2.Storage
|
||||
// mixins:
|
||||
// - name: google.acl.v1.AccessControl
|
||||
//
|
||||
// The mixin construct implies that all methods in `AccessControl` are
|
||||
// also declared with same name and request/response types in
|
||||
// `Storage`. A documentation generator or annotation processor will
|
||||
// see the effective `Storage.GetAcl` method after inherting
|
||||
// documentation and annotations as follows:
|
||||
//
|
||||
// service Storage {
|
||||
// // Get the underlying ACL object.
|
||||
// rpc GetAcl(GetAclRequest) returns (Acl) {
|
||||
// option (google.api.http).get = "/v2/{resource=**}:getAcl";
|
||||
// }
|
||||
// ...
|
||||
// }
|
||||
//
|
||||
// Note how the version in the path pattern changed from `v1` to `v2`.
|
||||
//
|
||||
// If the `root` field in the mixin is specified, it should be a
|
||||
// relative path under which inherited HTTP paths are placed. Example:
|
||||
//
|
||||
// apis:
|
||||
// - name: google.storage.v2.Storage
|
||||
// mixins:
|
||||
// - name: google.acl.v1.AccessControl
|
||||
// root: acls
|
||||
//
|
||||
// This implies the following inherited HTTP annotation:
|
||||
//
|
||||
// service Storage {
|
||||
// // Get the underlying ACL object.
|
||||
// rpc GetAcl(GetAclRequest) returns (Acl) {
|
||||
// option (google.api.http).get = "/v2/acls/{resource=**}:getAcl";
|
||||
// }
|
||||
// ...
|
||||
// }
|
||||
message Mixin {
|
||||
// The fully qualified name of the interface which is included.
|
||||
string name = 1;
|
||||
|
||||
// If non-empty specifies a path under which inherited HTTP paths
|
||||
// are rooted.
|
||||
string root = 2;
|
||||
}
|
||||
@@ -1,168 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
//
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file or at
|
||||
// https://developers.google.com/open-source/licenses/bsd
|
||||
|
||||
// Author: kenton@google.com (Kenton Varda)
|
||||
//
|
||||
// protoc (aka the Protocol Compiler) can be extended via plugins. A plugin is
|
||||
// just a program that reads a CodeGeneratorRequest from stdin and writes a
|
||||
// CodeGeneratorResponse to stdout.
|
||||
//
|
||||
// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead
|
||||
// of dealing with the raw protocol defined here.
|
||||
//
|
||||
// A plugin executable needs only to be placed somewhere in the path. The
|
||||
// plugin should be named "protoc-gen-$NAME", and will then be used when the
|
||||
// flag "--${NAME}_out" is passed to protoc.
|
||||
|
||||
syntax = "proto2";
|
||||
|
||||
package google.protobuf.compiler;
|
||||
option java_package = "com.google.protobuf.compiler";
|
||||
option java_outer_classname = "PluginProtos";
|
||||
|
||||
option csharp_namespace = "Google.Protobuf.Compiler";
|
||||
option go_package = "google.golang.org/protobuf/types/pluginpb";
|
||||
|
||||
import "google/protobuf/descriptor.proto";
|
||||
|
||||
// The version number of protocol compiler.
|
||||
message Version {
|
||||
optional int32 major = 1;
|
||||
optional int32 minor = 2;
|
||||
optional int32 patch = 3;
|
||||
// A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
|
||||
// be empty for mainline stable releases.
|
||||
optional string suffix = 4;
|
||||
}
|
||||
|
||||
// An encoded CodeGeneratorRequest is written to the plugin's stdin.
|
||||
message CodeGeneratorRequest {
|
||||
// The .proto files that were explicitly listed on the command-line. The
|
||||
// code generator should generate code only for these files. Each file's
|
||||
// descriptor will be included in proto_file, below.
|
||||
repeated string file_to_generate = 1;
|
||||
|
||||
// The generator parameter passed on the command-line.
|
||||
optional string parameter = 2;
|
||||
|
||||
// FileDescriptorProtos for all files in files_to_generate and everything
|
||||
// they import. The files will appear in topological order, so each file
|
||||
// appears before any file that imports it.
|
||||
//
|
||||
// Note: the files listed in files_to_generate will include runtime-retention
|
||||
// options only, but all other files will include source-retention options.
|
||||
// The source_file_descriptors field below is available in case you need
|
||||
// source-retention options for files_to_generate.
|
||||
//
|
||||
// protoc guarantees that all proto_files will be written after
|
||||
// the fields above, even though this is not technically guaranteed by the
|
||||
// protobuf wire format. This theoretically could allow a plugin to stream
|
||||
// in the FileDescriptorProtos and handle them one by one rather than read
|
||||
// the entire set into memory at once. However, as of this writing, this
|
||||
// is not similarly optimized on protoc's end -- it will store all fields in
|
||||
// memory at once before sending them to the plugin.
|
||||
//
|
||||
// Type names of fields and extensions in the FileDescriptorProto are always
|
||||
// fully qualified.
|
||||
repeated FileDescriptorProto proto_file = 15;
|
||||
|
||||
// File descriptors with all options, including source-retention options.
|
||||
// These descriptors are only provided for the files listed in
|
||||
// files_to_generate.
|
||||
repeated FileDescriptorProto source_file_descriptors = 17;
|
||||
|
||||
// The version number of protocol compiler.
|
||||
optional Version compiler_version = 3;
|
||||
}
|
||||
|
||||
// The plugin writes an encoded CodeGeneratorResponse to stdout.
message CodeGeneratorResponse {
// Error message. If non-empty, code generation failed. The plugin process
// should exit with status code zero even if it reports an error in this way.
//
// This should be used to indicate errors in .proto files which prevent the
// code generator from generating correct code. Errors which indicate a
// problem in protoc itself -- such as the input CodeGeneratorRequest being
// unparseable -- should be reported by writing a message to stderr and
// exiting with a non-zero status code.
optional string error = 1;

// A bitmask of supported features that the code generator supports.
// This is a bitwise "or" of values from the Feature enum.
optional uint64 supported_features = 2;

// Sync with code_generator.h.
enum Feature {
FEATURE_NONE = 0;
FEATURE_PROTO3_OPTIONAL = 1;
FEATURE_SUPPORTS_EDITIONS = 2;
}

// Represents a single generated file.
message File {
// The file name, relative to the output directory. The name must not
// contain "." or ".." components and must be relative, not be absolute (so,
// the file cannot lie outside the output directory). "/" must be used as
// the path separator, not "\".
//
// If the name is omitted, the content will be appended to the previous
// file. This allows the generator to break large files into small chunks,
// and allows the generated text to be streamed back to protoc so that large
// files need not reside completely in memory at one time. Note that as of
// this writing protoc does not optimize for this -- it will read the entire
// CodeGeneratorResponse before writing files to disk.
optional string name = 1;

// If non-empty, indicates that the named file should already exist, and the
// content here is to be inserted into that file at a defined insertion
// point. This feature allows a code generator to extend the output
// produced by another code generator. The original generator may provide
// insertion points by placing special annotations in the file that look
// like:
// @@protoc_insertion_point(NAME)
// The annotation can have arbitrary text before and after it on the line,
// which allows it to be placed in a comment. NAME should be replaced with
// an identifier naming the point -- this is what other generators will use
// as the insertion_point. Code inserted at this point will be placed
// immediately above the line containing the insertion point (thus multiple
// insertions to the same point will come out in the order they were added).
// The double-@ is intended to make it unlikely that the generated code
// could contain things that look like insertion points by accident.
//
// For example, the C++ code generator places the following line in the
// .pb.h files that it generates:
// // @@protoc_insertion_point(namespace_scope)
// This line appears within the scope of the file's package namespace, but
// outside of any particular class. Another plugin can then specify the
// insertion_point "namespace_scope" to generate additional classes or
// other declarations that should be placed in this scope.
//
// Note that if the line containing the insertion point begins with
// whitespace, the same whitespace will be added to every line of the
// inserted text. This is useful for languages like Python, where
// indentation matters. In these languages, the insertion point comment
// should be indented the same amount as any inserted code will need to be
// in order to work correctly in that context.
//
// The code generator that generates the initial file and the one which
// inserts into it must both run as part of a single invocation of protoc.
// Code generators are executed in the order in which they appear on the
// command line.
//
// If |insertion_point| is present, |name| must also be present.
optional string insertion_point = 2;

// The file contents.
optional string content = 15;

// Information describing the file content being inserted. If an insertion
// point is used, this information will be appropriately offset and inserted
// into the code generation metadata for the generated files.
optional GeneratedCodeInfo generated_code_info = 16;
}
repeated File file = 15;
}
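For reference, a protoc plugin that produces this response can be only a few lines. The following is a minimal sketch using the Python protobuf runtime's google.protobuf.compiler.plugin_pb2 module (assumed to be installed); the output file name and contents are made-up placeholders, not anything this repository generates.

    #!/usr/bin/env python3
    # Minimal protoc plugin sketch: read a CodeGeneratorRequest from stdin,
    # write an encoded CodeGeneratorResponse to stdout.
    import sys
    from google.protobuf.compiler import plugin_pb2

    request = plugin_pb2.CodeGeneratorRequest.FromString(sys.stdin.buffer.read())

    response = plugin_pb2.CodeGeneratorResponse()
    # Advertise proto3-optional support via the Feature bitmask described above.
    response.supported_features = plugin_pb2.CodeGeneratorResponse.FEATURE_PROTO3_OPTIONAL

    generated = response.file.add()
    generated.name = "example_manifest.txt"  # placeholder output path
    generated.content = "inputs: " + ", ".join(request.file_to_generate) + "\n"

    sys.stdout.buffer.write(response.SerializeToString())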
File diff suppressed because it is too large.
@@ -1,115 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

syntax = "proto3";

package google.protobuf;

option cc_enable_arenas = true;
option go_package = "google.golang.org/protobuf/types/known/durationpb";
option java_package = "com.google.protobuf";
option java_outer_classname = "DurationProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";

// A Duration represents a signed, fixed-length span of time represented
// as a count of seconds and fractions of seconds at nanosecond
// resolution. It is independent of any calendar and concepts like "day"
// or "month". It is related to Timestamp in that the difference between
// two Timestamp values is a Duration and it can be added or subtracted
// from a Timestamp. Range is approximately +-10,000 years.
//
// # Examples
//
// Example 1: Compute Duration from two Timestamps in pseudo code.
//
// Timestamp start = ...;
// Timestamp end = ...;
// Duration duration = ...;
//
// duration.seconds = end.seconds - start.seconds;
// duration.nanos = end.nanos - start.nanos;
//
// if (duration.seconds < 0 && duration.nanos > 0) {
// duration.seconds += 1;
// duration.nanos -= 1000000000;
// } else if (duration.seconds > 0 && duration.nanos < 0) {
// duration.seconds -= 1;
// duration.nanos += 1000000000;
// }
//
// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
//
// Timestamp start = ...;
// Duration duration = ...;
// Timestamp end = ...;
//
// end.seconds = start.seconds + duration.seconds;
// end.nanos = start.nanos + duration.nanos;
//
// if (end.nanos < 0) {
// end.seconds -= 1;
// end.nanos += 1000000000;
// } else if (end.nanos >= 1000000000) {
// end.seconds += 1;
// end.nanos -= 1000000000;
// }
//
// Example 3: Compute Duration from datetime.timedelta in Python.
//
// td = datetime.timedelta(days=3, minutes=10)
// duration = Duration()
// duration.FromTimedelta(td)
//
// # JSON Mapping
//
// In JSON format, the Duration type is encoded as a string rather than an
// object, where the string ends in the suffix "s" (indicating seconds) and
// is preceded by the number of seconds, with nanoseconds expressed as
// fractional seconds. For example, 3 seconds with 0 nanoseconds should be
// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
// be expressed in JSON format as "3.000000001s", and 3 seconds and 1
// microsecond should be expressed in JSON format as "3.000001s".
//
message Duration {
// Signed seconds of the span of time. Must be from -315,576,000,000
// to +315,576,000,000 inclusive. Note: these bounds are computed from:
// 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
int64 seconds = 1;

// Signed fractions of a second at nanosecond resolution of the span
// of time. Durations less than one second are represented with a 0
// `seconds` field and a positive or negative `nanos` field. For durations
// of one second or more, a non-zero value for the `nanos` field must be
// of the same sign as the `seconds` field. Must be from -999,999,999
// to +999,999,999 inclusive.
int32 nanos = 2;
}
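As a concrete check of the JSON mapping described above, the Python protobuf runtime exposes the helpers mentioned in Example 3. A short sketch, assuming the protobuf package is installed:

    import datetime
    from google.protobuf import duration_pb2

    duration = duration_pb2.Duration()
    duration.FromTimedelta(datetime.timedelta(days=3, minutes=10))
    print(duration.seconds, duration.nanos)  # 259800 0
    print(duration.ToJsonString())           # "259800s" -- seconds suffixed with "s"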
@@ -1,51 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

syntax = "proto3";

package google.protobuf;

option go_package = "google.golang.org/protobuf/types/known/emptypb";
option java_package = "com.google.protobuf";
option java_outer_classname = "EmptyProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
option cc_enable_arenas = true;

// A generic empty message that you can re-use to avoid defining duplicated
// empty messages in your APIs. A typical example is to use it as the request
// or the response type of an API method. For instance:
//
// service Foo {
// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
// }
//
message Empty {}
@@ -1,245 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

syntax = "proto3";

package google.protobuf;

option java_package = "com.google.protobuf";
option java_outer_classname = "FieldMaskProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
option go_package = "google.golang.org/protobuf/types/known/fieldmaskpb";
option cc_enable_arenas = true;

// `FieldMask` represents a set of symbolic field paths, for example:
//
// paths: "f.a"
// paths: "f.b.d"
//
// Here `f` represents a field in some root message, `a` and `b`
// fields in the message found in `f`, and `d` a field found in the
// message in `f.b`.
//
// Field masks are used to specify a subset of fields that should be
// returned by a get operation or modified by an update operation.
// Field masks also have a custom JSON encoding (see below).
//
// # Field Masks in Projections
//
// When used in the context of a projection, a response message or
// sub-message is filtered by the API to only contain those fields as
// specified in the mask. For example, if the mask in the previous
// example is applied to a response message as follows:
//
// f {
// a : 22
// b {
// d : 1
// x : 2
// }
// y : 13
// }
// z: 8
//
// The result will not contain specific values for fields x,y and z
// (their value will be set to the default, and omitted in proto text
// output):
//
//
// f {
// a : 22
// b {
// d : 1
// }
// }
//
// A repeated field is not allowed except at the last position of a
// paths string.
//
// If a FieldMask object is not present in a get operation, the
// operation applies to all fields (as if a FieldMask of all fields
// had been specified).
//
// Note that a field mask does not necessarily apply to the
// top-level response message. In case of a REST get operation, the
// field mask applies directly to the response, but in case of a REST
// list operation, the mask instead applies to each individual message
// in the returned resource list. In case of a REST custom method,
// other definitions may be used. Where the mask applies will be
// clearly documented together with its declaration in the API. In
// any case, the effect on the returned resource/resources is required
// behavior for APIs.
//
// # Field Masks in Update Operations
//
// A field mask in update operations specifies which fields of the
// targeted resource are going to be updated. The API is required
// to only change the values of the fields as specified in the mask
// and leave the others untouched. If a resource is passed in to
// describe the updated values, the API ignores the values of all
// fields not covered by the mask.
//
// If a repeated field is specified for an update operation, new values will
// be appended to the existing repeated field in the target resource. Note that
// a repeated field is only allowed in the last position of a `paths` string.
//
// If a sub-message is specified in the last position of the field mask for an
// update operation, then new value will be merged into the existing sub-message
// in the target resource.
//
// For example, given the target message:
//
// f {
// b {
// d: 1
// x: 2
// }
// c: [1]
// }
//
// And an update message:
//
// f {
// b {
// d: 10
// }
// c: [2]
// }
//
// then if the field mask is:
//
// paths: ["f.b", "f.c"]
//
// then the result will be:
//
// f {
// b {
// d: 10
// x: 2
// }
// c: [1, 2]
// }
//
// An implementation may provide options to override this default behavior for
// repeated and message fields.
//
// In order to reset a field's value to the default, the field must
// be in the mask and set to the default value in the provided resource.
// Hence, in order to reset all fields of a resource, provide a default
// instance of the resource and set all fields in the mask, or do
// not provide a mask as described below.
//
// If a field mask is not present on update, the operation applies to
// all fields (as if a field mask of all fields has been specified).
// Note that in the presence of schema evolution, this may mean that
// fields the client does not know and has therefore not filled into
// the request will be reset to their default. If this is unwanted
// behavior, a specific service may require a client to always specify
// a field mask, producing an error if not.
//
// As with get operations, the location of the resource which
// describes the updated values in the request message depends on the
// operation kind. In any case, the effect of the field mask is
// required to be honored by the API.
//
// ## Considerations for HTTP REST
//
// The HTTP kind of an update operation which uses a field mask must
// be set to PATCH instead of PUT in order to satisfy HTTP semantics
// (PUT must only be used for full updates).
//
// # JSON Encoding of Field Masks
//
// In JSON, a field mask is encoded as a single string where paths are
// separated by a comma. Fields name in each path are converted
// to/from lower-camel naming conventions.
//
// As an example, consider the following message declarations:
//
// message Profile {
// User user = 1;
// Photo photo = 2;
// }
// message User {
// string display_name = 1;
// string address = 2;
// }
//
// In proto a field mask for `Profile` may look as such:
//
// mask {
// paths: "user.display_name"
// paths: "photo"
// }
//
// In JSON, the same mask is represented as below:
//
// {
// mask: "user.displayName,photo"
// }
//
// # Field Masks and Oneof Fields
//
// Field masks treat fields in oneofs just as regular fields. Consider the
// following message:
//
// message SampleMessage {
// oneof test_oneof {
// string name = 4;
// SubMessage sub_message = 9;
// }
// }
//
// The field mask can be:
//
// mask {
// paths: "name"
// }
//
// Or:
//
// mask {
// paths: "sub_message"
// }
//
// Note that oneof type names ("test_oneof" in this case) cannot be used in
// paths.
//
// ## Field Mask Verification
//
// The implementation of any API method which has a FieldMask type field in the
// request should verify the included field paths, and return an
// `INVALID_ARGUMENT` error if any path is unmappable.
message FieldMask {
// The set of field mask paths.
repeated string paths = 1;
}
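The lower-camel JSON encoding described above can be reproduced with the Python runtime's FieldMask helpers. A short sketch; the path names are the illustrative ones from the comment, not fields of any real API:

    from google.protobuf import field_mask_pb2

    mask = field_mask_pb2.FieldMask(paths=["user.display_name", "photo"])
    print(mask.ToJsonString())     # user.displayName,photo

    parsed = field_mask_pb2.FieldMask()
    parsed.FromJsonString("user.displayName,photo")
    print(list(parsed.paths))      # ['user.display_name', 'photo']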
@@ -1,48 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

syntax = "proto3";

package google.protobuf;

option java_package = "com.google.protobuf";
option java_outer_classname = "SourceContextProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
option go_package = "google.golang.org/protobuf/types/known/sourcecontextpb";

// `SourceContext` represents information about the source of a
// protobuf element, like the file in which it is defined.
message SourceContext {
// The path-qualified name of the .proto file that contained the associated
// protobuf element. For example: `"google/protobuf/source_context.proto"`.
string file_name = 1;
}
@@ -1,95 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

syntax = "proto3";

package google.protobuf;

option cc_enable_arenas = true;
option go_package = "google.golang.org/protobuf/types/known/structpb";
option java_package = "com.google.protobuf";
option java_outer_classname = "StructProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";

// `Struct` represents a structured data value, consisting of fields
// which map to dynamically typed values. In some languages, `Struct`
// might be supported by a native representation. For example, in
// scripting languages like JS a struct is represented as an
// object. The details of that representation are described together
// with the proto support for the language.
//
// The JSON representation for `Struct` is JSON object.
message Struct {
// Unordered map of dynamically typed values.
map<string, Value> fields = 1;
}

// `Value` represents a dynamically typed value which can be either
// null, a number, a string, a boolean, a recursive struct value, or a
// list of values. A producer of value is expected to set one of these
// variants. Absence of any variant indicates an error.
//
// The JSON representation for `Value` is JSON value.
message Value {
// The kind of value.
oneof kind {
// Represents a null value.
NullValue null_value = 1;
// Represents a double value.
double number_value = 2;
// Represents a string value.
string string_value = 3;
// Represents a boolean value.
bool bool_value = 4;
// Represents a structured value.
Struct struct_value = 5;
// Represents a repeated `Value`.
ListValue list_value = 6;
}
}

// `NullValue` is a singleton enumeration to represent the null value for the
// `Value` type union.
//
// The JSON representation for `NullValue` is JSON `null`.
enum NullValue {
// Null value.
NULL_VALUE = 0;
}

// `ListValue` is a wrapper around a repeated field of values.
//
// The JSON representation for `ListValue` is JSON array.
message ListValue {
// Repeated field of dynamically typed values.
repeated Value values = 1;
}
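A short sketch of how Struct and Value round-trip through JSON with the Python runtime; the field names and values below are invented purely for illustration:

    from google.protobuf import struct_pb2, json_format

    data = struct_pb2.Struct()
    data.update({"name": "example", "count": 3, "tags": ["a", "b"], "beta": True})

    print(json_format.MessageToJson(data))  # a plain JSON object
    print(data["name"], data["count"])      # numbers come back as doubles (3.0)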
@@ -1,144 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

syntax = "proto3";

package google.protobuf;

option cc_enable_arenas = true;
option go_package = "google.golang.org/protobuf/types/known/timestamppb";
option java_package = "com.google.protobuf";
option java_outer_classname = "TimestampProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";

// A Timestamp represents a point in time independent of any time zone or local
// calendar, encoded as a count of seconds and fractions of seconds at
// nanosecond resolution. The count is relative to an epoch at UTC midnight on
// January 1, 1970, in the proleptic Gregorian calendar which extends the
// Gregorian calendar backwards to year one.
//
// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
// second table is needed for interpretation, using a [24-hour linear
// smear](https://developers.google.com/time/smear).
//
// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
// restricting to that range, we ensure that we can convert to and from [RFC
// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
//
// # Examples
//
// Example 1: Compute Timestamp from POSIX `time()`.
//
// Timestamp timestamp;
// timestamp.set_seconds(time(NULL));
// timestamp.set_nanos(0);
//
// Example 2: Compute Timestamp from POSIX `gettimeofday()`.
//
// struct timeval tv;
// gettimeofday(&tv, NULL);
//
// Timestamp timestamp;
// timestamp.set_seconds(tv.tv_sec);
// timestamp.set_nanos(tv.tv_usec * 1000);
//
// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
//
// FILETIME ft;
// GetSystemTimeAsFileTime(&ft);
// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
//
// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
// Timestamp timestamp;
// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
//
// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
//
// long millis = System.currentTimeMillis();
//
// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
// .setNanos((int) ((millis % 1000) * 1000000)).build();
//
// Example 5: Compute Timestamp from Java `Instant.now()`.
//
// Instant now = Instant.now();
//
// Timestamp timestamp =
// Timestamp.newBuilder().setSeconds(now.getEpochSecond())
// .setNanos(now.getNano()).build();
//
// Example 6: Compute Timestamp from current time in Python.
//
// timestamp = Timestamp()
// timestamp.GetCurrentTime()
//
// # JSON Mapping
//
// In JSON format, the Timestamp type is encoded as a string in the
// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
// where {year} is always expressed using four digits while {month}, {day},
// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
// is required. A proto3 JSON serializer should always use UTC (as indicated by
// "Z") when printing the Timestamp type and a proto3 JSON parser should be
// able to accept both UTC and other timezones (as indicated by an offset).
//
// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
// 01:30 UTC on January 15, 2017.
//
// In JavaScript, one can convert a Date object to this format using the
// standard
// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
// method. In Python, a standard `datetime.datetime` object can be converted
// to this format using
// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
// the Joda Time's [`ISODateTimeFormat.dateTime()`](
// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()
// ) to obtain a formatter capable of generating timestamps in this format.
//
message Timestamp {
// Represents seconds of UTC time since Unix epoch
// 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
// 9999-12-31T23:59:59Z inclusive.
int64 seconds = 1;

// Non-negative fractions of a second at nanosecond resolution. Negative
// second values with fractions must still have non-negative nanos values
// that count forward in time. Must be from 0 to 999,999,999
// inclusive.
int32 nanos = 2;
}
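Example 6 above, plus the RFC 3339 JSON mapping, looks like this with the Python runtime (a sketch; the printed timestamp obviously depends on when it runs):

    from google.protobuf import timestamp_pb2

    ts = timestamp_pb2.Timestamp()
    ts.GetCurrentTime()        # seconds/nanos since the Unix epoch, in UTC
    print(ts.ToJsonString())   # e.g. "2017-01-15T01:30:15.010Z" -- RFC 3339 with "Z" suffix
    print(ts.ToDatetime())     # naive datetime in UTC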
@@ -1,193 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

syntax = "proto3";

package google.protobuf;

import "google/protobuf/any.proto";
import "google/protobuf/source_context.proto";

option cc_enable_arenas = true;
option java_package = "com.google.protobuf";
option java_outer_classname = "TypeProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
option go_package = "google.golang.org/protobuf/types/known/typepb";

// A protocol buffer message type.
message Type {
// The fully qualified message name.
string name = 1;
// The list of fields.
repeated Field fields = 2;
// The list of types appearing in `oneof` definitions in this type.
repeated string oneofs = 3;
// The protocol buffer options.
repeated Option options = 4;
// The source context.
SourceContext source_context = 5;
// The source syntax.
Syntax syntax = 6;
// The source edition string, only valid when syntax is SYNTAX_EDITIONS.
string edition = 7;
}

// A single field of a message type.
message Field {
// Basic field types.
enum Kind {
// Field type unknown.
TYPE_UNKNOWN = 0;
// Field type double.
TYPE_DOUBLE = 1;
// Field type float.
TYPE_FLOAT = 2;
// Field type int64.
TYPE_INT64 = 3;
// Field type uint64.
TYPE_UINT64 = 4;
// Field type int32.
TYPE_INT32 = 5;
// Field type fixed64.
TYPE_FIXED64 = 6;
// Field type fixed32.
TYPE_FIXED32 = 7;
// Field type bool.
TYPE_BOOL = 8;
// Field type string.
TYPE_STRING = 9;
// Field type group. Proto2 syntax only, and deprecated.
TYPE_GROUP = 10;
// Field type message.
TYPE_MESSAGE = 11;
// Field type bytes.
TYPE_BYTES = 12;
// Field type uint32.
TYPE_UINT32 = 13;
// Field type enum.
TYPE_ENUM = 14;
// Field type sfixed32.
TYPE_SFIXED32 = 15;
// Field type sfixed64.
TYPE_SFIXED64 = 16;
// Field type sint32.
TYPE_SINT32 = 17;
// Field type sint64.
TYPE_SINT64 = 18;
}

// Whether a field is optional, required, or repeated.
enum Cardinality {
// For fields with unknown cardinality.
CARDINALITY_UNKNOWN = 0;
// For optional fields.
CARDINALITY_OPTIONAL = 1;
// For required fields. Proto2 syntax only.
CARDINALITY_REQUIRED = 2;
// For repeated fields.
CARDINALITY_REPEATED = 3;
}

// The field type.
Kind kind = 1;
// The field cardinality.
Cardinality cardinality = 2;
// The field number.
int32 number = 3;
// The field name.
string name = 4;
// The field type URL, without the scheme, for message or enumeration
// types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`.
string type_url = 6;
// The index of the field type in `Type.oneofs`, for message or enumeration
// types. The first type has index 1; zero means the type is not in the list.
int32 oneof_index = 7;
// Whether to use alternative packed wire representation.
bool packed = 8;
// The protocol buffer options.
repeated Option options = 9;
// The field JSON name.
string json_name = 10;
// The string value of the default value of this field. Proto2 syntax only.
string default_value = 11;
}

// Enum type definition.
message Enum {
// Enum type name.
string name = 1;
// Enum value definitions.
repeated EnumValue enumvalue = 2;
// Protocol buffer options.
repeated Option options = 3;
// The source context.
SourceContext source_context = 4;
// The source syntax.
Syntax syntax = 5;
// The source edition string, only valid when syntax is SYNTAX_EDITIONS.
string edition = 6;
}

// Enum value definition.
message EnumValue {
// Enum value name.
string name = 1;
// Enum value number.
int32 number = 2;
// Protocol buffer options.
repeated Option options = 3;
}

// A protocol buffer option, which can be attached to a message, field,
// enumeration, etc.
message Option {
// The option's name. For protobuf built-in options (options defined in
// descriptor.proto), this is the short name. For example, `"map_entry"`.
// For custom options, it should be the fully-qualified name. For example,
// `"google.api.http"`.
string name = 1;
// The option's value packed in an Any message. If the value is a primitive,
// the corresponding wrapper type defined in google/protobuf/wrappers.proto
// should be used. If the value is an enum, it should be stored as an int32
// value using the google.protobuf.Int32Value type.
Any value = 2;
}

// The syntax in which a protocol buffer element is defined.
enum Syntax {
// Syntax `proto2`.
SYNTAX_PROTO2 = 0;
// Syntax `proto3`.
SYNTAX_PROTO3 = 1;
// Syntax `editions`.
SYNTAX_EDITIONS = 2;
}
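Per the Option comment above, primitive option values are packed into Any via the wrapper types. A small Python sketch of that convention; the option name used here is hypothetical:

    from google.protobuf import type_pb2, wrappers_pb2

    opt = type_pb2.Option(name="my.custom.option")    # hypothetical option name
    opt.value.Pack(wrappers_pb2.Int32Value(value=5))  # enum/int values go in as Int32Value

    unpacked = wrappers_pb2.Int32Value()
    assert opt.value.Unpack(unpacked)
    print(opt.name, unpacked.value)                   # my.custom.option 5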
@@ -1,123 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Wrappers for primitive (non-message) types. These types are useful
// for embedding primitives in the `google.protobuf.Any` type and for places
// where we need to distinguish between the absence of a primitive
// typed field and its default value.
//
// These wrappers have no meaningful use within repeated fields as they lack
// the ability to detect presence on individual elements.
// These wrappers have no meaningful use within a map or a oneof since
// individual entries of a map or fields of a oneof can already detect presence.

syntax = "proto3";

package google.protobuf;

option cc_enable_arenas = true;
option go_package = "google.golang.org/protobuf/types/known/wrapperspb";
option java_package = "com.google.protobuf";
option java_outer_classname = "WrappersProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";

// Wrapper message for `double`.
//
// The JSON representation for `DoubleValue` is JSON number.
message DoubleValue {
// The double value.
double value = 1;
}

// Wrapper message for `float`.
//
// The JSON representation for `FloatValue` is JSON number.
message FloatValue {
// The float value.
float value = 1;
}

// Wrapper message for `int64`.
//
// The JSON representation for `Int64Value` is JSON string.
message Int64Value {
// The int64 value.
int64 value = 1;
}

// Wrapper message for `uint64`.
//
// The JSON representation for `UInt64Value` is JSON string.
message UInt64Value {
// The uint64 value.
uint64 value = 1;
}

// Wrapper message for `int32`.
//
// The JSON representation for `Int32Value` is JSON number.
message Int32Value {
// The int32 value.
int32 value = 1;
}

// Wrapper message for `uint32`.
//
// The JSON representation for `UInt32Value` is JSON number.
message UInt32Value {
// The uint32 value.
uint32 value = 1;
}

// Wrapper message for `bool`.
//
// The JSON representation for `BoolValue` is JSON `true` and `false`.
message BoolValue {
// The bool value.
bool value = 1;
}

// Wrapper message for `string`.
//
// The JSON representation for `StringValue` is JSON string.
message StringValue {
// The string value.
string value = 1;
}

// Wrapper message for `bytes`.
//
// The JSON representation for `BytesValue` is JSON string.
message BytesValue {
// The bytes value.
bytes value = 1;
}
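The JSON mappings listed above are easy to confirm with the Python runtime; note in particular that the 64-bit wrappers serialize as JSON strings while the 32-bit ones serialize as numbers (a short sketch):

    from google.protobuf import wrappers_pb2, json_format

    print(json_format.MessageToJson(wrappers_pb2.Int64Value(value=42)))  # "42" (a JSON string)
    print(json_format.MessageToJson(wrappers_pb2.Int32Value(value=42)))  # 42 (a JSON number)
    print(json_format.MessageToJson(wrappers_pb2.BoolValue()))           # false -- present but default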
Some files were not shown because too many files have changed in this diff.