Mirror of https://github.com/mountain-loop/yaak.git (synced 2026-01-19 23:46:58 +01:00)
Compare commits
651 Commits (newest 083f83ccab, oldest 3907344884); author, date, and message details for the individual commits are not included here.
72 .github/workflows/artifacts.yml vendored Normal file
@@ -0,0 +1,72 @@
name: Generate Artifacts
on:
  push:
    tags: [ v* ]

permissions: write-all

jobs:
  build-artifacts:
    name: Build
    strategy:
      fail-fast: false
      matrix:
        include:
          - os: macos-latest
            target: aarch64-apple-darwin
          - os: macos-latest
            target: x86_64-apple-darwin
          - os: windows-latest
            target: x86_64-pc-windows-msvc
          - os: ubuntu-20.04
            target: x86_64-unknown-linux-gnu

    runs-on: ${{ matrix.os }}

    steps:
      - uses: actions/checkout@v3
      - uses: dtolnay/rust-toolchain@stable
        with:
          targets: ${{ matrix.target }}
      - name: Cache Rust
        uses: actions/cache@v2
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            ./src-tauri/target
          key: ${{ runner.os }}-cargo-${{ hashFiles('src-tauri/Cargo.lock') }}
      - uses: actions/setup-node@v3
        with:
          node-version: 20
          cache: 'npm'
      - name: install dependencies (ubuntu only)
        if: matrix.os == 'ubuntu-20.04'
        run: |
          sudo apt-get update
          sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf
      - name: Install dependencies
        run: npm ci
      - name: Run tests
        run: npm test
      # Pin dev version to get non-default targets
      # https://github.com/tauri-apps/tauri-action/issues/356
      - uses: tauri-apps/tauri-action@dev
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
          TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
          ENABLE_CODE_SIGNING: ${{ secrets.APPLE_CERTIFICATE }}
          APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
          APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
          APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
          APPLE_ID: ${{ secrets.APPLE_ID }}
          APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
          APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
        with:
          tagName: 'v__VERSION__'
          releaseName: 'Release __VERSION__'
          releaseBody: 'https://yaak.app/changelog/__VERSION__'
          releaseDraft: true
          prerelease: false
          args: '--target ${{ matrix.target }}'
64 .github/workflows/release.yml vendored
@@ -1,64 +0,0 @@
name: Generate Artifacts
on:
  push:
    tags: [ v* ]

jobs:
  build-artifacts:
    permissions:
      contents: write
    name: Build
    strategy:
      fail-fast: false
      matrix:
        include:
          - platform: 'macos-latest' # for Arm based macs (M1 and above).
            args: '--target aarch64-apple-darwin'
          - platform: 'macos-latest' # for Intel based macs.
            args: '--target x86_64-apple-darwin'
          - platform: 'ubuntu-22.04' # for Tauri v1 you could replace this with ubuntu-20.04.
            args: ''
          - platform: 'windows-latest'
            args: ''
    runs-on: ${{ matrix.platform }}
    steps:
      - uses: actions/checkout@v4
      - name: setup node
        uses: actions/setup-node@v4
        with:
          node-version: lts/*
      - name: install Rust stable
        uses: dtolnay/rust-toolchain@stable
        with:
          # Those targets are only used on macos runners so it's in an `if` to slightly speed up windows and linux builds.
          targets: ${{ matrix.platform == 'macos-latest' && 'aarch64-apple-darwin,x86_64-apple-darwin' || '' }}
      - name: install dependencies (ubuntu only)
        if: matrix.platform == 'ubuntu-22.04' # This must match the platform value defined above.
        run: |
          sudo apt-get update
          sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
      - name: Install dependencies
        run: npm ci
      - name: Run lint
        run: npm run lint
      - name: Run tests
        run: npm test
      - uses: tauri-apps/tauri-action@v0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
          TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
          ENABLE_CODE_SIGNING: ${{ secrets.APPLE_CERTIFICATE }}
          APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
          APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
          APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
          APPLE_ID: ${{ secrets.APPLE_ID }}
          APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
          APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
        with:
          tagName: 'v__VERSION__'
          releaseName: 'Release __VERSION__'
          releaseBody: 'https://yaak.app/changelog/__VERSION__'
          releaseDraft: true
          prerelease: false
          args: ${{ matrix.args }}
2 .gitignore vendored
@@ -26,5 +26,3 @@ dist-ssr

*.sqlite
*.sqlite-*

.cargo
20 .run/Build Desktop.run.xml Normal file
@@ -0,0 +1,20 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="Build Desktop" type="ShConfigurationType">
    <option name="SCRIPT_TEXT" value="npm run tauri build -- --target universal-apple-darwin" />
    <option name="INDEPENDENT_SCRIPT_PATH" value="true" />
    <option name="SCRIPT_PATH" value="" />
    <option name="SCRIPT_OPTIONS" value="" />
    <option name="INDEPENDENT_SCRIPT_WORKING_DIRECTORY" value="true" />
    <option name="SCRIPT_WORKING_DIRECTORY" value="$PROJECT_DIR$" />
    <option name="INDEPENDENT_INTERPRETER_PATH" value="true" />
    <option name="INTERPRETER_PATH" value="/bin/zsh" />
    <option name="INTERPRETER_OPTIONS" value="" />
    <option name="EXECUTE_IN_TERMINAL" value="true" />
    <option name="EXECUTE_SCRIPT_FILE" value="false" />
    <envs>
      <env name="TAURI_KEY_PASSWORD" value="fishhook-upstream-wash-assured" />
      <env name="TAURI_PRIVATE_KEY" value="dW50cnVzdGVkIGNvbW1lbnQ6IHJzaWduIGVuY3J5cHRlZCBzZWNyZXQga2V5ClJXUlRZMEl5OGxWaytTa3dIa2xXVUltQzRGUXIzd2lYQ2NpV0ZhQURSbWJWZ1NrK0tnY0FBQkFBQUFBQUFBQUFBQUlBQUFBQUV2M1VKdVRyVHpHSzhQdGc2ZVFtOVNsMU5tNEVSN280cFNrbXhncW9tdjNXaFJZUTJqUzQ5Q01zWTJWRVhaY1pGNHNjR1NFR3JmcWFRN09NdWdGMXpZVXhzejR4V3lDV1JpZHlnbW5LNS9vMFFtRlZjbUl4YjZSNzhlMmk3ait5SExYcG5QZUkxOFE9Cg==" />
    </envs>
    <method v="2" />
  </configuration>
</component>
1 .tauriignore Normal file
@@ -0,0 +1 @@
plugins
@@ -1,8 +1,8 @@
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8"/>
    <meta name="viewport" content="width=device-width, initial-scale=1.0"/>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Yaak App</title>
    <!-- <script src="http://localhost:8097"></script>-->
@@ -15,13 +15,13 @@

      @media (prefers-color-scheme: dark) {
        html, body {
          background-color: #1b1a29;
          background-color: black;
        }
      }
    </style>
  </head>

  <body class="text-base">
  <body>
    <div id="root"></div>
    <div id="cm-portal" class="cm-portal"></div>
    <div id="react-portal"></div>
2990 package-lock.json generated
File diff suppressed because it is too large
32 package.json
@@ -4,9 +4,8 @@
  "version": "0.0.0",
  "type": "module",
  "scripts": {
    "start": "npm run tauri-dev:desktop",
    "tauri-dev:desktop": "tauri dev --no-watch --config ./src-tauri/tauri-dev.conf.json",
    "tauri-dev:ios": "tauri ios dev --force-ip-prompt --config ./src-tauri/tauri-dev.conf.json",
    "start": "npm run build:plugins && npm run tauri-dev",
    "tauri-dev": "tauri dev --no-watch --config ./src-tauri/tauri-dev.conf.json",
    "tauri-build": "tauri build",
    "tauri": "tauri",
    "build": "npm run build:frontend",
@@ -16,11 +15,9 @@
    "build:icon:dev": "tauri icon design/icon-dev.png --output ./src-tauri/icons/dev",
    "build:frontend": "vite build",
    "build:plugins": "run-p build:plugin:*",
    "build:plugin:exporter-curl": "cd plugins/exporter-curl && vite build --emptyOutDir",
    "build:plugin:importer-insomnia": "cd plugins/importer-insomnia && vite build --emptyOutDir",
    "build:plugin:importer-postman": "cd plugins/importer-postman && vite build --emptyOutDir",
    "build:plugin:importer-yaak": "cd plugins/importer-yaak && vite build --emptyOutDir",
    "build:plugin:importer-curl": "cd plugins/importer-curl && vite build --emptyOutDir",
    "build:plugin:filter-jsonpath": "cd plugins/filter-jsonpath && vite build --emptyOutDir",
    "build:plugin:filter-xpath": "cd plugins/filter-xpath && vite build --emptyOutDir",
    "test": "vitest",
@@ -29,7 +26,7 @@
  },
  "dependencies": {
    "@codemirror/commands": "^6.2.1",
    "@codemirror/lang-javascript": "^6.2.2",
    "@codemirror/lang-javascript": "^6.1.4",
    "@codemirror/lang-json": "^6.0.1",
    "@codemirror/lang-xml": "^6.0.2",
    "@codemirror/language": "^6.6.0",
@@ -39,13 +36,11 @@
    "@lezer/lr": "^1.3.3",
    "@react-hook/resize-observer": "^1.2.6",
    "@tailwindcss/container-queries": "^0.1.0",
    "@tanstack/react-query": "^5.35.5",
    "@tauri-apps/api": ">=2.0.0-beta.0",
    "@tauri-apps/plugin-clipboard-manager": "^2.1.0-beta.1",
    "@tauri-apps/plugin-dialog": ">=2.0.0-beta.0",
    "@tauri-apps/plugin-fs": ">=2.0.0-beta.0",
    "@tauri-apps/plugin-os": ">=2.0.0-beta.0",
    "@tauri-apps/plugin-shell": ">=2.0.0-beta.0",
    "@tanstack/query-sync-storage-persister": "^4.27.1",
    "@tanstack/react-query": "^4.28.0",
    "@tanstack/react-query-devtools": "^4.28.0",
    "@tanstack/react-query-persist-client": "^4.28.0",
    "@tauri-apps/api": "^1.5.3",
    "buffer": "^6.0.3",
    "classnames": "^2.3.2",
    "cm6-graphql": "^0.0.9",
@@ -56,7 +51,6 @@
    "format-graphql": "^1.4.0",
    "framer-motion": "^9.0.4",
    "lucide-react": "^0.309.0",
    "mime": "^4.0.1",
    "papaparse": "^5.4.1",
    "parse-color": "^1.0.0",
    "react": "^18.2.0",
@@ -67,14 +61,13 @@
    "react-router-dom": "^6.8.1",
    "react-use": "^17.4.0",
    "slugify": "^1.6.6",
    "tauri-plugin-log-api": "github:tauri-apps/tauri-plugin-log#v2",
    "tauri-plugin-log-api": "github:tauri-apps/tauri-plugin-log#v1",
    "uuid": "^9.0.0",
    "xml-formatter": "^3.6.2"
  },
  "devDependencies": {
    "@tailwindcss/nesting": "^0.0.0-insiders.565cd3e",
    "@tanstack/react-query-devtools": "^5.35.5",
    "@tauri-apps/cli": ">=2.0.0-beta.0",
    "@tauri-apps/cli": "^1.5.10",
    "@types/node": "^18.7.10",
    "@types/papaparse": "^5.3.7",
    "@types/parse-color": "^1.0.1",
@@ -93,7 +86,6 @@
    "eslint-plugin-react": "^7.32.2",
    "eslint-plugin-react-hooks": "^4.6.0",
    "husky": "^8.0.3",
    "internal-ip": "^8.0.0",
    "lint-staged": "^15.0.2",
    "npm-run-all": "^4.1.5",
    "postcss": "^8.4.21",
@@ -101,8 +93,8 @@
    "prettier": "^2.8.4",
    "react-devtools": "^4.27.2",
    "tailwindcss": "^3.2.7",
    "typescript": "^5.4.5",
    "vite": "^5.0.0",
    "typescript": "^5.3.3",
    "vite": "^5.1.1",
    "vite-plugin-svgr": "^4.2.0",
    "vite-plugin-top-level-await": "^1.4.1",
    "vitest": "^1.3.0"
1544 plugins/exporter-curl/package-lock.json generated
File diff suppressed because it is too large
@@ -1,7 +0,0 @@
{
  "name": "exporter-curl",
  "version": "0.0.1",
  "devDependencies": {
    "vitest": "^1.4.0"
  }
}
@@ -1,76 +0,0 @@
import { HttpRequest } from '../../../src-web/lib/models';

const NEWLINE = '\\\n ';

export function pluginHookExport(request: Partial<HttpRequest>) {
  const xs = ['curl'];

  // Add method and URL all on first line
  if (request.method) xs.push('-X', request.method);
  if (request.url) xs.push(quote(request.url));

  xs.push(NEWLINE);

  // Add URL params
  for (const p of (request.urlParameters ?? []).filter(onlyEnabled)) {
    xs.push('--url-query', quote(`${p.name}=${p.value}`));
    xs.push(NEWLINE);
  }

  // Add headers
  for (const h of (request.headers ?? []).filter(onlyEnabled)) {
    xs.push('--header', quote(`${h.name}: ${h.value}`));
    xs.push(NEWLINE);
  }

  // Add form params
  if (Array.isArray(request.body?.form)) {
    const flag = request.bodyType === 'multipart/form-data' ? '--form' : '--data';
    for (const p of (request.body?.form ?? []).filter(onlyEnabled)) {
      if (p.file) {
        let v = `${p.name}=@${p.file}`;
        v += p.contentType ? `;type=${p.contentType}` : '';
        xs.push(flag, v);
      } else {
        xs.push(flag, quote(`${p.name}=${p.value}`));
      }
      xs.push(NEWLINE);
    }
  } else if (typeof request.body?.text === 'string') {
    // --data-raw $'...' to do special ANSI C quoting
    xs.push('--data-raw', `$${quote(request.body.text)}`);
    xs.push(NEWLINE);
  }

  // Add basic/digest authentication
  if (request.authenticationType === 'basic' || request.authenticationType === 'digest') {
    if (request.authenticationType === 'digest') xs.push('--digest');
    xs.push(
      '--user',
      quote(`${request.authentication?.username ?? ''}:${request.authentication?.password ?? ''}`),
    );
    xs.push(NEWLINE);
  }

  // Add bearer authentication
  if (request.authenticationType === 'bearer') {
    xs.push('--header', quote(`Authorization: Bearer ${request.authentication?.token ?? ''}`));
    xs.push(NEWLINE);
  }

  // Remove trailing newline
  if (xs[xs.length - 1] === NEWLINE) {
    xs.splice(xs.length - 1, 1);
  }

  return xs.join(' ');
}

function quote(arg: string): string {
  const escaped = arg.replace(/'/g, "\\'");
  return `'${escaped}'`;
}

function onlyEnabled(v: { name?: string; enabled?: boolean }): boolean {
  return v.enabled !== false && !!v.name;
}
@@ -1,175 +0,0 @@
import { describe, expect, test } from 'vitest';
import { pluginHookExport } from '../src';

describe('exporter-curl', () => {
  test('Exports GET with params', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        urlParameters: [
          { name: 'a', value: 'aaa' },
          { name: 'b', value: 'bbb', enabled: true },
          { name: 'c', value: 'ccc', enabled: false },
        ],
      }),
    ).toEqual(
      [`curl 'https://yaak.app'`, `--url-query 'a=aaa'`, `--url-query 'b=bbb'`].join(` \\\n `),
    );
  });
  test('Exports POST with url form data', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        method: 'POST',
        bodyType: 'application/x-www-form-urlencoded',
        body: {
          form: [
            { name: 'a', value: 'aaa' },
            { name: 'b', value: 'bbb', enabled: true },
            { name: 'c', value: 'ccc', enabled: false },
          ],
        },
      }),
    ).toEqual(
      [`curl -X POST 'https://yaak.app'`, `--data 'a=aaa'`, `--data 'b=bbb'`].join(` \\\n `),
    );
  });

  test('Exports PUT with multipart form', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        method: 'PUT',
        bodyType: 'multipart/form-data',
        body: {
          form: [
            { name: 'a', value: 'aaa' },
            { name: 'b', value: 'bbb', enabled: true },
            { name: 'c', value: 'ccc', enabled: false },
            { name: 'f', file: '/foo/bar.png', contentType: 'image/png' },
          ],
        },
      }),
    ).toEqual(
      [
        `curl -X PUT 'https://yaak.app'`,
        `--form 'a=aaa'`,
        `--form 'b=bbb'`,
        `--form f=@/foo/bar.png;type=image/png`,
      ].join(` \\\n `),
    );
  });

  test('Exports JSON body', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        method: 'POST',
        bodyType: 'application/json',
        body: {
          text: `{"foo":"bar's"}`,
        },
        headers: [{ name: 'Content-Type', value: 'application/json' }],
      }),
    ).toEqual(
      [
        `curl -X POST 'https://yaak.app'`,
        `--header 'Content-Type: application/json'`,
        `--data-raw $'{"foo":"bar\\'s"}'`,
      ].join(` \\\n `),
    );
  });

  test('Exports multi-line JSON body', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        method: 'POST',
        bodyType: 'application/json',
        body: {
          text: `{"foo":"bar",\n"baz":"qux"}`,
        },
        headers: [{ name: 'Content-Type', value: 'application/json' }],
      }),
    ).toEqual(
      [
        `curl -X POST 'https://yaak.app'`,
        `--header 'Content-Type: application/json'`,
        `--data-raw $'{"foo":"bar",\n"baz":"qux"}'`,
      ].join(` \\\n `),
    );
  });

  test('Exports headers', () => {
    expect(
      pluginHookExport({
        headers: [
          { name: 'a', value: 'aaa' },
          { name: 'b', value: 'bbb', enabled: true },
          { name: 'c', value: 'ccc', enabled: false },
        ],
      }),
    ).toEqual([`curl`, `--header 'a: aaa'`, `--header 'b: bbb'`].join(` \\\n `));
  });

  test('Basic auth', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        authenticationType: 'basic',
        authentication: {
          username: 'user',
          password: 'pass',
        },
      }),
    ).toEqual([`curl 'https://yaak.app'`, `--user 'user:pass'`].join(` \\\n `));
  });

  test('Broken basic auth', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        authenticationType: 'basic',
        authentication: {},
      }),
    ).toEqual([`curl 'https://yaak.app'`, `--user ':'`].join(` \\\n `));
  });

  test('Digest auth', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        authenticationType: 'digest',
        authentication: {
          username: 'user',
          password: 'pass',
        },
      }),
    ).toEqual([`curl 'https://yaak.app'`, `--digest --user 'user:pass'`].join(` \\\n `));
  });

  test('Bearer auth', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        authenticationType: 'bearer',
        authentication: {
          token: 'tok',
        },
      }),
    ).toEqual([`curl 'https://yaak.app'`, `--header 'Authorization: Bearer tok'`].join(` \\\n `));
  });

  test('Broken bearer auth', () => {
    expect(
      pluginHookExport({
        url: 'https://yaak.app',
        authenticationType: 'bearer',
        authentication: {
          username: 'user',
          password: 'pass',
        },
      }),
    ).toEqual([`curl 'https://yaak.app'`, `--header 'Authorization: Bearer '`].join(` \\\n `));
  });
});
@@ -1,13 +0,0 @@
import { resolve } from 'path';
import { defineConfig } from 'vite';

export default defineConfig({
  build: {
    lib: {
      entry: resolve(__dirname, 'src/index.ts'),
      fileName: 'index',
      formats: ['es'],
    },
    outDir: resolve(__dirname, '../../src-tauri/plugins/exporter-curl'),
  },
});
1562 plugins/importer-curl/package-lock.json generated
File diff suppressed because it is too large
@@ -1,11 +0,0 @@
{
  "name": "importer-curl",
  "version": "0.0.1",
  "dependencies": {
    "shell-quote": "^1.8.1"
  },
  "devDependencies": {
    "@types/shell-quote": "^1.7.5",
    "vitest": "^1.4.0"
  }
}
@@ -1,421 +0,0 @@
import { ControlOperator, parse, ParseEntry } from 'shell-quote';
import {
  Environment,
  Folder,
  HttpRequest,
  HttpUrlParameter,
  Model,
  Workspace,
} from '../../../src-web/lib/models';

type AtLeast<T, K extends keyof T> = Partial<T> & Pick<T, K>;

interface ExportResources {
  workspaces: AtLeast<Workspace, 'name' | 'id' | 'model'>[];
  environments: AtLeast<Environment, 'name' | 'id' | 'model' | 'workspaceId'>[];
  httpRequests: AtLeast<HttpRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
  folders: AtLeast<Folder, 'name' | 'id' | 'model' | 'workspaceId'>[];
}

export const id = 'curl';
export const name = 'cURL';
export const description = 'cURL command line tool';

const DATA_FLAGS = ['d', 'data', 'data-raw', 'data-urlencode', 'data-binary', 'data-ascii'];
const SUPPORTED_ARGS = [
  ['url'], // Specify the URL explicitly
  ['user', 'u'], // Authentication
  ['digest'], // Apply auth as digest
  ['header', 'H'],
  ['cookie', 'b'],
  ['get', 'G'], // Put the post data in the URL
  ['d', 'data'], // Add url encoded data
  ['data-raw'],
  ['data-urlencode'],
  ['data-binary'],
  ['data-ascii'],
  ['form', 'F'], // Add multipart data
  ['request', 'X'], // Request method
  DATA_FLAGS,
].flatMap((v) => v);

type Pair = string | boolean;

type PairsByName = Record<string, Pair[]>;

export function pluginHookImport(rawData: string) {
  if (!rawData.match(/^\s*curl /)) {
    return null;
  }

  const commands: ParseEntry[][] = [];

  // Replace non-escaped newlines with semicolons to make parsing easier
  // NOTE: This is really slow in debug build but fast in release mode
  const normalizedData = rawData.replace(/\ncurl/g, '; curl');

  let currentCommand: ParseEntry[] = [];

  const parsed = parse(normalizedData);

  // Break up `-XPOST` into `-X POST`
  const normalizedParseEntries = parsed.flatMap((entry) => {
    if (
      typeof entry === 'string' &&
      entry.startsWith('-') &&
      !entry.startsWith('--') &&
      entry.length > 2
    ) {
      return [entry.slice(0, 2), entry.slice(2)];
    }
    return entry;
  });

  for (const parseEntry of normalizedParseEntries) {
    if (typeof parseEntry === 'string') {
      if (parseEntry.startsWith('$')) {
        currentCommand.push(parseEntry.slice(1));
      } else {
        currentCommand.push(parseEntry);
      }
      continue;
    }

    if ('comment' in parseEntry) {
      continue;
    }

    const { op } = parseEntry as { op: 'glob'; pattern: string } | { op: ControlOperator };

    // `;` separates commands
    if (op === ';') {
      commands.push(currentCommand);
      currentCommand = [];
      continue;
    }

    if (op?.startsWith('$')) {
      // Handle the case where literal like -H $'Header: \'Some Quoted Thing\''
      const str = op.slice(2, op.length - 1).replace(/\\'/g, "'");

      currentCommand.push(str);
      continue;
    }

    if (op === 'glob') {
      currentCommand.push((parseEntry as { op: 'glob'; pattern: string }).pattern);
    }
  }

  commands.push(currentCommand);

  const workspace: ExportResources['workspaces'][0] = {
    model: 'workspace',
    id: generateId('workspace'),
    name: 'Curl Import',
  };

  const requests: ExportResources['httpRequests'] = commands
    .filter((command) => command[0] === 'curl')
    .map((v) => importCommand(v, workspace.id));

  return {
    resources: {
      httpRequests: requests,
      workspaces: [workspace],
    },
  };
}

export function importCommand(parseEntries: ParseEntry[], workspaceId: string) {
  // ~~~~~~~~~~~~~~~~~~~~~ //
  // Collect all the flags //
  // ~~~~~~~~~~~~~~~~~~~~~ //
  const pairsByName: PairsByName = {};
  const singletons: ParseEntry[] = [];

  // Start at 1 so we can skip the ^curl part
  for (let i = 1; i < parseEntries.length; i++) {
    let parseEntry = parseEntries[i];
    if (typeof parseEntry === 'string') {
      parseEntry = parseEntry.trim();
    }

    if (typeof parseEntry === 'string' && parseEntry.match(/^-{1,2}[\w-]+/)) {
      const isSingleDash = parseEntry[0] === '-' && parseEntry[1] !== '-';
      let name = parseEntry.replace(/^-{1,2}/, '');

      if (!SUPPORTED_ARGS.includes(name)) {
        continue;
      }

      let value;
      const nextEntry = parseEntries[i + 1];
      if (isSingleDash && name.length > 1) {
        // Handle squished arguments like -XPOST
        value = name.slice(1);
        name = name.slice(0, 1);
      } else if (typeof nextEntry === 'string' && !nextEntry.startsWith('-')) {
        // Next arg is not a flag, so assign it as the value
        value = nextEntry;
        i++; // Skip next one
      } else {
        value = true;
      }

      pairsByName[name] = pairsByName[name] || [];
      pairsByName[name]!.push(value);
    } else if (parseEntry) {
      singletons.push(parseEntry);
    }
  }

  // ~~~~~~~~~~~~~~~~~ //
  // Build the request //
  // ~~~~~~~~~~~~~~~~~ //

  // Url & parameters

  let urlParameters: HttpUrlParameter[];
  let url: string;

  const urlArg = getPairValue(pairsByName, (singletons[0] as string) || '', ['url']);
  const [baseUrl, search] = splitOnce(urlArg, '?');
  urlParameters =
    search?.split('&').map((p) => {
      const v = splitOnce(p, '=');
      return { name: v[0] ?? '', value: v[1] ?? '', enabled: true };
    }) ?? [];

  url = baseUrl ?? urlArg;

  // Authentication
  const [username, password] = getPairValue(pairsByName, '', ['u', 'user']).split(/:(.*)$/);

  const isDigest = getPairValue(pairsByName, false, ['digest']);
  const authenticationType = username ? (isDigest ? 'digest' : 'basic') : null;
  const authentication = username
    ? {
        username: username.trim(),
        password: (password ?? '').trim(),
      }
    : {};

  // Headers
  const headers = [
    ...((pairsByName.header as string[] | undefined) || []),
    ...((pairsByName.H as string[] | undefined) || []),
  ].map((header) => {
    const [name, value] = header.split(/:(.*)$/);
    // remove final colon from header name if present
    if (!value) {
      return {
        name: (name ?? '').trim().replace(/;$/, ''),
        value: '',
        enabled: true,
      };
    }
    return {
      name: (name ?? '').trim(),
      value: value.trim(),
      enabled: true,
    };
  });

  // Cookies
  const cookieHeaderValue = [
    ...((pairsByName.cookie as string[] | undefined) || []),
    ...((pairsByName.b as string[] | undefined) || []),
  ]
    .map((str) => {
      const name = str.split('=', 1)[0];
      const value = str.replace(`${name}=`, '');
      return `${name}=${value}`;
    })
    .join('; ');

  // Convert cookie value to header
  const existingCookieHeader = headers.find((header) => header.name.toLowerCase() === 'cookie');

  if (cookieHeaderValue && existingCookieHeader) {
    // Has existing cookie header, so let's update it
    existingCookieHeader.value += `; ${cookieHeaderValue}`;
  } else if (cookieHeaderValue) {
    // No existing cookie header, so let's make a new one
    headers.push({
      name: 'Cookie',
      value: cookieHeaderValue,
      enabled: true,
    });
  }

  ///Body (Text or Blob)
  const dataParameters = pairsToDataParameters(pairsByName);
  const contentTypeHeader = headers.find((header) => header.name.toLowerCase() === 'content-type');
  const mimeType = contentTypeHeader ? contentTypeHeader.value.split(';')[0] : null;

  // Body (Multipart Form Data)
  const formDataParams = [
    ...((pairsByName.form as string[] | undefined) || []),
    ...((pairsByName.F as string[] | undefined) || []),
  ].map((str) => {
    const parts = str.split('=');
    const name = parts[0] ?? '';
    const value = parts[1] ?? '';
    const item: { name: string; value?: string; file?: string; enabled: boolean } = {
      name,
      enabled: true,
    };

    if (value.indexOf('@') === 0) {
      item.file = value.slice(1);
    } else {
      item.value = value;
    }

    return item;
  });

  // Body
  let body = {};
  let bodyType: string | null = null;
  const bodyAsGET = getPairValue(pairsByName, false, ['G', 'get']);

  if (dataParameters.length > 0 && bodyAsGET) {
    urlParameters.push(...dataParameters);
  } else if (
    dataParameters.length > 0 &&
    (mimeType == null || mimeType === 'application/x-www-form-urlencoded')
  ) {
    bodyType = mimeType ?? 'application/x-www-form-urlencoded';
    body = {
      form: dataParameters.map((parameter) => ({
        ...parameter,
        name: decodeURIComponent(parameter.name || ''),
        value: decodeURIComponent(parameter.value || ''),
      })),
    };
    headers.push({
      name: 'Content-Type',
      value: 'application/x-www-form-urlencoded',
      enabled: true,
    });
  } else if (dataParameters.length > 0) {
    bodyType =
      mimeType === 'application/json' || mimeType === 'text/xml' || mimeType === 'text/plain'
        ? mimeType
        : 'other';
    body = {
      text: dataParameters
        .map(({ name, value }) => (name && value ? `${name}=${value}` : name || value))
        .join('&'),
    };
  } else if (formDataParams.length) {
    bodyType = mimeType ?? 'multipart/form-data';
    body = {
      form: formDataParams,
    };
    if (mimeType == null) {
      headers.push({
        name: 'Content-Type',
        value: 'multipart/form-data',
        enabled: true,
      });
    }
  }

  // Method
  let method = getPairValue(pairsByName, '', ['X', 'request']).toUpperCase();

  if (method === '' && body) {
    method = 'text' in body || 'form' in body ? 'POST' : 'GET';
  }

  const request: ExportResources['httpRequests'][0] = {
    id: generateId('http_request'),
    model: 'http_request',
    workspaceId,
    name: '',
    urlParameters,
    url,
    method,
    headers,
    authentication,
    authenticationType,
    body,
    bodyType,
    folderId: null,
    sortPriority: 0,
  };

  return request;
}

const pairsToDataParameters = (keyedPairs: PairsByName) => {
  let dataParameters: {
    name: string;
    value: string;
    contentType?: string;
    filePath?: string;
    enabled?: boolean;
  }[] = [];

  for (const flagName of DATA_FLAGS) {
    const pairs = keyedPairs[flagName];

    if (!pairs || pairs.length === 0) {
      continue;
    }

    for (const p of pairs) {
      if (typeof p !== 'string') continue;

      const [name, value] = p.split('=');
      if (p.startsWith('@')) {
        // Yaak doesn't support files in url-encoded data, so
        dataParameters.push({
          name: name ?? '',
          value: '',
          filePath: p.slice(1),
          enabled: true,
        });
      } else {
        dataParameters.push({
          name: name ?? '',
          value: flagName === 'data-urlencode' ? encodeURIComponent(value ?? '') : value ?? '',
          enabled: true,
        });
      }
    }
  }

  return dataParameters;
};

const getPairValue = <T extends string | boolean>(
  pairsByName: PairsByName,
  defaultValue: T,
  names: string[],
) => {
  for (const name of names) {
    if (pairsByName[name] && pairsByName[name]!.length) {
      return pairsByName[name]![0] as T;
    }
  }

  return defaultValue;
};

function splitOnce(str: string, sep: string): string[] {
  const index = str.indexOf(sep);
  if (index > -1) {
    return [str.slice(0, index), str.slice(index + 1)];
  }
  return [str];
}

const idCount: Partial<Record<Model['model'], number>> = {};
function generateId(model: Model['model']): string {
  idCount[model] = (idCount[model] ?? -1) + 1;
  return `GENERATE_ID::${model.toUpperCase()}_${idCount[model]}`;
}
@@ -1,335 +0,0 @@
import { describe, expect, test } from 'vitest';
import { HttpRequest, Model, Workspace } from '../../../src-web/lib/models';
import { pluginHookImport } from '../src';

describe('importer-curl', () => {
  test('Imports basic GET', () => {
    expect(pluginHookImport('curl https://yaak.app')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
          }),
        ],
      },
    });
  });

  test('Explicit URL', () => {
    expect(pluginHookImport('curl --url https://yaak.app')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
          }),
        ],
      },
    });
  });

  test('Missing URL', () => {
    expect(pluginHookImport('curl -X POST')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            method: 'POST',
          }),
        ],
      },
    });
  });

  test('URL between', () => {
    expect(pluginHookImport('curl -v https://yaak.app -X POST')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
            method: 'POST',
          }),
        ],
      },
    });
  });

  test('Random flags', () => {
    expect(pluginHookImport('curl --random -Z -Y -S --foo https://yaak.app')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
          }),
        ],
      },
    });
  });

  test('Imports --request method', () => {
    expect(pluginHookImport('curl --request POST https://yaak.app')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
            method: 'POST',
          }),
        ],
      },
    });
  });

  test('Imports -XPOST method', () => {
    expect(pluginHookImport('curl -XPOST --request POST https://yaak.app')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
            method: 'POST',
          }),
        ],
      },
    });
  });

  test('Imports multiple requests', () => {
    expect(
      pluginHookImport('curl \\\n https://yaak.app\necho "foo"\ncurl example.com;curl foo.com'),
    ).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({ url: 'https://yaak.app' }),
          baseRequest({ url: 'example.com' }),
          baseRequest({ url: 'foo.com' }),
        ],
      },
    });
  });

  test('Imports form data', () => {
    expect(
      pluginHookImport('curl -X POST -F "a=aaa" -F b=bbb" -F f=@filepath https://yaak.app'),
    ).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            method: 'POST',
            url: 'https://yaak.app',
            headers: [
              {
                name: 'Content-Type',
                value: 'multipart/form-data',
                enabled: true,
              },
            ],
            bodyType: 'multipart/form-data',
            body: {
              form: [
                { enabled: true, name: 'a', value: 'aaa' },
                { enabled: true, name: 'b', value: 'bbb' },
                { enabled: true, name: 'f', file: 'filepath' },
              ],
            },
          }),
        ],
      },
    });
  });

  test('Imports data params as form url-encoded', () => {
    expect(pluginHookImport('curl -d a -d b -d c=ccc https://yaak.app')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            method: 'POST',
            url: 'https://yaak.app',
            bodyType: 'application/x-www-form-urlencoded',
            headers: [
              {
                name: 'Content-Type',
                value: 'application/x-www-form-urlencoded',
                enabled: true,
              },
            ],
            body: {
              form: [
                { name: 'a', value: '', enabled: true },
                { name: 'b', value: '', enabled: true },
                { name: 'c', value: 'ccc', enabled: true },
              ],
            },
          }),
        ],
      },
    });
  });

  test('Imports data params as text', () => {
    expect(
      pluginHookImport('curl -H Content-Type:text/plain -d a -d b -d c=ccc https://yaak.app'),
    ).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            method: 'POST',
            url: 'https://yaak.app',
            headers: [{ name: 'Content-Type', value: 'text/plain', enabled: true }],
            bodyType: 'text/plain',
            body: { text: 'a&b&c=ccc' },
          }),
        ],
      },
    });
  });

  test('Imports multi-line JSON', () => {
    expect(
      pluginHookImport(
        `curl -H Content-Type:application/json -d $'{\n "foo":"bar"\n}' https://yaak.app`,
      ),
    ).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            method: 'POST',
            url: 'https://yaak.app',
            headers: [{ name: 'Content-Type', value: 'application/json', enabled: true }],
            bodyType: 'application/json',
            body: { text: '{\n "foo":"bar"\n}' },
          }),
        ],
      },
    });
  });

  test('Imports multiple headers', () => {
    expect(
      pluginHookImport('curl -H Foo:bar --header Name -H AAA:bbb -H :ccc https://yaak.app'),
    ).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
            headers: [
              { name: 'Name', value: '', enabled: true },
              { name: 'Foo', value: 'bar', enabled: true },
              { name: 'AAA', value: 'bbb', enabled: true },
              { name: '', value: 'ccc', enabled: true },
            ],
          }),
        ],
      },
    });
  });

  test('Imports basic auth', () => {
    expect(pluginHookImport('curl --user user:pass https://yaak.app')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
            authenticationType: 'basic',
            authentication: {
              username: 'user',
              password: 'pass',
            },
          }),
        ],
      },
    });
  });

  test('Imports digest auth', () => {
    expect(pluginHookImport('curl --digest --user user:pass https://yaak.app')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
            authenticationType: 'digest',
            authentication: {
              username: 'user',
              password: 'pass',
            },
          }),
        ],
      },
    });
  });

  test('Imports cookie as header', () => {
    expect(pluginHookImport('curl --cookie "foo=bar" https://yaak.app')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
            headers: [{ name: 'Cookie', value: 'foo=bar', enabled: true }],
          }),
        ],
      },
    });
  });

  test('Imports query params from the URL', () => {
    expect(pluginHookImport('curl "https://yaak.app?foo=bar&baz=a%20a"')).toEqual({
      resources: {
        workspaces: [baseWorkspace()],
        httpRequests: [
          baseRequest({
            url: 'https://yaak.app',
            urlParameters: [
              { name: 'foo', value: 'bar', enabled: true },
              { name: 'baz', value: 'a%20a', enabled: true },
            ],
          }),
        ],
      },
    });
  });
});

const idCount: Partial<Record<Model['model'], number>> = {};

function baseRequest(mergeWith: Partial<HttpRequest>) {
  idCount.http_request = (idCount.http_request ?? -1) + 1;
  return {
    id: `GENERATE_ID::HTTP_REQUEST_${idCount.http_request}`,
    model: 'http_request',
    authentication: {},
    authenticationType: null,
    body: {},
    bodyType: null,
    folderId: null,
    headers: [],
    method: 'GET',
    name: '',
    sortPriority: 0,
    url: '',
    urlParameters: [],
    workspaceId: `GENERATE_ID::WORKSPACE_${idCount.workspace}`,
    ...mergeWith,
  };
}

function baseWorkspace(mergeWith: Partial<Workspace> = {}) {
  idCount.workspace = (idCount.workspace ?? -1) + 1;
  return {
    id: `GENERATE_ID::WORKSPACE_${idCount.workspace}`,
    model: 'workspace',
    name: 'Curl Import',
    ...mergeWith,
  };
}
@@ -1,13 +0,0 @@
import { resolve } from 'path';
import { defineConfig } from 'vite';

export default defineConfig({
  build: {
    lib: {
      entry: resolve(__dirname, 'src/index.ts'),
      fileName: 'index',
      formats: ['es'],
    },
    outDir: resolve(__dirname, '../../src-tauri/plugins/importer-curl'),
  },
});
16 plugins/importer-insomnia/package-lock.json generated
@@ -6,21 +6,7 @@
  "packages": {
    "": {
      "name": "importer-insomnia",
      "version": "0.0.1",
      "dependencies": {
        "yaml": "^2.4.2"
      }
    },
    "node_modules/yaml": {
      "version": "2.4.2",
      "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.2.tgz",
      "integrity": "sha512-B3VqDZ+JAg1nZpaEmWtTXUlBneoGx6CPM9b0TENK6aoSu5t73dItudwdgmi6tHlIZZId4dZ9skcAQ2UbcyAeVA==",
      "bin": {
        "yaml": "bin.mjs"
      },
      "engines": {
        "node": ">= 14"
      }
      "version": "0.0.1"
    }
  }
}
@@ -1,7 +1,4 @@
|
||||
{
|
||||
"name": "importer-insomnia",
|
||||
"version": "0.0.1",
|
||||
"dependencies": {
|
||||
"yaml": "^2.4.2"
|
||||
}
|
||||
"version": "0.0.1"
|
||||
}
|
||||
|
||||
23
plugins/importer-insomnia/src/helpers/types.js
Normal file
@@ -0,0 +1,23 @@
|
||||
export function isWorkspace(obj) {
|
||||
return isJSObject(obj) && obj._type === 'workspace';
|
||||
}
|
||||
|
||||
export function isRequestGroup(obj) {
|
||||
return isJSObject(obj) && obj._type === 'request_group';
|
||||
}
|
||||
|
||||
export function isRequest(obj) {
|
||||
return isJSObject(obj) && obj._type === 'request';
|
||||
}
|
||||
|
||||
export function isEnvironment(obj) {
|
||||
return isJSObject(obj) && obj._type === 'environment';
|
||||
}
|
||||
|
||||
export function isJSObject(obj) {
|
||||
return Object.prototype.toString.call(obj) === '[object Object]';
|
||||
}
|
||||
|
||||
export function isJSString(obj) {
|
||||
return Object.prototype.toString.call(obj) === '[object String]';
|
||||
}
|
||||
18
plugins/importer-insomnia/src/helpers/variables.js
Normal file
@@ -0,0 +1,18 @@
|
||||
import { isJSString } from './types.js';
|
||||
|
||||
export function parseVariables(data) {
|
||||
return Object.entries(data).map(([name, value]) => ({
|
||||
enabled: true,
|
||||
name,
|
||||
value: `${value}`,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Insomnia syntax to Yaak syntax
|
||||
* @param {string} variable - Text to convert
|
||||
*/
|
||||
export function convertSyntax(variable) {
|
||||
if (!isJSString(variable)) return variable;
|
||||
return variable.replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, '${[$2]}');
|
||||
}
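For reference, a rough sketch of what this conversion yields (inputs are hypothetical Insomnia-style strings, not taken from this diff):
// convertSyntax('{{_.base_url}}/users') -> '${[base_url]}/users'
// convertSyntax('{{token}}')            -> '${[token]}'
// convertSyntax(42)                     -> 42 (non-string values pass through unchanged)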
|
||||
21
plugins/importer-insomnia/src/importers/environment.js
Normal file
@@ -0,0 +1,21 @@
|
||||
/**
|
||||
* Import an Insomnia environment object.
|
||||
* @param {Object} e - The environment object to import.
|
||||
* @param workspaceId - Workspace to import into.
|
||||
*/
|
||||
export function importEnvironment(e, workspaceId) {
|
||||
console.log('IMPORTING Environment', e._id, e.name, JSON.stringify(e, null, 2));
|
||||
return {
|
||||
id: e._id,
|
||||
createdAt: new Date(e.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(e.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
workspaceId,
|
||||
model: 'environment',
|
||||
name: e.name,
|
||||
variables: Object.entries(e.data).map(([name, value]) => ({
|
||||
enabled: true,
|
||||
name,
|
||||
value: `${value}`,
|
||||
})),
|
||||
};
|
||||
}
|
||||
17
plugins/importer-insomnia/src/importers/folder.js
Normal file
@@ -0,0 +1,17 @@
|
||||
/**
|
||||
* Import an Insomnia folder object.
|
||||
 * @param {Object} f - The folder object to import.
|
||||
* @param workspaceId - Workspace to import into.
|
||||
*/
|
||||
export function importFolder(f, workspaceId) {
|
||||
console.log('IMPORTING FOLDER', f._id, f.name, JSON.stringify(f, null, 2));
|
||||
return {
|
||||
id: f._id,
|
||||
createdAt: new Date(f.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(f.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
folderId: f.parentId === workspaceId ? null : f.parentId,
|
||||
workspaceId,
|
||||
model: 'folder',
|
||||
name: f.name,
|
||||
};
|
||||
}
|
||||
60
plugins/importer-insomnia/src/importers/request.js
Normal file
@@ -0,0 +1,60 @@
|
||||
import { convertSyntax } from '../helpers/variables.js';
|
||||
|
||||
/**
|
||||
* Import an Insomnia request object.
|
||||
* @param {Object} r - The request object to import.
|
||||
* @param workspaceId - The workspace ID to use for the request.
|
||||
* @param {number} sortPriority - The sort priority to use for the request.
|
||||
*/
|
||||
export function importRequest(r, workspaceId, sortPriority = 0) {
|
||||
console.log('IMPORTING REQUEST', r._id, r.name, JSON.stringify(r, null, 2));
|
||||
|
||||
let bodyType = null;
|
||||
let body = null;
|
||||
if (r.body?.mimeType === 'application/graphql') {
|
||||
bodyType = 'graphql';
|
||||
body = convertSyntax(r.body.text);
|
||||
} else if (r.body?.mimeType === 'application/json') {
|
||||
bodyType = 'application/json';
|
||||
body = convertSyntax(r.body.text);
|
||||
}
|
||||
|
||||
let authenticationType = null;
|
||||
let authentication = {};
|
||||
if (r.authentication.type === 'bearer') {
|
||||
authenticationType = 'bearer';
|
||||
authentication = {
|
||||
token: convertSyntax(r.authentication.token),
|
||||
};
|
||||
} else if (r.authentication.type === 'basic') {
|
||||
authenticationType = 'basic';
|
||||
authentication = {
|
||||
username: convertSyntax(r.authentication.username),
|
||||
password: convertSyntax(r.authentication.password),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
id: r._id,
|
||||
createdAt: new Date(r.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(r.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
workspaceId,
|
||||
folderId: r.parentId === workspaceId ? null : r.parentId,
|
||||
model: 'http_request',
|
||||
sortPriority,
|
||||
name: r.name,
|
||||
url: convertSyntax(r.url),
|
||||
body,
|
||||
bodyType,
|
||||
authentication,
|
||||
authenticationType,
|
||||
method: r.method,
|
||||
headers: (r.headers ?? [])
|
||||
.map(({ name, value, disabled }) => ({
|
||||
enabled: !disabled,
|
||||
name,
|
||||
value,
|
||||
}))
|
||||
.filter(({ name, value }) => name !== '' || value !== ''),
|
||||
};
|
||||
}
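A small sketch of how the header mapping above behaves (hypothetical rows, not from the diff):
// headers: [{ name: 'Accept', value: 'application/json' }, { name: '', value: '' }]
//   -> [{ enabled: true, name: 'Accept', value: 'application/json' }]  (fully empty rows are dropped)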
|
||||
76
plugins/importer-insomnia/src/index.js
Normal file
@@ -0,0 +1,76 @@
|
||||
import { importEnvironment } from './importers/environment.js';
|
||||
import { importRequest } from './importers/request.js';
|
||||
import {
|
||||
isEnvironment,
|
||||
isJSObject,
|
||||
isRequest,
|
||||
isRequestGroup,
|
||||
isWorkspace,
|
||||
} from './helpers/types.js';
|
||||
import { parseVariables } from './helpers/variables.js';
|
||||
import { importFolder } from './importers/folder.js';
|
||||
|
||||
export function pluginHookImport(contents) {
|
||||
console.log('RUNNING INSOMNIA');
|
||||
let parsed;
|
||||
try {
|
||||
parsed = JSON.parse(contents);
|
||||
} catch (e) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isJSObject(parsed)) return;
|
||||
if (!Array.isArray(parsed.resources)) return;
|
||||
|
||||
const resources = {
|
||||
workspaces: [],
|
||||
requests: [],
|
||||
environments: [],
|
||||
folders: [],
|
||||
};
|
||||
|
||||
// Import workspaces
|
||||
const workspacesToImport = parsed.resources.filter(isWorkspace);
|
||||
for (const workspaceToImport of workspacesToImport) {
|
||||
const baseEnvironment = parsed.resources.find(
|
||||
(r) => isEnvironment(r) && r.parentId === workspaceToImport._id,
|
||||
);
|
||||
resources.workspaces.push({
|
||||
id: workspaceToImport._id,
|
||||
      createdAt: new Date(workspaceToImport.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
      updatedAt: new Date(workspaceToImport.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
model: 'workspace',
|
||||
name: workspaceToImport.name,
|
||||
variables: baseEnvironment ? parseVariables(baseEnvironment.data) : [],
|
||||
});
|
||||
const environmentsToImport = parsed.resources.filter(
|
||||
(r) => isEnvironment(r) && r.parentId === baseEnvironment?._id,
|
||||
);
|
||||
resources.environments.push(
|
||||
...environmentsToImport.map((r) => importEnvironment(r, workspaceToImport._id)),
|
||||
);
|
||||
|
||||
const nextFolder = (parentId) => {
|
||||
const children = parsed.resources.filter((r) => r.parentId === parentId);
|
||||
let sortPriority = 0;
|
||||
for (const child of children) {
|
||||
if (isRequestGroup(child)) {
|
||||
resources.folders.push(importFolder(child, workspaceToImport._id));
|
||||
nextFolder(child._id);
|
||||
} else if (isRequest(child)) {
|
||||
resources.requests.push(importRequest(child, workspaceToImport._id, sortPriority++));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Import folders
|
||||
nextFolder(workspaceToImport._id);
|
||||
}
|
||||
|
||||
// Filter out any `null` values
|
||||
resources.requests = resources.requests.filter(Boolean);
|
||||
resources.environments = resources.environments.filter(Boolean);
|
||||
resources.workspaces = resources.workspaces.filter(Boolean);
|
||||
|
||||
return { resources };
|
||||
}
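A rough trace of the nextFolder recursion above, assuming a hypothetical export (IDs and names invented for illustration):
// resources: wrk_1 (workspace), fld_1 (request_group, parentId: wrk_1),
//            req_a (request, parentId: fld_1), req_b (request, parentId: wrk_1)
// nextFolder('wrk_1') pushes fld_1 as a folder and recurses into nextFolder('fld_1'),
// which imports req_a with sortPriority 0; back at the top level, req_b also gets
// sortPriority 0, since the counter is local to each call and only increments for requests.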
|
||||
@@ -1,278 +0,0 @@
|
||||
import {
|
||||
Environment,
|
||||
Folder,
|
||||
GrpcRequest,
|
||||
HttpRequest,
|
||||
Workspace,
|
||||
} from '../../../src-web/lib/models';
|
||||
import { parse as parseYaml } from 'yaml';
|
||||
|
||||
type AtLeast<T, K extends keyof T> = Partial<T> & Pick<T, K>;
|
||||
|
||||
export interface ExportResources {
|
||||
workspaces: AtLeast<Workspace, 'name' | 'id' | 'model'>[];
|
||||
environments: AtLeast<Environment, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
httpRequests: AtLeast<HttpRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
grpcRequests: AtLeast<GrpcRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
folders: AtLeast<Folder, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
}
|
||||
|
||||
export function pluginHookImport(contents: string) {
|
||||
let parsed: any;
|
||||
|
||||
try {
|
||||
parsed = JSON.parse(contents);
|
||||
} catch (e) {}
|
||||
|
||||
try {
|
||||
parsed = parseYaml(contents);
|
||||
} catch (e) {}
|
||||
|
||||
if (!isJSObject(parsed)) return;
|
||||
if (!Array.isArray(parsed.resources)) return;
|
||||
|
||||
const resources: ExportResources = {
|
||||
workspaces: [],
|
||||
httpRequests: [],
|
||||
grpcRequests: [],
|
||||
environments: [],
|
||||
folders: [],
|
||||
};
|
||||
|
||||
// Import workspaces
|
||||
const workspacesToImport = parsed.resources.filter(isWorkspace);
|
||||
for (const workspaceToImport of workspacesToImport) {
|
||||
const baseEnvironment = parsed.resources.find(
|
||||
(r: any) => isEnvironment(r) && r.parentId === workspaceToImport._id,
|
||||
);
|
||||
resources.workspaces.push({
|
||||
id: convertId(workspaceToImport._id),
|
||||
createdAt: new Date(workspacesToImport.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(workspacesToImport.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
model: 'workspace',
|
||||
name: workspaceToImport.name,
|
||||
variables: baseEnvironment ? parseVariables(baseEnvironment.data) : [],
|
||||
});
|
||||
const environmentsToImport = parsed.resources.filter(
|
||||
(r: any) => isEnvironment(r) && r.parentId === baseEnvironment?._id,
|
||||
);
|
||||
resources.environments.push(
|
||||
...environmentsToImport.map((r: any) => importEnvironment(r, workspaceToImport._id)),
|
||||
);
|
||||
|
||||
const nextFolder = (parentId: string) => {
|
||||
const children = parsed.resources.filter((r: any) => r.parentId === parentId);
|
||||
let sortPriority = 0;
|
||||
for (const child of children) {
|
||||
if (isRequestGroup(child)) {
|
||||
resources.folders.push(importFolder(child, workspaceToImport._id));
|
||||
nextFolder(child._id);
|
||||
} else if (isHttpRequest(child)) {
|
||||
resources.httpRequests.push(
|
||||
importHttpRequest(child, workspaceToImport._id, sortPriority++),
|
||||
);
|
||||
} else if (isGrpcRequest(child)) {
|
||||
resources.grpcRequests.push(
|
||||
importGrpcRequest(child, workspaceToImport._id, sortPriority++),
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Import folders
|
||||
nextFolder(workspaceToImport._id);
|
||||
}
|
||||
|
||||
// Filter out any `null` values
|
||||
resources.httpRequests = resources.httpRequests.filter(Boolean);
|
||||
resources.grpcRequests = resources.grpcRequests.filter(Boolean);
|
||||
resources.environments = resources.environments.filter(Boolean);
|
||||
resources.workspaces = resources.workspaces.filter(Boolean);
|
||||
|
||||
return { resources };
|
||||
}
|
||||
|
||||
function importEnvironment(e: any, workspaceId: string): ExportResources['environments'][0] {
|
||||
return {
|
||||
id: convertId(e._id),
|
||||
createdAt: new Date(e.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(e.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
workspaceId: convertId(workspaceId),
|
||||
model: 'environment',
|
||||
name: e.name,
|
||||
variables: Object.entries(e.data).map(([name, value]) => ({
|
||||
enabled: true,
|
||||
name,
|
||||
value: `${value}`,
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
function importFolder(f: any, workspaceId: string): ExportResources['folders'][0] {
|
||||
return {
|
||||
id: convertId(f._id),
|
||||
createdAt: new Date(f.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(f.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
folderId: f.parentId === workspaceId ? null : convertId(f.parentId),
|
||||
workspaceId: convertId(workspaceId),
|
||||
model: 'folder',
|
||||
name: f.name,
|
||||
};
|
||||
}
|
||||
|
||||
function importGrpcRequest(
|
||||
r: any,
|
||||
workspaceId: string,
|
||||
sortPriority = 0,
|
||||
): ExportResources['grpcRequests'][0] {
|
||||
const parts = r.protoMethodName.split('/').filter((p: any) => p !== '');
|
||||
const service = parts[0] ?? null;
|
||||
const method = parts[1] ?? null;
|
||||
|
||||
return {
|
||||
id: convertId(r._id),
|
||||
createdAt: new Date(r.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(r.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
workspaceId: convertId(workspaceId),
|
||||
folderId: r.parentId === workspaceId ? null : convertId(r.parentId),
|
||||
model: 'grpc_request',
|
||||
sortPriority,
|
||||
name: r.name,
|
||||
url: convertSyntax(r.url),
|
||||
service,
|
||||
method,
|
||||
message: r.body?.text ?? '',
|
||||
metadata: (r.metadata ?? [])
|
||||
.map((h: any) => ({
|
||||
enabled: !h.disabled,
|
||||
name: h.name ?? '',
|
||||
value: h.value ?? '',
|
||||
}))
|
||||
.filter(({ name, value }: any) => name !== '' || value !== ''),
|
||||
};
|
||||
}
|
||||
|
||||
function importHttpRequest(
|
||||
r: any,
|
||||
workspaceId: string,
|
||||
sortPriority = 0,
|
||||
): ExportResources['httpRequests'][0] {
|
||||
let bodyType = null;
|
||||
let body = {};
|
||||
if (r.body.mimeType === 'application/octet-stream') {
|
||||
bodyType = 'binary';
|
||||
body = { filePath: r.body.fileName ?? '' };
|
||||
} else if (r.body?.mimeType === 'application/x-www-form-urlencoded') {
|
||||
bodyType = 'application/x-www-form-urlencoded';
|
||||
body = {
|
||||
form: (r.body.params ?? []).map((p: any) => ({
|
||||
enabled: !p.disabled,
|
||||
name: p.name ?? '',
|
||||
value: p.value ?? '',
|
||||
})),
|
||||
};
|
||||
} else if (r.body?.mimeType === 'multipart/form-data') {
|
||||
bodyType = 'multipart/form-data';
|
||||
body = {
|
||||
form: (r.body.params ?? []).map((p: any) => ({
|
||||
enabled: !p.disabled,
|
||||
name: p.name ?? '',
|
||||
value: p.value ?? '',
|
||||
file: p.fileName ?? null,
|
||||
})),
|
||||
};
|
||||
} else if (r.body?.mimeType === 'application/graphql') {
|
||||
bodyType = 'graphql';
|
||||
body = { text: convertSyntax(r.body.text ?? '') };
|
||||
} else if (r.body?.mimeType === 'application/json') {
|
||||
bodyType = 'application/json';
|
||||
body = { text: convertSyntax(r.body.text ?? '') };
|
||||
}
|
||||
|
||||
let authenticationType = null;
|
||||
let authentication = {};
|
||||
if (r.authentication.type === 'bearer') {
|
||||
authenticationType = 'bearer';
|
||||
authentication = {
|
||||
token: convertSyntax(r.authentication.token),
|
||||
};
|
||||
} else if (r.authentication.type === 'basic') {
|
||||
authenticationType = 'basic';
|
||||
authentication = {
|
||||
username: convertSyntax(r.authentication.username),
|
||||
password: convertSyntax(r.authentication.password),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
id: convertId(r._id),
|
||||
createdAt: new Date(r.created ?? Date.now()).toISOString().replace('Z', ''),
|
||||
updatedAt: new Date(r.updated ?? Date.now()).toISOString().replace('Z', ''),
|
||||
workspaceId: convertId(workspaceId),
|
||||
folderId: r.parentId === workspaceId ? null : convertId(r.parentId),
|
||||
model: 'http_request',
|
||||
sortPriority,
|
||||
name: r.name,
|
||||
url: convertSyntax(r.url),
|
||||
body,
|
||||
bodyType,
|
||||
authentication,
|
||||
authenticationType,
|
||||
method: r.method,
|
||||
headers: (r.headers ?? [])
|
||||
.map((h: any) => ({
|
||||
enabled: !h.disabled,
|
||||
name: h.name ?? '',
|
||||
value: h.value ?? '',
|
||||
}))
|
||||
.filter(({ name, value }: any) => name !== '' || value !== ''),
|
||||
};
|
||||
}
|
||||
|
||||
function parseVariables(data: Record<string, string>) {
|
||||
return Object.entries(data).map(([name, value]) => ({
|
||||
enabled: true,
|
||||
name,
|
||||
value: `${value}`,
|
||||
}));
|
||||
}
|
||||
|
||||
function convertSyntax(variable: string): string {
|
||||
if (!isJSString(variable)) return variable;
|
||||
return variable.replaceAll(/{{\s*(_\.)?([^}]+)\s*}}/g, '${[$2]}');
|
||||
}
|
||||
|
||||
function isWorkspace(obj: any) {
|
||||
return isJSObject(obj) && obj._type === 'workspace';
|
||||
}
|
||||
|
||||
function isRequestGroup(obj: any) {
|
||||
return isJSObject(obj) && obj._type === 'request_group';
|
||||
}
|
||||
|
||||
function isHttpRequest(obj: any) {
|
||||
return isJSObject(obj) && obj._type === 'request';
|
||||
}
|
||||
|
||||
function isGrpcRequest(obj: any) {
|
||||
return isJSObject(obj) && obj._type === 'grpc_request';
|
||||
}
|
||||
|
||||
function isEnvironment(obj: any) {
|
||||
return isJSObject(obj) && obj._type === 'environment';
|
||||
}
|
||||
|
||||
function isJSObject(obj: any) {
|
||||
return Object.prototype.toString.call(obj) === '[object Object]';
|
||||
}
|
||||
|
||||
function isJSString(obj: any) {
|
||||
return Object.prototype.toString.call(obj) === '[object String]';
|
||||
}
|
||||
|
||||
function convertId(id: string): string {
|
||||
if (id.startsWith('GENERATE_ID::')) {
|
||||
return id;
|
||||
}
|
||||
return `GENERATE_ID::${id}`;
|
||||
}
|
||||
@@ -4,7 +4,7 @@ import { defineConfig } from 'vite';
|
||||
export default defineConfig({
|
||||
build: {
|
||||
lib: {
|
||||
entry: resolve(__dirname, 'src/index.ts'),
|
||||
entry: resolve(__dirname, 'src/index.js'),
|
||||
fileName: 'index',
|
||||
formats: ['es'],
|
||||
},
|
||||
|
||||
1495
plugins/importer-postman/package-lock.json
generated
File diff suppressed because it is too large
@@ -1,7 +1,4 @@
|
||||
{
|
||||
"name": "importer-postman",
|
||||
"version": "0.0.1",
|
||||
"devDependencies": {
|
||||
"vitest": "^1.4.0"
|
||||
}
|
||||
"version": "0.0.1"
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Environment, Folder, HttpRequest, Model, Workspace } from '../../../src-web/lib/models';
|
||||
import { Environment, Folder, HttpRequest, Workspace } from '../../../src-web/lib/models';
|
||||
|
||||
const POSTMAN_2_1_0_SCHEMA = 'https://schema.getpostman.com/json/collection/v2.1.0/collection.json';
|
||||
const POSTMAN_2_0_0_SCHEMA = 'https://schema.getpostman.com/json/collection/v2.0.0/collection.json';
|
||||
@@ -9,7 +9,7 @@ type AtLeast<T, K extends keyof T> = Partial<T> & Pick<T, K>;
|
||||
interface ExportResources {
|
||||
workspaces: AtLeast<Workspace, 'name' | 'id' | 'model'>[];
|
||||
environments: AtLeast<Environment, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
httpRequests: AtLeast<HttpRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
requests: AtLeast<HttpRequest, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
folders: AtLeast<Folder, 'name' | 'id' | 'model' | 'workspaceId'>[];
|
||||
}
|
||||
|
||||
@@ -23,25 +23,18 @@ export function pluginHookImport(contents: string): { resources: ExportResources
|
||||
return;
|
||||
}
|
||||
|
||||
const globalAuth = importAuth(root.auth);
|
||||
|
||||
const exportResources: ExportResources = {
|
||||
workspaces: [],
|
||||
environments: [],
|
||||
httpRequests: [],
|
||||
requests: [],
|
||||
folders: [],
|
||||
};
|
||||
|
||||
const workspace: ExportResources['workspaces'][0] = {
|
||||
model: 'workspace',
|
||||
id: generateId('workspace'),
|
||||
id: generateId('wk'),
|
||||
name: info.name || 'Postman Import',
|
||||
description: info.description || '',
|
||||
variables:
|
||||
root.variable?.map((v: any) => ({
|
||||
name: v.key,
|
||||
value: v.value,
|
||||
})) ?? [],
|
||||
};
|
||||
exportResources.workspaces.push(workspace);
|
||||
|
||||
@@ -50,7 +43,7 @@ export function pluginHookImport(contents: string): { resources: ExportResources
|
||||
const folder: ExportResources['folders'][0] = {
|
||||
model: 'folder',
|
||||
workspaceId: workspace.id,
|
||||
id: generateId('folder'),
|
||||
id: generateId('fl'),
|
||||
name: v.name,
|
||||
folderId,
|
||||
};
|
||||
@@ -61,11 +54,10 @@ export function pluginHookImport(contents: string): { resources: ExportResources
|
||||
} else if (typeof v.name === 'string' && 'request' in v) {
|
||||
const r = toRecord(v.request);
|
||||
const bodyPatch = importBody(r.body);
|
||||
const requestAuthPath = importAuth(r.auth);
|
||||
const authPatch = requestAuthPath.authenticationType == null ? globalAuth : requestAuthPath;
|
||||
const request: ExportResources['httpRequests'][0] = {
|
||||
const authPatch = importAuth(r.auth);
|
||||
const request: ExportResources['requests'][0] = {
|
||||
model: 'http_request',
|
||||
id: generateId('http_request'),
|
||||
id: generateId('rq'),
|
||||
workspaceId: workspace.id,
|
||||
folderId,
|
||||
name: v.name,
|
||||
@@ -87,7 +79,7 @@ export function pluginHookImport(contents: string): { resources: ExportResources
|
||||
}),
|
||||
],
|
||||
};
|
||||
exportResources.httpRequests.push(request);
|
||||
exportResources.requests.push(request);
|
||||
} else {
|
||||
console.log('Unknown item', v, folderId);
|
||||
}
|
||||
@@ -113,14 +105,6 @@ function importAuth(
|
||||
password: auth.basic.password || '',
|
||||
},
|
||||
};
|
||||
} else if ('bearer' in auth) {
|
||||
return {
|
||||
headers: [],
|
||||
authenticationType: 'bearer',
|
||||
authentication: {
|
||||
token: auth.bearer.token || '',
|
||||
},
|
||||
};
|
||||
} else {
|
||||
// TODO: support other auth types
|
||||
return { headers: [], authenticationType: null, authentication: {} };
|
||||
@@ -180,7 +164,6 @@ function importBody(rawBody: any): Pick<HttpRequest, 'body' | 'bodyType' | 'head
|
||||
f.src != null
|
||||
? {
|
||||
enabled: !f.disabled,
|
||||
contentType: f.contentType ?? null,
|
||||
name: f.key ?? '',
|
||||
file: f.src ?? '',
|
||||
}
|
||||
@@ -192,20 +175,6 @@ function importBody(rawBody: any): Pick<HttpRequest, 'body' | 'bodyType' | 'head
|
||||
),
|
||||
},
|
||||
};
|
||||
} else if ('raw' in body) {
|
||||
return {
|
||||
headers: [
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: body.options?.raw?.language === 'json' ? 'application/json' : '',
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
bodyType: body.options?.raw?.language === 'json' ? 'application/json' : 'other',
|
||||
body: {
|
||||
text: body.raw ?? '',
|
||||
},
|
||||
};
|
||||
} else {
|
||||
// TODO: support other body types
|
||||
return { headers: [], bodyType: null, body: {} };
|
||||
@@ -244,9 +213,11 @@ function convertTemplateSyntax<T>(obj: T): T {
|
||||
}
|
||||
}
|
||||
|
||||
const idCount: Partial<Record<Model['model'], number>> = {};
|
||||
|
||||
function generateId(model: Model['model']): string {
|
||||
idCount[model] = (idCount[model] ?? -1) + 1;
|
||||
return `GENERATE_ID::${model.toUpperCase()}_${idCount[model]}`;
|
||||
function generateId(prefix: 'wk' | 'rq' | 'fl'): string {
|
||||
const alphabet = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
|
||||
let id = `${prefix}_`;
|
||||
for (let i = 0; i < 10; i++) {
|
||||
id += alphabet[Math.floor(Math.random() * alphabet.length)];
|
||||
}
|
||||
return id;
|
||||
}
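A quick usage sketch of the prefix-based generateId variant above (the suffix is random, so the value shown is only illustrative):
// generateId('wk') -> e.g. 'wk_a8Kp02QzXw' (prefix + 10 random alphanumeric characters)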
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
{
|
||||
"info": {
|
||||
"_postman_id": "9e6dfada-256c-49ea-a38f-7d1b05b7ca2d",
|
||||
"name": "New Collection",
|
||||
"schema": "https://schema.getpostman.com/json/collection/v2.0.0/collection.json",
|
||||
"_exporter_id": "18798"
|
||||
},
|
||||
"item": [
|
||||
{
|
||||
"name": "Top Folder",
|
||||
"item": [
|
||||
{
|
||||
"name": "Nested Folder",
|
||||
"item": [
|
||||
{
|
||||
"name": "Request 1",
|
||||
"request": {
|
||||
"method": "GET"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Request 2",
|
||||
"request": {
|
||||
"method": "GET"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Request 3",
|
||||
"request": {
|
||||
"method": "GET"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,90 +0,0 @@
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
import { Model } from '../../../src-web/lib/models';
|
||||
import { pluginHookImport } from '../src';
|
||||
|
||||
let originalRandom = Math.random;
|
||||
|
||||
describe('importer-postman', () => {
|
||||
beforeEach(() => {
|
||||
let i = 0;
|
||||
    // Pseudo-random number generator to ensure consistent ID generation
|
||||
Math.random = vi.fn(() => ((i++ * 1000) % 133) / 100);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
Math.random = originalRandom;
|
||||
});
|
||||
|
||||
const p = path.join(__dirname, 'fixtures');
|
||||
const fixtures = fs.readdirSync(p);
|
||||
|
||||
for (const fixture of fixtures) {
|
||||
test('Imports ' + fixture, () => {
|
||||
const contents = fs.readFileSync(path.join(p, fixture), 'utf-8');
|
||||
const imported = pluginHookImport(contents);
|
||||
const folder0 = newId('folder');
|
||||
const folder1 = newId('folder');
|
||||
expect(imported).toEqual({
|
||||
resources: expect.objectContaining({
|
||||
workspaces: [
|
||||
expect.objectContaining({
|
||||
id: newId('workspace'),
|
||||
model: 'workspace',
|
||||
name: 'New Collection',
|
||||
}),
|
||||
],
|
||||
folders: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
id: folder0,
|
||||
model: 'folder',
|
||||
workspaceId: existingId('workspace'),
|
||||
name: 'Top Folder',
|
||||
}),
|
||||
expect.objectContaining({
|
||||
folderId: folder0,
|
||||
id: folder1,
|
||||
model: 'folder',
|
||||
workspaceId: existingId('workspace'),
|
||||
name: 'Nested Folder',
|
||||
}),
|
||||
]),
|
||||
httpRequests: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
id: newId('http_request'),
|
||||
model: 'http_request',
|
||||
name: 'Request 1',
|
||||
workspaceId: existingId('workspace'),
|
||||
folderId: folder1,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
id: newId('http_request'),
|
||||
model: 'http_request',
|
||||
name: 'Request 2',
|
||||
workspaceId: existingId('workspace'),
|
||||
folderId: folder0,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
id: newId('http_request'),
|
||||
model: 'http_request',
|
||||
name: 'Request 3',
|
||||
workspaceId: existingId('workspace'),
|
||||
folderId: null,
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const idCount: Partial<Record<Model['model'], number>> = {};
|
||||
function newId(model: Model['model']): string {
|
||||
idCount[model] = (idCount[model] ?? -1) + 1;
|
||||
return `GENERATE_ID::${model.toUpperCase()}_${idCount[model]}`;
|
||||
}
|
||||
|
||||
function existingId(model: Model['model']): string {
|
||||
return `GENERATE_ID::${model.toUpperCase()}_${idCount[model] ?? 0}`;
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
export function pluginHookImport(contents: string) {
|
||||
export function pluginHookImport(contents) {
|
||||
let parsed;
|
||||
try {
|
||||
parsed = JSON.parse(contents);
|
||||
@@ -10,20 +10,23 @@ export function pluginHookImport(contents: string) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const isYaakExport = 'yaakSchema' in parsed;
|
||||
if (!isYaakExport) {
|
||||
if (!('yaakSchema' in parsed)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Migrate v1 to v2 -- changes requests to httpRequests
|
||||
if ('requests' in parsed.resources) {
|
||||
if (parsed.yaakSchema === 1) {
|
||||
parsed.resources.httpRequests = parsed.resources.requests;
|
||||
delete parsed.resources.requests;
|
||||
parsed.yaakSchema = 2;
|
||||
}
|
||||
|
||||
return { resources: parsed.resources }; // Should already be in the correct format
|
||||
if (parsed.yaakSchema === 2) {
|
||||
return { resources: parsed.resources }; // Should already be in the correct format
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
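A minimal sketch of the schema migration path above (hypothetical payload):
// pluginHookImport('{"yaakSchema":1,"resources":{"requests":[]}}')
//   -> { resources: { httpRequests: [] } }  (v1 'requests' is renamed to 'httpRequests', then returned as v2)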
|
||||
|
||||
export function isJSObject(obj: any) {
|
||||
export function isJSObject(obj) {
|
||||
return Object.prototype.toString.call(obj) === '[object Object]';
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
import { describe, expect, test } from 'vitest';
|
||||
import { pluginHookImport } from '../src';
|
||||
|
||||
describe('importer-yaak', () => {
|
||||
test('Skips invalid imports', () => {
|
||||
expect(pluginHookImport('not JSON')).toBeUndefined();
|
||||
expect(pluginHookImport('[]')).toBeUndefined();
|
||||
expect(pluginHookImport(JSON.stringify({ resources: {} }))).toBeUndefined();
|
||||
});
|
||||
|
||||
test('converts schema 1 to 2', () => {
|
||||
const imported = pluginHookImport(
|
||||
JSON.stringify({
|
||||
yaakSchema: 1,
|
||||
resources: {
|
||||
requests: [],
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
expect(imported).toEqual(
|
||||
expect.objectContaining({
|
||||
resources: {
|
||||
httpRequests: [],
|
||||
},
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -4,7 +4,7 @@ import { defineConfig } from 'vite';
|
||||
export default defineConfig({
|
||||
build: {
|
||||
lib: {
|
||||
entry: resolve(__dirname, 'src/index.ts'),
|
||||
entry: resolve(__dirname, 'src/index.js'),
|
||||
fileName: 'index',
|
||||
formats: ['es'],
|
||||
},
|
||||
|
||||
12
src-tauri/.sqlx/query-48ec5fdf20f34add763c540061caa25054545503704e19f149987f99b1a0e4f0.json
generated
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n UPDATE settings SET (\n theme, appearance, update_channel\n ) = (?, ?, ?) WHERE id = 'default';\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 3
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "48ec5fdf20f34add763c540061caa25054545503704e19f149987f99b1a0e4f0"
|
||||
}
|
||||
56
src-tauri/.sqlx/query-b32994b09ae7a06eb0f031069d327e55127a5bce60cbb499b83d1701386a23cb.json
generated
Normal file
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, created_at, updated_at, theme, appearance, update_channel\n FROM settings\n WHERE id = 'default'\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "model",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 2,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"ordinal": 3,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "theme",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "appearance",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "update_channel",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "b32994b09ae7a06eb0f031069d327e55127a5bce60cbb499b83d1701386a23cb"
|
||||
}
|
||||
@@ -1,92 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n SELECT\n id, model, created_at, updated_at, theme, appearance,\n theme_dark, theme_light, update_channel,\n interface_font_size, interface_scale, editor_font_size, editor_soft_wrap\n FROM settings\n WHERE id = 'default'\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "model",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "created_at",
|
||||
"ordinal": 2,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "updated_at",
|
||||
"ordinal": 3,
|
||||
"type_info": "Datetime"
|
||||
},
|
||||
{
|
||||
"name": "theme",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "appearance",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "theme_dark",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "theme_light",
|
||||
"ordinal": 7,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "update_channel",
|
||||
"ordinal": 8,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "interface_font_size",
|
||||
"ordinal": 9,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "interface_scale",
|
||||
"ordinal": 10,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "editor_font_size",
|
||||
"ordinal": 11,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "editor_soft_wrap",
|
||||
"ordinal": 12,
|
||||
"type_info": "Bool"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "ca3485d87b060cd77c4114d2af544adf18f6f15341d9d5db40865e92a80da4e2"
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "\n UPDATE settings SET (\n theme, appearance, theme_dark, theme_light, update_channel,\n interface_font_size, interface_scale, editor_font_size, editor_soft_wrap\n ) = (?, ?, ?, ?, ?, ?, ?, ?, ?) WHERE id = 'default';\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 9
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "efd8ba41ea909b18dd520c57c1d464c5ae057b720cbbedcaec1513d43535632c"
|
||||
}
|
||||
4242
src-tauri/Cargo.lock
generated
File diff suppressed because it is too large
@@ -1,20 +1,18 @@
|
||||
workspace = { members = ["grpc"] }
|
||||
|
||||
[package]
|
||||
name = "yaak-app"
|
||||
version = "0.0.0"
|
||||
description = "A network protocol testing utility app"
|
||||
authors = ["Gregory Schier"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/gschier/yaak-app"
|
||||
edition = "2021"
|
||||
|
||||
# Produce a library for mobile support
|
||||
[lib]
|
||||
name = "tauri_app_lib"
|
||||
crate-type = ["staticlib", "cdylib", "lib"]
|
||||
|
||||
[profile.release]
|
||||
strip = true # Automatically strip symbols from the binary.
|
||||
|
||||
[build-dependencies]
|
||||
tauri-build = { version = "2.0.0-beta", features = [] }
|
||||
tauri-build = { version = "1.5", features = [] }
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
objc = "0.2.7"
|
||||
@@ -24,32 +22,53 @@ cocoa = "0.25.0"
|
||||
openssl-sys = { version = "0.9", features = ["vendored"] } # For Ubuntu installation to work
|
||||
|
||||
[dependencies]
|
||||
base64 = "0.22.0"
|
||||
boa_engine = { version = "0.18.0", features = ["annex-b"] }
|
||||
boa_runtime = { version = "0.18.0" }
|
||||
base64 = "0.21.0"
|
||||
boa_engine = { version = "0.17.3", features = ["annex-b"] }
|
||||
boa_runtime = { version = "0.17.3" }
|
||||
chrono = { version = "0.4.31", features = ["serde"] }
|
||||
http = "0.2.10"
|
||||
futures = "0.3.26"
|
||||
http = "0.2.8"
|
||||
rand = "0.8.5"
|
||||
reqwest = { version = "0.11.23", features = ["multipart", "cookies", "gzip", "brotli", "deflate", "json"] }
|
||||
serde = { version = "1.0.198", features = ["derive"] }
|
||||
serde_json = { version = "1.0.116", features = ["raw_value"] }
|
||||
sqlx = { version = "0.7.4", features = ["sqlite", "runtime-tokio-rustls", "json", "chrono", "time"] }
|
||||
tauri = { version = "2.0.0-beta", features = ["config-toml", "devtools", "protocol-asset"] }
|
||||
tauri-plugin-clipboard-manager = "2.1.0-beta"
|
||||
tauri-plugin-dialog = "2.0.0-beta"
|
||||
tauri-plugin-log = { version = "2.0.0-beta", features = ["colored"] }
|
||||
tauri-plugin-shell = "2.0.0-beta"
|
||||
tauri-plugin-os = "2.0.0-beta"
|
||||
tauri-plugin-updater = "2.0.0-beta"
|
||||
tauri-plugin-window-state = "2.0.0-beta"
|
||||
tauri-plugin-fs = "2.0.0-beta"
|
||||
tauri-plugin-deep-link = "2.0.0-beta"
|
||||
tokio = { version = "1.36.0", features = ["sync"] }
|
||||
uuid = "1.7.0"
|
||||
log = "0.4.21"
|
||||
reqwest = { version = "0.11.23", features = ["multipart", "cookies", "gzip", "brotli", "deflate"] }
|
||||
cookie = { version = "0.18.0" }
|
||||
serde = { version = "1.0.195", features = ["derive"] }
|
||||
serde_json = { version = "1.0.111", features = ["raw_value"] }
|
||||
sqlx = { version = "0.7.3", features = ["sqlite", "runtime-tokio-rustls", "json", "chrono", "time"] }
|
||||
tauri = { version = "1.5.4", features = [
|
||||
"config-toml",
|
||||
"path-all",
|
||||
"devtools",
|
||||
"dialog-open",
|
||||
"dialog-save",
|
||||
"fs-read-file",
|
||||
"os-all",
|
||||
"protocol-asset",
|
||||
"shell-open",
|
||||
"shell-sidecar",
|
||||
"updater",
|
||||
"window-close",
|
||||
"window-maximize",
|
||||
"window-minimize",
|
||||
"window-set-decorations",
|
||||
"window-set-title",
|
||||
"window-start-dragging",
|
||||
"window-unmaximize",
|
||||
] }
|
||||
tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
|
||||
tauri-plugin-log = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1", features = ["colored"] }
|
||||
tokio = { version = "1.25.0", features = ["sync"] }
|
||||
uuid = "1.3.0"
|
||||
log = "0.4.20"
|
||||
datetime = "0.5.2"
|
||||
window-shadows = "0.2.2"
|
||||
reqwest_cookie_store = "0.6.0"
|
||||
grpc = { path = "./grpc" }
|
||||
tokio-stream = "0.1.15"
|
||||
regex = "1.10.2"
|
||||
hex_color = "3.0.0"
|
||||
tokio-stream = "0.1.14"
|
||||
|
||||
[features]
|
||||
# by default Tauri runs in production mode
|
||||
# when `tauri dev` runs it is executed with `cargo run --no-default-features` if `devPath` is a URL
|
||||
default = ["custom-protocol"]
|
||||
# this feature is used for production builds where `devPath` points to the filesystem
|
||||
# DO NOT remove this
|
||||
custom-protocol = ["tauri/custom-protocol"]
|
||||
|
||||
@@ -1,56 +0,0 @@
|
||||
{
|
||||
"$schema": "../gen/schemas/capabilities.json",
|
||||
"identifier": "main",
|
||||
"description": "Main permissions",
|
||||
"local": true,
|
||||
"windows": [
|
||||
"*"
|
||||
],
|
||||
"permissions": [
|
||||
"os:allow-os-type",
|
||||
"event:allow-emit",
|
||||
"clipboard-manager:allow-write-text",
|
||||
"clipboard-manager:allow-read-text",
|
||||
"dialog:allow-open",
|
||||
"dialog:allow-save",
|
||||
"event:allow-listen",
|
||||
"event:allow-unlisten",
|
||||
"fs:allow-read-file",
|
||||
"fs:allow-read-text-file",
|
||||
{
|
||||
"identifier": "fs:scope",
|
||||
"allow": [
|
||||
{
|
||||
"path": "$APPDATA"
|
||||
},
|
||||
{
|
||||
"path": "$APPDATA/**"
|
||||
}
|
||||
]
|
||||
},
|
||||
"shell:allow-open",
|
||||
{
|
||||
"identifier": "shell:allow-execute",
|
||||
"allow": [
|
||||
{
|
||||
"name": "protoc",
|
||||
"sidecar": true,
|
||||
"args": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"webview:allow-set-webview-zoom",
|
||||
"window:allow-close",
|
||||
"window:allow-is-fullscreen",
|
||||
"window:allow-maximize",
|
||||
"window:allow-minimize",
|
||||
"window:allow-toggle-maximize",
|
||||
"window:allow-set-decorations",
|
||||
"window:allow-set-title",
|
||||
"window:allow-start-dragging",
|
||||
"window:allow-unmaximize",
|
||||
"window:allow-theme",
|
||||
"clipboard-manager:allow-read-text",
|
||||
"clipboard-manager:allow-write-text"
|
||||
]
|
||||
}
|
||||
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
{"main":{"identifier":"main","description":"Main permissions","local":true,"windows":["*"],"permissions":["os:allow-os-type","event:allow-emit","clipboard-manager:allow-write-text","clipboard-manager:allow-read-text","dialog:allow-open","dialog:allow-save","event:allow-listen","event:allow-unlisten","fs:allow-read-file","fs:allow-read-text-file",{"identifier":"fs:scope","allow":[{"path":"$APPDATA"},{"path":"$APPDATA/**"}]},"shell:allow-open",{"identifier":"shell:allow-execute","allow":[{"args":true,"name":"protoc","sidecar":true}]},"webview:allow-set-webview-zoom","window:allow-close","window:allow-is-fullscreen","window:allow-maximize","window:allow-minimize","window:allow-toggle-maximize","window:allow-set-decorations","window:allow-set-title","window:allow-start-dragging","window:allow-unmaximize","window:allow-theme","clipboard-manager:allow-read-text","clipboard-manager:allow-write-text"]}}
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -14,9 +14,10 @@ serde = { version = "1.0.196", features = ["derive"] }
|
||||
serde_json = "1.0.113"
|
||||
prost-reflect = { version = "0.12.0", features = ["serde", "derive"] }
|
||||
log = "0.4.20"
|
||||
once_cell = { version = "1.19.0", features = [] }
|
||||
anyhow = "1.0.79"
|
||||
hyper = { version = "0.14" }
|
||||
hyper-rustls = { version = "0.24.0", features = ["http2"] }
|
||||
protoc-bin-vendored = "3.0.0"
|
||||
uuid = { version = "1.7.0", features = ["v4"] }
|
||||
tauri = { version = "2.0.0-beta" }
|
||||
tauri-plugin-shell = "2.0.0-beta"
|
||||
tauri = { version = "1.5.4", features = ["process-command-api"]}
|
||||
|
||||
@@ -8,7 +8,6 @@ use hyper_rustls::HttpsConnector;
|
||||
pub use prost_reflect::DynamicMessage;
|
||||
use prost_reflect::{DescriptorPool, MethodDescriptor, ServiceDescriptor};
|
||||
use serde_json::Deserializer;
|
||||
use tauri::AppHandle;
|
||||
use tokio_stream::wrappers::ReceiverStream;
|
||||
use tonic::body::BoxBody;
|
||||
use tonic::metadata::{MetadataKey, MetadataValue};
|
||||
@@ -167,17 +166,13 @@ impl GrpcConnection {
|
||||
}
|
||||
|
||||
pub struct GrpcHandle {
|
||||
app_handle: AppHandle,
|
||||
pools: HashMap<String, DescriptorPool>,
|
||||
}
|
||||
|
||||
impl GrpcHandle {
|
||||
pub fn new(app_handle: &AppHandle) -> Self {
|
||||
impl Default for GrpcHandle {
|
||||
fn default() -> Self {
|
||||
let pools = HashMap::new();
|
||||
Self {
|
||||
pools,
|
||||
app_handle: app_handle.clone(),
|
||||
}
|
||||
Self { pools }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -188,7 +183,7 @@ impl GrpcHandle {
|
||||
uri: &Uri,
|
||||
paths: Vec<PathBuf>,
|
||||
) -> Result<Vec<ServiceDefinition>, String> {
|
||||
let pool = fill_pool_from_files(&self.app_handle, paths).await?;
|
||||
let pool = fill_pool_from_files(paths).await?;
|
||||
self.pools.insert(self.get_pool_key(id, uri), pool.clone());
|
||||
Ok(self.services_from_pool(&pool))
|
||||
}
|
||||
@@ -242,7 +237,7 @@ impl GrpcHandle {
|
||||
None => match proto_files.len() {
|
||||
0 => fill_pool(&uri).await?,
|
||||
_ => {
|
||||
let pool = fill_pool_from_files(&self.app_handle, proto_files).await?;
|
||||
let pool = fill_pool_from_files(proto_files).await?;
|
||||
self.pools.insert(id.to_string(), pool.clone());
|
||||
pool
|
||||
}
|
||||
|
||||
@@ -1,48 +1,36 @@
|
||||
use std::env::temp_dir;
|
||||
use std::ops::Deref;
|
||||
use std::path::PathBuf;
|
||||
use std::str::{from_utf8, FromStr};
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use hyper::client::HttpConnector;
|
||||
use hyper::Client;
|
||||
use hyper::client::HttpConnector;
|
||||
use hyper_rustls::{HttpsConnector, HttpsConnectorBuilder};
|
||||
use log::{debug, info, warn};
|
||||
use prost::Message;
|
||||
use prost_reflect::{DescriptorPool, MethodDescriptor};
|
||||
use prost_types::{FileDescriptorProto, FileDescriptorSet};
|
||||
use tauri::path::BaseDirectory;
|
||||
use tauri::{AppHandle, Manager};
|
||||
use tauri_plugin_shell::process::CommandEvent;
|
||||
use tauri_plugin_shell::ShellExt;
|
||||
use tauri::api::process::{Command, CommandEvent};
|
||||
use tokio::fs;
|
||||
use tokio_stream::StreamExt;
|
||||
use tonic::body::BoxBody;
|
||||
use tonic::codegen::http::uri::PathAndQuery;
|
||||
use tonic::transport::Uri;
|
||||
use tonic::Request;
|
||||
use tonic::transport::Uri;
|
||||
use tonic_reflection::pb::server_reflection_client::ServerReflectionClient;
|
||||
use tonic_reflection::pb::server_reflection_request::MessageRequest;
|
||||
use tonic_reflection::pb::server_reflection_response::MessageResponse;
|
||||
use tonic_reflection::pb::ServerReflectionRequest;
|
||||
|
||||
pub async fn fill_pool_from_files(
|
||||
app_handle: &AppHandle,
|
||||
paths: Vec<PathBuf>,
|
||||
) -> Result<DescriptorPool, String> {
|
||||
pub async fn fill_pool_from_files(paths: Vec<PathBuf>) -> Result<DescriptorPool, String> {
|
||||
let mut pool = DescriptorPool::new();
|
||||
let random_file_name = format!("{}.desc", uuid::Uuid::new_v4());
|
||||
let desc_path = temp_dir().join(random_file_name);
|
||||
let global_import_dir = app_handle
|
||||
.path()
|
||||
.resolve("protoc-vendored/include", BaseDirectory::Resource)
|
||||
.expect("failed to resolve protoc include directory");
|
||||
|
||||
let mut args = vec![
|
||||
"--include_imports".to_string(),
|
||||
"--include_source_info".to_string(),
|
||||
"-I".to_string(),
|
||||
global_import_dir.to_string_lossy().to_string(),
|
||||
"-o".to_string(),
|
||||
desc_path.to_string_lossy().to_string(),
|
||||
];
|
||||
@@ -65,9 +53,7 @@ pub async fn fill_pool_from_files(
|
||||
}
|
||||
}
|
||||
|
||||
let (mut rx, _child) = app_handle
|
||||
.shell()
|
||||
.sidecar("protoc")
|
||||
let (mut rx, _child) = Command::new_sidecar("protoc")
|
||||
.expect("protoc not found")
|
||||
.args(args)
|
||||
.spawn()
|
||||
@@ -76,16 +62,10 @@ pub async fn fill_pool_from_files(
|
||||
while let Some(event) = rx.recv().await {
|
||||
match event {
|
||||
CommandEvent::Stdout(line) => {
|
||||
info!(
|
||||
"protoc stdout: {}",
|
||||
from_utf8(line.as_slice()).unwrap_or_default().to_string()
|
||||
);
|
||||
info!("protoc stdout: {}", line);
|
||||
}
|
||||
CommandEvent::Stderr(line) => {
|
||||
info!(
|
||||
"protoc stderr: {}",
|
||||
from_utf8(line.as_slice()).unwrap_or_default().to_string()
|
||||
);
|
||||
info!("protoc stderr: {}", line);
|
||||
}
|
||||
CommandEvent::Error(e) => {
|
||||
return Err(e.to_string());
|
||||
@@ -96,7 +76,10 @@ pub async fn fill_pool_from_files(
|
||||
// success
|
||||
}
|
||||
Some(code) => {
|
||||
return Err(format!("protoc failed with exit code: {}", code,));
|
||||
return Err(format!(
|
||||
"protoc failed with exit code: {}",
|
||||
code,
|
||||
));
|
||||
}
|
||||
None => {
|
||||
return Err("protoc failed with no exit code".to_string());
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
ALTER TABLE settings
|
||||
ADD COLUMN theme_dark TEXT DEFAULT 'yaak-dark' NOT NULL;
|
||||
ALTER TABLE settings
|
||||
ADD COLUMN theme_light TEXT DEFAULT 'yaak-light' NOT NULL;
|
||||
@@ -1,4 +0,0 @@
|
||||
ALTER TABLE settings ADD COLUMN interface_font_size INTEGER DEFAULT 15 NOT NULL;
|
||||
ALTER TABLE settings ADD COLUMN interface_scale INTEGER DEFAULT 1 NOT NULL;
|
||||
ALTER TABLE settings ADD COLUMN editor_font_size INTEGER DEFAULT 13 NOT NULL;
|
||||
ALTER TABLE settings ADD COLUMN editor_soft_wrap BOOLEAN DEFAULT 1 NOT NULL;
|
||||
@@ -1,36 +0,0 @@
|
||||
const o = `\\
|
||||
`;
|
||||
function d(n) {
|
||||
var h, f, r, u, l, s;
|
||||
const t = ["curl"];
|
||||
n.method && t.push("-X", n.method), n.url && t.push(i(n.url)), t.push(o);
|
||||
for (const a of (n.urlParameters ?? []).filter(p))
|
||||
t.push("--url-query", i(`${a.name}=${a.value}`)), t.push(o);
|
||||
for (const a of (n.headers ?? []).filter(p))
|
||||
t.push("--header", i(`${a.name}: ${a.value}`)), t.push(o);
|
||||
if (Array.isArray((h = n.body) == null ? void 0 : h.form)) {
|
||||
const a = n.bodyType === "multipart/form-data" ? "--form" : "--data";
|
||||
for (const e of (((f = n.body) == null ? void 0 : f.form) ?? []).filter(p)) {
|
||||
if (e.file) {
|
||||
let c = `${e.name}=@${e.file}`;
|
||||
c += e.contentType ? `;type=${e.contentType}` : "", t.push(a, c);
|
||||
} else
|
||||
t.push(a, i(`${e.name}=${e.value}`));
|
||||
t.push(o);
|
||||
}
|
||||
} else
|
||||
typeof ((r = n.body) == null ? void 0 : r.text) == "string" && (t.push("--data-raw", `$${i(n.body.text)}`), t.push(o));
|
||||
return (n.authenticationType === "basic" || n.authenticationType === "digest") && (n.authenticationType === "digest" && t.push("--digest"), t.push(
|
||||
"--user",
|
||||
i(`${((u = n.authentication) == null ? void 0 : u.username) ?? ""}:${((l = n.authentication) == null ? void 0 : l.password) ?? ""}`)
|
||||
), t.push(o)), n.authenticationType === "bearer" && (t.push("--header", i(`Authorization: Bearer ${((s = n.authentication) == null ? void 0 : s.token) ?? ""}`)), t.push(o)), t[t.length - 1] === o && t.splice(t.length - 1, 1), t.join(" ");
|
||||
}
|
||||
function i(n) {
|
||||
return `'${n.replace(/'/g, "\\'")}'`;
|
||||
}
|
||||
function p(n) {
|
||||
return n.enabled !== !1 && !!n.name;
|
||||
}
|
||||
export {
|
||||
d as pluginHookExport
|
||||
};
|
||||
@@ -1,297 +0,0 @@
|
||||
var j = "(?:" + [
|
||||
"\\|\\|",
|
||||
"\\&\\&",
|
||||
";;",
|
||||
"\\|\\&",
|
||||
"\\<\\(",
|
||||
"\\<\\<\\<",
|
||||
">>",
|
||||
">\\&",
|
||||
"<\\&",
|
||||
"[&;()|<>]"
|
||||
].join("|") + ")", D = new RegExp("^" + j + "$"), q = "|&;()<> \\t", M = '"((\\\\"|[^"])*?)"', Q = "'((\\\\'|[^'])*?)'", V = /^#$/, _ = "'", G = '"', U = "$", $ = "", z = 4294967296;
|
||||
for (var L = 0; L < 4; L++)
|
||||
$ += (z * Math.random()).toString(16);
|
||||
var J = new RegExp("^" + $);
|
||||
function X(n, s) {
|
||||
for (var e = s.lastIndex, t = [], c; c = s.exec(n); )
|
||||
t.push(c), s.lastIndex === c.index && (s.lastIndex += 1);
|
||||
return s.lastIndex = e, t;
|
||||
}
|
||||
function F(n, s, e) {
|
||||
var t = typeof n == "function" ? n(e) : n[e];
|
||||
return typeof t > "u" && e != "" ? t = "" : typeof t > "u" && (t = "$"), typeof t == "object" ? s + $ + JSON.stringify(t) + $ : s + t;
|
||||
}
|
||||
function K(n, s, e) {
|
||||
e || (e = {});
|
||||
var t = e.escape || "\\", c = "(\\" + t + `['"` + q + `]|[^\\s'"` + q + "])+", m = new RegExp([
|
||||
"(" + j + ")",
|
||||
// control chars
|
||||
"(" + c + "|" + M + "|" + Q + ")+"
|
||||
].join("|"), "g"), f = X(n, m);
|
||||
if (f.length === 0)
|
||||
return [];
|
||||
s || (s = {});
|
||||
var w = !1;
|
||||
return f.map(function(r) {
|
||||
var a = r[0];
|
||||
if (!a || w)
|
||||
return;
|
||||
if (D.test(a))
|
||||
return { op: a };
|
||||
var x = !1, C = !1, d = "", O = !1, i;
|
||||
function T() {
|
||||
i += 1;
|
||||
var v, p, R = a.charAt(i);
|
||||
if (R === "{") {
|
||||
if (i += 1, a.charAt(i) === "}")
|
||||
throw new Error("Bad substitution: " + a.slice(i - 2, i + 1));
|
||||
if (v = a.indexOf("}", i), v < 0)
|
||||
throw new Error("Bad substitution: " + a.slice(i));
|
||||
p = a.slice(i, v), i = v;
|
||||
} else if (/[*@#?$!_-]/.test(R))
|
||||
p = R, i += 1;
|
||||
else {
|
||||
var g = a.slice(i);
|
||||
v = g.match(/[^\w\d_]/), v ? (p = g.slice(0, v.index), i += v.index - 1) : (p = g, i = a.length);
|
||||
}
|
||||
return F(s, "", p);
|
||||
}
|
||||
for (i = 0; i < a.length; i++) {
|
||||
var u = a.charAt(i);
|
||||
if (O = O || !x && (u === "*" || u === "?"), C)
|
||||
d += u, C = !1;
|
||||
else if (x)
|
||||
u === x ? x = !1 : x == _ ? d += u : u === t ? (i += 1, u = a.charAt(i), u === G || u === t || u === U ? d += u : d += t + u) : u === U ? d += T() : d += u;
|
||||
else if (u === G || u === _)
|
||||
x = u;
|
||||
else {
|
||||
if (D.test(u))
|
||||
return { op: a };
|
||||
if (V.test(u)) {
|
||||
w = !0;
|
||||
var b = { comment: n.slice(r.index + i + 1) };
|
||||
return d.length ? [d, b] : [b];
|
||||
} else
|
||||
u === t ? C = !0 : u === U ? d += T() : d += u;
|
||||
}
|
||||
}
|
||||
return O ? { op: "glob", pattern: d } : d;
|
||||
}).reduce(function(r, a) {
|
||||
return typeof a > "u" ? r : r.concat(a);
|
||||
}, []);
|
||||
}
|
||||
var Y = function(s, e, t) {
|
||||
var c = K(s, e, t);
|
||||
return typeof e != "function" ? c : c.reduce(function(m, f) {
|
||||
if (typeof f == "object")
|
||||
return m.concat(f);
|
||||
var w = f.split(RegExp("(" + $ + ".*?" + $ + ")", "g"));
|
||||
return w.length === 1 ? m.concat(w[0]) : m.concat(w.filter(Boolean).map(function(r) {
|
||||
return J.test(r) ? JSON.parse(r.split($)[1]) : r;
|
||||
}));
|
||||
}, []);
}, Z = Y;
const ae = "curl", se = "cURL", ie = "cURL command line tool", H = ["d", "data", "data-raw", "data-urlencode", "data-binary", "data-ascii"], ee = [
  ["url"],
  // Specify the URL explicitly
  ["user", "u"],
  // Authentication
  ["digest"],
  // Apply auth as digest
  ["header", "H"],
  ["cookie", "b"],
  ["get", "G"],
  // Put the post data in the URL
  ["d", "data"],
  // Add url encoded data
  ["data-raw"],
  ["data-urlencode"],
  ["data-binary"],
  ["data-ascii"],
  ["form", "F"],
  // Add multipart data
  ["request", "X"],
  // Request method
  H
].flatMap((n) => n);
function oe(n) {
  if (!n.match(/^\s*curl /))
    return null;
  const s = [], e = n.replace(/\ncurl/g, "; curl");
  let t = [];
  const m = Z(e).flatMap((r) => typeof r == "string" && r.startsWith("-") && !r.startsWith("--") && r.length > 2 ? [r.slice(0, 2), r.slice(2)] : r);
  for (const r of m) {
    if (typeof r == "string") {
      r.startsWith("$") ? t.push(r.slice(1)) : t.push(r);
      continue;
    }
    if ("comment" in r)
      continue;
    const { op: a } = r;
    if (a === ";") {
      s.push(t), t = [];
      continue;
    }
    if (a != null && a.startsWith("$")) {
      const x = a.slice(2, a.length - 1).replace(/\\'/g, "'");
      t.push(x);
      continue;
    }
    a === "glob" && t.push(r.pattern);
  }
  s.push(t);
  const f = {
    model: "workspace",
    id: N("workspace"),
    name: "Curl Import"
  };
  return {
    resources: {
      httpRequests: s.filter((r) => r[0] === "curl").map((r) => te(r, f.id)),
      workspaces: [f]
    }
  };
}
function te(n, s) {
  const e = {}, t = [];
  for (let o = 1; o < n.length; o++) {
    let l = n[o];
    if (typeof l == "string" && (l = l.trim()), typeof l == "string" && l.match(/^-{1,2}[\w-]+/)) {
      const E = l[0] === "-" && l[1] !== "-";
      let h = l.replace(/^-{1,2}/, "");
      if (!ee.includes(h))
        continue;
      let y;
      const S = n[o + 1];
      E && h.length > 1 ? (y = h.slice(1), h = h.slice(0, 1)) : typeof S == "string" && !S.startsWith("-") ? (y = S, o++) : y = !0, e[h] = e[h] || [], e[h].push(y);
    } else
      l && t.push(l);
  }
  let c, m;
  const f = A(e, t[0] || "", ["url"]), [w, r] = W(f, "?");
  c = (r == null ? void 0 : r.split("&").map((o) => {
    const l = W(o, "=");
    return { name: l[0] ?? "", value: l[1] ?? "", enabled: !0 };
  })) ?? [], m = w ?? f;
  const [a, x] = A(e, "", ["u", "user"]).split(/:(.*)$/), C = A(e, !1, ["digest"]), d = a ? C ? "digest" : "basic" : null, O = a ? {
    username: a.trim(),
    password: (x ?? "").trim()
  } : {}, i = [
    ...e.header || [],
    ...e.H || []
  ].map((o) => {
    const [l, E] = o.split(/:(.*)$/);
    return E ? {
      name: (l ?? "").trim(),
      value: E.trim(),
      enabled: !0
    } : {
      name: (l ?? "").trim().replace(/;$/, ""),
      value: "",
      enabled: !0
    };
  }), T = [
    ...e.cookie || [],
    ...e.b || []
  ].map((o) => {
    const l = o.split("=", 1)[0], E = o.replace(`${l}=`, "");
    return `${l}=${E}`;
  }).join("; "), u = i.find((o) => o.name.toLowerCase() === "cookie");
  T && u ? u.value += `; ${T}` : T && i.push({
    name: "Cookie",
    value: T,
    enabled: !0
  });
  const b = ne(e), v = i.find((o) => o.name.toLowerCase() === "content-type"), p = v ? v.value.split(";")[0] : null, R = [
    ...e.form || [],
    ...e.F || []
  ].map((o) => {
    const l = o.split("="), E = l[0] ?? "", h = l[1] ?? "", y = {
      name: E,
      enabled: !0
    };
    return h.indexOf("@") === 0 ? y.file = h.slice(1) : y.value = h, y;
  });
  let g = {}, I = null;
  const B = A(e, !1, ["G", "get"]);
  b.length > 0 && B ? c.push(...b) : b.length > 0 && (p == null || p === "application/x-www-form-urlencoded") ? (I = p ?? "application/x-www-form-urlencoded", g = {
    form: b.map((o) => ({
      ...o,
      name: decodeURIComponent(o.name || ""),
      value: decodeURIComponent(o.value || "")
    }))
  }, i.push({
    name: "Content-Type",
    value: "application/x-www-form-urlencoded",
    enabled: !0
  })) : b.length > 0 ? (I = p === "application/json" || p === "text/xml" || p === "text/plain" ? p : "other", g = {
    text: b.map(({ name: o, value: l }) => o && l ? `${o}=${l}` : o || l).join("&")
  }) : R.length && (I = p ?? "multipart/form-data", g = {
    form: R
  }, p == null && i.push({
    name: "Content-Type",
    value: "multipart/form-data",
    enabled: !0
  }));
  let P = A(e, "", ["X", "request"]).toUpperCase();
  return P === "" && g && (P = "text" in g || "form" in g ? "POST" : "GET"), {
    id: N("http_request"),
    model: "http_request",
    workspaceId: s,
    name: "",
    urlParameters: c,
    url: m,
    method: P,
    headers: i,
    authentication: O,
    authenticationType: d,
    body: g,
    bodyType: I,
    folderId: null,
    sortPriority: 0
  };
}
const ne = (n) => {
  let s = [];
  for (const e of H) {
    const t = n[e];
    if (!(!t || t.length === 0))
      for (const c of t) {
        if (typeof c != "string")
          continue;
        const [m, f] = c.split("=");
        c.startsWith("@") ? s.push({
          name: m ?? "",
          value: "",
          filePath: c.slice(1),
          enabled: !0
        }) : s.push({
          name: m ?? "",
          value: e === "data-urlencode" ? encodeURIComponent(f ?? "") : f ?? "",
          enabled: !0
        });
      }
  }
  return s;
}, A = (n, s, e) => {
  for (const t of e)
    if (n[t] && n[t].length)
      return n[t][0];
  return s;
};
function W(n, s) {
  const e = n.indexOf(s);
  return e > -1 ? [n.slice(0, e), n.slice(e + 1)] : [n];
}
const k = {};
function N(n) {
  return k[n] = (k[n] ?? -1) + 1, `GENERATE_ID::${n.toUpperCase()}_${k[n]}`;
}
export {
  ie as description,
  ae as id,
  te as importCommand,
  se as name,
  oe as pluginHookImport
};
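For orientation, here is a minimal usage sketch of the importer module above. It is not part of the bundled file: the `./importers/curl.js` path is hypothetical, and the generated ids are placeholders produced by the `GENERATE_ID::` counter in `N()`.

// Hedged usage sketch; assumes the module above is loadable from this (made-up) path.
import { pluginHookImport } from "./importers/curl.js";

const imported = pluginHookImport(
  'curl -X POST "https://example.com/login?debug=1" -H "Accept: application/json"'
);

// Based on the code above, `imported.resources` should hold one workspace named
// "Curl Import" and one http_request with method "POST", url "https://example.com/login",
// a `debug=1` url parameter, and an "Accept" header.
console.log(JSON.stringify(imported, null, 2));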
File diff suppressed because it is too large
@@ -1,73 +1,68 @@
const S = "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", _ = "https://schema.getpostman.com/json/collection/v2.0.0/collection.json", O = [_, S];
function v(e) {
var g;
const t = k(e);
const T = "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", w = "https://schema.getpostman.com/json/collection/v2.0.0/collection.json", A = [w, T];
function q(e) {
const t = b(e);
if (t == null)
return;
const o = i(t.info);
if (!O.includes(o.schema) || !Array.isArray(t.item))
const n = a(t.info);
if (!A.includes(n.schema) || !Array.isArray(t.item))
return;
const u = A(t.auth), s = {
const i = {
workspaces: [],
environments: [],
httpRequests: [],
requests: [],
folders: []
}, n = {
}, c = {
model: "workspace",
id: h("workspace"),
name: o.name || "Postman Import",
description: o.description || "",
variables: ((g = t.variable) == null ? void 0 : g.map((r) => ({
name: r.key,
value: r.value
}))) ?? []
id: m("wk"),
name: n.name || "Postman Import",
description: n.description || ""
};
s.workspaces.push(n);
const T = (r, p = null) => {
i.workspaces.push(c);
const f = (r, u = null) => {
if (typeof r.name == "string" && Array.isArray(r.item)) {
const a = {
const o = {
model: "folder",
workspaceId: n.id,
id: h("folder"),
workspaceId: c.id,
id: m("fl"),
name: r.name,
folderId: p
folderId: u
};
s.folders.push(a);
for (const l of r.item)
T(l, a.id);
i.folders.push(o);
for (const s of r.item)
f(s, o.id);
} else if (typeof r.name == "string" && "request" in r) {
const a = i(r.request), l = j(a.body), w = A(a.auth), d = w.authenticationType == null ? u : w, q = {
const o = a(r.request), s = k(o.body), d = S(o.auth), g = {
model: "http_request",
id: h("http_request"),
workspaceId: n.id,
folderId: p,
id: m("rq"),
workspaceId: c.id,
folderId: u,
name: r.name,
method: a.method || "GET",
url: typeof a.url == "string" ? a.url : i(a.url).raw,
body: l.body,
bodyType: l.bodyType,
method: o.method || "GET",
url: typeof o.url == "string" ? o.url : a(o.url).raw,
body: s.body,
bodyType: s.bodyType,
authentication: d.authentication,
authenticationType: d.authenticationType,
headers: [
...l.headers,
...s.headers,
...d.headers,
...b(a.header).map((m) => ({
name: m.key,
value: m.value,
enabled: !m.disabled
...y(o.header).map((p) => ({
name: p.key,
value: p.value,
enabled: !p.disabled
}))
]
};
s.httpRequests.push(q);
i.requests.push(g);
} else
console.log("Unknown item", r, p);
console.log("Unknown item", r, u);
};
for (const r of t.item)
T(r);
return { resources: f(s) };
f(r);
return { resources: h(i) };
}
function A(e) {
const t = i(e);
function S(e) {
const t = a(e);
return "basic" in t ? {
headers: [],
authenticationType: "basic",
@@ -75,17 +70,10 @@ function A(e) {
username: t.basic.username || "",
password: t.basic.password || ""
}
} : "bearer" in t ? {
headers: [],
authenticationType: "bearer",
authentication: {
token: t.bearer.token || ""
}
} : { headers: [], authenticationType: null, authentication: {} };
}
function j(e) {
var o, c, u, s;
const t = i(e);
function k(e) {
const t = a(e);
return "graphql" in t ? {
headers: [
{
@@ -97,7 +85,7 @@ function j(e) {
bodyType: "graphql",
body: {
text: JSON.stringify(
{ query: t.graphql.query, variables: k(t.graphql.variables) },
{ query: t.graphql.query, variables: b(t.graphql.variables) },
null,
2
)
@@ -112,7 +100,7 @@ function j(e) {
],
bodyType: "application/x-www-form-urlencoded",
body: {
form: b(t.urlencoded).map((n) => ({
form: y(t.urlencoded).map((n) => ({
enabled: !n.disabled,
name: n.key ?? "",
value: n.value ?? ""
@@ -128,10 +116,9 @@ function j(e) {
],
bodyType: "multipart/form-data",
body: {
form: b(t.formdata).map(
form: y(t.formdata).map(
(n) => n.src != null ? {
enabled: !n.disabled,
contentType: n.contentType ?? null,
name: n.key ?? "",
file: n.src ?? ""
} : {
@@ -141,42 +128,33 @@ function j(e) {
}
)
}
} : "raw" in t ? {
headers: [
{
name: "Content-Type",
value: ((c = (o = t.options) == null ? void 0 : o.raw) == null ? void 0 : c.language) === "json" ? "application/json" : "",
enabled: !0
}
],
bodyType: ((s = (u = t.options) == null ? void 0 : u.raw) == null ? void 0 : s.language) === "json" ? "application/json" : "other",
body: {
text: t.raw ?? ""
}
} : { headers: [], bodyType: null, body: {} };
}
function k(e) {
function b(e) {
try {
return i(JSON.parse(e));
return a(JSON.parse(e));
} catch {
}
return null;
}
function i(e) {
function a(e) {
return Object.prototype.toString.call(e) === "[object Object]" ? e : {};
}
function b(e) {
function y(e) {
return Object.prototype.toString.call(e) === "[object Array]" ? e : [];
}
function f(e) {
return typeof e == "string" ? e.replace(/{{\s*(_\.)?([^}]+)\s*}}/g, "${[$2]}") : Array.isArray(e) && e != null ? e.map(f) : typeof e == "object" && e != null ? Object.fromEntries(
Object.entries(e).map(([t, o]) => [t, f(o)])
function h(e) {
return typeof e == "string" ? e.replace(/{{\s*(_\.)?([^}]+)\s*}}/g, "${[$2]}") : Array.isArray(e) && e != null ? e.map(h) : typeof e == "object" && e != null ? Object.fromEntries(
Object.entries(e).map(([t, n]) => [t, h(n)])
) : e;
}
const y = {};
function h(e) {
return y[e] = (y[e] ?? -1) + 1, `GENERATE_ID::${e.toUpperCase()}_${y[e]}`;
function m(e) {
const t = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
let n = `${e}_`;
for (let l = 0; l < 10; l++)
n += t[Math.floor(Math.random() * t.length)];
return n;
}
export {
v as pluginHookImport
q as pluginHookImport
};
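The hunk above reworks the Postman collection importer; old and new lines are interleaved without +/- markers, so both variable namings appear. As a rough, hedged sketch of what the hook consumes and produces (the `./importers/postman.js` path and the sample collection are assumptions; only the v2.0/v2.1 schema URLs referenced above are recognized):

// Hedged usage sketch; the hook expects a JSON string of a Postman v2.0/v2.1 collection.
import { pluginHookImport } from "./importers/postman.js";

const collection = {
  info: {
    name: "Demo API",
    schema: "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
  },
  item: [
    { name: "Ping", request: { method: "GET", url: "https://example.com/ping" } }
  ]
};

const imported = pluginHookImport(JSON.stringify(collection));
// Per the new lines in the hunk, the result is one workspace plus the imported
// request pushed onto `resources.requests` (the old code used `httpRequests`).
console.log(imported.resources);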
@@ -5,8 +5,8 @@ function u(r) {
} catch {
return;
}
if (!(!t(e) || !("yaakSchema" in e)))
return "requests" in e.resources && (e.resources.httpRequests = e.resources.requests, delete e.resources.requests), { resources: e.resources };
if (t(e) && "yaakSchema" in e && (e.yaakSchema === 1 && (e.resources.httpRequests = e.resources.requests, e.yaakSchema = 2), e.yaakSchema === 2))
return { resources: e.resources };
}
function t(r) {
return Object.prototype.toString.call(r) === "[object Object]";
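The small hunk above adjusts the migration of legacy Yaak exports: a payload with `yaakSchema: 1` gets its `resources.requests` copied onto `resources.httpRequests` and its `yaakSchema` bumped to 2 before being returned. A hypothetical before/after illustration (all field values are made up):

// Hypothetical v1 export accepted by the migration above.
const v1 = {
  yaakSchema: 1,
  resources: { workspaces: [], requests: [{ id: "rq_demo", url: "https://example.com" }] }
};
// After the new branch runs, the same object reads as schema 2:
// { yaakSchema: 2, resources: { workspaces: [], requests: [...], httpRequests: [...] } }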
@@ -1,162 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option go_package = "google.golang.org/protobuf/types/known/anypb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "AnyProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
|
||||
// `Any` contains an arbitrary serialized protocol buffer message along with a
|
||||
// URL that describes the type of the serialized message.
|
||||
//
|
||||
// Protobuf library provides support to pack/unpack Any values in the form
|
||||
// of utility functions or additional generated methods of the Any type.
|
||||
//
|
||||
// Example 1: Pack and unpack a message in C++.
|
||||
//
|
||||
// Foo foo = ...;
|
||||
// Any any;
|
||||
// any.PackFrom(foo);
|
||||
// ...
|
||||
// if (any.UnpackTo(&foo)) {
|
||||
// ...
|
||||
// }
|
||||
//
|
||||
// Example 2: Pack and unpack a message in Java.
|
||||
//
|
||||
// Foo foo = ...;
|
||||
// Any any = Any.pack(foo);
|
||||
// ...
|
||||
// if (any.is(Foo.class)) {
|
||||
// foo = any.unpack(Foo.class);
|
||||
// }
|
||||
// // or ...
|
||||
// if (any.isSameTypeAs(Foo.getDefaultInstance())) {
|
||||
// foo = any.unpack(Foo.getDefaultInstance());
|
||||
// }
|
||||
//
|
||||
// Example 3: Pack and unpack a message in Python.
|
||||
//
|
||||
// foo = Foo(...)
|
||||
// any = Any()
|
||||
// any.Pack(foo)
|
||||
// ...
|
||||
// if any.Is(Foo.DESCRIPTOR):
|
||||
// any.Unpack(foo)
|
||||
// ...
|
||||
//
|
||||
// Example 4: Pack and unpack a message in Go
|
||||
//
|
||||
// foo := &pb.Foo{...}
|
||||
// any, err := anypb.New(foo)
|
||||
// if err != nil {
|
||||
// ...
|
||||
// }
|
||||
// ...
|
||||
// foo := &pb.Foo{}
|
||||
// if err := any.UnmarshalTo(foo); err != nil {
|
||||
// ...
|
||||
// }
|
||||
//
|
||||
// The pack methods provided by protobuf library will by default use
|
||||
// 'type.googleapis.com/full.type.name' as the type URL and the unpack
|
||||
// methods only use the fully qualified type name after the last '/'
|
||||
// in the type URL, for example "foo.bar.com/x/y.z" will yield type
|
||||
// name "y.z".
|
||||
//
|
||||
// JSON
|
||||
// ====
|
||||
// The JSON representation of an `Any` value uses the regular
|
||||
// representation of the deserialized, embedded message, with an
|
||||
// additional field `@type` which contains the type URL. Example:
|
||||
//
|
||||
// package google.profile;
|
||||
// message Person {
|
||||
// string first_name = 1;
|
||||
// string last_name = 2;
|
||||
// }
|
||||
//
|
||||
// {
|
||||
// "@type": "type.googleapis.com/google.profile.Person",
|
||||
// "firstName": <string>,
|
||||
// "lastName": <string>
|
||||
// }
|
||||
//
|
||||
// If the embedded message type is well-known and has a custom JSON
|
||||
// representation, that representation will be embedded adding a field
|
||||
// `value` which holds the custom JSON in addition to the `@type`
|
||||
// field. Example (for message [google.protobuf.Duration][]):
|
||||
//
|
||||
// {
|
||||
// "@type": "type.googleapis.com/google.protobuf.Duration",
|
||||
// "value": "1.212s"
|
||||
// }
|
||||
//
|
||||
message Any {
|
||||
// A URL/resource name that uniquely identifies the type of the serialized
|
||||
// protocol buffer message. This string must contain at least
|
||||
// one "/" character. The last segment of the URL's path must represent
|
||||
// the fully qualified name of the type (as in
|
||||
// `path/google.protobuf.Duration`). The name should be in a canonical form
|
||||
// (e.g., leading "." is not accepted).
|
||||
//
|
||||
// In practice, teams usually precompile into the binary all types that they
|
||||
// expect it to use in the context of Any. However, for URLs which use the
|
||||
// scheme `http`, `https`, or no scheme, one can optionally set up a type
|
||||
// server that maps type URLs to message definitions as follows:
|
||||
//
|
||||
// * If no scheme is provided, `https` is assumed.
|
||||
// * An HTTP GET on the URL must yield a [google.protobuf.Type][]
|
||||
// value in binary format, or produce an error.
|
||||
// * Applications are allowed to cache lookup results based on the
|
||||
// URL, or have them precompiled into a binary to avoid any
|
||||
// lookup. Therefore, binary compatibility needs to be preserved
|
||||
// on changes to types. (Use versioned type names to manage
|
||||
// breaking changes.)
|
||||
//
|
||||
// Note: this functionality is not currently available in the official
|
||||
// protobuf release, and it is not used for type URLs beginning with
|
||||
// type.googleapis.com. As of May 2023, there are no widely used type server
|
||||
// implementations and no plans to implement one.
|
||||
//
|
||||
// Schemes other than `http`, `https` (or the empty scheme) might be
|
||||
// used with implementation specific semantics.
|
||||
//
|
||||
string type_url = 1;
|
||||
|
||||
// Must be a valid serialized protocol buffer of the above specified type.
|
||||
bytes value = 2;
|
||||
}
|
||||
@@ -1,207 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
import "google/protobuf/source_context.proto";
|
||||
import "google/protobuf/type.proto";
|
||||
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "ApiProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
option go_package = "google.golang.org/protobuf/types/known/apipb";
|
||||
|
||||
// Api is a light-weight descriptor for an API Interface.
|
||||
//
|
||||
// Interfaces are also described as "protocol buffer services" in some contexts,
|
||||
// such as by the "service" keyword in a .proto file, but they are different
|
||||
// from API Services, which represent a concrete implementation of an interface
|
||||
// as opposed to simply a description of methods and bindings. They are also
|
||||
// sometimes simply referred to as "APIs" in other contexts, such as the name of
|
||||
// this message itself. See https://cloud.google.com/apis/design/glossary for
|
||||
// detailed terminology.
|
||||
message Api {
|
||||
// The fully qualified name of this interface, including package name
|
||||
// followed by the interface's simple name.
|
||||
string name = 1;
|
||||
|
||||
// The methods of this interface, in unspecified order.
|
||||
repeated Method methods = 2;
|
||||
|
||||
// Any metadata attached to the interface.
|
||||
repeated Option options = 3;
|
||||
|
||||
// A version string for this interface. If specified, must have the form
|
||||
// `major-version.minor-version`, as in `1.10`. If the minor version is
|
||||
// omitted, it defaults to zero. If the entire version field is empty, the
|
||||
// major version is derived from the package name, as outlined below. If the
|
||||
// field is not empty, the version in the package name will be verified to be
|
||||
// consistent with what is provided here.
|
||||
//
|
||||
// The versioning schema uses [semantic
|
||||
// versioning](http://semver.org) where the major version number
|
||||
// indicates a breaking change and the minor version an additive,
|
||||
// non-breaking change. Both version numbers are signals to users
|
||||
// what to expect from different versions, and should be carefully
|
||||
// chosen based on the product plan.
|
||||
//
|
||||
// The major version is also reflected in the package name of the
|
||||
// interface, which must end in `v<major-version>`, as in
|
||||
// `google.feature.v1`. For major versions 0 and 1, the suffix can
|
||||
// be omitted. Zero major versions must only be used for
|
||||
// experimental, non-GA interfaces.
|
||||
//
|
||||
string version = 4;
|
||||
|
||||
// Source context for the protocol buffer service represented by this
|
||||
// message.
|
||||
SourceContext source_context = 5;
|
||||
|
||||
// Included interfaces. See [Mixin][].
|
||||
repeated Mixin mixins = 6;
|
||||
|
||||
// The source syntax of the service.
|
||||
Syntax syntax = 7;
|
||||
}
|
||||
|
||||
// Method represents a method of an API interface.
|
||||
message Method {
|
||||
// The simple name of this method.
|
||||
string name = 1;
|
||||
|
||||
// A URL of the input message type.
|
||||
string request_type_url = 2;
|
||||
|
||||
// If true, the request is streamed.
|
||||
bool request_streaming = 3;
|
||||
|
||||
// The URL of the output message type.
|
||||
string response_type_url = 4;
|
||||
|
||||
// If true, the response is streamed.
|
||||
bool response_streaming = 5;
|
||||
|
||||
// Any metadata attached to the method.
|
||||
repeated Option options = 6;
|
||||
|
||||
// The source syntax of this method.
|
||||
Syntax syntax = 7;
|
||||
}
|
||||
|
||||
// Declares an API Interface to be included in this interface. The including
|
||||
// interface must redeclare all the methods from the included interface, but
|
||||
// documentation and options are inherited as follows:
|
||||
//
|
||||
// - If after comment and whitespace stripping, the documentation
|
||||
// string of the redeclared method is empty, it will be inherited
|
||||
// from the original method.
|
||||
//
|
||||
// - Each annotation belonging to the service config (http,
|
||||
// visibility) which is not set in the redeclared method will be
|
||||
// inherited.
|
||||
//
|
||||
// - If an http annotation is inherited, the path pattern will be
|
||||
// modified as follows. Any version prefix will be replaced by the
|
||||
// version of the including interface plus the [root][] path if
|
||||
// specified.
|
||||
//
|
||||
// Example of a simple mixin:
|
||||
//
|
||||
// package google.acl.v1;
|
||||
// service AccessControl {
|
||||
// // Get the underlying ACL object.
|
||||
// rpc GetAcl(GetAclRequest) returns (Acl) {
|
||||
// option (google.api.http).get = "/v1/{resource=**}:getAcl";
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// package google.storage.v2;
|
||||
// service Storage {
|
||||
// rpc GetAcl(GetAclRequest) returns (Acl);
|
||||
//
|
||||
// // Get a data record.
|
||||
// rpc GetData(GetDataRequest) returns (Data) {
|
||||
// option (google.api.http).get = "/v2/{resource=**}";
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// Example of a mixin configuration:
|
||||
//
|
||||
// apis:
|
||||
// - name: google.storage.v2.Storage
|
||||
// mixins:
|
||||
// - name: google.acl.v1.AccessControl
|
||||
//
|
||||
// The mixin construct implies that all methods in `AccessControl` are
|
||||
// also declared with same name and request/response types in
|
||||
// `Storage`. A documentation generator or annotation processor will
|
||||
// see the effective `Storage.GetAcl` method after inherting
|
||||
// documentation and annotations as follows:
|
||||
//
|
||||
// service Storage {
|
||||
// // Get the underlying ACL object.
|
||||
// rpc GetAcl(GetAclRequest) returns (Acl) {
|
||||
// option (google.api.http).get = "/v2/{resource=**}:getAcl";
|
||||
// }
|
||||
// ...
|
||||
// }
|
||||
//
|
||||
// Note how the version in the path pattern changed from `v1` to `v2`.
|
||||
//
|
||||
// If the `root` field in the mixin is specified, it should be a
|
||||
// relative path under which inherited HTTP paths are placed. Example:
|
||||
//
|
||||
// apis:
|
||||
// - name: google.storage.v2.Storage
|
||||
// mixins:
|
||||
// - name: google.acl.v1.AccessControl
|
||||
// root: acls
|
||||
//
|
||||
// This implies the following inherited HTTP annotation:
|
||||
//
|
||||
// service Storage {
|
||||
// // Get the underlying ACL object.
|
||||
// rpc GetAcl(GetAclRequest) returns (Acl) {
|
||||
// option (google.api.http).get = "/v2/acls/{resource=**}:getAcl";
|
||||
// }
|
||||
// ...
|
||||
// }
|
||||
message Mixin {
|
||||
// The fully qualified name of the interface which is included.
|
||||
string name = 1;
|
||||
|
||||
// If non-empty specifies a path under which inherited HTTP paths
|
||||
// are rooted.
|
||||
string root = 2;
|
||||
}
|
||||
@@ -1,168 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
//
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file or at
|
||||
// https://developers.google.com/open-source/licenses/bsd
|
||||
|
||||
// Author: kenton@google.com (Kenton Varda)
|
||||
//
|
||||
// protoc (aka the Protocol Compiler) can be extended via plugins. A plugin is
|
||||
// just a program that reads a CodeGeneratorRequest from stdin and writes a
|
||||
// CodeGeneratorResponse to stdout.
|
||||
//
|
||||
// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead
|
||||
// of dealing with the raw protocol defined here.
|
||||
//
|
||||
// A plugin executable needs only to be placed somewhere in the path. The
|
||||
// plugin should be named "protoc-gen-$NAME", and will then be used when the
|
||||
// flag "--${NAME}_out" is passed to protoc.
|
||||
|
||||
syntax = "proto2";
|
||||
|
||||
package google.protobuf.compiler;
|
||||
option java_package = "com.google.protobuf.compiler";
|
||||
option java_outer_classname = "PluginProtos";
|
||||
|
||||
option csharp_namespace = "Google.Protobuf.Compiler";
|
||||
option go_package = "google.golang.org/protobuf/types/pluginpb";
|
||||
|
||||
import "google/protobuf/descriptor.proto";
|
||||
|
||||
// The version number of protocol compiler.
|
||||
message Version {
|
||||
optional int32 major = 1;
|
||||
optional int32 minor = 2;
|
||||
optional int32 patch = 3;
|
||||
// A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
|
||||
// be empty for mainline stable releases.
|
||||
optional string suffix = 4;
|
||||
}
|
||||
|
||||
// An encoded CodeGeneratorRequest is written to the plugin's stdin.
|
||||
message CodeGeneratorRequest {
|
||||
// The .proto files that were explicitly listed on the command-line. The
|
||||
// code generator should generate code only for these files. Each file's
|
||||
// descriptor will be included in proto_file, below.
|
||||
repeated string file_to_generate = 1;
|
||||
|
||||
// The generator parameter passed on the command-line.
|
||||
optional string parameter = 2;
|
||||
|
||||
// FileDescriptorProtos for all files in files_to_generate and everything
|
||||
// they import. The files will appear in topological order, so each file
|
||||
// appears before any file that imports it.
|
||||
//
|
||||
// Note: the files listed in files_to_generate will include runtime-retention
|
||||
// options only, but all other files will include source-retention options.
|
||||
// The source_file_descriptors field below is available in case you need
|
||||
// source-retention options for files_to_generate.
|
||||
//
|
||||
// protoc guarantees that all proto_files will be written after
|
||||
// the fields above, even though this is not technically guaranteed by the
|
||||
// protobuf wire format. This theoretically could allow a plugin to stream
|
||||
// in the FileDescriptorProtos and handle them one by one rather than read
|
||||
// the entire set into memory at once. However, as of this writing, this
|
||||
// is not similarly optimized on protoc's end -- it will store all fields in
|
||||
// memory at once before sending them to the plugin.
|
||||
//
|
||||
// Type names of fields and extensions in the FileDescriptorProto are always
|
||||
// fully qualified.
|
||||
repeated FileDescriptorProto proto_file = 15;
|
||||
|
||||
// File descriptors with all options, including source-retention options.
|
||||
// These descriptors are only provided for the files listed in
|
||||
// files_to_generate.
|
||||
repeated FileDescriptorProto source_file_descriptors = 17;
|
||||
|
||||
// The version number of protocol compiler.
|
||||
optional Version compiler_version = 3;
|
||||
}
|
||||
|
||||
// The plugin writes an encoded CodeGeneratorResponse to stdout.
|
||||
message CodeGeneratorResponse {
|
||||
// Error message. If non-empty, code generation failed. The plugin process
|
||||
// should exit with status code zero even if it reports an error in this way.
|
||||
//
|
||||
// This should be used to indicate errors in .proto files which prevent the
|
||||
// code generator from generating correct code. Errors which indicate a
|
||||
// problem in protoc itself -- such as the input CodeGeneratorRequest being
|
||||
// unparseable -- should be reported by writing a message to stderr and
|
||||
// exiting with a non-zero status code.
|
||||
optional string error = 1;
|
||||
|
||||
// A bitmask of supported features that the code generator supports.
|
||||
// This is a bitwise "or" of values from the Feature enum.
|
||||
optional uint64 supported_features = 2;
|
||||
|
||||
// Sync with code_generator.h.
|
||||
enum Feature {
|
||||
FEATURE_NONE = 0;
|
||||
FEATURE_PROTO3_OPTIONAL = 1;
|
||||
FEATURE_SUPPORTS_EDITIONS = 2;
|
||||
}
|
||||
|
||||
// Represents a single generated file.
|
||||
message File {
|
||||
// The file name, relative to the output directory. The name must not
|
||||
// contain "." or ".." components and must be relative, not be absolute (so,
|
||||
// the file cannot lie outside the output directory). "/" must be used as
|
||||
// the path separator, not "\".
|
||||
//
|
||||
// If the name is omitted, the content will be appended to the previous
|
||||
// file. This allows the generator to break large files into small chunks,
|
||||
// and allows the generated text to be streamed back to protoc so that large
|
||||
// files need not reside completely in memory at one time. Note that as of
|
||||
// this writing protoc does not optimize for this -- it will read the entire
|
||||
// CodeGeneratorResponse before writing files to disk.
|
||||
optional string name = 1;
|
||||
|
||||
// If non-empty, indicates that the named file should already exist, and the
|
||||
// content here is to be inserted into that file at a defined insertion
|
||||
// point. This feature allows a code generator to extend the output
|
||||
// produced by another code generator. The original generator may provide
|
||||
// insertion points by placing special annotations in the file that look
|
||||
// like:
|
||||
// @@protoc_insertion_point(NAME)
|
||||
// The annotation can have arbitrary text before and after it on the line,
|
||||
// which allows it to be placed in a comment. NAME should be replaced with
|
||||
// an identifier naming the point -- this is what other generators will use
|
||||
// as the insertion_point. Code inserted at this point will be placed
|
||||
// immediately above the line containing the insertion point (thus multiple
|
||||
// insertions to the same point will come out in the order they were added).
|
||||
// The double-@ is intended to make it unlikely that the generated code
|
||||
// could contain things that look like insertion points by accident.
|
||||
//
|
||||
// For example, the C++ code generator places the following line in the
|
||||
// .pb.h files that it generates:
|
||||
// // @@protoc_insertion_point(namespace_scope)
|
||||
// This line appears within the scope of the file's package namespace, but
|
||||
// outside of any particular class. Another plugin can then specify the
|
||||
// insertion_point "namespace_scope" to generate additional classes or
|
||||
// other declarations that should be placed in this scope.
|
||||
//
|
||||
// Note that if the line containing the insertion point begins with
|
||||
// whitespace, the same whitespace will be added to every line of the
|
||||
// inserted text. This is useful for languages like Python, where
|
||||
// indentation matters. In these languages, the insertion point comment
|
||||
// should be indented the same amount as any inserted code will need to be
|
||||
// in order to work correctly in that context.
|
||||
//
|
||||
// The code generator that generates the initial file and the one which
|
||||
// inserts into it must both run as part of a single invocation of protoc.
|
||||
// Code generators are executed in the order in which they appear on the
|
||||
// command line.
|
||||
//
|
||||
// If |insertion_point| is present, |name| must also be present.
|
||||
optional string insertion_point = 2;
|
||||
|
||||
// The file contents.
|
||||
optional string content = 15;
|
||||
|
||||
// Information describing the file content being inserted. If an insertion
|
||||
// point is used, this information will be appropriately offset and inserted
|
||||
// into the code generation metadata for the generated files.
|
||||
optional GeneratedCodeInfo generated_code_info = 16;
|
||||
}
|
||||
repeated File file = 15;
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -1,115 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option cc_enable_arenas = true;
|
||||
option go_package = "google.golang.org/protobuf/types/known/durationpb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "DurationProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
|
||||
// A Duration represents a signed, fixed-length span of time represented
|
||||
// as a count of seconds and fractions of seconds at nanosecond
|
||||
// resolution. It is independent of any calendar and concepts like "day"
|
||||
// or "month". It is related to Timestamp in that the difference between
|
||||
// two Timestamp values is a Duration and it can be added or subtracted
|
||||
// from a Timestamp. Range is approximately +-10,000 years.
|
||||
//
|
||||
// # Examples
|
||||
//
|
||||
// Example 1: Compute Duration from two Timestamps in pseudo code.
|
||||
//
|
||||
// Timestamp start = ...;
|
||||
// Timestamp end = ...;
|
||||
// Duration duration = ...;
|
||||
//
|
||||
// duration.seconds = end.seconds - start.seconds;
|
||||
// duration.nanos = end.nanos - start.nanos;
|
||||
//
|
||||
// if (duration.seconds < 0 && duration.nanos > 0) {
|
||||
// duration.seconds += 1;
|
||||
// duration.nanos -= 1000000000;
|
||||
// } else if (duration.seconds > 0 && duration.nanos < 0) {
|
||||
// duration.seconds -= 1;
|
||||
// duration.nanos += 1000000000;
|
||||
// }
|
||||
//
|
||||
// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
|
||||
//
|
||||
// Timestamp start = ...;
|
||||
// Duration duration = ...;
|
||||
// Timestamp end = ...;
|
||||
//
|
||||
// end.seconds = start.seconds + duration.seconds;
|
||||
// end.nanos = start.nanos + duration.nanos;
|
||||
//
|
||||
// if (end.nanos < 0) {
|
||||
// end.seconds -= 1;
|
||||
// end.nanos += 1000000000;
|
||||
// } else if (end.nanos >= 1000000000) {
|
||||
// end.seconds += 1;
|
||||
// end.nanos -= 1000000000;
|
||||
// }
|
||||
//
|
||||
// Example 3: Compute Duration from datetime.timedelta in Python.
|
||||
//
|
||||
// td = datetime.timedelta(days=3, minutes=10)
|
||||
// duration = Duration()
|
||||
// duration.FromTimedelta(td)
|
||||
//
|
||||
// # JSON Mapping
|
||||
//
|
||||
// In JSON format, the Duration type is encoded as a string rather than an
|
||||
// object, where the string ends in the suffix "s" (indicating seconds) and
|
||||
// is preceded by the number of seconds, with nanoseconds expressed as
|
||||
// fractional seconds. For example, 3 seconds with 0 nanoseconds should be
|
||||
// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
|
||||
// be expressed in JSON format as "3.000000001s", and 3 seconds and 1
|
||||
// microsecond should be expressed in JSON format as "3.000001s".
|
||||
//
|
||||
message Duration {
|
||||
// Signed seconds of the span of time. Must be from -315,576,000,000
|
||||
// to +315,576,000,000 inclusive. Note: these bounds are computed from:
|
||||
// 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
|
||||
int64 seconds = 1;
|
||||
|
||||
// Signed fractions of a second at nanosecond resolution of the span
|
||||
// of time. Durations less than one second are represented with a 0
|
||||
// `seconds` field and a positive or negative `nanos` field. For durations
|
||||
// of one second or more, a non-zero value for the `nanos` field must be
|
||||
// of the same sign as the `seconds` field. Must be from -999,999,999
|
||||
// to +999,999,999 inclusive.
|
||||
int32 nanos = 2;
|
||||
}
|
||||
@@ -1,51 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option go_package = "google.golang.org/protobuf/types/known/emptypb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "EmptyProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
option cc_enable_arenas = true;
|
||||
|
||||
// A generic empty message that you can re-use to avoid defining duplicated
|
||||
// empty messages in your APIs. A typical example is to use it as the request
|
||||
// or the response type of an API method. For instance:
|
||||
//
|
||||
// service Foo {
|
||||
// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
|
||||
// }
|
||||
//
|
||||
message Empty {}
|
||||
@@ -1,245 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "FieldMaskProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
option go_package = "google.golang.org/protobuf/types/known/fieldmaskpb";
|
||||
option cc_enable_arenas = true;
|
||||
|
||||
// `FieldMask` represents a set of symbolic field paths, for example:
|
||||
//
|
||||
// paths: "f.a"
|
||||
// paths: "f.b.d"
|
||||
//
|
||||
// Here `f` represents a field in some root message, `a` and `b`
|
||||
// fields in the message found in `f`, and `d` a field found in the
|
||||
// message in `f.b`.
|
||||
//
|
||||
// Field masks are used to specify a subset of fields that should be
|
||||
// returned by a get operation or modified by an update operation.
|
||||
// Field masks also have a custom JSON encoding (see below).
|
||||
//
|
||||
// # Field Masks in Projections
|
||||
//
|
||||
// When used in the context of a projection, a response message or
|
||||
// sub-message is filtered by the API to only contain those fields as
|
||||
// specified in the mask. For example, if the mask in the previous
|
||||
// example is applied to a response message as follows:
|
||||
//
|
||||
// f {
|
||||
// a : 22
|
||||
// b {
|
||||
// d : 1
|
||||
// x : 2
|
||||
// }
|
||||
// y : 13
|
||||
// }
|
||||
// z: 8
|
||||
//
|
||||
// The result will not contain specific values for fields x,y and z
|
||||
// (their value will be set to the default, and omitted in proto text
|
||||
// output):
|
||||
//
|
||||
//
|
||||
// f {
|
||||
// a : 22
|
||||
// b {
|
||||
// d : 1
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// A repeated field is not allowed except at the last position of a
|
||||
// paths string.
|
||||
//
|
||||
// If a FieldMask object is not present in a get operation, the
|
||||
// operation applies to all fields (as if a FieldMask of all fields
|
||||
// had been specified).
|
||||
//
|
||||
// Note that a field mask does not necessarily apply to the
|
||||
// top-level response message. In case of a REST get operation, the
|
||||
// field mask applies directly to the response, but in case of a REST
|
||||
// list operation, the mask instead applies to each individual message
|
||||
// in the returned resource list. In case of a REST custom method,
|
||||
// other definitions may be used. Where the mask applies will be
|
||||
// clearly documented together with its declaration in the API. In
|
||||
// any case, the effect on the returned resource/resources is required
|
||||
// behavior for APIs.
|
||||
//
|
||||
// # Field Masks in Update Operations
|
||||
//
|
||||
// A field mask in update operations specifies which fields of the
|
||||
// targeted resource are going to be updated. The API is required
|
||||
// to only change the values of the fields as specified in the mask
|
||||
// and leave the others untouched. If a resource is passed in to
|
||||
// describe the updated values, the API ignores the values of all
|
||||
// fields not covered by the mask.
|
||||
//
|
||||
// If a repeated field is specified for an update operation, new values will
|
||||
// be appended to the existing repeated field in the target resource. Note that
|
||||
// a repeated field is only allowed in the last position of a `paths` string.
|
||||
//
|
||||
// If a sub-message is specified in the last position of the field mask for an
|
||||
// update operation, then new value will be merged into the existing sub-message
|
||||
// in the target resource.
|
||||
//
|
||||
// For example, given the target message:
|
||||
//
|
||||
// f {
|
||||
// b {
|
||||
// d: 1
|
||||
// x: 2
|
||||
// }
|
||||
// c: [1]
|
||||
// }
|
||||
//
|
||||
// And an update message:
|
||||
//
|
||||
// f {
|
||||
// b {
|
||||
// d: 10
|
||||
// }
|
||||
// c: [2]
|
||||
// }
|
||||
//
|
||||
// then if the field mask is:
|
||||
//
|
||||
// paths: ["f.b", "f.c"]
|
||||
//
|
||||
// then the result will be:
|
||||
//
|
||||
// f {
|
||||
// b {
|
||||
// d: 10
|
||||
// x: 2
|
||||
// }
|
||||
// c: [1, 2]
|
||||
// }
|
||||
//
|
||||
// An implementation may provide options to override this default behavior for
|
||||
// repeated and message fields.
|
||||
//
|
||||
// In order to reset a field's value to the default, the field must
|
||||
// be in the mask and set to the default value in the provided resource.
|
||||
// Hence, in order to reset all fields of a resource, provide a default
|
||||
// instance of the resource and set all fields in the mask, or do
|
||||
// not provide a mask as described below.
|
||||
//
|
||||
// If a field mask is not present on update, the operation applies to
|
||||
// all fields (as if a field mask of all fields has been specified).
|
||||
// Note that in the presence of schema evolution, this may mean that
|
||||
// fields the client does not know and has therefore not filled into
|
||||
// the request will be reset to their default. If this is unwanted
|
||||
// behavior, a specific service may require a client to always specify
|
||||
// a field mask, producing an error if not.
|
||||
//
|
||||
// As with get operations, the location of the resource which
|
||||
// describes the updated values in the request message depends on the
|
||||
// operation kind. In any case, the effect of the field mask is
|
||||
// required to be honored by the API.
|
||||
//
|
||||
// ## Considerations for HTTP REST
|
||||
//
|
||||
// The HTTP kind of an update operation which uses a field mask must
|
||||
// be set to PATCH instead of PUT in order to satisfy HTTP semantics
|
||||
// (PUT must only be used for full updates).
|
||||
//
|
||||
// # JSON Encoding of Field Masks
|
||||
//
|
||||
// In JSON, a field mask is encoded as a single string where paths are
|
||||
// separated by a comma. Fields name in each path are converted
|
||||
// to/from lower-camel naming conventions.
|
||||
//
|
||||
// As an example, consider the following message declarations:
|
||||
//
|
||||
// message Profile {
|
||||
// User user = 1;
|
||||
// Photo photo = 2;
|
||||
// }
|
||||
// message User {
|
||||
// string display_name = 1;
|
||||
// string address = 2;
|
||||
// }
|
||||
//
|
||||
// In proto a field mask for `Profile` may look as such:
|
||||
//
|
||||
// mask {
|
||||
// paths: "user.display_name"
|
||||
// paths: "photo"
|
||||
// }
|
||||
//
|
||||
// In JSON, the same mask is represented as below:
|
||||
//
|
||||
// {
|
||||
// mask: "user.displayName,photo"
|
||||
// }
|
||||
//
|
||||
// # Field Masks and Oneof Fields
|
||||
//
|
||||
// Field masks treat fields in oneofs just as regular fields. Consider the
|
||||
// following message:
|
||||
//
|
||||
// message SampleMessage {
|
||||
// oneof test_oneof {
|
||||
// string name = 4;
|
||||
// SubMessage sub_message = 9;
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// The field mask can be:
|
||||
//
|
||||
// mask {
|
||||
// paths: "name"
|
||||
// }
|
||||
//
|
||||
// Or:
|
||||
//
|
||||
// mask {
|
||||
// paths: "sub_message"
|
||||
// }
|
||||
//
|
||||
// Note that oneof type names ("test_oneof" in this case) cannot be used in
|
||||
// paths.
|
||||
//
|
||||
// ## Field Mask Verification
|
||||
//
|
||||
// The implementation of any API method which has a FieldMask type field in the
|
||||
// request should verify the included field paths, and return an
|
||||
// `INVALID_ARGUMENT` error if any path is unmappable.
|
||||
message FieldMask {
|
||||
// The set of field mask paths.
|
||||
repeated string paths = 1;
|
||||
}
|
||||
@@ -1,48 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "SourceContextProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
option go_package = "google.golang.org/protobuf/types/known/sourcecontextpb";
|
||||
|
||||
// `SourceContext` represents information about the source of a
|
||||
// protobuf element, like the file in which it is defined.
|
||||
message SourceContext {
|
||||
// The path-qualified name of the .proto file that contained the associated
|
||||
// protobuf element. For example: `"google/protobuf/source_context.proto"`.
|
||||
string file_name = 1;
|
||||
}
|
||||
@@ -1,95 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option cc_enable_arenas = true;
|
||||
option go_package = "google.golang.org/protobuf/types/known/structpb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "StructProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
|
||||
// `Struct` represents a structured data value, consisting of fields
|
||||
// which map to dynamically typed values. In some languages, `Struct`
|
||||
// might be supported by a native representation. For example, in
|
||||
// scripting languages like JS a struct is represented as an
|
||||
// object. The details of that representation are described together
|
||||
// with the proto support for the language.
|
||||
//
|
||||
// The JSON representation for `Struct` is JSON object.
|
||||
message Struct {
|
||||
// Unordered map of dynamically typed values.
|
||||
map<string, Value> fields = 1;
|
||||
}
|
||||
|
||||
// `Value` represents a dynamically typed value which can be either
|
||||
// null, a number, a string, a boolean, a recursive struct value, or a
|
||||
// list of values. A producer of value is expected to set one of these
|
||||
// variants. Absence of any variant indicates an error.
|
||||
//
|
||||
// The JSON representation for `Value` is JSON value.
|
||||
message Value {
|
||||
// The kind of value.
|
||||
oneof kind {
|
||||
// Represents a null value.
|
||||
NullValue null_value = 1;
|
||||
// Represents a double value.
|
||||
double number_value = 2;
|
||||
// Represents a string value.
|
||||
string string_value = 3;
|
||||
// Represents a boolean value.
|
||||
bool bool_value = 4;
|
||||
// Represents a structured value.
|
||||
Struct struct_value = 5;
|
||||
// Represents a repeated `Value`.
|
||||
ListValue list_value = 6;
|
||||
}
|
||||
}
|
||||
|
||||
// `NullValue` is a singleton enumeration to represent the null value for the
|
||||
// `Value` type union.
|
||||
//
|
||||
// The JSON representation for `NullValue` is JSON `null`.
|
||||
enum NullValue {
|
||||
// Null value.
|
||||
NULL_VALUE = 0;
|
||||
}
|
||||
|
||||
// `ListValue` is a wrapper around a repeated field of values.
//
// The JSON representation for `ListValue` is JSON array.
message ListValue {
  // Repeated field of dynamically typed values.
  repeated Value values = 1;
}
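As an aside, the JSON form of a `Struct` is simply a JSON object, so a minimal Rust sketch (illustrative only; the serde_json crate is assumed, this is not part of the original file) shows how the field kinds above map onto JSON values.

// Minimal sketch (assumes the serde_json crate); each JSON value below
// corresponds to one Value.kind variant of the messages defined above.
use serde_json::json;

fn main() {
    let struct_as_json = json!({
        "name": "example",        // Value.string_value
        "count": 3.0,             // Value.number_value (always a double)
        "enabled": true,          // Value.bool_value
        "tags": ["a", "b"],       // Value.list_value
        "nested": { "x": null }   // Value.struct_value containing a null_value
    });
    println!("{struct_as_json}");
}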
|
||||
@@ -1,144 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option cc_enable_arenas = true;
|
||||
option go_package = "google.golang.org/protobuf/types/known/timestamppb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "TimestampProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
|
||||
// A Timestamp represents a point in time independent of any time zone or local
|
||||
// calendar, encoded as a count of seconds and fractions of seconds at
|
||||
// nanosecond resolution. The count is relative to an epoch at UTC midnight on
|
||||
// January 1, 1970, in the proleptic Gregorian calendar which extends the
|
||||
// Gregorian calendar backwards to year one.
|
||||
//
|
||||
// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
|
||||
// second table is needed for interpretation, using a [24-hour linear
|
||||
// smear](https://developers.google.com/time/smear).
|
||||
//
|
||||
// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
|
||||
// restricting to that range, we ensure that we can convert to and from [RFC
|
||||
// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
|
||||
//
|
||||
// # Examples
|
||||
//
|
||||
// Example 1: Compute Timestamp from POSIX `time()`.
|
||||
//
|
||||
// Timestamp timestamp;
|
||||
// timestamp.set_seconds(time(NULL));
|
||||
// timestamp.set_nanos(0);
|
||||
//
|
||||
// Example 2: Compute Timestamp from POSIX `gettimeofday()`.
|
||||
//
|
||||
// struct timeval tv;
|
||||
// gettimeofday(&tv, NULL);
|
||||
//
|
||||
// Timestamp timestamp;
|
||||
// timestamp.set_seconds(tv.tv_sec);
|
||||
// timestamp.set_nanos(tv.tv_usec * 1000);
|
||||
//
|
||||
// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
|
||||
//
|
||||
// FILETIME ft;
|
||||
// GetSystemTimeAsFileTime(&ft);
|
||||
// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
|
||||
//
|
||||
// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
|
||||
// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
|
||||
// Timestamp timestamp;
|
||||
// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
|
||||
// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
|
||||
//
|
||||
// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
|
||||
//
|
||||
// long millis = System.currentTimeMillis();
|
||||
//
|
||||
// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
|
||||
// .setNanos((int) ((millis % 1000) * 1000000)).build();
|
||||
//
|
||||
// Example 5: Compute Timestamp from Java `Instant.now()`.
|
||||
//
|
||||
// Instant now = Instant.now();
|
||||
//
|
||||
// Timestamp timestamp =
|
||||
// Timestamp.newBuilder().setSeconds(now.getEpochSecond())
|
||||
// .setNanos(now.getNano()).build();
|
||||
//
|
||||
// Example 6: Compute Timestamp from current time in Python.
|
||||
//
|
||||
// timestamp = Timestamp()
|
||||
// timestamp.GetCurrentTime()
|
||||
//
|
||||
// # JSON Mapping
|
||||
//
|
||||
// In JSON format, the Timestamp type is encoded as a string in the
|
||||
// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
|
||||
// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
|
||||
// where {year} is always expressed using four digits while {month}, {day},
|
||||
// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
|
||||
// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
|
||||
// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
|
||||
// is required. A proto3 JSON serializer should always use UTC (as indicated by
|
||||
// "Z") when printing the Timestamp type and a proto3 JSON parser should be
|
||||
// able to accept both UTC and other timezones (as indicated by an offset).
|
||||
//
|
||||
// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
|
||||
// 01:30 UTC on January 15, 2017.
|
||||
//
|
||||
// In JavaScript, one can convert a Date object to this format using the
|
||||
// standard
|
||||
// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
|
||||
// method. In Python, a standard `datetime.datetime` object can be converted
|
||||
// to this format using
|
||||
// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
|
||||
// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
|
||||
// the Joda Time's [`ISODateTimeFormat.dateTime()`](
|
||||
// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()
|
||||
// ) to obtain a formatter capable of generating timestamps in this format.
|
||||
//
|
||||
message Timestamp {
  // Represents seconds of UTC time since Unix epoch
  // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
  // 9999-12-31T23:59:59Z inclusive.
  int64 seconds = 1;

  // Non-negative fractions of a second at nanosecond resolution. Negative
  // second values with fractions must still have non-negative nanos values
  // that count forward in time. Must be from 0 to 999,999,999
  // inclusive.
  int32 nanos = 2;
}
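As an aside, a minimal Rust sketch of the seconds/nanos split described above, using only the standard library (illustrative only; not part of the original file).

// Minimal sketch using std only: split the current time into the
// Timestamp.seconds / Timestamp.nanos pair described above.
use std::time::{SystemTime, UNIX_EPOCH};

fn main() {
    let since_epoch = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("system clock before Unix epoch");
    let seconds = since_epoch.as_secs() as i64;     // Timestamp.seconds
    let nanos = since_epoch.subsec_nanos() as i32;  // Timestamp.nanos (0..=999_999_999)
    println!("seconds={seconds} nanos={nanos}");
}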
|
||||
@@ -1,193 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
import "google/protobuf/any.proto";
|
||||
import "google/protobuf/source_context.proto";
|
||||
|
||||
option cc_enable_arenas = true;
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "TypeProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
option go_package = "google.golang.org/protobuf/types/known/typepb";
|
||||
|
||||
// A protocol buffer message type.
|
||||
message Type {
|
||||
// The fully qualified message name.
|
||||
string name = 1;
|
||||
// The list of fields.
|
||||
repeated Field fields = 2;
|
||||
// The list of types appearing in `oneof` definitions in this type.
|
||||
repeated string oneofs = 3;
|
||||
// The protocol buffer options.
|
||||
repeated Option options = 4;
|
||||
// The source context.
|
||||
SourceContext source_context = 5;
|
||||
// The source syntax.
|
||||
Syntax syntax = 6;
|
||||
// The source edition string, only valid when syntax is SYNTAX_EDITIONS.
|
||||
string edition = 7;
|
||||
}
|
||||
|
||||
// A single field of a message type.
|
||||
message Field {
|
||||
// Basic field types.
|
||||
enum Kind {
|
||||
// Field type unknown.
|
||||
TYPE_UNKNOWN = 0;
|
||||
// Field type double.
|
||||
TYPE_DOUBLE = 1;
|
||||
// Field type float.
|
||||
TYPE_FLOAT = 2;
|
||||
// Field type int64.
|
||||
TYPE_INT64 = 3;
|
||||
// Field type uint64.
|
||||
TYPE_UINT64 = 4;
|
||||
// Field type int32.
|
||||
TYPE_INT32 = 5;
|
||||
// Field type fixed64.
|
||||
TYPE_FIXED64 = 6;
|
||||
// Field type fixed32.
|
||||
TYPE_FIXED32 = 7;
|
||||
// Field type bool.
|
||||
TYPE_BOOL = 8;
|
||||
// Field type string.
|
||||
TYPE_STRING = 9;
|
||||
// Field type group. Proto2 syntax only, and deprecated.
|
||||
TYPE_GROUP = 10;
|
||||
// Field type message.
|
||||
TYPE_MESSAGE = 11;
|
||||
// Field type bytes.
|
||||
TYPE_BYTES = 12;
|
||||
// Field type uint32.
|
||||
TYPE_UINT32 = 13;
|
||||
// Field type enum.
|
||||
TYPE_ENUM = 14;
|
||||
// Field type sfixed32.
|
||||
TYPE_SFIXED32 = 15;
|
||||
// Field type sfixed64.
|
||||
TYPE_SFIXED64 = 16;
|
||||
// Field type sint32.
|
||||
TYPE_SINT32 = 17;
|
||||
// Field type sint64.
|
||||
TYPE_SINT64 = 18;
|
||||
}
|
||||
|
||||
// Whether a field is optional, required, or repeated.
|
||||
enum Cardinality {
|
||||
// For fields with unknown cardinality.
|
||||
CARDINALITY_UNKNOWN = 0;
|
||||
// For optional fields.
|
||||
CARDINALITY_OPTIONAL = 1;
|
||||
// For required fields. Proto2 syntax only.
|
||||
CARDINALITY_REQUIRED = 2;
|
||||
// For repeated fields.
|
||||
CARDINALITY_REPEATED = 3;
|
||||
}
|
||||
|
||||
// The field type.
|
||||
Kind kind = 1;
|
||||
// The field cardinality.
|
||||
Cardinality cardinality = 2;
|
||||
// The field number.
|
||||
int32 number = 3;
|
||||
// The field name.
|
||||
string name = 4;
|
||||
// The field type URL, without the scheme, for message or enumeration
|
||||
// types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`.
|
||||
string type_url = 6;
|
||||
// The index of the field type in `Type.oneofs`, for message or enumeration
|
||||
// types. The first type has index 1; zero means the type is not in the list.
|
||||
int32 oneof_index = 7;
|
||||
// Whether to use alternative packed wire representation.
|
||||
bool packed = 8;
|
||||
// The protocol buffer options.
|
||||
repeated Option options = 9;
|
||||
// The field JSON name.
|
||||
string json_name = 10;
|
||||
// The string value of the default value of this field. Proto2 syntax only.
|
||||
string default_value = 11;
|
||||
}
|
||||
|
||||
// Enum type definition.
|
||||
message Enum {
|
||||
// Enum type name.
|
||||
string name = 1;
|
||||
// Enum value definitions.
|
||||
repeated EnumValue enumvalue = 2;
|
||||
// Protocol buffer options.
|
||||
repeated Option options = 3;
|
||||
// The source context.
|
||||
SourceContext source_context = 4;
|
||||
// The source syntax.
|
||||
Syntax syntax = 5;
|
||||
// The source edition string, only valid when syntax is SYNTAX_EDITIONS.
|
||||
string edition = 6;
|
||||
}
|
||||
|
||||
// Enum value definition.
|
||||
message EnumValue {
|
||||
// Enum value name.
|
||||
string name = 1;
|
||||
// Enum value number.
|
||||
int32 number = 2;
|
||||
// Protocol buffer options.
|
||||
repeated Option options = 3;
|
||||
}
|
||||
|
||||
// A protocol buffer option, which can be attached to a message, field,
|
||||
// enumeration, etc.
|
||||
message Option {
|
||||
// The option's name. For protobuf built-in options (options defined in
|
||||
// descriptor.proto), this is the short name. For example, `"map_entry"`.
|
||||
// For custom options, it should be the fully-qualified name. For example,
|
||||
// `"google.api.http"`.
|
||||
string name = 1;
|
||||
// The option's value packed in an Any message. If the value is a primitive,
|
||||
// the corresponding wrapper type defined in google/protobuf/wrappers.proto
|
||||
// should be used. If the value is an enum, it should be stored as an int32
|
||||
// value using the google.protobuf.Int32Value type.
|
||||
Any value = 2;
|
||||
}
|
||||
|
||||
// The syntax in which a protocol buffer element is defined.
|
||||
enum Syntax {
|
||||
// Syntax `proto2`.
|
||||
SYNTAX_PROTO2 = 0;
|
||||
// Syntax `proto3`.
|
||||
SYNTAX_PROTO3 = 1;
|
||||
// Syntax `editions`.
|
||||
SYNTAX_EDITIONS = 2;
|
||||
}
|
||||
@@ -1,123 +0,0 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
//
|
||||
// Wrappers for primitive (non-message) types. These types are useful
|
||||
// for embedding primitives in the `google.protobuf.Any` type and for places
|
||||
// where we need to distinguish between the absence of a primitive
|
||||
// typed field and its default value.
|
||||
//
|
||||
// These wrappers have no meaningful use within repeated fields as they lack
|
||||
// the ability to detect presence on individual elements.
|
||||
// These wrappers have no meaningful use within a map or a oneof since
|
||||
// individual entries of a map or fields of a oneof can already detect presence.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option cc_enable_arenas = true;
|
||||
option go_package = "google.golang.org/protobuf/types/known/wrapperspb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "WrappersProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
|
||||
// Wrapper message for `double`.
|
||||
//
|
||||
// The JSON representation for `DoubleValue` is JSON number.
|
||||
message DoubleValue {
|
||||
// The double value.
|
||||
double value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `float`.
|
||||
//
|
||||
// The JSON representation for `FloatValue` is JSON number.
|
||||
message FloatValue {
|
||||
// The float value.
|
||||
float value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `int64`.
|
||||
//
|
||||
// The JSON representation for `Int64Value` is JSON string.
|
||||
message Int64Value {
|
||||
// The int64 value.
|
||||
int64 value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `uint64`.
|
||||
//
|
||||
// The JSON representation for `UInt64Value` is JSON string.
|
||||
message UInt64Value {
|
||||
// The uint64 value.
|
||||
uint64 value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `int32`.
|
||||
//
|
||||
// The JSON representation for `Int32Value` is JSON number.
|
||||
message Int32Value {
|
||||
// The int32 value.
|
||||
int32 value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `uint32`.
|
||||
//
|
||||
// The JSON representation for `UInt32Value` is JSON number.
|
||||
message UInt32Value {
|
||||
// The uint32 value.
|
||||
uint32 value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `bool`.
|
||||
//
|
||||
// The JSON representation for `BoolValue` is JSON `true` and `false`.
|
||||
message BoolValue {
|
||||
// The bool value.
|
||||
bool value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `string`.
|
||||
//
|
||||
// The JSON representation for `StringValue` is JSON string.
|
||||
message StringValue {
|
||||
// The string value.
|
||||
string value = 1;
|
||||
}
|
||||
|
||||
// Wrapper message for `bytes`.
//
// The JSON representation for `BytesValue` is JSON string.
message BytesValue {
  // The bytes value.
  bytes value = 1;
}
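As an aside, the wrappers above exist to make field presence observable; a plain Rust sketch of the same distinction (illustrative only; not part of the original file).

// Minimal sketch: a bare numeric field cannot distinguish "unset" from the
// default value, while an optional (wrapped) field can, which is the
// distinction the wrapper messages above provide for proto3 scalars.
struct PlainCounter { count: i32 }            // 0 may mean "unset" or "really zero"
struct WrappedCounter { count: Option<i32> }  // None and Some(0) are distinct

fn main() {
    let a = PlainCounter { count: 0 };
    let b = WrappedCounter { count: None };
    let c = WrappedCounter { count: Some(0) };
    println!("{} {} {}", a.count, b.count.is_some(), c.count.is_some());
}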
|
||||
@@ -1,25 +1,19 @@
use std::fmt::Display;

use log::{debug, info, warn};
use log::{debug, warn};
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::types::JsonValue;
use tauri::{AppHandle, Manager};

use crate::is_dev;
use crate::models::{
    generate_id, get_key_value_int, get_key_value_string, set_key_value_int, set_key_value_string,
};

const NAMESPACE: &str = "analytics";
const NUM_LAUNCHES_KEY: &str = "num_launches";
use crate::models::{generate_id, get_key_value_int, get_key_value_string, set_key_value_int, set_key_value_string};

// serializable
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "snake_case")]
pub enum AnalyticsResource {
    App,
    Appearance,
    CookieJar,
    Dialog,
    Environment,
@@ -30,9 +24,7 @@ pub enum AnalyticsResource {
|
||||
HttpRequest,
|
||||
HttpResponse,
|
||||
KeyValue,
|
||||
Setting,
|
||||
Sidebar,
|
||||
Theme,
|
||||
Workspace,
|
||||
}
|
||||
|
||||
@@ -98,19 +90,20 @@ pub struct LaunchEventInfo {
|
||||
pub num_launches: i32,
|
||||
}
|
||||
|
||||
pub async fn track_launch_event(app: &AppHandle) -> LaunchEventInfo {
|
||||
pub async fn track_launch_event(app_handle: &AppHandle) -> LaunchEventInfo {
|
||||
let namespace = "analytics";
|
||||
let last_tracked_version_key = "last_tracked_version";
|
||||
|
||||
let mut info = LaunchEventInfo::default();
|
||||
|
||||
info.num_launches = get_num_launches(app).await + 1;
|
||||
info.num_launches = get_key_value_int(app_handle, namespace, "num_launches", 0).await + 1;
|
||||
info.previous_version =
|
||||
get_key_value_string(app, NAMESPACE, last_tracked_version_key, "").await;
|
||||
info.current_version = app.package_info().version.to_string();
|
||||
get_key_value_string(app_handle, namespace, last_tracked_version_key, "").await;
|
||||
info.current_version = app_handle.package_info().version.to_string();
|
||||
|
||||
if info.previous_version.is_empty() {
|
||||
track_event(
|
||||
app,
|
||||
app_handle,
|
||||
AnalyticsResource::App,
|
||||
AnalyticsAction::LaunchFirst,
|
||||
None,
|
||||
@@ -120,10 +113,10 @@ pub async fn track_launch_event(app: &AppHandle) -> LaunchEventInfo {
|
||||
info.launched_after_update = info.current_version != info.previous_version;
|
||||
if info.launched_after_update {
|
||||
track_event(
|
||||
app,
|
||||
app_handle,
|
||||
AnalyticsResource::App,
|
||||
AnalyticsAction::LaunchUpdate,
|
||||
Some(json!({ NUM_LAUNCHES_KEY: info.num_launches })),
|
||||
Some(json!({ "num_launches": info.num_launches })),
|
||||
)
|
||||
.await;
|
||||
}
|
||||
@@ -131,23 +124,23 @@ pub async fn track_launch_event(app: &AppHandle) -> LaunchEventInfo {
|
||||
|
||||
// Track a launch event in all cases
|
||||
track_event(
|
||||
app,
|
||||
app_handle,
|
||||
AnalyticsResource::App,
|
||||
AnalyticsAction::Launch,
|
||||
Some(json!({ NUM_LAUNCHES_KEY: info.num_launches })),
|
||||
Some(json!({ "num_launches": info.num_launches })),
|
||||
)
|
||||
.await;
|
||||
|
||||
// Update key values
|
||||
|
||||
set_key_value_string(
|
||||
app,
|
||||
NAMESPACE,
|
||||
app_handle,
|
||||
namespace,
|
||||
last_tracked_version_key,
|
||||
info.current_version.as_str(),
|
||||
)
|
||||
.await;
|
||||
set_key_value_int(app, NAMESPACE, NUM_LAUNCHES_KEY, info.num_launches).await;
|
||||
set_key_value_int(app_handle, namespace, "num_launches", info.num_launches).await;
|
||||
|
||||
info
|
||||
}
|
||||
@@ -189,12 +182,15 @@ pub async fn track_event(
|
||||
|
||||
// Disable analytics actual sending in dev
|
||||
if is_dev() {
|
||||
debug!("track: {}", event);
|
||||
debug!("track: {} {} {:?}", event, attributes_json, params);
|
||||
return;
|
||||
}
|
||||
|
||||
if let Err(e) = req.send().await {
|
||||
info!("Error sending analytics event: {}", e);
|
||||
warn!(
|
||||
"Error sending analytics event: {} {} {} {:?}",
|
||||
e, event, attributes_json, params,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -211,7 +207,7 @@ fn get_os() -> &'static str {
|
||||
}
|
||||
|
||||
fn get_window_size(app_handle: &AppHandle) -> String {
|
||||
let window = match app_handle.webview_windows().into_values().next() {
|
||||
let window = match app_handle.windows().into_values().next() {
|
||||
Some(w) => w,
|
||||
None => return "unknown".to_string(),
|
||||
};
|
||||
@@ -236,14 +232,10 @@ fn get_window_size(app_handle: &AppHandle) -> String {
|
||||
async fn get_id(app_handle: &AppHandle) -> String {
|
||||
let id = get_key_value_string(app_handle, "analytics", "id", "").await;
|
||||
if id.is_empty() {
|
||||
let new_id = generate_id();
|
||||
let new_id = generate_id(None);
|
||||
set_key_value_string(app_handle, "analytics", "id", new_id.as_str()).await;
|
||||
new_id
|
||||
} else {
|
||||
id
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_num_launches(app: &AppHandle) -> i32 {
|
||||
get_key_value_int(app, NAMESPACE, NUM_LAUNCHES_KEY, 0).await
|
||||
}
|
||||
|
||||
@@ -7,20 +7,20 @@ use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use base64::Engine;
|
||||
use http::header::{ACCEPT, USER_AGENT};
|
||||
use http::{HeaderMap, HeaderName, HeaderValue, Method};
|
||||
use http::header::{ACCEPT, USER_AGENT};
|
||||
use log::{error, info, warn};
|
||||
use reqwest::redirect::Policy;
|
||||
use reqwest::{multipart, Url};
|
||||
use reqwest::redirect::Policy;
|
||||
use sqlx::types::{Json, JsonValue};
|
||||
use tauri::{Manager, WebviewWindow};
|
||||
use tauri::{Manager, Window};
|
||||
use tokio::sync::oneshot;
|
||||
use tokio::sync::watch::Receiver;
|
||||
use tokio::sync::watch::{Receiver};
|
||||
|
||||
use crate::{models, render, response_err};
|
||||
|
||||
pub async fn send_http_request(
|
||||
window: &WebviewWindow,
|
||||
window: &Window,
|
||||
request: models::HttpRequest,
|
||||
response: &models::HttpResponse,
|
||||
environment: Option<models::Environment>,
|
||||
@@ -35,7 +35,6 @@ pub async fn send_http_request(
|
||||
|
||||
let mut url_string = render::render(&request.url, &workspace, environment.as_ref());
|
||||
|
||||
url_string = ensure_proto(&url_string);
|
||||
if !url_string.starts_with("http://") && !url_string.starts_with("https://") {
|
||||
url_string = format!("http://{}", url_string);
|
||||
}
|
||||
@@ -89,24 +88,14 @@ pub async fn send_http_request(
|
||||
let uri = match http::Uri::from_str(url_string.as_str()) {
|
||||
Ok(u) => u,
|
||||
Err(e) => {
|
||||
return response_err(
|
||||
response,
|
||||
format!("Failed to parse URL \"{}\": {}", url_string, e.to_string()),
|
||||
window,
|
||||
)
|
||||
.await;
|
||||
return response_err(response, e.to_string(), window).await;
|
||||
}
|
||||
};
|
||||
// Yes, we're parsing both URI and URL because they could return different errors
|
||||
let url = match Url::from_str(uri.to_string().as_str()) {
|
||||
Ok(u) => u,
|
||||
Err(e) => {
|
||||
return response_err(
|
||||
response,
|
||||
format!("Failed to parse URL \"{}\": {}", url_string, e.to_string()),
|
||||
window,
|
||||
)
|
||||
.await;
|
||||
return response_err(response, e.to_string(), window).await;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -123,6 +112,7 @@ pub async fn send_http_request(
|
||||
// everything manually to know that).
|
||||
// if let Some(cookie_store) = maybe_cookie_store.clone() {
|
||||
// let values1 = cookie_store.get_request_values(&url);
|
||||
// println!("COOKIE VLUAES: {:?}", values1.collect::<Vec<_>>());
|
||||
// let raw_value = cookie_store.get_request_values(&url)
|
||||
// .map(|(name, value)| format!("{}={}", name, value))
|
||||
// .collect::<Vec<_>>()
|
||||
@@ -254,21 +244,6 @@ pub async fn send_http_request(
|
||||
}
|
||||
}
|
||||
request_builder = request_builder.form(&form_params);
|
||||
} else if body_type == "binary" && request_body.contains_key("filePath") {
|
||||
let file_path = request_body
|
||||
.get("filePath")
|
||||
.ok_or("filePath not set")?
|
||||
.as_str()
|
||||
.unwrap_or_default();
|
||||
|
||||
match fs::read(file_path).map_err(|e| e.to_string()) {
|
||||
Ok(f) => {
|
||||
request_builder = request_builder.body(f);
|
||||
}
|
||||
Err(e) => {
|
||||
return response_err(response, e, window).await;
|
||||
}
|
||||
}
|
||||
} else if body_type == "multipart/form-data" && request_body.contains_key("form") {
|
||||
let mut multipart_form = multipart::Form::new();
|
||||
if let Some(form_definition) = request_body.get("form") {
|
||||
@@ -278,67 +253,38 @@ pub async fn send_http_request(
|
||||
.unwrap_or(empty_bool)
|
||||
.as_bool()
|
||||
.unwrap_or(false);
|
||||
let name_raw = p
|
||||
let name = p
|
||||
.get("name")
|
||||
.unwrap_or(empty_string)
|
||||
.as_str()
|
||||
.unwrap_or_default();
|
||||
|
||||
if !enabled || name_raw.is_empty() {
|
||||
if !enabled || name.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let file_path = p
|
||||
let file = p
|
||||
.get("file")
|
||||
.unwrap_or(empty_string)
|
||||
.as_str()
|
||||
.unwrap_or_default();
|
||||
let value_raw = p
|
||||
let value = p
|
||||
.get("value")
|
||||
.unwrap_or(empty_string)
|
||||
.as_str()
|
||||
.unwrap_or_default();
|
||||
|
||||
let name = render::render(name_raw, &workspace, environment_ref);
|
||||
let mut part = if file_path.is_empty() {
|
||||
multipart::Part::text(render::render(
|
||||
value_raw,
|
||||
&workspace,
|
||||
environment_ref,
|
||||
))
|
||||
} else {
|
||||
match fs::read(file_path) {
|
||||
Ok(f) => multipart::Part::bytes(f),
|
||||
Err(e) => {
|
||||
return response_err(response, e.to_string(), window).await;
|
||||
multipart_form = multipart_form.part(
|
||||
render::render(name, &workspace, environment_ref),
|
||||
match !file.is_empty() {
|
||||
true => {
|
||||
multipart::Part::bytes(fs::read(file).map_err(|e| e.to_string())?)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let ct_raw = p
|
||||
.get("contentType")
|
||||
.unwrap_or(empty_string)
|
||||
.as_str()
|
||||
.unwrap_or_default();
|
||||
|
||||
if !ct_raw.is_empty() {
|
||||
let content_type = render::render(ct_raw, &workspace, environment_ref);
|
||||
part = part
|
||||
.mime_str(content_type.as_str())
|
||||
.map_err(|e| e.to_string())?;
|
||||
}
|
||||
|
||||
if !file_path.is_empty() {
|
||||
let filename = PathBuf::from(file_path)
|
||||
.file_name()
|
||||
.unwrap_or_default()
|
||||
.to_str()
|
||||
.unwrap_or_default()
|
||||
.to_string();
|
||||
part = part.file_name(filename);
|
||||
}
|
||||
|
||||
multipart_form = multipart_form.part(name, part);
|
||||
false => multipart::Part::text(render::render(
|
||||
value,
|
||||
&workspace,
|
||||
environment_ref,
|
||||
)),
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
headers.remove("Content-Type"); // reqwest will add this automatically
|
||||
@@ -361,11 +307,11 @@ pub async fn send_http_request(
|
||||
let start = std::time::Instant::now();
|
||||
|
||||
let (resp_tx, resp_rx) = oneshot::channel();
|
||||
|
||||
|
||||
tokio::spawn(async move {
|
||||
let _ = resp_tx.send(client.execute(sendable_req).await);
|
||||
});
|
||||
|
||||
|
||||
let raw_response = tokio::select! {
|
||||
Ok(r) = resp_rx => {r}
|
||||
_ = cancel_rx.changed() => {
|
||||
@@ -412,7 +358,7 @@ pub async fn send_http_request(
|
||||
|
||||
{
|
||||
// Write body to FS
|
||||
let dir = window.app_handle().path().app_data_dir().unwrap();
|
||||
let dir = window.app_handle().path_resolver().app_data_dir().unwrap();
|
||||
let base_dir = dir.join("responses");
|
||||
create_dir_all(base_dir.clone()).expect("Failed to create responses dir");
|
||||
let body_path = match response.id.is_empty() {
|
||||
@@ -477,26 +423,3 @@ pub async fn send_http_request(
|
||||
Err(e) => response_err(response, e.to_string(), window).await,
|
||||
}
|
||||
}
|
||||
|
||||
fn ensure_proto(url_str: &str) -> String {
    if url_str.starts_with("http://") || url_str.starts_with("https://") {
        return url_str.to_string();
    }

    // Url::from_str will fail without a proto, so add one
    let parseable_url = format!("http://{}", url_str);
    if let Ok(u) = Url::from_str(parseable_url.as_str()) {
        match u.host() {
            Some(host) => {
                let h = host.to_string();
                // These TLDs force HTTPS
                if h.ends_with(".app") || h.ends_with(".dev") || h.ends_with(".page") {
                    return format!("https://{url_str}");
                }
            }
            None => {}
        }
    }

    format!("http://{url_str}")
}
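As an aside, a short usage sketch of the protocol-defaulting behaviour of ensure_proto (illustrative only; it assumes the function above is in scope and is not part of the diff).

// Usage sketch for ensure_proto: bare hosts default to http://,
// except hosts on TLDs that force HTTPS.
fn demo() {
    assert_eq!(ensure_proto("example.com"), "http://example.com");
    assert_eq!(ensure_proto("myapp.dev"), "https://myapp.dev");           // .dev forces HTTPS
    assert_eq!(ensure_proto("https://already.io"), "https://already.io"); // unchanged
}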
|
||||
src-tauri/src/lib.rs (1904 changes): file diff suppressed because it is too large
@@ -7,38 +7,9 @@ use serde::{Deserialize, Serialize};
|
||||
use sqlx::types::chrono::NaiveDateTime;
|
||||
use sqlx::types::{Json, JsonValue};
|
||||
use sqlx::{Pool, Sqlite};
|
||||
use tauri::{AppHandle, Manager, WebviewWindow, Wry};
|
||||
use tauri::{AppHandle, Manager, Wry};
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
pub enum ModelType {
|
||||
TypeCookieJar,
|
||||
TypeEnvironment,
|
||||
TypeFolder,
|
||||
TypeGrpcConnection,
|
||||
TypeGrpcEvent,
|
||||
TypeGrpcRequest,
|
||||
TypeHttpRequest,
|
||||
TypeHttpResponse,
|
||||
TypeWorkspace,
|
||||
}
|
||||
|
||||
impl ModelType {
|
||||
pub fn id_prefix(&self) -> String {
|
||||
match self {
|
||||
ModelType::TypeCookieJar => "cj",
|
||||
ModelType::TypeEnvironment => "ev",
|
||||
ModelType::TypeFolder => "fl",
|
||||
ModelType::TypeGrpcConnection => "gc",
|
||||
ModelType::TypeGrpcEvent => "ge",
|
||||
ModelType::TypeGrpcRequest => "gr",
|
||||
ModelType::TypeHttpRequest => "rq",
|
||||
ModelType::TypeHttpResponse => "rs",
|
||||
ModelType::TypeWorkspace => "wk",
|
||||
}
|
||||
.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
fn default_true() -> bool {
|
||||
true
|
||||
}
|
||||
@@ -52,13 +23,7 @@ pub struct Settings {
|
||||
pub updated_at: NaiveDateTime,
|
||||
pub theme: String,
|
||||
pub appearance: String,
|
||||
pub theme_dark: String,
|
||||
pub theme_light: String,
|
||||
pub update_channel: String,
|
||||
pub interface_font_size: i64,
|
||||
pub interface_scale: i64,
|
||||
pub editor_font_size: i64,
|
||||
pub editor_soft_wrap: bool,
|
||||
}
|
||||
|
||||
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, Default)]
|
||||
@@ -461,9 +426,9 @@ pub async fn get_workspace(mgr: &impl Manager<Wry>, id: &str) -> Result<Workspac
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_workspace(window: &WebviewWindow, id: &str) -> Result<Workspace, sqlx::Error> {
|
||||
let db = get_db(window).await;
|
||||
let workspace = get_workspace(window, id).await?;
|
||||
pub async fn delete_workspace(mgr: &impl Manager<Wry>, id: &str) -> Result<Workspace, sqlx::Error> {
|
||||
let db = get_db(mgr).await;
|
||||
let workspace = get_workspace(mgr, id).await?;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM workspaces
|
||||
@@ -474,11 +439,11 @@ pub async fn delete_workspace(window: &WebviewWindow, id: &str) -> Result<Worksp
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
for r in list_responses_by_workspace_id(window, id).await? {
|
||||
delete_http_response(window, &r.id).await?;
|
||||
for r in list_responses_by_workspace_id(mgr, id).await? {
|
||||
delete_http_response(mgr, &r.id).await?;
|
||||
}
|
||||
|
||||
emit_deleted_model(window, workspace)
|
||||
emit_deleted_model(mgr, workspace)
|
||||
}
|
||||
|
||||
pub async fn get_cookie_jar(mgr: &impl Manager<Wry>, id: &str) -> Result<CookieJar, sqlx::Error> {
|
||||
@@ -516,9 +481,12 @@ pub async fn list_cookie_jars(
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_cookie_jar(window: &WebviewWindow, id: &str) -> Result<CookieJar, sqlx::Error> {
|
||||
let cookie_jar = get_cookie_jar(window, id).await?;
|
||||
let db = get_db(window).await;
|
||||
pub async fn delete_cookie_jar(
|
||||
mgr: &impl Manager<Wry>,
|
||||
id: &str,
|
||||
) -> Result<CookieJar, sqlx::Error> {
|
||||
let cookie_jar = get_cookie_jar(mgr, id).await?;
|
||||
let db = get_db(mgr).await;
|
||||
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
@@ -530,25 +498,25 @@ pub async fn delete_cookie_jar(window: &WebviewWindow, id: &str) -> Result<Cooki
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(window, cookie_jar)
|
||||
emit_deleted_model(mgr, cookie_jar)
|
||||
}
|
||||
|
||||
pub async fn duplicate_grpc_request(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
id: &str,
|
||||
) -> Result<GrpcRequest, sqlx::Error> {
|
||||
let mut request = get_grpc_request(window, id).await?.clone();
|
||||
let mut request = get_grpc_request(mgr, id).await?.clone();
|
||||
request.id = "".to_string();
|
||||
upsert_grpc_request(window, &request).await
|
||||
upsert_grpc_request(mgr, &request).await
|
||||
}
|
||||
|
||||
pub async fn upsert_grpc_request(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
request: &GrpcRequest,
|
||||
) -> Result<GrpcRequest, sqlx::Error> {
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
let id = match request.id.as_str() {
|
||||
"" => generate_model_id(ModelType::TypeGrpcRequest),
|
||||
"" => generate_id(Some("gr")),
|
||||
_ => request.id.to_string(),
|
||||
};
|
||||
let trimmed_name = request.name.trim();
|
||||
@@ -588,8 +556,8 @@ pub async fn upsert_grpc_request(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_grpc_request(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
match get_grpc_request(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -639,12 +607,12 @@ pub async fn list_grpc_requests(
|
||||
}
|
||||
|
||||
pub async fn upsert_grpc_connection(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
connection: &GrpcConnection,
|
||||
) -> Result<GrpcConnection, sqlx::Error> {
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
let id = match connection.id.as_str() {
|
||||
"" => generate_model_id(ModelType::TypeGrpcConnection),
|
||||
"" => generate_id(Some("gc")),
|
||||
_ => connection.id.to_string(),
|
||||
};
|
||||
sqlx::query!(
|
||||
@@ -678,8 +646,8 @@ pub async fn upsert_grpc_connection(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_grpc_connection(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
match get_grpc_connection(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -728,12 +696,12 @@ pub async fn list_grpc_connections(
|
||||
}
|
||||
|
||||
pub async fn upsert_grpc_event(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
event: &GrpcEvent,
|
||||
) -> Result<GrpcEvent, sqlx::Error> {
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
let id = match event.id.as_str() {
|
||||
"" => generate_model_id(ModelType::TypeGrpcEvent),
|
||||
"" => generate_id(Some("ge")),
|
||||
_ => event.id.to_string(),
|
||||
};
|
||||
sqlx::query!(
|
||||
@@ -764,8 +732,8 @@ pub async fn upsert_grpc_event(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_grpc_event(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
match get_grpc_event(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -810,16 +778,16 @@ pub async fn list_grpc_events(
|
||||
}
|
||||
|
||||
pub async fn upsert_cookie_jar(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
cookie_jar: &CookieJar,
|
||||
) -> Result<CookieJar, sqlx::Error> {
|
||||
let id = match cookie_jar.id.as_str() {
|
||||
"" => generate_model_id(ModelType::TypeCookieJar),
|
||||
"" => generate_id(Some("cj")),
|
||||
_ => cookie_jar.id.to_string(),
|
||||
};
|
||||
let trimmed_name = cookie_jar.name.trim();
|
||||
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO cookie_jars (
|
||||
@@ -839,8 +807,8 @@ pub async fn upsert_cookie_jar(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_cookie_jar(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
match get_cookie_jar(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -865,11 +833,11 @@ pub async fn list_environments(
|
||||
}
|
||||
|
||||
pub async fn delete_environment(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
id: &str,
|
||||
) -> Result<Environment, sqlx::Error> {
|
||||
let db = get_db(window).await;
|
||||
let env = get_environment(window, id).await?;
|
||||
let db = get_db(mgr).await;
|
||||
let env = get_environment(mgr, id).await?;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM environments
|
||||
@@ -880,7 +848,7 @@ pub async fn delete_environment(
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(window, env)
|
||||
emit_deleted_model(mgr, env)
|
||||
}
|
||||
|
||||
async fn get_settings(mgr: &impl Manager<Wry>) -> Result<Settings, sqlx::Error> {
|
||||
@@ -889,9 +857,7 @@ async fn get_settings(mgr: &impl Manager<Wry>) -> Result<Settings, sqlx::Error>
|
||||
Settings,
|
||||
r#"
|
||||
SELECT
|
||||
id, model, created_at, updated_at, theme, appearance,
|
||||
theme_dark, theme_light, update_channel,
|
||||
interface_font_size, interface_scale, editor_font_size, editor_soft_wrap
|
||||
id, model, created_at, updated_at, theme, appearance, update_channel
|
||||
FROM settings
|
||||
WHERE id = 'default'
|
||||
"#,
|
||||
@@ -920,46 +886,39 @@ pub async fn get_or_create_settings(mgr: &impl Manager<Wry>) -> Settings {
|
||||
}
|
||||
|
||||
pub async fn update_settings(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
settings: Settings,
|
||||
) -> Result<Settings, sqlx::Error> {
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE settings SET (
|
||||
theme, appearance, theme_dark, theme_light, update_channel,
|
||||
interface_font_size, interface_scale, editor_font_size, editor_soft_wrap
|
||||
) = (?, ?, ?, ?, ?, ?, ?, ?, ?) WHERE id = 'default';
|
||||
theme, appearance, update_channel
|
||||
) = (?, ?, ?) WHERE id = 'default';
|
||||
"#,
|
||||
settings.theme,
|
||||
settings.appearance,
|
||||
settings.theme_dark,
|
||||
settings.theme_light,
|
||||
settings.update_channel,
|
||||
settings.interface_font_size,
|
||||
settings.interface_scale,
|
||||
settings.editor_font_size,
|
||||
settings.editor_soft_wrap,
|
||||
settings.update_channel
|
||||
)
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_settings(window).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
match get_settings(mgr).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn upsert_environment(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
environment: Environment,
|
||||
) -> Result<Environment, sqlx::Error> {
|
||||
let id = match environment.id.as_str() {
|
||||
"" => generate_model_id(ModelType::TypeEnvironment),
|
||||
"" => generate_id(Some("ev")),
|
||||
_ => environment.id.to_string(),
|
||||
};
|
||||
let trimmed_name = environment.name.trim();
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO environments (
|
||||
@@ -979,8 +938,8 @@ pub async fn upsert_environment(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_environment(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
match get_environment(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -1040,9 +999,9 @@ pub async fn list_folders(
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_folder(window: &WebviewWindow, id: &str) -> Result<Folder, sqlx::Error> {
|
||||
let folder = get_folder(window, id).await?;
|
||||
let db = get_db(window).await;
|
||||
pub async fn delete_folder(mgr: &impl Manager<Wry>, id: &str) -> Result<Folder, sqlx::Error> {
|
||||
let folder = get_folder(mgr, id).await?;
|
||||
let db = get_db(mgr).await;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM folders
|
||||
@@ -1053,17 +1012,17 @@ pub async fn delete_folder(window: &WebviewWindow, id: &str) -> Result<Folder, s
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(window, folder)
|
||||
emit_deleted_model(mgr, folder)
|
||||
}
|
||||
|
||||
pub async fn upsert_folder(window: &WebviewWindow, r: Folder) -> Result<Folder, sqlx::Error> {
|
||||
pub async fn upsert_folder(mgr: &impl Manager<Wry>, r: Folder) -> Result<Folder, sqlx::Error> {
|
||||
let id = match r.id.as_str() {
|
||||
"" => generate_model_id(ModelType::TypeFolder),
|
||||
"" => generate_id(Some("fl")),
|
||||
_ => r.id.to_string(),
|
||||
};
|
||||
let trimmed_name = r.name.trim();
|
||||
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO folders (
|
||||
@@ -1085,32 +1044,32 @@ pub async fn upsert_folder(window: &WebviewWindow, r: Folder) -> Result<Folder,
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_folder(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
match get_folder(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn duplicate_http_request(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
id: &str,
|
||||
) -> Result<HttpRequest, sqlx::Error> {
|
||||
let mut request = get_http_request(window, id).await?.clone();
|
||||
let mut request = get_http_request(mgr, id).await?.clone();
|
||||
request.id = "".to_string();
|
||||
upsert_http_request(window, request).await
|
||||
upsert_http_request(mgr, request).await
|
||||
}
|
||||
|
||||
pub async fn upsert_http_request(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
r: HttpRequest,
|
||||
) -> Result<HttpRequest, sqlx::Error> {
|
||||
let id = match r.id.as_str() {
|
||||
"" => generate_model_id(ModelType::TypeHttpRequest),
|
||||
"" => generate_id(Some("rq")),
|
||||
_ => r.id.to_string(),
|
||||
};
|
||||
let trimmed_name = r.name.trim();
|
||||
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
|
||||
sqlx::query!(
|
||||
r#"
|
||||
@@ -1150,13 +1109,13 @@ pub async fn upsert_http_request(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_http_request(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
match get_http_request(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn list_http_requests(
|
||||
pub async fn list_requests(
|
||||
mgr: &impl Manager<Wry>,
|
||||
workspace_id: &str,
|
||||
) -> Result<Vec<HttpRequest>, sqlx::Error> {
|
||||
@@ -1206,15 +1165,15 @@ pub async fn get_http_request(
|
||||
}
|
||||
|
||||
pub async fn delete_http_request(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
id: &str,
|
||||
) -> Result<HttpRequest, sqlx::Error> {
|
||||
let req = get_http_request(window, id).await?;
|
||||
let req = get_http_request(mgr, id).await?;
|
||||
|
||||
// DB deletes will cascade but this will delete the files
|
||||
delete_all_http_responses(window, id).await?;
|
||||
delete_all_http_responses(mgr, id).await?;
|
||||
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM http_requests
|
||||
@@ -1225,12 +1184,12 @@ pub async fn delete_http_request(
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(window, req)
|
||||
emit_deleted_model(mgr, req)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn create_http_response(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
request_id: &str,
|
||||
elapsed: i64,
|
||||
elapsed_headers: i64,
|
||||
@@ -1243,10 +1202,10 @@ pub async fn create_http_response(
|
||||
version: Option<&str>,
|
||||
remote_addr: Option<&str>,
|
||||
) -> Result<HttpResponse, sqlx::Error> {
|
||||
let req = get_http_request(window, request_id).await?;
|
||||
let id = generate_model_id(ModelType::TypeHttpResponse);
|
||||
let req = get_http_request(mgr, request_id).await?;
|
||||
let id = generate_id(Some("rp"));
|
||||
let headers_json = Json(headers);
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO http_responses (
|
||||
@@ -1272,14 +1231,14 @@ pub async fn create_http_response(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_http_response(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
match get_http_response(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn cancel_pending_grpc_connections(app: &AppHandle) -> Result<(), sqlx::Error> {
|
||||
let db = get_db(app).await;
|
||||
pub async fn cancel_pending_grpc_connections(mgr: &impl Manager<Wry>) -> Result<(), sqlx::Error> {
|
||||
let db = get_db(mgr).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE grpc_connections
|
||||
@@ -1292,8 +1251,8 @@ pub async fn cancel_pending_grpc_connections(app: &AppHandle) -> Result<(), sqlx
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn cancel_pending_responses(app: &AppHandle) -> Result<(), sqlx::Error> {
|
||||
let db = get_db(app).await;
|
||||
pub async fn cancel_pending_responses(mgr: &impl Manager<Wry>) -> Result<(), sqlx::Error> {
|
||||
let db = get_db(mgr).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE http_responses
|
||||
@@ -1307,27 +1266,27 @@ pub async fn cancel_pending_responses(app: &AppHandle) -> Result<(), sqlx::Error
|
||||
}
|
||||
|
||||
pub async fn update_response_if_id(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
response: &HttpResponse,
|
||||
) -> Result<HttpResponse, sqlx::Error> {
|
||||
if response.id.is_empty() {
|
||||
Ok(response.clone())
|
||||
} else {
|
||||
update_response(window, response).await
|
||||
update_response(mgr, response).await
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn upsert_workspace(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
workspace: Workspace,
|
||||
) -> Result<Workspace, sqlx::Error> {
|
||||
let id = match workspace.id.as_str() {
|
||||
"" => generate_model_id(ModelType::TypeWorkspace),
|
||||
"" => generate_id(Some("wk")),
|
||||
_ => workspace.id.to_string(),
|
||||
};
|
||||
let trimmed_name = workspace.name.trim();
|
||||
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO workspaces (
|
||||
@@ -1355,17 +1314,17 @@ pub async fn upsert_workspace(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_workspace(window, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
match get_workspace(mgr, &id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn update_response(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
response: &HttpResponse,
|
||||
) -> Result<HttpResponse, sqlx::Error> {
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE http_responses SET (
|
||||
@@ -1389,8 +1348,8 @@ pub async fn update_response(
|
||||
.execute(&db)
|
||||
.await?;
|
||||
|
||||
match get_http_response(window, &response.id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(window, m)),
|
||||
match get_http_response(mgr, &response.id).await {
|
||||
Ok(m) => Ok(emit_upserted_model(mgr, m)),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -1468,12 +1427,12 @@ pub async fn list_responses_by_workspace_id(
|
||||
}
|
||||
|
||||
pub async fn delete_grpc_request(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
id: &str,
|
||||
) -> Result<GrpcRequest, sqlx::Error> {
|
||||
let req = get_grpc_request(window, id).await?;
|
||||
let req = get_grpc_request(mgr, id).await?;
|
||||
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM grpc_requests
|
||||
@@ -1484,16 +1443,16 @@ pub async fn delete_grpc_request(
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(window, req)
|
||||
emit_deleted_model(mgr, req)
|
||||
}
|
||||
|
||||
pub async fn delete_grpc_connection(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
id: &str,
|
||||
) -> Result<GrpcConnection, sqlx::Error> {
|
||||
let resp = get_grpc_connection(window, id).await?;
|
||||
let resp = get_grpc_connection(mgr, id).await?;
|
||||
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM grpc_connections
|
||||
@@ -1504,14 +1463,14 @@ pub async fn delete_grpc_connection(
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(window, resp)
|
||||
emit_deleted_model(mgr, resp)
|
||||
}
|
||||
|
||||
pub async fn delete_http_response(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
id: &str,
|
||||
) -> Result<HttpResponse, sqlx::Error> {
|
||||
let resp = get_http_response(window, id).await?;
|
||||
let resp = get_http_response(mgr, id).await?;
|
||||
|
||||
// Delete the body file if it exists
|
||||
if let Some(p) = resp.body_path.clone() {
|
||||
@@ -1520,7 +1479,7 @@ pub async fn delete_http_response(
|
||||
};
|
||||
}
|
||||
|
||||
let db = get_db(window).await;
|
||||
let db = get_db(mgr).await;
|
||||
let _ = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM http_responses
|
||||
@@ -1531,129 +1490,92 @@ pub async fn delete_http_response(
|
||||
.execute(&db)
|
||||
.await;
|
||||
|
||||
emit_deleted_model(window, resp)
|
||||
emit_deleted_model(mgr, resp)
|
||||
}
|
||||
|
||||
pub async fn delete_all_grpc_connections(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
request_id: &str,
|
||||
) -> Result<(), sqlx::Error> {
|
||||
for r in list_grpc_connections(window, request_id).await? {
|
||||
delete_grpc_connection(window, &r.id).await?;
|
||||
for r in list_grpc_connections(mgr, request_id).await? {
|
||||
delete_grpc_connection(mgr, &r.id).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn delete_all_http_responses(
|
||||
window: &WebviewWindow,
|
||||
mgr: &impl Manager<Wry>,
|
||||
request_id: &str,
|
||||
) -> Result<(), sqlx::Error> {
|
||||
for r in list_responses(window, request_id, None).await? {
|
||||
delete_http_response(window, &r.id).await?;
|
||||
for r in list_responses(mgr, request_id, None).await? {
|
||||
delete_http_response(mgr, &r.id).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn generate_model_id(model: ModelType) -> String {
|
||||
let id = generate_id();
|
||||
format!("{}_{}", model.id_prefix(), id)
|
||||
}
|
||||
|
||||
pub fn generate_id() -> String {
|
||||
Alphanumeric.sample_string(&mut rand::thread_rng(), 10)
|
||||
pub fn generate_id(prefix: Option<&str>) -> String {
|
||||
let id = Alphanumeric.sample_string(&mut rand::thread_rng(), 10);
|
||||
match prefix {
|
||||
None => id,
|
||||
Some(p) => format!("{p}_{id}"),
|
||||
}
|
||||
}
|
||||
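For reference, a minimal stand-alone sketch of how the prefix-based ID helper above could be exercised. Only the helper body comes from the diff; the main function and the rand 0.8 DistString import are assumptions added for illustration.

use rand::distributions::{Alphanumeric, DistString};

fn generate_id(prefix: Option<&str>) -> String {
    let id = Alphanumeric.sample_string(&mut rand::thread_rng(), 10);
    match prefix {
        None => id,
        Some(p) => format!("{p}_{id}"),
    }
}

fn main() {
    // Prefixed IDs like "wk_aB3xYz01Qp" for workspaces; unprefixed otherwise.
    println!("{}", generate_id(Some("wk")));
    println!("{}", generate_id(None));
}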
#[derive(Default, Debug, Deserialize, Serialize)]
#[serde(default, rename_all = "camelCase")]
pub struct WorkspaceExport {
    pub yaak_version: String,
    pub yaak_schema: i64,
    pub timestamp: NaiveDateTime,
    pub resources: WorkspaceExportResources,
    pub yaak_version: String,
    pub yaak_schema: i64,
    pub timestamp: NaiveDateTime,
    pub resources: WorkspaceExportResources,
}

#[derive(Default, Debug, Deserialize, Serialize)]
#[serde(default, rename_all = "camelCase")]
pub struct WorkspaceExportResources {
    pub workspaces: Vec<Workspace>,
    pub environments: Vec<Environment>,
    pub folders: Vec<Folder>,
    pub http_requests: Vec<HttpRequest>,
    pub grpc_requests: Vec<GrpcRequest>,
    pub workspaces: Vec<Workspace>,
    pub environments: Vec<Environment>,
    pub folders: Vec<Folder>,
    pub http_requests: Vec<HttpRequest>,
    pub grpc_requests: Vec<GrpcRequest>,
}

pub async fn get_workspace_export_resources(
    window: &WebviewWindow,
    workspace_ids: Vec<&str>,
    app_handle: &AppHandle,
    workspace_id: &str,
) -> WorkspaceExport {
    let app_handle = window.app_handle();
    let mut data = WorkspaceExport {
    let workspace = get_workspace(app_handle, workspace_id)
        .await
        .expect("Failed to get workspace");
    return WorkspaceExport {
        yaak_version: app_handle.package_info().version.clone().to_string(),
        yaak_schema: 2,
        timestamp: chrono::Utc::now().naive_utc(),
        resources: WorkspaceExportResources {
            workspaces: Vec::new(),
            environments: Vec::new(),
            folders: Vec::new(),
            http_requests: Vec::new(),
            grpc_requests: Vec::new(),
        },
    };

    for workspace_id in workspace_ids {
        data.resources.workspaces.push(
            get_workspace(window, workspace_id)
                .await
                .expect("Failed to get workspace"),
        );
        data.resources.environments.append(
            &mut list_environments(window, workspace_id)
            workspaces: vec![workspace],
            environments: list_environments(app_handle, workspace_id)
                .await
                .expect("Failed to get environments"),
        );
        data.resources.folders.append(
            &mut list_folders(window, workspace_id)
            folders: list_folders(app_handle, workspace_id)
                .await
                .expect("Failed to get folders"),
        );
        data.resources.http_requests.append(
            &mut list_http_requests(window, workspace_id)
            http_requests: list_requests(app_handle, workspace_id)
                .await
                .expect("Failed to get http requests"),
        );
        data.resources.grpc_requests.append(
            &mut list_grpc_requests(window, workspace_id)
                .expect("Failed to get requests"),
            grpc_requests: list_grpc_requests(app_handle, workspace_id)
                .await
                .expect("Failed to get grpc requests"),
        );
    }

    return data;
}

#[derive(Clone, Serialize)]
#[serde(default, rename_all = "camelCase")]
struct ModelPayload<M: Serialize + Clone> {
    pub model: M,
    pub window_label: String,
}

fn emit_upserted_model<M: Serialize + Clone>(window: &WebviewWindow, model: M) -> M {
    let payload = ModelPayload {
        model: model.clone(),
        window_label: window.label().to_string(),
    },
    };
}

    window.emit("upserted_model", payload).unwrap();
fn emit_upserted_model<S: Serialize + Clone>(mgr: &impl Manager<Wry>, model: S) -> S {
    mgr.emit_all("upserted_model", model.clone()).unwrap();
    model
}

fn emit_deleted_model<M: Serialize + Clone, E>(window: &WebviewWindow, model: M) -> Result<M, E> {
    let payload = ModelPayload {
        model: model.clone(),
        window_label: window.label().to_string(),
    };
    window.emit("deleted_model", payload).unwrap();
fn emit_deleted_model<S: Serialize + Clone, E>(mgr: &impl Manager<Wry>, model: S) -> Result<S, E> {
    mgr.emit_all("deleted_model", model.clone()).unwrap();
    Ok(model)
}
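One side of the emit helpers above wraps the model in a ModelPayload and emits it to a single WebviewWindow; the other broadcasts the bare model with Manager::emit_all. A rough sketch of the broadcast form, assuming a Tauri 1.x AppHandle; the Payload type and function name are illustrative only, not from the diff.

use serde::Serialize;
use tauri::{AppHandle, Manager};

#[derive(Clone, Serialize)]
struct Payload {
    id: String,
}

// Broadcasts the event to every window managed by the app rather than one window.
fn broadcast_upserted(app: &AppHandle, id: &str) {
    let payload = Payload { id: id.to_string() };
    app.emit_all("upserted_model", payload).unwrap();
}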
@@ -1,99 +0,0 @@
use std::time::SystemTime;

use chrono::{Duration, NaiveDateTime, Utc};
use http::Method;
use log::debug;
use serde::{Deserialize, Serialize};
use tauri::{AppHandle, Manager};

use crate::analytics::get_num_launches;
use crate::models::{get_key_value_raw, set_key_value_raw};

// Check for updates every hour
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;

const KV_NAMESPACE: &str = "notifications";
const KV_KEY: &str = "seen";

// Create updater struct
pub struct YaakNotifier {
    last_check: SystemTime,
}

#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, Default)]
#[serde(default, rename_all = "camelCase")]
pub struct YaakNotification {
    timestamp: NaiveDateTime,
    id: String,
    message: String,
    action: Option<YaakNotificationAction>,
}

#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(default, rename_all = "camelCase")]
pub struct YaakNotificationAction {
    label: String,
    url: String,
}

impl YaakNotifier {
    pub fn new() -> Self {
        Self {
            last_check: SystemTime::UNIX_EPOCH,
        }
    }

    pub async fn seen(&mut self, app: &AppHandle, id: &str) -> Result<(), String> {
        let mut seen = get_kv(app).await?;
        seen.push(id.to_string());
        debug!("Marked notification as seen {}", id);
        let seen_json = serde_json::to_string(&seen).map_err(|e| e.to_string())?;
        set_key_value_raw(app, KV_NAMESPACE, KV_KEY, seen_json.as_str()).await;
        Ok(())
    }

    pub async fn check(&mut self, app: &AppHandle) -> Result<(), String> {
        let ignore_check = self.last_check.elapsed().unwrap().as_secs() < MAX_UPDATE_CHECK_SECONDS;

        if ignore_check {
            return Ok(());
        }

        self.last_check = SystemTime::now();

        let num_launches = get_num_launches(app).await;
        let info = app.package_info().clone();
        let req = reqwest::Client::default()
            .request(Method::GET, "https://notify.yaak.app/notifications")
            .query(&[
                ("version", info.version.to_string()),
                ("launches", num_launches.to_string()),
            ]);
        let resp = req.send().await.map_err(|e| e.to_string())?;
        let notification = resp
            .json::<YaakNotification>()
            .await
            .map_err(|e| e.to_string())?;

        let age = notification
            .timestamp
            .signed_duration_since(Utc::now().naive_utc());
        let seen = get_kv(app).await?;
        if seen.contains(&notification.id) || (age > Duration::days(1)) {
            debug!("Already seen notification {}", notification.id);
            return Ok(());
        }
        debug!("Got notification {:?}", notification);

        let _ = app.emit("notification", notification.clone());

        Ok(())
    }
}

async fn get_kv(app: &AppHandle) -> Result<Vec<String>, String> {
    match get_key_value_raw(app, "notifications", "seen").await {
        None => Ok(Vec::new()),
        Some(v) => serde_json::from_str(&v.value).map_err(|e| e.to_string()),
    }
}
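The check() method above only hits the notification endpoint when the previous check is older than MAX_UPDATE_CHECK_SECONDS. A small isolated sketch of that time-based guard; the Throttle struct and should_check name are illustrative only.

use std::time::SystemTime;

const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60; // check at most once per hour

struct Throttle {
    last_check: SystemTime,
}

impl Throttle {
    // Returns true (and records the time) only when enough time has passed.
    fn should_check(&mut self) -> bool {
        // Treat a clock error as "checked recently" and skip the work.
        let elapsed = self.last_check.elapsed().unwrap_or_default().as_secs();
        if elapsed < MAX_UPDATE_CHECK_SECONDS {
            return false;
        }
        self.last_check = SystemTime::now();
        true
    }
}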
@@ -1,18 +1,18 @@
use std::rc::Rc;
use std::fs;

use boa_engine::builtins::promise::PromiseState;
use boa_engine::{
    js_string, module::SimpleModuleLoader, property::Attribute, Context, JsNativeError, JsValue,
    Module, Source,
    Context, js_string, JsNativeError, JsValue, Module, module::SimpleModuleLoader,
    property::Attribute, Source,
};
use boa_engine::builtins::promise::PromiseState;
use boa_engine::module::ModuleLoader;
use boa_runtime::Console;
use log::{debug, error};
use serde::{Deserialize, Serialize};
use serde_json::json;
use tauri::path::BaseDirectory;
use tauri::{AppHandle, Manager};
use tauri::AppHandle;

use crate::models::{HttpRequest, WorkspaceExportResources};
use crate::models::{WorkspaceExportResources};

#[derive(Default, Debug, Deserialize, Serialize)]
pub struct FilterResult {
@@ -47,28 +47,14 @@ pub async fn run_plugin_filter(
    Some(resources)
}

pub fn run_plugin_export_curl(
    app_handle: &AppHandle,
    request: &HttpRequest,
) -> Result<String, String> {
    let mut context = Context::default();
    let request_json = serde_json::to_value(request).map_err(|e| e.to_string())?;
    let result_json = run_plugin(
        app_handle,
        "exporter-curl",
        "pluginHookExport",
        &[JsValue::from_json(&request_json, &mut context).map_err(|e| e.to_string())?],
    );

    let resources: String = serde_json::from_value(result_json).map_err(|e| e.to_string())?;
    Ok(resources)
}

pub async fn run_plugin_import(
    app_handle: &AppHandle,
    plugin_name: &str,
    file_contents: &str,
) -> Result<Option<ImportResult>, String> {
    file_path: &str,
) -> Option<ImportResult> {
    let file = fs::read_to_string(file_path)
        .unwrap_or_else(|_| panic!("Unable to read file {}", file_path));
    let file_contents = file.as_str();
    let result_json = run_plugin(
        app_handle,
        plugin_name,
@@ -77,11 +63,12 @@ pub async fn run_plugin_import(
    );

    if result_json.is_null() {
        return Ok(None);
        return None;
    }

    let resources: ImportResult = serde_json::from_value(result_json).map_err(|e| e.to_string())?;
    Ok(Some(resources))
    let resources: ImportResult =
        serde_json::from_value(result_json).expect("failed to parse result json");
    Some(resources)
}

fn run_plugin(
@@ -91,8 +78,8 @@ fn run_plugin(
    js_args: &[JsValue],
) -> serde_json::Value {
    let plugin_dir = app_handle
        .path()
        .resolve("plugins", BaseDirectory::Resource)
        .path_resolver()
        .resolve_resource("plugins")
        .expect("failed to resolve plugin directory resource")
        .join(plugin_name);
    let plugin_index_file = plugin_dir.join("index.mjs");
@@ -102,9 +89,12 @@ fn run_plugin(
        plugin_dir, plugin_index_file
    );

    let loader = Rc::new(SimpleModuleLoader::new(plugin_dir).unwrap());
    // Module loader for the specific plugin
    let loader = &SimpleModuleLoader::new(plugin_dir).expect("failed to create module loader");
    let dyn_loader: &dyn ModuleLoader = loader;

    let context = &mut Context::builder()
        .module_loader(loader.clone())
        .module_loader(dyn_loader)
        .build()
        .expect("failed to create context");

@@ -118,13 +108,15 @@ fn run_plugin(
    // Insert parsed entrypoint into the module loader
    loader.insert(plugin_index_file, module.clone());

    let promise_result = module.load_link_evaluate(context);
    let promise_result = module
        .load_link_evaluate(context)
        .expect("failed to evaluate module");

    // Very important to push forward the job queue after queueing promises.
    context.run_jobs();

    // Checking if the final promise didn't return an error.
    match promise_result.state() {
    match promise_result.state().expect("failed to get promise state") {
        PromiseState::Pending => {
            panic!("Promise was pending");
        }
@@ -1,67 +1,6 @@
use crate::models::{Environment, Workspace};
use std::collections::HashMap;

use regex::Regex;
use sqlx::types::{Json, JsonValue};

use crate::models::{Environment, HttpRequest, HttpRequestHeader, HttpUrlParameter, Workspace};

pub fn render_request(r: &HttpRequest, w: &Workspace, e: Option<&Environment>) -> HttpRequest {
    let r = r.clone();
    HttpRequest {
        url: render(r.url.as_str(), w, e),
        url_parameters: Json(
            r.url_parameters
                .0
                .iter()
                .map(|p| HttpUrlParameter {
                    enabled: p.enabled,
                    name: render(p.name.as_str(), w, e),
                    value: render(p.value.as_str(), w, e),
                })
                .collect::<Vec<HttpUrlParameter>>(),
        ),
        headers: Json(
            r.headers
                .0
                .iter()
                .map(|p| HttpRequestHeader {
                    enabled: p.enabled,
                    name: render(p.name.as_str(), w, e),
                    value: render(p.value.as_str(), w, e),
                })
                .collect::<Vec<HttpRequestHeader>>(),
        ),
        body: Json(
            r.body
                .0
                .iter()
                .map(|(k, v)| {
                    let v = if v.is_string() {
                        render(v.as_str().unwrap(), w, e)
                    } else {
                        v.to_string()
                    };
                    (render(k, w, e), JsonValue::from(v))
                })
                .collect::<HashMap<String, JsonValue>>(),
        ),
        authentication: Json(
            r.authentication
                .0
                .iter()
                .map(|(k, v)| {
                    let v = if v.is_string() {
                        render(v.as_str().unwrap(), w, e)
                    } else {
                        v.to_string()
                    };
                    (render(k, w, e), JsonValue::from(v))
                })
                .collect::<HashMap<String, JsonValue>>(),
        ),
        ..r
    }
}
use tauri::regex::Regex;

pub fn render(template: &str, workspace: &Workspace, environment: Option<&Environment>) -> String {
    let mut map = HashMap::new();
@@ -85,7 +24,7 @@ pub fn render(template: &str, workspace: &Workspace, environment: Option<&Enviro

    Regex::new(r"\$\{\[\s*([^]\s]+)\s*]}")
        .expect("Failed to create regex")
        .replace_all(template, |caps: &regex::Captures| {
        .replace_all(template, |caps: &tauri::regex::Captures| {
            let key = caps.get(1).unwrap().as_str();
            map.get(key).unwrap_or(&"")
        })
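Both sides of the render() diff substitute ${[ name ]} placeholders with values from a variable map via a regex. A stand-alone sketch assuming the regex crate and a plain HashMap in place of workspace/environment data; the function shape and the escaped form of the pattern are illustrative, not the project's API.

use regex::{Captures, Regex};
use std::collections::HashMap;

fn render(template: &str, vars: &HashMap<&str, &str>) -> String {
    Regex::new(r"\$\{\[\s*([^\]\s]+)\s*\]\}")
        .expect("Failed to create regex")
        .replace_all(template, |caps: &Captures| {
            // Unknown variables render as an empty string.
            let key = caps.get(1).unwrap().as_str();
            vars.get(key).copied().unwrap_or("")
        })
        .to_string()
}

fn main() {
    let mut vars = HashMap::new();
    vars.insert("base_url", "https://api.example.com");
    assert_eq!(render("${[ base_url ]}/users", &vars), "https://api.example.com/users");
}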
@@ -1,450 +0,0 @@
|
||||
use hex_color::HexColor;
|
||||
use objc::{msg_send, sel, sel_impl};
|
||||
use rand::{distributions::Alphanumeric, Rng};
|
||||
use tauri::{
|
||||
plugin::{Builder, TauriPlugin},
|
||||
Manager, Runtime, Window, WindowEvent,
|
||||
};
|
||||
|
||||
const WINDOW_CONTROL_PAD_X: f64 = 13.0;
|
||||
const WINDOW_CONTROL_PAD_Y: f64 = 18.0;
|
||||
|
||||
struct UnsafeWindowHandle(*mut std::ffi::c_void);
|
||||
|
||||
unsafe impl Send for UnsafeWindowHandle {}
|
||||
|
||||
unsafe impl Sync for UnsafeWindowHandle {}
|
||||
|
||||
pub fn init<R: Runtime>() -> TauriPlugin<R> {
|
||||
Builder::new("mac_window")
|
||||
.on_window_ready(|window| {
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
setup_traffic_light_positioner(&window);
|
||||
let h = window.app_handle();
|
||||
|
||||
let window_for_theme = window.clone();
|
||||
let id1 = h.listen("yaak_bg_changed", move |ev| {
|
||||
let payload = serde_json::from_str::<&str>(ev.payload()).unwrap().trim();
|
||||
let color = HexColor::parse_rgb(payload).unwrap();
|
||||
update_window_theme(window_for_theme.clone(), color);
|
||||
});
|
||||
|
||||
let window_for_title = window.clone();
|
||||
let id2 = h.listen("yaak_title_changed", move |ev| {
|
||||
let payload = serde_json::from_str::<&str>(ev.payload()).unwrap().trim();
|
||||
update_window_title(window_for_title.clone(), payload.to_string());
|
||||
});
|
||||
|
||||
let h = h.clone();
|
||||
window.on_window_event(move |e| {
|
||||
match e {
|
||||
WindowEvent::Destroyed => {
|
||||
h.unlisten(id1);
|
||||
h.unlisten(id2);
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
});
|
||||
}
|
||||
return;
|
||||
})
|
||||
.build()
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
fn update_window_title<R: Runtime>(window: Window<R>, title: String) {
|
||||
use cocoa::{appkit::NSWindow, base::nil, foundation::NSString};
|
||||
|
||||
unsafe {
|
||||
let window_handle = UnsafeWindowHandle(window.ns_window().unwrap());
|
||||
|
||||
let window2 = window.clone();
|
||||
let label = window.label().to_string();
|
||||
let _ = window.run_on_main_thread(move || {
|
||||
let win_title = NSString::alloc(nil).init_str(&title);
|
||||
let handle = window_handle;
|
||||
NSWindow::setTitle_(handle.0 as cocoa::base::id, win_title);
|
||||
position_traffic_lights(
|
||||
UnsafeWindowHandle(window2.ns_window().expect("Failed to create window handle")),
|
||||
WINDOW_CONTROL_PAD_X,
|
||||
WINDOW_CONTROL_PAD_Y,
|
||||
label,
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
fn update_window_theme<R: Runtime>(window: Window<R>, color: HexColor) {
|
||||
use cocoa::appkit::{
|
||||
NSAppearance, NSAppearanceNameVibrantDark, NSAppearanceNameVibrantLight, NSWindow,
|
||||
};
|
||||
|
||||
let brightness = (color.r as u64 + color.g as u64 + color.b as u64) / 3;
|
||||
let label = window.label().to_string();
|
||||
|
||||
unsafe {
|
||||
let window_handle = UnsafeWindowHandle(window.ns_window().unwrap());
|
||||
let window2 = window.clone();
|
||||
let _ = window.run_on_main_thread(move || {
|
||||
let handle = window_handle;
|
||||
|
||||
let selected_appearance = if brightness >= 128 {
|
||||
NSAppearance(NSAppearanceNameVibrantLight)
|
||||
} else {
|
||||
NSAppearance(NSAppearanceNameVibrantDark)
|
||||
};
|
||||
|
||||
NSWindow::setAppearance(handle.0 as cocoa::base::id, selected_appearance);
|
||||
position_traffic_lights(
|
||||
UnsafeWindowHandle(window2.ns_window().expect("Failed to create window handle")),
|
||||
WINDOW_CONTROL_PAD_X,
|
||||
WINDOW_CONTROL_PAD_Y,
|
||||
label,
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
fn position_traffic_lights(ns_window_handle: UnsafeWindowHandle, x: f64, y: f64, label: String) {
|
||||
if label.starts_with("nested_") {
|
||||
return;
|
||||
}
|
||||
|
||||
use cocoa::appkit::{NSView, NSWindow, NSWindowButton};
|
||||
use cocoa::foundation::NSRect;
|
||||
|
||||
let ns_window = ns_window_handle.0 as cocoa::base::id;
|
||||
unsafe {
|
||||
let close = ns_window.standardWindowButton_(NSWindowButton::NSWindowCloseButton);
|
||||
let miniaturize =
|
||||
ns_window.standardWindowButton_(NSWindowButton::NSWindowMiniaturizeButton);
|
||||
let zoom = ns_window.standardWindowButton_(NSWindowButton::NSWindowZoomButton);
|
||||
|
||||
let title_bar_container_view = close.superview().superview();
|
||||
|
||||
let close_rect: NSRect = msg_send![close, frame];
|
||||
let button_height = close_rect.size.height;
|
||||
|
||||
let title_bar_frame_height = button_height + y;
|
||||
let mut title_bar_rect = NSView::frame(title_bar_container_view);
|
||||
title_bar_rect.size.height = title_bar_frame_height;
|
||||
title_bar_rect.origin.y = NSView::frame(ns_window).size.height - title_bar_frame_height;
|
||||
let _: () = msg_send![title_bar_container_view, setFrame: title_bar_rect];
|
||||
|
||||
let window_buttons = vec![close, miniaturize, zoom];
|
||||
let space_between = NSView::frame(miniaturize).origin.x - NSView::frame(close).origin.x;
|
||||
|
||||
for (i, button) in window_buttons.into_iter().enumerate() {
|
||||
let mut rect: NSRect = NSView::frame(button);
|
||||
rect.origin.x = x + (i as f64 * space_between);
|
||||
button.setFrameOrigin(rect.origin);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
#[derive(Debug)]
|
||||
struct WindowState<R: Runtime> {
|
||||
window: Window<R>,
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
pub fn setup_traffic_light_positioner<R: Runtime>(window: &Window<R>) {
|
||||
use cocoa::appkit::NSWindow;
|
||||
use cocoa::base::{id, BOOL};
|
||||
use cocoa::delegate;
|
||||
use cocoa::foundation::NSUInteger;
|
||||
use objc::runtime::{Object, Sel};
|
||||
use std::ffi::c_void;
|
||||
|
||||
position_traffic_lights(
|
||||
UnsafeWindowHandle(window.ns_window().expect("Failed to create window handle")),
|
||||
WINDOW_CONTROL_PAD_X,
|
||||
WINDOW_CONTROL_PAD_Y,
|
||||
window.label().to_string(),
|
||||
);
|
||||
|
||||
// Ensure they stay in place while resizing the window.
|
||||
fn with_window_state<R: Runtime, F: FnOnce(&mut WindowState<R>) -> T, T>(
|
||||
this: &Object,
|
||||
func: F,
|
||||
) {
|
||||
let ptr = unsafe {
|
||||
let x: *mut c_void = *this.get_ivar("app_box");
|
||||
&mut *(x as *mut WindowState<R>)
|
||||
};
|
||||
func(ptr);
|
||||
}
|
||||
|
||||
unsafe {
|
||||
let ns_win = window
|
||||
.ns_window()
|
||||
.expect("NS Window should exist to mount traffic light delegate.")
|
||||
as id;
|
||||
|
||||
let current_delegate: id = ns_win.delegate();
|
||||
|
||||
extern "C" fn on_window_should_close(this: &Object, _cmd: Sel, sender: id) -> BOOL {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
msg_send![super_del, windowShouldClose: sender]
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_will_close(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowWillClose: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_resize<R: Runtime>(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
with_window_state(&*this, |state: &mut WindowState<R>| {
|
||||
let id = state
|
||||
.window
|
||||
.ns_window()
|
||||
.expect("NS window should exist on state to handle resize")
|
||||
as id;
|
||||
|
||||
position_traffic_lights(
|
||||
UnsafeWindowHandle(id as *mut c_void),
|
||||
WINDOW_CONTROL_PAD_X,
|
||||
WINDOW_CONTROL_PAD_Y,
|
||||
state.window.label().to_string(),
|
||||
);
|
||||
});
|
||||
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidResize: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_move(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidMove: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_change_backing_properties(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidChangeBackingProperties: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_become_key(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidBecomeKey: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_resign_key(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidResignKey: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_dragging_entered(this: &Object, _cmd: Sel, notification: id) -> BOOL {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
msg_send![super_del, draggingEntered: notification]
|
||||
}
|
||||
}
|
||||
extern "C" fn on_prepare_for_drag_operation(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) -> BOOL {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
msg_send![super_del, prepareForDragOperation: notification]
|
||||
}
|
||||
}
|
||||
extern "C" fn on_perform_drag_operation(this: &Object, _cmd: Sel, sender: id) -> BOOL {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
msg_send![super_del, performDragOperation: sender]
|
||||
}
|
||||
}
|
||||
extern "C" fn on_conclude_drag_operation(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, concludeDragOperation: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_dragging_exited(this: &Object, _cmd: Sel, notification: id) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, draggingExited: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_will_use_full_screen_presentation_options(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
window: id,
|
||||
proposed_options: NSUInteger,
|
||||
) -> NSUInteger {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
msg_send![super_del, window: window willUseFullScreenPresentationOptions: proposed_options]
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_enter_full_screen<R: Runtime>(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
with_window_state(&*this, |state: &mut WindowState<R>| {
|
||||
state
|
||||
.window
|
||||
.emit("did-enter-fullscreen", ())
|
||||
.expect("Failed to emit event");
|
||||
});
|
||||
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidEnterFullScreen: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_will_enter_full_screen<R: Runtime>(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
with_window_state(&*this, |state: &mut WindowState<R>| {
|
||||
state
|
||||
.window
|
||||
.emit("will-enter-fullscreen", ())
|
||||
.expect("Failed to emit event");
|
||||
});
|
||||
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowWillEnterFullScreen: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_exit_full_screen<R: Runtime>(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
with_window_state(&*this, |state: &mut WindowState<R>| {
|
||||
state
|
||||
.window
|
||||
.emit("did-exit-fullscreen", ())
|
||||
.expect("Failed to emit event");
|
||||
|
||||
let id = state.window.ns_window().expect("Failed to emit event") as id;
|
||||
position_traffic_lights(
|
||||
UnsafeWindowHandle(id as *mut c_void),
|
||||
WINDOW_CONTROL_PAD_X,
|
||||
WINDOW_CONTROL_PAD_Y,
|
||||
state.window.label().to_string(),
|
||||
);
|
||||
});
|
||||
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidExitFullScreen: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_will_exit_full_screen<R: Runtime>(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
with_window_state(&*this, |state: &mut WindowState<R>| {
|
||||
state
|
||||
.window
|
||||
.emit("will-exit-fullscreen", ())
|
||||
.expect("Failed to emit event");
|
||||
});
|
||||
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowWillExitFullScreen: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_window_did_fail_to_enter_full_screen(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
window: id,
|
||||
) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, windowDidFailToEnterFullScreen: window];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_effective_appearance_did_change(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![super_del, effectiveAppearanceDidChange: notification];
|
||||
}
|
||||
}
|
||||
extern "C" fn on_effective_appearance_did_changed_on_main_thread(
|
||||
this: &Object,
|
||||
_cmd: Sel,
|
||||
notification: id,
|
||||
) {
|
||||
unsafe {
|
||||
let super_del: id = *this.get_ivar("super_delegate");
|
||||
let _: () = msg_send![
|
||||
super_del,
|
||||
effectiveAppearanceDidChangedOnMainThread: notification
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
// Are we de-allocing this properly ? (I miss safe Rust :( )
|
||||
let window_label = window.label().to_string();
|
||||
|
||||
let app_state = WindowState {
|
||||
window: window.clone(),
|
||||
};
|
||||
let app_box = Box::into_raw(Box::new(app_state)) as *mut c_void;
|
||||
let random_str: String = rand::thread_rng()
|
||||
.sample_iter(&Alphanumeric)
|
||||
.take(20)
|
||||
.map(char::from)
|
||||
.collect();
|
||||
|
||||
// We need to ensure we have a unique delegate name, otherwise we will panic while trying to create a duplicate
|
||||
// delegate with the same name.
|
||||
let delegate_name = format!("windowDelegate_{}_{}", window_label, random_str);
|
||||
|
||||
ns_win.setDelegate_(delegate!(&delegate_name, {
|
||||
window: id = ns_win,
|
||||
app_box: *mut c_void = app_box,
|
||||
toolbar: id = cocoa::base::nil,
|
||||
super_delegate: id = current_delegate,
|
||||
(windowShouldClose:) => on_window_should_close as extern fn(&Object, Sel, id) -> BOOL,
|
||||
(windowWillClose:) => on_window_will_close as extern fn(&Object, Sel, id),
|
||||
(windowDidResize:) => on_window_did_resize::<R> as extern fn(&Object, Sel, id),
|
||||
(windowDidMove:) => on_window_did_move as extern fn(&Object, Sel, id),
|
||||
(windowDidChangeBackingProperties:) => on_window_did_change_backing_properties as extern fn(&Object, Sel, id),
|
||||
(windowDidBecomeKey:) => on_window_did_become_key as extern fn(&Object, Sel, id),
|
||||
(windowDidResignKey:) => on_window_did_resign_key as extern fn(&Object, Sel, id),
|
||||
(draggingEntered:) => on_dragging_entered as extern fn(&Object, Sel, id) -> BOOL,
|
||||
(prepareForDragOperation:) => on_prepare_for_drag_operation as extern fn(&Object, Sel, id) -> BOOL,
|
||||
(performDragOperation:) => on_perform_drag_operation as extern fn(&Object, Sel, id) -> BOOL,
|
||||
(concludeDragOperation:) => on_conclude_drag_operation as extern fn(&Object, Sel, id),
|
||||
(draggingExited:) => on_dragging_exited as extern fn(&Object, Sel, id),
|
||||
(window:willUseFullScreenPresentationOptions:) => on_window_will_use_full_screen_presentation_options as extern fn(&Object, Sel, id, NSUInteger) -> NSUInteger,
|
||||
(windowDidEnterFullScreen:) => on_window_did_enter_full_screen::<R> as extern fn(&Object, Sel, id),
|
||||
(windowWillEnterFullScreen:) => on_window_will_enter_full_screen::<R> as extern fn(&Object, Sel, id),
|
||||
(windowDidExitFullScreen:) => on_window_did_exit_full_screen::<R> as extern fn(&Object, Sel, id),
|
||||
(windowWillExitFullScreen:) => on_window_will_exit_full_screen::<R> as extern fn(&Object, Sel, id),
|
||||
(windowDidFailToEnterFullScreen:) => on_window_did_fail_to_enter_full_screen as extern fn(&Object, Sel, id),
|
||||
(effectiveAppearanceDidChange:) => on_effective_appearance_did_change as extern fn(&Object, Sel, id),
|
||||
(effectiveAppearanceDidChangedOnMainThread:) => on_effective_appearance_did_changed_on_main_thread as extern fn(&Object, Sel, id)
|
||||
}))
|
||||
}
|
||||
}
|
||||
@@ -1,10 +1,8 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::time::SystemTime;
|
||||
|
||||
use log::info;
|
||||
use tauri::AppHandle;
|
||||
use tauri_plugin_dialog::DialogExt;
|
||||
use tauri_plugin_updater::UpdaterExt;
|
||||
use tauri::api::dialog;
|
||||
use tauri::{updater, AppHandle, Window};
|
||||
|
||||
use crate::is_dev;
|
||||
|
||||
@@ -19,28 +17,6 @@ pub struct YaakUpdater {
|
||||
pub enum UpdateMode {
|
||||
Stable,
|
||||
Beta,
|
||||
Alpha,
|
||||
}
|
||||
|
||||
impl Display for UpdateMode {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let s = match self {
|
||||
UpdateMode::Stable => "stable",
|
||||
UpdateMode::Beta => "beta",
|
||||
UpdateMode::Alpha => "alpha",
|
||||
};
|
||||
write!(f, "{}", s)
|
||||
}
|
||||
}
|
||||
|
||||
impl UpdateMode {
|
||||
pub fn new(mode: &str) -> UpdateMode {
|
||||
match mode {
|
||||
"beta" => UpdateMode::Beta,
|
||||
"alpha" => UpdateMode::Alpha,
|
||||
_ => UpdateMode::Stable,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl YaakUpdater {
|
||||
@@ -53,57 +29,64 @@ impl YaakUpdater {
|
||||
&mut self,
|
||||
app_handle: &AppHandle,
|
||||
mode: UpdateMode,
|
||||
) -> Result<bool, tauri_plugin_updater::Error> {
|
||||
) -> Result<bool, updater::Error> {
|
||||
self.last_update_check = SystemTime::now();
|
||||
|
||||
info!("Checking for updates mode={}", mode);
|
||||
let update_mode = get_update_mode_str(mode);
|
||||
let enabled = !is_dev();
|
||||
info!(
|
||||
"Checking for updates mode={} enabled={}",
|
||||
update_mode, enabled
|
||||
);
|
||||
|
||||
let update_check_result = app_handle
|
||||
.updater_builder()
|
||||
.header("X-Update-Mode", mode.to_string())?
|
||||
.build()?
|
||||
if !enabled {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
match app_handle
|
||||
.updater()
|
||||
.header("X-Update-Mode", update_mode)?
|
||||
.check()
|
||||
.await;
|
||||
|
||||
match update_check_result {
|
||||
Ok(Some(update)) => {
|
||||
.await
|
||||
{
|
||||
Ok(update) => {
|
||||
let h = app_handle.clone();
|
||||
app_handle
|
||||
.dialog()
|
||||
.message(format!(
|
||||
dialog::ask(
|
||||
None::<&Window>,
|
||||
"Update Available",
|
||||
format!(
|
||||
"{} is available. Would you like to download and install it now?",
|
||||
update.version
|
||||
))
|
||||
.ok_button_label("Download")
|
||||
.cancel_button_label("Later")
|
||||
.title("Update Available")
|
||||
.show(|confirmed| {
|
||||
update.latest_version()
|
||||
),
|
||||
|confirmed| {
|
||||
if !confirmed {
|
||||
return;
|
||||
}
|
||||
tauri::async_runtime::spawn(async move {
|
||||
match update.download_and_install(|_, _| {}, || {}).await {
|
||||
match update.download_and_install().await {
|
||||
Ok(_) => {
|
||||
if h.dialog()
|
||||
.message("Would you like to restart the app?")
|
||||
.title("Update Installed")
|
||||
.ok_button_label("Restart")
|
||||
.cancel_button_label("Later")
|
||||
.blocking_show()
|
||||
{
|
||||
if dialog::blocking::ask(
|
||||
None::<&Window>,
|
||||
"Update Installed",
|
||||
"Would you like to restart the app?",
|
||||
) {
|
||||
h.restart();
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
h.dialog()
|
||||
.message(format!("The update failed to install: {}", e));
|
||||
dialog::message(
|
||||
None::<&Window>,
|
||||
"Update Failed",
|
||||
format!("The update failed to install: {}", e),
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
},
|
||||
);
|
||||
Ok(true)
|
||||
}
|
||||
Ok(None) => Ok(false),
|
||||
Err(updater::Error::UpToDate) => Ok(false),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
@@ -111,18 +94,27 @@ impl YaakUpdater {
|
||||
&mut self,
|
||||
app_handle: &AppHandle,
|
||||
mode: UpdateMode,
|
||||
) -> Result<bool, tauri_plugin_updater::Error> {
|
||||
) -> Result<bool, updater::Error> {
|
||||
let ignore_check =
|
||||
self.last_update_check.elapsed().unwrap().as_secs() < MAX_UPDATE_CHECK_SECONDS;
|
||||
if ignore_check {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
// Don't check if dev
|
||||
if is_dev() {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
self.force_check(app_handle, mode).await
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_mode_from_str(mode: &str) -> UpdateMode {
|
||||
match mode {
|
||||
"beta" => UpdateMode::Beta,
|
||||
_ => UpdateMode::Stable,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_update_mode_str(mode: UpdateMode) -> &'static str {
|
||||
match mode {
|
||||
UpdateMode::Stable => "stable",
|
||||
UpdateMode::Beta => "beta",
|
||||
}
|
||||
}
|
||||
|
||||
src-tauri/src/window_ext.rs (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
use tauri::{Runtime, Window};
|
||||
|
||||
const TRAFFIC_LIGHT_OFFSET_X: f64 = 13.0;
|
||||
const TRAFFIC_LIGHT_OFFSET_Y: f64 = 18.0;
|
||||
|
||||
pub trait TrafficLightWindowExt {
|
||||
fn position_traffic_lights(&self);
|
||||
}
|
||||
|
||||
impl<R: Runtime> TrafficLightWindowExt for Window<R> {
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
fn position_traffic_lights(&self) {
|
||||
// No-op on other platforms
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
fn position_traffic_lights(&self) {
|
||||
use cocoa::appkit::{NSView, NSWindow, NSWindowButton};
|
||||
use cocoa::foundation::NSRect;
|
||||
|
||||
let window = self.ns_window().unwrap() as cocoa::base::id;
|
||||
|
||||
let x = TRAFFIC_LIGHT_OFFSET_X;
|
||||
let y = TRAFFIC_LIGHT_OFFSET_Y;
|
||||
|
||||
unsafe {
|
||||
let close = window.standardWindowButton_(NSWindowButton::NSWindowCloseButton);
|
||||
let miniaturize =
|
||||
window.standardWindowButton_(NSWindowButton::NSWindowMiniaturizeButton);
|
||||
let zoom = window.standardWindowButton_(NSWindowButton::NSWindowZoomButton);
|
||||
|
||||
let title_bar_container_view = close.superview().superview();
|
||||
|
||||
let close_rect: NSRect = msg_send![close, frame];
|
||||
let button_height = close_rect.size.height;
|
||||
|
||||
let title_bar_frame_height = button_height + y;
|
||||
let mut title_bar_rect = NSView::frame(title_bar_container_view);
|
||||
title_bar_rect.size.height = title_bar_frame_height;
|
||||
title_bar_rect.origin.y = NSView::frame(window).size.height - title_bar_frame_height;
|
||||
let _: () = msg_send![title_bar_container_view, setFrame: title_bar_rect];
|
||||
|
||||
let window_buttons = vec![close, miniaturize, zoom];
|
||||
let space_between = NSView::frame(miniaturize).origin.x - NSView::frame(close).origin.x;
|
||||
|
||||
for (i, button) in window_buttons.into_iter().enumerate() {
|
||||
let mut rect: NSRect = NSView::frame(button);
|
||||
rect.origin.x = x + (i as f64 * space_between);
|
||||
button.setFrameOrigin(rect.origin);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,145 +1,140 @@
|
||||
use tauri::menu::{
|
||||
AboutMetadata, Menu, MenuItemBuilder, PredefinedMenuItem, Submenu, HELP_SUBMENU_ID,
|
||||
WINDOW_SUBMENU_ID,
|
||||
};
|
||||
pub use tauri::AppHandle;
|
||||
use tauri::Wry;
|
||||
use tauri::{AboutMetadata, CustomMenuItem, Menu, MenuItem, Submenu};
|
||||
use crate::is_dev;
|
||||
|
||||
pub fn app_menu(app_handle: &AppHandle) -> tauri::Result<Menu<Wry>> {
|
||||
let pkg_info = app_handle.package_info();
|
||||
let config = app_handle.config();
|
||||
let about_metadata = AboutMetadata {
|
||||
name: Some(pkg_info.name.clone()),
|
||||
version: Some(pkg_info.version.to_string()),
|
||||
copyright: config.bundle.copyright.clone(),
|
||||
authors: config.bundle.publisher.clone().map(|p| vec![p]),
|
||||
..Default::default()
|
||||
};
|
||||
pub fn os_default(#[allow(unused)] app_name: &str) -> Menu {
|
||||
let mut menu = Menu::new();
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
menu = menu.add_submenu(Submenu::new(
|
||||
app_name,
|
||||
Menu::new()
|
||||
.add_native_item(MenuItem::About(
|
||||
app_name.to_string(),
|
||||
AboutMetadata::default(),
|
||||
))
|
||||
.add_native_item(MenuItem::Separator)
|
||||
.add_item(
|
||||
CustomMenuItem::new("toggle_settings".to_string(), "Settings")
|
||||
.accelerator("CmdOrCtrl+,"),
|
||||
)
|
||||
.add_native_item(MenuItem::Separator)
|
||||
.add_native_item(MenuItem::Services)
|
||||
.add_native_item(MenuItem::Separator)
|
||||
.add_native_item(MenuItem::Hide)
|
||||
.add_native_item(MenuItem::HideOthers)
|
||||
.add_native_item(MenuItem::ShowAll)
|
||||
.add_native_item(MenuItem::Separator)
|
||||
.add_native_item(MenuItem::Quit),
|
||||
));
|
||||
}
|
||||
|
||||
let window_menu = Submenu::with_id_and_items(
|
||||
app_handle,
|
||||
WINDOW_SUBMENU_ID,
|
||||
"Window",
|
||||
true,
|
||||
&[
|
||||
&PredefinedMenuItem::minimize(app_handle, None)?,
|
||||
&PredefinedMenuItem::maximize(app_handle, None)?,
|
||||
#[cfg(target_os = "macos")]
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&PredefinedMenuItem::close_window(app_handle, None)?,
|
||||
],
|
||||
)?;
|
||||
let mut file_menu = Menu::new();
|
||||
file_menu = file_menu.add_native_item(MenuItem::CloseWindow);
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
{
|
||||
file_menu = file_menu.add_native_item(MenuItem::Quit);
|
||||
}
|
||||
menu = menu.add_submenu(Submenu::new("File", file_menu));
|
||||
|
||||
let help_menu = Submenu::with_id_and_items(
|
||||
app_handle,
|
||||
HELP_SUBMENU_ID,
|
||||
"Help",
|
||||
true,
|
||||
&[
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
&PredefinedMenuItem::about(app_handle, None, Some(about_metadata))?,
|
||||
#[cfg(target_os = "macos")]
|
||||
&MenuItemBuilder::with_id("open_feedback".to_string(), "Give Feedback")
|
||||
.build(app_handle)?,
|
||||
],
|
||||
)?;
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
let mut edit_menu = Menu::new();
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::Undo);
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::Redo);
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::Separator);
|
||||
}
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
{
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::Cut);
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::Copy);
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::Paste);
|
||||
}
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
edit_menu = edit_menu.add_native_item(MenuItem::SelectAll);
|
||||
}
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
{
|
||||
menu = menu.add_submenu(Submenu::new("Edit", edit_menu));
|
||||
}
|
||||
let mut view_menu = Menu::new();
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
view_menu = view_menu
|
||||
.add_native_item(MenuItem::EnterFullScreen)
|
||||
.add_native_item(MenuItem::Separator);
|
||||
}
|
||||
view_menu = view_menu
|
||||
.add_item(
|
||||
CustomMenuItem::new("zoom_reset".to_string(), "Zoom to Actual Size")
|
||||
.accelerator("CmdOrCtrl+0"),
|
||||
)
|
||||
.add_item(
|
||||
CustomMenuItem::new("zoom_in".to_string(), "Zoom In").accelerator("CmdOrCtrl+Plus"),
|
||||
)
|
||||
.add_item(
|
||||
CustomMenuItem::new("zoom_out".to_string(), "Zoom Out").accelerator("CmdOrCtrl+-"),
|
||||
);
|
||||
// .add_native_item(MenuItem::Separator)
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("toggle_sidebar".to_string(), "Toggle Sidebar")
|
||||
// .accelerator("CmdOrCtrl+b"),
|
||||
// )
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("focus_sidebar".to_string(), "Focus Sidebar")
|
||||
// .accelerator("CmdOrCtrl+1"),
|
||||
// )
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("toggle_settings".to_string(), "Toggle Settings")
|
||||
// .accelerator("CmdOrCtrl+,"),
|
||||
// )
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("focus_url".to_string(), "Focus URL").accelerator("CmdOrCtrl+l"),
|
||||
// );
|
||||
menu = menu.add_submenu(Submenu::new("View", view_menu));
|
||||
|
||||
let menu = Menu::with_items(
|
||||
app_handle,
|
||||
&[
|
||||
#[cfg(target_os = "macos")]
|
||||
&Submenu::with_items(
|
||||
app_handle,
|
||||
pkg_info.name.clone(),
|
||||
true,
|
||||
&[
|
||||
&PredefinedMenuItem::about(app_handle, None, Some(about_metadata))?,
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&MenuItemBuilder::with_id("settings".to_string(), "Settings")
|
||||
.accelerator("CmdOrCtrl+,")
|
||||
.build(app_handle)?,
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&PredefinedMenuItem::services(app_handle, None)?,
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&PredefinedMenuItem::hide(app_handle, None)?,
|
||||
&PredefinedMenuItem::hide_others(app_handle, None)?,
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&PredefinedMenuItem::quit(app_handle, None)?,
|
||||
],
|
||||
)?,
|
||||
#[cfg(not(any(
|
||||
target_os = "linux",
|
||||
target_os = "dragonfly",
|
||||
target_os = "freebsd",
|
||||
target_os = "netbsd",
|
||||
target_os = "openbsd"
|
||||
)))]
|
||||
&Submenu::with_items(
|
||||
app_handle,
|
||||
"File",
|
||||
true,
|
||||
&[
|
||||
&PredefinedMenuItem::close_window(app_handle, None)?,
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
&PredefinedMenuItem::quit(app_handle, None)?,
|
||||
],
|
||||
)?,
|
||||
&Submenu::with_items(
|
||||
app_handle,
|
||||
"Edit",
|
||||
true,
|
||||
&[
|
||||
&PredefinedMenuItem::undo(app_handle, None)?,
|
||||
&PredefinedMenuItem::redo(app_handle, None)?,
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&PredefinedMenuItem::cut(app_handle, None)?,
|
||||
&PredefinedMenuItem::copy(app_handle, None)?,
|
||||
&PredefinedMenuItem::paste(app_handle, None)?,
|
||||
&PredefinedMenuItem::select_all(app_handle, None)?,
|
||||
],
|
||||
)?,
|
||||
&Submenu::with_items(
|
||||
app_handle,
|
||||
"View",
|
||||
true,
|
||||
&[
|
||||
#[cfg(target_os = "macos")]
|
||||
&PredefinedMenuItem::fullscreen(app_handle, None)?,
|
||||
#[cfg(target_os = "macos")]
|
||||
&PredefinedMenuItem::separator(app_handle)?,
|
||||
&MenuItemBuilder::with_id("zoom_reset".to_string(), "Zoom to Actual Size")
|
||||
.accelerator("CmdOrCtrl+0")
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id("zoom_in".to_string(), "Zoom In")
|
||||
.accelerator("CmdOrCtrl+=")
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id("zoom_out".to_string(), "Zoom Out")
|
||||
.accelerator("CmdOrCtrl+-")
|
||||
.build(app_handle)?,
|
||||
],
|
||||
)?,
|
||||
&window_menu,
|
||||
&help_menu,
|
||||
#[cfg(dev)]
|
||||
&Submenu::with_items(
|
||||
app_handle,
|
||||
"Develop",
|
||||
true,
|
||||
&[
|
||||
&MenuItemBuilder::with_id("dev.refresh".to_string(), "Refresh")
|
||||
.accelerator("CmdOrCtrl+Shift+r")
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id("dev.toggle_devtools".to_string(), "Open Devtools")
|
||||
.accelerator("CmdOrCtrl+Option+i")
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id("dev.reset_size".to_string(), "Reset Size")
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id("dev.generate_theme_css".to_string(), "Generate Theme CSS")
|
||||
.build(app_handle)?,
|
||||
],
|
||||
)?,
|
||||
],
|
||||
)?;
|
||||
let mut window_menu = Menu::new();
|
||||
window_menu = window_menu.add_native_item(MenuItem::Minimize);
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
window_menu = window_menu.add_native_item(MenuItem::Zoom);
|
||||
window_menu = window_menu.add_native_item(MenuItem::Separator);
|
||||
}
|
||||
window_menu = window_menu.add_native_item(MenuItem::CloseWindow);
|
||||
menu = menu.add_submenu(Submenu::new("Window", window_menu));
|
||||
|
||||
Ok(menu)
|
||||
// menu = menu.add_submenu(Submenu::new(
|
||||
// "Workspace",
|
||||
// Menu::new()
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("send_request".to_string(), "Send Request")
|
||||
// .accelerator("CmdOrCtrl+r"),
|
||||
// )
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("new_request".to_string(), "New Request")
|
||||
// .accelerator("CmdOrCtrl+n"),
|
||||
// )
|
||||
// .add_item(
|
||||
// CustomMenuItem::new("duplicate_request".to_string(), "Duplicate Request")
|
||||
// .accelerator("CmdOrCtrl+d"),
|
||||
// ),
|
||||
// ));
|
||||
|
||||
if is_dev() {
|
||||
menu = menu.add_submenu(Submenu::new(
|
||||
"Developer",
|
||||
Menu::new()
|
||||
.add_item(
|
||||
CustomMenuItem::new("refresh".to_string(), "Refresh")
|
||||
.accelerator("CmdOrCtrl + Shift + r"),
|
||||
)
|
||||
.add_item(
|
||||
CustomMenuItem::new("toggle_devtools".to_string(), "Open Devtools")
|
||||
.accelerator("CmdOrCtrl + Option + i"),
|
||||
),
|
||||
));
|
||||
}
|
||||
|
||||
menu
|
||||
}
|
||||
|
||||
@@ -1,13 +1,17 @@
{
  "productName": "Daak",
  "identifier": "app.yaak.desktop.dev",
  "bundle": {
    "icon": [
      "icons/dev/32x32.png",
      "icons/dev/128x128.png",
      "icons/dev/128x128@2x.png",
      "icons/dev/icon.icns",
      "icons/dev/icon.ico"
    ]
  "package": {
    "productName": "Daak"
  },
  "tauri": {
    "bundle": {
      "icon": [
        "icons/dev/32x32.png",
        "icons/dev/128x128.png",
        "icons/dev/128x128@2x.png",
        "icons/dev/icon.icns",
        "icons/dev/icon.ico"
      ],
      "identifier": "app.yaak.desktop.dev"
    }
  }
}
@@ -1,81 +1,113 @@
|
||||
{
|
||||
"productName": "yaak",
|
||||
"version": "2024.5.2",
|
||||
"identifier": "app.yaak.desktop",
|
||||
"build": {
|
||||
"beforeBuildCommand": "npm run build",
|
||||
"beforeDevCommand": "npm run dev",
|
||||
"devUrl": "http://localhost:1420",
|
||||
"frontendDist": "../dist"
|
||||
"devPath": "http://localhost:1420",
|
||||
"distDir": "../dist",
|
||||
"withGlobalTauri": false
|
||||
},
|
||||
"app": {
|
||||
"withGlobalTauri": false,
|
||||
"security": {
|
||||
"assetProtocol": {
|
||||
"enable": true,
|
||||
"scope": {
|
||||
"allow": [
|
||||
"$APPDATA/responses/*"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"package": {
|
||||
"productName": "Yaak",
|
||||
"version": "2024.3.1"
|
||||
},
|
||||
"plugins": {
|
||||
"deep-link": {
|
||||
"mobile": [],
|
||||
"desktop": {
|
||||
"schemes": [
|
||||
"yaak"
|
||||
"tauri": {
|
||||
"windows": [],
|
||||
"allowlist": {
|
||||
"all": false,
|
||||
"os": {
|
||||
"all": true
|
||||
},
|
||||
"protocol": {
|
||||
"assetScope": [
|
||||
"$APPDATA/responses/*"
|
||||
],
|
||||
"asset": true
|
||||
},
|
||||
"fs": {
|
||||
"readFile": true,
|
||||
"scope": [
|
||||
"$RESOURCE/*",
|
||||
"$APPDATA/responses/*"
|
||||
]
|
||||
},
|
||||
"shell": {
|
||||
"all": false,
|
||||
"open": true,
|
||||
"sidecar": true,
|
||||
"scope": [
|
||||
{ "name": "protoc", "sidecar": true,
|
||||
"args": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"window": {
|
||||
"close": true,
|
||||
"maximize": true,
|
||||
"minimize": true,
|
||||
"setDecorations": true,
|
||||
"setTitle": true,
|
||||
"startDragging": true,
|
||||
"unmaximize": true
|
||||
},
|
||||
"dialog": {
|
||||
"all": false,
|
||||
"open": true,
|
||||
"save": true
|
||||
},
|
||||
"path": {
|
||||
"all": true
|
||||
}
|
||||
},
|
||||
"bundle": {
|
||||
"active": true,
|
||||
"category": "DeveloperTool",
|
||||
"copyright": "",
|
||||
"externalBin": [
|
||||
"protoc-vendored/protoc"
|
||||
],
|
||||
"icon": [
|
||||
"icons/release/32x32.png",
|
||||
"icons/release/128x128.png",
|
||||
"icons/release/128x128@2x.png",
|
||||
"icons/release/icon.icns",
|
||||
"icons/release/icon.ico"
|
||||
],
|
||||
"identifier": "app.yaak.desktop",
|
||||
"longDescription": "The best cross-platform visual API client",
|
||||
"resources": [
|
||||
"migrations/*",
|
||||
"plugins/*"
|
||||
],
|
||||
"shortDescription": "The best API client",
|
||||
"targets": [
|
||||
"deb",
|
||||
"appimage",
|
||||
"nsis",
|
||||
"app",
|
||||
"dmg",
|
||||
"updater"
|
||||
],
|
||||
"deb": {
|
||||
"depends": []
|
||||
},
|
||||
"macOS": {
|
||||
"exceptionDomain": "",
|
||||
"entitlements": "macos/entitlements.plist",
|
||||
"frameworks": []
|
||||
},
|
||||
"windows": {
|
||||
"digestAlgorithm": "sha256",
|
||||
"timestampUrl": ""
|
||||
}
|
||||
},
|
||||
"security": {},
|
||||
"updater": {
|
||||
"active": true,
|
||||
"dialog": false,
|
||||
"endpoints": [
|
||||
"https://update.yaak.app/check/{{target}}/{{arch}}/{{current_version}}"
|
||||
],
|
||||
"pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IEMxRDJFREQ1MjExQjdGN0IKUldSN2Z4c2gxZTNTd1FHNCtmYnFXMHVVQzhuNkJOM1cwOFBodmdLall3ckhKenpKUytHSTR1MlkK"
|
||||
}
|
||||
},
|
||||
"bundle": {
|
||||
"active": true,
|
||||
"category": "DeveloperTool",
|
||||
"externalBin": [
|
||||
"protoc-vendored/protoc"
|
||||
],
|
||||
"icon": [
|
||||
"icons/release/32x32.png",
|
||||
"icons/release/128x128.png",
|
||||
"icons/release/128x128@2x.png",
|
||||
"icons/release/icon.icns",
|
||||
"icons/release/icon.ico"
|
||||
],
|
||||
"longDescription": "A cross-platform desktop app for interacting with REST, GraphQL, and gRPC",
|
||||
"resources": [
|
||||
"migrations/*",
|
||||
"plugins/*",
|
||||
"protoc-vendored/include/*"
|
||||
],
|
||||
"shortDescription": "Desktop API client",
|
||||
"targets": [
|
||||
"deb",
|
||||
"appimage",
|
||||
"nsis",
|
||||
"app",
|
||||
"dmg",
|
||||
"updater"
|
||||
],
|
||||
"iOS": {
|
||||
"developmentTeam": "7PU3P6ELJ8"
|
||||
},
|
||||
"macOS": {
|
||||
"exceptionDomain": "",
|
||||
"entitlements": "macos/entitlements.plist",
|
||||
"frameworks": []
|
||||
},
|
||||
"windows": {
|
||||
"digestAlgorithm": "sha256",
|
||||
"timestampUrl": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import { HelmetProvider } from 'react-helmet-async';
|
||||
import { AppRouter } from './AppRouter';
|
||||
|
||||
const queryClient = new QueryClient({
|
||||
logger: undefined,
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
retry: false,
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { createBrowserRouter, Navigate, RouterProvider, useParams } from 'react-router-dom';
|
||||
import { createBrowserRouter, Navigate, Outlet, RouterProvider, useParams } from 'react-router-dom';
|
||||
import { routePaths, useAppRoutes } from '../hooks/useAppRoutes';
|
||||
import { DefaultLayout } from './DefaultLayout';
|
||||
import { RedirectToLatestWorkspace } from './RedirectToLatestWorkspace';
|
||||
import { DialogProvider } from './DialogContext';
|
||||
import { GlobalHooks } from './GlobalHooks';
|
||||
import RouteError from './RouteError';
|
||||
import { Settings } from './Settings/Settings';
|
||||
import Workspace from './Workspace';
|
||||
import { RedirectToLatestWorkspace } from './RedirectToLatestWorkspace';
|
||||
|
||||
const router = createBrowserRouter([
|
||||
{
|
||||
@@ -37,12 +37,6 @@ const router = createBrowserRouter([
|
||||
path: '/workspaces/:workspaceId/environments/:environmentId/requests/:requestId',
|
||||
element: <RedirectLegacyEnvironmentURLs />,
|
||||
},
|
||||
{
|
||||
path: routePaths.workspaceSettings({
|
||||
workspaceId: ':workspaceId',
|
||||
}),
|
||||
element: <Settings />,
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
@@ -64,7 +58,7 @@ function RedirectLegacyEnvironmentURLs() {
|
||||
}>();
|
||||
const environmentId = rawEnvironmentId === '__default__' ? undefined : rawEnvironmentId;
|
||||
|
||||
let to;
|
||||
let to = '/';
|
||||
if (workspaceId != null && requestId != null) {
|
||||
to = routes.paths.request({ workspaceId, environmentId, requestId });
|
||||
} else if (workspaceId != null) {
|
||||
@@ -75,3 +69,12 @@ function RedirectLegacyEnvironmentURLs() {
|
||||
|
||||
return <Navigate to={to} />;
|
||||
}
|
||||
|
||||
function DefaultLayout() {
|
||||
return (
|
||||
<DialogProvider>
|
||||
<Outlet />
|
||||
<GlobalHooks />
|
||||
</DialogProvider>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ export function BasicAuth<T extends HttpRequest | GrpcRequest>({ request }: Prop
|
||||
const updateGrpcRequest = useUpdateGrpcRequest(request.id);
|
||||
|
||||
return (
|
||||
<VStack className="py-2 overflow-y-auto h-full" space={2}>
|
||||
<VStack className="my-2" space={2}>
|
||||
<Input
|
||||
useTemplating
|
||||
autocompleteVariables
|
||||
|
@@ -1,81 +0,0 @@
import { open } from '@tauri-apps/plugin-dialog';
import mime from 'mime';
import { useKeyValue } from '../hooks/useKeyValue';
import type { HttpRequest } from '../lib/models';
import { Banner } from './core/Banner';
import { Button } from './core/Button';
import { InlineCode } from './core/InlineCode';
import { HStack, VStack } from './core/Stacks';

type Props = {
  requestId: string;
  contentType: string | null;
  body: HttpRequest['body'];
  onChange: (body: HttpRequest['body']) => void;
  onChangeContentType: (contentType: string | null) => void;
};

export function BinaryFileEditor({
  contentType,
  body,
  onChange,
  onChangeContentType,
  requestId,
}: Props) {
  const ignoreContentType = useKeyValue<boolean>({
    namespace: 'global',
    key: ['ignore_content_type', requestId],
    fallback: false,
  });

  const handleClick = async () => {
    await ignoreContentType.set(false);
    const selected = await open({
      title: 'Select File',
      multiple: false,
    });
    if (selected == null) {
      return;
    }
    onChange({ filePath: selected.path });
  };

  const filePath = typeof body.filePath === 'string' ? body.filePath : undefined;
  const mimeType = mime.getType(filePath ?? '') ?? 'application/octet-stream';

  return (
    <VStack space={2}>
      <HStack space={2}>
        <Button variant="border" color="secondary" size="sm" onClick={handleClick}>
          Choose File
        </Button>
        <div className="text-sm font-mono truncate rtl pr-3 text-fg">
          {/* Special character to insert ltr text in rtl element without making things wonky */}
          ‎
          {filePath ?? 'Select File'}
        </div>
      </HStack>
      {filePath != null && mimeType !== contentType && !ignoreContentType.value && (
        <Banner className="mt-3 !py-5">
          <div className="mb-4 text-center">
            <div>Set Content-Type header</div>
            <InlineCode>{mimeType}</InlineCode> for current request?
          </div>
          <HStack space={1.5} justifyContent="center">
            <Button size="sm" variant="border" onClick={() => ignoreContentType.set(true)}>
              Ignore
            </Button>
            <Button
              variant="solid"
              color="primary"
              size="sm"
              onClick={() => onChangeContentType(mimeType)}
            >
              Set Header
            </Button>
          </HStack>
        </Banner>
      )}
    </VStack>
  );
}
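The deleted BinaryFileEditor above persists a per-request "ignore content type" flag through the `useKeyValue` hook. The hook's definition is not part of this diff, so the sketch below only mirrors the shape used here (namespace/key/fallback options plus `.value` and `.set()`); treat it as an inferred usage pattern, not the hook's actual API.

```tsx
// Sketch only: mirrors how the deleted component called useKeyValue; the hook's
// real return type in this repo may differ.
import { useKeyValue } from '../hooks/useKeyValue';

export function useIgnoreContentType(requestId: string) {
  // Persisted per-request flag, defaulting to false until a value is stored.
  const ignoreContentType = useKeyValue<boolean>({
    namespace: 'global',
    key: ['ignore_content_type', requestId],
    fallback: false,
  });

  return {
    ignored: ignoreContentType.value,
    ignore: () => ignoreContentType.set(true),
    reset: () => ignoreContentType.set(false),
  };
}
```
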
@@ -1,127 +0,0 @@
import classNames from 'classnames';
import type { ReactNode } from 'react';
import { useCallback, useMemo, useState } from 'react';
import { useActiveEnvironmentId } from '../hooks/useActiveEnvironmentId';
import { useAppRoutes } from '../hooks/useAppRoutes';
import { getRecentEnvironments } from '../hooks/useRecentEnvironments';
import { useRequests } from '../hooks/useRequests';
import { useWorkspaces } from '../hooks/useWorkspaces';
import { fallbackRequestName } from '../lib/fallbackRequestName';
import { Input } from './core/Input';

export function CommandPalette({ onClose }: { onClose: () => void }) {
  const [selectedIndex, setSelectedIndex] = useState<number>(0);
  const routes = useAppRoutes();
  const activeEnvironmentId = useActiveEnvironmentId();
  const workspaces = useWorkspaces();
  const requests = useRequests();
  const [command, setCommand] = useState<string>('');

  const items = useMemo<{ label: string; onSelect: () => void; key: string }[]>(() => {
    const items = [];
    for (const r of requests) {
      items.push({
        key: `switch-request-${r.id}`,
        label: `Switch Request → ${fallbackRequestName(r)}`,
        onSelect: () => {
          return routes.navigate('request', {
            workspaceId: r.workspaceId,
            requestId: r.id,
            environmentId: activeEnvironmentId ?? undefined,
          });
        },
      });
    }
    for (const w of workspaces) {
      items.push({
        key: `switch-workspace-${w.id}`,
        label: `Switch Workspace → ${w.name}`,
        onSelect: async () => {
          const environmentId = (await getRecentEnvironments(w.id))[0];
          return routes.navigate('workspace', {
            workspaceId: w.id,
            environmentId,
          });
        },
      });
    }
    return items;
  }, [activeEnvironmentId, requests, routes, workspaces]);

  const filteredItems = useMemo(() => {
    return items.filter((v) => v.label.toLowerCase().includes(command.toLowerCase()));
  }, [command, items]);

  const handleSelectAndClose = useCallback(
    (cb: () => void) => {
      onClose();
      cb();
    },
    [onClose],
  );

  const handleKeyDown = useCallback(
    (e: KeyboardEvent) => {
      if (e.key === 'ArrowDown') {
        setSelectedIndex((prev) => prev + 1);
      } else if (e.key === 'ArrowUp') {
        setSelectedIndex((prev) => prev - 1);
      } else if (e.key === 'Enter') {
        const item = filteredItems[selectedIndex];
        if (item) {
          handleSelectAndClose(item.onSelect);
        }
      }
    },
    [filteredItems, handleSelectAndClose, selectedIndex],
  );

  return (
    <div className="h-full grid grid-rows-[auto_minmax(0,1fr)]">
      <div className="px-2 py-2 w-full">
        <Input
          hideLabel
          name="command"
          label="Command"
          placeholder="Type a command"
          defaultValue=""
          onChange={setCommand}
          onKeyDown={handleKeyDown}
        />
      </div>
      <div className="h-full px-1.5 overflow-y-auto">
        {filteredItems.map((v, i) => (
          <CommandPaletteItem
            active={i === selectedIndex}
            key={v.key}
            onClick={() => handleSelectAndClose(v.onSelect)}
          >
            {v.label}
          </CommandPaletteItem>
        ))}
      </div>
    </div>
  );
}

function CommandPaletteItem({
  children,
  active,
  onClick,
}: {
  children: ReactNode;
  active: boolean;
  onClick: () => void;
}) {
  return (
    <button
      onClick={onClick}
      className={classNames(
        'w-full h-xs flex items-center rounded px-1.5 text-fg-subtle',
        active && 'bg-background-highlight-secondary text-fg',
      )}
    >
      {children}
    </button>
  );
}
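In the deleted CommandPalette above, ArrowUp/ArrowDown move `selectedIndex` without bounds, so the highlighted row can drift past the filtered results. Below is a hedged sketch of one common way to keep such an index clamped to the current list; it is an illustrative variation, not code from this repository.

```tsx
// Sketch only: an illustrative clamped-index helper, not the project's code.
import { useCallback, useState } from 'react';

export function useClampedIndex(itemCount: number) {
  const [index, setIndex] = useState(0);

  // Clamp to [0, itemCount - 1]; an empty list pins the index at 0.
  const move = useCallback(
    (delta: number) => {
      setIndex((prev) => Math.max(0, Math.min(itemCount - 1, prev + delta)));
    },
    [itemCount],
  );

  return { index: Math.min(index, Math.max(0, itemCount - 1)), move };
}
```
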
@@ -28,7 +28,7 @@ export const CookieDialog = function ({ cookieJarId }: Props) {

  return (
    <div className="pb-2">
      <table className="w-full text-sm mb-auto min-w-full max-w-full divide-y divide-background-highlight">
      <table className="w-full text-xs mb-auto min-w-full max-w-full divide-y">
        <thead>
          <tr>
            <th className="py-2 text-left">Domain</th>
@@ -36,13 +36,13 @@ export const CookieDialog = function ({ cookieJarId }: Props) {
            <th className="py-2 pl-4"></th>
          </tr>
        </thead>
        <tbody className="divide-y divide-background-highlight-secondary">
        <tbody className="divide-y">
          {cookieJar?.cookies.map((c) => (
            <tr key={c.domain + c.raw_cookie}>
              <td className="py-2 select-text cursor-text font-mono font-semibold max-w-0">
                {cookieDomain(c)}
              </td>
              <td className="py-2 pl-4 select-text cursor-text font-mono text-fg-subtle whitespace-nowrap overflow-x-auto max-w-[200px] hide-scrollbars">
              <td className="py-2 pl-4 select-text cursor-text font-mono text-gray-700 whitespace-nowrap overflow-x-auto max-w-[200px] hide-scrollbars">
                {c.raw_cookie}
              </td>
              <td className="max-w-0 w-10">
@@ -53,6 +53,11 @@ export const CookieDialog = function ({ cookieJarId }: Props) {
                  title="Delete"
                  className="ml-auto"
                  onClick={async () => {
                    console.log(
                      'DELETE COOKIE',
                      c,
                      cookieJar.cookies.filter((c2) => c2 !== c).length,
                    );
                    await updateCookieJar.mutateAsync({
                      ...cookieJar,
                      cookies: cookieJar.cookies.filter((c2) => c2 !== c),

@@ -2,14 +2,16 @@ import { useCreateDropdownItems } from '../hooks/useCreateDropdownItems';
import type { DropdownProps } from './core/Dropdown';
import { Dropdown } from './core/Dropdown';

interface Props extends Omit<DropdownProps, 'items'> {
interface Props {
  hideFolder?: boolean;
  children: DropdownProps['children'];
  openOnHotKeyAction?: DropdownProps['openOnHotKeyAction'];
}

export function CreateDropdown({ hideFolder, children, ...props }: Props) {
export function CreateDropdown({ hideFolder, children, openOnHotKeyAction }: Props) {
  const items = useCreateDropdownItems({ hideFolder, hideIcons: true });
  return (
    <Dropdown items={items} {...props}>
    <Dropdown openOnHotKeyAction={openOnHotKeyAction} items={items}>
      {children}
    </Dropdown>
  );

@@ -1,29 +0,0 @@
import { Outlet } from 'react-router-dom';
import { DialogProvider } from './DialogContext';
import { GlobalHooks } from './GlobalHooks';
import { ToastProvider } from './ToastContext';
import classNames from 'classnames';
import { useOsInfo } from '../hooks/useOsInfo';
import { motion } from 'framer-motion';

export function DefaultLayout() {
  const osInfo = useOsInfo();
  return (
    <DialogProvider>
      <ToastProvider>
        <motion.div
          initial={{ opacity: 0 }}
          animate={{ opacity: 1 }}
          transition={{ duration: 0.1, delay: 0.1 }}
          className={classNames(
            'w-full h-full',
            osInfo?.osType === 'linux' && 'border border-background-highlight-secondary',
          )}
        >
          <Outlet />
        </motion.div>
        <GlobalHooks />
      </ToastProvider>
    </DialogProvider>
  );
}
@@ -6,7 +6,7 @@ import { Dialog } from './core/Dialog';
type DialogEntry = {
  id: string;
  render: ({ hide }: { hide: () => void }) => React.ReactNode;
} & Omit<DialogProps, 'onClose' | 'open' | 'children'>;
} & Pick<DialogProps, 'title' | 'description' | 'hideX' | 'className' | 'size' | 'noPadding'>;

interface State {
  dialogs: DialogEntry[];

@@ -14,7 +14,7 @@ export const DropMarker = memo(
        'relative w-full h-0 overflow-visible pointer-events-none',
      )}
    >
      <div className="absolute z-50 left-2 right-2 -bottom-[0.1rem] h-[0.2rem] bg-fg-primary rounded-full" />
      <div className="absolute z-50 left-2 right-2 -bottom-[0.1rem] h-[0.2rem] bg-blue-500/50 rounded-full" />
    </div>
  );
},

@@ -1,6 +1,5 @@
import classNames from 'classnames';
import type { ReactNode } from 'react';
import React from 'react';

interface Props {
  children: ReactNode;
@@ -12,8 +11,7 @@ export function EmptyStateText({ children, className }: Props) {
    <div
      className={classNames(
        className,
        'rounded-lg border border-dashed border-background-highlight',
        'h-full py-2 text-fg-subtler flex items-center justify-center italic',
        'rounded-lg border border-dashed border-highlight h-full py-2 text-gray-400 flex items-center justify-center',
      )}
    >
      {children}

@@ -71,8 +71,8 @@ export const EnvironmentActionsDropdown = memo(function EnvironmentActionsDropdo
      size="sm"
      className={classNames(
        className,
        'text-fg !px-2 truncate',
        activeEnvironment == null && 'text-fg-subtler italic',
        'text-gray-800 !px-2 truncate',
        activeEnvironment == null && 'text-opacity-disabled italic',
      )}
      {...buttonProps}
    >

@@ -5,7 +5,6 @@ import { useActiveWorkspace } from '../hooks/useActiveWorkspace';
import { useCreateEnvironment } from '../hooks/useCreateEnvironment';
import { useDeleteEnvironment } from '../hooks/useDeleteEnvironment';
import { useEnvironments } from '../hooks/useEnvironments';
import { useKeyValue } from '../hooks/useKeyValue';
import { usePrompt } from '../hooks/usePrompt';
import { useUpdateEnvironment } from '../hooks/useUpdateEnvironment';
import { useUpdateWorkspace } from '../hooks/useUpdateWorkspace';
@@ -60,16 +59,14 @@ export const EnvironmentEditDialog = function ({ initialEnvironment }: Props) {
        <SidebarButton
          active={selectedEnvironment?.id == null}
          onClick={() => setSelectedEnvironmentId(null)}
          className="group"
          environment={null}
          rightSlot={
            <IconButton
              size="sm"
              iconSize="md"
              color="custom"
              title="Add sub environment"
              icon="plusCircle"
              iconClassName="text-fg-subtler group-hover:text-fg-subtle"
              className="group"
              iconClassName="text-gray-500 group-hover:text-gray-700"
              onClick={handleCreateEnvironment}
            />
          }
@@ -97,7 +94,7 @@ export const EnvironmentEditDialog = function ({ initialEnvironment }: Props) {
        secondSlot={() =>
          activeWorkspace != null && (
            <EnvironmentEditor
              className="pt-2 border-l border-background-highlight-secondary"
              className="pt-2 border-l border-highlight"
              environment={selectedEnvironment}
              workspace={activeWorkspace}
            />
@@ -116,11 +113,6 @@ const EnvironmentEditor = function ({
  workspace: Workspace;
  className?: string;
}) {
  const valueVisibility = useKeyValue<boolean>({
    namespace: 'global',
    key: 'environmentValueVisibility',
    fallback: true,
  });
  const environments = useEnvironments();
  const updateEnvironment = useUpdateEnvironment(environment?.id ?? null);
  const updateWorkspace = useUpdateWorkspace(workspace.id);
@@ -172,26 +164,15 @@ const EnvironmentEditor = function ({
  return (
    <VStack space={4} className={classNames(className, 'pl-4')}>
      <HStack space={2} className="justify-between">
        <Heading className="w-full flex items-center gap-1">
        <Heading className="w-full flex items-center">
          <div>{environment?.name ?? 'Global Variables'}</div>
          <IconButton
            iconClassName="text-fg-subtler"
            size="sm"
            icon={valueVisibility.value ? 'eye' : 'eyeClosed'}
            title={valueVisibility.value ? 'Hide Values' : 'Reveal Values'}
            onClick={() => {
              return valueVisibility.set((v) => !v);
            }}
          />
        </Heading>
      </HStack>
      <PairEditor
        className="pr-2"
        nameAutocomplete={nameAutocomplete}
        nameAutocompleteVariables={false}
        namePlaceholder="VAR_NAME"
        nameValidate={validateName}
        valueType={valueVisibility.value ? 'text' : 'password'}
        valueAutocompleteVariables={false}
        forceUpdateKey={environment?.id ?? workspace?.id ?? 'n/a'}
        pairs={variables}
@@ -235,8 +216,8 @@ function SidebarButton({
    <div
      className={classNames(
        className,
        'w-full grid grid-cols-[minmax(0,1fr)_auto] items-center gap-0.5',
        'px-2', // Padding to show focus border
        'w-full grid grid-cols-[minmax(0,1fr)_auto] items-center',
        'px-1', // Padding to show focus border
      )}
    >
      <Button
@@ -244,7 +225,7 @@ function SidebarButton({
        size="xs"
        className={classNames(
          'w-full',
          active ? 'text-fg bg-background-active' : 'text-fg-subtle hover:text-fg',
          active ? 'text-gray-800' : 'text-gray-600 hover:text-gray-700',
        )}
        justify="start"
        onClick={onClick}
@@ -281,7 +262,7 @@ function SidebarButton({
      },
    },
    {
      key: 'delete-environment',
      key: 'delete',
      variant: 'danger',
      label: 'Delete',
      leftSlot: <Icon icon="trash" size="sm" />,

@@ -1,118 +0,0 @@
import { invoke } from '@tauri-apps/api/core';
import { save } from '@tauri-apps/plugin-dialog';
import { useCallback, useMemo, useState } from 'react';
import slugify from 'slugify';
import type { Workspace } from '../lib/models';
import { count } from '../lib/pluralize';
import { Button } from './core/Button';
import { Checkbox } from './core/Checkbox';
import { HStack, VStack } from './core/Stacks';

interface Props {
  onHide: () => void;
  onSuccess: (path: string) => void;
  activeWorkspace: Workspace;
  workspaces: Workspace[];
}

export function ExportDataDialog({
  onHide,
  onSuccess,
  activeWorkspace,
  workspaces: allWorkspaces,
}: Props) {
  const [selectedWorkspaces, setSelectedWorkspaces] = useState<Record<string, boolean>>({
    [activeWorkspace.id]: true,
  });

  // Put active workspace first
  const workspaces = useMemo(
    () => [activeWorkspace, ...allWorkspaces.filter((w) => w.id !== activeWorkspace.id)],
    [activeWorkspace, allWorkspaces],
  );

  const handleToggleAll = () => {
    setSelectedWorkspaces(
      allSelected ? {} : workspaces.reduce((acc, w) => ({ ...acc, [w.id]: true }), {}),
    );
  };

  const handleExport = useCallback(async () => {
    const ids = Object.keys(selectedWorkspaces).filter((k) => selectedWorkspaces[k]);
    const workspace = ids.length === 1 ? workspaces.find((w) => w.id === ids[0]) : undefined;
    const slug = workspace ? slugify(workspace.name, { lower: true }) : 'workspaces';
    const exportPath = await save({
      title: 'Export Data',
      defaultPath: `yaak.${slug}.json`,
    });
    if (exportPath == null) {
      return;
    }

    await invoke('cmd_export_data', { workspaceIds: ids, exportPath });
    onHide();
    onSuccess(exportPath);
  }, [onHide, onSuccess, selectedWorkspaces, workspaces]);

  const allSelected = workspaces.every((w) => selectedWorkspaces[w.id]);
  const numSelected = Object.values(selectedWorkspaces).filter(Boolean).length;
  const noneSelected = numSelected === 0;
  return (
    <VStack space={3} className="w-full mb-3 px-4">
      <table className="w-full mb-auto min-w-full max-w-full divide-y">
        <thead>
          <tr>
            <th className="w-6 min-w-0 py-2 text-left pl-1">
              <Checkbox
                checked={allSelected}
                indeterminate={!allSelected && !noneSelected}
                hideLabel
                title="All workspaces"
                onChange={handleToggleAll}
              />
            </th>
            <th className="py-2 text-left pl-4" onClick={handleToggleAll}>
              Workspace
            </th>
          </tr>
        </thead>
        <tbody className="divide-y">
          {workspaces.map((w) => (
            <tr key={w.id}>
              <td className="min-w-0 py-1 pl-1">
                <Checkbox
                  checked={selectedWorkspaces[w.id] ?? false}
                  title={w.name}
                  hideLabel
                  onChange={() =>
                    setSelectedWorkspaces((prev) => ({ ...prev, [w.id]: !prev[w.id] }))
                  }
                />
              </td>
              <td
                className="py-1 pl-4 text-fg whitespace-nowrap overflow-x-auto hide-scrollbars"
                onClick={() => setSelectedWorkspaces((prev) => ({ ...prev, [w.id]: !prev[w.id] }))}
              >
                {w.name} {w.id === activeWorkspace.id ? '(current workspace)' : ''}
              </td>
            </tr>
          ))}
        </tbody>
      </table>
      <HStack space={2} justifyContent="end">
        <Button className="focus" variant="border" onClick={onHide}>
          Cancel
        </Button>
        <Button
          type="submit"
          className="focus"
          color="primary"
          disabled={noneSelected}
          onClick={() => handleExport()}
        >
          Export {count('Workspace', numSelected, { omitSingle: true, noneWord: 'Nothing' })}
        </Button>
      </HStack>
    </VStack>
  );
}
@@ -6,7 +6,7 @@ import { PairEditor } from './core/PairEditor';
type Props = {
  forceUpdateKey: string;
  body: HttpRequest['body'];
  onChange: (body: HttpRequest['body']) => void;
  onChange: (headers: HttpRequest['body']) => void;
};

export function FormMultipartEditor({ body, forceUpdateKey, onChange }: Props) {
@@ -16,7 +16,6 @@ export function FormMultipartEditor({ body, forceUpdateKey, onChange }: Props) {
        enabled: p.enabled,
        name: p.name,
        value: p.file ?? p.value,
        contentType: p.contentType,
        isFile: !!p.file,
      })),
    [body.form],
@@ -28,7 +27,6 @@ export function FormMultipartEditor({ body, forceUpdateKey, onChange }: Props) {
      form: pairs.map((p) => ({
        enabled: p.enabled,
        name: p.name,
        contentType: p.contentType,
        file: p.isFile ? p.value : undefined,
        value: p.isFile ? undefined : p.value,
      })),

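The two hunks above adjust how multipart form pairs round-trip through the editor: a stored pair's `file` takes precedence when mapping to the editor's `value`, and on the way back `isFile` decides whether the value is written to `file` or `value`, with `contentType` carried in both directions. A standalone sketch of that mapping follows; the `FormPart` and `EditorPair` shapes are hypothetical, inferred only from the fields visible in the diff.

```ts
// Sketch only: FormPart/EditorPair are hypothetical shapes inferred from the diff.
type FormPart = {
  enabled?: boolean;
  name?: string;
  value?: string;
  file?: string;
  contentType?: string;
};

type EditorPair = {
  enabled?: boolean;
  name?: string;
  value?: string;
  contentType?: string;
  isFile: boolean;
};

// Stored form -> editor rows: a file path, when present, is shown as the value.
export function toPairs(form: FormPart[]): EditorPair[] {
  return form.map((p) => ({
    enabled: p.enabled,
    name: p.name,
    value: p.file ?? p.value,
    contentType: p.contentType,
    isFile: !!p.file,
  }));
}

// Editor rows -> stored form: isFile decides whether value becomes file or value.
export function toForm(pairs: EditorPair[]): FormPart[] {
  return pairs.map((p) => ({
    enabled: p.enabled,
    name: p.name,
    contentType: p.contentType,
    file: p.isFile ? p.value : undefined,
    value: p.isFile ? undefined : p.value,
  }));
}
```
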
Some files were not shown because too many files have changed in this diff