Mirror of https://github.com/ysoftdevs/DependencyCheck.git
Synced 2026-01-15 16:23:37 +01:00

Compare commits (580 commits): cvedb-cach ... feature/ja
| Author | SHA1 | Date |
|---|---|---|
| | 3e5c8f28c8 | |
| | ... | |
| | 9da95e592c | |
.gitignore: 4 changed lines

@@ -1,6 +1,7 @@
*/target/**
# IntelliJ test run side-effects
dependency-check-core/data/
dependency-check-ant/data/
# Intellij project files
*.iml
*.ipr
@@ -15,6 +16,7 @@ maven-eclipse.xml
.pmd
# Netbeans configuration
nb-configuration.xml
**/nbproject/
/target/
#maven-shade-plugin generated pom
dependency-reduced-pom.xml
@@ -27,4 +29,4 @@ _site/**
#coverity
/cov-int/
/dependency-check-core/nbproject/
cov-scan.bat
cov-scan.bat
.travis.settings.xml: new file, 66 lines

@@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8"?>
<settings xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.1.0 http://maven.apache.org/xsd/settings-1.1.0.xsd" xmlns="http://maven.apache.org/SETTINGS/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<servers>
<server>
<username>${env.JFROG_USER}</username>
<password>${env.JFROG_PASSWORD}</password>
<id>release</id>
</server>
<server>
<username>${env.JFROG_USER}</username>
<password>${env.JFROG_PASSWORD}</password>
<id>snapshot</id>
</server>
<server>
<username>${env.JFROG_USER}</username>
<password>${env.JFROG_PASSWORD}</password>
<id>plugins-release</id>
</server>
<server>
<username>${env.JFROG_USER}</username>
<password>${env.JFROG_PASSWORD}</password>
<id>plugins-snapshot</id>
</server>
</servers>
<profiles>
<profile>
<repositories>
<repository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>release</id>
<name>libs-release</name>
<url>https://dependencycheck.jfrog.io/dependencycheck/libs-release</url>
</repository>
<repository>
<snapshots />
<id>snapshot</id>
<name>libs-snapshot</name>
<url>https://dependencycheck.jfrog.io/dependencycheck/libs-snapshot</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>plugins-release</id>
<name>plugins-release</name>
<url>https://dependencycheck.jfrog.io/dependencycheck/plugins-release</url>
</pluginRepository>
<pluginRepository>
<snapshots />
<id>plugins-snapshot</id>
<name>plugins-snapshot</name>
<url>https://dependencycheck.jfrog.io/dependencycheck/plugins-snapshot</url>
</pluginRepository>
</pluginRepositories>
<id>artifactory</id>
</profile>
</profiles>
<activeProfiles>
<activeProfile>artifactory</activeProfile>
</activeProfiles>
</settings>
.travis.yml: 73 changed lines

@@ -1,13 +1,78 @@
language: java
jdk: oraclejdk7
script: mvn install -DreleaseTesting
sudo: required
group: deprecated-2017Q4

env:
global:
secure: ZUzhWfpXJw/oAeDlUkDFkEJMT0T7kCN3d7ah8urkL2B0KFfKOqQagkbXkgvDa1SYud8VdcnoGa69LfkEr5IrdqW7R4bEYZAiN5swm4Z0iO8t53szVspm2f+O9jQ44O/sfOfpfLxWUUuhdc7Vbrszp+tSszxdPmssWL+f5a/mfWs=
- secure: "ZUzhWfpXJw/oAeDlUkDFkEJMT0T7kCN3d7ah8urkL2B0KFfKOqQagkbXkgvDa1SYud8VdcnoGa69LfkEr5IrdqW7R4bEYZAiN5swm4Z0iO8t53szVspm2f+O9jQ44O/sfOfpfLxWUUuhdc7Vbrszp+tSszxdPmssWL+f5a/mfWs="
- secure: "pmFymoI7qH0Kna3NkcHrqLiTVWKmrhwqA4Z9U6XLhWDQxcs5g94wCCKpGB6Lkz9mkvRxBRFpZZelnXJa9W9mnuVOMIa5tQfS5gBuaNXOe7AXXdc+Y2975OR9sSfvf16FxLFvNJILmZq+bpMLs+EXaQvjYQHW2O6OWZdLhAPVG6A="
- secure: "omj5HP2wKdegLYp8/a24Wsoryb92+XYWheEkxp7CzHGDJB1Y4SSr315n/na/mdgd7lr1Ac+m4stYfCrclG7be71xWs6ApF+6I5QSzplJ1fyIF5piHrmhgw6ymIf/HBdeevggJM8igD8agCOwEETYFKfPEj5wFWhNQfxYwANbpl0="
- secure: "FqPcda7a6rEvGVYEyWeaFP+mIhZeJ6FGSdHvVRlBL0H9I3bz6eZg50g6DH3yo1bkmTPQ94eXdDpoKihk9+CDLl0TS+Sg9W8HplG3B2U1/6Yi3vd0T8yjKZC7xf0VZO6t8AT9vpFvzQBRZe24n+6kDtp2OiBzawJhgU5t09zH6is="
- secure: "Bh5LAk8XQnJ885jc/Lli2fhPKDx0TNZRxcJMnNo96EgwOnD+Zhw+v3u/DMCgyyrRToM8Bkca/HktrlZaRTk2htsdKZZ3RHFMCXO0fXCgpcf+wkaSYDF/lnErpSJG3Lrz8ILxJPODsrGhjaIg2++79lwhsBYtpujc6UdxFhgpffc="

addons:
sonarcloud:
organization: "odc"
token:
secure: "YVDnYmonPug885Hmr2pLWBko+rQ+oKyTUA95ry0PGGyfgs0z6kPCjmWBDVm7K4GM7NOluldWb5gLMf0QXoHGstdp9L6fQCQElt8hZMOwJf+IR3bWjiG3VfVyyB3gJWBWlcJFM9NVyfICidwBH5ZiJ0+LXhKUgnNqarTh/YmNj9w="

cache:
directories:
- "$HOME/.m2/repository"
- "$HOME/.sonar/cache"

before_install:
- sudo apt-get install jq
- wget -O ~/codacy-coverage-reporter-assembly-latest.jar $(curl https://api.github.com/repos/codacy/codacy-coverage-reporter/releases/latest | jq -r .assets[0].browser_download_url)

matrix:
include:
- jdk: openjdk7
env:
- JDK="JDK7"
script:
- if [ ! -z "$TRAVIS_TAG" ]; then travis_wait 60 mvn install site site:stage -DreleaseTesting; else travis_wait 60 mvn install -DreleaseTesting; fi
- jdk: oraclejdk8
env:
- JDK="JDK8"
script:
- travis_wait 60 mvn install -DreleaseTesting

after_success:
- java -cp ~/codacy-coverage-reporter-assembly-latest.jar com.codacy.CodacyCoverageReporter -l Java -r build-reporting/target/site/jacoco-aggregate/jacoco.xml
- if [ "$JDK" == "JDK8" ]; then
java -cp ~/codacy-coverage-reporter-assembly-latest.jar com.codacy.CodacyCoverageReporter -l Java -r build-reporting/target/coverage-reports/jacoco.xml;
mvn sonar:sonar -Dsonar.java.coveragePlugin=jacoco -Dsonar.jacoco.reportPath=build-reporting/target/jacoco.xml;
./coverity_scan.sh;
fi;

after_failure:
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/617-hierarchical-cross-deps/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/618-aggregator-purge/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/618-aggregator-update-only/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/629-jackson-dataformat/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/690-threadsafety/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/710-pom-parse-error/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/729-system-scope-resolved/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/729-system-scope-skipped/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/730-multiple-suppression-files/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/730-multiple-suppression-files-configs/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/815-broken-suppression-aggregate/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/846-site-plugin/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/false-positives/build.log

deploy:
- provider: script
script: mvn --settings .travis.settings.xml source:jar javadoc:jar package deploy -DskipTests=true
skip_cleanup: true
on:
branch: master
jdk: openjdk7
- provider: pages
skip_cleanup: true
local_dir: target/staging
github_token: $GITHUB_TOKEN
on:
tags: true
branch: master
jdk: openjdk7
Dockerfile: 35 changed lines

@@ -1,4 +1,4 @@
FROM java:8
FROM openjdk:8-jre-slim

MAINTAINER Timo Pagel <dependencycheckmaintainer@timo-pagel.de>

@@ -6,24 +6,31 @@ ENV user=dependencycheck
ENV version_url=https://jeremylong.github.io/DependencyCheck/current.txt
ENV download_url=https://dl.bintray.com/jeremy-long/owasp

RUN wget -O /tmp/current.txt ${version_url} && \
version=$(cat /tmp/current.txt) && \
file="dependency-check-${version}-release.zip" && \
wget "$download_url/$file" && \
unzip ${file} && \
rm ${file} && \
mv dependency-check /usr/share/

RUN useradd -ms /bin/bash ${user} && \
chown -R ${user}:${user} /usr/share/dependency-check && \
mkdir /report && \
chown -R ${user}:${user} /report
RUN apt-get update && \
apt-get install -y --no-install-recommends wget ruby mono-runtime && \
gem install bundle-audit && \
gem cleanup

RUN wget -O /tmp/current.txt ${version_url} && \
version=$(cat /tmp/current.txt) && \
file="dependency-check-${version}-release.zip" && \
wget "$download_url/$file" && \
unzip ${file} && \
rm ${file} && \
mv dependency-check /usr/share/ && \
useradd -ms /bin/bash ${user} && \
chown -R ${user}:${user} /usr/share/dependency-check && \
mkdir /report && \
chown -R ${user}:${user} /report && \
apt-get remove --purge -y wget && \
apt-get autoremove -y && \
rm -rf /var/lib/apt/lists/* /tmp/*

USER ${user}

VOLUME ["/src" "/usr/share/dependency-check/data" "/report"]

WORKDIR /report
WORKDIR /src

CMD ["--help"]
ENTRYPOINT ["/usr/share/dependency-check/bin/dependency-check.sh"]
README.md: 115 changed lines

@@ -1,4 +1,4 @@
[](https://travis-ci.org/jeremylong/DependencyCheck) [](https://scan.coverity.com/projects/dependencycheck) [](https://www.codacy.com/app/jeremylong/DependencyCheck?utm_source=github.com&utm_medium=referral&utm_content=jeremylong/DependencyCheck&utm_campaign=Badge_Grade) [](https://www.apache.org/licenses/LICENSE-2.0.txt)
[](https://travis-ci.org/jeremylong/DependencyCheck) [](https://scan.coverity.com/projects/dependencycheck) [](https://www.codacy.com/app/jeremylong/DependencyCheck?utm_source=github.com&utm_medium=referral&utm_content=jeremylong/DependencyCheck&utm_campaign=Badge_Grade) [](https://bestpractices.coreinfrastructure.org/projects/843) [](https://www.apache.org/licenses/LICENSE-2.0.txt)

[](https://www.toolswatch.org/2015/06/black-hat-arsenal-usa-2015-speakers-lineup/) [](https://www.toolswatch.org/2014/06/black-hat-usa-2014-arsenal-tools-speaker-list/) [](https://www.toolswatch.org/2013/06/announcement-blackhat-arsenal-usa-2013-selected-tools/)

@@ -42,7 +42,10 @@ $ dependency-check --project Testing --out . --scan [path to jar files to be sca
### Maven Plugin

More detailed instructions can be found on the [dependency-check-maven github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-maven).
The plugin can be configured using the following:
By default, the plugin is tied to the `verify` phase (i.e. `mvn verify`). Alternatively,
one can directly invoke the plugin via `mvn org.owasp:dependency-check-maven:check`.

The dependency-check plugin can be configured using the following:

```xml
<project>
@@ -97,7 +100,7 @@ On Windows
> .\dependency-check-cli\target\release\bin\dependency-check.bat --project Testing --out . --scan ./src/test/resources
```

Then load the resulting 'DependencyCheck-Report.html' into your favorite browser.
Then load the resulting 'dependency-check-report.html' into your favorite browser.

### Docker

@@ -136,6 +139,110 @@ docker run --rm \
```


Upgrade Notes
-------------

### Upgrading from **1.x.x** to **2.x.x**

Note that when upgrading from version 1.x.x that the following changes will need to be made to your configuration.

#### Suppression file

In order to support multiple suppression files, the mechanism for configuring suppression files has changed.
As such, users that have defined a suppression file in their configuration will need to update.

See the examples below:

##### Ant

Old:

```xml
<dependency-check
failBuildOnCVSS="3"
suppressionFile="suppression.xml">
</dependency-check>
```

New:

```xml
<dependency-check
failBuildOnCVSS="3">
<suppressionFile path="suppression.xml" />
</dependency-check>
```

##### Maven

Old:

```xml
<plugin>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-maven</artifactId>
<configuration>
<suppressionFile>suppression.xml</suppressionFile>
</configuration>
</plugin>
```

New:

```xml
<plugin>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-maven</artifactId>
<configuration>
<suppressionFiles>
<suppressionFile>suppression.xml</suppressionFile>
</suppressionFiles>
</configuration>
</plugin>
```

### Gradle

In addition to the changes to the suppression file, the task `dependencyCheck` has been
renamed to `dependencyCheckAnalyze`.

Old:

```groovy
buildscript {
repositories {
mavenLocal()
}
dependencies {
classpath 'org.owasp:dependency-check-gradle:2.0.1-SNAPSHOT'
}
}
apply plugin: 'org.owasp.dependencycheck'

dependencyCheck {
suppressionFile='path/to/suppression.xml'
}
check.dependsOn dependencyCheckAnalyze
```

New:
```groovy
buildscript {
repositories {
mavenLocal()
}
dependencies {
classpath 'org.owasp:dependency-check-gradle:2.0.1-SNAPSHOT'
}
}
apply plugin: 'org.owasp.dependencycheck'

dependencyCheck {
suppressionFiles = ['path/to/suppression1.xml', 'path/to/suppression2.xml']
}
check.dependsOn dependencyCheckAnalyze
```

Mailing List
------------

@@ -148,7 +255,7 @@ Archive: [google group](https://groups.google.com/forum/#!forum/dependency-check
Copyright & License
-

Dependency-Check is Copyright (c) 2012-2016 Jeremy Long. All Rights Reserved.
Dependency-Check is Copyright (c) 2012-2017 Jeremy Long. All Rights Reserved.

Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/LICENSE.txt) file for the full license.
RELEASE_NOTES.md: new file, 61 lines

@@ -0,0 +1,61 @@
# Release Notes

Please see the [dependency-check google group](https://groups.google.com/forum/#!forum/dependency-check) for the release notes on versions not listed below.

## [Version 3.1.1](https://github.com/jeremylong/DependencyCheck/releases/tag/v3.1.1) (2018-01-29)

### Bug fixes

- Fixed the Central Analyzer to use the updated SHA1 query syntax.
- Reverted change that broke Maven 3.1.0 compatability; Maven 3.1.0 and beyond is once again supported.
- False positive reduction.
- Minor documentation cleanup.


## [Version 3.1.0](https://github.com/jeremylong/DependencyCheck/releases/tag/v3.1.0) (2018-01-02)

### Enhancements

- Major enhancements to the Node and NSP analyzer - the analyzers are now considered
production ready and should be used in combination.
- Added a shutdown hook so that if the update process is interrupted while using an H2
database the lock files will be properly removed allowing future executions of ODC to
succeed.
- UNC paths can now be scanned using the CLI.
- Batch updates are now used which may help with the update speed when using some DBMS
instead of the embedded H2.
- Upgrade Lucene to 5.5.5, the highest version that will allow us to maintain Java 7 support

### Bug fixes

- Fixed the CSV report output to correctly list all fields.
- Invalid suppression files will now break the build instead of causing ODC to
skip the usage of the suppression analyzer.
- Fixed bug in Lucene query where LARGE entries in the pom.xml or manifest caused
the query to break.
- General cleanup, false positive, and false negative reduction.

## [Version 3.0.2](https://github.com/jeremylong/DependencyCheck/releases/tag/v3.0.2) (2017-11-13)

### Bug fixes

- Updated the query format for the CentralAnalyzer; the old format caused the CentralAnalyzer to fail

## [Version 3.0.1](https://github.com/jeremylong/DependencyCheck/releases/tag/v3.0.1) (2017-10-20)

### Bug fixes

- Fixed a database connection issue that affected some usages.

## [Version 3.0.0](https://github.com/jeremylong/DependencyCheck/releases/tag/v3.0.0) (2017-10-16)

- Several bug fixes and false positive reduction
- The 2.x branch introduced several new false positives – but also reduced the false negatives
- Java 9 compatibility update
- Stability issues with the Central Analyzer resolved
- This comes at a cost of a longer analysis time
- The CSV report now includes the GAV and CPE
- The Hint Analyzer now supports regular expressions
- If show summary is disabled and vulnerable libraries are found that fail the build details are no longer displayed in the console – only that vulnerable libraries were identified
- Resolved issues with threading and multiple connections to the embedded H2 database
- This allows the Jenkins pipeline, Maven Plugin, etc. to safely run parallel executions of dependency-check
dependency-check-ant pom.xml

@@ -20,7 +20,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>1.4.6-SNAPSHOT</version>
<version>3.1.2-SNAPSHOT</version>
</parent>

<artifactId>dependency-check-ant</artifactId>
@@ -28,15 +28,6 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.

<name>Dependency-Check Ant Task</name>
<description>dependency-check-ant is an Ant Task that uses dependency-check-core to detect publicly disclosed vulnerabilities associated with the project's dependencies. The task will generate a report listing the dependency, any identified Common Platform Enumeration (CPE) identifiers, and the associated Common Vulnerability and Exposure (CVE) entries.</description>
<!-- begin copy from http://minds.coremedia.com/2012/09/11/problem-solved-deploy-multi-module-maven-project-site-as-github-pages/ -->
<distributionManagement>
<site>
<id>github-pages-site</id>
<name>Deployment through GitHub's site deployment plugin</name>
<url>${basedir}/../target/site/${project.version}/dependency-check-ant</url>
</site>
</distributionManagement>
<!-- end copy -->
<build>
<resources>
<resource>
@@ -225,42 +216,6 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
</plugin>
</plugins>
</build>
<reporting>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>${reporting.checkstyle-plugin.version}</version>
<configuration>
<enableRulesSummary>false</enableRulesSummary>
<enableFilesSummary>false</enableFilesSummary>
<configLocation>${basedir}/../src/main/config/checkstyle-checks.xml</configLocation>
<headerLocation>${basedir}/../src/main/config/checkstyle-header.txt</headerLocation>
<suppressionsLocation>${basedir}/../src/main/config/checkstyle-suppressions.xml</suppressionsLocation>
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-pmd-plugin</artifactId>
<version>${reporting.pmd-plugin.version}</version>
<configuration>
<targetJdk>1.6</targetJdk>
<linkXRef>true</linkXRef>
<sourceEncoding>utf-8</sourceEncoding>
<excludes>
<exclude>**/generated/*.java</exclude>
</excludes>
<rulesets>
<ruleset>../src/main/config/dcrules.xml</ruleset>
<ruleset>/rulesets/java/basic.xml</ruleset>
<ruleset>/rulesets/java/imports.xml</ruleset>
<ruleset>/rulesets/java/unusedcode.xml</ruleset>
</rulesets>
</configuration>
</plugin>
</plugins>
</reporting>
<dependencies>
<dependency>
<groupId>org.owasp</groupId>
Check.java

@@ -18,7 +18,10 @@
package org.owasp.dependencycheck.taskdefs;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.concurrent.NotThreadSafe;

import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.types.EnumeratedAttribute;
@@ -28,16 +31,13 @@ import org.apache.tools.ant.types.ResourceCollection;
import org.apache.tools.ant.types.resources.FileProvider;
import org.apache.tools.ant.types.resources.Resources;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.exception.ExceptionCollection;
import org.owasp.dependencycheck.exception.ReportException;
import org.owasp.dependencycheck.reporting.ReportGenerator;
import org.owasp.dependencycheck.reporting.ReportGenerator.Format;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.impl.StaticLoggerBinder;
@@ -47,6 +47,7 @@ import org.slf4j.impl.StaticLoggerBinder;
*
* @author Jeremy Long
*/
@NotThreadSafe
public class Check extends Update {

/**
@@ -61,6 +62,11 @@ public class Check extends Update {
* Whether or not the Node.js Analyzer is enabled.
*/
private Boolean nodeAnalyzerEnabled;
/**
* Whether or not the NSP Analyzer is enabled.
*/
private Boolean nspAnalyzerEnabled;

/**
* Whether or not the Ruby Bundle Audit Analyzer is enabled.
*/
@@ -146,14 +152,20 @@ public class Check extends Update {
private boolean updateOnly = false;

/**
* The report format to be generated (HTML, XML, VULN, CSV, JSON, ALL). Default is
* HTML.
* The report format to be generated (HTML, XML, VULN, CSV, JSON, ALL).
* Default is HTML.
*/
private String reportFormat = "HTML";
/**
* The path to the suppression file.
* Suppression file path.
*/
private String suppressionFile;
private String suppressionFile = null;
/**
* Suppression file paths.
*/
@SuppressWarnings("CanBeFinal")
private List<String> suppressionFiles = new ArrayList<>();

/**
* The path to the suppression file.
*/
@@ -166,6 +178,10 @@ public class Check extends Update {
* Whether experimental analyzers are enabled.
*/
private Boolean enableExperimental;
/**
* Whether retired analyzers are enabled.
*/
private Boolean enableRetired;
/**
* Whether or not the Jar Analyzer is enabled.
*/
@@ -228,6 +244,17 @@ public class Check extends Update {
getPath().add(rc);
}

/**
* Add a suppression file.
*
* This is called by Ant with the configured {@link SuppressionFile}.
*
* @param suppressionFile the suppression file to add.
*/
public void addConfiguredSuppressionFile(final SuppressionFile suppressionFile) {
suppressionFiles.add(suppressionFile.getPath());
}

/**
* Returns the path. If the path has not been initialized yet, this class is
* synchronized, and will instantiate the path object.
@@ -434,12 +461,12 @@ public class Check extends Update {
}

/**
* Get the value of suppressionFile.
* Gets suppression file paths.
*
* @return the value of suppressionFile
* @return the suppression files.
*/
public String getSuppressionFile() {
return suppressionFile;
public List<String> getSuppressionFiles() {
return suppressionFiles;
}

/**
@@ -449,6 +476,7 @@ public class Check extends Update {
*/
public void setSuppressionFile(String suppressionFile) {
this.suppressionFile = suppressionFile;
suppressionFiles.add(suppressionFile);
}

/**
@@ -505,6 +533,24 @@ public class Check extends Update {
this.enableExperimental = enableExperimental;
}

/**
* Get the value of enableRetired.
*
* @return the value of enableRetired
*/
public Boolean isEnableRetired() {
return enableRetired;
}

/**
* Set the value of enableRetired.
*
* @param enableRetired new value of enableRetired
*/
public void setEnableRetired(Boolean enableRetired) {
this.enableRetired = enableRetired;
}

/**
* Returns whether or not the analyzer is enabled.
*
@@ -741,6 +787,24 @@ public class Check extends Update {
this.nodeAnalyzerEnabled = nodeAnalyzerEnabled;
}

/**
* Get the value of nspAnalyzerEnabled.
*
* @return the value of nspAnalyzerEnabled
*/
public Boolean isNspAnalyzerEnabled() {
return nspAnalyzerEnabled;
}

/**
* Set the value of nspAnalyzerEnabled.
*
* @param nspAnalyzerEnabled new value of nspAnalyzerEnabled
*/
public void setNspAnalyzerEnabled(Boolean nspAnalyzerEnabled) {
this.nspAnalyzerEnabled = nspAnalyzerEnabled;
}

/**
* Get the value of rubygemsAnalyzerEnabled.
*
@@ -909,9 +973,7 @@ public class Check extends Update {
dealWithReferences();
validateConfiguration();
populateSettings();
Engine engine = null;
try {
engine = new Engine(Check.class.getClassLoader());
try (Engine engine = new Engine(Check.class.getClassLoader(), getSettings())) {
if (isUpdateOnly()) {
log("Deprecated 'UpdateOnly' property set; please use the UpdateTask instead", Project.MSG_WARN);
try {
@@ -940,16 +1002,7 @@ public class Check extends Update {
throw new BuildException(ex);
}
}
DatabaseProperties prop = null;
try (CveDB cve = CveDB.getInstance()) {
prop = cve.getDatabaseProperties();
} catch (DatabaseException ex) {
//TODO shouldn't this be a fatal exception
log("Unable to retrieve DB Properties", ex, Project.MSG_DEBUG);
}

final ReportGenerator reporter = new ReportGenerator(getProjectName(), engine.getDependencies(), engine.getAnalyzers(), prop);
reporter.generateReports(reportOutputDirectory, reportFormat);
engine.writeReports(getProjectName(), new File(reportOutputDirectory), reportFormat);

if (this.failBuildOnCVSS <= 10) {
checkForFailure(engine.getDependencies());
@@ -970,11 +1023,6 @@ public class Check extends Update {
throw new BuildException(msg, ex);
}
log(msg, ex, Project.MSG_ERR);
} finally {
Settings.cleanup(true);
if (engine != null) {
engine.cleanup();
}
}
}

@@ -984,8 +1032,8 @@ public class Check extends Update {
*
* @throws BuildException if the task was not configured correctly.
*/
private void validateConfiguration() throws BuildException {
if (getPath() == null) {
private synchronized void validateConfiguration() throws BuildException {
if (path == null) {
throw new BuildException("No project dependencies have been defined to analyze.");
}
if (failBuildOnCVSS < 0 || failBuildOnCVSS > 11) {
@@ -1003,32 +1051,34 @@ public class Check extends Update {
@Override
protected void populateSettings() throws BuildException {
super.populateSettings();
Settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, enableExperimental);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_JAR_ENABLED, jarAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, pyDistributionAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, pyPackageAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, rubygemsAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, opensslAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_CMAKE_ENABLED, cmakeAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, swiftPackageManagerAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, cocoapodsAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, bundleAuditAnalyzerEnabled);
Settings.setStringIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, bundleAuditPath);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, autoconfAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, composerAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, nodeAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, nuspecAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, archiveAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, assemblyAnalyzerEnabled);
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
Settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
getSettings().setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
getSettings().setArrayIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFiles.toArray(new String[suppressionFiles.size()]));
getSettings().setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, enableExperimental);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_RETIRED_ENABLED, enableRetired);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_JAR_ENABLED, jarAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, pyDistributionAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, pyPackageAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, rubygemsAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, opensslAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_CMAKE_ENABLED, cmakeAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, swiftPackageManagerAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, cocoapodsAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, bundleAuditAnalyzerEnabled);
getSettings().setStringIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, bundleAuditPath);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, autoconfAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, composerAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, nodeAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NSP_PACKAGE_ENABLED, nspAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, nuspecAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, archiveAnalyzerEnabled);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, assemblyAnalyzerEnabled);
getSettings().setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
getSettings().setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
getSettings().setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
}

/**
@@ -1039,7 +1089,7 @@ public class Check extends Update {
* @throws BuildException thrown if a CVSS score is found that is higher
* than the threshold set
*/
private void checkForFailure(List<Dependency> dependencies) throws BuildException {
private void checkForFailure(Dependency[] dependencies) throws BuildException {
final StringBuilder ids = new StringBuilder();
for (Dependency d : dependencies) {
for (Vulnerability v : d.getVulnerabilities()) {
@@ -1053,9 +1103,16 @@ public class Check extends Update {
}
}
if (ids.length() > 0) {
final String msg = String.format("%n%nDependency-Check Failure:%n"
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than '%.1f': %s%n"
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString());
final String msg;
if (showSummary) {
msg = String.format("%n%nDependency-Check Failure:%n"
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than or equal to '%.1f': %s%n"
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString());
} else {
msg = String.format("%n%nDependency-Check Failure:%n"
+ "One or more dependencies were identified with vulnerabilities.%n%n"
+ "See the dependency-check report for more details.%n%n");
}
throw new BuildException(msg);
}
}
@@ -1066,12 +1123,12 @@ public class Check extends Update {
*
* @param dependencies a list of dependency objects
*/
private void showSummary(List<Dependency> dependencies) {
private void showSummary(Dependency[] dependencies) {
final StringBuilder summary = new StringBuilder();
for (Dependency d : dependencies) {
boolean firstEntry = true;
final StringBuilder ids = new StringBuilder();
for (Vulnerability v : d.getVulnerabilities()) {
for (Vulnerability v : d.getVulnerabilities(true)) {
if (firstEntry) {
firstEntry = false;
} else {
@@ -1102,8 +1159,8 @@ public class Check extends Update {
}

/**
* An enumeration of supported report formats: "ALL", "HTML", "XML", "CSV", "JSON", "VULN",
* etc..
* An enumeration of supported report formats: "ALL", "HTML", "XML", "CSV",
* "JSON", "VULN", etc..
*/
public static class ReportFormats extends EnumeratedAttribute {
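The execute() hunks above replace the hand-managed Engine (and the separate CveDB/DatabaseProperties/ReportGenerator plumbing) with a try-with-resources block and a single Engine#writeReports call. A minimal sketch of the resulting flow, using only the calls that appear in this diff; the exception handling shown is illustrative rather than the task's exact logic:

```java
// Sketch of the pattern introduced above, not the full execute() method.
// getSettings(), getProjectName(), reportOutputDirectory, reportFormat,
// failBuildOnCVSS and checkForFailure() belong to the surrounding Ant task,
// as shown in the hunks.
private void runCheck() throws BuildException {
    // The Engine now receives the task-scoped Settings and is closed automatically,
    // so the old "finally { Settings.cleanup(true); engine.cleanup(); }" block disappears.
    try (Engine engine = new Engine(Check.class.getClassLoader(), getSettings())) {
        // ... scanning and analysis, elided here exactly as in the hunk ...
        // Report generation is now a single call on the engine instead of building
        // a ReportGenerator from CveDB's DatabaseProperties.
        engine.writeReports(getProjectName(), new File(reportOutputDirectory), reportFormat);
        if (this.failBuildOnCVSS <= 10) {
            checkForFailure(engine.getDependencies());
        }
    } catch (Exception ex) {
        // Illustrative only: the real task distinguishes several exception types.
        throw new BuildException(ex);
    }
}
```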
Purge.java

@@ -37,21 +37,35 @@ public class Purge extends Task {
* The properties file location.
*/
private static final String PROPERTIES_FILE = "task.properties";
/**
* The configured settings.
*/
private Settings settings;

/**
* The location of the data directory that contains
*/
private String dataDirectory = null;
/**
* Indicates if dependency-check should fail the build if an exception
* occurs.
*/
private boolean failOnError = true;

/**
* Construct a new DependencyCheckTask.
*/
public Purge() {
super();

// Call this before Dependency Check Core starts logging anything - this way, all SLF4J messages from
// core end up coming through this tasks logger
StaticLoggerBinder.getSingleton().setTask(this);
}

/**
* The location of the data directory that contains
*/
private String dataDirectory = null;
public Settings getSettings() {
return settings;
}

/**
* Get the value of dataDirectory.
@@ -71,12 +85,6 @@ public class Purge extends Task {
this.dataDirectory = dataDirectory;
}

/**
* Indicates if dependency-check should fail the build if an exception
* occurs.
*/
private boolean failOnError = true;

/**
* Get the value of failOnError.
*
@@ -106,7 +114,7 @@ public class Purge extends Task {
populateSettings();
File db;
try {
db = new File(Settings.getDataDirectory(), "dc.h2.db");
db = new File(settings.getDataDirectory(), "dc.h2.db");
if (db.exists()) {
if (db.delete()) {
log("Database file purged; local copy of the NVD has been removed", Project.MSG_INFO);
@@ -118,7 +126,7 @@ public class Purge extends Task {
log(msg, Project.MSG_ERR);
}
} else {
final String msg = String.format("Unable to purge database; the database file does not exists: %s", db.getAbsolutePath());
final String msg = String.format("Unable to purge database; the database file does not exist: %s", db.getAbsolutePath());
if (this.failOnError) {
throw new BuildException(msg);
}
@@ -131,7 +139,7 @@ public class Purge extends Task {
}
log(msg, Project.MSG_ERR);
} finally {
Settings.cleanup(true);
settings.cleanup(true);
}
}

@@ -143,9 +151,9 @@ public class Purge extends Task {
* @throws BuildException thrown if the properties file cannot be read.
*/
protected void populateSettings() throws BuildException {
Settings.initialize();
settings = new Settings();
try (InputStream taskProperties = this.getClass().getClassLoader().getResourceAsStream(PROPERTIES_FILE)) {
Settings.mergeProperties(taskProperties);
settings.mergeProperties(taskProperties);
} catch (IOException ex) {
final String msg = "Unable to load the dependency-check ant task.properties file.";
if (this.failOnError) {
@@ -154,13 +162,13 @@ public class Purge extends Task {
log(msg, ex, Project.MSG_WARN);
}
if (dataDirectory != null) {
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
} else {
final File jarPath = new File(Purge.class.getProtectionDomain().getCodeSource().getLocation().getPath());
final File base = jarPath.getParentFile();
final String sub = Settings.getString(Settings.KEYS.DATA_DIRECTORY);
final String sub = settings.getString(Settings.KEYS.DATA_DIRECTORY);
final File dataDir = new File(base, sub);
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
}
}
}
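The Purge hunks above convert every static Settings call into operations on a task-owned Settings instance that is created in populateSettings() and released in the finally block. A short sketch of that lifecycle, limited to the calls visible in this diff (new Settings(), mergeProperties, setString, getDataDirectory, cleanup); the data-directory value is a hypothetical placeholder:

```java
// Instance-based Settings lifecycle as introduced in the Purge hunks (sketch).
Settings settings = new Settings();                        // was Settings.initialize()
try (InputStream taskProperties =
        Purge.class.getClassLoader().getResourceAsStream("task.properties")) {
    settings.mergeProperties(taskProperties);              // was Settings.mergeProperties(...)
    settings.setString(Settings.KEYS.DATA_DIRECTORY, "/path/to/data");   // hypothetical path
    File db = new File(settings.getDataDirectory(), "dc.h2.db");         // was Settings.getDataDirectory()
    // ... purge the file, logging success or failure as execute() does ...
} catch (IOException ex) {
    // The task either fails the build or logs, depending on failOnError.
} finally {
    settings.cleanup(true);                                // was Settings.cleanup(true)
}
```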
SuppressionFile.java: new file, 51 lines

@@ -0,0 +1,51 @@
/*
* This file is part of dependency-check-ant.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2017 The OWASP Foundation. All Rights Reserved.
*/
package org.owasp.dependencycheck.taskdefs;

/**
* Class : {@link SuppressionFile} Responsibility : Models a suppression file
* nested XML element where the simple content is its location.
*
* @author Phillip Whittlesea
*/
public class SuppressionFile {

/**
* The path to the suppression file.
*/
private String path;

/**
* Sets the path to the suppression file.
*
* @param path the path to the suppression file
*/
public void setPath(String path) {
this.path = path;
}

/**
* Gets the path to the suppression file.
*
* @return the path
*/
public String getPath() {
return path;
}

}
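SuppressionFile above is the nested-element model that Ant hands to Check.addConfiguredSuppressionFile (see the Check.java hunks earlier). A sketch of how the two pieces fit together programmatically, equivalent to nesting `<suppressionFile path="..."/>` elements inside the task; it assumes the task's no-arg constructor (as Ant requires) and uses placeholder file names:

```java
// Programmatic equivalent of nested <suppressionFile path="..."/> elements (sketch).
Check check = new Check();

SuppressionFile first = new SuppressionFile();
first.setPath("project-suppressions.xml");        // placeholder file name
check.addConfiguredSuppressionFile(first);

SuppressionFile second = new SuppressionFile();
second.setPath("team-suppressions.xml");          // placeholder file name
check.addConfiguredSuppressionFile(second);

// The old single-file attribute still works; setSuppressionFile() now adds to the same list.
check.setSuppressionFile("legacy-suppression.xml");

// populateSettings() later passes this collection to Settings.KEYS.SUPPRESSION_FILE as an array.
List<String> files = check.getSuppressionFiles();
```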
@@ -385,9 +385,7 @@ public class Update extends Purge {
|
||||
@Override
|
||||
public void execute() throws BuildException {
|
||||
populateSettings();
|
||||
Engine engine = null;
|
||||
try {
|
||||
engine = new Engine(Update.class.getClassLoader());
|
||||
try (Engine engine = new Engine(Update.class.getClassLoader(), getSettings())) {
|
||||
try {
|
||||
engine.doUpdates();
|
||||
} catch (UpdateException ex) {
|
||||
@@ -402,11 +400,6 @@ public class Update extends Purge {
|
||||
throw new BuildException(msg, ex);
|
||||
}
|
||||
log(msg, Project.MSG_ERR);
|
||||
} finally {
|
||||
Settings.cleanup(true);
|
||||
if (engine != null) {
|
||||
engine.cleanup();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -420,23 +413,23 @@ public class Update extends Purge {
|
||||
@Override
|
||||
protected void populateSettings() throws BuildException {
|
||||
super.populateSettings();
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
|
||||
getSettings().setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
|
||||
if (cveValidForHours != null) {
|
||||
if (cveValidForHours >= 0) {
|
||||
Settings.setInt(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
|
||||
getSettings().setInt(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
|
||||
} else {
|
||||
throw new BuildException("Invalid setting: `cpeValidForHours` must be 0 or greater");
|
||||
}
|
||||
@@ -48,7 +48,7 @@ public class StaticLoggerBinder implements LoggerFactoryBinder {
|
||||
*
|
||||
* @return the StaticLoggerBinder singleton
|
||||
*/
|
||||
public static final StaticLoggerBinder getSingleton() {
|
||||
public static StaticLoggerBinder getSingleton() {
|
||||
return SINGLETON;
|
||||
}
|
||||
|
||||
@@ -17,7 +17,7 @@ the project's dependencies.
|
||||
<dependency-check projectname="Hello World"
|
||||
reportoutputdirectory="${basedir}"
|
||||
reportformat="ALL">
|
||||
|
||||
<suppressionfile path="${basedir}/path/to/suppression.xml" />
|
||||
<fileset dir="lib">
|
||||
<include name="**/*.jar"/>
|
||||
</fileset>
|
||||
@@ -33,19 +33,26 @@ Property | Description
|
||||
----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------
|
||||
autoUpdate | Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not recommended that this be set to false. | true
|
||||
cveValidForHours | Sets the number of hours to wait before checking for new updates from the NVD | 4
|
||||
failBuildOnCVSS | Specifies if the build should be failed if a CVSS score above a specified level is identified. The default is 11 which means since the CVSS scores are 0-10, by default the build will never fail. | 11
|
||||
failBuildOnCVSS | Specifies if the build should be failed if a CVSS score equal to or above a specified level is identified. The default is 11 which means since the CVSS scores are 0-10, by default the build will never fail. | 11
|
||||
failOnError | Whether the build should fail if there is an error executing the dependency-check analysis | true
|
||||
projectName | The name of the project being scanned. | Dependency-Check
|
||||
reportFormat | The report format to be generated (HTML, XML, CSV, JSON, VULN, ALL). This configuration option has no effect if using this within the Site plugin unless the externalReport is set to true. | HTML
|
||||
reportOutputDirectory | The location to write the report(s). Note, this is not used if generating the report as part of a `mvn site` build | 'target'
|
||||
suppressionFile | The file path to the XML suppression file \- used to suppress [false positives](../general/suppression.html) |
|
||||
hintsFile | The file path to the XML hints file \- used to resolve [false negatives](../general/hints.html) |
|
||||
proxyServer | The Proxy Server; see the [proxy configuration](../data/proxy.html) page for more information. |
|
||||
proxyPort | The Proxy Port. |
|
||||
proxyUsername | Defines the proxy user name. |
|
||||
proxyPassword | Defines the proxy password. |
|
||||
connectionTimeout | The URL Connection Timeout. |
|
||||
enableExperimental | Enable the [experimental analyzers](../analyzers/index.html). If not enabled the experimental analyzers (see below) will not be loaded or used. | false
|
||||
enableExperimental | Enable the [experimental analyzers](../analyzers/index.html). If not enabled the experimental analyzers (see below) will not be loaded or used. | false
|
||||
enableRetired | Enable the [retired analyzers](../analyzers/index.html). If not enabled the retired analyzers (see below) will not be loaded or used. | false
|
||||
suppressionFile | The file path to the XML suppression file \- used to suppress [false positives](../general/suppression.html). |
|
||||
|
||||
The following nested elements can be set on the dependency-check task.
|
||||
|
||||
Element | Property | Description | Default Value
|
||||
------------------|----------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------
|
||||
suppressionFile | path | The file path to the XML suppression file \- used to suppress [false positives](../general/suppression.html). Element can be specified multiple times. |
|
||||
|
||||
Analyzer Configuration
|
||||
====================
|
||||
@@ -55,23 +62,24 @@ Note, that specific analyzers will automatically disable themselves if no file
|
||||
types that they support are detected - so specifically disabling them may not
|
||||
be needed.
|
||||
|
||||
Property | Description | Default Value
|
||||
------------------------------|-----------------------------------------------------------------------------------|------------------
|
||||
archiveAnalyzerEnabled | Sets whether the Archive Analyzer will be used. | true
|
||||
Property | Description | Default Value
|
||||
------------------------------|------------------------------------------------------------------------------------------------------------|------------------
|
||||
archiveAnalyzerEnabled | Sets whether the Archive Analyzer will be used. | true
|
||||
zipExtensions | A comma-separated list of additional file extensions to be treated like a ZIP file; the contents will be extracted and analyzed. |
|
||||
jarAnalyzer | Sets whether the Jar Analyzer will be used. | true
|
||||
jarAnalyzer | Sets whether the Jar Analyzer will be used. | true
|
||||
centralAnalyzerEnabled | Sets whether the Central Analyzer will be used. **Disabling this analyzer is not recommended as it could lead to false negatives (e.g. libraries that have vulnerabilities may not be reported correctly).** If this analyzer is being disabled there is a good chance you also want to disable the Nexus Analyzer (see below). | true
|
||||
nexusAnalyzerEnabled | Sets whether Nexus Analyzer will be used. This analyzer is superseded by the Central Analyzer; however, you can configure this to run against a Nexus Pro installation. | true
|
||||
nexusAnalyzerEnabled | Sets whether Nexus Analyzer will be used (requires Nexus Pro). This analyzer is superseded by the Central Analyzer; however, you can configure this to run against a Nexus Pro installation. | true
|
||||
nexusUrl | Defines the Nexus web service endpoint (example http://domain.enterprise/nexus/service/local/). If not set the Nexus Analyzer will be disabled. |
|
||||
nexusUsesProxy | Whether or not the defined proxy should be used when connecting to Nexus. | true
|
||||
nexusUsesProxy | Whether or not the defined proxy should be used when connecting to Nexus. | true
|
||||
pyDistributionAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Python Distribution Analyzer will be used. | true
|
||||
pyPackageAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Python Package Analyzer will be used. | true
|
||||
rubygemsAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Ruby Gemspec Analyzer will be used. | true
|
||||
opensslAnalyzerEnabled | Sets whether the openssl Analyzer should be used. | true
|
||||
opensslAnalyzerEnabled | Sets whether the openssl Analyzer should be used. | true
|
||||
cmakeAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) CMake Analyzer should be used. | true
|
||||
autoconfAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) autoconf Analyzer should be used. | true
|
||||
composerAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) PHP Composer Lock File Analyzer should be used. | true
|
||||
nodeAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Node.js Analyzer should be used. | true
|
||||
nodeAnalyzerEnabled | Sets whether the [retired](../analyzers/index.html) Node.js Analyzer should be used. | true
|
||||
nspAnalyzerEnabled | Sets whether the NSP Analyzer should be used. | true
|
||||
nuspecAnalyzerEnabled | Sets whether the .NET Nuget Nuspec Analyzer will be used. | true
|
||||
cocoapodsAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Cocoapods Analyzer should be used. | true
|
||||
bundleAuditAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Bundle Audit Analyzer should be used. | true
|
||||
@@ -92,8 +100,8 @@ cveUrl20Modified | URL for the modified CVE 2.0.
|
||||
cveUrl12Base | Base URL for each year's CVE 1.2, the %d will be replaced with the year. | http://nvd.nist.gov/download/nvdcve-%d.xml
|
||||
cveUrl20Base | Base URL for each year's CVE 2.0, the %d will be replaced with the year. | http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml
|
||||
dataDirectory | Data directory that is used to store the local copy of the NVD. This should generally not be changed. | data
|
||||
databaseDriverName | The name of the database driver. Example: org.h2.Driver. |
|
||||
databaseDriverName | The name of the database driver. Example: org.h2.Driver. |
|
||||
databaseDriverPath | The path to the database driver JAR file; only used if the driver is not in the class path. |
|
||||
connectionString | The connection string used to connect to the database. |
|
||||
databaseUser | The username used when connecting to the database. |
|
||||
databasePassword | The password used when connecting to the database. |
|
||||
connectionString | The connection string used to connect to the database. |
|
||||
databaseUser | The username used when connecting to the database. |
|
||||
databasePassword | The password used when connecting to the database. |
|
||||
|
[binary image changed: 10 KiB before and after]
@@ -21,13 +21,12 @@ import java.io.File;
|
||||
|
||||
import org.apache.tools.ant.BuildException;
|
||||
import org.apache.tools.ant.BuildFileRule;
|
||||
import org.junit.After;
|
||||
import org.apache.tools.ant.types.LogLevel;
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.owasp.dependencycheck.BaseDBTestCase;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
@@ -35,7 +34,7 @@ import static org.junit.Assert.assertTrue;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class DependencyCheckTaskTest {
|
||||
public class DependencyCheckTaskTest extends BaseDBTestCase {
|
||||
|
||||
@Rule
|
||||
public BuildFileRule buildFileRule = new BuildFileRule();
|
||||
@@ -44,18 +43,11 @@ public class DependencyCheckTaskTest {
|
||||
public ExpectedException expectedException = ExpectedException.none();
|
||||
|
||||
@Before
|
||||
@Override
|
||||
public void setUp() throws Exception {
|
||||
Settings.initialize();
|
||||
BaseDBTestCase.ensureDBExists();
|
||||
super.setUp();
|
||||
final String buildFile = this.getClass().getClassLoader().getResource("build.xml").getPath();
|
||||
buildFileRule.configureProject(buildFile);
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
//no cleanup...
|
||||
//executeTarget("cleanup");
|
||||
Settings.cleanup(true);
|
||||
buildFileRule.configureProject(buildFile, LogLevel.VERBOSE.getLevel());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -65,7 +57,7 @@ public class DependencyCheckTaskTest {
|
||||
public void testAddFileSet() throws Exception {
|
||||
File report = new File("target/dependency-check-report.html");
|
||||
if (report.exists() && !report.delete()) {
|
||||
throw new Exception("Unable to delete 'target/DependencyCheck-Report.html' prior to test.");
|
||||
throw new Exception("Unable to delete 'target/dependency-check-report.html' prior to test.");
|
||||
}
|
||||
buildFileRule.executeTarget("test.fileset");
|
||||
assertTrue("DependencyCheck report was not generated", report.exists());
|
||||
@@ -114,4 +106,66 @@ public class DependencyCheckTaskTest {
|
||||
expectedException.expect(BuildException.class);
|
||||
buildFileRule.executeTarget("failCVSS");
|
||||
}
|
||||
|
||||
/**
|
||||
* Test the DependencyCheckTask where a CVE is suppressed.
|
||||
*/
|
||||
@Test
|
||||
public void testSuppressingCVE() {
|
||||
// GIVEN an ant task with a vulnerability
|
||||
final String antTaskName = "suppression";
|
||||
|
||||
// WHEN executing the ant task
|
||||
buildFileRule.executeTarget(antTaskName);
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println(buildFileRule.getError());
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println(buildFileRule.getFullLog());
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.println("----------------------------------------------------------");
|
||||
|
||||
// THEN the ant task executed without error
|
||||
final File report = new File("target/suppression-report.html");
|
||||
assertTrue("Expected the DependencyCheck report to be generated", report.exists());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that the DependencyCheckTask's deprecated suppressionFile property
* still works and logs a deprecation warning.
|
||||
*/
|
||||
@Test
|
||||
public void testSuppressingSingle() {
|
||||
// GIVEN an ant task with a vulnerability using the legacy property
|
||||
final String antTaskName = "suppression-single";
|
||||
|
||||
// WHEN executing the ant task
|
||||
buildFileRule.executeTarget(antTaskName);
|
||||
|
||||
// THEN the ant task executed without error
|
||||
final File report = new File("target/suppression-single-report.html");
|
||||
assertTrue("Expected the DependencyCheck report to be generated", report.exists());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that the deprecated suppressionFile property can be combined with
* nested suppressionfile elements.
|
||||
*/
|
||||
@Test
|
||||
public void testSuppressingMultiple() {
|
||||
// GIVEN an ant task with a vulnerability using multiple ways of configuring the suppression file
|
||||
final String antTaskName = "suppression-multiple";
|
||||
|
||||
// WHEN executing the ant task
|
||||
buildFileRule.executeTarget(antTaskName);
|
||||
|
||||
// THEN the ant task executed without error
|
||||
final File report = new File("target/suppression-multiple-report.html");
|
||||
assertTrue("Expected the DependencyCheck report to be generated", report.exists());
|
||||
}
|
||||
}
|
||||
@@ -71,4 +71,47 @@
|
||||
</fileset>
|
||||
</dependency-check>
|
||||
</target>
|
||||
|
||||
<target name="suppression">
|
||||
<dependency-check
|
||||
applicationName="test suppression"
|
||||
reportOutputDirectory="${project.build.directory}/suppression-report.html"
|
||||
autoupdate="false"
|
||||
failBuildOnCVSS="3">
|
||||
<suppressionfile path="${project.build.directory}/test-classes/test-suppression1.xml" />
|
||||
<suppressionfile path="${project.build.directory}/test-classes/test-suppression2.xml" />
|
||||
<fileset dir="${project.build.directory}/test-classes/jars">
|
||||
<include name="axis-1.4.jar"/>
|
||||
</fileset>
|
||||
<filelist
|
||||
dir="${project.build.directory}/test-classes/list"
|
||||
files="jetty-6.1.0.jar,org.mortbay.jetty.jar"/>
|
||||
</dependency-check>
|
||||
</target>
|
||||
<target name="suppression-single">
|
||||
<dependency-check
|
||||
applicationName="test suppression"
|
||||
reportOutputDirectory="${project.build.directory}/suppression-single-report.html"
|
||||
autoupdate="false"
|
||||
failBuildOnCVSS="3"
|
||||
suppressionFile="${project.build.directory}/test-classes/test-suppression.xml">
|
||||
<fileset dir="${project.build.directory}/test-classes/jars">
|
||||
<include name="axis-1.4.jar"/>
|
||||
</fileset>
|
||||
</dependency-check>
|
||||
</target>
|
||||
<target name="suppression-multiple">
|
||||
<dependency-check
|
||||
applicationName="test suppression"
|
||||
reportOutputDirectory="${project.build.directory}/suppression-multiple-report.html"
|
||||
autoupdate="false"
|
||||
failBuildOnCVSS="3"
|
||||
suppressionFile="${project.build.directory}/test-classes/test-suppression1.xml">
|
||||
<suppressionfile path="${project.build.directory}/test-classes/test-suppression2.xml"/>
|
||||
<fileset dir="${project.build.directory}/test-classes/jars">
|
||||
<include name="axis-1.4.jar"/>
|
||||
</fileset>
|
||||
</dependency-check>
|
||||
</target>
|
||||
|
||||
</project>
|
||||
ant/src/test/resources/test-suppression.xml (new file, 48 lines)
@@ -0,0 +1,48 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
This file is part of dependency-check-core.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
Copyright (c) 2017 The OWASP Foundation. All Rights Reserved.
|
||||
-->
|
||||
<suppressions xmlns="https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.1.xsd">
|
||||
<suppress>
|
||||
<notes><![CDATA[
|
||||
file name: axis-1.4.jar
|
||||
]]></notes>
|
||||
<gav regex="true">^org\.apache\.axis:axis:.*$</gav>
|
||||
<cpe>cpe:/a:apache:axis</cpe>
|
||||
</suppress>
|
||||
<suppress>
|
||||
<notes><![CDATA[
|
||||
file name: org.mortbay.jetty.jar
|
||||
]]></notes>
|
||||
<gav regex="true">^jetty:org\.mortbay\.jetty:.*$</gav>
|
||||
<cpe>cpe:/a:jetty:jetty</cpe>
|
||||
</suppress>
|
||||
<suppress>
|
||||
<notes><![CDATA[
|
||||
file name: org.mortbay.jetty.jar
|
||||
]]></notes>
|
||||
<gav regex="true">^jetty:org\.mortbay\.jetty:.*$</gav>
|
||||
<cpe>cpe:/a:mortbay:jetty</cpe>
|
||||
</suppress>
|
||||
<suppress>
|
||||
<notes><![CDATA[
|
||||
file name: org.mortbay.jetty.jar
|
||||
]]></notes>
|
||||
<gav regex="true">^jetty:org\.mortbay\.jetty:.*$</gav>
|
||||
<cpe>cpe:/a:mortbay_jetty:jetty</cpe>
|
||||
</suppress>
|
||||
</suppressions>
|
||||
ant/src/test/resources/test-suppression1.xml (new file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
This file is part of dependency-check-core.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
Copyright (c) 2017 The OWASP Foundation. All Rights Reserved.
|
||||
-->
|
||||
<suppressions xmlns="https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.1.xsd">
|
||||
<suppress>
|
||||
<notes><![CDATA[
|
||||
file name: axis-1.4.jar
|
||||
]]></notes>
|
||||
<gav regex="true">^org\.apache\.axis:axis:.*$</gav>
|
||||
<cpe>cpe:/a:apache:axis</cpe>
|
||||
</suppress>
|
||||
</suppressions>
|
||||
ant/src/test/resources/test-suppression2.xml (new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
This file is part of dependency-check-core.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
Copyright (c) 2017 The OWASP Foundation. All Rights Reserved.
|
||||
-->
|
||||
<suppressions xmlns="https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.1.xsd">
|
||||
<suppress>
|
||||
<notes><![CDATA[
|
||||
file name: org.mortbay.jetty.jar
|
||||
]]></notes>
|
||||
<gav regex="true">^jetty:org\.mortbay\.jetty:.*$</gav>
|
||||
<cpe>cpe:/a:jetty:jetty</cpe>
|
||||
</suppress>
|
||||
<suppress>
|
||||
<notes><![CDATA[
|
||||
file name: org.mortbay.jetty.jar
|
||||
]]></notes>
|
||||
<gav regex="true">^jetty:org\.mortbay\.jetty:.*$</gav>
|
||||
<cpe>cpe:/a:mortbay:jetty</cpe>
|
||||
</suppress>
|
||||
<suppress>
|
||||
<notes><![CDATA[
|
||||
file name: org.mortbay.jetty.jar
|
||||
]]></notes>
|
||||
<gav regex="true">^jetty:org\.mortbay\.jetty:.*$</gav>
|
||||
<cpe>cpe:/a:mortbay_jetty:jetty</cpe>
|
||||
</suppress>
|
||||
</suppressions>
|
||||
@@ -21,21 +21,12 @@ Copyright (c) 2017 Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>1.4.6-SNAPSHOT</version>
|
||||
<version>3.1.2-SNAPSHOT</version>
|
||||
</parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-plugin</artifactId>
|
||||
<name>Dependency-Check Plugin Archetype</name>
|
||||
<packaging>jar</packaging>
|
||||
<!-- begin copy from http://minds.coremedia.com/2012/09/11/problem-solved-deploy-multi-module-maven-project-site-as-github-pages/ -->
|
||||
<distributionManagement>
|
||||
<site>
|
||||
<id>github-pages-site</id>
|
||||
<name>Deployment through GitHub's site deployment plugin</name>
|
||||
<url>${basedir}/../target/site/${project.version}/dependency-check-plugin</url>
|
||||
</site>
|
||||
</distributionManagement>
|
||||
<!-- end copy -->
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
@@ -23,6 +23,7 @@ import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
/**
|
||||
* An OWASP dependency-check plug-in example. If you are not implementing a
|
||||
@@ -66,7 +67,7 @@ public class ${analyzerName} implements Analyzer, FileTypeAnalyzer {
|
||||
@Override
|
||||
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
if (enabled) {
|
||||
throw new UnsupportedOperationException("Not implemented yet.");
|
||||
//TODO implement analyze
|
||||
}
|
||||
}
|
||||
|
||||
@@ -91,15 +92,26 @@ public class ${analyzerName} implements Analyzer, FileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* The initialize method is called (once) prior to the analyze method being
|
||||
* called on all of the dependencies.
|
||||
* The initialize method is called just after instantiation of the object.
|
||||
*
|
||||
* @throws InitializationException is thrown if an exception occurs
|
||||
* initializing the analyzer.
|
||||
* @param settings a reference to the configured settings
|
||||
*/
|
||||
@Override
|
||||
public void initialize() throws InitializationException {
|
||||
public void initialize(Settings settings) {
|
||||
//TODO implement initialize
|
||||
}
|
||||
|
||||
/**
|
||||
* The prepare method is called once just prior to repeated calls to
|
||||
* analyze.
|
||||
*
|
||||
* @param engine a reference to the engine
|
||||
* @throws InitializationException thrown when the analyzer cannot be
|
||||
* initialized
|
||||
*/
|
||||
@Override
|
||||
public void prepare(Engine engine) throws InitializationException {
|
||||
//TODO implement prepare
|
||||
}
|
||||
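    /**
     * Sketch only (not part of the generated template): the order in which an
     * engine is expected to drive the lifecycle documented above. The analyzer,
     * settings, and engine arguments are assumed to be supplied by the caller.
     */
    private static void exampleLifecycle(${analyzerName} analyzer, Settings settings, Engine engine)
            throws InitializationException, AnalysisException {
        analyzer.initialize(settings);            // once, immediately after construction
        analyzer.prepare(engine);                 // once, before the first analyze call
        for (Dependency dependency : engine.getDependencies()) {
            analyzer.analyze(dependency, engine); // repeated for each dependency
        }
    }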
|
||||
/**
|
||||
@@ -116,7 +128,7 @@ public class ${analyzerName} implements Analyzer, FileTypeAnalyzer {
|
||||
/**
|
||||
* Returns whether multiple instances of the same type of analyzer can run
|
||||
* in parallel. If the analyzer does not support parallel processing it is
|
||||
* generally best to also mark the analyze(Dependency,Engine) as synchronized.
|
||||
* generally best to also mark the analyze(Dependency,Engine) as synchronized.
|
||||
*
|
||||
* @return {@code true} if the analyzer supports parallel processing,
|
||||
* {@code false} else
|
||||
@@ -30,25 +30,27 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
*/
|
||||
public class ${analyzerName}Test {
|
||||
|
||||
Settings settings = null;
|
||||
|
||||
public ${analyzerName}Test() {
|
||||
}
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() {
|
||||
Settings.initialize();
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDownClass() {
|
||||
Settings.cleanup();
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
settings = new Settings();
|
||||
}
|
||||
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
settings.cleanup();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -68,12 +70,14 @@ public class ${analyzerName}Test {
|
||||
*/
|
||||
@Test
|
||||
public void testAnalyze() throws Exception {
|
||||
//The engine is generally null for most analyzer test cases but can be instantiated if needed.
|
||||
Engine engine = null;
|
||||
${analyzerName} instance = new ${analyzerName}();
|
||||
instance.initialize();
|
||||
instance.initialize(settings);
|
||||
instance.prepare(engine);
|
||||
|
||||
File file = new File(${analyzerName}.class.getClassLoader().getResource("test.file").toURI().getPath());
|
||||
Dependency dependency = new Dependency(file);
|
||||
//The engine is generally null for most analyzer test cases.
|
||||
Engine engine = null;
|
||||
|
||||
//TODO uncomment the following line and add assertions against the dependency.
|
||||
//instance.analyze(dependency, engine);
|
||||
@@ -107,7 +111,7 @@ public class ${analyzerName}Test {
|
||||
@Test
|
||||
public void testInitialize() throws Exception {
|
||||
${analyzerName} instance = new ${analyzerName}();
|
||||
instance.initialize();
|
||||
instance.initialize(settings);
|
||||
}
|
||||
|
||||
/**
|
||||
|
[binary image changed: 9.0 KiB before and after]
@@ -20,19 +20,11 @@ Copyright (c) 2017 - Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>1.4.6-SNAPSHOT</version>
|
||||
<version>3.1.2-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<name>Dependency-Check Build-Reporting</name>
|
||||
<artifactId>build-reporting</artifactId>
|
||||
<!-- begin copy from http://minds.coremedia.com/2012/09/11/problem-solved-deploy-multi-module-maven-project-site-as-github-pages/ -->
|
||||
<distributionManagement>
|
||||
<site>
|
||||
<id>github-pages-site</id>
|
||||
<name>Deployment through GitHub's site deployment plugin</name>
|
||||
<url>${basedir}/../target/site/${project.version}/build-reporting</url>
|
||||
</site>
|
||||
</distributionManagement>
|
||||
<!-- end copy -->
|
||||
<packaging>pom</packaging>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.owasp</groupId>
|
||||
@@ -62,16 +54,58 @@ Copyright (c) 2017 - Jeremy Long. All Rights Reserved.
|
||||
</dependencies>
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-deploy-plugin</artifactId>
|
||||
<configuration>
|
||||
<skip>true</skip>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-enforcer-plugin</artifactId>
|
||||
<configuration>
|
||||
<skip>true</skip>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.jacoco</groupId>
|
||||
<artifactId>jacoco-maven-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>report-merge</id>
|
||||
<phase>verify</phase>
|
||||
<goals>
|
||||
<goal>merge</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<fileSets>
|
||||
<fileSet implementation="org.apache.maven.shared.model.fileset.FileSet">
|
||||
<directory>${project.basedir}/../</directory>
|
||||
<includes>
|
||||
<include>utils/target/coverage-reports/*.exec</include>
|
||||
<include>core/target/coverage-reports/*.exec</include>
|
||||
<include>cli/target/coverage-reports/*.exec</include>
|
||||
<include>ant/target/coverage-reports/*.exec</include>
|
||||
<include>maven/target/coverage-reports/*.exec</include>
|
||||
</includes>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
</configuration>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>report-aggregate</id>
|
||||
<phase>verify</phase>
|
||||
<goals>
|
||||
<goal>report-aggregate</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<outputDirectory>target/coverage-reports/</outputDirectory>
|
||||
<dataFileIncludes>
|
||||
<dataFileInclude>target/coverage-reports/jacoco-ut.exec</dataFileInclude>
|
||||
<dataFileInclude>target/coverage-reports/jacoco-it.exec</dataFileInclude>
|
||||
</dataFileIncludes>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
@@ -20,7 +20,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>1.4.6-SNAPSHOT</version>
|
||||
<version>3.1.2-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>dependency-check-cli</artifactId>
|
||||
@@ -28,15 +28,6 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
|
||||
<name>Dependency-Check Command Line</name>
|
||||
<description>dependency-check-cli is a command line tool that uses dependency-check-core to detect publicly disclosed vulnerabilities associated with the scanned project dependencies. The tool will generate a report listing the dependency, any identified Common Platform Enumeration (CPE) identifiers, and the associated Common Vulnerability and Exposure (CVE) entries.</description>
|
||||
<!-- begin copy from http://minds.coremedia.com/2012/09/11/problem-solved-deploy-multi-module-maven-project-site-as-github-pages/ -->
|
||||
<distributionManagement>
|
||||
<site>
|
||||
<id>github-pages-site</id>
|
||||
<name>Deployment through GitHub's site deployment plugin</name>
|
||||
<url>${basedir}/../target/site/${project.version}/dependency-check-cli</url>
|
||||
</site>
|
||||
</distributionManagement>
|
||||
<!-- end copy -->
|
||||
<build>
|
||||
<finalName>dependency-check-${project.version}</finalName>
|
||||
<resources>
|
||||
@@ -119,42 +110,6 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
<reporting>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-checkstyle-plugin</artifactId>
|
||||
<version>${reporting.checkstyle-plugin.version}</version>
|
||||
<configuration>
|
||||
<enableRulesSummary>false</enableRulesSummary>
|
||||
<enableFilesSummary>false</enableFilesSummary>
|
||||
<configLocation>${basedir}/../src/main/config/checkstyle-checks.xml</configLocation>
|
||||
<headerLocation>${basedir}/../src/main/config/checkstyle-header.txt</headerLocation>
|
||||
<suppressionsLocation>${basedir}/../src/main/config/checkstyle-suppressions.xml</suppressionsLocation>
|
||||
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-pmd-plugin</artifactId>
|
||||
<version>${reporting.pmd-plugin.version}</version>
|
||||
<configuration>
|
||||
<targetJdk>1.6</targetJdk>
|
||||
<linkXRef>true</linkXRef>
|
||||
<sourceEncoding>utf-8</sourceEncoding>
|
||||
<excludes>
|
||||
<exclude>**/generated/*.java</exclude>
|
||||
</excludes>
|
||||
<rulesets>
|
||||
<ruleset>../src/main/config/dcrules.xml</ruleset>
|
||||
<ruleset>/rulesets/java/basic.xml</ruleset>
|
||||
<ruleset>/rulesets/java/imports.xml</ruleset>
|
||||
<ruleset>/rulesets/java/unusedcode.xml</ruleset>
|
||||
</rulesets>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</reporting>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>commons-cli</groupId>
|
||||
@@ -28,13 +28,10 @@ import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.apache.tools.ant.DirectoryScanner;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
import org.owasp.dependencycheck.reporting.ReportGenerator;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -56,6 +53,10 @@ public class App {
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(App.class);
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private Settings settings = null;
|
||||
|
||||
/**
|
||||
* The main method for the application.
|
||||
@@ -64,17 +65,28 @@ public class App {
|
||||
*/
|
||||
public static void main(String[] args) {
|
||||
int exitCode = 0;
|
||||
try {
|
||||
Settings.initialize();
|
||||
final App app = new App();
|
||||
exitCode = app.run(args);
|
||||
LOGGER.debug("Exit code: " + exitCode);
|
||||
} finally {
|
||||
Settings.cleanup(true);
|
||||
}
|
||||
final App app = new App();
|
||||
exitCode = app.run(args);
|
||||
LOGGER.debug("Exit code: {}", exitCode);
|
||||
System.exit(exitCode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds the App object.
|
||||
*/
|
||||
public App() {
|
||||
settings = new Settings();
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds the App object; this constructor is used for testing.
|
||||
*
|
||||
* @param settings the configured settings
|
||||
*/
|
||||
protected App(Settings settings) {
|
||||
this.settings = settings;
|
||||
}
|
||||
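    // Sketch only: how the test-only constructor above allows a pre-configured
    // Settings instance to be injected (the data-directory value is an example,
    // not taken from this change set):
    //
    //     Settings testSettings = new Settings();
    //     testSettings.setString(Settings.KEYS.DATA_DIRECTORY, "target/data");
    //     final App app = new App(testSettings);
    //     final int exitCode = app.run(args);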
|
||||
/**
|
||||
* Main CLI entry-point into the application.
|
||||
*
|
||||
@@ -83,7 +95,7 @@ public class App {
|
||||
*/
|
||||
public int run(String[] args) {
|
||||
int exitCode = 0;
|
||||
final CliParser cli = new CliParser();
|
||||
final CliParser cli = new CliParser(settings);
|
||||
|
||||
try {
|
||||
cli.parse(args);
|
||||
@@ -112,10 +124,11 @@ public class App {
|
||||
LOGGER.error(ex.getMessage());
|
||||
LOGGER.debug("Error loading properties file", ex);
|
||||
exitCode = -4;
|
||||
return exitCode;
|
||||
}
|
||||
File db;
|
||||
try {
|
||||
db = new File(Settings.getDataDirectory(), "dc.h2.db");
|
||||
db = new File(settings.getDataDirectory(), settings.getString(Settings.KEYS.DB_FILE_NAME, "dc.h2.db"));
|
||||
if (db.exists()) {
|
||||
if (db.delete()) {
|
||||
LOGGER.info("Database file purged; local copy of the NVD has been removed");
|
||||
@@ -124,12 +137,14 @@ public class App {
|
||||
exitCode = -5;
|
||||
}
|
||||
} else {
|
||||
LOGGER.error("Unable to purge database; the database file does not exists: {}", db.getAbsolutePath());
|
||||
LOGGER.error("Unable to purge database; the database file does not exist: {}", db.getAbsolutePath());
|
||||
exitCode = -6;
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
LOGGER.error("Unable to delete the database");
|
||||
exitCode = -7;
|
||||
} finally {
|
||||
settings.cleanup();
|
||||
}
|
||||
}
|
||||
} else if (cli.isGetVersion()) {
|
||||
@@ -141,6 +156,7 @@ public class App {
|
||||
LOGGER.error(ex.getMessage());
|
||||
LOGGER.debug("Error loading properties file", ex);
|
||||
exitCode = -4;
|
||||
return exitCode;
|
||||
}
|
||||
try {
|
||||
runUpdateOnly();
|
||||
@@ -150,6 +166,8 @@ public class App {
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.error(ex.getMessage());
|
||||
exitCode = -9;
|
||||
} finally {
|
||||
settings.cleanup();
|
||||
}
|
||||
} else if (cli.isRunScan()) {
|
||||
try {
|
||||
@@ -158,6 +176,7 @@ public class App {
|
||||
LOGGER.error(ex.getMessage());
|
||||
LOGGER.debug("Error loading properties file", ex);
|
||||
exitCode = -4;
|
||||
return exitCode;
|
||||
}
|
||||
try {
|
||||
final String[] scanFiles = cli.getScanFiles();
|
||||
@@ -184,8 +203,12 @@ public class App {
|
||||
exitCode = -14;
|
||||
}
|
||||
for (Throwable e : ex.getExceptions()) {
|
||||
LOGGER.error(e.getMessage());
|
||||
if (e.getMessage() != null) {
|
||||
LOGGER.error(e.getMessage());
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
settings.cleanup();
|
||||
}
|
||||
} else {
|
||||
cli.printHelp();
|
||||
@@ -220,56 +243,11 @@ public class App {
|
||||
String[] excludes, int symLinkDepth, int cvssFailScore) throws InvalidScanPathException, DatabaseException,
|
||||
ExceptionCollection, ReportException {
|
||||
Engine engine = null;
|
||||
int retCode = 0;
|
||||
try {
|
||||
engine = new Engine();
|
||||
final List<String> antStylePaths = new ArrayList<>();
|
||||
for (String file : files) {
|
||||
final String antPath = ensureCanonicalPath(file);
|
||||
antStylePaths.add(antPath);
|
||||
}
|
||||
final List<String> antStylePaths = getPaths(files);
|
||||
final Set<File> paths = scanAntStylePaths(antStylePaths, symLinkDepth, excludes);
|
||||
|
||||
final Set<File> paths = new HashSet<>();
|
||||
for (String file : antStylePaths) {
|
||||
LOGGER.debug("Scanning {}", file);
|
||||
final DirectoryScanner scanner = new DirectoryScanner();
|
||||
String include = file.replace('\\', '/');
|
||||
File baseDir;
|
||||
|
||||
if (include.startsWith("//")) {
|
||||
throw new InvalidScanPathException("Unable to scan paths specified by //");
|
||||
} else {
|
||||
final int pos = getLastFileSeparator(include);
|
||||
final String tmpBase = include.substring(0, pos);
|
||||
final String tmpInclude = include.substring(pos + 1);
|
||||
if (tmpInclude.indexOf('*') >= 0 || tmpInclude.indexOf('?') >= 0
|
||||
|| (new File(include)).isFile()) {
|
||||
baseDir = new File(tmpBase);
|
||||
include = tmpInclude;
|
||||
} else {
|
||||
baseDir = new File(tmpBase, tmpInclude);
|
||||
include = "**/*";
|
||||
}
|
||||
}
|
||||
scanner.setBasedir(baseDir);
|
||||
final String[] includes = {include};
|
||||
scanner.setIncludes(includes);
|
||||
scanner.setMaxLevelsOfSymlinks(symLinkDepth);
|
||||
if (symLinkDepth <= 0) {
|
||||
scanner.setFollowSymlinks(false);
|
||||
}
|
||||
if (excludes != null && excludes.length > 0) {
|
||||
scanner.addExcludes(excludes);
|
||||
}
|
||||
scanner.scan();
|
||||
if (scanner.getIncludedFilesCount() > 0) {
|
||||
for (String s : scanner.getIncludedFiles()) {
|
||||
final File f = new File(baseDir, s);
|
||||
LOGGER.debug("Found file {}", f.toString());
|
||||
paths.add(f);
|
||||
}
|
||||
}
|
||||
}
|
||||
engine = new Engine(settings);
|
||||
engine.scan(paths);
|
||||
|
||||
ExceptionCollection exCol = null;
|
||||
@@ -281,18 +259,9 @@ public class App {
|
||||
}
|
||||
exCol = ex;
|
||||
}
|
||||
final List<Dependency> dependencies = engine.getDependencies();
|
||||
DatabaseProperties prop = null;
|
||||
try (CveDB cve = CveDB.getInstance()) {
|
||||
prop = cve.getDatabaseProperties();
|
||||
} catch (DatabaseException ex) {
|
||||
//TODO shouldn't this be a fatal exception
|
||||
LOGGER.debug("Unable to retrieve DB Properties", ex);
|
||||
}
|
||||
final ReportGenerator report = new ReportGenerator(applicationName, dependencies, engine.getAnalyzers(), prop);
|
||||
|
||||
try {
|
||||
report.generateReports(reportDirectory, outputFormat);
|
||||
engine.writeReports(applicationName, new File(reportDirectory), outputFormat);
|
||||
} catch (ReportException ex) {
|
||||
if (exCol != null) {
|
||||
exCol.addException(ex);
|
||||
@@ -301,28 +270,110 @@ public class App {
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
if (exCol != null && exCol.getExceptions().size() > 0) {
|
||||
if (exCol != null && !exCol.getExceptions().isEmpty()) {
|
||||
throw exCol;
|
||||
}
|
||||
return determineReturnCode(engine, cvssFailScore);
|
||||
} finally {
|
||||
if (engine != null) {
|
||||
engine.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//Set the exit code based on whether we found a high enough vulnerability
|
||||
for (Dependency dep : dependencies) {
|
||||
if (!dep.getVulnerabilities().isEmpty()) {
|
||||
for (Vulnerability vuln : dep.getVulnerabilities()) {
|
||||
LOGGER.debug("VULNERABILITY FOUND " + dep.getDisplayFileName());
|
||||
if (vuln.getCvssScore() > cvssFailScore) {
|
||||
retCode = 1;
|
||||
}
|
||||
/**
|
||||
* Determines the return code based on whether one of the scanned dependencies
|
||||
* has a vulnerability with a CVSS score above the cvssFailScore.
|
||||
*
|
||||
* @param engine the engine used during analysis
|
||||
* @param cvssFailScore the max allowed CVSS score
|
||||
* @return returns <code>1</code> if a severe enough vulnerability is
|
||||
* identified; otherwise <code>0</code>
|
||||
*/
|
||||
private int determineReturnCode(Engine engine, int cvssFailScore) {
|
||||
int retCode = 0;
|
||||
//Set the exit code based on whether we found a high enough vulnerability
|
||||
for (Dependency dep : engine.getDependencies()) {
|
||||
if (!dep.getVulnerabilities().isEmpty()) {
|
||||
for (Vulnerability vuln : dep.getVulnerabilities()) {
|
||||
LOGGER.debug("VULNERABILITY FOUND {}", dep.getDisplayFileName());
|
||||
if (vuln.getCvssScore() > cvssFailScore) {
|
||||
retCode = 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return retCode;
|
||||
}
|
||||
|
||||
return retCode;
|
||||
} finally {
|
||||
if (engine != null) {
|
||||
engine.cleanup();
|
||||
/**
|
||||
* Scans the given Ant-style paths and collects the actual files.
|
||||
*
|
||||
* @param antStylePaths a list of ant style paths to scan for actual files
|
||||
* @param symLinkDepth the depth to traverse symbolic links
|
||||
* @param excludes an array of ant style excludes
|
||||
* @return returns the set of identified files
|
||||
* @throws InvalidScanPathException thrown when the scan path is invalid
|
||||
*/
|
||||
private Set<File> scanAntStylePaths(List<String> antStylePaths, int symLinkDepth, String[] excludes)
|
||||
throws InvalidScanPathException {
|
||||
final Set<File> paths = new HashSet<>();
|
||||
for (String file : antStylePaths) {
|
||||
LOGGER.debug("Scanning {}", file);
|
||||
final DirectoryScanner scanner = new DirectoryScanner();
|
||||
String include = file.replace('\\', '/');
|
||||
File baseDir;
|
||||
|
||||
if (include.startsWith("//")) {
|
||||
throw new InvalidScanPathException("Unable to scan paths specified by //");
|
||||
} else {
|
||||
final int pos = getLastFileSeparator(include);
|
||||
final String tmpBase = include.substring(0, pos);
|
||||
final String tmpInclude = include.substring(pos + 1);
|
||||
if (tmpInclude.indexOf('*') >= 0 || tmpInclude.indexOf('?') >= 0
|
||||
|| (new File(include)).isFile()) {
|
||||
baseDir = new File(tmpBase);
|
||||
include = tmpInclude;
|
||||
} else {
|
||||
baseDir = new File(tmpBase, tmpInclude);
|
||||
include = "**/*";
|
||||
}
|
||||
}
|
||||
scanner.setBasedir(baseDir);
|
||||
final String[] includes = {include};
|
||||
scanner.setIncludes(includes);
|
||||
scanner.setMaxLevelsOfSymlinks(symLinkDepth);
|
||||
if (symLinkDepth <= 0) {
|
||||
scanner.setFollowSymlinks(false);
|
||||
}
|
||||
if (excludes != null && excludes.length > 0) {
|
||||
scanner.addExcludes(excludes);
|
||||
}
|
||||
scanner.scan();
|
||||
if (scanner.getIncludedFilesCount() > 0) {
|
||||
for (String s : scanner.getIncludedFiles()) {
|
||||
final File f = new File(baseDir, s);
|
||||
LOGGER.debug("Found file {}", f.toString());
|
||||
paths.add(f);
|
||||
}
|
||||
}
|
||||
}
|
||||
return paths;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the ant style paths from the given array of files.
|
||||
*
|
||||
* @param files an array of file paths
|
||||
* @return a list containing ant style paths
|
||||
*/
|
||||
private List<String> getPaths(String[] files) {
|
||||
final List<String> antStylePaths = new ArrayList<>();
|
||||
for (String file : files) {
|
||||
final String antPath = ensureCanonicalPath(file);
|
||||
antStylePaths.add(antPath);
|
||||
}
|
||||
return antStylePaths;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -333,14 +384,8 @@ public class App {
|
||||
* connection to the database could not be established
|
||||
*/
|
||||
private void runUpdateOnly() throws UpdateException, DatabaseException {
|
||||
Engine engine = null;
|
||||
try {
|
||||
engine = new Engine();
|
||||
try (Engine engine = new Engine(settings)) {
|
||||
engine.doUpdates();
|
||||
} finally {
|
||||
if (engine != null) {
|
||||
engine.cleanup();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -361,7 +406,7 @@ public class App {
|
||||
final String proxyPass = cli.getProxyPassword();
|
||||
final String dataDirectory = cli.getDataDirectory();
|
||||
final File propertiesFile = cli.getPropertiesFile();
|
||||
final String suppressionFile = cli.getSuppressionFile();
|
||||
final String[] suppressionFiles = cli.getSuppressionFiles();
|
||||
final String hintsFile = cli.getHintsFile();
|
||||
final String nexusUrl = cli.getNexusUrl();
|
||||
final String databaseDriverName = cli.getDatabaseDriverName();
|
||||
@@ -378,10 +423,11 @@ public class App {
|
||||
final Integer cveValidForHours = cli.getCveValidForHours();
|
||||
final Boolean autoUpdate = cli.isAutoUpdate();
|
||||
final Boolean experimentalEnabled = cli.isExperimentalEnabled();
|
||||
final Boolean retiredEnabled = cli.isRetiredEnabled();
|
||||
|
||||
if (propertiesFile != null) {
|
||||
try {
|
||||
Settings.mergeProperties(propertiesFile);
|
||||
settings.mergeProperties(propertiesFile);
|
||||
} catch (FileNotFoundException ex) {
|
||||
throw new InvalidSettingException("Unable to find properties file '" + propertiesFile.getPath() + "'", ex);
|
||||
} catch (IOException ex) {
|
||||
@@ -393,63 +439,66 @@ public class App {
|
||||
// on the command line. This is true of other boolean values set below not using the setBooleanIfNotNull.
|
||||
final boolean nexusUsesProxy = cli.isNexusUsesProxy();
|
||||
if (dataDirectory != null) {
|
||||
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
|
||||
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
|
||||
} else if (System.getProperty("basedir") != null) {
|
||||
final File dataDir = new File(System.getProperty("basedir"), "data");
|
||||
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
} else {
|
||||
final File jarPath = new File(App.class.getProtectionDomain().getCodeSource().getLocation().getPath());
|
||||
final File base = jarPath.getParentFile();
|
||||
final String sub = Settings.getString(Settings.KEYS.DATA_DIRECTORY);
|
||||
final String sub = settings.getString(Settings.KEYS.DATA_DIRECTORY);
|
||||
final File dataDir = new File(base, sub);
|
||||
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
}
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUser);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPass);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
|
||||
Settings.setIntIfNotNull(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
|
||||
settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUser);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPass);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
|
||||
settings.setIntIfNotNull(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
|
||||
|
||||
settings.setArrayIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFiles);
|
||||
|
||||
//File Type Analyzer Settings
|
||||
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, experimentalEnabled);
|
||||
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, experimentalEnabled);
|
||||
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RETIRED_ENABLED, retiredEnabled);
|
||||
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED, !cli.isJarDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, !cli.isArchiveDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, !cli.isPythonDistributionDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, !cli.isPythonPackageDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, !cli.isAutoconfDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_CMAKE_ENABLED, !cli.isCmakeDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, !cli.isNuspecDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, !cli.isAssemblyDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, !cli.isBundleAuditDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, !cli.isOpenSSLDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, !cli.isComposerDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, !cli.isNodeJsDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, !cli.isSwiftPackageAnalyzerDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, !cli.isCocoapodsAnalyzerDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, !cli.isRubyGemspecDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, !cli.isCentralDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, !cli.isNexusDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED, !cli.isJarDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, !cli.isArchiveDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, !cli.isPythonDistributionDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, !cli.isPythonPackageDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, !cli.isAutoconfDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_CMAKE_ENABLED, !cli.isCmakeDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, !cli.isNuspecDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, !cli.isAssemblyDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, !cli.isBundleAuditDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, !cli.isOpenSSLDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, !cli.isComposerDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, !cli.isNodeJsDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_NSP_PACKAGE_ENABLED, !cli.isNspDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, !cli.isSwiftPackageAnalyzerDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, !cli.isCocoapodsAnalyzerDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, !cli.isRubyGemspecDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, !cli.isCentralDisabled());
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, !cli.isNexusDisabled());
|
||||
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, cli.getPathToBundleAudit());
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, additionalZipExtensions);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, cli.getPathToBundleAudit());
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, additionalZipExtensions);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
|
||||
if (cveBase12 != null && !cveBase12.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.CVE_SCHEMA_1_2, cveBase12);
|
||||
Settings.setString(Settings.KEYS.CVE_SCHEMA_2_0, cveBase20);
|
||||
Settings.setString(Settings.KEYS.CVE_MODIFIED_12_URL, cveMod12);
|
||||
Settings.setString(Settings.KEYS.CVE_MODIFIED_20_URL, cveMod20);
|
||||
settings.setString(Settings.KEYS.CVE_SCHEMA_1_2, cveBase12);
|
||||
settings.setString(Settings.KEYS.CVE_SCHEMA_2_0, cveBase20);
|
||||
settings.setString(Settings.KEYS.CVE_MODIFIED_12_URL, cveMod12);
|
||||
settings.setString(Settings.KEYS.CVE_MODIFIED_20_URL, cveMod20);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -53,6 +53,19 @@ public final class CliParser {
|
||||
* Indicates whether the arguments are valid.
|
||||
*/
|
||||
private boolean isValid = true;
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private final Settings settings;
|
||||
|
||||
/**
|
||||
* Constructs a new CLI Parser object with the configured settings.
|
||||
*
|
||||
* @param settings the configured settings
|
||||
*/
|
||||
public CliParser(Settings settings) {
|
||||
this.settings = settings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the arguments passed in and captures the results for later use.
|
||||
@@ -192,14 +205,13 @@ public final class CliParser {
|
||||
final String msg = String.format("Invalid '%s' argument: '%s'", argumentName, path);
|
||||
throw new FileNotFoundException(msg);
|
||||
}
|
||||
} else if (path.startsWith("//") || path.startsWith("\\\\")) {
|
||||
isValid = false;
|
||||
final String msg = String.format("Invalid '%s' argument: '%s'%nUnable to scan paths that start with '//'.", argumentName, path);
|
||||
throw new FileNotFoundException(msg);
|
||||
// } else if (path.startsWith("//") || path.startsWith("\\\\")) {
|
||||
// isValid = false;
|
||||
// final String msg = String.format("Invalid '%s' argument: '%s'%nUnable to scan paths that start with '//'.", argumentName, path);
|
||||
// throw new FileNotFoundException(msg);
|
||||
} else if ((path.endsWith("/*") && !path.endsWith("**/*")) || (path.endsWith("\\*") && path.endsWith("**\\*"))) {
|
||||
final String msg = String.format("Possibly incorrect path '%s' from argument '%s' because it ends with a slash star; "
|
||||
LOGGER.warn("Possibly incorrect path '{}' from argument '{}' because it ends with a slash star; "
|
||||
+ "dependency-check uses ant-style paths", path, argumentName);
|
||||
LOGGER.warn(msg);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -222,10 +234,9 @@ public final class CliParser {
|
||||
* Adds the standard command line options to the given options collection.
|
||||
*
|
||||
* @param options a collection of command line arguments
|
||||
* @throws IllegalArgumentException thrown if there is an exception
|
||||
*/
|
||||
@SuppressWarnings("static-access")
|
||||
private void addStandardOptions(final Options options) throws IllegalArgumentException {
|
||||
private void addStandardOptions(final Options options) {
|
||||
final Option help = new Option(ARGUMENT.HELP_SHORT, ARGUMENT.HELP, false,
|
||||
"Print this message.");
|
||||
|
||||
@@ -273,8 +284,9 @@ public final class CliParser {
|
||||
.desc("Sets how deep nested symbolic links will be followed; 0 indicates symbolic links will not be followed.")
|
||||
.build();
|
||||
|
||||
final Option suppressionFile = Option.builder().argName("file").hasArg().longOpt(ARGUMENT.SUPPRESSION_FILE)
|
||||
.desc("The file path to the suppression XML file.")
|
||||
final Option suppressionFile = Option.builder().argName("file").hasArgs().longOpt(ARGUMENT.SUPPRESSION_FILES)
|
||||
.desc("The file path to the suppression XML file. This can be specified more then once to utilize multiple "
|
||||
+ "suppression files")
|
||||
.build();
|
||||
|
||||
final Option hintsFile = Option.builder().argName("file").hasArg().longOpt(ARGUMENT.HINTS_FILE)
|
||||
@@ -289,6 +301,10 @@ public final class CliParser {
|
||||
.desc("Enables the experimental analyzers.")
|
||||
.build();
|
||||
|
||||
final Option retiredEnabled = Option.builder().longOpt(ARGUMENT.RETIRED)
|
||||
.desc("Enables the experimental analyzers.")
|
||||
.build();
|
||||
|
||||
final Option failOnCVSS = Option.builder().argName("score").hasArg().longOpt(ARGUMENT.FAIL_ON_CVSS)
|
||||
.desc("Specifies if the build should be failed if a CVSS score above a specified level is identified. "
|
||||
+ "The default is 11; since the CVSS scores are 0-10, by default the build will never fail.")
|
||||
@@ -317,6 +333,7 @@ public final class CliParser {
|
||||
.addOption(hintsFile)
|
||||
.addOption(cveValidForHours)
|
||||
.addOption(experimentalEnabled)
|
||||
.addOption(retiredEnabled)
|
||||
.addOption(failOnCVSS);
|
||||
}
|
||||
|
||||
@@ -326,10 +343,9 @@ public final class CliParser {
|
||||
* help messages.
|
||||
*
|
||||
* @param options a collection of command line arguments
|
||||
* @throws IllegalArgumentException thrown if there is an exception
|
||||
*/
|
||||
@SuppressWarnings("static-access")
|
||||
private void addAdvancedOptions(final Options options) throws IllegalArgumentException {
|
||||
private void addAdvancedOptions(final Options options) {
|
||||
|
||||
final Option cve12Base = Option.builder().argName("url").hasArg().longOpt(ARGUMENT.CVE_BASE_12)
|
||||
.desc("Base URL for each year’s CVE 1.2, the %d will be replaced with the year. ")
|
||||
@@ -490,6 +506,8 @@ public final class CliParser {
|
||||
.addOption(swiftPackageManagerAnalyzerEnabled)
|
||||
.addOption(Option.builder().longOpt(ARGUMENT.DISABLE_NODE_JS)
|
||||
.desc("Disable the Node.js Package Analyzer.").build())
|
||||
.addOption(Option.builder().longOpt(ARGUMENT.DISABLE_NSP)
|
||||
.desc("Disable the NSP Package Analyzer.").build())
|
||||
.addOption(nexusUrl)
|
||||
.addOption(nexusUsesProxy)
|
||||
.addOption(additionalZipExtensions)
|
||||
@@ -505,10 +523,9 @@ public final class CliParser {
|
||||
* existing scripts.
|
||||
*
|
||||
* @param options a collection of command line arguments
|
||||
* @throws IllegalArgumentException thrown if there is an exception
|
||||
*/
|
||||
@SuppressWarnings({"static-access", "deprecation"})
|
||||
private void addDeprecatedOptions(final Options options) throws IllegalArgumentException {
|
||||
private void addDeprecatedOptions(final Options options) {
|
||||
|
||||
final Option proxyServer = Option.builder().argName("url").hasArg().longOpt(ARGUMENT.PROXY_URL)
|
||||
.desc("The proxy url argument is deprecated, use proxyserver instead.")
|
||||
@@ -583,7 +600,7 @@ public final class CliParser {
|
||||
private boolean hasDisableOption(String argument, String setting) {
|
||||
if (line == null || !line.hasOption(argument)) {
|
||||
try {
|
||||
return !Settings.getBoolean(setting);
|
||||
return !settings.getBoolean(setting);
|
||||
} catch (InvalidSettingException ise) {
|
||||
LOGGER.warn("Invalid property setting '{}' defaulting to false", setting);
|
||||
return false;
|
||||
@@ -734,6 +751,16 @@ public final class CliParser {
|
||||
return hasDisableOption(ARGUMENT.DISABLE_NODE_JS, Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the disableNSP command line argument was specified.
|
||||
*
|
||||
* @return true if the disableNSP command line argument was specified;
|
||||
* otherwise false
|
||||
*/
|
||||
public boolean isNspDisabled() {
|
||||
return hasDisableOption(ARGUMENT.DISABLE_NSP, Settings.KEYS.ANALYZER_NSP_PACKAGE_ENABLED);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the disableCocoapodsAnalyzer command line argument was
|
||||
* specified.
|
||||
@@ -792,7 +819,7 @@ public final class CliParser {
|
||||
// still honor the property if it's set.
|
||||
if (line == null || !line.hasOption(ARGUMENT.NEXUS_USES_PROXY)) {
|
||||
try {
|
||||
return Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY);
|
||||
return settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY);
|
||||
} catch (InvalidSettingException ise) {
|
||||
return true;
|
||||
}
|
||||
@@ -814,10 +841,10 @@ public final class CliParser {
|
||||
final String helpMsg = String.format("%n%s"
|
||||
+ " can be used to identify if there are any known CVE vulnerabilities in libraries utilized by an application. "
|
||||
+ "%s will automatically update required data from the Internet, such as the CVE and CPE data files from nvd.nist.gov.%n%n",
|
||||
Settings.getString("application.name", "DependencyCheck"),
|
||||
Settings.getString("application.name", "DependencyCheck"));
|
||||
settings.getString("application.name", "DependencyCheck"),
|
||||
settings.getString("application.name", "DependencyCheck"));
|
||||
|
||||
formatter.printHelp(Settings.getString("application.name", "DependencyCheck"),
|
||||
formatter.printHelp(settings.getString("application.name", "DependencyCheck"),
|
||||
helpMsg,
|
||||
options,
|
||||
"",
|
||||
@@ -893,7 +920,7 @@ public final class CliParser {
|
||||
String name = line.getOptionValue(ARGUMENT.PROJECT);
|
||||
if (name == null && appName != null) {
|
||||
name = appName;
|
||||
LOGGER.warn("The '" + ARGUMENT.APP_NAME + "' argument should no longer be used; use '" + ARGUMENT.PROJECT + "' instead.");
|
||||
LOGGER.warn("The '{}' argument should no longer be used; use '{}' instead.", ARGUMENT.APP_NAME, ARGUMENT.PROJECT);
|
||||
}
|
||||
return name;
|
||||
}
|
||||
@@ -1020,12 +1047,12 @@ public final class CliParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the path to the suppression file.
|
||||
* Returns the paths to the suppression files.
|
||||
*
|
||||
* @return the path to the suppression file
|
||||
* @return the paths to the suppression files.
|
||||
*/
|
||||
public String getSuppressionFile() {
|
||||
return line.getOptionValue(ARGUMENT.SUPPRESSION_FILE);
|
||||
public String[] getSuppressionFiles() {
|
||||
return line.getOptionValues(ARGUMENT.SUPPRESSION_FILES);
|
||||
}
|
||||
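Since the option now accepts multiple values, the parsed array has to be folded back into the single `Settings` key the rest of the application reads. A minimal sketch of that merge, assuming the comma-joined format asserted by the AppTest cases later in this diff (the helper class and method names are illustrative, not part of the patch):

```java
package org.owasp.dependencycheck;

import org.owasp.dependencycheck.utils.Settings;

final class SuppressionSettingsSketch {
    // Hypothetical helper: merges every --suppression value into the single
    // comma-separated Settings entry that the tests later in this diff expect.
    static void applySuppressionFiles(CliParser cli, Settings settings) {
        final String[] files = cli.getSuppressionFiles();
        if (files == null || files.length == 0) {
            return;
        }
        final StringBuilder joined = new StringBuilder(files[0]);
        for (int i = 1; i < files.length; i++) {
            joined.append(',').append(files[i]);
        }
        settings.setString(Settings.KEYS.SUPPRESSION_FILE, joined.toString());
    }
}
```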
|
||||
/**
|
||||
@@ -1045,8 +1072,8 @@ public final class CliParser {
|
||||
*/
|
||||
public void printVersionInfo() {
|
||||
final String version = String.format("%s version %s",
|
||||
Settings.getString(Settings.KEYS.APPLICATION_NAME, "dependency-check"),
|
||||
Settings.getString(Settings.KEYS.APPLICATION_VERSION, "Unknown"));
|
||||
settings.getString(Settings.KEYS.APPLICATION_NAME, "dependency-check"),
|
||||
settings.getString(Settings.KEYS.APPLICATION_VERSION, "Unknown"));
|
||||
System.out.println(version);
|
||||
}
|
||||
|
||||
@@ -1166,6 +1193,15 @@ public final class CliParser {
|
||||
return (line != null && line.hasOption(ARGUMENT.EXPERIMENTAL)) ? true : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the retired analyzers are enabled.
|
||||
*
|
||||
* @return true if the retired analyzers are enabled; otherwise null
|
||||
*/
|
||||
public Boolean isRetiredEnabled() {
|
||||
return (line != null && line.hasOption(ARGUMENT.RETIRED)) ? true : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the CVSS value to fail on.
|
||||
*
|
||||
@@ -1363,9 +1399,9 @@ public final class CliParser {
|
||||
public static final String SYM_LINK_DEPTH = "symLink";
|
||||
/**
|
||||
* The CLI argument name for setting the location of the suppression
|
||||
* file.
|
||||
* file(s).
|
||||
*/
|
||||
public static final String SUPPRESSION_FILE = "suppression";
|
||||
public static final String SUPPRESSION_FILES = "suppression";
|
||||
/**
|
||||
* The CLI argument name for setting the location of the hint file.
|
||||
*/
|
||||
@@ -1443,6 +1479,10 @@ public final class CliParser {
|
||||
* Disables the Node.js Package Analyzer.
|
||||
*/
|
||||
public static final String DISABLE_NODE_JS = "disableNodeJS";
|
||||
/**
|
||||
* Disables the NSP Analyzer.
|
||||
*/
|
||||
public static final String DISABLE_NSP = "disableNSP";
|
||||
/**
|
||||
* The URL of the nexus server.
|
||||
*/
|
||||
@@ -1495,6 +1535,10 @@ public final class CliParser {
|
||||
* The CLI argument to enable the experimental analyzers.
|
||||
*/
|
||||
private static final String EXPERIMENTAL = "enableExperimental";
|
||||
/**
|
||||
* The CLI argument to enable the retired analyzers.
|
||||
*/
|
||||
private static final String RETIRED = "enableRetired";
|
||||
/**
|
||||
* The CLI argument to enable the experimental analyzers.
|
||||
*/
|
||||
cli/src/site/markdown/arguments.md (new file, 69 lines)
@@ -0,0 +1,69 @@
Command Line Arguments
======================

The following table lists the command line arguments:

Short | Argument Name | Parameter | Description | Requirement
-------|------------------------|-----------------|-------------|------------
| \-\-project | \<name\> | The name of the project being scanned. | Required
\-s | \-\-scan | \<path\> | The path to scan \- this option can be specified multiple times. It is also possible to specify Ant style paths (e.g. directory/**/*.jar). | Required
| \-\-exclude | \<pattern\> | The path patterns to exclude from the scan \- this option can be specified multiple times. This accepts Ant style path patterns (e.g. **/exclude/**). | Optional
| \-\-symLink | \<depth\> | The depth that symbolic links will be followed; the default is 0 meaning symbolic links will not be followed. | Optional
\-o | \-\-out | \<path\> | The folder to write reports to. This defaults to the current directory. If the format is not set to ALL one could specify a specific file name. | Optional
\-f | \-\-format | \<format\> | The output format to write to (XML, HTML, CSV, JSON, VULN, ALL). The default is HTML. | Required
| \-\-failOnCVSS | \<score\> | If the score is set between 0 and 10 the exit code from dependency-check will indicate if a vulnerability with a CVSS score equal to or higher than the specified value was identified. | Optional
\-l | \-\-log | \<file\> | The file path to write verbose logging information. | Optional
\-n | \-\-noupdate | | Disables the automatic updating of the CPE data. | Optional
| \-\-suppression | \<files\> | The file paths to the suppression XML files; used to suppress [false positives](../general/suppression.html). This can be specified more than once to utilize multiple suppression files. | Optional
\-h | \-\-help | | Print the help message. | Optional
| \-\-advancedHelp | | Print the advanced help message. | Optional
\-v | \-\-version | | Print the version information. | Optional
| \-\-cveValidForHours | \<hours\> | The number of hours to wait before checking for new updates from the NVD. The default is 4 hours. | Optional
| \-\-enableExperimental | | Enable the [experimental analyzers](../analyzers/index.html). If not set the analyzers marked as experimental below will not be loaded or used. | Optional
| \-\-enableRetired | | Enable the [retired analyzers](../analyzers/index.html). If not set the analyzers marked as retired below will not be loaded or used. | Optional

Advanced Options
================
Short | Argument Name | Parameter | Description | Default Value
-------|------------------------|-----------------|----------------------------------------------------------------------------------|-------------------
| \-\-cveUrl12Modified | \<url\> | URL for the modified CVE 1.2 | https://nvd.nist.gov/download/nvdcve-Modified.xml.gz
| \-\-cveUrl20Modified | \<url\> | URL for the modified CVE 2.0 | https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-Modified.xml.gz
| \-\-cveUrl12Base | \<url\> | Base URL for each year's CVE 1.2, the %d will be replaced with the year | https://nvd.nist.gov/download/nvdcve-%d.xml.gz
| \-\-cveUrl20Base | \<url\> | Base URL for each year's CVE 2.0, the %d will be replaced with the year | https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml.gz
\-P | \-\-propertyfile | \<file\> | Specifies a file that contains properties to use instead of application defaults. |
| \-\-updateonly | | If set only the update phase of dependency-check will be executed; no scan will be executed and no report will be generated. |
| \-\-disablePyDist | | Sets whether the [experimental](../analyzers/index.html) Python Distribution Analyzer will be used. | false
| \-\-disablePyPkg | | Sets whether the [experimental](../analyzers/index.html) Python Package Analyzer will be used. | false
| \-\-disableNodeJS | | Sets whether the [retired](../analyzers/index.html) Node.js Package Analyzer will be used. | false
| \-\-disableNSP | | Sets whether the NSP Analyzer will be used. | false
| \-\-disableRubygems | | Sets whether the [experimental](../analyzers/index.html) Ruby Gemspec Analyzer will be used. | false
| \-\-disableBundleAudit | | Sets whether the [experimental](../analyzers/index.html) Ruby Bundler Audit Analyzer will be used. | false
| \-\-disableCocoapodsAnalyzer | | Sets whether the [experimental](../analyzers/index.html) Cocoapods Analyzer will be used. | false
| \-\-disableSwiftPackageManagerAnalyzer | | Sets whether the [experimental](../analyzers/index.html) Swift Package Manager Analyzer will be used. | false
| \-\-disableAutoconf | | Sets whether the [experimental](../analyzers/index.html) Autoconf Analyzer will be used. | false
| \-\-disableOpenSSL | | Sets whether the OpenSSL Analyzer will be used. | false
| \-\-disableCmake | | Sets whether the [experimental](../analyzers/index.html) Cmake Analyzer will be disabled. | false
| \-\-disableArchive | | Sets whether the Archive Analyzer will be disabled. | false
| \-\-zipExtensions | \<strings\> | A comma-separated list of additional file extensions to be treated like a ZIP file, the contents will be extracted and analyzed. |
| \-\-disableJar | | Sets whether the Jar Analyzer will be disabled. | false
| \-\-disableComposer | | Sets whether the [experimental](../analyzers/index.html) PHP Composer Lock File Analyzer will be disabled. | false
| \-\-disableCentral | | Sets whether the Central Analyzer will be used. **Disabling this analyzer is not recommended as it could lead to false negatives (e.g. libraries that have vulnerabilities may not be reported correctly).** If this analyzer is being disabled there is a good chance you also want to disable the Nexus Analyzer. | false
| \-\-disableNexus | | Sets whether the Nexus Analyzer will be used (requires Nexus Pro). Note, this has been superseded by the Central Analyzer. However, you can configure the Nexus URL to utilize an internally hosted Nexus Pro server. | false
| \-\-nexus | \<url\> | The url to the Nexus Server's web service end point (example: http://domain.enterprise/nexus/service/local/). If not set the Nexus Analyzer will be disabled. |
| \-\-nexusUsesProxy | \<true\|false\> | Whether or not the defined proxy should be used when connecting to Nexus. | true
| \-\-disableNuspec | | Sets whether or not the .NET Nuget Nuspec Analyzer will be used. | false
| \-\-disableAssembly | | Sets whether or not the .NET Assembly Analyzer should be used. | false
| \-\-mono | \<path\> | The path to Mono for .NET Assembly analysis on non-windows systems. |
| \-\-bundleAudit | | The path to the bundle-audit executable. |
| \-\-proxyserver | \<server\> | The proxy server to use when downloading resources; see the [proxy configuration](../data/proxy.html) page for more information. |
| \-\-proxyport | \<port\> | The proxy port to use when downloading resources. |
| \-\-connectiontimeout | \<timeout\> | The connection timeout (in milliseconds) to use when downloading resources. |
| \-\-proxypass | \<pass\> | The proxy password to use when downloading resources. |
| \-\-proxyuser | \<user\> | The proxy username to use when downloading resources. |
| \-\-connectionString | \<connStr\> | The connection string to the database. |
| \-\-dbDriverName | \<driver\> | The database driver name. |
| \-\-dbDriverPath | \<path\> | The path to the database driver; note, this does not need to be set unless the JAR is outside of the class path. |
| \-\-dbPassword | \<password\> | The password for connecting to the database. |
| \-\-dbUser | \<user\> | The username used to connect to the database. |
\-d | \-\-data | \<path\> | The location of the data directory used to store persistent data. This option should generally not be set. |
| \-\-purge | | Delete the local copy of the NVD. This is used to force a refresh of the data. |
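The table above lists the user-facing flags; the rest of this changeset routes them through `CliParser` and `App` into a per-run `Settings` instance. A minimal sketch of that flow under the new constructors, with placeholder argument values (the project name, scan path and suppression file names are not taken from the patch):

```java
package org.owasp.dependencycheck;

import org.owasp.dependencycheck.utils.Settings;

public class CliArgumentsSketch {
    public static void main(String[] argv) throws Exception {
        // Placeholder arguments; parse() verifies that scan paths actually exist on disk.
        final String[] args = {
                "--project", "my-app",
                "-s", "lib/",
                "--suppression", "first-file.xml", "another-file.xml",
                "--enableRetired"
        };
        final Settings settings = new Settings();
        try {
            final CliParser cli = new CliParser(settings); // the parser now receives the Settings instance
            cli.parse(args);
            final App app = new App(settings);             // App shares the same Settings
            app.populateSettings(cli);                     // copies the parsed values into the Settings
            System.out.println(settings.getString(Settings.KEYS.SUPPRESSION_FILE));
        } finally {
            settings.cleanup(true);
        }
    }
}
```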
(binary image file changed; size 10 KiB before and after)
@@ -13,27 +13,40 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2017 The OWASP Foundatio. All Rights Reserved.
|
||||
* Copyright (c) 2017 The OWASP Foundation. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck;
|
||||
|
||||
import static org.hamcrest.core.Is.is;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertThat;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.cli.UnrecognizedOptionException;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.Settings.KEYS;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author jeremy
|
||||
* Tests for the {@link App} class.
|
||||
*/
|
||||
public class AppTest {
|
||||
public class AppTest extends BaseTest {
|
||||
|
||||
/**
|
||||
* Test rule for asserting exceptions and their contents.
|
||||
*/
|
||||
@Rule
|
||||
public ExpectedException expectedException = ExpectedException.none();
|
||||
|
||||
/**
|
||||
* Test of ensureCanonicalPath method, of class App.
|
||||
@@ -41,7 +54,7 @@ public class AppTest {
|
||||
@Test
|
||||
public void testEnsureCanonicalPath() {
|
||||
String file = "../*.jar";
|
||||
App instance = new App();
|
||||
App instance = new App(getSettings());
|
||||
String result = instance.ensureCanonicalPath(file);
|
||||
assertFalse(result.contains(".."));
|
||||
assertTrue(result.endsWith("*.jar"));
|
||||
@@ -52,20 +65,20 @@ public class AppTest {
|
||||
assertTrue("result=" + result, result.endsWith(expResult));
|
||||
}
|
||||
|
||||
@Test(expected = UnrecognizedOptionException.class)
|
||||
public void testPopulateSettingsException() throws FileNotFoundException, ParseException, InvalidSettingException, URISyntaxException {
|
||||
String[] args = {"-invalidPROPERTY"};
|
||||
assertTrue(testBooleanProperties(args, null));
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert that boolean properties can be set on the CLI and parsed into the
|
||||
* {@link Settings}.
|
||||
*
|
||||
* @throws Exception the unexpected {@link Exception}.
|
||||
*/
|
||||
@Test
|
||||
public void testPopulateSettings() throws FileNotFoundException, ParseException, InvalidSettingException, URISyntaxException {
|
||||
public void testPopulateSettings() throws Exception {
|
||||
File prop = new File(this.getClass().getClassLoader().getResource("sample.properties").toURI().getPath());
|
||||
String[] args = {"-P", prop.getAbsolutePath()};
|
||||
Map<String, Boolean> expected = new HashMap<>();
|
||||
expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
|
||||
expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.TRUE);
|
||||
|
||||
|
||||
assertTrue(testBooleanProperties(args, expected));
|
||||
|
||||
String[] args2 = {"-n"};
|
||||
@@ -103,25 +116,79 @@ public class AppTest {
|
||||
expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
|
||||
expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.FALSE);
|
||||
assertTrue(testBooleanProperties(args8, expected));
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Assert that an {@link UnrecognizedOptionException} is thrown when a
|
||||
* property that is not supported is specified on the CLI.
|
||||
*
|
||||
* @throws Exception the unexpected {@link Exception}.
|
||||
*/
|
||||
@Test
|
||||
public void testPopulateSettingsException() throws Exception {
|
||||
String[] args = {"-invalidPROPERTY"};
|
||||
|
||||
expectedException.expect(UnrecognizedOptionException.class);
|
||||
expectedException.expectMessage("Unrecognized option: -invalidPROPERTY");
|
||||
testBooleanProperties(args, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert that a single suppression file can be set using the CLI.
|
||||
*
|
||||
* @throws Exception the unexpected {@link Exception}.
|
||||
*/
|
||||
@Test
|
||||
public void testPopulatingSuppressionSettingsWithASingleFile() throws Exception {
|
||||
// GIVEN CLI properties with the mandatory arguments
|
||||
File prop = new File(this.getClass().getClassLoader().getResource("sample.properties").toURI().getPath());
|
||||
|
||||
// AND a single suppression file
|
||||
String[] args = {"-P", prop.getAbsolutePath(), "--suppression", "another-file.xml"};
|
||||
|
||||
// WHEN parsing the CLI arguments
|
||||
final CliParser cli = new CliParser(getSettings());
|
||||
cli.parse(args);
|
||||
final App classUnderTest = new App(getSettings());
|
||||
classUnderTest.populateSettings(cli);
|
||||
|
||||
// THEN the suppression file is set in the settings for use in the application core
|
||||
assertThat("Expected the suppression file to be set in the Settings", getSettings().getString(KEYS.SUPPRESSION_FILE), is("another-file.xml"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert that multiple suppression files can be set using the CLI.
|
||||
*
|
||||
* @throws Exception the unexpected {@link Exception}.
|
||||
*/
|
||||
@Test
|
||||
public void testPopulatingSuppressionSettingsWithMultipleFiles() throws Exception {
|
||||
// GIVEN CLI properties with the mandatory arguments
|
||||
File prop = new File(this.getClass().getClassLoader().getResource("sample.properties").toURI().getPath());
|
||||
|
||||
// AND multiple suppression files
|
||||
String[] args = {"-P", prop.getAbsolutePath(), "--suppression", "first-file.xml", "another-file.xml"};
|
||||
|
||||
// WHEN parsing the CLI arguments
|
||||
final CliParser cli = new CliParser(getSettings());
|
||||
cli.parse(args);
|
||||
final App classUnderTest = new App(getSettings());
|
||||
classUnderTest.populateSettings(cli);
|
||||
|
||||
// THEN the suppression file is set in the settings for use in the application core
|
||||
assertThat("Expected the suppression files to be set in the Settings with a separator", getSettings().getString(KEYS.SUPPRESSION_FILE), is("first-file.xml,another-file.xml"));
|
||||
}
|
||||
|
||||
private boolean testBooleanProperties(String[] args, Map<String, Boolean> expected) throws URISyntaxException, FileNotFoundException, ParseException, InvalidSettingException {
|
||||
Settings.initialize();
|
||||
try {
|
||||
final CliParser cli = new CliParser();
|
||||
cli.parse(args);
|
||||
App instance = new App();
|
||||
instance.populateSettings(cli);
|
||||
boolean results = true;
|
||||
for (Map.Entry<String, Boolean> entry : expected.entrySet()) {
|
||||
results &= Settings.getBoolean(entry.getKey()) == entry.getValue();
|
||||
}
|
||||
|
||||
return results;
|
||||
} finally {
|
||||
Settings.cleanup();
|
||||
this.reloadSettings();
|
||||
final CliParser cli = new CliParser(getSettings());
|
||||
cli.parse(args);
|
||||
App instance = new App(getSettings());
|
||||
instance.populateSettings(cli);
|
||||
boolean results = true;
|
||||
for (Map.Entry<String, Boolean> entry : expected.entrySet()) {
|
||||
results &= getSettings().getBoolean(entry.getKey()) == entry.getValue();
|
||||
}
|
||||
return results;
|
||||
}
|
||||
}
|
||||
cli/src/test/java/org/owasp/dependencycheck/BaseTest.java (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
/*
|
||||
* Copyright 2014 OWASP.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.owasp.dependencycheck;
|
||||
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class BaseTest {
|
||||
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private Settings settings;
|
||||
|
||||
/**
|
||||
* Initialize the {@link Settings}.
|
||||
*/
|
||||
@Before
|
||||
public void setUp() {
|
||||
settings = new Settings();
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean the {@link Settings}.
|
||||
*/
|
||||
@After
|
||||
public void tearDown() {
|
||||
settings.cleanup(true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the settings for the test cases.
|
||||
*
|
||||
* @return the settings for the test cases
|
||||
*/
|
||||
protected Settings getSettings() {
|
||||
return settings;
|
||||
}
|
||||
|
||||
protected void reloadSettings() {
|
||||
tearDown();
|
||||
setUp();
|
||||
}
|
||||
}
|
||||
@@ -33,17 +33,7 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class CliParserTest {
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() throws Exception {
|
||||
Settings.initialize();
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDownClass() throws Exception {
|
||||
Settings.cleanup(true);
|
||||
}
|
||||
public class CliParserTest extends BaseTest {
|
||||
|
||||
/**
|
||||
* Test of parse method, of class CliParser.
|
||||
@@ -59,7 +49,7 @@ public class CliParserTest {
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(baos));
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.parse(args);
|
||||
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
@@ -78,7 +68,7 @@ public class CliParserTest {
|
||||
String[] args = {"-help"};
|
||||
PrintStream out = System.out;
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.parse(args);
|
||||
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
@@ -96,7 +86,7 @@ public class CliParserTest {
|
||||
|
||||
String[] args = {"-version"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.parse(args);
|
||||
Assert.assertTrue(instance.isGetVersion());
|
||||
Assert.assertFalse(instance.isGetHelp());
|
||||
@@ -114,7 +104,7 @@ public class CliParserTest {
|
||||
|
||||
String[] args = {"--failOnCVSS"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
try {
|
||||
instance.parse(args);
|
||||
} catch (ParseException ex) {
|
||||
@@ -135,7 +125,7 @@ public class CliParserTest {
|
||||
|
||||
String[] args = {"--failOnCVSS","bad"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.parse(args);
|
||||
Assert.assertEquals("Default should be 11", 11, instance.getFailOnCVSS());
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
@@ -153,7 +143,7 @@ public class CliParserTest {
|
||||
|
||||
String[] args = {"--failOnCVSS","6"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.parse(args);
|
||||
Assert.assertEquals(6, instance.getFailOnCVSS());
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
@@ -178,7 +168,7 @@ public class CliParserTest {
|
||||
System.setOut(new PrintStream(baos_out));
|
||||
System.setErr(new PrintStream(baos_err));
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
|
||||
try {
|
||||
instance.parse(args);
|
||||
@@ -200,7 +190,7 @@ public class CliParserTest {
|
||||
|
||||
String[] args = {"-scan"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
|
||||
try {
|
||||
instance.parse(args);
|
||||
@@ -223,7 +213,7 @@ public class CliParserTest {
|
||||
|
||||
String[] args = {"-scan", "jar.that.does.not.exist", "-app", "test"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
try {
|
||||
instance.parse(args);
|
||||
} catch (FileNotFoundException ex) {
|
||||
@@ -245,7 +235,7 @@ public class CliParserTest {
|
||||
File path = new File(this.getClass().getClassLoader().getResource("checkSumTest.file").toURI().getPath());
|
||||
String[] args = {"-scan", path.getCanonicalPath(), "-out", "./", "-app", "test"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.parse(args);
|
||||
|
||||
Assert.assertEquals(path.getCanonicalPath(), instance.getScanFiles()[0]);
|
||||
@@ -267,7 +257,7 @@ public class CliParserTest {
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(baos));
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
instance.printVersionInfo();
|
||||
try {
|
||||
baos.flush();
|
||||
@@ -296,7 +286,7 @@ public class CliParserTest {
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(baos));
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
CliParser instance = new CliParser(getSettings());
|
||||
String[] args = {"-h"};
|
||||
instance.parse(args);
|
||||
instance.printHelp();
|
||||
@@ -1,5 +1,5 @@
|
||||
autoupdate=false
|
||||
|
||||
somethingmadeup=test
|
||||
analyzer.experimental.enabled=false
|
||||
analyzer.jar.enabled=true
|
||||
analyzer.archive.enabled=true
|
||||
@@ -20,7 +20,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>1.4.6-SNAPSHOT</version>
|
||||
<version>3.1.2-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>dependency-check-core</artifactId>
|
||||
@@ -28,15 +28,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
|
||||
<name>Dependency-Check Core</name>
|
||||
<description>dependency-check-core is the engine and reporting tool used to identify and report if there are any known, publicly disclosed vulnerabilities in the scanned project's dependencies. The engine extracts meta-data from the dependencies and uses this to do fuzzy key-word matching against the Common Platform Enumeration (CPE), if any CPE identifiers are found the associated Common Vulnerabilities and Exposures (CVE) entries are added to the generated report.</description>
|
||||
<!-- begin copy from http://minds.coremedia.com/2012/09/11/problem-solved-deploy-multi-module-maven-project-site-as-github-pages/ -->
|
||||
<distributionManagement>
|
||||
<site>
|
||||
<id>github-pages-site</id>
|
||||
<name>Deployment through GitHub's site deployment plugin</name>
|
||||
<url>${basedir}/../target/site/${project.version}/dependency-check-core</url>
|
||||
</site>
|
||||
</distributionManagement>
|
||||
<!-- end copy -->
|
||||
<build>
|
||||
<resources>
|
||||
<resource>
|
||||
@@ -99,6 +90,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<configuration>
|
||||
<outputDirectory>${project.build.directory}/test-classes</outputDirectory>
|
||||
<includeScope>test</includeScope>
|
||||
<excludeArtifactIds>dependency-check-utils</excludeArtifactIds>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
@@ -123,56 +115,11 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
<reporting>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-report-plugin</artifactId>
|
||||
<reportSets>
|
||||
<reportSet>
|
||||
<id>integration-tests</id>
|
||||
<reports>
|
||||
<report>report-only</report>
|
||||
<report>failsafe-report-only</report>
|
||||
</reports>
|
||||
</reportSet>
|
||||
</reportSets>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-checkstyle-plugin</artifactId>
|
||||
<version>${reporting.checkstyle-plugin.version}</version>
|
||||
<configuration>
|
||||
<enableRulesSummary>false</enableRulesSummary>
|
||||
<enableFilesSummary>false</enableFilesSummary>
|
||||
<configLocation>${basedir}/../src/main/config/checkstyle-checks.xml</configLocation>
|
||||
<headerLocation>${basedir}/../src/main/config/checkstyle-header.txt</headerLocation>
|
||||
<suppressionsLocation>${basedir}/../src/main/config/checkstyle-suppressions.xml</suppressionsLocation>
|
||||
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-pmd-plugin</artifactId>
|
||||
<version>${reporting.pmd-plugin.version}</version>
|
||||
<configuration>
|
||||
<targetJdk>1.6</targetJdk>
|
||||
<linkXRef>true</linkXRef>
|
||||
<sourceEncoding>utf-8</sourceEncoding>
|
||||
<excludes>
|
||||
<exclude>**/generated/*.java</exclude>
|
||||
</excludes>
|
||||
<rulesets>
|
||||
<ruleset>../src/main/config/dcrules.xml</ruleset>
|
||||
<ruleset>/rulesets/java/basic.xml</ruleset>
|
||||
<ruleset>/rulesets/java/imports.xml</ruleset>
|
||||
<ruleset>/rulesets/java/unusedcode.xml</ruleset>
|
||||
</rulesets>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</reporting>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.vdurmont</groupId>
|
||||
<artifactId>semver4j</artifactId>
|
||||
</dependency>
|
||||
<!-- Note, to stay compatible with Jenkins installations only JARs compiled to 1.6 can be used -->
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
@@ -257,7 +204,8 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<groupId>com.google.code.gson</groupId>
|
||||
<artifactId>gson</artifactId>
|
||||
</dependency>
|
||||
<!-- The following dependencies are only used during testing -->
|
||||
<!-- The following dependencies are only used during testing
|
||||
and must not be converted to a properties based version number -->
|
||||
<dependency>
|
||||
<groupId>org.apache.maven.scm</groupId>
|
||||
<artifactId>maven-scm-provider-cvsexe</artifactId>
|
||||
@@ -21,12 +21,12 @@ import org.owasp.dependencycheck.analyzer.Analyzer;
|
||||
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Callable;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
/**
|
||||
* Task to support parallelism of dependency-check analysis. Analyses a single
|
||||
@@ -34,6 +34,7 @@ import java.util.concurrent.Callable;
|
||||
*
|
||||
* @author Stefan Neuhaus
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class AnalysisTask implements Callable<Void> {
|
||||
|
||||
/**
|
||||
@@ -57,10 +58,6 @@ public class AnalysisTask implements Callable<Void> {
|
||||
* The list of exceptions that may occur during analysis.
|
||||
*/
|
||||
private final List<Throwable> exceptions;
|
||||
/**
|
||||
* A reference to the global settings object.
|
||||
*/
|
||||
private final Settings settings;
|
||||
|
||||
/**
|
||||
* Creates a new analysis task.
|
||||
@@ -70,16 +67,12 @@ public class AnalysisTask implements Callable<Void> {
|
||||
* @param engine the dependency-check engine
|
||||
* @param exceptions exceptions that occur during analysis will be added to
|
||||
* this collection of exceptions
|
||||
* @param settings a reference to the global settings object; this is
|
||||
* necessary so that when the thread is started the dependencies have a
|
||||
* correct reference to the global settings.
|
||||
*/
|
||||
AnalysisTask(Analyzer analyzer, Dependency dependency, Engine engine, List<Throwable> exceptions, Settings settings) {
|
||||
AnalysisTask(Analyzer analyzer, Dependency dependency, Engine engine, List<Throwable> exceptions) {
|
||||
this.analyzer = analyzer;
|
||||
this.dependency = dependency;
|
||||
this.engine = engine;
|
||||
this.exceptions = exceptions;
|
||||
this.settings = settings;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -89,26 +82,20 @@ public class AnalysisTask implements Callable<Void> {
|
||||
*/
|
||||
@Override
|
||||
public Void call() {
|
||||
try {
|
||||
Settings.setInstance(settings);
|
||||
|
||||
if (shouldAnalyze()) {
|
||||
LOGGER.debug("Begin Analysis of '{}' ({})", dependency.getActualFilePath(), analyzer.getName());
|
||||
try {
|
||||
analyzer.analyze(dependency, engine);
|
||||
} catch (AnalysisException ex) {
|
||||
LOGGER.warn("An error occurred while analyzing '{}' ({}).", dependency.getActualFilePath(), analyzer.getName());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
} catch (Throwable ex) {
|
||||
LOGGER.warn("An unexpected error occurred during analysis of '{}' ({}): {}",
|
||||
dependency.getActualFilePath(), analyzer.getName(), ex.getMessage());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
}
|
||||
if (shouldAnalyze()) {
|
||||
LOGGER.debug("Begin Analysis of '{}' ({})", dependency.getActualFilePath(), analyzer.getName());
|
||||
try {
|
||||
analyzer.analyze(dependency, engine);
|
||||
} catch (AnalysisException ex) {
|
||||
LOGGER.warn("An error occurred while analyzing '{}' ({}).", dependency.getActualFilePath(), analyzer.getName());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
} catch (Throwable ex) {
|
||||
LOGGER.warn("An unexpected error occurred during analysis of '{}' ({}): {}",
|
||||
dependency.getActualFilePath(), analyzer.getName(), ex.getMessage());
|
||||
LOGGER.debug("", ex);
|
||||
exceptions.add(ex);
|
||||
}
|
||||
} finally {
|
||||
Settings.cleanup(false);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@@ -123,7 +110,6 @@ public class AnalysisTask implements Callable<Void> {
|
||||
final FileTypeAnalyzer fileTypeAnalyzer = (FileTypeAnalyzer) analyzer;
|
||||
return fileTypeAnalyzer.accept(dependency.getActualFile());
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -24,6 +24,7 @@ import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
|
||||
import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
|
||||
import org.owasp.dependencycheck.data.update.CachedWebDataSource;
|
||||
import org.owasp.dependencycheck.data.update.UpdateService;
|
||||
import org.owasp.dependencycheck.data.update.exception.UpdateException;
|
||||
@@ -31,6 +32,8 @@ import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.exception.ExceptionCollection;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.exception.NoDataException;
|
||||
import org.owasp.dependencycheck.exception.ReportException;
|
||||
import org.owasp.dependencycheck.reporting.ReportGenerator;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
@@ -39,6 +42,7 @@ import org.slf4j.LoggerFactory;
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
@@ -54,6 +58,13 @@ import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import javax.annotation.concurrent.NotThreadSafe;
|
||||
import org.owasp.dependencycheck.exception.H2DBLockException;
|
||||
import org.owasp.dependencycheck.utils.H2DBLock;
|
||||
|
||||
//CSOFF: AvoidStarImport
|
||||
import static org.owasp.dependencycheck.analyzer.AnalysisPhase.*;
|
||||
//CSON: AvoidStarImport
|
||||
|
||||
/**
|
||||
* Scans files, directories, etc. for Dependencies. Analyzers are loaded and
|
||||
@@ -63,12 +74,93 @@ import java.util.concurrent.TimeUnit;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class Engine implements FileFilter {
|
||||
@NotThreadSafe
|
||||
public class Engine implements FileFilter, AutoCloseable {
|
||||
|
||||
/**
|
||||
* {@link Engine} execution modes.
|
||||
*/
|
||||
public enum Mode {
|
||||
/**
|
||||
* In evidence collection mode the {@link Engine} only collects evidence
|
||||
* from the scan targets, and doesn't require a database.
|
||||
*/
|
||||
EVIDENCE_COLLECTION(
|
||||
false,
|
||||
INITIAL,
|
||||
PRE_INFORMATION_COLLECTION,
|
||||
INFORMATION_COLLECTION,
|
||||
POST_INFORMATION_COLLECTION
|
||||
),
|
||||
/**
|
||||
* In evidence processing mode the {@link Engine} processes the evidence
|
||||
* collected using the {@link #EVIDENCE_COLLECTION} mode. Dependencies
|
||||
* should be injected into the {@link Engine} using
|
||||
* {@link Engine#setDependencies(List)}.
|
||||
*/
|
||||
EVIDENCE_PROCESSING(
|
||||
true,
|
||||
PRE_IDENTIFIER_ANALYSIS,
|
||||
IDENTIFIER_ANALYSIS,
|
||||
POST_IDENTIFIER_ANALYSIS,
|
||||
PRE_FINDING_ANALYSIS,
|
||||
FINDING_ANALYSIS,
|
||||
POST_FINDING_ANALYSIS,
|
||||
FINAL
|
||||
),
|
||||
/**
|
||||
* In standalone mode the {@link Engine} will collect and process
|
||||
* evidence in a single execution.
|
||||
*/
|
||||
STANDALONE(true, AnalysisPhase.values());
|
||||
|
||||
/**
|
||||
* Whether the database is required in this mode.
|
||||
*/
|
||||
private final boolean databaseRequired;
|
||||
/**
|
||||
* The analysis phases included in the mode.
|
||||
*/
|
||||
private final AnalysisPhase[] phases;
|
||||
|
||||
/**
|
||||
* Returns true if the database is required; otherwise false.
|
||||
*
|
||||
* @return whether or not the database is required
|
||||
*/
|
||||
private boolean isDatabaseRequired() {
|
||||
return databaseRequired;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phases for this mode.
|
||||
*
|
||||
* @return the phases for this mode
|
||||
*/
|
||||
public AnalysisPhase[] getPhases() {
|
||||
return phases;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new mode.
|
||||
*
|
||||
* @param databaseRequired if the database is required for the mode
|
||||
* @param phases the analysis phases to include in the mode
|
||||
*/
|
||||
Mode(boolean databaseRequired, AnalysisPhase... phases) {
|
||||
this.databaseRequired = databaseRequired;
|
||||
this.phases = phases;
|
||||
}
|
||||
}
|
||||
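The enum above determines which analysis phases a given Engine will run. A short sketch of how a caller might combine the new mode-aware constructor with the AutoCloseable support added below; the scan path is a placeholder, and the calls shown are the existing scan and analyzeDependencies entry points:

```java
package org.owasp.dependencycheck;

import org.owasp.dependencycheck.utils.Settings;

public class EngineModeSketch {
    public static void main(String[] args) throws Exception {
        final Settings settings = new Settings();
        // Engine now implements AutoCloseable, so try-with-resources replaces the old cleanup() call.
        try (Engine engine = new Engine(Engine.Mode.EVIDENCE_COLLECTION, settings)) {
            engine.scan("path/to/project");   // placeholder path; evidence collection needs no database
            engine.analyzeDependencies();     // runs only the phases included in the selected mode
        } finally {
            settings.cleanup(true);
        }
    }
}
```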
|
||||
/**
|
||||
* The list of dependencies.
|
||||
*/
|
||||
private final List<Dependency> dependencies = Collections.synchronizedList(new ArrayList<Dependency>());
|
||||
/**
|
||||
* The external view of the dependency list.
|
||||
*/
|
||||
private Dependency[] dependenciesExternalView = null;
|
||||
/**
|
||||
* A Map of analyzers grouped by Analysis phase.
|
||||
*/
|
||||
@@ -79,11 +171,17 @@ public class Engine implements FileFilter {
|
||||
*/
|
||||
private final Set<FileTypeAnalyzer> fileTypeAnalyzers = new HashSet<>();
|
||||
|
||||
/**
|
||||
* The engine execution mode indicating it will either collect evidence or
|
||||
* process evidence or both.
|
||||
*/
|
||||
private final Mode mode;
|
||||
|
||||
/**
|
||||
* The ClassLoader to use when dynamically loading Analyzer and Update
|
||||
* services.
|
||||
*/
|
||||
private ClassLoader serviceClassLoader = Thread.currentThread().getContextClassLoader();
|
||||
private final ClassLoader serviceClassLoader;
|
||||
/**
|
||||
* A reference to the database.
|
||||
*/
|
||||
@@ -92,26 +190,51 @@ public class Engine implements FileFilter {
|
||||
* The Logger for use throughout the class.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Engine.class);
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private final Settings settings;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Mode#STANDALONE} Engine.
|
||||
*
|
||||
* @param settings reference to the configured settings
|
||||
*/
|
||||
public Engine(Settings settings) {
|
||||
this(Mode.STANDALONE, settings);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new Engine.
|
||||
*
|
||||
* @throws DatabaseException thrown if there is an error connecting to the
|
||||
* database
|
||||
* @param mode the mode of operation
|
||||
* @param settings reference to the configured settings
|
||||
*/
|
||||
public Engine() throws DatabaseException {
|
||||
initializeEngine();
|
||||
public Engine(Mode mode, Settings settings) {
|
||||
this(Thread.currentThread().getContextClassLoader(), mode, settings);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Mode#STANDALONE} Engine.
|
||||
*
|
||||
* @param serviceClassLoader a reference the class loader being used
|
||||
* @param settings reference to the configured settings
|
||||
*/
|
||||
public Engine(ClassLoader serviceClassLoader, Settings settings) {
|
||||
this(serviceClassLoader, Mode.STANDALONE, settings);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new Engine.
|
||||
*
|
||||
* @param serviceClassLoader a reference the class loader being used
|
||||
* @throws DatabaseException thrown if there is an error connecting to the
|
||||
* database
|
||||
* @param mode the mode of the engine
|
||||
* @param settings reference to the configured settings
|
||||
*/
|
||||
public Engine(ClassLoader serviceClassLoader) throws DatabaseException {
|
||||
public Engine(ClassLoader serviceClassLoader, Mode mode, Settings settings) {
|
||||
this.settings = settings;
|
||||
this.serviceClassLoader = serviceClassLoader;
|
||||
this.mode = mode;
|
||||
initializeEngine();
|
||||
}
|
||||
|
||||
@@ -122,20 +245,21 @@ public class Engine implements FileFilter {
|
||||
* @throws DatabaseException thrown if there is an error connecting to the
|
||||
* database
|
||||
*/
|
||||
protected final void initializeEngine() throws DatabaseException {
|
||||
ConnectionFactory.initialize();
|
||||
protected final void initializeEngine() {
|
||||
loadAnalyzers();
|
||||
}
|
||||
|
||||
/**
|
||||
* Properly cleans up resources allocated during analysis.
|
||||
*/
|
||||
public void cleanup() {
|
||||
if (database != null) {
|
||||
database.close();
|
||||
database = null;
|
||||
@Override
|
||||
public void close() {
|
||||
if (mode.isDatabaseRequired()) {
|
||||
if (database != null) {
|
||||
database.close();
|
||||
database = null;
|
||||
}
|
||||
}
|
||||
ConnectionFactory.cleanup();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -146,13 +270,13 @@ public class Engine implements FileFilter {
|
||||
if (!analyzers.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
for (AnalysisPhase phase : AnalysisPhase.values()) {
|
||||
for (AnalysisPhase phase : mode.getPhases()) {
|
||||
analyzers.put(phase, new ArrayList<Analyzer>());
|
||||
}
|
||||
|
||||
final AnalyzerService service = new AnalyzerService(serviceClassLoader);
|
||||
final List<Analyzer> iterator = service.getAnalyzers();
|
||||
final AnalyzerService service = new AnalyzerService(serviceClassLoader, settings);
|
||||
final List<Analyzer> iterator = service.getAnalyzers(mode.getPhases());
|
||||
for (Analyzer a : iterator) {
|
||||
a.initialize(this.settings);
|
||||
analyzers.get(a.getAnalysisPhase()).add(a);
|
||||
if (a instanceof FileTypeAnalyzer) {
|
||||
this.fileTypeAnalyzers.add((FileTypeAnalyzer) a);
|
||||
@@ -171,18 +295,44 @@ public class Engine implements FileFilter {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the dependencies identified. The returned list is a reference to the
|
||||
* engine's synchronized list. <b>You must synchronize on the returned
|
||||
* list</b> when you modify and iterate over it from multiple threads. E.g.
|
||||
* this holds for analyzers supporting parallel processing during their
|
||||
* analysis phase.
|
||||
* Adds a dependency.
|
||||
*
|
||||
* @param dependency the dependency to add
|
||||
*/
|
||||
public synchronized void addDependency(Dependency dependency) {
|
||||
dependencies.add(dependency);
|
||||
dependenciesExternalView = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sorts the dependency list.
|
||||
*/
|
||||
public synchronized void sortDependencies() {
|
||||
//TODO - is this actually necessary?
|
||||
// Collections.sort(dependencies);
|
||||
// dependenciesExternalView = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the dependency.
|
||||
*
|
||||
* @param dependency the dependency to remove.
|
||||
*/
|
||||
public synchronized void removeDependency(Dependency dependency) {
|
||||
dependencies.remove(dependency);
|
||||
dependenciesExternalView = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a copy of the dependencies as an array.
|
||||
*
|
||||
* @return the dependencies identified
|
||||
* @see Collections#synchronizedList(List)
|
||||
* @see Analyzer#supportsParallelProcessing()
|
||||
*/
|
||||
public synchronized List<Dependency> getDependencies() {
|
||||
return dependencies;
|
||||
public synchronized Dependency[] getDependencies() {
|
||||
if (dependenciesExternalView == null) {
|
||||
dependenciesExternalView = dependencies.toArray(new Dependency[dependencies.size()]);
|
||||
}
|
||||
return dependenciesExternalView;
|
||||
}
|
||||
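Because getDependencies() now hands back a cached array copy instead of the live synchronized list, callers iterate it directly and use the new mutator methods for changes. A small illustrative sketch (the printing logic is an assumption, not from the patch):

```java
package org.owasp.dependencycheck;

import org.owasp.dependencycheck.dependency.Dependency;

final class DependencyListingSketch {
    // "engine" is any Engine that has already scanned and analyzed its targets.
    static void printDependencies(Engine engine) {
        // getDependencies() returns a cached defensive copy, so plain iteration is safe.
        for (Dependency dependency : engine.getDependencies()) {
            System.out.println(dependency.getFileName());
        }
        // Code that previously modified the returned List must now call
        // engine.addDependency(...) or engine.removeDependency(...) instead.
    }
}
```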
|
||||
/**
|
||||
@@ -190,11 +340,10 @@ public class Engine implements FileFilter {
|
||||
*
|
||||
* @param dependencies the dependencies
|
||||
*/
|
||||
public void setDependencies(List<Dependency> dependencies) {
|
||||
synchronized (this.dependencies) {
|
||||
this.dependencies.clear();
|
||||
this.dependencies.addAll(dependencies);
|
||||
}
|
||||
public synchronized void setDependencies(List<Dependency> dependencies) {
|
||||
this.dependencies.clear();
|
||||
this.dependencies.addAll(dependencies);
|
||||
dependenciesExternalView = null;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -403,7 +552,9 @@ public class Engine implements FileFilter {
|
||||
}
|
||||
} else {
|
||||
final Dependency d = scanFile(f, projectReference);
|
||||
deps.add(d);
|
||||
if (d != null) {
|
||||
deps.add(d);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -431,7 +582,7 @@ public class Engine implements FileFilter {
|
||||
* @return the scanned dependency
|
||||
* @since v1.4.4
|
||||
*/
|
||||
protected Dependency scanFile(File file, String projectReference) {
|
||||
protected synchronized Dependency scanFile(File file, String projectReference) {
|
||||
Dependency dependency = null;
|
||||
if (file.isFile()) {
|
||||
if (accept(file)) {
|
||||
@@ -441,31 +592,31 @@ public class Engine implements FileFilter {
|
||||
}
|
||||
final String sha1 = dependency.getSha1sum();
|
||||
boolean found = false;
|
||||
synchronized (dependencies) {
|
||||
if (sha1 != null) {
|
||||
for (Dependency existing : dependencies) {
|
||||
if (sha1.equals(existing.getSha1sum())) {
|
||||
found = true;
|
||||
if (projectReference != null) {
|
||||
existing.addProjectReference(projectReference);
|
||||
}
|
||||
if (existing.getActualFilePath() != null && dependency.getActualFilePath() != null
|
||||
&& !existing.getActualFilePath().equals(dependency.getActualFilePath())) {
|
||||
existing.addRelatedDependency(dependency);
|
||||
} else {
|
||||
dependency = existing;
|
||||
}
|
||||
break;
|
||||
|
||||
if (sha1 != null) {
|
||||
for (Dependency existing : dependencies) {
|
||||
if (sha1.equals(existing.getSha1sum())) {
|
||||
found = true;
|
||||
if (projectReference != null) {
|
||||
existing.addProjectReference(projectReference);
|
||||
}
|
||||
if (existing.getActualFilePath() != null && dependency.getActualFilePath() != null
|
||||
&& !existing.getActualFilePath().equals(dependency.getActualFilePath())) {
|
||||
existing.addRelatedDependency(dependency);
|
||||
} else {
|
||||
dependency = existing;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
dependencies.add(dependency);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
LOGGER.debug("Path passed to scanFile(File) is not a file: {}. Skipping the file.", file);
|
||||
if (!found) {
|
||||
dependencies.add(dependency);
|
||||
dependenciesExternalView = null;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
LOGGER.debug("Path passed to scanFile(File) is not a file that can be scanned by dependency-check: {}. Skipping the file.", file);
|
||||
}
|
||||
return dependency;
|
||||
}
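// scanFile() above keys de-duplication on the dependency's SHA-1: a file whose hash matches
// an existing dependency is either linked as a related dependency (different path) or
// collapsed into the existing entry, and only unseen hashes are added to the list. An
// illustrative stand-alone sketch of hash-keyed de-duplication using only JDK APIs; the
// Sha1DeduperSketch class is not part of dependency-check:
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.Map;

final class Sha1DeduperSketch {

    private final Map<String, File> seen = new HashMap<>();

    /** Returns true if the file's content was new; false if the same hash was already scanned. */
    public boolean addIfNew(File file) throws IOException, NoSuchAlgorithmException {
        final MessageDigest md = MessageDigest.getInstance("SHA-1");
        final byte[] digest = md.digest(Files.readAllBytes(file.toPath()));
        final StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b));
        }
        final String sha1 = hex.toString();
        if (seen.containsKey(sha1)) {
            //duplicate content; the engine would merge it into the existing dependency instead
            return false;
        }
        seen.put(sha1, file);
        return true;
    }
}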
|
||||
@@ -502,7 +653,7 @@ public class Engine implements FileFilter {
|
||||
final long analysisStart = System.currentTimeMillis();
|
||||
|
||||
// analysis phases
|
||||
for (AnalysisPhase phase : AnalysisPhase.values()) {
|
||||
for (AnalysisPhase phase : mode.getPhases()) {
|
||||
final List<Analyzer> analyzerList = analyzers.get(phase);
|
||||
|
||||
for (final Analyzer analyzer : analyzerList) {
|
||||
@@ -511,7 +662,9 @@ public class Engine implements FileFilter {
|
||||
initializeAnalyzer(analyzer);
|
||||
} catch (InitializationException ex) {
|
||||
exceptions.add(ex);
|
||||
continue;
|
||||
if (ex.isFatal()) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (analyzer.isEnabled()) {
|
||||
@@ -525,7 +678,7 @@ public class Engine implements FileFilter {
|
||||
}
|
||||
}
|
||||
}
|
||||
for (AnalysisPhase phase : AnalysisPhase.values()) {
|
||||
for (AnalysisPhase phase : mode.getPhases()) {
|
||||
final List<Analyzer> analyzerList = analyzers.get(phase);
|
||||
|
||||
for (Analyzer a : analyzerList) {
|
||||
@@ -548,17 +701,19 @@ public class Engine implements FileFilter {
|
||||
* @throws ExceptionCollection thrown if fatal exceptions occur
|
||||
*/
|
||||
private void initializeAndUpdateDatabase(final List<Throwable> exceptions) throws ExceptionCollection {
|
||||
if (!mode.isDatabaseRequired()) {
|
||||
return;
|
||||
}
|
||||
boolean autoUpdate = true;
|
||||
try {
|
||||
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
|
||||
autoUpdate = settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.debug("Invalid setting for auto-update; using true.");
|
||||
exceptions.add(ex);
|
||||
}
|
||||
if (autoUpdate) {
|
||||
try {
|
||||
database = CveDB.getInstance();
|
||||
doUpdates();
|
||||
doUpdates(true);
|
||||
} catch (UpdateException ex) {
|
||||
exceptions.add(ex);
|
||||
LOGGER.warn("Unable to update Cached Web DataSource, using local "
|
||||
@@ -569,10 +724,10 @@ public class Engine implements FileFilter {
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
if (ConnectionFactory.isH2Connection() && !ConnectionFactory.h2DataFileExists()) {
|
||||
if (ConnectionFactory.isH2Connection(settings) && !ConnectionFactory.h2DataFileExists(settings)) {
|
||||
throw new ExceptionCollection(new NoDataException("Autoupdate is disabled and the database does not exist"), true);
|
||||
} else {
|
||||
database = CveDB.getInstance();
|
||||
openDatabase(true, true);
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
throw new ExceptionCollection(new DatabaseException("Autoupdate is disabled and unable to connect to the database"), true);
|
||||
@@ -605,10 +760,11 @@ public class Engine implements FileFilter {
|
||||
} catch (ExecutionException e) {
|
||||
throwFatalExceptionCollection("Analysis task failed with a fatal exception.", e, exceptions);
|
||||
} catch (CancellationException e) {
|
||||
throwFatalExceptionCollection("Analysis task timed out.", e, exceptions);
|
||||
throwFatalExceptionCollection("Analysis task was cancelled.", e, exceptions);
|
||||
}
|
||||
}
|
||||
} catch (InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
throwFatalExceptionCollection("Analysis has been interrupted.", e, exceptions);
|
||||
} finally {
|
||||
executorService.shutdown();
|
||||
@@ -622,13 +778,11 @@ public class Engine implements FileFilter {
|
||||
* @param exceptions the collection of exceptions to collect
|
||||
* @return a collection of analysis tasks
|
||||
*/
|
||||
protected List<AnalysisTask> getAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) {
|
||||
protected synchronized List<AnalysisTask> getAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) {
|
||||
final List<AnalysisTask> result = new ArrayList<>();
|
||||
synchronized (dependencies) {
|
||||
for (final Dependency dependency : dependencies) {
|
||||
final AnalysisTask task = new AnalysisTask(analyzer, dependency, this, exceptions, Settings.getInstance());
|
||||
result.add(task);
|
||||
}
|
||||
for (final Dependency dependency : dependencies) {
|
||||
final AnalysisTask task = new AnalysisTask(analyzer, dependency, this, exceptions);
|
||||
result.add(task);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
@@ -653,21 +807,23 @@ public class Engine implements FileFilter {
|
||||
/**
|
||||
* Initializes the given analyzer.
|
||||
*
|
||||
* @param analyzer the analyzer to initialize
|
||||
* @param analyzer the analyzer to prepare
|
||||
* @throws InitializationException thrown when there is a problem
|
||||
* initializing the analyzer
|
||||
*/
|
||||
protected void initializeAnalyzer(Analyzer analyzer) throws InitializationException {
|
||||
try {
|
||||
LOGGER.debug("Initializing {}", analyzer.getName());
|
||||
analyzer.initialize();
|
||||
analyzer.prepare(this);
|
||||
} catch (InitializationException ex) {
|
||||
LOGGER.error("Exception occurred initializing {}.", analyzer.getName());
|
||||
LOGGER.debug("", ex);
|
||||
try {
|
||||
analyzer.close();
|
||||
} catch (Throwable ex1) {
|
||||
LOGGER.trace("", ex1);
|
||||
if (ex.isFatal()) {
|
||||
try {
|
||||
analyzer.close();
|
||||
} catch (Throwable ex1) {
|
||||
LOGGER.trace("", ex1);
|
||||
}
|
||||
}
|
||||
throw ex;
|
||||
} catch (Throwable ex) {
|
||||
@@ -703,15 +859,131 @@ public class Engine implements FileFilter {
|
||||
* @throws UpdateException thrown if the operation fails
|
||||
*/
|
||||
public void doUpdates() throws UpdateException {
|
||||
LOGGER.info("Checking for updates");
|
||||
final long updateStart = System.currentTimeMillis();
|
||||
final UpdateService service = new UpdateService(serviceClassLoader);
|
||||
final Iterator<CachedWebDataSource> iterator = service.getDataSources();
|
||||
while (iterator.hasNext()) {
|
||||
final CachedWebDataSource source = iterator.next();
|
||||
source.update();
|
||||
doUpdates(false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Cycles through the cached web data sources and calls update on all of
|
||||
* them.
|
||||
*
|
||||
* @param remainOpen whether or not the database connection should remain
|
||||
* open
|
||||
* @throws UpdateException thrown if the operation fails
|
||||
*/
|
||||
public void doUpdates(boolean remainOpen) throws UpdateException {
|
||||
if (mode.isDatabaseRequired()) {
|
||||
H2DBLock dblock = null;
|
||||
try {
|
||||
if (ConnectionFactory.isH2Connection(settings)) {
|
||||
dblock = new H2DBLock(settings);
|
||||
LOGGER.debug("locking for update");
|
||||
dblock.lock();
|
||||
}
|
||||
openDatabase(false, false);
|
||||
LOGGER.info("Checking for updates");
|
||||
final long updateStart = System.currentTimeMillis();
|
||||
final UpdateService service = new UpdateService(serviceClassLoader);
|
||||
final Iterator<CachedWebDataSource> iterator = service.getDataSources();
|
||||
while (iterator.hasNext()) {
|
||||
final CachedWebDataSource source = iterator.next();
|
||||
source.update(this);
|
||||
}
|
||||
database.close();
|
||||
database = null;
|
||||
LOGGER.info("Check for updates complete ({} ms)", System.currentTimeMillis() - updateStart);
|
||||
if (remainOpen) {
|
||||
openDatabase(true, false);
|
||||
}
|
||||
} catch (DatabaseException ex) {
|
||||
throw new UpdateException(ex.getMessage(), ex.getCause());
|
||||
} catch (H2DBLockException ex) {
|
||||
throw new UpdateException("Unable to obtain an exclusive lock on the H2 database to perform updates", ex);
|
||||
} finally {
|
||||
if (dblock != null) {
|
||||
dblock.release();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
LOGGER.info("Skipping update check in evidence collection mode.");
|
||||
}
|
||||
LOGGER.info("Check for updates complete ({} ms)", System.currentTimeMillis() - updateStart);
|
||||
}
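// doUpdates(boolean) above serializes writers on the H2 database: for a file-based H2
// connection it takes an exclusive lock, opens the database, runs every CachedWebDataSource
// update, and releases the lock in a finally block. A minimal, illustrative sketch of the
// same acquire/update/release discipline using a plain java.nio file lock; the lock-file
// path and the Updater type are assumptions, not the project's H2DBLock API:
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

final class UpdateLockSketch {

    interface Updater {
        void update() throws IOException;
    }

    static void updateExclusively(Path lockFile, Updater updater) throws IOException {
        try (FileChannel channel = FileChannel.open(lockFile,
                StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
            final FileLock lock = channel.lock(); //blocks until no other process holds the lock
            try {
                updater.update(); //e.g. iterate the CachedWebDataSource instances
            } finally {
                lock.release(); //always released, mirroring the finally block above
            }
        }
    }
}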
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* This method is only public for unit/integration testing. This method
|
||||
* should not be called by any integration that uses
|
||||
* dependency-check-core.</p>
|
||||
* <p>
|
||||
* Opens the database connection.</p>
|
||||
*
|
||||
* @throws DatabaseException if the database connection could not be created
|
||||
*/
|
||||
public void openDatabase() throws DatabaseException {
|
||||
openDatabase(false, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* This method is only public for unit/integration testing. This method
|
||||
* should not be called by any integration that uses
|
||||
* dependency-check-core.</p>
|
||||
* <p>
|
||||
* Opens the database connection; if readOnly is true a copy of the database
|
||||
* will be made.</p>
|
||||
*
|
||||
* @param readOnly whether or not the database connection should be readonly
|
||||
* @param lockRequired whether or not a lock needs to be acquired when
|
||||
* opening the database
|
||||
* @throws DatabaseException if the database connection could not be created
|
||||
*/
|
||||
    public void openDatabase(boolean readOnly, boolean lockRequired) throws DatabaseException {
        if (mode.isDatabaseRequired() && database == null) {
            //needed to apply any required schema changes
            database = new CveDB(settings);
            if (readOnly
                    && ConnectionFactory.isH2Connection(settings)
                    && settings.getString(Settings.KEYS.DB_CONNECTION_STRING).contains("file:%s")) {
                H2DBLock lock = null;
                try {
                    final File db = ConnectionFactory.getH2DataFile(settings);
                    if (db.isFile()) {
                        database.close();
                        if (lockRequired) {
                            lock = new H2DBLock(settings);
                            lock.lock();
                        }
                        LOGGER.debug("copying database");
                        final File temp = settings.getTempDirectory();
                        final File tempDB = new File(temp, db.getName());
                        Files.copy(db.toPath(), tempDB.toPath());
                        LOGGER.debug("copying complete '{}'", temp.toPath());
                        settings.setString(Settings.KEYS.DATA_DIRECTORY, temp.getPath());
                        final String connStr = settings.getString(Settings.KEYS.DB_CONNECTION_STRING);
                        if (!connStr.contains("ACCESS_MODE_DATA")) {
                            settings.setString(Settings.KEYS.DB_CONNECTION_STRING, connStr + "ACCESS_MODE_DATA=r");
                        }
                        database = new CveDB(settings);
                    }
                } catch (IOException ex) {
                    throw new DatabaseException("Unable to open db in read only mode", ex);
                } catch (H2DBLockException ex) {
                    throw new DatabaseException("Failed to obtain lock - unable to open db in read only mode", ex);
                } finally {
                    if (lock != null) {
                        lock.release();
                    }
                }
            }
        }
}
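// When readOnly is requested for a file-based H2 connection, openDatabase() above copies the
// data file into the temp directory, points DATA_DIRECTORY at the copy, and appends
// ACCESS_MODE_DATA=r to the connection string so the scan reads a private copy and never
// holds a write lock on the shared database. A minimal sketch of that copy-then-reopen step
// using only JDK I/O; the JDBC URL shape and scratch-directory handling are illustrative,
// not the project's exact settings:
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

final class ReadOnlyCopySketch {

    static String readOnlyUrlFor(Path dataFile, Path scratchDir) throws IOException {
        final Path copy = scratchDir.resolve(dataFile.getFileName());
        Files.copy(dataFile, copy, StandardCopyOption.REPLACE_EXISTING); //work on a private copy
        String url = "jdbc:h2:file:" + copy.toAbsolutePath();
        if (!url.contains("ACCESS_MODE_DATA")) {
            url += ";ACCESS_MODE_DATA=r"; //H2 option: open the data file read-only
        }
        return url;
    }
}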
|
||||
|
||||
/**
|
||||
* Returns a reference to the database.
|
||||
*
|
||||
* @return a reference to the database
|
||||
*/
|
||||
public CveDB getDatabase() {
|
||||
return this.database;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -722,7 +994,7 @@ public class Engine implements FileFilter {
|
||||
*/
|
||||
public List<Analyzer> getAnalyzers() {
|
||||
final List<Analyzer> ret = new ArrayList<>();
|
||||
for (AnalysisPhase phase : AnalysisPhase.values()) {
|
||||
for (AnalysisPhase phase : mode.getPhases()) {
|
||||
final List<Analyzer> analyzerList = analyzers.get(phase);
|
||||
ret.addAll(analyzerList);
|
||||
}
|
||||
@@ -759,6 +1031,24 @@ public class Engine implements FileFilter {
|
||||
return this.fileTypeAnalyzers;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the configured settings.
|
||||
*
|
||||
* @return the configured settings
|
||||
*/
|
||||
public Settings getSettings() {
|
||||
return settings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the mode of the engine.
|
||||
*
|
||||
* @return the mode of the engine
|
||||
*/
|
||||
public Mode getMode() {
|
||||
return mode;
|
||||
}
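// The loops above now iterate mode.getPhases() rather than AnalysisPhase.values(), and
// database work is guarded by mode.isDatabaseRequired(), so a restricted mode such as
// EVIDENCE_COLLECTION can skip whole phases and the NVD database entirely. A hedged sketch
// of what such a mode enum can look like; the constant names and phase lists here are
// illustrative, only getPhases()/isDatabaseRequired() and EVIDENCE_COLLECTION are taken
// from the diff above:
import java.util.Arrays;
import java.util.List;

enum ScanModeSketch {

    //collects evidence only: no database and only the early phases
    EVIDENCE_COLLECTION(false, "INITIAL", "INFORMATION_COLLECTION"),
    //full scan: database required and every phase enabled
    STANDALONE(true, "INITIAL", "INFORMATION_COLLECTION", "IDENTIFIER_ANALYSIS", "FINAL");

    private final boolean databaseRequired;
    private final List<String> phases;

    ScanModeSketch(boolean databaseRequired, String... phases) {
        this.databaseRequired = databaseRequired;
        this.phases = Arrays.asList(phases);
    }

    public boolean isDatabaseRequired() {
        return databaseRequired;
    }

    public List<String> getPhases() {
        return phases;
    }
}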
|
||||
|
||||
/**
|
||||
* Adds a file type analyzer. This has been added solely to assist in unit
|
||||
* testing the Engine.
|
||||
@@ -776,7 +1066,7 @@ public class Engine implements FileFilter {
|
||||
* @throws NoDataException thrown if no data exists in the CPE Index
|
||||
*/
|
||||
private void ensureDataExists() throws NoDataException {
|
||||
if (database == null || !database.dataExists()) {
|
||||
if (mode.isDatabaseRequired() && (database == null || !database.dataExists())) {
|
||||
throw new NoDataException("No documents exist");
|
||||
}
|
||||
}
|
||||
@@ -796,4 +1086,44 @@ public class Engine implements FileFilter {
|
||||
exceptions.add(throwable);
|
||||
throw new ExceptionCollection(message, exceptions, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes the report to the given output directory.
|
||||
*
|
||||
* @param applicationName the name of the application/project
|
||||
* @param groupId the Maven groupId
|
||||
* @param artifactId the Maven artifactId
|
||||
* @param version the Maven version
|
||||
* @param outputDir the path to the output directory (can include the full
|
||||
* file name if the format is not ALL)
|
||||
* @param format the report format (ALL, HTML, CSV, JSON, etc.)
|
||||
* @throws ReportException thrown if there is an error generating the report
|
||||
*/
|
||||
public synchronized void writeReports(String applicationName, String groupId, String artifactId,
|
||||
String version, File outputDir, String format) throws ReportException {
|
||||
if (mode == Mode.EVIDENCE_COLLECTION) {
|
||||
throw new UnsupportedOperationException("Cannot generate report in evidence collection mode.");
|
||||
}
|
||||
final DatabaseProperties prop = database.getDatabaseProperties();
|
||||
final ReportGenerator r = new ReportGenerator(applicationName, groupId, artifactId, version, dependencies, getAnalyzers(), prop, settings);
|
||||
try {
|
||||
r.write(outputDir.getAbsolutePath(), format);
|
||||
} catch (ReportException ex) {
|
||||
final String msg = String.format("Error generating the report for %s", applicationName);
|
||||
throw new ReportException(msg, ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes the report to the given output directory.
|
||||
*
|
||||
* @param applicationName the name of the application/project
|
||||
* @param outputDir the path to the output directory (can include the full
|
||||
* file name if the format is not ALL)
|
||||
* @param format the report format (ALL, HTML, CSV, JSON, etc.)
|
||||
* @throws ReportException thrown if there is an error generating the report
|
||||
*/
|
||||
public void writeReports(String applicationName, File outputDir, String format) throws ReportException {
|
||||
writeReports(applicationName, null, null, null, outputDir, format);
|
||||
}
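// Typical use of the reporting API added above: construct the engine with a Settings
// instance, scan, analyze, then let the engine write the report itself rather than
// building a ReportGenerator by hand. A hedged usage sketch; the paths, Maven coordinates,
// and report format are placeholders, and the scan(File) overload is assumed from the
// engine's scanning API:
import java.io.File;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.utils.Settings;

final class WriteReportsSketch {

    public static void main(String[] args) throws Exception {
        final Settings settings = new Settings();
        Engine engine = null;
        try {
            engine = new Engine(settings);
            engine.scan(new File("target/libs"));   //placeholder path to the artifacts to scan
            engine.analyzeDependencies();
            engine.writeReports("my-app", "com.example", "my-app", "1.0.0",
                    new File("target/reports"), "HTML");
        } finally {
            if (engine != null) {
                engine.close();
            }
            settings.cleanup(true);
        }
    }
}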
|
||||
}
|
||||
@@ -20,10 +20,10 @@ package org.owasp.dependencycheck.agent;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import javax.annotation.concurrent.NotThreadSafe;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
|
||||
import org.owasp.dependencycheck.data.update.exception.UpdateException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
@@ -47,9 +47,9 @@ import org.slf4j.LoggerFactory;
|
||||
* <pre>
|
||||
* List<Dependency> dependencies = new ArrayList<Dependency>();
|
||||
* Dependency dependency = new Dependency(new File(FileUtils.getBitBucket()));
|
||||
* dependency.getProductEvidence().addEvidence("my-datasource", "name", "Jetty", Confidence.HIGH);
|
||||
* dependency.getVersionEvidence().addEvidence("my-datasource", "version", "5.1.10", Confidence.HIGH);
|
||||
* dependency.getVendorEvidence().addEvidence("my-datasource", "vendor", "mortbay", Confidence.HIGH);
|
||||
* dependency.addEvidence(EvidenceType.PRODUCT, "my-datasource", "name", "Jetty", Confidence.HIGH);
|
||||
* dependency.addEvidence(EvidenceType.VERSION, "my-datasource", "version", "5.1.10", Confidence.HIGH);
|
||||
* dependency.addEvidence(EvidenceType.VENDOR, "my-datasource", "vendor", "mortbay", Confidence.HIGH);
|
||||
* dependencies.add(dependency);
|
||||
*
|
||||
* DependencyCheckScanAgent scan = new DependencyCheckScanAgent();
|
||||
@@ -62,6 +62,7 @@ import org.slf4j.LoggerFactory;
|
||||
* @author Steve Springett
|
||||
*/
|
||||
@SuppressWarnings("unused")
|
||||
@NotThreadSafe
|
||||
public class DependencyCheckScanAgent {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="private fields">
|
||||
@@ -103,6 +104,11 @@ public class DependencyCheckScanAgent {
|
||||
* recommended that this be turned to false. Default is true.
|
||||
*/
|
||||
private boolean autoUpdate = true;
|
||||
/**
|
||||
* Sets whether the data directory should be updated without performing a
|
||||
* scan. Default is false.
|
||||
*/
|
||||
private boolean updateOnly = false;
|
||||
/**
|
||||
* flag indicating whether or not to generate a report of findings.
|
||||
*/
|
||||
@@ -149,6 +155,10 @@ public class DependencyCheckScanAgent {
|
||||
* The password to use when connecting to the database.
|
||||
*/
|
||||
private String databasePassword;
|
||||
/**
|
||||
* The starting string that identifies CPEs that are qualified to be imported.
|
||||
*/
|
||||
private String cpeStartsWithFilter;
|
||||
/**
|
||||
* Whether or not the Maven Central analyzer is enabled.
|
||||
*/
|
||||
@@ -210,6 +220,16 @@ public class DependencyCheckScanAgent {
|
||||
* The path to Mono for .NET assembly analysis on non-windows systems.
|
||||
*/
|
||||
private String pathToMono;
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private Settings settings;
|
||||
/**
|
||||
* The path to optional dependency-check properties file. This will be used
|
||||
* to side-load additional user-defined properties.
|
||||
* {@link Settings#mergeProperties(String)}
|
||||
*/
|
||||
private String propertiesFilePath;
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="getters/setters">
|
||||
|
||||
@@ -321,6 +341,24 @@ public class DependencyCheckScanAgent {
|
||||
this.autoUpdate = autoUpdate;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the value of updateOnly.
|
||||
*
|
||||
* @return the value of updateOnly
|
||||
*/
|
||||
public boolean isUpdateOnly() {
|
||||
return updateOnly;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of updateOnly.
|
||||
*
|
||||
* @param updateOnly new value of updateOnly
|
||||
*/
|
||||
public void setUpdateOnly(boolean updateOnly) {
|
||||
this.updateOnly = updateOnly;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the value of generateReport.
|
||||
*
|
||||
@@ -526,6 +564,22 @@ public class DependencyCheckScanAgent {
|
||||
this.showSummary = showSummary;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets starting string that identifies CPEs that are qualified to be imported.
|
||||
* @param cpeStartsWithFilter filters CPEs based on this starting string (i.e. cpe:/a: )
|
||||
*/
|
||||
public void setCpeStartsWithFilter(String cpeStartsWithFilter) {
|
||||
this.cpeStartsWithFilter = cpeStartsWithFilter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the starting string that identifies CPEs that are qualified to be imported.
|
||||
* @return the CPE starting filter (i.e. cpe:/a: )
|
||||
*/
|
||||
public String getCpeStartsWithFilter() {
|
||||
return cpeStartsWithFilter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the value of centralAnalyzerEnabled.
|
||||
*
|
||||
@@ -813,10 +867,29 @@ public class DependencyCheckScanAgent {
|
||||
public void setPathToMono(String pathToMono) {
|
||||
this.pathToMono = pathToMono;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the value of propertiesFilePath.
|
||||
*
|
||||
* @return the value of propertiesFilePath
|
||||
*/
|
||||
public String getPropertiesFilePath() {
|
||||
return propertiesFilePath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of propertiesFilePath.
|
||||
*
|
||||
* @param propertiesFilePath new value of propertiesFilePath
|
||||
*/
|
||||
public void setPropertiesFilePath(String propertiesFilePath) {
|
||||
this.propertiesFilePath = propertiesFilePath;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Executes the Dependency-Check on the dependent libraries.
|
||||
* Executes the Dependency-Check on the dependent libraries. <b>Note</b>, the engine
|
||||
* object returned from this method must be closed by calling `close()`
|
||||
*
|
||||
* @return the Engine used to scan the dependencies.
|
||||
* @throws ExceptionCollection a collection of one or more exceptions that
|
||||
@@ -826,12 +899,22 @@ public class DependencyCheckScanAgent {
|
||||
populateSettings();
|
||||
final Engine engine;
|
||||
try {
|
||||
engine = new Engine();
|
||||
engine = new Engine(settings);
|
||||
} catch (DatabaseException ex) {
|
||||
throw new ExceptionCollection(ex, true);
|
||||
}
|
||||
engine.setDependencies(this.dependencies);
|
||||
engine.analyzeDependencies();
|
||||
if (this.updateOnly) {
|
||||
try {
|
||||
engine.doUpdates();
|
||||
} catch (UpdateException ex) {
|
||||
throw new ExceptionCollection("Unable to perform update", ex);
|
||||
} finally {
|
||||
engine.close();
|
||||
}
|
||||
} else {
|
||||
engine.setDependencies(this.dependencies);
|
||||
engine.analyzeDependencies();
|
||||
}
|
||||
return engine;
|
||||
}
|
||||
|
||||
@@ -840,21 +923,15 @@ public class DependencyCheckScanAgent {
|
||||
*
|
||||
* @param engine a dependency-check engine
|
||||
* @param outDirectory the directory to write the reports to
|
||||
* @throws ScanAgentException thrown if there is an error generating the
|
||||
* report
|
||||
*/
|
||||
private void generateExternalReports(Engine engine, File outDirectory) {
|
||||
DatabaseProperties prop = null;
|
||||
try (CveDB cve = CveDB.getInstance()) {
|
||||
prop = cve.getDatabaseProperties();
|
||||
} catch (DatabaseException ex) {
|
||||
//TODO shouldn't this be a fatal exception
|
||||
LOGGER.debug("Unable to retrieve DB Properties", ex);
|
||||
}
|
||||
final ReportGenerator r = new ReportGenerator(this.applicationName, engine.getDependencies(), engine.getAnalyzers(), prop);
|
||||
private void generateExternalReports(Engine engine, File outDirectory) throws ScanAgentException {
|
||||
try {
|
||||
r.generateReports(outDirectory.getCanonicalPath(), this.reportFormat.name());
|
||||
} catch (IOException | ReportException ex) {
|
||||
LOGGER.error("Unexpected exception occurred during analysis; please see the verbose error log for more details.");
|
||||
LOGGER.debug("", ex);
|
||||
engine.writeReports(applicationName, outDirectory, this.reportFormat.name());
|
||||
} catch (ReportException ex) {
|
||||
LOGGER.debug("Unexpected exception occurred during analysis; please see the verbose error log for more details.", ex);
|
||||
throw new ScanAgentException("Error generating the report", ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -864,40 +941,50 @@ public class DependencyCheckScanAgent {
|
||||
* proxy server, port, and connection timeout.
|
||||
*/
|
||||
private void populateSettings() {
|
||||
Settings.initialize();
|
||||
settings = new Settings();
|
||||
if (dataDirectory != null) {
|
||||
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
|
||||
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
|
||||
} else {
|
||||
final File jarPath = new File(DependencyCheckScanAgent.class.getProtectionDomain().getCodeSource().getLocation().getPath());
|
||||
final File base = jarPath.getParentFile();
|
||||
final String sub = Settings.getString(Settings.KEYS.DATA_DIRECTORY);
|
||||
final String sub = settings.getString(Settings.KEYS.DATA_DIRECTORY);
|
||||
final File dataDir = new File(base, sub);
|
||||
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
}
|
||||
if (propertiesFilePath != null) {
|
||||
try {
|
||||
settings.mergeProperties(propertiesFilePath);
|
||||
LOGGER.info("Successfully loaded user-defined properties");
|
||||
} catch (IOException e) {
|
||||
LOGGER.error("Unable to merge user-defined properties", e);
|
||||
LOGGER.error("Continuing execution");
|
||||
}
|
||||
}
|
||||
|
||||
Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_CENTRAL_URL, centralUrl);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
|
||||
settings.setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.CVE_CPE_STARTS_WITH_FILTER, cpeStartsWithFilter);
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_CENTRAL_URL, centralUrl);
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
|
||||
settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
|
||||
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
|
||||
}
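// populateSettings() above now configures a per-run Settings instance instead of the old
// static Settings, and can side-load user overrides through mergeProperties(). A compact
// sketch of that pattern in isolation; the helper class, argument values, and the choice
// of keys are illustrative:
import java.io.IOException;
import org.owasp.dependencycheck.utils.Settings;

final class SettingsSketch {

    static Settings buildSettings(String dataDirectory, String proxyServer, String propertiesFilePath) {
        final Settings settings = new Settings();
        settings.setBoolean(Settings.KEYS.AUTO_UPDATE, true);
        settings.setStringIfNotEmpty(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
        settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
        if (propertiesFilePath != null) {
            try {
                //user-supplied properties override the values set above
                settings.mergeProperties(propertiesFilePath);
            } catch (IOException ex) {
                //mirror the scan agent: report the failure and continue with the defaults
            }
        }
        return settings;
    }
}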
|
||||
|
||||
/**
|
||||
@@ -911,14 +998,16 @@ public class DependencyCheckScanAgent {
|
||||
Engine engine = null;
|
||||
try {
|
||||
engine = executeDependencyCheck();
|
||||
if (this.generateReport) {
|
||||
generateExternalReports(engine, new File(this.reportOutputDirectory));
|
||||
}
|
||||
if (this.showSummary) {
|
||||
showSummary(engine.getDependencies());
|
||||
}
|
||||
if (this.failBuildOnCVSS <= 10) {
|
||||
checkForFailure(engine.getDependencies());
|
||||
if (!this.updateOnly) {
|
||||
if (this.generateReport) {
|
||||
generateExternalReports(engine, new File(this.reportOutputDirectory));
|
||||
}
|
||||
if (this.showSummary) {
|
||||
showSummary(engine.getDependencies());
|
||||
}
|
||||
if (this.failBuildOnCVSS <= 10) {
|
||||
checkForFailure(engine.getDependencies());
|
||||
}
|
||||
}
|
||||
} catch (ExceptionCollection ex) {
|
||||
if (ex.isFatal()) {
|
||||
@@ -927,9 +1016,9 @@ public class DependencyCheckScanAgent {
|
||||
}
|
||||
throw new ScanAgentException("One or more exceptions occurred during analysis; please see the debug log for more details.", ex);
|
||||
} finally {
|
||||
Settings.cleanup(true);
|
||||
settings.cleanup(true);
|
||||
if (engine != null) {
|
||||
engine.cleanup();
|
||||
engine.close();
|
||||
}
|
||||
}
|
||||
return engine;
|
||||
@@ -943,7 +1032,7 @@ public class DependencyCheckScanAgent {
|
||||
* @throws org.owasp.dependencycheck.exception.ScanAgentException thrown if
|
||||
* there is an exception executing the scan.
|
||||
*/
|
||||
private void checkForFailure(List<Dependency> dependencies) throws ScanAgentException {
|
||||
private void checkForFailure(Dependency[] dependencies) throws ScanAgentException {
|
||||
final StringBuilder ids = new StringBuilder();
|
||||
for (Dependency d : dependencies) {
|
||||
boolean addName = true;
|
||||
@@ -960,10 +1049,16 @@ public class DependencyCheckScanAgent {
|
||||
}
|
||||
}
|
||||
if (ids.length() > 0) {
|
||||
final String msg = String.format("%n%nDependency-Check Failure:%n"
|
||||
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than '%.1f': %s%n"
|
||||
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString());
|
||||
|
||||
final String msg;
|
||||
if (showSummary) {
|
||||
msg = String.format("%n%nDependency-Check Failure:%n"
|
||||
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than or equal to '%.1f': %s%n"
|
||||
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString());
|
||||
} else {
|
||||
msg = String.format("%n%nDependency-Check Failure:%n"
|
||||
+ "One or more dependencies were identified with vulnerabilities.%n%n"
|
||||
+ "See the dependency-check report for more details.%n%n");
|
||||
}
|
||||
throw new ScanAgentException(msg);
|
||||
}
|
||||
}
|
||||
@@ -974,12 +1069,12 @@ public class DependencyCheckScanAgent {
|
||||
*
|
||||
* @param dependencies a list of dependency objects
|
||||
*/
|
||||
private void showSummary(List<Dependency> dependencies) {
|
||||
private void showSummary(Dependency[] dependencies) {
|
||||
final StringBuilder summary = new StringBuilder();
|
||||
for (Dependency d : dependencies) {
|
||||
boolean firstEntry = true;
|
||||
final StringBuilder ids = new StringBuilder();
|
||||
for (Vulnerability v : d.getVulnerabilities()) {
|
||||
for (Vulnerability v : d.getVulnerabilities(true)) {
|
||||
if (firstEntry) {
|
||||
firstEntry = false;
|
||||
} else {
|
||||
@@ -23,15 +23,17 @@ import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Base class for analyzers to avoid code duplication of initialize and close as
|
||||
* Base class for analyzers to avoid code duplication of prepare and close as
|
||||
* most analyzers do not need these methods.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public abstract class AbstractAnalyzer implements Analyzer {
|
||||
|
||||
/**
|
||||
@@ -42,6 +44,10 @@ public abstract class AbstractAnalyzer implements Analyzer {
|
||||
* A flag indicating whether or not the analyzer is enabled.
|
||||
*/
|
||||
private volatile boolean enabled = true;
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private Settings settings;
|
||||
|
||||
/**
|
||||
* Get the value of enabled.
|
||||
@@ -63,41 +69,56 @@ public abstract class AbstractAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
* Returns the configured settings.
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
* @return the configured settings
|
||||
*/
|
||||
protected abstract String getAnalyzerEnabledSettingKey();
|
||||
|
||||
/**
|
||||
* Analyzes a given dependency. If the dependency is an archive, such as a
|
||||
* WAR or EAR, the contents are extracted, scanned, and added to the list of
|
||||
* dependencies within the engine.
|
||||
*
|
||||
* @param dependency the dependency to analyze
|
||||
* @param engine the engine scanning
|
||||
* @throws AnalysisException thrown if there is an analysis exception
|
||||
*/
|
||||
protected abstract void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException;
|
||||
|
||||
/**
|
||||
* Initializes a given Analyzer. This will be skipped if the analyzer is
|
||||
* disabled.
|
||||
*
|
||||
* @throws InitializationException thrown if there is an exception
|
||||
*/
|
||||
protected void initializeAnalyzer() throws InitializationException {
|
||||
protected Settings getSettings() {
|
||||
return settings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes a given Analyzer. This will be skipped if the analyzer is
|
||||
* Initializes the analyzer with the configured settings.
|
||||
*
|
||||
* @param settings the configured settings to use
|
||||
*/
|
||||
@Override
|
||||
public void initialize(Settings settings) {
|
||||
this.settings = settings;
|
||||
final String key = getAnalyzerEnabledSettingKey();
|
||||
try {
|
||||
this.setEnabled(settings.getBoolean(key, true));
|
||||
} catch (InvalidSettingException ex) {
|
||||
final String msg = String.format("Invalid setting for property '%s'", key);
|
||||
LOGGER.warn(msg);
|
||||
LOGGER.debug(msg, ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the abstract analyzer.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException thrown if there is an exception
|
||||
*/
|
||||
@Override
|
||||
public final void prepare(Engine engine) throws InitializationException {
|
||||
if (isEnabled()) {
|
||||
prepareAnalyzer(engine);
|
||||
} else {
|
||||
LOGGER.debug("{} has been disabled", getName());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepares a given Analyzer. This will be skipped if the analyzer is
|
||||
* disabled.
|
||||
*
|
||||
* @throws Exception thrown if there is an exception
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException thrown if there is an exception
|
||||
*/
|
||||
protected void closeAnalyzer() throws Exception {
|
||||
// Intentionally empty, analyzer will override this if they must close a resource.
|
||||
protected void prepareAnalyzer(Engine engine) throws InitializationException {
|
||||
// Intentionally empty, analyzer will override this if they must prepare anything.
|
||||
}
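// With the lifecycle above, a concrete analyzer only overrides the hooks it needs:
// initialize(Settings) stores the settings and honours the enabled key, prepare(Engine)
// calls prepareAnalyzer(Engine) once for enabled analyzers, and analyzeDependency(...)
// does the per-dependency work. An illustrative skeleton subclass; the analyzer name,
// chosen phase, and the settings key are made-up examples, not a shipped analyzer:
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.AbstractAnalyzer;
import org.owasp.dependencycheck.analyzer.AnalysisPhase;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;

public class ExampleAnalyzer extends AbstractAnalyzer {

    @Override
    public String getName() {
        return "Example Analyzer";
    }

    @Override
    public AnalysisPhase getAnalysisPhase() {
        return AnalysisPhase.INFORMATION_COLLECTION;
    }

    @Override
    protected String getAnalyzerEnabledSettingKey() {
        return "analyzer.example.enabled"; //hypothetical settings key
    }

    @Override
    protected void prepareAnalyzer(Engine engine) throws InitializationException {
        //one-time setup; only runs when the analyzer is enabled
    }

    @Override
    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
        //inspect the dependency and add evidence here
    }
}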
|
||||
|
||||
/**
|
||||
@@ -117,26 +138,15 @@ public abstract class AbstractAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* The initialize method does nothing for this Analyzer.
|
||||
* Analyzes a given dependency. If the dependency is an archive, such as a
|
||||
* WAR or EAR, the contents are extracted, scanned, and added to the list of
|
||||
* dependencies within the engine.
|
||||
*
|
||||
* @throws InitializationException thrown if there is an exception
|
||||
* @param dependency the dependency to analyze
|
||||
* @param engine the engine scanning
|
||||
* @throws AnalysisException thrown if there is an analysis exception
|
||||
*/
|
||||
@Override
|
||||
public final void initialize() throws InitializationException {
|
||||
final String key = getAnalyzerEnabledSettingKey();
|
||||
try {
|
||||
this.setEnabled(Settings.getBoolean(key, true));
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.warn("Invalid setting for property '{}'", key);
|
||||
LOGGER.debug("", ex);
|
||||
}
|
||||
|
||||
if (isEnabled()) {
|
||||
initializeAnalyzer();
|
||||
} else {
|
||||
LOGGER.debug("{} has been disabled", getName());
|
||||
}
|
||||
}
|
||||
protected abstract void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException;
|
||||
|
||||
/**
|
||||
* The close method does nothing for this Analyzer.
|
||||
@@ -150,6 +160,16 @@ public abstract class AbstractAnalyzer implements Analyzer {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes a given Analyzer. This will be skipped if the analyzer is
|
||||
* disabled.
|
||||
*
|
||||
* @throws Exception thrown if there is an exception
|
||||
*/
|
||||
protected void closeAnalyzer() throws Exception {
|
||||
// Intentionally empty, analyzer will override this if they must close a resource.
|
||||
}
|
||||
|
||||
/**
|
||||
* The default is to support parallel processing.
|
||||
*
|
||||
@@ -159,4 +179,13 @@ public abstract class AbstractAnalyzer implements Analyzer {
|
||||
public boolean supportsParallelProcessing() {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
protected abstract String getAnalyzerEnabledSettingKey();
|
||||
|
||||
}
|
||||
@@ -0,0 +1,125 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2017 Jeremy Long. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* This analyzer ensures dependencies that should be grouped together, to remove
|
||||
* excess noise from the report, are grouped. An example would be Spring, Spring
|
||||
* Beans, Spring MVC, etc. If they are all for the same version and have the
|
||||
* same relative path then these should be grouped into a single dependency
|
||||
* under the core/main library.</p>
|
||||
* <p>
|
||||
* Note, this grouping only works on dependencies with identified CVE
|
||||
* entries</p>
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public abstract class AbstractDependencyComparingAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractDependencyComparingAnalyzer.class);
|
||||
|
||||
/**
|
||||
* a flag indicating if this analyzer has run. This analyzer only runs once.
|
||||
*/
|
||||
private boolean analyzed = false;
|
||||
|
||||
/**
|
||||
* Returns a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once. Note this is currently only used in the unit tests.
|
||||
*
|
||||
* @return a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once
|
||||
*/
|
||||
protected synchronized boolean getAnalyzed() {
|
||||
return analyzed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Does not support parallel processing as it only runs once and then
|
||||
* operates on <em>all</em> dependencies.
|
||||
*
|
||||
* @return whether or not parallel processing is enabled
|
||||
* @see #analyze(Dependency, Engine)
|
||||
*/
|
||||
@Override
|
||||
public final boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a set of dependencies. If they have been found to have the same
|
||||
* base path and the same set of identifiers they are likely related. The
|
||||
* related dependencies are bundled into a single reportable item.
|
||||
*
|
||||
* @param ignore this analyzer ignores the dependency being analyzed
|
||||
* @param engine the engine that is scanning the dependencies
|
||||
* @throws AnalysisException is thrown if there is an error reading the JAR
|
||||
* file.
|
||||
*/
|
||||
@Override
|
||||
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
|
||||
if (!analyzed) {
|
||||
analyzed = true;
|
||||
final Set<Dependency> dependenciesToRemove = new HashSet<>();
|
||||
|
||||
final Dependency[] dependencies = engine.getDependencies();
|
||||
if (dependencies.length < 2) {
|
||||
return;
|
||||
}
|
||||
for (int x = 0; x < dependencies.length - 1; x++) {
|
||||
final Dependency dependency = dependencies[x];
|
||||
if (!dependenciesToRemove.contains(dependency)) {
|
||||
for (int y = x + 1; y < dependencies.length; y++) {
|
||||
final Dependency nextDependency = dependencies[y];
|
||||
if (evaluateDependencies(dependency, nextDependency, dependenciesToRemove)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (Dependency d : dependenciesToRemove) {
|
||||
engine.removeDependency(d);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluates the dependencies
|
||||
*
|
||||
* @param dependency a dependency to compare
|
||||
* @param nextDependency a dependency to compare
|
||||
* @param dependenciesToRemove a set of dependencies that will be removed
|
||||
* @return true if a dependency is removed; otherwise false
|
||||
*/
|
||||
protected abstract boolean evaluateDependencies(final Dependency dependency,
|
||||
final Dependency nextDependency, final Set<Dependency> dependenciesToRemove);
|
||||
}
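// AbstractDependencyComparingAnalyzer above visits every pair of dependencies exactly once
// and delegates the merge decision to evaluateDependencies(): returning true signals that
// the current dependency was folded away and comparison can stop for it. An illustrative
// subclass (not one that ships with dependency-check) that groups entries sharing the same
// name and version; the analyzer name and enabled key are made up:
import java.util.Set;
import org.owasp.dependencycheck.analyzer.AbstractDependencyComparingAnalyzer;
import org.owasp.dependencycheck.analyzer.AnalysisPhase;
import org.owasp.dependencycheck.dependency.Dependency;

public class SameCoordinateGroupingSketch extends AbstractDependencyComparingAnalyzer {

    @Override
    public String getName() {
        return "Same Coordinate Grouping (sketch)";
    }

    @Override
    public AnalysisPhase getAnalysisPhase() {
        return AnalysisPhase.FINAL;
    }

    @Override
    protected String getAnalyzerEnabledSettingKey() {
        return "analyzer.same.coordinate.grouping.enabled"; //hypothetical settings key
    }

    @Override
    protected boolean evaluateDependencies(Dependency dependency, Dependency nextDependency,
            Set<Dependency> dependenciesToRemove) {
        if (dependency.getName() != null && dependency.getName().equals(nextDependency.getName())
                && dependency.getVersion() != null && dependency.getVersion().equals(nextDependency.getVersion())) {
            //keep a single reportable item and drop the current dependency from the report
            nextDependency.addRelatedDependency(dependency);
            dependenciesToRemove.add(dependency);
            return true; //the current dependency was removed; stop comparing it
        }
        return false;
    }
}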
|
||||
@@ -25,6 +25,8 @@ import java.io.FileFilter;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
@@ -33,6 +35,7 @@ import org.owasp.dependencycheck.exception.InitializationException;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implements FileTypeAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Field definitions, getters, and setters ">
|
||||
@@ -45,16 +48,6 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
|
||||
*/
|
||||
private boolean filesMatched = false;
|
||||
|
||||
/**
|
||||
* Get the value of filesMatched. A flag indicating whether the scan
|
||||
* included any file types this analyzer supports.
|
||||
*
|
||||
* @return the value of filesMatched
|
||||
*/
|
||||
protected boolean isFilesMatched() {
|
||||
return filesMatched;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of filesMatched. A flag indicating whether the scan
|
||||
* included any file types this analyzer supports.
|
||||
@@ -70,13 +63,14 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
|
||||
/**
|
||||
* Initializes the analyzer.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException thrown if there is an exception during
|
||||
* initialization
|
||||
*/
|
||||
@Override
|
||||
protected final void initializeAnalyzer() throws InitializationException {
|
||||
protected final void prepareAnalyzer(Engine engine) throws InitializationException {
|
||||
if (filesMatched) {
|
||||
initializeFileTypeAnalyzer();
|
||||
prepareFileTypeAnalyzer(engine);
|
||||
} else {
|
||||
this.setEnabled(false);
|
||||
}
|
||||
@@ -99,12 +93,13 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
|
||||
protected abstract FileFilter getFileFilter();
|
||||
|
||||
/**
|
||||
* Initializes the file type analyzer.
|
||||
* Prepares the file type analyzer for dependency analysis.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException thrown if there is an exception during
|
||||
* initialization
|
||||
*/
|
||||
protected abstract void initializeFileTypeAnalyzer() throws InitializationException;
|
||||
protected abstract void prepareFileTypeAnalyzer(Engine engine) throws InitializationException;
|
||||
|
||||
//</editor-fold>
|
||||
/**
|
||||
@@ -135,7 +130,7 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
|
||||
* constructs a new Set that can be used in a final static declaration.</p>
|
||||
* <p>
|
||||
* This implementation was copied from
|
||||
* http://stackoverflow.com/questions/2041778/initialize-java-hashset-values-by-construction</p>
|
||||
* http://stackoverflow.com/questions/2041778/prepare-java-hashset-values-by-construction</p>
|
||||
*
|
||||
* @param strings a list of strings to add to the set.
|
||||
* @return a Set of strings.
|
||||
@@ -0,0 +1,291 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2017 Steve Springett. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import javax.json.Json;
|
||||
import javax.json.JsonArray;
|
||||
import javax.json.JsonObject;
|
||||
import javax.json.JsonObjectBuilder;
|
||||
import javax.json.JsonString;
|
||||
import javax.json.JsonValue;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.utils.Checksum;
|
||||
|
||||
/**
|
||||
* An abstract NPM analyzer that contains common methods for concrete
|
||||
* implementations.
|
||||
*
|
||||
* @author Steve Springett
|
||||
*/
|
||||
@ThreadSafe
|
||||
public abstract class AbstractNpmAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractNpmAnalyzer.class);
|
||||
|
||||
/**
|
||||
* A descriptor for the type of dependencies processed or added by this
|
||||
* analyzer.
|
||||
*/
|
||||
public static final String NPM_DEPENDENCY_ECOSYSTEM = "npm";
|
||||
/**
|
||||
* The file name to scan.
|
||||
*/
|
||||
private static final String PACKAGE_JSON = "package.json";
|
||||
|
||||
/**
|
||||
* Determines if the file can be analyzed by the analyzer.
|
||||
*
|
||||
* @param pathname the path to the file
|
||||
* @return true if the file can be analyzed by the given analyzer; otherwise
|
||||
* false
|
||||
*/
|
||||
@Override
|
||||
public boolean accept(File pathname) {
|
||||
boolean accept = super.accept(pathname);
|
||||
if (accept) {
|
||||
try {
|
||||
accept |= shouldProcess(pathname);
|
||||
} catch (AnalysisException ex) {
|
||||
throw new RuntimeException(ex.getMessage(), ex.getCause());
|
||||
}
|
||||
}
|
||||
|
||||
return accept;
|
||||
}
|
||||
|
||||
    /**
     * Determines if the path contains "/node_modules/" (i.e. it is a child
     * module). This analyzer does not scan child modules.
     *
     * @param pathname the path to test
     * @return <code>true</code> if the path does not contain "/node_modules/"
     * @throws AnalysisException thrown if the canonical path cannot be obtained
     * from the given file
     */
|
||||
protected boolean shouldProcess(File pathname) throws AnalysisException {
|
||||
try {
|
||||
// Do not scan the node_modules directory
|
||||
if (pathname.getCanonicalPath().contains(File.separator + "node_modules" + File.separator)) {
|
||||
LOGGER.debug("Skipping analysis of node module: {}", pathname.getCanonicalPath());
|
||||
return false;
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
throw new AnalysisException("Unable to process dependency", ex);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a dependency object.
|
||||
*
|
||||
* @param dependency the parent dependency
|
||||
* @param name the name of the dependency to create
|
||||
* @param version the version of the dependency to create
|
||||
* @param scope the scope of the dependency being created
|
||||
* @return the generated dependency
|
||||
*/
|
||||
protected Dependency createDependency(Dependency dependency, String name, String version, String scope) {
|
||||
final Dependency nodeModule = new Dependency(new File(dependency.getActualFile() + "?" + name), true);
|
||||
nodeModule.setEcosystem(NPM_DEPENDENCY_ECOSYSTEM);
|
||||
//this is virtual - the sha1 is purely for the hyperlink in the final html report
|
||||
nodeModule.setSha1sum(Checksum.getSHA1Checksum(String.format("%s:%s", name, version)));
|
||||
nodeModule.setMd5sum(Checksum.getMD5Checksum(String.format("%s:%s", name, version)));
|
||||
nodeModule.addEvidence(EvidenceType.PRODUCT, "package.json", "name", name, Confidence.HIGHEST);
|
||||
nodeModule.addEvidence(EvidenceType.VENDOR, "package.json", "name", name, Confidence.HIGH);
|
||||
nodeModule.addEvidence(EvidenceType.VERSION, "package.json", "version", version, Confidence.HIGHEST);
|
||||
nodeModule.addProjectReference(dependency.getName() + ": " + scope);
|
||||
nodeModule.setName(name);
|
||||
nodeModule.setVersion(version);
|
||||
nodeModule.addIdentifier("npm", String.format("%s:%s", name, version), null, Confidence.HIGHEST);
|
||||
return nodeModule;
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes a part of package.json (as defined by JsonArray) and update the
|
||||
* specified dependency with relevant info.
|
||||
*
|
||||
* @param engine the dependency-check engine
|
||||
* @param dependency the Dependency to update
|
||||
* @param jsonArray the jsonArray to parse
|
||||
* @param depType the dependency type
|
||||
*/
|
||||
protected void processPackage(Engine engine, Dependency dependency, JsonArray jsonArray, String depType) {
|
||||
final JsonObjectBuilder builder = Json.createObjectBuilder();
|
||||
for (JsonString str : jsonArray.getValuesAs(JsonString.class)) {
|
||||
builder.add(str.toString(), "");
|
||||
}
|
||||
final JsonObject jsonObject = builder.build();
|
||||
processPackage(engine, dependency, jsonObject, depType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes a part of package.json (as defined by JsonObject) and update
|
||||
* the specified dependency with relevant info.
|
||||
*
|
||||
* @param engine the dependency-check engine
|
||||
* @param dependency the Dependency to update
|
||||
* @param jsonObject the jsonObject to parse
|
||||
* @param depType the dependency type
|
||||
*/
|
||||
protected void processPackage(Engine engine, Dependency dependency, JsonObject jsonObject, String depType) {
|
||||
for (int i = 0; i < jsonObject.size(); i++) {
|
||||
for (Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
|
||||
|
||||
final String name = entry.getKey();
|
||||
String version = "";
|
||||
if (entry.getValue() != null && entry.getValue().getValueType() == JsonValue.ValueType.STRING) {
|
||||
version = ((JsonString) entry.getValue()).getString();
|
||||
}
|
||||
final Dependency existing = findDependency(engine, name, version);
|
||||
if (existing == null) {
|
||||
final Dependency nodeModule = createDependency(dependency, name, version, depType);
|
||||
engine.addDependency(nodeModule);
|
||||
} else {
|
||||
existing.addProjectReference(dependency.getName() + ": " + depType);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds information to an evidence collection from the node json
|
||||
* configuration.
|
||||
*
|
||||
* @param dep the dependency to add the evidence
|
||||
* @param t the type of evidence to add
|
||||
* @param json information from node.js
|
||||
* @return the actual string set into evidence
|
||||
* @param key the key to obtain the data from the json information
|
||||
*/
|
||||
private static String addToEvidence(Dependency dep, EvidenceType t, JsonObject json, String key) {
|
||||
String evidenceStr = null;
|
||||
if (json.containsKey(key)) {
|
||||
final JsonValue value = json.get(key);
|
||||
if (value instanceof JsonString) {
|
||||
evidenceStr = ((JsonString) value).getString();
|
||||
dep.addEvidence(t, PACKAGE_JSON, key, evidenceStr, Confidence.HIGHEST);
|
||||
} else if (value instanceof JsonObject) {
|
||||
final JsonObject jsonObject = (JsonObject) value;
|
||||
for (final Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
|
||||
final String property = entry.getKey();
|
||||
final JsonValue subValue = entry.getValue();
|
||||
if (subValue instanceof JsonString) {
|
||||
evidenceStr = ((JsonString) subValue).getString();
|
||||
dep.addEvidence(t, PACKAGE_JSON,
|
||||
String.format("%s.%s", key, property),
|
||||
evidenceStr,
|
||||
Confidence.HIGHEST);
|
||||
} else {
|
||||
LOGGER.warn("JSON sub-value not string as expected: {}", subValue);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
LOGGER.warn("JSON value not string or JSON object as expected: {}", value);
|
||||
}
|
||||
}
|
||||
return evidenceStr;
|
||||
}
|
||||
|
||||
/**
* Locates the dependency from the list of dependencies that have been
* scanned by the engine.
*
* @param engine the dependency-check engine
* @param name the name of the dependency to find
* @param version the version of the dependency to find
* @return the identified dependency; otherwise null
*/
protected Dependency findDependency(Engine engine, String name, String version) {
for (Dependency d : engine.getDependencies()) {
if (NPM_DEPENDENCY_ECOSYSTEM.equals(d.getEcosystem()) && name.equals(d.getName()) && version != null && d.getVersion() != null) {
final String dependencyVersion = d.getVersion();
if (DependencyBundlingAnalyzer.npmVersionsMatch(version, dependencyVersion)) {
return d;
}
}
}
return null;
}
|
||||
|
||||
/**
* Collects evidence from the given JSON for the associated dependency.
*
* @param json the JSON that contains the evidence to collect
* @param dependency the dependency to add the evidence to
*/
public void gatherEvidence(final JsonObject json, Dependency dependency) {
if (json.containsKey("name")) {
final Object value = json.get("name");
if (value instanceof JsonString) {
final String valueString = ((JsonString) value).getString();
dependency.setName(valueString);
dependency.setPackagePath(valueString);
dependency.addEvidence(EvidenceType.PRODUCT, PACKAGE_JSON, "name", valueString, Confidence.HIGHEST);
dependency.addEvidence(EvidenceType.VENDOR, PACKAGE_JSON, "name", valueString, Confidence.HIGH);
} else {
LOGGER.warn("JSON value not string as expected: {}", value);
}
}
final String desc = addToEvidence(dependency, EvidenceType.PRODUCT, json, "description");
dependency.setDescription(desc);
addToEvidence(dependency, EvidenceType.VENDOR, json, "author");
final String version = addToEvidence(dependency, EvidenceType.VERSION, json, "version");
if (version != null) {
dependency.setVersion(version);
dependency.addIdentifier("npm", String.format("%s:%s", dependency.getName(), version), null, Confidence.HIGHEST);
}

// Adds the license if defined in package.json
if (json.containsKey("license")) {
final Object value = json.get("license");
if (value instanceof JsonString) {
dependency.setLicense(json.getString("license"));
} else if (value instanceof JsonArray) {
final JsonArray array = (JsonArray) value;
final StringBuilder sb = new StringBuilder();
boolean addComma = false;
for (int x = 0; x < array.size(); x++) {
if (!array.isNull(x)) {
if (addComma) {
sb.append(", ");
} else {
addComma = true;
}
sb.append(array.getString(x));
}
}
dependency.setLicense(sb.toString());
} else {
dependency.setLicense(json.getJsonObject("license").getString("type"));
}
}
}
}
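To make the evidence mapping above concrete, here is a minimal sketch of the package.json shape that gatherEvidence consumes and the evidence it would derive. The module name and field values are invented for illustration, and the snippet only builds the JSON input; it does not call the analyzer.

```java
import javax.json.Json;
import javax.json.JsonObject;

public class PackageJsonSketch {
    public static void main(String[] args) {
        // Shape of the package.json fragment that gatherEvidence inspects (illustrative values).
        JsonObject packageJson = Json.createObjectBuilder()
                .add("name", "left-pad")                // -> product (HIGHEST) and vendor (HIGH) evidence
                .add("description", "String padding")   // -> product evidence plus the dependency description
                .add("version", "1.1.3")                // -> version evidence and an npm identifier "left-pad:1.1.3"
                .add("license", "MIT")                  // -> dependency license
                .build();
        System.out.println(packageJson);
    }
}
```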
|
||||
@@ -22,9 +22,14 @@ import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Pattern;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.xml.suppression.SuppressionParseException;
|
||||
import org.owasp.dependencycheck.xml.suppression.SuppressionParser;
|
||||
@@ -39,18 +44,31 @@ import org.xml.sax.SAXException;
|
||||
|
||||
/**
|
||||
* Abstract base suppression analyzer that contains methods for parsing the
|
||||
* suppression xml file.
|
||||
* suppression XML file.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger for use throughout the class
|
||||
* The Logger for use throughout the class.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractSuppressionAnalyzer.class);
|
||||
/**
|
||||
* The list of suppression rules.
|
||||
*/
|
||||
private List<SuppressionRule> rules = new ArrayList<>();
|
||||
|
||||
/**
|
||||
* Get the number of suppression rules.
|
||||
*
|
||||
* @return the number of suppression rules
|
||||
*/
|
||||
protected int getRuleCount() {
|
||||
return rules.size();
|
||||
}
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
* Returns a list of file EXTENSIONS supported by this analyzer.
|
||||
*
|
||||
@@ -60,86 +78,130 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
return null;
|
||||
}
|
||||
|
||||
//</editor-fold>
|
||||
/**
|
||||
* The initialize method loads the suppression XML file.
|
||||
* The prepare method loads the suppression XML file.
|
||||
*
|
||||
* @param engine a reference the dependency-check engine
|
||||
* @throws InitializationException thrown if there is an exception
|
||||
*/
|
||||
@Override
|
||||
public void initializeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
loadSuppressionData();
|
||||
} catch (SuppressionParseException ex) {
|
||||
throw new InitializationException("Error initializing the suppression analyzer", ex);
|
||||
public synchronized void prepareAnalyzer(Engine engine) throws InitializationException {
|
||||
if (rules.isEmpty()) {
|
||||
try {
|
||||
loadSuppressionBaseData();
|
||||
} catch (SuppressionParseException ex) {
|
||||
throw new InitializationException("Error initializing the suppression analyzer: " + ex.getLocalizedMessage(), ex, true);
|
||||
}
|
||||
|
||||
try {
|
||||
loadSuppressionData();
|
||||
} catch (SuppressionParseException ex) {
|
||||
throw new InitializationException("Warn initializing the suppression analyzer: " + ex.getLocalizedMessage(), ex, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
if (rules.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
for (final SuppressionRule rule : rules) {
|
||||
rule.process(dependency);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The list of suppression rules
|
||||
*/
|
||||
private List<SuppressionRule> rules;
|
||||
|
||||
/**
|
||||
* Get the value of rules.
|
||||
*
|
||||
* @return the value of rules
|
||||
*/
|
||||
public List<SuppressionRule> getRules() {
|
||||
return rules;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of rules.
|
||||
*
|
||||
* @param rules new value of rules
|
||||
*/
|
||||
public void setRules(List<SuppressionRule> rules) {
|
||||
this.rules = rules;
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the suppression rules file.
|
||||
* Loads all the suppression rules files configured in the {@link Settings}.
|
||||
*
|
||||
* @throws SuppressionParseException thrown if the XML cannot be parsed.
|
||||
*/
|
||||
private void loadSuppressionData() throws SuppressionParseException {
|
||||
final List<SuppressionRule> ruleList = new ArrayList<>();
|
||||
final SuppressionParser parser = new SuppressionParser();
|
||||
File file = null;
|
||||
final String[] suppressionFilePaths = getSettings().getArray(Settings.KEYS.SUPPRESSION_FILE);
|
||||
final List<String> failedLoadingFiles = new ArrayList<>();
|
||||
if (suppressionFilePaths != null && suppressionFilePaths.length > 0) {
|
||||
// Load all the suppression file paths
|
||||
for (final String suppressionFilePath : suppressionFilePaths) {
|
||||
try {
|
||||
ruleList.addAll(loadSuppressionFile(parser, suppressionFilePath));
|
||||
} catch (SuppressionParseException ex) {
|
||||
final String msg = String.format("Failed to load %s, caused by %s. ", suppressionFilePath, ex.getMessage());
|
||||
failedLoadingFiles.add(msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
LOGGER.debug("{} suppression rules were loaded.", ruleList.size());
|
||||
rules.addAll(ruleList);
|
||||
if (!failedLoadingFiles.isEmpty()) {
|
||||
LOGGER.debug("{} suppression files failed to load.", failedLoadingFiles.size());
|
||||
final StringBuilder sb = new StringBuilder();
|
||||
for (String item : failedLoadingFiles) {
|
||||
sb.append(item);
|
||||
}
|
||||
throw new SuppressionParseException(sb.toString());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads all the base suppression rules files.
|
||||
*
|
||||
* @throws SuppressionParseException thrown if the XML cannot be parsed.
|
||||
*/
|
||||
private void loadSuppressionBaseData() throws SuppressionParseException {
|
||||
final SuppressionParser parser = new SuppressionParser();
|
||||
List<SuppressionRule> ruleList;
|
||||
try {
|
||||
final InputStream in = this.getClass().getClassLoader().getResourceAsStream("dependencycheck-base-suppression.xml");
|
||||
rules = parser.parseSuppressionRules(in);
|
||||
final InputStream in = FileUtils.getResourceAsStream("dependencycheck-base-suppression.xml");
|
||||
ruleList = parser.parseSuppressionRules(in);
|
||||
} catch (SAXException ex) {
|
||||
throw new SuppressionParseException("Unable to parse the base suppression data file", ex);
|
||||
}
|
||||
final String suppressionFilePath = Settings.getString(Settings.KEYS.SUPPRESSION_FILE);
|
||||
if (suppressionFilePath == null) {
|
||||
return;
|
||||
}
|
||||
rules.addAll(ruleList);
|
||||
}
|
||||
|
||||
/**
|
||||
* Load a single suppression rules file from the path provided using the
|
||||
* parser provided.
|
||||
*
|
||||
* @param parser the parser to use for loading the file
|
||||
* @param suppressionFilePath the path to load
|
||||
* @return the list of loaded suppression rules
|
||||
* @throws SuppressionParseException thrown if the suppression file cannot
|
||||
* be loaded and parsed.
|
||||
*/
|
||||
private List<SuppressionRule> loadSuppressionFile(final SuppressionParser parser,
|
||||
final String suppressionFilePath) throws SuppressionParseException {
|
||||
LOGGER.debug("Loading suppression rules from '{}'", suppressionFilePath);
|
||||
final List<SuppressionRule> list = new ArrayList<>();
|
||||
File file = null;
|
||||
boolean deleteTempFile = false;
|
||||
try {
|
||||
final Pattern uriRx = Pattern.compile("^(https?|file)\\:.*", Pattern.CASE_INSENSITIVE);
|
||||
if (uriRx.matcher(suppressionFilePath).matches()) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("suppression", "xml");
|
||||
file = getSettings().getTempFile("suppression", "xml");
|
||||
final URL url = new URL(suppressionFilePath);
|
||||
final Downloader downloader = new Downloader(getSettings());
|
||||
try {
|
||||
Downloader.fetchFile(url, file, false);
|
||||
downloader.fetchFile(url, file, false);
|
||||
} catch (DownloadFailedException ex) {
|
||||
Downloader.fetchFile(url, file, true);
|
||||
LOGGER.trace("Failed download - first attempt", ex);
|
||||
downloader.fetchFile(url, file, true);
|
||||
}
|
||||
} else {
|
||||
file = new File(suppressionFilePath);
|
||||
|
||||
if (!file.exists()) {
|
||||
try (InputStream suppressionsFromClasspath = this.getClass().getClassLoader().getResourceAsStream(suppressionFilePath)) {
|
||||
try (InputStream suppressionsFromClasspath = FileUtils.getResourceAsStream(suppressionFilePath)) {
|
||||
if (suppressionsFromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("suppression", "xml");
|
||||
file = getSettings().getTempFile("suppression", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(suppressionsFromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throwSuppressionParseException("Unable to locate suppressions file in classpath", ex);
|
||||
throwSuppressionParseException("Unable to locate suppressions file in classpath", ex, suppressionFilePath);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -147,13 +209,12 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
if (file != null) {
|
||||
if (!file.exists()) {
|
||||
final String msg = String.format("Suppression file '%s' does not exists", file.getPath());
|
||||
final String msg = String.format("Suppression file '%s' does not exist", file.getPath());
|
||||
LOGGER.warn(msg);
|
||||
throw new SuppressionParseException(msg);
|
||||
}
|
||||
try {
|
||||
rules.addAll(parser.parseSuppressionRules(file));
|
||||
LOGGER.debug("{} suppression rules were loaded.", rules.size());
|
||||
list.addAll(parser.parseSuppressionRules(file));
|
||||
} catch (SuppressionParseException ex) {
|
||||
LOGGER.warn("Unable to parse suppression xml file '{}'", file.getPath());
|
||||
LOGGER.warn(ex.getMessage());
|
||||
@@ -161,18 +222,19 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
}
|
||||
} catch (DownloadFailedException ex) {
|
||||
throwSuppressionParseException("Unable to fetch the configured suppression file", ex);
|
||||
throwSuppressionParseException("Unable to fetch the configured suppression file", ex, suppressionFilePath);
|
||||
} catch (MalformedURLException ex) {
|
||||
throwSuppressionParseException("Configured suppression file has an invalid URL", ex);
|
||||
throwSuppressionParseException("Configured suppression file has an invalid URL", ex, suppressionFilePath);
|
||||
} catch (SuppressionParseException ex) {
|
||||
throw ex;
|
||||
} catch (IOException ex) {
|
||||
throwSuppressionParseException("Unable to create temp file for suppressions", ex);
|
||||
throwSuppressionParseException("Unable to create temp file for suppressions", ex, suppressionFilePath);
|
||||
} finally {
|
||||
if (deleteTempFile && file != null) {
|
||||
FileUtils.delete(file);
|
||||
}
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -180,11 +242,12 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
*
|
||||
* @param message the exception message
|
||||
* @param exception the cause of the exception
|
||||
* @param suppressionFilePath the path file
|
||||
* @throws SuppressionParseException throws the generated
|
||||
* SuppressionParseException
|
||||
*/
|
||||
private void throwSuppressionParseException(String message, Exception exception) throws SuppressionParseException {
|
||||
LOGGER.warn(message);
|
||||
private void throwSuppressionParseException(String message, Exception exception, String suppressionFilePath) throws SuppressionParseException {
|
||||
LOGGER.warn("{} '{}'", message, suppressionFilePath);
|
||||
LOGGER.debug("", exception);
|
||||
throw new SuppressionParseException(message, exception);
|
||||
}
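As a side note on loadSuppressionFile above: whether a configured suppression source is downloaded or read locally hinges on the `^(https?|file)\:.*` prefix check. The standalone illustration below runs the same pattern over a few made-up sample paths.

```java
import java.util.regex.Pattern;

public class SuppressionPathSketch {
    public static void main(String[] args) {
        // Same prefix pattern used by loadSuppressionFile above.
        Pattern uriRx = Pattern.compile("^(https?|file)\\:.*", Pattern.CASE_INSENSITIVE);
        String[] samples = {
            "https://example.org/suppressions.xml",   // downloaded to a temp file
            "file:/opt/dc/suppressions.xml",          // also treated as a URL
            "config/suppressions.xml"                 // treated as a local path or classpath resource
        };
        for (String s : samples) {
            System.out.println(s + " -> url? " + uriRx.matcher(s).matches());
        }
    }
}
```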
|
||||
@@ -21,11 +21,22 @@ import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* An interface that defines an Analyzer that is used to identify Dependencies.
|
||||
* An analyzer will collect information about the dependency in the form of
|
||||
* Evidence.
|
||||
* Evidence.</p>
|
||||
* <p>
|
||||
* When the {@link org.owasp.dependencycheck.Engine} executes it will load the
|
||||
* analyzers and call the methods in the following order:</p>
|
||||
* <ol>
|
||||
* <li>{@link #initialize(org.owasp.dependencycheck.utils.Settings)}</li>
|
||||
* <li>{@link #prepare(org.owasp.dependencycheck.Engine)}</li>
|
||||
* <li>{@link #analyze(org.owasp.dependencycheck.dependency.Dependency, org.owasp.dependencycheck.Engine)}</li>
|
||||
* <li>{@link #close()}</li>
|
||||
* </ol>
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@@ -60,13 +71,21 @@ public interface Analyzer {
|
||||
AnalysisPhase getAnalysisPhase();
|
||||
|
||||
/**
|
||||
* The initialize method is called (once) prior to the analyze method being
|
||||
* Initializes the analyzer with the configured settings.
|
||||
*
|
||||
* @param settings the configured settings
|
||||
*/
|
||||
void initialize(Settings settings);
|
||||
|
||||
/**
|
||||
* The prepare method is called (once) prior to the analyze method being
|
||||
* called on all of the dependencies.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException is thrown if an exception occurs
|
||||
* initializing the analyzer.
|
||||
*/
|
||||
void initialize() throws InitializationException;
|
||||
void prepare(Engine engine) throws InitializationException;
|
||||
|
||||
/**
|
||||
* The close method is called after all of the dependencies have been
|
||||
@@ -77,16 +96,20 @@ public interface Analyzer {
|
||||
void close() throws Exception;
|
||||
|
||||
/**
|
||||
* Returns whether multiple instances of the same type of analyzer can run in parallel.
|
||||
* Note that running analyzers of different types in parallel is not supported at all.
|
||||
* Returns whether multiple instances of the same type of analyzer can run
|
||||
* in parallel. Note that running analyzers of different types in parallel
|
||||
* is not supported at all.
|
||||
*
|
||||
* @return {@code true} if the analyzer supports parallel processing, {@code false} else
|
||||
* @return {@code true} if the analyzer supports parallel processing,
|
||||
* {@code false} else
|
||||
*/
|
||||
boolean supportsParallelProcessing();
|
||||
|
||||
/**
|
||||
* Get the value of enabled.
|
||||
*
|
||||
* @return the value of enabled
|
||||
*/
|
||||
boolean isEnabled();
|
||||
|
||||
}
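The ordered list in the Javadoc above defines the analyzer lifecycle. The fragment below is a hedged sketch of how a caller could drive that contract; it is not the Engine's actual scheduling logic, and it assumes Analyzer, Engine, Settings, and Dependency instances are already available from the surrounding code.

```java
// Hedged sketch only: the Engine's real scheduling is more involved.
void runLifecycle(Analyzer analyzer, Engine engine, Settings settings,
        java.util.List<Dependency> dependencies) throws Exception {
    analyzer.initialize(settings);          // 1. hand over the configured settings
    if (!analyzer.isEnabled()) {
        return;                             // disabled analyzers are skipped
    }
    analyzer.prepare(engine);               // 2. one-time preparation (load data, temp files, ...)
    for (Dependency d : dependencies) {
        analyzer.analyze(d, engine);        // 3. per-dependency analysis
    }
    analyzer.close();                       // 4. release any resources held
}
```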
|
||||
@@ -18,20 +18,25 @@
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import static java.util.Arrays.asList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.ServiceLoader;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* The Analyzer Service Loader. This class loads all services that implement
|
||||
* org.owasp.dependencycheck.analyzer.Analyzer.
|
||||
* {@link org.owasp.dependencycheck.analyzer.Analyzer}.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class AnalyzerService {
|
||||
|
||||
/**
|
||||
* The Logger for use throughout the class.
|
||||
*/
|
||||
@@ -41,14 +46,21 @@ public class AnalyzerService {
|
||||
* The service loader for analyzers.
|
||||
*/
|
||||
private final ServiceLoader<Analyzer> service;
|
||||
/**
|
||||
* The configured settings.
|
||||
*/
|
||||
private final Settings settings;
|
||||
|
||||
/**
|
||||
* Creates a new instance of AnalyzerService.
|
||||
*
|
||||
* @param classLoader the ClassLoader to use when dynamically loading Analyzer and Update services
|
||||
* @param classLoader the ClassLoader to use when dynamically loading
|
||||
* Analyzer and Update services
|
||||
* @param settings the configured settings
|
||||
*/
|
||||
public AnalyzerService(ClassLoader classLoader) {
|
||||
public AnalyzerService(ClassLoader classLoader, Settings settings) {
|
||||
service = ServiceLoader.load(Analyzer.class, classLoader);
|
||||
this.settings = settings;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -57,19 +69,49 @@ public class AnalyzerService {
|
||||
* @return a list of Analyzers.
|
||||
*/
|
||||
public List<Analyzer> getAnalyzers() {
|
||||
return getAnalyzers(AnalysisPhase.values());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a list of all instances of the Analyzer interface that are bound
|
||||
* to one of the given phases.
|
||||
*
|
||||
* @param phases the phases to obtain analyzers for
|
||||
* @return a list of Analyzers.
|
||||
*/
|
||||
public List<Analyzer> getAnalyzers(AnalysisPhase... phases) {
|
||||
return getAnalyzers(asList(phases));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a list of all instances of the Analyzer interface that are bound
|
||||
* to one of the given phases.
|
||||
*
|
||||
* @param phases the phases to obtain analyzers for
|
||||
* @return a list of Analyzers
|
||||
*/
|
||||
private List<Analyzer> getAnalyzers(List<AnalysisPhase> phases) {
|
||||
final List<Analyzer> analyzers = new ArrayList<>();
|
||||
final Iterator<Analyzer> iterator = service.iterator();
|
||||
boolean experimentalEnabled = false;
|
||||
boolean retiredEnabled = false;
|
||||
try {
|
||||
experimentalEnabled = Settings.getBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, false);
|
||||
experimentalEnabled = settings.getBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, false);
|
||||
retiredEnabled = settings.getBoolean(Settings.KEYS.ANALYZER_RETIRED_ENABLED, false);
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.error("invalid experimental setting", ex);
|
||||
LOGGER.error("invalid experimental or retired setting", ex);
|
||||
}
|
||||
while (iterator.hasNext()) {
|
||||
final Analyzer a = iterator.next();
|
||||
if (!phases.contains(a.getAnalysisPhase())) {
|
||||
continue;
|
||||
}
|
||||
if (!experimentalEnabled && a.getClass().isAnnotationPresent(Experimental.class)) {
|
||||
continue;
|
||||
}
|
||||
if (!retiredEnabled && a.getClass().isAnnotationPresent(Retired.class)) {
|
||||
continue;
|
||||
}
|
||||
LOGGER.debug("Loaded Analyzer {}", a.getName());
|
||||
analyzers.add(a);
|
||||
}
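The experimental/retired gating above relies on runtime marker annotations on the analyzer classes. Below is a tiny self-contained illustration of the same `isAnnotationPresent` filter; the `Experimental` annotation here is a local stand-in, not the project's own class.

```java
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

public class AnnotationFilterSketch {

    @Retention(RetentionPolicy.RUNTIME)
    @interface Experimental { }            // stand-in for the project's marker annotation

    @Experimental
    static class SampleAnalyzer { }

    public static void main(String[] args) {
        boolean experimentalEnabled = false;   // mirrors ANALYZER_EXPERIMENTAL_ENABLED defaulting to false
        boolean skip = !experimentalEnabled
                && SampleAnalyzer.class.isAnnotationPresent(Experimental.class);
        System.out.println("skip SampleAnalyzer: " + skip);   // true -> the analyzer is filtered out
    }
}
```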
|
||||
@@ -28,6 +28,8 @@ import java.util.Collections;
|
||||
import java.util.Enumeration;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
import org.apache.commons.compress.archivers.ArchiveEntry;
|
||||
import org.apache.commons.compress.archivers.ArchiveInputStream;
|
||||
@@ -61,6 +63,7 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -71,7 +74,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* The count of directories created during analysis. This is used for
|
||||
* creating temporary directories.
|
||||
*/
|
||||
private static int dirCount = 0;
|
||||
private static final AtomicInteger DIRECTORY_COUNT = new AtomicInteger(0);
|
||||
/**
|
||||
* The parent directory for the individual directories per archive.
|
||||
*/
|
||||
@@ -80,21 +83,11 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* The max scan depth that the analyzer will recursively extract nested
|
||||
* archives.
|
||||
*/
|
||||
private static final int MAX_SCAN_DEPTH = Settings.getInt("archive.scan.depth", 3);
|
||||
private int maxScanDepth;
|
||||
/**
|
||||
* Tracks the current scan/extraction depth for nested archives.
|
||||
* The file filter used to filter supported files.
|
||||
*/
|
||||
private int scanDepth = 0;
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "Archive Analyzer";
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INITIAL;
|
||||
private FileFilter fileFilter = null;
|
||||
/**
|
||||
* The set of things we can handle with Zip methods
|
||||
*/
|
||||
@@ -106,35 +99,41 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz", "bz2", "tbz2");
|
||||
|
||||
static {
|
||||
final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS);
|
||||
if (additionalZipExt != null) {
|
||||
final String[] ext = additionalZipExt.split("\\s*,\\s*");
|
||||
Collections.addAll(KNOWN_ZIP_EXT, ext);
|
||||
}
|
||||
EXTENSIONS.addAll(KNOWN_ZIP_EXT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Detects files with extensions to remove from the engine's collection of
|
||||
* dependencies.
|
||||
*/
|
||||
private static final FileFilter REMOVE_FROM_ANALYSIS = FileFilterBuilder.newInstance()
|
||||
.addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2").build();
|
||||
|
||||
/**
|
||||
* The file filter used to filter supported files.
|
||||
*/
|
||||
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
|
||||
|
||||
/**
|
||||
* Detects files with .zip extension.
|
||||
*/
|
||||
private static final FileFilter ZIP_FILTER = FileFilterBuilder.newInstance().addExtensions("zip").build();
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "Archive Analyzer";
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INITIAL;
|
||||
|
||||
/**
|
||||
* Initializes the analyzer with the configured settings.
|
||||
*
|
||||
* @param settings the configured settings to use
|
||||
*/
|
||||
@Override
|
||||
public void initialize(Settings settings) {
|
||||
super.initialize(settings);
|
||||
initializeSettings();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected FileFilter getFileFilter() {
|
||||
return FILTER;
|
||||
return fileFilter;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -170,15 +169,16 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* The initialize method does nothing for this Analyzer.
|
||||
* The prepare method does nothing for this Analyzer.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException is thrown if there is an exception
|
||||
* deleting or creating temporary files
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
try {
|
||||
final File baseDir = Settings.getTempDirectory();
|
||||
final File baseDir = getSettings().getTempDirectory();
|
||||
tempFileLocation = File.createTempFile("check", "tmp", baseDir);
|
||||
if (!tempFileLocation.delete()) {
|
||||
setEnabled(false);
|
||||
@@ -206,31 +206,18 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
@Override
|
||||
public void closeAnalyzer() throws Exception {
|
||||
if (tempFileLocation != null && tempFileLocation.exists()) {
|
||||
LOGGER.debug("Attempting to delete temporary files");
|
||||
LOGGER.debug("Attempting to delete temporary files from `{}`", tempFileLocation.toString());
|
||||
final boolean success = FileUtils.delete(tempFileLocation);
|
||||
if (!success && tempFileLocation.exists()) {
|
||||
final String[] l = tempFileLocation.list();
|
||||
if (l != null && l.length > 0) {
|
||||
LOGGER.warn("Failed to delete some temporary files, see the log for more details");
|
||||
LOGGER.warn("Failed to delete the Archive Analyzer's temporary files from `{}`, "
|
||||
+ "see the log for more details", tempFileLocation.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Does not support parallel processing as it both modifies and iterates
|
||||
* over the engine's list of dependencies.
|
||||
*
|
||||
* @return <code>true</code> if the analyzer supports parallel processing;
|
||||
* otherwise <code>false</code>
|
||||
* @see #analyzeDependency(Dependency, Engine)
|
||||
* @see #findMoreDependencies(Engine, File)
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a given dependency. If the dependency is an archive, such as a
|
||||
* WAR or EAR, the contents are extracted, scanned, and added to the list of
|
||||
@@ -242,6 +229,22 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
@Override
|
||||
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
extractAndAnalyze(dependency, engine, 0);
|
||||
engine.sortDependencies();
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the contents of the archive dependency and scans for additional
|
||||
* dependencies.
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param engine the engine doing the analysis
|
||||
* @param scanDepth the current scan depth; extractAndAnalyze is recursive
* and will, by default, only go 3 levels deep
|
||||
* @throws AnalysisException thrown if there is a problem analyzing the
|
||||
* dependencies
|
||||
*/
|
||||
private void extractAndAnalyze(Dependency dependency, Engine engine, int scanDepth) throws AnalysisException {
|
||||
final File f = new File(dependency.getActualFilePath());
|
||||
final File tmpDir = getNextTempDirectory();
|
||||
extractFiles(f, tmpDir, engine);
|
||||
@@ -261,14 +264,12 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
d.getFileName());
|
||||
d.setFilePath(displayPath);
|
||||
d.setFileName(displayName);
|
||||
d.setProjectReferences(dependency.getProjectReferences());
|
||||
d.addAllProjectReferences(dependency.getProjectReferences());
|
||||
|
||||
//TODO - can we get more evidence from the parent? EAR contains module name, etc.
|
||||
//analyze the dependency (i.e. extract files) if it is a supported type.
|
||||
if (this.accept(d.getActualFile()) && scanDepth < MAX_SCAN_DEPTH) {
|
||||
scanDepth += 1;
|
||||
analyze(d, engine);
|
||||
scanDepth -= 1;
|
||||
if (this.accept(d.getActualFile()) && scanDepth < maxScanDepth) {
|
||||
extractAndAnalyze(d, engine, scanDepth + 1);
|
||||
}
|
||||
} else {
|
||||
for (Dependency sub : dependencySet) {
|
||||
@@ -288,9 +289,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
if (REMOVE_FROM_ANALYSIS.accept(dependency.getActualFile())) {
|
||||
addDisguisedJarsToDependencies(dependency, engine);
|
||||
engine.getDependencies().remove(dependency);
|
||||
engine.removeDependency(dependency);
|
||||
}
|
||||
Collections.sort(engine.getDependencies());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -357,8 +357,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException thrown if unable to create temporary directory
|
||||
*/
|
||||
private File getNextTempDirectory() throws AnalysisException {
|
||||
dirCount += 1;
|
||||
final File directory = new File(tempFileLocation, String.valueOf(dirCount));
|
||||
final File directory = new File(tempFileLocation, String.valueOf(DIRECTORY_COUNT.incrementAndGet()));
|
||||
// Creating the directory occasionally fails, possibly because it already exists; if so, try the next index.
|
||||
if (directory.exists()) {
|
||||
return getNextTempDirectory();
|
||||
@@ -390,8 +389,9 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
try {
|
||||
fis = new FileInputStream(archive);
|
||||
} catch (FileNotFoundException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
throw new AnalysisException("Archive file was not found.", ex);
|
||||
final String msg = String.format("Error extracting file `%s`: %s", archive.getAbsolutePath(), ex.getMessage());
|
||||
LOGGER.debug(msg, ex);
|
||||
throw new AnalysisException(msg);
|
||||
}
|
||||
BufferedInputStream in = null;
|
||||
ZipArchiveInputStream zin = null;
|
||||
@@ -473,6 +473,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
boolean stillLooking = true;
|
||||
int chr;
|
||||
int nxtChr;
|
||||
//CSOFF: InnerAssignment
|
||||
//CSOFF: NestedIfDepth
|
||||
while (stillLooking && (chr = in.read()) != -1) {
|
||||
if (chr == '\n' || chr == '\r') {
|
||||
in.mark(4);
|
||||
@@ -490,6 +492,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
}
|
||||
}
|
||||
//CSON: InnerAssignment
|
||||
//CSON: NestedIfDepth
|
||||
} else {
|
||||
in.reset();
|
||||
}
|
||||
@@ -603,4 +607,19 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
return isJar;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes settings used by the scanning functions of the archive
|
||||
* analyzer.
|
||||
*/
|
||||
private void initializeSettings() {
|
||||
maxScanDepth = getSettings().getInt("archive.scan.depth", 3);
|
||||
final String additionalZipExt = getSettings().getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS);
|
||||
if (additionalZipExt != null) {
|
||||
final String[] ext = additionalZipExt.split("\\s*,\\s*");
|
||||
Collections.addAll(KNOWN_ZIP_EXT, ext);
|
||||
}
|
||||
EXTENSIONS.addAll(KNOWN_ZIP_EXT);
|
||||
fileFilter = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
|
||||
}
|
||||
}
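The extractAndAnalyze refactor above threads the scan depth through as a parameter instead of a shared mutable field. Below is a minimal standalone sketch of that bounded-recursion pattern; the depth limit of 3 mirrors the default shown above, while the archive names are invented.

```java
public class BoundedRecursionSketch {

    private static final int MAX_SCAN_DEPTH = 3;   // mirrors the archive.scan.depth default above

    static void extractAndAnalyze(String archive, int scanDepth) {
        System.out.println("depth " + scanDepth + ": extracting " + archive);
        // pretend each archive contains exactly one nested archive
        final String nested = archive + "!/inner.zip";
        if (scanDepth < MAX_SCAN_DEPTH) {
            extractAndAnalyze(nested, scanDepth + 1);   // depth travels as an argument, no shared state
        }
    }

    public static void main(String[] args) {
        extractAndAnalyze("outer.ear", 0);
    }
}
```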
|
||||
@@ -28,8 +28,8 @@ import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Evidence;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.FileUtils;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -42,9 +42,11 @@ import javax.xml.xpath.XPathExpressionException;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.apache.commons.lang3.SystemUtils;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
|
||||
/**
|
||||
@@ -54,6 +56,7 @@ import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
* @author colezlaw
|
||||
*
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -72,6 +75,10 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* The temp value for GrokAssembly.exe
|
||||
*/
|
||||
private File grokAssemblyExe = null;
|
||||
/**
|
||||
* The temp value for GrokAssembly.exe.config
|
||||
*/
|
||||
private File grokAssemblyConfig = null;
|
||||
/**
|
||||
* Logger
|
||||
*/
|
||||
@@ -86,8 +93,8 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
// Use file.separator as a wild guess as to whether this is Windows
|
||||
final List<String> args = new ArrayList<>();
|
||||
if (!SystemUtils.IS_OS_WINDOWS) {
|
||||
if (Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH) != null) {
|
||||
args.add(Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH));
|
||||
if (getSettings().getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH) != null) {
|
||||
args.add(getSettings().getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH));
|
||||
} else if (isInPath("mono")) {
|
||||
args.add("mono");
|
||||
} else {
|
||||
@@ -106,13 +113,16 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException if anything goes sideways
|
||||
*/
|
||||
@Override
|
||||
public void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
final File test = new File(dependency.getActualFilePath());
|
||||
if (!test.isFile()) {
|
||||
throw new AnalysisException(String.format("%s does not exist and cannot be analyzed by dependency-check",
|
||||
dependency.getActualFilePath()));
|
||||
}
|
||||
if (grokAssemblyExe == null) {
|
||||
LOGGER.warn("GrokAssembly didn't get deployed");
|
||||
return;
|
||||
}
|
||||
|
||||
final List<String> args = buildArgumentList();
|
||||
if (args == null) {
|
||||
LOGGER.warn("Assembly Analyzer was unable to execute");
|
||||
@@ -137,6 +147,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
try {
|
||||
rc = proc.waitFor();
|
||||
} catch (InterruptedException ie) {
|
||||
Thread.currentThread().interrupt();
|
||||
return;
|
||||
}
|
||||
if (rc == 3) {
|
||||
@@ -159,20 +170,17 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
final String version = xpath.evaluate("/assembly/version", doc);
|
||||
if (version != null) {
|
||||
dependency.getVersionEvidence().addEvidence(new Evidence("grokassembly", "version",
|
||||
version, Confidence.HIGHEST));
|
||||
dependency.addEvidence(EvidenceType.VERSION, "grokassembly", "version", version, Confidence.HIGHEST);
|
||||
}
|
||||
|
||||
final String vendor = xpath.evaluate("/assembly/company", doc);
|
||||
if (vendor != null) {
|
||||
dependency.getVendorEvidence().addEvidence(new Evidence("grokassembly", "vendor",
|
||||
vendor, Confidence.HIGH));
|
||||
dependency.addEvidence(EvidenceType.VENDOR, "grokassembly", "vendor", vendor, Confidence.HIGH);
|
||||
}
|
||||
|
||||
final String product = xpath.evaluate("/assembly/product", doc);
|
||||
if (product != null) {
|
||||
dependency.getProductEvidence().addEvidence(new Evidence("grokassembly", "product",
|
||||
product, Confidence.HIGH));
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, "grokassembly", "product", product, Confidence.HIGH);
|
||||
}
|
||||
|
||||
} catch (ParserConfigurationException pce) {
|
||||
@@ -186,36 +194,36 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.error("----------------------------------------------------");
|
||||
throw new AnalysisException("Couldn't parse Assembly Analyzer results (GrokAssembly)", saxe);
|
||||
}
|
||||
// This shouldn't happen
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the analyzer. In this case, extract GrokAssembly.exe to a
|
||||
* temporary location.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException thrown if anything goes wrong
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
final File tempFile;
|
||||
final String cfg;
|
||||
final File cfgFile;
|
||||
try {
|
||||
tempFile = File.createTempFile("GKA", ".exe", Settings.getTempDirectory());
|
||||
cfg = tempFile.getPath() + ".config";
|
||||
tempFile = File.createTempFile("GKA", ".exe", getSettings().getTempDirectory());
|
||||
cfgFile = new File(tempFile.getPath() + ".config");
|
||||
} catch (IOException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unable to create temporary file for the assembly analyzer", ex);
|
||||
}
|
||||
try (FileOutputStream fos = new FileOutputStream(tempFile);
|
||||
InputStream is = AssemblyAnalyzer.class.getClassLoader().getResourceAsStream("GrokAssembly.exe");
|
||||
FileOutputStream fosCfg = new FileOutputStream(cfg);
|
||||
InputStream isCfg = AssemblyAnalyzer.class.getClassLoader().getResourceAsStream("GrokAssembly.exe.config")) {
|
||||
InputStream is = FileUtils.getResourceAsStream("GrokAssembly.exe");
|
||||
FileOutputStream fosCfg = new FileOutputStream(cfgFile);
|
||||
InputStream isCfg = FileUtils.getResourceAsStream("GrokAssembly.exe.config")) {
|
||||
IOUtils.copy(is, fos);
|
||||
grokAssemblyExe = tempFile;
|
||||
LOGGER.debug("Extracted GrokAssembly.exe to {}", grokAssemblyExe.getPath());
|
||||
IOUtils.copy(isCfg, fosCfg);
|
||||
LOGGER.debug("Extracted GrokAssembly.exe.config to {}", cfg);
|
||||
grokAssemblyConfig = cfgFile;
|
||||
LOGGER.debug("Extracted GrokAssembly.exe.config to {}", cfgFile);
|
||||
} catch (IOException ioe) {
|
||||
this.setEnabled(false);
|
||||
LOGGER.warn("Could not extract GrokAssembly.exe: {}", ioe.getMessage());
|
||||
@@ -286,6 +294,15 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.debug("Can't delete temporary GrokAssembly.exe");
|
||||
grokAssemblyExe.deleteOnExit();
|
||||
}
|
||||
try {
|
||||
if (grokAssemblyConfig != null && !grokAssemblyConfig.delete()) {
|
||||
LOGGER.debug("Unable to delete temporary GrokAssembly.exe.config; attempting delete on exit");
|
||||
grokAssemblyConfig.deleteOnExit();
|
||||
}
|
||||
} catch (SecurityException se) {
|
||||
LOGGER.debug("Can't delete temporary GrokAssembly.exe.config");
|
||||
grokAssemblyConfig.deleteOnExit();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -22,7 +22,6 @@ import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.UrlStringUtils;
|
||||
@@ -33,6 +32,7 @@ import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
@@ -168,15 +168,14 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final String contents = getFileContents(actualFile);
|
||||
if (!contents.isEmpty()) {
|
||||
if (isOutputScript) {
|
||||
extractConfigureScriptEvidence(dependency, name,
|
||||
contents);
|
||||
extractConfigureScriptEvidence(dependency, name, contents);
|
||||
} else {
|
||||
gatherEvidence(dependency, name, contents);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
engine.getDependencies().remove(dependency);
|
||||
engine.removeDependency(dependency);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -195,17 +194,13 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final String value = matcher.group(2);
|
||||
if (!value.isEmpty()) {
|
||||
if (variable.endsWith("NAME")) {
|
||||
dependency.getProductEvidence().addEvidence(name, variable,
|
||||
value, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, name, variable, value, Confidence.HIGHEST);
|
||||
} else if ("VERSION".equals(variable)) {
|
||||
dependency.getVersionEvidence().addEvidence(name, variable,
|
||||
value, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.VERSION, name, variable, value, Confidence.HIGHEST);
|
||||
} else if ("BUGREPORT".equals(variable)) {
|
||||
dependency.getVendorEvidence().addEvidence(name, variable,
|
||||
value, Confidence.HIGH);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, name, variable, value, Confidence.HIGH);
|
||||
} else if ("URL".equals(variable)) {
|
||||
dependency.getVendorEvidence().addEvidence(name, variable,
|
||||
value, Confidence.HIGH);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, name, variable, value, Confidence.HIGH);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -239,27 +234,19 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
String contents) {
|
||||
final Matcher matcher = AC_INIT_PATTERN.matcher(contents);
|
||||
if (matcher.find()) {
|
||||
final EvidenceCollection productEvidence = dependency
|
||||
.getProductEvidence();
|
||||
productEvidence.addEvidence(name, "Package", matcher.group(1),
|
||||
Confidence.HIGHEST);
|
||||
dependency.getVersionEvidence().addEvidence(name,
|
||||
"Package Version", matcher.group(2), Confidence.HIGHEST);
|
||||
final EvidenceCollection vendorEvidence = dependency
|
||||
.getVendorEvidence();
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, name, "Package", matcher.group(1), Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.VERSION, name, "Package Version", matcher.group(2), Confidence.HIGHEST);
|
||||
|
||||
if (null != matcher.group(3)) {
|
||||
vendorEvidence.addEvidence(name, "Bug report address",
|
||||
matcher.group(4), Confidence.HIGH);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, name, "Bug report address", matcher.group(4), Confidence.HIGH);
|
||||
}
|
||||
if (null != matcher.group(5)) {
|
||||
productEvidence.addEvidence(name, "Tarname", matcher.group(6),
|
||||
Confidence.HIGH);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, name, "Tarname", matcher.group(6), Confidence.HIGH);
|
||||
}
|
||||
if (null != matcher.group(7)) {
|
||||
final String url = matcher.group(8);
|
||||
if (UrlStringUtils.isUrl(url)) {
|
||||
vendorEvidence.addEvidence(name, "URL", url,
|
||||
Confidence.HIGH);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, name, "URL", url, Confidence.HIGH);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -268,11 +255,12 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Initializes the file type analyzer.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException thrown if there is an exception during
|
||||
* initialization
|
||||
*/
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
// No initialization needed.
|
||||
}
|
||||
}
|
||||
@@ -32,12 +32,10 @@ import org.slf4j.LoggerFactory;
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
@@ -57,6 +55,12 @@ import org.owasp.dependencycheck.exception.InitializationException;
|
||||
@Experimental
|
||||
public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* A descriptor for the type of dependencies processed or added by this
|
||||
* analyzer.
|
||||
*/
|
||||
public static final String DEPENDENCY_ECOSYSTEM = "CMAKE";
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
@@ -65,8 +69,7 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Used when compiling file scanning regex patterns.
|
||||
*/
|
||||
private static final int REGEX_OPTIONS = Pattern.DOTALL
|
||||
| Pattern.CASE_INSENSITIVE | Pattern.MULTILINE;
|
||||
private static final int REGEX_OPTIONS = Pattern.DOTALL | Pattern.CASE_INSENSITIVE | Pattern.MULTILINE;
|
||||
|
||||
/**
|
||||
* Regex to extract the product information.
|
||||
@@ -81,10 +84,8 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*
|
||||
* Group 2: Version
|
||||
*/
|
||||
private static final Pattern SET_VERSION = Pattern
|
||||
.compile(
|
||||
"^ *set\\s*\\(\\s*(\\w+)_version\\s+\"?(\\d+(?:\\.\\d+)+)[\\s\"]?\\)",
|
||||
REGEX_OPTIONS);
|
||||
private static final Pattern SET_VERSION = Pattern.compile(
|
||||
"^ *set\\s*\\(\\s*(\\w+)_version\\s+\"?(\\d+(?:\\.\\d+)+)[\\s\"]?\\)", REGEX_OPTIONS);
|
||||
|
||||
/**
|
||||
* Detects files that can be analyzed.
|
||||
@@ -125,17 +126,13 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Initializes the analyzer.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException thrown if an exception occurs getting an
|
||||
* instance of SHA1
|
||||
*/
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
getSha1MessageDigest();
|
||||
} catch (IllegalStateException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
|
||||
}
|
||||
protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
//do nothing
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -147,12 +144,10 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* analyzing the dependency
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
dependency.setEcosystem(DEPENDENCY_ECOSYSTEM);
|
||||
final File file = dependency.getActualFile();
|
||||
final String parentName = file.getParentFile().getName();
|
||||
final String name = file.getName();
|
||||
dependency.setDisplayFileName(String.format("%s%c%s", parentName, File.separatorChar, name));
|
||||
String contents;
|
||||
try {
|
||||
contents = FileUtils.readFileToString(file, Charset.defaultCharset()).trim();
|
||||
@@ -160,7 +155,6 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
throw new AnalysisException(
|
||||
"Problem occurred while reading dependency file.", e);
|
||||
}
|
||||
|
||||
if (StringUtils.isNotBlank(contents)) {
|
||||
final Matcher m = PROJECT.matcher(contents);
|
||||
int count = 0;
|
||||
@@ -170,9 +164,10 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
"Found project command match with %d groups: %s",
|
||||
m.groupCount(), m.group(0)));
|
||||
final String group = m.group(1);
|
||||
LOGGER.debug("Group 1: " + group);
|
||||
dependency.getProductEvidence().addEvidence(name, "Project",
|
||||
group, Confidence.HIGH);
|
||||
LOGGER.debug("Group 1: {}", group);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, name, "Project", group, Confidence.HIGH);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, name, "Project", group, Confidence.HIGH);
|
||||
dependency.setName(group);
|
||||
}
|
||||
LOGGER.debug("Found {} matches.", count);
|
||||
analyzeSetVersionCommand(dependency, engine, contents);
|
||||
@@ -188,9 +183,6 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @param engine the dependency-check engine
|
||||
* @param contents the version information
|
||||
*/
|
||||
@edu.umd.cs.findbugs.annotations.SuppressFBWarnings(
|
||||
value = "DM_DEFAULT_ENCODING",
|
||||
justification = "Default encoding is only used if UTF-8 is not available")
|
||||
private void analyzeSetVersionCommand(Dependency dependency, Engine engine, String contents) {
|
||||
Dependency currentDep = dependency;
|
||||
|
||||
@@ -202,8 +194,8 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
m.groupCount(), m.group(0));
|
||||
String product = m.group(1);
|
||||
final String version = m.group(2);
|
||||
LOGGER.debug("Group 1: " + product);
|
||||
LOGGER.debug("Group 2: " + version);
|
||||
LOGGER.debug("Group 1: {}", product);
|
||||
LOGGER.debug("Group 2: {}", version);
|
||||
final String aliasPrefix = "ALIASOF_";
|
||||
if (product.startsWith(aliasPrefix)) {
|
||||
product = product.replaceFirst(aliasPrefix, "");
|
||||
@@ -211,45 +203,26 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (count > 1) {
|
||||
//TODO - refactor so we do not assign to the parameter (checkstyle)
|
||||
currentDep = new Dependency(dependency.getActualFile());
|
||||
currentDep.setDisplayFileName(String.format("%s:%s", dependency.getDisplayFileName(), product));
|
||||
currentDep.setEcosystem(DEPENDENCY_ECOSYSTEM);
|
||||
final String filePath = String.format("%s:%s", dependency.getFilePath(), product);
|
||||
currentDep.setFilePath(filePath);
|
||||
|
||||
byte[] path;
|
||||
try {
|
||||
path = filePath.getBytes("UTF-8");
|
||||
} catch (UnsupportedEncodingException ex) {
|
||||
path = filePath.getBytes();
|
||||
}
|
||||
final MessageDigest sha1 = getSha1MessageDigest();
|
||||
currentDep.setSha1sum(Checksum.getHex(sha1.digest(path)));
|
||||
engine.getDependencies().add(currentDep);
|
||||
currentDep.setSha1sum(Checksum.getSHA1Checksum(filePath));
|
||||
currentDep.setMd5sum(Checksum.getMD5Checksum(filePath));
|
||||
engine.addDependency(currentDep);
|
||||
}
|
||||
final String source = currentDep.getDisplayFileName();
|
||||
currentDep.getProductEvidence().addEvidence(source, "Product",
|
||||
product, Confidence.MEDIUM);
|
||||
currentDep.getVersionEvidence().addEvidence(source, "Version",
|
||||
version, Confidence.MEDIUM);
|
||||
final String source = currentDep.getFileName();
|
||||
currentDep.addEvidence(EvidenceType.PRODUCT, source, "Product", product, Confidence.MEDIUM);
|
||||
currentDep.addEvidence(EvidenceType.VENDOR, source, "Vendor", product, Confidence.MEDIUM);
|
||||
currentDep.addEvidence(EvidenceType.VERSION, source, "Version", version, Confidence.MEDIUM);
|
||||
currentDep.setName(product);
|
||||
currentDep.setVersion(version);
|
||||
}
|
||||
LOGGER.debug(String.format("Found %d matches.", count));
|
||||
LOGGER.debug("Found {} matches.", count);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_CMAKE_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the sha1 message digest.
|
||||
*
|
||||
* @return the sha1 message digest
|
||||
*/
|
||||
private MessageDigest getSha1MessageDigest() {
|
||||
try {
|
||||
return MessageDigest.getInstance("SHA1");
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
LOGGER.error(e.getMessage());
|
||||
throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
|
||||
}
|
||||
}
|
||||
}
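To show what the SET_VERSION pattern and the ALIASOF_ stripping above actually match, here is a self-contained run of the same regex over an invented CMakeLists.txt fragment.

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class CMakeVersionSketch {
    public static void main(String[] args) {
        int flags = Pattern.DOTALL | Pattern.CASE_INSENSITIVE | Pattern.MULTILINE;
        // Same pattern as SET_VERSION above.
        Pattern setVersion = Pattern.compile(
                "^ *set\\s*\\(\\s*(\\w+)_version\\s+\"?(\\d+(?:\\.\\d+)+)[\\s\"]?\\)", flags);
        // Illustrative CMakeLists.txt content.
        String contents = "set(OPENCV_VERSION \"3.4.1\")\nset(ALIASOF_libfoo_VERSION \"1.2\")\n";
        Matcher m = setVersion.matcher(contents);
        while (m.find()) {
            String product = m.group(1);
            if (product.startsWith("ALIASOF_")) {
                product = product.replaceFirst("ALIASOF_", "");   // same alias stripping as above
            }
            System.out.println(product + " -> " + m.group(2));    // OPENCV -> 3.4.1, libfoo -> 1.2
        }
    }
}
```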
|
||||
@@ -21,12 +21,15 @@ import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.StringTokenizer;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.apache.commons.lang3.builder.CompareToBuilder;
|
||||
import org.apache.lucene.analysis.util.CharArraySet;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.index.CorruptIndexException;
|
||||
import org.apache.lucene.queryparser.classic.ParseException;
|
||||
@@ -39,12 +42,13 @@ import org.owasp.dependencycheck.data.cpe.Fields;
|
||||
import org.owasp.dependencycheck.data.cpe.IndexEntry;
|
||||
import org.owasp.dependencycheck.data.cpe.IndexException;
|
||||
import org.owasp.dependencycheck.data.lucene.LuceneUtils;
|
||||
import org.owasp.dependencycheck.data.lucene.SearchFieldAnalyzer;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Evidence;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
@@ -61,6 +65,7 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -77,9 +82,11 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
private static final String WEIGHTING_BOOST = "^5";
|
||||
/**
|
||||
* A string representation of a regular expression defining characters
|
||||
* utilized within the CPE Names.
|
||||
* utilized within the CPE Names. Note, the :/ are included so URLs are
|
||||
* passed into the Lucene query so that the specialized tokenizer can parse
|
||||
* them.
|
||||
*/
|
||||
private static final String CLEANSE_CHARACTER_RX = "[^A-Za-z0-9 ._-]";
|
||||
private static final String CLEANSE_CHARACTER_RX = "[^A-Za-z0-9 ._:/-]";
|
||||
/**
|
||||
* A string representation of a regular expression used to remove all but
|
||||
* alpha characters.
|
||||
@@ -90,6 +97,10 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
* data that will be written into the string.
|
||||
*/
|
||||
private static final int STRING_BUILDER_BUFFER = 20;
|
||||
/**
|
||||
* The URL to perform a search of the NVD CVE data at NIST.
|
||||
*/
|
||||
public static final String NVD_SEARCH_URL = "https://web.nvd.nist.gov/view/vuln/search-results?adv_search=true&cves=on&cpe_version=%s";
|
||||
/**
|
||||
* The CPE in memory index.
|
||||
*/
|
||||
@@ -98,11 +109,12 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
* The CVE Database.
|
||||
*/
|
||||
private CveDB cve;
|
||||
|
||||
/**
|
||||
* The URL to perform a search of the NVD CVE data at NIST.
|
||||
* The list of ecosystems to skip during analysis. These are skipped because
|
||||
* there is generally a more accurate vulnerability analyzer in the
|
||||
* pipeline.
|
||||
*/
|
||||
public static final String NVD_SEARCH_URL = "https://web.nvd.nist.gov/view/vuln/search-results?adv_search=true&cves=on&cpe_version=%s";
|
||||
private List<String> skipEcosystems;
|
||||
|
||||
/**
|
||||
* Returns the name of this analyzer.
|
||||
@@ -124,26 +136,18 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
return AnalysisPhase.IDENTIFIER_ANALYSIS;
|
||||
}
|
||||
|
||||
/**
|
||||
* The default is to support parallel processing.
|
||||
*
|
||||
* @return false
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the CPE Lucene Index.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException is thrown if there is an issue opening
|
||||
* the index.
|
||||
*/
|
||||
@Override
|
||||
public void initializeAnalyzer() throws InitializationException {
|
||||
public void prepareAnalyzer(Engine engine) throws InitializationException {
|
||||
super.prepareAnalyzer(engine);
|
||||
try {
|
||||
this.open();
|
||||
this.open(engine.getDatabase());
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Exception initializing the Lucene Index", ex);
|
||||
throw new InitializationException("An exception occurred initializing the Lucene Index", ex);
|
||||
@@ -151,29 +155,35 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
LOGGER.debug("Exception accessing the database", ex);
|
||||
throw new InitializationException("An exception occurred accessing the database", ex);
|
||||
}
|
||||
final String[] tmp = engine.getSettings().getArray(Settings.KEYS.ECOSYSTEM_SKIP_CPEANALYZER);
|
||||
if (tmp == null) {
|
||||
skipEcosystems = new ArrayList<>();
|
||||
} else {
|
||||
LOGGER.info("Skipping CPE Analysis for {}", tmp);
|
||||
skipEcosystems = Arrays.asList(tmp);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the data source.
|
||||
*
|
||||
* @param cve a reference to the NVD CVE database
|
||||
* @throws IOException when the Lucene directory to be queried does not
|
||||
* exist or is corrupt.
|
||||
* @throws DatabaseException when the database throws an exception. This
|
||||
* usually occurs when the database is in use by another process.
|
||||
*/
|
||||
public void open() throws IOException, DatabaseException {
|
||||
if (!isOpen()) {
|
||||
cve = CveDB.getInstance();
|
||||
cpe = CpeMemoryIndex.getInstance();
|
||||
try {
|
||||
final long creationStart = System.currentTimeMillis();
|
||||
cpe.open(cve);
|
||||
final long creationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - creationStart);
|
||||
LOGGER.info("Created CPE Index ({} seconds)", creationSeconds);
|
||||
} catch (IndexException ex) {
|
||||
LOGGER.debug("IndexException", ex);
|
||||
throw new DatabaseException(ex);
|
||||
}
|
||||
public void open(CveDB cve) throws IOException, DatabaseException {
|
||||
this.cve = cve;
|
||||
this.cpe = CpeMemoryIndex.getInstance();
|
||||
try {
|
||||
final long creationStart = System.currentTimeMillis();
|
||||
cpe.open(cve);
|
||||
final long creationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - creationStart);
|
||||
LOGGER.info("Created CPE Index ({} seconds)", creationSeconds);
|
||||
} catch (IndexException ex) {
|
||||
LOGGER.debug("IndexException", ex);
|
||||
throw new DatabaseException(ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -182,25 +192,12 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
*/
|
||||
@Override
|
||||
public void closeAnalyzer() {
|
||||
if (cve != null) {
|
||||
cve.close();
|
||||
cve = null;
|
||||
}
|
||||
if (cpe != null) {
|
||||
cpe.close();
|
||||
cpe = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether or not the analyzer is open.
|
||||
*
|
||||
* @return <code>true</code> if the analyzer is open
|
||||
*/
|
||||
public boolean isOpen() {
|
||||
return cpe != null && cpe.isOpen();
|
||||
}
|
||||
|
||||
/**
|
||||
* Searches the data store of CPE entries, trying to identify the CPE for
|
||||
* the given dependency based on the evidence contained within. The
|
||||
@@ -212,21 +209,20 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
* @throws ParseException is thrown when the Lucene query cannot be parsed.
|
||||
*/
|
||||
protected void determineCPE(Dependency dependency) throws CorruptIndexException, IOException, ParseException {
|
||||
//TODO test dojo-war against this. we should get dojo-toolkit:dojo-toolkit AND dojo-toolkit:toolkit
|
||||
String vendors = "";
|
||||
String products = "";
|
||||
for (Confidence confidence : Confidence.values()) {
|
||||
if (dependency.getVendorEvidence().contains(confidence)) {
|
||||
vendors = addEvidenceWithoutDuplicateTerms(vendors, dependency.getVendorEvidence(), confidence);
|
||||
if (dependency.contains(EvidenceType.VENDOR, confidence)) {
|
||||
vendors = addEvidenceWithoutDuplicateTerms(vendors, dependency.getIterator(EvidenceType.VENDOR, confidence));
|
||||
LOGGER.debug("vendor search: {}", vendors);
|
||||
}
|
||||
if (dependency.getProductEvidence().contains(confidence)) {
|
||||
products = addEvidenceWithoutDuplicateTerms(products, dependency.getProductEvidence(), confidence);
|
||||
if (dependency.contains(EvidenceType.PRODUCT, confidence)) {
|
||||
products = addEvidenceWithoutDuplicateTerms(products, dependency.getIterator(EvidenceType.PRODUCT, confidence));
|
||||
LOGGER.debug("product search: {}", products);
|
||||
}
|
||||
if (!vendors.isEmpty() && !products.isEmpty()) {
|
||||
final List<IndexEntry> entries = searchCPE(vendors, products, dependency.getVendorEvidence().getWeighting(),
|
||||
dependency.getProductEvidence().getWeighting());
|
||||
final List<IndexEntry> entries = searchCPE(vendors, products, dependency.getVendorWeightings(),
|
||||
dependency.getProductWeightings());
|
||||
if (entries == null) {
|
||||
continue;
|
||||
}
|
||||
@@ -248,30 +244,31 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the text created by concatenating the text and the values from
|
||||
* the EvidenceCollection (filtered for a specific confidence). This
|
||||
* attempts to prevent duplicate terms from being added.<br/<br/> Note, if
|
||||
* the evidence is longer then 200 characters it will be truncated.
|
||||
* attempts to prevent duplicate terms from being added.</p>
|
||||
* <p>
|
||||
* Note, if the evidence is longer then 200 characters it will be
|
||||
* truncated.</p>
|
||||
*
|
||||
* @param text the base text.
|
||||
* @param ec an EvidenceCollection
|
||||
* @param confidenceFilter a Confidence level to filter the evidence by.
|
||||
* @param text the base text
|
||||
* @param evidence an iterable set of evidence to concatenate
|
||||
* @return the new evidence text
|
||||
*/
|
||||
private String addEvidenceWithoutDuplicateTerms(final String text, final EvidenceCollection ec, Confidence confidenceFilter) {
|
||||
@SuppressWarnings("null")
|
||||
protected String addEvidenceWithoutDuplicateTerms(final String text, final Iterable<Evidence> evidence) {
|
||||
final String txt = (text == null) ? "" : text;
|
||||
final StringBuilder sb = new StringBuilder(txt.length() + (20 * ec.size()));
|
||||
final StringBuilder sb = new StringBuilder(txt.length() * 2);
|
||||
sb.append(' ').append(txt).append(' ');
|
||||
for (Evidence e : ec.iterator(confidenceFilter)) {
|
||||
for (Evidence e : evidence) {
|
||||
String value = e.getValue();
|
||||
|
||||
//hack to get around the fact that lucene does a really good job of recognizing domains and not
|
||||
// splitting them. TODO - put together a better lucene analyzer specific to the domain.
|
||||
if (value.startsWith("http://")) {
|
||||
value = value.substring(7).replaceAll("\\.", " ");
|
||||
}
|
||||
if (value.startsWith("https://")) {
|
||||
value = value.substring(8).replaceAll("\\.", " ");
|
||||
if (value.length() > 1000) {
|
||||
value = value.substring(0, 1000);
|
||||
final int pos = value.lastIndexOf(" ");
|
||||
if (pos > 0) {
|
||||
value = value.substring(0, pos);
|
||||
}
|
||||
}
|
||||
if (sb.indexOf(" " + value + " ") < 0) {
|
||||
sb.append(value).append(' ');
|
||||
@@ -381,7 +378,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
* @return if the append was successful.
|
||||
*/
|
||||
private boolean appendWeightedSearch(StringBuilder sb, String field, String searchText, Set<String> weightedText) {
|
||||
sb.append(' ').append(field).append(":( ");
|
||||
sb.append(field).append(":(");
|
||||
|
||||
final String cleanText = cleanseText(searchText);
|
||||
|
||||
@@ -392,6 +389,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
if (weightedText == null || weightedText.isEmpty()) {
|
||||
LuceneUtils.appendEscapedLuceneQuery(sb, cleanText);
|
||||
} else {
|
||||
boolean addSpace = false;
|
||||
final StringTokenizer tokens = new StringTokenizer(cleanText);
|
||||
while (tokens.hasMoreElements()) {
|
||||
final String word = tokens.nextToken();
|
||||
@@ -403,14 +401,20 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
LuceneUtils.appendEscapedLuceneQuery(temp, word);
|
||||
temp.append(WEIGHTING_BOOST);
|
||||
if (!word.equalsIgnoreCase(weightedStr)) {
|
||||
temp.append(' ');
|
||||
if (temp.length() > 0) {
|
||||
temp.append(' ');
|
||||
}
|
||||
LuceneUtils.appendEscapedLuceneQuery(temp, weightedStr);
|
||||
temp.append(WEIGHTING_BOOST);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
sb.append(' ');
|
||||
if (addSpace) {
|
||||
sb.append(' ');
|
||||
} else {
|
||||
addSpace = true;
|
||||
}
|
||||
if (temp == null) {
|
||||
LuceneUtils.appendEscapedLuceneQuery(sb, word);
|
||||
} else {
|
||||
@@ -418,7 +422,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
}
|
||||
}
|
||||
sb.append(" ) ");
|
||||
sb.append(")");
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -465,8 +469,8 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
//TODO - does this nullify some of the fuzzy matching that happens in the lucene search?
|
||||
// for instance CPE some-component and in the evidence we have SomeComponent.
|
||||
if (collectionContainsString(dependency.getProductEvidence(), entry.getProduct())
|
||||
&& collectionContainsString(dependency.getVendorEvidence(), entry.getVendor())) {
|
||||
if (collectionContainsString(dependency.getEvidence(EvidenceType.PRODUCT), entry.getProduct())
|
||||
&& collectionContainsString(dependency.getEvidence(EvidenceType.VENDOR), entry.getVendor())) {
|
||||
//&& collectionContainsVersion(dependency.getVersionEvidence(), entry.getVersion())
|
||||
isValid = true;
|
||||
}
|
||||
@@ -476,11 +480,11 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
/**
|
||||
* Used to determine if the EvidenceCollection contains a specific string.
|
||||
*
|
||||
* @param ec an EvidenceCollection
|
||||
* @param evidence an of evidence object to check
|
||||
* @param text the text to search for
|
||||
* @return whether or not the EvidenceCollection contains the string
|
||||
*/
|
||||
private boolean collectionContainsString(EvidenceCollection ec, String text) {
|
||||
private boolean collectionContainsString(Set<Evidence> evidence, String text) {
|
||||
//TODO - likely need to change the split... not sure if this will work for CPE with special chars
|
||||
if (text == null) {
|
||||
return false;
|
||||
@@ -488,7 +492,11 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
final String[] words = text.split("[\\s_-]");
|
||||
final List<String> list = new ArrayList<>();
|
||||
String tempWord = null;
|
||||
final CharArraySet stopWords = SearchFieldAnalyzer.getStopWords();
|
||||
for (String word : words) {
|
||||
if (stopWords.contains(word)) {
|
||||
continue;
|
||||
}
|
||||
/*
|
||||
single letter words should be concatenated with the next word.
|
||||
so { "m", "core", "sample" } -> { "mcore", "sample" }
|
||||
@@ -513,11 +521,24 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
if (list.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
boolean contains = true;
|
||||
boolean isValid = true;
|
||||
for (String word : list) {
|
||||
contains &= ec.containsUsedString(word);
|
||||
boolean found = false;
|
||||
for (Evidence e : evidence) {
|
||||
if (e.getValue().toLowerCase().contains(word.toLowerCase())) {
|
||||
if ("http".equals(word) && e.getValue().contains("http:")) {
|
||||
continue;
|
||||
}
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
isValid &= found;
|
||||
if (!isValid) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return contains;
|
||||
return isValid;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -530,7 +551,10 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
* dependency.
|
||||
*/
|
||||
@Override
|
||||
protected synchronized void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
if (skipEcosystems.contains(dependency.getEcosystem())) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
determineCPE(dependency);
|
||||
} catch (CorruptIndexException ex) {
|
||||
@@ -561,6 +585,9 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
protected boolean determineIdentifiers(Dependency dependency, String vendor, String product,
|
||||
Confidence currentConfidence) throws UnsupportedEncodingException {
|
||||
final Set<VulnerableSoftware> cpes = cve.getCPEs(vendor, product);
|
||||
if (cpes.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
DependencyVersion bestGuess = new DependencyVersion("-");
|
||||
Confidence bestGuessConf = null;
|
||||
boolean hasBroadMatch = false;
|
||||
@@ -570,7 +597,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
// if there lower confidence evidence when the current (highest) version number
|
||||
// is newer then anything in the NVD.
|
||||
for (Confidence conf : Confidence.values()) {
|
||||
for (Evidence evidence : dependency.getVersionEvidence().iterator(conf)) {
|
||||
for (Evidence evidence : dependency.getIterator(EvidenceType.VERSION, conf)) {
|
||||
final DependencyVersion evVer = DependencyVersionUtil.parseVersion(evidence.getValue());
|
||||
if (evVer == null) {
|
||||
continue;
|
||||
@@ -36,6 +36,8 @@ import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.List;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.DownloadFailedException;
|
||||
import org.owasp.dependencycheck.utils.Downloader;
|
||||
@@ -49,6 +51,7 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -72,28 +75,26 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
private static final String SUPPORTED_EXTENSIONS = "jar";
|
||||
|
||||
/**
|
||||
* The analyzer should be disabled if there are errors, so this is a flag to
|
||||
* determine if such an error has occurred.
|
||||
* There may be temporary issues when connecting to MavenCentral. In order
|
||||
* to compensate for 99% of the issues, we perform a retry before finally
|
||||
* failing the analysis.
|
||||
*/
|
||||
private volatile boolean errorFlag = false;
|
||||
private static final int NUMBER_OF_TRIES = 5;
|
||||
|
||||
/**
|
||||
* The searcher itself.
|
||||
*/
|
||||
private CentralSearch searcher;
|
||||
/**
|
||||
* Field indicating if the analyzer is enabled.
|
||||
*/
|
||||
private final boolean enabled = checkEnabled();
|
||||
|
||||
/**
|
||||
* Determine whether to enable this analyzer or not.
|
||||
* Initializes the analyzer with the configured settings.
|
||||
*
|
||||
* @return whether the analyzer should be enabled
|
||||
* @param settings the configured settings to use
|
||||
*/
|
||||
@Override
|
||||
public boolean isEnabled() {
|
||||
return enabled;
|
||||
public void initialize(Settings settings) {
|
||||
super.initialize(settings);
|
||||
setEnabled(checkEnabled());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -106,9 +107,9 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
boolean retVal = false;
|
||||
|
||||
try {
|
||||
if (Settings.getBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED)) {
|
||||
if (!Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)
|
||||
|| NexusAnalyzer.DEFAULT_URL.equals(Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL))) {
|
||||
if (getSettings().getBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED)) {
|
||||
if (!getSettings().getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)
|
||||
|| NexusAnalyzer.DEFAULT_URL.equals(getSettings().getString(Settings.KEYS.ANALYZER_NEXUS_URL))) {
|
||||
LOGGER.debug("Enabling the Central analyzer");
|
||||
retVal = true;
|
||||
} else {
|
||||
@@ -126,20 +127,19 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Initializes the analyzer once before any analysis is performed.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException if there's an error during initialization
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
LOGGER.debug("Initializing Central analyzer");
|
||||
LOGGER.debug("Central analyzer enabled: {}", isEnabled());
|
||||
if (isEnabled()) {
|
||||
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_CENTRAL_URL);
|
||||
LOGGER.debug("Central Analyzer URL: {}", searchUrl);
|
||||
try {
|
||||
searcher = new CentralSearch(new URL(searchUrl));
|
||||
searcher = new CentralSearch(getSettings());
|
||||
} catch (MalformedURLException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("The configured URL to Maven Central is malformed: " + searchUrl, ex);
|
||||
throw new InitializationException("The configured URL to Maven Central is malformed", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -194,18 +194,14 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
@Override
|
||||
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
if (errorFlag || !isEnabled()) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
final List<MavenArtifact> mas = searcher.searchSha1(dependency.getSha1sum());
|
||||
final List<MavenArtifact> mas = fetchMavenArtifacts(dependency);
|
||||
final Confidence confidence = mas.size() > 1 ? Confidence.HIGH : Confidence.HIGHEST;
|
||||
for (MavenArtifact ma : mas) {
|
||||
LOGGER.debug("Central analyzer found artifact ({}) for dependency ({})", ma, dependency.getFileName());
|
||||
dependency.addAsEvidence("central", ma, confidence);
|
||||
boolean pomAnalyzed = false;
|
||||
for (Evidence e : dependency.getVendorEvidence()) {
|
||||
for (Evidence e : dependency.getEvidence(EvidenceType.VENDOR)) {
|
||||
if ("pom".equals(e.getSource())) {
|
||||
pomAnalyzed = true;
|
||||
break;
|
||||
@@ -214,7 +210,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (!pomAnalyzed && ma.getPomUrl() != null) {
|
||||
File pomFile = null;
|
||||
try {
|
||||
final File baseDir = Settings.getTempDirectory();
|
||||
final File baseDir = getSettings().getTempDirectory();
|
||||
pomFile = File.createTempFile("pom", ".xml", baseDir);
|
||||
if (!pomFile.delete()) {
|
||||
LOGGER.warn("Unable to fetch pom.xml for {} from Central; "
|
||||
@@ -222,7 +218,8 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.debug("Unable to delete temp file");
|
||||
}
|
||||
LOGGER.debug("Downloading {}", ma.getPomUrl());
|
||||
Downloader.fetchFile(new URL(ma.getPomUrl()), pomFile);
|
||||
final Downloader downloader = new Downloader(getSettings());
|
||||
downloader.fetchFile(new URL(ma.getPomUrl()), pomFile);
|
||||
PomUtils.analyzePOM(dependency, pomFile);
|
||||
|
||||
} catch (DownloadFailedException ex) {
|
||||
@@ -242,8 +239,61 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
} catch (FileNotFoundException fnfe) {
|
||||
LOGGER.debug("Artifact not found in repository: '{}", dependency.getFileName());
|
||||
} catch (IOException ioe) {
|
||||
LOGGER.debug("Could not connect to Central search", ioe);
|
||||
errorFlag = true;
|
||||
final String message = "Could not connect to Central search. Analysis failed.";
|
||||
LOGGER.error(message, ioe);
|
||||
throw new AnalysisException(message, ioe);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads the corresponding list of MavenArtifacts of the given
|
||||
* dependency from MavenCentral.
|
||||
* <p>
|
||||
* As the connection to MavenCentral is known to be unreliable, we implement
|
||||
* a simple retry logic in order to compensate for 99% of the issues.
|
||||
*
|
||||
* @param dependency the dependency to analyze
|
||||
* @return the downloaded list of MavenArtifacts
|
||||
* @throws FileNotFoundException if the specified artifact is not found
|
||||
* @throws IOException if connecting to MavenCentral finally failed
|
||||
*/
|
||||
protected List<MavenArtifact> fetchMavenArtifacts(Dependency dependency) throws IOException {
|
||||
IOException lastException = null;
|
||||
long sleepingTimeBetweenRetriesInMillis = 1000;
|
||||
int triesLeft = NUMBER_OF_TRIES;
|
||||
while (triesLeft-- > 0) {
|
||||
try {
|
||||
return searcher.searchSha1(dependency.getSha1sum());
|
||||
} catch (FileNotFoundException fnfe) {
|
||||
// retry does not make sense, just throw the exception
|
||||
throw fnfe;
|
||||
} catch (IOException ioe) {
|
||||
LOGGER.debug("Could not connect to Central search (tries left: {}): {}",
|
||||
triesLeft, ioe.getMessage());
|
||||
lastException = ioe;
|
||||
|
||||
if (triesLeft > 0) {
|
||||
try {
|
||||
Thread.sleep(sleepingTimeBetweenRetriesInMillis);
|
||||
sleepingTimeBetweenRetriesInMillis *= 2;
|
||||
} catch (InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final String message = "Finally failed connecting to Central search."
|
||||
+ " Giving up after " + NUMBER_OF_TRIES + " tries.";
|
||||
throw new IOException(message, lastException);
|
||||
}
|
||||
|
||||
/**
|
||||
* Method used by unit tests to setup the analyzer.
|
||||
*
|
||||
* @param searcher the Central Search object to use.
|
||||
*/
|
||||
protected void setCentralSearch(CentralSearch searcher) {
|
||||
this.searcher = searcher;
|
||||
}
|
||||
}
|
||||
@@ -23,13 +23,14 @@ import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
@@ -41,8 +42,15 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
* @author Bianca Jiang (https://twitter.com/biancajiang)
|
||||
*/
|
||||
@Experimental
|
||||
@ThreadSafe
|
||||
public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* A descriptor for the type of dependencies processed or added by this
|
||||
* analyzer.
|
||||
*/
|
||||
public static final String DEPENDENCY_ECOSYSTEM = "CocoaPod";
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
@@ -83,7 +91,7 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() {
|
||||
protected void prepareFileTypeAnalyzer(Engine engine) {
|
||||
// NO-OP
|
||||
}
|
||||
|
||||
@@ -122,6 +130,7 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
|
||||
dependency.setEcosystem(DEPENDENCY_ECOSYSTEM);
|
||||
String contents;
|
||||
try {
|
||||
contents = FileUtils.readFileToString(dependency.getActualFile(), Charset.defaultCharset());
|
||||
@@ -134,21 +143,35 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
contents = contents.substring(matcher.end());
|
||||
final String blockVariable = matcher.group(1);
|
||||
|
||||
final EvidenceCollection vendor = dependency.getVendorEvidence();
|
||||
final EvidenceCollection product = dependency.getProductEvidence();
|
||||
final EvidenceCollection version = dependency.getVersionEvidence();
|
||||
|
||||
final String name = addStringEvidence(product, contents, blockVariable, "name", "name", Confidence.HIGHEST);
|
||||
final String name = determineEvidence(contents, blockVariable, "name");
|
||||
if (!name.isEmpty()) {
|
||||
vendor.addEvidence(PODSPEC, "name_project", name, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, PODSPEC, "name_project", name, Confidence.HIGHEST);
|
||||
dependency.addEvidence(EvidenceType.VENDOR, PODSPEC, "name_project", name, Confidence.HIGHEST);
|
||||
dependency.setName(name);
|
||||
}
|
||||
final String summary = determineEvidence(contents, blockVariable, "summary");
|
||||
if (!summary.isEmpty()) {
|
||||
dependency.addEvidence(EvidenceType.PRODUCT, PODSPEC, "summary", summary, Confidence.HIGHEST);
|
||||
}
|
||||
addStringEvidence(product, contents, blockVariable, "summary", "summary", Confidence.HIGHEST);
|
||||
|
||||
addStringEvidence(vendor, contents, blockVariable, "author", "authors?", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "homepage", "homepage", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "license", "licen[cs]es?", Confidence.HIGHEST);
|
||||
final String author = determineEvidence(contents, blockVariable, "authors?");
|
||||
if (!author.isEmpty()) {
|
||||
dependency.addEvidence(EvidenceType.VENDOR, PODSPEC, "author", author, Confidence.HIGHEST);
|
||||
}
|
||||
final String homepage = determineEvidence(contents, blockVariable, "homepage");
|
||||
if (!homepage.isEmpty()) {
|
||||
dependency.addEvidence(EvidenceType.VENDOR, PODSPEC, "homepage", homepage, Confidence.HIGHEST);
|
||||
}
|
||||
final String license = determineEvidence(contents, blockVariable, "licen[cs]es?");
|
||||
if (!license.isEmpty()) {
|
||||
dependency.setLicense(license);
|
||||
}
|
||||
|
||||
addStringEvidence(version, contents, blockVariable, "version", "version", Confidence.HIGHEST);
|
||||
final String version = determineEvidence(contents, blockVariable, "version");
|
||||
if (!version.isEmpty()) {
|
||||
dependency.addEvidence(EvidenceType.VERSION, PODSPEC, "version", version, Confidence.HIGHEST);
|
||||
dependency.setVersion(version);
|
||||
}
|
||||
}
|
||||
|
||||
setPackagePath(dependency);
|
||||
@@ -158,16 +181,12 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* Extracts evidence from the contents and adds it to the given evidence
|
||||
* collection.
|
||||
*
|
||||
* @param evidences the evidence collection to update
|
||||
* @param contents the text to extract evidence from
|
||||
* @param blockVariable the block variable within the content to search for
|
||||
* @param field the name of the field being searched for
|
||||
* @param fieldPattern the field pattern within the contents to search for
|
||||
* @param confidence the confidence level of the evidence if found
|
||||
* @return the string that was added as evidence
|
||||
* @return the evidence
|
||||
*/
|
||||
private String addStringEvidence(EvidenceCollection evidences, String contents,
|
||||
String blockVariable, String field, String fieldPattern, Confidence confidence) {
|
||||
private String determineEvidence(String contents, String blockVariable, String fieldPattern) {
|
||||
String value = "";
|
||||
|
||||
//capture array value between [ ]
|
||||
@@ -184,9 +203,6 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
value = matcher.group(2);
|
||||
}
|
||||
}
|
||||
if (value.length() > 0) {
|
||||
evidences.addEvidence(PODSPEC, field, value, confidence);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
@@ -34,9 +34,7 @@ import org.slf4j.LoggerFactory;
|
||||
import java.io.FileFilter;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
|
||||
/**
|
||||
* Used to analyze a composer.lock file for a composer PHP app.
|
||||
@@ -46,6 +44,12 @@ import java.security.NoSuchAlgorithmException;
|
||||
@Experimental
|
||||
public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* A descriptor for the type of dependencies processed or added by this
|
||||
* analyzer.
|
||||
*/
|
||||
public static final String DEPENDENCY_ECOSYSTEM = "Composer";
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
@@ -79,17 +83,13 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Initializes the analyzer.
|
||||
*
|
||||
* @param engine a reference to the dependency-check engine
|
||||
* @throws InitializationException thrown if an exception occurs getting an
|
||||
* instance of SHA1
|
||||
*/
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
getSha1MessageDigest();
|
||||
} catch (IllegalStateException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
|
||||
}
|
||||
protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -103,20 +103,33 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
try (FileInputStream fis = new FileInputStream(dependency.getActualFile())) {
|
||||
final ComposerLockParser clp = new ComposerLockParser(fis);
|
||||
LOGGER.info("Checking composer.lock file {}", dependency.getActualFilePath());
|
||||
LOGGER.debug("Checking composer.lock file {}", dependency.getActualFilePath());
|
||||
clp.process();
|
||||
//if dependencies are found in the lock, then there is always an empty shell dependency left behind for the
|
||||
//composer.lock. The first pass through, reuse the top level dependency, and add new ones for the rest.
|
||||
boolean processedAtLeastOneDep = false;
|
||||
for (ComposerDependency dep : clp.getDependencies()) {
|
||||
final Dependency d = new Dependency(dependency.getActualFile());
|
||||
d.setDisplayFileName(String.format("%s:%s/%s", dependency.getDisplayFileName(), dep.getGroup(), dep.getProject()));
|
||||
final String filePath = String.format("%s:%s/%s", dependency.getFilePath(), dep.getGroup(), dep.getProject());
|
||||
final MessageDigest sha1 = getSha1MessageDigest();
|
||||
final String filePath = String.format("%s:%s/%s/%s", dependency.getFilePath(), dep.getGroup(), dep.getProject(), dep.getVersion());
|
||||
d.setName(dep.getProject());
|
||||
d.setVersion(dep.getVersion());
|
||||
d.setEcosystem(DEPENDENCY_ECOSYSTEM);
|
||||
d.setFilePath(filePath);
|
||||
d.setSha1sum(Checksum.getHex(sha1.digest(filePath.getBytes(Charset.defaultCharset()))));
|
||||
d.getVendorEvidence().addEvidence(COMPOSER_LOCK, "vendor", dep.getGroup(), Confidence.HIGHEST);
|
||||
d.getProductEvidence().addEvidence(COMPOSER_LOCK, "product", dep.getProject(), Confidence.HIGHEST);
|
||||
d.getVersionEvidence().addEvidence(COMPOSER_LOCK, "version", dep.getVersion(), Confidence.HIGHEST);
|
||||
LOGGER.info("Adding dependency {}", d);
|
||||
engine.getDependencies().add(d);
|
||||
d.setSha1sum(Checksum.getSHA1Checksum(filePath));
|
||||
d.setMd5sum(Checksum.getMD5Checksum(filePath));
|
||||
d.addEvidence(EvidenceType.VENDOR, COMPOSER_LOCK, "vendor", dep.getGroup(), Confidence.HIGHEST);
|
||||
d.addEvidence(EvidenceType.PRODUCT, COMPOSER_LOCK, "product", dep.getProject(), Confidence.HIGHEST);
|
||||
d.addEvidence(EvidenceType.VERSION, COMPOSER_LOCK, "version", dep.getVersion(), Confidence.HIGHEST);
|
||||
LOGGER.debug("Adding dependency {}", d.getDisplayFileName());
|
||||
engine.addDependency(d);
|
||||
//make sure we only remove the main dependency if we went through this loop at least once.
|
||||
processedAtLeastOneDep = true;
|
||||
}
|
||||
// remove the dependency at the end because it's referenced in the loop itself.
|
||||
// double check the name to be sure we only remove the generic entry.
|
||||
if (processedAtLeastOneDep && dependency.getDisplayFileName().equalsIgnoreCase("composer.lock")) {
|
||||
LOGGER.debug("Removing main redundant dependency {}", dependency.getDisplayFileName());
|
||||
engine.removeDependency(dependency);
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
LOGGER.warn("Error opening dependency {}", dependency.getActualFilePath());
|
||||
@@ -154,18 +167,4 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return AnalysisPhase.INFORMATION_COLLECTION;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the sha1 message digest.
|
||||
*
|
||||
* @return the sha1 message digest
|
||||
*/
|
||||
private MessageDigest getSha1MessageDigest() {
|
||||
try {
|
||||
return MessageDigest.getInstance("SHA1");
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
LOGGER.error(e.getMessage());
|
||||
throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -17,21 +17,19 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.xml.suppression.SuppressionRule;
|
||||
|
||||
/**
|
||||
* The suppression analyzer processes an externally defined XML document that complies with the suppressions.xsd schema.
|
||||
* Any identified CPE entries within the dependencies that match will be removed.
|
||||
* The suppression analyzer processes an externally defined XML document that
|
||||
* complies with the suppressions.xsd schema. Any identified CPE entries within
|
||||
* the dependencies that match will be removed.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class CpeSuppressionAnalyzer extends AbstractSuppressionAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
@@ -60,19 +58,6 @@ public class CpeSuppressionAnalyzer extends AbstractSuppressionAnalyzer {
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
|
||||
if (getRules() == null || getRules().size() <= 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (final SuppressionRule rule : getRules()) {
|
||||
rule.process(dependency);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
@@ -17,15 +17,14 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import com.vdurmont.semver4j.Semver;
|
||||
import com.vdurmont.semver4j.Semver.SemverType;
|
||||
import com.vdurmont.semver4j.SemverException;
|
||||
import java.io.File;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.ListIterator;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersion;
|
||||
@@ -47,37 +46,19 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
@ThreadSafe
|
||||
public class DependencyBundlingAnalyzer extends AbstractDependencyComparingAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DependencyBundlingAnalyzer.class);
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Constants and Member Variables">
|
||||
/**
|
||||
* A pattern for obtaining the first part of a filename.
|
||||
*/
|
||||
private static final Pattern STARTING_TEXT_PATTERN = Pattern.compile("^[a-zA-Z0-9]*");
|
||||
|
||||
/**
|
||||
* a flag indicating if this analyzer has run. This analyzer only runs once.
|
||||
*/
|
||||
private boolean analyzed = false;
|
||||
|
||||
/**
|
||||
* Returns a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once. Note this is currently only used in the unit tests.
|
||||
*
|
||||
* @return a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once
|
||||
*/
|
||||
protected synchronized boolean getAnalyzed() {
|
||||
return analyzed;
|
||||
}
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
@@ -106,19 +87,6 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Does not support parallel processing as it only runs once and then
|
||||
* operates on <em>all</em> dependencies.
|
||||
*
|
||||
* @return whether or not parallel processing is enabled
|
||||
* @see #analyze(Dependency, Engine)
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
@@ -132,65 +100,56 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a set of dependencies. If they have been found to have the same
|
||||
* base path and the same set of identifiers they are likely related. The
|
||||
* related dependencies are bundled into a single reportable item.
|
||||
* Evaluates the dependencies
|
||||
*
|
||||
* @param ignore this analyzer ignores the dependency being analyzed
|
||||
* @param engine the engine that is scanning the dependencies
|
||||
* @throws AnalysisException is thrown if there is an error reading the JAR
|
||||
* file.
|
||||
* @param dependency a dependency to compare
|
||||
* @param nextDependency a dependency to compare
|
||||
* @param dependenciesToRemove a set of dependencies that will be removed
|
||||
* @return true if a dependency is removed; otherwise false
|
||||
*/
|
||||
@Override
|
||||
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
|
||||
if (!analyzed) {
|
||||
analyzed = true;
|
||||
final Set<Dependency> dependenciesToRemove = new HashSet<>();
|
||||
final ListIterator<Dependency> mainIterator = engine.getDependencies().listIterator();
|
||||
//for (Dependency nextDependency : engine.getDependencies()) {
|
||||
while (mainIterator.hasNext()) {
|
||||
final Dependency dependency = mainIterator.next();
|
||||
if (mainIterator.hasNext() && !dependenciesToRemove.contains(dependency)) {
|
||||
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
|
||||
while (subIterator.hasNext()) {
|
||||
final Dependency nextDependency = subIterator.next();
|
||||
if (hashesMatch(dependency, nextDependency)) {
|
||||
if (!containedInWar(dependency.getFilePath())
|
||||
&& !containedInWar(nextDependency.getFilePath())) {
|
||||
if (firstPathIsShortest(dependency.getFilePath(), nextDependency.getFilePath())) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
break; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
}
|
||||
} else if (isShadedJar(dependency, nextDependency)) {
|
||||
if (dependency.getFileName().toLowerCase().endsWith("pom.xml")) {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
nextDependency.getRelatedDependencies().remove(dependency);
|
||||
break;
|
||||
} else {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
dependency.getRelatedDependencies().remove(nextDependency);
|
||||
}
|
||||
} else if (cpeIdentifiersMatch(dependency, nextDependency)
|
||||
&& hasSameBasePath(dependency, nextDependency)
|
||||
&& vulnCountMatches(dependency, nextDependency)
|
||||
&& fileNameMatch(dependency, nextDependency)) {
|
||||
if (isCore(dependency, nextDependency)) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
break; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
}
|
||||
}
|
||||
protected boolean evaluateDependencies(final Dependency dependency, final Dependency nextDependency, final Set<Dependency> dependenciesToRemove) {
|
||||
if (hashesMatch(dependency, nextDependency)) {
|
||||
if (!containedInWar(dependency.getFilePath())
|
||||
&& !containedInWar(nextDependency.getFilePath())) {
|
||||
if (firstPathIsShortest(dependency.getFilePath(), nextDependency.getFilePath())) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
return true; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
}
|
||||
//removing dependencies here as ensuring correctness and avoiding ConcurrentUpdateExceptions
|
||||
// was difficult because of the inner iterator.
|
||||
engine.getDependencies().removeAll(dependenciesToRemove);
|
||||
} else if (isShadedJar(dependency, nextDependency)) {
|
||||
if (dependency.getFileName().toLowerCase().endsWith("pom.xml")) {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
nextDependency.removeRelatedDependencies(dependency);
|
||||
return true;
|
||||
} else {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
dependency.removeRelatedDependencies(nextDependency);
|
||||
}
|
||||
} else if (cpeIdentifiersMatch(dependency, nextDependency)
|
||||
&& hasSameBasePath(dependency, nextDependency)
|
||||
&& vulnCountMatches(dependency, nextDependency)
|
||||
&& fileNameMatch(dependency, nextDependency)) {
|
||||
if (isCore(dependency, nextDependency)) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
return true; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
} else if (ecoSystemIs(AbstractNpmAnalyzer.NPM_DEPENDENCY_ECOSYSTEM, dependency, nextDependency)
|
||||
&& namesAreEqual(dependency, nextDependency)
|
||||
&& npmVersionsMatch(dependency.getVersion(), nextDependency.getVersion())) {
|
||||
|
||||
if (!dependency.isVirtual()) {
|
||||
DependencyMergingAnalyzer.mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
DependencyMergingAnalyzer.mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -203,17 +162,19 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
* removed from the main analysis loop, this function adds to this
|
||||
* collection
|
||||
*/
|
||||
private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
|
||||
public static void mergeDependencies(final Dependency dependency,
|
||||
final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
|
||||
dependency.addRelatedDependency(relatedDependency);
|
||||
final Iterator<Dependency> i = relatedDependency.getRelatedDependencies().iterator();
|
||||
while (i.hasNext()) {
|
||||
dependency.addRelatedDependency(i.next());
|
||||
i.remove();
|
||||
for (Dependency d : relatedDependency.getRelatedDependencies()) {
|
||||
dependency.addRelatedDependency(d);
|
||||
relatedDependency.removeRelatedDependencies(d);
|
||||
}
|
||||
if (dependency.getSha1sum().equals(relatedDependency.getSha1sum())) {
|
||||
dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
|
||||
}
|
||||
dependenciesToRemove.add(relatedDependency);
|
||||
if (dependenciesToRemove != null) {
|
||||
dependenciesToRemove.add(relatedDependency);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -237,9 +198,10 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
if (tmp <= 0) {
|
||||
return path;
|
||||
}
|
||||
if (tmp > 0) {
|
||||
pos = tmp + 1;
|
||||
}
|
||||
//below is always true
|
||||
//if (tmp > 0) {
|
||||
pos = tmp + 1;
|
||||
//}
|
||||
tmp = path.indexOf(File.separator, pos);
|
||||
if (tmp > 0) {
|
||||
pos = tmp + 1;
|
||||
@@ -439,6 +401,10 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
* between the two collections match; otherwise false
|
||||
*/
|
||||
private boolean isShadedJar(Dependency dependency, Dependency nextDependency) {
|
||||
if (dependency == null || dependency.getFileName() == null
|
||||
|| nextDependency == null || nextDependency.getFileName() == null) {
|
||||
return false;
|
||||
}
|
||||
final String mainName = dependency.getFileName().toLowerCase();
|
||||
final String nextName = nextDependency.getFileName().toLowerCase();
|
||||
if (mainName.endsWith(".jar") && nextName.endsWith("pom.xml")) {
|
||||
@@ -502,4 +468,108 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
return filePath != null && filePath.matches(".*\\.(ear|war)[\\\\/].*");
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if the dependency ecosystem is equal in the given dependencies.
|
||||
*
|
||||
* @param ecoSystem the ecosystem to validate against
|
||||
* @param dependency a dependency to compare
|
||||
* @param nextDependency a dependency to compare
|
||||
* @return true if the ecosystem is equal in both dependencies; otherwise
|
||||
* false
|
||||
*/
|
||||
private boolean ecoSystemIs(String ecoSystem, Dependency dependency, Dependency nextDependency) {
|
||||
return ecoSystem.equals(dependency.getEcosystem()) && ecoSystem.equals(nextDependency.getEcosystem());
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if the dependency name is equal in the given dependencies.
|
||||
*
|
||||
* @param dependency a dependency to compare
|
||||
* @param nextDependency a dependency to compare
|
||||
* @return true if the name is equal in both dependencies; otherwise false
|
||||
*/
|
||||
private boolean namesAreEqual(Dependency dependency, Dependency nextDependency) {
|
||||
return dependency.getName() != null && dependency.getName().equals(nextDependency.getName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if the dependency version is equal in the given dependencies.
|
||||
* This method attempts to evaluate version range checks.
|
||||
*
|
||||
* @param current a dependency version to compare
|
||||
* @param next a dependency version to compare
|
||||
* @return true if the version is equal in both dependencies; otherwise
|
||||
* false
|
||||
*/
|
||||
public static boolean npmVersionsMatch(String current, String next) {
|
||||
String left = current;
|
||||
String right = next;
|
||||
if (left == null || right == null) {
|
||||
return false;
|
||||
}
|
||||
if (left.equals(right) || "*".equals(left) || "*".equals(right)) {
|
||||
return true;
|
||||
}
|
||||
if (left.contains(" ")) { // we have a version string from package.json
|
||||
if (right.contains(" ")) { // we can't evaluate this ">=1.5.4 <2.0.0" vs "2 || 3"
|
||||
return false;
|
||||
}
|
||||
if (!right.matches("^\\d.*$")) {
|
||||
right = stripLeadingNonNumeric(right);
|
||||
if (right == null) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
try {
|
||||
final Semver v = new Semver(right, SemverType.NPM);
|
||||
return v.satisfies(left);
|
||||
} catch (SemverException ex) {
|
||||
LOGGER.trace("ignore", ex);
|
||||
}
|
||||
} else {
|
||||
if (!left.matches("^\\d.*$")) {
|
||||
left = stripLeadingNonNumeric(left);
|
||||
if (left == null || left.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
try {
|
||||
Semver v = new Semver(left, SemverType.NPM);
|
||||
if (!right.isEmpty() && v.satisfies(right)) {
|
||||
return true;
|
||||
}
|
||||
if (!right.contains(" ")) {
|
||||
left = current;
|
||||
right = stripLeadingNonNumeric(right);
|
||||
if (right != null) {
|
||||
v = new Semver(right, SemverType.NPM);
|
||||
return v.satisfies(left);
|
||||
}
|
||||
}
|
||||
} catch (SemverException ex) {
|
||||
LOGGER.trace("ignore", ex);
|
||||
} catch (NullPointerException ex) {
|
||||
LOGGER.error("SemVer comparison error: left:\"{}\", right:\"{}\"", left, right);
|
||||
LOGGER.debug("SemVer comparison resulted in NPE", ex);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Strips leading non-numeric values from the start of the string. If no
|
||||
* numbers are present this will return null.
|
||||
*
|
||||
* @param str the string to modify
|
||||
* @return the string without leading non-numeric characters
|
||||
*/
|
||||
private static String stripLeadingNonNumeric(String str) {
|
||||
for (int x = 0; x < str.length(); x++) {
|
||||
if (Character.isDigit(str.codePointAt(x))) {
|
||||
return str.substring(x);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -18,13 +18,10 @@
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.ListIterator;
|
||||
import java.util.Set;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Evidence;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceType;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -36,31 +33,12 @@ import org.slf4j.LoggerFactory;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class DependencyMergingAnalyzer extends AbstractAnalyzer {
|
||||
public class DependencyMergingAnalyzer extends AbstractDependencyComparingAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Constants and Member Variables">
|
||||
/**
|
||||
* The Logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DependencyMergingAnalyzer.class);
|
||||
/**
|
||||
* a flag indicating if this analyzer has run. This analyzer only runs once.
|
||||
*/
|
||||
private boolean analyzed = false;
|
||||
|
||||
/**
|
||||
* Returns a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once. Note this is currently only used in the unit tests.
|
||||
*
|
||||
* @return a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once
|
||||
*/
|
||||
protected synchronized boolean getAnalyzed() {
|
||||
return analyzed;
|
||||
}
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
@@ -90,18 +68,6 @@ public class DependencyMergingAnalyzer extends AbstractAnalyzer {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Does not support parallel processing as it only runs once and then
|
||||
* operates on <em>all</em> dependencies.
|
||||
*
|
||||
* @return whether or not parallel processing is enabled
|
||||
* @see #analyze(Dependency, Engine)
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
@@ -112,55 +78,36 @@ public class DependencyMergingAnalyzer extends AbstractAnalyzer {
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_DEPENDENCY_MERGING_ENABLED;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Analyzes a set of dependencies. If they have been found to be the same
|
||||
* dependency created by more multiple FileTypeAnalyzers (i.e. a gemspec
|
||||
* dependency and a dependency from the Bundle Audit Analyzer. The
|
||||
* dependencies are then merged into a single reportable item.
|
||||
* Evaluates the dependencies
|
||||
*
* @param ignore this analyzer ignores the dependency being analyzed
* @param engine the engine that is scanning the dependencies
* @throws AnalysisException is thrown if there is an error reading the JAR
* file.
* @param dependency a dependency to compare
* @param nextDependency a dependency to compare
* @param dependenciesToRemove a set of dependencies that will be removed
* @return true if a dependency is removed; otherwise false
*/
@Override
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
if (!analyzed) {
analyzed = true;
final Set<Dependency> dependenciesToRemove = new HashSet<>();
final ListIterator<Dependency> mainIterator = engine.getDependencies().listIterator();
//for (Dependency nextDependency : engine.getDependencies()) {
while (mainIterator.hasNext()) {
final Dependency dependency = mainIterator.next();
if (mainIterator.hasNext() && !dependenciesToRemove.contains(dependency)) {
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
while (subIterator.hasNext()) {
final Dependency nextDependency = subIterator.next();
Dependency main;
if ((main = getMainGemspecDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
} else if ((main = getMainSwiftDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
}
}
}
protected boolean evaluateDependencies(final Dependency dependency, final Dependency nextDependency, final Set<Dependency> dependenciesToRemove) {
Dependency main;
//CSOFF: InnerAssignment
if ((main = getMainGemspecDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
return true; //since we merged into the next dependency - skip forward to the next in mainIterator
}
} else if ((main = getMainSwiftDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
return true; //since we merged into the next dependency - skip forward to the next in mainIterator
}
//removing dependencies here as ensuring correctness and avoiding ConcurrentUpdateExceptions
// was difficult because of the inner iterator.
engine.getDependencies().removeAll(dependenciesToRemove);
}
//CSON: InnerAssignment
return false;
}
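// Illustrative sketch (assumed caller, not part of this diff): the refactored
// analyzeDependency loop would delegate the per-pair logic to
// evaluateDependencies and skip ahead when the merge went into nextDependency:
//
//   while (subIterator.hasNext()) {
//       final Dependency nextDependency = subIterator.next();
//       if (evaluateDependencies(dependency, nextDependency, dependenciesToRemove)) {
//           break; // merged into nextDependency - move on in mainIterator
//       }
//   }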

/**
@@ -173,22 +120,28 @@ public class DependencyMergingAnalyzer extends AbstractAnalyzer {
* removed from the main analysis loop, this function adds to this
* collection
*/
private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
public static void mergeDependencies(final Dependency dependency, final Dependency relatedDependency,
final Set<Dependency> dependenciesToRemove) {
LOGGER.debug("Merging '{}' into '{}'", relatedDependency.getFilePath(), dependency.getFilePath());
dependency.addRelatedDependency(relatedDependency);
dependency.getVendorEvidence().getEvidence().addAll(relatedDependency.getVendorEvidence().getEvidence());
dependency.getProductEvidence().getEvidence().addAll(relatedDependency.getProductEvidence().getEvidence());
dependency.getVersionEvidence().getEvidence().addAll(relatedDependency.getVersionEvidence().getEvidence());
for (Evidence e : relatedDependency.getEvidence(EvidenceType.VENDOR)) {
dependency.addEvidence(EvidenceType.VENDOR, e);
}
for (Evidence e : relatedDependency.getEvidence(EvidenceType.PRODUCT)) {
dependency.addEvidence(EvidenceType.PRODUCT, e);
}
for (Evidence e : relatedDependency.getEvidence(EvidenceType.VERSION)) {
dependency.addEvidence(EvidenceType.VERSION, e);
}

final Iterator<Dependency> i = relatedDependency.getRelatedDependencies().iterator();
while (i.hasNext()) {
dependency.addRelatedDependency(i.next());
i.remove();
for (Dependency d : relatedDependency.getRelatedDependencies()) {
dependency.addRelatedDependency(d);
relatedDependency.removeRelatedDependencies(d);
}
if (dependency.getSha1sum().equals(relatedDependency.getSha1sum())) {
dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
if (dependenciesToRemove != null) {
dependenciesToRemove.add(relatedDependency);
}
dependenciesToRemove.add(relatedDependency);
}
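// Illustrative usage of the now-public static form above (hypothetical
// variables d1 and d2 that were flagged as the same artifact):
//
//   final Set<Dependency> toRemove = new HashSet<>();
//   DependencyMergingAnalyzer.mergeDependencies(d1, d2, toRemove);
//   // d2's evidence and related dependencies are copied onto d1; when the
//   // two files share a SHA-1 hash, d2 is also queued in toRemove.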

/**
@@ -22,15 +22,18 @@ import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
@@ -44,17 +47,37 @@ import org.slf4j.LoggerFactory;
*
* @author Jeremy Long
*/
@ThreadSafe
public class FalsePositiveAnalyzer extends AbstractAnalyzer {

/**
* The Logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(FalsePositiveAnalyzer.class);

/**
* The file filter used to find DLL and EXE.
*/
private static final FileFilter DLL_EXE_FILTER = FileFilterBuilder.newInstance().addExtensions("dll", "exe").build();
/**
* Regex to identify core java libraries and a few other commonly
* misidentified ones.
*/
public static final Pattern CORE_JAVA = Pattern.compile("^cpe:/a:(sun|oracle|ibm):(j2[ems]e|"
+ "java(_platform_micro_edition|_runtime_environment|_se|virtual_machine|se_development_kit|fx)?|"
+ "jdk|jre|jsse)($|:.*)");
/**
* Regex to identify core jsf libraries.
*/
public static final Pattern CORE_JAVA_JSF = Pattern.compile("^cpe:/a:(sun|oracle|ibm):jsf($|:.*)");
/**
* Regex to identify core java library files. This is currently incomplete.
*/
public static final Pattern CORE_FILES = Pattern.compile("(^|/)((alt[-])?rt|jsse|jfxrt|jfr|jce|javaws|deploy|charsets)\\.jar$");
/**
* Regex to identify core jsf java library files. This is currently
* incomplete.
*/
public static final Pattern CORE_JSF_FILES = Pattern.compile("(^|/)jsf[-][^/]*\\.jar$");
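// Illustrative behaviour of the patterns above (examples, not part of this diff):
//
//   CORE_JAVA.matcher("cpe:/a:oracle:jre:1.7.0").matches()               -> true
//   CORE_JAVA.matcher("cpe:/a:apache:commons_collections:3.2").matches() -> false
//   CORE_FILES.matcher("rt.jar").matches()                               -> true
//   CORE_FILES.matcher("commons-lang.jar").matches()                     -> false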

//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
@@ -136,19 +159,19 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
}
}
}
if (mustContain
!= null) {
final Iterator<Identifier> itr = dependency.getIdentifiers().iterator();
while (itr.hasNext()) {
final Identifier i = itr.next();
if (mustContain != null) {
final Set<Identifier> removalSet = new HashSet<>();
for (Identifier i : dependency.getIdentifiers()) {
if ("cpe".contains(i.getType())
&& i.getValue() != null
&& i.getValue().startsWith("cpe:/a:springsource:")
&& !i.getValue().toLowerCase().contains(mustContain)) {
itr.remove();
//dependency.getIdentifiers().remove(i);
removalSet.add(i);
}
}
for (Identifier i : removalSet) {
dependency.removeIdentifier(i);
}
}
}

@@ -199,42 +222,21 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
//how did we get here?
LOGGER.debug("currentVersion and nextVersion are both null?");
} else if (currentVersion == null && nextVersion != null) {
dependency.getIdentifiers().remove(currentId);
dependency.removeIdentifier(currentId);
} else if (nextVersion == null && currentVersion != null) {
dependency.getIdentifiers().remove(nextId);
dependency.removeIdentifier(nextId);
} else if (currentVersion.length() < nextVersion.length()) {
if (nextVersion.startsWith(currentVersion) || "-".equals(currentVersion)) {
dependency.getIdentifiers().remove(currentId);
dependency.removeIdentifier(currentId);
}
} else if (currentVersion.startsWith(nextVersion) || "-".equals(nextVersion)) {
dependency.getIdentifiers().remove(nextId);
dependency.removeIdentifier(nextId);
}
}
}
}
}
}
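// Illustrative reading of the branches above (hypothetical identifiers):
// with currentId "cpe:/a:apache:struts:2" and nextId "cpe:/a:apache:struts:2.3.1",
// currentVersion "2" is shorter and nextVersion starts with it, so the less
// specific currentId is removed and the more specific identifier is kept.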
/**
* Regex to identify core java libraries and a few other commonly
* misidentified ones.
*/
public static final Pattern CORE_JAVA = Pattern.compile("^cpe:/a:(sun|oracle|ibm):(j2[ems]e|"
+ "java(_platform_micro_edition|_runtime_environment|_se|virtual_machine|se_development_kit|fx)?|"
+ "jdk|jre|jsse)($|:.*)");

/**
* Regex to identify core jsf libraries.
*/
public static final Pattern CORE_JAVA_JSF = Pattern.compile("^cpe:/a:(sun|oracle|ibm):jsf($|:.*)");
/**
* Regex to identify core java library files. This is currently incomplete.
*/
public static final Pattern CORE_FILES = Pattern.compile("(^|/)((alt[-])?rt|jsse|jfxrt|jfr|jce|javaws|deploy|charsets)\\.jar$");
/**
* Regex to identify core jsf java library files. This is currently
* incomplete.
*/
public static final Pattern CORE_JSF_FILES = Pattern.compile("(^|/)jsf[-][^/]*\\.jar$");

/**
* Removes any CPE entries for the JDK/JRE unless the filename ends with
@@ -243,21 +245,22 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
* @param dependency the dependency to remove JRE CPEs from
*/
private void removeJreEntries(Dependency dependency) {
final Set<Identifier> identifiers = dependency.getIdentifiers();
final Iterator<Identifier> itr = identifiers.iterator();
while (itr.hasNext()) {
final Identifier i = itr.next();
final Set<Identifier> removalSet = new HashSet<>();
for (Identifier i : dependency.getIdentifiers()) {
final Matcher coreCPE = CORE_JAVA.matcher(i.getValue());
final Matcher coreFiles = CORE_FILES.matcher(dependency.getFileName());
if (coreCPE.matches() && !coreFiles.matches()) {
itr.remove();
removalSet.add(i);
}
final Matcher coreJsfCPE = CORE_JAVA_JSF.matcher(i.getValue());
final Matcher coreJsfFiles = CORE_JSF_FILES.matcher(dependency.getFileName());
if (coreJsfCPE.matches() && !coreJsfFiles.matches()) {
itr.remove();
removalSet.add(i);
}
}
for (Identifier i : removalSet) {
dependency.removeIdentifier(i);
}
}
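// Illustrative case (hypothetical data): a dependency whose file name is
// "commons-lang.jar" but which carries the identifier "cpe:/a:oracle:jre:1.6"
// has that identifier added to removalSet -- CORE_JAVA matches the CPE while
// CORE_FILES does not match the file name -- and it is removed afterwards.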

/**
@@ -288,9 +291,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
*
* @param dependency the dependency to analyze
*/
private void removeBadMatches(Dependency dependency) {
final Set<Identifier> identifiers = dependency.getIdentifiers();
final Iterator<Identifier> itr = identifiers.iterator();
protected void removeBadMatches(Dependency dependency) {

/* TODO - can we utilize the pom's groupid and artifactId to filter??? most of
* these are due to low quality data. Other idea would be to say any CPE
@@ -299,8 +300,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
*/
//Set<Evidence> groupId = dependency.getVendorEvidence().getEvidence("pom", "groupid");
//Set<Evidence> artifactId = dependency.getVendorEvidence().getEvidence("pom", "artifactid");
while (itr.hasNext()) {
final Identifier i = itr.next();
for (Identifier i : dependency.getIdentifiers()) {
//TODO move this startsWith expression to the base suppression file
if ("cpe".equals(i.getType())) {
if ((i.getValue().matches(".*c\\+\\+.*")
@@ -324,7 +324,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|| dependency.getFileName().toLowerCase().endsWith(".tgz")
|| dependency.getFileName().toLowerCase().endsWith(".ear")
|| dependency.getFileName().toLowerCase().endsWith(".war"))) {
itr.remove();
//itr.remove();
dependency.removeIdentifier(i);
} else if ((i.getValue().startsWith("cpe:/a:jquery:jquery")
|| i.getValue().startsWith("cpe:/a:prototypejs:prototype")
|| i.getValue().startsWith("cpe:/a:yahoo:yui"))
@@ -332,7 +333,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|| dependency.getFileName().toLowerCase().endsWith("pom.xml")
|| dependency.getFileName().toLowerCase().endsWith(".dll")
|| dependency.getFileName().toLowerCase().endsWith(".exe"))) {
itr.remove();
//itr.remove();
dependency.removeIdentifier(i);
} else if ((i.getValue().startsWith("cpe:/a:microsoft:excel")
|| i.getValue().startsWith("cpe:/a:microsoft:word")
|| i.getValue().startsWith("cpe:/a:microsoft:visio")
@@ -343,16 +345,36 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|| dependency.getFileName().toLowerCase().endsWith(".ear")
|| dependency.getFileName().toLowerCase().endsWith(".war")
|| dependency.getFileName().toLowerCase().endsWith("pom.xml"))) {
itr.remove();
//itr.remove();
dependency.removeIdentifier(i);
} else if (i.getValue().startsWith("cpe:/a:apache:maven")
&& !dependency.getFileName().toLowerCase().matches("maven-core-[\\d\\.]+\\.jar")) {
itr.remove();
} else if (i.getValue().startsWith("cpe:/a:m-core:m-core")
&& !dependency.getEvidenceUsed().containsUsedString("m-core")) {
itr.remove();
//itr.remove();
dependency.removeIdentifier(i);
} else if (i.getValue().startsWith("cpe:/a:m-core:m-core")) {
boolean found = false;
for (Evidence e : dependency.getEvidence(EvidenceType.PRODUCT)) {
if ("m-core".equalsIgnoreCase(e.getValue())) {
found = true;
break;
}
}
if (!found) {
for (Evidence e : dependency.getEvidence(EvidenceType.VENDOR)) {
if ("m-core".equalsIgnoreCase(e.getValue())) {
found = true;
break;
}
}
}
if (!found) {
//itr.remove();
dependency.removeIdentifier(i);
}
} else if (i.getValue().startsWith("cpe:/a:jboss:jboss")
&& !dependency.getFileName().toLowerCase().matches("jboss-?[\\d\\.-]+(GA)?\\.jar")) {
itr.remove();
//itr.remove();
dependency.removeIdentifier(i);
}
}
}
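// Illustrative case (hypothetical data): a "cpe:/a:apache:maven:3.0" identifier
// on "maven-plugin-api.jar" is dropped because the file name does not match
// "maven-core-[\d\.]+\.jar"; the same identifier on "maven-core-3.0.5.jar" is kept.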
@@ -365,31 +387,30 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
* @param dependency the dependency to analyze
*/
private void removeWrongVersionMatches(Dependency dependency) {
final Set<Identifier> identifiers = dependency.getIdentifiers();
final Iterator<Identifier> itr = identifiers.iterator();

final Set<Identifier> identifiersToRemove = new HashSet<>();
final String fileName = dependency.getFileName();
if (fileName != null && fileName.contains("axis2")) {
while (itr.hasNext()) {
final Identifier i = itr.next();
for (Identifier i : dependency.getIdentifiers()) {
if ("cpe".equals(i.getType())) {
final String cpe = i.getValue();
if (cpe != null && (cpe.startsWith("cpe:/a:apache:axis:") || "cpe:/a:apache:axis".equals(cpe))) {
itr.remove();
identifiersToRemove.add(i);
}
}
}
} else if (fileName != null && fileName.contains("axis")) {
while (itr.hasNext()) {
final Identifier i = itr.next();
for (Identifier i : dependency.getIdentifiers()) {
if ("cpe".equals(i.getType())) {
final String cpe = i.getValue();
if (cpe != null && (cpe.startsWith("cpe:/a:apache:axis2:") || "cpe:/a:apache:axis2".equals(cpe))) {
itr.remove();
identifiersToRemove.add(i);
}
}
}
}
for (Identifier i : identifiersToRemove) {
dependency.removeIdentifier(i);
}
}
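// Illustrative case (hypothetical data): for a file named "axis2-1.4.1.jar",
// an identifier of "cpe:/a:apache:axis:1.4" is collected in identifiersToRemove
// and dropped, while "cpe:/a:apache:axis2:1.4.1" is kept.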

/**
@@ -408,23 +429,38 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|| identifier.getValue().startsWith("cpe:/a:oracle:opensso_enterprise:")
|| identifier.getValue().startsWith("cpe:/a:sun:opensso_enterprise:")
|| identifier.getValue().startsWith("cpe:/a:sun:opensso:"))) {
final String newCpe = String.format("cpe:/a:sun:opensso_enterprise:%s", identifier.getValue().substring(22));
final String newCpe2 = String.format("cpe:/a:oracle:opensso_enterprise:%s", identifier.getValue().substring(22));
final String newCpe3 = String.format("cpe:/a:sun:opensso:%s", identifier.getValue().substring(22));
final String newCpe4 = String.format("cpe:/a:oracle:opensso:%s", identifier.getValue().substring(22));
final String[] parts = identifier.getValue().split(":");
final int pos = parts[0].length() + parts[1].length() + parts[2].length() + parts[3].length() + 4;
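// Worked example (illustrative): for the value "cpe:/a:sun:opensso:8.0",
// split(":") yields ["cpe", "/a", "sun", "opensso", "8.0"], so
// pos = 3 + 2 + 3 + 7 + 4 = 19 and identifier.getValue().substring(pos)
// is "8.0" -- the version portion re-used in the rewritten CPEs below.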
final String newCpe = String.format("cpe:/a:sun:opensso_enterprise:%s", identifier.getValue().substring(pos));
final String newCpe2 = String.format("cpe:/a:oracle:opensso_enterprise:%s", identifier.getValue().substring(pos));
final String newCpe3 = String.format("cpe:/a:sun:opensso:%s", identifier.getValue().substring(pos));
final String newCpe4 = String.format("cpe:/a:oracle:opensso:%s", identifier.getValue().substring(pos));
try {
dependency.addIdentifier("cpe",
newCpe,
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe, "UTF-8")));
dependency.addIdentifier("cpe",
newCpe2,
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe2, "UTF-8")));
dependency.addIdentifier("cpe",
newCpe3,
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe3, "UTF-8")));
dependency.addIdentifier("cpe",
newCpe4,
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe4, "UTF-8")));
dependency.addIdentifier("cpe", newCpe,
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe, "UTF-8")),
identifier.getConfidence());
dependency.addIdentifier("cpe", newCpe2,
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe2, "UTF-8")),
identifier.getConfidence());
dependency.addIdentifier("cpe", newCpe3,
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe3, "UTF-8")),
identifier.getConfidence());
dependency.addIdentifier("cpe", newCpe4,
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe4, "UTF-8")),
identifier.getConfidence());
} catch (UnsupportedEncodingException ex) {
LOGGER.debug("", ex);
}
}
if ("cpe".equals(identifier.getType()) && identifier.getValue() != null
&& identifier.getValue().startsWith("cpe:/a:apache:santuario_xml_security_for_java:")) {
final String[] parts = identifier.getValue().split(":");
final int pos = parts[0].length() + parts[1].length() + parts[2].length() + parts[3].length() + 4;
final String newCpe = String.format("cpe:/a:apache:xml_security_for_java:%s", identifier.getValue().substring(pos));
try {
dependency.addIdentifier("cpe", newCpe,
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe, "UTF-8")),
identifier.getConfidence());
} catch (UnsupportedEncodingException ex) {
LOGGER.debug("", ex);
}
@@ -446,7 +482,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
String parentPath = dependency.getFilePath().toLowerCase();
if (parentPath.contains(".jar")) {
parentPath = parentPath.substring(0, parentPath.indexOf(".jar") + 4);
final List<Dependency> dependencies = engine.getDependencies();
final Dependency[] dependencies = engine.getDependencies();
final Dependency parent = findDependency(parentPath, dependencies);
if (parent != null) {
boolean remove = false;
@@ -464,7 +500,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
}
}
if (remove) {
dependencies.remove(dependency);
engine.removeDependency(dependency);
}
}
}
@@ -476,10 +512,10 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
* dependencies.
*
* @param dependencyPath the path of the dependency to return
* @param dependencies the collection of dependencies to search
* @param dependencies the array of dependencies to search
* @return the dependency object for the given path, otherwise null
*/
private Dependency findDependency(String dependencyPath, List<Dependency> dependencies) {
private Dependency findDependency(String dependencyPath, Dependency[] dependencies) {
for (Dependency d : dependencies) {
if (d.getFilePath().equalsIgnoreCase(dependencyPath)) {
return d;
@@ -18,25 +18,46 @@
package org.owasp.dependencycheck.analyzer;

import java.io.File;
import javax.annotation.concurrent.ThreadSafe;

import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOCase;
import org.apache.commons.io.filefilter.NameFileFilter;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.Settings;

/**
*
* Takes a dependency and analyzes the filename and determines the hashes.
*
* @author Jeremy Long
*/
@ThreadSafe
public class FileNameAnalyzer extends AbstractAnalyzer {

/**
* Files ignored when collecting product and vendor evidence from the file
* name (Python init files, lock files, and package metadata).
*/
//CSOFF: WhitespaceAfter
private static final NameFileFilter IGNORED_FILES = new NameFileFilter(new String[]{
"__init__.py",
"__init__.pyc",
"__init__.pyo",
"composer.lock",
"configure.in",
"configure.ac",
"Gemfile.lock",
"METADATA",
"PKG-INFO",
"package.json",
"Package.swift",}, IOCase.INSENSITIVE);
//CSON: WhitespaceAfter
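// Illustrative behaviour of the filter above (examples, not part of this diff):
//
//   IGNORED_FILES.accept(new File("__init__.py"))      -> true  (name evidence suppressed)
//   IGNORED_FILES.accept(new File("commons-lang.jar")) -> false (name evidence collected)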

//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
* The name of the analyzer.
@@ -66,6 +87,7 @@ public class FileNameAnalyzer extends AbstractAnalyzer {
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}

/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
@@ -78,16 +100,6 @@ public class FileNameAnalyzer extends AbstractAnalyzer {
}
//</editor-fold>

/**
* Python init files
*/
//CSOFF: WhitespaceAfter
private static final NameFileFilter IGNORED_FILES = new NameFileFilter(new String[]{
"__init__.py",
"__init__.pyc",
"__init__.pyo",});
//CSON: WhitespaceAfter

/**
* Collects information about the file name.
*
@@ -111,21 +123,16 @@ public class FileNameAnalyzer extends AbstractAnalyzer {
// a shade. This should hopefully correct for cases like log4j.jar or
// struts2-core.jar
if (version.getVersionParts() == null || version.getVersionParts().size() < 2) {
dependency.getVersionEvidence().addEvidence("file", "version",
version.toString(), Confidence.MEDIUM);
dependency.addEvidence(EvidenceType.VERSION, "file", "version", version.toString(), Confidence.MEDIUM);
} else {
dependency.getVersionEvidence().addEvidence("file", "version",
version.toString(), Confidence.HIGHEST);
dependency.addEvidence(EvidenceType.VERSION, "file", "version", version.toString(), Confidence.HIGHEST);
}
dependency.getVersionEvidence().addEvidence("file", "name",
packageName, Confidence.MEDIUM);
dependency.addEvidence(EvidenceType.VERSION, "file", "name", packageName, Confidence.MEDIUM);
}

if (!IGNORED_FILES.accept(f)) {
dependency.getProductEvidence().addEvidence("file", "name",
packageName, Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("file", "name",
packageName, Confidence.HIGH);
dependency.addEvidence(EvidenceType.PRODUCT, "file", "name", packageName, Confidence.HIGH);
dependency.addEvidence(EvidenceType.VENDOR, "file", "name", packageName, Confidence.HIGH);
}
}
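// Illustrative reading of the version branch above: a parsed version with fewer
// than two parts (for example just "2", as might be recovered from a name like
// "struts2-core.jar") is recorded only at Confidence.MEDIUM, while a multi-part
// version such as "2.3.1" is recorded at Confidence.HIGHEST.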
}
Some files were not shown because too many files have changed in this diff.