Mirror of https://github.com/ysoftdevs/DependencyCheck.git (synced 2026-01-14 15:53:36 +01:00)
Compare commits
1323 Commits
.gitattributes (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
*.html linguist-documentation
(^|/)site/) linguist-documentation
.github/issue_template.md (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
### Reporting Bugs/Errors
When reporting errors, 99% of the time log file output is required. Please post the log file as a [gist](https://gist.github.com/) and provide a link in the new issue.

### Reporting False Positives
When reporting a false positive please include:
- The location of the dependency (Maven GAV, URL to download the dependency, etc.)
- The CPE that is believed to be false positive
- Please report the CPE not the CVE

#### Example
False positive on library foo.jar - reported as cpe:/a:apache:tomcat:7.0
```xml
<dependency>
<groupId>org.sample</groupId>
<artifactId>foo</artifactId>
<version>1.0</version>
</dependency>
```
.gitignore (vendored, 4 lines changed)
@@ -1,4 +1,6 @@
*/target/**
# IntelliJ test run side-effects
dependency-check-core/data/
# Intellij project files
*.iml
*.ipr
@@ -24,3 +26,5 @@ _site/**
.LCKpom.xml~
#coverity
/cov-int/
/dependency-check-core/nbproject/
cov-scan.bat
.travis.yml (new file, 3 lines)
@@ -0,0 +1,3 @@
language: java
jdk: oraclejdk7
script: mvn install -DreleaseTesting
Dockerfile (new file, 14 lines)
@@ -0,0 +1,14 @@
FROM java:8

MAINTAINER Timo Pagel <dependencycheckmaintainer@timo-pagel.de>

RUN wget -O /tmp/current.txt http://jeremylong.github.io/DependencyCheck/current.txt && current=$(cat /tmp/current.txt) && wget https://dl.bintray.com/jeremy-long/owasp/dependency-check-$current-release.zip && unzip dependency-check-$current-release.zip && mv dependency-check /usr/share/

RUN useradd -ms /bin/bash dockeruser && chown -R dockeruser:dockeruser /usr/share/dependency-check && mkdir /report && chown -R dockeruser:dockeruser /report
USER dockeruser

VOLUME "/src /usr/share/dependency-check/data /report"

WORKDIR /report

ENTRYPOINT ["/usr/share/dependency-check/bin/dependency-check.sh", "--scan", "/src"]
README.md (73 lines changed)
@@ -1,3 +1,5 @@
[](https://travis-ci.org/jeremylong/DependencyCheck) [](https://www.apache.org/licenses/LICENSE-2.0.txt) [](https://scan.coverity.com/projects/dependencycheck)

Dependency-Check
================

@@ -9,27 +11,35 @@ Current Releases
-------------
### Jenkins Plugin

For instructions on the use of the Jenkins plugin please see the [Jenkins dependency-check page](http://wiki.jenkins-ci.org/x/CwDgAQ).
For instructions on the use of the Jenkins plugin please see the [OWASP Dependency-Check Plugin page](https://wiki.jenkins-ci.org/display/JENKINS/OWASP+Dependency-Check+Plugin).

### Command Line

More detailed instructions can be found on the [dependency-check github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-cli/installation.html).
The latest CLI can be downloaded from bintray's [dependency-check page](https://bintray.com/jeremy-long/owasp/dependency-check).
More detailed instructions can be found on the
[dependency-check github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-cli/).
The latest CLI can be downloaded from bintray's
[dependency-check page](https://bintray.com/jeremy-long/owasp/dependency-check).

On *nix
```
$ ./bin/dependency-check.sh -h
$ ./bin/dependency-check.sh --app Testing --out . --scan [path to jar files to be scanned]
$ ./bin/dependency-check.sh --project Testing --out . --scan [path to jar files to be scanned]
```
On Windows
```
> bin/dependency-check.bat -h
> bin/dependency-check.bat --app Testing --out . --scan [path to jar files to be scanned]
> bin/dependency-check.bat --project Testing --out . --scan [path to jar files to be scanned]
```
On Mac with [Homebrew](http://brew.sh)
```
$ brew update && brew install dependency-check
$ dependency-check -h
$ dependency-check --project Testing --out . --scan [path to jar files to be scanned]
```

### Maven Plugin

More detailed instructions can be found on the [dependency-check-maven github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-maven/usage.html).
More detailed instructions can be found on the [dependency-check-maven github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-maven).
The plugin can be configured using the following:

```xml
@@ -58,31 +68,66 @@ The plugin can be configured using the following:

### Ant Task

For instructions on the use of the Ant Task, please see the [dependency-check-ant github page](http://jeremylong.github.io/DependencyCheck/dependency-check-ant/installation.html).
For instructions on the use of the Ant Task, please see the [dependency-check-ant github page](http://jeremylong.github.io/DependencyCheck/dependency-check-ant).

Development Usage
-------------
The following instructions outline how to compile and use the current snapshot. While every intention is to maintain a stable snapshot, it is recommended
that the release versions listed above be used.

Note, currently the install goal may take a long time to execute the integration tests. However, if this takes more than 30 minutes it is likely that the
download of data from the NVD is having an issue. This issue is still being researched and a solution should be published soon.
The repository has some large files due to test resources. The team has tried to clean up the history as much as possible.
However, it is recommended that you perform a shallow clone to save yourself time:

```bash
git clone --depth 1 git@github.com:jeremylong/DependencyCheck.git
```

On *nix
```
$ mvn install
$ ./dependency-check-cli/target/release/bin/dependency-check.sh -h
$ ./dependency-check-cli/target/release/bin/dependency-check.sh --app Testing --out . --scan ./src/test/resources
$ ./dependency-check-cli/target/release/bin/dependency-check.sh --project Testing --out . --scan ./src/test/resources
```
On Windows
```
> mvn install
> dependency-check-cli/target/release/bin/dependency-check.bat -h
> dependency-check-cli/target/release/bin/dependency-check.bat --app Testing --out . --scan ./src/test/resources
> dependency-check-cli/target/release/bin/dependency-check.bat --project Testing --out . --scan ./src/test/resources
```

Then load the resulting 'DependencyCheck-Report.html' into your favorite browser.

### Docker

In the following example it is assumed that the source to be checked is in the current directory. A persistent data directory and a persistent report directory are used so that the container can be destroyed after running it, making sure that you always use the newest version.
```
# After the first run, feel free to change the owner of the directories to the owner of the created files and the permissions to 744
DATA_DIRECTORY=$HOME/OWASP-Dependency-Check/data
REPORT_DIRECTORY=/$HOME/OWASP-Dependency-Check/reports

if [ ! -d $DATA_DIRECTORY ]; then
echo "Initially creating persistent directories"
mkdir -p $DATA_DIRECTORY
chmod -R 777 $DATA_DIRECTORY

mkdir -p $REPORT_DIRECTORY
chmod -R 777 $REPORT_DIRECTORY
fi

docker pull owasp/dependency-check # Make sure it is the latest version

docker run --rm \
--volume $(pwd):/src \
--volume $DATA_DIRECTORY:/usr/share/dependency-check/data \
--volume $REPORT_DIRECTORY:/report \
--name dependency-check \
dc \
--suppression "/src/security/dependency-check-suppression.xml"\
--format "ALL" \
--project "My OWASP Dependency Check Project" \
```

Mailing List
------------

@@ -95,9 +140,9 @@ Archive: [google group](https://groups.google.com/forum/#!forum/dependency-check
Copyright & License
-

Dependency-Check is Copyright (c) 2012-2014 Jeremy Long. All Rights Reserved.
Dependency-Check is Copyright (c) 2012-2016 Jeremy Long. All Rights Reserved.

Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://github.com/jeremylong/DependencyCheck/dependency-check-cli/blob/master/LICENSE.txt) file for the full license.
Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/LICENSE.txt) file for the full license.

Dependency-Check makes use of several other open source libraries. Please see the [NOTICE.txt] [notices] file for more information.

@@ -105,4 +150,4 @@ Dependency-Check makes use of several other open source libraries. Please see th
[wiki]: https://github.com/jeremylong/DependencyCheck/wiki
[subscribe]: mailto:dependency-check+subscribe@googlegroups.com
[post]: mailto:dependency-check@googlegroups.com
[notices]: https://github.com/jeremylong/DependencyCheck/blob/master/NOTICES.txt
[notices]: https://github.com/jeremylong/DependencyCheck/blob/master/NOTICE.txt
@@ -1,9 +1,6 @@
-----------------------------
---begin dependency-check----
-----------------------------
dependency-check
OWASP dependency-check

Copyright (c) 2012-2013 Jeremy Long. All Rights Reserved.
Copyright (c) 2012-2015 Jeremy Long. All Rights Reserved.

The licenses for the software listed below can be found in the META-INF/licenses/[dependency name].

@@ -19,11 +16,3 @@ An original copy of the license agreement can be found at: http://www.h2database
This product includes data from the Common Weakness Enumeration (CWE): http://cwe.mitre.org/

This product downloads and utilizes data from the National Vulnerability Database hosted by NIST: http://nvd.nist.gov/download.cfm

-----------------------------
---end dependency-check------
-----------------------------

Notices below are from dependent libraries and have been included via maven-shade-plugin.

-----------------------------
@@ -6,7 +6,7 @@ performed are a "best effort" and as such, there could be false positives as wel
vulnerabilities in 3rd party components is a well-known problem and is currently documented in the 2013 OWASP
Top 10 as [A9 - Using Components with Known Vulnerabilities](https://www.owasp.org/index.php/Top_10_2013-A9-Using_Components_with_Known_Vulnerabilities).

Documentation and links to production binary releases can be found on the [github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-ant/installation.html).
Documentation and links to production binary releases can be found on the [github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-ant/index.html).

Mailing List
------------
@@ -20,6 +20,6 @@ Copyright & License

Dependency-Check is Copyright (c) 2012-2014 Jeremy Long. All Rights Reserved.

Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://github.com/jeremylong/DependencyCheck/dependency-check-cli/blob/master/LICENSE.txt) file for the full license.
Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/LICENSE.txt) file for the full license.

Dependency-Check-Ant makes use of other open source libraries. Please see the [NOTICE.txt](https://github.com/jeremylong/DependencyCheck/dependency-check-ant/blob/master/NOTICES.txt) file for more information.
Dependency-Check-Ant makes use of other open source libraries. Please see the [NOTICE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/dependency-check-ant/NOTICE.txt) file for more information.
@@ -20,7 +20,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>1.2.11</version>
<version>1.4.5</version>
</parent>

<artifactId>dependency-check-ant</artifactId>
@@ -190,38 +190,36 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<classpathPrefix>lib/</classpathPrefix>
</manifest>
</archive>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.3</version>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
<transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>META-INF/NOTICE.txt</resource>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
<resource>META-INF/NOTICE</resource>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
<resource>META-INF/LICENSE</resource>
</transformer>
</transformers>
<attach>false</attach> <!-- don't install/deploy this archive -->
</configuration>
<executions>
<execution>
<id>create-distribution</id>
<phase>package</phase>
<goals>
<goal>shade</goal>
<goal>single</goal>
</goals>
<configuration>
<descriptors>
<descriptor>src/main/assembly/release.xml</descriptor>
</descriptors>
</configuration>
</execution>
</executions>
</plugin>
@@ -229,9 +227,6 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<configuration>
<!--instrumentation>
<ignoreTrivial>true</ignoreTrivial>
</instrumentation-->
<check>
<branchRate>85</branchRate>
<lineRate>85</lineRate>
@@ -261,6 +256,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<argLine>-Dfile.encoding=UTF-8</argLine>
<systemProperties>
<property>
<name>data.directory</name>
@@ -273,100 +269,10 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
</build>
<reporting>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-project-info-reports-plugin</artifactId>
<version>2.7</version>
<reportSets>
<reportSet>
<reports>
<report>summary</report>
<report>license</report>
<report>help</report>
</reports>
</reportSet>
</reportSets>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>2.9.1</version>
<configuration>
<failOnError>false</failOnError>
<bottom>Copyright© 2012-15 Jeremy Long. All Rights Reserved.</bottom>
</configuration>
<reportSets>
<reportSet>
<id>default</id>
<reports>
<report>javadoc</report>
</reports>
</reportSet>
</reportSets>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>versions-maven-plugin</artifactId>
<version>2.1</version>
<reportSets>
<reportSet>
<reports>
<report>dependency-updates-report</report>
<report>plugin-updates-report</report>
</reports>
</reportSet>
</reportSets>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jxr-plugin</artifactId>
<version>2.4</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<version>2.6</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-report-plugin</artifactId>
<version>2.16</version>
<reportSets>
<reportSet>
<reports>
<report>report-only</report>
</reports>
</reportSet>
</reportSets>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>taglist-maven-plugin</artifactId>
<version>2.4</version>
<configuration>
<tagListOptions>
<tagClasses>
<tagClass>
<displayName>Todo Work</displayName>
<tags>
<tag>
<matchString>todo</matchString>
<matchType>ignoreCase</matchType>
</tag>
<tag>
<matchString>FIXME</matchString>
<matchType>exact</matchType>
</tag>
</tags>
</tagClass>
</tagClasses>
</tagListOptions>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>2.11</version>
<version>${reporting.checkstyle-plugin.version}</version>
<configuration>
<enableRulesSummary>false</enableRulesSummary>
<enableFilesSummary>false</enableFilesSummary>
@@ -379,7 +285,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-pmd-plugin</artifactId>
<version>3.0.1</version>
<version>${reporting.pmd-plugin.version}</version>
<configuration>
<targetJdk>1.6</targetJdk>
<linkXref>true</linkXref>
@@ -395,11 +301,6 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
</rulesets>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<version>2.5.3</version>
</plugin>
</plugins>
</reporting>
<dependencies>
@@ -423,12 +324,11 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<dependency>
<groupId>org.apache.ant</groupId>
<artifactId>ant</artifactId>
<version>1.9.4</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.ant</groupId>
<artifactId>ant-testutil</artifactId>
<version>1.9.4</version>
<scope>test</scope>
</dependency>
</dependencies>

@@ -12,18 +12,25 @@
<format>zip</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<fileSets>
<!--fileSets>
<fileSet>
<outputDirectory>/</outputDirectory>
<outputDirectory>dependency-check</outputDirectory>
<directory>${project.build.directory}</directory>
<includes>
<include>dependency-check*.jar</include>
</includes>
</fileSet>
</fileSets>
</fileSets-->
<files>
<file>
<source>${project.build.directory}/${project.artifactId}-${project.version}.jar</source>
<outputDirectory>dependency-check-ant</outputDirectory>
<destName>dependency-check-ant.jar</destName>
</file>
</files>
<dependencySets>
<dependencySet>
<outputDirectory>/lib</outputDirectory>
<outputDirectory>dependency-check-ant/lib</outputDirectory>
<scope>runtime</scope>
</dependencySet>
</dependencySets>
@@ -0,0 +1,278 @@
/*
 * This file is part of dependency-check-ant.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
 */
package org.owasp.dependencycheck.ant.logging;

import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.slf4j.helpers.FormattingTuple;
import org.slf4j.helpers.MarkerIgnoringBase;
import org.slf4j.helpers.MessageFormatter;

/**
 * An instance of {@link org.slf4j.Logger} which simply calls the log method on
 * the delegate Ant task.
 *
 * @author colezlaw
 */
public class AntLoggerAdapter extends MarkerIgnoringBase {

    /**
     * serialization UID.
     */
    private static final long serialVersionUID = -1337;
    /**
     * A reference to the Ant task used for logging.
     */
    private transient Task task;

    /**
     * Constructs an Ant Logger Adapter.
     *
     * @param task the Ant Task to use for logging
     */
    public AntLoggerAdapter(Task task) {
        super();
        this.task = task;
    }

    /**
     * Sets the current Ant task to use for logging.
     *
     * @param task the Ant task to use for logging
     */
    public void setTask(Task task) {
        this.task = task;
    }

    @Override
    public boolean isTraceEnabled() {
        // Might be a more efficient way to do this, but Ant doesn't enable or disable
        // various levels globally - it just fires things at registered Listeners.
        return true;
    }

    @Override
    public void trace(String msg) {
        if (task != null) {
            task.log(msg, Project.MSG_VERBOSE);
        }
    }

    @Override
    public void trace(String format, Object arg) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arg);
            task.log(tp.getMessage(), Project.MSG_VERBOSE);
        }
    }

    @Override
    public void trace(String format, Object arg1, Object arg2) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arg1, arg2);
            task.log(tp.getMessage(), Project.MSG_VERBOSE);
        }
    }

    @Override
    public void trace(String format, Object... arguments) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arguments);
            task.log(tp.getMessage(), Project.MSG_VERBOSE);
        }
    }

    @Override
    public void trace(String msg, Throwable t) {
        if (task != null) {
            task.log(msg, t, Project.MSG_VERBOSE);
        }
    }

    @Override
    public boolean isDebugEnabled() {
        return true;
    }

    @Override
    public void debug(String msg) {
        if (task != null) {
            task.log(msg, Project.MSG_DEBUG);
        }
    }

    @Override
    public void debug(String format, Object arg) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arg);
            task.log(tp.getMessage(), Project.MSG_DEBUG);
        }
    }

    @Override
    public void debug(String format, Object arg1, Object arg2) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arg1, arg2);
            task.log(tp.getMessage(), Project.MSG_DEBUG);
        }
    }

    @Override
    public void debug(String format, Object... arguments) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arguments);
            task.log(tp.getMessage(), Project.MSG_DEBUG);
        }
    }

    @Override
    public void debug(String msg, Throwable t) {
        if (task != null) {
            task.log(msg, t, Project.MSG_DEBUG);
        }
    }

    @Override
    public boolean isInfoEnabled() {
        return true;
    }

    @Override
    public void info(String msg) {
        if (task != null) {
            task.log(msg, Project.MSG_INFO);
        }
    }

    @Override
    public void info(String format, Object arg) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arg);
            task.log(tp.getMessage(), Project.MSG_INFO);
        }
    }

    @Override
    public void info(String format, Object arg1, Object arg2) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arg1, arg2);
            task.log(tp.getMessage(), Project.MSG_INFO);
        }
    }

    @Override
    public void info(String format, Object... arguments) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arguments);
            task.log(tp.getMessage(), Project.MSG_INFO);
        }
    }

    @Override
    public void info(String msg, Throwable t) {
        if (task != null) {
            task.log(msg, t, Project.MSG_INFO);
        }
    }

    @Override
    public boolean isWarnEnabled() {
        return true;
    }

    @Override
    public void warn(String msg) {
        if (task != null) {
            task.log(msg, Project.MSG_WARN);
        }
    }

    @Override
    public void warn(String format, Object arg) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arg);
            task.log(tp.getMessage(), Project.MSG_WARN);
        }
    }

    @Override
    public void warn(String format, Object... arguments) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arguments);
            task.log(tp.getMessage(), Project.MSG_WARN);
        }
    }

    @Override
    public void warn(String format, Object arg1, Object arg2) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arg1, arg2);
            task.log(tp.getMessage(), Project.MSG_WARN);
        }
    }

    @Override
    public void warn(String msg, Throwable t) {
        if (task != null) {
            task.log(msg, t, Project.MSG_WARN);
        }
    }

    @Override
    public boolean isErrorEnabled() {
        return true;
    }

    @Override
    public void error(String msg) {
        if (task != null) {
            task.log(msg, Project.MSG_ERR);
        }
    }

    @Override
    public void error(String format, Object arg) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arg);
            task.log(tp.getMessage(), Project.MSG_ERR);
        }
    }

    @Override
    public void error(String format, Object arg1, Object arg2) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arg1, arg2);
            task.log(tp.getMessage(), Project.MSG_ERR);
        }
    }

    @Override
    public void error(String format, Object... arguments) {
        if (task != null) {
            final FormattingTuple tp = MessageFormatter.format(format, arguments);
            task.log(tp.getMessage(), Project.MSG_ERR);
        }
    }

    @Override
    public void error(String msg, Throwable t) {
        if (task != null) {
            task.log(msg, t, Project.MSG_ERR);
        }
    }
}
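The adapter above is easiest to understand from the caller's side. The following sketch is not part of the diff: it is a minimal, illustrative driver that assumes only the Ant and SLF4J classes the adapter already imports, plus Ant's DefaultLogger so the forwarded messages are actually printed. It shows how each SLF4J-style call lands on the owning task's log with the matching Ant priority.

```java
import org.apache.tools.ant.DefaultLogger;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.owasp.dependencycheck.ant.logging.AntLoggerAdapter;

public class AntLoggerAdapterDemo {
    public static void main(String[] args) {
        // A project with a console listener so the forwarded messages become visible.
        Project project = new Project();
        DefaultLogger console = new DefaultLogger();
        console.setOutputPrintStream(System.out);
        console.setErrorPrintStream(System.err);
        console.setMessageOutputLevel(Project.MSG_DEBUG);
        project.addBuildListener(console);

        // Any Task subclass works; the adapter only needs the task's log(...) methods.
        Task task = new Task() { };
        task.setProject(project);

        AntLoggerAdapter log = new AntLoggerAdapter(task);
        log.trace("entering analyzer");                      // forwarded as Project.MSG_VERBOSE
        log.debug("cache miss for {}", "foo.jar");           // formatted, then Project.MSG_DEBUG
        log.info("scanned {} dependencies", 42);             // Project.MSG_INFO
        log.warn("NVD data may be stale");                   // Project.MSG_WARN
        log.error("update failed", new RuntimeException());  // Project.MSG_ERR, with the Throwable
    }
}
```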
@@ -0,0 +1,56 @@
/*
 * This file is part of dependency-check-ant.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
 */
package org.owasp.dependencycheck.ant.logging;

import org.apache.tools.ant.Task;
import org.slf4j.ILoggerFactory;
import org.slf4j.Logger;

/**
 * An implementation of {@link org.slf4j.ILoggerFactory} which always returns {@link AntLoggerAdapter} instances.
 *
 * @author colezlaw
 */
public class AntLoggerFactory implements ILoggerFactory {

    /**
     * A reference to the Ant logger Adapter.
     */
    private final AntLoggerAdapter antLoggerAdapter;

    /**
     * Constructs a new Ant Logger Factory.
     *
     * @param task the Ant task to use for logging
     */
    public AntLoggerFactory(Task task) {
        super();
        this.antLoggerAdapter = new AntLoggerAdapter(task);
    }

    /**
     * Returns the Ant logger adapter.
     *
     * @param name ignored in this implementation
     * @return the Ant logger adapter
     */
    @Override
    public Logger getLogger(String name) {
        return antLoggerAdapter;
    }
}
@@ -0,0 +1,4 @@
|
||||
/**
|
||||
 * This package includes the slf4j logging implementation that wraps the Ant logger.
|
||||
*/
|
||||
package org.owasp.dependencycheck.ant.logging;
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,170 @@
|
||||
/*
|
||||
* This file is part of dependency-check-ant.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2015 Jeremy Long. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.taskdefs;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import org.apache.tools.ant.BuildException;
|
||||
import org.apache.tools.ant.Project;
|
||||
import org.apache.tools.ant.Task;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.impl.StaticLoggerBinder;
|
||||
|
||||
/**
|
||||
 * An Ant task definition to purge the local copy of the NVD data maintained by dependency-check.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class Purge extends Task {
|
||||
|
||||
/**
|
||||
* The properties file location.
|
||||
*/
|
||||
private static final String PROPERTIES_FILE = "task.properties";
|
||||
|
||||
/**
|
||||
 * Construct a new Purge task.
|
||||
*/
|
||||
public Purge() {
|
||||
super();
|
||||
// Call this before Dependency Check Core starts logging anything - this way, all SLF4J messages from
|
||||
// core end up coming through this task's logger
|
||||
StaticLoggerBinder.getSingleton().setTask(this);
|
||||
}
|
||||
|
||||
/**
|
||||
 * The location of the data directory that contains the local copy of the NVD.
|
||||
*/
|
||||
private String dataDirectory = null;
|
||||
|
||||
/**
|
||||
* Get the value of dataDirectory.
|
||||
*
|
||||
* @return the value of dataDirectory
|
||||
*/
|
||||
public String getDataDirectory() {
|
||||
return dataDirectory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of dataDirectory.
|
||||
*
|
||||
* @param dataDirectory new value of dataDirectory
|
||||
*/
|
||||
public void setDataDirectory(String dataDirectory) {
|
||||
this.dataDirectory = dataDirectory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicates if dependency-check should fail the build if an exception
|
||||
* occurs.
|
||||
*/
|
||||
private boolean failOnError = true;
|
||||
|
||||
/**
|
||||
* Get the value of failOnError.
|
||||
*
|
||||
* @return the value of failOnError
|
||||
*/
|
||||
public boolean isFailOnError() {
|
||||
return failOnError;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of failOnError.
|
||||
*
|
||||
* @param failOnError new value of failOnError
|
||||
*/
|
||||
public void setFailOnError(boolean failOnError) {
|
||||
this.failOnError = failOnError;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void execute() throws BuildException {
|
||||
populateSettings();
|
||||
File db;
|
||||
try {
|
||||
db = new File(Settings.getDataDirectory(), "dc.h2.db");
|
||||
if (db.exists()) {
|
||||
if (db.delete()) {
|
||||
log("Database file purged; local copy of the NVD has been removed", Project.MSG_INFO);
|
||||
} else {
|
||||
final String msg = String.format("Unable to delete '%s'; please delete the file manually", db.getAbsolutePath());
|
||||
if (this.failOnError) {
|
||||
throw new BuildException(msg);
|
||||
}
|
||||
log(msg, Project.MSG_ERR);
|
||||
}
|
||||
} else {
|
||||
final String msg = String.format("Unable to purge database; the database file does not exists: %s", db.getAbsolutePath());
|
||||
if (this.failOnError) {
|
||||
throw new BuildException(msg);
|
||||
}
|
||||
log(msg, Project.MSG_ERR);
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
final String msg = "Unable to delete the database";
|
||||
if (this.failOnError) {
|
||||
throw new BuildException(msg);
|
||||
}
|
||||
log(msg, Project.MSG_ERR);
|
||||
} finally {
|
||||
Settings.cleanup(true);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes the properties supplied and updates the dependency-check settings.
|
||||
* Additionally, this sets the system properties required to change the
|
||||
* proxy server, port, and connection timeout.
|
||||
*
|
||||
* @throws BuildException thrown if the properties file cannot be read.
|
||||
*/
|
||||
protected void populateSettings() throws BuildException {
|
||||
Settings.initialize();
|
||||
InputStream taskProperties = null;
|
||||
try {
|
||||
taskProperties = this.getClass().getClassLoader().getResourceAsStream(PROPERTIES_FILE);
|
||||
Settings.mergeProperties(taskProperties);
|
||||
} catch (IOException ex) {
|
||||
final String msg = "Unable to load the dependency-check ant task.properties file.";
|
||||
if (this.failOnError) {
|
||||
throw new BuildException(msg, ex);
|
||||
}
|
||||
log(msg, ex, Project.MSG_WARN);
|
||||
} finally {
|
||||
if (taskProperties != null) {
|
||||
try {
|
||||
taskProperties.close();
|
||||
} catch (IOException ex) {
|
||||
log("", ex, Project.MSG_DEBUG);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (dataDirectory != null) {
|
||||
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
|
||||
} else {
|
||||
final File jarPath = new File(Purge.class.getProtectionDomain().getCodeSource().getLocation().getPath());
|
||||
final File base = jarPath.getParentFile();
|
||||
final String sub = Settings.getString(Settings.KEYS.DATA_DIRECTORY);
|
||||
final File dataDir = new File(base, sub);
|
||||
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,453 @@
|
||||
/*
|
||||
* This file is part of dependency-check-ant.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2015 Jeremy Long. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.taskdefs;
|
||||
|
||||
import org.apache.tools.ant.BuildException;
|
||||
import org.apache.tools.ant.Project;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.data.update.exception.UpdateException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.impl.StaticLoggerBinder;
|
||||
|
||||
/**
|
||||
* An Ant task definition to execute dependency-check update. This will download
|
||||
* the latest data from the National Vulnerability Database (NVD) and store a
|
||||
* copy in the local database.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class Update extends Purge {
|
||||
|
||||
/**
|
||||
 * Construct a new Update task.
|
||||
*/
|
||||
public Update() {
|
||||
super();
|
||||
// Call this before Dependency Check Core starts logging anything - this way, all SLF4J messages from
|
||||
// core end up coming through this task's logger
|
||||
StaticLoggerBinder.getSingleton().setTask(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* The Proxy Server.
|
||||
*/
|
||||
private String proxyServer;
|
||||
|
||||
/**
|
||||
* Get the value of proxyServer.
|
||||
*
|
||||
* @return the value of proxyServer
|
||||
*/
|
||||
public String getProxyServer() {
|
||||
return proxyServer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of proxyServer.
|
||||
*
|
||||
* @param server new value of proxyServer
|
||||
*/
|
||||
public void setProxyServer(String server) {
|
||||
this.proxyServer = server;
|
||||
}
|
||||
|
||||
/**
|
||||
* The Proxy Port.
|
||||
*/
|
||||
private String proxyPort;
|
||||
|
||||
/**
|
||||
* Get the value of proxyPort.
|
||||
*
|
||||
* @return the value of proxyPort
|
||||
*/
|
||||
public String getProxyPort() {
|
||||
return proxyPort;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of proxyPort.
|
||||
*
|
||||
* @param proxyPort new value of proxyPort
|
||||
*/
|
||||
public void setProxyPort(String proxyPort) {
|
||||
this.proxyPort = proxyPort;
|
||||
}
|
||||
/**
|
||||
* The Proxy username.
|
||||
*/
|
||||
private String proxyUsername;
|
||||
|
||||
/**
|
||||
* Get the value of proxyUsername.
|
||||
*
|
||||
* @return the value of proxyUsername
|
||||
*/
|
||||
public String getProxyUsername() {
|
||||
return proxyUsername;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of proxyUsername.
|
||||
*
|
||||
* @param proxyUsername new value of proxyUsername
|
||||
*/
|
||||
public void setProxyUsername(String proxyUsername) {
|
||||
this.proxyUsername = proxyUsername;
|
||||
}
|
||||
/**
|
||||
* The Proxy password.
|
||||
*/
|
||||
private String proxyPassword;
|
||||
|
||||
/**
|
||||
* Get the value of proxyPassword.
|
||||
*
|
||||
* @return the value of proxyPassword
|
||||
*/
|
||||
public String getProxyPassword() {
|
||||
return proxyPassword;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of proxyPassword.
|
||||
*
|
||||
* @param proxyPassword new value of proxyPassword
|
||||
*/
|
||||
public void setProxyPassword(String proxyPassword) {
|
||||
this.proxyPassword = proxyPassword;
|
||||
}
|
||||
/**
|
||||
* The Connection Timeout.
|
||||
*/
|
||||
private String connectionTimeout;
|
||||
|
||||
/**
|
||||
* Get the value of connectionTimeout.
|
||||
*
|
||||
* @return the value of connectionTimeout
|
||||
*/
|
||||
public String getConnectionTimeout() {
|
||||
return connectionTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of connectionTimeout.
|
||||
*
|
||||
* @param connectionTimeout new value of connectionTimeout
|
||||
*/
|
||||
public void setConnectionTimeout(String connectionTimeout) {
|
||||
this.connectionTimeout = connectionTimeout;
|
||||
}
|
||||
/**
|
||||
* The database driver name; such as org.h2.Driver.
|
||||
*/
|
||||
private String databaseDriverName;
|
||||
|
||||
/**
|
||||
* Get the value of databaseDriverName.
|
||||
*
|
||||
* @return the value of databaseDriverName
|
||||
*/
|
||||
public String getDatabaseDriverName() {
|
||||
return databaseDriverName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of databaseDriverName.
|
||||
*
|
||||
* @param databaseDriverName new value of databaseDriverName
|
||||
*/
|
||||
public void setDatabaseDriverName(String databaseDriverName) {
|
||||
this.databaseDriverName = databaseDriverName;
|
||||
}
|
||||
|
||||
/**
|
||||
* The path to the database driver JAR file if it is not on the class path.
|
||||
*/
|
||||
private String databaseDriverPath;
|
||||
|
||||
/**
|
||||
* Get the value of databaseDriverPath.
|
||||
*
|
||||
* @return the value of databaseDriverPath
|
||||
*/
|
||||
public String getDatabaseDriverPath() {
|
||||
return databaseDriverPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of databaseDriverPath.
|
||||
*
|
||||
* @param databaseDriverPath new value of databaseDriverPath
|
||||
*/
|
||||
public void setDatabaseDriverPath(String databaseDriverPath) {
|
||||
this.databaseDriverPath = databaseDriverPath;
|
||||
}
|
||||
/**
|
||||
* The database connection string.
|
||||
*/
|
||||
private String connectionString;
|
||||
|
||||
/**
|
||||
* Get the value of connectionString.
|
||||
*
|
||||
* @return the value of connectionString
|
||||
*/
|
||||
public String getConnectionString() {
|
||||
return connectionString;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of connectionString.
|
||||
*
|
||||
* @param connectionString new value of connectionString
|
||||
*/
|
||||
public void setConnectionString(String connectionString) {
|
||||
this.connectionString = connectionString;
|
||||
}
|
||||
/**
|
||||
* The user name for connecting to the database.
|
||||
*/
|
||||
private String databaseUser;
|
||||
|
||||
/**
|
||||
* Get the value of databaseUser.
|
||||
*
|
||||
* @return the value of databaseUser
|
||||
*/
|
||||
public String getDatabaseUser() {
|
||||
return databaseUser;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of databaseUser.
|
||||
*
|
||||
* @param databaseUser new value of databaseUser
|
||||
*/
|
||||
public void setDatabaseUser(String databaseUser) {
|
||||
this.databaseUser = databaseUser;
|
||||
}
|
||||
|
||||
/**
|
||||
* The password to use when connecting to the database.
|
||||
*/
|
||||
private String databasePassword;
|
||||
|
||||
/**
|
||||
* Get the value of databasePassword.
|
||||
*
|
||||
* @return the value of databasePassword
|
||||
*/
|
||||
public String getDatabasePassword() {
|
||||
return databasePassword;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of databasePassword.
|
||||
*
|
||||
* @param databasePassword new value of databasePassword
|
||||
*/
|
||||
public void setDatabasePassword(String databasePassword) {
|
||||
this.databasePassword = databasePassword;
|
||||
}
|
||||
|
||||
/**
|
||||
* The url for the modified NVD CVE (1.2 schema).
|
||||
*/
|
||||
private String cveUrl12Modified;
|
||||
|
||||
/**
|
||||
* Get the value of cveUrl12Modified.
|
||||
*
|
||||
* @return the value of cveUrl12Modified
|
||||
*/
|
||||
public String getCveUrl12Modified() {
|
||||
return cveUrl12Modified;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of cveUrl12Modified.
|
||||
*
|
||||
* @param cveUrl12Modified new value of cveUrl12Modified
|
||||
*/
|
||||
public void setCveUrl12Modified(String cveUrl12Modified) {
|
||||
this.cveUrl12Modified = cveUrl12Modified;
|
||||
}
|
||||
|
||||
/**
|
||||
* The url for the modified NVD CVE (2.0 schema).
|
||||
*/
|
||||
private String cveUrl20Modified;
|
||||
|
||||
/**
|
||||
* Get the value of cveUrl20Modified.
|
||||
*
|
||||
* @return the value of cveUrl20Modified
|
||||
*/
|
||||
public String getCveUrl20Modified() {
|
||||
return cveUrl20Modified;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of cveUrl20Modified.
|
||||
*
|
||||
* @param cveUrl20Modified new value of cveUrl20Modified
|
||||
*/
|
||||
public void setCveUrl20Modified(String cveUrl20Modified) {
|
||||
this.cveUrl20Modified = cveUrl20Modified;
|
||||
}
|
||||
|
||||
/**
|
||||
* Base Data Mirror URL for CVE 1.2.
|
||||
*/
|
||||
private String cveUrl12Base;
|
||||
|
||||
/**
|
||||
* Get the value of cveUrl12Base.
|
||||
*
|
||||
* @return the value of cveUrl12Base
|
||||
*/
|
||||
public String getCveUrl12Base() {
|
||||
return cveUrl12Base;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of cveUrl12Base.
|
||||
*
|
||||
* @param cveUrl12Base new value of cveUrl12Base
|
||||
*/
|
||||
public void setCveUrl12Base(String cveUrl12Base) {
|
||||
this.cveUrl12Base = cveUrl12Base;
|
||||
}
|
||||
|
||||
/**
|
||||
* Data Mirror URL for CVE 2.0.
|
||||
*/
|
||||
private String cveUrl20Base;
|
||||
|
||||
/**
|
||||
* Get the value of cveUrl20Base.
|
||||
*
|
||||
* @return the value of cveUrl20Base
|
||||
*/
|
||||
public String getCveUrl20Base() {
|
||||
return cveUrl20Base;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of cveUrl20Base.
|
||||
*
|
||||
* @param cveUrl20Base new value of cveUrl20Base
|
||||
*/
|
||||
public void setCveUrl20Base(String cveUrl20Base) {
|
||||
this.cveUrl20Base = cveUrl20Base;
|
||||
}
|
||||
|
||||
/**
|
||||
* The number of hours to wait before re-checking for updates.
|
||||
*/
|
||||
private Integer cveValidForHours;
|
||||
|
||||
/**
|
||||
* Get the value of cveValidForHours.
|
||||
*
|
||||
* @return the value of cveValidForHours
|
||||
*/
|
||||
public Integer getCveValidForHours() {
|
||||
return cveValidForHours;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of cveValidForHours.
|
||||
*
|
||||
* @param cveValidForHours new value of cveValidForHours
|
||||
*/
|
||||
public void setCveValidForHours(Integer cveValidForHours) {
|
||||
this.cveValidForHours = cveValidForHours;
|
||||
}
|
||||
|
||||
/**
|
||||
 * Executes the update by initializing the settings, downloading the NVD XML
|
||||
 * data, and then processing and storing the data in the local database.
|
||||
*
|
||||
* @throws BuildException thrown if a connection to the local database
|
||||
* cannot be made.
|
||||
*/
|
||||
@Override
|
||||
public void execute() throws BuildException {
|
||||
populateSettings();
|
||||
Engine engine = null;
|
||||
try {
|
||||
engine = new Engine(Update.class.getClassLoader());
|
||||
try {
|
||||
engine.doUpdates();
|
||||
} catch (UpdateException ex) {
|
||||
if (this.isFailOnError()) {
|
||||
throw new BuildException(ex);
|
||||
}
|
||||
log(ex.getMessage(), Project.MSG_ERR);
|
||||
}
|
||||
} catch (DatabaseException ex) {
|
||||
final String msg = "Unable to connect to the dependency-check database; unable to update the NVD data";
|
||||
if (this.isFailOnError()) {
|
||||
throw new BuildException(msg, ex);
|
||||
}
|
||||
log(msg, Project.MSG_ERR);
|
||||
} finally {
|
||||
Settings.cleanup(true);
|
||||
if (engine != null) {
|
||||
engine.cleanup();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes the properties supplied and updates the dependency-check settings.
|
||||
* Additionally, this sets the system properties required to change the
|
||||
* proxy server, port, and connection timeout.
|
||||
*
|
||||
* @throws BuildException thrown when an invalid setting is configured.
|
||||
*/
|
||||
@Override
|
||||
protected void populateSettings() throws BuildException {
|
||||
super.populateSettings();
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
|
||||
if (cveValidForHours != null) {
|
||||
if (cveValidForHours >= 0) {
|
||||
Settings.setInt(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
|
||||
} else {
|
||||
throw new BuildException("Invalid setting: `cpeValidForHours` must be 0 or greater");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
/**
|
||||
* This package includes the Ant task definitions.
|
||||
 * This package includes the slf4j logging implementation that wraps the Ant logger.
|
||||
*/
|
||||
package org.owasp.dependencycheck.taskdefs;
|
||||
|
||||
@@ -0,0 +1,114 @@
|
||||
/*
|
||||
* This file is part of dependency-check-ant.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
|
||||
*/
|
||||
package org.slf4j.impl;
|
||||
|
||||
import org.apache.tools.ant.Task;
|
||||
import org.owasp.dependencycheck.ant.logging.AntLoggerFactory;
|
||||
import org.slf4j.ILoggerFactory;
|
||||
import org.slf4j.spi.LoggerFactoryBinder;
|
||||
|
||||
/**
|
||||
* The binding of org.slf4j.LoggerFactory class with an actual instance of
|
||||
* org.slf4j.ILoggerFactory is performed using information returned by this
|
||||
* class.
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
//CSOFF: FinalClass
|
||||
public class StaticLoggerBinder implements LoggerFactoryBinder {
|
||||
//CSON: FinalClass
|
||||
|
||||
/**
|
||||
* The unique instance of this class
|
||||
*/
|
||||
private static final StaticLoggerBinder SINGLETON = new StaticLoggerBinder();
|
||||
|
||||
/**
|
||||
* Return the singleton of this class.
|
||||
*
|
||||
* @return the StaticLoggerBinder singleton
|
||||
*/
|
||||
public static final StaticLoggerBinder getSingleton() {
|
||||
return SINGLETON;
|
||||
}
|
||||
|
||||
/**
|
||||
* Ant tasks have the log method we actually want to call. So we hang onto
|
||||
 * the task as a delegate.
|
||||
*/
|
||||
private Task task = null;
|
||||
|
||||
/**
|
||||
 * Set the Task through which this logger will log.
|
||||
*
|
||||
* @param task the task through which to log
|
||||
*/
|
||||
public void setTask(Task task) {
|
||||
this.task = task;
|
||||
loggerFactory = new AntLoggerFactory(task);
|
||||
}
|
||||
|
||||
/**
|
||||
* Declare the version of the SLF4J API this implementation is compiled
|
||||
 * against. The value of this field is usually modified with each release.
|
||||
*/
|
||||
// to avoid constant folding by the compiler, this field must *not* be final
|
||||
//CSOFF: StaticVariableName
|
||||
//CSOFF: VisibilityModifier
|
||||
public static String REQUESTED_API_VERSION = "1.7.12"; // final
|
||||
//CSON: VisibilityModifier
|
||||
//CSON: StaticVariableName
|
||||
|
||||
/**
|
||||
* The logger factory class string.
|
||||
*/
|
||||
private static final String LOGGER_FACTORY_CLASS = AntLoggerFactory.class.getName();
|
||||
|
||||
/**
|
||||
* The ILoggerFactory instance returned by the {@link #getLoggerFactory}
|
||||
 * method should always be the same object.
|
||||
*/
|
||||
private ILoggerFactory loggerFactory;
|
||||
|
||||
/**
|
||||
* Constructs a new static logger binder.
|
||||
*/
|
||||
private StaticLoggerBinder() {
|
||||
loggerFactory = new AntLoggerFactory(task);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the logger factory.
|
||||
*
|
||||
* @return the logger factory
|
||||
*/
|
||||
@Override
|
||||
public ILoggerFactory getLoggerFactory() {
|
||||
return loggerFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the logger factory class string.
|
||||
*
|
||||
* @return the logger factory class string
|
||||
*/
|
||||
@Override
|
||||
public String getLoggerFactoryClassStr() {
|
||||
return LOGGER_FACTORY_CLASS;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,4 @@
|
||||
/**
|
||||
* This package contains the static binder for the slf4j-ant logger.
|
||||
*/
|
||||
package org.slf4j.impl;
|
||||
@@ -0,0 +1,3 @@
|
||||
dependency-check=org.owasp.dependencycheck.taskdefs.Check
|
||||
dependency-check-purge=org.owasp.dependencycheck.taskdefs.Purge
|
||||
dependency-check-update=org.owasp.dependencycheck.taskdefs.Update
|
||||
@@ -1,23 +0,0 @@
|
||||
handlers=java.util.logging.ConsoleHandler, java.util.logging.FileHandler
|
||||
|
||||
# logging levels
|
||||
# FINEST, FINER, FINE, CONFIG, INFO, WARNING and SEVERE.
|
||||
|
||||
# Configure the ConsoleHandler.
|
||||
java.util.logging.ConsoleHandler.level=INFO
|
||||
|
||||
#org.owasp.dependencycheck.data.nvdcve.xml
|
||||
|
||||
# Configure the FileHandler.
|
||||
#java.util.logging.FileHandler.formatter=java.util.logging.SimpleFormatter
|
||||
#java.util.logging.FileHandler.level=FINEST
|
||||
|
||||
# The following special tokens can be used in the pattern property
|
||||
# which specifies the location and name of the log file.
|
||||
# / - standard path separator
|
||||
# %t - system temporary directory
|
||||
# %h - value of the user.home system property
|
||||
# %g - generation number for rotating logs
|
||||
# %u - unique number to avoid conflicts
|
||||
# FileHandler writes to %h/demo0.log by default.
|
||||
#java.util.logging.FileHandler.pattern=./target/dependency-check.log
|
||||
@@ -1,2 +1,2 @@
|
||||
# the path to the data directory
|
||||
data.directory=dependency-check-data
|
||||
data.directory=data/3.0
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
# define custom tasks here
|
||||
|
||||
dependencycheck=org.owasp.dependencycheck.taskdefs.DependencyCheckTask
|
||||
dependency-check-ant/src/site/markdown/config-purge.md
@@ -0,0 +1,20 @@
|
||||
Configuration
|
||||
====================
|
||||
The dependency-check-purge task deletes the local copy of the NVD. This task
|
||||
should rarely be used, if ever. This is included as a convenience method in
|
||||
the rare circumstance that the local H2 database becomes corrupt.
|
||||
|
||||
```xml
|
||||
<target name="dependency-check-purge" description="Dependency-Check purge">
|
||||
<dependency-check-purge />
|
||||
</target>
|
||||
```
|
||||
|
||||
Configuration: dependency-check-purge Task
|
||||
--------------------
|
||||
The following properties can be set on the dependency-check-purge task.
|
||||
|
||||
Property | Description | Default Value
|
||||
----------------------|------------------------------------------------------------------------|------------------
|
||||
dataDirectory | Data directory that is used to store the local copy of the NVD | data
|
||||
failOnError | Whether the build should fail if there is an error executing the purge | true
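
As a minimal sketch, a purge target that sets both properties might look like the following; the data directory path shown is only illustrative:

```xml
<target name="dependency-check-purge" description="Dependency-Check purge">
    <!-- dataDirectory and failOnError are optional; the path below is an example -->
    <dependency-check-purge dataDirectory="${basedir}/dependency-check-data"
                            failOnError="false"/>
</target>
```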
|
||||
dependency-check-ant/src/site/markdown/config-update.md
@@ -0,0 +1,45 @@
|
||||
Configuration
|
||||
====================
|
||||
The dependency-check-update task downloads and updates the local copy of the NVD.
|
||||
There are several reasons that one may want to use this task; primarily, creating
|
||||
an update task that runs only once a day or once every few days (but not more
|
||||
then 7 days) and then use the `autoUpdate="false"` setting on individual
|
||||
dependency-check scans. See [Internet Access Required](https://jeremylong.github.io/DependencyCheck/data/index.html)
|
||||
for more information on why this task would be used.
|
||||
|
||||
```xml
|
||||
<target name="dependency-check-update" description="Dependency-Check Update">
|
||||
<dependency-check-update />
|
||||
</target>
|
||||
```
|
||||
|
||||
Configuration: dependency-check-update Task
|
||||
--------------------
|
||||
The following properties can be set on the dependency-check-update task.
|
||||
|
||||
Property | Description | Default Value
|
||||
----------------------|------------------------------------|------------------
|
||||
proxyServer | The Proxy Server. |
|
||||
proxyPort | The Proxy Port. |
|
||||
proxyUsername | Defines the proxy user name. |
|
||||
proxyPassword | Defines the proxy password. |
|
||||
connectionTimeout | The URL Connection Timeout. |
|
||||
failOnError | Whether the build should fail if there is an error executing the update | true
|
||||
|
||||
Advanced Configuration
|
||||
====================
|
||||
The following properties can be configured on the dependency-check-update task. However, they are less frequently changed. One exception
|
||||
may be the cveUrl properties, which can be used to host a mirror of the NVD within an enterprise environment.
|
||||
|
||||
Property | Description | Default Value
|
||||
---------------------|-------------------------------------------------------------------------------------------------------|------------------
|
||||
cveUrl12Modified | URL for the modified CVE 1.2. | https://nvd.nist.gov/download/nvdcve-Modified.xml.gz
|
||||
cveUrl20Modified | URL for the modified CVE 2.0. | https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-Modified.xml.gz
|
||||
cveUrl12Base | Base URL for each year's CVE 1.2, the %d will be replaced with the year. | https://nvd.nist.gov/download/nvdcve-%d.xml.gz
|
||||
cveUrl20Base | Base URL for each year's CVE 2.0, the %d will be replaced with the year. | https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml.gz
|
||||
dataDirectory | Data directory that is used to store the local copy of the NVD. This should generally not be changed. | data
|
||||
databaseDriverName | The name of the database driver. Example: org.h2.Driver. |
|
||||
databaseDriverPath | The path to the database driver JAR file; only used if the driver is not in the class path. |
|
||||
connectionString | The connection string used to connect to the database. |
|
||||
databaseUser | The username used when connecting to the database. |
|
||||
databasePassword | The password used when connecting to the database. |
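
As a sketch, an update target that points the task at an internal NVD mirror through a proxy might look like the following; `nvd.example.internal` and `proxy.example.internal` are placeholder host names:

```xml
<target name="dependency-check-update" description="Dependency-Check Update">
    <!-- the host names below are placeholders for an enterprise mirror and proxy -->
    <dependency-check-update
        proxyServer="proxy.example.internal"
        proxyPort="8080"
        cveUrl12Modified="https://nvd.example.internal/download/nvdcve-Modified.xml.gz"
        cveUrl20Modified="https://nvd.example.internal/feeds/xml/cve/nvdcve-2.0-Modified.xml.gz"
        cveUrl12Base="https://nvd.example.internal/download/nvdcve-%d.xml.gz"
        cveUrl20Base="https://nvd.example.internal/feeds/xml/cve/nvdcve-2.0-%d.xml.gz"/>
</target>
```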
|
||||
@@ -1,5 +1,11 @@
|
||||
Configuration
|
||||
====================
|
||||
Once dependency-check-ant has been [installed](index.html) the defined tasks can be used.
|
||||
|
||||
* dependency-check - the primary task used to check the project dependencies. Configuration options are below.
|
||||
* dependency-check-purge - deletes the local copy of the NVD; this should rarely be used (if ever). See the [purge configuration](config-purge.html) for more information.
|
||||
* dependency-check-update - downloads and updates the local copy of the NVD. See the [update configuration](config-update.html) for more information.
|
||||
|
||||
To configure the dependency-check task you can add it to a target and include a
|
||||
file based [resource collection](http://ant.apache.org/manual/Types/resources.html#collection)
|
||||
such as a [FileSet](http://ant.apache.org/manual/Types/fileset.html), [DirSet](http://ant.apache.org/manual/Types/dirset.html),
|
||||
@@ -8,7 +14,7 @@ the project's dependencies.
|
||||
|
||||
```xml
|
||||
<target name="dependency-check" description="Dependency-Check Analysis">
|
||||
<dependency-check applicationname="Hello World"
|
||||
<dependency-check projectname="Hello World"
|
||||
reportoutputdirectory="${basedir}"
|
||||
reportformat="ALL">
|
||||
|
||||
@@ -19,25 +25,27 @@ the project's dependencies.
|
||||
</target>
|
||||
```
|
||||
|
||||
Configuration
|
||||
====================
|
||||
The following properties can be set on the dependency-check-maven plugin.
|
||||
Configuration: dependency-check Task
|
||||
--------------------
|
||||
The following properties can be set on the dependency-check task.
|
||||
|
||||
Property | Description | Default Value
|
||||
---------------------|------------------------------------|------------------
|
||||
autoUpdate | Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not recommended that this be turned to false. | true
|
||||
updateOnly | If set to true only the update phase of dependency-check will be executed; no scan will be executed and no report will be generated. | false
|
||||
externalReport | When using as a Site plugin this parameter sets whether or not the external report format should be used. | false
|
||||
outputDirectory | The location to write the report(s). Note, this is not used if generating the report as part of a `mvn site` build | 'target'
|
||||
failBuildOnCVSS | Specifies if the build should be failed if a CVSS score above a specified level is identified. The default is 11 which means since the CVSS scores are 0-10, by default the build will never fail. | 11
|
||||
format | The report format to be generated (HTML, XML, VULN, ALL). This configuration option has no affect if using this within the Site plugin unless the externalReport is set to true. | HTML
|
||||
logFile | The file path to write verbose logging information. |
|
||||
suppressionFile | The file path to the XML suppression file \- used to suppress [false positives](../suppression.html) |
|
||||
proxyServer | The Proxy Server. |
|
||||
proxyPort | The Proxy Port. |
|
||||
proxyUsername | Defines the proxy user name. |
|
||||
proxyPassword | Defines the proxy password. |
|
||||
connectionTimeout | The URL Connection Timeout. |
|
||||
Property | Description | Default Value
|
||||
----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------
|
||||
autoUpdate            | Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not recommended that this be set to false. | true
|
||||
cveValidForHours | Sets the number of hours to wait before checking for new updates from the NVD | 4
|
||||
failBuildOnCVSS       | Specifies if the build should be failed if a CVSS score above a specified level is identified. The default is 11; since CVSS scores range from 0 to 10, the build will never fail by default. | 11
|
||||
failOnError | Whether the build should fail if there is an error executing the dependency-check analysis | true
|
||||
projectName | The name of the project being scanned. | Dependency-Check
|
||||
reportFormat          | The report format to be generated (HTML, XML, VULN, ALL). This configuration option has no effect if using this within the Site plugin unless the externalReport is set to true. | HTML
|
||||
reportOutputDirectory | The location to write the report(s). Note, this is not used if generating the report as part of a `mvn site` build | 'target'
|
||||
suppressionFile | The file path to the XML suppression file \- used to suppress [false positives](../general/suppression.html) |
|
||||
hintsFile | The file path to the XML hints file \- used to resolve [false negatives](../general/hints.html) |
|
||||
proxyServer | The Proxy Server; see the [proxy configuration](../data/proxy.html) page for more information. |
|
||||
proxyPort | The Proxy Port. |
|
||||
proxyUsername | Defines the proxy user name. |
|
||||
proxyPassword | Defines the proxy password. |
|
||||
connectionTimeout | The URL Connection Timeout. |
|
||||
enableExperimental | Enable the [experimental analyzers](../analyzers/index.html). If not enabled the experimental analyzers (see below) will not be loaded or used. | false
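
For example, a target that fails the build when a CVSS score of 8 or higher is found and applies a suppression file might look like the following; `suppression.xml` is a placeholder for your own file:

```xml
<target name="dependency-check" description="Dependency-Check Analysis">
    <!-- suppression.xml is a placeholder; point it at your own suppression file -->
    <dependency-check projectname="Hello World"
                      reportoutputdirectory="${basedir}"
                      reportformat="ALL"
                      failBuildOnCVSS="8"
                      suppressionFile="suppression.xml">
        <fileset dir="lib">
            <include name="**/*.jar"/>
        </fileset>
    </dependency-check>
</target>
```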
|
||||
|
||||
Analyzer Configuration
|
||||
====================
|
||||
@@ -47,31 +55,43 @@ Note, that specific analyzers will automatically disable themselves if no file
|
||||
types that they support are detected - so specifically disabling them may not
|
||||
be needed.
|
||||
|
||||
Property | Description | Default Value
|
||||
------------------------|---------------------------------------------------------------------------|------------------
|
||||
archiveAnalyzerEnabled | Sets whether the Archive Analyzer will be used. | true
|
||||
zipExtensions | A comma-separated list of additional file extensions to be treated like a ZIP file, the contents will be extracted and analyzed. |
|
||||
jarAnalyzer | Sets whether the Jar Analyzer will be used. | true
|
||||
centralAnalyzerEnabled | Sets whether the Central Analyzer will be used. **Disabling this analyzer is not recommended as it could lead to false negatives (e.g. libraries that have vulnerabilities may not be reported correctly).** If this analyzer is being disabled there is a good chance you also want to disable the Nexus Analyzer (see below). | true
|
||||
nexusAnalyzerEnabled | Sets whether Nexus Analyzer will be used. This analyzer is superceded by the Central Analyzer; however, you can configure this to run against a Nexus Pro installation. | true
|
||||
nexusUrl | Defines the Nexus web service endpoint (example http://domain.enterprise/nexus/service/local/). If not set the Nexus Analyzer will be disabled. |
|
||||
nexusUsesProxy | Whether or not the defined proxy should be used when connecting to Nexus. | true
|
||||
nuspecAnalyzerEnabled | Sets whether or not the .NET Nuget Nuspec Analyzer will be used. | true
|
||||
assemblyAnalyzerEnabled | Sets whether or not the .NET Assembly Analyzer should be used. | true
|
||||
pathToMono | The path to Mono for .NET assembly analysis on non-windows systems. |
|
||||
Property | Description | Default Value
|
||||
------------------------------|-----------------------------------------------------------------------------------|------------------
|
||||
archiveAnalyzerEnabled | Sets whether the Archive Analyzer will be used. | true
|
||||
zipExtensions | A comma-separated list of additional file extensions to be treated like a ZIP file, the contents will be extracted and analyzed. |
|
||||
jarAnalyzer | Sets whether the Jar Analyzer will be used. | true
|
||||
centralAnalyzerEnabled | Sets whether the Central Analyzer will be used. **Disabling this analyzer is not recommended as it could lead to false negatives (e.g. libraries that have vulnerabilities may not be reported correctly).** If this analyzer is being disabled there is a good chance you also want to disable the Nexus Analyzer (see below). | true
|
||||
nexusAnalyzerEnabled          | Sets whether Nexus Analyzer will be used. This analyzer is superseded by the Central Analyzer; however, you can configure this to run against a Nexus Pro installation. | true
|
||||
nexusUrl | Defines the Nexus web service endpoint (example http://domain.enterprise/nexus/service/local/). If not set the Nexus Analyzer will be disabled. |
|
||||
nexusUsesProxy | Whether or not the defined proxy should be used when connecting to Nexus. | true
|
||||
pyDistributionAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Python Distribution Analyzer will be used. | true
|
||||
pyPackageAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Python Package Analyzer will be used. | true
|
||||
rubygemsAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Ruby Gemspec Analyzer will be used. | true
|
||||
opensslAnalyzerEnabled | Sets whether the openssl Analyzer should be used. | true
|
||||
cmakeAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) CMake Analyzer should be used. | true
|
||||
autoconfAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) autoconf Analyzer should be used. | true
|
||||
composerAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) PHP Composer Lock File Analyzer should be used. | true
|
||||
nodeAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Node.js Analyzer should be used. | true
|
||||
nuspecAnalyzerEnabled | Sets whether the .NET Nuget Nuspec Analyzer will be used. | true
|
||||
cocoapodsAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Cocoapods Analyzer should be used. | true
|
||||
bundleAuditAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Bundle Audit Analyzer should be used. | true
|
||||
bundleAuditPath | Sets the path to the bundle audit executable; only used if bundle audit analyzer is enabled and experimental analyzers are enabled. |
|
||||
swiftPackageManagerAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Swift Package Manager Analyzer should be used. | true
|
||||
assemblyAnalyzerEnabled | Sets whether the .NET Assembly Analyzer should be used. | true
|
||||
pathToMono | The path to Mono for .NET assembly analysis on non-windows systems. |
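
A minimal sketch of disabling a couple of analyzers while leaving the rest at their defaults:

```xml
<dependency-check projectname="Hello World"
                  reportoutputdirectory="${basedir}"
                  nexusAnalyzerEnabled="false"
                  assemblyAnalyzerEnabled="false">
    <fileset dir="lib">
        <include name="**/*.jar"/>
    </fileset>
</dependency-check>
```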
|
||||
|
||||
Advanced Configuration
|
||||
====================
|
||||
The following properties can be configured on the dependency-check task. However, they are less frequently changed. One exception
|
||||
may be the cveUrl properties, which can be used to host a mirror of the NVD within an enterprise environment.
|
||||
|
||||
Property | Description | Default Value
|
||||
---------------------|-------------------------------------------------------------------------|------------------
|
||||
cveUrl12Modified | URL for the modified CVE 1.2 | http://nvd.nist.gov/download/nvdcve-modified.xml
|
||||
cveUrl20Modified | URL for the modified CVE 2.0 | http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-modified.xml
|
||||
cveUrl12Base | Base URL for each year's CVE 1.2, the %d will be replaced with the year | http://nvd.nist.gov/download/nvdcve-%d.xml
|
||||
cveUrl20Base | Base URL for each year's CVE 2.0, the %d will be replaced with the year | http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml
|
||||
dataDirectory | Data directory to hold SQL CVEs contents. This should generally not be changed. |
|
||||
Property | Description | Default Value
|
||||
---------------------|--------------------------------------------------------------------------|------------------
|
||||
cveUrl12Modified | URL for the modified CVE 1.2. | http://nvd.nist.gov/download/nvdcve-modified.xml
|
||||
cveUrl20Modified | URL for the modified CVE 2.0. | http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-modified.xml
|
||||
cveUrl12Base | Base URL for each year's CVE 1.2, the %d will be replaced with the year. | http://nvd.nist.gov/download/nvdcve-%d.xml
|
||||
cveUrl20Base | Base URL for each year's CVE 2.0, the %d will be replaced with the year. | http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml
|
||||
dataDirectory | Data directory that is used to store the local copy of the NVD. This should generally not be changed. | data
|
||||
databaseDriverName | The name of the database driver. Example: org.h2.Driver. |
|
||||
databaseDriverPath | The path to the database driver JAR file; only used if the driver is not in the class path. |
|
||||
connectionString | The connection string used to connect to the database. |
|
||||
|
||||
@@ -7,23 +7,28 @@ identifiers, and the associated Common Vulnerability and Exposure (CVE) entries.
|
||||
|
||||
Installation
|
||||
====================
|
||||
Download dependency-check-ant from [bintray here](http://dl.bintray.com/jeremy-long/owasp/dependency-check-ant-${project.version}.jar).
|
||||
To install dependency-check-ant place the dependency-check-ant-${project.version}.jar into
|
||||
the lib directory of your Ant installation directory. Once installed you can add
|
||||
the taskdef to your build.xml and add the task to a new or existing target:
|
||||
1. Download dependency-check-ant from [bintray here](http://dl.bintray.com/jeremy-long/owasp/dependency-check-ant-${project.version}-release.zip).
|
||||
2. Unzip the archive
|
||||
3. Add the taskdef to your build.xml:
|
||||
|
||||
```xml
|
||||
<taskdef name="dependency-check" classname="org.owasp.dependencycheck.taskdefs.DependencyCheckTask"/>
|
||||
```
|
||||
```xml
|
||||
<!-- Set the value to the installation directory's path -->
|
||||
<property name="dependency-check.home" value="C:/tools/dependency-check-ant"/>
|
||||
<path id="dependency-check.path">
|
||||
<pathelement location="${dependency-check.home}/dependency-check-ant.jar"/>
|
||||
<fileset dir="${dependency-check.home}/lib">
|
||||
<include name="*.jar"/>
|
||||
</fileset>
|
||||
</path>
|
||||
<taskdef resource="dependency-check-taskdefs.properties">
|
||||
<classpath refid="dependency-check.path" />
|
||||
</taskdef>
|
||||
```
|
||||
4. Use the defined taskdefs:
|
||||
* [dependency-check](configuration.html) - the primary task used to check the project dependencies.
|
||||
* [dependency-check-purge](config-purge.html) - deletes the local copy of the NVD; this should rarely be used (if ever).
|
||||
* [dependency-check-update](config-update.html) - downloads and updates the local copy of the NVD.
|
||||
|
||||
If you do not want to install dependency-check-ant into your ant's lib directory when you define the task def you
|
||||
must add the classpath to the taskdef:
|
||||
|
||||
```xml
|
||||
<taskdef name="dependency-check" classname="org.owasp.dependencycheck.taskdefs.DependencyCheckTask">
|
||||
<classpath path="[path]/[to]/dependency-check-ant-${project.version}.jar"/>
|
||||
</taskdef>
|
||||
```
|
||||
|
||||
It is important to understand that the first time this task is executed it may
|
||||
take 10 minutes or more as it downloads and processes the data from the National
|
||||
|
||||
@@ -1,33 +0,0 @@
|
||||
Usage
|
||||
====================
|
||||
First, add the dependency-check-ant taskdef to your build.xml (see the [installation guide](installation.html)):
|
||||
|
||||
```xml
|
||||
<taskdef name="dependency-check" classname="org.owasp.dependencycheck.taskdefs.DependencyCheckTask"/>
|
||||
```
|
||||
|
||||
Or
|
||||
|
||||
```xml
|
||||
<taskdef name="dependency-check" classname="org.owasp.dependencycheck.taskdefs.DependencyCheckTask">
|
||||
<classpath path="[path]/[to]/dependency-check-ant-${project.version}.jar"/>
|
||||
</taskdef>
|
||||
```
|
||||
|
||||
Next, add the task to a target of your choosing:
|
||||
|
||||
```xml
|
||||
<target name="dependency-check" description="Dependency-Check Analysis">
|
||||
<dependency-check applicationname="Hello World"
|
||||
autoupdate="true"
|
||||
reportoutputdirectory="${basedir}"
|
||||
reportformat="HTML">
|
||||
|
||||
<fileset dir="lib">
|
||||
<include name="**/*.jar"/>
|
||||
</fileset>
|
||||
</dependency-check>
|
||||
</target>
|
||||
```
|
||||
|
||||
See the [configuration guide](configuration.html) for more information.
|
||||
@@ -27,8 +27,7 @@ Copyright (c) 2013 Jeremy Long. All Rights Reserved.
|
||||
<item name="dependency-check" href="../index.html"/>
|
||||
</breadcrumbs>
|
||||
<menu name="Getting Started">
|
||||
<item name="Installation" href="installation.html"/>
|
||||
<item name="Usage" href="usage.html"/>
|
||||
<item name="Installation" href="index.html"/>
|
||||
<item name="Configuration" href="configuration.html"/>
|
||||
</menu>
|
||||
<menu ref="reports" />
|
||||
|
||||
@@ -18,34 +18,41 @@
|
||||
package org.owasp.dependencycheck.taskdefs;
|
||||
|
||||
import java.io.File;
|
||||
import org.apache.tools.ant.BuildFileTest;
|
||||
|
||||
import org.apache.tools.ant.BuildException;
|
||||
import org.apache.tools.ant.BuildFileRule;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.owasp.dependencycheck.data.nvdcve.BaseDBTestCase;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.owasp.dependencycheck.BaseDBTestCase;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class DependencyCheckTaskTest extends BuildFileTest {
|
||||
//TODO: The use of the deprecated class BuildFileTest can possibly
|
||||
//be replaced with BuildFileRule. However, it currently isn't included in the ant-testutil jar.
|
||||
//This should be fixed in ant-testutil 1.9.5, so we can check back once that has been released.
|
||||
//Reference: http://mail-archives.apache.org/mod_mbox/ant-user/201406.mbox/%3C000001cf87ba$8949b690$9bdd23b0$@de%3E
|
||||
public class DependencyCheckTaskTest {
|
||||
|
||||
@Rule
|
||||
public BuildFileRule buildFileRule = new BuildFileRule();
|
||||
|
||||
@Rule
|
||||
public ExpectedException expectedException = ExpectedException.none();
|
||||
|
||||
@Before
|
||||
@Override
|
||||
public void setUp() throws Exception {
|
||||
Settings.initialize();
|
||||
BaseDBTestCase.ensureDBExists();
|
||||
final String buildFile = this.getClass().getClassLoader().getResource("build.xml").getPath();
|
||||
configureProject(buildFile);
|
||||
buildFileRule.configureProject(buildFile);
|
||||
}
|
||||
|
||||
@After
|
||||
@Override
|
||||
public void tearDown() {
|
||||
//no cleanup...
|
||||
//executeTarget("cleanup");
|
||||
@@ -63,7 +70,7 @@ public class DependencyCheckTaskTest extends BuildFileTest {
|
||||
throw new Exception("Unable to delete 'target/DependencyCheck-Report.html' prior to test.");
|
||||
}
|
||||
}
|
||||
executeTarget("test.fileset");
|
||||
buildFileRule.executeTarget("test.fileset");
|
||||
|
||||
assertTrue("DependencyCheck report was not generated", report.exists());
|
||||
|
||||
@@ -82,7 +89,7 @@ public class DependencyCheckTaskTest extends BuildFileTest {
|
||||
throw new Exception("Unable to delete 'target/DependencyCheck-Report.xml' prior to test.");
|
||||
}
|
||||
}
|
||||
executeTarget("test.filelist");
|
||||
buildFileRule.executeTarget("test.filelist");
|
||||
|
||||
assertTrue("DependencyCheck report was not generated", report.exists());
|
||||
}
|
||||
@@ -100,7 +107,7 @@ public class DependencyCheckTaskTest extends BuildFileTest {
|
||||
throw new Exception("Unable to delete 'target/DependencyCheck-Vulnerability.html' prior to test.");
|
||||
}
|
||||
}
|
||||
executeTarget("test.dirset");
|
||||
buildFileRule.executeTarget("test.dirset");
|
||||
assertTrue("DependencyCheck report was not generated", report.exists());
|
||||
}
|
||||
|
||||
@@ -109,7 +116,7 @@ public class DependencyCheckTaskTest extends BuildFileTest {
|
||||
*/
|
||||
@Test
|
||||
public void testGetFailBuildOnCVSS() {
|
||||
expectBuildException("failCVSS", "asdfasdfscore");
|
||||
System.out.println(this.getOutput());
|
||||
expectedException.expect(BuildException.class);
|
||||
buildFileRule.executeTarget("failCVSS");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project name="Dependency-Check Test Build" default="test.fileset" basedir=".">
|
||||
|
||||
<taskdef name="dependency-check" classname="org.owasp.dependencycheck.taskdefs.DependencyCheckTask" />
|
||||
<taskdef name="dependency-check" classname="org.owasp.dependencycheck.taskdefs.Check" />
|
||||
|
||||
<target name="test.fileset">
|
||||
<dependency-check
|
||||
|
||||
@@ -5,7 +5,7 @@ performed are a "best effort" and as such, there could be false positives as wel
|
||||
vulnerabilities in 3rd party components is a well-known problem and is currently documented in the 2013 OWASP
|
||||
Top 10 as [A9 - Using Components with Known Vulnerabilities](https://www.owasp.org/index.php/Top_10_2013-A9-Using_Components_with_Known_Vulnerabilities).
|
||||
|
||||
Documentation and links to production binary releases can be found on the [github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-cli/installation.html).
|
||||
Documentation and links to production binary releases can be found on the [github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-cli/index.html).
|
||||
|
||||
Mailing List
|
||||
------------
|
||||
@@ -19,6 +19,6 @@ Copyright & License
|
||||
|
||||
Dependency-Check is Copyright (c) 2012-2014 Jeremy Long. All Rights Reserved.
|
||||
|
||||
Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://github.com/jeremylong/DependencyCheck/dependency-check-cli/blob/master/LICENSE.txt) file for the full license.
|
||||
Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/LICENSE.txt) file for the full license.
|
||||
|
||||
Dependency-Check Command Line makes use of other open source libraries. Please see the [NOTICE.txt](https://github.com/jeremylong/DependencyCheck/dependency-check-cli/blob/master/NOTICES.txt) file for more information.
|
||||
Dependency-Check Command Line makes use of other open source libraries. Please see the [NOTICE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/dependency-check-cli/NOTICE.txt) file for more information.
|
||||
|
||||
@@ -20,7 +20,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>1.2.11</version>
|
||||
<version>1.4.5</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>dependency-check-cli</artifactId>
|
||||
@@ -44,6 +44,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
<directory>src/main/resources</directory>
|
||||
<includes>
|
||||
<include>**/*.properties</include>
|
||||
<include>logback.xml</include>
|
||||
</includes>
|
||||
<filtering>true</filtering>
|
||||
</resource>
|
||||
@@ -109,6 +110,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
<argLine>-Dfile.encoding=UTF-8</argLine>
|
||||
<systemProperties>
|
||||
<property>
|
||||
<name>cpe</name>
|
||||
@@ -123,10 +125,6 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
</systemProperties>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>appassembler-maven-plugin</artifactId>
|
||||
@@ -142,6 +140,8 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
<binFileExtensions>
|
||||
<unix>.sh</unix>
|
||||
</binFileExtensions>
|
||||
<configurationDirectory>plugins/*</configurationDirectory>
|
||||
<includeConfigurationDirectoryInClasspath>true</includeConfigurationDirectoryInClasspath>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
@@ -177,100 +177,10 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
</build>
|
||||
<reporting>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-project-info-reports-plugin</artifactId>
|
||||
<version>2.7</version>
|
||||
<reportSets>
|
||||
<reportSet>
|
||||
<reports>
|
||||
<report>summary</report>
|
||||
<report>license</report>
|
||||
<report>help</report>
|
||||
</reports>
|
||||
</reportSet>
|
||||
</reportSets>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-javadoc-plugin</artifactId>
|
||||
<version>2.9.1</version>
|
||||
<configuration>
|
||||
<failOnError>false</failOnError>
|
||||
<bottom>Copyright© 2012-15 Jeremy Long. All Rights Reserved.</bottom>
|
||||
</configuration>
|
||||
<reportSets>
|
||||
<reportSet>
|
||||
<id>default</id>
|
||||
<reports>
|
||||
<report>javadoc</report>
|
||||
</reports>
|
||||
</reportSet>
|
||||
</reportSets>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>versions-maven-plugin</artifactId>
|
||||
<version>2.1</version>
|
||||
<reportSets>
|
||||
<reportSet>
|
||||
<reports>
|
||||
<report>dependency-updates-report</report>
|
||||
<report>plugin-updates-report</report>
|
||||
</reports>
|
||||
</reportSet>
|
||||
</reportSets>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-jxr-plugin</artifactId>
|
||||
<version>2.4</version>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>cobertura-maven-plugin</artifactId>
|
||||
<version>2.6</version>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-report-plugin</artifactId>
|
||||
<version>2.16</version>
|
||||
<reportSets>
|
||||
<reportSet>
|
||||
<reports>
|
||||
<report>report-only</report>
|
||||
</reports>
|
||||
</reportSet>
|
||||
</reportSets>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>taglist-maven-plugin</artifactId>
|
||||
<version>2.4</version>
|
||||
<configuration>
|
||||
<tagListOptions>
|
||||
<tagClasses>
|
||||
<tagClass>
|
||||
<displayName>Todo Work</displayName>
|
||||
<tags>
|
||||
<tag>
|
||||
<matchString>todo</matchString>
|
||||
<matchType>ignoreCase</matchType>
|
||||
</tag>
|
||||
<tag>
|
||||
<matchString>FIXME</matchString>
|
||||
<matchType>exact</matchType>
|
||||
</tag>
|
||||
</tags>
|
||||
</tagClass>
|
||||
</tagClasses>
|
||||
</tagListOptions>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-checkstyle-plugin</artifactId>
|
||||
<version>2.11</version>
|
||||
<version>${reporting.checkstyle-plugin.version}</version>
|
||||
<configuration>
|
||||
<enableRulesSummary>false</enableRulesSummary>
|
||||
<enableFilesSummary>false</enableFilesSummary>
|
||||
@@ -283,7 +193,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-pmd-plugin</artifactId>
|
||||
<version>3.1</version>
|
||||
<version>${reporting.pmd-plugin.version}</version>
|
||||
<configuration>
|
||||
<targetJdk>1.6</targetJdk>
|
||||
<linkXref>true</linkXref>
|
||||
@@ -299,18 +209,12 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
</rulesets>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>findbugs-maven-plugin</artifactId>
|
||||
<version>2.5.3</version>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</reporting>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>commons-cli</groupId>
|
||||
<artifactId>commons-cli</artifactId>
|
||||
<version>1.2</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.owasp</groupId>
|
||||
@@ -322,5 +226,27 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
|
||||
<artifactId>dependency-check-utils</artifactId>
|
||||
<version>${project.parent.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-core</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-classic</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.ant</groupId>
|
||||
<artifactId>ant</artifactId>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.apache.ant</groupId>
|
||||
<artifactId>ant-launcher</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
||||
|
||||
@@ -3,8 +3,7 @@
|
||||
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2
|
||||
http://maven.apache.org/xsd/assembly-1.1.2.xsd"
|
||||
>
|
||||
http://maven.apache.org/xsd/assembly-1.1.2.xsd">
|
||||
<id>release</id>
|
||||
<formats>
|
||||
<format>zip</format>
|
||||
@@ -12,25 +11,48 @@
|
||||
<includeBaseDirectory>false</includeBaseDirectory>
|
||||
<fileSets>
|
||||
<fileSet>
|
||||
<outputDirectory>/</outputDirectory>
|
||||
<directory>${project.build.directory}/release</directory>
|
||||
<outputDirectory>dependency-check/bin</outputDirectory>
|
||||
<directory>${project.build.directory}/release/bin</directory>
|
||||
<includes>
|
||||
<include>*.sh</include>
|
||||
</includes>
|
||||
<fileMode>0755</fileMode>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<outputDirectory>dependency-check/bin</outputDirectory>
|
||||
<directory>${project.build.directory}/release/bin</directory>
|
||||
<includes>
|
||||
<include>*.bat</include>
|
||||
</includes>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<outputDirectory>dependency-check/repo</outputDirectory>
|
||||
<directory>${project.build.directory}/release/repo</directory>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<directory>.</directory>
|
||||
<outputDirectory>dependency-check/plugins</outputDirectory>
|
||||
<excludes>
|
||||
<exclude>*/**</exclude>
|
||||
</excludes>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<outputDirectory>dependency-check</outputDirectory>
|
||||
<includes>
|
||||
<include>LICENSE*</include>
|
||||
<include>NOTICE*</include>
|
||||
</includes>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<outputDirectory>licenses</outputDirectory>
|
||||
<outputDirectory>dependency-check/licenses</outputDirectory>
|
||||
<directory>${basedir}/src/main/resources/META-INF/licenses</directory>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<outputDirectory>licenses</outputDirectory>
|
||||
<outputDirectory>dependency-check/licenses</outputDirectory>
|
||||
<directory>${basedir}/../dependency-check-core/src/main/resources/META-INF/licenses</directory>
|
||||
</fileSet>
|
||||
<fileSet>
|
||||
<outputDirectory>/</outputDirectory>
|
||||
<outputDirectory>dependency-check</outputDirectory>
|
||||
<directory>${basedir}</directory>
|
||||
<includes>
|
||||
<include>README.md</include>
|
||||
@@ -38,21 +60,4 @@
|
||||
</includes>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
<!--
|
||||
<fileSets>
|
||||
<fileSet>
|
||||
<outputDirectory>/</outputDirectory>
|
||||
<directory>${project.build.directory}</directory>
|
||||
<includes>
|
||||
<include>dependency-check*.jar</include>
|
||||
</includes>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
<dependencySets>
|
||||
<dependencySet>
|
||||
<outputDirectory>/lib</outputDirectory>
|
||||
<scope>runtime</scope>
|
||||
</dependencySet>
|
||||
</dependencySets>
|
||||
-->
|
||||
</assembly>
|
||||
@@ -17,26 +17,33 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck;
|
||||
|
||||
import ch.qos.logback.classic.LoggerContext;
|
||||
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
|
||||
import ch.qos.logback.classic.spi.ILoggingEvent;
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.org.apache.tools.ant.DirectoryScanner;
|
||||
import org.apache.tools.ant.DirectoryScanner;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
import org.owasp.dependencycheck.reporting.ReportGenerator;
|
||||
import org.owasp.dependencycheck.utils.LogUtils;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import ch.qos.logback.core.FileAppender;
|
||||
import org.owasp.dependencycheck.data.update.exception.UpdateException;
|
||||
import org.owasp.dependencycheck.exception.ExceptionCollection;
|
||||
import org.owasp.dependencycheck.exception.ReportException;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.slf4j.impl.StaticLoggerBinder;
|
||||
|
||||
/**
|
||||
* The command line interface for the DependencyCheck application.
|
||||
@@ -45,15 +52,10 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
*/
|
||||
public class App {
|
||||
|
||||
/**
|
||||
* The location of the log properties configuration file.
|
||||
*/
|
||||
private static final String LOG_PROPERTIES_FILE = "log.properties";
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(App.class.getName());
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(App.class);
|
||||
|
||||
/**
|
||||
* The main method for the application.
|
||||
@@ -61,21 +63,26 @@ public class App {
|
||||
* @param args the command line arguments
|
||||
*/
|
||||
public static void main(String[] args) {
|
||||
int exitCode = 0;
|
||||
try {
|
||||
Settings.initialize();
|
||||
final App app = new App();
|
||||
app.run(args);
|
||||
exitCode = app.run(args);
|
||||
LOGGER.debug("Exit code: " + exitCode);
|
||||
} finally {
|
||||
Settings.cleanup(true);
|
||||
}
|
||||
System.exit(exitCode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Main CLI entry-point into the application.
|
||||
*
|
||||
* @param args the command line arguments
|
||||
* @return the exit code to return
|
||||
*/
|
||||
public void run(String[] args) {
|
||||
public int run(String[] args) {
|
||||
int exitCode = 0;
|
||||
final CliParser cli = new CliParser();
|
||||
|
||||
try {
|
||||
@@ -83,90 +90,174 @@ public class App {
|
||||
} catch (FileNotFoundException ex) {
|
||||
System.err.println(ex.getMessage());
|
||||
cli.printHelp();
|
||||
return;
|
||||
return -1;
|
||||
} catch (ParseException ex) {
|
||||
System.err.println(ex.getMessage());
|
||||
cli.printHelp();
|
||||
return;
|
||||
return -2;
|
||||
}
|
||||
|
||||
final InputStream in = App.class.getClassLoader().getResourceAsStream(LOG_PROPERTIES_FILE);
|
||||
LogUtils.prepareLogger(in, cli.getVerboseLog());
|
||||
if (cli.getVerboseLog() != null) {
|
||||
prepareLogger(cli.getVerboseLog());
|
||||
}
|
||||
|
||||
if (cli.isGetVersion()) {
|
||||
if (cli.isPurge()) {
|
||||
if (cli.getConnectionString() != null) {
|
||||
LOGGER.error("Unable to purge the database when using a non-default connection string");
|
||||
exitCode = -3;
|
||||
} else {
|
||||
try {
|
||||
populateSettings(cli);
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.error(ex.getMessage());
|
||||
LOGGER.debug("Error loading properties file", ex);
|
||||
exitCode = -4;
|
||||
}
|
||||
File db;
|
||||
try {
|
||||
db = new File(Settings.getDataDirectory(), "dc.h2.db");
|
||||
if (db.exists()) {
|
||||
if (db.delete()) {
|
||||
LOGGER.info("Database file purged; local copy of the NVD has been removed");
|
||||
} else {
|
||||
LOGGER.error("Unable to delete '{}'; please delete the file manually", db.getAbsolutePath());
|
||||
exitCode = -5;
|
||||
}
|
||||
} else {
|
||||
LOGGER.error("Unable to purge database; the database file does not exists: {}", db.getAbsolutePath());
|
||||
exitCode = -6;
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
LOGGER.error("Unable to delete the database");
|
||||
exitCode = -7;
|
||||
}
|
||||
}
|
||||
} else if (cli.isGetVersion()) {
|
||||
cli.printVersionInfo();
|
||||
} else if (cli.isUpdateOnly()) {
|
||||
populateSettings(cli);
|
||||
runUpdateOnly();
|
||||
} else if (cli.isRunScan()) {
|
||||
populateSettings(cli);
|
||||
try {
|
||||
runScan(cli.getReportDirectory(), cli.getReportFormat(), cli.getApplicationName(), cli.getScanFiles(), cli.getExcludeList());
|
||||
populateSettings(cli);
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.error(ex.getMessage());
|
||||
LOGGER.debug("Error loading properties file", ex);
|
||||
exitCode = -4;
|
||||
}
|
||||
try {
|
||||
runUpdateOnly();
|
||||
} catch (UpdateException ex) {
|
||||
LOGGER.error(ex.getMessage());
|
||||
exitCode = -8;
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.error(ex.getMessage());
|
||||
exitCode = -9;
|
||||
}
|
||||
} else if (cli.isRunScan()) {
|
||||
try {
|
||||
populateSettings(cli);
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.error(ex.getMessage());
|
||||
LOGGER.debug("Error loading properties file", ex);
|
||||
exitCode = -4;
|
||||
}
|
||||
try {
|
||||
final String[] scanFiles = cli.getScanFiles();
|
||||
if (scanFiles != null) {
|
||||
exitCode = runScan(cli.getReportDirectory(), cli.getReportFormat(), cli.getProjectName(), scanFiles,
|
||||
cli.getExcludeList(), cli.getSymLinkDepth(), cli.getFailOnCVSS());
|
||||
} else {
|
||||
LOGGER.error("No scan files configured");
|
||||
}
|
||||
} catch (InvalidScanPathException ex) {
|
||||
LOGGER.log(Level.SEVERE, "An invalid scan path was detected; unable to scan '//*' paths");
|
||||
LOGGER.error("An invalid scan path was detected; unable to scan '//*' paths");
|
||||
exitCode = -10;
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.error(ex.getMessage());
|
||||
exitCode = -11;
|
||||
} catch (ReportException ex) {
|
||||
LOGGER.error(ex.getMessage());
|
||||
exitCode = -12;
|
||||
} catch (ExceptionCollection ex) {
|
||||
if (ex.isFatal()) {
|
||||
exitCode = -13;
|
||||
LOGGER.error("One or more fatal errors occurred");
|
||||
} else {
|
||||
exitCode = -14;
|
||||
}
|
||||
for (Throwable e : ex.getExceptions()) {
|
||||
LOGGER.error(e.getMessage());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
cli.printHelp();
|
||||
}
|
||||
return exitCode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans the specified directories and writes the dependency reports to the reportDirectory.
|
||||
* Scans the specified directories and writes the dependency reports to the
|
||||
* reportDirectory.
|
||||
*
|
||||
* @param reportDirectory the path to the directory where the reports will be written
|
||||
* @param reportDirectory the path to the directory where the reports will
|
||||
* be written
|
||||
* @param outputFormat the output format of the report
|
||||
* @param applicationName the application name for the report
|
||||
* @param files the files/directories to scan
|
||||
* @param excludes the patterns for files/directories to exclude
|
||||
* @param symLinkDepth the depth that symbolic links will be followed
|
||||
* @param cvssFailScore the score to fail on if a vulnerability is found
|
||||
* @return the exit code if there was an error
|
||||
*
|
||||
* @throws InvalidScanPathException thrown if the path to scan starts with "//"
|
||||
* @throws InvalidScanPathException thrown if the path to scan starts with
|
||||
* "//"
|
||||
* @throws ReportException thrown when the report cannot be generated
|
||||
* @throws DatabaseException thrown when there is an error connecting to the
|
||||
* database
|
||||
* @throws ExceptionCollection thrown when an exception occurs during
|
||||
* analysis; there may be multiple exceptions contained within the
|
||||
* collection.
|
||||
*/
|
||||
private void runScan(String reportDirectory, String outputFormat, String applicationName, String[] files,
|
||||
String[] excludes) throws InvalidScanPathException {
|
||||
private int runScan(String reportDirectory, String outputFormat, String applicationName, String[] files,
|
||||
String[] excludes, int symLinkDepth, int cvssFailScore) throws InvalidScanPathException, DatabaseException,
|
||||
ExceptionCollection, ReportException {
|
||||
Engine engine = null;
|
||||
int retCode = 0;
|
||||
try {
|
||||
engine = new Engine();
|
||||
List<String> antStylePaths = new ArrayList<String>();
|
||||
if (excludes == null || excludes.length == 0) {
|
||||
for (String file : files) {
|
||||
if (file.contains("*") || file.contains("?")) {
|
||||
antStylePaths.add(file);
|
||||
} else {
|
||||
engine.scan(file);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
antStylePaths = Arrays.asList(files);
|
||||
final List<String> antStylePaths = new ArrayList<String>();
|
||||
for (String file : files) {
|
||||
final String antPath = ensureCanonicalPath(file);
|
||||
antStylePaths.add(antPath);
|
||||
}
|
||||
|
||||
final Set<File> paths = new HashSet<File>();
|
||||
for (String file : antStylePaths) {
|
||||
LOGGER.debug("Scanning {}", file);
|
||||
final DirectoryScanner scanner = new DirectoryScanner();
|
||||
String include = file.replace('\\', '/');
|
||||
File baseDir;
|
||||
|
||||
if (include.startsWith("//")) {
|
||||
throw new InvalidScanPathException("Unable to scan paths specified by //");
|
||||
} else if (include.startsWith("./")) {
|
||||
baseDir = new File(".");
|
||||
include = include.substring(2);
|
||||
} else if (include.startsWith("/")) {
|
||||
baseDir = new File("/");
|
||||
include = include.substring(1);
|
||||
} else if (include.contains("/")) {
|
||||
final int pos = include.indexOf('/');
|
||||
final String tmp = include.substring(0, pos);
|
||||
if (tmp.contains("*") || tmp.contains("?")) {
|
||||
baseDir = new File(".");
|
||||
} else {
|
||||
final int pos = getLastFileSeparator(include);
|
||||
final String tmpBase = include.substring(0, pos);
|
||||
final String tmpInclude = include.substring(pos + 1);
|
||||
if (tmpInclude.indexOf('*') >= 0 || tmpInclude.indexOf('?') >= 0
|
||||
|| (new File(include)).isFile()) {
|
||||
baseDir = new File(tmpBase);
|
||||
include = tmpInclude;
|
||||
} else {
|
||||
baseDir = new File(tmp);
|
||||
include = include.substring(pos + 1);
|
||||
baseDir = new File(tmpBase, tmpInclude);
|
||||
include = "**/*";
|
||||
}
|
||||
} else { //no path info - must just be a file in the working directory
|
||||
baseDir = new File(".");
|
||||
}
|
||||
scanner.setBasedir(baseDir);
|
||||
scanner.setIncludes(include);
|
||||
final String[] includes = {include};
|
||||
scanner.setIncludes(includes);
|
||||
scanner.setMaxLevelsOfSymlinks(symLinkDepth);
|
||||
if (symLinkDepth <= 0) {
|
||||
scanner.setFollowSymlinks(false);
|
||||
}
|
||||
if (excludes != null && excludes.length > 0) {
|
||||
scanner.addExcludes(excludes);
|
||||
}
|
||||
@@ -174,13 +265,22 @@ public class App {
|
||||
if (scanner.getIncludedFilesCount() > 0) {
|
||||
for (String s : scanner.getIncludedFiles()) {
|
||||
final File f = new File(baseDir, s);
|
||||
LOGGER.debug("Found file {}", f.toString());
|
||||
paths.add(f);
|
||||
}
|
||||
}
|
||||
}
|
||||
engine.scan(paths);
|
||||
|
||||
engine.analyzeDependencies();
|
||||
ExceptionCollection exCol = null;
|
||||
try {
|
||||
engine.analyzeDependencies();
|
||||
} catch (ExceptionCollection ex) {
|
||||
if (ex.isFatal()) {
|
||||
throw ex;
|
||||
}
|
||||
exCol = ex;
|
||||
}
|
||||
final List<Dependency> dependencies = engine.getDependencies();
|
||||
DatabaseProperties prop = null;
|
||||
CveDB cve = null;
|
||||
@@ -188,8 +288,6 @@ public class App {
|
||||
cve = new CveDB();
|
||||
cve.open();
|
||||
prop = cve.getDatabaseProperties();
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.log(Level.FINE, "Unable to retrieve DB Properties", ex);
|
||||
} finally {
|
||||
if (cve != null) {
|
||||
cve.close();
|
||||
@@ -198,16 +296,31 @@ public class App {
|
||||
final ReportGenerator report = new ReportGenerator(applicationName, dependencies, engine.getAnalyzers(), prop);
|
||||
try {
|
||||
report.generateReports(reportDirectory, outputFormat);
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.SEVERE, "There was an IO error while attempting to generate the report.");
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
} catch (Throwable ex) {
|
||||
LOGGER.log(Level.SEVERE, "There was an error while attempting to generate the report.");
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
} catch (ReportException ex) {
|
||||
if (exCol != null) {
|
||||
exCol.addException(ex);
|
||||
throw exCol;
|
||||
} else {
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Unable to connect to the dependency-check database; analysis has stopped");
|
||||
LOGGER.log(Level.FINE, "", ex);
|
||||
if (exCol != null && exCol.getExceptions().size() > 0) {
|
||||
throw exCol;
|
||||
}
|
||||
|
||||
//Set the exit code based on whether we found a high enough vulnerability
|
||||
for (Dependency dep : dependencies) {
|
||||
if (!dep.getVulnerabilities().isEmpty()) {
|
||||
for (Vulnerability vuln : dep.getVulnerabilities()) {
|
||||
LOGGER.debug("VULNERABILITY FOUND " + dep.getDisplayFileName());
|
||||
if (vuln.getCvssScore() > cvssFailScore) {
|
||||
retCode = 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return retCode;
|
||||
} finally {
|
||||
if (engine != null) {
|
||||
engine.cleanup();
|
||||
@@ -217,15 +330,16 @@ public class App {
|
||||
|
||||
/**
|
||||
* Only executes the update phase of dependency-check.
|
||||
*
|
||||
* @throws UpdateException thrown if there is an error updating
|
||||
* @throws DatabaseException thrown if a fatal error occurred and a
|
||||
* connection to the database could not be established
|
||||
*/
|
||||
private void runUpdateOnly() {
|
||||
private void runUpdateOnly() throws UpdateException, DatabaseException {
|
||||
Engine engine = null;
|
||||
try {
|
||||
engine = new Engine();
|
||||
engine.doUpdates();
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Unable to connect to the dependency-check database; analysis has stopped");
|
||||
LOGGER.log(Level.FINE, "", ex);
|
||||
} finally {
|
||||
if (engine != null) {
|
||||
engine.cleanup();
|
||||
@@ -236,11 +350,13 @@ public class App {
|
||||
/**
|
||||
* Updates the global Settings.
|
||||
*
|
||||
* @param cli a reference to the CLI Parser that contains the command line arguments used to set the corresponding settings in
|
||||
* the core engine.
|
||||
* @param cli a reference to the CLI Parser that contains the command line
|
||||
* arguments used to set the corresponding settings in the core engine.
|
||||
*
|
||||
* @throws InvalidSettingException thrown when a user defined properties
|
||||
* file is unable to be loaded.
|
||||
*/
|
||||
private void populateSettings(CliParser cli) {
|
||||
|
||||
private void populateSettings(CliParser cli) throws InvalidSettingException {
|
||||
final boolean autoUpdate = cli.isAutoUpdate();
|
||||
final String connectionTimeout = cli.getConnectionTimeout();
|
||||
final String proxyServer = cli.getProxyServer();
|
||||
@@ -250,14 +366,7 @@ public class App {
|
||||
final String dataDirectory = cli.getDataDirectory();
|
||||
final File propertiesFile = cli.getPropertiesFile();
|
||||
final String suppressionFile = cli.getSuppressionFile();
|
||||
final boolean jarDisabled = cli.isJarDisabled();
|
||||
final boolean archiveDisabled = cli.isArchiveDisabled();
|
||||
final boolean pyDistDisabled = cli.isPythonDistributionDisabled();
|
||||
final boolean pyPkgDisabled = cli.isPythonPackageDisabled();
|
||||
final boolean assemblyDisabled = cli.isAssemblyDisabled();
|
||||
final boolean nuspecDisabled = cli.isNuspecDisabled();
|
||||
final boolean centralDisabled = cli.isCentralDisabled();
|
||||
final boolean nexusDisabled = cli.isNexusDisabled();
|
||||
final String hintsFile = cli.getHintsFile();
|
||||
final String nexusUrl = cli.getNexusUrl();
|
||||
final String databaseDriverName = cli.getDatabaseDriverName();
|
||||
final String databaseDriverPath = cli.getDatabaseDriverPath();
|
||||
@@ -266,18 +375,20 @@ public class App {
|
||||
final String databasePassword = cli.getDatabasePassword();
|
||||
final String additionalZipExtensions = cli.getAdditionalZipExtensions();
|
||||
final String pathToMono = cli.getPathToMono();
|
||||
final String cveMod12 = cli.getModifiedCve12Url();
|
||||
final String cveMod20 = cli.getModifiedCve20Url();
|
||||
final String cveBase12 = cli.getBaseCve12Url();
|
||||
final String cveBase20 = cli.getBaseCve20Url();
|
||||
final Integer cveValidForHours = cli.getCveValidForHours();
|
||||
final boolean experimentalEnabled = cli.isExperimentalEnabled();
|
||||
|
||||
if (propertiesFile != null) {
|
||||
try {
|
||||
Settings.mergeProperties(propertiesFile);
|
||||
} catch (FileNotFoundException ex) {
|
||||
final String msg = String.format("Unable to load properties file '%s'", propertiesFile.getPath());
|
||||
LOGGER.log(Level.SEVERE, msg);
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
throw new InvalidSettingException("Unable to find properties file '" + propertiesFile.getPath() + "'", ex);
|
||||
} catch (IOException ex) {
|
||||
final String msg = String.format("Unable to find properties file '%s'", propertiesFile.getPath());
|
||||
LOGGER.log(Level.SEVERE, msg);
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
throw new InvalidSettingException("Error reading properties file '" + propertiesFile.getPath() + "'", ex);
|
||||
}
|
||||
}
|
||||
// We have to wait until we've merged the properties before attempting to set whether we use
|
||||
@@ -297,59 +408,139 @@ public class App {
|
||||
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
|
||||
}
|
||||
Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);
|
||||
if (proxyServer != null && !proxyServer.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
}
|
||||
if (proxyPort != null && !proxyPort.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
}
|
||||
if (proxyUser != null && !proxyUser.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.PROXY_USERNAME, proxyUser);
|
||||
}
|
||||
if (proxyPass != null && !proxyPass.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.PROXY_PASSWORD, proxyPass);
|
||||
}
|
||||
if (connectionTimeout != null && !connectionTimeout.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
}
|
||||
if (suppressionFile != null && !suppressionFile.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
|
||||
}
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUser);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPass);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
|
||||
Settings.setIntIfNotNull(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
|
||||
|
||||
//File Type Analyzer Settings
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED, !jarDisabled);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, !archiveDisabled);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, !pyDistDisabled);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, !pyPkgDisabled);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, !nuspecDisabled);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, !assemblyDisabled);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, experimentalEnabled);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED, !cli.isJarDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, !cli.isArchiveDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, !cli.isPythonDistributionDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, !cli.isPythonPackageDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, !cli.isAutoconfDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_CMAKE_ENABLED, !cli.isCmakeDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, !cli.isNuspecDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, !cli.isAssemblyDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, !cli.isBundleAuditDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, !cli.isOpenSSLDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, !cli.isComposerDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, !cli.isNodeJsDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, !cli.isSwiftPackageAnalyzerDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, !cli.isCocoapodsAnalyzerDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, !cli.isRubyGemspecDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, !cli.isCentralDisabled());
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, !cli.isNexusDisabled());
|
||||
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, !centralDisabled);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, !nexusDisabled);
|
||||
if (nexusUrl != null && !nexusUrl.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, cli.getPathToBundleAudit());
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, additionalZipExtensions);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
|
||||
if (cveBase12 != null && !cveBase12.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.CVE_SCHEMA_1_2, cveBase12);
|
||||
Settings.setString(Settings.KEYS.CVE_SCHEMA_2_0, cveBase20);
|
||||
Settings.setString(Settings.KEYS.CVE_MODIFIED_12_URL, cveMod12);
|
||||
Settings.setString(Settings.KEYS.CVE_MODIFIED_20_URL, cveMod20);
|
||||
}
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_PROXY, nexusUsesProxy);
|
||||
if (databaseDriverName != null && !databaseDriverName.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a file appender and adds it to logback.
|
||||
*
|
||||
* @param verboseLog the path to the verbose log file
|
||||
*/
|
||||
private void prepareLogger(String verboseLog) {
|
||||
final StaticLoggerBinder loggerBinder = StaticLoggerBinder.getSingleton();
|
||||
final LoggerContext context = (LoggerContext) loggerBinder.getLoggerFactory();
|
||||
|
||||
final PatternLayoutEncoder encoder = new PatternLayoutEncoder();
|
||||
encoder.setPattern("%d %C:%L%n%-5level - %msg%n");
|
||||
encoder.setContext(context);
|
||||
encoder.start();
|
||||
final FileAppender<ILoggingEvent> fa = new FileAppender<ILoggingEvent>();
|
||||
fa.setAppend(true);
|
||||
fa.setEncoder(encoder);
|
||||
fa.setContext(context);
|
||||
fa.setFile(verboseLog);
|
||||
final File f = new File(verboseLog);
|
||||
String name = f.getName();
|
||||
final int i = name.lastIndexOf('.');
|
||||
if (i > 1) {
|
||||
name = name.substring(0, i);
|
||||
}
|
||||
if (databaseDriverPath != null && !databaseDriverPath.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
fa.setName(name);
|
||||
fa.start();
|
||||
final ch.qos.logback.classic.Logger rootLogger = context.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME);
|
||||
rootLogger.addAppender(fa);
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes a path and resolves it to be a canonical & absolute path. The
|
||||
* caveats are that this method will take an Ant style file selector path
|
||||
* (../someDir/**\/*.jar) and convert it to an absolute/canonical path (at
|
||||
* least to the left of the first * or ?).
|
||||
*
|
||||
* @param path the path to canonicalize
|
||||
* @return the canonical path
|
||||
*/
|
||||
protected String ensureCanonicalPath(String path) {
|
||||
String basePath;
|
||||
String wildCards = null;
|
||||
final String file = path.replace('\\', '/');
|
||||
if (file.contains("*") || file.contains("?")) {
|
||||
|
||||
int pos = getLastFileSeparator(file);
|
||||
if (pos < 0) {
|
||||
return file;
|
||||
}
|
||||
pos += 1;
|
||||
basePath = file.substring(0, pos);
|
||||
wildCards = file.substring(pos);
|
||||
} else {
|
||||
basePath = file;
|
||||
}
|
||||
if (connectionString != null && !connectionString.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
|
||||
File f = new File(basePath);
|
||||
try {
|
||||
f = f.getCanonicalFile();
|
||||
if (wildCards != null) {
|
||||
f = new File(f, wildCards);
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
LOGGER.warn("Invalid path '{}' was provided.", path);
|
||||
LOGGER.debug("Invalid path provided", ex);
|
||||
}
|
||||
if (databaseUser != null && !databaseUser.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.DB_USER, databaseUser);
|
||||
}
|
||||
if (databasePassword != null && !databasePassword.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
}
|
||||
if (additionalZipExtensions != null && !additionalZipExtensions.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, additionalZipExtensions);
|
||||
}
|
||||
if (pathToMono != null && !pathToMono.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
|
||||
return f.getAbsolutePath().replace('\\', '/');
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the position of the last file separator.
|
||||
*
|
||||
* @param file a file path
|
||||
* @return the position of the last file separator
|
||||
*/
|
||||
private int getLastFileSeparator(String file) {
|
||||
if (file.contains("*") || file.contains("?")) {
|
||||
int p1 = file.indexOf('*');
|
||||
int p2 = file.indexOf('?');
|
||||
p1 = p1 > 0 ? p1 : file.length();
|
||||
p2 = p2 > 0 ? p2 : file.length();
|
||||
int pos = p1 < p2 ? p1 : p2;
|
||||
pos = file.lastIndexOf('/', pos);
|
||||
return pos;
|
||||
} else {
|
||||
return file.lastIndexOf('/');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -22,7 +22,12 @@ package org.owasp.dependencycheck;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
class InvalidScanPathException extends Exception {
|
||||
public class InvalidScanPathException extends Exception {
|
||||
|
||||
/**
|
||||
* The serial version UID for serialization.
|
||||
*/
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* Creates a new InvalidScanPathException.
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
handlers=java.util.logging.ConsoleHandler
|
||||
#, java.util.logging.FileHandler
|
||||
|
||||
# logging levels
|
||||
# FINEST, FINER, FINE, CONFIG, INFO, WARNING and SEVERE.
|
||||
|
||||
# Configure the ConsoleHandler.
|
||||
java.util.logging.ConsoleHandler.level=INFO
|
||||
|
||||
# Configure the FileHandler.
|
||||
java.util.logging.FileHandler.formatter=java.util.logging.SimpleFormatter
|
||||
java.util.logging.FileHandler.level=FINE
|
||||
|
||||
# The following special tokens can be used in the pattern property
|
||||
# which specifies the location and name of the log file.
|
||||
# / - standard path separator
|
||||
# %t - system temporary directory
|
||||
# %h - value of the user.home system property
|
||||
# %g - generation number for rotating logs
|
||||
# %u - unique number to avoid conflicts
|
||||
# FileHandler writes to %h/demo0.log by default.
|
||||
java.util.logging.FileHandler.pattern=./dependency-check.log
|
||||
16
dependency-check-cli/src/main/resources/logback.xml
Normal file
@@ -0,0 +1,16 @@
|
||||
<configuration>
|
||||
<contextName>dependency-check</contextName>
|
||||
<!-- Logging configuration -->
|
||||
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
|
||||
<Target>System.out</Target>
|
||||
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
|
||||
<level>INFO</level>
|
||||
</filter>
|
||||
<encoder>
|
||||
<pattern>[%level] %msg%n</pattern>
|
||||
</encoder>
|
||||
</appender>
|
||||
<root level="DEBUG">
|
||||
<appender-ref ref="console"/>
|
||||
</root>
|
||||
</configuration>
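A brief, hedged note on this configuration: the console appender above limits terminal output to INFO, while the -l/--log option documented below (handled by the prepareLogger method shown earlier) attaches an additional file appender for verbose output. A hypothetical invocation (the log file name is illustrative, not taken from the project):

    dependency-check.sh --project "My App Name" --scan "/java/application/lib" --log verbose.log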
|
||||
@@ -5,37 +5,55 @@ The following table lists the command line arguments:
|
||||
|
||||
Short | Argument Name | Parameter | Description | Requirement
|
||||
-------|-----------------------|-----------------|-------------|------------
|
||||
\-a | \-\-app | \<name\> | The name of the application being scanned. This is a required argument. | Required
|
||||
| \-\-project | \<name\> | The name of the project being scanned. | Required
|
||||
\-s | \-\-scan | \<path\> | The path to scan \- this option can be specified multiple times. It is also possible to specify Ant style paths (e.g. directory/**/*.jar). | Required
|
||||
| \-\-exclude | \<pattern\> | The path patterns to exclude from the scan \- this option can be specified multiple times. This accepts Ant style path patterns (e.g. **/exclude/**) . | Optional
|
||||
| \-\-exclude | \<pattern\> | The path patterns to exclude from the scan \- this option can be specified multiple times. This accepts Ant style path patterns (e.g. **/exclude/**). | Optional
|
||||
| \-\-symLink | \<depth\> | The depth that symbolic links will be followed; the default is 0 meaning symbolic links will not be followed. | Optional
|
||||
\-o | \-\-out | \<path\> | The folder to write reports to. This defaults to the current directory. If the format is not set to ALL one could specify a specific file name. | Optional
|
||||
\-f | \-\-format | \<format\> | The output format to write to (XML, HTML, VULN, ALL). The default is HTML. | Required
|
||||
 | \-\-failOnCvss      | \<score\>       | If a score between 0 and 10 is set, the exit code from dependency-check will indicate whether a vulnerability with a CVSS score equal to or higher than that value was identified. | Optional
|
||||
\-l | \-\-log | \<file\> | The file path to write verbose logging information. | Optional
|
||||
\-n | \-\-noupdate | | Disables the automatic updating of the CPE data. | Optional
|
||||
| \-\-suppression | \<file\> | The file path to the suppression XML file; used to suppress [false positives](../suppression.html). | Optional
|
||||
| \-\-suppression | \<file\> | The file path to the suppression XML file; used to suppress [false positives](../general/suppression.html). | Optional
|
||||
\-h | \-\-help | | Print the help message. | Optional
|
||||
| \-\-advancedHelp | | Print the advanced help message. | Optional
|
||||
\-v | \-\-version | | Print the version information. | Optional
|
||||
| \-\-cveValidForHours | \<hours\> | The number of hours to wait before checking for new updates from the NVD. The default is 4 hours. | Optional
|
||||
| \-\-experimental | | Enable the [experimental analyzers](../analyzers/index.html). If not set the analyzers marked as experimental below will not be loaded or used. | Optional
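As a sketch of how these options combine, a typical invocation using only the flags documented above might look like the following (the project name, scan path, and file locations are illustrative, not taken from the project):

    dependency-check.sh --project "My App Name" --scan "/java/application/lib" --format ALL --out ./reports --suppression ./suppression.xml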
|
||||
|
||||
Advanced Options
|
||||
================
|
||||
Short | Argument Name | Parameter | Description | Default Value
|
||||
-------|-----------------------|-----------------|----------------------------------------------------------------------------------|-------------------
|
||||
| \-\-cveUrl12Modified | \<url\> | URL for the modified CVE 1.2 | https://nvd.nist.gov/download/nvdcve-Modified.xml.gz
|
||||
| \-\-cveUrl20Modified | \<url\> | URL for the modified CVE 2.0 | https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-Modified.xml.gz
|
||||
| \-\-cveUrl12Base | \<url\> | Base URL for each year's CVE 1.2, the %d will be replaced with the year | https://nvd.nist.gov/download/nvdcve-%d.xml.gz
|
||||
| \-\-cveUrl20Base | \<url\> | Base URL for each year's CVE 2.0, the %d will be replaced with the year | https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml.gz
|
||||
\-P     | \-\-propertyfile      | \<file\>        | Specifies a file that contains properties to use instead of application defaults. |
|
||||
 | \-\-updateonly        |                 | If set, only the update phase of dependency-check will be executed; no scan will be executed and no report will be generated. |
|
||||
| \-\-disablePyDist | | Sets whether the Python Distribution Analyzer will be used. | false
|
||||
| \-\-disablePyPkg | | Sets whether the Python Package Analyzer will be used. | false
|
||||
| \-\-disableArchive | | Sets whether the Archive Analyzer will be used. | false
|
||||
| \-\-disablePyDist | | Sets whether the [experimental](../analyzers/index.html) Python Distribution Analyzer will be used. | false
|
||||
| \-\-disablePyPkg | | Sets whether the [experimental](../analyzers/index.html) Python Package Analyzer will be used. | false
|
||||
| \-\-disableNodeJS | | Sets whether the [experimental](../analyzers/index.html) Node.js Package Analyzer will be used. | false
|
||||
| \-\-disableRubygems | | Sets whether the [experimental](../analyzers/index.html) Ruby Gemspec Analyzer will be used. | false
|
||||
| \-\-disableBundleAudit | | Sets whether the [experimental](../analyzers/index.html) Ruby Bundler Audit Analyzer will be used. | false
|
||||
| \-\-disableCocoapodsAnalyzer | | Sets whether the [experimental](../analyzers/index.html) Cocoapods Analyzer will be used. | false
|
||||
| \-\-disableSwiftPackageManagerAnalyzer | | Sets whether the [experimental](../analyzers/index.html) Swift Package Manager Analyzer will be used. | false
|
||||
| \-\-disableAutoconf | | Sets whether the [experimental](../analyzers/index.html) Autoconf Analyzer will be used. | false
|
||||
| \-\-disableOpenSSL | | Sets whether the OpenSSL Analyzer will be used. | false
|
||||
| \-\-disableCmake | | Sets whether the [experimental](../analyzers/index.html) Cmake Analyzer will be disabled. | false
|
||||
| \-\-disableArchive | | Sets whether the Archive Analyzer will be disabled. | false
|
||||
| \-\-zipExtensions | \<strings\> | A comma-separated list of additional file extensions to be treated like a ZIP file, the contents will be extracted and analyzed. |
|
||||
| \-\-disableJar | | Sets whether the Jar Analyzer will be used. | false
|
||||
| \-\-disableJar | | Sets whether the Jar Analyzer will be disabled. | false
|
||||
| \-\-disableComposer | | Sets whether the [experimental](../analyzers/index.html) PHP Composer Lock File Analyzer will be disabled. | false
|
||||
| \-\-disableCentral | | Sets whether the Central Analyzer will be used. **Disabling this analyzer is not recommended as it could lead to false negatives (e.g. libraries that have vulnerabilities may not be reported correctly).** If this analyzer is being disabled there is a good chance you also want to disable the Nexus Analyzer. | false
|
||||
 | \-\-disableNexus      |                 | Sets whether the Nexus Analyzer will be used. Note, this has been superseded by the Central Analyzer. However, you can configure the Nexus URL to utilize an internally hosted Nexus Pro server. | false
|
||||
 | \-\-nexus             | \<url\>         | The URL to the Nexus Server's web service end point (example: http://domain.enterprise/nexus/service/local/). If not set, the Nexus Analyzer will be disabled. |
|
||||
| \-\-nexusUsesProxy | \<true\|false\> | Whether or not the defined proxy should be used when connecting to Nexus. | true
|
||||
| \-\-disableNuspec | | Sets whether or not the .NET Nuget Nuspec Analyzer will be used. | false
|
||||
| \-\-disableAssembly | | Sets whether or not the .NET Assembly Analyzer should be used. | false
|
||||
| \-\-pathToMono | \<path\> | The path to Mono for .NET Assembly analysis on non-windows systems. |
|
||||
| \-\-proxyserver | \<server\> | The proxy server to use when downloading resources. |
|
||||
| \-\-mono | \<path\> | The path to Mono for .NET Assembly analysis on non-windows systems. |
|
||||
| \-\-bundleAudit | | The path to the bundle-audit executable. |
|
||||
| \-\-proxyserver | \<server\> | The proxy server to use when downloading resources; see the [proxy configuration](../data/proxy.html) page for more information. |
|
||||
| \-\-proxyport | \<port\> | The proxy port to use when downloading resources. |
|
||||
| \-\-connectiontimeout | \<timeout\> | The connection timeout (in milliseconds) to use when downloading resources. |
|
||||
| \-\-proxypass | \<pass\> | The proxy password to use when downloading resources. |
|
||||
@@ -46,3 +64,4 @@ Short | Argument Name | Paramete
|
||||
| \-\-dbPassword | \<password\> | The password for connecting to the database. |
|
||||
| \-\-dbUser | \<user\> | The username used to connect to the database. |
|
||||
\-d | \-\-data | \<path\> | The location of the data directory used to store persistent data. This option should generally not be set. |
|
||||
| \-\-purge | | Delete the local copy of the NVD. This is used to force a refresh of the data. |
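To illustrate the maintenance options above, the local NVD data can be refreshed without running a scan; the commands below are a hypothetical example using the *nix launcher (use dependency-check.bat on Windows):

    dependency-check.sh --updateonly
    dependency-check.sh --purge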
|
||||
@@ -9,19 +9,23 @@ Installation & Usage
|
||||
====================
|
||||
Download the dependency-check command line tool [here](http://dl.bintray.com/jeremy-long/owasp/dependency-check-${project.version}-release.zip).
|
||||
Extract the zip file to a location on your computer and put the 'bin' directory into the
|
||||
path environment variable. On \*nix systems you will likely need to make the shell
|
||||
script executable:
|
||||
path environment variable.
|
||||
|
||||
$ chmod +777 dependency-check.sh
|
||||
|
||||
To scan a folder on the system you can run:
|
||||
#set( $H = '#' )
|
||||
|
||||
$H$H$H Homebrew
|
||||
$ brew install dependency-check
|
||||
|
||||
This puts an executable `dependency-check` script in the `/bin` directory of
|
||||
your homebrew installation.
|
||||
|
||||
To scan a folder on the system you can run:
|
||||
|
||||
$H$H$H Windows
|
||||
dependency-check.bat --app "My App Name" --scan "c:\java\application\lib"
|
||||
dependency-check.bat --project "My App Name" --scan "c:\java\application\lib"
|
||||
|
||||
$H$H$H *nix
|
||||
dependency-check.sh --app "My App Name" --scan "/java/application/lib"
|
||||
dependency-check.sh --project "My App Name" --scan "/java/application/lib"
|
||||
|
||||
To view the command line arguments, see the <a href="arguments.html">arguments page</a>, or you can run:
|
||||
|
||||
@@ -29,4 +33,4 @@ $H$H$H Windows
|
||||
dependency-check.bat --help
|
||||
|
||||
$H$H$H *nix
|
||||
dependency-check.sh --help
|
||||
dependency-check.sh --help
|
||||
|
||||
@@ -0,0 +1,75 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
 * Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck;
|
||||
|
||||
import org.junit.After;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author jeremy
|
||||
*/
|
||||
public class AppTest {
|
||||
|
||||
public AppTest() {
|
||||
}
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() {
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDownClass() {
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of ensureCanonicalPath method, of class App.
|
||||
*/
|
||||
@Test
|
||||
public void testEnsureCanonicalPath() {
|
||||
String file = "../*.jar";
|
||||
App instance = new App();
|
||||
String result = instance.ensureCanonicalPath(file);
|
||||
assertFalse(result.contains(".."));
|
||||
assertTrue(result.endsWith("*.jar"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of ensureCanonicalPath method, of class App.
|
||||
*/
|
||||
@Test
|
||||
public void testEnsureCanonicalPath2() {
|
||||
String file = "../some/skip/../path/file.txt";
|
||||
App instance = new App();
|
||||
String expResult = "/some/path/file.txt";
|
||||
String result = instance.ensureCanonicalPath(file);
|
||||
assertTrue("result=" + result, result.endsWith(expResult));
|
||||
}
|
||||
}
|
||||
@@ -115,6 +115,63 @@ public class CliParserTest {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of parse method with failOnCVSS without an argument
|
||||
*
|
||||
* @throws Exception thrown when an exception occurs.
|
||||
*/
|
||||
@Test
|
||||
public void testParse_failOnCVSSNoArg() throws Exception {
|
||||
|
||||
String[] args = {"--failOnCVSS"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
try {
|
||||
instance.parse(args);
|
||||
} catch (ParseException ex) {
|
||||
Assert.assertTrue(ex.getMessage().contains("Missing argument"));
|
||||
}
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
Assert.assertFalse(instance.isGetHelp());
|
||||
Assert.assertFalse(instance.isRunScan());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of parse method with failOnCVSS invalid argument. It should default to 11
|
||||
*
|
||||
* @throws Exception thrown when an exception occurs.
|
||||
*/
|
||||
@Test
|
||||
public void testParse_failOnCVSSInvalidArgument() throws Exception {
|
||||
|
||||
String[] args = {"--failOnCVSS","bad"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
instance.parse(args);
|
||||
Assert.assertEquals("Default should be 11", 11, instance.getFailOnCVSS());
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
Assert.assertFalse(instance.isGetHelp());
|
||||
Assert.assertFalse(instance.isRunScan());
|
||||
}
|
||||
|
||||
/**
|
||||
 * Test of parse method with failOnCVSS valid argument.
|
||||
*
|
||||
* @throws Exception thrown when an exception occurs.
|
||||
*/
|
||||
@Test
|
||||
public void testParse_failOnCVSSValidArgument() throws Exception {
|
||||
|
||||
String[] args = {"--failOnCVSS","6"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
instance.parse(args);
|
||||
Assert.assertEquals(6, instance.getFailOnCVSS());
|
||||
Assert.assertFalse(instance.isGetVersion());
|
||||
Assert.assertFalse(instance.isGetHelp());
|
||||
Assert.assertFalse(instance.isRunScan());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of parse method with jar and cpe args, of class CliParser.
|
||||
*
|
||||
@@ -196,7 +253,7 @@ public class CliParserTest {
|
||||
*/
|
||||
@Test
|
||||
public void testParse_scan_withFileExists() throws Exception {
|
||||
File path = new File(this.getClass().getClassLoader().getResource("checkSumTest.file").getPath());
|
||||
File path = new File(this.getClass().getClassLoader().getResource("checkSumTest.file").toURI().getPath());
|
||||
String[] args = {"-scan", path.getCanonicalPath(), "-out", "./", "-app", "test"};
|
||||
|
||||
CliParser instance = new CliParser();
|
||||
|
||||
@@ -17,7 +17,7 @@ Copyright & License
|
||||
|
||||
Dependency-Check is Copyright (c) 2012-2014 Jeremy Long. All Rights Reserved.
|
||||
|
||||
Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://github.com/jeremylong/DependencyCheck/dependency-check-cli/blob/master/LICENSE.txt) file for the full license.
|
||||
Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/LICENSE.txt) file for the full license.
|
||||
|
||||
Dependency-Check makes use of several other open source libraries. Please see the [NOTICE.txt] [notices] file for more information.
|
||||
|
||||
@@ -25,4 +25,4 @@ Dependency-Check makes use of several other open source libraries. Please see th
|
||||
[wiki]: https://github.com/jeremylong/DependencyCheck/wiki
|
||||
[subscribe]: mailto:dependency-check+subscribe@googlegroups.com
|
||||
[post]: mailto:dependency-check@googlegroups.com
|
||||
[notices]: https://github.com/jeremylong/DependencyCheck/blob/master/NOTICES.txt
|
||||
[notices]: https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/NOTICE.txt
|
||||
@@ -20,7 +20,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<parent>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-parent</artifactId>
|
||||
<version>1.2.11</version>
|
||||
<version>1.4.5</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>dependency-check-core</artifactId>
|
||||
@@ -83,9 +83,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
</testResource>
|
||||
<testResource>
|
||||
<directory>${basedir}/src/test/resources</directory>
|
||||
<excludes>
|
||||
<exclude>**/mysql-connector-java-5.1.27-bin.jar</exclude>
|
||||
</excludes>
|
||||
<filtering>false</filtering>
|
||||
</testResource>
|
||||
</testResources>
|
||||
@@ -101,7 +98,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
</goals>
|
||||
<configuration>
|
||||
<outputDirectory>${project.build.directory}/test-classes</outputDirectory>
|
||||
<includeScope>provided</includeScope>
|
||||
<includeScope>test</includeScope>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
@@ -110,19 +107,17 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-jar-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>jar</id>
|
||||
<phase>package</phase>
|
||||
<goals>
|
||||
<goal>jar</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>test-jar</id>
|
||||
<phase>package</phase>
|
||||
<goals>
|
||||
<goal>test-jar</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<includes>
|
||||
<include>**/*.class</include>
|
||||
</includes>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
@@ -180,6 +175,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
<argLine>-Dfile.encoding=UTF-8</argLine>
|
||||
<systemProperties>
|
||||
<property>
|
||||
<name>data.directory</name>
|
||||
@@ -205,84 +201,21 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<name>data.directory</name>
|
||||
<value>${project.build.directory}/data</value>
|
||||
</property>
|
||||
<property>
|
||||
<name>temp.directory</name>
|
||||
<value>${project.build.directory}/temp</value>
|
||||
</property>
|
||||
</systemProperties>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<configuration>
|
||||
<compilerArgument>-Xlint:unchecked</compilerArgument>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
<reporting>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-project-info-reports-plugin</artifactId>
|
||||
<version>2.7</version>
|
||||
<reportSets>
|
||||
<reportSet>
|
||||
<reports>
|
||||
<report>summary</report>
|
||||
<report>license</report>
|
||||
<report>help</report>
|
||||
</reports>
|
||||
</reportSet>
|
||||
</reportSets>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-javadoc-plugin</artifactId>
|
||||
<version>2.9.1</version>
|
||||
<configuration>
|
||||
<failOnError>false</failOnError>
|
||||
<bottom>Copyright© 2012-15 Jeremy Long. All Rights Reserved.</bottom>
|
||||
</configuration>
|
||||
<reportSets>
|
||||
<reportSet>
|
||||
<id>default</id>
|
||||
<reports>
|
||||
<report>javadoc</report>
|
||||
</reports>
|
||||
</reportSet>
|
||||
</reportSets>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>versions-maven-plugin</artifactId>
|
||||
<version>2.1</version>
|
||||
<reportSets>
|
||||
<reportSet>
|
||||
<reports>
|
||||
<report>dependency-updates-report</report>
|
||||
<report>plugin-updates-report</report>
|
||||
</reports>
|
||||
</reportSet>
|
||||
</reportSets>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-jxr-plugin</artifactId>
|
||||
<version>2.4</version>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>cobertura-maven-plugin</artifactId>
|
||||
<version>2.6</version>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-report-plugin</artifactId>
|
||||
<version>2.16</version>
|
||||
<reportSets>
|
||||
<reportSet>
|
||||
<reports>
|
||||
<report>report-only</report>
|
||||
</reports>
|
||||
</reportSet>
|
||||
<reportSet>
|
||||
<id>integration-tests</id>
|
||||
<reports>
|
||||
@@ -292,34 +225,10 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
</reportSet>
|
||||
</reportSets>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>taglist-maven-plugin</artifactId>
|
||||
<version>2.4</version>
|
||||
<configuration>
|
||||
<tagListOptions>
|
||||
<tagClasses>
|
||||
<tagClass>
|
||||
<displayName>Todo Work</displayName>
|
||||
<tags>
|
||||
<tag>
|
||||
<matchString>todo</matchString>
|
||||
<matchType>ignoreCase</matchType>
|
||||
</tag>
|
||||
<tag>
|
||||
<matchString>FIXME</matchString>
|
||||
<matchType>exact</matchType>
|
||||
</tag>
|
||||
</tags>
|
||||
</tagClass>
|
||||
</tagClasses>
|
||||
</tagListOptions>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-checkstyle-plugin</artifactId>
|
||||
<version>2.11</version>
|
||||
<version>${reporting.checkstyle-plugin.version}</version>
|
||||
<configuration>
|
||||
<enableRulesSummary>false</enableRulesSummary>
|
||||
<enableFilesSummary>false</enableFilesSummary>
|
||||
@@ -332,7 +241,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-pmd-plugin</artifactId>
|
||||
<version>3.1</version>
|
||||
<version>${reporting.pmd-plugin.version}</version>
|
||||
<configuration>
|
||||
<targetJdk>1.6</targetJdk>
|
||||
<linkXref>true</linkXref>
|
||||
@@ -348,15 +257,29 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
</rulesets>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>findbugs-maven-plugin</artifactId>
|
||||
<version>2.5.3</version>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</reporting>
|
||||
<dependencies>
|
||||
<!-- Note, to stay compatible with Jenkins installations only JARs compiled to 1.6 can be used -->
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
<artifactId>annotations</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
</dependency>
|
||||
<!-- Set this to test so that each project that uses this has to have its own implementation of SLF4J -->
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-classic</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.owasp</groupId>
|
||||
<artifactId>dependency-check-utils</artifactId>
|
||||
@@ -365,7 +288,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-test-framework</artifactId>
|
||||
<version>${apache.lucene.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
@@ -373,120 +295,113 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<artifactId>jmockit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
<artifactId>annotations</artifactId>
|
||||
<version>3.0.0</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-compress</artifactId>
|
||||
<version>1.9</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-io</groupId>
|
||||
<artifactId>commons-io</artifactId>
|
||||
<version>2.4</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-lang</groupId>
|
||||
<artifactId>commons-lang</artifactId>
|
||||
<version>2.6</version>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-core</artifactId>
|
||||
<version>${apache.lucene.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-analyzers-common</artifactId>
|
||||
<version>${apache.lucene.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-queryparser</artifactId>
|
||||
<version>${apache.lucene.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.velocity</groupId>
|
||||
<artifactId>velocity</artifactId>
|
||||
<version>1.7</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.h2database</groupId>
|
||||
<artifactId>h2</artifactId>
|
||||
<version>1.3.176</version>
|
||||
<scope>runtime</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.glassfish</groupId>
|
||||
<artifactId>javax.json</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.jsoup</groupId>
|
||||
<artifactId>jsoup</artifactId>
|
||||
<version>1.7.2</version>
|
||||
<type>jar</type>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.mail</groupId>
|
||||
<artifactId>mailapi</artifactId>
|
||||
</dependency>
|
||||
<!-- The following dependencies are only used during testing -->
|
||||
<dependency>
|
||||
<groupId>org.apache.maven.scm</groupId>
|
||||
<artifactId>maven-scm-provider-cvsexe</artifactId>
|
||||
<version>1.8.1</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-webmvc</artifactId>
|
||||
<version>2.5.5</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.security</groupId>
|
||||
<artifactId>spring-security-web</artifactId>
|
||||
<version>3.0.0.RELEASE</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.hazelcast</groupId>
|
||||
<artifactId>hazelcast</artifactId>
|
||||
<version>2.5</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>net.sf.ehcache</groupId>
|
||||
<artifactId>ehcache-core</artifactId>
|
||||
<version>2.2.0</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.struts</groupId>
|
||||
<artifactId>struts2-core</artifactId>
|
||||
<version>2.1.2</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>jetty</artifactId>
|
||||
<version>6.1.0</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.axis2</groupId>
|
||||
<artifactId>axis2-spring</artifactId>
|
||||
<version>1.4.1</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.axis2</groupId>
|
||||
<artifactId>axis2-adb</artifactId>
|
||||
<version>1.4.1</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
@@ -494,7 +409,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<artifactId>daytrader-ear</artifactId>
|
||||
<version>2.1.7</version>
|
||||
<type>ear</type>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
@@ -502,7 +417,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<artifactId>war</artifactId>
|
||||
<version>4.0</version>
|
||||
<type>war</type>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
@@ -510,41 +425,50 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<artifactId>dojo-war</artifactId>
|
||||
<version>1.3.0</version>
|
||||
<type>war</type>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.openjpa</groupId>
|
||||
<artifactId>openjpa</artifactId>
|
||||
<version>2.0.1</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.inject</groupId>
|
||||
<artifactId>guice</artifactId>
|
||||
<version>3.0</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.retry</groupId>
|
||||
<artifactId>spring-retry</artifactId>
|
||||
<version>1.1.0.RELEASE</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>uk.ltd.getahead</groupId>
|
||||
<artifactId>dwr</artifactId>
|
||||
<version>1.1.1</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.mail</groupId>
|
||||
<artifactId>mailapi</artifactId>
|
||||
<version>1.5.2</version>
|
||||
<groupId>xalan</groupId>
|
||||
<artifactId>xalan</artifactId>
|
||||
<version>2.7.0</version>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.thoughtworks.xstream</groupId>
|
||||
<artifactId>xstream</artifactId>
|
||||
<version>1.4.8</version>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<profiles>
|
||||
@@ -553,7 +477,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<activation>
|
||||
<property>
|
||||
<name>mysql</name>
|
||||
<!--value>test</value-->
|
||||
</property>
|
||||
</activation>
|
||||
<build>
|
||||
@@ -561,7 +484,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<version>2.18.1</version>
|
||||
<configuration>
|
||||
<skip>true</skip>
|
||||
</configuration>
|
||||
@@ -569,12 +491,11 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-failsafe-plugin</artifactId>
|
||||
<version>2.18.1</version>
|
||||
<configuration>
|
||||
<systemProperties>
|
||||
<property>
|
||||
<name>data.driver_path</name>
|
||||
<value>${basedir}/${driver_path}</value>
|
||||
<value>${driver_path}</value>
|
||||
</property>
|
||||
<property>
|
||||
<name>data.driver_name</name>
|
||||
@@ -602,14 +523,75 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
</build>
|
||||
</profile>
|
||||
<profile>
|
||||
<!-- The following profile adds additional
|
||||
dependencies that are only used during testing.
|
||||
Additionally, these are only added when using "allTests" to
|
||||
make the build slightly faster in most cases. -->
|
||||
<id>Postgresql-IntegrationTest</id>
|
||||
<activation>
|
||||
<property>
|
||||
<name>postgresql</name>
|
||||
</property>
|
||||
</activation>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.postgresql</groupId>
|
||||
<artifactId>postgresql</artifactId>
|
||||
<version>9.4-1204-jdbc42</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
<skip>true</skip>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-failsafe-plugin</artifactId>
|
||||
<configuration>
|
||||
<systemProperties>
|
||||
<property>
|
||||
<name>data.driver_path</name>
|
||||
<value>${driver_path}</value>
|
||||
</property>
|
||||
<property>
|
||||
<name>data.driver_name</name>
|
||||
<value>${driver_name}</value>
|
||||
</property>
|
||||
<property>
|
||||
<name>data.connection_string</name>
|
||||
<value>${connection_string}</value>
|
||||
</property>
|
||||
</systemProperties>
|
||||
<includes>
|
||||
<include>**/*MySQLTest.java</include>
|
||||
</includes>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<goals>
|
||||
<goal>integration-test</goal>
|
||||
<goal>verify</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</profile>
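The MySQL and PostgreSQL integration-test profiles above pass the JDBC driver location and connection details to failsafe through <systemProperties>. As a rough, hypothetical sketch of how a test could pick those values up (the property names mirror the configuration above; the fallback values are invented for illustration):

public class ConnectionSettingsExample {
    public static void main(String[] args) {
        // Property names match the <systemProperties> entries configured for
        // maven-failsafe-plugin above; the fallback values are illustrative only.
        final String driverPath = System.getProperty("data.driver_path", "/path/to/jdbc-driver.jar");
        final String driverName = System.getProperty("data.driver_name", "org.postgresql.Driver");
        final String connectionString = System.getProperty("data.connection_string",
                "jdbc:postgresql://localhost:5432/dependencycheck");

        System.out.println("driver path: " + driverPath);
        System.out.println("driver name: " + driverName);
        System.out.println("connection:  " + connectionString);
    }
}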
|
||||
<!--
|
||||
The following profile adds additional dependencies that are only
|
||||
used during testing.
|
||||
|
||||
TODO move the following FP tests to a separate invoker test in the
|
||||
maven plugin project. Add checks against the XML to validate that
|
||||
these do not report FP.
|
||||
-->
|
||||
<!--profile>
|
||||
<id>False Positive Tests</id>
|
||||
<activation>
|
||||
<property>
|
||||
<name>allTests</name>
|
||||
<name>releaseTesting</name>
|
||||
</property>
|
||||
</activation>
|
||||
<dependencies>
|
||||
@@ -617,158 +599,136 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
<groupId>org.apache.xmlgraphics</groupId>
|
||||
<artifactId>batik-util</artifactId>
|
||||
<version>1.7</version>
|
||||
<scope>provided</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.thoughtworks.xstream</groupId>
|
||||
<artifactId>xstream</artifactId>
|
||||
<version>1.4.2</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.ws.security</groupId>
|
||||
<artifactId>wss4j</artifactId>
|
||||
<version>1.5.7</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.ganyo</groupId>
|
||||
<artifactId>gcm-server</artifactId>
|
||||
<version>1.0.2</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.python</groupId>
|
||||
<artifactId>jython-standalone</artifactId>
|
||||
<version>2.7-b1</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.jruby</groupId>
|
||||
<artifactId>jruby-complete</artifactId>
|
||||
<version>1.7.4</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.jruby</groupId>
|
||||
<artifactId>jruby</artifactId>
|
||||
<version>1.6.3</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.glassfish.jersey.core</groupId>
|
||||
<artifactId>jersey-client</artifactId>
|
||||
<version>2.12</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.jersey</groupId>
|
||||
<artifactId>jersey-client</artifactId>
|
||||
<version>1.11.1</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.faces</groupId>
|
||||
<artifactId>jsf-impl</artifactId>
|
||||
<version>2.2.8-02</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.inject</groupId>
|
||||
<artifactId>guice</artifactId>
|
||||
<version>3.0</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.opensaml</groupId>
|
||||
<artifactId>xmltooling</artifactId>
|
||||
<version>1.4.1</version>
|
||||
<scope>provided</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-webmvc</artifactId>
|
||||
<version>3.2.12.RELEASE</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.code.gson</groupId>
|
||||
<artifactId>gson</artifactId>
|
||||
<version>2.3.1</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.gerrit</groupId>
|
||||
<artifactId>gerrit-extension-api</artifactId>
|
||||
<version>2.11</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.apis</groupId>
|
||||
<artifactId>google-api-services-sqladmin</artifactId>
|
||||
<version>v1beta4-rev5-1.20.0</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.gwt.google-apis</groupId>
|
||||
<artifactId>gwt-gears</artifactId>
|
||||
<version>1.2.1</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mozilla</groupId>
|
||||
<artifactId>rhino</artifactId>
|
||||
<version>1.7.6</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>com.microsoft.windowsazure</groupId>
|
||||
<artifactId>microsoft-azure-api-media</artifactId>
|
||||
<version>0.5.0</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.microsoft.windowsazure</groupId>
|
||||
<artifactId>microsoft-azure-api-management-sql</artifactId>
|
||||
<version>0.5.0</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.microsoft.bingads</groupId>
|
||||
<artifactId>microsoft.bingads</artifactId>
|
||||
<version>9.3.4</version>
|
||||
<scope>provided</scope>
|
||||
<scope>test</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</profile>
|
||||
</profile-->
|
||||
</profiles>
|
||||
<properties>
|
||||
<!-- new versions of lucene are compiled with JDK 1.7 and cannot be used ubiquitously in Jenkins;
|
||||
thus, we cannot upgrade beyond 4.7.2 -->
|
||||
<apache.lucene.version>4.7.2</apache.lucene.version>
|
||||
</properties>
|
||||
</project>
|
||||
|
||||
@@ -0,0 +1,130 @@
/*
 * This file is part of dependency-check-core.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Copyright (c) 2016 Stefan Neuhaus. All Rights Reserved.
 */
package org.owasp.dependencycheck;

import org.owasp.dependencycheck.analyzer.Analyzer;
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.concurrent.Callable;

/**
 * Task to support parallelism of dependency-check analysis. Analyses a single
 * {@link Dependency} by a specific {@link Analyzer}.
 *
 * @author Stefan Neuhaus
 */
class AnalysisTask implements Callable<Void> {

    /**
     * Instance of the logger.
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(AnalysisTask.class);

    /**
     * A reference to the analyzer.
     */
    private final Analyzer analyzer;
    /**
     * The dependency to analyze.
     */
    private final Dependency dependency;
    /**
     * A reference to the dependency-check engine.
     */
    private final Engine engine;
    /**
     * The list of exceptions that may occur during analysis.
     */
    private final List<Throwable> exceptions;
    /**
     * A reference to the global settings object.
     */
    private final Settings settings;

    /**
     * Creates a new analysis task.
     *
     * @param analyzer a reference of the analyzer to execute
     * @param dependency the dependency to analyze
     * @param engine the dependency-check engine
     * @param exceptions exceptions that occur during analysis will be added to
     * this collection of exceptions
     * @param settings a reference to the global settings object; this is
     * necessary so that when the thread is started the dependencies have a
     * correct reference to the global settings.
     */
    AnalysisTask(Analyzer analyzer, Dependency dependency, Engine engine, List<Throwable> exceptions, Settings settings) {
        this.analyzer = analyzer;
        this.dependency = dependency;
        this.engine = engine;
        this.exceptions = exceptions;
        this.settings = settings;
    }

    /**
     * Executes the analysis task.
     *
     * @return null
     * @throws Exception thrown if unable to execute the analysis task
     */
    @Override
    public Void call() {
        try {
            Settings.setInstance(settings);

            if (shouldAnalyze()) {
                LOGGER.debug("Begin Analysis of '{}' ({})", dependency.getActualFilePath(), analyzer.getName());
                try {
                    analyzer.analyze(dependency, engine);
                } catch (AnalysisException ex) {
                    LOGGER.warn("An error occurred while analyzing '{}' ({}).", dependency.getActualFilePath(), analyzer.getName());
                    LOGGER.debug("", ex);
                    exceptions.add(ex);
                } catch (Throwable ex) {
                    LOGGER.warn("An unexpected error occurred during analysis of '{}' ({}): {}",
                            dependency.getActualFilePath(), analyzer.getName(), ex.getMessage());
                    LOGGER.debug("", ex);
                    exceptions.add(ex);
                }
            }
        } finally {
            Settings.cleanup(false);
        }
        return null;
    }

    /**
     * Determines if the analyzer can analyze the given dependency.
     *
     * @return whether or not the analyzer can analyze the dependency
     */
    boolean shouldAnalyze() {
        if (analyzer instanceof FileTypeAnalyzer) {
            final FileTypeAnalyzer fileTypeAnalyzer = (FileTypeAnalyzer) analyzer;
            return fileTypeAnalyzer.accept(dependency.getActualFile());
        }

        return true;
    }
}
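The important property of call() above is that each task contains its own failures: analysis errors are logged and appended to the shared exceptions list rather than rethrown, so one problematic dependency does not abort the whole batch. A minimal standalone sketch of that pattern, with an invented task body standing in for the real analyzer.analyze(dependency, engine) call:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;

class FailureContainingTask implements Callable<Void> {

    private final String name;
    private final List<Throwable> exceptions;

    FailureContainingTask(String name, List<Throwable> exceptions) {
        this.name = name;
        this.exceptions = exceptions;
    }

    @Override
    public Void call() {
        try {
            // Stand-in for the real analysis work.
            if (name.startsWith("bad")) {
                throw new IllegalStateException("analysis failed for " + name);
            }
            System.out.println("analyzed " + name);
        } catch (Throwable ex) {
            // Record the failure and keep going; the caller inspects the list afterwards.
            exceptions.add(ex);
        }
        return null;
    }

    public static void main(String[] args) {
        final List<Throwable> exceptions = Collections.synchronizedList(new ArrayList<Throwable>());
        new FailureContainingTask("good.jar", exceptions).call();
        new FailureContainingTask("bad.jar", exceptions).call();
        System.out.println(exceptions.size() + " failure(s) recorded");
    }
}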
|
||||
@@ -17,20 +17,10 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.EnumMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import org.owasp.dependencycheck.analyzer.AnalysisPhase;
|
||||
import org.owasp.dependencycheck.analyzer.Analyzer;
|
||||
import org.owasp.dependencycheck.analyzer.AnalyzerService;
|
||||
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
@@ -38,46 +28,71 @@ import org.owasp.dependencycheck.data.update.CachedWebDataSource;
|
||||
import org.owasp.dependencycheck.data.update.UpdateService;
|
||||
import org.owasp.dependencycheck.data.update.exception.UpdateException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.exception.ExceptionCollection;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.exception.NoDataException;
|
||||
import org.owasp.dependencycheck.utils.FileUtils;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CancellationException;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* Scans files, directories, etc. for Dependencies. Analyzers are loaded and used to process the files found by the scan, if a
|
||||
* file is encountered and an Analyzer is associated with the file type then the file is turned into a dependency.
|
||||
* Scans files, directories, etc. for Dependencies. Analyzers are loaded and
|
||||
* used to process the files found by the scan; if a file is encountered and an
|
||||
* Analyzer is associated with the file type, then the file is turned into a
|
||||
* dependency.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class Engine {
|
||||
public class Engine implements FileFilter {
|
||||
|
||||
/**
|
||||
* The list of dependencies.
|
||||
*/
|
||||
private List<Dependency> dependencies = new ArrayList<Dependency>();
|
||||
private final List<Dependency> dependencies = Collections.synchronizedList(new ArrayList<Dependency>());
|
||||
/**
|
||||
* A Map of analyzers grouped by Analysis phase.
|
||||
*/
|
||||
private EnumMap<AnalysisPhase, List<Analyzer>> analyzers = new EnumMap<AnalysisPhase, List<Analyzer>>(AnalysisPhase.class);
|
||||
private final Map<AnalysisPhase, List<Analyzer>> analyzers = new EnumMap<AnalysisPhase, List<Analyzer>>(AnalysisPhase.class);
|
||||
|
||||
/**
|
||||
* A Map of analyzers grouped by Analysis phase.
|
||||
*/
|
||||
private Set<FileTypeAnalyzer> fileTypeAnalyzers = new HashSet<FileTypeAnalyzer>();
|
||||
private final Set<FileTypeAnalyzer> fileTypeAnalyzers = new HashSet<FileTypeAnalyzer>();
|
||||
|
||||
/**
|
||||
* The ClassLoader to use when dynamically loading Analyzer and Update services.
|
||||
* The ClassLoader to use when dynamically loading Analyzer and Update
|
||||
* services.
|
||||
*/
|
||||
private ClassLoader serviceClassLoader = Thread.currentThread().getContextClassLoader();
|
||||
/**
|
||||
* The Logger for use throughout the class.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(Engine.class.getName());
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Engine.class);
|
||||
|
||||
/**
|
||||
* Creates a new Engine.
|
||||
*
|
||||
* @throws DatabaseException thrown if there is an error connecting to the database
|
||||
* @throws DatabaseException thrown if there is an error connecting to the
|
||||
* database
|
||||
*/
|
||||
public Engine() throws DatabaseException {
|
||||
initializeEngine();
|
||||
@@ -87,7 +102,8 @@ public class Engine {
|
||||
* Creates a new Engine.
|
||||
*
|
||||
* @param serviceClassLoader a reference the class loader being used
|
||||
* @throws DatabaseException thrown if there is an error connecting to the database
|
||||
* @throws DatabaseException thrown if there is an error connecting to the
|
||||
* database
|
||||
*/
|
||||
public Engine(ClassLoader serviceClassLoader) throws DatabaseException {
|
||||
this.serviceClassLoader = serviceClassLoader;
|
||||
@@ -95,9 +111,11 @@ public class Engine {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new Engine using the specified classloader to dynamically load Analyzer and Update services.
|
||||
* Creates a new Engine using the specified classloader to dynamically load
|
||||
* Analyzer and Update services.
|
||||
*
|
||||
* @throws DatabaseException thrown if there is an error connecting to the database
|
||||
* @throws DatabaseException thrown if there is an error connecting to the
|
||||
* database
|
||||
*/
|
||||
protected final void initializeEngine() throws DatabaseException {
|
||||
ConnectionFactory.initialize();
|
||||
@@ -112,7 +130,8 @@ public class Engine {
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the analyzers specified in the configuration file (or system properties).
|
||||
* Loads the analyzers specified in the configuration file (or system
|
||||
* properties).
|
||||
*/
|
||||
private void loadAnalyzers() {
|
||||
if (!analyzers.isEmpty()) {
|
||||
@@ -123,9 +142,8 @@ public class Engine {
|
||||
}
|
||||
|
||||
final AnalyzerService service = new AnalyzerService(serviceClassLoader);
|
||||
final Iterator<Analyzer> iterator = service.getAnalyzers();
|
||||
while (iterator.hasNext()) {
|
||||
final Analyzer a = iterator.next();
|
||||
final List<Analyzer> iterator = service.getAnalyzers();
|
||||
for (Analyzer a : iterator) {
|
||||
analyzers.get(a.getAnalysisPhase()).add(a);
|
||||
if (a instanceof FileTypeAnalyzer) {
|
||||
this.fileTypeAnalyzers.add((FileTypeAnalyzer) a);
|
||||
@@ -144,11 +162,17 @@ public class Engine {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the dependencies identified.
|
||||
* Get the dependencies identified. The returned list is a reference to the
|
||||
* engine's synchronized list. <b>You must synchronize on the returned
|
||||
* list</b> when you modify and iterate over it from multiple threads. E.g.
|
||||
* this holds for analyzers supporting parallel processing during their
|
||||
* analysis phase.
|
||||
*
|
||||
* @return the dependencies identified
|
||||
* @see Collections#synchronizedList(List)
|
||||
* @see Analyzer#supportsParallelProcessing()
|
||||
*/
|
||||
public List<Dependency> getDependencies() {
|
||||
public synchronized List<Dependency> getDependencies() {
|
||||
return dependencies;
|
||||
}
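As the javadoc above warns, the returned list is the engine's synchronized list, so compound operations such as iteration must hold the list's monitor while other threads may still be adding dependencies. A small illustrative sketch of that rule, using a plain String list as a stand-in for List<Dependency>:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class SynchronizedListIteration {
    public static void main(String[] args) {
        final List<String> dependencies = Collections.synchronizedList(new ArrayList<String>());
        dependencies.add("commons-io-2.4.jar");
        dependencies.add("lucene-core-4.7.2.jar");

        // Individual add/get calls are already synchronized, but iteration is a compound
        // operation and must be guarded by the list's own monitor; otherwise a concurrent
        // modification by another thread can break the loop.
        synchronized (dependencies) {
            for (String dependency : dependencies) {
                System.out.println(dependency);
            }
        }
    }
}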
|
||||
|
||||
@@ -158,23 +182,40 @@ public class Engine {
|
||||
* @param dependencies the dependencies
|
||||
*/
|
||||
public void setDependencies(List<Dependency> dependencies) {
|
||||
this.dependencies = dependencies;
|
||||
synchronized (this.dependencies) {
|
||||
this.dependencies.clear();
|
||||
this.dependencies.addAll(dependencies);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans an array of files or directories. If a directory is specified, it will be scanned recursively. Any dependencies
|
||||
* identified are added to the dependency collection.
|
||||
* Scans an array of files or directories. If a directory is specified, it
|
||||
* will be scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param paths an array of paths to files or directories to be analyzed
|
||||
* @return the list of dependencies scanned
|
||||
*
|
||||
* @since v0.3.2.5
|
||||
*/
|
||||
public List<Dependency> scan(String[] paths) {
|
||||
return scan(paths, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans an array of files or directories. If a directory is specified, it
|
||||
* will be scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param paths an array of paths to files or directories to be analyzed
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(String[] paths, String projectReference) {
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
for (String path : paths) {
|
||||
final File file = new File(path);
|
||||
final List<Dependency> d = scan(file);
|
||||
final List<Dependency> d = scan(path, projectReference);
|
||||
if (d != null) {
|
||||
deps.addAll(d);
|
||||
}
|
||||
@@ -183,30 +224,61 @@ public class Engine {
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a given file or directory. If a directory is specified, it will be scanned recursively. Any dependencies identified
|
||||
* are added to the dependency collection.
|
||||
* Scans a given file or directory. If a directory is specified, it will be
|
||||
* scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param path the path to a file or directory to be analyzed
|
||||
* @return the list of dependencies scanned
|
||||
*/
|
||||
public List<Dependency> scan(String path) {
|
||||
final File file = new File(path);
|
||||
return scan(file);
|
||||
return scan(path, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans an array of files or directories. If a directory is specified, it will be scanned recursively. Any dependencies
|
||||
* identified are added to the dependency collection.
|
||||
* Scans a given file or directory. If a directory is specified, it will be
|
||||
* scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param path the path to a file or directory to be analyzed
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(String path, String projectReference) {
|
||||
final File file = new File(path);
|
||||
return scan(file, projectReference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans an array of files or directories. If a directory is specified, it
|
||||
* will be scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param files an array of paths to files or directories to be analyzed.
|
||||
* @return the list of dependencies
|
||||
*
|
||||
* @since v0.3.2.5
|
||||
*/
|
||||
public List<Dependency> scan(File[] files) {
|
||||
return scan(files, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans an array of files or directories. If a directory is specified, it
|
||||
* will be scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param files an array of paths to files or directories to be analyzed.
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(File[] files, String projectReference) {
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
for (File file : files) {
|
||||
final List<Dependency> d = scan(file);
|
||||
final List<Dependency> d = scan(file, projectReference);
|
||||
if (d != null) {
|
||||
deps.addAll(d);
|
||||
}
|
||||
@@ -215,38 +287,33 @@ public class Engine {
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a list of files or directories. If a directory is specified, it will be scanned recursively. Any dependencies
|
||||
* identified are added to the dependency collection.
|
||||
* Scans a collection of files or directories. If a directory is specified,
|
||||
* it will be scanned recursively. Any dependencies identified are added to
|
||||
* the dependency collection.
|
||||
*
|
||||
* @param files a set of paths to files or directories to be analyzed
|
||||
* @return the list of dependencies scanned
|
||||
*
|
||||
* @since v0.3.2.5
|
||||
*/
|
||||
public List<Dependency> scan(Set<File> files) {
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
for (File file : files) {
|
||||
final List<Dependency> d = scan(file);
|
||||
if (d != null) {
|
||||
deps.addAll(d);
|
||||
}
|
||||
}
|
||||
return deps;
|
||||
public List<Dependency> scan(Collection<File> files) {
|
||||
return scan(files, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a list of files or directories. If a directory is specified, it will be scanned recursively. Any dependencies
|
||||
* identified are added to the dependency collection.
|
||||
* Scans a collection of files or directories. If a directory is specified,
|
||||
* it will be scanned recursively. Any dependencies identified are added to
|
||||
* the dependency collection.
|
||||
*
|
||||
* @param files a set of paths to files or directories to be analyzed
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies scanned
|
||||
*
|
||||
* @since v0.3.2.5
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(List<File> files) {
|
||||
public List<Dependency> scan(Collection<File> files, String projectReference) {
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
for (File file : files) {
|
||||
final List<Dependency> d = scan(file);
|
||||
final List<Dependency> d = scan(file, projectReference);
|
||||
if (d != null) {
|
||||
deps.addAll(d);
|
||||
}
|
||||
@@ -255,21 +322,35 @@ public class Engine {
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a given file or directory. If a directory is specified, it will be scanned recursively. Any dependencies identified
|
||||
* are added to the dependency collection.
|
||||
* Scans a given file or directory. If a directory is specified, it will be
|
||||
* scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param file the path to a file or directory to be analyzed
|
||||
* @return the list of dependencies scanned
|
||||
*
|
||||
* @since v0.3.2.4
|
||||
*
|
||||
*/
|
||||
public List<Dependency> scan(File file) {
|
||||
return scan(file, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a given file or directory. If a directory is specified, it will be
|
||||
* scanned recursively. Any dependencies identified are added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param file the path to a file or directory to be analyzed
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of dependencies scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
public List<Dependency> scan(File file, String projectReference) {
|
||||
if (file.exists()) {
|
||||
if (file.isDirectory()) {
|
||||
return scanDirectory(file);
|
||||
return scanDirectory(file, projectReference);
|
||||
} else {
|
||||
final Dependency d = scanFile(file);
|
||||
final Dependency d = scanFile(file, projectReference);
|
||||
if (d != null) {
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
deps.add(d);
|
||||
@@ -281,23 +362,38 @@ public class Engine {
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively scans files and directories. Any dependencies identified are added to the dependency collection.
|
||||
* Recursively scans files and directories. Any dependencies identified are
|
||||
* added to the dependency collection.
|
||||
*
|
||||
* @param dir the directory to scan
|
||||
* @return the list of Dependency objects scanned
|
||||
*/
|
||||
protected List<Dependency> scanDirectory(File dir) {
|
||||
return scanDirectory(dir, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively scans files and directories. Any dependencies identified are
|
||||
* added to the dependency collection.
|
||||
*
|
||||
* @param dir the directory to scan
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the list of Dependency objects scanned
|
||||
* @since v1.4.4
|
||||
*/
|
||||
protected List<Dependency> scanDirectory(File dir, String projectReference) {
|
||||
final File[] files = dir.listFiles();
|
||||
final List<Dependency> deps = new ArrayList<Dependency>();
|
||||
if (files != null) {
|
||||
for (File f : files) {
|
||||
if (f.isDirectory()) {
|
||||
final List<Dependency> d = scanDirectory(f);
|
||||
final List<Dependency> d = scanDirectory(f, projectReference);
|
||||
if (d != null) {
|
||||
deps.addAll(d);
|
||||
}
|
||||
} else {
|
||||
final Dependency d = scanFile(f);
|
||||
final Dependency d = scanFile(f, projectReference);
|
||||
deps.add(d);
|
||||
}
|
||||
}
|
||||
@@ -306,107 +402,134 @@ public class Engine {
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a specified file. If a dependency is identified it is added to the dependency collection.
|
||||
* Scans a specified file. If a dependency is identified it is added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param file The file to scan
|
||||
* @return the scanned dependency
|
||||
*/
|
||||
protected Dependency scanFile(File file) {
|
||||
if (!file.isFile()) {
|
||||
final String msg = String.format("Path passed to scanFile(File) is not a file: %s. Skipping the file.", file.toString());
|
||||
LOGGER.log(Level.FINE, msg);
|
||||
return null;
|
||||
}
|
||||
final String fileName = file.getName();
|
||||
String extension = FileUtils.getFileExtension(fileName);
|
||||
if (null == extension) {
|
||||
extension = fileName;
|
||||
}
|
||||
return scanFile(file, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a specified file. If a dependency is identified it is added to the
|
||||
* dependency collection.
|
||||
*
|
||||
* @param file The file to scan
|
||||
* @param projectReference the name of the project or scope in which the
|
||||
* dependency was identified
|
||||
* @return the scanned dependency
|
||||
* @since v1.4.4
|
||||
*/
|
||||
protected Dependency scanFile(File file, String projectReference) {
|
||||
Dependency dependency = null;
|
||||
if (supportsExtension(extension)) {
|
||||
dependency = new Dependency(file);
|
||||
if (extension == null ? fileName == null : extension.equals(fileName)) {
|
||||
dependency.setFileExtension(extension);
|
||||
if (file.isFile()) {
|
||||
if (accept(file)) {
|
||||
dependency = new Dependency(file);
|
||||
if (projectReference != null) {
|
||||
dependency.addProjectReference(projectReference);
|
||||
}
|
||||
final String sha1 = dependency.getSha1sum();
|
||||
boolean found = false;
|
||||
synchronized (dependencies) {
|
||||
if (sha1 != null) {
|
||||
for (Dependency existing : dependencies) {
|
||||
if (sha1.equals(existing.getSha1sum())) {
|
||||
found = true;
|
||||
if (projectReference != null) {
|
||||
existing.addProjectReference(projectReference);
|
||||
}
|
||||
if (existing.getActualFilePath() != null && dependency.getActualFilePath() != null
|
||||
&& !existing.getActualFilePath().equals(dependency.getActualFilePath())) {
|
||||
existing.addRelatedDependency(dependency);
|
||||
} else {
|
||||
dependency = existing;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
dependencies.add(dependency);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
LOGGER.debug("Path passed to scanFile(File) is not a file: {}. Skipping the file.", file);
|
||||
}
|
||||
dependencies.add(dependency);
|
||||
}
|
||||
return dependency;
|
||||
}
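The reworked scanFile above de-duplicates by SHA-1: when a file's checksum matches an already-scanned dependency, the existing entry is reused (or the new file is attached as a related dependency) instead of adding a second entry. A rough standalone sketch of checksum-keyed de-duplication, assuming plain java.security.MessageDigest rather than the project's own hashing utilities:

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.security.MessageDigest;
import java.util.HashMap;
import java.util.Map;

public class Sha1DedupSketch {

    // Computes the SHA-1 of a file as a lower-case hex string.
    static String sha1(File file) throws Exception {
        final MessageDigest md = MessageDigest.getInstance("SHA-1");
        final InputStream in = new FileInputStream(file);
        try {
            final byte[] buffer = new byte[8192];
            int read;
            while ((read = in.read(buffer)) != -1) {
                md.update(buffer, 0, read);
            }
        } finally {
            in.close();
        }
        final StringBuilder hex = new StringBuilder();
        for (byte b : md.digest()) {
            hex.append(String.format("%02x", b & 0xff));
        }
        return hex.toString();
    }

    public static void main(String[] args) throws Exception {
        // The first file seen for each checksum wins; later duplicates are reported, not re-added.
        final Map<String, File> seen = new HashMap<String, File>();
        for (String path : args) {
            final File file = new File(path);
            final String key = sha1(file);
            if (seen.containsKey(key)) {
                System.out.println(file + " duplicates " + seen.get(key));
            } else {
                seen.put(key, file);
            }
        }
    }
}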
|
||||
|
||||
/**
|
||||
* Runs the analyzers against all of the dependencies.
|
||||
* Runs the analyzers against all of the dependencies. Since the mutable
|
||||
* dependencies list is exposed via {@link #getDependencies()}, this method
|
||||
* iterates over a copy of the dependencies list. Thus, the potential for
|
||||
* {@link java.util.ConcurrentModificationException}s is avoided, and
|
||||
* analyzers may safely add or remove entries from the dependencies list.
|
||||
* <p>
|
||||
* Every effort is made to complete analysis on the dependencies. In some
|
||||
* cases an exception will occur with part of the analysis being performed
|
||||
* which may not affect the entire analysis. If an exception occurs it will
|
||||
* be included in the thrown exception collection.
|
||||
*
|
||||
* @throws ExceptionCollection a collection of any exceptions that occurred
|
||||
* during analysis
|
||||
*/
|
||||
public void analyzeDependencies() {
|
||||
public void analyzeDependencies() throws ExceptionCollection {
|
||||
final List<Throwable> exceptions = Collections.synchronizedList(new ArrayList<Throwable>());
|
||||
boolean autoUpdate = true;
|
||||
try {
|
||||
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
|
||||
} catch (InvalidSettingException ex) {
|
||||
LOGGER.log(Level.FINE, "Invalid setting for auto-update; using true.");
|
||||
LOGGER.debug("Invalid setting for auto-update; using true.");
|
||||
exceptions.add(ex);
|
||||
}
|
||||
if (autoUpdate) {
|
||||
doUpdates();
|
||||
try {
|
||||
doUpdates();
|
||||
} catch (UpdateException ex) {
|
||||
exceptions.add(ex);
|
||||
LOGGER.warn("Unable to update Cached Web DataSource, using local "
|
||||
+ "data instead. Results may not include recent vulnerabilities.");
|
||||
LOGGER.debug("Update Error", ex);
|
||||
}
|
||||
}
|
||||
|
||||
//need to ensure that data exists
|
||||
try {
|
||||
ensureDataExists();
|
||||
} catch (NoDataException ex) {
|
||||
final String msg = String.format("%s%n%nUnable to continue dependency-check analysis.", ex.getMessage());
|
||||
LOGGER.log(Level.SEVERE, msg);
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
return;
|
||||
throwFatalExceptionCollection("Unable to continue dependency-check analysis.", ex, exceptions);
|
||||
} catch (DatabaseException ex) {
|
||||
final String msg = String.format("%s%n%nUnable to continue dependency-check analysis.", ex.getMessage());
|
||||
LOGGER.log(Level.SEVERE, msg);
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
return;
|
||||
|
||||
throwFatalExceptionCollection("Unable to connect to the dependency-check database.", ex, exceptions);
|
||||
}
|
||||
|
||||
final String logHeader = String.format("%n"
|
||||
+ "----------------------------------------------------%n"
|
||||
+ "BEGIN ANALYSIS%n"
|
||||
+ "----------------------------------------------------");
|
||||
LOGGER.log(Level.FINE, logHeader);
|
||||
LOGGER.log(Level.INFO, "Analysis Starting");
|
||||
LOGGER.debug("\n----------------------------------------------------\nBEGIN ANALYSIS\n----------------------------------------------------");
|
||||
LOGGER.info("Analysis Started");
|
||||
final long analysisStart = System.currentTimeMillis();
|
||||
|
||||
// analysis phases
|
||||
for (AnalysisPhase phase : AnalysisPhase.values()) {
|
||||
final List<Analyzer> analyzerList = analyzers.get(phase);
|
||||
|
||||
for (Analyzer a : analyzerList) {
|
||||
a = initializeAnalyzer(a);
|
||||
for (final Analyzer analyzer : analyzerList) {
|
||||
final long analyzerStart = System.currentTimeMillis();
|
||||
try {
|
||||
initializeAnalyzer(analyzer);
|
||||
} catch (InitializationException ex) {
|
||||
exceptions.add(ex);
|
||||
continue;
|
||||
}
|
||||
|
||||
/* need to create a copy of the collection because some of the
|
||||
* analyzers may modify it. This prevents ConcurrentModificationExceptions.
|
||||
* This is okay for adds/deletes because it happens per analyzer.
|
||||
*/
|
||||
final String msg = String.format("Begin Analyzer '%s'", a.getName());
|
||||
LOGGER.log(Level.FINE, msg);
|
||||
final Set<Dependency> dependencySet = new HashSet<Dependency>();
|
||||
dependencySet.addAll(dependencies);
|
||||
for (Dependency d : dependencySet) {
|
||||
boolean shouldAnalyze = true;
|
||||
if (a instanceof FileTypeAnalyzer) {
|
||||
final FileTypeAnalyzer fAnalyzer = (FileTypeAnalyzer) a;
|
||||
shouldAnalyze = fAnalyzer.supportsExtension(d.getFileExtension());
|
||||
}
|
||||
if (shouldAnalyze) {
|
||||
final String msgFile = String.format("Begin Analysis of '%s'", d.getActualFilePath());
|
||||
LOGGER.log(Level.FINE, msgFile);
|
||||
try {
|
||||
a.analyze(d, this);
|
||||
} catch (AnalysisException ex) {
|
||||
final String exMsg = String.format("An error occurred while analyzing '%s'.", d.getActualFilePath());
|
||||
LOGGER.log(Level.WARNING, exMsg);
|
||||
LOGGER.log(Level.FINE, "", ex);
|
||||
} catch (Throwable ex) {
|
||||
final String axMsg = String.format("An unexpected error occurred during analysis of '%s'", d.getActualFilePath());
|
||||
//final AnalysisException ax = new AnalysisException(axMsg, ex);
|
||||
LOGGER.log(Level.WARNING, axMsg);
|
||||
LOGGER.log(Level.FINE, "", ex);
|
||||
}
|
||||
}
|
||||
if (analyzer.isEnabled()) {
|
||||
executeAnalysisTasks(analyzer, exceptions);
|
||||
|
||||
final long analyzerDurationMillis = System.currentTimeMillis() - analyzerStart;
|
||||
final long analyzerDurationSeconds = TimeUnit.MILLISECONDS.toSeconds(analyzerDurationMillis);
|
||||
LOGGER.info("Finished {} ({} seconds)", analyzer.getName(), analyzerDurationSeconds);
|
||||
} else {
|
||||
LOGGER.debug("Skipping {} (not enabled)", analyzer.getName());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -418,12 +541,82 @@ public class Engine {
|
||||
}
|
||||
}
|
||||
|
||||
final String logFooter = String.format("%n"
|
||||
+ "----------------------------------------------------%n"
|
||||
+ "END ANALYSIS%n"
|
||||
+ "----------------------------------------------------");
|
||||
LOGGER.log(Level.FINE, logFooter);
|
||||
LOGGER.log(Level.INFO, "Analysis Complete");
|
||||
LOGGER.debug("\n----------------------------------------------------\nEND ANALYSIS\n----------------------------------------------------");
|
||||
final long analysisDurationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - analysisStart);
|
||||
LOGGER.info("Analysis Complete ({} seconds)", analysisDurationSeconds);
|
||||
if (exceptions.size() > 0) {
|
||||
throw new ExceptionCollection("One or more exceptions occurred during dependency-check analysis", exceptions);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the analyzer using multiple threads.
|
||||
*
|
||||
* @param exceptions a collection of exceptions that occurred during
|
||||
* analysis
|
||||
* @param analyzer the analyzer to execute
|
||||
* @throws ExceptionCollection thrown if exceptions occurred during analysis
|
||||
*/
|
||||
void executeAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) throws ExceptionCollection {
|
||||
LOGGER.debug("Starting {}", analyzer.getName());
|
||||
final List<AnalysisTask> analysisTasks = getAnalysisTasks(analyzer, exceptions);
|
||||
final ExecutorService executorService = getExecutorService(analyzer);
|
||||
|
||||
try {
|
||||
final List<Future<Void>> results = executorService.invokeAll(analysisTasks, 10, TimeUnit.MINUTES);
|
||||
|
||||
// ensure there was no exception during execution
|
||||
for (Future<Void> result : results) {
|
||||
try {
|
||||
result.get();
|
||||
} catch (ExecutionException e) {
|
||||
throwFatalExceptionCollection("Analysis task failed with a fatal exception.", e, exceptions);
|
||||
} catch (CancellationException e) {
|
||||
throwFatalExceptionCollection("Analysis task timed out.", e, exceptions);
|
||||
}
|
||||
}
|
||||
} catch (InterruptedException e) {
|
||||
throwFatalExceptionCollection("Analysis has been interrupted.", e, exceptions);
|
||||
} finally {
|
||||
executorService.shutdown();
|
||||
}
|
||||
}
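executeAnalysisTasks above submits every task through ExecutorService.invokeAll with a hard timeout and then inspects each Future, treating an ExecutionException or a CancellationException as fatal. The same control flow in isolation, with trivial placeholder tasks and an invented timeout:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class InvokeAllExample {
    public static void main(String[] args) throws InterruptedException {
        final List<Callable<Void>> tasks = new ArrayList<Callable<Void>>();
        for (int i = 0; i < 4; i++) {
            final int id = i;
            tasks.add(new Callable<Void>() {
                public Void call() {
                    System.out.println("task " + id + " done");
                    return null;
                }
            });
        }

        final ExecutorService executor = Executors.newFixedThreadPool(2);
        try {
            // Tasks that have not finished within the timeout are cancelled, which
            // surfaces below as a CancellationException from Future.get().
            final List<Future<Void>> results = executor.invokeAll(tasks, 1, TimeUnit.MINUTES);
            for (Future<Void> result : results) {
                try {
                    result.get();
                } catch (ExecutionException e) {
                    System.err.println("task failed: " + e.getCause());
                } catch (CancellationException e) {
                    System.err.println("task timed out");
                }
            }
        } finally {
            executor.shutdown();
        }
    }
}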
|
||||
|
||||
/**
|
||||
* Returns the analysis tasks for the dependencies.
|
||||
*
|
||||
* @param analyzer the analyzer to create tasks for
|
||||
* @param exceptions the collection of exceptions to collect
|
||||
* @return a collection of analysis tasks
|
||||
*/
|
||||
List<AnalysisTask> getAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) {
|
||||
final List<AnalysisTask> result = new ArrayList<AnalysisTask>();
|
||||
synchronized (dependencies) {
|
||||
for (final Dependency dependency : dependencies) {
|
||||
final AnalysisTask task = new AnalysisTask(analyzer, dependency, this, exceptions, Settings.getInstance());
|
||||
result.add(task);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the executor service for a given analyzer.
|
||||
*
|
||||
* @param analyzer the analyzer to obtain an executor
|
||||
* @return the executor service
|
||||
*/
|
||||
ExecutorService getExecutorService(Analyzer analyzer) {
|
||||
if (analyzer.supportsParallelProcessing()) {
|
||||
// just a fair trade-off that should be reasonable for all analyzer types
|
||||
final int maximumNumberOfThreads = 4 * Runtime.getRuntime().availableProcessors();
|
||||
|
||||
LOGGER.debug("Parallel processing with up to {} threads: {}.", maximumNumberOfThreads, analyzer.getName());
|
||||
return Executors.newFixedThreadPool(maximumNumberOfThreads);
|
||||
} else {
|
||||
LOGGER.debug("Parallel processing is not supported: {}.", analyzer.getName());
|
||||
return Executors.newSingleThreadExecutor();
|
||||
}
|
||||
}
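The comment above calls 4 * availableProcessors() a fair trade-off for analyzers that support parallel processing; a quick, illustrative way to see what that evaluates to on a given machine:

public class PoolSizeExample {
    public static void main(String[] args) {
        // Same heuristic as getExecutorService above: four threads per available processor.
        final int cores = Runtime.getRuntime().availableProcessors();
        System.out.println(cores + " processors -> pool size " + (4 * cores));
    }
}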
|
||||
|
||||
/**
|
||||
@@ -431,21 +624,31 @@ public class Engine {
|
||||
*
|
||||
* @param analyzer the analyzer to initialize
|
||||
* @return the initialized analyzer
|
||||
* @throws InitializationException thrown when there is a problem
|
||||
* initializing the analyzer
|
||||
*/
|
||||
protected Analyzer initializeAnalyzer(Analyzer analyzer) {
|
||||
protected Analyzer initializeAnalyzer(Analyzer analyzer) throws InitializationException {
|
||||
try {
|
||||
final String msg = String.format("Initializing %s", analyzer.getName());
|
||||
LOGGER.log(Level.FINE, msg);
|
||||
LOGGER.debug("Initializing {}", analyzer.getName());
|
||||
analyzer.initialize();
|
||||
} catch (Throwable ex) {
|
||||
final String msg = String.format("Exception occurred initializing %s.", analyzer.getName());
|
||||
LOGGER.log(Level.SEVERE, msg);
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
} catch (InitializationException ex) {
|
||||
LOGGER.error("Exception occurred initializing {}.", analyzer.getName());
|
||||
LOGGER.debug("", ex);
|
||||
try {
|
||||
analyzer.close();
|
||||
} catch (Throwable ex1) {
|
||||
LOGGER.log(Level.FINEST, null, ex1);
|
||||
LOGGER.trace("", ex1);
|
||||
}
|
||||
throw ex;
|
||||
} catch (Throwable ex) {
|
||||
LOGGER.error("Unexpected exception occurred initializing {}.", analyzer.getName());
|
||||
LOGGER.debug("", ex);
|
||||
try {
|
||||
analyzer.close();
|
||||
} catch (Throwable ex1) {
|
||||
LOGGER.trace("", ex1);
|
||||
}
|
||||
throw new InitializationException("Unexpected Exception", ex);
|
||||
}
|
||||
return analyzer;
|
||||
}
|
||||
@@ -456,37 +659,35 @@ public class Engine {
|
||||
* @param analyzer the analyzer to close
|
||||
*/
|
||||
protected void closeAnalyzer(Analyzer analyzer) {
|
||||
final String msg = String.format("Closing Analyzer '%s'", analyzer.getName());
|
||||
LOGGER.log(Level.FINE, msg);
|
||||
LOGGER.debug("Closing Analyzer '{}'", analyzer.getName());
|
||||
try {
|
||||
analyzer.close();
|
||||
} catch (Throwable ex) {
|
||||
LOGGER.log(Level.FINEST, null, ex);
|
||||
LOGGER.trace("", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cycles through the cached web data sources and calls update on all of them.
|
||||
* Cycles through the cached web data sources and calls update on all of
|
||||
* them.
|
||||
*
|
||||
* @throws UpdateException thrown if the operation fails
|
||||
*/
|
||||
public void doUpdates() {
|
||||
public void doUpdates() throws UpdateException {
|
||||
LOGGER.info("Checking for updates");
|
||||
final long updateStart = System.currentTimeMillis();
|
||||
final UpdateService service = new UpdateService(serviceClassLoader);
|
||||
final Iterator<CachedWebDataSource> iterator = service.getDataSources();
|
||||
while (iterator.hasNext()) {
|
||||
final CachedWebDataSource source = iterator.next();
|
||||
try {
|
||||
source.update();
|
||||
} catch (UpdateException ex) {
|
||||
LOGGER.log(Level.WARNING,
|
||||
"Unable to update Cached Web DataSource, using local data instead. Results may not include recent vulnerabilities.");
|
||||
LOGGER.log(Level.FINE, String.format("Unable to update details for %s", source.getClass().getName()), ex);
|
||||
}
|
||||
source.update();
|
||||
}
|
||||
LOGGER.info("Check for updates complete");
|
||||
LOGGER.info("Check for updates complete ({} ms)", System.currentTimeMillis() - updateStart);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a full list of all of the analyzers. This is useful for reporting which analyzers where used.
|
||||
* Returns a full list of all of the analyzers. This is useful for reporting
|
||||
* which analyzers where used.
|
||||
*
|
||||
* @return a list of Analyzers
|
||||
*/
|
||||
@@ -502,18 +703,20 @@ public class Engine {
|
||||
/**
|
||||
* Checks all analyzers to see if an extension is supported.
|
||||
*
|
||||
* @param ext a file extension
|
||||
* @return true or false depending on whether or not the file extension is supported
|
||||
* @param file a file extension
|
||||
* @return true or false depending on whether or not the file extension is
|
||||
* supported
|
||||
*/
|
||||
public boolean supportsExtension(String ext) {
|
||||
if (ext == null) {
|
||||
@Override
|
||||
public boolean accept(File file) {
|
||||
if (file == null) {
|
||||
return false;
|
||||
}
|
||||
boolean scan = false;
|
||||
for (FileTypeAnalyzer a : this.fileTypeAnalyzers) {
|
||||
/* note, we can't break early on this loop as the analyzers need to know if
|
||||
they have files to work on prior to initialization */
|
||||
scan |= a.supportsExtension(ext);
|
||||
scan |= a.accept(file);
|
||||
}
|
||||
return scan;
|
||||
}
|
||||
@@ -528,10 +731,22 @@ public class Engine {
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks the CPE Index to ensure documents exists. If none exist a NoDataException is thrown.
|
||||
* Adds a file type analyzer. This has been added solely to assist in unit
|
||||
* testing the Engine.
|
||||
*
|
||||
* @param fta the file type analyzer to add
|
||||
*/
|
||||
protected void addFileTypeAnalyzer(FileTypeAnalyzer fta) {
|
||||
this.fileTypeAnalyzers.add(fta);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks the CPE Index to ensure documents exists. If none exist a
|
||||
* NoDataException is thrown.
|
||||
*
|
||||
* @throws NoDataException thrown if no data exists in the CPE Index
|
||||
* @throws DatabaseException thrown if there is an exception opening the database
|
||||
* @throws DatabaseException thrown if there is an exception opening the
|
||||
* database
|
||||
*/
|
||||
private void ensureDataExists() throws NoDataException, DatabaseException {
|
||||
final CveDB cve = new CveDB();
|
||||
@@ -546,4 +761,20 @@ public class Engine {
|
||||
cve.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs and throws a fatal exception collection.
|
||||
*
|
||||
* @param message the exception message
|
||||
* @param throwable the cause
|
||||
* @param exceptions a collection of exception to include
|
||||
* @throws ExceptionCollection a collection of exceptions that occurred
|
||||
* during analysis
|
||||
*/
|
||||
private void throwFatalExceptionCollection(String message, Throwable throwable, List<Throwable> exceptions) throws ExceptionCollection {
|
||||
LOGGER.error("{}\n\n{}", throwable.getMessage(), message);
|
||||
LOGGER.debug("", throwable);
|
||||
exceptions.add(throwable);
|
||||
throw new ExceptionCollection(message, exceptions, true);
|
||||
}
|
||||
}
|
||||
|
||||
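For orientation, the fragments above (the task-building loop, getExecutorService, and AnalysisTask) fit together roughly as sketched below. This is a minimal illustration only, not the project's exact code: the executeAnalysisTasks and getAnalysisTasks names, and the assumption that AnalysisTask implements Callable, are ours.

    // Hypothetical driver, assuming AnalysisTask implements Callable and that the
    // helper methods shown in the diff above are available on the Engine.
    void executeAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) throws InterruptedException {
        final List<AnalysisTask> analysisTasks = getAnalysisTasks(analyzer, exceptions); // one task per dependency
        final ExecutorService executorService = getExecutorService(analyzer);            // pooled or single-threaded
        try {
            // invokeAll blocks until every task has completed or failed; failures are
            // recorded in the shared exceptions list rather than thrown here.
            executorService.invokeAll(analysisTasks);
        } finally {
            // release the worker threads whether or not the analysis succeeded
            executorService.shutdown();
        }
    }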
@@ -20,8 +20,6 @@ package org.owasp.dependencycheck.agent;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
@@ -29,19 +27,24 @@ import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
import org.owasp.dependencycheck.exception.ExceptionCollection;
|
||||
import org.owasp.dependencycheck.exception.ScanAgentException;
|
||||
import org.owasp.dependencycheck.reporting.ReportGenerator;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* This class provides a way to easily conduct a scan solely based on existing evidence metadata rather than collecting evidence
|
||||
* from the files themselves. This class is based on the Ant task and Maven plugin with the exception that it takes a list of
|
||||
* dependencies that can be programmatically added from data in a spreadsheet, database or some other datasource and conduct a
|
||||
* scan based on this pre-defined evidence.
|
||||
* This class provides a way to easily conduct a scan solely based on existing
|
||||
* evidence metadata rather than collecting evidence from the files themselves.
|
||||
* This class is based on the Ant task and Maven plugin with the exception that
|
||||
* it takes a list of dependencies that can be programmatically added from data
|
||||
* in a spreadsheet, database or some other datasource and conduct a scan based
|
||||
* on this pre-defined evidence.
|
||||
*
|
||||
* <h2>Example:</h2>
|
||||
* <pre>
|
||||
* List<Dependency> dependencies = new ArrayList<Dependency>();
|
||||
* List<Dependency> dependencies = new ArrayList<Dependency>();
|
||||
* Dependency dependency = new Dependency(new File(FileUtils.getBitBucket()));
|
||||
* dependency.getProductEvidence().addEvidence("my-datasource", "name", "Jetty", Confidence.HIGH);
|
||||
* dependency.getVersionEvidence().addEvidence("my-datasource", "version", "5.1.10", Confidence.HIGH);
|
||||
@@ -55,7 +58,7 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
* scan.execute();
|
||||
* </pre>
|
||||
*
|
||||
* @author Steve Springett <steve.springett@owasp.org>
|
||||
* @author Steve Springett
|
||||
*/
|
||||
@SuppressWarnings("unused")
|
||||
public class DependencyCheckScanAgent {
|
||||
@@ -67,7 +70,7 @@ public class DependencyCheckScanAgent {
|
||||
/**
|
||||
* Logger for use throughout the class.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(DependencyCheckScanAgent.class.getName());
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DependencyCheckScanAgent.class);
|
||||
/**
|
||||
* The application name for the report.
|
||||
*/
|
||||
@@ -138,7 +141,8 @@ public class DependencyCheckScanAgent {
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the destination directory for the generated Dependency-Check report.
|
||||
* Specifies the destination directory for the generated Dependency-Check
|
||||
* report.
|
||||
*/
|
||||
private String reportOutputDirectory;
|
||||
|
||||
@@ -161,9 +165,11 @@ public class DependencyCheckScanAgent {
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies if the build should be failed if a CVSS score above a specified level is identified. The default is 11 which
|
||||
* means since the CVSS scores are 0-10, by default the build will never fail and the CVSS score is set to 11. The valid range
|
||||
* for the fail build on CVSS is 0 to 11, where anything above 10 will not cause the build to fail.
|
||||
* Specifies if the build should be failed if a CVSS score above a specified
|
||||
* level is identified. The default is 11 which means since the CVSS scores
|
||||
* are 0-10, by default the build will never fail and the CVSS score is set
|
||||
* to 11. The valid range for the fail build on CVSS is 0 to 11, where
|
||||
* anything above 10 will not cause the build to fail.
|
||||
*/
|
||||
private float failBuildOnCVSS = 11;
|
||||
|
||||
@@ -186,8 +192,8 @@ public class DependencyCheckScanAgent {
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not recommended that this be turned to false. Default
|
||||
* is true.
|
||||
* Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not
|
||||
* recommended that this be turned to false. Default is true.
|
||||
*/
|
||||
private boolean autoUpdate = true;
|
||||
|
||||
@@ -233,8 +239,9 @@ public class DependencyCheckScanAgent {
|
||||
}
|
||||
|
||||
/**
|
||||
* The report format to be generated (HTML, XML, VULN, ALL). This configuration option has no affect if using this within the
|
||||
* Site plugin unless the externalReport is set to true. Default is HTML.
|
||||
* The report format to be generated (HTML, XML, VULN, ALL). This
|
||||
* configuration option has no affect if using this within the Site plugin
|
||||
* unless the externalReport is set to true. Default is HTML.
|
||||
*/
|
||||
private ReportGenerator.Format reportFormat = ReportGenerator.Format.HTML;
|
||||
|
||||
@@ -283,7 +290,9 @@ public class DependencyCheckScanAgent {
|
||||
* Get the value of proxyServer.
|
||||
*
|
||||
* @return the value of proxyServer
|
||||
* @deprecated use {@link org.owasp.dependencycheck.agent.DependencyCheckScanAgent#getProxyServer()} instead
|
||||
* @deprecated use
|
||||
* {@link org.owasp.dependencycheck.agent.DependencyCheckScanAgent#getProxyServer()}
|
||||
* instead
|
||||
*/
|
||||
@Deprecated
|
||||
public String getProxyUrl() {
|
||||
@@ -694,8 +703,8 @@ public class DependencyCheckScanAgent {
|
||||
}
|
||||
|
||||
/**
|
||||
* Additional ZIP File extensions to add analyze. This should be a comma-separated list of file extensions to treat like ZIP
|
||||
* files.
|
||||
* Additional ZIP File extensions to add analyze. This should be a
|
||||
* comma-separated list of file extensions to treat like ZIP files.
|
||||
*/
|
||||
private String zipExtensions;
|
||||
|
||||
@@ -836,12 +845,17 @@ public class DependencyCheckScanAgent {
|
||||
* Executes the Dependency-Check on the dependent libraries.
|
||||
*
|
||||
* @return the Engine used to scan the dependencies.
|
||||
* @throws org.owasp.dependencycheck.data.nvdcve.DatabaseException thrown if there is an exception connecting to the database
|
||||
* @throws ExceptionCollection a collection of one or more exceptions that
|
||||
* occurred during analysis.
|
||||
*/
|
||||
private Engine executeDependencyCheck() throws DatabaseException {
|
||||
private Engine executeDependencyCheck() throws ExceptionCollection {
|
||||
populateSettings();
|
||||
Engine engine = null;
|
||||
engine = new Engine();
|
||||
final Engine engine;
|
||||
try {
|
||||
engine = new Engine();
|
||||
} catch (DatabaseException ex) {
|
||||
throw new ExceptionCollection(ex, true);
|
||||
}
|
||||
engine.setDependencies(this.dependencies);
|
||||
engine.analyzeDependencies();
|
||||
return engine;
|
||||
@@ -861,7 +875,7 @@ public class DependencyCheckScanAgent {
|
||||
cve.open();
|
||||
prop = cve.getDatabaseProperties();
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.log(Level.FINE, "Unable to retrieve DB Properties", ex);
|
||||
LOGGER.debug("Unable to retrieve DB Properties", ex);
|
||||
} finally {
|
||||
if (cve != null) {
|
||||
cve.close();
|
||||
@@ -871,19 +885,20 @@ public class DependencyCheckScanAgent {
|
||||
try {
|
||||
r.generateReports(outDirectory.getCanonicalPath(), this.reportFormat.name());
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.SEVERE,
|
||||
LOGGER.error(
|
||||
"Unexpected exception occurred during analysis; please see the verbose error log for more details.");
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
LOGGER.debug("", ex);
|
||||
} catch (Throwable ex) {
|
||||
LOGGER.log(Level.SEVERE,
|
||||
LOGGER.error(
|
||||
"Unexpected exception occurred during analysis; please see the verbose error log for more details.");
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
LOGGER.debug("", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes the properties supplied and updates the dependency-check settings. Additionally, this sets the system properties
|
||||
* required to change the proxy server, port, and connection timeout.
|
||||
* Takes the properties supplied and updates the dependency-check settings.
|
||||
* Additionally, this sets the system properties required to change the
|
||||
* proxy server, port, and connection timeout.
|
||||
*/
|
||||
private void populateSettings() {
|
||||
Settings.initialize();
|
||||
@@ -898,74 +913,36 @@ public class DependencyCheckScanAgent {
|
||||
}
|
||||
|
||||
Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);
|
||||
|
||||
if (proxyServer != null && !proxyServer.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
}
|
||||
if (proxyPort != null && !proxyPort.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
}
|
||||
if (proxyUsername != null && !proxyUsername.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.PROXY_USERNAME, proxyUsername);
|
||||
}
|
||||
if (proxyPassword != null && !proxyPassword.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
|
||||
}
|
||||
if (connectionTimeout != null && !connectionTimeout.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
}
|
||||
if (suppressionFile != null && !suppressionFile.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
|
||||
}
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled);
|
||||
if (centralUrl != null && !centralUrl.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.ANALYZER_CENTRAL_URL, centralUrl);
|
||||
}
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_CENTRAL_URL, centralUrl);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled);
|
||||
if (nexusUrl != null && !nexusUrl.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
|
||||
}
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_PROXY, nexusUsesProxy);
|
||||
if (databaseDriverName != null && !databaseDriverName.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
}
|
||||
if (databaseDriverPath != null && !databaseDriverPath.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
}
|
||||
if (connectionString != null && !connectionString.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
}
|
||||
if (databaseUser != null && !databaseUser.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.DB_USER, databaseUser);
|
||||
}
|
||||
if (databasePassword != null && !databasePassword.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
}
|
||||
if (zipExtensions != null && !zipExtensions.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
|
||||
}
|
||||
if (cveUrl12Modified != null && !cveUrl12Modified.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
|
||||
}
|
||||
if (cveUrl20Modified != null && !cveUrl20Modified.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
|
||||
}
|
||||
if (cveUrl12Base != null && !cveUrl12Base.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
|
||||
}
|
||||
if (cveUrl20Base != null && !cveUrl20Base.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
|
||||
}
|
||||
if (pathToMono != null && !pathToMono.isEmpty()) {
|
||||
Settings.setString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
|
||||
}
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
|
||||
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
|
||||
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the dependency-check and generates the report.
|
||||
*
|
||||
* @return a reference to the engine used to perform the scan.
|
||||
* @throws org.owasp.dependencycheck.exception.ScanAgentException thrown if there is an exception executing the scan.
|
||||
* @throws org.owasp.dependencycheck.exception.ScanAgentException thrown if
|
||||
* there is an exception executing the scan.
|
||||
*/
|
||||
public Engine execute() throws ScanAgentException {
|
||||
Engine engine = null;
|
||||
@@ -980,10 +957,12 @@ public class DependencyCheckScanAgent {
|
||||
if (this.failBuildOnCVSS <= 10) {
|
||||
checkForFailure(engine.getDependencies());
|
||||
}
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.log(Level.SEVERE,
|
||||
"Unable to connect to the dependency-check database; analysis has stopped");
|
||||
LOGGER.log(Level.FINE, "", ex);
|
||||
} catch (ExceptionCollection ex) {
|
||||
if (ex.isFatal()) {
|
||||
LOGGER.error("A fatal exception occurred during analysis; analysis has stopped. Please see the debug log for more details.");
|
||||
LOGGER.debug("", ex);
|
||||
}
|
||||
throw new ScanAgentException("One or more exceptions occurred during analysis; please see the debug log for more details.", ex);
|
||||
} finally {
|
||||
Settings.cleanup(true);
|
||||
if (engine != null) {
|
||||
@@ -994,11 +973,12 @@ public class DependencyCheckScanAgent {
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks to see if a vulnerability has been identified with a CVSS score that is above the threshold set in the
|
||||
* configuration.
|
||||
* Checks to see if a vulnerability has been identified with a CVSS score
|
||||
* that is above the threshold set in the configuration.
|
||||
*
|
||||
* @param dependencies the list of dependency objects
|
||||
* @throws org.owasp.dependencycheck.exception.ScanAgentException thrown if there is an exception executing the scan.
|
||||
* @throws org.owasp.dependencycheck.exception.ScanAgentException thrown if
|
||||
* there is an exception executing the scan.
|
||||
*/
|
||||
private void checkForFailure(List<Dependency> dependencies) throws ScanAgentException {
|
||||
final StringBuilder ids = new StringBuilder();
|
||||
@@ -1026,7 +1006,8 @@ public class DependencyCheckScanAgent {
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a warning message listing a summary of dependencies and their associated CPE and CVE entries.
|
||||
* Generates a warning message listing a summary of dependencies and their
|
||||
* associated CPE and CVE entries.
|
||||
*
|
||||
* @param dependencies a list of dependency objects
|
||||
*/
|
||||
@@ -1058,10 +1039,9 @@ public class DependencyCheckScanAgent {
|
||||
}
|
||||
}
|
||||
if (summary.length() > 0) {
|
||||
final String msg = String.format("%n%n"
|
||||
+ "One or more dependencies were identified with known vulnerabilities:%n%n%s"
|
||||
+ "%n%nSee the dependency-check report for more details.%n%n", summary.toString());
|
||||
LOGGER.log(Level.WARNING, msg);
|
||||
LOGGER.warn("\n\nOne or more dependencies were identified with known vulnerabilities:\n\n{}\n\n"
|
||||
+ "See the dependency-check report for more details.\n\n",
|
||||
summary.toString());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -17,20 +17,123 @@
 */
package org.owasp.dependencycheck.analyzer;

import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base class for analyzers to avoid code duplication of initialize and close as
 * most analyzers do not need these methods.
 *
 * @author Jeremy Long
 */
public abstract class AbstractAnalyzer implements Analyzer {

/**
 * The initialize method does nothing for this Analyzer.
 * The logger.
 */
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractAnalyzer.class);
/**
 * A flag indicating whether or not the analyzer is enabled.
 */
private volatile boolean enabled = true;

/**
 * Get the value of enabled.
 *
 * @return the value of enabled
 */
@Override
public boolean isEnabled() {
return enabled;
}

/**
 * Set the value of enabled.
 *
 * @param enabled new value of enabled
 */
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}

/**
 * <p>
 * Returns the setting key to determine if the analyzer is enabled.</p>
 *
 * @return the key for the analyzer's enabled property
 */
protected abstract String getAnalyzerEnabledSettingKey();

/**
 * Analyzes a given dependency. If the dependency is an archive, such as a
 * WAR or EAR, the contents are extracted, scanned, and added to the list of
 * dependencies within the engine.
 *
 * @param dependency the dependency to analyze
 * @param engine the engine scanning
 * @throws AnalysisException thrown if there is an analysis exception
 */
protected abstract void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException;

/**
 * Initializes a given Analyzer. This will be skipped if the analyzer is disabled.
 *
 * @throws InitializationException thrown if there is an exception
 */
protected void initializeAnalyzer() throws InitializationException {
}

/**
 * Closes a given Analyzer. This will be skipped if the analyzer is disabled.
 *
 * @throws Exception thrown if there is an exception
 */
protected void closeAnalyzer() throws Exception {
}


/**
 * Analyzes a given dependency. If the dependency is an archive, such as a
 * WAR or EAR, the contents are extracted, scanned, and added to the list of
 * dependencies within the engine.
 *
 * @param dependency the dependency to analyze
 * @param engine the engine scanning
 * @throws AnalysisException thrown if there is an analysis exception
 */
@Override
public void initialize() throws Exception {
//do nothing
public final void analyze(Dependency dependency, Engine engine) throws AnalysisException {
if (this.isEnabled()) {
analyzeDependency(dependency, engine);
}
}

/**
 * The initialize method does nothing for this Analyzer.
 *
 * @throws InitializationException thrown if there is an exception
 */
@Override
public final void initialize() throws InitializationException {
final String key = getAnalyzerEnabledSettingKey();
try {
this.setEnabled(Settings.getBoolean(key, true));
} catch (InvalidSettingException ex) {
LOGGER.warn("Invalid setting for property '{}'", key);
LOGGER.debug("", ex);
}

if (isEnabled()) {
initializeAnalyzer();
} else {
LOGGER.debug("{} has been disabled", getName());
}
}

/**
@@ -39,7 +142,20 @@ public abstract class AbstractAnalyzer implements Analyzer {
 * @throws Exception thrown if there is an exception
 */
@Override
public void close() throws Exception {
//do nothing
public final void close() throws Exception {
if (isEnabled()) {
closeAnalyzer();
}
}


/**
 * The default is to support parallel processing.
 *
 * @return true
 */
@Override
public boolean supportsParallelProcessing() {
return true;
}
}
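To make the reworked template-method contract above concrete, a hypothetical subclass might look like the sketch below. The class name and the setting key are illustrative only; a real analyzer would reference a constant from Settings.KEYS. The final analyze(), initialize(), and close() methods inherited from AbstractAnalyzer handle the enabled flag, so the subclass only supplies the hook methods.

    import org.owasp.dependencycheck.Engine;
    import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
    import org.owasp.dependencycheck.dependency.Dependency;

    // Hypothetical analyzer built on the reworked AbstractAnalyzer template.
    public class ExampleAnalyzer extends AbstractAnalyzer {

        @Override
        public String getName() {
            return "Example Analyzer";
        }

        @Override
        public AnalysisPhase getAnalysisPhase() {
            return AnalysisPhase.INFORMATION_COLLECTION;
        }

        @Override
        protected String getAnalyzerEnabledSettingKey() {
            // illustrative key only; a real analyzer would use a constant from Settings.KEYS
            return "analyzer.example.enabled";
        }

        @Override
        protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
            // evidence collection for the dependency would go here
        }
    }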
@@ -17,46 +17,37 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
* The base FileTypeAnalyzer that all analyzers that have specific file types they analyze should extend.
|
||||
* The base FileTypeAnalyzer that all analyzers that have specific file types
|
||||
* they analyze should extend.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implements FileTypeAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Constructor">
|
||||
/**
|
||||
* Base constructor that all children must call. This checks the configuration to determine if the analyzer is
|
||||
* enabled.
|
||||
*/
|
||||
public AbstractFileTypeAnalyzer() {
|
||||
reset();
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Field definitions">
|
||||
//<editor-fold defaultstate="collapsed" desc="Field definitions, getters, and setters ">
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(AbstractFileTypeAnalyzer.class.getName());
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractFileTypeAnalyzer.class);
|
||||
/**
|
||||
* Whether the file type analyzer detected any files it needs to analyze.
|
||||
*/
|
||||
private boolean filesMatched = false;
|
||||
|
||||
/**
|
||||
* Get the value of filesMatched. A flag indicating whether the scan included any file types this analyzer supports.
|
||||
* Get the value of filesMatched. A flag indicating whether the scan
|
||||
* included any file types this analyzer supports.
|
||||
*
|
||||
* @return the value of filesMatched
|
||||
*/
|
||||
@@ -65,7 +56,8 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of filesMatched. A flag indicating whether the scan included any file types this analyzer supports.
|
||||
* Set the value of filesMatched. A flag indicating whether the scan
|
||||
* included any file types this analyzer supports.
|
||||
*
|
||||
* @param filesMatched new value of filesMatched
|
||||
*/
|
||||
@@ -73,153 +65,74 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
|
||||
this.filesMatched = filesMatched;
|
||||
}
|
||||
|
||||
/**
|
||||
* A flag indicating whether or not the analyzer is enabled.
|
||||
*/
|
||||
private boolean enabled = true;
|
||||
|
||||
/**
|
||||
* Get the value of enabled.
|
||||
*
|
||||
* @return the value of enabled
|
||||
*/
|
||||
public boolean isEnabled() {
|
||||
return enabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value of enabled.
|
||||
*
|
||||
* @param enabled new value of enabled
|
||||
*/
|
||||
public void setEnabled(boolean enabled) {
|
||||
this.enabled = enabled;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Abstract methods children must implement">
|
||||
/**
|
||||
* <p>
|
||||
* Returns a list of supported file extensions. An example would be an analyzer that inspected java jar files. The
|
||||
* getSupportedExtensions function would return a set with a single element "jar".</p>
|
||||
*
|
||||
* <p>
|
||||
* <b>Note:</b> when implementing this the extensions returned MUST be lowercase.</p>
|
||||
*
|
||||
* @return The file extensions supported by this analyzer.
|
||||
*
|
||||
* <p>
|
||||
* If the analyzer returns null it will not cause additional files to be analyzed but will be executed against every
|
||||
* file loaded</p>
|
||||
*/
|
||||
protected abstract Set<String> getSupportedExtensions();
|
||||
|
||||
/**
|
||||
* Initializes the file type analyzer.
|
||||
*
|
||||
* @throws Exception thrown if there is an exception during initialization
|
||||
*/
|
||||
protected abstract void initializeFileTypeAnalyzer() throws Exception;
|
||||
|
||||
/**
|
||||
* Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted,
|
||||
* scanned, and added to the list of dependencies within the engine.
|
||||
*
|
||||
* @param dependency the dependency to analyze
|
||||
* @param engine the engine scanning
|
||||
* @throws AnalysisException thrown if there is an analysis exception
|
||||
*/
|
||||
protected abstract void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
protected abstract String getAnalyzerEnabledSettingKey();
|
||||
|
||||
//</editor-fold>
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="Final implementations for the Analyzer interface">
|
||||
/**
|
||||
* Initializes the analyzer.
|
||||
*
|
||||
* @throws Exception thrown if there is an exception during initialization
|
||||
* @throws InitializationException thrown if there is an exception during
|
||||
* initialization
|
||||
*/
|
||||
@Override
|
||||
public final void initialize() throws Exception {
|
||||
protected final void initializeAnalyzer() throws InitializationException {
|
||||
if (filesMatched) {
|
||||
initializeFileTypeAnalyzer();
|
||||
} else {
|
||||
enabled = false;
|
||||
this.setEnabled(false);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resets the enabled flag on the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public final void reset() {
|
||||
final String key = getAnalyzerEnabledSettingKey();
|
||||
try {
|
||||
enabled = Settings.getBoolean(key, true);
|
||||
} catch (InvalidSettingException ex) {
|
||||
String msg = String.format("Invalid setting for property '%s'", key);
|
||||
LOGGER.log(Level.WARNING, msg);
|
||||
LOGGER.log(Level.FINE, "", ex);
|
||||
msg = String.format("%s has been disabled", getName());
|
||||
LOGGER.log(Level.WARNING, msg);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted,
|
||||
* scanned, and added to the list of dependencies within the engine.
|
||||
*
|
||||
* @param dependency the dependency to analyze
|
||||
* @param engine the engine scanning
|
||||
* @throws AnalysisException thrown if there is an analysis exception
|
||||
*/
|
||||
@Override
|
||||
public final void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
if (enabled) {
|
||||
analyzeFileType(dependency, engine);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether or not this analyzer can process the given extension.
|
||||
*
|
||||
* @param extension the file extension to test for support.
|
||||
* @return whether or not the specified file extension is supported by this analyzer.
|
||||
*/
|
||||
@Override
|
||||
public final boolean supportsExtension(String extension) {
|
||||
if (!enabled) {
|
||||
return false;
|
||||
}
|
||||
final Set<String> ext = getSupportedExtensions();
|
||||
if (ext == null) {
|
||||
final String msg = String.format("The '%s' analyzer is misconfigured and does not have any file extensions;"
|
||||
+ " it will be disabled", getName());
|
||||
LOGGER.log(Level.SEVERE, msg);
|
||||
return false;
|
||||
} else {
|
||||
final boolean match = ext.contains(extension);
|
||||
if (match) {
|
||||
filesMatched = match;
|
||||
}
|
||||
return match;
|
||||
}
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Static utility methods">
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="Abstract methods children must implement">
|
||||
/**
|
||||
* <p>
|
||||
* Utility method to help in the creation of the extensions set. This constructs a new Set that can be used in a
|
||||
* final static declaration.</p>
|
||||
* Returns the {@link java.io.FileFilter} used to determine which files are
|
||||
* to be analyzed. An example would be an analyzer that inspected Java jar
|
||||
* files. Implementors may use
|
||||
* {@link org.owasp.dependencycheck.utils.FileFilterBuilder}.</p>
|
||||
* <p>
|
||||
* If the analyzer returns null it will not cause additional files to be
|
||||
* analyzed, but will be executed against every file loaded.</p>
|
||||
*
|
||||
* @return the file filter used to determine which files are to be analyzed
|
||||
*/
|
||||
protected abstract FileFilter getFileFilter();
|
||||
|
||||
/**
|
||||
* Initializes the file type analyzer.
|
||||
*
|
||||
* @throws InitializationException thrown if there is an exception during
|
||||
* initialization
|
||||
*/
|
||||
protected abstract void initializeFileTypeAnalyzer() throws InitializationException;
|
||||
|
||||
//</editor-fold>
|
||||
/**
|
||||
* Determines if the file can be analyzed by the analyzer.
|
||||
*
|
||||
* @param pathname the path to the file
|
||||
* @return true if the file can be analyzed by the given analyzer; otherwise
|
||||
* false
|
||||
*/
|
||||
@Override
|
||||
public boolean accept(File pathname) {
|
||||
final FileFilter filter = getFileFilter();
|
||||
boolean accepted = false;
|
||||
if (null == filter) {
|
||||
LOGGER.error("The '{}' analyzer is misconfigured and does not have a file filter; it will be disabled", getName());
|
||||
} else if (this.isEnabled()) {
|
||||
accepted = filter.accept(pathname);
|
||||
if (accepted) {
|
||||
filesMatched = true;
|
||||
}
|
||||
}
|
||||
return accepted;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Utility method to help in the creation of the extensions set. This
|
||||
* constructs a new Set that can be used in a final static declaration.</p>
|
||||
* <p>
|
||||
* This implementation was copied from
|
||||
* http://stackoverflow.com/questions/2041778/initialize-java-hashset-values-by-construction</p>
|
||||
@@ -228,10 +141,8 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
|
||||
* @return a Set of strings.
|
||||
*/
|
||||
protected static Set<String> newHashSet(String... strings) {
|
||||
final Set<String> set = new HashSet<String>();
|
||||
|
||||
final Set<String> set = new HashSet<String>(strings.length);
|
||||
Collections.addAll(set, strings);
|
||||
return set;
|
||||
}
|
||||
//</editor-fold>
|
||||
}
|
||||
|
||||
@@ -24,19 +24,22 @@ import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.suppression.SuppressionParseException;
|
||||
import org.owasp.dependencycheck.suppression.SuppressionParser;
|
||||
import org.owasp.dependencycheck.suppression.SuppressionRule;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.xml.suppression.SuppressionParseException;
|
||||
import org.owasp.dependencycheck.xml.suppression.SuppressionParser;
|
||||
import org.owasp.dependencycheck.xml.suppression.SuppressionRule;
|
||||
import org.owasp.dependencycheck.utils.DownloadFailedException;
|
||||
import org.owasp.dependencycheck.utils.Downloader;
|
||||
import org.owasp.dependencycheck.utils.FileUtils;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
/**
|
||||
* Abstract base suppression analyzer that contains methods for parsing the suppression xml file.
|
||||
* Abstract base suppression analyzer that contains methods for parsing the
|
||||
* suppression xml file.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@@ -45,7 +48,7 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
/**
|
||||
* The Logger for use throughout the class
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(AbstractSuppressionAnalyzer.class.getName());
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractSuppressionAnalyzer.class);
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
@@ -61,12 +64,15 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
/**
|
||||
* The initialize method loads the suppression XML file.
|
||||
*
|
||||
* @throws Exception thrown if there is an exception
|
||||
* @throws InitializationException thrown if there is an exception
|
||||
*/
|
||||
@Override
|
||||
public void initialize() throws Exception {
|
||||
super.initialize();
|
||||
loadSuppressionData();
|
||||
public void initializeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
loadSuppressionData();
|
||||
} catch (SuppressionParseException ex) {
|
||||
throw new InitializationException("Error initializing the suppression analyzer", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -101,9 +107,10 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
final SuppressionParser parser = new SuppressionParser();
|
||||
File file = null;
|
||||
try {
|
||||
rules = parser.parseSuppressionRules(this.getClass().getClassLoader().getResourceAsStream("dependencycheck-base-suppression.xml"));
|
||||
} catch (SuppressionParseException ex) {
|
||||
LOGGER.log(Level.FINE, "Unable to parse the base suppression data file", ex);
|
||||
final InputStream in = this.getClass().getClassLoader().getResourceAsStream("dependencycheck-base-suppression.xml");
|
||||
rules = parser.parseSuppressionRules(in);
|
||||
} catch (SAXException ex) {
|
||||
throw new SuppressionParseException("Unable to parse the base suppression data file", ex);
|
||||
}
|
||||
final String suppressionFilePath = Settings.getString(Settings.KEYS.SUPPRESSION_FILE);
|
||||
if (suppressionFilePath == null) {
|
||||
@@ -123,30 +130,42 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
} else {
|
||||
file = new File(suppressionFilePath);
|
||||
InputStream suppressionsFromClasspath = null;
|
||||
if (!file.exists()) {
|
||||
final InputStream suppressionsFromClasspath = this.getClass().getClassLoader().getResourceAsStream(suppressionFilePath);
|
||||
if (suppressionsFromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("suppression", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(suppressionsFromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throwSuppressionParseException("Unable to locate suppressions file in classpath", ex);
|
||||
try {
|
||||
suppressionsFromClasspath = this.getClass().getClassLoader().getResourceAsStream(suppressionFilePath);
|
||||
if (suppressionsFromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("suppression", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(suppressionsFromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throwSuppressionParseException("Unable to locate suppressions file in classpath", ex);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
if (suppressionsFromClasspath != null) {
|
||||
try {
|
||||
suppressionsFromClasspath.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Failed to close stream", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (file != null) {
|
||||
if (!file.exists()) {
|
||||
final String msg = String.format("Suppression file '%s' does not exists", file.getPath());
|
||||
LOGGER.warn(msg);
|
||||
throw new SuppressionParseException(msg);
|
||||
}
|
||||
try {
|
||||
//rules = parser.parseSuppressionRules(file);
|
||||
rules.addAll(parser.parseSuppressionRules(file));
|
||||
LOGGER.log(Level.FINE, rules.size() + " suppression rules were loaded.");
|
||||
LOGGER.debug("{} suppression rules were loaded.", rules.size());
|
||||
} catch (SuppressionParseException ex) {
|
||||
final String msg = String.format("Unable to parse suppression xml file '%s'", file.getPath());
|
||||
LOGGER.log(Level.WARNING, msg);
|
||||
LOGGER.log(Level.WARNING, ex.getMessage());
|
||||
LOGGER.log(Level.FINE, "", ex);
|
||||
LOGGER.warn("Unable to parse suppression xml file '{}'", file.getPath());
|
||||
LOGGER.warn(ex.getMessage());
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
@@ -154,6 +173,8 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
throwSuppressionParseException("Unable to fetch the configured suppression file", ex);
|
||||
} catch (MalformedURLException ex) {
|
||||
throwSuppressionParseException("Configured suppression file has an invalid URL", ex);
|
||||
} catch (SuppressionParseException ex) {
|
||||
throw ex;
|
||||
} catch (IOException ex) {
|
||||
throwSuppressionParseException("Unable to create temp file for suppressions", ex);
|
||||
} finally {
|
||||
@@ -168,11 +189,12 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
|
||||
*
|
||||
* @param message the exception message
|
||||
* @param exception the cause of the exception
|
||||
* @throws SuppressionParseException throws the generated SuppressionParseException
|
||||
* @throws SuppressionParseException throws the generated
|
||||
* SuppressionParseException
|
||||
*/
|
||||
private void throwSuppressionParseException(String message, Exception exception) throws SuppressionParseException {
|
||||
LOGGER.log(Level.WARNING, message);
|
||||
LOGGER.log(Level.FINE, "", exception);
|
||||
LOGGER.warn(message);
|
||||
LOGGER.debug("", exception);
|
||||
throw new SuppressionParseException(message, exception);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,10 +28,18 @@ public enum AnalysisPhase {
 * Initialization phase.
 */
INITIAL,
/**
 * Pre information collection phase.
 */
PRE_INFORMATION_COLLECTION,
/**
 * Information collection phase.
 */
INFORMATION_COLLECTION,
/**
 * Post information collection phase.
 */
POST_INFORMATION_COLLECTION,
/**
 * Pre identifier analysis phase.
 */
@@ -20,24 +20,28 @@ package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;

/**
 * An interface that defines an Analyzer that is used to identify Dependencies. An analyzer will collect information
 * about the dependency in the form of Evidence.
 * An interface that defines an Analyzer that is used to identify Dependencies.
 * An analyzer will collect information about the dependency in the form of
 * Evidence.
 *
 * @author Jeremy Long
 */
public interface Analyzer {

/**
 * Analyzes the given dependency. The analysis could be anything from identifying an Identifier for the dependency,
 * to finding vulnerabilities, etc. Additionally, if the analyzer collects enough information to add a description
 * or license information for the dependency it should be added.
 * Analyzes the given dependency. The analysis could be anything from
 * identifying an Identifier for the dependency, to finding vulnerabilities,
 * etc. Additionally, if the analyzer collects enough information to add a
 * description or license information for the dependency it should be added.
 *
 * @param dependency a dependency to analyze.
 * @param engine the engine that is scanning the dependencies - this is useful if we need to check other
 * dependencies
 * @throws AnalysisException is thrown if there is an error analyzing the dependency file
 * @param engine the engine that is scanning the dependencies - this is
 * useful if we need to check other dependencies
 * @throws AnalysisException is thrown if there is an error analyzing the
 * dependency file
 */
void analyze(Dependency dependency, Engine engine) throws AnalysisException;

@@ -56,16 +60,33 @@ public interface Analyzer {
AnalysisPhase getAnalysisPhase();

/**
 * The initialize method is called (once) prior to the analyze method being called on all of the dependencies.
 * The initialize method is called (once) prior to the analyze method being
 * called on all of the dependencies.
 *
 * @throws Exception is thrown if an exception occurs initializing the analyzer.
 * @throws InitializationException is thrown if an exception occurs
 * initializing the analyzer.
 */
void initialize() throws Exception;
void initialize() throws InitializationException;

/**
 * The close method is called after all of the dependencies have been analyzed.
 * The close method is called after all of the dependencies have been
 * analyzed.
 *
 * @throws Exception is thrown if an exception occurs closing the analyzer.
 */
void close() throws Exception;

/**
 * Returns whether multiple instances of the same type of analyzer can run in parallel.
 * Note that running analyzers of different types in parallel is not supported at all.
 *
 * @return {@code true} if the analyzer supports parallel processing, {@code false} else
 */
boolean supportsParallelProcessing();
/**
 * Get the value of enabled.
 *
 * @return the value of enabled
 */
boolean isEnabled();
}
@@ -17,8 +17,13 @@
 */
package org.owasp.dependencycheck.analyzer;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.ServiceLoader;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.LoggerFactory;

/**
 * The Analyzer Service Loader. This class loads all services that implement
@@ -27,11 +32,15 @@ import java.util.ServiceLoader;
 * @author Jeremy Long
 */
public class AnalyzerService {
/**
 * The Logger for use throughout the class.
 */
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(AnalyzerService.class);

/**
 * The service loader for analyzers.
 */
private final ServiceLoader<Analyzer> loader;
private final ServiceLoader<Analyzer> service;

/**
 * Creates a new instance of AnalyzerService.
@@ -39,15 +48,31 @@ public class AnalyzerService {
 * @param classLoader the ClassLoader to use when dynamically loading Analyzer and Update services
 */
public AnalyzerService(ClassLoader classLoader) {
loader = ServiceLoader.load(Analyzer.class, classLoader);
service = ServiceLoader.load(Analyzer.class, classLoader);
}

/**
 * Returns an Iterator for all instances of the Analyzer interface.
 * Returns a list of all instances of the Analyzer interface.
 *
 * @return an iterator of Analyzers.
 * @return a list of Analyzers.
 */
public Iterator<Analyzer> getAnalyzers() {
return loader.iterator();
public List<Analyzer> getAnalyzers() {
final List<Analyzer> analyzers = new ArrayList<Analyzer>();
final Iterator<Analyzer> iterator = service.iterator();
boolean experimentalEnabled = false;
try {
experimentalEnabled = Settings.getBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, false);
} catch (InvalidSettingException ex) {
LOGGER.error("invalide experimental setting", ex);
}
while (iterator.hasNext()) {
final Analyzer a = iterator.next();
if (!experimentalEnabled && a.getClass().isAnnotationPresent(Experimental.class)) {
continue;
}
LOGGER.debug("Loaded Analyzer {}", a.getName());
analyzers.add(a);
}
return analyzers;
}
}
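A brief usage sketch of the reworked service loader follows; it simply lists whatever analyzers are discoverable on the classpath. Analyzers are found through Java's standard ServiceLoader mechanism, so only implementations registered under META-INF/services for the Analyzer interface are returned, and analyzers annotated with @Experimental are skipped unless the experimental setting is enabled. The snippet is illustrative only and assumes the context class loader is a suitable source of analyzer implementations.

    // Illustrative only: enumerate the analyzers visible to this class loader.
    final AnalyzerService analyzerService = new AnalyzerService(Thread.currentThread().getContextClassLoader());
    for (Analyzer analyzer : analyzerService.getAnalyzers()) {
        System.out.println(analyzer.getName() + " (phase: " + analyzer.getAnalysisPhase() + ")");
    }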
@@ -18,21 +18,17 @@
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.BufferedOutputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Enumeration;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
import org.apache.commons.compress.archivers.ArchiveEntry;
|
||||
import org.apache.commons.compress.archivers.ArchiveInputStream;
|
||||
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
|
||||
@@ -40,19 +36,28 @@ import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
|
||||
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
|
||||
import org.apache.commons.compress.archivers.zip.ZipFile;
|
||||
import org.apache.commons.compress.compressors.CompressorInputStream;
|
||||
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
|
||||
import org.apache.commons.compress.compressors.bzip2.BZip2Utils;
|
||||
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
|
||||
import org.apache.commons.compress.compressors.gzip.GzipUtils;
|
||||
import org.apache.commons.compress.utils.IOUtils;
|
||||
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.analyzer.exception.ArchiveExtractionException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.FileUtils;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* An analyzer that extracts files from archives and ensures any supported files contained within the archive are added
|
||||
* to the dependency list.</p>
|
||||
* An analyzer that extracts files from archives and ensures any supported files
|
||||
* contained within the archive are added to the dependency list.</p>
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
@@ -61,13 +66,10 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(ArchiveAnalyzer.class.getName());
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(ArchiveAnalyzer.class);
|
||||
/**
|
||||
* The buffer size to use when extracting files from the archive.
|
||||
*/
|
||||
private static final int BUFFER_SIZE = 4096;
|
||||
/**
|
||||
* The count of directories created during analysis. This is used for creating temporary directories.
|
||||
* The count of directories created during analysis. This is used for
|
||||
* creating temporary directories.
|
||||
*/
|
||||
private static int dirCount = 0;
|
||||
/**
|
||||
@@ -75,7 +77,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
private File tempFileLocation = null;
|
||||
/**
|
||||
* The max scan depth that the analyzer will recursively extract nested archives.
|
||||
* The max scan depth that the analyzer will recursively extract nested
|
||||
* archives.
|
||||
*/
|
||||
private static final int MAX_SCAN_DEPTH = Settings.getInt("archive.scan.depth", 3);
|
||||
/**
|
||||
@@ -97,35 +100,43 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
private static final Set<String> ZIPPABLES = newHashSet("zip", "ear", "war", "jar", "sar", "apk", "nupkg");
|
||||
/**
|
||||
* The set of file extensions supported by this analyzer. Note for developers, any additions to this list will need
|
||||
* to be explicitly handled in extractFiles().
|
||||
* The set of file extensions supported by this analyzer. Note for
|
||||
* developers, any additions to this list will need to be explicitly handled
|
||||
* in {@link #extractFiles(File, File, Engine)}.
|
||||
*/
|
||||
private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz");
|
||||
private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz", "bz2", "tbz2");
|
||||
|
||||
/**
|
||||
* The set of file extensions to remove from the engine's collection of dependencies.
|
||||
* Detects files with extensions to remove from the engine's collection of
|
||||
* dependencies.
|
||||
*/
|
||||
private static final Set<String> REMOVE_FROM_ANALYSIS = newHashSet("zip", "tar", "gz", "tgz"); //TODO add nupkg, apk, sar?
|
||||
private static final FileFilter REMOVE_FROM_ANALYSIS = FileFilterBuilder.newInstance().addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2")
|
||||
.build();
|
||||
|
||||
static {
|
||||
final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS);
|
||||
if (additionalZipExt != null) {
|
||||
final Set<String> ext = new HashSet<String>(Arrays.asList(additionalZipExt));
|
||||
ZIPPABLES.addAll(ext);
|
||||
final String[] ext = additionalZipExt.split("\\s*,\\s*");
|
||||
Collections.addAll(ZIPPABLES, ext);
|
||||
}
|
||||
EXTENSIONS.addAll(ZIPPABLES);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a list of file EXTENSIONS supported by this analyzer.
|
||||
*
|
||||
* @return a list of file EXTENSIONS supported by this analyzer.
|
||||
* The file filter used to filter supported files.
|
||||
*/
|
||||
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
|
||||
|
||||
@Override
|
||||
public Set<String> getSupportedExtensions() {
|
||||
return EXTENSIONS;
|
||||
protected FileFilter getFileFilter() {
|
||||
return FILTER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Detects files with .zip extension.
|
||||
*/
|
||||
private static final FileFilter ZIP_FILTER = FileFilterBuilder.newInstance().addExtensions("zip").build();
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
@@ -148,7 +159,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's enabled property.
|
||||
* Returns the key used in the properties file to reference the analyzer's
|
||||
* enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@@ -160,119 +172,183 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* The initialize method does nothing for this Analyzer.
|
||||
*
|
||||
* @throws Exception is thrown if there is an exception deleting or creating temporary files
|
||||
* @throws InitializationException is thrown if there is an exception
|
||||
* deleting or creating temporary files
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws Exception {
|
||||
final File baseDir = Settings.getTempDirectory();
|
||||
tempFileLocation = File.createTempFile("check", "tmp", baseDir);
|
||||
if (!tempFileLocation.delete()) {
|
||||
final String msg = String.format("Unable to delete temporary file '%s'.", tempFileLocation.getAbsolutePath());
|
||||
throw new AnalysisException(msg);
|
||||
}
|
||||
if (!tempFileLocation.mkdirs()) {
|
||||
final String msg = String.format("Unable to create directory '%s'.", tempFileLocation.getAbsolutePath());
|
||||
throw new AnalysisException(msg);
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
final File baseDir = Settings.getTempDirectory();
|
||||
tempFileLocation = File.createTempFile("check", "tmp", baseDir);
|
||||
if (!tempFileLocation.delete()) {
|
||||
setEnabled(false);
|
||||
final String msg = String.format("Unable to delete temporary file '%s'.", tempFileLocation.getAbsolutePath());
|
||||
throw new InitializationException(msg);
|
||||
}
|
||||
if (!tempFileLocation.mkdirs()) {
|
||||
setEnabled(false);
|
||||
final String msg = String.format("Unable to create directory '%s'.", tempFileLocation.getAbsolutePath());
|
||||
throw new InitializationException(msg);
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unable to create a temporary file", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The close method deletes any temporary files and directories created during analysis.
|
||||
* The close method deletes any temporary files and directories created
|
||||
* during analysis.
|
||||
*
|
||||
* @throws Exception thrown if there is an exception deleting temporary files
|
||||
* @throws Exception thrown if there is an exception deleting temporary
|
||||
* files
|
||||
*/
|
||||
@Override
|
||||
public void close() throws Exception {
|
||||
public void closeAnalyzer() throws Exception {
|
||||
if (tempFileLocation != null && tempFileLocation.exists()) {
|
||||
LOGGER.log(Level.FINE, "Attempting to delete temporary files");
|
||||
LOGGER.debug("Attempting to delete temporary files");
|
||||
final boolean success = FileUtils.delete(tempFileLocation);
|
||||
if (!success && tempFileLocation != null && tempFileLocation.exists() && tempFileLocation.list().length > 0) {
|
||||
LOGGER.log(Level.WARNING, "Failed to delete some temporary files, see the log for more details");
|
||||
if (!success && tempFileLocation.exists()) {
|
||||
final String[] l = tempFileLocation.list();
|
||||
if (l != null && l.length > 0) {
|
||||
LOGGER.warn("Failed to delete some temporary files, see the log for more details");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted,
|
||||
* scanned, and added to the list of dependencies within the engine.
|
||||
* Does not support parallel processing as it both modifies and iterates
|
||||
* over the engine's list of dependencies.
|
||||
*
|
||||
* @see #analyzeDependency(Dependency, Engine)
|
||||
* @see #findMoreDependencies(Engine, File)
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a given dependency. If the dependency is an archive, such as a
|
||||
* WAR or EAR, the contents are extracted, scanned, and added to the list of
|
||||
* dependencies within the engine.
|
||||
*
|
||||
* @param dependency the dependency to analyze
|
||||
* @param engine the engine scanning
|
||||
* @throws AnalysisException thrown if there is an analysis exception
|
||||
*/
|
||||
@Override
|
||||
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
final File f = new File(dependency.getActualFilePath());
|
||||
final File tmpDir = getNextTempDirectory();
|
||||
extractFiles(f, tmpDir, engine);
|
||||
|
||||
//make a copy
|
||||
List<Dependency> dependencies = new ArrayList<Dependency>(engine.getDependencies());
|
||||
engine.scan(tmpDir);
|
||||
List<Dependency> newDependencies = engine.getDependencies();
|
||||
if (dependencies.size() != newDependencies.size()) {
|
||||
//get the new dependencies
|
||||
final Set<Dependency> dependencySet = new HashSet<Dependency>();
|
||||
dependencySet.addAll(newDependencies);
|
||||
dependencySet.removeAll(dependencies);
|
||||
final List<Dependency> dependencySet = findMoreDependencies(engine, tmpDir);
|
||||
|
||||
if (dependencySet != null && !dependencySet.isEmpty()) {
|
||||
for (Dependency d : dependencySet) {
|
||||
//fix the dependency's display name and path
|
||||
final String displayPath = String.format("%s%s",
|
||||
dependency.getFilePath(),
|
||||
d.getActualFilePath().substring(tmpDir.getAbsolutePath().length()));
|
||||
final String displayName = String.format("%s: %s",
|
||||
dependency.getFileName(),
|
||||
d.getFileName());
|
||||
d.setFilePath(displayPath);
|
||||
d.setFileName(displayName);
|
||||
if (d.getFilePath().startsWith(tmpDir.getAbsolutePath())) {
|
||||
//fix the dependency's display name and path
|
||||
final String displayPath = String.format("%s%s",
|
||||
dependency.getFilePath(),
|
||||
d.getActualFilePath().substring(tmpDir.getAbsolutePath().length()));
|
||||
final String displayName = String.format("%s: %s",
|
||||
dependency.getFileName(),
|
||||
d.getFileName());
|
||||
d.setFilePath(displayPath);
|
||||
d.setFileName(displayName);
|
||||
d.setProjectReferences(dependency.getProjectReferences());
|
||||
|
||||
//TODO - can we get more evidence from the parent? EAR contains module name, etc.
|
||||
//analyze the dependency (i.e. extract files) if it is a supported type.
|
||||
if (this.supportsExtension(d.getFileExtension()) && scanDepth < MAX_SCAN_DEPTH) {
|
||||
scanDepth += 1;
|
||||
analyze(d, engine);
|
||||
scanDepth -= 1;
|
||||
//TODO - can we get more evidence from the parent? EAR contains module name, etc.
|
||||
//analyze the dependency (i.e. extract files) if it is a supported type.
|
||||
if (this.accept(d.getActualFile()) && scanDepth < MAX_SCAN_DEPTH) {
|
||||
scanDepth += 1;
|
||||
analyze(d, engine);
|
||||
scanDepth -= 1;
|
||||
}
|
||||
} else {
|
||||
for (Dependency sub : dependencySet) {
|
||||
if (sub.getFilePath().startsWith(tmpDir.getAbsolutePath())) {
|
||||
final String displayPath = String.format("%s%s",
|
||||
dependency.getFilePath(),
|
||||
sub.getActualFilePath().substring(tmpDir.getAbsolutePath().length()));
|
||||
final String displayName = String.format("%s: %s",
|
||||
dependency.getFileName(),
|
||||
sub.getFileName());
|
||||
sub.setFilePath(displayPath);
|
||||
sub.setFileName(displayName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (this.REMOVE_FROM_ANALYSIS.contains(dependency.getFileExtension())) {
|
||||
if ("zip".equals(dependency.getFileExtension()) && isZipFileActuallyJarFile(dependency)) {
|
||||
final File tdir = getNextTempDirectory();
|
||||
final String fileName = dependency.getFileName();
|
||||
|
||||
LOGGER.info(String.format("The zip file '%s' appears to be a JAR file, making a copy and analyzing it as a JAR.", fileName));
|
||||
|
||||
final File tmpLoc = new File(tdir, fileName.substring(0, fileName.length() - 3) + "jar");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyFile(tdir, tmpLoc);
|
||||
dependencies = new ArrayList<Dependency>(engine.getDependencies());
|
||||
engine.scan(tmpLoc);
|
||||
newDependencies = engine.getDependencies();
|
||||
if (dependencies.size() != newDependencies.size()) {
|
||||
//get the new dependencies
|
||||
final Set<Dependency> dependencySet = new HashSet<Dependency>();
|
||||
dependencySet.addAll(newDependencies);
|
||||
dependencySet.removeAll(dependencies);
|
||||
if (dependencySet.size() != 1) {
|
||||
LOGGER.info("Deep copy of ZIP to JAR file resulted in more then one dependency?");
|
||||
}
|
||||
for (Dependency d : dependencySet) {
|
||||
//fix the dependency's display name and path
|
||||
d.setFilePath(dependency.getFilePath());
|
||||
d.setDisplayFileName(dependency.getFileName());
|
||||
}
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
final String msg = String.format("Unable to perform deep copy on '%s'", dependency.getActualFile().getPath());
|
||||
LOGGER.log(Level.FINE, msg, ex);
|
||||
}
|
||||
}
|
||||
if (REMOVE_FROM_ANALYSIS.accept(dependency.getActualFile())) {
|
||||
addDisguisedJarsToDependencies(dependency, engine);
|
||||
engine.getDependencies().remove(dependency);
|
||||
}
|
||||
Collections.sort(engine.getDependencies());
|
||||
}
|
||||
|
||||
/**
|
||||
* If a zip file was identified as a possible JAR, this method will add the
|
||||
* zip to the list of dependencies.
|
||||
*
|
||||
* @param dependency the zip file
|
||||
* @param engine the engine
|
||||
* @throws AnalysisException thrown if there is an issue
|
||||
*/
|
||||
private void addDisguisedJarsToDependencies(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
if (ZIP_FILTER.accept(dependency.getActualFile()) && isZipFileActuallyJarFile(dependency)) {
|
||||
final File tdir = getNextTempDirectory();
|
||||
final String fileName = dependency.getFileName();
|
||||
|
||||
LOGGER.info("The zip file '{}' appears to be a JAR file, making a copy and analyzing it as a JAR.", fileName);
|
||||
final File tmpLoc = new File(tdir, fileName.substring(0, fileName.length() - 3) + "jar");
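//e.g. a dependency named "library.zip" is copied to "<temp dir>/library.jar" so it can be re-scanned as a JAR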
//store the archive's sha1 and change it so that the engine doesn't think the zip and jar file are the same
// and add it as a related dependency.
|
||||
final String archiveSha1 = dependency.getSha1sum();
|
||||
try {
|
||||
dependency.setSha1sum("");
|
||||
org.apache.commons.io.FileUtils.copyFile(dependency.getActualFile(), tmpLoc);
|
||||
final List<Dependency> dependencySet = findMoreDependencies(engine, tmpLoc);
|
||||
if (dependencySet != null && !dependencySet.isEmpty()) {
|
||||
for (Dependency d : dependencySet) {
|
||||
//fix the dependency's display name and path
|
||||
if (d.getActualFile().equals(tmpLoc)) {
|
||||
d.setFilePath(dependency.getFilePath());
|
||||
d.setDisplayFileName(dependency.getFileName());
|
||||
} else {
|
||||
for (Dependency sub : d.getRelatedDependencies()) {
|
||||
if (sub.getActualFile().equals(tmpLoc)) {
|
||||
sub.setFilePath(dependency.getFilePath());
|
||||
sub.setDisplayFileName(dependency.getFileName());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Unable to perform deep copy on '{}'", dependency.getActualFile().getPath(), ex);
|
||||
} finally {
|
||||
dependency.setSha1sum(archiveSha1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Scan the given file/folder, and return any new dependencies found.
|
||||
*
|
||||
* @param engine used to scan
|
||||
* @param file target of scanning
|
||||
* @return any dependencies that weren't known to the engine before
|
||||
*/
|
||||
private static List<Dependency> findMoreDependencies(Engine engine, File file) {
|
||||
final List<Dependency> added = engine.scan(file);
|
||||
return added;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the next temporary directory to extract an archive to.
|
||||
*
|
||||
@@ -302,43 +378,118 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException thrown if the archive is not found
|
||||
*/
|
||||
private void extractFiles(File archive, File destination, Engine engine) throws AnalysisException {
|
||||
if (archive == null || destination == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
FileInputStream fis = null;
|
||||
try {
|
||||
fis = new FileInputStream(archive);
|
||||
} catch (FileNotFoundException ex) {
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
throw new AnalysisException("Archive file was not found.", ex);
|
||||
}
|
||||
final String archiveExt = FileUtils.getFileExtension(archive.getName()).toLowerCase();
|
||||
try {
|
||||
if (ZIPPABLES.contains(archiveExt)) {
|
||||
extractArchive(new ZipArchiveInputStream(new BufferedInputStream(fis)), destination, engine);
|
||||
} else if ("tar".equals(archiveExt)) {
|
||||
extractArchive(new TarArchiveInputStream(new BufferedInputStream(fis)), destination, engine);
|
||||
} else if ("gz".equals(archiveExt) || "tgz".equals(archiveExt)) {
|
||||
final String uncompressedName = GzipUtils.getUncompressedFilename(archive.getName());
|
||||
final String uncompressedExt = FileUtils.getFileExtension(uncompressedName).toLowerCase();
|
||||
if (engine.supportsExtension(uncompressedExt)) {
|
||||
decompressFile(new GzipCompressorInputStream(new BufferedInputStream(fis)), new File(destination, uncompressedName));
|
||||
}
|
||||
if (archive != null && destination != null) {
|
||||
String archiveExt = FileUtils.getFileExtension(archive.getName());
|
||||
if (archiveExt == null) {
|
||||
return;
|
||||
}
|
||||
} catch (ArchiveExtractionException ex) {
|
||||
final String msg = String.format("Exception extracting archive '%s'.", archive.getName());
|
||||
LOGGER.log(Level.WARNING, msg);
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
} catch (IOException ex) {
|
||||
final String msg = String.format("Exception reading archive '%s'.", archive.getName());
|
||||
LOGGER.log(Level.WARNING, msg);
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
} finally {
|
||||
archiveExt = archiveExt.toLowerCase();
|
||||
|
||||
final FileInputStream fis;
|
||||
try {
|
||||
fis.close();
|
||||
fis = new FileInputStream(archive);
|
||||
} catch (FileNotFoundException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
throw new AnalysisException("Archive file was not found.", ex);
|
||||
}
|
||||
BufferedInputStream in = null;
|
||||
ZipArchiveInputStream zin = null;
|
||||
TarArchiveInputStream tin = null;
|
||||
GzipCompressorInputStream gin = null;
|
||||
BZip2CompressorInputStream bzin = null;
|
||||
try {
|
||||
if (ZIPPABLES.contains(archiveExt)) {
|
||||
in = new BufferedInputStream(fis);
|
||||
ensureReadableJar(archiveExt, in);
|
||||
zin = new ZipArchiveInputStream(in);
|
||||
extractArchive(zin, destination, engine);
|
||||
} else if ("tar".equals(archiveExt)) {
|
||||
in = new BufferedInputStream(fis);
|
||||
tin = new TarArchiveInputStream(in);
|
||||
extractArchive(tin, destination, engine);
|
||||
} else if ("gz".equals(archiveExt) || "tgz".equals(archiveExt)) {
|
||||
final String uncompressedName = GzipUtils.getUncompressedFilename(archive.getName());
|
||||
final File f = new File(destination, uncompressedName);
|
||||
if (engine.accept(f)) {
|
||||
in = new BufferedInputStream(fis);
|
||||
gin = new GzipCompressorInputStream(in);
|
||||
decompressFile(gin, f);
|
||||
}
|
||||
} else if ("bz2".equals(archiveExt) || "tbz2".equals(archiveExt)) {
|
||||
final String uncompressedName = BZip2Utils.getUncompressedFilename(archive.getName());
|
||||
final File f = new File(destination, uncompressedName);
|
||||
if (engine.accept(f)) {
|
||||
in = new BufferedInputStream(fis);
|
||||
bzin = new BZip2CompressorInputStream(in);
|
||||
decompressFile(bzin, f);
|
||||
}
|
||||
}
|
||||
} catch (ArchiveExtractionException ex) {
|
||||
LOGGER.warn("Exception extracting archive '{}'.", archive.getName());
|
||||
LOGGER.debug("", ex);
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
LOGGER.warn("Exception reading archive '{}'.", archive.getName());
|
||||
LOGGER.debug("", ex);
|
||||
} finally {
|
||||
//overly verbose and not needed... but keeping it anyway due to
//having issues with file handles being left open
|
||||
FileUtils.close(fis);
|
||||
FileUtils.close(in);
|
||||
FileUtils.close(zin);
|
||||
FileUtils.close(tin);
|
||||
FileUtils.close(gin);
|
||||
FileUtils.close(bzin);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the file being scanned is a JAR that begins with '#!/bin' which
|
||||
* indicates it is a fully executable jar. If a fully executable JAR is
|
||||
* identified the input stream will be advanced to the start of the actual
|
||||
* JAR file (skipping the script).
|
||||
*
|
||||
* @see
|
||||
* <a href="http://docs.spring.io/spring-boot/docs/1.3.0.BUILD-SNAPSHOT/reference/htmlsingle/#deployment-install">Installing
|
||||
* Spring Boot Applications</a>
|
||||
* @param archiveExt the file extension
|
||||
* @param in the input stream
|
||||
* @throws IOException thrown if there is an error reading the stream
|
||||
*/
|
||||
private void ensureReadableJar(final String archiveExt, BufferedInputStream in) throws IOException {
|
||||
if ("jar".equals(archiveExt) && in.markSupported()) {
|
||||
in.mark(7);
|
||||
final byte[] b = new byte[7];
|
||||
final int read = in.read(b);
|
||||
if (read == 7
|
||||
&& b[0] == '#'
|
||||
&& b[1] == '!'
|
||||
&& b[2] == '/'
|
||||
&& b[3] == 'b'
|
||||
&& b[4] == 'i'
|
||||
&& b[5] == 'n'
|
||||
&& b[6] == '/') {
boolean stillLooking = true;
|
||||
int chr, nxtChr;
|
||||
while (stillLooking && (chr = in.read()) != -1) {
|
||||
if (chr == '\n' || chr == '\r') {
|
||||
in.mark(4);
|
||||
if ((chr = in.read()) != -1) {
|
||||
if (chr == 'P' && (chr = in.read()) != -1) {
|
||||
if (chr == 'K' && (chr = in.read()) != -1) {
|
||||
if ((chr == 3 || chr == 5 || chr == 7) && (nxtChr = in.read()) != -1) {
|
||||
if (nxtChr == chr + 1) {
|
||||
stillLooking = false;
|
||||
in.reset();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
in.reset();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -349,83 +500,58 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @param input the archive to extract files from
|
||||
* @param destination the location to write the files to
|
||||
* @param engine the dependency-check engine
|
||||
* @throws ArchiveExtractionException thrown if there is an exception extracting files from the archive
|
||||
* @throws ArchiveExtractionException thrown if there is an exception
|
||||
* extracting files from the archive
|
||||
*/
|
||||
private void extractArchive(ArchiveInputStream input, File destination, Engine engine) throws ArchiveExtractionException {
|
||||
ArchiveEntry entry;
|
||||
try {
|
||||
while ((entry = input.getNextEntry()) != null) {
|
||||
final File file = new File(destination, entry.getName());
|
||||
if (entry.isDirectory()) {
|
||||
final File d = new File(destination, entry.getName());
|
||||
if (!d.exists()) {
|
||||
if (!d.mkdirs()) {
|
||||
final String msg = String.format("Unable to create directory '%s'.", d.getAbsolutePath());
|
||||
throw new AnalysisException(msg);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
final File file = new File(destination, entry.getName());
|
||||
final String ext = FileUtils.getFileExtension(file.getName());
|
||||
if (engine.supportsExtension(ext)) {
|
||||
final String extracting = String.format("Extracting '%s'", file.getPath());
|
||||
LOGGER.fine(extracting);
|
||||
BufferedOutputStream bos = null;
|
||||
FileOutputStream fos = null;
|
||||
try {
|
||||
final File parent = file.getParentFile();
|
||||
if (!parent.isDirectory()) {
|
||||
if (!parent.mkdirs()) {
|
||||
final String msg = String.format("Unable to build directory '%s'.", parent.getAbsolutePath());
|
||||
throw new AnalysisException(msg);
|
||||
}
|
||||
}
|
||||
fos = new FileOutputStream(file);
|
||||
bos = new BufferedOutputStream(fos, BUFFER_SIZE);
|
||||
int count;
|
||||
final byte[] data = new byte[BUFFER_SIZE];
|
||||
while ((count = input.read(data, 0, BUFFER_SIZE)) != -1) {
|
||||
bos.write(data, 0, count);
|
||||
}
|
||||
bos.flush();
|
||||
} catch (FileNotFoundException ex) {
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
final String msg = String.format("Unable to find file '%s'.", file.getName());
|
||||
throw new AnalysisException(msg, ex);
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
final String msg = String.format("IO Exception while parsing file '%s'.", file.getName());
|
||||
throw new AnalysisException(msg, ex);
|
||||
} finally {
|
||||
if (bos != null) {
|
||||
try {
|
||||
bos.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.FINEST, null, ex);
|
||||
}
|
||||
}
|
||||
if (fos != null) {
|
||||
try {
|
||||
fos.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.FINEST, null, ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!file.exists() && !file.mkdirs()) {
|
||||
final String msg = String.format("Unable to create directory '%s'.", file.getAbsolutePath());
|
||||
throw new AnalysisException(msg);
|
||||
}
|
||||
} else if (engine.accept(file)) {
|
||||
extractAcceptedFile(input, file);
|
||||
}
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
throw new ArchiveExtractionException(ex);
|
||||
} catch (Throwable ex) {
|
||||
throw new ArchiveExtractionException(ex);
|
||||
} finally {
|
||||
if (input != null) {
|
||||
try {
|
||||
input.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.FINEST, null, ex);
|
||||
}
|
||||
FileUtils.close(input);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts a file from an archive.
|
||||
*
|
||||
* @param input the archives input stream
|
||||
* @param file the file to extract
|
||||
* @throws AnalysisException thrown if there is an error
|
||||
*/
|
||||
private static void extractAcceptedFile(ArchiveInputStream input, File file) throws AnalysisException {
|
||||
LOGGER.debug("Extracting '{}'", file.getPath());
|
||||
FileOutputStream fos = null;
|
||||
try {
|
||||
final File parent = file.getParentFile();
|
||||
if (!parent.isDirectory() && !parent.mkdirs()) {
|
||||
final String msg = String.format("Unable to build directory '%s'.", parent.getAbsolutePath());
|
||||
throw new AnalysisException(msg);
|
||||
}
|
||||
fos = new FileOutputStream(file);
|
||||
IOUtils.copy(input, fos);
|
||||
} catch (FileNotFoundException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
final String msg = String.format("Unable to find file '%s'.", file.getName());
|
||||
throw new AnalysisException(msg, ex);
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("", ex);
|
||||
final String msg = String.format("IO Exception while parsing file '%s'.", file.getName());
|
||||
throw new AnalysisException(msg, ex);
|
||||
} finally {
|
||||
FileUtils.close(fos);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -434,33 +560,23 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*
|
||||
* @param inputStream the compressed file
|
||||
* @param outputFile the location to write the decompressed file
|
||||
* @throws ArchiveExtractionException thrown if there is an exception decompressing the file
|
||||
* @throws ArchiveExtractionException thrown if there is an exception
|
||||
* decompressing the file
|
||||
*/
|
||||
private void decompressFile(CompressorInputStream inputStream, File outputFile) throws ArchiveExtractionException {
|
||||
final String msg = String.format("Decompressing '%s'", outputFile.getPath());
|
||||
LOGGER.fine(msg);
|
||||
LOGGER.debug("Decompressing '{}'", outputFile.getPath());
|
||||
FileOutputStream out = null;
|
||||
try {
|
||||
out = new FileOutputStream(outputFile);
|
||||
final byte[] buffer = new byte[BUFFER_SIZE];
|
||||
int n = 0;
|
||||
while (-1 != (n = inputStream.read(buffer))) {
|
||||
out.write(buffer, 0, n);
|
||||
}
|
||||
IOUtils.copy(inputStream, out);
|
||||
} catch (FileNotFoundException ex) {
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
LOGGER.debug("", ex);
|
||||
throw new ArchiveExtractionException(ex);
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
LOGGER.debug("", ex);
|
||||
throw new ArchiveExtractionException(ex);
|
||||
} finally {
|
||||
if (out != null) {
|
||||
try {
|
||||
out.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.FINEST, null, ex);
|
||||
}
|
||||
}
|
||||
FileUtils.close(out);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -490,7 +606,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.FINE, String.format("Unable to unzip zip file '%s'", dependency.getFilePath()), ex);
|
||||
LOGGER.debug("Unable to unzip zip file '{}'", dependency.getFilePath(), ex);
|
||||
} finally {
|
||||
ZipFile.closeQuietly(zip);
|
||||
}
|
||||
|
||||
@@ -17,33 +17,39 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.xpath.XPath;
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.io.output.NullOutputStream;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Evidence;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.w3c.dom.Document;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.xpath.XPath;
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.apache.commons.lang3.SystemUtils;
|
||||
import org.owasp.dependencycheck.utils.XmlUtils;
|
||||
|
||||
/**
|
||||
* Analyzer for getting company, product, and version information from a .NET assembly.
|
||||
* Analyzer for getting company, product, and version information from a .NET
|
||||
* assembly.
|
||||
*
|
||||
* @author colezlaw
|
||||
*
|
||||
@@ -61,37 +67,34 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* The list of supported extensions
|
||||
*/
|
||||
private static final Set<String> SUPPORTED_EXTENSIONS = newHashSet("dll", "exe");
|
||||
private static final String[] SUPPORTED_EXTENSIONS = {"dll", "exe"};
|
||||
/**
|
||||
* The temp value for GrokAssembly.exe
|
||||
*/
|
||||
private File grokAssemblyExe = null;
|
||||
/**
|
||||
* The DocumentBuilder for parsing the XML
|
||||
*/
|
||||
private DocumentBuilder builder;
|
||||
/**
|
||||
* Logger
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(AssemblyAnalyzer.class.getName(), "dependencycheck-resources");
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(AssemblyAnalyzer.class);
|
||||
|
||||
/**
|
||||
* Builds the beginnings of a List for ProcessBuilder
|
||||
*
|
||||
* @return the list of arguments to begin populating the ProcessBuilder
|
||||
*/
|
||||
private List<String> buildArgumentList() {
|
||||
protected List<String> buildArgumentList() {
|
||||
// Use file.separator as a wild guess as to whether this is Windows
|
||||
final List<String> args = new ArrayList<String>();
|
||||
if (!"\\".equals(System.getProperty("file.separator"))) {
|
||||
if (!SystemUtils.IS_OS_WINDOWS) {
|
||||
if (Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH) != null) {
|
||||
args.add(Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH));
|
||||
} else {
|
||||
} else if (isInPath("mono")) {
|
||||
args.add("mono");
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
args.add(grokAssemblyExe.getPath());
|
||||
|
||||
return args;
|
||||
}
|
||||
|
||||
@@ -103,48 +106,52 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException if anything goes sideways
|
||||
*/
|
||||
@Override
|
||||
public void analyzeFileType(Dependency dependency, Engine engine)
|
||||
public void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
if (grokAssemblyExe == null) {
|
||||
LOGGER.warning("analyzer.AssemblyAnalyzer.notdeployed");
|
||||
LOGGER.warn("GrokAssembly didn't get deployed");
|
||||
return;
|
||||
}
|
||||
|
||||
final List<String> args = buildArgumentList();
|
||||
if (args == null) {
|
||||
LOGGER.warn("Assembly Analyzer was unable to execute");
|
||||
return;
|
||||
}
|
||||
args.add(dependency.getActualFilePath());
|
||||
final ProcessBuilder pb = new ProcessBuilder(args);
|
||||
BufferedReader rdr = null;
|
||||
Document doc = null;
|
||||
try {
|
||||
final Process proc = pb.start();
|
||||
// Try evacuating the error stream
|
||||
rdr = new BufferedReader(new InputStreamReader(proc.getErrorStream(), "UTF-8"));
|
||||
String line = null;
|
||||
// CHECKSTYLE:OFF
|
||||
while (rdr.ready() && (line = rdr.readLine()) != null) {
|
||||
LOGGER.log(Level.WARNING, "analyzer.AssemblyAnalyzer.grokassembly.stderr", line);
|
||||
}
|
||||
// CHECKSTYLE:ON
|
||||
int rc = 0;
|
||||
final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
|
||||
|
||||
doc = builder.parse(proc.getInputStream());
|
||||
|
||||
// Try evacuating the error stream
|
||||
final String errorStream = IOUtils.toString(proc.getErrorStream(), "UTF-8");
|
||||
if (null != errorStream && !errorStream.isEmpty()) {
|
||||
LOGGER.warn("Error from GrokAssembly: {}", errorStream);
|
||||
}
|
||||
|
||||
int rc = 0;
|
||||
try {
|
||||
rc = proc.waitFor();
|
||||
} catch (InterruptedException ie) {
|
||||
return;
|
||||
}
|
||||
if (rc == 3) {
|
||||
LOGGER.log(Level.FINE, "analyzer.AssemblyAnalyzer.notassembly", dependency.getActualFilePath());
|
||||
LOGGER.debug("{} is not a .NET assembly or executable and as such cannot be analyzed by dependency-check",
|
||||
dependency.getActualFilePath());
|
||||
return;
|
||||
} else if (rc != 0) {
|
||||
LOGGER.log(Level.WARNING, "analyzer.AssemblyAnalyzer.grokassembly.rc", rc);
|
||||
LOGGER.warn("Return code {} from GrokAssembly", rc);
|
||||
}
|
||||
|
||||
final XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
|
||||
// First, see if there was an error
|
||||
final String error = xpath.evaluate("/assembly/error", doc);
|
||||
if (error != null && !"".equals(error)) {
|
||||
if (error != null && !error.isEmpty()) {
|
||||
throw new AnalysisException(error);
|
||||
}
|
||||
|
||||
@@ -166,131 +173,142 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
product, Confidence.HIGH));
|
||||
}
|
||||
|
||||
} catch (ParserConfigurationException pce) {
|
||||
throw new AnalysisException("Error initializing the assembly analyzer", pce);
|
||||
} catch (IOException ioe) {
|
||||
throw new AnalysisException(ioe);
|
||||
} catch (SAXException saxe) {
|
||||
throw new AnalysisException("Couldn't parse GrokAssembly result", saxe);
|
||||
LOGGER.error("----------------------------------------------------");
|
||||
LOGGER.error("Failed to read the Assembly Analyzer results. "
|
||||
+ "On some systems mono-runtime and mono-devel need to be installed.");
|
||||
LOGGER.error("----------------------------------------------------");
|
||||
throw new AnalysisException("Couldn't parse Assembly Analzyzer results (GrokAssembly)", saxe);
|
||||
} catch (XPathExpressionException xpe) {
|
||||
// This shouldn't happen
|
||||
throw new AnalysisException(xpe);
|
||||
} finally {
|
||||
if (rdr != null) {
|
||||
try {
|
||||
rdr.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.FINEST, "ignore", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the analyzer. In this case, extract GrokAssembly.exe to a temporary location.
|
||||
* Initialize the analyzer. In this case, extract GrokAssembly.exe to a
|
||||
* temporary location.
|
||||
*
|
||||
* @throws Exception if anything goes wrong
|
||||
* @throws InitializationException thrown if anything goes wrong
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws Exception {
|
||||
final File tempFile = File.createTempFile("GKA", ".exe", Settings.getTempDirectory());
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
final File tempFile;
|
||||
try {
|
||||
tempFile = File.createTempFile("GKA", ".exe", Settings.getTempDirectory());
|
||||
} catch (IOException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unable to create temporary file for the assembly analyzerr", ex);
|
||||
}
|
||||
FileOutputStream fos = null;
|
||||
InputStream is = null;
|
||||
try {
|
||||
fos = new FileOutputStream(tempFile);
|
||||
is = AssemblyAnalyzer.class.getClassLoader().getResourceAsStream("GrokAssembly.exe");
|
||||
final byte[] buff = new byte[4096];
|
||||
int bread = -1;
|
||||
while ((bread = is.read(buff)) >= 0) {
|
||||
fos.write(buff, 0, bread);
|
||||
}
|
||||
IOUtils.copy(is, fos);
|
||||
|
||||
grokAssemblyExe = tempFile;
|
||||
// Set the temp file to get deleted when we're done
|
||||
grokAssemblyExe.deleteOnExit();
|
||||
LOGGER.log(Level.FINE, "analyzer.AssemblyAnalyzer.grokassembly.deployed", grokAssemblyExe.getPath());
|
||||
LOGGER.debug("Extracted GrokAssembly.exe to {}", grokAssemblyExe.getPath());
|
||||
} catch (IOException ioe) {
|
||||
this.setEnabled(false);
|
||||
LOGGER.log(Level.WARNING, "analyzer.AssemblyAnalyzer.grokassembly.notdeployed", ioe.getMessage());
|
||||
throw new AnalysisException("Could not extract GrokAssembly.exe", ioe);
|
||||
LOGGER.warn("Could not extract GrokAssembly.exe: {}", ioe.getMessage());
|
||||
throw new InitializationException("Could not extract GrokAssembly.exe", ioe);
|
||||
} finally {
|
||||
if (fos != null) {
|
||||
try {
|
||||
fos.close();
|
||||
} catch (Throwable e) {
|
||||
LOGGER.fine("Error closing output stream");
|
||||
LOGGER.debug("Error closing output stream");
|
||||
}
|
||||
}
|
||||
if (is != null) {
|
||||
try {
|
||||
is.close();
|
||||
} catch (Throwable e) {
|
||||
LOGGER.fine("Error closing input stream");
|
||||
LOGGER.debug("Error closing input stream");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Now, need to see if GrokAssembly actually runs from this location.
|
||||
final List<String> args = buildArgumentList();
|
||||
BufferedReader rdr = null;
|
||||
//TODO this creates an "unreported" error - if someone doesn't look
// at the command output this could easily be missed (especially in an
// Ant or Maven build).
//
// We need to create a non-fatal warning error type that will
// get added to the report.
//TODO this idea needs to be replicated in the bundle audit analyzer.
|
||||
if (args == null) {
|
||||
setEnabled(false);
|
||||
LOGGER.error("----------------------------------------------------");
|
||||
LOGGER.error(".NET Assembly Analyzer could not be initialized and at least one "
|
||||
+ "'exe' or 'dll' was scanned. The 'mono' executable could not be found on "
|
||||
+ "the path; either disable the Assembly Analyzer or configure the path mono. "
|
||||
+ "On some systems mono-runtime and mono-devel need to be installed.");
|
||||
LOGGER.error("----------------------------------------------------");
|
||||
return;
|
||||
}
|
||||
try {
|
||||
final ProcessBuilder pb = new ProcessBuilder(args);
|
||||
final Process p = pb.start();
|
||||
// Try evacuating the error stream
|
||||
rdr = new BufferedReader(new InputStreamReader(p.getErrorStream(), "UTF-8"));
|
||||
// CHECKSTYLE:OFF
|
||||
while (rdr.ready() && rdr.readLine() != null) {
|
||||
// We expect this to complain
|
||||
}
|
||||
// CHECKSTYLE:ON
|
||||
final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(p.getInputStream());
|
||||
IOUtils.copy(p.getErrorStream(), NullOutputStream.NULL_OUTPUT_STREAM);
|
||||
|
||||
final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
|
||||
final Document doc = builder.parse(p.getInputStream());
|
||||
final XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
final String error = xpath.evaluate("/assembly/error", doc);
|
||||
if (p.waitFor() != 1 || error == null || "".equals(error)) {
|
||||
LOGGER.warning("An error occurred with the .NET AssemblyAnalyzer, please see the log for more details.");
|
||||
LOGGER.fine("GrokAssembly.exe is not working properly");
|
||||
if (p.waitFor() != 1 || error == null || error.isEmpty()) {
|
||||
LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer, please see the log for more details.");
|
||||
LOGGER.debug("GrokAssembly.exe is not working properly");
|
||||
grokAssemblyExe = null;
|
||||
this.setEnabled(false);
|
||||
throw new AnalysisException("Could not execute .NET AssemblyAnalyzer");
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Could not execute .NET AssemblyAnalyzer");
|
||||
}
|
||||
} catch (InitializationException e) {
|
||||
setEnabled(false);
|
||||
throw e;
|
||||
} catch (Throwable e) {
|
||||
if (e instanceof AnalysisException) {
|
||||
throw (AnalysisException) e;
|
||||
} else {
|
||||
LOGGER.warning("analyzer.AssemblyAnalyzer.grokassembly.initialization.failed");
|
||||
LOGGER.log(Level.FINE, "analyzer.AssemblyAnalyzer.grokassembly.initialization.message", e.getMessage());
|
||||
this.setEnabled(false);
|
||||
throw new AnalysisException("An error occured with the .NET AssemblyAnalyzer", e);
|
||||
}
|
||||
} finally {
|
||||
if (rdr != null) {
|
||||
try {
|
||||
rdr.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.FINEST, "ignore", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws Exception {
|
||||
super.close();
|
||||
try {
|
||||
if (grokAssemblyExe != null && !grokAssemblyExe.delete()) {
|
||||
grokAssemblyExe.deleteOnExit();
|
||||
}
|
||||
} catch (SecurityException se) {
|
||||
LOGGER.fine("analyzer.AssemblyAnalyzer.grokassembly.notdeleted");
|
||||
LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer;\n"
|
||||
+ "this can be ignored unless you are scanning .NET DLLs. Please see the log for more details.");
|
||||
LOGGER.debug("Could not execute GrokAssembly {}", e.getMessage());
|
||||
setEnabled(false);
|
||||
throw new InitializationException("An error occurred with the .NET AssemblyAnalyzer", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the set of extensions supported by this analyzer.
|
||||
* Removes resources used from the local file system.
|
||||
*
|
||||
* @return the list of supported extensions
|
||||
* @throws Exception thrown if there is a problem closing the analyzer
|
||||
*/
|
||||
@Override
|
||||
public Set<String> getSupportedExtensions() {
|
||||
return SUPPORTED_EXTENSIONS;
|
||||
public void closeAnalyzer() throws Exception {
|
||||
try {
|
||||
if (grokAssemblyExe != null && !grokAssemblyExe.delete()) {
|
||||
LOGGER.debug("Unable to delete temporary GrokAssembly.exe; attempting delete on exit");
|
||||
grokAssemblyExe.deleteOnExit();
|
||||
}
|
||||
} catch (SecurityException se) {
|
||||
LOGGER.debug("Can't delete temporary GrokAssembly.exe");
|
||||
grokAssemblyExe.deleteOnExit();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The File Filter used to filter supported extensions.
|
||||
*/
|
||||
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(
|
||||
SUPPORTED_EXTENSIONS).build();
|
||||
|
||||
@Override
|
||||
protected FileFilter getFileFilter() {
|
||||
return FILTER;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -314,7 +332,8 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's enabled property.
|
||||
* Returns the key used in the properties file to reference the analyzer's
|
||||
* enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@@ -322,4 +341,29 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tests to see if a file is in the system path. <b>Note</b> - the current
|
||||
* implementation only works on non-windows platforms. For purposes of the
|
||||
* AssemblyAnalyzer this is okay as this is only needed on Mac/*nix.
|
||||
*
|
||||
* @param file the executable to look for
|
||||
* @return <code>true</code> if the file exists; otherwise
|
||||
* <code>false</code>
|
||||
*/
|
||||
private boolean isInPath(String file) {
|
||||
final ProcessBuilder pb = new ProcessBuilder("which", file);
|
||||
try {
|
||||
final Process proc = pb.start();
|
||||
final int retCode = proc.waitFor();
|
||||
if (retCode == 0) {
|
||||
return true;
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Path seach failed for " + file);
|
||||
} catch (InterruptedException ex) {
|
||||
LOGGER.debug("Path seach failed for " + file);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,278 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2015 Institute for Defense Analyses. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.UrlStringUtils;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
* Used to analyze Autoconf input files named configure.ac or configure.in.
|
||||
* Files simply named "configure" are also analyzed on the assumption that they
* were generated by Autoconf and contain certain special package descriptor variables.
|
||||
*
|
||||
* @author Dale Visser
|
||||
* @see <a href="https://www.gnu.org/software/autoconf/">Autoconf - GNU Project
|
||||
* - Free Software Foundation (FSF)</a>
|
||||
*/
|
||||
@Experimental
|
||||
public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* Autoconf output filename.
|
||||
*/
|
||||
private static final String CONFIGURE = "configure";
|
||||
|
||||
/**
|
||||
* Autoconf input filename.
|
||||
*/
|
||||
private static final String CONFIGURE_IN = "configure.in";
|
||||
|
||||
/**
|
||||
* Autoconf input filename.
|
||||
*/
|
||||
private static final String CONFIGURE_AC = "configure.ac";
|
||||
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "Autoconf Analyzer";
|
||||
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
|
||||
|
||||
/**
|
||||
* The set of file extensions supported by this analyzer.
|
||||
*/
|
||||
private static final String[] EXTENSIONS = {"ac", "in"};
|
||||
|
||||
/**
|
||||
* Matches AC_INIT variables in the output configure script.
|
||||
*/
|
||||
private static final Pattern PACKAGE_VAR = Pattern.compile(
|
||||
"PACKAGE_(.+?)='(.*?)'", Pattern.DOTALL | Pattern.CASE_INSENSITIVE);
|
||||
|
||||
/**
|
||||
* Matches AC_INIT statement in configure.ac file.
|
||||
*/
|
||||
private static final Pattern AC_INIT_PATTERN;
|
||||
|
||||
static {
|
||||
// each instance of param or sep_param has a capture group
|
||||
final String param = "\\[{0,2}(.+?)\\]{0,2}";
|
||||
final String sepParam = "\\s*,\\s*" + param;
|
||||
// Group 1: Package
|
||||
// Group 2: Version
|
||||
// Group 3: optional
|
||||
// Group 4: Bug report address (if it exists)
|
||||
// Group 5: optional
|
||||
// Group 6: Tarname (if it exists)
|
||||
// Group 7: optional
|
||||
// Group 8: URL (if it exists)
|
||||
AC_INIT_PATTERN = Pattern.compile(String.format(
|
||||
"AC_INIT\\(%s%s(%s)?(%s)?(%s)?\\s*\\)", param, sepParam,
|
||||
sepParam, sepParam, sepParam), Pattern.DOTALL
|
||||
| Pattern.CASE_INSENSITIVE);
|
||||
}
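As a rough illustration, the stand-alone sketch below rebuilds the same pattern as AC_INIT_PATTERN above and shows what it captures; the configure.ac line is a made-up example, not taken from the patch. (The PACKAGE_VAR pattern plays the analogous role for generated configure scripts, matching lines such as PACKAGE_NAME='hello'.)
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class AcInitPatternSketch {
    public static void main(String[] args) {
        // rebuild the same expression as AC_INIT_PATTERN
        final String param = "\\[{0,2}(.+?)\\]{0,2}";
        final String sepParam = "\\s*,\\s*" + param;
        final Pattern acInit = Pattern.compile(String.format(
                "AC_INIT\\(%s%s(%s)?(%s)?(%s)?\\s*\\)", param, sepParam,
                sepParam, sepParam, sepParam),
                Pattern.DOTALL | Pattern.CASE_INSENSITIVE);

        final Matcher m = acInit.matcher(
                "AC_INIT([GNU Hello], [2.10], [bug-hello@gnu.org])");
        if (m.find()) {
            System.out.println("package = " + m.group(1)); // GNU Hello
            System.out.println("version = " + m.group(2)); // 2.10
            System.out.println("bugs    = " + m.group(4)); // bug-hello@gnu.org
        }
    }
}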
|
||||
/**
|
||||
* The file filter used to determine which files this analyzer supports.
|
||||
*/
|
||||
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addFilenames(CONFIGURE).addExtensions(
|
||||
EXTENSIONS).build();
|
||||
|
||||
/**
|
||||
* Returns the FileFilter
|
||||
*
|
||||
* @return the FileFilter
|
||||
*/
|
||||
@Override
|
||||
protected FileFilter getFileFilter() {
|
||||
return FILTER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phase that the analyzer is intended to run in.
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's
|
||||
* enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_AUTOCONF_ENABLED;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
final File actualFile = dependency.getActualFile();
|
||||
final String name = actualFile.getName();
|
||||
if (name.startsWith(CONFIGURE)) {
|
||||
final File parent = actualFile.getParentFile();
|
||||
final String parentName = parent.getName();
|
||||
dependency.setDisplayFileName(parentName + "/" + name);
|
||||
final boolean isOutputScript = CONFIGURE.equals(name);
|
||||
if (isOutputScript || CONFIGURE_AC.equals(name)
|
||||
|| CONFIGURE_IN.equals(name)) {
|
||||
final String contents = getFileContents(actualFile);
|
||||
if (!contents.isEmpty()) {
|
||||
if (isOutputScript) {
|
||||
extractConfigureScriptEvidence(dependency, name,
|
||||
contents);
|
||||
} else {
|
||||
gatherEvidence(dependency, name, contents);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
engine.getDependencies().remove(dependency);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts evidence from the configuration.
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param name the name of the source of evidence
|
||||
* @param contents the contents to analyze for evidence
|
||||
*/
|
||||
private void extractConfigureScriptEvidence(Dependency dependency,
|
||||
final String name, final String contents) {
|
||||
final Matcher matcher = PACKAGE_VAR.matcher(contents);
|
||||
while (matcher.find()) {
|
||||
final String variable = matcher.group(1);
|
||||
final String value = matcher.group(2);
|
||||
if (!value.isEmpty()) {
|
||||
if (variable.endsWith("NAME")) {
|
||||
dependency.getProductEvidence().addEvidence(name, variable,
|
||||
value, Confidence.HIGHEST);
|
||||
} else if ("VERSION".equals(variable)) {
|
||||
dependency.getVersionEvidence().addEvidence(name, variable,
|
||||
value, Confidence.HIGHEST);
|
||||
} else if ("BUGREPORT".equals(variable)) {
|
||||
dependency.getVendorEvidence().addEvidence(name, variable,
|
||||
value, Confidence.HIGH);
|
||||
} else if ("URL".equals(variable)) {
|
||||
dependency.getVendorEvidence().addEvidence(name, variable,
|
||||
value, Confidence.HIGH);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the contents of a given file.
|
||||
*
|
||||
* @param actualFile the file to read
|
||||
* @return the contents of the file
|
||||
* @throws AnalysisException thrown if there is an IO Exception
|
||||
*/
|
||||
private String getFileContents(final File actualFile)
|
||||
throws AnalysisException {
|
||||
try {
|
||||
return FileUtils.readFileToString(actualFile, Charset.defaultCharset()).trim();
|
||||
} catch (IOException e) {
|
||||
throw new AnalysisException(
|
||||
"Problem occurred while reading dependency file.", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gathers evidence from a given file
|
||||
*
|
||||
* @param dependency the dependency to add evidence to
|
||||
* @param name the source of the evidence
|
||||
* @param contents the evidence to analyze
|
||||
*/
|
||||
private void gatherEvidence(Dependency dependency, final String name,
|
||||
String contents) {
|
||||
final Matcher matcher = AC_INIT_PATTERN.matcher(contents);
|
||||
if (matcher.find()) {
|
||||
final EvidenceCollection productEvidence = dependency
|
||||
.getProductEvidence();
|
||||
productEvidence.addEvidence(name, "Package", matcher.group(1),
|
||||
Confidence.HIGHEST);
|
||||
dependency.getVersionEvidence().addEvidence(name,
|
||||
"Package Version", matcher.group(2), Confidence.HIGHEST);
|
||||
final EvidenceCollection vendorEvidence = dependency
|
||||
.getVendorEvidence();
|
||||
if (null != matcher.group(3)) {
|
||||
vendorEvidence.addEvidence(name, "Bug report address",
|
||||
matcher.group(4), Confidence.HIGH);
|
||||
}
|
||||
if (null != matcher.group(5)) {
|
||||
productEvidence.addEvidence(name, "Tarname", matcher.group(6),
|
||||
Confidence.HIGH);
|
||||
}
|
||||
if (null != matcher.group(7)) {
|
||||
final String url = matcher.group(8);
|
||||
if (UrlStringUtils.isUrl(url)) {
|
||||
vendorEvidence.addEvidence(name, "URL", url,
|
||||
Confidence.HIGH);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the file type analyzer.
|
||||
*
|
||||
* @throws InitializationException thrown if there is an exception during
|
||||
* initialization
|
||||
*/
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
// No initialization needed.
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,255 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2015 Institute for Defense Analyses. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.Checksum;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Used to analyze CMake build files, and collect information that can be used
|
||||
* to determine the associated CPE.</p>
|
||||
* <p>
|
||||
* Note: This analyzer catches straightforward invocations of the project
|
||||
* command, plus some other observed patterns of version inclusion in real CMake
|
||||
* projects. Many projects make use of older versions of CMake and/or use custom
|
||||
* "homebrew" ways to insert version information. Hopefully as the newer CMake
|
||||
* call pattern grows in usage, this analyzer will allow more CPEs to be
|
||||
* identified.</p>
|
||||
*
|
||||
* @author Dale Visser
|
||||
*/
|
||||
@Experimental
|
||||
public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(CMakeAnalyzer.class);
|
||||
|
||||
/**
|
||||
* Used when compiling file scanning regex patterns.
|
||||
*/
|
||||
private static final int REGEX_OPTIONS = Pattern.DOTALL
|
||||
| Pattern.CASE_INSENSITIVE | Pattern.MULTILINE;
|
||||
|
||||
/**
|
||||
* Regex to extract the product information.
|
||||
*/
|
||||
private static final Pattern PROJECT = Pattern.compile(
|
||||
"^ *project *\\([ \\n]*(\\w+)[ \\n]*.*?\\)", REGEX_OPTIONS);
|
||||
|
||||
/**
|
||||
* Regex to extract product and version information.
|
||||
*
|
||||
* Group 1: Product
|
||||
*
|
||||
* Group 2: Version
|
||||
*/
|
||||
private static final Pattern SET_VERSION = Pattern
|
||||
.compile(
|
||||
"^ *set\\s*\\(\\s*(\\w+)_version\\s+\"?(\\d+(?:\\.\\d+)+)[\\s\"]?\\)",
|
||||
REGEX_OPTIONS);
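A small, self-contained sketch showing what the PROJECT and SET_VERSION patterns above capture; the CMakeLists.txt snippet is invented for illustration and is not part of the patch:
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class CMakePatternSketch {
    private static final int REGEX_OPTIONS = Pattern.DOTALL
            | Pattern.CASE_INSENSITIVE | Pattern.MULTILINE;

    public static void main(String[] args) {
        final Pattern project = Pattern.compile(
                "^ *project *\\([ \\n]*(\\w+)[ \\n]*.*?\\)", REGEX_OPTIONS);
        final Pattern setVersion = Pattern.compile(
                "^ *set\\s*\\(\\s*(\\w+)_version\\s+\"?(\\d+(?:\\.\\d+)+)[\\s\"]?\\)",
                REGEX_OPTIONS);

        // hypothetical CMakeLists.txt content
        final String contents = "project(zlib C)\n"
                + "set(ZLIB_VERSION \"1.2.8\")\n";

        final Matcher p = project.matcher(contents);
        if (p.find()) {
            System.out.println("product = " + p.group(1)); // zlib
        }
        final Matcher v = setVersion.matcher(contents);
        if (v.find()) {
            System.out.println(v.group(1) + " -> " + v.group(2)); // ZLIB -> 1.2.8
        }
    }
}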
|
||||
/**
|
||||
* Detects files that can be analyzed.
|
||||
*/
|
||||
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(".cmake")
|
||||
.addFilenames("CMakeLists.txt").build();
|
||||
|
||||
/**
|
||||
* Returns the name of the CMake analyzer.
|
||||
*
|
||||
* @return the name of the analyzer
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return "CMake Analyzer";
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicates that this analyzer is used for information collection.
|
||||
*
|
||||
* @return INFORMATION_COLLECTION
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return AnalysisPhase.INFORMATION_COLLECTION;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the file filter used to determine which files this analyzer supports.
*
* @return the file filter
|
||||
*/
|
||||
@Override
|
||||
protected FileFilter getFileFilter() {
|
||||
return FILTER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the analyzer.
|
||||
*
|
||||
* @throws InitializationException thrown if an exception occurs getting an
|
||||
* instance of SHA1
|
||||
*/
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
getSha1MessageDigest();
|
||||
} catch (IllegalStateException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes CMake build files and adds discovered evidence to the dependency.
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param engine the engine being used to perform the scan
|
||||
* @throws AnalysisException thrown if there is an unrecoverable error
|
||||
* analyzing the dependency
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
final File file = dependency.getActualFile();
|
||||
final String parentName = file.getParentFile().getName();
|
||||
final String name = file.getName();
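// Display the file as "<parent directory>/<file name>" (e.g. "opencv/CMakeLists.txt",
// illustrative) so multiple CMake files remain distinguishable in the report.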
dependency.setDisplayFileName(String.format("%s%c%s", parentName, File.separatorChar, name));
|
||||
String contents;
|
||||
try {
|
||||
contents = FileUtils.readFileToString(file, Charset.defaultCharset()).trim();
|
||||
} catch (IOException e) {
|
||||
throw new AnalysisException(
|
||||
"Problem occurred while reading dependency file.", e);
|
||||
}
|
||||
|
||||
if (StringUtils.isNotBlank(contents)) {
|
||||
final Matcher m = PROJECT.matcher(contents);
|
||||
int count = 0;
|
||||
while (m.find()) {
|
||||
count++;
|
||||
LOGGER.debug(String.format(
|
||||
"Found project command match with %d groups: %s",
|
||||
m.groupCount(), m.group(0)));
|
||||
final String group = m.group(1);
|
||||
LOGGER.debug("Group 1: " + group);
|
||||
dependency.getProductEvidence().addEvidence(name, "Project",
|
||||
group, Confidence.HIGH);
|
||||
}
|
||||
LOGGER.debug("Found {} matches.", count);
|
||||
analyzeSetVersionCommand(dependency, engine, contents);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the version information from the contents. If more than one
* version is found, additional dependencies are added to the dependency
|
||||
* list.
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param engine the dependency-check engine
|
||||
* @param contents the contents of the build file being analyzed
|
||||
*/
|
||||
@edu.umd.cs.findbugs.annotations.SuppressFBWarnings(
|
||||
value = "DM_DEFAULT_ENCODING",
|
||||
justification = "Default encoding is only used if UTF-8 is not available")
|
||||
private void analyzeSetVersionCommand(Dependency dependency, Engine engine, String contents) {
|
||||
Dependency currentDep = dependency;
|
||||
|
||||
final Matcher m = SET_VERSION.matcher(contents);
|
||||
int count = 0;
|
||||
while (m.find()) {
|
||||
count++;
|
||||
LOGGER.debug("Found project command match with {} groups: {}",
|
||||
m.groupCount(), m.group(0));
|
||||
String product = m.group(1);
|
||||
final String version = m.group(2);
|
||||
LOGGER.debug("Group 1: " + product);
|
||||
LOGGER.debug("Group 2: " + version);
|
||||
final String aliasPrefix = "ALIASOF_";
|
||||
if (product.startsWith(aliasPrefix)) {
|
||||
product = product.replaceFirst(aliasPrefix, "");
|
||||
}
|
||||
if (count > 1) {
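// Each match beyond the first becomes its own "virtual" dependency, keyed by a
// synthetic path such as "<original file path>:OPENCV" (illustrative example).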
//TODO - refactor so we do not assign to the parameter (checkstyle)
|
||||
currentDep = new Dependency(dependency.getActualFile());
|
||||
currentDep.setDisplayFileName(String.format("%s:%s", dependency.getDisplayFileName(), product));
|
||||
final String filePath = String.format("%s:%s", dependency.getFilePath(), product);
|
||||
currentDep.setFilePath(filePath);
|
||||
|
||||
byte[] path;
|
||||
try {
|
||||
path = filePath.getBytes("UTF-8");
|
||||
} catch (UnsupportedEncodingException ex) {
|
||||
path = filePath.getBytes();
|
||||
}
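// Give the virtual dependency a deterministic SHA-1 derived from its synthetic path.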
final MessageDigest sha1 = getSha1MessageDigest();
|
||||
currentDep.setSha1sum(Checksum.getHex(sha1.digest(path)));
|
||||
engine.getDependencies().add(currentDep);
|
||||
}
|
||||
final String source = currentDep.getDisplayFileName();
|
||||
currentDep.getProductEvidence().addEvidence(source, "Product",
|
||||
product, Confidence.MEDIUM);
|
||||
currentDep.getVersionEvidence().addEvidence(source, "Version",
|
||||
version, Confidence.MEDIUM);
|
||||
}
|
||||
LOGGER.debug(String.format("Found %d matches.", count));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_CMAKE_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the sha1 message digest.
|
||||
*
|
||||
* @return the sha1 message digest
|
||||
*/
|
||||
private MessageDigest getSha1MessageDigest() {
|
||||
try {
|
||||
return MessageDigest.getInstance("SHA1");
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
LOGGER.error(e.getMessage());
|
||||
throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -25,8 +25,8 @@ import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.StringTokenizer;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import org.apache.commons.lang3.builder.CompareToBuilder;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.index.CorruptIndexException;
|
||||
import org.apache.lucene.queryparser.classic.ParseException;
|
||||
@@ -47,21 +47,26 @@ import org.owasp.dependencycheck.dependency.Evidence;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersion;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* CPEAnalyzer is a utility class that takes a project dependency and attempts to discern if there is an associated CPE. It uses
|
||||
* the evidence contained within the dependency to search the Lucene index.
|
||||
* CPEAnalyzer is a utility class that takes a project dependency and attempts
|
||||
* to discern if there is an associated CPE. It uses the evidence contained
|
||||
* within the dependency to search the Lucene index.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class CPEAnalyzer implements Analyzer {
|
||||
public class CPEAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(CPEAnalyzer.class.getName());
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(CPEAnalyzer.class);
|
||||
/**
|
||||
* The maximum number of query results to return.
|
||||
*/
|
||||
@@ -71,15 +76,18 @@ public class CPEAnalyzer implements Analyzer {
|
||||
*/
|
||||
static final String WEIGHTING_BOOST = "^5";
|
||||
/**
|
||||
* A string representation of a regular expression defining characters utilized within the CPE Names.
|
||||
* A string representation of a regular expression defining characters
|
||||
* utilized within the CPE Names.
|
||||
*/
|
||||
static final String CLEANSE_CHARACTER_RX = "[^A-Za-z0-9 ._-]";
|
||||
/**
|
||||
* A string representation of a regular expression used to remove all but alpha characters.
|
||||
* A string representation of a regular expression used to remove all but
|
||||
* alpha characters.
|
||||
*/
|
||||
static final String CLEANSE_NONALPHA_RX = "[^A-Za-z]*";
|
||||
/**
|
||||
* The additional size to add to a new StringBuilder to account for extra data that will be written into the string.
|
||||
* The additional size to add to a new StringBuilder to account for extra
|
||||
* data that will be written into the string.
|
||||
*/
|
||||
static final int STRING_BUILDER_BUFFER = 20;
|
||||
/**
|
||||
@@ -115,35 +123,55 @@ public class CPEAnalyzer implements Analyzer {
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return AnalysisPhase.IDENTIFIER_ANALYSIS;
|
||||
}
|
||||
|
||||
/**
|
||||
* The default is to support parallel processing; this analyzer overrides that
* and always runs serially.
*
* @return false
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
/**
|
||||
* Creates the CPE Lucene Index.
|
||||
*
|
||||
* @throws Exception is thrown if there is an issue opening the index.
|
||||
* @throws InitializationException is thrown if there is an issue opening
|
||||
* the index.
|
||||
*/
|
||||
@Override
|
||||
public void initialize() throws Exception {
|
||||
this.open();
|
||||
public void initializeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
this.open();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Exception initializing the Lucene Index", ex);
|
||||
throw new InitializationException("An exception occurred initializing the Lucene Index", ex);
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.debug("Exception accessing the database", ex);
|
||||
throw new InitializationException("An exception occurred accessing the database", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the data source.
|
||||
*
|
||||
* @throws IOException when the Lucene directory to be queried does not exist or is corrupt.
|
||||
* @throws DatabaseException when the database throws an exception. This usually occurs when the database is in use by another
|
||||
* process.
|
||||
* @throws IOException when the Lucene directory to be queried does not
|
||||
* exist or is corrupt.
|
||||
* @throws DatabaseException when the database throws an exception. This
|
||||
* usually occurs when the database is in use by another process.
|
||||
*/
|
||||
public void open() throws IOException, DatabaseException {
|
||||
LOGGER.log(Level.FINE, "Opening the CVE Database");
|
||||
cve = new CveDB();
|
||||
cve.open();
|
||||
LOGGER.log(Level.FINE, "Creating the Lucene CPE Index");
|
||||
cpe = CpeMemoryIndex.getInstance();
|
||||
try {
|
||||
cpe.open(cve);
|
||||
} catch (IndexException ex) {
|
||||
LOGGER.log(Level.FINE, "IndexException", ex);
|
||||
throw new DatabaseException(ex);
|
||||
if (!isOpen()) {
|
||||
cve = new CveDB();
|
||||
cve.open();
|
||||
cpe = CpeMemoryIndex.getInstance();
|
||||
try {
|
||||
final long creationStart = System.currentTimeMillis();
|
||||
cpe.open(cve);
|
||||
final long creationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - creationStart);
|
||||
LOGGER.info("Created CPE Index ({} seconds)", creationSeconds);
|
||||
} catch (IndexException ex) {
|
||||
LOGGER.debug("IndexException", ex);
|
||||
throw new DatabaseException(ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -151,12 +179,14 @@ public class CPEAnalyzer implements Analyzer {
|
||||
* Closes the data sources.
|
||||
*/
|
||||
@Override
|
||||
public void close() {
|
||||
public void closeAnalyzer() {
|
||||
if (cpe != null) {
|
||||
cpe.close();
|
||||
cpe = null;
|
||||
}
|
||||
if (cve != null) {
|
||||
cve.close();
|
||||
cve = null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -165,8 +195,9 @@ public class CPEAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Searches the data store of CPE entries, trying to identify the CPE for the given dependency based on the evidence contained
|
||||
* within. The dependency passed in is updated with any identified CPE values.
|
||||
* Searches the data store of CPE entries, trying to identify the CPE for
|
||||
* the given dependency based on the evidence contained within. The
|
||||
* dependency passed in is updated with any identified CPE values.
|
||||
*
|
||||
* @param dependency the dependency to search for CPE entries on.
|
||||
* @throws CorruptIndexException is thrown when the Lucene index is corrupt.
|
||||
@@ -180,25 +211,25 @@ public class CPEAnalyzer implements Analyzer {
|
||||
for (Confidence confidence : Confidence.values()) {
|
||||
if (dependency.getVendorEvidence().contains(confidence)) {
|
||||
vendors = addEvidenceWithoutDuplicateTerms(vendors, dependency.getVendorEvidence(), confidence);
|
||||
LOGGER.fine(String.format("vendor search: %s", vendors));
|
||||
LOGGER.debug("vendor search: {}", vendors);
|
||||
}
|
||||
if (dependency.getProductEvidence().contains(confidence)) {
|
||||
products = addEvidenceWithoutDuplicateTerms(products, dependency.getProductEvidence(), confidence);
|
||||
LOGGER.fine(String.format("product search: %s", products));
|
||||
LOGGER.debug("product search: {}", products);
|
||||
}
|
||||
if (!vendors.isEmpty() && !products.isEmpty()) {
|
||||
final List<IndexEntry> entries = searchCPE(vendors, products, dependency.getProductEvidence().getWeighting(),
|
||||
dependency.getVendorEvidence().getWeighting());
|
||||
final List<IndexEntry> entries = searchCPE(vendors, products, dependency.getVendorEvidence().getWeighting(),
|
||||
dependency.getProductEvidence().getWeighting());
|
||||
if (entries == null) {
|
||||
continue;
|
||||
}
|
||||
boolean identifierAdded = false;
|
||||
for (IndexEntry e : entries) {
|
||||
LOGGER.fine(String.format("Verifying entry: %s", e.toString()));
|
||||
LOGGER.debug("Verifying entry: {}", e);
|
||||
if (verifyEntry(e, dependency)) {
|
||||
final String vendor = e.getVendor();
|
||||
final String product = e.getProduct();
|
||||
LOGGER.fine(String.format("identified vendor/product: %s/%s", vendor, product));
|
||||
LOGGER.debug("identified vendor/product: {}/{}", vendor, product);
|
||||
identifierAdded |= determineIdentifiers(dependency, vendor, product, confidence);
|
||||
}
|
||||
}
|
||||
@@ -210,9 +241,10 @@ public class CPEAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the text created by concatenating the text and the values from the EvidenceCollection (filtered for a specific
|
||||
* confidence). This attempts to prevent duplicate terms from being added.<br/<br/> Note, if the evidence is longer then 200
|
||||
* characters it will be truncated.
|
||||
* Returns the text created by concatenating the text and the values from
|
||||
* the EvidenceCollection (filtered for a specific confidence). This
|
||||
* attempts to prevent duplicate terms from being added.<br/><br/> Note, if
* the evidence is longer than 200 characters it will be truncated.
|
||||
*
|
||||
* @param text the base text.
|
||||
* @param ec an EvidenceCollection
|
||||
@@ -243,17 +275,19 @@ public class CPEAnalyzer implements Analyzer {
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Searches the Lucene CPE index to identify possible CPE entries associated with the supplied vendor, product, and
|
||||
* version.</p>
|
||||
* Searches the Lucene CPE index to identify possible CPE entries associated
|
||||
* with the supplied vendor, product, and version.</p>
|
||||
*
|
||||
* <p>
|
||||
* If either the vendorWeightings or productWeightings lists have been populated this data is used to add weighting factors to
|
||||
* the search.</p>
|
||||
* If either the vendorWeightings or productWeightings lists have been
|
||||
* populated this data is used to add weighting factors to the search.</p>
|
||||
*
|
||||
* @param vendor the text used to search the vendor field
|
||||
* @param product the text used to search the product field
|
||||
* @param vendorWeightings a list of strings to use to add weighting factors to the vendor field
|
||||
* @param productWeightings Adds a list of strings that will be used to add weighting factors to the product search
|
||||
* @param vendorWeightings a list of strings to use to add weighting factors
|
||||
* to the vendor field
|
||||
* @param productWeightings Adds a list of strings that will be used to add
|
||||
* weighting factors to the product search
|
||||
* @return a list of possible CPE values
|
||||
*/
|
||||
protected List<IndexEntry> searchCPE(String vendor, String product,
|
||||
@@ -281,29 +315,31 @@ public class CPEAnalyzer implements Analyzer {
|
||||
}
|
||||
return ret;
|
||||
} catch (ParseException ex) {
|
||||
final String msg = String.format("Unable to parse: %s", searchString);
|
||||
LOGGER.log(Level.WARNING, "An error occured querying the CPE data. See the log for more details.");
|
||||
LOGGER.log(Level.INFO, msg, ex);
|
||||
LOGGER.warn("An error occurred querying the CPE data. See the log for more details.");
|
||||
LOGGER.info("Unable to parse: {}", searchString, ex);
|
||||
} catch (IOException ex) {
|
||||
final String msg = String.format("IO Error with search string: %s", searchString);
|
||||
LOGGER.log(Level.WARNING, "An error occured reading CPE data. See the log for more details.");
|
||||
LOGGER.log(Level.INFO, msg, ex);
|
||||
LOGGER.warn("An error occurred reading CPE data. See the log for more details.");
|
||||
LOGGER.info("IO Error with search string: {}", searchString, ex);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Builds a Lucene search string by properly escaping data and constructing a valid search query.</p>
|
||||
* Builds a Lucene search string by properly escaping data and constructing
|
||||
* a valid search query.</p>
|
||||
*
|
||||
* <p>
|
||||
* If either the possibleVendor or possibleProducts lists have been populated this data is used to add weighting factors to
|
||||
* the search string generated.</p>
|
||||
* If either the possibleVendor or possibleProducts lists have been
|
||||
* populated this data is used to add weighting factors to the search string
|
||||
* generated.</p>
|
||||
*
|
||||
* @param vendor text to search the vendor field
|
||||
* @param product text to search the product field
|
||||
* @param vendorWeighting a list of strings to apply to the vendor to boost the terms weight
|
||||
* @param productWeightings a list of strings to apply to the product to boost the terms weight
|
||||
* @param vendorWeighting a list of strings to apply to the vendor to boost
|
||||
* the terms weight
|
||||
* @param productWeightings a list of strings to apply to the product to
|
||||
* boost the terms weight
|
||||
* @return the Lucene query
|
||||
*/
|
||||
protected String buildSearch(String vendor, String product,
|
||||
@@ -324,21 +360,25 @@ public class CPEAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* This method constructs a Lucene query for a given field. The searchText is split into separate words and if the word is
|
||||
* within the list of weighted words then an additional weighting is applied to the term as it is appended into the query.
|
||||
* This method constructs a Lucene query for a given field. The searchText
|
||||
* is split into separate words and if the word is within the list of
|
||||
* weighted words then an additional weighting is applied to the term as it
|
||||
* is appended into the query.
|
||||
*
|
||||
* @param sb a StringBuilder that the query text will be appended to.
|
||||
* @param field the field within the Lucene index that the query is searching.
|
||||
* @param field the field within the Lucene index that the query is
|
||||
* searching.
|
||||
* @param searchText text used to construct the query.
|
||||
* @param weightedText a list of terms that will be considered higher importance when searching.
|
||||
* @param weightedText a list of terms that will be considered higher
|
||||
* importance when searching.
|
||||
* @return if the append was successful.
|
||||
*/
|
||||
private boolean appendWeightedSearch(StringBuilder sb, String field, String searchText, Set<String> weightedText) {
|
||||
sb.append(" ").append(field).append(":( ");
|
||||
sb.append(' ').append(field).append(":( ");
|
||||
|
||||
final String cleanText = cleanseText(searchText);
|
||||
|
||||
if ("".equals(cleanText)) {
|
||||
if (cleanText.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -348,20 +388,27 @@ public class CPEAnalyzer implements Analyzer {
|
||||
final StringTokenizer tokens = new StringTokenizer(cleanText);
|
||||
while (tokens.hasMoreElements()) {
|
||||
final String word = tokens.nextToken();
|
||||
String temp = null;
|
||||
StringBuilder temp = null;
|
||||
for (String weighted : weightedText) {
|
||||
final String weightedStr = cleanseText(weighted);
|
||||
if (equalsIgnoreCaseAndNonAlpha(word, weightedStr)) {
|
||||
temp = LuceneUtils.escapeLuceneQuery(word) + WEIGHTING_BOOST;
|
||||
temp = new StringBuilder(word.length() + 2);
|
||||
LuceneUtils.appendEscapedLuceneQuery(temp, word);
|
||||
temp.append(WEIGHTING_BOOST);
|
||||
if (!word.equalsIgnoreCase(weightedStr)) {
|
||||
temp += " " + LuceneUtils.escapeLuceneQuery(weightedStr) + WEIGHTING_BOOST;
|
||||
temp.append(' ');
|
||||
LuceneUtils.appendEscapedLuceneQuery(temp, weightedStr);
|
||||
temp.append(WEIGHTING_BOOST);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
sb.append(' ');
|
||||
if (temp == null) {
|
||||
temp = LuceneUtils.escapeLuceneQuery(word);
|
||||
LuceneUtils.appendEscapedLuceneQuery(sb, word);
|
||||
} else {
|
||||
sb.append(temp);
|
||||
}
|
||||
sb.append(" ").append(temp);
|
||||
}
|
||||
}
|
||||
sb.append(" ) ");
|
||||
@@ -369,7 +416,8 @@ public class CPEAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes characters from the input text that are not used within the CPE index.
|
||||
* Removes characters from the input text that are not used within the CPE
|
||||
* index.
|
||||
*
|
||||
* @param text is the text to remove the characters from.
|
||||
* @return the text having removed some characters.
|
||||
@@ -379,7 +427,8 @@ public class CPEAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Compares two strings after lower casing them and removing the non-alpha characters.
|
||||
* Compares two strings after lower casing them and removing the non-alpha
|
||||
* characters.
|
||||
*
|
||||
* @param l string one to compare.
|
||||
* @param r string two to compare.
|
||||
@@ -396,8 +445,9 @@ public class CPEAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures that the CPE Identified matches the dependency. This validates that the product, vendor, and version information
|
||||
* for the CPE are contained within the dependencies evidence.
|
||||
* Ensures that the CPE Identified matches the dependency. This validates
|
||||
* that the product, vendor, and version information for the CPE are
|
||||
* contained within the dependency's evidence.
|
||||
*
|
||||
* @param entry a CPE entry.
|
||||
* @param dependency the dependency that the CPE entries could be for.
|
||||
@@ -406,6 +456,8 @@ public class CPEAnalyzer implements Analyzer {
|
||||
private boolean verifyEntry(final IndexEntry entry, final Dependency dependency) {
|
||||
boolean isValid = false;
|
||||
|
||||
//TODO - does this nullify some of the fuzzy matching that happens in the lucene search?
|
||||
// for instance CPE some-component and in the evidence we have SomeComponent.
|
||||
if (collectionContainsString(dependency.getProductEvidence(), entry.getProduct())
|
||||
&& collectionContainsString(dependency.getVendorEvidence(), entry.getVendor())) {
|
||||
//&& collectionContainsVersion(dependency.getVersionEvidence(), entry.getVersion())
|
||||
@@ -462,14 +514,16 @@ public class CPEAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a dependency and attempts to determine if there are any CPE identifiers for this dependency.
|
||||
* Analyzes a dependency and attempts to determine if there are any CPE
|
||||
* identifiers for this dependency.
|
||||
*
|
||||
* @param dependency The Dependency to analyze.
|
||||
* @param engine The analysis engine
|
||||
* @throws AnalysisException is thrown if there is an issue analyzing the dependency.
|
||||
* @throws AnalysisException is thrown if there is an issue analyzing the
|
||||
* dependency.
|
||||
*/
|
||||
@Override
|
||||
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected synchronized void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
try {
|
||||
determineCPE(dependency);
|
||||
} catch (CorruptIndexException ex) {
|
||||
@@ -482,15 +536,19 @@ public class CPEAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves a list of CPE values from the CveDB based on the vendor and product passed in. The list is then validated to find
|
||||
* only CPEs that are valid for the given dependency. It is possible that the CPE identified is a best effort "guess" based on
|
||||
* the vendor, product, and version information.
|
||||
* Retrieves a list of CPE values from the CveDB based on the vendor and
|
||||
* product passed in. The list is then validated to find only CPEs that are
|
||||
* valid for the given dependency. It is possible that the CPE identified is
|
||||
* a best effort "guess" based on the vendor, product, and version
|
||||
* information.
|
||||
*
|
||||
* @param dependency the Dependency being analyzed
|
||||
* @param vendor the vendor for the CPE being analyzed
|
||||
* @param product the product for the CPE being analyzed
|
||||
* @param currentConfidence the current confidence being used during analysis
|
||||
* @return <code>true</code> if an identifier was added to the dependency; otherwise <code>false</code>
|
||||
* @param currentConfidence the current confidence being used during
|
||||
* analysis
|
||||
* @return <code>true</code> if an identifier was added to the dependency;
|
||||
* otherwise <code>false</code>
|
||||
* @throws UnsupportedEncodingException is thrown if UTF-8 is not supported
|
||||
*/
|
||||
protected boolean determineIdentifiers(Dependency dependency, String vendor, String product,
|
||||
@@ -500,10 +558,11 @@ public class CPEAnalyzer implements Analyzer {
|
||||
Confidence bestGuessConf = null;
|
||||
boolean hasBroadMatch = false;
|
||||
final List<IdentifierMatch> collected = new ArrayList<IdentifierMatch>();
|
||||
|
||||
//TODO the following algorithm incorrectly identifies things as a lower version
// if there is lower confidence evidence when the current (highest) version number
// is newer than anything in the NVD.
|
||||
for (Confidence conf : Confidence.values()) {
|
||||
// if (conf.compareTo(currentConfidence) > 0) {
|
||||
// break;
|
||||
// }
|
||||
for (Evidence evidence : dependency.getVersionEvidence().iterator(conf)) {
|
||||
final DependencyVersion evVer = DependencyVersionUtil.parseVersion(evidence.getValue());
|
||||
if (evVer == null) {
|
||||
@@ -511,8 +570,8 @@ public class CPEAnalyzer implements Analyzer {
|
||||
}
|
||||
for (VulnerableSoftware vs : cpes) {
|
||||
DependencyVersion dbVer;
|
||||
if (vs.getRevision() != null && !vs.getRevision().isEmpty()) {
|
||||
dbVer = DependencyVersionUtil.parseVersion(vs.getVersion() + "." + vs.getRevision());
|
||||
if (vs.getUpdate() != null && !vs.getUpdate().isEmpty()) {
|
||||
dbVer = DependencyVersionUtil.parseVersion(vs.getVersion() + '.' + vs.getUpdate());
|
||||
} else {
|
||||
dbVer = DependencyVersionUtil.parseVersion(vs.getVersion());
|
||||
}
|
||||
@@ -525,15 +584,14 @@ public class CPEAnalyzer implements Analyzer {
|
||||
final String url = String.format(NVD_SEARCH_URL, URLEncoder.encode(vs.getName(), "UTF-8"));
|
||||
final IdentifierMatch match = new IdentifierMatch("cpe", vs.getName(), url, IdentifierConfidence.EXACT_MATCH, conf);
|
||||
collected.add(match);
|
||||
} else {
|
||||
|
||||
//TODO the following isn't quite right is it? need to think about this guessing game a bit more.
|
||||
if (evVer.getVersionParts().size() <= dbVer.getVersionParts().size()
|
||||
&& evVer.matchesAtLeastThreeLevels(dbVer)) {
|
||||
if (bestGuessConf == null || bestGuessConf.compareTo(conf) > 0) {
|
||||
if (bestGuess.getVersionParts().size() < dbVer.getVersionParts().size()) {
|
||||
bestGuess = dbVer;
|
||||
bestGuessConf = conf;
|
||||
}
|
||||
} else if (evVer.getVersionParts().size() <= dbVer.getVersionParts().size()
|
||||
&& evVer.matchesAtLeastThreeLevels(dbVer)) {
|
||||
if (bestGuessConf == null || bestGuessConf.compareTo(conf) > 0) {
|
||||
if (bestGuess.getVersionParts().size() < dbVer.getVersionParts().size()) {
|
||||
bestGuess = dbVer;
|
||||
bestGuessConf = conf;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -578,6 +636,17 @@ public class CPEAnalyzer implements Analyzer {
|
||||
return identifierAdded;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_CPE_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* The confidence whether the identifier is an exact match, or a best guess.
|
||||
*/
|
||||
@@ -592,14 +661,16 @@ public class CPEAnalyzer implements Analyzer {
|
||||
*/
|
||||
BEST_GUESS,
|
||||
/**
|
||||
* The entire vendor/product group must be added (without a guess at version) because there is a CVE with a VS that only
|
||||
* specifies vendor/product.
|
||||
* The entire vendor/product group must be added (without a guess at
|
||||
* version) because there is a CVE with a VS that only specifies
|
||||
* vendor/product.
|
||||
*/
|
||||
BROAD_MATCH
|
||||
}
|
||||
|
||||
/**
|
||||
* A simple object to hold an identifier and carry information about the confidence in the identifier.
|
||||
* A simple object to hold an identifier and carry information about the
|
||||
* confidence in the identifier.
|
||||
*/
|
||||
private static class IdentifierMatch implements Comparable<IdentifierMatch> {
|
||||
|
||||
@@ -609,8 +680,10 @@ public class CPEAnalyzer implements Analyzer {
|
||||
* @param type the type of identifier (such as CPE)
|
||||
* @param value the value of the identifier
|
||||
* @param url the URL of the identifier
|
||||
* @param identifierConfidence the confidence in the identifier: best guess or exact match
|
||||
* @param evidenceConfidence the confidence of the evidence used to find the identifier
|
||||
* @param identifierConfidence the confidence in the identifier: best
|
||||
* guess or exact match
|
||||
* @param evidenceConfidence the confidence of the evidence used to find
|
||||
* the identifier
|
||||
*/
|
||||
IdentifierMatch(String type, String value, String url, IdentifierConfidence identifierConfidence, Confidence evidenceConfidence) {
|
||||
this.identifier = new Identifier(type, value, url);
|
||||
@@ -741,21 +814,19 @@ public class CPEAnalyzer implements Analyzer {
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Standard implementation of compareTo that compares identifier confidence, evidence confidence, and then the identifier.
|
||||
* Standard implementation of compareTo that compares identifier
|
||||
* confidence, evidence confidence, and then the identifier.
|
||||
*
|
||||
* @param o the IdentifierMatch to compare to
|
||||
* @return the natural ordering of IdentifierMatch
|
||||
*/
|
||||
@Override
|
||||
public int compareTo(IdentifierMatch o) {
|
||||
int conf = this.confidence.compareTo(o.confidence);
|
||||
if (conf == 0) {
|
||||
conf = this.evidenceConfidence.compareTo(o.evidenceConfidence);
|
||||
if (conf == 0) {
|
||||
conf = identifier.compareTo(o.identifier);
|
||||
}
|
||||
}
|
||||
return conf;
|
||||
return new CompareToBuilder()
|
||||
.append(confidence, o.confidence)
|
||||
.append(evidenceConfidence, o.evidenceConfidence)
|
||||
.append(identifier, o.identifier)
|
||||
.toComparison();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,14 +17,6 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.net.URL;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
@@ -34,14 +26,26 @@ import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Evidence;
|
||||
import org.owasp.dependencycheck.xml.pom.PomUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.List;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.DownloadFailedException;
|
||||
import org.owasp.dependencycheck.utils.Downloader;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
/**
|
||||
* Analyzer which will attempt to locate a dependency, and the GAV information, by querying Central for the dependency's SHA-1
|
||||
* digest.
|
||||
* Analyzer which will attempt to locate a dependency, and the GAV information,
|
||||
* by querying Central for the dependency's SHA-1 digest.
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@@ -50,7 +54,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(CentralAnalyzer.class.getName());
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(CentralAnalyzer.class);
|
||||
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
@@ -65,12 +69,13 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* The types of files on which this will work.
|
||||
*/
|
||||
private static final Set<String> SUPPORTED_EXTENSIONS = newHashSet("jar");
|
||||
private static final String SUPPORTED_EXTENSIONS = "jar";
|
||||
|
||||
/**
|
||||
* The analyzer should be disabled if there are errors, so this is a flag to determine if such an error has occurred.
|
||||
* The analyzer should be disabled if there are errors, so this is a flag to
|
||||
* determine if such an error has occurred.
|
||||
*/
|
||||
private boolean errorFlag = false;
|
||||
private volatile boolean errorFlag = false;
|
||||
|
||||
/**
|
||||
* The searcher itself.
|
||||
@@ -94,7 +99,8 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Determines if this analyzer is enabled.
|
||||
*
|
||||
* @return <code>true</code> if the analyzer is enabled; otherwise <code>false</code>
|
||||
* @return <code>true</code> if the analyzer is enabled; otherwise
|
||||
* <code>false</code>
|
||||
*/
|
||||
private boolean checkEnabled() {
|
||||
boolean retval = false;
|
||||
@@ -103,7 +109,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
if (Settings.getBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED)) {
|
||||
if (!Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)
|
||||
|| NexusAnalyzer.DEFAULT_URL.equals(Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL))) {
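// Central is preferred here because the Nexus analyzer is either disabled or still points at its default URL.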
LOGGER.fine("Enabling the Central analyzer");
|
||||
LOGGER.debug("Enabling the Central analyzer");
|
||||
retval = true;
|
||||
} else {
|
||||
LOGGER.info("Nexus analyzer is enabled, disabling the Central Analyzer");
|
||||
@@ -112,7 +118,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
LOGGER.info("Central analyzer disabled");
|
||||
}
|
||||
} catch (InvalidSettingException ise) {
|
||||
LOGGER.warning("Invalid setting. Disabling the Central analyzer");
|
||||
LOGGER.warn("Invalid setting. Disabling the Central analyzer");
|
||||
}
|
||||
return retval;
|
||||
}
|
||||
@@ -120,16 +126,21 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Initializes the analyzer once before any analysis is performed.
|
||||
*
|
||||
* @throws Exception if there's an error during initialization
|
||||
* @throws InitializationException if there's an error during initialization
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws Exception {
|
||||
LOGGER.fine("Initializing Central analyzer");
|
||||
LOGGER.fine(String.format("Central analyzer enabled: %s", isEnabled()));
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
LOGGER.debug("Initializing Central analyzer");
|
||||
LOGGER.debug("Central analyzer enabled: {}", isEnabled());
|
||||
if (isEnabled()) {
|
||||
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_CENTRAL_URL);
|
||||
LOGGER.fine(String.format("Central Analyzer URL: %s", searchUrl));
|
||||
searcher = new CentralSearch(new URL(searchUrl));
|
||||
LOGGER.debug("Central Analyzer URL: {}", searchUrl);
|
||||
try {
|
||||
searcher = new CentralSearch(new URL(searchUrl));
|
||||
} catch (MalformedURLException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("The configured URL to Maven Central is malformed: " + searchUrl, ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -144,7 +155,8 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to to reference the analyzer's enabled property.
|
||||
* Returns the key used in the properties file to reference the analyzer's enabled property.
* Returns the key used in the properties file to reference the
* analyzer's enabled property.
|
||||
* @return the analyzer's enabled property setting key.
|
||||
*/
|
||||
@@ -164,13 +176,13 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the extensions for which this Analyzer runs.
|
||||
*
|
||||
* @return the extensions for which this Analyzer runs
|
||||
* The file filter used to determine which files this analyzer supports.
|
||||
*/
|
||||
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(SUPPORTED_EXTENSIONS).build();
|
||||
|
||||
@Override
|
||||
public Set<String> getSupportedExtensions() {
|
||||
return SUPPORTED_EXTENSIONS;
|
||||
protected FileFilter getFileFilter() {
|
||||
return FILTER;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -181,7 +193,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException when there's an exception during analysis
|
||||
*/
|
||||
@Override
|
||||
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
if (errorFlag || !isEnabled()) {
|
||||
return;
|
||||
}
|
||||
@@ -190,7 +202,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final List<MavenArtifact> mas = searcher.searchSha1(dependency.getSha1sum());
|
||||
final Confidence confidence = mas.size() > 1 ? Confidence.HIGH : Confidence.HIGHEST;
|
||||
for (MavenArtifact ma : mas) {
|
||||
LOGGER.fine(String.format("Central analyzer found artifact (%s) for dependency (%s)", ma.toString(), dependency.getFileName()));
|
||||
LOGGER.debug("Central analyzer found artifact ({}) for dependency ({})", ma, dependency.getFileName());
|
||||
dependency.addAsEvidence("central", ma, confidence);
|
||||
boolean pomAnalyzed = false;
|
||||
for (Evidence e : dependency.getVendorEvidence()) {
|
||||
@@ -205,21 +217,20 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final File baseDir = Settings.getTempDirectory();
|
||||
pomFile = File.createTempFile("pom", ".xml", baseDir);
|
||||
if (!pomFile.delete()) {
|
||||
final String msg = String.format("Unable to fetch pom.xml for %s from Central; "
|
||||
LOGGER.warn("Unable to fetch pom.xml for {} from Central; "
|
||||
+ "this could result in undetected CPE/CVEs.", dependency.getFileName());
|
||||
LOGGER.warning(msg);
|
||||
LOGGER.fine("Unable to delete temp file");
|
||||
LOGGER.debug("Unable to delete temp file");
|
||||
}
|
||||
LOGGER.fine(String.format("Downloading %s", ma.getPomUrl()));
|
||||
LOGGER.debug("Downloading {}", ma.getPomUrl());
|
||||
Downloader.fetchFile(new URL(ma.getPomUrl()), pomFile);
|
||||
PomUtils.analyzePOM(dependency, pomFile);
|
||||
|
||||
} catch (DownloadFailedException ex) {
|
||||
final String msg = String.format("Unable to download pom.xml for %s from Central; "
|
||||
LOGGER.warn("Unable to download pom.xml for {} from Central; "
|
||||
+ "this could result in undetected CPE/CVEs.", dependency.getFileName());
|
||||
LOGGER.warning(msg);
|
||||
} finally {
|
||||
if (pomFile != null && !FileUtils.deleteQuietly(pomFile)) {
|
||||
if (pomFile != null && pomFile.exists() && !FileUtils.deleteQuietly(pomFile)) {
|
||||
LOGGER.debug("Failed to delete temporary pom file {}", pomFile.toString());
|
||||
pomFile.deleteOnExit();
|
||||
}
|
||||
}
|
||||
@@ -227,13 +238,12 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
}
|
||||
} catch (IllegalArgumentException iae) {
|
||||
LOGGER.info(String.format("invalid sha1-hash on %s", dependency.getFileName()));
|
||||
LOGGER.info("invalid sha1-hash on {}", dependency.getFileName());
|
||||
} catch (FileNotFoundException fnfe) {
|
||||
LOGGER.fine(String.format("Artifact not found in repository: '%s", dependency.getFileName()));
|
||||
LOGGER.debug("Artifact not found in repository: '{}", dependency.getFileName());
|
||||
} catch (IOException ioe) {
|
||||
LOGGER.log(Level.FINE, "Could not connect to Central search", ioe);
|
||||
LOGGER.debug("Could not connect to Central search", ioe);
|
||||
errorFlag = true;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,205 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2016 IBM Corporation. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
/**
|
||||
* This analyzer is used to analyze Swift and Objective-C packages by collecting
* information from .podspec files used by the CocoaPods dependency manager; see
* https://cocoapods.org/.
|
||||
*
|
||||
* @author Bianca Jiang (https://twitter.com/biancajiang)
|
||||
*/
|
||||
@Experimental
|
||||
public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
// private static final Logger LOGGER = LoggerFactory.getLogger(CocoaPodsAnalyzer.class);
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "CocoaPods Package Analyzer";
|
||||
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
|
||||
|
||||
/**
|
||||
* The file name to scan.
|
||||
*/
|
||||
public static final String PODSPEC = "podspec";
|
||||
/**
|
||||
* Filter that detects files named "*.podspec".
|
||||
*/
|
||||
private static final FileFilter PODSPEC_FILTER = FileFilterBuilder.newInstance().addExtensions(PODSPEC).build();
|
||||
|
||||
/**
|
||||
* The capture group #1 is the block variable. e.g. "Pod::Spec.new do
|
||||
* |spec|"
|
||||
*/
|
||||
private static final Pattern PODSPEC_BLOCK_PATTERN = Pattern.compile("Pod::Spec\\.new\\s+?do\\s+?\\|(.+?)\\|");
|
||||
|
||||
/**
|
||||
* Returns the FileFilter
|
||||
*
|
||||
* @return the FileFilter
|
||||
*/
|
||||
@Override
|
||||
protected FileFilter getFileFilter() {
|
||||
return PODSPEC_FILTER;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() {
|
||||
// NO-OP
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phase that the analyzer is intended to run in.
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's
|
||||
* enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_COCOAPODS_ENABLED;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
|
||||
String contents;
|
||||
try {
|
||||
contents = FileUtils.readFileToString(dependency.getActualFile(), Charset.defaultCharset());
|
||||
} catch (IOException e) {
|
||||
throw new AnalysisException(
|
||||
"Problem occurred while reading dependency file.", e);
|
||||
}
|
||||
final Matcher matcher = PODSPEC_BLOCK_PATTERN.matcher(contents);
|
||||
if (matcher.find()) {
|
||||
contents = contents.substring(matcher.end());
|
||||
final String blockVariable = matcher.group(1);
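// e.g. "spec" from "Pod::Spec.new do |spec|"; the field lookups below are prefixed with it.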
|
||||
final EvidenceCollection vendor = dependency.getVendorEvidence();
|
||||
final EvidenceCollection product = dependency.getProductEvidence();
|
||||
final EvidenceCollection version = dependency.getVersionEvidence();
|
||||
|
||||
final String name = addStringEvidence(product, contents, blockVariable, "name", "name", Confidence.HIGHEST);
|
||||
if (!name.isEmpty()) {
|
||||
vendor.addEvidence(PODSPEC, "name_project", name, Confidence.HIGHEST);
|
||||
}
|
||||
addStringEvidence(product, contents, blockVariable, "summary", "summary", Confidence.HIGHEST);
|
||||
|
||||
addStringEvidence(vendor, contents, blockVariable, "author", "authors?", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "homepage", "homepage", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "license", "licen[cs]es?", Confidence.HIGHEST);
|
||||
|
||||
addStringEvidence(version, contents, blockVariable, "version", "version", Confidence.HIGHEST);
|
||||
}
|
||||
|
||||
setPackagePath(dependency);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts evidence from the contents and adds it to the given evidence
|
||||
* collection.
|
||||
*
|
||||
* @param evidences the evidence collection to update
|
||||
* @param contents the text to extract evidence from
|
||||
* @param blockVariable the block variable within the content to search for
|
||||
* @param field the name of the field being searched for
|
||||
* @param fieldPattern the field pattern within the contents to search for
|
||||
* @param confidence the confidence level of the evidence if found
|
||||
* @return the string that was added as evidence
|
||||
*/
|
||||
private String addStringEvidence(EvidenceCollection evidences, String contents,
|
||||
String blockVariable, String field, String fieldPattern, Confidence confidence) {
|
||||
String value = "";
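// Illustrative (hypothetical) podspec lines handled below:
//   spec.name    = 'AFNetworking'             -> single value captured between quotes
//   spec.authors = { 'Jane Doe' => 'j@x.io' } -> value captured between { }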
|
||||
//capture a multi-value field between { } (e.g. an authors hash)
|
||||
final Matcher arrayMatcher = Pattern.compile(
|
||||
String.format("\\s*?%s\\.%s\\s*?=\\s*?\\{\\s*?(.*?)\\s*?\\}", blockVariable, fieldPattern),
|
||||
Pattern.CASE_INSENSITIVE).matcher(contents);
|
||||
if (arrayMatcher.find()) {
|
||||
value = arrayMatcher.group(1);
|
||||
} else { //capture single value between quotes
|
||||
final Matcher matcher = Pattern.compile(
|
||||
String.format("\\s*?%s\\.%s\\s*?=\\s*?(['\"])(.*?)\\1", blockVariable, fieldPattern),
|
||||
Pattern.CASE_INSENSITIVE).matcher(contents);
|
||||
if (matcher.find()) {
|
||||
value = matcher.group(2);
|
||||
}
|
||||
}
|
||||
if (value.length() > 0) {
|
||||
evidences.addEvidence(PODSPEC, field, value, confidence);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the package path on the given dependency.
|
||||
*
|
||||
* @param dep the dependency to update
|
||||
*/
|
||||
private void setPackagePath(Dependency dep) {
|
||||
final File file = new File(dep.getFilePath());
|
||||
final String parent = file.getParent();
|
||||
if (parent != null) {
|
||||
dep.setPackagePath(parent);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,181 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.data.composer.ComposerDependency;
|
||||
import org.owasp.dependencycheck.data.composer.ComposerException;
|
||||
import org.owasp.dependencycheck.data.composer.ComposerLockParser;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.Checksum;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.FileFilter;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
|
||||
/**
|
||||
* Used to analyze a composer.lock file for a composer PHP app.
|
||||
*
|
||||
* @author colezlaw
|
||||
*/
|
||||
@Experimental
|
||||
public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(ComposerLockAnalyzer.class);
|
||||
|
||||
/**
|
||||
* The analyzer name.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "Composer.lock analyzer";
|
||||
|
||||
/**
|
||||
* The composer.lock file name.
|
||||
*/
|
||||
private static final String COMPOSER_LOCK = "composer.lock";
|
||||
|
||||
/**
|
||||
* The FileFilter.
|
||||
*/
|
||||
private static final FileFilter FILE_FILTER = FileFilterBuilder.newInstance().addFilenames(COMPOSER_LOCK).build();
|
||||
|
||||
/**
|
||||
* Returns the FileFilter.
|
||||
*
|
||||
* @return the FileFilter
|
||||
*/
|
||||
@Override
|
||||
protected FileFilter getFileFilter() {
|
||||
return FILE_FILTER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the analyzer.
|
||||
*
|
||||
* @throws InitializationException thrown if an exception occurs getting an
|
||||
* instance of SHA1
|
||||
*/
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
getSha1MessageDigest();
|
||||
} catch (IllegalStateException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Entry point for the analyzer.
|
||||
*
|
||||
* @param dependency the dependency to analyze
|
||||
* @param engine the engine scanning
|
||||
* @throws AnalysisException if there's a failure during analysis
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
FileInputStream fis = null;
|
||||
try {
|
||||
fis = new FileInputStream(dependency.getActualFile());
|
||||
final ComposerLockParser clp = new ComposerLockParser(fis);
|
||||
LOGGER.info("Checking composer.lock file {}", dependency.getActualFilePath());
|
||||
clp.process();
|
||||
for (ComposerDependency dep : clp.getDependencies()) {
|
||||
final Dependency d = new Dependency(dependency.getActualFile());
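// One virtual dependency is created per locked package, e.g. display name
// "composer.lock:symfony/console" (illustrative example).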
d.setDisplayFileName(String.format("%s:%s/%s", dependency.getDisplayFileName(), dep.getGroup(), dep.getProject()));
|
||||
final String filePath = String.format("%s:%s/%s", dependency.getFilePath(), dep.getGroup(), dep.getProject());
|
||||
final MessageDigest sha1 = getSha1MessageDigest();
|
||||
d.setFilePath(filePath);
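// A deterministic SHA-1 of the synthetic path identifies this virtual dependency.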
d.setSha1sum(Checksum.getHex(sha1.digest(filePath.getBytes(Charset.defaultCharset()))));
|
||||
d.getVendorEvidence().addEvidence(COMPOSER_LOCK, "vendor", dep.getGroup(), Confidence.HIGHEST);
|
||||
d.getProductEvidence().addEvidence(COMPOSER_LOCK, "product", dep.getProject(), Confidence.HIGHEST);
|
||||
d.getVersionEvidence().addEvidence(COMPOSER_LOCK, "version", dep.getVersion(), Confidence.HIGHEST);
|
||||
LOGGER.info("Adding dependency {}", d);
|
||||
engine.getDependencies().add(d);
|
||||
}
|
||||
} catch (FileNotFoundException fnfe) {
|
||||
LOGGER.warn("Error opening dependency {}", dependency.getActualFilePath());
|
||||
} catch (ComposerException ce) {
|
||||
LOGGER.warn("Error parsing composer.json {}", dependency.getActualFilePath(), ce);
|
||||
} finally {
|
||||
if (fis != null) {
|
||||
try {
|
||||
fis.close();
|
||||
} catch (Exception e) {
|
||||
LOGGER.debug("Unable to close file", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the key to determine whether the analyzer is enabled.
|
||||
*
|
||||
* @return the key specifying whether the analyzer is enabled
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the analyzer's name.
|
||||
*
|
||||
* @return the analyzer's name
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phase this analyzer should run under.
|
||||
*
|
||||
* @return the analysis phase
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return AnalysisPhase.INFORMATION_COLLECTION;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the sha1 message digest.
|
||||
*
|
||||
* @return the sha1 message digest
|
||||
*/
|
||||
private MessageDigest getSha1MessageDigest() {
|
||||
try {
|
||||
return MessageDigest.getInstance("SHA1");
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
LOGGER.error(e.getMessage());
|
||||
throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
|
||||
}
|
||||
}
|
||||
}
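
The analyzer above gives every package listed in a composer.lock a virtual file path of the form `<lockfile path>:<group>/<project>` and uses the SHA-1 of that string as the dependency's checksum, so each entry gets a stable, distinct identity. A minimal, self-contained sketch of that technique follows; the class name and sample path are invented, and the real code uses the project's Checksum helper rather than hand-rolled hex conversion.

    import java.nio.charset.Charset;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    // Illustrative only: derives a pseudo SHA-1 checksum from a virtual file path.
    public final class VirtualPathChecksumDemo {
        public static void main(String[] args) throws NoSuchAlgorithmException {
            final String virtualPath = "/scan/composer.lock:symfony/console";
            final MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
            final byte[] digest = sha1.digest(virtualPath.getBytes(Charset.defaultCharset()));
            final StringBuilder hex = new StringBuilder();
            for (byte b : digest) {
                hex.append(String.format("%02x", b));
            }
            // Two composer.lock entries share a checksum only if their virtual paths are identical.
            System.out.println(virtualPath + " -> " + hex);
        }
    }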
@@ -20,7 +20,8 @@ package org.owasp.dependencycheck.analyzer;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.suppression.SuppressionRule;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.xml.suppression.SuppressionRule;
|
||||
|
||||
/**
|
||||
* The suppression analyzer processes an externally defined XML document that complies with the suppressions.xsd schema.
|
||||
@@ -62,7 +63,7 @@ public class CpeSuppressionAnalyzer extends AbstractSuppressionAnalyzer {
|
||||
//</editor-fold>
|
||||
|
||||
@Override
|
||||
public void analyze(final Dependency dependency, final Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
|
||||
if (getRules() == null || getRules().size() <= 0) {
|
||||
return;
|
||||
@@ -72,4 +73,15 @@ public class CpeSuppressionAnalyzer extends AbstractSuppressionAnalyzer {
|
||||
rule.process(dependency);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_CPE_SUPPRESSION_ENABLED;
|
||||
}
|
||||
}
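
The suppression analyzer above short-circuits when no rules are loaded and otherwise lets every rule inspect the dependency. A stripped-down sketch of that pass, where Rule and Dep are stand-ins for the project's SuppressionRule and Dependency types:

    import java.util.List;

    // Minimal sketch of a suppression pass: each rule may strip matching
    // identifiers or vulnerabilities from the dependency it is given.
    final class SuppressionPassDemo {

        interface Rule {
            void process(Dep dependency);
        }

        static final class Dep {
        }

        static void apply(List<Rule> rules, Dep dependency) {
            if (rules == null || rules.isEmpty()) {
                return; // nothing to suppress
            }
            for (Rule rule : rules) {
                rule.process(dependency);
            }
        }
    }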
|
||||
@@ -22,8 +22,6 @@ import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.ListIterator;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
@@ -32,34 +30,52 @@ import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersion;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
|
||||
import org.owasp.dependencycheck.utils.LogUtils;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* This analyzer ensures dependencies that should be grouped together, to remove excess noise from the report, are grouped. An
|
||||
* example would be Spring, Spring Beans, Spring MVC, etc. If they are all for the same version and have the same relative path
|
||||
* then these should be grouped into a single dependency under the core/main library.</p>
|
||||
* This analyzer ensures dependencies that should be grouped together, to remove
|
||||
* excess noise from the report, are grouped. An example would be Spring, Spring
|
||||
* Beans, Spring MVC, etc. If they are all for the same version and have the
|
||||
* same relative path then these should be grouped into a single dependency
|
||||
* under the core/main library.</p>
|
||||
* <p>
|
||||
* Note, this grouping only works on dependencies with identified CVE entries</p>
|
||||
* Note, this grouping only works on dependencies with identified CVE
|
||||
* entries</p>
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(DependencyBundlingAnalyzer.class.getName());
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DependencyBundlingAnalyzer.class);
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Constants and Member Variables">
|
||||
/**
|
||||
* A pattern for obtaining the first part of a filename.
|
||||
*/
|
||||
private static final Pattern STARTING_TEXT_PATTERN = Pattern.compile("^[a-zA-Z0-9]*");
|
||||
|
||||
/**
|
||||
* a flag indicating if this analyzer has run. This analyzer only runs once.
|
||||
*/
|
||||
private boolean analyzed = false;
|
||||
|
||||
/**
|
||||
* Returns a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once. Note this is currently only used in the unit tests.
|
||||
*
|
||||
* @return a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once
|
||||
*/
|
||||
protected boolean getAnalyzed() {
|
||||
return analyzed;
|
||||
}
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
@@ -69,13 +85,14 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.PRE_FINDING_ANALYSIS;
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.FINAL;
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
@@ -85,21 +102,47 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Analyzes a set of dependencies. If they have been found to have the same base path and the same set of identifiers they are
|
||||
* likely related. The related dependencies are bundled into a single reportable item.
|
||||
* Does not support parallel processing as it only runs once and then
|
||||
* operates on <em>all</em> dependencies.
|
||||
*
|
||||
* @return whether or not parallel processing is enabled
|
||||
* @see #analyze(Dependency, Engine)
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_DEPENDENCY_BUNDLING_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a set of dependencies. If they have been found to have the same
|
||||
* base path and the same set of identifiers they are likely related. The
|
||||
* related dependencies are bundled into a single reportable item.
|
||||
*
|
||||
* @param ignore this analyzer ignores the dependency being analyzed
|
||||
* @param engine the engine that is scanning the dependencies
|
||||
* @throws AnalysisException is thrown if there is an error reading the JAR file.
|
||||
* @throws AnalysisException is thrown if there is an error reading the JAR
|
||||
* file.
|
||||
*/
|
||||
@Override
|
||||
public void analyze(Dependency ignore, Engine engine) throws AnalysisException {
|
||||
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
|
||||
if (!analyzed) {
|
||||
analyzed = true;
|
||||
final Set<Dependency> dependenciesToRemove = new HashSet<Dependency>();
|
||||
@@ -111,7 +154,8 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
|
||||
while (subIterator.hasNext()) {
|
||||
final Dependency nextDependency = subIterator.next();
|
||||
if (hashesMatch(dependency, nextDependency)) {
|
||||
if (hashesMatch(dependency, nextDependency) && !containedInWar(dependency.getFilePath())
|
||||
&& !containedInWar(nextDependency.getFilePath())) {
|
||||
if (firstPathIsShortest(dependency.getFilePath(), nextDependency.getFilePath())) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
@@ -125,10 +169,11 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
break;
|
||||
} else {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
nextDependency.getRelatedDependencies().remove(nextDependency);
|
||||
dependency.getRelatedDependencies().remove(nextDependency);
|
||||
}
|
||||
} else if (cpeIdentifiersMatch(dependency, nextDependency)
|
||||
&& hasSameBasePath(dependency, nextDependency)
|
||||
&& vulnCountMatches(dependency, nextDependency)
|
||||
&& fileNameMatch(dependency, nextDependency)) {
|
||||
if (isCore(dependency, nextDependency)) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
@@ -150,10 +195,11 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
* Adds the relatedDependency to the dependency's related dependencies.
|
||||
*
|
||||
* @param dependency the main dependency
|
||||
* @param relatedDependency a collection of dependencies to be removed from the main analysis loop, this is the source of
|
||||
* dependencies to remove
|
||||
* @param dependenciesToRemove a collection of dependencies that will be removed from the main analysis loop, this function
|
||||
* adds to this collection
|
||||
* @param relatedDependency the dependency to merge into the main dependency;
* it will be removed from the main analysis loop
|
||||
* @param dependenciesToRemove a collection of dependencies that will be
|
||||
* removed from the main analysis loop, this function adds to this
|
||||
* collection
|
||||
*/
|
||||
private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
|
||||
dependency.addRelatedDependency(relatedDependency);
|
||||
@@ -169,13 +215,19 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempts to trim a maven repo to a common base path. This is typically [drive]\[repo_location]\repository\[path1]\[path2].
|
||||
* Attempts to trim a maven repo to a common base path. This is typically
|
||||
* [drive]\[repo_location]\repository\[path1]\[path2].
|
||||
*
|
||||
* @param path the path to trim
|
||||
* @return a string representing the base path.
|
||||
*/
|
||||
private String getBaseRepoPath(final String path) {
|
||||
int pos = path.indexOf("repository" + File.separator) + 11;
|
||||
int pos;
|
||||
if (path.contains("local-repo")) {
|
||||
pos = path.indexOf("local-repo" + File.separator) + 11;
|
||||
} else {
|
||||
pos = path.indexOf("repository" + File.separator) + 11;
|
||||
}
|
||||
if (pos < 0) {
|
||||
return path;
|
||||
}
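
Per the javadoc above, the goal is to reduce an artifact path to `.../repository/[path1]/[path2]` (or the equivalent under `local-repo`) so that artifacts from the same group share a base path. The hunk is truncated here, so the following is only an approximation of the documented behaviour, not the project's implementation; the sample path is invented.

    // Approximation of the documented trimming: keep ".../repository/<path1>/<path2>/".
    public final class BaseRepoPathDemo {

        static String baseRepoPath(String path) {
            final String marker = "repository/";
            final int start = path.indexOf(marker);
            if (start < 0) {
                return path;
            }
            int end = start + marker.length();
            for (int i = 0; i < 2; i++) {
                final int next = path.indexOf('/', end);
                if (next < 0) {
                    return path;
                }
                end = next + 1;
            }
            return path.substring(0, end);
        }

        public static void main(String[] args) {
            System.out.println(baseRepoPath(
                    "/home/user/.m2/repository/org/springframework/spring-core/4.1.0/spring-core-4.1.0.jar"));
            // -> /home/user/.m2/repository/org/springframework/
        }
    }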
@@ -194,11 +246,13 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the file names (and version if it exists) of the two dependencies are sufficiently similar.
|
||||
* Returns true if the file names (and version if it exists) of the two
|
||||
* dependencies are sufficiently similar.
|
||||
*
|
||||
* @param dependency1 a dependency to compare
* @param dependency2 a dependency to compare
|
||||
* @return true if the identifiers in the two supplied dependencies are equal
|
||||
* @return true if the identifiers in the two supplied dependencies are
|
||||
* equal
|
||||
*/
|
||||
private boolean fileNameMatch(Dependency dependency1, Dependency dependency2) {
|
||||
if (dependency1 == null || dependency1.getFileName() == null
|
||||
@@ -211,10 +265,8 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
//version check
|
||||
final DependencyVersion version1 = DependencyVersionUtil.parseVersion(fileName1);
|
||||
final DependencyVersion version2 = DependencyVersionUtil.parseVersion(fileName2);
|
||||
if (version1 != null && version2 != null) {
|
||||
if (!version1.equals(version2)) {
|
||||
return false;
|
||||
}
|
||||
if (version1 != null && version2 != null && !version1.equals(version2)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
//filename check
|
||||
@@ -228,11 +280,13 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the CPE identifiers in the two supplied dependencies are equal.
|
||||
* Returns true if the CPE identifiers in the two supplied dependencies are
|
||||
* equal.
|
||||
*
|
||||
* @param dependency1 a dependency to compare
* @param dependency2 a dependency to compare
|
||||
* @return true if the identifiers in the two supplied dependencies are equal
|
||||
* @return true if the identifiers in the two supplied dependencies are
|
||||
* equal
|
||||
*/
|
||||
private boolean cpeIdentifiersMatch(Dependency dependency1, Dependency dependency2) {
|
||||
if (dependency1 == null || dependency1.getIdentifiers() == null
|
||||
@@ -262,13 +316,23 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
}
|
||||
}
|
||||
}
|
||||
if (LogUtils.isVerboseLoggingEnabled()) {
|
||||
final String msg = String.format("IdentifiersMatch=%s (%s, %s)", matches, dependency1.getFileName(), dependency2.getFileName());
|
||||
LOGGER.log(Level.FINE, msg);
|
||||
}
|
||||
LOGGER.debug("IdentifiersMatch={} ({}, {})", matches, dependency1.getFileName(), dependency2.getFileName());
|
||||
return matches;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the two dependencies have the same vulnerability count.
|
||||
*
|
||||
* @param dependency1 a dependency to compare
* @param dependency2 a dependency to compare
|
||||
* @return true if the two dependencies have the same vulnerability count
|
||||
*/
|
||||
private boolean vulnCountMatches(Dependency dependency1, Dependency dependency2) {
|
||||
return dependency1.getVulnerabilities() != null && dependency2.getVulnerabilities() != null
|
||||
&& dependency1.getVulnerabilities().size() == dependency2.getVulnerabilities().size();
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if the two dependencies have the same base path.
|
||||
*
|
||||
@@ -286,11 +350,14 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
String right = rFile.getParent();
|
||||
if (left == null) {
|
||||
return right == null;
|
||||
} else if (right == null) {
|
||||
return false;
|
||||
}
|
||||
if (left.equalsIgnoreCase(right)) {
|
||||
return true;
|
||||
}
|
||||
if (left.matches(".*[/\\\\]repository[/\\\\].*") && right.matches(".*[/\\\\]repository[/\\\\].*")) {
|
||||
|
||||
if (left.matches(".*[/\\\\](repository|local-repo)[/\\\\].*") && right.matches(".*[/\\\\](repository|local-repo)[/\\\\].*")) {
|
||||
left = getBaseRepoPath(left);
|
||||
right = getBaseRepoPath(right);
|
||||
}
|
||||
@@ -307,12 +374,13 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
}
|
||||
|
||||
/**
|
||||
* This is likely a very broken attempt at determining if the 'left' dependency is the 'core' library in comparison to the
|
||||
* 'right' library.
|
||||
* This is likely a very broken attempt at determining if the 'left'
|
||||
* dependency is the 'core' library in comparison to the 'right' library.
|
||||
*
|
||||
* @param left the dependency to test
|
||||
* @param right the dependency to test against
|
||||
* @return a boolean indicating whether or not the left dependency should be considered the "core" version.
|
||||
* @return a boolean indicating whether or not the left dependency should be
|
||||
* considered the "core" version.
|
||||
*/
|
||||
boolean isCore(Dependency left, Dependency right) {
|
||||
final String leftName = left.getFileName().toLowerCase();
|
||||
@@ -327,10 +395,6 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
|| !rightName.contains("core") && leftName.contains("core")
|
||||
|| !rightName.contains("kernel") && leftName.contains("kernel")) {
|
||||
returnVal = true;
|
||||
// } else if (leftName.matches(".*struts2\\-core.*") && rightName.matches(".*xwork\\-core.*")) {
|
||||
// returnVal = true;
|
||||
// } else if (rightName.matches(".*struts2\\-core.*") && leftName.matches(".*xwork\\-core.*")) {
|
||||
// returnVal = false;
|
||||
} else {
|
||||
/*
|
||||
* considered splitting the names up and comparing the components,
|
||||
@@ -343,19 +407,18 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
*/
|
||||
returnVal = leftName.length() <= rightName.length();
|
||||
}
|
||||
if (LogUtils.isVerboseLoggingEnabled()) {
|
||||
final String msg = String.format("IsCore=%s (%s, %s)", returnVal, left.getFileName(), right.getFileName());
|
||||
LOGGER.log(Level.FINE, msg);
|
||||
}
|
||||
LOGGER.debug("IsCore={} ({}, {})", returnVal, left.getFileName(), right.getFileName());
|
||||
return returnVal;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compares the SHA1 hashes of two dependencies to determine if they are equal.
|
||||
* Compares the SHA1 hashes of two dependencies to determine if they are
|
||||
* equal.
|
||||
*
|
||||
* @param dependency1 a dependency object to compare
|
||||
* @param dependency2 a dependency object to compare
|
||||
* @return true if the sha1 hashes of the two dependencies match; otherwise false
|
||||
* @return true if the sha1 hashes of the two dependencies match; otherwise
|
||||
* false
|
||||
*/
|
||||
private boolean hashesMatch(Dependency dependency1, Dependency dependency2) {
|
||||
if (dependency1 == null || dependency2 == null || dependency1.getSha1sum() == null || dependency2.getSha1sum() == null) {
|
||||
@@ -365,12 +428,13 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if the jar is shaded and the created pom.xml identified the same CPE as the jar - if so, the pom.xml dependency
|
||||
* should be removed.
|
||||
* Determines if the jar is shaded and the created pom.xml identified the
|
||||
* same CPE as the jar - if so, the pom.xml dependency should be removed.
|
||||
*
|
||||
* @param dependency a dependency to check
|
||||
* @param nextDependency another dependency to check
|
||||
* @return true if on of the dependencies is a pom.xml and the identifiers between the two collections match; otherwise false
|
||||
* @return true if one of the dependencies is a pom.xml and the identifiers
* between the two collections match; otherwise false
|
||||
*/
|
||||
private boolean isShadedJar(Dependency dependency, Dependency nextDependency) {
|
||||
final String mainName = dependency.getFileName().toLowerCase();
|
||||
@@ -384,14 +448,18 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines which path is shortest; if path lengths are equal then we use compareTo of the string method to determine if the
|
||||
* first path is smaller.
|
||||
* Determines which path is shortest; if path lengths are equal then we use
|
||||
* compareTo of the string method to determine if the first path is smaller.
|
||||
*
|
||||
* @param left the first path to compare
|
||||
* @param right the second path to compare
|
||||
* @return <code>true</code> if the leftPath is the shortest; otherwise <code>false</code>
|
||||
* @return <code>true</code> if the leftPath is the shortest; otherwise
|
||||
* <code>false</code>
|
||||
*/
|
||||
protected boolean firstPathIsShortest(String left, String right) {
|
||||
if (left.contains("dctemp")) {
|
||||
return false;
|
||||
}
|
||||
final String leftPath = left.replace('\\', '/');
|
||||
final String rightPath = right.replace('\\', '/');
|
||||
|
||||
@@ -421,4 +489,15 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|
||||
}
|
||||
return count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the given file path is contained within a war or ear file.
|
||||
*
|
||||
* @param filePath the file path to check
|
||||
* @return true if the path contains '.war\' or '.ear\'.
|
||||
*/
|
||||
private boolean containedInWar(String filePath) {
|
||||
return filePath == null ? false : filePath.matches(".*\\.(ear|war)[\\\\/].*");
|
||||
}
|
||||
|
||||
}
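
The new containedInWar check keys off the path alone: anything that sits inside an exploded .war or .ear is excluded from hash-based merging so the archive's contents stay attributed to their parent. A quick standalone illustration of the regular expression; the paths are made up.

    import java.util.Arrays;

    // Illustrative check of the ".*\.(ear|war)[\\/].*" pattern used above.
    public final class ContainedInWarDemo {

        private static boolean containedInWar(String filePath) {
            return filePath != null && filePath.matches(".*\\.(ear|war)[\\\\/].*");
        }

        public static void main(String[] args) {
            for (String path : Arrays.asList(
                    "/tmp/app.war/WEB-INF/lib/spring-core.jar",    // true: inside a war
                    "C:\\builds\\bundle.ear\\lib\\commons-io.jar", // true: inside an ear
                    "/repo/com/example/example-1.0.jar")) {        // false: plain jar
                System.out.println(containedInWar(path) + " <- " + path);
            }
        }
    }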
|
||||
@@ -0,0 +1,283 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.ListIterator;
|
||||
import java.util.Set;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* This analyzer will merge dependencies, created from different sources, into a
* single dependency.</p>
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class DependencyMergingAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Constants and Member Variables">
|
||||
/**
|
||||
* The Logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DependencyMergingAnalyzer.class);
|
||||
/**
|
||||
* a flag indicating if this analyzer has run. This analyzer only runs once.
|
||||
*/
|
||||
private boolean analyzed = false;
|
||||
|
||||
/**
|
||||
* Returns a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once. Note this is currently only used in the unit tests.
|
||||
*
|
||||
* @return a flag indicating if this analyzer has run. This analyzer only
|
||||
* runs once
|
||||
*/
|
||||
protected boolean getAnalyzed() {
|
||||
return analyzed;
|
||||
}
|
||||
|
||||
//</editor-fold>
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "Dependency Merging Analyzer";
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.POST_INFORMATION_COLLECTION;
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phase that the analyzer is intended to run in.
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Does not support parallel processing as it only runs once and then
|
||||
* operates on <em>all</em> dependencies.
|
||||
*
|
||||
* @return whether or not parallel processing is enabled
|
||||
* @see #analyze(Dependency, Engine)
|
||||
*/
|
||||
@Override
|
||||
public boolean supportsParallelProcessing() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_DEPENDENCY_MERGING_ENABLED;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Analyzes a set of dependencies. If they have been found to be the same
* dependency created by multiple FileTypeAnalyzers (i.e. a gemspec
* dependency and a dependency from the Bundle Audit Analyzer), the
* dependencies are then merged into a single reportable item.
|
||||
*
|
||||
* @param ignore this analyzer ignores the dependency being analyzed
|
||||
* @param engine the engine that is scanning the dependencies
|
||||
* @throws AnalysisException is thrown if there is an error reading the JAR
|
||||
* file.
|
||||
*/
|
||||
@Override
|
||||
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
|
||||
if (!analyzed) {
|
||||
analyzed = true;
|
||||
final Set<Dependency> dependenciesToRemove = new HashSet<Dependency>();
|
||||
final ListIterator<Dependency> mainIterator = engine.getDependencies().listIterator();
|
||||
//for (Dependency nextDependency : engine.getDependencies()) {
|
||||
while (mainIterator.hasNext()) {
|
||||
final Dependency dependency = mainIterator.next();
|
||||
if (mainIterator.hasNext() && !dependenciesToRemove.contains(dependency)) {
|
||||
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
|
||||
while (subIterator.hasNext()) {
|
||||
final Dependency nextDependency = subIterator.next();
|
||||
Dependency main = null;
|
||||
if ((main = getMainGemspecDependency(dependency, nextDependency)) != null) {
|
||||
if (main == dependency) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
break; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
} else if ((main = getMainSwiftDependency(dependency, nextDependency)) != null) {
|
||||
if (main == dependency) {
|
||||
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
|
||||
} else {
|
||||
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
|
||||
break; //since we merged into the next dependency - skip forward to the next in mainIterator
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
//removing dependencies here as ensuring correctness and avoiding ConcurrentModificationExceptions
// was difficult because of the inner iterator.
|
||||
engine.getDependencies().removeAll(dependenciesToRemove);
|
||||
}
|
||||
}
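
As the comment above notes, dependencies are only collected during the nested iteration and removed in a single pass afterwards. The same deferred-removal pattern in isolation; the values are placeholders.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    // Collect items to drop while iterating, then remove them afterwards to
    // avoid ConcurrentModificationException from the nested iterators.
    public final class DeferredRemovalDemo {
        public static void main(String[] args) {
            final List<String> deps = new ArrayList<String>(Arrays.asList("core", "core-copy", "beans"));
            final Set<String> toRemove = new HashSet<String>();
            for (String d : deps) {
                if (d.endsWith("-copy")) {
                    toRemove.add(d); // mark now...
                }
            }
            deps.removeAll(toRemove); // ...remove after the iteration completes
            System.out.println(deps); // [core, beans]
        }
    }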
|
||||
/**
|
||||
* Adds the relatedDependency to the dependency's related dependencies.
|
||||
*
|
||||
* @param dependency the main dependency
|
||||
* @param relatedDependency the dependency to merge into the main dependency;
* it will be removed from the main analysis loop
|
||||
* @param dependenciesToRemove a collection of dependencies that will be
|
||||
* removed from the main analysis loop, this function adds to this
|
||||
* collection
|
||||
*/
|
||||
private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
|
||||
LOGGER.debug("Merging '{}' into '{}'", relatedDependency.getFilePath(), dependency.getFilePath());
|
||||
dependency.addRelatedDependency(relatedDependency);
|
||||
dependency.getVendorEvidence().getEvidence().addAll(relatedDependency.getVendorEvidence().getEvidence());
|
||||
dependency.getProductEvidence().getEvidence().addAll(relatedDependency.getProductEvidence().getEvidence());
|
||||
dependency.getVersionEvidence().getEvidence().addAll(relatedDependency.getVersionEvidence().getEvidence());
|
||||
|
||||
final Iterator<Dependency> i = relatedDependency.getRelatedDependencies().iterator();
|
||||
while (i.hasNext()) {
|
||||
dependency.addRelatedDependency(i.next());
|
||||
i.remove();
|
||||
}
|
||||
if (dependency.getSha1sum().equals(relatedDependency.getSha1sum())) {
|
||||
dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
|
||||
}
|
||||
dependenciesToRemove.add(relatedDependency);
|
||||
}
|
||||
|
||||
/**
|
||||
* Bundles Ruby gems that are identified from different .gemspec files but
* denote the same package path. This happens when Ruby bundler installs an
* application's dependencies by running "bundle install".
|
||||
*
|
||||
* @param dependency1 dependency to compare
|
||||
* @param dependency2 dependency to compare
|
||||
* @return true if the dependencies being analyzed appear to be the
* same; otherwise false
|
||||
*/
|
||||
private boolean isSameRubyGem(Dependency dependency1, Dependency dependency2) {
|
||||
if (dependency1 == null || dependency2 == null
|
||||
|| !dependency1.getFileName().endsWith(".gemspec")
|
||||
|| !dependency2.getFileName().endsWith(".gemspec")
|
||||
|| dependency1.getPackagePath() == null
|
||||
|| dependency2.getPackagePath() == null) {
|
||||
return false;
|
||||
}
|
||||
return dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath());
|
||||
}
|
||||
|
||||
/**
|
||||
* Ruby gems installed by "bundle install" can have zero or more *.gemspec
|
||||
* files, all of which have the same packagePath and should be grouped. If
|
||||
* one of these gemspecs is from <parent>/specifications/*.gemspec, because
|
||||
* it is a stub with fully resolved gem meta-data created by Ruby bundler,
|
||||
* this dependency should be the main one. Otherwise, use dependency2 as
|
||||
* main.
|
||||
*
|
||||
* This method returns null if any dependency is not from *.gemspec, or the
|
||||
* two do not have the same packagePath. In this case, they should not be
|
||||
* grouped.
|
||||
*
|
||||
* @param dependency1 dependency to compare
|
||||
* @param dependency2 dependency to compare
|
||||
* @return the main dependency; or null if a gemspec is not included in the
|
||||
* analysis
|
||||
*/
|
||||
private Dependency getMainGemspecDependency(Dependency dependency1, Dependency dependency2) {
|
||||
if (isSameRubyGem(dependency1, dependency2)) {
|
||||
final File lFile = dependency1.getActualFile();
|
||||
final File left = lFile.getParentFile();
|
||||
if (left != null && left.getName().equalsIgnoreCase("specifications")) {
|
||||
return dependency1;
|
||||
}
|
||||
return dependency2;
|
||||
}
|
||||
return null;
|
||||
}
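
To make the rule above concrete: when two gemspecs resolve to the same package path, the stub that bundler writes under a specifications/ directory is preferred as the main dependency. A small sketch with invented paths:

    import java.io.File;

    // Illustrative: pick the gemspec under a "specifications" directory as the main one.
    public final class MainGemspecDemo {

        static File pickMain(File gemspec1, File gemspec2) {
            final File parent = gemspec1.getParentFile();
            if (parent != null && "specifications".equalsIgnoreCase(parent.getName())) {
                return gemspec1;
            }
            return gemspec2;
        }

        public static void main(String[] args) {
            final File stub = new File("vendor/bundle/ruby/2.3.0/specifications/rails-4.2.6.gemspec");
            final File source = new File("vendor/bundle/ruby/2.3.0/gems/rails-4.2.6/rails.gemspec");
            System.out.println("main = " + pickMain(stub, source)); // the specifications/ stub wins
        }
    }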
|
||||
/**
|
||||
* Bundles swift dependencies that have the same packagePath but were
* identified by different file type analyzers.
|
||||
*
|
||||
* @param dependency1 dependency to test
|
||||
* @param dependency2 dependency to test
|
||||
* @return <code>true</code> if the dependencies appear to be the same;
|
||||
* otherwise <code>false</code>
|
||||
*/
|
||||
private boolean isSameSwiftPackage(Dependency dependency1, Dependency dependency2) {
|
||||
if (dependency1 == null || dependency2 == null
|
||||
|| (!dependency1.getFileName().endsWith(".podspec")
|
||||
&& !dependency1.getFileName().equals("Package.swift"))
|
||||
|| (!dependency2.getFileName().endsWith(".podspec")
|
||||
&& !dependency2.getFileName().equals("Package.swift"))
|
||||
|| dependency1.getPackagePath() == null
|
||||
|| dependency2.getPackagePath() == null) {
|
||||
return false;
|
||||
}
|
||||
return dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath());
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines which of the swift dependencies should be considered the
|
||||
* primary.
|
||||
*
|
||||
* @param dependency1 the first swift dependency to compare
|
||||
* @param dependency2 the second swift dependency to compare
|
||||
* @return the primary swift dependency
|
||||
*/
|
||||
private Dependency getMainSwiftDependency(Dependency dependency1, Dependency dependency2) {
|
||||
if (isSameSwiftPackage(dependency1, dependency2)) {
|
||||
if (dependency1.getFileName().endsWith(".podspec")) {
|
||||
return dependency1;
|
||||
}
|
||||
return dependency2;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2016 Jeremy Long. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* Annotation used to flag an analyzer as experimental.
|
||||
*
|
||||
* @author jeremy long
|
||||
*/
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target(ElementType.TYPE)
|
||||
public @interface Experimental {
|
||||
|
||||
}
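
Because the annotation has RUNTIME retention and targets types, callers can detect it reflectively and, for example, skip experimental analyzers unless they are explicitly enabled. A usage sketch; the sample analyzer class below is invented.

    import org.owasp.dependencycheck.analyzer.Experimental;

    // Illustrative only: a made-up analyzer flagged as experimental.
    @Experimental
    class SampleForecastAnalyzer {
    }

    class ExperimentalCheckDemo {
        public static void main(String[] args) {
            // RUNTIME retention makes the marker visible via reflection, so
            // experimental analyzers can be filtered out at scan time.
            final boolean experimental =
                    SampleForecastAnalyzer.class.isAnnotationPresent(Experimental.class);
            System.out.println("experimental = " + experimental); // true
        }
    }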
@@ -17,6 +17,7 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.FileFilter;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.ArrayList;
|
||||
@@ -25,8 +26,6 @@ import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.ListIterator;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
@@ -34,6 +33,10 @@ import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* This analyzer attempts to remove some well known false positives - specifically regarding the java runtime.
|
||||
@@ -45,7 +48,13 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
/**
|
||||
* The Logger.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(FalsePositiveAnalyzer.class.getName());
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(FalsePositiveAnalyzer.class);
|
||||
|
||||
/**
|
||||
* The file filter used to find DLL and EXE.
|
||||
*/
|
||||
private static final FileFilter DLL_EXE_FILTER = FileFilterBuilder.newInstance().addExtensions("dll", "exe").build();
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
@@ -61,6 +70,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
@@ -70,9 +80,20 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_FALSE_POSITIVE_ENABLED;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
@@ -83,7 +104,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
* @throws AnalysisException is thrown if there is an error reading the JAR file.
|
||||
*/
|
||||
@Override
|
||||
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
removeJreEntries(dependency);
|
||||
removeBadMatches(dependency);
|
||||
removeBadSpringMatches(dependency);
|
||||
@@ -103,7 +124,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
for (Identifier i : dependency.getIdentifiers()) {
|
||||
if ("maven".contains(i.getType())) {
|
||||
if (i.getValue() != null && i.getValue().startsWith("org.springframework.")) {
|
||||
final int endPoint = i.getValue().indexOf(":", 19);
|
||||
final int endPoint = i.getValue().indexOf(':', 19);
|
||||
if (endPoint >= 0) {
|
||||
mustContain = i.getValue().substring(19, endPoint).toLowerCase();
|
||||
break;
|
||||
@@ -146,8 +167,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
*/
|
||||
@SuppressWarnings("null")
|
||||
private void removeSpuriousCPE(Dependency dependency) {
|
||||
final List<Identifier> ids = new ArrayList<Identifier>();
|
||||
ids.addAll(dependency.getIdentifiers());
|
||||
final List<Identifier> ids = new ArrayList<Identifier>(dependency.getIdentifiers());
|
||||
Collections.sort(ids);
|
||||
final ListIterator<Identifier> mainItr = ids.listIterator();
|
||||
while (mainItr.hasNext()) {
|
||||
@@ -171,7 +191,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
final String nextVersion = nextCpe.getVersion();
|
||||
if (currentVersion == null && nextVersion == null) {
|
||||
//how did we get here?
|
||||
LOGGER.log(Level.FINE, "currentVersion and nextVersion are both null?");
|
||||
LOGGER.debug("currentVersion and nextVersion are both null?");
|
||||
} else if (currentVersion == null && nextVersion != null) {
|
||||
dependency.getIdentifiers().remove(currentId);
|
||||
} else if (nextVersion == null && currentVersion != null) {
|
||||
@@ -248,15 +268,15 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
try {
|
||||
cpe.parseName(value);
|
||||
} catch (UnsupportedEncodingException ex) {
|
||||
LOGGER.log(Level.FINEST, null, ex);
|
||||
LOGGER.trace("", ex);
|
||||
return null;
|
||||
}
|
||||
return cpe;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes bad CPE matches for a dependency. Unfortunately, right now these are hard-coded patches for specific
|
||||
* problems identified when testing this on a LARGE volume of jar files.
|
||||
* Removes bad CPE matches for a dependency. Unfortunately, right now these are hard-coded patches for specific problems
|
||||
* identified when testing this on a LARGE volume of jar files.
|
||||
*
|
||||
* @param dependency the dependency to analyze
|
||||
*/
|
||||
@@ -273,7 +293,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
//Set<Evidence> artifactId = dependency.getVendorEvidence().getEvidence("pom", "artifactid");
|
||||
while (itr.hasNext()) {
|
||||
final Identifier i = itr.next();
|
||||
//TODO move this startsWith expression to a configuration file?
|
||||
//TODO move this startsWith expression to the base suppression file
|
||||
if ("cpe".equals(i.getType())) {
|
||||
if ((i.getValue().matches(".*c\\+\\+.*")
|
||||
|| i.getValue().startsWith("cpe:/a:file:file")
|
||||
@@ -288,7 +308,14 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".dll")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".exe")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".nuspec")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".nupkg"))) {
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".zip")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".sar")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".apk")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".tar")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".gz")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".tgz")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".ear")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".war"))) {
|
||||
itr.remove();
|
||||
} else if ((i.getValue().startsWith("cpe:/a:jquery:jquery")
|
||||
|| i.getValue().startsWith("cpe:/a:prototypejs:prototype")
|
||||
@@ -302,8 +329,11 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
|| i.getValue().startsWith("cpe:/a:microsoft:word")
|
||||
|| i.getValue().startsWith("cpe:/a:microsoft:visio")
|
||||
|| i.getValue().startsWith("cpe:/a:microsoft:powerpoint")
|
||||
|| i.getValue().startsWith("cpe:/a:microsoft:office"))
|
||||
|| i.getValue().startsWith("cpe:/a:microsoft:office")
|
||||
|| i.getValue().startsWith("cpe:/a:core_ftp:core_ftp"))
|
||||
&& (dependency.getFileName().toLowerCase().endsWith(".jar")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".ear")
|
||||
|| dependency.getFileName().toLowerCase().endsWith(".war")
|
||||
|| dependency.getFileName().toLowerCase().endsWith("pom.xml"))) {
|
||||
itr.remove();
|
||||
} else if (i.getValue().startsWith("cpe:/a:apache:maven")
|
||||
@@ -354,26 +384,23 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* There are some known CPE entries, specifically regarding sun and oracle products due to the acquisition and
|
||||
* changes in product names, that based on given evidence we can add the related CPE entries to ensure a complete
|
||||
* list of CVE entries.
|
||||
* There are some known CPE entries, specifically regarding sun and oracle products due to the acquisition and changes in
|
||||
* product names, that based on given evidence we can add the related CPE entries to ensure a complete list of CVE entries.
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
*/
|
||||
private void addFalseNegativeCPEs(Dependency dependency) {
|
||||
//TODO move this to the hint analyzer
|
||||
final Iterator<Identifier> itr = dependency.getIdentifiers().iterator();
|
||||
while (itr.hasNext()) {
|
||||
final Identifier i = itr.next();
|
||||
if ("cpe".equals(i.getType()) && i.getValue() != null
|
||||
&& (i.getValue().startsWith("cpe:/a:oracle:opensso:")
|
||||
|| i.getValue().startsWith("cpe:/a:oracle:opensso_enterprise:")
|
||||
|| i.getValue().startsWith("cpe:/a:sun:opensso_enterprise:")
|
||||
|| i.getValue().startsWith("cpe:/a:sun:opensso:"))) {
|
||||
final String newCpe = String.format("cpe:/a:sun:opensso_enterprise:%s", i.getValue().substring(22));
|
||||
final String newCpe2 = String.format("cpe:/a:oracle:opensso_enterprise:%s", i.getValue().substring(22));
|
||||
final String newCpe3 = String.format("cpe:/a:sun:opensso:%s", i.getValue().substring(22));
|
||||
final String newCpe4 = String.format("cpe:/a:oracle:opensso:%s", i.getValue().substring(22));
|
||||
for (final Identifier identifier : dependency.getIdentifiers()) {
|
||||
if ("cpe".equals(identifier.getType()) && identifier.getValue() != null
|
||||
&& (identifier.getValue().startsWith("cpe:/a:oracle:opensso:")
|
||||
|| identifier.getValue().startsWith("cpe:/a:oracle:opensso_enterprise:")
|
||||
|| identifier.getValue().startsWith("cpe:/a:sun:opensso_enterprise:")
|
||||
|| identifier.getValue().startsWith("cpe:/a:sun:opensso:"))) {
|
||||
final String newCpe = String.format("cpe:/a:sun:opensso_enterprise:%s", identifier.getValue().substring(22));
|
||||
final String newCpe2 = String.format("cpe:/a:oracle:opensso_enterprise:%s", identifier.getValue().substring(22));
|
||||
final String newCpe3 = String.format("cpe:/a:sun:opensso:%s", identifier.getValue().substring(22));
|
||||
final String newCpe4 = String.format("cpe:/a:oracle:opensso:%s", identifier.getValue().substring(22));
|
||||
try {
|
||||
dependency.addIdentifier("cpe",
|
||||
newCpe,
|
||||
@@ -388,48 +415,49 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
newCpe4,
|
||||
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe4, "UTF-8")));
|
||||
} catch (UnsupportedEncodingException ex) {
|
||||
LOGGER.log(Level.FINE, null, ex);
|
||||
LOGGER.debug("", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes duplicate entries identified that are contained within JAR files. These occasionally crop up due to POM
|
||||
* entries or other types of files (such as DLLs and EXEs) being contained within the JAR.
|
||||
* Removes duplicate entries identified that are contained within JAR files. These occasionally crop up due to POM entries or
|
||||
* other types of files (such as DLLs and EXEs) being contained within the JAR.
|
||||
*
|
||||
* @param dependency the dependency that might be a duplicate
|
||||
* @param engine the engine used to scan all dependencies
|
||||
*/
|
||||
private void removeDuplicativeEntriesFromJar(Dependency dependency, Engine engine) {
|
||||
if (dependency.getFileName().toLowerCase().endsWith("pom.xml")
|
||||
|| "dll".equals(dependency.getFileExtension())
|
||||
|| "exe".equals(dependency.getFileExtension())) {
|
||||
|| DLL_EXE_FILTER.accept(dependency.getActualFile())) {
|
||||
String parentPath = dependency.getFilePath().toLowerCase();
|
||||
if (parentPath.contains(".jar")) {
|
||||
parentPath = parentPath.substring(0, parentPath.indexOf(".jar") + 4);
|
||||
final Dependency parent = findDependency(parentPath, engine.getDependencies());
|
||||
if (parent != null) {
|
||||
boolean remove = false;
|
||||
for (Identifier i : dependency.getIdentifiers()) {
|
||||
if ("cpe".equals(i.getType())) {
|
||||
final String trimmedCPE = trimCpeToVendor(i.getValue());
|
||||
for (Identifier parentId : parent.getIdentifiers()) {
|
||||
if ("cpe".equals(parentId.getType()) && parentId.getValue().startsWith(trimmedCPE)) {
|
||||
remove |= true;
|
||||
final List<Dependency> dependencies = engine.getDependencies();
|
||||
synchronized (dependencies) {
|
||||
final Dependency parent = findDependency(parentPath, dependencies);
|
||||
if (parent != null) {
|
||||
boolean remove = false;
|
||||
for (Identifier i : dependency.getIdentifiers()) {
|
||||
if ("cpe".equals(i.getType())) {
|
||||
final String trimmedCPE = trimCpeToVendor(i.getValue());
|
||||
for (Identifier parentId : parent.getIdentifiers()) {
|
||||
if ("cpe".equals(parentId.getType()) && parentId.getValue().startsWith(trimmedCPE)) {
|
||||
remove |= true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!remove) { //we can escape early
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (!remove) { //we can escape early
|
||||
return;
|
||||
if (remove) {
|
||||
dependencies.remove(dependency);
|
||||
}
|
||||
}
|
||||
if (remove) {
|
||||
engine.getDependencies().remove(dependency);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -457,8 +485,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|
||||
*/
|
||||
private String trimCpeToVendor(String value) {
|
||||
//cpe:/a:jruby:jruby:1.0.8
|
||||
final int pos1 = value.indexOf(":", 7); //right of vendor
|
||||
final int pos2 = value.indexOf(":", pos1 + 1); //right of product
|
||||
final int pos1 = value.indexOf(':', 7); //right of vendor
|
||||
final int pos2 = value.indexOf(':', pos1 + 1); //right of product
|
||||
if (pos2 < 0) {
|
||||
return value;
|
||||
} else {
|
||||
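
The indexOf change in this hunk only swaps String arguments for char arguments; the trimming itself is unchanged. A worked example of what trimCpeToVendor produces for the CPE named in the inline comment. The wrapper class is invented, and the final substring step is inferred because the else branch is cut off in the hunk above.

    // Worked example: "cpe:/a:jruby:jruby:1.0.8" keeps everything left of the
    // colon that follows the product segment.
    public final class TrimCpeDemo {

        static String trimCpeToVendor(String value) {
            final int pos1 = value.indexOf(':', 7);        // right of vendor
            final int pos2 = value.indexOf(':', pos1 + 1); // right of product
            return pos2 < 0 ? value : value.substring(0, pos2);
        }

        public static void main(String[] args) {
            System.out.println(trimCpeToVendor("cpe:/a:jruby:jruby:1.0.8")); // cpe:/a:jruby:jruby
        }
    }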
|
||||
@@ -18,12 +18,16 @@
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
|
||||
import org.apache.commons.io.FilenameUtils;
|
||||
import org.apache.commons.io.filefilter.NameFileFilter;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersion;
|
||||
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -31,7 +35,7 @@ import org.owasp.dependencycheck.utils.DependencyVersionUtil;
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
public class FileNameAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
@@ -48,6 +52,7 @@ public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
@@ -57,59 +62,70 @@ public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_FILE_NAME_ENABLED;
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* Python init files
|
||||
*/
|
||||
//CSOFF: WhitespaceAfter
|
||||
private static final NameFileFilter IGNORED_FILES = new NameFileFilter(new String[]{
|
||||
"__init__.py",
|
||||
"__init__.pyc",
|
||||
"__init__.pyo",});
|
||||
//CSON: WhitespaceAfter
|
||||
|
||||
/**
|
||||
* Collects information about the file name.
|
||||
*
|
||||
* @param dependency the dependency to analyze.
|
||||
* @param engine the engine that is scanning the dependencies
|
||||
* @throws AnalysisException is thrown if there is an error reading the JAR file.
|
||||
* @throws AnalysisException is thrown if there is an error reading the JAR
|
||||
* file.
|
||||
*/
|
||||
@Override
|
||||
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
|
||||
//strip any path information that may get added by ArchiveAnalyzer, etc.
|
||||
final File f = dependency.getActualFile();
|
||||
String fileName = f.getName();
|
||||
|
||||
//remove file extension
|
||||
final int pos = fileName.lastIndexOf(".");
|
||||
if (pos > 0) {
|
||||
fileName = fileName.substring(0, pos);
|
||||
}
|
||||
final String fileName = FilenameUtils.removeExtension(f.getName());
|
||||
|
||||
//add version evidence
|
||||
final DependencyVersion version = DependencyVersionUtil.parseVersion(fileName);
|
||||
final String packageName = DependencyVersionUtil.parsePreVersion(fileName);
|
||||
if (version != null) {
|
||||
// If the version number is just a number like 2 or 23, reduce the confidence
|
||||
// a shade. This should hopefully correct for cases like log4j.jar or
|
||||
// struts2-core.jar
|
||||
if (version.getVersionParts() == null || version.getVersionParts().size() < 2) {
|
||||
dependency.getVersionEvidence().addEvidence("file", "name",
|
||||
dependency.getVersionEvidence().addEvidence("file", "version",
|
||||
version.toString(), Confidence.MEDIUM);
|
||||
} else {
|
||||
dependency.getVersionEvidence().addEvidence("file", "name",
|
||||
dependency.getVersionEvidence().addEvidence("file", "version",
|
||||
version.toString(), Confidence.HIGHEST);
|
||||
}
|
||||
dependency.getVersionEvidence().addEvidence("file", "name",
|
||||
fileName, Confidence.MEDIUM);
|
||||
packageName, Confidence.MEDIUM);
|
||||
}
|
||||
|
||||
//add as vendor and product evidence
|
||||
if (fileName.contains("-")) {
|
||||
if (!IGNORED_FILES.accept(f)) {
|
||||
dependency.getProductEvidence().addEvidence("file", "name",
|
||||
fileName, Confidence.HIGHEST);
|
||||
packageName, Confidence.HIGH);
|
||||
dependency.getVendorEvidence().addEvidence("file", "name",
|
||||
fileName, Confidence.HIGHEST);
|
||||
} else {
|
||||
dependency.getProductEvidence().addEvidence("file", "name",
|
||||
fileName, Confidence.HIGH);
|
||||
dependency.getVendorEvidence().addEvidence("file", "name",
|
||||
fileName, Confidence.HIGH);
|
||||
packageName, Confidence.HIGH);
|
||||
}
|
||||
}
|
||||
}
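
The rewritten method above relies on commons-io instead of hand-rolled substring logic: FilenameUtils.removeExtension strips only the final extension, which keeps embedded version numbers intact for the version parser. A brief illustration with example file names:

    import org.apache.commons.io.FilenameUtils;

    // FilenameUtils.removeExtension (commons-io) as used above.
    public final class FileNameEvidenceDemo {
        public static void main(String[] args) {
            System.out.println(FilenameUtils.removeExtension("spring-core-4.1.0.jar")); // spring-core-4.1.0
            System.out.println(FilenameUtils.removeExtension("__init__.py"));           // __init__
        }
    }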
|
||||
@@ -17,23 +17,13 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.FileFilter;
|
||||
|
||||
/**
|
||||
* An Analyzer that scans specific file types.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public interface FileTypeAnalyzer extends Analyzer {
|
||||
public interface FileTypeAnalyzer extends Analyzer, FileFilter {
|
||||
|
||||
/**
|
||||
* Returns whether or not this analyzer can process the given extension.
|
||||
*
|
||||
* @param extension the file extension to test for support.
|
||||
* @return whether or not the specified file extension is supported by this analyzer.
|
||||
*/
|
||||
boolean supportsExtension(String extension);
|
||||
|
||||
/**
|
||||
* Resets the analyzers state.
|
||||
*/
|
||||
void reset();
|
||||
}
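
Extending java.io.FileFilter means a FileTypeAnalyzer can be handed directly to file-scanning code and asked whether a file is in scope via accept(File). A minimal stand-alone illustration; the filter class and the ".lock" extension are invented for the example.

    import java.io.File;
    import java.io.FileFilter;

    // Sketch of the FileFilter contract the interface above now inherits.
    public final class ExtensionFilterExample implements FileFilter {

        @Override
        public boolean accept(File pathname) {
            return pathname.getName().toLowerCase().endsWith(".lock");
        }

        public static void main(String[] args) {
            final FileFilter filter = new ExtensionFilterExample();
            System.out.println(filter.accept(new File("composer.lock"))); // true
            System.out.println(filter.accept(new File("pom.xml")));       // false
        }
    }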
|
||||
@@ -17,21 +17,41 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Evidence;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.xml.suppression.PropertyType;
|
||||
import org.owasp.dependencycheck.utils.DownloadFailedException;
|
||||
import org.owasp.dependencycheck.utils.Downloader;
|
||||
import org.owasp.dependencycheck.utils.FileUtils;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.xml.hints.VendorDuplicatingHintRule;
|
||||
import org.owasp.dependencycheck.xml.hints.HintParseException;
|
||||
import org.owasp.dependencycheck.xml.hints.HintParser;
|
||||
import org.owasp.dependencycheck.xml.hints.HintRule;
|
||||
import org.owasp.dependencycheck.xml.hints.Hints;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
/**
|
||||
* This analyzer adds evidence to dependencies to enhance the accuracy of
|
||||
* library identification.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
public class HintAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
@@ -62,18 +82,110 @@ public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
/**
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_HINT_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the analyzer by loading the hint rules.
|
||||
*
|
||||
* @throws InitializationException thrown if there is an exception
|
||||
*/
|
||||
@Override
|
||||
public void initializeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
loadHintRules();
|
||||
} catch (HintParseException ex) {
|
||||
LOGGER.debug("Unable to parse hint file", ex);
|
||||
throw new InitializationException("Unable to parse the hint file", ex);
|
||||
}
|
||||
}
|
||||
//</editor-fold>
|
||||
|
||||
/**
|
||||
* The HintAnalyzer uses knowledge about a dependency to add additional information to help in identification of identifiers
|
||||
* or vulnerabilities.
|
||||
* The Logger for use throughout the class
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(HintAnalyzer.class);
|
||||
/**
|
||||
* The name of the hint rule file
|
||||
*/
|
||||
private static final String HINT_RULE_FILE_NAME = "dependencycheck-base-hint.xml";
|
||||
/**
|
||||
* The collection of hints.
|
||||
*/
|
||||
private Hints hints;
|
||||
|
||||
/**
|
||||
* The HintAnalyzer uses knowledge about a dependency to add additional
|
||||
* information to help in identification of identifiers or vulnerabilities.
|
||||
*
|
||||
* @param dependency The dependency being analyzed
|
||||
* @param engine The scanning engine
|
||||
* @throws AnalysisException is thrown if there is an exception analyzing the dependency.
|
||||
* @throws AnalysisException is thrown if there is an exception analyzing
|
||||
* the dependency.
|
||||
*/
|
||||
@Override
|
||||
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
for (HintRule hint : hints.getHintRules()) {
|
||||
boolean shouldAdd = false;
|
||||
for (Evidence given : hint.getGivenVendor()) {
|
||||
if (dependency.getVendorEvidence().getEvidence().contains(given)) {
|
||||
shouldAdd = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!shouldAdd) {
|
||||
for (Evidence given : hint.getGivenProduct()) {
|
||||
if (dependency.getProductEvidence().getEvidence().contains(given)) {
|
||||
shouldAdd = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!shouldAdd) {
|
||||
for (PropertyType pt : hint.getFilenames()) {
|
||||
if (pt.matches(dependency.getFileName())) {
|
||||
shouldAdd = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (shouldAdd) {
|
||||
for (Evidence e : hint.getAddVendor()) {
|
||||
dependency.getVendorEvidence().addEvidence(e);
|
||||
}
|
||||
for (Evidence e : hint.getAddProduct()) {
|
||||
dependency.getProductEvidence().addEvidence(e);
|
||||
}
|
||||
for (Evidence e : hint.getAddVersion()) {
|
||||
dependency.getVersionEvidence().addEvidence(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final Iterator<Evidence> itr = dependency.getVendorEvidence().iterator();
|
||||
final List<Evidence> newEntries = new ArrayList<Evidence>();
|
||||
while (itr.hasNext()) {
|
||||
final Evidence e = itr.next();
|
||||
for (VendorDuplicatingHintRule dhr : hints.getVendorDuplicatingHintRules()) {
|
||||
if (dhr.getValue().equalsIgnoreCase(e.getValue(false))) {
|
||||
newEntries.add(new Evidence(e.getSource() + " (hint)",
|
||||
e.getName(), dhr.getDuplicate(), e.getConfidence()));
|
||||
}
|
||||
}
|
||||
}
|
||||
for (Evidence e : newEntries) {
|
||||
dependency.getVendorEvidence().addEvidence(e);
|
||||
}
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="Old implementation">
|
||||
/*
|
||||
final Evidence springTest1 = new Evidence("Manifest",
|
||||
"Implementation-Title",
|
||||
"Spring Framework",
|
||||
@@ -89,30 +201,70 @@ public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
"spring-core",
|
||||
Confidence.HIGH);
|
||||
|
||||
final Evidence springTest4 = new Evidence("Manifest",
|
||||
"Bundle-Vendor",
|
||||
"SpringSource",
|
||||
Confidence.HIGH);
|
||||
|
||||
final Evidence springTest5 = new Evidence("jar",
|
||||
final Evidence springTest4 = new Evidence("jar",
|
||||
"package name",
|
||||
"springframework",
|
||||
Confidence.LOW);
|
||||
|
||||
final Evidence springSecurityTest1 = new Evidence("Manifest",
|
||||
"Bundle-Name",
|
||||
"Spring Security Core",
|
||||
Confidence.MEDIUM);
|
||||
|
||||
final Evidence springSecurityTest2 = new Evidence("pom",
|
||||
"artifactid",
|
||||
"spring-security-core",
|
||||
Confidence.HIGH);
|
||||
|
||||
final Evidence symfony = new Evidence("composer.lock",
|
||||
"vendor",
|
||||
"symfony",
|
||||
Confidence.HIGHEST);
|
||||
|
||||
final Evidence zendframeworkVendor = new Evidence("composer.lock",
|
||||
"vendor",
|
||||
"zendframework",
|
||||
Confidence.HIGHEST);
|
||||
|
||||
final Evidence zendframeworkProduct = new Evidence("composer.lock",
|
||||
"product",
|
||||
"zendframework",
|
||||
Confidence.HIGHEST);
|
||||
|
||||
//springsource/vmware problem
|
||||
final Set<Evidence> product = dependency.getProductEvidence().getEvidence();
|
||||
final Set<Evidence> vendor = dependency.getVendorEvidence().getEvidence();
|
||||
|
||||
if (product.contains(springTest1) || product.contains(springTest2) || product.contains(springTest3)
|
||||
|| (dependency.getFileName().contains("spring") && (product.contains(springTest5) || vendor.contains(springTest5)))) {
|
||||
|| (dependency.getFileName().contains("spring") && product.contains(springTest4))) {
|
||||
dependency.getProductEvidence().addEvidence("hint analyzer", "product", "springsource spring framework", Confidence.HIGH);
|
||||
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "SpringSource", Confidence.HIGH);
|
||||
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
|
||||
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "pivotal", Confidence.HIGH);
|
||||
}
|
||||
|
||||
if (vendor.contains(springTest4)) {
|
||||
dependency.getProductEvidence().addEvidence("hint analyzer", "product", "springsource_spring_framework", Confidence.HIGH);
|
||||
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
|
||||
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "pivotal", Confidence.HIGH);
|
||||
}
|
||||
|
||||
if (product.contains(springSecurityTest1) || product.contains(springSecurityTest2)) {
|
||||
dependency.getProductEvidence().addEvidence("hint analyzer", "product", "springsource_spring_security", Confidence.HIGH);
|
||||
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "SpringSource", Confidence.HIGH);
|
||||
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
|
||||
}
|
||||
|
||||
if (vendor.contains(symfony)) {
|
||||
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "sensiolabs", Confidence.HIGHEST);
|
||||
}
|
||||
|
||||
if (vendor.contains(zendframeworkVendor)) {
|
||||
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "zend", Confidence.HIGHEST);
|
||||
}
|
||||
|
||||
if (product.contains(zendframeworkProduct)) {
|
||||
dependency.getProductEvidence().addEvidence("hint analyzer", "vendor", "zend_framework", Confidence.HIGHEST);
|
||||
}
|
||||
|
||||
//sun/oracle problem
|
||||
@@ -131,6 +283,90 @@ public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
|
||||
for (Evidence e : newEntries) {
|
||||
dependency.getVendorEvidence().addEvidence(e);
|
||||
}
|
||||
*/
|
||||
//</editor-fold>
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the hint rules file.
|
||||
*
|
||||
* @throws HintParseException thrown if the XML cannot be parsed.
|
||||
*/
|
||||
private void loadHintRules() throws HintParseException {
|
||||
final HintParser parser = new HintParser();
|
||||
File file = null;
|
||||
try {
|
||||
hints = parser.parseHints(this.getClass().getClassLoader().getResourceAsStream(HINT_RULE_FILE_NAME));
|
||||
} catch (HintParseException ex) {
|
||||
LOGGER.error("Unable to parse the base hint data file");
|
||||
LOGGER.debug("Unable to parse the base hint data file", ex);
|
||||
} catch (SAXException ex) {
|
||||
LOGGER.error("Unable to parse the base hint data file");
|
||||
LOGGER.debug("Unable to parse the base hint data file", ex);
|
||||
}
|
||||
final String filePath = Settings.getString(Settings.KEYS.HINTS_FILE);
|
||||
if (filePath == null) {
|
||||
return;
|
||||
}
|
||||
boolean deleteTempFile = false;
|
||||
try {
|
||||
final Pattern uriRx = Pattern.compile("^(https?|file)\\:.*", Pattern.CASE_INSENSITIVE);
|
||||
if (uriRx.matcher(filePath).matches()) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("hint", "xml");
|
||||
final URL url = new URL(filePath);
|
||||
try {
|
||||
Downloader.fetchFile(url, file, false);
|
||||
} catch (DownloadFailedException ex) {
|
||||
Downloader.fetchFile(url, file, true);
|
||||
}
|
||||
} else {
|
||||
file = new File(filePath);
|
||||
if (!file.exists()) {
|
||||
InputStream fromClasspath = null;
|
||||
try {
|
||||
fromClasspath = this.getClass().getClassLoader().getResourceAsStream(filePath);
|
||||
if (fromClasspath != null) {
|
||||
deleteTempFile = true;
|
||||
file = FileUtils.getTempFile("hint", "xml");
|
||||
try {
|
||||
org.apache.commons.io.FileUtils.copyInputStreamToFile(fromClasspath, file);
|
||||
} catch (IOException ex) {
|
||||
throw new HintParseException("Unable to locate hints file in classpath", ex);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
if (fromClasspath != null) {
|
||||
fromClasspath.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (file != null) {
|
||||
try {
|
||||
final Hints newHints = parser.parseHints(file);
|
||||
hints.getHintRules().addAll(newHints.getHintRules());
|
||||
hints.getVendorDuplicatingHintRules().addAll(newHints.getVendorDuplicatingHintRules());
|
||||
LOGGER.debug("{} hint rules were loaded.", hints.getHintRules().size());
|
||||
LOGGER.debug("{} duplicating hint rules were loaded.", hints.getVendorDuplicatingHintRules().size());
|
||||
} catch (HintParseException ex) {
|
||||
LOGGER.warn("Unable to parse hint rule xml file '{}'", file.getPath());
|
||||
LOGGER.warn(ex.getMessage());
|
||||
LOGGER.debug("", ex);
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
} catch (DownloadFailedException ex) {
|
||||
throw new HintParseException("Unable to fetch the configured hint file", ex);
|
||||
} catch (MalformedURLException ex) {
|
||||
throw new HintParseException("Configured hint file has an invalid URL", ex);
|
||||
} catch (IOException ex) {
|
||||
throw new HintParseException("Unable to create temp file for hints", ex);
|
||||
} finally {
|
||||
if (deleteTempFile && file != null) {
|
||||
FileUtils.delete(file);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
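
As a standalone check of the scheme detection used in loadHintRules() above, the same regular expression separates hint locations that must be downloaded from plain file or classpath paths; the sample values below are invented for the example.

import java.util.regex.Pattern;

// Standalone check of the scheme-detection regex used by loadHintRules();
// the sample paths below are invented for illustration.
public class HintPathSchemeExample {
    public static void main(String[] args) {
        final Pattern uriRx = Pattern.compile("^(https?|file)\\:.*", Pattern.CASE_INSENSITIVE);
        System.out.println(uriRx.matcher("https://example.org/custom-hints.xml").matches()); // true  -> download to temp file
        System.out.println(uriRx.matcher("file:/opt/odc/custom-hints.xml").matches());       // true  -> download to temp file
        System.out.println(uriRx.matcher("conf/custom-hints.xml").matches());                // false -> local file or classpath resource
    }
}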
|
||||
|
||||
@@ -1,141 +0,0 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2014 Jeremy Long. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
/**
|
||||
*
|
||||
* Used to analyze a JavaScript file to gather information to aid in identification of a CPE identifier.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class JavaScriptAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(JavaScriptAnalyzer.class.getName());
|
||||
|
||||
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "JavaScript Analyzer";
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
|
||||
/**
|
||||
* The set of file extensions supported by this analyzer.
|
||||
*/
|
||||
private static final Set<String> EXTENSIONS = newHashSet("js");
|
||||
|
||||
/**
|
||||
* Returns a list of file EXTENSIONS supported by this analyzer.
|
||||
*
|
||||
* @return a list of file EXTENSIONS supported by this analyzer.
|
||||
*/
|
||||
@Override
|
||||
public Set<String> getSupportedExtensions() {
|
||||
return EXTENSIONS;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phase that the analyzer is intended to run in.
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
//</editor-fold>
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_JAVASCRIPT_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads a specified JavaScript file and collects information from the copyright information contained within.
|
||||
*
|
||||
* @param dependency the dependency to analyze.
|
||||
* @param engine the engine that is scanning the dependencies
|
||||
* @throws AnalysisException is thrown if there is an error reading the JavaScript file.
|
||||
*/
|
||||
@Override
|
||||
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
BufferedReader fin = null;
|
||||
try {
|
||||
// /\*([^\*][^/]|[\r\n\f])+?\*/
|
||||
final Pattern extractComments = Pattern.compile("(/\\*([^*]|[\\r\\n]|(\\*+([^*/]|[\\r\\n])))*\\*+/)|(//.*)", Pattern.MULTILINE);
|
||||
File file = dependency.getActualFile();
|
||||
fin = new BufferedReader(new FileReader(file));
|
||||
StringBuilder sb = new StringBuilder(2000);
|
||||
String text;
|
||||
while ((text = fin.readLine()) != null) {
|
||||
sb.append(text);
|
||||
}
|
||||
} catch (FileNotFoundException ex) {
|
||||
final String msg = String.format("Dependency file not found: '%s'", dependency.getActualFilePath());
|
||||
throw new AnalysisException(msg, ex);
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.SEVERE, null, ex);
|
||||
} finally {
|
||||
if (fin != null) {
|
||||
try {
|
||||
fin.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.FINEST, null, ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws Exception {
|
||||
|
||||
}
|
||||
}
|
||||
@@ -17,14 +17,6 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
@@ -34,21 +26,35 @@ import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Evidence;
|
||||
import org.owasp.dependencycheck.xml.pom.PomUtils;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.DownloadFailedException;
|
||||
import org.owasp.dependencycheck.utils.Downloader;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.InvalidSettingException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
/**
|
||||
* Analyzer which will attempt to locate a dependency on a Nexus service by SHA-1 digest of the dependency.
|
||||
* Analyzer which will attempt to locate a dependency on a Nexus service by
|
||||
* SHA-1 digest of the dependency.
|
||||
*
|
||||
* There are two settings which govern this behavior:
|
||||
*
|
||||
* <ul>
|
||||
* <li>{@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_NEXUS_ENABLED} determines whether this analyzer is even
|
||||
* enabled. This can be overridden by setting the system property.</li>
|
||||
* <li>{@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_NEXUS_URL} the URL to a Nexus service to search by SHA-1.
|
||||
* There is an expected <code>%s</code> in this where the SHA-1 will get entered.</li>
|
||||
* <li>{@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_NEXUS_ENABLED}
|
||||
* determines whether this analyzer is even enabled. This can be overridden by
|
||||
* setting the system property.</li>
|
||||
* <li>{@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_NEXUS_URL}
|
||||
* the URL to a Nexus service to search by SHA-1. There is an expected
|
||||
* <code>%s</code> in this where the SHA-1 will get entered.</li>
|
||||
* </ul>
|
||||
*
|
||||
* @author colezlaw
|
||||
@@ -56,14 +62,15 @@ import org.owasp.dependencycheck.utils.Settings;
|
||||
public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* The default URL - this will be used by the CentralAnalyzer to determine whether to enable this.
|
||||
* The default URL - this will be used by the CentralAnalyzer to determine
|
||||
* whether to enable this.
|
||||
*/
|
||||
public static final String DEFAULT_URL = "https://repository.sonatype.org/service/local/";
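
A rough sketch of the %s substitution described in the class javadoc above; the search path in the template and the SHA-1 value are assumptions made for illustration, not the layout NexusSearch actually requests.

import java.net.MalformedURLException;
import java.net.URL;

// Hypothetical sketch of the "%s" substitution described in the javadoc above;
// the template path and SHA-1 value are invented for illustration.
public class NexusSha1UrlSketch {
    public static void main(String[] args) throws MalformedURLException {
        final String template = "https://nexus.example.org/service/local/identify/sha1/%s";
        final String sha1 = "da39a3ee5e6b4b0d3255bfef95601890afd80709";
        final URL searchUrl = new URL(String.format(template, sha1));
        System.out.println(searchUrl);
    }
}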
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(NexusAnalyzer.class.getName());
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(NexusAnalyzer.class);
|
||||
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
@@ -78,8 +85,12 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* The types of files on which this will work.
|
||||
*/
|
||||
private static final Set<String> SUPPORTED_EXTENSIONS = newHashSet("jar");
|
||||
private static final String SUPPORTED_EXTENSIONS = "jar";
|
||||
|
||||
/**
|
||||
* Whether or not the Nexus analyzer should use a proxy if configured.
|
||||
*/
|
||||
private boolean useProxy;
|
||||
/**
|
||||
* The Nexus Search to be set up for this analyzer.
|
||||
*/
|
||||
@@ -93,7 +104,8 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Determines if this analyzer is enabled
|
||||
*
|
||||
* @return <code>true</code> if the analyzer is enabled; otherwise <code>false</code>
|
||||
* @return <code>true</code> if the analyzer is enabled; otherwise
|
||||
* <code>false</code>
|
||||
*/
|
||||
private boolean checkEnabled() {
|
||||
/* Enable this analyzer ONLY if the Nexus URL has been set to something
|
||||
@@ -102,15 +114,15 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
boolean retval = false;
|
||||
try {
|
||||
if ((!DEFAULT_URL.equals(Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL)))
|
||||
if (!DEFAULT_URL.equals(Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL))
|
||||
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)) {
|
||||
LOGGER.info("Enabling Nexus analyzer");
|
||||
retval = true;
|
||||
} else {
|
||||
LOGGER.fine("Nexus analyzer disabled, using Central instead");
|
||||
LOGGER.debug("Nexus analyzer disabled, using Central instead");
|
||||
}
|
||||
} catch (InvalidSettingException ise) {
|
||||
LOGGER.warning("Invalid setting. Disabling Nexus analyzer");
|
||||
LOGGER.warn("Invalid setting. Disabling Nexus analyzer");
|
||||
}
|
||||
|
||||
return retval;
|
||||
@@ -129,26 +141,25 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Initializes the analyzer once before any analysis is performed.
|
||||
*
|
||||
* @throws Exception if there's an error during initialization
|
||||
* @throws InitializationException if there's an error during initialization
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws Exception {
|
||||
LOGGER.fine("Initializing Nexus Analyzer");
|
||||
LOGGER.fine(String.format("Nexus Analyzer enabled: %s", isEnabled()));
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
LOGGER.debug("Initializing Nexus Analyzer");
|
||||
LOGGER.debug("Nexus Analyzer enabled: {}", isEnabled());
|
||||
if (isEnabled()) {
|
||||
useProxy = useProxy();
|
||||
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL);
|
||||
LOGGER.fine(String.format("Nexus Analyzer URL: %s", searchUrl));
|
||||
LOGGER.debug("Nexus Analyzer URL: {}", searchUrl);
|
||||
try {
|
||||
searcher = new NexusSearch(new URL(searchUrl));
|
||||
searcher = new NexusSearch(new URL(searchUrl), useProxy);
|
||||
if (!searcher.preflightRequest()) {
|
||||
LOGGER.warning("There was an issue getting Nexus status. Disabling analyzer.");
|
||||
setEnabled(false);
|
||||
throw new InitializationException("There was an issue getting Nexus status. Disabling analyzer.");
|
||||
}
|
||||
} catch (MalformedURLException mue) {
|
||||
// I know that initialize can throw an exception, but we'll
|
||||
// just disable the analyzer if the URL isn't valid
|
||||
LOGGER.warning(String.format("Property %s not a valid URL. Nexus Analyzer disabled", searchUrl));
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Malformed URL to Nexus: " + searchUrl, mue);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -164,7 +175,8 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's enabled property.
|
||||
* Returns the key used in the properties file to reference the analyzer's
|
||||
* enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@@ -184,13 +196,18 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the extensions for which this Analyzer runs.
|
||||
* The file filter used to determine which files this analyzer supports.
|
||||
*/
|
||||
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(SUPPORTED_EXTENSIONS).build();
|
||||
|
||||
/**
|
||||
* Returns the FileFilter
|
||||
*
|
||||
* @return the extensions for which this Analyzer runs
|
||||
* @return the FileFilter
|
||||
*/
|
||||
@Override
|
||||
public Set<String> getSupportedExtensions() {
|
||||
return SUPPORTED_EXTENSIONS;
|
||||
protected FileFilter getFileFilter() {
|
||||
return FILTER;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -201,7 +218,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException when there's an exception during analysis
|
||||
*/
|
||||
@Override
|
||||
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
if (!isEnabled()) {
|
||||
return;
|
||||
}
|
||||
@@ -209,7 +226,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final MavenArtifact ma = searcher.searchSha1(dependency.getSha1sum());
|
||||
dependency.addAsEvidence("nexus", ma, Confidence.HIGH);
|
||||
boolean pomAnalyzed = false;
|
||||
LOGGER.fine("POM URL " + ma.getPomUrl());
|
||||
LOGGER.debug("POM URL {}", ma.getPomUrl());
|
||||
for (Evidence e : dependency.getVendorEvidence()) {
|
||||
if ("pom".equals(e.getSource())) {
|
||||
pomAnalyzed = true;
|
||||
@@ -222,34 +239,48 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
final File baseDir = Settings.getTempDirectory();
|
||||
pomFile = File.createTempFile("pom", ".xml", baseDir);
|
||||
if (!pomFile.delete()) {
|
||||
final String msg = String.format("Unable to fetch pom.xml for %s from Nexus repository; "
|
||||
LOGGER.warn("Unable to fetch pom.xml for {} from Nexus repository; "
|
||||
+ "this could result in undetected CPE/CVEs.", dependency.getFileName());
|
||||
LOGGER.warning(msg);
|
||||
LOGGER.fine("Unable to delete temp file");
|
||||
LOGGER.debug("Unable to delete temp file");
|
||||
}
|
||||
LOGGER.fine(String.format("Downloading %s", ma.getPomUrl()));
|
||||
LOGGER.debug("Downloading {}", ma.getPomUrl());
|
||||
Downloader.fetchFile(new URL(ma.getPomUrl()), pomFile);
|
||||
PomUtils.analyzePOM(dependency, pomFile);
|
||||
} catch (DownloadFailedException ex) {
|
||||
final String msg = String.format("Unable to download pom.xml for %s from Nexus repository; "
|
||||
LOGGER.warn("Unable to download pom.xml for {} from Nexus repository; "
|
||||
+ "this could result in undetected CPE/CVEs.", dependency.getFileName());
|
||||
LOGGER.warning(msg);
|
||||
} finally {
|
||||
if (pomFile != null && !FileUtils.deleteQuietly(pomFile)) {
|
||||
if (pomFile != null && pomFile.exists() && !FileUtils.deleteQuietly(pomFile)) {
|
||||
LOGGER.debug("Failed to delete temporary pom file {}", pomFile.toString());
|
||||
pomFile.deleteOnExit();
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (IllegalArgumentException iae) {
|
||||
//dependency.addAnalysisException(new AnalysisException("Invalid SHA-1"));
|
||||
LOGGER.info(String.format("invalid sha-1 hash on %s", dependency.getFileName()));
|
||||
LOGGER.info("invalid sha-1 hash on {}", dependency.getFileName());
|
||||
} catch (FileNotFoundException fnfe) {
|
||||
//dependency.addAnalysisException(new AnalysisException("Artifact not found on repository"));
|
||||
LOGGER.fine(String.format("Artifact not found in repository '%s'", dependency.getFileName()));
|
||||
LOGGER.log(Level.FINE, fnfe.getMessage(), fnfe);
|
||||
LOGGER.debug("Artifact not found in repository '{}'", dependency.getFileName());
|
||||
LOGGER.debug(fnfe.getMessage(), fnfe);
|
||||
} catch (IOException ioe) {
|
||||
//dependency.addAnalysisException(new AnalysisException("Could not connect to repository", ioe));
|
||||
LOGGER.log(Level.FINE, "Could not connect to nexus repository", ioe);
|
||||
LOGGER.debug("Could not connect to nexus repository", ioe);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if a proxy should be used.
|
||||
*
|
||||
* @return {@code true} if a proxy should be used
|
||||
*/
|
||||
public static boolean useProxy() {
|
||||
try {
|
||||
return Settings.getString(Settings.KEYS.PROXY_SERVER) != null
|
||||
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY);
|
||||
} catch (InvalidSettingException ise) {
|
||||
LOGGER.warn("Failed to parse proxy settings.", ise);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,191 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2015 Institute for Defense Analyses. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import javax.json.Json;
|
||||
import javax.json.JsonException;
|
||||
import javax.json.JsonObject;
|
||||
import javax.json.JsonReader;
|
||||
import javax.json.JsonString;
|
||||
import javax.json.JsonValue;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
* Used to analyze Node Package Manager (npm) package.json files, and collect
|
||||
* information that can be used to determine the associated CPE.
|
||||
*
|
||||
* @author Dale Visser
|
||||
*/
|
||||
@Experimental
|
||||
public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(NodePackageAnalyzer.class);
|
||||
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "Node.js Package Analyzer";
|
||||
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
|
||||
|
||||
/**
|
||||
* The file name to scan.
|
||||
*/
|
||||
public static final String PACKAGE_JSON = "package.json";
|
||||
/**
|
||||
* Filter that detects files named "package.json".
|
||||
*/
|
||||
private static final FileFilter PACKAGE_JSON_FILTER = FileFilterBuilder.newInstance()
|
||||
.addFilenames(PACKAGE_JSON).build();
|
||||
|
||||
/**
|
||||
* Returns the FileFilter
|
||||
*
|
||||
* @return the FileFilter
|
||||
*/
|
||||
@Override
|
||||
protected FileFilter getFileFilter() {
|
||||
return PACKAGE_JSON_FILTER;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
// NO-OP
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phase that the analyzer is intended to run in.
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's
|
||||
* enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
final File file = dependency.getActualFile();
|
||||
JsonReader jsonReader;
|
||||
try {
|
||||
jsonReader = Json.createReader(FileUtils.openInputStream(file));
|
||||
} catch (IOException e) {
|
||||
throw new AnalysisException(
|
||||
"Problem occurred while reading dependency file.", e);
|
||||
}
|
||||
try {
|
||||
final JsonObject json = jsonReader.readObject();
|
||||
final EvidenceCollection productEvidence = dependency.getProductEvidence();
|
||||
final EvidenceCollection vendorEvidence = dependency.getVendorEvidence();
|
||||
if (json.containsKey("name")) {
|
||||
final Object value = json.get("name");
|
||||
if (value instanceof JsonString) {
|
||||
final String valueString = ((JsonString) value).getString();
|
||||
productEvidence.addEvidence(PACKAGE_JSON, "name", valueString, Confidence.HIGHEST);
|
||||
vendorEvidence.addEvidence(PACKAGE_JSON, "name_project", String.format("%s_project", valueString), Confidence.LOW);
|
||||
} else {
|
||||
LOGGER.warn("JSON value not string as expected: {}", value);
|
||||
}
|
||||
}
|
||||
addToEvidence(json, productEvidence, "description");
|
||||
addToEvidence(json, vendorEvidence, "author");
|
||||
addToEvidence(json, dependency.getVersionEvidence(), "version");
|
||||
dependency.setDisplayFileName(String.format("%s/%s", file.getParentFile().getName(), file.getName()));
|
||||
} catch (JsonException e) {
|
||||
LOGGER.warn("Failed to parse package.json file.", e);
|
||||
} finally {
|
||||
jsonReader.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds information to an evidence collection from the node json
|
||||
* configuration.
|
||||
*
|
||||
* @param json information from node.js
|
||||
* @param collection a set of evidence about a dependency
|
||||
* @param key the key to obtain the data from the json information
|
||||
*/
|
||||
private void addToEvidence(JsonObject json, EvidenceCollection collection, String key) {
|
||||
if (json.containsKey(key)) {
|
||||
final JsonValue value = json.get(key);
|
||||
if (value instanceof JsonString) {
|
||||
collection.addEvidence(PACKAGE_JSON, key, ((JsonString) value).getString(), Confidence.HIGHEST);
|
||||
} else if (value instanceof JsonObject) {
|
||||
final JsonObject jsonObject = (JsonObject) value;
|
||||
for (final Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
|
||||
final String property = entry.getKey();
|
||||
final JsonValue subValue = entry.getValue();
|
||||
if (subValue instanceof JsonString) {
|
||||
collection.addEvidence(PACKAGE_JSON,
|
||||
String.format("%s.%s", key, property),
|
||||
((JsonString) subValue).getString(),
|
||||
Confidence.HIGHEST);
|
||||
} else {
|
||||
LOGGER.warn("JSON sub-value not string as expected: {}", subValue);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
LOGGER.warn("JSON value not string or JSON object as expected: {}", value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
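
A rough standalone illustration of the flattening performed by addToEvidence above: a nested JSON value such as an author object surfaces as dotted keys (author.name, author.email). The JSON content below is invented for the example.

import java.io.StringReader;
import java.util.Map;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonString;
import javax.json.JsonValue;

// Hypothetical illustration of the key flattening used by addToEvidence():
// nested JSON objects are reported as "<key>.<property>" entries.
public class PackageJsonFlattenExample {
    public static void main(String[] args) {
        final String packageJson = "{\"author\": {\"name\": \"Jane Doe\", \"email\": \"jane@example.org\"}}";
        final JsonObject json = Json.createReader(new StringReader(packageJson)).readObject();
        final JsonValue value = json.get("author");
        if (value instanceof JsonObject) {
            for (Map.Entry<String, JsonValue> entry : ((JsonObject) value).entrySet()) {
                if (entry.getValue() instanceof JsonString) {
                    System.out.printf("author.%s = %s%n", entry.getKey(),
                            ((JsonString) entry.getValue()).getString());
                }
            }
        }
    }
}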
|
||||
@@ -17,12 +17,6 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.data.nuget.NugetPackage;
|
||||
@@ -31,7 +25,16 @@ import org.owasp.dependencycheck.data.nuget.NuspecParser;
|
||||
import org.owasp.dependencycheck.data.nuget.XPathNuspecParser;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.FileFilter;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
* Analyzer which will parse a Nuspec file to gather module information.
|
||||
@@ -43,7 +46,7 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger.getLogger(NuspecAnalyzer.class.getName());
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(NuspecAnalyzer.class);
|
||||
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
@@ -58,15 +61,15 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* The types of files on which this will work.
|
||||
*/
|
||||
private static final Set<String> SUPPORTED_EXTENSIONS = newHashSet("nuspec");
|
||||
private static final String SUPPORTED_EXTENSIONS = "nuspec";
|
||||
|
||||
/**
|
||||
* Initializes the analyzer once before any analysis is performed.
|
||||
*
|
||||
* @throws Exception if there's an error during initialization
|
||||
* @throws InitializationException if there's an error during initialization
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws Exception {
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -80,7 +83,8 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's enabled property.
|
||||
* Returns the key used in the properties file to reference the analyzer's
|
||||
* enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@@ -100,13 +104,19 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the extensions for which this Analyzer runs.
|
||||
* The file filter used to determine which files this analyzer supports.
|
||||
*/
|
||||
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(
|
||||
SUPPORTED_EXTENSIONS).build();
|
||||
|
||||
/**
|
||||
* Returns the FileFilter
|
||||
*
|
||||
* @return the extensions for which this Analyzer runs
|
||||
* @return the FileFilter
|
||||
*/
|
||||
@Override
|
||||
public Set<String> getSupportedExtensions() {
|
||||
return SUPPORTED_EXTENSIONS;
|
||||
protected FileFilter getFileFilter() {
|
||||
return FILTER;
|
||||
}
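
A short usage sketch of the FileFilterBuilder pattern adopted above; the file names below are invented, and the sketch relies only on the builder calls already visible in this diff.

import java.io.File;
import java.io.FileFilter;
import org.owasp.dependencycheck.utils.FileFilterBuilder;

// Usage sketch of the builder shown above: a filter built from an extension
// answers accept() for individual files. File names are invented.
public class FileFilterBuilderExample {
    public static void main(String[] args) {
        final FileFilter filter = FileFilterBuilder.newInstance().addExtensions("nuspec").build();
        System.out.println(filter.accept(new File("jQuery.1.11.3.nuspec"))); // expected: true
        System.out.println(filter.accept(new File("readme.txt")));           // expected: false
    }
}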
|
||||
|
||||
/**
|
||||
@@ -117,8 +127,8 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @throws AnalysisException when there's an exception during analysis
|
||||
*/
|
||||
@Override
|
||||
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
LOGGER.log(Level.FINE, "Checking Nuspec file {0}", dependency.toString());
|
||||
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
LOGGER.debug("Checking Nuspec file {}", dependency);
|
||||
try {
|
||||
final NuspecParser parser = new XPathNuspecParser();
|
||||
NugetPackage np = null;
|
||||
@@ -135,7 +145,7 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
try {
|
||||
fis.close();
|
||||
} catch (IOException e) {
|
||||
LOGGER.fine("Error closing input stream");
|
||||
LOGGER.debug("Error closing input stream");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -27,15 +27,23 @@ import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Identifier;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* NvdCveAnalyzer is a utility class that takes a project dependency and attempts to discern if there is an associated
|
||||
* CVEs. It uses the the identifiers found by other analyzers to lookup the CVE data.
|
||||
* NvdCveAnalyzer is a utility class that takes a project dependency and
* attempts to discern if there are any associated CVEs. It uses the
* identifiers found by other analyzers to look up the CVE data.
|
||||
*
|
||||
* @author Jeremy Long
|
||||
*/
|
||||
public class NvdCveAnalyzer implements Analyzer {
|
||||
public class NvdCveAnalyzer extends AbstractAnalyzer {
|
||||
|
||||
/**
|
||||
* The Logger for use throughout the class
|
||||
*/
|
||||
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(NvdCveAnalyzer.class);
|
||||
/**
|
||||
* The maximum number of query results to return.
|
||||
*/
|
||||
@@ -51,7 +59,8 @@ public class NvdCveAnalyzer implements Analyzer {
|
||||
* @throws SQLException thrown when there is a SQL Exception
|
||||
* @throws IOException thrown when there is an IO Exception
|
||||
* @throws DatabaseException thrown when there is a database exceptions
|
||||
* @throws ClassNotFoundException thrown if the h2 database driver cannot be loaded
|
||||
* @throws ClassNotFoundException thrown if the h2 database driver cannot be
|
||||
* loaded
|
||||
*/
|
||||
public void open() throws SQLException, IOException, DatabaseException, ClassNotFoundException {
|
||||
cveDB = new CveDB();
|
||||
@@ -62,7 +71,7 @@ public class NvdCveAnalyzer implements Analyzer {
|
||||
* Closes the data source.
|
||||
*/
|
||||
@Override
|
||||
public void close() {
|
||||
public void closeAnalyzer() {
|
||||
cveDB.close();
|
||||
cveDB = null;
|
||||
}
|
||||
@@ -73,13 +82,13 @@ public class NvdCveAnalyzer implements Analyzer {
|
||||
* @return true or false.
|
||||
*/
|
||||
public boolean isOpen() {
|
||||
return (cveDB != null);
|
||||
return cveDB != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures that the CVE Database is closed.
|
||||
*
|
||||
* @throws Throwable when a throwable is thrown.
|
||||
* @throws Throwable an exception raised by this method
|
||||
*/
|
||||
@Override
|
||||
protected void finalize() throws Throwable {
|
||||
@@ -90,14 +99,16 @@ public class NvdCveAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes a dependency and attempts to determine if there are any CPE identifiers for this dependency.
|
||||
* Analyzes a dependency and attempts to determine if there are any CPE
|
||||
* identifiers for this dependency.
|
||||
*
|
||||
* @param dependency The Dependency to analyze
|
||||
* @param engine The analysis engine
|
||||
* @throws AnalysisException is thrown if there is an issue analyzing the dependency
|
||||
* @throws AnalysisException thrown if there is an issue analyzing the
|
||||
* dependency
|
||||
*/
|
||||
@Override
|
||||
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
|
||||
for (Identifier id : dependency.getIdentifiers()) {
|
||||
if ("cpe".equals(id.getType())) {
|
||||
try {
|
||||
@@ -143,12 +154,38 @@ public class NvdCveAnalyzer implements Analyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the database used to gather NVD CVE data.
|
||||
* <p>
|
||||
* Returns the setting key to determine if the analyzer is enabled.</p>
|
||||
*
|
||||
* @throws Exception is thrown if there is an issue opening the index.
|
||||
* @return the key for the analyzer's enabled property
|
||||
*/
|
||||
@Override
|
||||
public void initialize() throws Exception {
|
||||
this.open();
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_NVD_CVE_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the database used to gather NVD CVE data.
|
||||
*
|
||||
* @throws InitializationException is thrown if there is an issue opening
|
||||
* the index.
|
||||
*/
|
||||
@Override
|
||||
public void initializeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
this.open();
|
||||
} catch (SQLException ex) {
|
||||
LOGGER.debug("SQL Exception initializing NvdCveAnalyzer", ex);
|
||||
throw new InitializationException(ex);
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("IO Exception initializing NvdCveAnalyzer", ex);
|
||||
throw new InitializationException(ex);
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.debug("Database Exception initializing NvdCveAnalyzer", ex);
|
||||
throw new InitializationException(ex);
|
||||
} catch (ClassNotFoundException ex) {
|
||||
LOGGER.debug("Exception initializing NvdCveAnalyzer", ex);
|
||||
throw new InitializationException(ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,214 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2015 Institute for Defense Analyses. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
* Used to analyze OpenSSL source code present in the file system.
|
||||
*
|
||||
* @author Dale Visser
|
||||
*/
|
||||
public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* Hexadecimal.
|
||||
*/
|
||||
private static final int HEXADECIMAL = 16;
|
||||
/**
|
||||
* Filename to analyze. All other .h files get removed from consideration.
|
||||
*/
|
||||
private static final String OPENSSLV_H = "opensslv.h";
|
||||
|
||||
/**
|
||||
* Filter that detects files named "opensslv.h".
|
||||
*/
|
||||
private static final FileFilter OPENSSLV_FILTER = FileFilterBuilder.newInstance().addFilenames(OPENSSLV_H).build();
|
||||
/**
|
||||
* Open SSL Version number pattern.
|
||||
*/
|
||||
private static final Pattern VERSION_PATTERN = Pattern.compile(
|
||||
"define\\s+OPENSSL_VERSION_NUMBER\\s+0x([0-9a-zA-Z]{8})L", Pattern.DOTALL
|
||||
| Pattern.CASE_INSENSITIVE);
|
||||
/**
|
||||
* The offset of the major version number.
|
||||
*/
|
||||
private static final int MAJOR_OFFSET = 28;
|
||||
/**
|
||||
* The mask for the minor version number.
|
||||
*/
|
||||
private static final long MINOR_MASK = 0x0ff00000L;
|
||||
/**
|
||||
* The offset of the minor version number.
|
||||
*/
|
||||
private static final int MINOR_OFFSET = 20;
|
||||
/**
|
||||
* The mask for the fix version.
|
||||
*/
|
||||
private static final long FIX_MASK = 0x000ff000L;
|
||||
/**
|
||||
* The offset for the fix version.
|
||||
*/
|
||||
private static final int FIX_OFFSET = 12;
|
||||
/**
|
||||
* The mask for the patch version.
|
||||
*/
|
||||
private static final long PATCH_MASK = 0x00000ff0L;
|
||||
/**
|
||||
* The offset for the patch version.
|
||||
*/
|
||||
private static final int PATCH_OFFSET = 4;
|
||||
/**
|
||||
* Number of letters.
|
||||
*/
|
||||
private static final int NUM_LETTERS = 26;
|
||||
/**
|
||||
* The status mask.
|
||||
*/
|
||||
private static final int STATUS_MASK = 0x0000000f;
|
||||
|
||||
/**
|
||||
* Returns the open SSL version as a string.
|
||||
*
|
||||
* @param openSSLVersionConstant The open SSL version
|
||||
* @return the version of openssl
|
||||
*/
|
||||
static String getOpenSSLVersion(long openSSLVersionConstant) {
|
||||
final long major = openSSLVersionConstant >>> MAJOR_OFFSET;
|
||||
final long minor = (openSSLVersionConstant & MINOR_MASK) >>> MINOR_OFFSET;
|
||||
final long fix = (openSSLVersionConstant & FIX_MASK) >>> FIX_OFFSET;
|
||||
final long patchLevel = (openSSLVersionConstant & PATCH_MASK) >>> PATCH_OFFSET;
|
||||
final String patch = 0 == patchLevel || patchLevel > NUM_LETTERS ? "" : String.valueOf((char) (patchLevel + 'a' - 1));
|
||||
final int statusCode = (int) (openSSLVersionConstant & STATUS_MASK);
|
||||
final String status = 0xf == statusCode ? "" : (0 == statusCode ? "-dev" : "-beta" + statusCode);
|
||||
return String.format("%d.%d.%d%s%s", major, minor, fix, patch, status);
|
||||
}
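
As a worked example of the decoding above, the constant 0x1000107fL (the OPENSSL_VERSION_NUMBER published for OpenSSL 1.0.1g, with the release status nibble 0xf) breaks down as major 1, minor 0, fix 1, patch 7 ('g'); a small self-contained check:

// Self-contained check of the shifts and masks used in getOpenSSLVersion():
// 0x1000107fL decodes to "1.0.1g" (status nibble 0xf means a release build).
public class OpenSslVersionDecodeExample {
    public static void main(String[] args) {
        final long c = 0x1000107fL;
        final long major = c >>> 28;                  // 1
        final long minor = (c & 0x0ff00000L) >>> 20;  // 0
        final long fix = (c & 0x000ff000L) >>> 12;    // 1
        final long patch = (c & 0x00000ff0L) >>> 4;   // 7 -> 'g'
        final char letter = (char) ('a' + patch - 1);
        System.out.printf("%d.%d.%d%s%n", major, minor, fix, letter); // prints 1.0.1g
    }
}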
|
||||
|
||||
/**
|
||||
* Returns the name of the OpenSSL Source Analyzer.
|
||||
*
|
||||
* @return the name of the analyzer
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return "OpenSSL Source Analyzer";
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicates that this analyzer runs in the information collection phase.
|
||||
*
|
||||
* @return INFORMATION_COLLECTION
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return AnalysisPhase.INFORMATION_COLLECTION;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the FileFilter
*
* @return the FileFilter
|
||||
*/
|
||||
@Override
|
||||
protected FileFilter getFileFilter() {
|
||||
return OPENSSLV_FILTER;
|
||||
}
|
||||
|
||||
/**
|
||||
* No-op initializer implementation.
|
||||
*
|
||||
* @throws InitializationException never thrown
|
||||
*/
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
// Nothing to do here.
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyzes the opensslv.h header and adds OpenSSL evidence to the dependency.
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param engine the engine being used to perform the scan
|
||||
* @throws AnalysisException thrown if there is an unrecoverable error
|
||||
* analyzing the dependency
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
final File file = dependency.getActualFile();
|
||||
final String parentName = file.getParentFile().getName();
|
||||
boolean found = false;
|
||||
final String contents = getFileContents(file);
|
||||
if (!contents.isEmpty()) {
|
||||
final Matcher matcher = VERSION_PATTERN.matcher(contents);
|
||||
if (matcher.find()) {
|
||||
dependency.getVersionEvidence().addEvidence(OPENSSLV_H, "Version Constant",
|
||||
getOpenSSLVersion(Long.parseLong(matcher.group(1), HEXADECIMAL)), Confidence.HIGH);
|
||||
found = true;
|
||||
}
|
||||
}
|
||||
if (found) {
|
||||
dependency.setDisplayFileName(parentName + File.separatorChar + OPENSSLV_H);
|
||||
dependency.getVendorEvidence().addEvidence(OPENSSLV_H, "Vendor", "OpenSSL", Confidence.HIGHEST);
|
||||
dependency.getProductEvidence().addEvidence(OPENSSLV_H, "Product", "OpenSSL", Confidence.HIGHEST);
|
||||
} else {
|
||||
engine.getDependencies().remove(dependency);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the contents of a given file.
|
||||
*
|
||||
* @param actualFile the file to read
|
||||
* @return the contents of the file
|
||||
* @throws AnalysisException thrown if there is an IO Exception
|
||||
*/
|
||||
private String getFileContents(final File actualFile)
|
||||
throws AnalysisException {
|
||||
try {
|
||||
return FileUtils.readFileToString(actualFile, Charset.defaultCharset()).trim();
|
||||
} catch (IOException e) {
|
||||
throw new AnalysisException(
|
||||
"Problem occurred while reading dependency file.", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the setting for the analyzer enabled setting key.
|
||||
*
|
||||
* @return the setting for the analyzer enabled setting key
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_OPENSSL_ENABLED;
|
||||
}
|
||||
}
|
||||
@@ -19,42 +19,46 @@ package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FilenameFilter;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import javax.mail.MessagingException;
|
||||
import javax.mail.internet.InternetHeaders;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import org.apache.commons.io.filefilter.NameFileFilter;
|
||||
import org.apache.commons.io.filefilter.SuffixFileFilter;
|
||||
import org.apache.commons.io.input.AutoCloseInputStream;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import javax.mail.MessagingException;
|
||||
import javax.mail.internet.InternetHeaders;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.ExtractionException;
|
||||
import org.owasp.dependencycheck.utils.ExtractionUtil;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.FileUtils;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.UrlStringUtils;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
/**
|
||||
* Used to analyze a Wheel or egg distribution files, or their contents in unzipped form, and collect information that can be used
|
||||
* to determine the associated CPE.
|
||||
* Used to analyze Wheel or egg distribution files, or their contents in
* unzipped form, and collect information that can be used to determine the
* associated CPE.
|
||||
*
|
||||
* @author Dale Visser <dvisser@ida.org>
|
||||
* @author Dale Visser
|
||||
*/
|
||||
@Experimental
|
||||
public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* Name of egg metatdata files to analyze.
|
||||
* Name of egg metadata files to analyze.
|
||||
*/
|
||||
private static final String PKG_INFO = "PKG-INFO";
|
||||
|
||||
@@ -66,13 +70,14 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger
|
||||
.getLogger(PythonDistributionAnalyzer.class.getName());
|
||||
private static final Logger LOGGER = LoggerFactory
|
||||
.getLogger(PythonDistributionAnalyzer.class);
|
||||
|
||||
/**
|
||||
* The count of directories created during analysis. This is used for creating temporary directories.
|
||||
* The count of directories created during analysis. This is used for
|
||||
* creating temporary directories.
|
||||
*/
|
||||
private static int dirCount = 0;
|
||||
private static final AtomicInteger DIR_COUNT = new AtomicInteger(0);
|
||||
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
@@ -86,13 +91,17 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* The set of file extensions supported by this analyzer.
|
||||
*/
|
||||
private static final Set<String> EXTENSIONS = newHashSet("whl", "egg",
|
||||
"zip", METADATA, PKG_INFO);
|
||||
private static final String[] EXTENSIONS = {"whl", "egg", "zip"};
|
||||
|
||||
/**
|
||||
* Used to match on egg archive candidate extenssions.
|
||||
* Used to match on egg archive candidate extensions.
|
||||
*/
|
||||
private static final Pattern EGG_OR_ZIP = Pattern.compile("egg|zip");
|
||||
private static final FileFilter EGG_OR_ZIP = FileFilterBuilder.newInstance().addExtensions("egg", "zip").build();
|
||||
|
||||
/**
|
||||
* Used to detect files with a .whl extension.
|
||||
*/
|
||||
private static final FileFilter WHL_FILTER = FileFilterBuilder.newInstance().addExtensions("whl").build();
|
||||
|
||||
/**
|
||||
* The parent directory for the individual directories per archive.
|
||||
@@ -100,7 +109,8 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
private File tempFileLocation;
|
||||
|
||||
/**
|
||||
* Filter that detects *.dist-info files (but doesn't verify they are directories.
|
||||
* Filter that detects *.dist-info files (but doesn't verify they are
|
||||
* directories.
|
||||
*/
|
||||
private static final FilenameFilter DIST_INFO_FILTER = new SuffixFileFilter(
|
||||
".dist-info");
|
||||
@@ -114,23 +124,29 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
/**
|
||||
* Filter that detects files named "METADATA".
|
||||
*/
|
||||
private static final FilenameFilter METADATA_FILTER = new NameFileFilter(
|
||||
private static final NameFileFilter METADATA_FILTER = new NameFileFilter(
|
||||
METADATA);
|
||||
|
||||
/**
|
||||
* Filter that detects files named "PKG-INFO".
|
||||
*/
|
||||
private static final FilenameFilter PKG_INFO_FILTER = new NameFileFilter(
|
||||
private static final NameFileFilter PKG_INFO_FILTER = new NameFileFilter(
|
||||
PKG_INFO);
|
||||
|
||||
/**
|
||||
* Returns a list of file EXTENSIONS supported by this analyzer.
|
||||
* The file filter used to determine which files this analyzer supports.
|
||||
*/
|
||||
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addFileFilters(
|
||||
METADATA_FILTER, PKG_INFO_FILTER).addExtensions(EXTENSIONS).build();
|
||||
|
||||
/**
|
||||
* Returns the FileFilter
|
||||
*
|
||||
* @return a list of file EXTENSIONS supported by this analyzer.
|
||||
* @return the FileFilter
|
||||
*/
|
||||
@Override
|
||||
public Set<String> getSupportedExtensions() {
|
||||
return EXTENSIONS;
|
||||
protected FileFilter getFileFilter() {
|
||||
return FILTER;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -148,12 +164,14 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's enabled property.
|
||||
* Returns the key used in the properties file to reference the analyzer's
|
||||
* enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@@ -163,18 +181,16 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void analyzeFileType(Dependency dependency, Engine engine)
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
if ("whl".equals(dependency.getFileExtension())) {
|
||||
final File actualFile = dependency.getActualFile();
|
||||
if (WHL_FILTER.accept(actualFile)) {
|
||||
collectMetadataFromArchiveFormat(dependency, DIST_INFO_FILTER,
|
||||
METADATA_FILTER);
|
||||
} else if (EGG_OR_ZIP.matcher(
|
||||
StringUtils.stripToEmpty(dependency.getFileExtension()))
|
||||
.matches()) {
|
||||
} else if (EGG_OR_ZIP.accept(actualFile)) {
|
||||
collectMetadataFromArchiveFormat(dependency, EGG_INFO_FILTER,
|
||||
PKG_INFO_FILTER);
|
||||
} else {
|
||||
final File actualFile = dependency.getActualFile();
|
||||
final String name = actualFile.getName();
|
||||
final boolean metadata = METADATA.equals(name);
|
||||
if (metadata || PKG_INFO.equals(name)) {
|
||||
@@ -197,13 +213,14 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @param dependency the archive being scanned
|
||||
* @param folderFilter the filter to apply to the folder
|
||||
* @param metadataFilter the filter to apply to the meta data
|
||||
* @throws AnalysisException thrown when there is a problem analyzing the dependency
|
||||
* @throws AnalysisException thrown when there is a problem analyzing the
|
||||
* dependency
|
||||
*/
|
||||
private void collectMetadataFromArchiveFormat(Dependency dependency,
|
||||
FilenameFilter folderFilter, FilenameFilter metadataFilter)
|
||||
throws AnalysisException {
|
||||
final File temp = getNextTempDirectory();
|
||||
LOGGER.fine(String.format("%s exists? %b", temp, temp.exists()));
|
||||
LOGGER.debug("{} exists? {}", temp, temp.exists());
|
||||
try {
|
||||
ExtractionUtil.extractFilesUsingFilter(
|
||||
new File(dependency.getActualFilePath()), temp,
|
||||
@@ -212,32 +229,43 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
throw new AnalysisException(ex);
|
||||
}
|
||||
|
||||
collectWheelMetadata(
|
||||
dependency,
|
||||
getMatchingFile(getMatchingFile(temp, folderFilter),
|
||||
metadataFilter));
|
||||
File matchingFile = getMatchingFile(temp, folderFilter);
|
||||
if (matchingFile != null) {
|
||||
matchingFile = getMatchingFile(matchingFile, metadataFilter);
|
||||
if (matchingFile != null) {
|
||||
collectWheelMetadata(dependency, matchingFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes sure a usable temporary directory is available.
|
||||
*
|
||||
* @throws Exception an AnalyzeException is thrown when the temp directory cannot be created
|
||||
* @throws InitializationException an AnalyzeException is thrown when the
|
||||
* temp directory cannot be created
|
||||
*/
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws Exception {
|
||||
final File baseDir = Settings.getTempDirectory();
|
||||
tempFileLocation = File.createTempFile("check", "tmp", baseDir);
|
||||
if (!tempFileLocation.delete()) {
|
||||
final String msg = String.format(
|
||||
"Unable to delete temporary file '%s'.",
|
||||
tempFileLocation.getAbsolutePath());
|
||||
throw new AnalysisException(msg);
|
||||
}
|
||||
if (!tempFileLocation.mkdirs()) {
|
||||
final String msg = String.format(
|
||||
"Unable to create directory '%s'.",
|
||||
tempFileLocation.getAbsolutePath());
|
||||
throw new AnalysisException(msg);
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
final File baseDir = Settings.getTempDirectory();
|
||||
tempFileLocation = File.createTempFile("check", "tmp", baseDir);
|
||||
if (!tempFileLocation.delete()) {
|
||||
setEnabled(false);
|
||||
final String msg = String.format(
|
||||
"Unable to delete temporary file '%s'.",
|
||||
tempFileLocation.getAbsolutePath());
|
||||
throw new InitializationException(msg);
|
||||
}
|
||||
if (!tempFileLocation.mkdirs()) {
|
||||
setEnabled(false);
|
||||
final String msg = String.format(
|
||||
"Unable to create directory '%s'.",
|
||||
tempFileLocation.getAbsolutePath());
|
||||
throw new InitializationException(msg);
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unable to create a temporary file", ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -245,13 +273,15 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* Deletes any files extracted from the Wheel during analysis.
|
||||
*/
|
||||
@Override
|
||||
public void close() {
|
||||
public void closeAnalyzer() {
|
||||
if (tempFileLocation != null && tempFileLocation.exists()) {
|
||||
LOGGER.log(Level.FINE, "Attempting to delete temporary files");
|
||||
LOGGER.debug("Attempting to delete temporary files");
|
||||
final boolean success = FileUtils.delete(tempFileLocation);
|
||||
if (!success) {
|
||||
LOGGER.log(Level.WARNING,
|
||||
"Failed to delete some temporary files, see the log for more details");
|
||||
if (!success && tempFileLocation.exists()) {
|
||||
final String[] l = tempFileLocation.list();
|
||||
if (l != null && l.length > 0) {
|
||||
LOGGER.warn("Failed to delete some temporary files, see the log for more details");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -261,10 +291,8 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param file a reference to the manifest/properties file
|
||||
* @throws AnalysisException thrown when there is an error
|
||||
*/
|
||||
private static void collectWheelMetadata(Dependency dependency, File file)
|
||||
throws AnalysisException {
|
||||
private static void collectWheelMetadata(Dependency dependency, File file) {
|
||||
final InternetHeaders headers = getManifestProperties(file);
|
||||
addPropertyToEvidence(headers, dependency.getVersionEvidence(),
|
||||
"Version", Confidence.HIGHEST);
|
||||
@@ -298,14 +326,15 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
private static void addPropertyToEvidence(InternetHeaders headers,
|
||||
EvidenceCollection evidence, String property, Confidence confidence) {
|
||||
final String value = headers.getHeader(property, null);
|
||||
LOGGER.fine(String.format("Property: %s, Value: %s", property, value));
|
||||
LOGGER.debug("Property: {}, Value: {}", property, value);
|
||||
if (StringUtils.isNotBlank(value)) {
|
||||
evidence.addEvidence(METADATA, property, value, confidence);
|
||||
}
|
||||
}
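Note: the METADATA/PKG-INFO files read above use RFC 822 style "Key: value" headers, which is why javax.mail's InternetHeaders can parse them. A minimal standalone sketch of that idea (the file path is hypothetical, not taken from the analyzer):

```java
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.mail.MessagingException;
import javax.mail.internet.InternetHeaders;

public class WheelMetadataSketch {
    public static void main(String[] args) throws IOException, MessagingException {
        // Hypothetical path to an extracted wheel metadata file
        final InputStream in = new BufferedInputStream(new FileInputStream("example.dist-info/METADATA"));
        try {
            final InternetHeaders headers = new InternetHeaders();
            headers.load(in); // parses "Name: ...", "Version: ..." headers
            System.out.println(headers.getHeader("Name", null));
            System.out.println(headers.getHeader("Version", null));
        } finally {
            in.close();
        }
    }
}
```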
|
||||
|
||||
/**
|
||||
* Returns a list of files that match the given filter, this does not recursively scan the directory.
|
||||
* Returns a list of files that match the given filter, this does not
|
||||
* recursively scan the directory.
|
||||
*
|
||||
* @param folder the folder to filter
|
||||
* @param filter the filter to apply to the files in the directory
|
||||
@@ -329,22 +358,32 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
private static InternetHeaders getManifestProperties(File manifest) {
|
||||
final InternetHeaders result = new InternetHeaders();
|
||||
if (null == manifest) {
|
||||
LOGGER.fine("Manifest file not found.");
|
||||
LOGGER.debug("Manifest file not found.");
|
||||
} else {
|
||||
InputStream in = null;
|
||||
try {
|
||||
result.load(new AutoCloseInputStream(new BufferedInputStream(
|
||||
new FileInputStream(manifest))));
|
||||
in = new BufferedInputStream(new FileInputStream(manifest));
|
||||
result.load(in);
|
||||
} catch (MessagingException e) {
|
||||
LOGGER.log(Level.WARNING, e.getMessage(), e);
|
||||
LOGGER.warn(e.getMessage(), e);
|
||||
} catch (FileNotFoundException e) {
|
||||
LOGGER.log(Level.WARNING, e.getMessage(), e);
|
||||
LOGGER.warn(e.getMessage(), e);
|
||||
} finally {
|
||||
if (in != null) {
|
||||
try {
|
||||
in.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("failed to close input stream", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the next temporary destingation directory for extracting an archive.
|
||||
* Retrieves the next temporary destination directory for extracting an
|
||||
* archive.
|
||||
*
|
||||
* @return a directory
|
||||
* @throws AnalysisException thrown if unable to create temporary directory
|
||||
@@ -355,7 +394,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
// getting an exception for some directories not being able to be
|
||||
// created; might be because the directory already exists?
|
||||
do {
|
||||
dirCount += 1;
|
||||
final int dirCount = DIR_COUNT.incrementAndGet();
|
||||
directory = new File(tempFileLocation, String.valueOf(dirCount));
|
||||
} while (directory.exists());
|
||||
if (!directory.mkdirs()) {
|
||||
|
||||
@@ -17,18 +17,6 @@
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Logger;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.filefilter.NameFileFilter;
|
||||
import org.apache.commons.io.filefilter.SuffixFileFilter;
|
||||
@@ -37,14 +25,25 @@ import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.owasp.dependencycheck.utils.UrlStringUtils;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
|
||||
/**
|
||||
* Used to analyze a Python package, and collect information that can be used to determine the associated CPE.
|
||||
* Used to analyze a Python package, and collect information that can be used to
|
||||
* determine the associated CPE.
|
||||
*
|
||||
* @author Dale Visser <dvisser@ida.org>
|
||||
* @author Dale Visser
|
||||
*/
|
||||
@Experimental
|
||||
public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
@@ -53,17 +52,10 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
private static final int REGEX_OPTIONS = Pattern.DOTALL
|
||||
| Pattern.CASE_INSENSITIVE;
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = Logger
|
||||
.getLogger(PythonDistributionAnalyzer.class.getName());
|
||||
|
||||
/**
|
||||
* Filename extensions for files to be analyzed.
|
||||
*/
|
||||
private static final Set<String> EXTENSIONS = Collections
|
||||
.unmodifiableSet(Collections.singleton("py"));
|
||||
private static final String EXTENSIONS = "py";
|
||||
|
||||
/**
|
||||
* Pattern for matching the module docstring in a source file.
|
||||
@@ -134,22 +126,27 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the set of supported file extensions.
|
||||
* The file filter used to determine which files this analyzer supports.
|
||||
*/
|
||||
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
|
||||
|
||||
/**
|
||||
* Returns the FileFilter
|
||||
*
|
||||
* @return the set of supported file extensions
|
||||
* @return the FileFilter
|
||||
*/
|
||||
@Override
|
||||
protected Set<String> getSupportedExtensions() {
|
||||
return EXTENSIONS;
|
||||
protected FileFilter getFileFilter() {
|
||||
return FILTER;
|
||||
}
|
||||
|
||||
/**
|
||||
* No-op initializer implementation.
|
||||
*
|
||||
* @throws Exception never thrown
|
||||
* @throws InitializationException never thrown
|
||||
*/
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws Exception {
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
// Nothing to do here.
|
||||
}
|
||||
|
||||
@@ -170,36 +167,38 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param engine the engine being used to perform the scan
|
||||
* @throws AnalysisException thrown if there is an unrecoverable error analyzing the dependency
|
||||
* @throws AnalysisException thrown if there is an unrecoverable error
|
||||
* analyzing the dependency
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeFileType(Dependency dependency, Engine engine)
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
final File file = dependency.getActualFile();
|
||||
final File parent = file.getParentFile();
|
||||
final String parentName = parent.getName();
|
||||
boolean found = false;
|
||||
if (INIT_PY_FILTER.accept(file)) {
|
||||
for (final File sourcefile : parent.listFiles(PY_FILTER)) {
|
||||
found |= analyzeFileContents(dependency, sourcefile);
|
||||
}
|
||||
}
|
||||
if (found) {
|
||||
//by definition, the containing folder of __init__.py is considered the package, even the file is empty:
|
||||
//"The __init__.py files are required to make Python treat the directories as containing packages"
|
||||
//see section "6.4 Packages" from https://docs.python.org/2/tutorial/modules.html;
|
||||
dependency.setDisplayFileName(parentName + "/__init__.py");
|
||||
dependency.getProductEvidence().addEvidence(file.getName(),
|
||||
"PackageName", parentName, Confidence.MEDIUM);
|
||||
"PackageName", parentName, Confidence.HIGHEST);
|
||||
|
||||
final File[] fileList = parent.listFiles(PY_FILTER);
|
||||
if (fileList != null) {
|
||||
for (final File sourceFile : fileList) {
|
||||
analyzeFileContents(dependency, sourceFile);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// copy, alter and set in case some other thread is iterating over
|
||||
final List<Dependency> deps = new ArrayList<Dependency>(
|
||||
engine.getDependencies());
|
||||
deps.remove(dependency);
|
||||
engine.setDependencies(deps);
|
||||
engine.getDependencies().remove(dependency);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This should gather information from leading docstrings, file comments, and assignments to __version__, __title__,
|
||||
* __summary__, __uri__, __url__, __home*page__, __author__, and their all caps equivalents.
|
||||
* This should gather information from leading docstrings, file comments,
|
||||
* and assignments to __version__, __title__, __summary__, __uri__, __url__,
|
||||
* __home*page__, __author__, and their all caps equivalents.
|
||||
*
|
||||
* @param dependency the dependency being analyzed
|
||||
* @param file the file name to analyze
|
||||
@@ -208,12 +207,12 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
*/
|
||||
private boolean analyzeFileContents(Dependency dependency, File file)
|
||||
throws AnalysisException {
|
||||
String contents = "";
|
||||
String contents;
|
||||
try {
|
||||
contents = FileUtils.readFileToString(file).trim();
|
||||
contents = FileUtils.readFileToString(file, Charset.defaultCharset()).trim();
|
||||
} catch (IOException e) {
|
||||
throw new AnalysisException(
|
||||
"Problem occured while reading dependency file.", e);
|
||||
"Problem occurred while reading dependency file.", e);
|
||||
}
|
||||
boolean found = false;
|
||||
if (!contents.isEmpty()) {
|
||||
@@ -234,14 +233,10 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
.getVendorEvidence();
|
||||
found |= gatherEvidence(AUTHOR_PATTERN, contents, source,
|
||||
vendorEvidence, "SourceAuthor", Confidence.MEDIUM);
|
||||
try {
|
||||
found |= gatherHomePageEvidence(URI_PATTERN, vendorEvidence,
|
||||
source, "URL", contents);
|
||||
found |= gatherHomePageEvidence(HOMEPAGE_PATTERN,
|
||||
vendorEvidence, source, "HomePage", contents);
|
||||
} catch (MalformedURLException e) {
|
||||
LOGGER.warning(e.getMessage());
|
||||
}
|
||||
found |= gatherHomePageEvidence(URI_PATTERN, vendorEvidence,
|
||||
source, "URL", contents);
|
||||
found |= gatherHomePageEvidence(HOMEPAGE_PATTERN,
|
||||
vendorEvidence, source, "HomePage", contents);
|
||||
}
|
||||
return found;
|
||||
}
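To illustrate the kind of string-assignment matching this method relies on, here is a simplified stand-in pattern (not one of the analyzer's own constants) applied to a typical module attribute:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PySourceRegexSketch {
    public static void main(String[] args) {
        // Simplified, illustrative pattern for __version__ = '...' assignments
        final Pattern version = Pattern.compile("__version__\\s*=\\s*(['\"])(.+?)\\1", Pattern.CASE_INSENSITIVE);
        final Matcher m = version.matcher("__version__ = '1.2.3'");
        if (m.find()) {
            System.out.println(m.group(2)); // prints 1.2.3
        }
    }
}
```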
|
||||
@@ -277,11 +272,10 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
* @param name the name of the evidence
|
||||
* @param contents the home page URL
|
||||
* @return true if evidence was collected; otherwise false
|
||||
* @throws MalformedURLException thrown if the URL is malformed
|
||||
*/
|
||||
private boolean gatherHomePageEvidence(Pattern pattern,
|
||||
EvidenceCollection evidence, String source, String name,
|
||||
String contents) throws MalformedURLException {
|
||||
String contents) {
|
||||
final Matcher matcher = pattern.matcher(contents);
|
||||
boolean found = false;
|
||||
if (matcher.find()) {
|
||||
@@ -295,7 +289,8 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
}
|
||||
|
||||
/**
|
||||
* Gather evidence from a Python source file usin the given string assignment regex pattern.
|
||||
* Gather evidence from a Python source file using the given string
|
||||
* assignment regex pattern.
|
||||
*
|
||||
* @param pattern to scan contents with
|
||||
* @param contents of Python source file
|
||||
|
||||
@@ -0,0 +1,513 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2015 Institute for Defense Analyses. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.data.nvdcve.CveDB;
|
||||
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.Reference;
|
||||
import org.owasp.dependencycheck.dependency.Vulnerability;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Used to analyze Ruby Bundler Gemspec.lock files utilizing the 3rd party
|
||||
* bundle-audit tool.
|
||||
*
|
||||
* @author Dale Visser
|
||||
*/
|
||||
@Experimental
|
||||
public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(RubyBundleAuditAnalyzer.class);
|
||||
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "Ruby Bundle Audit Analyzer";
|
||||
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.PRE_INFORMATION_COLLECTION;
|
||||
/**
|
||||
* The filter defining which files will be analyzed.
|
||||
*/
|
||||
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addFilenames("Gemfile.lock").build();
|
||||
/**
|
||||
* Name.
|
||||
*/
|
||||
public static final String NAME = "Name: ";
|
||||
/**
|
||||
* Version.
|
||||
*/
|
||||
public static final String VERSION = "Version: ";
|
||||
/**
|
||||
* Advisory.
|
||||
*/
|
||||
public static final String ADVISORY = "Advisory: ";
|
||||
/**
|
||||
* Criticality.
|
||||
*/
|
||||
public static final String CRITICALITY = "Criticality: ";
|
||||
|
||||
/**
|
||||
* The DAL.
|
||||
*/
|
||||
private CveDB cvedb;
|
||||
|
||||
/**
|
||||
* @return a filter that accepts files named Gemfile.lock
|
||||
*/
|
||||
@Override
|
||||
protected FileFilter getFileFilter() {
|
||||
return FILTER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Launch bundle-audit.
|
||||
*
|
||||
* @param folder directory that contains bundle audit
|
||||
* @return a handle to the process
|
||||
* @throws AnalysisException thrown when there is an issue launching bundle
|
||||
* audit
|
||||
*/
|
||||
private Process launchBundleAudit(File folder) throws AnalysisException {
|
||||
if (!folder.isDirectory()) {
|
||||
throw new AnalysisException(String.format("%s should have been a directory.", folder.getAbsolutePath()));
|
||||
}
|
||||
final List<String> args = new ArrayList<String>();
|
||||
final String bundleAuditPath = Settings.getString(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH);
|
||||
File bundleAudit = null;
|
||||
if (bundleAuditPath != null) {
|
||||
bundleAudit = new File(bundleAuditPath);
|
||||
if (!bundleAudit.isFile()) {
|
||||
LOGGER.warn("Supplied `bundleAudit` path is incorrect: " + bundleAuditPath);
|
||||
bundleAudit = null;
|
||||
}
|
||||
}
|
||||
args.add(bundleAudit != null && bundleAudit.isFile() ? bundleAudit.getAbsolutePath() : "bundle-audit");
|
||||
args.add("check");
|
||||
args.add("--verbose");
|
||||
final ProcessBuilder builder = new ProcessBuilder(args);
|
||||
builder.directory(folder);
|
||||
try {
|
||||
LOGGER.info("Launching: " + args + " from " + folder);
|
||||
return builder.start();
|
||||
} catch (IOException ioe) {
|
||||
throw new AnalysisException("bundle-audit failure", ioe);
|
||||
}
|
||||
}
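In effect, launchBundleAudit runs `bundle-audit check --verbose` (or the executable configured via Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH) with the given folder as the working directory, so that bundle-audit picks up the Gemfile.lock in that folder.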
|
||||
|
||||
/**
|
||||
* Initialize the analyzer. In this case, open the CVE database and verify
* that the bundle-audit command can be launched.
|
||||
*
|
||||
* @throws InitializationException if anything goes wrong
|
||||
*/
|
||||
@Override
|
||||
public void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
try {
|
||||
cvedb = new CveDB();
|
||||
cvedb.open();
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.warn("Exception opening the database");
|
||||
LOGGER.debug("error", ex);
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Error connecting to the database", ex);
|
||||
}
|
||||
// Now, need to see if bundle-audit actually runs from this location.
|
||||
Process process = null;
|
||||
try {
|
||||
process = launchBundleAudit(Settings.getTempDirectory());
|
||||
} catch (AnalysisException ae) {
|
||||
|
||||
setEnabled(false);
|
||||
cvedb.close();
|
||||
cvedb = null;
|
||||
final String msg = String.format("Exception from bundle-audit process: %s. Disabling %s", ae.getCause(), ANALYZER_NAME);
|
||||
throw new InitializationException(msg, ae);
|
||||
} catch (IOException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unable to create temporary file, the Ruby Bundle Audit Analyzer will be disabled", ex);
|
||||
}
|
||||
|
||||
final int exitValue;
|
||||
try {
|
||||
exitValue = process.waitFor();
|
||||
} catch (InterruptedException ex) {
|
||||
setEnabled(false);
|
||||
final String msg = String.format("Bundle-audit process was interupted. Disabling %s", ANALYZER_NAME);
|
||||
throw new InitializationException(msg);
|
||||
}
|
||||
if (0 == exitValue) {
|
||||
setEnabled(false);
|
||||
final String msg = String.format("Unexpected exit code from bundle-audit process. Disabling %s: %s", ANALYZER_NAME, exitValue);
|
||||
throw new InitializationException(msg);
|
||||
} else {
|
||||
BufferedReader reader = null;
|
||||
try {
|
||||
reader = new BufferedReader(new InputStreamReader(process.getErrorStream(), "UTF-8"));
|
||||
if (!reader.ready()) {
|
||||
LOGGER.warn("Bundle-audit error stream unexpectedly not ready. Disabling " + ANALYZER_NAME);
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Bundle-audit error stream unexpectedly not ready.");
|
||||
} else {
|
||||
final String line = reader.readLine();
|
||||
if (line == null || !line.contains("Errno::ENOENT")) {
|
||||
LOGGER.warn("Unexpected bundle-audit output. Disabling {}: {}", ANALYZER_NAME, line);
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unexpected bundle-audit output.");
|
||||
}
|
||||
}
|
||||
} catch (UnsupportedEncodingException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unexpected bundle-audit encoding.", ex);
|
||||
} catch (IOException ex) {
|
||||
setEnabled(false);
|
||||
throw new InitializationException("Unable to read bundle-audit output.", ex);
|
||||
} finally {
|
||||
if (null != reader) {
|
||||
try {
|
||||
reader.close();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.debug("Error closing reader", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (isEnabled()) {
|
||||
LOGGER.info(ANALYZER_NAME + " is enabled. It is necessary to manually run \"bundle-audit update\" "
|
||||
+ "occasionally to keep its database up to date.");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phase that the analyzer is intended to run in.
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's
|
||||
* enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* If {@link #analyzeDependency(Dependency, Engine)} is called, then we have
|
||||
* successfully initialized, and it will be necessary to disable
|
||||
* {@link RubyGemspecAnalyzer}.
|
||||
*/
|
||||
private boolean needToDisableGemspecAnalyzer = true;
|
||||
|
||||
/**
|
||||
* Determines if the analyzer can analyze the given file type.
|
||||
*
|
||||
* @param dependency the dependency to determine if it can analyze
|
||||
* @param engine the dependency-check engine
|
||||
* @throws AnalysisException thrown if there is an analysis exception.
|
||||
*/
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
if (needToDisableGemspecAnalyzer) {
|
||||
boolean failed = true;
|
||||
final String className = RubyGemspecAnalyzer.class.getName();
|
||||
for (FileTypeAnalyzer analyzer : engine.getFileTypeAnalyzers()) {
|
||||
if (analyzer instanceof RubyBundlerAnalyzer) {
|
||||
((RubyBundlerAnalyzer) analyzer).setEnabled(false);
|
||||
LOGGER.info("Disabled " + RubyBundlerAnalyzer.class.getName() + " to avoid noisy duplicate results.");
|
||||
} else if (analyzer instanceof RubyGemspecAnalyzer) {
|
||||
((RubyGemspecAnalyzer) analyzer).setEnabled(false);
|
||||
LOGGER.info("Disabled " + className + " to avoid noisy duplicate results.");
|
||||
failed = false;
|
||||
}
|
||||
}
|
||||
if (failed) {
|
||||
LOGGER.warn("Did not find " + className + '.');
|
||||
}
|
||||
needToDisableGemspecAnalyzer = false;
|
||||
}
|
||||
final File parentFile = dependency.getActualFile().getParentFile();
|
||||
final Process process = launchBundleAudit(parentFile);
|
||||
final int exitValue;
|
||||
try {
|
||||
exitValue = process.waitFor();
|
||||
} catch (InterruptedException ie) {
|
||||
throw new AnalysisException("bundle-audit process interrupted", ie);
|
||||
}
|
||||
if (exitValue < 0 || exitValue > 1) {
|
||||
final String msg = String.format("Unexpected exit code from bundle-audit process; exit code: %s", exitValue);
|
||||
throw new AnalysisException(msg);
|
||||
}
|
||||
BufferedReader rdr = null;
|
||||
BufferedReader errReader = null;
|
||||
try {
|
||||
errReader = new BufferedReader(new InputStreamReader(process.getErrorStream(), "UTF-8"));
|
||||
while (errReader.ready()) {
|
||||
final String error = errReader.readLine();
|
||||
LOGGER.warn(error);
|
||||
}
|
||||
rdr = new BufferedReader(new InputStreamReader(process.getInputStream(), "UTF-8"));
|
||||
processBundlerAuditOutput(dependency, engine, rdr);
|
||||
} catch (IOException ioe) {
|
||||
LOGGER.warn("bundle-audit failure", ioe);
|
||||
} finally {
|
||||
if (errReader != null) {
|
||||
try {
|
||||
errReader.close();
|
||||
} catch (IOException ioe) {
|
||||
LOGGER.warn("bundle-audit close failure", ioe);
|
||||
}
|
||||
}
|
||||
if (null != rdr) {
|
||||
try {
|
||||
rdr.close();
|
||||
} catch (IOException ioe) {
|
||||
LOGGER.warn("bundle-audit close failure", ioe);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes the bundler audit output.
|
||||
*
|
||||
* @param original the dependency
|
||||
* @param engine the dependency-check engine
|
||||
* @param rdr the reader of the report
|
||||
* @throws IOException thrown if the report cannot be read.
|
||||
*/
|
||||
private void processBundlerAuditOutput(Dependency original, Engine engine, BufferedReader rdr) throws IOException {
|
||||
final String parentName = original.getActualFile().getParentFile().getName();
|
||||
final String fileName = original.getFileName();
|
||||
final String filePath = original.getFilePath();
|
||||
Dependency dependency = null;
|
||||
Vulnerability vulnerability = null;
|
||||
String gem = null;
|
||||
final Map<String, Dependency> map = new HashMap<String, Dependency>();
|
||||
boolean appendToDescription = false;
|
||||
while (rdr.ready()) {
|
||||
final String nextLine = rdr.readLine();
|
||||
if (null == nextLine) {
|
||||
break;
|
||||
} else if (nextLine.startsWith(NAME)) {
|
||||
appendToDescription = false;
|
||||
gem = nextLine.substring(NAME.length());
|
||||
if (!map.containsKey(gem)) {
|
||||
map.put(gem, createDependencyForGem(engine, parentName, fileName, filePath, gem));
|
||||
}
|
||||
dependency = map.get(gem);
|
||||
LOGGER.debug(String.format("bundle-audit (%s): %s", parentName, nextLine));
|
||||
} else if (nextLine.startsWith(VERSION)) {
|
||||
vulnerability = createVulnerability(parentName, dependency, gem, nextLine);
|
||||
} else if (nextLine.startsWith(ADVISORY)) {
|
||||
setVulnerabilityName(parentName, dependency, vulnerability, nextLine);
|
||||
} else if (nextLine.startsWith(CRITICALITY)) {
|
||||
addCriticalityToVulnerability(parentName, vulnerability, nextLine);
|
||||
} else if (nextLine.startsWith("URL: ")) {
|
||||
addReferenceToVulnerability(parentName, vulnerability, nextLine);
|
||||
} else if (nextLine.startsWith("Description:")) {
|
||||
appendToDescription = true;
|
||||
if (null != vulnerability) {
|
||||
vulnerability.setDescription("*** Vulnerability obtained from bundle-audit verbose report. "
|
||||
+ "Title link may not work. CPE below is guessed. CVSS score is estimated (-1.0 "
|
||||
+ " indicates unknown). See link below for full details. *** ");
|
||||
}
|
||||
} else if (appendToDescription) {
|
||||
if (null != vulnerability) {
|
||||
vulnerability.setDescription(vulnerability.getDescription() + nextLine + "\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
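For reference, this parser expects the verbose report to be a series of findings whose lines start with the prefixes defined above (NAME, VERSION, ADVISORY, CRITICALITY, plus "URL: " and "Description:"). A purely illustrative finding (all values made up) would look like:

```
Name: somegem
Version: 1.2.3
Advisory: CVE-2015-0000
Criticality: High
URL: https://example.org/advisories/somegem
Description:
  Free-text lines appended to the description until the next "Name: " entry.
```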
|
||||
|
||||
/**
|
||||
* Sets the vulnerability name.
|
||||
*
|
||||
* @param parentName the parent name
|
||||
* @param dependency the dependency
|
||||
* @param vulnerability the vulnerability
|
||||
* @param nextLine the line to parse
|
||||
*/
|
||||
private void setVulnerabilityName(String parentName, Dependency dependency, Vulnerability vulnerability, String nextLine) {
|
||||
final String advisory = nextLine.substring((ADVISORY.length()));
|
||||
if (null != vulnerability) {
|
||||
vulnerability.setName(advisory);
|
||||
}
|
||||
if (null != dependency) {
|
||||
dependency.getVulnerabilities().add(vulnerability); // needed to wait for vulnerability name to avoid NPE
|
||||
}
|
||||
LOGGER.debug(String.format("bundle-audit (%s): %s", parentName, nextLine));
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a reference to the vulnerability.
|
||||
*
|
||||
* @param parentName the parent name
|
||||
* @param vulnerability the vulnerability
|
||||
* @param nextLine the line to parse
|
||||
*/
|
||||
private void addReferenceToVulnerability(String parentName, Vulnerability vulnerability, String nextLine) {
|
||||
final String url = nextLine.substring(("URL: ").length());
|
||||
if (null != vulnerability) {
|
||||
final Reference ref = new Reference();
|
||||
ref.setName(vulnerability.getName());
|
||||
ref.setSource("bundle-audit");
|
||||
ref.setUrl(url);
|
||||
vulnerability.getReferences().add(ref);
|
||||
}
|
||||
LOGGER.debug(String.format("bundle-audit (%s): %s", parentName, nextLine));
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the criticality to the vulnerability
|
||||
*
|
||||
* @param parentName the parent name
|
||||
* @param vulnerability the vulnerability
|
||||
* @param nextLine the line to parse
|
||||
*/
|
||||
private void addCriticalityToVulnerability(String parentName, Vulnerability vulnerability, String nextLine) {
|
||||
if (null != vulnerability) {
|
||||
final String criticality = nextLine.substring(CRITICALITY.length()).trim();
|
||||
float score = -1.0f;
|
||||
Vulnerability v = null;
|
||||
try {
|
||||
v = cvedb.getVulnerability(vulnerability.getName());
|
||||
} catch (DatabaseException ex) {
|
||||
LOGGER.debug("Unable to look up vulnerability {}", vulnerability.getName());
|
||||
}
|
||||
if (v != null) {
|
||||
score = v.getCvssScore();
|
||||
} else if ("High".equalsIgnoreCase(criticality)) {
|
||||
score = 8.5f;
|
||||
} else if ("Medium".equalsIgnoreCase(criticality)) {
|
||||
score = 5.5f;
|
||||
} else if ("Low".equalsIgnoreCase(criticality)) {
|
||||
score = 2.0f;
|
||||
}
|
||||
vulnerability.setCvssScore(score);
|
||||
}
|
||||
LOGGER.debug(String.format("bundle-audit (%s): %s", parentName, nextLine));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a vulnerability.
|
||||
*
|
||||
* @param parentName the parent name
|
||||
* @param dependency the dependency
|
||||
* @param gem the gem name
|
||||
* @param nextLine the line to parse
|
||||
* @return the vulnerability
|
||||
*/
|
||||
private Vulnerability createVulnerability(String parentName, Dependency dependency, String gem, String nextLine) {
|
||||
Vulnerability vulnerability = null;
|
||||
if (null != dependency) {
|
||||
final String version = nextLine.substring(VERSION.length());
|
||||
dependency.getVersionEvidence().addEvidence(
|
||||
"bundler-audit",
|
||||
"Version",
|
||||
version,
|
||||
Confidence.HIGHEST);
|
||||
vulnerability = new Vulnerability(); // don't add to dependency until we have name set later
|
||||
vulnerability.setMatchedCPE(
|
||||
String.format("cpe:/a:%1$s_project:%1$s:%2$s::~~~ruby~~", gem, version),
|
||||
null);
|
||||
vulnerability.setCvssAccessVector("-");
|
||||
vulnerability.setCvssAccessComplexity("-");
|
||||
vulnerability.setCvssAuthentication("-");
|
||||
vulnerability.setCvssAvailabilityImpact("-");
|
||||
vulnerability.setCvssConfidentialityImpact("-");
|
||||
vulnerability.setCvssIntegrityImpact("-");
|
||||
}
|
||||
LOGGER.debug(String.format("bundle-audit (%s): %s", parentName, nextLine));
|
||||
return vulnerability;
|
||||
}
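The matched CPE above is only a guess built from the gem name and version: the name is reused as both vendor (with a "_project" suffix) and product, and the trailing "~~~ruby~~" appears to tag the target software as Ruby. For a hypothetical gem "rack" at version "1.6.4" the format string resolves as follows:

```java
// Illustration only: resolving the format string used in createVulnerability
String cpe = String.format("cpe:/a:%1$s_project:%1$s:%2$s::~~~ruby~~", "rack", "1.6.4");
// -> "cpe:/a:rack_project:rack:1.6.4::~~~ruby~~"
```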
|
||||
|
||||
/**
|
||||
* Creates the dependency based off of the gem.
|
||||
*
|
||||
* @param engine the engine used for scanning
|
||||
* @param parentName the gem parent
|
||||
* @param fileName the file name
|
||||
* @param filePath the file path
|
||||
* @param gem the gem name
|
||||
* @return the dependency to add
|
||||
* @throws IOException thrown if a temporary gem file could not be written
|
||||
*/
|
||||
private Dependency createDependencyForGem(Engine engine, String parentName, String fileName, String filePath, String gem) throws IOException {
|
||||
final File gemFile = new File(Settings.getTempDirectory(), gem + "_Gemfile.lock");
|
||||
if (!gemFile.createNewFile()) {
|
||||
throw new IOException("Unable to create temporary gem file");
|
||||
}
|
||||
final String displayFileName = String.format("%s%c%s:%s", parentName, File.separatorChar, fileName, gem);
|
||||
|
||||
FileUtils.write(gemFile, displayFileName, Charset.defaultCharset()); // unique contents to avoid dependency bundling
|
||||
final Dependency dependency = new Dependency(gemFile);
|
||||
dependency.getProductEvidence().addEvidence("bundler-audit", "Name", gem, Confidence.HIGHEST);
|
||||
dependency.setDisplayFileName(displayFileName);
|
||||
dependency.setFileName(fileName);
|
||||
dependency.setFilePath(filePath);
|
||||
engine.getDependencies().add(dependency);
|
||||
return dependency;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,141 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2016 IBM Corporation. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FilenameFilter;
|
||||
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
|
||||
/**
|
||||
* This analyzer accepts the fully resolved .gemspec created by the Ruby bundler
|
||||
* (http://bundler.io) for better evidence results. It also tries to resolve the
|
||||
* dependency packagePath to where the gem is actually installed. Then during
|
||||
* the {@link org.owasp.dependencycheck.analyzer.AnalysisPhase#PRE_FINDING_ANALYSIS}
|
||||
* {@link DependencyMergingAnalyzer} will merge two .gemspec dependencies
|
||||
* together if <code>Dependency.getPackagePath()</code> are the same.
|
||||
*
|
||||
* Ruby bundler creates new .gemspec files under a folder called
|
||||
* "specifications" at deploy time, in addition to the original .gemspec files
|
||||
* from source. The bundler generated .gemspec files always contain fully
|
||||
* resolved attributes thus provide more accurate evidences, whereas the
|
||||
* original .gemspec from source often contain variables for attributes that
|
||||
* can't be used for evidences.
|
||||
*
|
||||
* Note this analyzer shares the same
|
||||
* {@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_RUBY_GEMSPEC_ENABLED}
|
||||
* as {@link RubyGemspecAnalyzer}, so it will be enabled/disabled with
|
||||
* {@link RubyGemspecAnalyzer}.
|
||||
*
|
||||
* @author Bianca Jiang (https://twitter.com/biancajiang)
|
||||
*/
|
||||
@Experimental
|
||||
public class RubyBundlerAnalyzer extends RubyGemspecAnalyzer {
|
||||
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "Ruby Bundler Analyzer";
|
||||
|
||||
/**
|
||||
* Folder name that contains .gemspec files created by "bundle install"
|
||||
*/
|
||||
private static final String SPECIFICATIONS = "specifications";
|
||||
|
||||
/**
|
||||
* Folder name that contains the gems installed by "bundle install"
|
||||
*/
|
||||
private static final String GEMS = "gems";
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Only accept *.gemspec files generated by "bundle install --deployment"
|
||||
* under "specifications" folder.
|
||||
*
|
||||
* @param pathname the path name to test
|
||||
* @return true if the analyzer can process the given file; otherwise false
|
||||
*/
|
||||
@Override
|
||||
public boolean accept(File pathname) {
|
||||
|
||||
boolean accepted = super.accept(pathname);
|
||||
if (accepted) {
|
||||
final File parentDir = pathname.getParentFile();
|
||||
accepted = parentDir != null && parentDir.getName().equals(SPECIFICATIONS);
|
||||
}
|
||||
|
||||
return accepted;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
super.analyzeDependency(dependency, engine);
|
||||
|
||||
//find the corresponding gem folder for this .gemspec stub by "bundle install --deployment"
|
||||
final File gemspecFile = dependency.getActualFile();
|
||||
final String gemFileName = gemspecFile.getName();
|
||||
final String gemName = gemFileName.substring(0, gemFileName.lastIndexOf(".gemspec"));
|
||||
final File specificationsDir = gemspecFile.getParentFile();
|
||||
if (specificationsDir != null && specificationsDir.getName().equals(SPECIFICATIONS) && specificationsDir.exists()) {
|
||||
final File parentDir = specificationsDir.getParentFile();
|
||||
if (parentDir != null && parentDir.exists()) {
|
||||
final File gemsDir = new File(parentDir, GEMS);
|
||||
if (gemsDir.exists()) {
|
||||
final File[] matchingFiles = gemsDir.listFiles(new FilenameFilter() {
|
||||
@Override
|
||||
public boolean accept(File dir, String name) {
|
||||
return name.equals(gemName);
|
||||
}
|
||||
});
|
||||
|
||||
if (matchingFiles != null && matchingFiles.length > 0) {
|
||||
final String gemPath = matchingFiles[0].getAbsolutePath();
|
||||
if (dependency.getActualFilePath().equals(dependency.getFilePath())) {
|
||||
if (gemPath != null) {
|
||||
dependency.setPackagePath(gemPath);
|
||||
}
|
||||
} else {
|
||||
//.gemspec's actualFilePath and filePath are different when it's from a compressed file
|
||||
//in which case actualFilePath is the temp directory used by decompression.
|
||||
//packagePath should use the filePath of the identified gem file in "gems" folder
|
||||
final File gemspecStub = new File(dependency.getFilePath());
|
||||
final File specDir = gemspecStub.getParentFile();
|
||||
if (specDir != null && specDir.getName().equals(SPECIFICATIONS)) {
|
||||
final File gemsDir2 = new File(specDir.getParentFile(), GEMS);
|
||||
final File packageDir = new File(gemsDir2, gemName);
|
||||
dependency.setPackagePath(packageDir.getAbsolutePath());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
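To make the path resolution above concrete, here is a hypothetical "bundle install --deployment" layout (the vendor/bundle prefix is illustrative); the analyzer maps the stub gemspec under "specifications" to the sibling folder under "gems" and records the latter as the package path:

```
vendor/bundle/ruby/2.3.0/specifications/somegem-1.2.3.gemspec   <- scanned .gemspec stub
vendor/bundle/ruby/2.3.0/gems/somegem-1.2.3/                    <- resolved packagePath
```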
|
||||
@@ -0,0 +1,249 @@
|
||||
/*
|
||||
* This file is part of dependency-check-core.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* Copyright (c) 2015 Institute for Defense Analyses. All Rights Reserved.
|
||||
*/
|
||||
package org.owasp.dependencycheck.analyzer;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.FilenameFilter;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.owasp.dependencycheck.Engine;
|
||||
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
|
||||
import org.owasp.dependencycheck.dependency.Confidence;
|
||||
import org.owasp.dependencycheck.dependency.Dependency;
|
||||
import org.owasp.dependencycheck.dependency.EvidenceCollection;
|
||||
import org.owasp.dependencycheck.exception.InitializationException;
|
||||
import org.owasp.dependencycheck.utils.FileFilterBuilder;
|
||||
import org.owasp.dependencycheck.utils.Settings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Used to analyze Ruby Gem specifications and collect information that can be
|
||||
* used to determine the associated CPE. Regular expressions are used to parse
|
||||
* the well-defined Ruby syntax that forms the specification.
|
||||
*
|
||||
* @author Dale Visser
|
||||
*/
|
||||
@Experimental
|
||||
public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
|
||||
|
||||
/**
|
||||
* The logger.
|
||||
*/
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(RubyGemspecAnalyzer.class);
|
||||
/**
|
||||
* The name of the analyzer.
|
||||
*/
|
||||
private static final String ANALYZER_NAME = "Ruby Gemspec Analyzer";
|
||||
|
||||
/**
|
||||
* The phase that this analyzer is intended to run in.
|
||||
*/
|
||||
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
|
||||
|
||||
/**
|
||||
* The gemspec file extension.
|
||||
*/
|
||||
private static final String GEMSPEC = "gemspec";
|
||||
|
||||
/**
|
||||
* The file filter containing the list of file extensions that can be
|
||||
* analyzed.
|
||||
*/
|
||||
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(GEMSPEC).build();
|
||||
//TODO: support Rakefile
|
||||
//= FileFilterBuilder.newInstance().addExtensions(GEMSPEC).addFilenames("Rakefile").build();
|
||||
|
||||
/**
|
||||
* The name of the version file.
|
||||
*/
|
||||
private static final String VERSION_FILE_NAME = "VERSION";
|
||||
|
||||
/**
|
||||
* @return a filter that accepts files matching the glob pattern, *.gemspec
|
||||
*/
|
||||
@Override
|
||||
protected FileFilter getFileFilter() {
|
||||
return FILTER;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initializeFileTypeAnalyzer() throws InitializationException {
|
||||
// NO-OP
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer.
|
||||
*
|
||||
* @return the name of the analyzer.
|
||||
*/
|
||||
@Override
|
||||
public String getName() {
|
||||
return ANALYZER_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the phase that the analyzer is intended to run in.
|
||||
*
|
||||
* @return the phase that the analyzer is intended to run in.
|
||||
*/
|
||||
@Override
|
||||
public AnalysisPhase getAnalysisPhase() {
|
||||
return ANALYSIS_PHASE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the key used in the properties file to reference the analyzer's
|
||||
* enabled property.
|
||||
*
|
||||
* @return the analyzer's enabled property setting key
|
||||
*/
|
||||
@Override
|
||||
protected String getAnalyzerEnabledSettingKey() {
|
||||
return Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED;
|
||||
}
|
||||
|
||||
/**
|
||||
* The capture group #1 is the block variable.
|
||||
*/
|
||||
private static final Pattern GEMSPEC_BLOCK_INIT = Pattern.compile("Gem::Specification\\.new\\s+?do\\s+?\\|(.+?)\\|");
|
||||
|
||||
@Override
|
||||
protected void analyzeDependency(Dependency dependency, Engine engine)
|
||||
throws AnalysisException {
|
||||
String contents;
|
||||
try {
|
||||
contents = FileUtils.readFileToString(dependency.getActualFile(), Charset.defaultCharset());
|
||||
} catch (IOException e) {
|
||||
throw new AnalysisException(
|
||||
"Problem occurred while reading dependency file.", e);
|
||||
}
|
||||
final Matcher matcher = GEMSPEC_BLOCK_INIT.matcher(contents);
|
||||
if (matcher.find()) {
|
||||
contents = contents.substring(matcher.end());
|
||||
final String blockVariable = matcher.group(1);
|
||||
|
||||
final EvidenceCollection vendor = dependency.getVendorEvidence();
|
||||
final EvidenceCollection product = dependency.getProductEvidence();
|
||||
final String name = addStringEvidence(product, contents, blockVariable, "name", "name", Confidence.HIGHEST);
|
||||
if (!name.isEmpty()) {
|
||||
vendor.addEvidence(GEMSPEC, "name_project", name + "_project", Confidence.LOW);
|
||||
}
|
||||
addStringEvidence(product, contents, blockVariable, "summary", "summary", Confidence.LOW);
|
||||
|
||||
addStringEvidence(vendor, contents, blockVariable, "author", "authors?", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "email", "emails?", Confidence.MEDIUM);
|
||||
addStringEvidence(vendor, contents, blockVariable, "homepage", "homepage", Confidence.HIGHEST);
|
||||
addStringEvidence(vendor, contents, blockVariable, "license", "licen[cs]es?", Confidence.HIGHEST);
|
||||
|
||||
final String value = addStringEvidence(dependency.getVersionEvidence(), contents,
|
||||
blockVariable, "version", "version", Confidence.HIGHEST);
|
||||
if (value.length() < 1) {
|
||||
addEvidenceFromVersionFile(dependency.getActualFile(), dependency.getVersionEvidence());
|
||||
}
|
||||
}
|
||||
|
||||
setPackagePath(dependency);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the specified evidence to the given evidence collection.
|
||||
*
|
||||
* @param evidences the collection to add the evidence to
|
||||
* @param contents the evidence contents
|
||||
* @param blockVariable the variable
|
||||
* @param field the field
|
||||
* @param fieldPattern the field pattern
|
||||
* @param confidence the confidence of the evidence
|
||||
* @return the evidence string value added
|
||||
*/
|
||||
private String addStringEvidence(EvidenceCollection evidences, String contents,
|
||||
String blockVariable, String field, String fieldPattern, Confidence confidence) {
|
||||
String value = "";
|
||||
|
||||
//capture array value between [ ]
|
||||
final Matcher arrayMatcher = Pattern.compile(
|
||||
String.format("\\s*?%s\\.%s\\s*?=\\s*?\\[(.*?)\\]", blockVariable, fieldPattern), Pattern.CASE_INSENSITIVE).matcher(contents);
|
||||
if (arrayMatcher.find()) {
|
||||
final String arrayValue = arrayMatcher.group(1);
|
||||
value = arrayValue.replaceAll("['\"]", "").trim(); //strip quotes
|
||||
} else { //capture single value between quotes
|
||||
final Matcher matcher = Pattern.compile(
|
||||
String.format("\\s*?%s\\.%s\\s*?=\\s*?(['\"])(.*?)\\1", blockVariable, fieldPattern), Pattern.CASE_INSENSITIVE).matcher(contents);
|
||||
if (matcher.find()) {
|
||||
value = matcher.group(2);
|
||||
}
|
||||
}
|
||||
if (value.length() > 0) {
|
||||
evidences.addEvidence(GEMSPEC, field, value, confidence);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
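A small, self-contained illustration of the two capture strategies above (an array value between brackets versus a single quoted value), using made-up gemspec lines with block variable "s":

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class GemspecRegexSketch {
    public static void main(String[] args) {
        final String contents = "s.name = \"somegem\"\ns.authors = ['Jane Doe', 'John Doe']";
        // Single quoted value, as in the fallback branch of addStringEvidence
        final Matcher single = Pattern.compile("\\s*?s\\.name\\s*?=\\s*?(['\"])(.*?)\\1",
                Pattern.CASE_INSENSITIVE).matcher(contents);
        if (single.find()) {
            System.out.println(single.group(2)); // somegem
        }
        // Array value between [ ], as in the first branch
        final Matcher array = Pattern.compile("\\s*?s\\.authors?\\s*?=\\s*?\\[(.*?)\\]",
                Pattern.CASE_INSENSITIVE).matcher(contents);
        if (array.find()) {
            System.out.println(array.group(1).replaceAll("['\"]", "").trim()); // Jane Doe, John Doe
        }
    }
}
```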
|
||||
|
||||
/**
|
||||
* Adds evidence from the version file.
|
||||
*
|
||||
* @param dependencyFile the dependency being analyzed
|
||||
* @param versionEvidences the version evidence
|
||||
*/
|
||||
private void addEvidenceFromVersionFile(File dependencyFile, EvidenceCollection versionEvidences) {
|
||||
final File parentDir = dependencyFile.getParentFile();
|
||||
if (parentDir != null) {
|
||||
final File[] matchingFiles = parentDir.listFiles(new FilenameFilter() {
|
||||
@Override
|
||||
public boolean accept(File dir, String name) {
|
||||
return name.contains(VERSION_FILE_NAME);
|
||||
}
|
||||
});
|
||||
if (matchingFiles == null) {
|
||||
return;
|
||||
}
|
||||
for (File f : matchingFiles) {
|
||||
try {
|
||||
final List<String> lines = FileUtils.readLines(f, Charset.defaultCharset());
|
||||
if (lines.size() == 1) { //TODO other checking?
|
||||
final String value = lines.get(0).trim();
|
||||
versionEvidences.addEvidence(GEMSPEC, "version", value, Confidence.HIGH);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
LOGGER.debug("Error reading gemspec", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the package path on the dependency.
|
||||
*
|
||||
* @param dep the dependency to alter
|
||||
*/
|
||||
private void setPackagePath(Dependency dep) {
|
||||
final File file = new File(dep.getFilePath());
|
||||
final String parent = file.getParent();
|
||||
if (parent != null) {
|
||||
dep.setPackagePath(parent);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,192 @@
/*
 * This file is part of dependency-check-core.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Copyright (c) 2016 IBM Corporation. All Rights Reserved.
 */
package org.owasp.dependencycheck.analyzer;

import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;

/**
 * This analyzer is used to analyze the SWIFT Package Manager
 * (https://swift.org/package-manager/). It collects information about a package
 * from Package.swift files.
 *
 * @author Bianca Jiang (https://twitter.com/biancajiang)
 */
@Experimental
public class SwiftPackageManagerAnalyzer extends AbstractFileTypeAnalyzer {

    /**
     * The name of the analyzer.
     */
    private static final String ANALYZER_NAME = "SWIFT Package Manager Analyzer";

    /**
     * The phase that this analyzer is intended to run in.
     */
    private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;

    /**
     * The file name to scan.
     */
    public static final String SPM_FILE_NAME = "Package.swift";

    /**
     * Filter that detects files named "Package.swift".
     */
    private static final FileFilter SPM_FILE_FILTER = FileFilterBuilder.newInstance().addFilenames(SPM_FILE_NAME).build();

    /**
     * The capture group #1 is the block variable. e.g. "import
     * PackageDescription let package = Package( name: "Gloss" )"
     */
    private static final Pattern SPM_BLOCK_PATTERN = Pattern.compile("let[^=]+=\\s*Package\\s*\\(\\s*([^)]*)\\s*\\)", Pattern.DOTALL);
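
    // For a hypothetical manifest such as:
    //   import PackageDescription
    //   let package = Package(
    //       name: "Gloss",
    //       dependencies: []
    //   )
    // capture group #1 of SPM_BLOCK_PATTERN holds roughly the text between the parentheses
    // (name: "Gloss", dependencies: []), which addStringEvidence then searches field by field.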

    /**
     * Returns the FileFilter
     *
     * @return the FileFilter
     */
    @Override
    protected FileFilter getFileFilter() {
        return SPM_FILE_FILTER;
    }

    @Override
    protected void initializeFileTypeAnalyzer() {
        // NO-OP
    }

    /**
     * Returns the name of the analyzer.
     *
     * @return the name of the analyzer.
     */
    @Override
    public String getName() {
        return ANALYZER_NAME;
    }

    /**
     * Returns the phase that the analyzer is intended to run in.
     *
     * @return the phase that the analyzer is intended to run in.
     */
    @Override
    public AnalysisPhase getAnalysisPhase() {
        return ANALYSIS_PHASE;
    }

    /**
     * Returns the key used in the properties file to reference the analyzer's
     * enabled property.
     *
     * @return the analyzer's enabled property setting key
     */
    @Override
    protected String getAnalyzerEnabledSettingKey() {
        return Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED;
    }

    @Override
    protected void analyzeDependency(Dependency dependency, Engine engine)
            throws AnalysisException {

        String contents;
        try {
            contents = FileUtils.readFileToString(dependency.getActualFile(), Charset.defaultCharset());
        } catch (IOException e) {
            throw new AnalysisException(
                    "Problem occurred while reading dependency file.", e);
        }
        final Matcher matcher = SPM_BLOCK_PATTERN.matcher(contents);
        if (matcher.find()) {
            final String packageDescription = matcher.group(1);
            if (packageDescription.isEmpty()) {
                return;
            }

            final EvidenceCollection product = dependency.getProductEvidence();
            final EvidenceCollection vendor = dependency.getVendorEvidence();

            //SPM is currently under development for SWIFT 3. Its current metadata includes package name and dependencies.
            //Future interesting metadata: version, license, homepage, author, summary, etc.
            final String name = addStringEvidence(product, packageDescription, "name", "name", Confidence.HIGHEST);
            if (name != null && !name.isEmpty()) {
                vendor.addEvidence(SPM_FILE_NAME, "name_project", name, Confidence.HIGHEST);
            }
        }
        setPackagePath(dependency);
    }

    /**
     * Extracts evidence from the package description and adds it to the given
     * evidence collection.
     *
     * @param evidences the evidence collection to update
     * @param packageDescription the text to extract evidence from
     * @param field the name of the field being searched for
     * @param fieldPattern the field pattern within the contents to search for
     * @param confidence the confidence level of the evidence if found
     * @return the string that was added as evidence
     */
    private String addStringEvidence(EvidenceCollection evidences,
            String packageDescription, String field, String fieldPattern, Confidence confidence) {
        String value = "";

        final Matcher matcher = Pattern.compile(
                String.format("%s *:\\s*\"([^\"]*)", fieldPattern), Pattern.DOTALL).matcher(packageDescription);
        if (matcher.find()) {
            value = matcher.group(1);
        }

        if (value != null) {
            value = value.trim();
            if (value.length() > 0) {
                evidences.addEvidence(SPM_FILE_NAME, field, value, confidence);
            }
        }

        return value;
    }
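
    // With the hypothetical package description above, addStringEvidence(product, desc, "name",
    // "name", Confidence.HIGHEST) applies the pattern name *:\s*"([^"]*) and captures "Gloss";
    // analyzeDependency records it as product evidence and echoes it into the vendor evidence
    // under the "name_project" key.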

    /**
     * Sets the package path on the given dependency.
     *
     * @param dep the dependency to update
     */
    private void setPackagePath(Dependency dep) {
        final File file = new File(dep.getFilePath());
        final String parent = file.getParent();
        if (parent != null) {
            dep.setPackagePath(parent);
        }
    }
}
@@ -0,0 +1,169 @@
/*
 * This file is part of dependency-check-core.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Copyright (c) 2017 Jeremy Long. All Rights Reserved.
 */
package org.owasp.dependencycheck.analyzer;

import java.util.Iterator;
import java.util.Objects;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This analyzer attempts to filter out erroneous version numbers collected.
 * Initially, this will focus on JAR files that contain a POM version number
 * that matches the file name; if identified, all other version information will
 * be removed.
 *
 * @author Jeremy Long
 */
public class VersionFilterAnalyzer extends AbstractAnalyzer {

    //<editor-fold defaultstate="collapsed" desc="Constants">
    /**
     * Evidence source.
     */
    private static final String FILE = "file";
    /**
     * Evidence source.
     */
    private static final String POM = "pom";
    /**
     * Evidence source.
     */
    private static final String NEXUS = "nexus";
    /**
     * Evidence source.
     */
    private static final String CENTRAL = "central";
    /**
     * Evidence source.
     */
    private static final String MANIFEST = "Manifest";
    /**
     * Evidence name.
     */
    private static final String VERSION = "version";
    /**
     * Evidence name.
     */
    private static final String IMPLEMENTATION_VERSION = "Implementation-Version";

    /**
     * The name of the analyzer.
     */
    private static final String ANALYZER_NAME = "Version Filter Analyzer";
    /**
     * The phase that this analyzer is intended to run in.
     */
    private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.POST_INFORMATION_COLLECTION;

    //</editor-fold>
    //<editor-fold defaultstate="collapsed" desc="Standard implementation of Analyzer">
    /**
     * Returns the name of the analyzer.
     *
     * @return the name of the analyzer.
     */
    @Override
    public String getName() {
        return ANALYZER_NAME;
    }

    /**
     * Returns the phase that the analyzer is intended to run in.
     *
     * @return the phase that the analyzer is intended to run in.
     */
    @Override
    public AnalysisPhase getAnalysisPhase() {
        return ANALYSIS_PHASE;
    }

    /**
     * Returns the setting key to determine if the analyzer is enabled.
     *
     * @return the key for the analyzer's enabled property
     */
    @Override
    protected String getAnalyzerEnabledSettingKey() {
        return Settings.KEYS.ANALYZER_VERSION_FILTER_ENABLED;
    }
    //</editor-fold>

    /**
     * The Logger for use throughout the class
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(VersionFilterAnalyzer.class);
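
    // A worked example of the intended filtering (hypothetical evidence values): if the file name
    // yields version "3.2.1", the POM also reports "3.2.1", and the Manifest reports "1.0", the
    // file and POM versions agree, so only the matching file/POM version evidence is kept and the
    // Manifest entry is removed; if no two sources agree, the collected evidence is left untouched.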

    /**
     * Examines the version evidence collected for the dependency; when two or more
     * sources (file name, POM, or Manifest) report the same version, the version evidence that does not match is removed.
     *
     * @param dependency The dependency being analyzed
     * @param engine The scanning engine
     * @throws AnalysisException is thrown if there is an exception analyzing
     * the dependency.
     */
    @Override
    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
        String fileVersion = null;
        String pomVersion = null;
        String manifestVersion = null;
        for (Evidence e : dependency.getVersionEvidence()) {
            if (FILE.equals(e.getSource()) && VERSION.equals(e.getName())) {
                fileVersion = e.getValue(Boolean.FALSE);
            } else if ((NEXUS.equals(e.getSource()) || CENTRAL.equals(e.getSource())
                    || POM.equals(e.getSource())) && VERSION.equals(e.getName())) {
                pomVersion = e.getValue(Boolean.FALSE);
            } else if (MANIFEST.equals(e.getSource()) && IMPLEMENTATION_VERSION.equals(e.getName())) {
                manifestVersion = e.getValue(Boolean.FALSE);
            }
        }
        //ensure we have at least two not null
        if (((fileVersion == null ? 0 : 1) + (pomVersion == null ? 0 : 1) + (manifestVersion == null ? 0 : 1)) > 1) {
            final DependencyVersion dvFile = new DependencyVersion(fileVersion);
            final DependencyVersion dvPom = new DependencyVersion(pomVersion);
            final DependencyVersion dvManifest = new DependencyVersion(manifestVersion);
            final boolean fileMatch = Objects.equals(dvFile, dvPom) || Objects.equals(dvFile, dvManifest);
            final boolean manifestMatch = Objects.equals(dvManifest, dvPom) || Objects.equals(dvManifest, dvFile);
            final boolean pomMatch = Objects.equals(dvPom, dvFile) || Objects.equals(dvPom, dvManifest);
            if (fileMatch || manifestMatch || pomMatch) {
                LOGGER.debug("filtering evidence from {}", dependency.getFileName());
                final EvidenceCollection versionEvidence = dependency.getVersionEvidence();
                synchronized (versionEvidence) {
                    final Iterator<Evidence> itr = versionEvidence.iterator();
                    while (itr.hasNext()) {
                        final Evidence e = itr.next();
                        if (!(pomMatch && VERSION.equals(e.getName())
                                && (NEXUS.equals(e.getSource()) || CENTRAL.equals(e.getSource()) || POM.equals(e.getSource())))
                                && !(fileMatch && VERSION.equals(e.getName()) && FILE.equals(e.getSource()))
                                && !(manifestMatch && MANIFEST.equals(e.getSource()) && IMPLEMENTATION_VERSION.equals(e.getName()))) {
                            itr.remove();
                        }
                    }
                }
            }
        }
    }
}
@@ -20,11 +20,13 @@ package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.suppression.SuppressionRule;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.xml.suppression.SuppressionRule;

/**
 * The suppression analyzer processes an externally defined XML document that complies with the suppressions.xsd schema.
 * Any identified Vulnerability entries within the dependencies that match will be removed.
 * The suppression analyzer processes an externally defined XML document that
 * complies with the suppressions.xsd schema. Any identified Vulnerability
 * entries within the dependencies that match will be removed.
 *
 * @author Jeremy Long
 */
@@ -59,10 +61,29 @@ public class VulnerabilitySuppressionAnalyzer extends AbstractSuppressionAnalyze
    public AnalysisPhase getAnalysisPhase() {
        return ANALYSIS_PHASE;
    }

    /**
     * <p>
     * Returns the setting key to determine if the analyzer is enabled.</p>
     *
     * @return the key for the analyzer's enabled property
     */
    @Override
    protected String getAnalyzerEnabledSettingKey() {
        return Settings.KEYS.ANALYZER_VULNERABILITY_SUPPRESSION_ENABLED;
    }
    //</editor-fold>

    /**
     * Analyzes a dependency's vulnerabilities against the configured CVE
     * suppressions.
     *
     * @param dependency the dependency being analyzed
     * @param engine a reference to the engine orchestrating the analysis
     * @throws AnalysisException thrown if there is an error during analysis
     */
    @Override
    public void analyze(final Dependency dependency, final Engine engine) throws AnalysisException {
    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {

        if (getRules() == null || getRules().size() <= 0) {
            return;

@@ -23,15 +23,16 @@ import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.owasp.dependencycheck.data.nexus.MavenArtifact;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.URLConnectionFactory;
import org.owasp.dependencycheck.utils.XmlUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;

@@ -50,46 +51,48 @@ public class CentralSearch {
    /**
     * Whether to use the Proxy when making requests
     */
    private boolean useProxy;
    private final boolean useProxy;

    /**
     * Used for logging.
     */
    private static final Logger LOGGER = Logger.getLogger(CentralSearch.class.getName());
    private static final Logger LOGGER = LoggerFactory.getLogger(CentralSearch.class);

    /**
     * Creates a NexusSearch for the given repository URL.
     *
     * @param rootURL the URL of the repository on which searches should execute. Only parameters are added to this (so it should
     * end in /select)
     * @param rootURL the URL of the repository on which searches should
     * execute. Only parameters are added to this (so it should end in /select)
     */
    public CentralSearch(URL rootURL) {
        this.rootURL = rootURL;
        if (null != Settings.getString(Settings.KEYS.PROXY_SERVER)) {
            useProxy = true;
            LOGGER.fine("Using proxy");
            LOGGER.debug("Using proxy");
        } else {
            useProxy = false;
            LOGGER.fine("Not using proxy");
            LOGGER.debug("Not using proxy");
        }
    }

    /**
     * Searches the configured Central URL for the given sha1 hash. If the artifact is found, a <code>MavenArtifact</code> is
     * populated with the GAV.
     * Searches the configured Central URL for the given sha1 hash. If the
     * artifact is found, a <code>MavenArtifact</code> is populated with the
     * GAV.
     *
     * @param sha1 the SHA-1 hash string for which to search
     * @return the populated Maven GAV.
     * @throws IOException if it's unable to connect to the specified repository or if the specified artifact is not found.
     * @throws IOException if it's unable to connect to the specified repository
     * or if the specified artifact is not found.
     */
    public List<MavenArtifact> searchSha1(String sha1) throws IOException {
        if (null == sha1 || !sha1.matches("^[0-9A-Fa-f]{40}$")) {
            throw new IllegalArgumentException("Invalid SHA1 format");
        }
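
        // For a hypothetical rootURL such as https://search.maven.org/solrsearch/select, the URL
        // built below becomes rootURL + "?q=1:\"<sha1>\"&wt=xml"; the GAV values are then read
        // from the /response/result/doc nodes of the returned Solr XML document.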

        List<MavenArtifact> result = null;
        final URL url = new URL(rootURL + String.format("?q=1:\"%s\"&wt=xml", sha1));

        LOGGER.fine(String.format("Searching Central url %s", url.toString()));
        LOGGER.debug("Searching Central url {}", url);

        // Determine if we need to use a proxy. The rules:
        // 1) If the proxy is set, AND the setting is set to true, use the proxy
@@ -107,21 +110,20 @@ public class CentralSearch {
        if (conn.getResponseCode() == 200) {
            boolean missing = false;
            try {
                final DocumentBuilder builder = DocumentBuilderFactory
                        .newInstance().newDocumentBuilder();
                final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
                final Document doc = builder.parse(conn.getInputStream());
                final XPath xpath = XPathFactory.newInstance().newXPath();
                final String numFound = xpath.evaluate("/response/result/@numFound", doc);
                if ("0".equals(numFound)) {
                    missing = true;
                } else {
                    final ArrayList<MavenArtifact> result = new ArrayList<MavenArtifact>();
                    result = new ArrayList<MavenArtifact>();
                    final NodeList docs = (NodeList) xpath.evaluate("/response/result/doc", doc, XPathConstants.NODESET);
                    for (int i = 0; i < docs.getLength(); i++) {
                        final String g = xpath.evaluate("./str[@name='g']", docs.item(i));
                        LOGGER.finest(String.format("GroupId: %s", g));
                        LOGGER.trace("GroupId: {}", g);
                        final String a = xpath.evaluate("./str[@name='a']", docs.item(i));
                        LOGGER.finest(String.format("ArtifactId: %s", a));
                        LOGGER.trace("ArtifactId: {}", a);
                        final String v = xpath.evaluate("./str[@name='v']", docs.item(i));
                        NodeList atts = (NodeList) xpath.evaluate("./arr[@name='ec']/str", docs.item(i), XPathConstants.NODESET);
                        boolean pomAvailable = false;
@@ -143,16 +145,12 @@ public class CentralSearch {
                            useHTTPS = true;
                        }
                    }

                    LOGGER.finest(String.format("Version: %s", v));
                    LOGGER.trace("Version: {}", v);
                    result.add(new MavenArtifact(g, a, v, jarAvailable, pomAvailable, useHTTPS));
                }

                return result;
            }
        } catch (Throwable e) {
            // Anything else is jacked up XML stuff that we really can't recover
            // from well
            // Anything else is jacked up XML stuff that we really can't recover from well
            throw new IOException(e.getMessage(), e);
        }

@@ -160,12 +158,10 @@ public class CentralSearch {
                throw new FileNotFoundException("Artifact not found in Central");
            }
        } else {
            final String msg = String.format("Could not connect to Central received response code: %d %s",
            LOGGER.debug("Could not connect to Central received response code: {} {}",
                    conn.getResponseCode(), conn.getResponseMessage());
            LOGGER.fine(msg);
            throw new IOException(msg);
            throw new IOException("Could not connect to Central");
        }

        return null;
        return result;
    }
}

@@ -1,6 +1,6 @@
/**
 *
 * Contains classes related to searching Maven Central.<br/><br/>
 * Contains classes related to searching Maven Central.<br><br>
 *
 * These are used to abstract Maven Central searching away from OWASP Dependency Check so they can be reused elsewhere.
 */

@@ -0,0 +1,110 @@
/*
 * This file is part of dependency-check-core.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
 */
package org.owasp.dependencycheck.data.composer;

/**
 * Represents a dependency (GAV, right now) parsed from a Composer file.
 *
 * @author colezlaw
 */
public final class ComposerDependency {

    /**
     * The group
     */
    private final String group;

    /**
     * The project
     */
    private final String project;

    /**
     * The version
     */
    private final String version;

    /**
     * Create a ComposerDependency from group, project, and version.
     *
     * @param group the group
     * @param project the project
     * @param version the version
     */
    public ComposerDependency(String group, String project, String version) {
        this.group = group;
        this.project = project;
        this.version = version;
    }

    /**
     * Get the group.
     *
     * @return the group
     */
    public String getGroup() {
        return group;
    }

    /**
     * Get the project.
     *
     * @return the project
     */
    public String getProject() {
        return project;
    }

    /**
     * Get the version.
     *
     * @return the version
     */
    public String getVersion() {
        return version;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof ComposerDependency)) {
            return false;
        }

        final ComposerDependency that = (ComposerDependency) o;

        if (group != null ? !group.equals(that.group) : that.group != null) {
            return false;
        }
        if (project != null ? !project.equals(that.project) : that.project != null) {
            return false;
        }
        return !(version != null ? !version.equals(that.version) : that.version != null);

    }

    @Override
    public int hashCode() {
        int result = group != null ? group.hashCode() : 0;
        result = 31 * result + (project != null ? project.hashCode() : 0);
        result = 31 * result + (version != null ? version.hashCode() : 0);
        return result;
    }
}
@@ -0,0 +1,57 @@
/*
 * This file is part of dependency-check-core.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
 */
package org.owasp.dependencycheck.data.composer;

/**
 * Represents an exception when handling a composer.json or composer.lock file. Generally used to wrap a downstream exception.
 *
 * @author colezlaw
 */
public class ComposerException extends RuntimeException {

    /**
     * The serial version UID for serialization.
     */
    private static final long serialVersionUID = 1L;

    /**
     * Creates a ComposerException with default message.
     */
    public ComposerException() {
        super();
    }

    /**
     * Creates a ComposerException with the specified message.
     *
     * @param message the exception message
     */
    public ComposerException(String message) {
        super(message);
    }

    /**
     * Creates a Composer exception with the specified message and cause.
     *
     * @param message the message
     * @param cause the underlying cause
     */
    public ComposerException(String message, Throwable cause) {
        super(message, cause);
    }
}
@@ -0,0 +1,124 @@
/*
 * This file is part of dependency-check-core.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
 */
package org.owasp.dependencycheck.data.composer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonException;
import javax.json.JsonObject;
import javax.json.JsonReader;
import javax.json.stream.JsonParsingException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;

/**
 * Parses a composer.lock file from an input stream. In a separate class so it can hopefully be injected.
 *
 * @author colezlaw
 */
public class ComposerLockParser {

    /**
     * The JsonReader for parsing JSON
     */
    private final JsonReader jsonReader;

    /**
     * The input stream we'll read
     */
    private final InputStream inputStream; // NOPMD - it gets set in the constructor, read later

    /**
     * The List of ComposerDependencies found
     */
    private final List<ComposerDependency> composerDependencies;

    /**
     * The LOGGER
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(ComposerLockParser.class);

    /**
     * Creates a ComposerLockParser from an InputStream; the JsonReader is constructed internally.
     *
     * @param inputStream the InputStream to parse
     */
    public ComposerLockParser(InputStream inputStream) {
        LOGGER.info("Creating a ComposerLockParser");
        this.inputStream = inputStream;
        this.jsonReader = Json.createReader(inputStream);
        this.composerDependencies = new ArrayList<ComposerDependency>();
    }

    /**
     * Process the input stream to create the list of dependencies.
     */
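    // Given a hypothetical composer.lock fragment such as
    //   {"packages": [{"name": "symfony/console", "version": "v2.7.3"}]}
    // process() splits the name on '/' into group "symfony" and project "console", strips the
    // leading "v" from the version, and records ComposerDependency("symfony", "console", "2.7.3").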
    public void process() {
        LOGGER.info("Beginning Composer lock processing");
        try {
            final JsonObject composer = jsonReader.readObject();
            if (composer.containsKey("packages")) {
                LOGGER.debug("Found packages");
                final JsonArray packages = composer.getJsonArray("packages");
                for (JsonObject pkg : packages.getValuesAs(JsonObject.class)) {
                    if (pkg.containsKey("name")) {
                        final String groupName = pkg.getString("name");
                        if (groupName.indexOf('/') >= 0 && groupName.indexOf('/') <= groupName.length() - 1) {
                            if (pkg.containsKey("version")) {
                                final String group = groupName.substring(0, groupName.indexOf('/'));
                                final String project = groupName.substring(groupName.indexOf('/') + 1);
                                String version = pkg.getString("version");
                                // Some version numbers begin with 'v', which doesn't end up matching CPEs
                                if (version.startsWith("v")) {
                                    version = version.substring(1);
                                }
                                LOGGER.debug("Got package {}/{}/{}", group, project, version);
                                composerDependencies.add(new ComposerDependency(group, project, version));
                            } else {
                                LOGGER.debug("Group/package {} does not have a version", groupName);
                            }
                        } else {
                            LOGGER.debug("Got a dependency with no name");
                        }
                    }
                }
            }
        } catch (JsonParsingException jsonpe) {
            throw new ComposerException("Error parsing stream", jsonpe);
        } catch (JsonException jsone) {
            throw new ComposerException("Error reading stream", jsone);
        } catch (IllegalStateException ise) {
            throw new ComposerException("Illegal state in composer stream", ise);
        } catch (ClassCastException cce) {
            throw new ComposerException("Not exactly composer lock", cce);
        }
    }

    /**
     * Gets the list of dependencies.
     *
     * @return the list of dependencies
     */
    public List<ComposerDependency> getDependencies() {
        return composerDependencies;
    }
}
@@ -0,0 +1,4 @@
/**
 * Model elements for PHP Composer files
 */
package org.owasp.dependencycheck.data.composer;
@@ -21,8 +21,6 @@ import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper;
@@ -40,16 +38,17 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.RAMDirectory;
import org.owasp.dependencycheck.data.lucene.FieldAnalyzer;
import org.owasp.dependencycheck.data.lucene.LuceneUtils;
import org.owasp.dependencycheck.data.lucene.SearchFieldAnalyzer;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.utils.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An in memory lucene index that contains the vendor/product combinations from the CPE (application) identifiers within the NVD
 * CVE data.
 * An in memory lucene index that contains the vendor/product combinations from
 * the CPE (application) identifiers within the NVD CVE data.
 *
 * @author Jeremy Long
 */
@@ -58,7 +57,7 @@ public final class CpeMemoryIndex {
    /**
     * The logger.
     */
    private static final Logger LOGGER = Logger.getLogger(CpeMemoryIndex.class.getName());
    private static final Logger LOGGER = LoggerFactory.getLogger(CpeMemoryIndex.class);
    /**
     * singleton instance.
     */
@@ -101,11 +100,11 @@ public final class CpeMemoryIndex {
    /**
     * The search field analyzer for the product field.
     */
    private SearchFieldAnalyzer productSearchFieldAnalyzer;
    private SearchFieldAnalyzer productFieldAnalyzer;
    /**
     * The search field analyzer for the vendor field.
     */
    private SearchFieldAnalyzer vendorSearchFieldAnalyzer;
    private SearchFieldAnalyzer vendorFieldAnalyzer;

    /**
     * Creates and loads data into an in memory index.
@@ -144,51 +143,20 @@ public final class CpeMemoryIndex {
        return openState;
    }

    /**
     * Creates the indexing analyzer for the CPE Index.
     *
     * @return the CPE Analyzer.
     */
    @SuppressWarnings("unchecked")
    private Analyzer createIndexingAnalyzer() {
        final Map fieldAnalyzers = new HashMap();
        fieldAnalyzers.put(Fields.DOCUMENT_KEY, new KeywordAnalyzer());
        return new PerFieldAnalyzerWrapper(new FieldAnalyzer(LuceneUtils.CURRENT_VERSION), fieldAnalyzers);
    }

    /**
     * Creates an Analyzer for searching the CPE Index.
     *
     * @return the CPE Analyzer.
     */
    @SuppressWarnings("unchecked")
    private Analyzer createSearchingAnalyzer() {
        final Map<String, Analyzer> fieldAnalyzers = new HashMap<String, Analyzer>();
        fieldAnalyzers.put(Fields.DOCUMENT_KEY, new KeywordAnalyzer());
        productSearchFieldAnalyzer = new SearchFieldAnalyzer(LuceneUtils.CURRENT_VERSION);
        vendorSearchFieldAnalyzer = new SearchFieldAnalyzer(LuceneUtils.CURRENT_VERSION);
        fieldAnalyzers.put(Fields.PRODUCT, productSearchFieldAnalyzer);
        fieldAnalyzers.put(Fields.VENDOR, vendorSearchFieldAnalyzer);
        productFieldAnalyzer = new SearchFieldAnalyzer(LuceneUtils.CURRENT_VERSION);
        vendorFieldAnalyzer = new SearchFieldAnalyzer(LuceneUtils.CURRENT_VERSION);
        fieldAnalyzers.put(Fields.PRODUCT, productFieldAnalyzer);
        fieldAnalyzers.put(Fields.VENDOR, vendorFieldAnalyzer);

        return new PerFieldAnalyzerWrapper(new FieldAnalyzer(LuceneUtils.CURRENT_VERSION), fieldAnalyzers);
    }

    /**
     * Saves a CPE IndexEntry into the Lucene index.
     *
     * @param vendor the vendor to index
     * @param product the product to index
     * @param indexWriter the index writer to write the entry into
     * @throws CorruptIndexException is thrown if the index is corrupt
     * @throws IOException is thrown if an IOException occurs
     */
    public void saveEntry(String vendor, String product, IndexWriter indexWriter) throws CorruptIndexException, IOException {
        final Document doc = new Document();
        final Field v = new TextField(Fields.VENDOR, vendor, Field.Store.YES);
        final Field p = new TextField(Fields.PRODUCT, product, Field.Store.YES);
        doc.add(v);
        doc.add(p);
        indexWriter.addDocument(doc);
        return new PerFieldAnalyzerWrapper(new KeywordAnalyzer(), fieldAnalyzers);
    }

    /**
@@ -203,7 +171,7 @@ public final class CpeMemoryIndex {
            try {
                indexReader.close();
            } catch (IOException ex) {
                LOGGER.log(Level.FINEST, null, ex);
                LOGGER.trace("", ex);
            }
            indexReader = null;
        }
@@ -226,16 +194,31 @@ public final class CpeMemoryIndex {
        Analyzer analyzer = null;
        IndexWriter indexWriter = null;
        try {
            analyzer = createIndexingAnalyzer();
            analyzer = createSearchingAnalyzer();
            final IndexWriterConfig conf = new IndexWriterConfig(LuceneUtils.CURRENT_VERSION, analyzer);
            indexWriter = new IndexWriter(index, conf);
            try {
                // Tip: reuse the Document and Fields for performance...
                // See "Re-use Document and Field instances" from
                // http://wiki.apache.org/lucene-java/ImproveIndexingSpeed
                final Document doc = new Document();
                final Field v = new TextField(Fields.VENDOR, Fields.VENDOR, Field.Store.YES);
                final Field p = new TextField(Fields.PRODUCT, Fields.PRODUCT, Field.Store.YES);
                doc.add(v);
                doc.add(p);

                final Set<Pair<String, String>> data = cve.getVendorProductList();
                for (Pair<String, String> pair : data) {
                    saveEntry(pair.getLeft(), pair.getRight(), indexWriter);
                    //todo figure out why there are null products
                    if (pair.getLeft() != null && pair.getRight() != null) {
                        v.setStringValue(pair.getLeft());
                        p.setStringValue(pair.getRight());
                        indexWriter.addDocument(doc);
                        resetFieldAnalyzer();
                    }
                }
            } catch (DatabaseException ex) {
                LOGGER.log(Level.FINE, null, ex);
                LOGGER.debug("", ex);
                throw new IndexException("Error reading CPE data", ex);
            }
        } catch (CorruptIndexException ex) {
@@ -263,14 +246,14 @@ public final class CpeMemoryIndex {
    }

    /**
     * Resets the searching analyzers
     * Resets the product and vendor field analyzers.
     */
    private void resetSearchingAnalyzer() {
        if (productSearchFieldAnalyzer != null) {
            productSearchFieldAnalyzer.clear();
    private void resetFieldAnalyzer() {
        if (productFieldAnalyzer != null) {
            productFieldAnalyzer.clear();
        }
        if (vendorSearchFieldAnalyzer != null) {
            vendorSearchFieldAnalyzer.clear();
        if (vendorFieldAnalyzer != null) {
            vendorFieldAnalyzer.clear();
        }
    }

@@ -281,14 +264,16 @@ public final class CpeMemoryIndex {
     * @param maxQueryResults the maximum number of documents to return
     * @return the TopDocs found by the search
     * @throws ParseException thrown when the searchString is invalid
     * @throws IOException is thrown if there is an issue with the underlying Index
     * @throws IOException is thrown if there is an issue with the underlying
     * Index
     */
    public TopDocs search(String searchString, int maxQueryResults) throws ParseException, IOException {
        if (searchString == null || searchString.trim().isEmpty()) {
            throw new ParseException("Query is null or empty");
        }
        LOGGER.debug(searchString);
        final Query query = queryParser.parse(searchString);
        return indexSearcher.search(query, maxQueryResults);
        return search(query, maxQueryResults);
    }

    /**
@@ -301,7 +286,7 @@ public final class CpeMemoryIndex {
     * @throws IOException thrown if there is an IOException
     */
    public TopDocs search(Query query, int maxQueryResults) throws CorruptIndexException, IOException {
        resetSearchingAnalyzer();
        resetFieldAnalyzer();
        return indexSearcher.search(query, maxQueryResults);
    }

@@ -48,7 +48,7 @@ public class IndexEntry implements Serializable {
     */
    public String getDocumentId() {
        if (documentId == null && vendor != null && product != null) {
            documentId = vendor + ":" + product;
            documentId = vendor + ':' + product;
        }
        return documentId;
    }

@@ -17,12 +17,14 @@
 */
package org.owasp.dependencycheck.data.cwe;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.Map;

/**
 *
@@ -33,7 +35,7 @@ public final class CweDB {
    /**
     * The Logger.
     */
    private static final Logger LOGGER = Logger.getLogger(CweDB.class.getName());
    private static final Logger LOGGER = LoggerFactory.getLogger(CweDB.class);

    /**
     * Empty private constructor as this is a utility class.
@@ -44,34 +46,34 @@ public final class CweDB {
    /**
     * A HashMap of the CWE data.
     */
    private static final HashMap<String, String> CWE = loadData();
    private static final Map<String, String> CWE = loadData();

    /**
     * Loads a HashMap containing the CWE data from a resource found in the jar.
     *
     * @return a HashMap of CWE data
     */
    private static HashMap<String, String> loadData() {
    private static Map<String, String> loadData() {
        ObjectInputStream oin = null;
        try {
            final String filePath = "data/cwe.hashmap.serialized";
            final InputStream input = CweDB.class.getClassLoader().getResourceAsStream(filePath);
            oin = new ObjectInputStream(input);
            @SuppressWarnings("unchecked")
            final HashMap<String, String> ret = (HashMap<String, String>) oin.readObject();
            final Map<String, String> ret = (HashMap<String, String>) oin.readObject();
            return ret;
        } catch (ClassNotFoundException ex) {
            LOGGER.log(Level.WARNING, "Unable to load CWE data. This should not be an issue.");
            LOGGER.log(Level.FINE, null, ex);
            LOGGER.warn("Unable to load CWE data. This should not be an issue.");
            LOGGER.debug("", ex);
        } catch (IOException ex) {
            LOGGER.log(Level.WARNING, "Unable to load CWE data due to an IO Error. This should not be an issue.");
            LOGGER.log(Level.FINE, null, ex);
            LOGGER.warn("Unable to load CWE data due to an IO Error. This should not be an issue.");
            LOGGER.debug("", ex);
        } finally {
            if (oin != null) {
                try {
                    oin.close();
                } catch (IOException ex) {
                    LOGGER.log(Level.FINEST, null, ex);
                    LOGGER.trace("", ex);
                }
            }
        }

@@ -37,7 +37,7 @@ public class CweHandler extends DefaultHandler {
    /**
     * Returns the HashMap of CWE entries (CWE-ID, Full CWE Name).
     *
     * @return a HashMap of CWE entries <String, String>
     * @return a HashMap of CWE entries <String, String>
     */
    public HashMap<String, String> getCwe() {
        return cwe;
@@ -29,11 +29,15 @@ import org.apache.lucene.util.Version;

/**
 * <p>
 * A Lucene Analyzer that utilizes the WhitespaceTokenizer, WordDelimiterFilter, LowerCaseFilter, and StopFilter. The intended
 * purpose of this Analyzer is to index the CPE fields vendor and product.</p>
 * A Lucene Analyzer that utilizes the WhitespaceTokenizer, WordDelimiterFilter,
 * LowerCaseFilter, and StopFilter. The intended purpose of this Analyzer is to
 * index the CPE fields vendor and product.</p>
 *
 * @author Jeremy Long
 * @deprecated the field analyzer should not be used, instead use the
 * SearchFieldAnalyzer so that the token analyzing filter is used.
 */
@Deprecated
public class FieldAnalyzer extends Analyzer {

    /**

@@ -77,6 +77,7 @@ public final class LuceneUtils {
            case '*':
            case '?':
            case ':':
            case '/':
            case '\\': //it is supposed to fall through here
                buf.append('\\');
            default:
@@ -93,17 +94,12 @@ public final class LuceneUtils {
     * @return the escaped text.
     */
    public static String escapeLuceneQuery(final CharSequence text) {

        if (text == null) {
            return null;
        }

        int size = text.length();
        size = size >> 1;
        final int size = text.length() << 1;
        final StringBuilder buf = new StringBuilder(size);

        appendEscapedLuceneQuery(buf, text);

        return buf.toString();
    }
}

@@ -27,7 +27,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 * <p>
 * Takes a TokenStream and adds additional tokens by concatenating pairs of words.</p>
 * <p>
 * <b>Example:</b> "Spring Framework Core" -> "Spring SpringFramework Framework FrameworkCore Core".</p>
 * <b>Example:</b> "Spring Framework Core" -> "Spring SpringFramework Framework FrameworkCore Core".</p>
 *
 * @author Jeremy Long
 */
@@ -75,8 +75,8 @@ public final class TokenPairConcatenatingFilter extends TokenFilter {
    }

    /**
     * Increments the underlying TokenStream and sets CharTermAttributes to construct an expanded set of tokens by
     * concatenating tokens with the previous token.
     * Increments the underlying TokenStream and sets CharTermAttributes to construct an expanded set of tokens by concatenating
     * tokens with the previous token.
     *
     * @return whether or not we have hit the end of the TokenStream
     * @throws IOException is thrown when an IOException occurs
@@ -112,8 +112,7 @@ public final class TokenPairConcatenatingFilter extends TokenFilter {

    /**
     * <p>
     * Resets the Filter and clears any internal state data that may have been left-over from previous uses of the
     * Filter.</p>
     * Resets the Filter and clears any internal state data that may have been left-over from previous uses of the Filter.</p>
     * <p>
     * <b>If this Filter is re-used this method must be called between uses.</b></p>
     */
@@ -121,4 +120,46 @@ public final class TokenPairConcatenatingFilter extends TokenFilter {
        previousWord = null;
        words.clear();
    }

    /**
     * Standard hash code implementation.
     *
     * @return the hash code
     */
    @Override
    public int hashCode() {
        int hash = 3;
        hash = 31 * hash + (this.termAtt != null ? this.termAtt.hashCode() : 0);
        hash = 31 * hash + (this.previousWord != null ? this.previousWord.hashCode() : 0);
        hash = 31 * hash + (this.words != null ? this.words.hashCode() : 0);
        return hash;
    }

    /**
     * Standard equals implementation.
     *
     * @param obj the object to compare
     * @return true if the objects are equal; otherwise false.
     */
    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final TokenPairConcatenatingFilter other = (TokenPairConcatenatingFilter) obj;
        if (this.termAtt != other.termAtt && (this.termAtt == null || !this.termAtt.equals(other.termAtt))) {
            return false;
        }
        if ((this.previousWord == null) ? (other.previousWord != null) : !this.previousWord.equals(other.previousWord)) {
            return false;
        }
        if (this.words != other.words && (this.words == null || !this.words.equals(other.words))) {
            return false;
        }
        return true;
    }

}

@@ -21,25 +21,27 @@ import java.io.IOException;
import java.net.MalformedURLException;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.owasp.dependencycheck.utils.UrlStringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * <p>
 * Takes a TokenStream and splits or adds tokens to correctly index version numbers.</p>
 * <p>
 * <b>Example:</b> "3.0.0.RELEASE" -> "3 3.0 3.0.0 RELEASE 3.0.0.RELEASE".</p>
 * <b>Example:</b> "3.0.0.RELEASE" -> "3 3.0 3.0.0 RELEASE 3.0.0.RELEASE".</p>
 *
 * @author Jeremy Long
 */
public final class UrlTokenizingFilter extends AbstractTokenizingFilter {

    /**
     * The logger.
     */
    private static final Logger LOGGER = Logger.getLogger(UrlTokenizingFilter.class.getName());
    private static final Logger LOGGER = LoggerFactory.getLogger(UrlTokenizingFilter.class);

    /**
     * Constructs a new VersionTokenizingFilter.
     *
@@ -50,8 +52,8 @@ public final class UrlTokenizingFilter extends AbstractTokenizingFilter {
    }

    /**
     * Increments the underlying TokenStream and sets CharTermAttributes to construct an expanded set of tokens by
     * concatenating tokens with the previous token.
     * Increments the underlying TokenStream and sets CharTermAttributes to construct an expanded set of tokens by concatenating
     * tokens with the previous token.
     *
     * @return whether or not we have hit the end of the TokenStream
     * @throws IOException is thrown when an IOException occurs
@@ -70,7 +72,7 @@ public final class UrlTokenizingFilter extends AbstractTokenizingFilter {
            final List<String> data = UrlStringUtils.extractImportantUrlData(part);
            tokens.addAll(data);
        } catch (MalformedURLException ex) {
            LOGGER.log(Level.FINE, "error parsing " + part, ex);
            LOGGER.debug("error parsing {}", part, ex);
            tokens.add(part);
        }
    } else {

@@ -94,13 +94,13 @@ public class MavenArtifact {
    }
    if (jarAvailable) {
        //org/springframework/spring-core/3.2.0.RELEASE/spring-core-3.2.0.RELEASE.pom
        this.artifactUrl = base + groupId.replace('.', '/') + "/" + artifactId + "/"
                + version + "/" + artifactId + "-" + version + ".jar";
        this.artifactUrl = base + groupId.replace('.', '/') + '/' + artifactId + '/'
                + version + '/' + artifactId + '-' + version + ".jar";
    }
    if (pomAvailable) {
        //org/springframework/spring-core/3.2.0.RELEASE/spring-core-3.2.0.RELEASE.pom
        this.pomUrl = base + groupId.replace('.', '/') + "/" + artifactId + "/"
                + version + "/" + artifactId + "-" + version + ".pom";
        this.pomUrl = base + groupId.replace('.', '/') + '/' + artifactId + '/'
                + version + '/' + artifactId + '-' + version + ".pom";
    }
}

Some files were not shown because too many files have changed in this diff.