Compare commits


324 Commits

Author SHA1 Message Date
stevespringett
8206aa9bfd Added additional check when submitting an invalid payload to nsp. Corrected unit test. 2017-05-23 11:08:54 -05:00
Jeremy Long
dd4a1f2d56 updated for code coverage 2017-05-23 10:44:00 -05:00
Jeremy Long
b0f9935fcb updated to resolve issue #696 2017-05-23 10:44:00 -05:00
Jeremy Long
122c78648a updated code to better handle TLS errors 2017-05-21 18:04:26 -04:00
Jeremy Long
d457fd1452 fixed copyright 2017-05-21 07:45:27 -04:00
Jeremy Long
454a875593 Merge branch 'master' of https://github.com/stevespringett/DependencyCheck into stevespringett-master 2017-05-21 07:29:05 -04:00
Jeremy Long
6b5cfe1560 Merge pull request #735 from jeremylong/issue_718
Issue 718
2017-05-20 06:07:44 -04:00
Jeremy Long
f584be7f44 Merge pull request #734 from jeremylong/updateJsonReport
Update json report
2017-05-20 06:06:39 -04:00
Jeremy Long
b6a7d0ee9b Merge pull request #738 from sethjackson/suppress-unittestframework
Suppress UnitTestFramework false positives.
2017-05-20 06:06:13 -04:00
Seth Jackson
1402b20a6b Suppress UnitTestFramework false positives. 2017-05-18 13:42:20 -04:00
Jeremy Long
898412eaea default to remove auth schemas for proxy connections - added a property to disable this functionality. Fix for issue #718 2017-05-14 17:19:26 -04:00
Jeremy Long
7753b6f3c1 Merge branch 'master' into issue_718 2017-05-14 13:17:01 -04:00
Jeremy Long
ea93f315d5 updated test case for new report format 2017-05-14 09:27:51 -04:00
Jeremy Long
7f9cf5bb14 Merge branch 'master' into updateJsonReport 2017-05-14 09:26:27 -04:00
Jeremy Long
c4fe921670 updated report template 2017-05-14 09:25:56 -04:00
Jeremy Long
693c08cfd3 Merge pull request #731 from jeremylong/issue_729
Issue 729
2017-05-14 09:24:15 -04:00
Jeremy Long
9776f38b97 Merge pull request #726 from sethjackson/suppress-ef
Suppress EntityFramework false positives.
2017-05-14 08:52:40 -04:00
Jeremy Long
ff91d7cda7 Merge pull request #727 from sethjackson/patch-1
Update Windows development usage.
2017-05-14 08:51:56 -04:00
Jeremy Long
e218b8ad70 added attempt to resolve system scoped dependency with test cases 2017-05-14 07:45:55 -04:00
Jeremy Long
555b1dc1cc resolution for enhancement #729 2017-05-13 08:40:08 -04:00
Jeremy Long
523eed9319 resolved issue #686 - reports are generated even if no dependencies were analyzed 2017-05-13 08:38:43 -04:00
Jeremy Long
9c7f6daf75 updated groovy version to allow the use of newer APIs in the build scripts 2017-05-13 08:37:22 -04:00
Jeremy Long
f2037b3fab Merge pull request #723 from jeremylong/csv
Csv Report
2017-05-13 06:47:09 -04:00
Seth Jackson
c545285f7f Update Windows development usage.
So that the example invocation works in PowerShell and cmd.
2017-05-09 10:57:58 -04:00
Seth Jackson
290cd0507e Suppress EntityFramework false positives. 2017-05-09 10:32:41 -04:00
Jeremy Long
ee72f172d2 fix codacy issue 2017-05-08 21:41:05 -04:00
Jeremy Long
e721dac389 implemented CSV reports per #675 2017-05-08 07:43:39 -04:00
Jeremy Long
4c15993a44 updated documentation for issue #675 and pr #716 2017-05-08 07:42:01 -04:00
Jeremy Long
8fc42078c7 checkstyle corrections, minor restructuring, etc. 2017-05-07 18:40:25 -04:00
Jeremy Long
06d6fe4bd6 updated #716 2017-05-07 17:47:24 -04:00
Jeremy Long
96cda322a3 fixed coverity reported bug 2017-05-07 17:11:46 -04:00
Jeremy Long
0c991af58d fixed logging for #710 2017-05-07 16:52:20 -04:00
Jeremy Long
3677b5f429 Merge pull request #722 from jeremylong/falsepositives
Fix False Positives
2017-05-07 10:40:56 -04:00
Jeremy Long
fd0abd9066 updated test cases for FP 2017-05-07 10:14:50 -04:00
Jeremy Long
abb10600f7 fix for PR #716 - corrected malformed json generation 2017-05-07 10:14:09 -04:00
Jeremy Long
c9a6bb4b16 Merge pull request #721 from arno01/trivial
fix readme
2017-05-07 08:59:17 -04:00
Jeremy Long
9ff0042527 Merge branch 'master' into falsepositives 2017-05-07 08:59:16 -04:00
Andrey Arapov
0504f9c4cc fix readme 2017-05-07 10:07:28 +02:00
Jeremy Long
7b7861206b fixes for #700, #713, and #699 2017-05-06 19:25:54 -04:00
Jeremy Long
6f9207fe25 merged 2017-05-06 16:47:17 -04:00
Jeremy Long
de6cd3621b updated docker documentation 2017-05-06 16:44:04 -04:00
Jeremy Long
04b506662f Merge pull request #719 from hochzehn/improve-dockerfile
Improve dockerfile
2017-05-06 16:10:48 -04:00
Jeremy Long
c1250fc53e Merge branch 'ThomasGoeytil-json-support' 2017-05-06 15:13:06 -04:00
Jeremy Long
c2f521f528 updated to support changes from #682 2017-05-06 15:12:27 -04:00
Jeremy Long
5fde08b001 Merge branch 'json-support' of https://github.com/ThomasGoeytil/DependencyCheck into ThomasGoeytil-json-support 2017-05-06 14:51:30 -04:00
Jeremy Long
69d621b981 Merge pull request #720 from hochzehn/fix-cli-doc
Fix argument name for CLI.
2017-05-06 14:48:15 -04:00
Jeremy Long
4134fb3fef fixed sorting issue and resolved enhancement request #515 2017-05-06 12:05:30 -04:00
Jeremy Long
81b2b966ba added additional check to add proxy credentials 2017-05-06 10:48:31 -04:00
janpapenbrock
ae65ebe687 Fix argument name for CLI. 2017-05-06 11:11:13 +02:00
janpapenbrock
03f84fa77e Improve docker usage documentation. 2017-05-06 10:42:24 +02:00
janpapenbrock
ff6b3dbd4f Refactor to make URLs easier to change and reduce repetition. 2017-05-06 10:30:29 +02:00
janpapenbrock
bf7b8ccce8 Use specific user name to easily identify who is doing things here. 2017-05-06 10:20:02 +02:00
janpapenbrock
57b1895b5e Refactor user name into variable. 2017-05-06 10:19:27 +02:00
janpapenbrock
8edf65186f Clean up after download to reduce image size. 2017-05-06 10:06:48 +02:00
janpapenbrock
13d781d2b1 Re-order arguments in likeliness of being commented out. 2017-05-06 10:06:23 +02:00
janpapenbrock
7c1c99f5f9 Use script without arguments as entry point to allow running any command on the resulting container without having to override entry point. 2017-05-06 10:06:23 +02:00
janpapenbrock
0a02f43b8c Refactor Dockerfile for readability. 2017-05-06 10:06:22 +02:00
Jeremy Long
d4e50d9560 Merge pull request #717 from jeremylong/jacoco
Jacoco
2017-05-05 15:29:27 -04:00
Jeremy Long
5681e0bfdf fixed test cases 2017-05-05 15:17:39 -04:00
Jeremy Long
55bfe4cad8 Merge branch 'master' into jacoco 2017-05-05 14:41:27 -04:00
Jeremy Long
d726792be6 Merge branch 'Prakhash-Issues#665_implement' 2017-05-05 14:38:48 -04:00
Jeremy Long
5c21451760 updated to use the new schema 2017-05-05 14:15:21 -04:00
Jeremy Long
b3736ac13a updated template optional add the GAV 2017-05-05 14:15:04 -04:00
Jeremy Long
a4899de956 added new schema to support the addition of the GAV 2017-05-05 14:14:17 -04:00
Jeremy Long
e3ca70ba0d reverted changes in PR to old schemas 2017-05-05 14:13:35 -04:00
Jeremy Long
bdace1b1b7 Merge branch 'Issues#665_implement' of https://github.com/Prakhash/DependencyCheck into Prakhash-Issues#665_implement 2017-05-04 06:48:31 -04:00
Jeremy Long
567022a9b7 updated so that jacoco results can be sent to codacy 2017-05-03 06:28:30 -04:00
Thomas Gøytil
83262afd13 Added support to generate report in JSON format 2017-05-01 18:11:53 +02:00
Jeremy Long
3ff838a2cc Merge branch 'master' into jacoco 2017-04-30 17:20:23 -04:00
Jeremy Long
40b5c45ef6 Merge pull request #705 from jeremylong/issue690_threadsafe
PR #705 contains several bug fixes.
2017-04-30 17:16:12 -04:00
Jeremy Long
d2a8645dd4 expanded test cases to include additional dependencies 2017-04-29 09:39:30 -04:00
Jeremy Long
4543835a0d reset threadcount to zero as the plugin is not yet threadsafe 2017-04-29 09:33:52 -04:00
Jeremy Long
c0f41c461b reverted the threadsafe flag as the plugin is not threadsafe 2017-04-29 09:32:58 -04:00
Jeremy Long
116ef264e1 updated failing test case to expect the correct exception 2017-04-26 08:59:53 -04:00
Jeremy Long
1371dacdaa expanded test case to identify/fix threading issue 2017-04-26 08:50:39 -04:00
Jeremy Long
d252d0f29f fixed threading issue 2017-04-26 08:50:06 -04:00
Jeremy Long
3786f6ebc7 removed un-needed code from invoker build scripts 2017-04-26 07:30:33 -04:00
Jeremy Long
6813427867 updated invoker plugin to use 2 threads 2017-04-26 07:30:01 -04:00
stevespringett
9da95e592c Added NSP Analyzer Support 2017-04-26 00:40:15 -05:00
Jeremy Long
f94cf106a6 re-route invoker logs so the results can be viewed in travis 2017-04-25 08:01:09 -04:00
Jeremy Long
a67e421a5d nop 2017-04-23 08:44:24 -04:00
Jeremy Long
865db1b6c3 nop 2017-04-23 08:04:54 -04:00
Jeremy Long
31d7379a39 minor updates and added documentation 2017-04-23 07:22:53 -04:00
Jeremy Long
f473e63a61 added test case and added locking mechanism so only one update can run at any given time 2017-04-23 07:05:31 -04:00
Jeremy Long
c9ee55863f fixed merge 2017-04-12 10:42:02 -04:00
Jeremy Long
238a96184a Merge branch 'master' into issue690_threadsafe 2017-04-12 10:28:16 -04:00
Jeremy Long
44ddad8101 updated for maven thread safety 2017-04-12 10:24:51 -04:00
Jeremy Long
afa47f7dfc Merge pull request #695 from jeremylong/fix_coverity_finding
Fix coverity findings
2017-04-12 10:24:24 -04:00
Jeremy Long
f289bcd285 fixed false positive per issue #691 2017-04-04 10:03:19 -04:00
Jeremy Long
c7adb1bb65 fix for issue #684 2017-04-04 09:58:19 -04:00
Jeremy Long
4bbc5e27b5 updated 2017-04-02 13:24:07 -04:00
Jeremy Long
c877ade004 updated 2017-04-02 13:22:35 -04:00
Jeremy Long
ebd8996ad5 fixed typo 2017-04-02 13:12:45 -04:00
Jeremy Long
f31313d021 added PR template 2017-04-02 13:08:58 -04:00
Jeremy Long
6936dac9b4 updated template 2017-04-02 13:08:42 -04:00
Jeremy Long
4b2f6832fe added contributing guidelines 2017-04-02 12:58:17 -04:00
Jeremy Long
c622ff2b19 converted to using jacoco for code coverage 2017-04-02 08:16:21 -04:00
Jeremy Long
35d0f21c47 fix codacy issues 2017-04-01 10:02:24 -04:00
Jeremy Long
3066d286c5 added logo no text 2017-04-01 09:58:32 -04:00
Jeremy Long
18564e8e86 fixed merge issue 2017-04-01 08:33:13 -04:00
Jeremy Long
832cbabc7d added bh arsenal badges 2017-03-31 17:28:20 -04:00
Jeremy Long
8b764d5e17 added bh arsenal badges 2017-03-31 17:24:48 -04:00
Jeremy Long
e2a1a59543 fixed issues related to making the cveDb a singleton 2017-03-31 06:58:37 -04:00
Jeremy Long
cedb8d3db1 Merge pull request #689 from jwilk/mailto
Fix mailto URIs
2017-03-25 09:13:11 -04:00
Jeremy Long
539bd754df fixed merge 2017-03-25 09:10:41 -04:00
Jeremy Long
109f5c22e9 initial fix for CveDB singleton 2017-03-25 09:06:34 -04:00
Jeremy Long
a23d127c62 initial fix for CveDB singleton 2017-03-25 09:05:51 -04:00
Jakub Wilk
6825304100 fix mailto URIs
As per RFC 6068, there should be no slashes after "mailto:".
2017-03-24 16:09:23 +01:00
Jeremy Long
947499726a initial attempt 2017-03-15 07:36:28 -04:00
Jeremy Long
97b2e1a4da added documentation per issue https://github.com/jeremylong/dependency-check-gradle/issues/38 2017-03-14 09:06:17 -04:00
Prakhash
2b04c6a7dd changed the coordinates according to the pom file details 2017-03-13 15:52:50 +05:30
Jeremy Long
3bb6553111 Merge pull request #681 from jeremylong/java7_updates_and_cleanup
Java7 updates and cleanup
2017-03-12 19:35:12 -04:00
Jeremy Long
371dba948d checkstyle corrections 2017-03-12 18:03:27 -04:00
Jeremy Long
675349c06f fixed broken test case 2017-03-12 15:59:23 -04:00
Jeremy Long
7a88981aa4 updated to use try with resouces 2017-03-12 13:22:27 -04:00
Jeremy Long
626f6c3de2 updated to use IOUtils to copy between streams 2017-03-12 13:21:59 -04:00
Jeremy Long
5540397456 Merge pull request #680 from jeremylong/cvedb
Cvedb
2017-03-11 14:45:49 -05:00
Jeremy Long
69c6dd40a1 fixed synchronization on local variable 2017-03-11 14:24:46 -05:00
Jeremy Long
5ed6e838fc spelling corrections 2017-03-11 14:15:24 -05:00
Jeremy Long
1d32a6012a fixed possible NPE 2017-03-11 13:28:21 -05:00
Jeremy Long
b157049a7e use try with resources 2017-03-11 13:27:40 -05:00
Jeremy Long
8ea6b08a0a use try with resources 2017-03-11 13:26:56 -05:00
Jeremy Long
8856ff04ec code cleanup and java 7 exception handling improvements 2017-03-11 12:46:58 -05:00
Jeremy Long
8bfbd11a51 added test cases 2017-03-11 12:46:06 -05:00
Jeremy Long
abd843d281 simplified conditional 2017-03-11 11:11:31 -05:00
Jeremy Long
c54f9b1144 fixed throws in finally and converted to try with resources 2017-03-11 11:11:02 -05:00
Jeremy Long
318f3e14dd removed unused code for batching 2017-03-11 11:10:21 -05:00
Jeremy Long
46f227e92e updated and added test cases 2017-03-11 11:09:31 -05:00
Jeremy Long
a7b6f37503 suppressed another false positive 2017-03-10 16:52:32 -05:00
Jeremy Long
a61bba2f72 code cleanup 2017-03-10 16:40:22 -05:00
Jeremy Long
dfc6d952bd codacy cleanup 2017-03-10 15:38:00 -05:00
Jeremy Long
046f4605f9 java7 updates and cleanup 2017-03-10 15:30:48 -05:00
Jeremy Long
32590ab7ff Merge branch 'master' of github.com:jeremylong/DependencyCheck into cvedb 2017-03-10 14:00:53 -05:00
Jeremy Long
efeb084e57 added suppression rule for jcore per issue #679 2017-03-10 06:51:53 -05:00
Jeremy Long
03ec3142c3 updated threadpool size 2017-03-07 06:37:21 -05:00
Jeremy Long
679df936e7 changed CveDB to a singeton 2017-03-07 05:49:12 -05:00
Jeremy Long
5ed5764ab5 Merge branch 'stefanneuhaus-misc_performance_tweaking_and_cleanup' 2017-03-04 14:29:47 -05:00
Jeremy Long
d588092727 Merge branch 'misc_performance_tweaking_and_cleanup' of https://github.com/stefanneuhaus/DependencyCheck into stefanneuhaus-misc_performance_tweaking_and_cleanup 2017-03-04 14:20:01 -05:00
Jeremy Long
295ba0679d Merge branch 'aikebah-master' 2017-03-04 14:02:40 -05:00
Jeremy Long
bcdf26c88d Merge branch 'master' of https://github.com/aikebah/DependencyCheck into aikebah-master 2017-03-04 13:51:28 -05:00
Jeremy Long
d6e092bfa2 Merge pull request #676 from jwilk/spelling
Fix typos
2017-03-04 13:50:09 -05:00
Jeremy Long
388c1b5af1 java 7 updates 2017-03-04 13:47:53 -05:00
Jakub Wilk
717aea9a03 fix typos 2017-03-02 23:07:35 +01:00
Stefan Neuhaus
4951ee5a62 Cleanup: Codacy conformance 2017-02-28 08:23:57 +01:00
Jeremy Long
666150cf7f updated per issue #672 2017-02-27 08:35:07 -05:00
Jeremy Long
d8290c0c45 Merge pull request #674 from jeremylong/Prakhash-reportmodifier1
Prakhash reportmodifier1
2017-02-26 10:02:01 -05:00
Jeremy Long
e363e8109b added suppression notes 2017-02-26 09:16:53 -05:00
Jeremy Long
b228d08843 removed typo 2017-02-26 09:16:16 -05:00
Jeremy Long
3e08437808 updated to work with new schema 2017-02-26 07:52:02 -05:00
Jeremy Long
e0d5651b75 updated to add notes 2017-02-26 07:50:35 -05:00
Jeremy Long
59e29b7afe Merge branch 'notes' into Prakhash-reportmodifier1 2017-02-25 16:09:21 -05:00
Jeremy Long
d180208e34 interim 2017-02-25 16:08:44 -05:00
Jeremy Long
0ce1ef596c Merge branch 'reportmodifier1' of https://github.com/Prakhash/DependencyCheck into Prakhash-reportmodifier1 2017-02-25 15:55:12 -05:00
Jeremy Long
5f7486f851 updates to 673 2017-02-25 15:53:12 -05:00
Jeremy Long
03559fd106 added more suppression rules for false positives 2017-02-25 06:31:34 -05:00
Jeremy Long
d08357a1c2 fixed typo 2017-02-24 07:10:27 -05:00
Jeremy Long
c1cb87ebde Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-02-24 06:30:11 -05:00
Jeremy Long
82fd1cf4d7 additional fp added 2017-02-24 06:30:02 -05:00
Prakhash
a87391e609 formatting issues reported by the codacy is fixed 2017-02-24 14:54:45 +05:30
Prakhash
3071cfd7be formatting issues reported by the codacy is done 2017-02-24 14:43:46 +05:30
Prakhash
583c2d34d3 schema changes are added with global schema 2017-02-24 14:23:56 +05:30
Prakhash
c9640fbf04 schema file is modified with notes attribute' 2017-02-24 12:15:21 +05:30
Prakhash
192d1de944 name space changes are reverted to the original 2017-02-24 12:06:51 +05:30
Prakhash
aa0314c840 report is modified with the notes element 2017-02-24 11:03:10 +05:30
Hans Aikema
0171b859c6 Merge branch 'master' of https://github.com/jeremylong/DependencyCheck 2017-02-23 11:22:25 +01:00
Jeremy Long
d267e14b73 Merge pull request #666 from colezlaw/grokassembly
Updated GrokAssembly and added config
2017-02-22 06:32:45 -05:00
Hans Aikema
79e63f4067 Merge remote-tracking branch 'upstream/master' 2017-02-21 17:35:30 +01:00
Hans Aikema
72d7af5291 Enable running DependencyCheck on Maven 3.0 2017-02-21 17:31:46 +01:00
Stefan Neuhaus
0e313d1910 Fix issue 2017-02-21 17:06:02 +01:00
Stefan Neuhaus
6841f9a009 Fix typo in directory name 2017-02-21 16:17:36 +01:00
Stefan Neuhaus
caeec68999 Refactor CveDB
- make class thread-safe and declare so (also DatabaseProperties)
- prepared statements represented by enum: performance gain, eases experiments when tuning for performance
- minor changes/cleanup/code style
2017-02-21 14:32:24 +01:00
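
The "prepared statements represented by enum" idea from the commit above can be sketched as follows. The enum constants, SQL, and table names are illustrative stand-ins rather than CveDB's actual statements; the in-memory H2 database in main() is only there so the sketch runs standalone (H2 is the database dependency-check already uses for the CVE data).

```
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.EnumMap;
import java.util.Map;

/** Illustrative statement keys; the real CveDB uses different SQL and names. */
enum DbStatement {
    SELECT_CPE_ENTRIES("SELECT id, cpe FROM cpeEntry WHERE vendor = ? AND product = ?"),
    INSERT_REFERENCE("INSERT INTO vulnReference (cveid, name, url) VALUES (?, ?, ?)");

    private final String sql;

    DbStatement(String sql) {
        this.sql = sql;
    }

    String sql() {
        return sql;
    }
}

/** Minimal sketch: prepare every statement once per connection and reuse it. */
public class StatementCache {

    private final Map<DbStatement, PreparedStatement> statements =
            new EnumMap<DbStatement, PreparedStatement>(DbStatement.class);

    public StatementCache(Connection connection) throws SQLException {
        for (DbStatement key : DbStatement.values()) {
            statements.put(key, connection.prepareStatement(key.sql()));
        }
    }

    public PreparedStatement get(DbStatement key) {
        return statements.get(key);
    }

    public static void main(String[] args) throws SQLException {
        // In-memory H2 database and throwaway tables, purely so the sketch runs.
        Connection conn = DriverManager.getConnection("jdbc:h2:mem:sketch");
        conn.createStatement().execute(
                "CREATE TABLE cpeEntry (id INT, cpe VARCHAR(255), vendor VARCHAR(255), product VARCHAR(255))");
        conn.createStatement().execute(
                "CREATE TABLE vulnReference (cveid VARCHAR(20), name VARCHAR(255), url VARCHAR(255))");
        StatementCache cache = new StatementCache(conn);
        System.out.println(cache.get(DbStatement.SELECT_CPE_ENTRIES));
        conn.close();
    }
}
```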
Stefan Neuhaus
541915a5a7 Minor improvements in NvdCve20Handler
- use addAll() instead of iterating and adding single instances for VulnerableSoftware
- define String constants for certain XML attributes: code style, could facilitate JVM optimizations
2017-02-21 14:30:22 +01:00
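
The addAll() point in miniature; the list contents below are made-up CPE strings, not data from the handler.

```
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class AddAllExample {
    public static void main(String[] args) {
        List<String> parsed = new ArrayList<String>();
        parsed.add("cpe:/a:example:library:1.0");
        parsed.add("cpe:/a:example:library:1.1");

        Set<String> vulnerableSoftware = new HashSet<String>();

        // Element-by-element loop: works, but hides the intent.
        for (String cpe : parsed) {
            vulnerableSoftware.add(cpe);
        }

        // Single bulk operation: states the intent directly.
        vulnerableSoftware.addAll(parsed);

        System.out.println(vulnerableSoftware.size() + " entries");
    }
}
```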
Stefan Neuhaus
cb75ab8cca Use Apache StringUtils.split(String, char) instead of String.split(String)
String.split() uses a regex pattern for splitting. As we simply need to split on a single fixed char using the Apache StringUtils is preferable.
2017-02-21 14:23:13 +01:00
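
A small illustration of that rationale. The commons-lang3 import is an assumption; the same StringUtils.split(String, char) method also exists in the older commons-lang.

```
import org.apache.commons.lang3.StringUtils;

public class SplitExample {
    public static void main(String[] args) {
        String entry = "cpe:/a:apache:commons_lang:3.5";

        // String.split() takes a regular expression, which is more machinery
        // than is needed to split on one fixed character (the rationale given
        // in the commit above).
        String[] viaRegex = entry.split(":");

        // StringUtils.split(String, char) scans for the literal character,
        // with no regex involved.
        String[] viaChar = StringUtils.split(entry, ':');

        System.out.println(viaRegex.length + " tokens vs " + viaChar.length + " tokens");
    }
}
```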
Stefan Neuhaus
0f3845b16d cleanup: remove unused return value 2017-02-21 14:12:12 +01:00
Stefan Neuhaus
dd7128095e add license information to dbStatements_oracle.properties 2017-02-21 14:02:34 +01:00
Jeremy Long
1367be510c correct fix for issue #660; correctly handle organization from the pom 2017-02-21 07:02:05 -05:00
Jeremy Long
2ea0eb3c64 correct fix for issue #660; correctly handle organization from the pom 2017-02-21 06:40:02 -05:00
Jeremy Long
a5990ea6f3 update to #657 to allow sorted vulnerable software in repots; also, sorting an array list is faster then building a treeset 2017-02-21 06:38:31 -05:00
colezlaw
67921f5f3d Updated GrokAssembly and added config 2017-02-20 15:35:52 -05:00
Jeremy Long
d31e0453bd fix for #660 2017-02-20 07:01:05 -05:00
stevespringett
ae21424a30 Closes #664 2017-02-18 21:23:19 -06:00
Jeremy Long
3577949425 codacy recommended updates 2017-02-17 19:03:53 -05:00
Jeremy Long
0d72471502 fixed synchronization per coverity 2017-02-17 18:00:40 -05:00
Jeremy Long
17590a6d38 re-ordered badges 2017-02-17 17:58:36 -05:00
Jeremy Long
d9dcc8cc2d fixed UTF-8 BOM bug 2017-02-17 17:18:10 -05:00
Jeremy Long
df1ee5e8c6 reverted dependency-tree to resolve bug 2017-02-17 17:17:54 -05:00
Jeremy Long
3c68ebece7 plugin/dependency upgrades 2017-02-17 14:35:51 -05:00
Jeremy Long
c9e8e6cf0e codacy recommended updates 2017-02-17 14:20:43 -05:00
Jeremy Long
36945fb84d added codacy badge 2017-02-17 13:05:12 -05:00
Jeremy Long
960a2e27ab formating and codacy recommended updates 2017-02-17 12:59:17 -05:00
Jeremy Long
71724461a9 Merge branch 'stefanneuhaus-accelerate-db-update_parallelize-fetching-lastmodification-timestamps' 2017-02-17 12:14:02 -05:00
Jeremy Long
ae5a95bfb3 merge #662 2017-02-17 12:13:45 -05:00
Jeremy Long
d6c9fea354 formating and codacy recommended updates 2017-02-17 12:03:11 -05:00
Jeremy Long
d6f1351f6b Merge pull request #657 from stefanneuhaus/accelerate-db-update_get-rid-of-treeset
Accelerate CVE DB update: replace TreeSets in Vulnerability by HashSets
2017-02-17 10:48:46 -05:00
Jeremy Long
373488adb4 codacy recommended updates 2017-02-17 10:31:25 -05:00
Stefan Neuhaus
59401cc9f8 cleanup/code style 2017-02-16 20:55:26 +01:00
Stefan Neuhaus
eca0e7a852 Fix integration test 2017-02-16 20:53:48 +01:00
Stefan Neuhaus
563dc24854 Parallelize retrieval of last modification timestamps 2017-02-16 08:59:09 +01:00
Stefan Neuhaus
3a70e25983 Refactoring: Move retrieval of last modified timestamps from UpdateableNvdCve to NvdCveUpdater
- UpdateableNvdCve is from its nature more like a simple value object
- Facilitates performance optimization for retrieval of last modification timestamps
2017-02-16 08:58:50 +01:00
Stefan Neuhaus
a9fc6bf02c cleanup: remove unused stuff 2017-02-16 08:58:36 +01:00
Stefan Neuhaus
cd4f09dc86 NvdCveUpdater: Refactor thread pool concept
- Make thread pools members of the class to facilitate reuse
- Increase default max download thread pool size from 3 to 50 (should be fine for mostly blocking tasks like downloading)
2017-02-16 08:58:14 +01:00
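
A rough sketch of the pattern described above: the pool is a member of the class so it can be reused across update runs, and it is sized generously because the tasks are mostly blocking I/O. The class name, the pool size of 50 (taken from the commit text), and the placeholder URLs are illustrative.

```
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

/** Minimal sketch: the download pool is a field, created once and reused. */
public class FeedDownloader {

    // Mostly-blocking download tasks tolerate a pool much larger than the CPU
    // count; the commit above raises the default from 3 to 50 for that reason.
    private static final int MAX_DOWNLOAD_THREADS = 50;

    private final ExecutorService downloadExecutor =
            Executors.newFixedThreadPool(MAX_DOWNLOAD_THREADS);

    public List<Future<String>> download(List<String> urls) throws InterruptedException {
        List<Callable<String>> tasks = new ArrayList<Callable<String>>();
        for (final String url : urls) {
            tasks.add(new Callable<String>() {
                @Override
                public String call() {
                    // Placeholder for the real HTTP fetch of an NVD feed.
                    return "downloaded " + url;
                }
            });
        }
        return downloadExecutor.invokeAll(tasks);
    }

    public void shutdown() {
        downloadExecutor.shutdown();
    }

    public static void main(String[] args) throws InterruptedException {
        FeedDownloader downloader = new FeedDownloader();
        // Hypothetical URLs purely for the demo.
        List<String> feeds = Arrays.asList(
                "https://example.invalid/feed-1", "https://example.invalid/feed-2");
        System.out.println(downloader.download(feeds).size() + " downloads completed");
        downloader.shutdown();
    }
}
```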
Jeremy Long
4193718571 upgrade to Java 7 2017-02-12 17:42:19 -05:00
Stefan Neuhaus
0464626e2b Accelerate CVE DB update
Vulnerability: switch vulnerableSoftware and references from expensive TreeSet to HashSet
2017-02-11 20:46:28 +01:00
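
The trade-off behind that switch, in miniature. VulnerableSoftware's real compareTo() is far more involved than String's, which is exactly why the TreeSet inserts were expensive; this sketch only shows the shape of the change.

```
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;

public class SetSwapExample {
    public static void main(String[] args) {
        // A TreeSet keeps its elements sorted, so every add() pays for an
        // O(log n) walk of comparisons; costly when compareTo() is expensive.
        Set<String> sorted = new TreeSet<String>();

        // A HashSet only needs hashCode()/equals(), so inserts are O(1) on
        // average; sorting can be applied later, only where a report needs it.
        Set<String> unsorted = new HashSet<String>();

        for (int i = 0; i < 100000; i++) {
            String cpe = "cpe:/a:vendor:product:" + i;
            sorted.add(cpe);
            unsorted.add(cpe);
        }
        System.out.println(sorted.size() + " == " + unsorted.size());
    }
}
```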
Jeremy Long
a0198e34e7 snapshot version 2017-02-09 06:04:02 -05:00
Jeremy Long
0b329bd40e added test case 2017-02-07 19:41:06 -05:00
Jeremy Long
3d33f24f09 Merge pull request #655 from suhand/master
Minor spelling fixes
2017-02-07 19:40:39 -05:00
Jeremy Long
886c02fad2 add configuration to remove FP based on parent-group/artifact from spring-boot 2017-02-07 06:24:34 -05:00
Jeremy Long
3a11504153 updated to prevent bundling of dependencies within WAR files 2017-02-07 06:22:55 -05:00
Jeremy Long
3a082ae00a minor update to #617 2017-02-07 06:06:53 -05:00
Suhan Dharmasuriya
780201845b Minor spelling fixes 2017-02-07 12:00:17 +05:30
Jeremy Long
0e0a4bb0b4 expanded hint rules so that they can remove evidence 2017-02-04 09:20:47 -05:00
Jeremy Long
5333083a78 fixed bug that caused ODC to fail if an invalid assembly was scanned 2017-01-28 08:13:27 -05:00
Jeremy Long
b8c6c86330 snapshot version 2017-01-28 08:12:51 -05:00
Jeremy Long
e246757f47 version 1.4.5 2017-01-22 17:10:42 -05:00
Jeremy Long
4172300799 added license 2017-01-22 16:11:50 -05:00
Jeremy Long
f39f754b7b reapplied fix for issue #601 2017-01-22 08:10:14 -05:00
Jeremy Long
c59615f452 patch for issue #510 and #512 2017-01-22 08:01:40 -05:00
Jeremy Long
847bed2fa0 added manifest implementation-version 2017-01-22 07:42:11 -05:00
Jeremy Long
a9af15f6f8 checkstyle/pmd suggested corrections 2017-01-21 08:47:52 -05:00
Jeremy Long
92519ae955 updated notes 2017-01-21 08:09:48 -05:00
Jeremy Long
2d90aca1f2 minor code cleanup 2017-01-21 08:05:54 -05:00
Jeremy Long
f29ed38c34 Merge pull request #644 from oosterholt/master
Add troubling JAR file name to the exception when JAR reading errors occur
2017-01-21 06:21:18 -05:00
Rick Oosterholt
df8d4fd77c Minor change: When JAR reading errors occur, at least add the file name
to the exception. Without it, finding the troubling JAR is hard.
2017-01-18 13:52:17 +01:00
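
The change is easy to picture: wrap the low-level failure in a new exception that carries the offending path. The method below is an illustrative sketch, not the analyzer's actual code.

```
import java.io.File;
import java.io.IOException;
import java.util.jar.JarFile;

public class JarNameInError {

    /** Opens a JAR, attaching the file path to any read failure. */
    static JarFile open(File jar) throws IOException {
        try {
            return new JarFile(jar);
        } catch (IOException ex) {
            // Re-throw with the path included so the troubling JAR is easy to find.
            throw new IOException("Error reading JAR file: " + jar.getAbsolutePath(), ex);
        }
    }

    public static void main(String[] args) throws IOException {
        if (args.length == 0) {
            System.out.println("usage: java JarNameInError <path-to-jar>");
            return;
        }
        System.out.println(open(new File(args[0])).getName());
    }
}
```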
Jeremy Long
baa2e2c6ff updated archetype for new analyzers to be more complete 2017-01-15 12:18:01 -05:00
Jeremy Long
9d5769bb69 Merge branch 'issue575' 2017-01-15 11:19:37 -05:00
Jeremy Long
4cdfa804ee fixed accidental commit 2017-01-14 09:43:34 -05:00
Jeremy Long
523cd23b6b filter version numbers for issue #575 2017-01-14 09:41:34 -05:00
Jeremy Long
61866e9e76 updated source version 2017-01-14 08:55:20 -05:00
Jeremy Long
ff7fbdc98d updated year to speed test case 2017-01-14 07:34:35 -05:00
Jeremy Long
b625d642ea updated documentation for #635 2017-01-14 07:31:31 -05:00
Jeremy Long
8733a85ebb patch per issue#642 2017-01-13 06:53:26 -05:00
Jeremy Long
5ab5a7b72b tuned linguist language stats 2017-01-09 20:05:37 -05:00
Jeremy Long
3cb8b9fa9e Merge branch 'hgschmie-additional_analyzers' 2017-01-08 11:52:09 -05:00
Jeremy Long
429039bf1c documentation for issue #635 2017-01-08 11:37:50 -05:00
Jeremy Long
29d28c3408 fixed PR #635 to cover other interfaces 2017-01-08 11:23:52 -05:00
Jeremy Long
372d484440 Merge branch 'additional_analyzers' of https://github.com/hgschmie/DependencyCheck into hgschmie-additional_analyzers 2017-01-08 10:33:57 -05:00
Jeremy Long
eac47800a3 added documentation for PR #636 2017-01-08 08:55:29 -05:00
Jeremy Long
86a85db12b removed for now 2017-01-08 08:54:47 -05:00
Jeremy Long
4ab6cd278c updated documentation for PR #636 2017-01-08 08:51:56 -05:00
Jeremy Long
233a068c8b Merge pull request #636 from hgschmie/fail_on_any_vuln
adds a new flag 'failBuildOnAnyVulnerability'
2017-01-08 08:19:24 -05:00
Jeremy Long
d9f0ffa742 Merge pull request #634 from hgschmie/enable_disable
rework the enabled / disabled logic
2017-01-08 08:18:12 -05:00
Jeremy Long
8d63ee19ed fix for Jenkins integration, updates to commit f47c6b0 2017-01-08 07:55:35 -05:00
Jeremy Long
1fb74e1a27 Merge pull request #639 from dejan2609/java-6-compatibility
check code against Java 1.6 API signatures
2017-01-07 06:40:47 -05:00
dejan2609
c94ab6108c check code against Java 1.6 API signatures 2017-01-04 16:42:07 +01:00
Jeremy Long
bf285e19ab added site for archetype 2017-01-02 21:59:09 -05:00
Jeremy Long
b1ceca73e4 added plugin archetype to site 2017-01-02 21:48:04 -05:00
Jeremy Long
f3aca63b61 version upgrades and added enforcer for java version 2017-01-02 21:47:27 -05:00
Jeremy Long
fca107d287 added site distribution 2017-01-02 21:46:15 -05:00
Jeremy Long
64b6964fff checkstyle corrections 2017-01-02 21:45:49 -05:00
Jeremy Long
6af0842838 added logging 2017-01-02 21:45:21 -05:00
Jeremy Long
4c49adf1ba reduced code duplication 2017-01-02 21:44:59 -05:00
Jeremy Long
5f4e4fab56 reduced code duplication 2017-01-02 21:43:51 -05:00
Jeremy Long
146d7e3fbf reduced code duplciation 2017-01-02 21:42:20 -05:00
Jeremy Long
4d22800747 fixed type 2017-01-02 21:40:57 -05:00
Jeremy Long
541a7f8180 removed unused code 2017-01-02 21:40:04 -05:00
Jeremy Long
f205cf79c9 Merge branch 'plugins' 2016-12-30 17:02:32 -05:00
Jeremy Long
d8bb6488b7 added archetype per #612 2016-12-30 17:01:09 -05:00
Jeremy Long
4324563c0a updated plugins path for #612 2016-12-30 16:42:37 -05:00
Jeremy Long
bad03660b1 added plugins directory per #612 2016-12-29 07:38:11 -05:00
Henning Schmiedehausen
20b1ff38f9 adds a new flag 'failBuildOnAnyVulnerability'
In our build system, we enable checkers based on boolean
values. Currently, the only way to enable failing the build on
vulnerabilities is by providing a numeric value (0-10) for another
property. This change adds a boolean switch that will fail the build
if any vulnerability is present (we have a strict "no vulnerabilities
in our builds" policy).
2016-12-28 17:24:26 -08:00
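
The enforcement such a boolean switch implies might look roughly like this. The class, fields, and the use of IllegalStateException (rather than a Maven MojoFailureException) are assumptions made to keep the sketch self-contained.

```
/** Illustrative check a build plugin could run after the analysis completes. */
public class FailBuildPolicy {

    private final boolean failBuildOnAnyVulnerability;
    private final float failBuildOnCVSS;

    public FailBuildPolicy(boolean failOnAny, float failOnCvss) {
        this.failBuildOnAnyVulnerability = failOnAny;
        this.failBuildOnCVSS = failOnCvss;
    }

    /** Throws if the configured policy says the build must fail. */
    public void enforce(int vulnerabilityCount, float highestCvssScore) {
        if (failBuildOnAnyVulnerability && vulnerabilityCount > 0) {
            throw new IllegalStateException(vulnerabilityCount + " vulnerabilities found");
        }
        if (highestCvssScore >= failBuildOnCVSS) {
            throw new IllegalStateException("CVSS " + highestCvssScore
                    + " is at or above the threshold " + failBuildOnCVSS);
        }
    }

    public static void main(String[] args) {
        // Numeric threshold of 11 never fires, so this passes silently.
        new FailBuildPolicy(false, 11).enforce(3, 7.5f);
        // The boolean switch: any finding at all fails the build.
        try {
            new FailBuildPolicy(true, 11).enforce(3, 7.5f);
        } catch (IllegalStateException expected) {
            System.out.println("build failed: " + expected.getMessage());
        }
    }
}
```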
Henning Schmiedehausen
def78a3cfd rework the enabled / disabled logic
If an analyzer is disabled from the configuration, it should not be
initialized (because some of the may actually fail during that process
nor should the engine log in any way that those exist.

With these changes, it is possible for me to turn off unwanted
analyzers (e.g. Ruby analyzers for a java project) from the maven
plugin and not confuse my users with spurious misleading messages.
2016-12-28 16:39:25 -08:00
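
The "skip disabled analyzers entirely" rule could be sketched as follows; the Analyzer interface here is a stripped-down stand-in for the real one, and the sample analyzer names are only for the demo.

```
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/** Stripped-down stand-in for the real analyzer interface. */
interface Analyzer {
    String getName();
    boolean isEnabled();
    void initialize();
}

public class AnalyzerLoader {

    /** Only enabled analyzers are initialized, logged, or returned. */
    static List<Analyzer> loadEnabled(List<Analyzer> configured) {
        List<Analyzer> active = new ArrayList<Analyzer>();
        for (Analyzer analyzer : configured) {
            if (!analyzer.isEnabled()) {
                // Skip entirely: no initialize() call (which may fail when a
                // required external tool is missing) and no log noise about it.
                continue;
            }
            analyzer.initialize();
            active.add(analyzer);
        }
        return active;
    }

    static Analyzer sample(final String name, final boolean enabled) {
        return new Analyzer() {
            @Override
            public String getName() { return name; }
            @Override
            public boolean isEnabled() { return enabled; }
            @Override
            public void initialize() { System.out.println("initializing " + name); }
        };
    }

    public static void main(String[] args) {
        List<Analyzer> configured = Arrays.asList(
                sample("RubyGemspecAnalyzer", false),
                sample("JarAnalyzer", true));
        System.out.println(loadEnabled(configured).size() + " analyzer(s) active");
    }
}
```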
Henning Schmiedehausen
a41158a716 adds maven configuration switches for more analyzers 2016-12-28 16:38:28 -08:00
Jeremy Long
63ad13ff7a added enabled properties per issue #612 2016-12-27 08:46:04 -05:00
Jeremy Long
dd92ec675f fixed error in tests 2016-12-27 08:45:42 -05:00
Jeremy Long
6e1512f7d9 added enabled setting (#612) and added additional checks to see if the update should occur (#631) 2016-12-27 08:45:01 -05:00
Jeremy Long
287b1df3fd added enabled settings for all analyzers per #612 2016-12-26 09:11:26 -05:00
Jeremy Long
38bf9b4ddb checkstyle recommendations 2016-12-22 07:32:04 -05:00
Jeremy Long
f9d3a9d8d8 Merge pull request #614 from stefanneuhaus/issue-613-fix-version-comparison
Fix handling of numerical versions
2016-12-22 06:58:26 -05:00
Jeremy Long
309a5d9bcb Merge branch 'issue630' 2016-12-22 06:57:04 -05:00
Jeremy Long
60e661d3a4 updated per issue #630 2016-12-22 06:55:26 -05:00
Jeremy Long
c33257d266 addded synchronization - as this analyzer should only run synchronized 2016-12-22 06:53:35 -05:00
Jeremy Long
1dbc183567 added check for failure 2016-12-22 06:52:47 -05:00
Jeremy Long
bf258146da added test case for issue #629 and #517 2016-12-18 12:14:35 -05:00
Jeremy Long
bb927b447e updated so that the old suppression files could be processed 2016-12-18 12:12:57 -05:00
Jeremy Long
d91b4c3151 updated test case for performance of build 2016-12-18 12:12:10 -05:00
Jeremy Long
91dbb39f18 updated test for #630 2016-12-18 11:59:59 -05:00
Jeremy Long
35ae8fd660 updated test for #630 2016-12-18 11:59:30 -05:00
Jeremy Long
d854917090 changes for issue #630 2016-12-18 11:58:58 -05:00
Jeremy Long
32ebf6c8ed added phase to accomodate the fix for issue #630 2016-12-18 11:58:20 -05:00
Jeremy Long
edd4191d47 fix for #517 2016-12-16 06:29:42 -05:00
Jeremy Long
0cce49506a added validation 2016-12-10 19:58:05 -05:00
Jeremy Long
1c053469e9 fixed date format for test case 2016-12-10 19:50:09 -05:00
Jeremy Long
610e97ef7f jacks suggested change 2016-12-10 16:55:58 -05:00
Jeremy Long
5a678d2ccb removed test code 2016-12-10 16:55:38 -05:00
Jeremy Long
8db61a4d1e coverity suggested change 2016-12-10 16:42:32 -05:00
Jeremy Long
f47c6b07f4 jacks recommended change for thread safety 2016-12-05 22:41:15 -05:00
Jeremy Long
bd3af45db9 fixed code duplication 2016-12-04 16:18:01 -05:00
Jeremy Long
a271d422f6 moved similiar code to a utility function to remove code duplication 2016-12-04 11:28:53 -05:00
Jeremy Long
4dd6dedaa4 hardening the XML parser per jacks.codiscope.com 2016-12-03 17:44:49 -05:00
Jeremy Long
10ee569096 fix proposed by Jacks - synchronizing SimpleDateFormat 2016-12-03 17:43:24 -05:00
Jeremy Long
1474855305 fix proposed by Jacks - synchronizing SimpleDateFormat 2016-12-03 17:41:32 -05:00
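
For context, SimpleDateFormat keeps mutable state and is not thread-safe, which is what these two commits address. One common fix is a per-thread instance, shown below as an assumption; synchronizing access to a single shared instance, as the commit messages suggest, is the other.

```
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

public class SafeDateFormat {

    // Sharing one SimpleDateFormat across threads corrupts parse/format
    // results. A ThreadLocal gives each thread its own copy.
    private static final ThreadLocal<SimpleDateFormat> FORMAT =
            new ThreadLocal<SimpleDateFormat>() {
                @Override
                protected SimpleDateFormat initialValue() {
                    return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US);
                }
            };

    public static String format(Date date) {
        return FORMAT.get().format(date);
    }

    public static void main(String[] args) {
        System.out.println(format(new Date()));
    }
}
```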
Jeremy Long
0202bc11d4 null checking proposed by coverity 2016-12-03 17:39:57 -05:00
Stefan Neuhaus
e7072ea04c Count "0" as a positive integer 2016-12-03 22:50:20 +01:00
Jeremy Long
8f2c755f21 checkstyle correction 2016-12-03 16:23:53 -05:00
Jeremy Long
e513a79bd2 fixed issue #272 2016-12-03 15:07:33 -05:00
Jeremy Long
dd17f7393f snapshot version 2016-12-03 14:28:36 -05:00
Jeremy Long
32f38bf892 updated travis build script 2016-12-03 14:01:32 -05:00
Jeremy Long
d5c3eeaf28 Merge branch 'removeMavenEngine' 2016-12-03 13:48:03 -05:00
Jeremy Long
bfa67fcba7 fix #617 2016-12-03 13:46:25 -05:00
Jeremy Long
37a556dcc0 add integration test 2016-12-03 07:06:01 -05:00
Jeremy Long
fe61f298f0 Merge branch 'axel3rd-MavenMojosPurgeAndUpdateOnlyAggregator' 2016-12-03 06:56:01 -05:00
Jeremy Long
9786c9bf82 minor changes - planning on moving additional testing profile to an invoker test in the maven module per issue #618 2016-12-03 06:55:24 -05:00
Jeremy Long
668161081a moved the invoker plugin to a profile so that it does not execute on every build 2016-12-03 06:54:03 -05:00
Jeremy Long
4978f9dcba Merge branch 'MavenMojosPurgeAndUpdateOnlyAggregator' of https://github.com/axel3rd/DependencyCheck into axel3rd-MavenMojosPurgeAndUpdateOnlyAggregator 2016-11-22 19:57:27 -05:00
Jeremy Long
a6ca2e3895 Merge pull request #625 from axel3rd/MinorFixAndUTsWindowsSpaceDirectory
UTs on Windows when project path contains space & some exception review
2016-11-22 19:51:54 -05:00
Alix Lourme
6ecf55be91 UTs on Windows when project path contains space & some exception review 2016-11-22 23:33:40 +01:00
Jeremy Long
13bd63dac8 re-loading of properties/settings resolved by sharing the settings object amongst tasks 2016-11-22 16:40:57 -05:00
Jeremy Long
db5ff1bfca java mail - disputed CVE is considered a false positive 2016-11-22 16:38:45 -05:00
Jeremy Long
42f2385bb2 updated documentation for PR #619 2016-11-22 06:51:21 -05:00
Jeremy Long
e9556bbbf0 added analyzer initialization so that temp files get put in the correct location 2016-11-22 06:40:33 -05:00
Jeremy Long
316b936326 ensured resources are closed 2016-11-22 06:39:50 -05:00
Jeremy Long
6838b9b950 fixed logic for single pom entry in a jar 2016-11-22 06:21:30 -05:00
Jeremy Long
cdfe5d0c9a Merge pull request #619 from willowtreeapps/feature/fail-on-cvss
Adds a failOnCVSS command line option
2016-11-22 05:50:45 -05:00
Jeremy Long
1610f14c47 general code cleanup/fixes 2016-11-22 05:46:35 -05:00
Jeremy Long
85ab894b94 fixed the possible creation of two indexes 2016-11-20 06:49:28 -05:00
Alix Lourme
ddbca24f33 Maven mojos 'purge' & 'update-only' aggregator #618 2016-11-19 00:32:10 +01:00
Charlie Fairchild
6b9acac8c4 Minor Styling 2016-11-17 15:37:21 -05:00
Charlie Fairchild
2333bee5fd Adds a command line option for the CLI tool to pick what CVSS error to fail on 2016-11-16 11:25:21 -05:00
Jeremy Long
2ad08d2367 minor code cleanup 2016-11-13 16:33:39 -05:00
Stefan Neuhaus
1337686013 Fix handling of numerical versions 2016-11-13 19:37:29 +01:00
Jeremy Long
41041bfd18 updated documentation per issue #607 2016-11-12 11:21:40 -05:00
Jeremy Long
e693e53630 updated error message per issue #607 2016-11-12 11:19:48 -05:00
Jeremy Long
b99e13a337 added documentation to address issue #609 2016-11-12 11:03:25 -05:00
Jeremy Long
3bbc485968 fix index out of range exception per issue #611 2016-11-11 10:58:14 -05:00
327 changed files with 12204 additions and 5931 deletions

2
.gitattributes vendored Normal file

@@ -0,0 +1,2 @@
*.html linguist-documentation
(^|/)site/) linguist-documentation

34
.github/contributing.md vendored Normal file

@@ -0,0 +1,34 @@
# Contributing to OWASP dependency-check
## Reporting Bugs
- Ensure you're running the latest version of dependency-check.
- Ensure the bug has not [already been reported](https://github.com/jeremylong/DependencyCheck/issues).
- If you're unable to find an open issue addressing the problem, please [submit a new issue](https://github.com/jeremylong/DependencyCheck/issues/new).
- Please fill out the appropriate section of the bug report template provided. Please delete any sections not needed in the template.
## Reporting Vulnerabilities
- If you believe you have found a vulnerability in dependency-check itself (not that dependency-check found a vulnerability); please email jeremy.long@owasp.org.
## Asking Questions
- Your question may be answered by taking a look at the [documentataion](https://jeremylong.github.io/DependencyCheck/).
- If you still have a question consider:
- posting to the [Google Group](https://groups.google.com/forum/#!forum/dependency-check)
- opening a [new issue](https://github.com/jeremylong/DependencyCheck/issues/new)
## Enhancement Requests
- Suggest changes by [submitting a new issue](https://github.com/jeremylong/DependencyCheck/issues/new) and begin coding.
## Contributing Code
- If you have written a new feature or have fixed a bug please open a new pull request with the patch.
- Ensure the PR description clearly describes the problem and solution. Include any related issue number(s) if applicable.
- Please ensure the PR passes the automated checks performed (travis-ci, codacy, etc.)
- Please consider adding test cases for any new functionality
## Thank you for your contributions
OWASP dependency-check team


@@ -1,3 +1,5 @@
Please delete any un-needed section from the following issue template:
### Reporting Bugs/Errors
When reporting errors, 99% of the time log file output is required. Please post the log file as a [gist](https://gist.github.com/) and provide a link in the new issue.

9
.github/pull_request_template.md vendored Normal file

@@ -0,0 +1,9 @@
## Fixes Issue #
## Description of Change
*Please add a description of the proposed change*
## Have test cases been added to cover the new functionality?
*yes/no*

.travis.yml

@@ -1,2 +1,13 @@
language: java
jdk: oraclejdk7
script: mvn install -DreleaseTesting
env:
global:
secure: ZUzhWfpXJw/oAeDlUkDFkEJMT0T7kCN3d7ah8urkL2B0KFfKOqQagkbXkgvDa1SYud8VdcnoGa69LfkEr5IrdqW7R4bEYZAiN5swm4Z0iO8t53szVspm2f+O9jQ44O/sfOfpfLxWUUuhdc7Vbrszp+tSszxdPmssWL+f5a/mfWs=
before_install:
- sudo apt-get install jq
- wget -O ~/codacy-coverage-reporter-assembly-latest.jar $(curl https://api.github.com/repos/codacy/codacy-coverage-reporter/releases/latest | jq -r .assets[0].browser_download_url)
after_success:
- java -cp ~/codacy-coverage-reporter-assembly-latest.jar com.codacy.CodacyCoverageReporter -l Java -r build-reporting/target/site/jacoco-aggregate/jacoco.xml

Dockerfile

@@ -2,13 +2,28 @@ FROM java:8
MAINTAINER Timo Pagel <dependencycheckmaintainer@timo-pagel.de>
RUN wget -O /tmp/current.txt http://jeremylong.github.io/DependencyCheck/current.txt && current=$(cat /tmp/current.txt) && wget https://dl.bintray.com/jeremy-long/owasp/dependency-check-$current-release.zip && unzip dependency-check-$current-release.zip && mv dependency-check /usr/share/
ENV user=dependencycheck
ENV version_url=https://jeremylong.github.io/DependencyCheck/current.txt
ENV download_url=https://dl.bintray.com/jeremy-long/owasp
RUN useradd -ms /bin/bash dockeruser && chown -R dockeruser:dockeruser /usr/share/dependency-check && mkdir /report && chown -R dockeruser:dockeruser /report
USER dockeruser
RUN wget -O /tmp/current.txt ${version_url} && \
version=$(cat /tmp/current.txt) && \
file="dependency-check-${version}-release.zip" && \
wget "$download_url/$file" && \
unzip ${file} && \
rm ${file} && \
mv dependency-check /usr/share/
VOLUME "/src /usr/share/dependency-check/data /report"
RUN useradd -ms /bin/bash ${user} && \
chown -R ${user}:${user} /usr/share/dependency-check && \
mkdir /report && \
chown -R ${user}:${user} /report
USER ${user}
VOLUME ["/src" "/usr/share/dependency-check/data" "/report"]
WORKDIR /report
ENTRYPOINT ["/usr/share/dependency-check/bin/dependency-check.sh", "--scan", "/src"]
CMD ["--help"]
ENTRYPOINT ["/usr/share/dependency-check/bin/dependency-check.sh"]

README.md

@@ -1,4 +1,6 @@
[![Build Status](https://travis-ci.org/jeremylong/DependencyCheck.svg?branch=master)](https://travis-ci.org/jeremylong/DependencyCheck) [![Apache 2.0 License](https://img.shields.io/badge/license-Apache%202-blue.svg)](https://www.apache.org/licenses/LICENSE-2.0.txt) [![Coverity Scan Build Status](https://scan.coverity.com/projects/1654/badge.svg)](https://scan.coverity.com/projects/dependencycheck)
[![Build Status](https://travis-ci.org/jeremylong/DependencyCheck.svg?branch=master)](https://travis-ci.org/jeremylong/DependencyCheck) [![Coverity Scan Build Status](https://scan.coverity.com/projects/1654/badge.svg)](https://scan.coverity.com/projects/dependencycheck) [![Codacy Badge](https://api.codacy.com/project/badge/Grade/6b6021d481dc41a888c5da0d9ecf9494)](https://www.codacy.com/app/jeremylong/DependencyCheck?utm_source=github.com&amp;utm_medium=referral&amp;utm_content=jeremylong/DependencyCheck&amp;utm_campaign=Badge_Grade) [![Apache 2.0 License](https://img.shields.io/badge/license-Apache%202-blue.svg)](https://www.apache.org/licenses/LICENSE-2.0.txt)
[![Black Hat Arsenal](https://www.toolswatch.org/badges/arsenal/2015.svg)](https://www.toolswatch.org/2015/06/black-hat-arsenal-usa-2015-speakers-lineup/) [![Black Hat Arsenal](https://www.toolswatch.org/badges/arsenal/2014.svg)](https://www.toolswatch.org/2014/06/black-hat-usa-2014-arsenal-tools-speaker-list/) [![Black Hat Arsenal](https://www.toolswatch.org/badges/arsenal/2013.svg)](https://www.toolswatch.org/2013/06/announcement-blackhat-arsenal-usa-2013-selected-tools/)
Dependency-Check
================
@@ -91,40 +93,46 @@ $ ./dependency-check-cli/target/release/bin/dependency-check.sh --project Testin
On Windows
```
> mvn install
> dependency-check-cli/target/release/bin/dependency-check.bat -h
> dependency-check-cli/target/release/bin/dependency-check.bat --project Testing --out . --scan ./src/test/resources
> .\dependency-check-cli\target\release\bin\dependency-check.bat -h
> .\dependency-check-cli\target\release\bin\dependency-check.bat --project Testing --out . --scan ./src/test/resources
```
Then load the resulting 'DependencyCheck-Report.html' into your favorite browser.
### Docker
In the following example it is assumed that the source to be checked is in the actual directory. A persistent data directory and a persistent report directory is used so that the container can be destroyed after running it to make sure that you use the newst version, always.
```
# After the first run, feel free to change the owner of the directories to the owner of the creted files and the permissions to 744
DATA_DIRECTORY=$HOME/OWASP-Dependency-Check/data
REPORT_DIRECTORY=/$HOME/OWASP-Dependency-Check/reports
In the following example it is assumed that the source to be checked is in the current working directory. Persistent data and report directories are used, allowing you to destroy the container after running.
if [ ! -d $DATA_DIRECTORY ]; then
echo "Initially creating persistent directories"
mkdir -p $DATA_DIRECTORY
chmod -R 777 $DATA_DIRECTORY
mkdir -p $REPORT_DIRECTORY
chmod -R 777 $REPORT_DIRECTORY
```
#!/bin/sh
OWASPDC_DIRECTORY=$HOME/OWASP-Dependency-Check
DATA_DIRECTORY="$OWASPDC_DIRECTORY/data"
REPORT_DIRECTORY="$OWASPDC_DIRECTORY/reports"
if [ ! -d "$DATA_DIRECTORY" ]; then
echo "Initially creating persistent directories"
mkdir -p "$DATA_DIRECTORY"
chmod -R 777 "$DATA_DIRECTORY"
mkdir -p "$REPORT_DIRECTORY"
chmod -R 777 "$REPORT_DIRECTORY"
fi
docker pull owasp/dependency-check # Make sure it is the actual version
# Make sure we are using the latest version
docker pull owasp/dependency-check
docker run --rm \
--volume $(pwd):/src \
--volume $DATA_DIRECTORY:/usr/share/dependency-check/data \
--volume $REPORT_DIRECTORY:/report \
--name dependency-check \
dc \
--suppression "/src/security/dependency-check-suppression.xml"\
--format "ALL" \
--project "My OWASP Dependency Check Projekt" \
--volume $(pwd):/src \
--volume "$DATA_DIRECTORY":/usr/share/dependency-check/data \
--volume "$REPORT_DIRECTORY":/report \
owasp/dependency-check \
--scan /src \
--format "ALL" \
--project "My OWASP Dependency Check Project"
# Use suppression like this: (/src == $pwd)
# --suppression "/src/security/dependency-check-suppression.xml"
```
@@ -144,7 +152,7 @@ Dependency-Check is Copyright (c) 2012-2016 Jeremy Long. All Rights Reserved.
Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/LICENSE.txt) file for the full license.
Dependency-Check makes use of several other open source libraries. Please see the [NOTICE.txt] [notices] file for more information.
Dependency-Check makes use of several other open source libraries. Please see the [NOTICE.txt][notices] file for more information.
[wiki]: https://github.com/jeremylong/DependencyCheck/wiki

80
build-reporting/pom.xml Normal file

@@ -0,0 +1,80 @@
<!--
This file is part of dependency-check build-reporting.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Copyright (c) 2017 - Jeremy Long. All Rights Reserved.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>1.4.6-SNAPSHOT</version>
</parent>
<artifactId>build-reporting</artifactId>
<!-- begin copy from http://minds.coremedia.com/2012/09/11/problem-solved-deploy-multi-module-maven-project-site-as-github-pages/ -->
<distributionManagement>
<site>
<id>github-pages-site</id>
<name>Deployment through GitHub's site deployment plugin</name>
<url>${basedir}/../target/site/${project.version}/build-reporting</url>
</site>
</distributionManagement>
<!-- end copy -->
<dependencies>
<dependency>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-utils</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-ant</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-cli</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-maven</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<executions>
<execution>
<id>report-aggregate</id>
<phase>verify</phase>
<goals>
<goal>report-aggregate</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>


@@ -0,0 +1,5 @@
About
=====
OWASP dependency-check build reporting is used to aggregate jacoco test coverage results
so that they can be posted to [Codacy](https://www.codacy.com/app/OWASP_Reviews/DependencyCheck/dashboard)
to track code coverage.


@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<!--
This file is part of dependency-check build reporting.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Copyright (c) 2017 Jeremy Long. All Rights Reserved.
-->
<project name="dependency-check-build-reporting">
<bannerLeft>
<name>OWASP dependency-check build reporting</name>
<alt>OWASP dependency-check build reporting</alt>
<src>../images/dc.svg</src>
</bannerLeft>
<body>
<breadcrumbs>
<item name="dependency-check" href="../index.html"/>
</breadcrumbs>
<menu ref="Project Documentation" />
<menu ref="reports" />
</body>
</project>

dependency-check-ant/pom.xml

@@ -20,7 +20,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>1.4.4</version>
<version>1.4.6-SNAPSHOT</version>
</parent>
<artifactId>dependency-check-ant</artifactId>
@@ -223,48 +223,6 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<configuration>
<check>
<branchRate>85</branchRate>
<lineRate>85</lineRate>
<haltOnFailure>false</haltOnFailure>
<totalBranchRate>85</totalBranchRate>
<totalLineRate>85</totalLineRate>
<packageLineRate>85</packageLineRate>
<packageBranchRate>85</packageBranchRate>
<regexes>
<regex>
<pattern>.*\$.*</pattern>
<branchRate>0</branchRate>
<lineRate>0</lineRate>
</regex>
</regexes>
</check>
</configuration>
<executions>
<execution>
<goals>
<goal>clean</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<argLine>-Dfile.encoding=UTF-8</argLine>
<systemProperties>
<property>
<name>data.directory</name>
<value>${project.build.directory}/dependency-check-data</value>
</property>
</systemProperties>
</configuration>
</plugin>
</plugins>
</build>
<reporting>
@@ -288,7 +246,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<version>${reporting.pmd-plugin.version}</version>
<configuration>
<targetJdk>1.6</targetJdk>
<linkXref>true</linkXref>
<linkXRef>true</linkXRef>
<sourceEncoding>utf-8</sourceEncoding>
<excludes>
<exclude>**/generated/*.java</exclude>


@@ -53,16 +53,157 @@ public class Check extends Update {
* System specific new line character.
*/
private static final String NEW_LINE = System.getProperty("line.separator", "\n").intern();
/**
* Whether the ruby gemspec analyzer should be enabled.
*/
private Boolean rubygemsAnalyzerEnabled;
/**
* Whether or not the Node.js Analyzer is enabled.
*/
private Boolean nodeAnalyzerEnabled;
/**
* Whether or not the Ruby Bundle Audit Analyzer is enabled.
*/
private Boolean bundleAuditAnalyzerEnabled;
/**
* Whether the CMake analyzer should be enabled.
*/
private Boolean cmakeAnalyzerEnabled;
/**
* Whether or not the Open SSL analyzer is enabled.
*/
private Boolean opensslAnalyzerEnabled;
/**
* Whether the python package analyzer should be enabled.
*/
private Boolean pyPackageAnalyzerEnabled;
/**
* Whether the python distribution analyzer should be enabled.
*/
private Boolean pyDistributionAnalyzerEnabled;
/**
* Whether or not the central analyzer is enabled.
*/
private Boolean centralAnalyzerEnabled;
/**
* Whether or not the nexus analyzer is enabled.
*/
private Boolean nexusAnalyzerEnabled;
/**
* The URL of a Nexus server's REST API end point
* (http://domain/nexus/service/local).
*/
private String nexusUrl;
/**
* Whether or not the defined proxy should be used when connecting to Nexus.
*/
private Boolean nexusUsesProxy;
/**
* Additional ZIP File extensions to add analyze. This should be a
* comma-separated list of file extensions to treat like ZIP files.
*/
private String zipExtensions;
/**
* The path to Mono for .NET assembly analysis on non-windows systems.
*/
private String pathToMono;
/**
* Construct a new DependencyCheckTask.
* The application name for the report.
*
* @deprecated use projectName instead.
*/
public Check() {
super();
// Call this before Dependency Check Core starts logging anything - this way, all SLF4J messages from
// core end up coming through this tasks logger
StaticLoggerBinder.getSingleton().setTask(this);
}
@Deprecated
private String applicationName = null;
/**
* The name of the project being analyzed.
*/
private String projectName = "dependency-check";
/**
* Specifies the destination directory for the generated Dependency-Check
* report.
*/
private String reportOutputDirectory = ".";
/**
* Specifies if the build should be failed if a CVSS score above a specified
* level is identified. The default is 11 which means since the CVSS scores
* are 0-10, by default the build will never fail and the CVSS score is set
* to 11. The valid range for the fail build on CVSS is 0 to 11, where
* anything above 10 will not cause the build to fail.
*/
private float failBuildOnCVSS = 11;
/**
* Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not
* recommended that this be turned to false. Default is true.
*/
private Boolean autoUpdate;
/**
* Whether only the update phase should be executed.
*
* @deprecated Use the update task instead
*/
@Deprecated
private boolean updateOnly = false;
/**
* The report format to be generated (HTML, XML, VULN, CSV, JSON, ALL). Default is
* HTML.
*/
private String reportFormat = "HTML";
/**
* The path to the suppression file.
*/
private String suppressionFile;
/**
* The path to the suppression file.
*/
private String hintsFile;
/**
* flag indicating whether or not to show a summary of findings.
*/
private boolean showSummary = true;
/**
* Whether experimental analyzers are enabled.
*/
private Boolean enableExperimental;
/**
* Whether or not the Jar Analyzer is enabled.
*/
private Boolean jarAnalyzerEnabled;
/**
* Whether or not the Archive Analyzer is enabled.
*/
private Boolean archiveAnalyzerEnabled;
/**
* Whether or not the .NET Nuspec Analyzer is enabled.
*/
private Boolean nuspecAnalyzerEnabled;
/**
* Whether or not the PHP Composer Analyzer is enabled.
*/
private Boolean composerAnalyzerEnabled;
/**
* Whether or not the .NET Assembly Analyzer is enabled.
*/
private Boolean assemblyAnalyzerEnabled;
/**
* Whether the autoconf analyzer should be enabled.
*/
private Boolean autoconfAnalyzerEnabled;
/**
* Sets the path for the bundle-audit binary.
*/
private String bundleAuditPath;
/**
* Whether or not the CocoaPods Analyzer is enabled.
*/
private Boolean cocoapodsAnalyzerEnabled;
/**
* Whether or not the Swift package Analyzer is enabled.
*/
private Boolean swiftPackageManagerAnalyzerEnabled;
//The following code was copied Apache Ant PathConvert
//BEGIN COPY from org.apache.tools.ant.taskdefs.PathConvert
/**
@@ -70,9 +211,9 @@ public class Check extends Update {
*/
private Resources path = null;
/**
* Reference to path/fileset to convert
* Reference to path/file set to convert
*/
private Reference refid = null;
private Reference refId = null;
/**
* Add an arbitrary ResourceCollection.
@@ -82,7 +223,7 @@ public class Check extends Update {
*/
public void add(ResourceCollection rc) {
if (isReference()) {
throw new BuildException("Nested elements are not allowed when using the refid attribute.");
throw new BuildException("Nested elements are not allowed when using the refId attribute.");
}
getPath().add(rc);
}
@@ -102,12 +243,12 @@ public class Check extends Update {
}
/**
* Learn whether the refid attribute of this element been set.
* Learn whether the refId attribute of this element been set.
*
* @return true if refid is valid.
* @return true if refId is valid.
*/
public boolean isReference() {
return refid != null;
return refId != null;
}
/**
@@ -116,11 +257,11 @@ public class Check extends Update {
*
* @param r the reference to a path, fileset, dirset or filelist.
*/
public void setRefid(Reference r) {
public synchronized void setRefId(Reference r) {
if (path != null) {
throw new BuildException("Nested elements are not allowed when using the refid attribute.");
throw new BuildException("Nested elements are not allowed when using the refId attribute.");
}
refid = r;
refId = r;
}
/**
@@ -131,22 +272,25 @@ public class Check extends Update {
*/
private void dealWithReferences() throws BuildException {
if (isReference()) {
final Object o = refid.getReferencedObject(getProject());
final Object o = refId.getReferencedObject(getProject());
if (!(o instanceof ResourceCollection)) {
throw new BuildException("refid '" + refid.getRefId()
throw new BuildException("refId '" + refId.getRefId()
+ "' does not refer to a resource collection.");
}
getPath().add((ResourceCollection) o);
}
}
// END COPY from org.apache.tools.ant.taskdefs
/**
* The application name for the report.
*
* @deprecated use projectName instead.
* Construct a new DependencyCheckTask.
*/
@Deprecated
private String applicationName = null;
public Check() {
super();
// Call this before Dependency Check Core starts logging anything - this way, all SLF4J messages from
// core end up coming through this tasks logger
StaticLoggerBinder.getSingleton().setTask(this);
}
/**
* Get the value of applicationName.
@@ -170,10 +314,6 @@ public class Check extends Update {
public void setApplicationName(String applicationName) {
this.applicationName = applicationName;
}
/**
* The name of the project being analyzed.
*/
private String projectName = "dependency-check";
/**
* Get the value of projectName.
@@ -199,12 +339,6 @@ public class Check extends Update {
this.projectName = projectName;
}
/**
* Specifies the destination directory for the generated Dependency-Check
* report.
*/
private String reportOutputDirectory = ".";
/**
* Get the value of reportOutputDirectory.
*
@@ -222,14 +356,6 @@ public class Check extends Update {
public void setReportOutputDirectory(String reportOutputDirectory) {
this.reportOutputDirectory = reportOutputDirectory;
}
/**
* Specifies if the build should be failed if a CVSS score above a specified
* level is identified. The default is 11 which means since the CVSS scores
* are 0-10, by default the build will never fail and the CVSS score is set
* to 11. The valid range for the fail build on CVSS is 0 to 11, where
* anything above 10 will not cause the build to fail.
*/
private float failBuildOnCVSS = 11;
/**
* Get the value of failBuildOnCVSS.
@@ -248,11 +374,6 @@ public class Check extends Update {
public void setFailBuildOnCVSS(float failBuildOnCVSS) {
this.failBuildOnCVSS = failBuildOnCVSS;
}
/**
* Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not
* recommended that this be turned to false. Default is true.
*/
private Boolean autoUpdate;
/**
* Get the value of autoUpdate.
@@ -271,13 +392,6 @@ public class Check extends Update {
public void setAutoUpdate(Boolean autoUpdate) {
this.autoUpdate = autoUpdate;
}
/**
* Whether only the update phase should be executed.
*
* @deprecated Use the update task instead
*/
@Deprecated
private boolean updateOnly = false;
/**
* Get the value of updateOnly.
@@ -301,12 +415,6 @@ public class Check extends Update {
this.updateOnly = updateOnly;
}
/**
* The report format to be generated (HTML, XML, VULN, ALL). Default is
* HTML.
*/
private String reportFormat = "HTML";
/**
* Get the value of reportFormat.
*
@@ -324,10 +432,6 @@ public class Check extends Update {
public void setReportFormat(ReportFormats reportFormat) {
this.reportFormat = reportFormat.getValue();
}
/**
* The path to the suppression file.
*/
private String suppressionFile;
/**
* Get the value of suppressionFile.
@@ -346,10 +450,6 @@ public class Check extends Update {
public void setSuppressionFile(String suppressionFile) {
this.suppressionFile = suppressionFile;
}
/**
* The path to the suppression file.
*/
private String hintsFile;
/**
* Get the value of hintsFile.
@@ -368,10 +468,6 @@ public class Check extends Update {
public void setHintsFile(String hintsFile) {
this.hintsFile = hintsFile;
}
/**
* flag indicating whether or not to show a summary of findings.
*/
private boolean showSummary = true;
/**
* Get the value of showSummary.
@@ -391,11 +487,6 @@ public class Check extends Update {
this.showSummary = showSummary;
}
/**
* Whether experimental analyzers are enabled.
*/
private Boolean enableExperimental;
/**
* Get the value of enableExperimental.
*
@@ -414,11 +505,6 @@ public class Check extends Update {
this.enableExperimental = enableExperimental;
}
/**
* Whether or not the Jar Analyzer is enabled.
*/
private Boolean jarAnalyzerEnabled;
/**
* Returns whether or not the analyzer is enabled.
*
@@ -436,10 +522,6 @@ public class Check extends Update {
public void setJarAnalyzerEnabled(Boolean jarAnalyzerEnabled) {
this.jarAnalyzerEnabled = jarAnalyzerEnabled;
}
/**
* Whether or not the Archive Analyzer is enabled.
*/
private Boolean archiveAnalyzerEnabled;
/**
* Returns whether or not the analyzer is enabled.
@@ -449,10 +531,6 @@ public class Check extends Update {
public Boolean isArchiveAnalyzerEnabled() {
return archiveAnalyzerEnabled;
}
/**
* Whether or not the .NET Assembly Analyzer is enabled.
*/
private Boolean assemblyAnalyzerEnabled;
/**
* Sets whether or not the analyzer is enabled.
@@ -480,10 +558,6 @@ public class Check extends Update {
public void setAssemblyAnalyzerEnabled(Boolean assemblyAnalyzerEnabled) {
this.assemblyAnalyzerEnabled = assemblyAnalyzerEnabled;
}
/**
* Whether or not the .NET Nuspec Analyzer is enabled.
*/
private Boolean nuspecAnalyzerEnabled;
/**
* Returns whether or not the analyzer is enabled.
@@ -502,10 +576,6 @@ public class Check extends Update {
public void setNuspecAnalyzerEnabled(Boolean nuspecAnalyzerEnabled) {
this.nuspecAnalyzerEnabled = nuspecAnalyzerEnabled;
}
/**
* Whether or not the PHP Composer Analyzer is enabled.
*/
private Boolean composerAnalyzerEnabled;
/**
* Get the value of composerAnalyzerEnabled.
@@ -524,10 +594,6 @@ public class Check extends Update {
public void setComposerAnalyzerEnabled(Boolean composerAnalyzerEnabled) {
this.composerAnalyzerEnabled = composerAnalyzerEnabled;
}
/**
* Whether the autoconf analyzer should be enabled.
*/
private Boolean autoconfAnalyzerEnabled;
/**
* Get the value of autoconfAnalyzerEnabled.
@@ -546,10 +612,6 @@ public class Check extends Update {
public void setAutoconfAnalyzerEnabled(Boolean autoconfAnalyzerEnabled) {
this.autoconfAnalyzerEnabled = autoconfAnalyzerEnabled;
}
/**
* Whether the CMake analyzer should be enabled.
*/
private Boolean cmakeAnalyzerEnabled;
/**
* Get the value of cmakeAnalyzerEnabled.
@@ -568,10 +630,80 @@ public class Check extends Update {
public void setCMakeAnalyzerEnabled(Boolean cmakeAnalyzerEnabled) {
this.cmakeAnalyzerEnabled = cmakeAnalyzerEnabled;
}
/**
* Whether or not the openssl analyzer is enabled.
* Returns if the Bundle Audit Analyzer is enabled.
*
* @return if the Bundle Audit Analyzer is enabled.
*/
private Boolean opensslAnalyzerEnabled;
public Boolean isBundleAuditAnalyzerEnabled() {
return bundleAuditAnalyzerEnabled;
}
/**
* Sets if the Bundle Audit Analyzer is enabled.
*
* @param bundleAuditAnalyzerEnabled whether or not the analyzer should be
* enabled
*/
public void setBundleAuditAnalyzerEnabled(Boolean bundleAuditAnalyzerEnabled) {
this.bundleAuditAnalyzerEnabled = bundleAuditAnalyzerEnabled;
}
/**
* Returns the path to the bundle audit executable.
*
* @return the path to the bundle audit executable
*/
public String getBundleAuditPath() {
return bundleAuditPath;
}
/**
* Sets the path to the bundle audit executable.
*
* @param bundleAuditPath the path to the bundle audit executable
*/
public void setBundleAuditPath(String bundleAuditPath) {
this.bundleAuditPath = bundleAuditPath;
}
/**
* Returns if the cocoapods analyzer is enabled.
*
* @return if the cocoapods analyzer is enabled
*/
public boolean isCocoapodsAnalyzerEnabled() {
return cocoapodsAnalyzerEnabled;
}
/**
* Sets whether or not the cocoapods analyzer is enabled.
*
* @param cocoapodsAnalyzerEnabled the state of the cocoapods analyzer
*/
public void setCocoapodsAnalyzerEnabled(Boolean cocoapodsAnalyzerEnabled) {
this.cocoapodsAnalyzerEnabled = cocoapodsAnalyzerEnabled;
}
/**
* Returns whether or not the Swift package Analyzer is enabled.
*
* @return whether or not the Swift package Analyzer is enabled
*/
public Boolean isSwiftPackageManagerAnalyzerEnabled() {
return swiftPackageManagerAnalyzerEnabled;
}
/**
* Sets the enabled state of the swift package manager analyzer.
*
* @param swiftPackageManagerAnalyzerEnabled the enabled state of the swift
* package manager
*/
public void setSwiftPackageManagerAnalyzerEnabled(Boolean swiftPackageManagerAnalyzerEnabled) {
this.swiftPackageManagerAnalyzerEnabled = swiftPackageManagerAnalyzerEnabled;
}
/**
* Get the value of opensslAnalyzerEnabled.
@@ -590,10 +722,6 @@ public class Check extends Update {
public void setOpensslAnalyzerEnabled(Boolean opensslAnalyzerEnabled) {
this.opensslAnalyzerEnabled = opensslAnalyzerEnabled;
}
/**
* Whether or not the Node.js Analyzer is enabled.
*/
private Boolean nodeAnalyzerEnabled;
/**
* Get the value of nodeAnalyzerEnabled.
@@ -612,10 +740,6 @@ public class Check extends Update {
public void setNodeAnalyzerEnabled(Boolean nodeAnalyzerEnabled) {
this.nodeAnalyzerEnabled = nodeAnalyzerEnabled;
}
/**
* Whether the ruby gemspec analyzer should be enabled.
*/
private Boolean rubygemsAnalyzerEnabled;
/**
* Get the value of rubygemsAnalyzerEnabled.
@@ -634,10 +758,6 @@ public class Check extends Update {
public void setRubygemsAnalyzerEnabled(Boolean rubygemsAnalyzerEnabled) {
this.rubygemsAnalyzerEnabled = rubygemsAnalyzerEnabled;
}
/**
* Whether the python package analyzer should be enabled.
*/
private Boolean pyPackageAnalyzerEnabled;
/**
* Get the value of pyPackageAnalyzerEnabled.
@@ -657,11 +777,6 @@ public class Check extends Update {
this.pyPackageAnalyzerEnabled = pyPackageAnalyzerEnabled;
}
/**
* Whether the python distribution analyzer should be enabled.
*/
private Boolean pyDistributionAnalyzerEnabled;
/**
* Get the value of pyDistributionAnalyzerEnabled.
*
@@ -681,11 +796,6 @@ public class Check extends Update {
this.pyDistributionAnalyzerEnabled = pyDistributionAnalyzerEnabled;
}
/**
* Whether or not the central analyzer is enabled.
*/
private Boolean centralAnalyzerEnabled;
/**
* Get the value of centralAnalyzerEnabled.
*
@@ -704,11 +814,6 @@ public class Check extends Update {
this.centralAnalyzerEnabled = centralAnalyzerEnabled;
}
/**
* Whether or not the nexus analyzer is enabled.
*/
private Boolean nexusAnalyzerEnabled;
/**
* Get the value of nexusAnalyzerEnabled.
*
@@ -727,12 +832,6 @@ public class Check extends Update {
this.nexusAnalyzerEnabled = nexusAnalyzerEnabled;
}
/**
* The URL of a Nexus server's REST API end point
* (http://domain/nexus/service/local).
*/
private String nexusUrl;
/**
* Get the value of nexusUrl.
*
@@ -750,10 +849,6 @@ public class Check extends Update {
public void setNexusUrl(String nexusUrl) {
this.nexusUrl = nexusUrl;
}
/**
* Whether or not the defined proxy should be used when connecting to Nexus.
*/
private Boolean nexusUsesProxy;
/**
* Get the value of nexusUsesProxy.
@@ -773,12 +868,6 @@ public class Check extends Update {
this.nexusUsesProxy = nexusUsesProxy;
}
/**
* Additional ZIP file extensions to analyze. This should be a
* comma-separated list of file extensions to treat like ZIP files.
*/
private String zipExtensions;
/**
* Get the value of zipExtensions.
*
@@ -797,11 +886,6 @@ public class Check extends Update {
this.zipExtensions = zipExtensions;
}
/**
* The path to Mono for .NET assembly analysis on non-windows systems.
*/
private String pathToMono;
/**
* Get the value of pathToMono.
*
@@ -839,7 +923,7 @@ public class Check extends Update {
log(ex.getMessage(), Project.MSG_ERR);
}
} else {
for (Resource resource : path) {
for (Resource resource : getPath()) {
final FileProvider provider = resource.as(FileProvider.class);
if (provider != null) {
final File file = provider.getFile();
@@ -857,18 +941,13 @@ public class Check extends Update {
}
}
DatabaseProperties prop = null;
CveDB cve = null;
try {
cve = new CveDB();
cve.open();
try (CveDB cve = CveDB.getInstance()) {
prop = cve.getDatabaseProperties();
} catch (DatabaseException ex) {
//TODO shouldn't this be a fatal exception
log("Unable to retrieve DB Properties", ex, Project.MSG_DEBUG);
} finally {
if (cve != null) {
cve.close();
}
}
final ReportGenerator reporter = new ReportGenerator(getProjectName(), engine.getDependencies(), engine.getAnalyzers(), prop);
reporter.generateReports(reportOutputDirectory, reportFormat);
@@ -906,7 +985,7 @@ public class Check extends Update {
* @throws BuildException if the task was not configured correctly.
*/
private void validateConfiguration() throws BuildException {
if (path == null) {
if (getPath() == null) {
throw new BuildException("No project dependencies have been defined to analyze.");
}
if (failBuildOnCVSS < 0 || failBuildOnCVSS > 11) {
@@ -934,6 +1013,10 @@ public class Check extends Update {
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, rubygemsAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, opensslAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_CMAKE_ENABLED, cmakeAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, swiftPackageManagerAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, cocoapodsAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, bundleAuditAnalyzerEnabled);
Settings.setStringIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, bundleAuditPath);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, autoconfAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, composerAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, nodeAnalyzerEnabled);
@@ -954,7 +1037,7 @@ public class Check extends Update {
*
* @param dependencies the list of dependency objects
* @throws BuildException thrown if a CVSS score is found that is higher
* then the threshold set
* than the threshold set
*/
private void checkForFailure(List<Dependency> dependencies) throws BuildException {
final StringBuilder ids = new StringBuilder();
@@ -971,7 +1054,7 @@ public class Check extends Update {
}
if (ids.length() > 0) {
final String msg = String.format("%n%nDependency-Check Failure:%n"
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater then '%.1f': %s%n"
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than '%.1f': %s%n"
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString());
throw new BuildException(msg);
}
@@ -1019,7 +1102,7 @@ public class Check extends Update {
}
/**
* An enumeration of supported report formats: "ALL", "HTML", "XML", "VULN",
* An enumeration of supported report formats: "ALL", "HTML", "XML", "CSV", "JSON", "VULN",
* etc..
*/
public static class ReportFormats extends EnumeratedAttribute {

View File

@@ -95,6 +95,12 @@ public class Purge extends Task {
this.failOnError = failOnError;
}
/**
* Executes the dependency-check purge to delete the existing local copy of
* the NVD CVE data.
*
* @throws BuildException thrown if there is a problem deleting the file(s)
*/
@Override
public void execute() throws BuildException {
populateSettings();
@@ -138,9 +144,7 @@ public class Purge extends Task {
*/
protected void populateSettings() throws BuildException {
Settings.initialize();
InputStream taskProperties = null;
try {
taskProperties = this.getClass().getClassLoader().getResourceAsStream(PROPERTIES_FILE);
try (InputStream taskProperties = this.getClass().getClassLoader().getResourceAsStream(PROPERTIES_FILE)) {
Settings.mergeProperties(taskProperties);
} catch (IOException ex) {
final String msg = "Unable to load the dependency-check ant task.properties file.";
@@ -148,14 +152,6 @@ public class Purge extends Task {
throw new BuildException(msg, ex);
}
log(msg, ex, Project.MSG_WARN);
} finally {
if (taskProperties != null) {
try {
taskProperties.close();
} catch (IOException ex) {
log("", ex, Project.MSG_DEBUG);
}
}
}
if (dataDirectory != null) {
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);

View File

@@ -34,6 +34,67 @@ import org.slf4j.impl.StaticLoggerBinder;
*/
public class Update extends Purge {
/**
* The Proxy Server.
*/
private String proxyServer;
/**
* The Proxy Port.
*/
private String proxyPort;
/**
* The Proxy username.
*/
private String proxyUsername;
/**
* The Proxy password.
*/
private String proxyPassword;
/**
* The Connection Timeout.
*/
private String connectionTimeout;
/**
* The database driver name; such as org.h2.Driver.
*/
private String databaseDriverName;
/**
* The path to the database driver JAR file if it is not on the class path.
*/
private String databaseDriverPath;
/**
* The database connection string.
*/
private String connectionString;
/**
* The user name for connecting to the database.
*/
private String databaseUser;
/**
* The password to use when connecting to the database.
*/
private String databasePassword;
/**
* The url for the modified NVD CVE (1.2 schema).
*/
private String cveUrl12Modified;
/**
* Base Data Mirror URL for CVE 1.2.
*/
private String cveUrl12Base;
/**
* Data Mirror URL for CVE 2.0.
*/
private String cveUrl20Base;
/**
* The number of hours to wait before re-checking for updates.
*/
private Integer cveValidForHours;
/**
* The url for the modified NVD CVE (2.0 schema).
*/
private String cveUrl20Modified;
/**
* Construct a new UpdateTask.
*/
@@ -44,11 +105,6 @@ public class Update extends Purge {
StaticLoggerBinder.getSingleton().setTask(this);
}
/**
* The Proxy Server.
*/
private String proxyServer;
/**
* Get the value of proxyServer.
*
@@ -67,11 +123,6 @@ public class Update extends Purge {
this.proxyServer = server;
}
/**
* The Proxy Port.
*/
private String proxyPort;
/**
* Get the value of proxyPort.
*
@@ -89,10 +140,6 @@ public class Update extends Purge {
public void setProxyPort(String proxyPort) {
this.proxyPort = proxyPort;
}
/**
* The Proxy username.
*/
private String proxyUsername;
/**
* Get the value of proxyUsername.
@@ -111,10 +158,6 @@ public class Update extends Purge {
public void setProxyUsername(String proxyUsername) {
this.proxyUsername = proxyUsername;
}
/**
* The Proxy password.
*/
private String proxyPassword;
/**
* Get the value of proxyPassword.
@@ -133,10 +176,6 @@ public class Update extends Purge {
public void setProxyPassword(String proxyPassword) {
this.proxyPassword = proxyPassword;
}
/**
* The Connection Timeout.
*/
private String connectionTimeout;
/**
* Get the value of connectionTimeout.
@@ -155,10 +194,6 @@ public class Update extends Purge {
public void setConnectionTimeout(String connectionTimeout) {
this.connectionTimeout = connectionTimeout;
}
/**
* The database driver name; such as org.h2.Driver.
*/
private String databaseDriverName;
/**
* Get the value of databaseDriverName.
@@ -178,11 +213,6 @@ public class Update extends Purge {
this.databaseDriverName = databaseDriverName;
}
/**
* The path to the database driver JAR file if it is not on the class path.
*/
private String databaseDriverPath;
/**
* Get the value of databaseDriverPath.
*
@@ -200,10 +230,6 @@ public class Update extends Purge {
public void setDatabaseDriverPath(String databaseDriverPath) {
this.databaseDriverPath = databaseDriverPath;
}
/**
* The database connection string.
*/
private String connectionString;
/**
* Get the value of connectionString.
@@ -222,10 +248,6 @@ public class Update extends Purge {
public void setConnectionString(String connectionString) {
this.connectionString = connectionString;
}
/**
* The user name for connecting to the database.
*/
private String databaseUser;
/**
* Get the value of databaseUser.
@@ -245,11 +267,6 @@ public class Update extends Purge {
this.databaseUser = databaseUser;
}
/**
* The password to use when connecting to the database.
*/
private String databasePassword;
/**
* Get the value of databasePassword.
*
@@ -268,11 +285,6 @@ public class Update extends Purge {
this.databasePassword = databasePassword;
}
/**
* The url for the modified NVD CVE (1.2 schema).
*/
private String cveUrl12Modified;
/**
* Get the value of cveUrl12Modified.
*
@@ -291,11 +303,6 @@ public class Update extends Purge {
this.cveUrl12Modified = cveUrl12Modified;
}
/**
* The url for the modified NVD CVE (2.0 schema).
*/
private String cveUrl20Modified;
/**
* Get the value of cveUrl20Modified.
*
@@ -314,11 +321,6 @@ public class Update extends Purge {
this.cveUrl20Modified = cveUrl20Modified;
}
/**
* Base Data Mirror URL for CVE 1.2.
*/
private String cveUrl12Base;
/**
* Get the value of cveUrl12Base.
*
@@ -337,11 +339,6 @@ public class Update extends Purge {
this.cveUrl12Base = cveUrl12Base;
}
/**
* Data Mirror URL for CVE 2.0.
*/
private String cveUrl20Base;
/**
* Get the value of cveUrl20Base.
*
@@ -360,11 +357,6 @@ public class Update extends Purge {
this.cveUrl20Base = cveUrl20Base;
}
/**
* The number of hours to wait before re-checking for updates.
*/
private Integer cveValidForHours;
/**
* Get the value of cveValidForHours.
*

View File

@@ -37,6 +37,11 @@ public class StaticLoggerBinder implements LoggerFactoryBinder {
* The unique instance of this class
*/
private static final StaticLoggerBinder SINGLETON = new StaticLoggerBinder();
/**
* Ant tasks have the log method we actually want to call. So we hang onto
* the task as a delegate
*/
private Task task = null;
/**
* Return the singleton of this class.
@@ -47,12 +52,6 @@ public class StaticLoggerBinder implements LoggerFactoryBinder {
return SINGLETON;
}
/**
* Ant tasks have the log method we actually want to call. So we hang onto
* the task as a delegate
*/
private Task task = null;
/**
* Set the Task that this will log through.
*

View File

@@ -3,7 +3,7 @@ Configuration
The dependency-check-update task downloads and updates the local copy of the NVD.
There are several reasons that one may want to use this task; primarily, creating
an update that will be run only once a day or once every few days (but not greater
then 7 days) and then use the `autoUpdate="false"` setting on individual
than 7 days) and then use the `autoUpdate="false"` setting on individual
dependency-check scans. See [Internet Access Required](https://jeremylong.github.io/DependencyCheck/data/index.html)
for more information on why this task would be used.
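For example, a build might wire this up roughly as follows (a minimal sketch: the taskdef setup is omitted, and the target names, properties, and paths are illustrative assumptions, not part of this change):
<target name="update-nvd">
    <!-- refresh the local NVD copy once per scheduled run (e.g. nightly) -->
    <dependency-check-update cveValidForHours="24"/>
</target>
<target name="scan" depends="compile">
    <!-- individual scans skip the update because update-nvd keeps the data current -->
    <dependency-check projectName="my-app"
                      reportOutputDirectory="${build.dir}/reports"
                      autoUpdate="false">
        <fileset dir="${build.dir}/lib">
            <include name="**/*.jar"/>
        </fileset>
    </dependency-check>
</target>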

View File

@@ -36,11 +36,11 @@ cveValidForHours | Sets the number of hours to wait before checking for new
failBuildOnCVSS | Specifies if the build should be failed if a CVSS score above a specified level is identified. The default is 11; since CVSS scores range from 0 to 10, by default the build will never fail. | 11
failOnError | Whether the build should fail if there is an error executing the dependency-check analysis | true
projectName | The name of the project being scanned. | Dependency-Check
reportFormat | The report format to be generated (HTML, XML, VULN, ALL). This configuration option has no effect if using this within the Site plugin unless the externalReport is set to true. | HTML
reportFormat | The report format to be generated (HTML, XML, CSV, JSON, VULN, ALL). This configuration option has no effect if using this within the Site plugin unless the externalReport is set to true. | HTML
reportOutputDirectory | The location to write the report(s). Note, this is not used if generating the report as part of a `mvn site` build | 'target'
suppressionFile | The file path to the XML suppression file \- used to suppress [false positives](../general/suppression.html) | &nbsp;
hintsFile | The file path to the XML hints file \- used to resolve [false negatives](../general/hints.html) | &nbsp;
proxyServer | The Proxy Server; see the [proxy configuration](../data/proxy.html) page for more information. | &nbsp;
proxyPort | The Proxy Port. | &nbsp;
proxyUsername | Defines the proxy user name. | &nbsp;
proxyPassword | Defines the proxy password. | &nbsp;
@@ -72,9 +72,13 @@ cmakeAnalyzerEnabled | Sets whether the [experimental](../analyzers/ind
autoconfAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) autoconf Analyzer should be used. | true
composerAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) PHP Composer Lock File Analyzer should be used. | true
nodeAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Node.js Analyzer should be used. | true
nuspecAnalyzerEnabled | Sets whether the .NET Nuget Nuspec Analyzer will be used. | true
cocoapodsAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Cocoapods Analyzer should be used. | true
bundleAuditAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Bundle Audit Analyzer should be used. | true
bundleAuditPath | Sets the path to the bundle audit executable; only used if bundle audit analyzer is enabled and experimental analyzers are enabled. | &nbsp;
swiftPackageManagerAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Swift Package Manager Analyzer should be used. | true
assemblyAnalyzerEnabled | Sets whether the .NET Assembly Analyzer should be used. | true
pathToMono | The path to Mono for .NET assembly analysis on non-windows systems. | &nbsp;
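As a rough illustration of how several of these options combine (attribute values, paths, and the surrounding taskdef are illustrative assumptions, not part of this change):
<dependency-check projectName="my-app"
                  reportOutputDirectory="${project.build.directory}"
                  reportFormat="ALL"
                  failBuildOnCVSS="7"
                  suppressionFile="config/suppressions.xml"
                  swiftPackageManagerAnalyzerEnabled="false">
    <fileset dir="${project.build.directory}/lib">
        <include name="**/*.jar"/>
    </fileset>
</dependency-check>
With failBuildOnCVSS="7" the build fails when a dependency carries a vulnerability scoring above that threshold, and reportFormat="ALL" writes every supported report format to the configured output directory.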
Advanced Configuration
====================

View File

@@ -31,7 +31,6 @@ import org.owasp.dependencycheck.utils.Settings;
import static org.junit.Assert.assertTrue;
/**
*
* @author Jeremy Long
@@ -65,15 +64,11 @@ public class DependencyCheckTaskTest {
@Test
public void testAddFileSet() throws Exception {
File report = new File("target/dependency-check-report.html");
if (report.exists()) {
if (!report.delete()) {
throw new Exception("Unable to delete 'target/DependencyCheck-Report.html' prior to test.");
}
if (report.exists() && !report.delete()) {
throw new Exception("Unable to delete 'target/DependencyCheck-Report.html' prior to test.");
}
buildFileRule.executeTarget("test.fileset");
assertTrue("DependencyCheck report was not generated", report.exists());
}
/**

View File

@@ -61,11 +61,14 @@
<target name="failCVSS">
<dependency-check
applicationName="test formatBAD"
applicationName="test failCVSS"
reportOutputDirectory="${project.build.directory}"
reportFormat="XML"
autoupdate="false"
failBuildOnCVSS="8">
failBuildOnCVSS="3">
<fileset dir="${project.build.directory}/test-classes/jars">
<include name="axis-1.4.jar"/>
</fileset>
</dependency-check>
</target>
</project>

View File

@@ -20,7 +20,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>1.4.4</version>
<version>1.4.6-SNAPSHOT</version>
</parent>
<artifactId>dependency-check-cli</artifactId>
@@ -69,62 +69,6 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
</archive>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<configuration>
<!--instrumentation>
<ignoreTrivial>true</ignoreTrivial>
</instrumentation-->
<check>
<branchRate>85</branchRate>
<lineRate>85</lineRate>
<haltOnFailure>false</haltOnFailure>
<totalBranchRate>85</totalBranchRate>
<totalLineRate>85</totalLineRate>
<packageLineRate>85</packageLineRate>
<packageBranchRate>85</packageBranchRate>
<regexes>
<regex>
<pattern>.*\$.*</pattern>
<branchRate>0</branchRate>
<lineRate>0</lineRate>
</regex>
<regex>
<pattern>org.owasp.dependencycheck.App</pattern>
<branchRate>0</branchRate>
<lineRate>0</lineRate>
</regex>
</regexes>
</check>
</configuration>
<executions>
<execution>
<goals>
<goal>clean</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<argLine>-Dfile.encoding=UTF-8</argLine>
<systemProperties>
<property>
<name>cpe</name>
<value>data/cpe</value>
<workingDirectory>target</workingDirectory>
</property>
<property>
<name>cve</name>
<value>data/cpe</value>
<workingDirectory>target</workingDirectory>
</property>
</systemProperties>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>appassembler-maven-plugin</artifactId>
@@ -140,6 +84,8 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<binFileExtensions>
<unix>.sh</unix>
</binFileExtensions>
<configurationDirectory>plugins/*</configurationDirectory>
<includeConfigurationDirectoryInClasspath>true</includeConfigurationDirectoryInClasspath>
</configuration>
<executions>
<execution>
@@ -194,7 +140,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<version>${reporting.pmd-plugin.version}</version>
<configuration>
<targetJdk>1.6</targetJdk>
<linkXref>true</linkXref>
<linkXRef>true</linkXRef>
<sourceEncoding>utf-8</sourceEncoding>
<excludes>
<exclude>**/generated/*.java</exclude>

View File

@@ -29,6 +29,13 @@
<outputDirectory>dependency-check/repo</outputDirectory>
<directory>${project.build.directory}/release/repo</directory>
</fileSet>
<fileSet>
<directory>.</directory>
<outputDirectory>dependency-check/plugins</outputDirectory>
<excludes>
<exclude>*/**</exclude>
</excludes>
</fileSet>
<fileSet>
<outputDirectory>dependency-check</outputDirectory>
<includes>
@@ -53,21 +60,4 @@
</includes>
</fileSet>
</fileSets>
<!--
<fileSets>
<fileSet>
<outputDirectory>/</outputDirectory>
<directory>${project.build.directory}</directory>
<includes>
<include>dependency-check*.jar</include>
</includes>
</fileSet>
</fileSets>
<dependencySets>
<dependencySet>
<outputDirectory>/lib</outputDirectory>
<scope>runtime</scope>
</dependencySet>
</dependencySets>
-->
</assembly>

View File

@@ -33,6 +33,7 @@ import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.dependency.Dependency;
import org.apache.tools.ant.DirectoryScanner;
import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.reporting.ReportGenerator;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
@@ -161,8 +162,8 @@ public class App {
try {
final String[] scanFiles = cli.getScanFiles();
if (scanFiles != null) {
runScan(cli.getReportDirectory(), cli.getReportFormat(), cli.getProjectName(), scanFiles,
cli.getExcludeList(), cli.getSymLinkDepth());
exitCode = runScan(cli.getReportDirectory(), cli.getReportFormat(), cli.getProjectName(), scanFiles,
cli.getExcludeList(), cli.getSymLinkDepth(), cli.getFailOnCVSS());
} else {
LOGGER.error("No scan files configured");
}
@@ -203,6 +204,8 @@ public class App {
* @param files the files/directories to scan
* @param excludes the patterns for files/directories to exclude
* @param symLinkDepth the depth that symbolic links will be followed
* @param cvssFailScore the score to fail on if a vulnerability is found
* @return the exit code if there was an error
*
* @throws InvalidScanPathException thrown if the path to scan starts with
* "//"
@@ -213,18 +216,20 @@ public class App {
* analysis; there may be multiple exceptions contained within the
* collection.
*/
private void runScan(String reportDirectory, String outputFormat, String applicationName, String[] files,
String[] excludes, int symLinkDepth) throws InvalidScanPathException, DatabaseException, ExceptionCollection, ReportException {
private int runScan(String reportDirectory, String outputFormat, String applicationName, String[] files,
String[] excludes, int symLinkDepth, int cvssFailScore) throws InvalidScanPathException, DatabaseException,
ExceptionCollection, ReportException {
Engine engine = null;
int retCode = 0;
try {
engine = new Engine();
final List<String> antStylePaths = new ArrayList<String>();
final List<String> antStylePaths = new ArrayList<>();
for (String file : files) {
final String antPath = ensureCanonicalPath(file);
antStylePaths.add(antPath);
}
final Set<File> paths = new HashSet<File>();
final Set<File> paths = new HashSet<>();
for (String file : antStylePaths) {
LOGGER.debug("Scanning {}", file);
final DirectoryScanner scanner = new DirectoryScanner();
@@ -278,17 +283,14 @@ public class App {
}
final List<Dependency> dependencies = engine.getDependencies();
DatabaseProperties prop = null;
CveDB cve = null;
try {
cve = new CveDB();
cve.open();
try (CveDB cve = CveDB.getInstance()) {
prop = cve.getDatabaseProperties();
} finally {
if (cve != null) {
cve.close();
}
} catch (DatabaseException ex) {
//TODO shouldn't this be a fatal exception
LOGGER.debug("Unable to retrieve DB Properties", ex);
}
final ReportGenerator report = new ReportGenerator(applicationName, dependencies, engine.getAnalyzers(), prop);
try {
report.generateReports(reportDirectory, outputFormat);
} catch (ReportException ex) {
@@ -302,12 +304,25 @@ public class App {
if (exCol != null && exCol.getExceptions().size() > 0) {
throw exCol;
}
//Set the exit code based on whether we found a high enough vulnerability
for (Dependency dep : dependencies) {
if (!dep.getVulnerabilities().isEmpty()) {
for (Vulnerability vuln : dep.getVulnerabilities()) {
LOGGER.debug("VULNERABILITY FOUND " + dep.getDisplayFileName());
if (vuln.getCvssScore() > cvssFailScore) {
retCode = 1;
}
}
}
}
return retCode;
} finally {
if (engine != null) {
engine.cleanup();
}
}
}
/**
@@ -338,8 +353,7 @@ public class App {
* @throws InvalidSettingException thrown when a user defined properties
* file is unable to be loaded.
*/
private void populateSettings(CliParser cli) throws InvalidSettingException {
final boolean autoUpdate = cli.isAutoUpdate();
protected void populateSettings(CliParser cli) throws InvalidSettingException {
final String connectionTimeout = cli.getConnectionTimeout();
final String proxyServer = cli.getProxyServer();
final String proxyPort = cli.getProxyPort();
@@ -362,7 +376,8 @@ public class App {
final String cveBase12 = cli.getBaseCve12Url();
final String cveBase20 = cli.getBaseCve20Url();
final Integer cveValidForHours = cli.getCveValidForHours();
final boolean experimentalEnabled = cli.isExperimentalEnabled();
final Boolean autoUpdate = cli.isAutoUpdate();
final Boolean experimentalEnabled = cli.isExperimentalEnabled();
if (propertiesFile != null) {
try {
@@ -375,7 +390,7 @@ public class App {
}
// We have to wait until we've merged the properties before attempting to set whether we use
// the proxy for Nexus since it could be disabled in the properties, but not explicitly stated
// on the command line
// on the command line. This is true of other boolean values set below not using the setBooleanIfNotNull.
final boolean nexusUsesProxy = cli.isNexusUsesProxy();
if (dataDirectory != null) {
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
@@ -389,7 +404,7 @@ public class App {
final File dataDir = new File(base, sub);
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
}
Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);
Settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUser);
@@ -400,7 +415,8 @@ public class App {
Settings.setIntIfNotNull(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
//File Type Analyzer Settings
Settings.setBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, experimentalEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, experimentalEnabled);
Settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED, !cli.isJarDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, !cli.isArchiveDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, !cli.isPythonDistributionDisabled());
@@ -413,6 +429,8 @@ public class App {
Settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, !cli.isOpenSSLDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, !cli.isComposerDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, !cli.isNodeJsDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, !cli.isSwiftPackageAnalyzerDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, !cli.isCocoapodsAnalyzerDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, !cli.isRubyGemspecDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, !cli.isCentralDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, !cli.isNexusDisabled());
@@ -448,7 +466,7 @@ public class App {
encoder.setPattern("%d %C:%L%n%-5level - %msg%n");
encoder.setContext(context);
encoder.start();
final FileAppender<ILoggingEvent> fa = new FileAppender<ILoggingEvent>();
final FileAppender<ILoggingEvent> fa = new FileAppender<>();
fa.setAppend(true);
fa.setEncoder(encoder);
fa.setContext(context);

View File

@@ -120,7 +120,7 @@ public final class CliParser {
Format.valueOf(format);
} catch (IllegalArgumentException ex) {
final String msg = String.format("An invalid 'format' of '%s' was specified. "
+ "Supported output formats are XML, HTML, VULN, or ALL", format);
+ "Supported output formats are HTML, XML, CSV, JSON, VULN, or ALL", format);
throw new ParseException(msg);
}
}
@@ -249,7 +249,7 @@ public final class CliParser {
final Option excludes = Option.builder().argName("pattern").hasArg().longOpt(ARGUMENT.EXCLUDE)
.desc("Specify and exclusion pattern. This option can be specified multiple times"
+ " and it accepts Ant style excludsions.")
+ " and it accepts Ant style exclusions.")
.build();
final Option props = Option.builder(ARGUMENT.PROP_SHORT).argName("file").hasArg().longOpt(ARGUMENT.PROP)
@@ -262,7 +262,7 @@ public final class CliParser {
.build();
final Option outputFormat = Option.builder(ARGUMENT.OUTPUT_FORMAT_SHORT).argName("format").hasArg().longOpt(ARGUMENT.OUTPUT_FORMAT)
.desc("The output format to write to (XML, HTML, VULN, ALL). The default is HTML.")
.desc("The output format to write to (XML, JSON, HTML, VULN, ALL). The default is HTML.")
.build();
final Option verboseLog = Option.builder(ARGUMENT.VERBOSE_LOG_SHORT).argName("file").hasArg().longOpt(ARGUMENT.VERBOSE_LOG)
@@ -286,7 +286,12 @@ public final class CliParser {
.build();
final Option experimentalEnabled = Option.builder().longOpt(ARGUMENT.EXPERIMENTAL)
.desc("Enables the experimental analzers.")
.desc("Enables the experimental analyzers.")
.build();
final Option failOnCVSS = Option.builder().argName("score").hasArg().longOpt(ARGUMENT.FAIL_ON_CVSS)
.desc("Specifies if the build should be failed if a CVSS score above a specified level is identified. "
+ "The default is 11; since the CVSS scores are 0-10, by default the build will never fail.")
.build();
//This is an option group because it can be specified more than once.
@@ -311,7 +316,8 @@ public final class CliParser {
.addOption(suppressionFile)
.addOption(hintsFile)
.addOption(cveValidForHours)
.addOption(experimentalEnabled);
.addOption(experimentalEnabled)
.addOption(failOnCVSS);
}
/**
@@ -431,6 +437,11 @@ public final class CliParser {
final Option disableCmakeAnalyzer = Option.builder().longOpt(ARGUMENT.DISABLE_CMAKE)
.desc("Disable the Cmake Analyzer.").build();
final Option cocoapodsAnalyzerEnabled = Option.builder().longOpt(ARGUMENT.DISABLE_COCOAPODS)
.desc("Disable the CocoaPods Analyzer.").build();
final Option swiftPackageManagerAnalyzerEnabled = Option.builder().longOpt(ARGUMENT.DISABLE_SWIFT)
.desc("Disable the swift package Analyzer.").build();
final Option disableCentralAnalyzer = Option.builder().longOpt(ARGUMENT.DISABLE_CENTRAL)
.desc("Disable the Central Analyzer. If this analyzer is disabled it is likely you also want to disable "
+ "the Nexus Analyzer.").build();
@@ -475,6 +486,8 @@ public final class CliParser {
.addOption(disableNuspecAnalyzer)
.addOption(disableCentralAnalyzer)
.addOption(disableNexusAnalyzer)
.addOption(cocoapodsAnalyzerEnabled)
.addOption(swiftPackageManagerAnalyzerEnabled)
.addOption(Option.builder().longOpt(ARGUMENT.DISABLE_NODE_JS)
.desc("Disable the Node.js Package Analyzer.").build())
.addOption(nexusUrl)
@@ -554,6 +567,32 @@ public final class CliParser {
return value;
}
/**
* Utility method to determine if one of the disable options has been set.
* If not set, this method will check the currently configured settings for
* the current value to return.
*
* Example given `--disableArchive` on the command line would cause this
* method to return true for the disable archive setting.
*
* @param argument the command line argument
* @param setting the corresponding settings key
* @return true if the disable option was set; if not set, the currently
* configured value will be returned
*/
private boolean hasDisableOption(String argument, String setting) {
if (line == null || !line.hasOption(argument)) {
try {
return !Settings.getBoolean(setting);
} catch (InvalidSettingException ise) {
LOGGER.warn("Invalid property setting '{}' defaulting to false", setting);
return false;
}
} else {
return true;
}
}
/**
* Returns true if the disableJar command line argument was specified.
*
@@ -561,7 +600,7 @@ public final class CliParser {
* otherwise false
*/
public boolean isJarDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_JAR);
return hasDisableOption(ARGUMENT.DISABLE_JAR, Settings.KEYS.ANALYZER_JAR_ENABLED);
}
/**
@@ -571,7 +610,7 @@ public final class CliParser {
* otherwise false
*/
public boolean isArchiveDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_ARCHIVE);
return hasDisableOption(ARGUMENT.DISABLE_ARCHIVE, Settings.KEYS.ANALYZER_ARCHIVE_ENABLED);
}
/**
@@ -581,7 +620,7 @@ public final class CliParser {
* otherwise false
*/
public boolean isNuspecDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_NUSPEC);
return hasDisableOption(ARGUMENT.DISABLE_NUSPEC, Settings.KEYS.ANALYZER_NUSPEC_ENABLED);
}
/**
@@ -591,7 +630,7 @@ public final class CliParser {
* otherwise false
*/
public boolean isAssemblyDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_ASSEMBLY);
return hasDisableOption(ARGUMENT.DISABLE_ASSEMBLY, Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED);
}
/**
@@ -602,7 +641,7 @@ public final class CliParser {
* specified; otherwise false
*/
public boolean isBundleAuditDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_BUNDLE_AUDIT);
return hasDisableOption(ARGUMENT.DISABLE_BUNDLE_AUDIT, Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED);
}
/**
@@ -612,7 +651,7 @@ public final class CliParser {
* otherwise false
*/
public boolean isPythonDistributionDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_PY_DIST);
return hasDisableOption(ARGUMENT.DISABLE_PY_DIST, Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED);
}
/**
@@ -622,7 +661,7 @@ public final class CliParser {
* otherwise false
*/
public boolean isPythonPackageDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_PY_PKG);
return hasDisableOption(ARGUMENT.DISABLE_PY_PKG, Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED);
}
/**
@@ -632,7 +671,7 @@ public final class CliParser {
* argument was specified; otherwise false
*/
public boolean isRubyGemspecDisabled() {
return (null != line) && line.hasOption(ARGUMENT.DISABLE_RUBYGEMS);
return hasDisableOption(ARGUMENT.DISABLE_RUBYGEMS, Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED);
}
/**
@@ -642,7 +681,7 @@ public final class CliParser {
* otherwise false
*/
public boolean isCmakeDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_CMAKE);
return hasDisableOption(ARGUMENT.DISABLE_CMAKE, Settings.KEYS.ANALYZER_CMAKE_ENABLED);
}
/**
@@ -652,7 +691,7 @@ public final class CliParser {
* otherwise false
*/
public boolean isAutoconfDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_AUTOCONF);
return hasDisableOption(ARGUMENT.DISABLE_AUTOCONF, Settings.KEYS.ANALYZER_AUTOCONF_ENABLED);
}
/**
@@ -662,7 +701,7 @@ public final class CliParser {
* otherwise false
*/
public boolean isComposerDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_COMPOSER);
return hasDisableOption(ARGUMENT.DISABLE_COMPOSER, Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED);
}
/**
@@ -672,7 +711,7 @@ public final class CliParser {
* otherwise false
*/
public boolean isNexusDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_NEXUS);
return hasDisableOption(ARGUMENT.DISABLE_NEXUS, Settings.KEYS.ANALYZER_NEXUS_ENABLED);
}
/**
@@ -682,7 +721,7 @@ public final class CliParser {
* otherwise false
*/
public boolean isOpenSSLDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_OPENSSL);
return hasDisableOption(ARGUMENT.DISABLE_OPENSSL, Settings.KEYS.ANALYZER_OPENSSL_ENABLED);
}
/**
@@ -692,7 +731,29 @@ public final class CliParser {
* otherwise false
*/
public boolean isNodeJsDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_NODE_JS);
return hasDisableOption(ARGUMENT.DISABLE_NODE_JS, Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED);
}
/**
* Returns true if the disableCocoapodsAnalyzer command line argument was
* specified.
*
* @return true if the disableCocoapodsAnalyzer command line argument was
* specified; otherwise false
*/
public boolean isCocoapodsAnalyzerDisabled() {
return hasDisableOption(ARGUMENT.DISABLE_COCOAPODS, Settings.KEYS.ANALYZER_COCOAPODS_ENABLED);
}
/**
* Returns true if the disableSwiftPackageManagerAnalyzer command line
* argument was specified.
*
* @return true if the disableSwiftPackageManagerAnalyzer command line
* argument was specified; otherwise false
*/
public boolean isSwiftPackageAnalyzerDisabled() {
return hasDisableOption(ARGUMENT.DISABLE_SWIFT, Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED);
}
/**
@@ -702,7 +763,7 @@ public final class CliParser {
* otherwise false
*/
public boolean isCentralDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_CENTRAL);
return hasDisableOption(ARGUMENT.DISABLE_CENTRAL, Settings.KEYS.ANALYZER_CENTRAL_ENABLED);
}
/**
@@ -994,10 +1055,10 @@ public final class CliParser {
* disabled via the command line this will return false.
*
* @return <code>true</code> if auto-update is allowed; otherwise
* <code>false</code>
* <code>null</code>
*/
public boolean isAutoUpdate() {
return line != null && !line.hasOption(ARGUMENT.DISABLE_AUTO_UPDATE);
public Boolean isAutoUpdate() {
return (line != null && line.hasOption(ARGUMENT.DISABLE_AUTO_UPDATE)) ? false : null;
}
/**
@@ -1099,10 +1160,29 @@ public final class CliParser {
/**
* Returns true if the experimental analyzers are enabled.
*
* @return true if the experimental analyzers are enabled; otherwise false
* @return true if the experimental analyzers are enabled; otherwise null
*/
public boolean isExperimentalEnabled() {
return line.hasOption(ARGUMENT.EXPERIMENTAL);
public Boolean isExperimentalEnabled() {
return (line != null && line.hasOption(ARGUMENT.EXPERIMENTAL)) ? true : null;
}
/**
* Returns the CVSS value to fail on.
*
* @return 11 if nothing is set. Otherwise it returns the int passed from
* the command line arg
*/
public int getFailOnCVSS() {
if (line.hasOption(ARGUMENT.FAIL_ON_CVSS)) {
final String value = line.getOptionValue(ARGUMENT.FAIL_ON_CVSS);
try {
return Integer.parseInt(value);
} catch (NumberFormatException nfe) {
return 11;
}
} else {
return 11;
}
}
/**
@@ -1287,8 +1367,7 @@ public final class CliParser {
*/
public static final String SUPPRESSION_FILE = "suppression";
/**
* The CLI argument name for setting the location of the hint
* file.
* The CLI argument name for setting the location of the hint file.
*/
public static final String HINTS_FILE = "hints";
/**
@@ -1328,6 +1407,14 @@ public final class CliParser {
* Disables the Cmake Analyzer.
*/
public static final String DISABLE_CMAKE = "disableCmake";
/**
* Disables the cocoapods analyzer.
*/
public static final String DISABLE_COCOAPODS = "disableCocoapodsAnalyzer";
/**
* Disables the swift package manager analyzer.
*/
public static final String DISABLE_SWIFT = "disableSwiftPackageManagerAnalyzer";
/**
* Disables the Assembly Analyzer.
*/
@@ -1408,5 +1495,9 @@ public final class CliParser {
* The CLI argument to enable the experimental analyzers.
*/
private static final String EXPERIMENTAL = "enableExperimental";
/**
* The CLI argument to specify the CVSS score threshold above which the build fails.
*/
private static final String FAIL_ON_CVSS = "failOnCVSS";
}
}

View File

@@ -4,21 +4,22 @@ Command Line Arguments
The following table lists the command line arguments:
Short | Argument&nbsp;Name&nbsp;&nbsp; | Parameter | Description | Requirement
-------|------------------------|-----------------|-------------|------------
 | \-\-project | \<name\> | The name of the project being scanned. | Required
\-s | \-\-scan | \<path\> | The path to scan \- this option can be specified multiple times. It is also possible to specify Ant style paths (e.g. directory/**/*.jar). | Required
 | \-\-exclude | \<pattern\> | The path patterns to exclude from the scan \- this option can be specified multiple times. This accepts Ant style path patterns (e.g. **/exclude/**). | Optional
 | \-\-symLink | \<depth\> | The depth that symbolic links will be followed; the default is 0 meaning symbolic links will not be followed. | Optional
\-o | \-\-out | \<path\> | The folder to write reports to. This defaults to the current directory. If the format is not set to ALL one could specify a specific file name. | Optional
\-f | \-\-format | \<format\> | The output format to write to (XML, HTML, VULN, ALL). The default is HTML. | Required
\-f | \-\-format | \<format\> | The output format to write to (XML, HTML, CSV, JSON, VULN, ALL). The default is HTML. | Required
 | \-\-failOnCvss | \<score\> | If the score is set between 0 and 10, the exit code from dependency-check will indicate if a vulnerability with a CVSS score equal to or higher than the specified value was identified. | Optional
\-l | \-\-log | \<file\> | The file path to write verbose logging information. | Optional
\-n | \-\-noupdate | | Disables the automatic updating of the CPE data. | Optional
 | \-\-suppression | \<file\> | The file path to the suppression XML file; used to suppress [false positives](../general/suppression.html). | Optional
\-h | \-\-help | | Print the help message. | Optional
 | \-\-advancedHelp | | Print the advanced help message. | Optional
\-v | \-\-version | | Print the version information. | Optional
 | \-\-cveValidForHours | \<hours\> | The number of hours to wait before checking for new updates from the NVD. The default is 4 hours. | Optional
 | \-\-experimental | | Enable the [experimental analyzers](../analyzers/index.html). If not set the analyzers marked as experimental below will not be loaded or used. | Optional
 | \-\-enableExperimental | | Enable the [experimental analyzers](../analyzers/index.html). If not set the analyzers marked as experimental below will not be loaded or used. | Optional
Advanced Options
================
@@ -34,13 +35,15 @@ Short | Argument&nbsp;Name&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; | Paramete
| \-\-disablePyPkg | | Sets whether the [experimental](../analyzers/index.html) Python Package Analyzer will be used. | false
| \-\-disableNodeJS | | Sets whether the [experimental](../analyzers/index.html) Node.js Package Analyzer will be used. | false
| \-\-disableRubygems | | Sets whether the [experimental](../analyzers/index.html) Ruby Gemspec Analyzer will be used. | false
| \-\-disableBundleAudit | | Sets whether the [experimental](../analyzers/index.html) Ruby Bundler Audit Analyzer will be used. | false
| \-\-disableCocoapodsAnalyzer | | Sets whether the [experimental](../analyzers/index.html) Cocoapods Analyzer will be used. | false
| \-\-disableSwiftPackageManagerAnalyzer | | Sets whether the [experimental](../analyzers/index.html) Swift Package Manager Analyzer will be used. | false
| \-\-disableAutoconf | | Sets whether the [experimental](../analyzers/index.html) Autoconf Analyzer will be used. | false
| \-\-disableOpenSSL | | Sets whether the OpenSSL Analyzer will be used. | false
| \-\-disableCmake | | Sets whether the [experimental](../analyzers/index.html) Cmake Analyzer will be disabled. | false
| \-\-disableArchive | | Sets whether the Archive Analyzer will be disabled. | false
| \-\-zipExtensions | \<strings\> | A comma-separated list of additional file extensions to be treated like a ZIP file, the contents will be extracted and analyzed. | &nbsp;
| \-\-disableJar | | Sets whether the Jar Analyzer will be disabled. | false
| \-\-disableComposer | | Sets whether the [experimental](../analyzers/index.html) PHP Composer Lock File Analyzer will be disabled. | false
| \-\-disableCentral | | Sets whether the Central Analyzer will be used. **Disabling this analyzer is not recommended as it could lead to false negatives (e.g. libraries that have vulnerabilities may not be reported correctly).** If this analyzer is being disabled there is a good chance you also want to disable the Nexus Analyzer. | false
| \-\-disableNexus | | Sets whether the Nexus Analyzer will be used. Note, this has been superseded by the Central Analyzer. However, you can configure the Nexus URL to utilize an internally hosted Nexus Pro server. | false

View File

@@ -13,16 +13,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 The OWASP Foundation. All Rights Reserved.
* Copyright (c) 2017 The OWASP Foundation. All Rights Reserved.
*/
package org.owasp.dependencycheck;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import java.io.File;
import java.io.FileNotFoundException;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.UnrecognizedOptionException;
import org.junit.Test;
import static org.junit.Assert.*;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
/**
*
@@ -30,25 +35,6 @@ import static org.junit.Assert.*;
*/
public class AppTest {
public AppTest() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* Test of ensureCanonicalPath method, of class App.
*/
@@ -59,17 +45,83 @@ public class AppTest {
String result = instance.ensureCanonicalPath(file);
assertFalse(result.contains(".."));
assertTrue(result.endsWith("*.jar"));
}
/**
* Test of ensureCanonicalPath method, of class App.
*/
@Test
public void testEnsureCanonicalPath2() {
String file = "../some/skip/../path/file.txt";
App instance = new App();
file = "../some/skip/../path/file.txt";
String expResult = "/some/path/file.txt";
String result = instance.ensureCanonicalPath(file);
result = instance.ensureCanonicalPath(file);
assertTrue("result=" + result, result.endsWith(expResult));
}
@Test(expected = UnrecognizedOptionException.class)
public void testPopulateSettingsException() throws FileNotFoundException, ParseException, InvalidSettingException, URISyntaxException {
String[] args = {"-invalidPROPERTY"};
assertTrue(testBooleanProperties(args, null));
}
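/**
 * Verifies boolean-setting precedence: values loaded from a properties file via -P
 * are applied, and explicit command line flags such as --noupdate or --disableArchive
 * take precedence over them.
 */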
@Test
public void testPopulateSettings() throws FileNotFoundException, ParseException, InvalidSettingException, URISyntaxException {
File prop = new File(this.getClass().getClassLoader().getResource("sample.properties").toURI().getPath());
String[] args = {"-P", prop.getAbsolutePath()};
Map<String, Boolean> expected = new HashMap<>();
expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.TRUE);
assertTrue(testBooleanProperties(args, expected));
String[] args2 = {"-n"};
expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.TRUE);
assertTrue(testBooleanProperties(args2, expected));
String[] args3 = {"-h"};
expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.TRUE);
expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.TRUE);
assertTrue(testBooleanProperties(args3, expected));
String[] args4 = {"--disableArchive"};
expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.TRUE);
expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.FALSE);
assertTrue(testBooleanProperties(args4, expected));
String[] args5 = {"-P", prop.getAbsolutePath(), "--disableArchive"};
expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.FALSE);
assertTrue(testBooleanProperties(args5, expected));
prop = new File(this.getClass().getClassLoader().getResource("sample2.properties").toURI().getPath());
String[] args6 = {"-P", prop.getAbsolutePath(), "--disableArchive"};
expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.TRUE);
expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.FALSE);
assertTrue(testBooleanProperties(args6, expected));
String[] args7 = {"-P", prop.getAbsolutePath(), "--noupdate"};
expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.FALSE);
assertTrue(testBooleanProperties(args7, expected));
String[] args8 = {"-P", prop.getAbsolutePath(), "--noupdate", "--disableArchive"};
expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.FALSE);
assertTrue(testBooleanProperties(args8, expected));
}
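/**
 * Parses the given arguments, populates the settings, and returns true only when every
 * expected boolean setting matches; settings are always cleaned up afterwards.
 */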
private boolean testBooleanProperties(String[] args, Map<String, Boolean> expected) throws URISyntaxException, FileNotFoundException, ParseException, InvalidSettingException {
Settings.initialize();
try {
final CliParser cli = new CliParser();
cli.parse(args);
App instance = new App();
instance.populateSettings(cli);
boolean results = true;
for (Map.Entry<String, Boolean> entry : expected.entrySet()) {
results &= Settings.getBoolean(entry.getKey()) == entry.getValue();
}
return results;
} finally {
Settings.cleanup();
}
}
}

View File

@@ -17,17 +17,14 @@
*/
package org.owasp.dependencycheck;
import org.owasp.dependencycheck.CliParser;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import org.apache.commons.cli.ParseException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.owasp.dependencycheck.utils.Settings;
@@ -48,14 +45,6 @@ public class CliParserTest {
Settings.cleanup(true);
}
@Before
public void setUp() throws Exception {
}
@After
public void tearDown() throws Exception {
}
/**
* Test of parse method, of class CliParser.
*
@@ -115,6 +104,63 @@ public class CliParserTest {
}
/**
* Test of parse method with failOnCVSS without an argument
*
* @throws Exception thrown when an exception occurs.
*/
@Test
public void testParse_failOnCVSSNoArg() throws Exception {
String[] args = {"--failOnCVSS"};
CliParser instance = new CliParser();
try {
instance.parse(args);
} catch (ParseException ex) {
Assert.assertTrue(ex.getMessage().contains("Missing argument"));
}
Assert.assertFalse(instance.isGetVersion());
Assert.assertFalse(instance.isGetHelp());
Assert.assertFalse(instance.isRunScan());
}
/**
* Test of parse method with failOnCVSS invalid argument. It should default to 11
*
* @throws Exception thrown when an exception occurs.
*/
@Test
public void testParse_failOnCVSSInvalidArgument() throws Exception {
String[] args = {"--failOnCVSS","bad"};
CliParser instance = new CliParser();
instance.parse(args);
Assert.assertEquals("Default should be 11", 11, instance.getFailOnCVSS());
Assert.assertFalse(instance.isGetVersion());
Assert.assertFalse(instance.isGetHelp());
Assert.assertFalse(instance.isRunScan());
}
/**
* Test of parse method with failOnCVSS valid argument.
*
* @throws Exception thrown when an exception occurs.
*/
@Test
public void testParse_failOnCVSSValidArgument() throws Exception {
String[] args = {"--failOnCVSS","6"};
CliParser instance = new CliParser();
instance.parse(args);
Assert.assertEquals(6, instance.getFailOnCVSS());
Assert.assertFalse(instance.isGetVersion());
Assert.assertFalse(instance.isGetHelp());
Assert.assertFalse(instance.isRunScan());
}
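Taken together, the three tests above pin down the --failOnCVSS contract: omitting the argument raises a ParseException, an unparsable value falls back to the default of 11 (never fail), and a numeric value is returned as-is. A minimal sketch of the same flow outside JUnit, using only the CliParser calls exercised in these tests (the int return type is inferred from the assertions and is an assumption):

    String[] args = {"--failOnCVSS", "7"};
    CliParser cli = new CliParser();
    try {
        cli.parse(args);                      // throws ParseException when the argument is missing
    } catch (ParseException ex) {
        System.err.println(ex.getMessage());  // e.g. "Missing argument"
    }
    // an unparsable value such as "bad" leaves the default of 11, so the build would never fail
    final int threshold = cli.getFailOnCVSS();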
/**
* Test of parse method with jar and cpe args, of class CliParser.
*
@@ -196,7 +242,7 @@ public class CliParserTest {
*/
@Test
public void testParse_scan_withFileExists() throws Exception {
File path = new File(this.getClass().getClassLoader().getResource("checkSumTest.file").getPath());
File path = new File(this.getClass().getClassLoader().getResource("checkSumTest.file").toURI().getPath());
String[] args = {"-scan", path.getCanonicalPath(), "-out", "./", "-app", "test"};
CliParser instance = new CliParser();

View File

@@ -0,0 +1,33 @@
autoupdate=false
analyzer.experimental.enabled=false
analyzer.jar.enabled=true
analyzer.archive.enabled=true
analyzer.node.package.enabled=true
analyzer.composer.lock.enabled=true
analyzer.python.distribution.enabled=true
analyzer.python.package.enabled=true
analyzer.ruby.gemspec.enabled=true
analyzer.autoconf.enabled=true
analyzer.cmake.enabled=true
analyzer.assembly.enabled=true
analyzer.nuspec.enabled=true
analyzer.openssl.enabled=true
analyzer.central.enabled=true
analyzer.nexus.enabled=false
analyzer.cocoapods.enabled=true
analyzer.swift.package.manager.enabled=true
#whether the nexus analyzer uses the proxy
analyzer.nexus.proxy=true
analyzer.cpe.enabled=true
analyzer.cpesuppression.enabled=true
analyzer.dependencybundling.enabled=true
analyzer.dependencymerging.enabled=true
analyzer.falsepositive.enabled=true
analyzer.filename.enabled=true
analyzer.hint.enabled=true
analyzer.nvdcve.enabled=true
analyzer.vulnerabilitysuppression.enabled=true
updater.nvdcve.enabled=true
updater.versioncheck.enabled=true
analyzer.versionfilter.enabled=true
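This sample properties file backs the testPopulateSettings case above: passing it with -P seeds the Settings singleton, and flags such as --noupdate or --disableArchive then override the matching keys. A minimal sketch of that flow, reusing only the calls already made in testBooleanProperties (the helper name and its parameter are hypothetical):

    // Mirrors testBooleanProperties above; the declared exceptions are the ones that test declares.
    static boolean archiveDisabled(String propertiesPath)
            throws ParseException, FileNotFoundException, InvalidSettingException {
        Settings.initialize();
        try {
            final CliParser cli = new CliParser();
            cli.parse(new String[]{"-P", propertiesPath, "--disableArchive"});
            new App().populateSettings(cli);
            // autoupdate=false comes from the file; the flag turns the archive analyzer off
            return !Settings.getBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED);
        } finally {
            Settings.cleanup();
        }
    }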

View File

@@ -0,0 +1,33 @@
autoupdate=true
analyzer.experimental.enabled=true
analyzer.jar.enabled=false
analyzer.archive.enabled=false
analyzer.node.package.enabled=false
analyzer.composer.lock.enabled=false
analyzer.python.distribution.enabled=false
analyzer.python.package.enabled=false
analyzer.ruby.gemspec.enabled=false
analyzer.autoconf.enabled=false
analyzer.cmake.enabled=false
analyzer.assembly.enabled=false
analyzer.nuspec.enabled=false
analyzer.openssl.enabled=false
analyzer.central.enabled=false
analyzer.nexus.enabled=true
analyzer.cocoapods.enabled=false
analyzer.swift.package.manager.enabled=false
#whether the nexus analyzer uses the proxy
analyzer.nexus.proxy=false
analyzer.cpe.enabled=false
analyzer.cpesuppression.enabled=false
analyzer.dependencybundling.enabled=false
analyzer.dependencymerging.enabled=false
analyzer.falsepositive.enabled=false
analyzer.filename.enabled=false
analyzer.hint.enabled=false
analyzer.nvdcve.enabled=false
analyzer.vulnerabilitysuppression.enabled=false
updater.nvdcve.enabled=false
updater.versioncheck.enabled=false
analyzer.versionfilter.enabled=false

View File

@@ -20,7 +20,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>1.4.4</version>
<version>1.4.6-SNAPSHOT</version>
</parent>
<artifactId>dependency-check-core</artifactId>
@@ -121,93 +121,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<configuration>
<instrumentation>
<!--ignoreTrivial>true</ignoreTrivial-->
<ignores>
<ignore>.*\$KEYS\.class</ignore>
<ignore>.*\$Element\.class</ignore>
</ignores>
<excludes>
<exclude>.*\$KEYS\.class</exclude>
<exclude>.*\$Element\.class</exclude>
</excludes>
</instrumentation>
<check>
<branchRate>85</branchRate>
<lineRate>85</lineRate>
<haltOnFailure>false</haltOnFailure>
<totalBranchRate>85</totalBranchRate>
<totalLineRate>85</totalLineRate>
<packageLineRate>85</packageLineRate>
<packageBranchRate>85</packageBranchRate>
<regexes>
<regex>
<pattern>.*\$.*</pattern>
<branchRate>0</branchRate>
<lineRate>0</lineRate>
</regex>
<regex>
<pattern>org.owasp.dependencycheck.data.cpe.Fields</pattern>
<branchRate>0</branchRate>
<lineRate>0</lineRate>
</regex>
<regex>
<pattern>org.owasp.dependencycheck.App</pattern>
<branchRate>0</branchRate>
<lineRate>0</lineRate>
</regex>
</regexes>
</check>
</configuration>
<executions>
<execution>
<goals>
<goal>clean</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<argLine>-Dfile.encoding=UTF-8</argLine>
<systemProperties>
<property>
<name>data.directory</name>
<value>${project.build.directory}/data</value>
</property>
<property>
<name>temp.directory</name>
<value>${project.build.directory}/temp</value>
</property>
</systemProperties>
<excludes>
<exclude>**/*IntegrationTest.java</exclude>
<exclude>**/*MySQLTest.java</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<configuration>
<systemProperties>
<property>
<name>data.directory</name>
<value>${project.build.directory}/data</value>
</property>
<property>
<name>temp.directory</name>
<value>${project.build.directory}/temp</value>
</property>
</systemProperties>
</configuration>
</plugin>
</plugins>
</build>
<reporting>
@@ -244,7 +157,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<version>${reporting.pmd-plugin.version}</version>
<configuration>
<targetJdk>1.6</targetJdk>
<linkXref>true</linkXref>
<linkXRef>true</linkXRef>
<sourceEncoding>utf-8</sourceEncoding>
<excludes>
<exclude>**/generated/*.java</exclude>
@@ -261,6 +174,10 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</reporting>
<dependencies>
<!-- Note, to stay compatible with Jenkins installations only JARs compiled to 1.6 can be used -->
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</dependency>
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>annotations</artifactId>
@@ -336,6 +253,10 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<groupId>com.sun.mail</groupId>
<artifactId>mailapi</artifactId>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
</dependency>
<!-- The following dependencies are only used during testing -->
<dependency>
<groupId>org.apache.maven.scm</groupId>
@@ -466,6 +387,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<scope>test</scope>
<optional>true</optional>
</dependency>
</dependencies>
<profiles>
<profile>
@@ -477,13 +399,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
@@ -503,7 +418,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</property>
</systemProperties>
<includes>
<include>**/*MySQLTest.java</include>
<include>**/*MySqlIT.java</include>
</includes>
</configuration>
<executions>
@@ -534,13 +449,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
@@ -560,7 +468,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</property>
</systemProperties>
<includes>
<include>**/*MySQLTest.java</include>
<include>**/*MySqlIT.java</include>
</includes>
</configuration>
<executions>
@@ -575,15 +483,19 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</plugins>
</build>
</profile>
<profile>
<!-- The following profile adds additional
dependencies that are only used during testing.
Additionally, these are only added when using "allTests" to
make the build slightly faster in most cases. -->
<!--
The following profile adds additional dependencies that are only
used during testing.
TODO move the following FP tests to a separate invoker test in the
maven plugin project. Add checks against the XML to validate that
these do not report FP.
-->
<!--profile>
<id>False Positive Tests</id>
<activation>
<property>
<name>allTests</name>
<name>releaseTesting</name>
</property>
</activation>
<dependencies>
@@ -664,20 +576,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
<version>3.2.12.RELEASE</version>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.3.1</version>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.google.gerrit</groupId>
<artifactId>gerrit-extension-api</artifactId>
@@ -728,6 +626,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<optional>true</optional>
</dependency>
</dependencies>
</profile>
</profile-->
</profiles>
</project>

View File

@@ -29,12 +29,12 @@ import java.util.List;
import java.util.concurrent.Callable;
/**
* Task to support parallelism of dependency-check analysis.
* Analyses a single {@link Dependency} by a specific {@link Analyzer}.
* Task to support parallelism of dependency-check analysis. Analyses a single
* {@link Dependency} by a specific {@link Analyzer}.
*
* @author Stefan Neuhaus
*/
class AnalysisTask implements Callable<Void> {
public class AnalysisTask implements Callable<Void> {
/**
* Instance of the logger.
@@ -57,6 +57,10 @@ class AnalysisTask implements Callable<Void> {
* The list of exceptions that may occur during analysis.
*/
private final List<Throwable> exceptions;
/**
* A reference to the global settings object.
*/
private final Settings settings;
/**
* Creates a new analysis task.
@@ -66,40 +70,46 @@ class AnalysisTask implements Callable<Void> {
* @param engine the dependency-check engine
* @param exceptions exceptions that occur during analysis will be added to
* this collection of exceptions
* @param settings a reference to the global settings object; this is
* necessary so that when the thread is started the dependencies have a
* correct reference to the global settings.
*/
AnalysisTask(Analyzer analyzer, Dependency dependency, Engine engine, List<Throwable> exceptions) {
AnalysisTask(Analyzer analyzer, Dependency dependency, Engine engine, List<Throwable> exceptions, Settings settings) {
this.analyzer = analyzer;
this.dependency = dependency;
this.engine = engine;
this.exceptions = exceptions;
this.settings = settings;
}
/**
* Executes the analysis task.
*
* @return null
* @throws Exception thrown if unable to execute the analysis task
*/
@Override
public Void call() {
Settings.initialize();
try {
Settings.setInstance(settings);
if (shouldAnalyze()) {
LOGGER.debug("Begin Analysis of '{}' ({})", dependency.getActualFilePath(), analyzer.getName());
try {
analyzer.analyze(dependency, engine);
} catch (AnalysisException ex) {
LOGGER.warn("An error occurred while analyzing '{}' ({}).", dependency.getActualFilePath(), analyzer.getName());
LOGGER.debug("", ex);
exceptions.add(ex);
} catch (Throwable ex) {
LOGGER.warn("An unexpected error occurred during analysis of '{}' ({}): {}",
dependency.getActualFilePath(), analyzer.getName(), ex.getMessage());
LOGGER.debug("", ex);
exceptions.add(ex);
if (shouldAnalyze()) {
LOGGER.debug("Begin Analysis of '{}' ({})", dependency.getActualFilePath(), analyzer.getName());
try {
analyzer.analyze(dependency, engine);
} catch (AnalysisException ex) {
LOGGER.warn("An error occurred while analyzing '{}' ({}).", dependency.getActualFilePath(), analyzer.getName());
LOGGER.debug("", ex);
exceptions.add(ex);
} catch (Throwable ex) {
LOGGER.warn("An unexpected error occurred during analysis of '{}' ({}): {}",
dependency.getActualFilePath(), analyzer.getName(), ex.getMessage());
LOGGER.debug("", ex);
exceptions.add(ex);
}
}
} finally {
Settings.cleanup(false);
}
return null;
}
@@ -108,7 +118,7 @@ class AnalysisTask implements Callable<Void> {
*
* @return whether or not the analyzer can analyze the dependency
*/
boolean shouldAnalyze() {
protected boolean shouldAnalyze() {
if (analyzer instanceof FileTypeAnalyzer) {
final FileTypeAnalyzer fileTypeAnalyzer = (FileTypeAnalyzer) analyzer;
return fileTypeAnalyzer.accept(dependency.getActualFile());
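The key change above is that each task now carries an explicit Settings reference and installs it via Settings.setInstance(settings) inside call(), so worker threads share the caller's configuration instead of re-initializing their own. A minimal sketch of building and running one task from the engine side, under the constructor shown above (same package, since the constructor is package-private; checked exceptions from Future.get() are omitted for brevity):

    List<Throwable> exceptions = Collections.synchronizedList(new ArrayList<Throwable>());
    AnalysisTask task = new AnalysisTask(analyzer, dependency, engine, exceptions, Settings.getInstance());
    ExecutorService pool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
    Future<Void> done = pool.submit(task);  // call() installs the shared Settings before analyzing
    done.get();                             // analysis errors are collected in 'exceptions', not thrown
    pool.shutdown();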

View File

@@ -38,6 +38,7 @@ import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -71,18 +72,22 @@ public class Engine implements FileFilter {
/**
* A Map of analyzers grouped by Analysis phase.
*/
private final Map<AnalysisPhase, List<Analyzer>> analyzers = new EnumMap<AnalysisPhase, List<Analyzer>>(AnalysisPhase.class);
private final Map<AnalysisPhase, List<Analyzer>> analyzers = new EnumMap<>(AnalysisPhase.class);
/**
* A Map of analyzers grouped by Analysis phase.
*/
private final Set<FileTypeAnalyzer> fileTypeAnalyzers = new HashSet<FileTypeAnalyzer>();
private final Set<FileTypeAnalyzer> fileTypeAnalyzers = new HashSet<>();
/**
* The ClassLoader to use when dynamically loading Analyzer and Update
* services.
*/
private ClassLoader serviceClassLoader = Thread.currentThread().getContextClassLoader();
/**
* A reference to the database.
*/
private CveDB database = null;
/**
* The Logger for use throughout the class.
*/
@@ -126,6 +131,10 @@ public class Engine implements FileFilter {
* Properly cleans up resources allocated during analysis.
*/
public void cleanup() {
if (database != null) {
database.close();
database = null;
}
ConnectionFactory.cleanup();
}
@@ -163,15 +172,16 @@ public class Engine implements FileFilter {
/**
* Get the dependencies identified. The returned list is a reference to the
* engine's synchronized list. You must synchronize on it, when you modify
* and iterate over it from multiple threads. E.g. this holds for analyzers
* supporting parallel processing during their analysis phase.
* engine's synchronized list. <b>You must synchronize on the returned
* list</b> when you modify and iterate over it from multiple threads. E.g.
* this holds for analyzers supporting parallel processing during their
* analysis phase.
*
* @return the dependencies identified
* @see Collections#synchronizedList(List)
* @see Analyzer#supportsParallelProcessing()
*/
public List<Dependency> getDependencies() {
public synchronized List<Dependency> getDependencies() {
return dependencies;
}
@@ -212,7 +222,7 @@ public class Engine implements FileFilter {
* @since v1.4.4
*/
public List<Dependency> scan(String[] paths, String projectReference) {
final List<Dependency> deps = new ArrayList<Dependency>();
final List<Dependency> deps = new ArrayList<>();
for (String path : paths) {
final List<Dependency> d = scan(path, projectReference);
if (d != null) {
@@ -275,7 +285,7 @@ public class Engine implements FileFilter {
* @since v1.4.4
*/
public List<Dependency> scan(File[] files, String projectReference) {
final List<Dependency> deps = new ArrayList<Dependency>();
final List<Dependency> deps = new ArrayList<>();
for (File file : files) {
final List<Dependency> d = scan(file, projectReference);
if (d != null) {
@@ -310,7 +320,7 @@ public class Engine implements FileFilter {
* @since v1.4.4
*/
public List<Dependency> scan(Collection<File> files, String projectReference) {
final List<Dependency> deps = new ArrayList<Dependency>();
final List<Dependency> deps = new ArrayList<>();
for (File file : files) {
final List<Dependency> d = scan(file, projectReference);
if (d != null) {
@@ -351,7 +361,7 @@ public class Engine implements FileFilter {
} else {
final Dependency d = scanFile(file, projectReference);
if (d != null) {
final List<Dependency> deps = new ArrayList<Dependency>();
final List<Dependency> deps = new ArrayList<>();
deps.add(d);
return deps;
}
@@ -383,7 +393,7 @@ public class Engine implements FileFilter {
*/
protected List<Dependency> scanDirectory(File dir, String projectReference) {
final File[] files = dir.listFiles();
final List<Dependency> deps = new ArrayList<Dependency>();
final List<Dependency> deps = new ArrayList<>();
if (files != null) {
for (File f : files) {
if (f.isDirectory()) {
@@ -477,31 +487,14 @@ public class Engine implements FileFilter {
*/
public void analyzeDependencies() throws ExceptionCollection {
final List<Throwable> exceptions = Collections.synchronizedList(new ArrayList<Throwable>());
boolean autoUpdate = true;
try {
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
} catch (InvalidSettingException ex) {
LOGGER.debug("Invalid setting for auto-update; using true.");
exceptions.add(ex);
}
if (autoUpdate) {
try {
doUpdates();
} catch (UpdateException ex) {
exceptions.add(ex);
LOGGER.warn("Unable to update Cached Web DataSource, using local "
+ "data instead. Results may not include recent vulnerabilities.");
LOGGER.debug("Update Error", ex);
}
}
initializeAndUpdateDatabase(exceptions);
//need to ensure that data exists
try {
ensureDataExists();
} catch (NoDataException ex) {
throwFatalExceptionCollection("Unable to continue dependency-check analysis.", ex, exceptions);
} catch (DatabaseException ex) {
throwFatalExceptionCollection("Unable to connect to the dependency-check database.", ex, exceptions);
}
LOGGER.debug("\n----------------------------------------------------\nBEGIN ANALYSIS\n----------------------------------------------------");
@@ -521,11 +514,15 @@ public class Engine implements FileFilter {
continue;
}
executeAnalysisTasks(analyzer, exceptions);
if (analyzer.isEnabled()) {
executeAnalysisTasks(analyzer, exceptions);
final long analyzerDurationMillis = System.currentTimeMillis() - analyzerStart;
final long analyzerDurationSeconds = TimeUnit.MILLISECONDS.toSeconds(analyzerDurationMillis);
LOGGER.info("Finished {} ({} seconds)", analyzer.getName(), analyzerDurationSeconds);
final long analyzerDurationMillis = System.currentTimeMillis() - analyzerStart;
final long analyzerDurationSeconds = TimeUnit.MILLISECONDS.toSeconds(analyzerDurationMillis);
LOGGER.info("Finished {} ({} seconds)", analyzer.getName(), analyzerDurationSeconds);
} else {
LOGGER.debug("Skipping {} (not enabled)", analyzer.getName());
}
}
}
for (AnalysisPhase phase : AnalysisPhase.values()) {
@@ -544,6 +541,47 @@ public class Engine implements FileFilter {
}
}
/**
* Performs any necessary updates and initializes the database.
*
* @param exceptions a collection to store non-fatal exceptions
* @throws ExceptionCollection thrown if fatal exceptions occur
*/
private void initializeAndUpdateDatabase(final List<Throwable> exceptions) throws ExceptionCollection {
boolean autoUpdate = true;
try {
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
} catch (InvalidSettingException ex) {
LOGGER.debug("Invalid setting for auto-update; using true.");
exceptions.add(ex);
}
if (autoUpdate) {
try {
database = CveDB.getInstance();
doUpdates();
} catch (UpdateException ex) {
exceptions.add(ex);
LOGGER.warn("Unable to update Cached Web DataSource, using local "
+ "data instead. Results may not include recent vulnerabilities.");
LOGGER.debug("Update Error", ex);
} catch (DatabaseException ex) {
throw new ExceptionCollection("Unable to connect to the database", ex);
}
} else {
try {
if (ConnectionFactory.isH2Connection() && !ConnectionFactory.h2DataFileExists()) {
throw new ExceptionCollection(new NoDataException("Autoupdate is disabled and the database does not exist"), true);
} else {
database = CveDB.getInstance();
}
} catch (IOException ex) {
throw new ExceptionCollection(new DatabaseException("Autoupdate is disabled and unable to connect to the database"), true);
} catch (DatabaseException ex) {
throwFatalExceptionCollection("Unable to connect to the dependency-check database.", ex, exceptions);
}
}
}
/**
* Executes the analyzer using multiple threads.
*
@@ -552,7 +590,7 @@ public class Engine implements FileFilter {
* @param analyzer the analyzer to execute
* @throws ExceptionCollection thrown if exceptions occurred during analysis
*/
void executeAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) throws ExceptionCollection {
protected void executeAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) throws ExceptionCollection {
LOGGER.debug("Starting {}", analyzer.getName());
final List<AnalysisTask> analysisTasks = getAnalysisTasks(analyzer, exceptions);
final ExecutorService executorService = getExecutorService(analyzer);
@@ -584,11 +622,11 @@ public class Engine implements FileFilter {
* @param exceptions the collection of exceptions to collect
* @return a collection of analysis tasks
*/
List<AnalysisTask> getAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) {
final List<AnalysisTask> result = new ArrayList<AnalysisTask>();
protected List<AnalysisTask> getAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) {
final List<AnalysisTask> result = new ArrayList<>();
synchronized (dependencies) {
for (final Dependency dependency : dependencies) {
final AnalysisTask task = new AnalysisTask(analyzer, dependency, this, exceptions);
final AnalysisTask task = new AnalysisTask(analyzer, dependency, this, exceptions, Settings.getInstance());
result.add(task);
}
}
@@ -601,11 +639,9 @@ public class Engine implements FileFilter {
* @param analyzer the analyzer to obtain an executor
* @return the executor service
*/
ExecutorService getExecutorService(Analyzer analyzer) {
protected ExecutorService getExecutorService(Analyzer analyzer) {
if (analyzer.supportsParallelProcessing()) {
// just a fair trade-off that should be reasonable for all analyzer types
final int maximumNumberOfThreads = 4 * Runtime.getRuntime().availableProcessors();
final int maximumNumberOfThreads = Runtime.getRuntime().availableProcessors();
LOGGER.debug("Parallel processing with up to {} threads: {}.", maximumNumberOfThreads, analyzer.getName());
return Executors.newFixedThreadPool(maximumNumberOfThreads);
} else {
@@ -618,11 +654,10 @@ public class Engine implements FileFilter {
* Initializes the given analyzer.
*
* @param analyzer the analyzer to initialize
* @return the initialized analyzer
* @throws InitializationException thrown when there is a problem
* initializing the analyzer
*/
protected Analyzer initializeAnalyzer(Analyzer analyzer) throws InitializationException {
protected void initializeAnalyzer(Analyzer analyzer) throws InitializationException {
try {
LOGGER.debug("Initializing {}", analyzer.getName());
analyzer.initialize();
@@ -645,7 +680,6 @@ public class Engine implements FileFilter {
}
throw new InitializationException("Unexpected Exception", ex);
}
return analyzer;
}
/**
@@ -687,7 +721,7 @@ public class Engine implements FileFilter {
* @return a list of Analyzers
*/
public List<Analyzer> getAnalyzers() {
final List<Analyzer> ret = new ArrayList<Analyzer>();
final List<Analyzer> ret = new ArrayList<>();
for (AnalysisPhase phase : AnalysisPhase.values()) {
final List<Analyzer> analyzerList = analyzers.get(phase);
ret.addAll(analyzerList);
@@ -740,20 +774,10 @@ public class Engine implements FileFilter {
* NoDataException is thrown.
*
* @throws NoDataException thrown if no data exists in the CPE Index
* @throws DatabaseException thrown if there is an exception opening the
* database
*/
private void ensureDataExists() throws NoDataException, DatabaseException {
final CveDB cve = new CveDB();
try {
cve.open();
if (!cve.dataExists()) {
throw new NoDataException("No documents exist");
}
} catch (DatabaseException ex) {
throw new NoDataException(ex.getMessage(), ex);
} finally {
cve.close();
private void ensureDataExists() throws NoDataException {
if (database == null || !database.dataExists()) {
throw new NoDataException("No documents exist");
}
}
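One behavioral note from the hunks above bears repeating: getDependencies() hands back the engine's synchronized list, so multi-threaded callers must hold the list's monitor while iterating, exactly as the updated Javadoc says. A minimal sketch of that pattern (assuming an SLF4J logger named LOGGER, as used elsewhere in the codebase):

    final List<Dependency> deps = engine.getDependencies();
    synchronized (deps) {
        for (Dependency dependency : deps) {
            // other threads cannot mutate the list until the monitor is released
            LOGGER.debug("scanned: {}", dependency.getFileName());
        }
    }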

View File

@@ -28,6 +28,7 @@ import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.exception.ExceptionCollection;
import org.owasp.dependencycheck.exception.ReportException;
import org.owasp.dependencycheck.exception.ScanAgentException;
import org.owasp.dependencycheck.reporting.ReportGenerator;
import org.owasp.dependencycheck.utils.Settings;
@@ -63,6 +64,7 @@ import org.slf4j.LoggerFactory;
@SuppressWarnings("unused")
public class DependencyCheckScanAgent {
//<editor-fold defaultstate="collapsed" desc="private fields">
/**
* System specific new line character.
*/
@@ -75,6 +77,141 @@ public class DependencyCheckScanAgent {
* The application name for the report.
*/
private String applicationName = "Dependency-Check";
/**
* The pre-determined dependencies to scan
*/
private List<Dependency> dependencies;
/**
* The location of the data directory that contains
*/
private String dataDirectory = null;
/**
* Specifies the destination directory for the generated Dependency-Check
* report.
*/
private String reportOutputDirectory;
/**
* Specifies whether the build should fail if a CVSS score above a specified
* level is identified. Since CVSS scores range from 0 to 10, the default of 11
* means the build will never fail. The valid range for this setting is 0 to 11,
* where anything above 10 will not cause the build to fail.
*/
private float failBuildOnCVSS = 11;
/**
* Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not
* recommended that this be set to false. Default is true.
*/
private boolean autoUpdate = true;
/**
* flag indicating whether or not to generate a report of findings.
*/
private boolean generateReport = true;
/**
* The report format to be generated (HTML, XML, VULN, ALL). This
* configuration option has no effect if using this within the Site plugin
* unless the externalReport is set to true. Default is HTML.
*/
private ReportGenerator.Format reportFormat = ReportGenerator.Format.HTML;
/**
* The Proxy Server.
*/
private String proxyServer;
/**
* The Proxy Port.
*/
private String proxyPort;
/**
* The Proxy username.
*/
private String proxyUsername;
/**
* The Proxy password.
*/
private String proxyPassword;
/**
* The Connection Timeout.
*/
private String connectionTimeout;
/**
* The file path used for verbose logging.
*/
private String logFile = null;
/**
* flag indicating whether or not to show a summary of findings.
*/
private boolean showSummary = true;
/**
* The path to the suppression file.
*/
private String suppressionFile;
/**
* The password to use when connecting to the database.
*/
private String databasePassword;
/**
* Whether or not the Maven Central analyzer is enabled.
*/
private boolean centralAnalyzerEnabled = true;
/**
* The URL of Maven Central.
*/
private String centralUrl;
/**
* Whether or not the nexus analyzer is enabled.
*/
private boolean nexusAnalyzerEnabled = true;
/**
* The URL of the Nexus server.
*/
private String nexusUrl;
/**
* Whether or not the defined proxy should be used when connecting to Nexus.
*/
private boolean nexusUsesProxy = true;
/**
* The database driver name; such as org.h2.Driver.
*/
private String databaseDriverName;
/**
* The path to the database driver JAR file if it is not on the class path.
*/
private String databaseDriverPath;
/**
* The database connection string.
*/
private String connectionString;
/**
* The user name for connecting to the database.
*/
private String databaseUser;
/**
* Additional ZIP file extensions to analyze. This should be a
* comma-separated list of file extensions to treat like ZIP files.
*/
private String zipExtensions;
/**
* The url for the modified NVD CVE (1.2 schema).
*/
private String cveUrl12Modified;
/**
* The url for the modified NVD CVE (2.0 schema).
*/
private String cveUrl20Modified;
/**
* Base Data Mirror URL for CVE 1.2.
*/
private String cveUrl12Base;
/**
* Data Mirror URL for CVE 2.0.
*/
private String cveUrl20Base;
/**
* The path to Mono for .NET assembly analysis on non-windows systems.
*/
private String pathToMono;
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="getters/setters">
/**
* Get the value of applicationName.
@@ -94,11 +231,6 @@ public class DependencyCheckScanAgent {
this.applicationName = applicationName;
}
/**
* The pre-determined dependencies to scan
*/
private List<Dependency> dependencies;
/**
* Returns a list of pre-determined dependencies.
*
@@ -117,11 +249,6 @@ public class DependencyCheckScanAgent {
this.dependencies = dependencies;
}
/**
* The location of the data directory that contains
*/
private String dataDirectory = null;
/**
* Get the value of dataDirectory.
*
@@ -140,12 +267,6 @@ public class DependencyCheckScanAgent {
this.dataDirectory = dataDirectory;
}
/**
* Specifies the destination directory for the generated Dependency-Check
* report.
*/
private String reportOutputDirectory;
/**
* Get the value of reportOutputDirectory.
*
@@ -164,15 +285,6 @@ public class DependencyCheckScanAgent {
this.reportOutputDirectory = reportOutputDirectory;
}
/**
* Specifies whether the build should fail if a CVSS score above a specified
* level is identified. Since CVSS scores range from 0 to 10, the default of 11
* means the build will never fail. The valid range for this setting is 0 to 11,
* where anything above 10 will not cause the build to fail.
*/
private float failBuildOnCVSS = 11;
/**
* Get the value of failBuildOnCVSS.
*
@@ -191,12 +303,6 @@ public class DependencyCheckScanAgent {
this.failBuildOnCVSS = failBuildOnCVSS;
}
/**
* Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not
* recommended that this be set to false. Default is true.
*/
private boolean autoUpdate = true;
/**
* Get the value of autoUpdate.
*
@@ -215,11 +321,6 @@ public class DependencyCheckScanAgent {
this.autoUpdate = autoUpdate;
}
/**
* flag indicating whether or not to generate a report of findings.
*/
private boolean generateReport = true;
/**
* Get the value of generateReport.
*
@@ -238,13 +339,6 @@ public class DependencyCheckScanAgent {
this.generateReport = generateReport;
}
/**
* The report format to be generated (HTML, XML, VULN, ALL). This
* configuration option has no effect if using this within the Site plugin
* unless the externalReport is set to true. Default is HTML.
*/
private ReportGenerator.Format reportFormat = ReportGenerator.Format.HTML;
/**
* Get the value of reportFormat.
*
@@ -263,11 +357,6 @@ public class DependencyCheckScanAgent {
this.reportFormat = reportFormat;
}
/**
* The Proxy Server.
*/
private String proxyServer;
/**
* Get the value of proxyServer.
*
@@ -311,11 +400,6 @@ public class DependencyCheckScanAgent {
this.proxyServer = proxyUrl;
}
/**
* The Proxy Port.
*/
private String proxyPort;
/**
* Get the value of proxyPort.
*
@@ -334,11 +418,6 @@ public class DependencyCheckScanAgent {
this.proxyPort = proxyPort;
}
/**
* The Proxy username.
*/
private String proxyUsername;
/**
* Get the value of proxyUsername.
*
@@ -357,11 +436,6 @@ public class DependencyCheckScanAgent {
this.proxyUsername = proxyUsername;
}
/**
* The Proxy password.
*/
private String proxyPassword;
/**
* Get the value of proxyPassword.
*
@@ -380,11 +454,6 @@ public class DependencyCheckScanAgent {
this.proxyPassword = proxyPassword;
}
/**
* The Connection Timeout.
*/
private String connectionTimeout;
/**
* Get the value of connectionTimeout.
*
@@ -403,11 +472,6 @@ public class DependencyCheckScanAgent {
this.connectionTimeout = connectionTimeout;
}
/**
* The file path used for verbose logging.
*/
private String logFile = null;
/**
* Get the value of logFile.
*
@@ -426,11 +490,6 @@ public class DependencyCheckScanAgent {
this.logFile = logFile;
}
/**
* The path to the suppression file.
*/
private String suppressionFile;
/**
* Get the value of suppressionFile.
*
@@ -449,11 +508,6 @@ public class DependencyCheckScanAgent {
this.suppressionFile = suppressionFile;
}
/**
* flag indicating whether or not to show a summary of findings.
*/
private boolean showSummary = true;
/**
* Get the value of showSummary.
*
@@ -472,11 +526,6 @@ public class DependencyCheckScanAgent {
this.showSummary = showSummary;
}
/**
* Whether or not the Maven Central analyzer is enabled.
*/
private boolean centralAnalyzerEnabled = true;
/**
* Get the value of centralAnalyzerEnabled.
*
@@ -495,11 +544,6 @@ public class DependencyCheckScanAgent {
this.centralAnalyzerEnabled = centralAnalyzerEnabled;
}
/**
* The URL of Maven Central.
*/
private String centralUrl;
/**
* Get the value of centralUrl.
*
@@ -518,11 +562,6 @@ public class DependencyCheckScanAgent {
this.centralUrl = centralUrl;
}
/**
* Whether or not the nexus analyzer is enabled.
*/
private boolean nexusAnalyzerEnabled = true;
/**
* Get the value of nexusAnalyzerEnabled.
*
@@ -541,11 +580,6 @@ public class DependencyCheckScanAgent {
this.nexusAnalyzerEnabled = nexusAnalyzerEnabled;
}
/**
* The URL of the Nexus server.
*/
private String nexusUrl;
/**
* Get the value of nexusUrl.
*
@@ -564,11 +598,6 @@ public class DependencyCheckScanAgent {
this.nexusUrl = nexusUrl;
}
/**
* Whether or not the defined proxy should be used when connecting to Nexus.
*/
private boolean nexusUsesProxy = true;
/**
* Get the value of nexusUsesProxy.
*
@@ -587,11 +616,6 @@ public class DependencyCheckScanAgent {
this.nexusUsesProxy = nexusUsesProxy;
}
/**
* The database driver name; such as org.h2.Driver.
*/
private String databaseDriverName;
/**
* Get the value of databaseDriverName.
*
@@ -610,11 +634,6 @@ public class DependencyCheckScanAgent {
this.databaseDriverName = databaseDriverName;
}
/**
* The path to the database driver JAR file if it is not on the class path.
*/
private String databaseDriverPath;
/**
* Get the value of databaseDriverPath.
*
@@ -633,11 +652,6 @@ public class DependencyCheckScanAgent {
this.databaseDriverPath = databaseDriverPath;
}
/**
* The database connection string.
*/
private String connectionString;
/**
* Get the value of connectionString.
*
@@ -656,11 +670,6 @@ public class DependencyCheckScanAgent {
this.connectionString = connectionString;
}
/**
* The user name for connecting to the database.
*/
private String databaseUser;
/**
* Get the value of databaseUser.
*
@@ -679,11 +688,6 @@ public class DependencyCheckScanAgent {
this.databaseUser = databaseUser;
}
/**
* The password to use when connecting to the database.
*/
private String databasePassword;
/**
* Get the value of databasePassword.
*
@@ -702,12 +706,6 @@ public class DependencyCheckScanAgent {
this.databasePassword = databasePassword;
}
/**
* Additional ZIP file extensions to analyze. This should be a
* comma-separated list of file extensions to treat like ZIP files.
*/
private String zipExtensions;
/**
* Get the value of zipExtensions.
*
@@ -726,11 +724,6 @@ public class DependencyCheckScanAgent {
this.zipExtensions = zipExtensions;
}
/**
* The url for the modified NVD CVE (1.2 schema).
*/
private String cveUrl12Modified;
/**
* Get the value of cveUrl12Modified.
*
@@ -749,11 +742,6 @@ public class DependencyCheckScanAgent {
this.cveUrl12Modified = cveUrl12Modified;
}
/**
* The url for the modified NVD CVE (2.0 schema).
*/
private String cveUrl20Modified;
/**
* Get the value of cveUrl20Modified.
*
@@ -772,11 +760,6 @@ public class DependencyCheckScanAgent {
this.cveUrl20Modified = cveUrl20Modified;
}
/**
* Base Data Mirror URL for CVE 1.2.
*/
private String cveUrl12Base;
/**
* Get the value of cveUrl12Base.
*
@@ -795,11 +778,6 @@ public class DependencyCheckScanAgent {
this.cveUrl12Base = cveUrl12Base;
}
/**
* Data Mirror URL for CVE 2.0.
*/
private String cveUrl20Base;
/**
* Get the value of cveUrl20Base.
*
@@ -818,11 +796,6 @@ public class DependencyCheckScanAgent {
this.cveUrl20Base = cveUrl20Base;
}
/**
* The path to Mono for .NET assembly analysis on non-windows systems.
*/
private String pathToMono;
/**
* Get the value of pathToMono.
*
@@ -840,6 +813,7 @@ public class DependencyCheckScanAgent {
public void setPathToMono(String pathToMono) {
this.pathToMono = pathToMono;
}
//</editor-fold>
/**
* Executes the Dependency-Check on the dependent libraries.
@@ -869,28 +843,17 @@ public class DependencyCheckScanAgent {
*/
private void generateExternalReports(Engine engine, File outDirectory) {
DatabaseProperties prop = null;
CveDB cve = null;
try {
cve = new CveDB();
cve.open();
try (CveDB cve = CveDB.getInstance()) {
prop = cve.getDatabaseProperties();
} catch (DatabaseException ex) {
//TODO shouldn't this be a fatal exception
LOGGER.debug("Unable to retrieve DB Properties", ex);
} finally {
if (cve != null) {
cve.close();
}
}
final ReportGenerator r = new ReportGenerator(this.applicationName, engine.getDependencies(), engine.getAnalyzers(), prop);
try {
r.generateReports(outDirectory.getCanonicalPath(), this.reportFormat.name());
} catch (IOException ex) {
LOGGER.error(
"Unexpected exception occurred during analysis; please see the verbose error log for more details.");
LOGGER.debug("", ex);
} catch (Throwable ex) {
LOGGER.error(
"Unexpected exception occurred during analysis; please see the verbose error log for more details.");
} catch (IOException | ReportException ex) {
LOGGER.error("Unexpected exception occurred during analysis; please see the verbose error log for more details.");
LOGGER.debug("", ex);
}
}
@@ -998,7 +961,7 @@ public class DependencyCheckScanAgent {
}
if (ids.length() > 0) {
final String msg = String.format("%n%nDependency-Check Failure:%n"
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater then '%.1f': %s%n"
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than '%.1f': %s%n"
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString());
throw new ScanAgentException(msg);
@@ -1044,5 +1007,4 @@ public class DependencyCheckScanAgent {
summary.toString());
}
}
}

View File

@@ -17,24 +17,125 @@
*/
package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Base class for analyzers to avoid code duplication of initialize and close
* as most analyzers do not need these methods.
* Base class for analyzers to avoid code duplication of initialize and close as
* most analyzers do not need these methods.
*
* @author Jeremy Long
*/
public abstract class AbstractAnalyzer implements Analyzer {
/**
* The logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractAnalyzer.class);
/**
* A flag indicating whether or not the analyzer is enabled.
*/
private volatile boolean enabled = true;
/**
* Get the value of enabled.
*
* @return the value of enabled
*/
@Override
public boolean isEnabled() {
return enabled;
}
/**
* Set the value of enabled.
*
* @param enabled new value of enabled
*/
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
protected abstract String getAnalyzerEnabledSettingKey();
/**
* Analyzes a given dependency. If the dependency is an archive, such as a
* WAR or EAR, the contents are extracted, scanned, and added to the list of
* dependencies within the engine.
*
* @param dependency the dependency to analyze
* @param engine the engine scanning
* @throws AnalysisException thrown if there is an analysis exception
*/
protected abstract void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException;
/**
* Initializes a given Analyzer. This will be skipped if the analyzer is
* disabled.
*
* @throws InitializationException thrown if there is an exception
*/
protected void initializeAnalyzer() throws InitializationException {
}
/**
* Closes a given Analyzer. This will be skipped if the analyzer is
* disabled.
*
* @throws Exception thrown if there is an exception
*/
protected void closeAnalyzer() throws Exception {
// Intentionally empty; analyzers will override this if they must close a resource.
}
/**
* Analyzes a given dependency. If the dependency is an archive, such as a
* WAR or EAR, the contents are extracted, scanned, and added to the list of
* dependencies within the engine.
*
* @param dependency the dependency to analyze
* @param engine the engine scanning
* @throws AnalysisException thrown if there is an analysis exception
*/
@Override
public final void analyze(Dependency dependency, Engine engine) throws AnalysisException {
if (this.isEnabled()) {
analyzeDependency(dependency, engine);
}
}
/**
* The initialize method does nothing for this Analyzer.
*
* @throws InitializationException thrown if there is an exception
*/
@Override
public void initialize() throws InitializationException {
//do nothing
public final void initialize() throws InitializationException {
final String key = getAnalyzerEnabledSettingKey();
try {
this.setEnabled(Settings.getBoolean(key, true));
} catch (InvalidSettingException ex) {
LOGGER.warn("Invalid setting for property '{}'", key);
LOGGER.debug("", ex);
}
if (isEnabled()) {
initializeAnalyzer();
} else {
LOGGER.debug("{} has been disabled", getName());
}
}
/**
@@ -43,12 +144,16 @@ public abstract class AbstractAnalyzer implements Analyzer {
* @throws Exception thrown if there is an exception
*/
@Override
public void close() throws Exception {
//do nothing
public final void close() throws Exception {
if (isEnabled()) {
closeAnalyzer();
}
}
/**
* The default is to support parallel processing.
*
* @return true
*/
@Override
public boolean supportsParallelProcessing() {
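Since initialize(), analyze(), and close() are now final in AbstractAnalyzer, a concrete analyzer only supplies the hooks. A minimal sketch under those hooks; the settings key is hypothetical, and the class is left abstract because the remaining Analyzer interface methods (name, analysis phase, and so on) are not shown in this diff:

    public abstract class ExampleAnalyzer extends AbstractAnalyzer {

        @Override
        protected String getAnalyzerEnabledSettingKey() {
            return "analyzer.example.enabled";  // hypothetical key; real analyzers return a Settings.KEYS constant
        }

        @Override
        protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
            // runs only when the analyzer is enabled; analyze() performs the enabled check
        }

        // initializeAnalyzer() and closeAnalyzer() are optional overrides for resource setup and teardown
    }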

View File

@@ -17,11 +17,6 @@
*/
package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -40,17 +35,7 @@ import org.owasp.dependencycheck.exception.InitializationException;
*/
public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implements FileTypeAnalyzer {
//<editor-fold defaultstate="collapsed" desc="Constructor">
/**
* Base constructor that all children must call. This checks the
* configuration to determine if the analyzer is enabled.
*/
public AbstractFileTypeAnalyzer() {
reset();
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Field definitions">
//<editor-fold defaultstate="collapsed" desc="Field definitions, getters, and setters ">
/**
* The logger.
*/
@@ -80,30 +65,24 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
this.filesMatched = filesMatched;
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Final implementations for the Analyzer interface">
/**
* A flag indicating whether or not the analyzer is enabled.
*/
private volatile boolean enabled = true;
/**
* Get the value of enabled.
* Initializes the analyzer.
*
* @return the value of enabled
* @throws InitializationException thrown if there is an exception during
* initialization
*/
public boolean isEnabled() {
return enabled;
@Override
protected final void initializeAnalyzer() throws InitializationException {
if (filesMatched) {
initializeFileTypeAnalyzer();
} else {
this.setEnabled(false);
}
}
/**
* Set the value of enabled.
*
* @param enabled new value of enabled
*/
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
//</editor-fold>
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Abstract methods children must implement">
/**
* <p>
@@ -127,80 +106,21 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
*/
protected abstract void initializeFileTypeAnalyzer() throws InitializationException;
//</editor-fold>
/**
* Analyzes a given dependency. If the dependency is an archive, such as a
* WAR or EAR, the contents are extracted, scanned, and added to the list of
* dependencies within the engine.
* Determines if the file can be analyzed by the analyzer.
*
* @param dependency the dependency to analyze
* @param engine the engine scanning
* @throws AnalysisException thrown if there is an analysis exception
* @param pathname the path to the file
* @return true if the file can be analyzed by the given analyzer; otherwise
* false
*/
protected abstract void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException;
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
protected abstract String getAnalyzerEnabledSettingKey();
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Final implementations for the Analyzer interface">
/**
* Initializes the analyzer.
*
* @throws InitializationException thrown if there is an exception during
* initialization
*/
@Override
public final void initialize() throws InitializationException {
if (filesMatched) {
initializeFileTypeAnalyzer();
} else {
enabled = false;
}
}
/**
* Resets the enabled flag on the analyzer.
*/
@Override
public final void reset() {
final String key = getAnalyzerEnabledSettingKey();
try {
enabled = Settings.getBoolean(key, true);
} catch (InvalidSettingException ex) {
LOGGER.warn("Invalid setting for property '{}'", key);
LOGGER.debug("", ex);
LOGGER.warn("{} has been disabled", getName());
}
}
/**
* Analyzes a given dependency. If the dependency is an archive, such as a
* WAR or EAR, the contents are extracted, scanned, and added to the list of
* dependencies within the engine.
*
* @param dependency the dependency to analyze
* @param engine the engine scanning
* @throws AnalysisException thrown if there is an analysis exception
*/
@Override
public final void analyze(Dependency dependency, Engine engine) throws AnalysisException {
if (enabled) {
analyzeFileType(dependency, engine);
}
}
@Override
public boolean accept(File pathname) {
final FileFilter filter = getFileFilter();
boolean accepted = false;
if (null == filter) {
LOGGER.error("The '{}' analyzer is misconfigured and does not have a file filter; it will be disabled", getName());
} else if (enabled) {
} else if (this.isEnabled()) {
accepted = filter.accept(pathname);
if (accepted) {
filesMatched = true;
@@ -209,8 +129,6 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
return accepted;
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Static utility methods">
/**
* <p>
* Utility method to help in the creation of the extensions set. This
@@ -223,10 +141,8 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
* @return a Set of strings.
*/
protected static Set<String> newHashSet(String... strings) {
final Set<String> set = new HashSet<String>(strings.length);
final Set<String> set = new HashSet<>(strings.length);
Collections.addAll(set, strings);
return set;
}
//</editor-fold>
}

View File

@@ -67,8 +67,7 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
* @throws InitializationException thrown if there is an exception
*/
@Override
public void initialize() throws InitializationException {
super.initialize();
public void initializeAnalyzer() throws InitializationException {
try {
loadSuppressionData();
} catch (SuppressionParseException ex) {
@@ -108,7 +107,8 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
final SuppressionParser parser = new SuppressionParser();
File file = null;
try {
rules = parser.parseSuppressionRules(this.getClass().getClassLoader().getResourceAsStream("dependencycheck-base-suppression.xml"));
final InputStream in = this.getClass().getClassLoader().getResourceAsStream("dependencycheck-base-suppression.xml");
rules = parser.parseSuppressionRules(in);
} catch (SAXException ex) {
throw new SuppressionParseException("Unable to parse the base suppression data file", ex);
}
@@ -130,10 +130,9 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
}
} else {
file = new File(suppressionFilePath);
InputStream suppressionsFromClasspath = null;
if (!file.exists()) {
try {
suppressionsFromClasspath = this.getClass().getClassLoader().getResourceAsStream(suppressionFilePath);
try (InputStream suppressionsFromClasspath = this.getClass().getClassLoader().getResourceAsStream(suppressionFilePath)) {
if (suppressionsFromClasspath != null) {
deleteTempFile = true;
file = FileUtils.getTempFile("suppression", "xml");
@@ -143,14 +142,6 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
throwSuppressionParseException("Unable to locate suppressions file in classpath", ex);
}
}
} finally {
if (suppressionsFromClasspath != null) {
try {
suppressionsFromClasspath.close();
} catch (IOException ex) {
LOGGER.debug("Failed to close stream", ex);
}
}
}
}
}

View File

@@ -36,6 +36,10 @@ public enum AnalysisPhase {
* Information collection phase.
*/
INFORMATION_COLLECTION,
/**
* Post information collection phase.
*/
POST_INFORMATION_COLLECTION,
/**
* Pre identifier analysis phase.
*/

View File

@@ -83,4 +83,10 @@ public interface Analyzer {
* @return {@code true} if the analyzer supports parallel processing, {@code false} else
*/
boolean supportsParallelProcessing();
/**
* Get the value of enabled.
*
* @return the value of enabled
*/
boolean isEnabled();
}

View File

@@ -57,13 +57,13 @@ public class AnalyzerService {
* @return a list of Analyzers.
*/
public List<Analyzer> getAnalyzers() {
final List<Analyzer> analyzers = new ArrayList<Analyzer>();
final List<Analyzer> analyzers = new ArrayList<>();
final Iterator<Analyzer> iterator = service.iterator();
boolean experimentalEnabled = false;
try {
experimentalEnabled = Settings.getBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, false);
} catch (InvalidSettingException ex) {
LOGGER.error("invalide experimental setting", ex);
LOGGER.error("invalid experimental setting", ex);
}
while (iterator.hasNext()) {
final Analyzer a = iterator.next();

View File

@@ -18,7 +18,6 @@
package org.owasp.dependencycheck.analyzer;
import java.io.BufferedInputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
@@ -99,7 +98,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The set of things we can handle with Zip methods
*/
private static final Set<String> ZIPPABLES = newHashSet("zip", "ear", "war", "jar", "sar", "apk", "nupkg");
private static final Set<String> KNOWN_ZIP_EXT = newHashSet("zip", "ear", "war", "jar", "sar", "apk", "nupkg");
/**
* The set of file extensions supported by this analyzer. Note for
* developers, any additions to this list will need to be explicitly handled
@@ -107,37 +106,37 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/
private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz", "bz2", "tbz2");
/**
* Detects files with extensions to remove from the engine's collection of
* dependencies.
*/
private static final FileFilter REMOVE_FROM_ANALYSIS = FileFilterBuilder.newInstance().addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2")
.build();
static {
final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS);
if (additionalZipExt != null) {
final String[] ext = additionalZipExt.split("\\s*,\\s*");
Collections.addAll(ZIPPABLES, ext);
Collections.addAll(KNOWN_ZIP_EXT, ext);
}
EXTENSIONS.addAll(ZIPPABLES);
EXTENSIONS.addAll(KNOWN_ZIP_EXT);
}
/**
* Detects files with extensions to remove from the engine's collection of
* dependencies.
*/
private static final FileFilter REMOVE_FROM_ANALYSIS = FileFilterBuilder.newInstance()
.addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2").build();
/**
* The file filter used to filter supported files.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
@Override
protected FileFilter getFileFilter() {
return FILTER;
}
/**
* Detects files with .zip extension.
*/
private static final FileFilter ZIP_FILTER = FileFilterBuilder.newInstance().addExtensions("zip").build();
@Override
protected FileFilter getFileFilter() {
return FILTER;
}
/**
* Returns the name of the analyzer.
*
@@ -205,7 +204,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* files
*/
@Override
public void close() throws Exception {
public void closeAnalyzer() throws Exception {
if (tempFileLocation != null && tempFileLocation.exists()) {
LOGGER.debug("Attempting to delete temporary files");
final boolean success = FileUtils.delete(tempFileLocation);
@@ -222,7 +221,9 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* Does not support parallel processing as it both modifies and iterates
* over the engine's list of dependencies.
*
* @see #analyzeFileType(Dependency, Engine)
* @return <code>true</code> if the analyzer supports parallel processing;
* otherwise <code>false</code>
* @see #analyzeDependency(Dependency, Engine)
* @see #findMoreDependencies(Engine, File)
*/
@Override
@@ -240,7 +241,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException thrown if there is an analysis exception
*/
@Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
final File f = new File(dependency.getActualFilePath());
final File tmpDir = getNextTempDirectory();
extractFiles(f, tmpDir, engine);
@@ -248,7 +249,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
//make a copy
final List<Dependency> dependencySet = findMoreDependencies(engine, tmpDir);
if (!dependencySet.isEmpty()) {
if (dependencySet != null && !dependencySet.isEmpty()) {
for (Dependency d : dependencySet) {
if (d.getFilePath().startsWith(tmpDir.getAbsolutePath())) {
//fix the dependency's display name and path
@@ -302,11 +303,11 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/
private void addDisguisedJarsToDependencies(Dependency dependency, Engine engine) throws AnalysisException {
if (ZIP_FILTER.accept(dependency.getActualFile()) && isZipFileActuallyJarFile(dependency)) {
final File tdir = getNextTempDirectory();
final File tempDir = getNextTempDirectory();
final String fileName = dependency.getFileName();
LOGGER.info("The zip file '{}' appears to be a JAR file, making a copy and analyzing it as a JAR.", fileName);
final File tmpLoc = new File(tdir, fileName.substring(0, fileName.length() - 3) + "jar");
final File tmpLoc = new File(tempDir, fileName.substring(0, fileName.length() - 3) + "jar");
//store the archive's sha1 and change it so that the engine doesn't think the zip and jar file are the same
// and add it as a related dependency.
final String archiveSha1 = dependency.getSha1sum();
@@ -314,7 +315,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
dependency.setSha1sum("");
org.apache.commons.io.FileUtils.copyFile(dependency.getActualFile(), tmpLoc);
final List<Dependency> dependencySet = findMoreDependencies(engine, tmpLoc);
if (!dependencySet.isEmpty()) {
if (dependencySet != null && !dependencySet.isEmpty()) {
for (Dependency d : dependencySet) {
//fix the dependency's display name and path
if (d.getActualFile().equals(tmpLoc)) {
@@ -346,8 +347,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* @return any dependencies that weren't known to the engine before
*/
private static List<Dependency> findMoreDependencies(Engine engine, File file) {
final List<Dependency> added = engine.scan(file);
return added;
return engine.scan(file);
}
/**
@@ -399,7 +399,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
GzipCompressorInputStream gin = null;
BZip2CompressorInputStream bzin = null;
try {
if (ZIPPABLES.contains(archiveExt)) {
if (KNOWN_ZIP_EXT.contains(archiveExt)) {
in = new BufferedInputStream(fis);
ensureReadableJar(archiveExt, in);
zin = new ZipArchiveInputStream(in);
@@ -434,12 +434,12 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
} finally {
//overly verbose and not needed... but keeping it anyway due to
//having issue with file handles being left open
close(fis);
close(in);
close(zin);
close(tin);
close(gin);
close(bzin);
FileUtils.close(fis);
FileUtils.close(in);
FileUtils.close(zin);
FileUtils.close(tin);
FileUtils.close(gin);
FileUtils.close(bzin);
}
}
}
@@ -471,7 +471,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
&& b[5] == 'n'
&& b[6] == '/') {
boolean stillLooking = true;
int chr, nxtChr;
int chr;
int nxtChr;
while (stillLooking && (chr = in.read()) != -1) {
if (chr == '\n' || chr == '\r') {
in.mark(4);
@@ -518,10 +519,10 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
extractAcceptedFile(input, file);
}
}
} catch (Throwable ex) {
} catch (IOException | AnalysisException ex) {
throw new ArchiveExtractionException(ex);
} finally {
close(input);
FileUtils.close(input);
}
}
@@ -534,14 +535,12 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/
private static void extractAcceptedFile(ArchiveInputStream input, File file) throws AnalysisException {
LOGGER.debug("Extracting '{}'", file.getPath());
FileOutputStream fos = null;
try {
final File parent = file.getParentFile();
if (!parent.isDirectory() && !parent.mkdirs()) {
final String msg = String.format("Unable to build directory '%s'.", parent.getAbsolutePath());
throw new AnalysisException(msg);
}
fos = new FileOutputStream(file);
final File parent = file.getParentFile();
if (!parent.isDirectory() && !parent.mkdirs()) {
final String msg = String.format("Unable to build directory '%s'.", parent.getAbsolutePath());
throw new AnalysisException(msg);
}
try (FileOutputStream fos = new FileOutputStream(file)) {
IOUtils.copy(input, fos);
} catch (FileNotFoundException ex) {
LOGGER.debug("", ex);
@@ -551,8 +550,6 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.debug("", ex);
final String msg = String.format("IO Exception while parsing file '%s'.", file.getName());
throw new AnalysisException(msg, ex);
} finally {
close(fos);
}
}
@@ -566,34 +563,11 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/
private void decompressFile(CompressorInputStream inputStream, File outputFile) throws ArchiveExtractionException {
LOGGER.debug("Decompressing '{}'", outputFile.getPath());
FileOutputStream out = null;
try {
out = new FileOutputStream(outputFile);
try (FileOutputStream out = new FileOutputStream(outputFile)) {
IOUtils.copy(inputStream, out);
} catch (FileNotFoundException ex) {
LOGGER.debug("", ex);
throw new ArchiveExtractionException(ex);
} catch (IOException ex) {
LOGGER.debug("", ex);
throw new ArchiveExtractionException(ex);
} finally {
close(out);
}
}
/**
* Close the given {@link Closeable} instance, ignoring nulls, and logging
* any thrown {@link IOException}.
*
* @param closeable to be closed
*/
private static void close(Closeable closeable) {
if (null != closeable) {
try {
closeable.close();
} catch (IOException ex) {
LOGGER.trace("", ex);
}
}
}
@@ -627,7 +601,6 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
} finally {
ZipFile.closeQuietly(zip);
}
return isJar;
}
}
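
The refactor above drops the private close(Closeable) helper in favor of try-with-resources in extractAcceptedFile and decompressFile. A minimal sketch of the same pattern, assuming commons-io's IOUtils for the copy; the class and method names here are illustrative, not part of the project:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.io.IOUtils;

final class StreamCopySketch {

    /**
     * Hypothetical helper mirroring the refactor above: the output stream is
     * opened in the try header, so it is closed automatically and the removed
     * close(Closeable) utility is no longer needed.
     */
    static void copyToFile(InputStream in, File target) throws IOException {
        final File parent = target.getParentFile();
        if (parent != null && !parent.isDirectory() && !parent.mkdirs()) {
            throw new IOException("Unable to build directory " + parent.getAbsolutePath());
        }
        try (FileOutputStream out = new FileOutputStream(target)) {
            IOUtils.copy(in, out); // commons-io copies until end of stream
        }
    }
}

Because the stream is declared in the try header, it is closed even when the copy throws, which is what the removed finally blocks were doing by hand.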

View File

@@ -37,7 +37,6 @@ import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
@@ -46,6 +45,7 @@ import java.util.List;
import javax.xml.parsers.ParserConfigurationException;
import org.owasp.dependencycheck.exception.InitializationException;
import org.apache.commons.lang3.SystemUtils;
import org.owasp.dependencycheck.utils.XmlUtils;
/**
* Analyzer for getting company, product, and version information from a .NET
@@ -84,7 +84,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
*/
protected List<String> buildArgumentList() {
// Use file.separator as a wild guess as to whether this is Windows
final List<String> args = new ArrayList<String>();
final List<String> args = new ArrayList<>();
if (!SystemUtils.IS_OS_WINDOWS) {
if (Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH) != null) {
args.add(Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH));
@@ -106,7 +106,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException if anything goes sideways
*/
@Override
public void analyzeFileType(Dependency dependency, Engine engine)
public void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
if (grokAssemblyExe == null) {
LOGGER.warn("GrokAssembly didn't get deployed");
@@ -123,8 +123,8 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
Document doc = null;
try {
final Process proc = pb.start();
final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
final DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
doc = builder.parse(proc.getInputStream());
// Try evacuating the error stream
@@ -144,7 +144,9 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
dependency.getActualFilePath());
return;
} else if (rc != 0) {
LOGGER.warn("Return code {} from GrokAssembly", rc);
LOGGER.debug("Return code {} from GrokAssembly; dependency-check is unable to analyze the library: {}",
rc, dependency.getActualFilePath());
return;
}
final XPath xpath = XPathFactory.newInstance().newXPath();
@@ -175,14 +177,17 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
} catch (ParserConfigurationException pce) {
throw new AnalysisException("Error initializing the assembly analyzer", pce);
} catch (IOException ioe) {
} catch (IOException | XPathExpressionException ioe) {
throw new AnalysisException(ioe);
} catch (SAXException saxe) {
throw new AnalysisException("Couldn't parse GrokAssembly result", saxe);
} catch (XPathExpressionException xpe) {
// This shouldn't happen
throw new AnalysisException(xpe);
LOGGER.error("----------------------------------------------------");
LOGGER.error("Failed to read the Assembly Analyzer results. "
+ "On some systems mono-runtime and mono-devel need to be installed.");
LOGGER.error("----------------------------------------------------");
throw new AnalysisException("Couldn't parse Assembly Analyzer results (GrokAssembly)", saxe);
}
// This shouldn't happen
}
/**
@@ -194,40 +199,27 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
@Override
public void initializeFileTypeAnalyzer() throws InitializationException {
final File tempFile;
final String cfg;
try {
tempFile = File.createTempFile("GKA", ".exe", Settings.getTempDirectory());
cfg = tempFile.getPath() + ".config";
} catch (IOException ex) {
setEnabled(false);
throw new InitializationException("Unable to create temporary file for the assembly analyzerr", ex);
throw new InitializationException("Unable to create temporary file for the assembly analyzer", ex);
}
FileOutputStream fos = null;
InputStream is = null;
try {
fos = new FileOutputStream(tempFile);
is = AssemblyAnalyzer.class.getClassLoader().getResourceAsStream("GrokAssembly.exe");
try (FileOutputStream fos = new FileOutputStream(tempFile);
InputStream is = AssemblyAnalyzer.class.getClassLoader().getResourceAsStream("GrokAssembly.exe");
FileOutputStream fosCfg = new FileOutputStream(cfg);
InputStream isCfg = AssemblyAnalyzer.class.getClassLoader().getResourceAsStream("GrokAssembly.exe.config")) {
IOUtils.copy(is, fos);
grokAssemblyExe = tempFile;
LOGGER.debug("Extracted GrokAssembly.exe to {}", grokAssemblyExe.getPath());
IOUtils.copy(isCfg, fosCfg);
LOGGER.debug("Extracted GrokAssembly.exe.config to {}", cfg);
} catch (IOException ioe) {
this.setEnabled(false);
LOGGER.warn("Could not extract GrokAssembly.exe: {}", ioe.getMessage());
throw new InitializationException("Could not extract GrokAssembly.exe", ioe);
} finally {
if (fos != null) {
try {
fos.close();
} catch (Throwable e) {
LOGGER.debug("Error closing output stream");
}
}
if (is != null) {
try {
is.close();
} catch (Throwable e) {
LOGGER.debug("Error closing input stream");
}
}
}
// Now, need to see if GrokAssembly actually runs from this location.
@@ -238,13 +230,14 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
//
// We need to create a non-fatal warning error type that will
// get added to the report.
//TOOD this idea needs to get replicated to the bundle audit analyzer.
//TODO this idea needs to get replicated to the bundle audit analyzer.
if (args == null) {
setEnabled(false);
LOGGER.error("----------------------------------------------------");
LOGGER.error(".NET Assembly Analyzer could not be initialized and at least one "
+ "'exe' or 'dll' was scanned. The 'mono' executable could not be found on "
+ "the path; either disable the Assembly Analyzer or configure the path mono.");
+ "the path; either disable the Assembly Analyzer or configure the path mono. "
+ "On some systems mono-runtime and mono-devel need to be installed.");
LOGGER.error("----------------------------------------------------");
return;
}
@@ -254,9 +247,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
// Try evacuating the error stream
IOUtils.copy(p.getErrorStream(), NullOutputStream.NULL_OUTPUT_STREAM);
final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
final DocumentBuilder builder = factory.newDocumentBuilder();
final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
final Document doc = builder.parse(p.getInputStream());
final XPath xpath = XPathFactory.newInstance().newXPath();
final String error = xpath.evaluate("/assembly/error", doc);
@@ -270,7 +261,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
} catch (InitializationException e) {
setEnabled(false);
throw e;
} catch (Throwable e) {
} catch (IOException | ParserConfigurationException | SAXException | XPathExpressionException | InterruptedException e) {
LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer;\n"
+ "this can be ignored unless you are scanning .NET DLLs. Please see the log for more details.");
LOGGER.debug("Could not execute GrokAssembly {}", e.getMessage());
@@ -285,8 +276,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
* @throws Exception thrown if there is a problem closing the analyzer
*/
@Override
public void close() throws Exception {
super.close();
public void closeAnalyzer() throws Exception {
try {
if (grokAssemblyExe != null && !grokAssemblyExe.delete()) {
LOGGER.debug("Unable to delete temporary GrokAssembly.exe; attempting delete on exit");
@@ -357,10 +347,8 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
if (retCode == 0) {
return true;
}
} catch (IOException ex) {
LOGGER.debug("Path seach failed for " + file);
} catch (InterruptedException ex) {
LOGGER.debug("Path seach failed for " + file);
} catch (IOException | InterruptedException ex) {
LOGGER.debug("Path search failed for " + file, ex);
}
return false;
}
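
The hunks above replace an inline DocumentBuilderFactory with XmlUtils.buildSecureDocumentBuilder and collapse the separate catch blocks into multi-catch. A rough sketch of what such a secure-builder helper typically looks like, based on the "disallow-doctype-decl" feature the removed lines set explicitly; the actual utility in dependency-check may configure more:

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

final class SecureDocumentBuilderSketch {

    /**
     * Builds a DocumentBuilder with DOCTYPE declarations disabled, the same
     * hardening the removed inline code applied via the
     * "disallow-doctype-decl" feature.
     */
    static DocumentBuilder build() throws ParserConfigurationException {
        final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        factory.setXIncludeAware(false);
        factory.setExpandEntityReferences(false);
        return factory.newDocumentBuilder();
    }
}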

View File

@@ -154,7 +154,7 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
}
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
final File actualFile = dependency.getActualFile();
final String name = actualFile.getName();

View File

@@ -147,7 +147,7 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
* analyzing the dependency
*/
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
final File file = dependency.getActualFile();
final String parentName = file.getParentFile().getName();

View File

@@ -50,6 +50,7 @@ import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -69,26 +70,26 @@ public class CPEAnalyzer extends AbstractAnalyzer {
/**
* The maximum number of query results to return.
*/
static final int MAX_QUERY_RESULTS = 25;
private static final int MAX_QUERY_RESULTS = 25;
/**
* The weighting boost to give terms when constructing the Lucene query.
*/
static final String WEIGHTING_BOOST = "^5";
private static final String WEIGHTING_BOOST = "^5";
/**
* A string representation of a regular expression defining characters
* utilized within the CPE Names.
*/
static final String CLEANSE_CHARACTER_RX = "[^A-Za-z0-9 ._-]";
private static final String CLEANSE_CHARACTER_RX = "[^A-Za-z0-9 ._-]";
/**
* A string representation of a regular expression used to remove all but
* alpha characters.
*/
static final String CLEANSE_NONALPHA_RX = "[^A-Za-z]*";
private static final String CLEANSE_NONALPHA_RX = "[^A-Za-z]*";
/**
* The additional size to add to a new StringBuilder to account for extra
* data that will be written into the string.
*/
static final int STRING_BUILDER_BUFFER = 20;
private static final int STRING_BUILDER_BUFFER = 20;
/**
* The CPE in memory index.
*/
@@ -123,6 +124,16 @@ public class CPEAnalyzer extends AbstractAnalyzer {
return AnalysisPhase.IDENTIFIER_ANALYSIS;
}
/**
* The default is to support parallel processing.
*
* @return false
*/
@Override
public boolean supportsParallelProcessing() {
return false;
}
/**
* Creates the CPE Lucene Index.
*
@@ -130,7 +141,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
* the index.
*/
@Override
public void initialize() throws InitializationException {
public void initializeAnalyzer() throws InitializationException {
try {
this.open();
} catch (IOException ex) {
@@ -152,8 +163,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
*/
public void open() throws IOException, DatabaseException {
if (!isOpen()) {
cve = new CveDB();
cve.open();
cve = CveDB.getInstance();
cpe = CpeMemoryIndex.getInstance();
try {
final long creationStart = System.currentTimeMillis();
@@ -171,17 +181,22 @@ public class CPEAnalyzer extends AbstractAnalyzer {
* Closes the data sources.
*/
@Override
public void close() {
if (cpe != null) {
cpe.close();
cpe = null;
}
public void closeAnalyzer() {
if (cve != null) {
cve.close();
cve = null;
}
if (cpe != null) {
cpe.close();
cpe = null;
}
}
/**
* Returns whether or not the analyzer is open.
*
* @return <code>true</code> if the analyzer is open
*/
public boolean isOpen() {
return cpe != null && cpe.isOpen();
}
@@ -197,7 +212,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
* @throws ParseException is thrown when the Lucene query cannot be parsed.
*/
protected void determineCPE(Dependency dependency) throws CorruptIndexException, IOException, ParseException {
//TODO test dojo-war against this. we shold get dojo-toolkit:dojo-toolkit AND dojo-toolkit:toolkit
//TODO test dojo-war against this. we should get dojo-toolkit:dojo-toolkit AND dojo-toolkit:toolkit
String vendors = "";
String products = "";
for (Confidence confidence : Confidence.values()) {
@@ -285,7 +300,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
protected List<IndexEntry> searchCPE(String vendor, String product,
Set<String> vendorWeightings, Set<String> productWeightings) {
final List<IndexEntry> ret = new ArrayList<IndexEntry>(MAX_QUERY_RESULTS);
final List<IndexEntry> ret = new ArrayList<>(MAX_QUERY_RESULTS);
final String searchString = buildSearch(vendor, product, vendorWeightings, productWeightings);
if (searchString == null) {
@@ -471,7 +486,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
return false;
}
final String[] words = text.split("[\\s_-]");
final List<String> list = new ArrayList<String>();
final List<String> list = new ArrayList<>();
String tempWord = null;
for (String word : words) {
/*
@@ -515,7 +530,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
* dependency.
*/
@Override
public synchronized void analyze(Dependency dependency, Engine engine) throws AnalysisException {
protected synchronized void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
try {
determineCPE(dependency);
} catch (CorruptIndexException ex) {
@@ -549,7 +564,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
DependencyVersion bestGuess = new DependencyVersion("-");
Confidence bestGuessConf = null;
boolean hasBroadMatch = false;
final List<IdentifierMatch> collected = new ArrayList<IdentifierMatch>();
final List<IdentifierMatch> collected = new ArrayList<>();
//TODO the following algorithm incorrectly identifies things as a lower version
// if there is lower confidence evidence when the current (highest) version number
@@ -588,11 +603,10 @@ public class CPEAnalyzer extends AbstractAnalyzer {
}
}
}
if (bestGuessConf == null || bestGuessConf.compareTo(conf) > 0) {
if (bestGuess.getVersionParts().size() < evVer.getVersionParts().size()) {
bestGuess = evVer;
bestGuessConf = conf;
}
if ((bestGuessConf == null || bestGuessConf.compareTo(conf) > 0)
&& bestGuess.getVersionParts().size() < evVer.getVersionParts().size()) {
bestGuess = evVer;
bestGuessConf = conf;
}
}
}
@@ -602,10 +616,12 @@ public class CPEAnalyzer extends AbstractAnalyzer {
final String cpeUrlName = String.format("cpe:/a:%s:%s", vendor, product);
url = String.format(NVD_SEARCH_URL, URLEncoder.encode(cpeUrlName, "UTF-8"));
}
if (bestGuessConf == null) {
if (bestGuessConf
== null) {
bestGuessConf = Confidence.LOW;
}
final IdentifierMatch match = new IdentifierMatch("cpe", cpeName, url, IdentifierConfidence.BEST_GUESS, bestGuessConf);
collected.add(match);
Collections.sort(collected);
@@ -628,6 +644,18 @@ public class CPEAnalyzer extends AbstractAnalyzer {
return identifierAdded;
}
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_CPE_ENABLED;
}
/**
* The confidence whether the identifier is an exact match, or a best guess.
*/
@@ -655,6 +683,19 @@ public class CPEAnalyzer extends AbstractAnalyzer {
*/
private static class IdentifierMatch implements Comparable<IdentifierMatch> {
/**
* The confidence in the evidence used to identify this match.
*/
private Confidence evidenceConfidence;
/**
* The confidence whether this is an exact match, or a best guess.
*/
private IdentifierConfidence confidence;
/**
* The CPE identifier.
*/
private Identifier identifier;
/**
* Constructs an IdentifierMatch.
*
@@ -671,12 +712,8 @@ public class CPEAnalyzer extends AbstractAnalyzer {
this.confidence = identifierConfidence;
this.evidenceConfidence = evidenceConfidence;
}
//<editor-fold defaultstate="collapsed" desc="Property implementations: evidenceConfidence, confidence, identifier">
/**
* The confidence in the evidence used to identify this match.
*/
private Confidence evidenceConfidence;
//<editor-fold defaultstate="collapsed" desc="Property implementations: evidenceConfidence, confidence, identifier">
/**
* Get the value of evidenceConfidence
*
@@ -694,10 +731,6 @@ public class CPEAnalyzer extends AbstractAnalyzer {
public void setEvidenceConfidence(Confidence evidenceConfidence) {
this.evidenceConfidence = evidenceConfidence;
}
/**
* The confidence whether this is an exact match, or a best guess.
*/
private IdentifierConfidence confidence;
/**
* Get the value of confidence.
@@ -716,10 +749,6 @@ public class CPEAnalyzer extends AbstractAnalyzer {
public void setConfidence(IdentifierConfidence confidence) {
this.confidence = confidence;
}
/**
* The CPE identifier.
*/
private Identifier identifier;
/**
* Get the value of identifier.
@@ -787,10 +816,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
if (this.confidence != other.confidence) {
return false;
}
if (this.identifier != other.identifier && (this.identifier == null || !this.identifier.equals(other.identifier))) {
return false;
}
return true;
return !(this.identifier != other.identifier && (this.identifier == null || !this.identifier.equals(other.identifier)));
}
//</editor-fold>
@@ -808,16 +834,6 @@ public class CPEAnalyzer extends AbstractAnalyzer {
.append(evidenceConfidence, o.evidenceConfidence)
.append(identifier, o.identifier)
.toComparison();
/*
int conf = this.confidence.compareTo(o.confidence);
if (conf == 0) {
conf = this.evidenceConfidence.compareTo(o.evidenceConfidence);
if (conf == 0) {
conf = identifier.compareTo(o.identifier);
}
}
return conf;
*/
}
}
}
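
IdentifierMatch.compareTo now delegates to commons-lang3's CompareToBuilder instead of the nested comparisons left in the commented-out block. A self-contained sketch of the idiom, using an illustrative Match class rather than the project's types:

import org.apache.commons.lang3.builder.CompareToBuilder;

final class Match implements Comparable<Match> {

    private final int primary;
    private final int secondary;
    private final String label;

    Match(int primary, int secondary, String label) {
        this.primary = primary;
        this.secondary = secondary;
        this.label = label;
    }

    @Override
    public int compareTo(Match o) {
        // Fields are compared in order; the first non-zero comparison decides
        // the ordering, replacing the nested if/else chain shown above.
        return new CompareToBuilder()
                .append(primary, o.primary)
                .append(secondary, o.secondary)
                .append(label, o.label)
                .toComparison();
    }
}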

View File

@@ -103,14 +103,14 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
* <code>false</code>
*/
private boolean checkEnabled() {
boolean retval = false;
boolean retVal = false;
try {
if (Settings.getBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED)) {
if (!Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)
|| NexusAnalyzer.DEFAULT_URL.equals(Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL))) {
LOGGER.debug("Enabling the Central analyzer");
retval = true;
retVal = true;
} else {
LOGGER.info("Nexus analyzer is enabled, disabling the Central Analyzer");
}
@@ -120,7 +120,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
} catch (InvalidSettingException ise) {
LOGGER.warn("Invalid setting. Disabling the Central analyzer");
}
return retval;
return retVal;
}
/**
@@ -193,7 +193,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException when there's an exception during analysis
*/
@Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (errorFlag || !isEnabled()) {
return;
}

View File

@@ -119,7 +119,7 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
}
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
String contents;

View File

@@ -33,7 +33,7 @@ import org.slf4j.LoggerFactory;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
@@ -100,10 +100,8 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException if there's a failure during analysis
*/
@Override
protected void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
FileInputStream fis = null;
try {
fis = new FileInputStream(dependency.getActualFile());
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
try (FileInputStream fis = new FileInputStream(dependency.getActualFile())) {
final ComposerLockParser clp = new ComposerLockParser(fis);
LOGGER.info("Checking composer.lock file {}", dependency.getActualFilePath());
clp.process();
@@ -120,18 +118,10 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.info("Adding dependency {}", d);
engine.getDependencies().add(d);
}
} catch (FileNotFoundException fnfe) {
} catch (IOException ex) {
LOGGER.warn("Error opening dependency {}", dependency.getActualFilePath());
} catch (ComposerException ce) {
LOGGER.warn("Error parsing composer.json {}", dependency.getActualFilePath(), ce);
} finally {
if (fis != null) {
try {
fis.close();
} catch (Exception e) {
LOGGER.debug("Unable to close file", e);
}
}
}
}

View File

@@ -20,6 +20,7 @@ package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.xml.suppression.SuppressionRule;
/**
@@ -62,7 +63,7 @@ public class CpeSuppressionAnalyzer extends AbstractSuppressionAnalyzer {
//</editor-fold>
@Override
public void analyze(final Dependency dependency, final Engine engine) throws AnalysisException {
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (getRules() == null || getRules().size() <= 0) {
return;
@@ -72,4 +73,15 @@ public class CpeSuppressionAnalyzer extends AbstractSuppressionAnalyzer {
rule.process(dependency);
}
}
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_CPE_SUPPRESSION_ENABLED;
}
}
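
Several analyzers in this change set gain a getAnalyzerEnabledSettingKey() override. A hedged sketch of how a base class could use that key, assuming it simply reads the boolean setting and treats an unreadable value as enabled; this is an assumption for illustration, not the project's AbstractAnalyzer code, and the import path for InvalidSettingException is likewise assumed:

import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;

abstract class EnabledByKeyAnalyzer {

    /** Subclasses return the Settings key controlling this analyzer. */
    protected abstract String getAnalyzerEnabledSettingKey();

    /** In this sketch an unreadable setting leaves the analyzer enabled. */
    protected boolean checkEnabled() {
        try {
            return Settings.getBoolean(getAnalyzerEnabledSettingKey());
        } catch (InvalidSettingException ex) {
            return true;
        }
    }
}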

View File

@@ -30,6 +30,7 @@ import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -71,7 +72,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
* @return a flag indicating if this analyzer has run. This analyzer only
* runs once
*/
protected boolean getAnalyzed() {
protected synchronized boolean getAnalyzed() {
return analyzed;
}
@@ -84,7 +85,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.PRE_FINDING_ANALYSIS;
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.FINAL;
/**
* Returns the name of the analyzer.
@@ -119,6 +120,17 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
return false;
}
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_DEPENDENCY_BUNDLING_ENABLED;
}
/**
* Analyzes a set of dependencies. If they have been found to have the same
* base path and the same set of identifiers they are likely related. The
@@ -130,10 +142,10 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
* file.
*/
@Override
public void analyze(Dependency ignore, Engine engine) throws AnalysisException {
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
if (!analyzed) {
analyzed = true;
final Set<Dependency> dependenciesToRemove = new HashSet<Dependency>();
final Set<Dependency> dependenciesToRemove = new HashSet<>();
final ListIterator<Dependency> mainIterator = engine.getDependencies().listIterator();
//for (Dependency nextDependency : engine.getDependencies()) {
while (mainIterator.hasNext()) {
@@ -142,14 +154,15 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
while (subIterator.hasNext()) {
final Dependency nextDependency = subIterator.next();
Dependency main = null;
if (hashesMatch(dependency, nextDependency) && !containedInWar(dependency.getFilePath())
&& !containedInWar(nextDependency.getFilePath())) {
if (firstPathIsShortest(dependency.getFilePath(), nextDependency.getFilePath())) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
if (hashesMatch(dependency, nextDependency)) {
if (!containedInWar(dependency.getFilePath())
&& !containedInWar(nextDependency.getFilePath())) {
if (firstPathIsShortest(dependency.getFilePath(), nextDependency.getFilePath())) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
}
} else if (isShadedJar(dependency, nextDependency)) {
if (dependency.getFileName().toLowerCase().endsWith("pom.xml")) {
@@ -162,6 +175,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
}
} else if (cpeIdentifiersMatch(dependency, nextDependency)
&& hasSameBasePath(dependency, nextDependency)
&& vulnCountMatches(dependency, nextDependency)
&& fileNameMatch(dependency, nextDependency)) {
if (isCore(dependency, nextDependency)) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
@@ -169,20 +183,6 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
} else if ((main = getMainGemspecDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
} else if ((main = getMainSwiftDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
}
}
}
@@ -224,7 +224,12 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
* @return a string representing the base path.
*/
private String getBaseRepoPath(final String path) {
int pos = path.indexOf("repository" + File.separator) + 11;
int pos;
if (path.contains("local-repo")) {
pos = path.indexOf("local-repo" + File.separator) + 11;
} else {
pos = path.indexOf("repository" + File.separator) + 11;
}
if (pos < 0) {
return path;
}
@@ -317,6 +322,19 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
return matches;
}
/**
* Returns true if the two dependencies have the same vulnerability count.
*
* @param dependency1 a dependency to compare
* @param dependency2 a dependency to compare
* @return true if the two dependencies have the same vulnerability count
*/
private boolean vulnCountMatches(Dependency dependency1, Dependency dependency2) {
return dependency1.getVulnerabilities() != null && dependency2.getVulnerabilities() != null
&& dependency1.getVulnerabilities().size() == dependency2.getVulnerabilities().size();
}
/**
* Determines if the two dependencies have the same base path.
*
@@ -341,7 +359,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
return true;
}
if (left.matches(".*[/\\\\]repository[/\\\\].*") && right.matches(".*[/\\\\]repository[/\\\\].*")) {
if (left.matches(".*[/\\\\](repository|local-repo)[/\\\\].*") && right.matches(".*[/\\\\](repository|local-repo)[/\\\\].*")) {
left = getBaseRepoPath(left);
right = getBaseRepoPath(right);
}
@@ -357,96 +375,6 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
return false;
}
/**
* Bundling Ruby gems that are identified from different .gemspec files but
* denote the same package path. This happens when Ruby bundler installs an
* application's dependencies by running "bundle install".
*
* @param dependency1 dependency to compare
* @param dependency2 dependency to compare
* @return true if the the dependencies being analyzed appear to be the
* same; otherwise false
*/
private boolean isSameRubyGem(Dependency dependency1, Dependency dependency2) {
if (dependency1 == null || dependency2 == null
|| !dependency1.getFileName().endsWith(".gemspec")
|| !dependency2.getFileName().endsWith(".gemspec")
|| dependency1.getPackagePath() == null
|| dependency2.getPackagePath() == null) {
return false;
}
return dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath());
}
/**
* Ruby gems installed by "bundle install" can have zero or more *.gemspec
* files, all of which have the same packagePath and should be grouped. If
* one of these gemspec is from <parent>/specifications/*.gemspec, because
* it is a stub with fully resolved gem meta-data created by Ruby bundler,
* this dependency should be the main one. Otherwise, use dependency2 as
* main.
*
* This method returns null if any dependency is not from *.gemspec, or the
* two do not have the same packagePath. In this case, they should not be
* grouped.
*
* @param dependency1 dependency to compare
* @param dependency2 dependency to compare
* @return the main dependency; or null if a gemspec is not included in the
* analysis
*/
private Dependency getMainGemspecDependency(Dependency dependency1, Dependency dependency2) {
if (isSameRubyGem(dependency1, dependency2)) {
final File lFile = dependency1.getActualFile();
final File left = lFile.getParentFile();
if (left != null && left.getName().equalsIgnoreCase("specifications")) {
return dependency1;
}
return dependency2;
}
return null;
}
/**
* Bundling same swift dependencies with the same packagePath but identified
* by different analyzers.
*
* @param dependency1 dependency to test
* @param dependency2 dependency to test
* @return <code>true</code> if the dependencies appear to be the same;
* otherwise <code>false</code>
*/
private boolean isSameSwiftPackage(Dependency dependency1, Dependency dependency2) {
if (dependency1 == null || dependency2 == null
|| (!dependency1.getFileName().endsWith(".podspec")
&& !dependency1.getFileName().equals("Package.swift"))
|| (!dependency2.getFileName().endsWith(".podspec")
&& !dependency2.getFileName().equals("Package.swift"))
|| dependency1.getPackagePath() == null
|| dependency2.getPackagePath() == null) {
return false;
}
return dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath());
}
/**
* Determines which of the swift dependencies should be considered the
* primary.
*
* @param dependency1 the first swift dependency to compare
* @param dependency2 the second swift dependency to compare
* @return the primary swift dependency
*/
private Dependency getMainSwiftDependency(Dependency dependency1, Dependency dependency2) {
if (isSameSwiftPackage(dependency1, dependency2)) {
if (dependency1.getFileName().endsWith(".podspec")) {
return dependency1;
}
return dependency2;
}
return null;
}
/**
* This is likely a very broken attempt at determining if the 'left'
* dependency is the 'core' library in comparison to the 'right' library.
@@ -456,7 +384,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
* @return a boolean indicating whether or not the left dependency should be
* considered the "core" version.
*/
boolean isCore(Dependency left, Dependency right) {
protected boolean isCore(Dependency left, Dependency right) {
final String leftName = left.getFileName().toLowerCase();
final String rightName = right.getFileName().toLowerCase();
@@ -469,10 +397,6 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
|| !rightName.contains("core") && leftName.contains("core")
|| !rightName.contains("kernel") && leftName.contains("kernel")) {
returnVal = true;
// } else if (leftName.matches(".*struts2\\-core.*") && rightName.matches(".*xwork\\-core.*")) {
// returnVal = true;
// } else if (rightName.matches(".*struts2\\-core.*") && leftName.matches(".*xwork\\-core.*")) {
// returnVal = false;
} else {
/*
* considered splitting the names up and comparing the components,
@@ -575,6 +499,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
* @return true if the path contains '.war\' or '.ear\'.
*/
private boolean containedInWar(String filePath) {
return filePath == null ? false : filePath.matches(".*\\.(ear|war)[\\\\/].*");
return filePath != null && filePath.matches(".*\\.(ear|war)[\\\\/].*");
}
}
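
getBaseRepoPath and hasSameBasePath above now recognize Maven's "local-repo" directory alongside "repository". A small illustrative sketch of that normalization, trimming everything up to and including the repository directory before paths are compared; the names here are stand-ins, not the analyzer's code:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

final class RepoPathSketch {

    /** Matches the repository directory regardless of platform separator. */
    private static final Pattern REPO_DIR = Pattern.compile("[/\\\\](repository|local-repo)[/\\\\]");

    /**
     * Illustrative stand-in for a getBaseRepoPath-style helper: strips
     * everything up to and including the repository directory so two copies
     * of the same artifact in different local repositories compare equal.
     */
    static String baseRepoPath(String path) {
        final Matcher m = REPO_DIR.matcher(path);
        return m.find() ? path.substring(m.end()) : path;
    }

    public static void main(String[] args) {
        // Both print "com/example/lib/1.0/lib-1.0.jar" in this sketch.
        System.out.println(baseRepoPath("/home/build/.m2/repository/com/example/lib/1.0/lib-1.0.jar"));
        System.out.println(baseRepoPath("/tmp/work/local-repo/com/example/lib/1.0/lib-1.0.jar"));
    }
}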

View File

@@ -0,0 +1,283 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.util.HashSet;
import java.util.Iterator;
import java.util.ListIterator;
import java.util.Set;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* This analyzer will merge dependencies, created from different sources, into a
* single dependency.</p>
*
* @author Jeremy Long
*/
public class DependencyMergingAnalyzer extends AbstractAnalyzer {
//<editor-fold defaultstate="collapsed" desc="Constants and Member Variables">
/**
* The Logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(DependencyMergingAnalyzer.class);
/**
* a flag indicating if this analyzer has run. This analyzer only runs once.
*/
private boolean analyzed = false;
/**
* Returns a flag indicating if this analyzer has run. This analyzer only
* runs once. Note this is currently only used in the unit tests.
*
* @return a flag indicating if this analyzer has run. This analyzer only
* runs once
*/
protected synchronized boolean getAnalyzed() {
return analyzed;
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "Dependency Merging Analyzer";
/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.POST_INFORMATION_COLLECTION;
/**
* Returns the name of the analyzer.
*
* @return the name of the analyzer.
*/
@Override
public String getName() {
return ANALYZER_NAME;
}
/**
* Returns the phase that the analyzer is intended to run in.
*
* @return the phase that the analyzer is intended to run in.
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}
/**
* Does not support parallel processing as it only runs once and then
* operates on <em>all</em> dependencies.
*
* @return whether or not parallel processing is enabled
* @see #analyze(Dependency, Engine)
*/
@Override
public boolean supportsParallelProcessing() {
return false;
}
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_DEPENDENCY_MERGING_ENABLED;
}
//</editor-fold>
/**
* Analyzes a set of dependencies. If they have been found to be the same
* dependency created by multiple FileTypeAnalyzers (e.g. a gemspec
* dependency and a dependency from the Bundle Audit Analyzer), the
* dependencies are then merged into a single reportable item.
*
* @param ignore this analyzer ignores the dependency being analyzed
* @param engine the engine that is scanning the dependencies
* @throws AnalysisException is thrown if there is an error reading the JAR
* file.
*/
@Override
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
if (!analyzed) {
analyzed = true;
final Set<Dependency> dependenciesToRemove = new HashSet<>();
final ListIterator<Dependency> mainIterator = engine.getDependencies().listIterator();
//for (Dependency nextDependency : engine.getDependencies()) {
while (mainIterator.hasNext()) {
final Dependency dependency = mainIterator.next();
if (mainIterator.hasNext() && !dependenciesToRemove.contains(dependency)) {
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
while (subIterator.hasNext()) {
final Dependency nextDependency = subIterator.next();
Dependency main;
if ((main = getMainGemspecDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
} else if ((main = getMainSwiftDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
}
}
}
}
//removing dependencies here as ensuring correctness and avoiding ConcurrentUpdateExceptions
// was difficult because of the inner iterator.
engine.getDependencies().removeAll(dependenciesToRemove);
}
}
/**
* Adds the relatedDependency to the dependency's related dependencies.
*
* @param dependency the main dependency
* @param relatedDependency the dependency to merge into the main dependency
* and mark for removal from the main analysis loop
* @param dependenciesToRemove a collection of dependencies that will be
* removed from the main analysis loop, this function adds to this
* collection
*/
private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
LOGGER.debug("Merging '{}' into '{}'", relatedDependency.getFilePath(), dependency.getFilePath());
dependency.addRelatedDependency(relatedDependency);
dependency.getVendorEvidence().getEvidence().addAll(relatedDependency.getVendorEvidence().getEvidence());
dependency.getProductEvidence().getEvidence().addAll(relatedDependency.getProductEvidence().getEvidence());
dependency.getVersionEvidence().getEvidence().addAll(relatedDependency.getVersionEvidence().getEvidence());
final Iterator<Dependency> i = relatedDependency.getRelatedDependencies().iterator();
while (i.hasNext()) {
dependency.addRelatedDependency(i.next());
i.remove();
}
if (dependency.getSha1sum().equals(relatedDependency.getSha1sum())) {
dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
}
dependenciesToRemove.add(relatedDependency);
}
/**
* Bundling Ruby gems that are identified from different .gemspec files but
* denote the same package path. This happens when Ruby bundler installs an
* application's dependencies by running "bundle install".
*
* @param dependency1 dependency to compare
* @param dependency2 dependency to compare
* @return true if the dependencies being analyzed appear to be the
* same; otherwise false
*/
private boolean isSameRubyGem(Dependency dependency1, Dependency dependency2) {
if (dependency1 == null || dependency2 == null
|| !dependency1.getFileName().endsWith(".gemspec")
|| !dependency2.getFileName().endsWith(".gemspec")
|| dependency1.getPackagePath() == null
|| dependency2.getPackagePath() == null) {
return false;
}
return dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath());
}
/**
* Ruby gems installed by "bundle install" can have zero or more *.gemspec
* files, all of which have the same packagePath and should be grouped. If
* one of these gemspecs is from <parent>/specifications/*.gemspec, because
* it is a stub with fully resolved gem meta-data created by Ruby bundler,
* this dependency should be the main one. Otherwise, use dependency2 as
* main.
*
* This method returns null if any dependency is not from *.gemspec, or the
* two do not have the same packagePath. In this case, they should not be
* grouped.
*
* @param dependency1 dependency to compare
* @param dependency2 dependency to compare
* @return the main dependency; or null if a gemspec is not included in the
* analysis
*/
private Dependency getMainGemspecDependency(Dependency dependency1, Dependency dependency2) {
if (isSameRubyGem(dependency1, dependency2)) {
final File lFile = dependency1.getActualFile();
final File left = lFile.getParentFile();
if (left != null && left.getName().equalsIgnoreCase("specifications")) {
return dependency1;
}
return dependency2;
}
return null;
}
/**
* Bundling same swift dependencies with the same packagePath but identified
* by different file type analyzers.
*
* @param dependency1 dependency to test
* @param dependency2 dependency to test
* @return <code>true</code> if the dependencies appear to be the same;
* otherwise <code>false</code>
*/
private boolean isSameSwiftPackage(Dependency dependency1, Dependency dependency2) {
if (dependency1 == null || dependency2 == null
|| (!dependency1.getFileName().endsWith(".podspec")
&& !dependency1.getFileName().equals("Package.swift"))
|| (!dependency2.getFileName().endsWith(".podspec")
&& !dependency2.getFileName().equals("Package.swift"))
|| dependency1.getPackagePath() == null
|| dependency2.getPackagePath() == null) {
return false;
}
return dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath());
}
/**
* Determines which of the swift dependencies should be considered the
* primary.
*
* @param dependency1 the first swift dependency to compare
* @param dependency2 the second swift dependency to compare
* @return the primary swift dependency
*/
private Dependency getMainSwiftDependency(Dependency dependency1, Dependency dependency2) {
if (isSameSwiftPackage(dependency1, dependency2)) {
if (dependency1.getFileName().endsWith(".podspec")) {
return dependency1;
}
return dependency2;
}
return null;
}
}
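
The new DependencyMergingAnalyzer walks the dependency list with a main ListIterator and a second iterator started at nextIndex(), collects the dependencies to drop into a set, and removes them only after both loops finish. A generic sketch of that two-iterator pattern over plain strings, not the project's Dependency type:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;

final class PairwiseMergeSketch {

    /**
     * Compare each element with every later element, record the losers in a
     * removal set, and remove them only after iteration completes to avoid a
     * ConcurrentModificationException on the underlying list.
     */
    static List<String> dedupeIgnoringCase(List<String> items) {
        final Set<String> toRemove = new HashSet<>();
        final ListIterator<String> main = items.listIterator();
        while (main.hasNext()) {
            final String current = main.next();
            if (!main.hasNext() || toRemove.contains(current)) {
                continue;
            }
            final ListIterator<String> sub = items.listIterator(main.nextIndex());
            while (sub.hasNext()) {
                final String candidate = sub.next();
                if (current.equalsIgnoreCase(candidate)) {
                    toRemove.add(candidate); // "merge" the later duplicate into the earlier one
                }
            }
        }
        final List<String> result = new ArrayList<>(items);
        result.removeAll(toRemove);
        return result;
    }
}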

View File

@@ -34,11 +34,13 @@ import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This analyzer attempts to remove some well known false positives - specifically regarding the java runtime.
* This analyzer attempts to remove some well known false positives -
* specifically regarding the java runtime.
*
* @author Jeremy Long
*/
@@ -83,17 +85,30 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_FALSE_POSITIVE_ENABLED;
}
//</editor-fold>
/**
* Analyzes the dependencies and removes bad/incorrect CPE associations based on various heuristics.
* Analyzes the dependencies and removes bad/incorrect CPE associations
* based on various heuristics.
*
* @param dependency the dependency to analyze.
* @param engine the engine that is scanning the dependencies
* @throws AnalysisException is thrown if there is an error reading the JAR file.
* @throws AnalysisException is thrown if there is an error reading the JAR
* file.
*/
@Override
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
removeJreEntries(dependency);
removeBadMatches(dependency);
removeBadSpringMatches(dependency);
@@ -106,22 +121,23 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
/**
* Removes inaccurate matches on springframework CPEs.
*
* @param dependency the dependency to test for and remove known inaccurate CPE matches
* @param dependency the dependency to test for and remove known inaccurate
* CPE matches
*/
private void removeBadSpringMatches(Dependency dependency) {
String mustContain = null;
for (Identifier i : dependency.getIdentifiers()) {
if ("maven".contains(i.getType())) {
if (i.getValue() != null && i.getValue().startsWith("org.springframework.")) {
final int endPoint = i.getValue().indexOf(':', 19);
if (endPoint >= 0) {
mustContain = i.getValue().substring(19, endPoint).toLowerCase();
break;
}
if ("maven".contains(i.getType())
&& i.getValue() != null && i.getValue().startsWith("org.springframework.")) {
final int endPoint = i.getValue().indexOf(':', 19);
if (endPoint >= 0) {
mustContain = i.getValue().substring(19, endPoint).toLowerCase();
break;
}
}
}
if (mustContain != null) {
if (mustContain
!= null) {
final Iterator<Identifier> itr = dependency.getIdentifiers().iterator();
while (itr.hasNext()) {
final Identifier i = itr.next();
@@ -138,7 +154,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
/**
* <p>
* Intended to remove spurious CPE entries. By spurious we mean duplicate, less specific CPE entries.</p>
* Intended to remove spurious CPE entries. By spurious we mean duplicate,
* less specific CPE entries.</p>
* <p>
* Example:</p>
* <code>
@@ -156,7 +173,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
*/
@SuppressWarnings("null")
private void removeSpuriousCPE(Dependency dependency) {
final List<Identifier> ids = new ArrayList<Identifier>(dependency.getIdentifiers());
final List<Identifier> ids = new ArrayList<>(dependency.getIdentifiers());
Collections.sort(ids);
final ListIterator<Identifier> mainItr = ids.listIterator();
while (mainItr.hasNext()) {
@@ -189,10 +206,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
if (nextVersion.startsWith(currentVersion) || "-".equals(currentVersion)) {
dependency.getIdentifiers().remove(currentId);
}
} else {
if (currentVersion.startsWith(nextVersion) || "-".equals(nextVersion)) {
dependency.getIdentifiers().remove(nextId);
}
} else if (currentVersion.startsWith(nextVersion) || "-".equals(nextVersion)) {
dependency.getIdentifiers().remove(nextId);
}
}
}
@@ -200,7 +215,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
}
}
/**
* Regex to identify core java libraries and a few other commonly misidentified ones.
* Regex to identify core java libraries and a few other commonly
* misidentified ones.
*/
public static final Pattern CORE_JAVA = Pattern.compile("^cpe:/a:(sun|oracle|ibm):(j2[ems]e|"
+ "java(_platform_micro_edition|_runtime_environment|_se|virtual_machine|se_development_kit|fx)?|"
@@ -215,12 +231,14 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
*/
public static final Pattern CORE_FILES = Pattern.compile("(^|/)((alt[-])?rt|jsse|jfxrt|jfr|jce|javaws|deploy|charsets)\\.jar$");
/**
* Regex to identify core jsf java library files. This is currently incomplete.
* Regex to identify core jsf java library files. This is currently
* incomplete.
*/
public static final Pattern CORE_JSF_FILES = Pattern.compile("(^|/)jsf[-][^/]*\\.jar$");
/**
* Removes any CPE entries for the JDK/JRE unless the filename ends with rt.jar
* Removes any CPE entries for the JDK/JRE unless the filename ends with
* rt.jar
*
* @param dependency the dependency to remove JRE CPEs from
*/
@@ -264,8 +282,9 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
}
/**
* Removes bad CPE matches for a dependency. Unfortunately, right now these are hard-coded patches for specific problems
* identified when testing this on a LARGE volume of jar files.
* Removes bad CPE matches for a dependency. Unfortunately, right now these
* are hard-coded patches for specific problems identified when testing this
* on a LARGE volume of jar files.
*
* @param dependency the dependency to analyze
*/
@@ -340,7 +359,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
}
/**
* Removes CPE matches for the wrong version of a dependency. Currently, this only covers Axis 1 & 2.
* Removes CPE matches for the wrong version of a dependency. Currently,
* this only covers Axis 1 & 2.
*
* @param dependency the dependency to analyze
*/
@@ -373,8 +393,10 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
}
/**
* There are some known CPE entries, specifically regarding sun and oracle products due to the acquisition and changes in
* product names, that based on given evidence we can add the related CPE entries to ensure a complete list of CVE entries.
* There are some known CPE entries, specifically regarding sun and oracle
* products due to the acquisition and changes in product names, that based
* on given evidence we can add the related CPE entries to ensure a complete
* list of CVE entries.
*
* @param dependency the dependency being analyzed
*/
@@ -411,47 +433,47 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
}
/**
* Removes duplicate entries identified that are contained within JAR files. These occasionally crop up due to POM entries or
* other types of files (such as DLLs and EXEs) being contained within the JAR.
* Removes duplicate entries identified that are contained within JAR files.
* These occasionally crop up due to POM entries or other types of files
* (such as DLLs and EXEs) being contained within the JAR.
*
* @param dependency the dependency that might be a duplicate
* @param engine the engine used to scan all dependencies
*/
private void removeDuplicativeEntriesFromJar(Dependency dependency, Engine engine) {
private synchronized void removeDuplicativeEntriesFromJar(Dependency dependency, Engine engine) {
if (dependency.getFileName().toLowerCase().endsWith("pom.xml")
|| DLL_EXE_FILTER.accept(dependency.getActualFile())) {
String parentPath = dependency.getFilePath().toLowerCase();
if (parentPath.contains(".jar")) {
parentPath = parentPath.substring(0, parentPath.indexOf(".jar") + 4);
final List<Dependency> dependencies = engine.getDependencies();
synchronized (dependencies) {
final Dependency parent = findDependency(parentPath, dependencies);
if (parent != null) {
boolean remove = false;
for (Identifier i : dependency.getIdentifiers()) {
if ("cpe".equals(i.getType())) {
final String trimmedCPE = trimCpeToVendor(i.getValue());
for (Identifier parentId : parent.getIdentifiers()) {
if ("cpe".equals(parentId.getType()) && parentId.getValue().startsWith(trimmedCPE)) {
remove |= true;
}
final Dependency parent = findDependency(parentPath, dependencies);
if (parent != null) {
boolean remove = false;
for (Identifier i : dependency.getIdentifiers()) {
if ("cpe".equals(i.getType())) {
final String trimmedCPE = trimCpeToVendor(i.getValue());
for (Identifier parentId : parent.getIdentifiers()) {
if ("cpe".equals(parentId.getType()) && parentId.getValue().startsWith(trimmedCPE)) {
remove |= true;
}
}
if (!remove) { //we can escape early
return;
}
}
if (remove) {
dependencies.remove(dependency);
if (!remove) { //we can escape early
return;
}
}
if (remove) {
dependencies.remove(dependency);
}
}
}
}
}
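As a concrete illustration of the containment test above: a pom.xml unpacked from inside a jar is treated as a duplicate when its CPE, trimmed to vendor and product, is a prefix of one of the containing jar's CPEs. A minimal sketch with illustrative values only:

    // Illustrative values; real values come from the dependency identifiers.
    final String childCpeTrimmed = "cpe:/a:acme:foo";      // embedded pom.xml, trimmed
    final String parentCpe = "cpe:/a:acme:foo:1.2";        // containing jar
    final boolean duplicate = parentCpe.startsWith(childCpeTrimmed); // true -> remove child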
/**
* Retrieves a given dependency, based on a given path, from a list of dependencies.
* Retrieves a given dependency, based on a given path, from a list of
* dependencies.
*
* @param dependencyPath the path of the dependency to return
* @param dependencies the collection of dependencies to search
@@ -467,7 +489,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
}
/**
* Takes a full CPE and returns the CPE trimmed to include only vendor and product.
* Takes a full CPE and returns the CPE trimmed to include only vendor and
* product.
*
* @param value the CPE value to trim
* @return a CPE value that only includes the vendor and product
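For example, cpe:/a:apache:axis:1.4 would be trimmed to cpe:/a:apache:axis. A minimal sketch of that trimming, assuming colon-delimited CPE 2.2 URIs (the method below is a hypothetical stand-in, not the class's implementation):

    // Sketch only: keep the first four colon-delimited segments
    // ("cpe", part, vendor, product) of a CPE 2.2 URI.
    static String trimToVendorProduct(String cpe) {
        final String[] parts = cpe.split(":");
        if (parts.length <= 4) {
            return cpe;
        }
        return String.join(":", parts[0], parts[1], parts[2], parts[3]);
    }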

View File

@@ -27,6 +27,7 @@ import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.Settings;
/**
*
@@ -65,6 +66,16 @@ public class FileNameAnalyzer extends AbstractAnalyzer {
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_FILE_NAME_ENABLED;
}
//</editor-fold>
/**
@@ -86,7 +97,7 @@ public class FileNameAnalyzer extends AbstractAnalyzer {
* file.
*/
@Override
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
//strip any path information that may get added by ArchiveAnalyzer, etc.
final File f = dependency.getActualFile();

View File

@@ -26,8 +26,4 @@ import java.io.FileFilter;
*/
public interface FileTypeAnalyzer extends Analyzer, FileFilter {
/**
* Resets the analyzers state.
*/
void reset();
}

View File

@@ -53,6 +53,19 @@ import org.xml.sax.SAXException;
*/
public class HintAnalyzer extends AbstractAnalyzer {
/**
* The Logger for use throughout the class
*/
private static final Logger LOGGER = LoggerFactory.getLogger(HintAnalyzer.class);
/**
* The name of the hint rule file
*/
private static final String HINT_RULE_FILE_NAME = "dependencycheck-base-hint.xml";
/**
* The collection of hints.
*/
private Hints hints;
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
* The name of the analyzer.
@@ -83,15 +96,25 @@ public class HintAnalyzer extends AbstractAnalyzer {
return ANALYSIS_PHASE;
}
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_HINT_ENABLED;
}
/**
* Initializes the analyzer by loading the hint rules.
*
* @throws InitializationException thrown if there is an exception
*/
@Override
public void initialize() throws InitializationException {
public void initializeAnalyzer() throws InitializationException {
try {
super.initialize();
loadHintRules();
} catch (HintParseException ex) {
LOGGER.debug("Unable to parse hint file", ex);
@@ -100,19 +123,6 @@ public class HintAnalyzer extends AbstractAnalyzer {
}
//</editor-fold>
/**
* The Logger for use throughout the class
*/
private static final Logger LOGGER = LoggerFactory.getLogger(HintAnalyzer.class);
/**
* The name of the hint rule file
*/
private static final String HINT_RULE_FILE_NAME = "dependencycheck-base-hint.xml";
/**
* The collection of hints.
*/
private Hints hints;
/**
* The HintAnalyzer uses knowledge about a dependency to add additional
* information to help in identification of identifiers or vulnerabilities.
@@ -123,31 +133,40 @@ public class HintAnalyzer extends AbstractAnalyzer {
* the dependency.
*/
@Override
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
for (HintRule hint : hints.getHintRules()) {
boolean shouldAdd = false;
boolean matchFound = false;
for (Evidence given : hint.getGivenVendor()) {
if (dependency.getVendorEvidence().getEvidence().contains(given)) {
shouldAdd = true;
matchFound = true;
break;
}
}
if (!shouldAdd) {
if (!matchFound) {
for (Evidence given : hint.getGivenProduct()) {
if (dependency.getProductEvidence().getEvidence().contains(given)) {
shouldAdd = true;
matchFound = true;
break;
}
}
}
if (!shouldAdd) {
for (PropertyType pt : hint.getFilenames()) {
if (pt.matches(dependency.getFileName())) {
shouldAdd = true;
if (!matchFound) {
for (Evidence given : hint.getGivenVersion()) {
if (dependency.getVersionEvidence().getEvidence().contains(given)) {
matchFound = true;
break;
}
}
}
if (shouldAdd) {
if (!matchFound) {
for (PropertyType pt : hint.getFilenames()) {
if (pt.matches(dependency.getFileName())) {
matchFound = true;
break;
}
}
}
if (matchFound) {
for (Evidence e : hint.getAddVendor()) {
dependency.getVendorEvidence().addEvidence(e);
}
@@ -157,11 +176,26 @@ public class HintAnalyzer extends AbstractAnalyzer {
for (Evidence e : hint.getAddVersion()) {
dependency.getVersionEvidence().addEvidence(e);
}
for (Evidence e : hint.getRemoveVendor()) {
if (dependency.getVendorEvidence().getEvidence().contains(e)) {
dependency.getVendorEvidence().getEvidence().remove(e);
}
}
for (Evidence e : hint.getRemoveProduct()) {
if (dependency.getProductEvidence().getEvidence().contains(e)) {
dependency.getProductEvidence().getEvidence().remove(e);
}
}
for (Evidence e : hint.getRemoveVersion()) {
if (dependency.getVersionEvidence().getEvidence().contains(e)) {
dependency.getVersionEvidence().getEvidence().remove(e);
}
}
}
}
final Iterator<Evidence> itr = dependency.getVendorEvidence().iterator();
final List<Evidence> newEntries = new ArrayList<Evidence>();
final List<Evidence> newEntries = new ArrayList<>();
while (itr.hasNext()) {
final Evidence e = itr.next();
for (VendorDuplicatingHintRule dhr : hints.getVendorDuplicatingHintRules()) {
@@ -174,108 +208,6 @@ public class HintAnalyzer extends AbstractAnalyzer {
for (Evidence e : newEntries) {
dependency.getVendorEvidence().addEvidence(e);
}
//<editor-fold defaultstate="collapsed" desc="Old implementation">
/*
final Evidence springTest1 = new Evidence("Manifest",
"Implementation-Title",
"Spring Framework",
Confidence.HIGH);
final Evidence springTest2 = new Evidence("Manifest",
"Implementation-Title",
"org.springframework.core",
Confidence.HIGH);
final Evidence springTest3 = new Evidence("Manifest",
"Implementation-Title",
"spring-core",
Confidence.HIGH);
final Evidence springTest4 = new Evidence("jar",
"package name",
"springframework",
Confidence.LOW);
final Evidence springSecurityTest1 = new Evidence("Manifest",
"Bundle-Name",
"Spring Security Core",
Confidence.MEDIUM);
final Evidence springSecurityTest2 = new Evidence("pom",
"artifactid",
"spring-security-core",
Confidence.HIGH);
final Evidence symfony = new Evidence("composer.lock",
"vendor",
"symfony",
Confidence.HIGHEST);
final Evidence zendframeworkVendor = new Evidence("composer.lock",
"vendor",
"zendframework",
Confidence.HIGHEST);
final Evidence zendframeworkProduct = new Evidence("composer.lock",
"product",
"zendframework",
Confidence.HIGHEST);
//springsource/vware problem
final Set<Evidence> product = dependency.getProductEvidence().getEvidence();
final Set<Evidence> vendor = dependency.getVendorEvidence().getEvidence();
if (product.contains(springTest1) || product.contains(springTest2) || product.contains(springTest3)
|| (dependency.getFileName().contains("spring") && product.contains(springTest4))) {
dependency.getProductEvidence().addEvidence("hint analyzer", "product", "springsource spring framework", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "SpringSource", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "pivotal", Confidence.HIGH);
}
if (vendor.contains(springTest4)) {
dependency.getProductEvidence().addEvidence("hint analyzer", "product", "springsource_spring_framework", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "pivotal", Confidence.HIGH);
}
if (product.contains(springSecurityTest1) || product.contains(springSecurityTest2)) {
dependency.getProductEvidence().addEvidence("hint analyzer", "product", "springsource_spring_security", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "SpringSource", Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
}
if (vendor.contains(symfony)) {
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "sensiolabs", Confidence.HIGHEST);
}
if (vendor.contains(zendframeworkVendor)) {
dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "zend", Confidence.HIGHEST);
}
if (product.contains(zendframeworkProduct)) {
dependency.getProductEvidence().addEvidence("hint analyzer", "vendor", "zend_framework", Confidence.HIGHEST);
}
//sun/oracle problem
final Iterator<Evidence> itr = dependency.getVendorEvidence().iterator();
final List<Evidence> newEntries = new ArrayList<Evidence>();
while (itr.hasNext()) {
final Evidence e = itr.next();
if ("sun".equalsIgnoreCase(e.getValue(false))) {
final Evidence newEvidence = new Evidence(e.getSource() + " (hint)", e.getName(), "oracle", e.getConfidence());
newEntries.add(newEvidence);
} else if ("oracle".equalsIgnoreCase(e.getValue(false))) {
final Evidence newEvidence = new Evidence(e.getSource() + " (hint)", e.getName(), "sun", e.getConfidence());
newEntries.add(newEvidence);
}
}
for (Evidence e : newEntries) {
dependency.getVendorEvidence().addEvidence(e);
}
*/
//</editor-fold>
}
/**
@@ -288,10 +220,7 @@ public class HintAnalyzer extends AbstractAnalyzer {
File file = null;
try {
hints = parser.parseHints(this.getClass().getClassLoader().getResourceAsStream(HINT_RULE_FILE_NAME));
} catch (HintParseException ex) {
LOGGER.error("Unable to parse the base hint data file");
LOGGER.debug("Unable to parse the base hint data file", ex);
} catch (SAXException ex) {
} catch (HintParseException | SAXException ex) {
LOGGER.error("Unable to parse the base hint data file");
LOGGER.debug("Unable to parse the base hint data file", ex);
}
@@ -314,9 +243,7 @@ public class HintAnalyzer extends AbstractAnalyzer {
} else {
file = new File(filePath);
if (!file.exists()) {
InputStream fromClasspath = null;
try {
fromClasspath = this.getClass().getClassLoader().getResourceAsStream(filePath);
try (InputStream fromClasspath = this.getClass().getClassLoader().getResourceAsStream(filePath)) {
if (fromClasspath != null) {
deleteTempFile = true;
file = FileUtils.getTempFile("hint", "xml");
@@ -326,10 +253,6 @@ public class HintAnalyzer extends AbstractAnalyzer {
throw new HintParseException("Unable to locate hints file in classpath", ex);
}
}
} finally {
if (fromClasspath != null) {
fromClasspath.close();
}
}
}
}

View File

@@ -23,9 +23,10 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
@@ -43,6 +44,7 @@ import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import org.apache.commons.compress.utils.IOUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.StringUtils;
import org.jsoup.Jsoup;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
@@ -148,15 +150,6 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* A pattern to detect HTML within text.
*/
private static final Pattern HTML_DETECTION_PATTERN = Pattern.compile("\\<[a-z]+.*/?\\>", Pattern.CASE_INSENSITIVE);
//</editor-fold>
/**
* Constructs a new JarAnalyzer.
*/
public JarAnalyzer() {
}
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
* The name of the analyzer.
*/
@@ -175,6 +168,8 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
* Returns the FileFilter.
*
@@ -227,7 +222,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* file.
*/
@Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
try {
final List<ClassNameInformation> classNames = collectClassNames(dependency);
final String fileName = dependency.getFileName().toLowerCase();
@@ -243,7 +238,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
final boolean addPackagesAsEvidence = !(hasManifest && hasPOM);
analyzePackageNames(classNames, dependency, addPackagesAsEvidence);
} catch (IOException ex) {
throw new AnalysisException("Exception occurred reading the JAR file.", ex);
throw new AnalysisException("Exception occurred reading the JAR file (" + dependency.getFileName() + ").", ex);
}
}
@@ -260,49 +255,41 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* @return whether or not evidence was added to the dependency
*/
protected boolean analyzePOM(Dependency dependency, List<ClassNameInformation> classes, Engine engine) throws AnalysisException {
boolean foundSomething = false;
final JarFile jar;
try {
jar = new JarFile(dependency.getActualFilePath());
} catch (IOException ex) {
LOGGER.warn("Unable to read JarFile '{}'.", dependency.getActualFilePath());
LOGGER.trace("", ex);
return false;
}
List<String> pomEntries;
try {
pomEntries = retrievePomListing(jar);
} catch (IOException ex) {
LOGGER.warn("Unable to read Jar file entries in '{}'.", dependency.getActualFilePath());
LOGGER.trace("", ex);
return false;
}
File externalPom = null;
if (pomEntries.isEmpty()) {
final String pomPath = FilenameUtils.removeExtension(dependency.getActualFilePath()) + ".pom";
externalPom = new File(pomPath);
if (externalPom.isFile()) {
pomEntries.add(pomPath);
} else {
return false;
}
}
for (String path : pomEntries) {
LOGGER.debug("Reading pom entry: {}", path);
Properties pomProperties = null;
try {
if (externalPom == null) {
try (JarFile jar = new JarFile(dependency.getActualFilePath())) {
final List<String> pomEntries = retrievePomListing(jar);
if (pomEntries != null && pomEntries.size() <= 1) {
String path;
File pomFile;
Properties pomProperties = null;
if (pomEntries.size() == 1) {
path = pomEntries.get(0);
pomFile = extractPom(path, jar);
pomProperties = retrievePomProperties(path, jar);
} else {
path = FilenameUtils.removeExtension(dependency.getActualFilePath()) + ".pom";
pomFile = new File(path);
}
if (pomFile.isFile()) {
final Model pom = PomUtils.readPom(pomFile);
if (pom != null && pomProperties != null) {
pom.processProperties(pomProperties);
}
return pom != null && setPomEvidence(dependency, pom, classes);
} else {
return false;
}
} catch (IOException ex) {
LOGGER.trace("ignore this, failed reading a non-existent pom.properties", ex);
}
Model pom = null;
try {
if (pomEntries.size() > 1) {
//reported possible null dereference on pomEntries is on a non-feasible path
for (String path : pomEntries) {
//TODO - one of these is likely the pom for the main JAR we are analyzing
LOGGER.debug("Reading pom entry: {}", path);
try {
//extract POM to its own directory and add it as its own dependency
final Dependency newDependency = new Dependency();
pom = extractPom(path, jar, newDependency);
final Properties pomProperties = retrievePomProperties(path, jar);
final File pomFile = extractPom(path, jar);
final Model pom = PomUtils.readPom(pomFile);
pom.processProperties(pomProperties);
final String displayPath = String.format("%s%s%s",
dependency.getFilePath(),
@@ -312,29 +299,22 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
dependency.getFileName(),
File.separator,
path);
final Dependency newDependency = new Dependency();
newDependency.setActualFilePath(pomFile.getAbsolutePath());
newDependency.setFileName(displayName);
newDependency.setFilePath(displayPath);
pom.processProperties(pomProperties);
setPomEvidence(newDependency, pom, null);
engine.getDependencies().add(newDependency);
} else {
if (externalPom == null) {
pom = PomUtils.readPom(path, jar);
} else {
pom = PomUtils.readPom(externalPom);
}
if (pom != null) {
pom.processProperties(pomProperties);
foundSomething |= setPomEvidence(dependency, pom, classes);
}
} catch (AnalysisException ex) {
LOGGER.warn("An error occurred while analyzing '{}'.", dependency.getActualFilePath());
LOGGER.trace("", ex);
}
} catch (AnalysisException ex) {
LOGGER.warn("An error occurred while analyzing '{}'.", dependency.getActualFilePath());
LOGGER.trace("", ex);
}
} catch (IOException ex) {
LOGGER.warn("Unable to read JarFile '{}'.", dependency.getActualFilePath());
LOGGER.trace("", ex);
}
return foundSomething;
return false;
}
/**
@@ -344,28 +324,20 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* @param path the path to the pom.xml within the JarFile
* @param jar the JarFile to load the pom.properties from
* @return a Properties object or null if no pom.properties was found
* @throws IOException thrown if there is an exception reading the
* pom.properties
*/
private Properties retrievePomProperties(String path, final JarFile jar) throws IOException {
private Properties retrievePomProperties(String path, final JarFile jar) {
Properties pomProperties = null;
final String propPath = path.substring(0, path.length() - 7) + "pom.properties";
final ZipEntry propEntry = jar.getEntry(propPath);
if (propEntry != null) {
Reader reader = null;
try {
reader = new InputStreamReader(jar.getInputStream(propEntry), "UTF-8");
try (Reader reader = new InputStreamReader(jar.getInputStream(propEntry), "UTF-8")) {
pomProperties = new Properties();
pomProperties.load(reader);
LOGGER.debug("Read pom.properties: {}", propPath);
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException ex) {
LOGGER.trace("close error", ex);
}
}
} catch (UnsupportedEncodingException ex) {
LOGGER.trace("UTF-8 is not supported", ex);
} catch (IOException ex) {
LOGGER.trace("Unable to read the POM properties", ex);
}
}
return pomProperties;
@@ -380,7 +352,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* @throws IOException thrown if there is an exception reading a JarEntry
*/
private List<String> retrievePomListing(final JarFile jar) throws IOException {
final List<String> pomEntries = new ArrayList<String>();
final List<String> pomEntries = new ArrayList<>();
final Enumeration<JarEntry> entries = jar.entries();
while (entries.hasMoreElements()) {
final JarEntry entry = entries.nextElement();
@@ -394,67 +366,29 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
}
/**
* Retrieves the specified POM from a jar file and converts it to a Model.
* Retrieves the specified POM from a jar.
*
* @param path the path to the pom.xml file within the jar file
* @param jar the jar file to extract the pom from
* @param dependency the dependency being analyzed
* @return returns the POM object
* @return returns the POM file
* @throws AnalysisException is thrown if there is an exception extracting
* or parsing the POM {@link org.owasp.dependencycheck.xml.pom.Model} object
* the file
*/
private Model extractPom(String path, JarFile jar, Dependency dependency) throws AnalysisException {
InputStream input = null;
FileOutputStream fos = null;
private File extractPom(String path, JarFile jar) throws AnalysisException {
final File tmpDir = getNextTempDirectory();
final File file = new File(tmpDir, "pom.xml");
try {
final ZipEntry entry = jar.getEntry(path);
if (entry == null) {
throw new AnalysisException(String.format("Pom (%s)does not exist in %s", path, jar.getName()));
}
input = jar.getInputStream(entry);
fos = new FileOutputStream(file);
final ZipEntry entry = jar.getEntry(path);
if (entry == null) {
throw new AnalysisException(String.format("Pom (%s) does not exist in %s", path, jar.getName()));
}
try (InputStream input = jar.getInputStream(entry);
FileOutputStream fos = new FileOutputStream(file)) {
IOUtils.copy(input, fos);
dependency.setActualFilePath(file.getAbsolutePath());
} catch (IOException ex) {
LOGGER.warn("An error occurred reading '{}' from '{}'.", path, dependency.getFilePath());
LOGGER.warn("An error occurred reading '{}' from '{}'.", path, jar.getName());
LOGGER.error("", ex);
} finally {
closeStream(fos);
closeStream(input);
}
return PomUtils.readPom(file);
}
/**
* Silently closes an input stream ignoring errors.
*
* @param stream an input stream to close
*/
private void closeStream(InputStream stream) {
if (stream != null) {
try {
stream.close();
} catch (IOException ex) {
LOGGER.trace("", ex);
}
}
}
/**
* Silently closes an output stream ignoring errors.
*
* @param stream an output stream to close
*/
private void closeStream(OutputStream stream) {
if (stream != null) {
try {
stream.close();
} catch (IOException ex) {
LOGGER.trace("", ex);
}
}
return file;
}
/**
@@ -468,11 +402,11 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* otherwise false
*/
public static boolean setPomEvidence(Dependency dependency, Model pom, List<ClassNameInformation> classes) {
if (pom == null) {
return false;
}
boolean foundSomething = false;
boolean addAsIdentifier = true;
if (pom == null) {
return foundSomething;
}
String groupid = pom.getGroupId();
String parentGroupId = pom.getParentGroupId();
String artifactid = pom.getArtifactId();
@@ -562,6 +496,12 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
addMatchingValues(classes, org, dependency.getVendorEvidence());
addMatchingValues(classes, org, dependency.getProductEvidence());
}
// organization url
final String orgUrl = pom.getOrganizationUrl();
if (orgUrl != null && !orgUrl.isEmpty()) {
dependency.getVendorEvidence().addEvidence("pom", "organization url", orgUrl, Confidence.MEDIUM);
dependency.getProductEvidence().addEvidence("pom", "organization url", orgUrl, Confidence.LOW);
}
//pom name
final String pomName = pom.getName();
if (pomName
@@ -604,8 +544,8 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
*/
protected void analyzePackageNames(List<ClassNameInformation> classNames,
Dependency dependency, boolean addPackagesAsEvidence) {
final Map<String, Integer> vendorIdentifiers = new HashMap<String, Integer>();
final Map<String, Integer> productIdentifiers = new HashMap<String, Integer>();
final Map<String, Integer> vendorIdentifiers = new HashMap<>();
final Map<String, Integer> productIdentifiers = new HashMap<>();
analyzeFullyQualifiedClassNames(classNames, vendorIdentifiers, productIdentifiers);
final int classCount = classNames.size();
@@ -649,11 +589,10 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* @return whether evidence was identified parsing the manifest
* @throws IOException if there is an issue reading the JAR file
*/
protected boolean parseManifest(Dependency dependency, List<ClassNameInformation> classInformation) throws IOException {
protected boolean parseManifest(Dependency dependency, List<ClassNameInformation> classInformation)
throws IOException {
boolean foundSomething = false;
JarFile jar = null;
try {
jar = new JarFile(dependency.getActualFilePath());
try (JarFile jar = new JarFile(dependency.getActualFilePath())) {
final Manifest manifest = jar.getManifest();
if (manifest == null) {
if (!dependency.getFileName().toLowerCase().endsWith("-sources.jar")
@@ -734,11 +673,11 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
}
} else if ("build-id".equals(key)) {
int pos = value.indexOf('(');
if (pos >= 0) {
if (pos > 0) {
value = value.substring(0, pos - 1);
}
pos = value.indexOf('[');
if (pos >= 0) {
if (pos > 0) {
value = value.substring(0, pos - 1);
}
versionEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
@@ -808,10 +747,6 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
foundSomething = true;
versionEvidence.addEvidence(source, "specification-version", specificationVersion, Confidence.HIGH);
}
} finally {
if (jar != null) {
jar.close();
}
}
return foundSomething;
}
@@ -927,7 +862,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* Deletes any files extracted from the JAR during analysis.
*/
@Override
public void close() {
public void closeAnalyzer() {
if (tempFileLocation != null && tempFileLocation.exists()) {
LOGGER.debug("Attempting to delete temporary files");
final boolean success = FileUtils.delete(tempFileLocation);
@@ -964,10 +899,8 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* @return an list of fully qualified class names
*/
private List<ClassNameInformation> collectClassNames(Dependency dependency) {
final List<ClassNameInformation> classNames = new ArrayList<ClassNameInformation>();
JarFile jar = null;
try {
jar = new JarFile(dependency.getActualFilePath());
final List<ClassNameInformation> classNames = new ArrayList<>();
try (JarFile jar = new JarFile(dependency.getActualFilePath())) {
final Enumeration<JarEntry> entries = jar.entries();
while (entries.hasMoreElements()) {
final JarEntry entry = entries.nextElement();
@@ -981,14 +914,6 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} catch (IOException ex) {
LOGGER.warn("Unable to open jar file '{}'.", dependency.getFileName());
LOGGER.debug("", ex);
} finally {
if (jar != null) {
try {
jar.close();
} catch (IOException ex) {
LOGGER.trace("", ex);
}
}
}
return classNames;
}
@@ -1013,13 +938,11 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
if (list.size() == 2) {
addEntry(product, list.get(1));
}
if (list.size() == 3) {
} else if (list.size() == 3) {
addEntry(vendor, list.get(1));
addEntry(product, list.get(1));
addEntry(product, list.get(2));
}
if (list.size() >= 4) {
} else if (list.size() >= 4) {
addEntry(vendor, list.get(1));
addEntry(vendor, list.get(2));
addEntry(product, list.get(1));
@@ -1133,6 +1056,16 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
*/
protected static class ClassNameInformation {
/**
* The fully qualified class name.
*/
private String name;
/**
* Up to the first four levels of the package structure, excluding a
* leading "org" or "com".
*/
private final ArrayList<String> packageStructure = new ArrayList<>();
/**
* <p>
* Stores information about a given class name. This class will keep the
@@ -1140,7 +1073,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* package structure. Up to the first four levels of the package
* structure are stored, excluding a leading "org" or "com".
* Example:</p>
* <code>ClassNameInformation obj = new ClassNameInformation("org.owasp.dependencycheck.analyzer.JarAnalyzer");
* <code>ClassNameInformation obj = new ClassNameInformation("org/owasp/dependencycheck/analyzer/JarAnalyzer");
* System.out.println(obj.getName());
* for (String p : obj.getPackageStructure())
* System.out.println(p);
@@ -1158,7 +1091,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
ClassNameInformation(String className) {
name = className;
if (name.contains("/")) {
final String[] tmp = className.toLowerCase().split("/");
final String[] tmp = StringUtils.split(className.toLowerCase(), '/');
int start = 0;
int end = 3;
if ("com".equals(tmp[0]) || "org".equals(tmp[0])) {
@@ -1168,17 +1101,11 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
if (tmp.length <= end) {
end = tmp.length - 1;
}
for (int i = start; i <= end; i++) {
packageStructure.add(tmp[i]);
}
packageStructure.addAll(Arrays.asList(tmp).subList(start, end + 1));
} else {
packageStructure.add(name);
}
}
/**
* The fully qualified class name.
*/
private String name;
/**
* Get the value of name
@@ -1197,11 +1124,6 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
public void setName(String name) {
this.name = name;
}
/**
* Up to the first four levels of the package structure, excluding a
* leading "org" or "com".
*/
private final ArrayList<String> packageStructure = new ArrayList<String>();
/**
* Get the value of packageStructure

View File

@@ -87,7 +87,6 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
*/
private static final String SUPPORTED_EXTENSIONS = "jar";
private boolean useProxy;
/**
* The Nexus Search to be set up for this analyzer.
*/
@@ -145,7 +144,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.debug("Initializing Nexus Analyzer");
LOGGER.debug("Nexus Analyzer enabled: {}", isEnabled());
if (isEnabled()) {
useProxy = useProxy();
final boolean useProxy = useProxy();
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL);
LOGGER.debug("Nexus Analyzer URL: {}", searchUrl);
try {
@@ -215,7 +214,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException when there's an exception during analysis
*/
@Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (!isEnabled()) {
return;
}
@@ -265,7 +264,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.debug("Could not connect to nexus repository", ioe);
}
}
/**
* Determine if a proxy should be used.
*

View File

@@ -121,17 +121,9 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
}
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
throws AnalysisException {
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
final File file = dependency.getActualFile();
JsonReader jsonReader;
try {
jsonReader = Json.createReader(FileUtils.openInputStream(file));
} catch (IOException e) {
throw new AnalysisException(
"Problem occurred while reading dependency file.", e);
}
try {
try (JsonReader jsonReader = Json.createReader(FileUtils.openInputStream(file))) {
final JsonObject json = jsonReader.readObject();
final EvidenceCollection productEvidence = dependency.getProductEvidence();
final EvidenceCollection vendorEvidence = dependency.getVendorEvidence();
@@ -151,8 +143,8 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
dependency.setDisplayFileName(String.format("%s/%s", file.getParentFile().getName(), file.getName()));
} catch (JsonException e) {
LOGGER.warn("Failed to parse package.json file.", e);
} finally {
jsonReader.close();
} catch (IOException e) {
throw new AnalysisException("Problem occurred while reading dependency file.", e);
}
}

View File

@@ -0,0 +1,334 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2017 Steve Springett. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.data.nsp.Advisory;
import org.owasp.dependencycheck.data.nsp.NspSearch;
import org.owasp.dependencycheck.data.nsp.SanitizePackage;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import javax.json.Json;
import javax.json.JsonException;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonReader;
import javax.json.JsonString;
import javax.json.JsonValue;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.URLConnectionFailureException;
/**
* Used to analyze Node Package Manager (npm) package.json files via Node
* Security Platform (nsp).
*
* @author Steve Springett
*/
public class NspAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(NspAnalyzer.class);
/**
* The default URL to the NSP check API.
*/
public static final String DEFAULT_URL = "https://api.nodesecurity.io/check";
/**
* The file name to scan.
*/
private static final String PACKAGE_JSON = "package.json";
/**
* Filter that detects files named "package.json".
*/
private static final FileFilter PACKAGE_JSON_FILTER = FileFilterBuilder.newInstance()
.addFilenames(PACKAGE_JSON).build();
/**
* The NSP Searcher.
*/
private NspSearch searcher;
/**
* Returns the FileFilter
*
* @return the FileFilter
*/
@Override
protected FileFilter getFileFilter() {
return PACKAGE_JSON_FILTER;
}
/**
* Initializes the analyzer once before any analysis is performed.
*
* @throws InitializationException if there's an error during initialization
*/
@Override
public void initializeFileTypeAnalyzer() throws InitializationException {
LOGGER.debug("Initializing " + getName());
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_NSP_URL, DEFAULT_URL);
try {
searcher = new NspSearch(new URL(searchUrl));
} catch (MalformedURLException ex) {
setEnabled(false);
throw new InitializationException("The configured URL to Node Security Platform is malformed: " + searchUrl, ex);
}
}
/**
* Returns the name of the analyzer.
*
* @return the name of the analyzer.
*/
@Override
public String getName() {
return "Node Security Platform Analyzer";
}
/**
* Returns the phase that the analyzer is intended to run in.
*
* @return the phase that the analyzer is intended to run in.
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return AnalysisPhase.FINDING_ANALYSIS;
}
/**
* Returns the key used in the properties file to reference the analyzer's
* enabled property.
*
* @return the analyzer's enabled property setting key
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_NSP_PACKAGE_ENABLED;
}
@Override
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
final File file = dependency.getActualFile();
try (JsonReader jsonReader = Json.createReader(FileUtils.openInputStream(file))) {
// Retrieves the contents of package.json from the Dependency
final JsonObject packageJson = jsonReader.readObject();
// Create a sanitized version of the package.json
final JsonObject sanitizedJson = SanitizePackage.sanitize(packageJson);
// Create a new 'package' object that acts as a container for the sanitized package.json
final JsonObjectBuilder builder = Json.createObjectBuilder();
final JsonObject nspPayload = builder.add("package", sanitizedJson).build();
// Submits the package payload to the nsp check service
final List<Advisory> advisories = searcher.submitPackage(nspPayload);
for (Advisory advisory : advisories) {
/*
* Create a new vulnerability out of the advisory returned by nsp.
*/
final Vulnerability vuln = new Vulnerability();
vuln.setCvssScore(advisory.getCvssScore());
vuln.setDescription(advisory.getOverview());
vuln.setName(String.valueOf(advisory.getId()));
vuln.setSource(Vulnerability.Source.NSP);
vuln.addReference(
"NSP",
"Advisory " + advisory.getId() + ": " + advisory.getTitle(),
advisory.getAdvisory()
);
/*
* Create a single vulnerable software object - these do not use CPEs unlike the NVD.
*/
final VulnerableSoftware vs = new VulnerableSoftware();
//vs.setVersion(advisory.getVulnerableVersions());
vs.setUpdate(advisory.getPatchedVersions());
vs.setName(advisory.getModule() + ":" + advisory.getVulnerableVersions());
vuln.setVulnerableSoftware(new HashSet<>(Arrays.asList(vs)));
// Add the vulnerability to package.json
dependency.getVulnerabilities().add(vuln);
}
/*
* Adds evidence about the node package itself, not any of the modules.
*/
final EvidenceCollection productEvidence = dependency.getProductEvidence();
final EvidenceCollection vendorEvidence = dependency.getVendorEvidence();
if (packageJson.containsKey("name")) {
final Object value = packageJson.get("name");
if (value instanceof JsonString) {
final String valueString = ((JsonString) value).getString();
productEvidence.addEvidence(PACKAGE_JSON, "name", valueString, Confidence.HIGHEST);
vendorEvidence.addEvidence(PACKAGE_JSON, "name_project", String.format("%s_project", valueString), Confidence.LOW);
} else {
LOGGER.warn("JSON value not string as expected: {}", value);
}
}
/*
* Processes the dependencies objects in package.json and adds all the modules as related dependencies
*/
if (packageJson.containsKey("dependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("dependencies");
processPackage(dependency, dependencies, "dependencies");
}
if (packageJson.containsKey("devDependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("devDependencies");
processPackage(dependency, dependencies, "devDependencies");
}
if (packageJson.containsKey("optionalDependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("optionalDependencies");
processPackage(dependency, dependencies, "optionalDependencies");
}
if (packageJson.containsKey("peerDependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("peerDependencies");
processPackage(dependency, dependencies, "peerDependencies");
}
if (packageJson.containsKey("bundleDependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("bundleDependencies");
processPackage(dependency, dependencies, "bundleDependencies");
}
if (packageJson.containsKey("bundledDependencies")) {
final JsonObject dependencies = packageJson.getJsonObject("bundledDependencies");
processPackage(dependency, dependencies, "bundledDependencies");
}
/*
* Adds the license if defined in package.json
*/
if (packageJson.containsKey("license")) {
dependency.setLicense(packageJson.getString("license"));
}
/*
* Adds general evidence to about the package.
*/
addToEvidence(packageJson, productEvidence, "description");
addToEvidence(packageJson, vendorEvidence, "author");
addToEvidence(packageJson, dependency.getVersionEvidence(), "version");
dependency.setDisplayFileName(String.format("%s/%s", file.getParentFile().getName(), file.getName()));
} catch (URLConnectionFailureException e) {
this.setEnabled(false);
throw new AnalysisException(e.getMessage(), e);
} catch (IOException e) {
LOGGER.debug("Error reading dependency or connecting to Node Security Platform - check API", e);
this.setEnabled(false);
throw new AnalysisException(e.getMessage(), e);
} catch (JsonException e) {
throw new AnalysisException(String.format("Failed to parse %s file.", file.getPath()), e);
}
}
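The payload submitted to the check endpoint simply wraps the sanitized package.json under a top-level "package" key. A minimal standalone sketch of that wrapping using the same javax.json API (the values are illustrative; the real content comes from the scanned package.json after SanitizePackage.sanitize):

    import javax.json.Json;
    import javax.json.JsonObject;

    public class NspPayloadSketch {
        public static void main(String[] args) {
            // Illustrative package.json content only.
            final JsonObject packageJson = Json.createObjectBuilder()
                    .add("name", "example-app")
                    .add("version", "1.0.0")
                    .add("dependencies", Json.createObjectBuilder().add("lodash", "^4.17.0"))
                    .build();
            // Wrap under "package" as analyzeDependency does before submitting to nsp.
            final JsonObject payload = Json.createObjectBuilder()
                    .add("package", packageJson)
                    .build();
            System.out.println(payload); // {"package":{"name":"example-app",...}}
        }
    }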
/**
* Processes a part of package.json (as defined by a JsonObject) and updates
* the specified dependency with relevant info.
*
* @param dependency the Dependency to update
* @param jsonObject the jsonObject to parse
* @param depType the dependency scope in package.json (e.g. "dependencies" or "devDependencies")
*/
private void processPackage(Dependency dependency, JsonObject jsonObject, String depType) {
for (int i = 0; i < jsonObject.size(); i++) {
for (Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
/*
* Create identifies that include the npm module and version. Since these are defined,
* assign the highest confidence.
*/
final Identifier moduleName = new Identifier("npm", "Module", null, entry.getKey());
moduleName.setConfidence(Confidence.HIGHEST);
String version = "";
if (entry.getValue() != null && entry.getValue().getValueType() == JsonValue.ValueType.STRING) {
version = ((JsonString) entry.getValue()).getString();
}
final Identifier moduleVersion = new Identifier("npm", "Version", null, version);
moduleVersion.setConfidence(Confidence.HIGHEST);
final Identifier moduleDepType = new Identifier("npm", "Scope", null, depType);
moduleDepType.setConfidence(Confidence.HIGHEST);
/*
* Create related dependencies for each module defined in package.json. The path to the related
* dependency will not actually exist but needs to be unique (due to the use of Set in Dependency).
* The use of related dependencies is a way to specify the actual software BOM in package.json.
*/
Dependency nodeModule = new Dependency(new File(dependency.getActualFile() + "#" + entry.getKey()), true);
nodeModule.setDisplayFileName(entry.getKey());
nodeModule.setIdentifiers(new HashSet<>(Arrays.asList(moduleName, moduleVersion, moduleDepType)));
dependency.addRelatedDependency(nodeModule);
}
}
}
/**
* Adds information to an evidence collection from the node json
* configuration.
*
* @param json information from node.js
* @param collection a set of evidence about a dependency
* @param key the key to obtain the data from the json information
*/
private void addToEvidence(JsonObject json, EvidenceCollection collection, String key) {
if (json.containsKey(key)) {
final JsonValue value = json.get(key);
if (value instanceof JsonString) {
collection.addEvidence(PACKAGE_JSON, key, ((JsonString) value).getString(), Confidence.HIGHEST);
} else if (value instanceof JsonObject) {
final JsonObject jsonObject = (JsonObject) value;
for (final Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
final String property = entry.getKey();
final JsonValue subValue = entry.getValue();
if (subValue instanceof JsonString) {
collection.addEvidence(PACKAGE_JSON,
String.format("%s.%s", key, property),
((JsonString) subValue).getString(),
Confidence.HIGHEST);
} else {
LOGGER.warn("JSON sub-value not string as expected: {}", subValue);
}
}
} else {
LOGGER.warn("JSON value not string or JSON object as expected: {}", value);
}
}
}
}

View File

@@ -33,7 +33,6 @@ import org.slf4j.LoggerFactory;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import org.owasp.dependencycheck.exception.InitializationException;
/**
@@ -127,27 +126,15 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException when there's an exception during analysis
*/
@Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException {
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
LOGGER.debug("Checking Nuspec file {}", dependency);
try {
final NuspecParser parser = new XPathNuspecParser();
NugetPackage np = null;
FileInputStream fis = null;
try {
fis = new FileInputStream(dependency.getActualFilePath());
try (FileInputStream fis = new FileInputStream(dependency.getActualFilePath())) {
np = parser.parse(fis);
} catch (NuspecParseException ex) {
} catch (NuspecParseException | FileNotFoundException ex) {
throw new AnalysisException(ex);
} catch (FileNotFoundException ex) {
throw new AnalysisException(ex);
} finally {
if (fis != null) {
try {
fis.close();
} catch (IOException e) {
LOGGER.debug("Error closing input stream");
}
}
}
if (np.getOwners() != null) {

View File

@@ -28,23 +28,23 @@ import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.LoggerFactory;
/**
* NvdCveAnalyzer is a utility class that takes a project dependency and attempts to discern if there is an associated
* CVEs. It uses the the identifiers found by other analyzers to lookup the CVE data.
* NvdCveAnalyzer is a utility class that takes a project dependency and
* attempts to discern if there are any associated CVEs. It uses the
* identifiers found by other analyzers to look up the CVE data.
*
* @author Jeremy Long
*/
public class NvdCveAnalyzer extends AbstractAnalyzer {
/**
* The Logger for use throughout the class
*/
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(NvdCveAnalyzer.class);
/**
* The maximum number of query results to return.
*/
static final int MAX_QUERY_RESULTS = 100;
/**
* The CVE Index.
*/
@@ -56,18 +56,18 @@ public class NvdCveAnalyzer extends AbstractAnalyzer {
* @throws SQLException thrown when there is a SQL Exception
* @throws IOException thrown when there is an IO Exception
* @throws DatabaseException thrown when there is a database exceptions
* @throws ClassNotFoundException thrown if the h2 database driver cannot be loaded
* @throws ClassNotFoundException thrown if the h2 database driver cannot be
* loaded
*/
public void open() throws SQLException, IOException, DatabaseException, ClassNotFoundException {
cveDB = new CveDB();
cveDB.open();
cveDB = CveDB.getInstance();
}
/**
* Closes the data source.
*/
@Override
public void close() {
public void closeAnalyzer() {
cveDB.close();
cveDB = null;
}
@@ -82,27 +82,16 @@ public class NvdCveAnalyzer extends AbstractAnalyzer {
}
/**
* Ensures that the CVE Database is closed.
*
* @throws Throwable an exception raised by this method
*/
@Override
protected void finalize() throws Throwable {
super.finalize();
if (isOpen()) {
close();
}
}
/**
* Analyzes a dependency and attempts to determine if there are any CPE identifiers for this dependency.
* Analyzes a dependency and attempts to determine if there are any CPE
* identifiers for this dependency.
*
* @param dependency The Dependency to analyze
* @param engine The analysis engine
* @throws AnalysisException thrown if there is an issue analyzing the dependency
* @throws AnalysisException thrown if there is an issue analyzing the
* dependency
*/
@Override
public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
for (Identifier id : dependency.getIdentifiers()) {
if ("cpe".equals(id.getType())) {
try {
@@ -148,12 +137,24 @@ public class NvdCveAnalyzer extends AbstractAnalyzer {
}
/**
* Opens the database used to gather NVD CVE data.
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @throws InitializationException is thrown if there is an issue opening the index.
* @return the key for the analyzer's enabled property
*/
@Override
public void initialize() throws InitializationException {
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_NVD_CVE_ENABLED;
}
/**
* Opens the database used to gather NVD CVE data.
*
* @throws InitializationException is thrown if there is an issue opening
* the index.
*/
@Override
public void initializeAnalyzer() throws InitializationException {
try {
this.open();
} catch (SQLException ex) {

View File

@@ -102,7 +102,7 @@ public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
* @param openSSLVersionConstant The open SSL version
* @return the version of openssl
*/
static String getOpenSSLVersion(long openSSLVersionConstant) {
protected static String getOpenSSLVersion(long openSSLVersionConstant) {
final long major = openSSLVersionConstant >>> MAJOR_OFFSET;
final long minor = (openSSLVersionConstant & MINOR_MASK) >>> MINOR_OFFSET;
final long fix = (openSSLVersionConstant & FIX_MASK) >>> FIX_OFFSET;
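As a worked example of this decoding, OpenSSL lays its version constant out as MNNFFPPS nibbles (major, minor, fix, patch, status), so 0x1000207fL corresponds to 1.0.2g. A minimal standalone sketch assuming that standard layout (the shift amounts here are illustrative, not the class's MAJOR_OFFSET/MINOR_MASK constants):

    // Sketch only: decode an OpenSSL version constant laid out as MNNFFPPS.
    static String decodeOpenSslVersion(long v) {
        final long major = (v >>> 28) & 0xFL;
        final long minor = (v >>> 20) & 0xFFL;
        final long fix = (v >>> 12) & 0xFFL;
        final long patch = (v >>> 4) & 0xFFL;
        final String letter = patch == 0 ? "" : String.valueOf((char) ('a' + patch - 1));
        return major + "." + minor + "." + fix + letter; // 0x1000207fL -> "1.0.2g"
    }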
@@ -162,7 +162,7 @@ public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
* analyzing the dependency
*/
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
final File file = dependency.getActualFile();
final String parentName = file.getParentFile().getName();

View File

@@ -181,7 +181,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
}
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
final File actualFile = dependency.getActualFile();
if (WHL_FILTER.accept(actualFile)) {
@@ -273,7 +273,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
* Deletes any files extracted from the Wheel during analysis.
*/
@Override
public void close() {
public void closeAnalyzer() {
if (tempFileLocation != null && tempFileLocation.exists()) {
LOGGER.debug("Attempting to delete temporary files");
final boolean success = FileUtils.delete(tempFileLocation);
@@ -360,22 +360,12 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
if (null == manifest) {
LOGGER.debug("Manifest file not found.");
} else {
InputStream in = null;
try {
in = new BufferedInputStream(new FileInputStream(manifest));
try (InputStream in = new BufferedInputStream(new FileInputStream(manifest))) {
result.load(in);
} catch (MessagingException e) {
} catch (MessagingException | FileNotFoundException e) {
LOGGER.warn(e.getMessage(), e);
} catch (FileNotFoundException e) {
LOGGER.warn(e.getMessage(), e);
} finally {
if (in != null) {
try {
in.close();
} catch (IOException ex) {
LOGGER.debug("failed to close input stream", ex);
}
}
} catch (IOException ex) {
LOGGER.warn(ex.getMessage(), ex);
}
}
return result;

View File

@@ -171,7 +171,7 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
* analyzing the dependency
*/
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
final File file = dependency.getActualFile();
final File parent = file.getParentFile();

View File

@@ -113,9 +113,17 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
if (!folder.isDirectory()) {
throw new AnalysisException(String.format("%s should have been a directory.", folder.getAbsolutePath()));
}
final List<String> args = new ArrayList<String>();
final List<String> args = new ArrayList<>();
final String bundleAuditPath = Settings.getString(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH);
args.add(null == bundleAuditPath ? "bundle-audit" : bundleAuditPath);
File bundleAudit = null;
if (bundleAuditPath != null) {
bundleAudit = new File(bundleAuditPath);
if (!bundleAudit.isFile()) {
LOGGER.warn("Supplied `bundleAudit` path is incorrect: " + bundleAuditPath);
bundleAudit = null;
}
}
args.add(bundleAudit != null && bundleAudit.isFile() ? bundleAudit.getAbsolutePath() : "bundle-audit");
args.add("check");
args.add("--verbose");
final ProcessBuilder builder = new ProcessBuilder(args);
@@ -124,7 +132,8 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.info("Launching: " + args + " from " + folder);
return builder.start();
} catch (IOException ioe) {
throw new AnalysisException("bundle-audit failure", ioe);
throw new AnalysisException("bundle-audit initialization failure; this error can be ignored if you are not analyzing Ruby. "
+ "Otherwise ensure that bundle-audit is installed and the path to bundle audit is correctly specified", ioe);
}
}
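The argument list built above boils down to running bundle-audit check --verbose (or the configured bundle-audit path) from the folder containing the Gemfile.lock. A minimal standalone sketch of that default invocation, assuming bundle-audit is on the PATH and using an illustrative working directory:

    import java.io.File;
    import java.io.IOException;
    import java.util.Arrays;

    public class BundleAuditLaunchSketch {
        public static void main(String[] args) throws IOException {
            // Mirrors the analyzer's default command when no custom path is configured.
            final ProcessBuilder builder = new ProcessBuilder(
                    Arrays.asList("bundle-audit", "check", "--verbose"));
            builder.directory(new File("/path/to/project")); // folder containing Gemfile.lock
            builder.start();
        }
    }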
@@ -137,8 +146,7 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
@Override
public void initializeFileTypeAnalyzer() throws InitializationException {
try {
cvedb = new CveDB();
cvedb.open();
cvedb = CveDB.getInstance();
} catch (DatabaseException ex) {
LOGGER.warn("Exception opening the database");
LOGGER.debug("error", ex);
@@ -152,8 +160,6 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
} catch (AnalysisException ae) {
setEnabled(false);
cvedb.close();
cvedb = null;
final String msg = String.format("Exception from bundle-audit process: %s. Disabling %s", ae.getCause(), ANALYZER_NAME);
throw new InitializationException(msg, ae);
} catch (IOException ex) {
@@ -166,7 +172,7 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
exitValue = process.waitFor();
} catch (InterruptedException ex) {
setEnabled(false);
final String msg = String.format("Bundle-audit process was interupted. Disabling %s", ANALYZER_NAME);
final String msg = String.format("Bundle-audit process was interrupted. Disabling %s", ANALYZER_NAME);
throw new InitializationException(msg);
}
if (0 == exitValue) {
@@ -174,9 +180,7 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
final String msg = String.format("Unexpected exit code from bundle-audit process. Disabling %s: %s", ANALYZER_NAME, exitValue);
throw new InitializationException(msg);
} else {
BufferedReader reader = null;
try {
reader = new BufferedReader(new InputStreamReader(process.getErrorStream(), "UTF-8"));
try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream(), "UTF-8"))) {
if (!reader.ready()) {
LOGGER.warn("Bundle-audit error stream unexpectedly not ready. Disabling " + ANALYZER_NAME);
setEnabled(false);
@@ -195,14 +199,6 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
} catch (IOException ex) {
setEnabled(false);
throw new InitializationException("Unable to read bundle-audit output.", ex);
} finally {
if (null != reader) {
try {
reader.close();
} catch (IOException ex) {
LOGGER.debug("Error closing reader", ex);
}
}
}
}
@@ -212,6 +208,17 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
}
}
/**
* Closes the data source.
*/
@Override
public void closeAnalyzer() {
if (cvedb != null) {
cvedb.close();
cvedb = null;
}
}
/**
* Returns the name of the analyzer.
*
@@ -244,7 +251,7 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
}
/**
* If {@link #analyzeFileType(Dependency, Engine)} is called, then we have
* If {@link #analyzeDependency(Dependency, Engine)} is called, then we have
* successfully initialized, and it will be necessary to disable
* {@link RubyGemspecAnalyzer}.
*/
@@ -258,7 +265,7 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException thrown if there is an analysis exception.
*/
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
if (needToDisableGemspecAnalyzer) {
boolean failed = true;
@@ -290,35 +297,19 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
final String msg = String.format("Unexpected exit code from bundle-audit process; exit code: %s", exitValue);
throw new AnalysisException(msg);
}
BufferedReader rdr = null;
BufferedReader errReader = null;
try {
errReader = new BufferedReader(new InputStreamReader(process.getErrorStream(), "UTF-8"));
while (errReader.ready()) {
final String error = errReader.readLine();
LOGGER.warn(error);
try (BufferedReader errReader = new BufferedReader(new InputStreamReader(process.getErrorStream(), "UTF-8"))) {
while (errReader.ready()) {
final String error = errReader.readLine();
LOGGER.warn(error);
}
}
try (BufferedReader rdr = new BufferedReader(new InputStreamReader(process.getInputStream(), "UTF-8"))) {
processBundlerAuditOutput(dependency, engine, rdr);
}
rdr = new BufferedReader(new InputStreamReader(process.getInputStream(), "UTF-8"));
processBundlerAuditOutput(dependency, engine, rdr);
} catch (IOException ioe) {
LOGGER.warn("bundle-audit failure", ioe);
} finally {
if (errReader != null) {
try {
errReader.close();
} catch (IOException ioe) {
LOGGER.warn("bundle-audit close failure", ioe);
}
}
if (null != rdr) {
try {
rdr.close();
} catch (IOException ioe) {
LOGGER.warn("bundle-audit close failure", ioe);
}
}
}
}
/**
@@ -336,7 +327,7 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
Dependency dependency = null;
Vulnerability vulnerability = null;
String gem = null;
final Map<String, Dependency> map = new HashMap<String, Dependency>();
final Map<String, Dependency> map = new HashMap<>();
boolean appendToDescription = false;
while (rdr.ready()) {
final String nextLine = rdr.readLine();
@@ -365,10 +356,8 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
+ "Title link may not work. CPE below is guessed. CVSS score is estimated (-1.0 "
+ " indicates unknown). See link below for full details. *** ");
}
} else if (appendToDescription) {
if (null != vulnerability) {
vulnerability.setDescription(vulnerability.getDescription() + nextLine + "\n");
}
} else if (appendToDescription && null != vulnerability) {
vulnerability.setDescription(vulnerability.getDescription() + nextLine + "\n");
}
}
}
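The hunks above replace hand-rolled finally/close blocks with try-with-resources for both the error and output streams of the bundle-audit process. A minimal standalone sketch of the same pattern follows; the class and method names here are illustrative, not part of the project.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

final class ProcessStreamSketch {
    /**
     * Drains a process' error stream; the reader is closed automatically,
     * even if readLine() throws, because it is declared in the try header.
     */
    static void drainErrorStream(Process process) throws IOException {
        try (BufferedReader err = new BufferedReader(
                new InputStreamReader(process.getErrorStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = err.readLine()) != null) {
                System.err.println(line); // the analyzer logs these lines via SLF4J instead
            }
        }
    }
}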

View File

@@ -27,8 +27,9 @@ import org.owasp.dependencycheck.dependency.Dependency;
/**
* This analyzer accepts the fully resolved .gemspec created by the Ruby bundler
* (http://bundler.io) for better evidence results. It also tries to resolve the
* dependency packagePath to where the gem is actually installed. Then during {@link org.owasp.dependencycheck.analyzer.AnalysisPhase#PRE_FINDING_ANALYSIS}
* {@link DependencyBundlingAnalyzer} will merge two .gemspec dependencies
* dependency packagePath to where the gem is actually installed. Then during
* the {@link org.owasp.dependencycheck.analyzer.AnalysisPhase#PRE_FINDING_ANALYSIS}
* {@link DependencyMergingAnalyzer} will merge two .gemspec dependencies
* together if <code>Dependency.getPackagePath()</code> are the same.
*
* Ruby bundler creates new .gemspec files under a folder called
@@ -39,8 +40,8 @@ import org.owasp.dependencycheck.dependency.Dependency;
* can't be used for evidences.
*
* Note this analyzer share the same
* {@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_RUBY_GEMSPEC_ENABLED} as
* {@link RubyGemspecAnalyzer}, so it will enabled/disabled with
* {@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_RUBY_GEMSPEC_ENABLED}
* as {@link RubyGemspecAnalyzer}, so it will enabled/disabled with
* {@link RubyGemspecAnalyzer}.
*
* @author Bianca Jiang (https://twitter.com/biancajiang)
@@ -93,9 +94,9 @@ public class RubyBundlerAnalyzer extends RubyGemspecAnalyzer {
}
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
super.analyzeFileType(dependency, engine);
super.analyzeDependency(dependency, engine);
//find the corresponding gem folder for this .gemspec stub by "bundle install --deployment"
final File gemspecFile = dependency.getActualFile();

View File

@@ -130,7 +130,7 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
private static final Pattern GEMSPEC_BLOCK_INIT = Pattern.compile("Gem::Specification\\.new\\s+?do\\s+?\\|(.+?)\\|");
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
String contents;
try {

View File

@@ -116,7 +116,7 @@ public class SwiftPackageManagerAnalyzer extends AbstractFileTypeAnalyzer {
}
@Override
protected void analyzeFileType(Dependency dependency, Engine engine)
protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
String contents;

View File

@@ -0,0 +1,167 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2017 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import java.util.Iterator;
import java.util.Objects;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This analyzer attempts to filter out erroneous version numbers collected.
* Initially, this will focus on JAR files that contain a POM version number
 * that matches the file name - if identified, all other version information will
* be removed.
*
* @author Jeremy Long
*/
public class VersionFilterAnalyzer extends AbstractAnalyzer {
//<editor-fold defaultstate="collapsed" desc="Constants">
/**
* Evidence source.
*/
private static final String FILE = "file";
/**
* Evidence source.
*/
private static final String POM = "pom";
/**
* Evidence source.
*/
private static final String NEXUS = "nexus";
/**
* Evidence source.
*/
private static final String CENTRAL = "central";
/**
* Evidence source.
*/
private static final String MANIFEST = "Manifest";
/**
* Evidence name.
*/
private static final String VERSION = "version";
/**
* Evidence name.
*/
private static final String IMPLEMENTATION_VERSION = "Implementation-Version";
/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "Version Filter Analyzer";
/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.POST_INFORMATION_COLLECTION;
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Standard implementation of Analyzer">
/**
* Returns the name of the analyzer.
*
* @return the name of the analyzer.
*/
@Override
public String getName() {
return ANALYZER_NAME;
}
/**
* Returns the phase that the analyzer is intended to run in.
*
* @return the phase that the analyzer is intended to run in.
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}
/**
* Returns the setting key to determine if the analyzer is enabled.
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_VERSION_FILTER_ENABLED;
}
//</editor-fold>
/**
* The Logger for use throughout the class
*/
private static final Logger LOGGER = LoggerFactory.getLogger(VersionFilterAnalyzer.class);
/**
 * Analyzes the version evidence collected for a dependency and, when two or
 * more of the version sources agree, filters out the extraneous entries.
*
* @param dependency The dependency being analyzed
* @param engine The scanning engine
* @throws AnalysisException is thrown if there is an exception analyzing
* the dependency.
*/
@Override
protected synchronized void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
String fileVersion = null;
String pomVersion = null;
String manifestVersion = null;
for (Evidence e : dependency.getVersionEvidence()) {
if (FILE.equals(e.getSource()) && VERSION.equals(e.getName())) {
fileVersion = e.getValue(Boolean.FALSE);
} else if ((NEXUS.equals(e.getSource()) || CENTRAL.equals(e.getSource())
|| POM.equals(e.getSource())) && VERSION.equals(e.getName())) {
pomVersion = e.getValue(Boolean.FALSE);
} else if (MANIFEST.equals(e.getSource()) && IMPLEMENTATION_VERSION.equals(e.getName())) {
manifestVersion = e.getValue(Boolean.FALSE);
}
}
//ensure that at least two of the version values are non-null
if (((fileVersion == null ? 0 : 1) + (pomVersion == null ? 0 : 1) + (manifestVersion == null ? 0 : 1)) > 1) {
final DependencyVersion dvFile = new DependencyVersion(fileVersion);
final DependencyVersion dvPom = new DependencyVersion(pomVersion);
final DependencyVersion dvManifest = new DependencyVersion(manifestVersion);
final boolean fileMatch = Objects.equals(dvFile, dvPom) || Objects.equals(dvFile, dvManifest);
final boolean manifestMatch = Objects.equals(dvManifest, dvPom) || Objects.equals(dvManifest, dvFile);
final boolean pomMatch = Objects.equals(dvPom, dvFile) || Objects.equals(dvPom, dvManifest);
if (fileMatch || manifestMatch || pomMatch) {
LOGGER.debug("filtering evidence from {}", dependency.getFileName());
final EvidenceCollection versionEvidence = dependency.getVersionEvidence();
final Iterator<Evidence> itr = versionEvidence.iterator();
while (itr.hasNext()) {
final Evidence e = itr.next();
if (!(pomMatch && VERSION.equals(e.getName())
&& (NEXUS.equals(e.getSource()) || CENTRAL.equals(e.getSource()) || POM.equals(e.getSource())))
&& !(fileMatch && VERSION.equals(e.getName()) && FILE.equals(e.getSource()))
&& !(manifestMatch && MANIFEST.equals(e.getSource()) && IMPLEMENTATION_VERSION.equals(e.getName()))) {
itr.remove();
}
}
}
}
}
}
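The new VersionFilterAnalyzer keeps version evidence only when at least two of the file, POM, and manifest sources agree. A condensed sketch of that decision is below; it uses plain string comparison instead of the DependencyVersion normalization the analyzer actually applies, and the class and method names are illustrative.

final class VersionAgreementSketch {
    /** True when at least two of the three version values are present and equal. */
    static boolean twoSourcesAgree(String fileVersion, String pomVersion, String manifestVersion) {
        // Mirror of the analyzer's "at least two not null" pre-check.
        final int present = (fileVersion == null ? 0 : 1)
                + (pomVersion == null ? 0 : 1)
                + (manifestVersion == null ? 0 : 1);
        if (present < 2) {
            return false; // nothing to cross-check, so no evidence would be filtered
        }
        final boolean fileMatch = fileVersion != null
                && (fileVersion.equals(pomVersion) || fileVersion.equals(manifestVersion));
        final boolean pomManifestMatch = pomVersion != null && pomVersion.equals(manifestVersion);
        return fileMatch || pomManifestMatch;
    }
}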

View File

@@ -20,11 +20,13 @@ package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.xml.suppression.SuppressionRule;
/**
* The suppression analyzer processes an externally defined XML document that complies with the suppressions.xsd schema.
* Any identified Vulnerability entries within the dependencies that match will be removed.
* The suppression analyzer processes an externally defined XML document that
* complies with the suppressions.xsd schema. Any identified Vulnerability
* entries within the dependencies that match will be removed.
*
* @author Jeremy Long
*/
@@ -59,10 +61,29 @@ public class VulnerabilitySuppressionAnalyzer extends AbstractSuppressionAnalyze
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_VULNERABILITY_SUPPRESSION_ENABLED;
}
//</editor-fold>
/**
* Analyzes a dependency's vulnerabilities against the configured CVE
* suppressions.
*
* @param dependency the dependency being analyzed
* @param engine a reference to the engine orchestrating the analysis
* @throws AnalysisException thrown if there is an error during analysis
*/
@Override
public void analyze(final Dependency dependency, final Engine engine) throws AnalysisException {
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (getRules() == null || getRules().size() <= 0) {
return;

View File

@@ -24,17 +24,20 @@ import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.owasp.dependencycheck.data.nexus.MavenArtifact;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.URLConnectionFactory;
import org.owasp.dependencycheck.utils.XmlUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* Class of methods to search Maven Central via Central.
@@ -110,16 +113,14 @@ public class CentralSearch {
if (conn.getResponseCode() == 200) {
boolean missing = false;
try {
final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
final DocumentBuilder builder = factory.newDocumentBuilder();
final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
final Document doc = builder.parse(conn.getInputStream());
final XPath xpath = XPathFactory.newInstance().newXPath();
final String numFound = xpath.evaluate("/response/result/@numFound", doc);
if ("0".equals(numFound)) {
missing = true;
} else {
result = new ArrayList<MavenArtifact>();
result = new ArrayList<>();
final NodeList docs = (NodeList) xpath.evaluate("/response/result/doc", doc, XPathConstants.NODESET);
for (int i = 0; i < docs.getLength(); i++) {
final String g = xpath.evaluate("./str[@name='g']", docs.item(i));
@@ -127,11 +128,11 @@ public class CentralSearch {
final String a = xpath.evaluate("./str[@name='a']", docs.item(i));
LOGGER.trace("ArtifactId: {}", a);
final String v = xpath.evaluate("./str[@name='v']", docs.item(i));
NodeList atts = (NodeList) xpath.evaluate("./arr[@name='ec']/str", docs.item(i), XPathConstants.NODESET);
NodeList attributes = (NodeList) xpath.evaluate("./arr[@name='ec']/str", docs.item(i), XPathConstants.NODESET);
boolean pomAvailable = false;
boolean jarAvailable = false;
for (int x = 0; x < atts.getLength(); x++) {
final String tmp = xpath.evaluate(".", atts.item(x));
for (int x = 0; x < attributes.getLength(); x++) {
final String tmp = xpath.evaluate(".", attributes.item(x));
if (".pom".equals(tmp)) {
pomAvailable = true;
} else if (".jar".equals(tmp)) {
@@ -139,10 +140,10 @@ public class CentralSearch {
}
}
atts = (NodeList) xpath.evaluate("./arr[@name='tags']/str", docs.item(i), XPathConstants.NODESET);
attributes = (NodeList) xpath.evaluate("./arr[@name='tags']/str", docs.item(i), XPathConstants.NODESET);
boolean useHTTPS = false;
for (int x = 0; x < atts.getLength(); x++) {
final String tmp = xpath.evaluate(".", atts.item(x));
for (int x = 0; x < attributes.getLength(); x++) {
final String tmp = xpath.evaluate(".", attributes.item(x));
if ("https".equals(tmp)) {
useHTTPS = true;
}
@@ -151,7 +152,7 @@ public class CentralSearch {
result.add(new MavenArtifact(g, a, v, jarAvailable, pomAvailable, useHTTPS));
}
}
} catch (Throwable e) {
} catch (ParserConfigurationException | IOException | SAXException | XPathExpressionException e) {
// Anything else is jacked up XML stuff that we really can't recover from well
throw new IOException(e.getMessage(), e);
}
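Several of the changes above swap the inline DocumentBuilderFactory setup for XmlUtils.buildSecureDocumentBuilder(). The body of that helper is not part of this diff; the sketch below shows what it presumably configures, mirroring the inline code it replaces, and the real helper may disable additional features such as external entity resolution.

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

final class SecureParserSketch {
    /** Builds a DocumentBuilder that rejects DOCTYPE declarations, guarding against XXE. */
    static DocumentBuilder buildSecureDocumentBuilder() throws ParserConfigurationException {
        final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        // Same hardening the previous inline code applied before parsing remote XML.
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        return factory.newDocumentBuilder();
    }
}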

View File

@@ -18,7 +18,7 @@
package org.owasp.dependencycheck.data.composer;
/**
* Reperesents a dependency (GAV, right now) from a Composer dependency.
* Represents a dependency (GAV, right now) from a Composer dependency.
*
* @author colezlaw
*/

View File

@@ -42,11 +42,6 @@ public class ComposerLockParser {
*/
private final JsonReader jsonReader;
/**
* The input stream we'll read
*/
private final InputStream inputStream; // NOPMD - it gets set in the constructor, read later
/**
* The List of ComposerDependencies found
*/
@@ -58,15 +53,14 @@ public class ComposerLockParser {
private static final Logger LOGGER = LoggerFactory.getLogger(ComposerLockParser.class);
/**
* Createas a ComposerLockParser from a JsonReader and an InputStream.
* Creates a ComposerLockParser from a JsonReader and an InputStream.
*
* @param inputStream the InputStream to parse
*/
public ComposerLockParser(InputStream inputStream) {
LOGGER.info("Creating a ComposerLockParser");
this.inputStream = inputStream;
this.jsonReader = Json.createReader(inputStream);
this.composerDependencies = new ArrayList<ComposerDependency>();
this.composerDependencies = new ArrayList<>();
}
/**
@@ -87,7 +81,7 @@ public class ComposerLockParser {
final String group = groupName.substring(0, groupName.indexOf('/'));
final String project = groupName.substring(groupName.indexOf('/') + 1);
String version = pkg.getString("version");
// Some version nubmers begin with v - which doesn't end up matching CPE's
// Some version numbers begin with v - which doesn't end up matching CPE's
if (version.startsWith("v")) {
version = version.substring(1);
}

View File

@@ -62,21 +62,6 @@ public final class CpeMemoryIndex {
* singleton instance.
*/
private static final CpeMemoryIndex INSTANCE = new CpeMemoryIndex();
/**
* private constructor for singleton.
*/
private CpeMemoryIndex() {
}
/**
* Gets the singleton instance of the CpeMemoryIndex.
*
* @return the instance of the CpeMemoryIndex
*/
public static CpeMemoryIndex getInstance() {
return INSTANCE;
}
/**
* The in memory Lucene index.
*/
@@ -105,6 +90,25 @@ public final class CpeMemoryIndex {
* The search field analyzer for the vendor field.
*/
private SearchFieldAnalyzer vendorFieldAnalyzer;
/**
* A flag indicating whether or not the index is open.
*/
private boolean openState = false;
/**
* private constructor for singleton.
*/
private CpeMemoryIndex() {
}
/**
* Gets the singleton instance of the CpeMemoryIndex.
*
* @return the instance of the CpeMemoryIndex
*/
public static CpeMemoryIndex getInstance() {
return INSTANCE;
}
/**
* Creates and loads data into an in memory index.
@@ -129,10 +133,6 @@ public final class CpeMemoryIndex {
}
}
}
/**
* A flag indicating whether or not the index is open.
*/
private boolean openState = false;
/**
* returns whether or not the index is open.
@@ -149,7 +149,7 @@ public final class CpeMemoryIndex {
* @return the CPE Analyzer.
*/
private Analyzer createSearchingAnalyzer() {
final Map<String, Analyzer> fieldAnalyzers = new HashMap<String, Analyzer>();
final Map<String, Analyzer> fieldAnalyzers = new HashMap<>();
fieldAnalyzers.put(Fields.DOCUMENT_KEY, new KeywordAnalyzer());
productFieldAnalyzer = new SearchFieldAnalyzer(LuceneUtils.CURRENT_VERSION);
vendorFieldAnalyzer = new SearchFieldAnalyzer(LuceneUtils.CURRENT_VERSION);
@@ -191,57 +191,35 @@ public final class CpeMemoryIndex {
* @throws IndexException thrown if there is an issue creating the index
*/
private void buildIndex(CveDB cve) throws IndexException {
Analyzer analyzer = null;
IndexWriter indexWriter = null;
try {
analyzer = createSearchingAnalyzer();
final IndexWriterConfig conf = new IndexWriterConfig(LuceneUtils.CURRENT_VERSION, analyzer);
indexWriter = new IndexWriter(index, conf);
try {
// Tip: reuse the Document and Fields for performance...
// See "Re-use Document and Field instances" from
// http://wiki.apache.org/lucene-java/ImproveIndexingSpeed
final Document doc = new Document();
final Field v = new TextField(Fields.VENDOR, Fields.VENDOR, Field.Store.YES);
final Field p = new TextField(Fields.PRODUCT, Fields.PRODUCT, Field.Store.YES);
doc.add(v);
doc.add(p);
try (Analyzer analyzer = createSearchingAnalyzer();
IndexWriter indexWriter = new IndexWriter(index, new IndexWriterConfig(LuceneUtils.CURRENT_VERSION, analyzer))) {
// Tip: reuse the Document and Fields for performance...
// See "Re-use Document and Field instances" from
// http://wiki.apache.org/lucene-java/ImproveIndexingSpeed
final Document doc = new Document();
final Field v = new TextField(Fields.VENDOR, Fields.VENDOR, Field.Store.YES);
final Field p = new TextField(Fields.PRODUCT, Fields.PRODUCT, Field.Store.YES);
doc.add(v);
doc.add(p);
final Set<Pair<String, String>> data = cve.getVendorProductList();
for (Pair<String, String> pair : data) {
//todo figure out why there are null products
if (pair.getLeft() != null && pair.getRight() != null) {
v.setStringValue(pair.getLeft());
p.setStringValue(pair.getRight());
indexWriter.addDocument(doc);
resetFieldAnalyzer();
}
final Set<Pair<String, String>> data = cve.getVendorProductList();
for (Pair<String, String> pair : data) {
if (pair.getLeft() != null && pair.getRight() != null) {
v.setStringValue(pair.getLeft());
p.setStringValue(pair.getRight());
indexWriter.addDocument(doc);
resetFieldAnalyzer();
}
} catch (DatabaseException ex) {
LOGGER.debug("", ex);
throw new IndexException("Error reading CPE data", ex);
}
indexWriter.commit();
indexWriter.close(true);
} catch (DatabaseException ex) {
LOGGER.debug("", ex);
throw new IndexException("Error reading CPE data", ex);
} catch (CorruptIndexException ex) {
throw new IndexException("Unable to close an in-memory index", ex);
} catch (IOException ex) {
throw new IndexException("Unable to close an in-memory index", ex);
} finally {
if (indexWriter != null) {
try {
try {
indexWriter.commit();
} finally {
indexWriter.close(true);
}
} catch (CorruptIndexException ex) {
throw new IndexException("Unable to close an in-memory index", ex);
} catch (IOException ex) {
throw new IndexException("Unable to close an in-memory index", ex);
}
if (analyzer != null) {
analyzer.close();
}
}
}
}
@@ -267,11 +245,12 @@ public final class CpeMemoryIndex {
* @throws IOException is thrown if there is an issue with the underlying
* Index
*/
public TopDocs search(String searchString, int maxQueryResults) throws ParseException, IOException {
public synchronized TopDocs search(String searchString, int maxQueryResults) throws ParseException, IOException {
if (searchString == null || searchString.trim().isEmpty()) {
throw new ParseException("Query is null or empty");
}
LOGGER.debug(searchString);
resetFieldAnalyzer();
final Query query = queryParser.parse(searchString);
return search(query, maxQueryResults);
}
@@ -285,7 +264,7 @@ public final class CpeMemoryIndex {
* @throws CorruptIndexException thrown if the Index is corrupt
* @throws IOException thrown if there is an IOException
*/
public TopDocs search(Query query, int maxQueryResults) throws CorruptIndexException, IOException {
public synchronized TopDocs search(Query query, int maxQueryResults) throws CorruptIndexException, IOException {
resetFieldAnalyzer();
return indexSearcher.search(query, maxQueryResults);
}

View File

@@ -20,6 +20,7 @@ package org.owasp.dependencycheck.data.cpe;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import org.apache.commons.lang3.StringUtils;
/**
* A CPE entry containing the name, vendor, product, and version.
@@ -31,7 +32,7 @@ public class IndexEntry implements Serializable {
/**
* the serial version uid.
*/
static final long serialVersionUID = 8011924485946326934L;
private static final long serialVersionUID = 8011924485946326934L;
/**
* The vendor name.
*/
@@ -143,7 +144,8 @@ public class IndexEntry implements Serializable {
*/
public void parseName(String cpeName) throws UnsupportedEncodingException {
if (cpeName != null && cpeName.length() > 7) {
final String[] data = cpeName.substring(7).split(":");
final String cpeNameWithoutPrefix = cpeName.substring(7);
final String[] data = StringUtils.split(cpeNameWithoutPrefix, ':');
if (data.length >= 1) {
vendor = URLDecoder.decode(data[0].replace("+", "%2B"), "UTF-8");
if (data.length >= 2) {
@@ -172,10 +174,7 @@ public class IndexEntry implements Serializable {
if ((this.vendor == null) ? (other.vendor != null) : !this.vendor.equals(other.vendor)) {
return false;
}
if ((this.product == null) ? (other.product != null) : !this.product.equals(other.product)) {
return false;
}
return true;
return !((this.product == null) ? (other.product != null) : !this.product.equals(other.product));
}
/**

View File

@@ -54,12 +54,10 @@ public final class CweDB {
* @return a HashMap of CWE data
*/
private static Map<String, String> loadData() {
ObjectInputStream oin = null;
try {
final String filePath = "data/cwe.hashmap.serialized";
final InputStream input = CweDB.class.getClassLoader().getResourceAsStream(filePath);
oin = new ObjectInputStream(input);
@SuppressWarnings("unchecked")
final String filePath = "data/cwe.hashmap.serialized";
try (InputStream input = CweDB.class.getClassLoader().getResourceAsStream(filePath);
ObjectInputStream oin = new ObjectInputStream(input)) {
final Map<String, String> ret = (HashMap<String, String>) oin.readObject();
return ret;
} catch (ClassNotFoundException ex) {
@@ -68,14 +66,6 @@ public final class CweDB {
} catch (IOException ex) {
LOGGER.warn("Unable to load CWE data due to an IO Error. This should not be an issue.");
LOGGER.debug("", ex);
} finally {
if (oin != null) {
try {
oin.close();
} catch (IOException ex) {
LOGGER.trace("", ex);
}
}
}
return null;
}

View File

@@ -32,7 +32,7 @@ public class CweHandler extends DefaultHandler {
/**
* a HashMap containing the CWE data.
*/
private final HashMap<String, String> cwe = new HashMap<String, String>();
private final HashMap<String, String> cwe = new HashMap<>();
/**
* Returns the HashMap of CWE entries (CWE-ID, Full CWE Name).

View File

@@ -63,7 +63,7 @@ public abstract class AbstractTokenizingFilter extends TokenFilter {
*/
public AbstractTokenizingFilter(TokenStream stream) {
super(stream);
tokens = new LinkedList<String>();
tokens = new LinkedList<>();
}
/**

View File

@@ -71,7 +71,7 @@ public final class TokenPairConcatenatingFilter extends TokenFilter {
*/
public TokenPairConcatenatingFilter(TokenStream stream) {
super(stream);
words = new LinkedList<String>();
words = new LinkedList<>();
}
/**
@@ -156,10 +156,7 @@ public final class TokenPairConcatenatingFilter extends TokenFilter {
if ((this.previousWord == null) ? (other.previousWord != null) : !this.previousWord.equals(other.previousWord)) {
return false;
}
if (this.words != other.words && (this.words == null || !this.words.equals(other.words))) {
return false;
}
return true;
return !(this.words != other.words && (this.words == null || !this.words.equals(other.words)));
}
}

View File

@@ -22,16 +22,17 @@ import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.URLConnectionFactory;
import org.owasp.dependencycheck.utils.XmlUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
/**
* Class of methods to search Nexus repositories.
@@ -104,9 +105,7 @@ public class NexusSearch {
switch (conn.getResponseCode()) {
case 200:
try {
final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
final DocumentBuilder builder = factory.newDocumentBuilder();
final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
final Document doc = builder.parse(conn.getInputStream());
final XPath xpath = XPathFactory.newInstance().newXPath();
final String groupId = xpath
@@ -136,7 +135,7 @@ public class NexusSearch {
ma.setPomUrl(pomLink);
}
return ma;
} catch (Throwable e) {
} catch (ParserConfigurationException | IOException | SAXException | XPathExpressionException e) {
// Anything else is jacked-up XML stuff that we really can't recover
// from well
throw new IOException(e.getMessage(), e);
@@ -167,13 +166,14 @@ public class NexusSearch {
LOGGER.warn("Expected 200 result from Nexus, got {}", conn.getResponseCode());
return false;
}
final DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
final Document doc = builder.parse(conn.getInputStream());
if (!"status".equals(doc.getDocumentElement().getNodeName())) {
LOGGER.warn("Expected root node name of status, got {}", doc.getDocumentElement().getNodeName());
return false;
}
} catch (Throwable e) {
} catch (IOException | ParserConfigurationException | SAXException e) {
return false;
}

View File

@@ -0,0 +1,344 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2017 Steve Springett. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.nsp;
/**
 * The NSP check API responds with zero or more advisories.
* This class defines the Advisory objects returned.
*
* @author Steve Springett
*/
public class Advisory {
/**
* The unique ID of the advisory as issued by Node Security Platform.
*/
private int id;
/**
* The timestamp of the last update to the advisory.
*/
private String updatedAt;
/**
 * The timestamp of when the advisory was created.
*/
private String createdAt;
/**
* The timestamp of when the advisory was published.
*/
private String publishDate;
/**
* A detailed description of the advisory.
*/
private String overview;
/**
* Recommendations for mitigation. Typically involves updating to a newer release.
*/
private String recommendation;
/**
* The CVSS vector used to calculate the score.
*/
private String cvssVector;
/**
* The CVSS score.
*/
private float cvssScore;
/**
* The name of the Node module the advisory is for.
*/
private String module;
/**
* The version of the Node module the advisory is for.
*/
private String version;
/**
* A string representation of the versions containing the vulnerability.
*/
private String vulnerableVersions;
/**
* A string representation of the versions that have been patched.
*/
private String patchedVersions;
/**
* The title/name of the advisory.
*/
private String title;
/**
 * The linear dependency path that led to this module.
* [0] is the root with each subsequent array member leading up to the
* final (this) module.
*/
private String[] path;
/**
* The URL to the advisory.
*/
private String advisory;
/**
* Returns the unique ID of the advisory as issued by Node Security Platform.
* @return a unique ID
*/
public int getId() {
return id;
}
/**
* Sets the unique ID of the advisory as issued by Node Security Platform.
* @param id a unique ID
*/
public void setId(int id) {
this.id = id;
}
/**
* Returns the timestamp of the last update to the advisory.
* @return a timestamp
*/
public String getUpdatedAt() {
return updatedAt;
}
/**
* Sets the timestamp of the last update to the advisory.
* @param updatedAt a timestamp
*/
public void setUpdatedAt(String updatedAt) {
this.updatedAt = updatedAt;
}
/**
 * Returns the timestamp of when the advisory was created.
* @return a timestamp
*/
public String getCreatedAt() {
return createdAt;
}
/**
 * Sets the timestamp of when the advisory was created.
* @param createdAt a timestamp
*/
public void setCreatedAt(String createdAt) {
this.createdAt = createdAt;
}
/**
* Returns the timestamp of when the advisory was published.
* @return a timestamp
*/
public String getPublishDate() {
return publishDate;
}
/**
* Sets the timestamp of when the advisory was published.
* @param publishDate a timestamp
*/
public void setPublishDate(String publishDate) {
this.publishDate = publishDate;
}
/**
* Returns a detailed description of the advisory.
* @return the overview
*/
public String getOverview() {
return overview;
}
/**
* Sets the detailed description of the advisory.
* @param overview the overview
*/
public void setOverview(String overview) {
this.overview = overview;
}
/**
* Returns recommendations for mitigation. Typically involves updating to a newer release.
* @return recommendations
*/
public String getRecommendation() {
return recommendation;
}
/**
* Sets recommendations for mitigation. Typically involves updating to a newer release.
* @param recommendation recommendations
*/
public void setRecommendation(String recommendation) {
this.recommendation = recommendation;
}
/**
* Returns the CVSS vector used to calculate the score.
* @return the CVSS vector
*/
public String getCvssVector() {
return cvssVector;
}
/**
* Sets the CVSS vector used to calculate the score.
* @param cvssVector the CVSS vector
*/
public void setCvssVector(String cvssVector) {
this.cvssVector = cvssVector;
}
/**
* Returns the CVSS score.
* @return the CVSS score
*/
public float getCvssScore() {
return cvssScore;
}
/**
* Sets the CVSS score.
* @param cvssScore the CVSS score
*/
public void setCvssScore(float cvssScore) {
this.cvssScore = cvssScore;
}
/**
* Returns the name of the Node module the advisory is for.
* @return the name of the module
*/
public String getModule() {
return module;
}
/**
* Sets the name of the Node module the advisory is for.
 * @param module the name of the module
*/
public void setModule(String module) {
this.module = module;
}
/**
* Returns the version of the Node module the advisory is for.
* @return the module version
*/
public String getVersion() {
return version;
}
/**
* Sets the version of the Node module the advisory is for.
* @param version the module version
*/
public void setVersion(String version) {
this.version = version;
}
/**
* Returns a string representation of the versions containing the vulnerability.
* @return the affected versions
*/
public String getVulnerableVersions() {
return vulnerableVersions;
}
/**
* Sets the string representation of the versions containing the vulnerability.
* @param vulnerableVersions the affected versions
*/
public void setVulnerableVersions(String vulnerableVersions) {
this.vulnerableVersions = vulnerableVersions;
}
/**
* Returns a string representation of the versions that have been patched.
* @return the patched versions
*/
public String getPatchedVersions() {
return patchedVersions;
}
/**
* Sets the string representation of the versions that have been patched.
* @param patchedVersions the patched versions
*/
public void setPatchedVersions(String patchedVersions) {
this.patchedVersions = patchedVersions;
}
/**
* Returns the title/name of the advisory.
* @return the title/name of the advisory
*/
public String getTitle() {
return title;
}
/**
* Sets the title/name of the advisory.
* @param title the title/name of the advisory
*/
public void setTitle(String title) {
this.title = title;
}
/**
 * Returns the linear dependency path that led to this module.
* @return the dependency path
*/
public String[] getPath() {
return path;
}
/**
 * Sets the linear dependency path that led to this module.
* @param path the dependency path
*/
public void setPath(String[] path) {
this.path = path;
}
/**
* Returns the URL to the advisory.
* @return the advisory URL
*/
public String getAdvisory() {
return advisory;
}
/**
* Sets the URL to the advisory.
* @param advisory the advisory URL
*/
public void setAdvisory(String advisory) {
this.advisory = advisory;
}
}

View File

@@ -0,0 +1,161 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2017 Steve Springett. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.nsp;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.URLConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonObject;
import javax.json.JsonReader;
import org.owasp.dependencycheck.utils.URLConnectionFailureException;
/**
* Class of methods to search via Node Security Platform.
*
* @author Steve Springett
*/
public class NspSearch {
/**
* The URL for the public NSP check API.
*/
private final URL nspCheckUrl;
/**
* Whether to use the Proxy when making requests.
*/
private final boolean useProxy;
/**
* Used for logging.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(NspSearch.class);
/**
 * Creates an NspSearch for the given NSP check API URL.
*
* @param nspCheckUrl the URL to the public NSP check API
*/
public NspSearch(URL nspCheckUrl) {
this.nspCheckUrl = nspCheckUrl;
if (null != Settings.getString(Settings.KEYS.PROXY_SERVER)) {
useProxy = true;
LOGGER.debug("Using proxy");
} else {
useProxy = false;
LOGGER.debug("Not using proxy");
}
}
/**
* Submits the package.json file to the NSP public /check API and returns a
* list of zero or more Advisories.
*
* @param packageJson the package.json file retrieved from the Dependency
 * @return a List of zero or more Advisory objects
* @throws AnalysisException if Node Security Platform is unable to analyze the package
* @throws IOException if it's unable to connect to Node Security Platform
*/
public List<Advisory> submitPackage(JsonObject packageJson) throws AnalysisException, IOException {
try {
List<Advisory> result = new ArrayList<>();
byte[] packageDatabytes = packageJson.toString().getBytes(StandardCharsets.UTF_8);
final HttpURLConnection conn = URLConnectionFactory.createHttpURLConnection(nspCheckUrl, useProxy);
conn.setDoOutput(true);
conn.setDoInput(true);
conn.setRequestMethod("POST");
conn.setRequestProperty("X-NSP-VERSION", "2.6.2");
conn.setRequestProperty("Content-Type", "application/json");
conn.setRequestProperty("Content-Length", Integer.toString(packageDatabytes.length));
conn.connect();
try (OutputStream os = new BufferedOutputStream(conn.getOutputStream())) {
os.write(packageDatabytes);
os.flush();
}
if (conn.getResponseCode() == 200) {
try (InputStream in = new BufferedInputStream(conn.getInputStream())) {
JsonReader jsonReader = Json.createReader(in);
JsonArray array = jsonReader.readArray();
if (array != null) {
for (int i = 0; i < array.size(); i++) {
JsonObject object = array.getJsonObject(i);
Advisory advisory = new Advisory();
advisory.setId(object.getInt("id"));
advisory.setUpdatedAt(object.getString("updated_at", null));
advisory.setCreatedAt(object.getString("created_at", null));
advisory.setPublishDate(object.getString("publish_date", null));
advisory.setOverview(object.getString("overview"));
advisory.setRecommendation(object.getString("recommendation", null));
advisory.setCvssVector(object.getString("cvss_vector", null));
advisory.setCvssScore(Float.parseFloat(object.getJsonNumber("cvss_score").toString()));
advisory.setModule(object.getString("module", null));
advisory.setVersion(object.getString("version", null));
advisory.setVulnerableVersions(object.getString("vulnerable_versions", null));
advisory.setPatchedVersions(object.getString("patched_versions", null));
advisory.setTitle(object.getString("title", null));
advisory.setAdvisory(object.getString("advisory", null));
JsonArray jsonPath = object.getJsonArray("path");
List<String> stringPath = new ArrayList<>();
for (int j = 0; j < jsonPath.size(); j++) {
stringPath.add(jsonPath.getString(j));
}
advisory.setPath(stringPath.toArray(new String[stringPath.size()]));
result.add(advisory);
}
}
}
} else if (conn.getResponseCode() == 400) {
LOGGER.debug("Invalid payload submitted to Node Security Platform. Received response code: {} {}",
conn.getResponseCode(), conn.getResponseMessage());
throw new AnalysisException("Could not perform NSP analysis. Invalid payload submitted to Node Security Platform.");
} else {
LOGGER.debug("Could not connect to Node Security Platform. Received response code: {} {}",
conn.getResponseCode(), conn.getResponseMessage());
throw new IOException("Could not connect to Node Security Platform");
}
return result;
} catch (IOException ex) {
if (ex instanceof javax.net.ssl.SSLHandshakeException
&& ex.getMessage().contains("unable to find valid certification path to requested target")) {
final String msg = String.format("Unable to connect to '%s' - the Java trust store does not contain a trusted root for the cert. "
+ " Please see https://github.com/jeremylong/InstallCert for one method of updating the trusted certificates.", nspCheckUrl);
throw new URLConnectionFailureException(msg, ex);
}
throw ex;
}
}
}
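A minimal caller sketch for submitPackage() follows; it assumes the dependency-check Settings have already been initialized by the caller, and the endpoint URL is a placeholder rather than the analyzer's actual configured value.

import java.net.URL;
import java.util.List;
import javax.json.Json;
import javax.json.JsonObject;
import org.owasp.dependencycheck.data.nsp.Advisory;
import org.owasp.dependencycheck.data.nsp.NspSearch;

final class NspSubmitSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder URL; the analyzer resolves the real NSP endpoint from Settings.
        final NspSearch search = new NspSearch(new URL("https://nsp.example.com/check"));
        final JsonObject payload = Json.createObjectBuilder()
                .add("name", "demo-app")
                .add("version", "1.0.0")
                .add("dependencies", Json.createObjectBuilder().add("uglify-js", "2.4.23"))
                .build();
        final List<Advisory> advisories = search.submitPackage(payload);
        for (Advisory advisory : advisories) {
            System.out.println(advisory.getModule() + ": " + advisory.getTitle());
        }
    }
}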

View File

@@ -0,0 +1,71 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2017 Steve Springett. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.nsp;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonValue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
 * Class used to create a sanitized version of package.json
* suitable for submission to the nsp/check service.
*
* @author Steve Springett
*/
public class SanitizePackage {
/**
* Specifies a whitelist of allowable objects that package.json should contain.
*/
private static final List<String> WHITELIST = new ArrayList<>(Arrays.asList(
"name",
"version",
"engine",
"dependencies",
"devDependencies",
"optionalDependencies",
"peerDependencies",
"bundleDependencies",
"bundledDependencies"
));
/**
* The NSP API only accepts a subset of objects typically found in package.json.
* This method accepts a JsonObject of a raw package.json file and returns a
 * new 'sanitized' version based on a pre-defined whitelist of the objects
 * NSP accepts.
*
* @param rawPackage a raw package.json file
* @return a sanitized version of the package.json file
*/
public static JsonObject sanitize(JsonObject rawPackage) {
JsonObjectBuilder builder = Json.createObjectBuilder();
for (Map.Entry<String, JsonValue> entry: rawPackage.entrySet()) {
if (WHITELIST.contains(entry.getKey())) {
builder.add(entry.getKey(), entry.getValue());
}
}
return builder.build();
}
}
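A short usage sketch for sanitize(); the input below is hand-built for illustration, and only the whitelisted keys survive in the result.

import javax.json.Json;
import javax.json.JsonObject;
import org.owasp.dependencycheck.data.nsp.SanitizePackage;

final class SanitizeSketch {
    public static void main(String[] args) {
        final JsonObject raw = Json.createObjectBuilder()
                .add("name", "demo-app")
                .add("version", "1.0.0")
                .add("scripts", Json.createObjectBuilder().add("test", "mocha")) // not whitelisted
                .add("dependencies", Json.createObjectBuilder().add("express", "4.15.2"))
                .build();
        final JsonObject clean = SanitizePackage.sanitize(raw);
        // Prints only name, version and dependencies; "scripts" has been dropped.
        System.out.println(clean);
    }
}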

View File

@@ -0,0 +1,7 @@
/**
*
* Contains classes related to searching Node Security Platform (nsp).<br><br>
*
* These are used to abstract NSP searching away from OWASP Dependency Check so they can be reused elsewhere.
*/
package org.owasp.dependencycheck.data.nsp;

View File

@@ -53,12 +53,6 @@ public class NugetPackage {
*/
private String licenseUrl;
/**
* Creates an empty NugetPackage.
*/
public NugetPackage() {
}
/**
* Sets the id.
* @param id the id

View File

@@ -17,13 +17,18 @@
*/
package org.owasp.dependencycheck.data.nuget;
import java.io.IOException;
import java.io.InputStream;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.owasp.dependencycheck.utils.XmlUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
/**
* Parse a Nuspec file using XPath.
@@ -57,9 +62,8 @@ public class XPathNuspecParser implements NuspecParser {
@Override
public NugetPackage parse(InputStream stream) throws NuspecParseException {
try {
final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
final Document d = factory.newDocumentBuilder().parse(stream);
final DocumentBuilder db = XmlUtils.buildSecureDocumentBuilder();
final Document d = db.parse(stream);
final XPath xpath = XPathFactory.newInstance().newXPath();
final NugetPackage nuspec = new NugetPackage();
@@ -78,7 +82,7 @@ public class XPathNuspecParser implements NuspecParser {
nuspec.setLicenseUrl(getOrNull((Node) xpath.evaluate("/package/metadata/licenseUrl", d, XPathConstants.NODE)));
nuspec.setTitle(getOrNull((Node) xpath.evaluate("/package/metadata/title", d, XPathConstants.NODE)));
return nuspec;
} catch (Throwable e) {
} catch (ParserConfigurationException | SAXException | IOException | XPathExpressionException | NuspecParseException e) {
throw new NuspecParseException("Unable to parse nuspec", e);
}
}

View File

@@ -241,13 +241,31 @@ public final class ConnectionFactory {
* @throws IOException thrown if the data directory does not exist and
* cannot be created
*/
private static boolean h2DataFileExists() throws IOException {
public static boolean h2DataFileExists() throws IOException {
final File dir = Settings.getDataDirectory();
final String fileName = Settings.getString(Settings.KEYS.DB_FILE_NAME);
final File file = new File(dir, fileName);
return file.exists();
}
/**
* Determines if the connection string is for an H2 database.
*
* @return true if the connection string is for an H2 database
*/
public static boolean isH2Connection() {
String connStr;
try {
connStr = Settings.getConnectionString(
Settings.KEYS.DB_CONNECTION_STRING,
Settings.KEYS.DB_FILE_NAME);
} catch (IOException ex) {
LOGGER.debug("Unable to get connectionn string", ex);
return false;
}
return connStr.startsWith("jdbc:h2:file:");
}
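// Illustrative example (hypothetical values): a resolved connection string such as
// "jdbc:h2:file:/home/user/odc-data/dc" makes isH2Connection() return true, while a
// "jdbc:mysql://..." connection string returns false.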
/**
* Creates the database structure (tables and indexes) to store the CVE
* data.
@@ -342,7 +360,7 @@ public final class ConnectionFactory {
LOGGER.warn("A new version of dependency-check is available; consider upgrading");
Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, false);
} else if (e0 == c0 && e1 == c1) {
//do nothing - not sure how we got here, but just incase...
//do nothing - not sure how we got here, but just in case...
} else {
LOGGER.error("The database schema must be upgraded to use this version of dependency-check. Please see {} for more information.",
UPGRADE_HELP_URL);

View File

@@ -23,8 +23,8 @@ import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -35,6 +35,7 @@ import java.util.MissingResourceException;
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.Set;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.data.cwe.CweDB;
import org.owasp.dependencycheck.dependency.Reference;
import org.owasp.dependencycheck.dependency.Vulnerability;
@@ -47,13 +48,27 @@ import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.owasp.dependencycheck.data.nvdcve.CveDB.PreparedStatementCveDb.*;
/**
* The database holding information about the NVD CVE data.
* The database holding information about the NVD CVE data. This class is safe
 * to be accessed from multiple threads in parallel; however, internally only one
* connection will be used.
*
* @author Jeremy Long
*/
public class CveDB {
@ThreadSafe
public final class CveDB implements AutoCloseable {
/**
* Singleton instance of the CveDB.
*/
private static CveDB instance = null;
/**
 * Track the number of current users of the CveDB so that, if someone is
 * using the database, another user cannot close the connection on them.
*/
private int usageCount = 0;
/**
* The logger.
*/
@@ -61,18 +76,132 @@ public class CveDB {
/**
* Database connection
*/
private Connection conn;
private Connection connection;
/**
* The bundle of statements used when accessing the database.
*/
private ResourceBundle statementBundle = null;
private ResourceBundle statementBundle;
/**
* Database properties object containing the 'properties' from the database
* table.
*/
private DatabaseProperties databaseProperties;
/**
* The prepared statements.
*/
private final EnumMap<PreparedStatementCveDb, PreparedStatement> preparedStatements = new EnumMap<>(PreparedStatementCveDb.class);
/**
 * Creates a new CveDB object and opens the database connection. Note, the
 * connection must be closed by the caller by calling the close method.
 */
/**
 * Does the underlying connection support batch operations?
 */
private boolean batchSupported;
/**
 * The enum value names must match the keys of the statements in the
 * statement bundles "dbStatements*.properties".
 */
enum PreparedStatementCveDb {
/**
* Key for SQL Statement.
*/
CLEANUP_ORPHANS,
/**
* Key for SQL Statement.
*/
COUNT_CPE,
/**
* Key for SQL Statement.
*/
DELETE_REFERENCE,
/**
* Key for SQL Statement.
*/
DELETE_SOFTWARE,
/**
* Key for SQL Statement.
*/
DELETE_VULNERABILITY,
/**
* Key for SQL Statement.
*/
INSERT_CPE,
/**
* Key for SQL Statement.
*/
INSERT_PROPERTY,
/**
* Key for SQL Statement.
*/
INSERT_REFERENCE,
/**
* Key for SQL Statement.
*/
INSERT_SOFTWARE,
/**
* Key for SQL Statement.
*/
INSERT_VULNERABILITY,
/**
* Key for SQL Statement.
*/
MERGE_PROPERTY,
/**
* Key for SQL Statement.
*/
SELECT_CPE_ENTRIES,
/**
* Key for SQL Statement.
*/
SELECT_CPE_ID,
/**
* Key for SQL Statement.
*/
SELECT_CVE_FROM_SOFTWARE,
/**
* Key for SQL Statement.
*/
SELECT_PROPERTIES,
/**
* Key for SQL Statement.
*/
SELECT_REFERENCES,
/**
* Key for SQL Statement.
*/
SELECT_SOFTWARE,
/**
* Key for SQL Statement.
*/
SELECT_VENDOR_PRODUCT_LIST,
/**
* Key for SQL Statement.
*/
SELECT_VULNERABILITY,
/**
* Key for SQL Statement.
*/
SELECT_VULNERABILITY_ID,
/**
* Key for SQL Statement.
*/
UPDATE_PROPERTY,
/**
* Key for SQL Statement.
*/
UPDATE_VULNERABILITY
}
/**
* Gets the CveDB singleton object.
*
* @return the CveDB singleton
* @throws DatabaseException thrown if there is a database error
*/
public static synchronized CveDB getInstance() throws DatabaseException {
if (instance == null) {
instance = new CveDB();
}
if (!instance.isOpen()) {
instance.open();
}
instance.usageCount += 1;
return instance;
}
/**
* Creates a new CveDB object and opens the database connection. Note, the
@@ -81,35 +210,34 @@ public class CveDB {
* @throws DatabaseException thrown if there is an exception opening the
* database.
*/
public CveDB() throws DatabaseException {
super();
private CveDB() throws DatabaseException {
}
/**
* Tries to determine the product name of the database.
*
* @param conn the database connection
* @return the product name of the database if successful, {@code null} else
*/
private static String determineDatabaseProductName(Connection conn) {
try {
open();
try {
final String databaseProductName = conn.getMetaData().getDatabaseProductName();
LOGGER.debug("Database dialect: {}", databaseProductName);
final Locale dbDialect = new Locale(databaseProductName);
statementBundle = ResourceBundle.getBundle("data/dbStatements", dbDialect);
if ("mysql".equalsIgnoreCase(databaseProductName)) {
batchSupported = false;
}
} catch (SQLException se) {
LOGGER.warn("Problem loading database specific dialect!", se);
statementBundle = ResourceBundle.getBundle("data/dbStatements");
}
databaseProperties = new DatabaseProperties(this);
} catch (DatabaseException ex) {
throw ex;
final String databaseProductName = conn.getMetaData().getDatabaseProductName();
LOGGER.debug("Database product: {}", databaseProductName);
return databaseProductName;
} catch (SQLException se) {
LOGGER.warn("Problem determining database product!", se);
return null;
}
}
/**
* Returns the database connection.
 * Method added for testing; returns the current usage count of the CveDB
* singleton.
*
* @return the database connection
* @return the current usage of the CveDB singleton
*/
protected Connection getConnection() {
return conn;
protected synchronized int getUsageCount() {
return usageCount;
}
/**
@@ -119,28 +247,43 @@ public class CveDB {
* @throws DatabaseException thrown if there is an error opening the
* database connection
*/
public final synchronized void open() throws DatabaseException {
if (!isOpen()) {
conn = ConnectionFactory.getConnection();
private synchronized void open() throws DatabaseException {
if (!instance.isOpen()) {
instance.connection = ConnectionFactory.getConnection();
final String databaseProductName = determineDatabaseProductName(instance.connection);
instance.statementBundle = databaseProductName != null
? ResourceBundle.getBundle("data/dbStatements", new Locale(databaseProductName))
: ResourceBundle.getBundle("data/dbStatements");
instance.prepareStatements();
instance.databaseProperties = new DatabaseProperties(instance);
}
}
/**
* Closes the DB4O database. Close should be called on this object when it
* is done being used.
* Closes the database connection. Close should be called on this object
* when it is done being used.
*/
@Override
public synchronized void close() {
if (conn != null) {
try {
conn.close();
} catch (SQLException ex) {
LOGGER.error("There was an error attempting to close the CveDB, see the log for more details.");
LOGGER.debug("", ex);
} catch (Throwable ex) {
LOGGER.error("There was an exception attempting to close the CveDB, see the log for more details.");
LOGGER.debug("", ex);
if (instance != null) {
instance.usageCount -= 1;
if (instance.usageCount <= 0 && instance.isOpen()) {
instance.usageCount = 0;
instance.closeStatements();
try {
instance.connection.close();
} catch (SQLException ex) {
LOGGER.error("There was an error attempting to close the CveDB, see the log for more details.");
LOGGER.debug("", ex);
} catch (Throwable ex) {
LOGGER.error("There was an exception attempting to close the CveDB, see the log for more details.");
LOGGER.debug("", ex);
}
instance.statementBundle = null;
instance.preparedStatements.clear();
instance.databaseProperties = null;
instance.connection = null;
}
conn = null;
}
}
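// A minimal usage sketch (illustrative, relying only on the getInstance()/close()
// contract above): callers acquire the shared instance, use it, and release it;
// the underlying connection is closed only when the usage count reaches zero.
//
//     final CveDB cve = CveDB.getInstance(); // opens the connection on first use, usageCount++
//     try {
//         cve.getVendorProductList();        // any number of callers share one connection
//     } finally {
//         cve.close();                       // usageCount--; closes the connection at zero
//     }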
@@ -149,8 +292,54 @@ public class CveDB {
*
* @return whether the database connection is open or closed
*/
public synchronized boolean isOpen() {
return conn != null;
protected synchronized boolean isOpen() {
return connection != null;
}
/**
* Prepares all statements to be used.
*
* @throws DatabaseException thrown if there is an error preparing the
* statements
*/
private void prepareStatements() throws DatabaseException {
for (PreparedStatementCveDb key : values()) {
final String statementString = statementBundle.getString(key.name());
final PreparedStatement preparedStatement;
try {
if (key == INSERT_VULNERABILITY || key == INSERT_CPE) {
preparedStatement = connection.prepareStatement(statementString, new String[]{"id"});
} else {
preparedStatement = connection.prepareStatement(statementString);
}
} catch (SQLException exception) {
throw new DatabaseException(exception);
}
preparedStatements.put(key, preparedStatement);
}
}
/**
* Closes all prepared statements.
*/
private synchronized void closeStatements() {
for (PreparedStatement preparedStatement : preparedStatements.values()) {
DBUtils.closeStatement(preparedStatement);
}
}
/**
* Returns the specified prepared statement.
*
* @param key the prepared statement from {@link PreparedStatementCveDb} to
* return
* @return the prepared statement
* @throws SQLException thrown if a SQL Exception occurs
*/
private synchronized PreparedStatement getPreparedStatement(PreparedStatementCveDb key) throws SQLException {
final PreparedStatement preparedStatement = preparedStatements.get(key);
preparedStatement.clearParameters();
return preparedStatement;
}
/**
@@ -160,8 +349,8 @@ public class CveDB {
*/
public synchronized void commit() throws SQLException {
//temporary remove this as autocommit is on.
//if (conn != null) {
// conn.commit();
//if (isOpen()) {
// connection.commit();
//}
}
@@ -177,18 +366,23 @@ public class CveDB {
close();
super.finalize();
}
/**
* Database properties object containing the 'properties' from the database
* table.
*/
private DatabaseProperties databaseProperties;
/**
* Get the value of databaseProperties.
*
* @return the value of databaseProperties
*/
public DatabaseProperties getDatabaseProperties() {
public synchronized DatabaseProperties getDatabaseProperties() {
return databaseProperties;
}
/**
* Used within the unit tests to reload the database properties.
*
* @return the database properties
*/
protected synchronized DatabaseProperties reloadProperties() {
databaseProperties = new DatabaseProperties(this);
return databaseProperties;
}
@@ -203,11 +397,10 @@ public class CveDB {
* @return a set of vulnerable software
*/
public synchronized Set<VulnerableSoftware> getCPEs(String vendor, String product) {
final Set<VulnerableSoftware> cpe = new HashSet<VulnerableSoftware>();
final Set<VulnerableSoftware> cpe = new HashSet<>();
ResultSet rs = null;
PreparedStatement ps = null;
try {
ps = getConnection().prepareStatement(statementBundle.getString("SELECT_CPE_ENTRIES"));
final PreparedStatement ps = getPreparedStatement(SELECT_CPE_ENTRIES);
ps.setString(1, vendor);
ps.setString(2, product);
rs = ps.executeQuery();
@@ -222,7 +415,6 @@ public class CveDB {
LOGGER.debug("", ex);
} finally {
DBUtils.closeResultSet(rs);
DBUtils.closeStatement(ps);
}
return cpe;
}
@@ -235,21 +427,19 @@ public class CveDB {
* data from the DB
*/
public synchronized Set<Pair<String, String>> getVendorProductList() throws DatabaseException {
final Set<Pair<String, String>> data = new HashSet<Pair<String, String>>();
final Set<Pair<String, String>> data = new HashSet<>();
ResultSet rs = null;
PreparedStatement ps = null;
try {
ps = getConnection().prepareStatement(statementBundle.getString("SELECT_VENDOR_PRODUCT_LIST"));
final PreparedStatement ps = getPreparedStatement(SELECT_VENDOR_PRODUCT_LIST);
rs = ps.executeQuery();
while (rs.next()) {
data.add(new Pair<String, String>(rs.getString(1), rs.getString(2)));
data.add(new Pair<>(rs.getString(1), rs.getString(2)));
}
} catch (SQLException ex) {
final String msg = "An unexpected SQL Exception occurred; please see the verbose log for more details.";
throw new DatabaseException(msg, ex);
} finally {
DBUtils.closeResultSet(rs);
DBUtils.closeStatement(ps);
}
return data;
}
@@ -259,12 +449,11 @@ public class CveDB {
*
* @return the properties from the database
*/
synchronized Properties getProperties() {
public synchronized Properties getProperties() {
final Properties prop = new Properties();
PreparedStatement ps = null;
ResultSet rs = null;
try {
ps = getConnection().prepareStatement(statementBundle.getString("SELECT_PROPERTIES"));
final PreparedStatement ps = getPreparedStatement(SELECT_PROPERTIES);
rs = ps.executeQuery();
while (rs.next()) {
prop.setProperty(rs.getString(1), rs.getString(2));
@@ -273,7 +462,6 @@ public class CveDB {
LOGGER.error("An unexpected SQL Exception occurred; please see the verbose log for more details.");
LOGGER.debug("", ex);
} finally {
DBUtils.closeStatement(ps);
DBUtils.closeResultSet(rs);
}
return prop;
@@ -285,34 +473,23 @@ public class CveDB {
* @param key the property key
* @param value the property value
*/
synchronized void saveProperty(String key, String value) {
public synchronized void saveProperty(String key, String value) {
try {
try {
final PreparedStatement mergeProperty = getConnection().prepareStatement(statementBundle.getString("MERGE_PROPERTY"));
try {
mergeProperty.setString(1, key);
mergeProperty.setString(2, value);
mergeProperty.executeUpdate();
} finally {
DBUtils.closeStatement(mergeProperty);
}
final PreparedStatement mergeProperty = getPreparedStatement(MERGE_PROPERTY);
mergeProperty.setString(1, key);
mergeProperty.setString(2, value);
mergeProperty.executeUpdate();
} catch (MissingResourceException mre) {
// No Merge statement, so doing an Update/Insert...
PreparedStatement updateProperty = null;
PreparedStatement insertProperty = null;
try {
updateProperty = getConnection().prepareStatement(statementBundle.getString("UPDATE_PROPERTY"));
updateProperty.setString(1, value);
updateProperty.setString(2, key);
if (updateProperty.executeUpdate() == 0) {
insertProperty = getConnection().prepareStatement(statementBundle.getString("INSERT_PROPERTY"));
insertProperty.setString(1, key);
insertProperty.setString(2, value);
insertProperty.executeUpdate();
}
} finally {
DBUtils.closeStatement(updateProperty);
DBUtils.closeStatement(insertProperty);
final PreparedStatement updateProperty = getPreparedStatement(UPDATE_PROPERTY);
updateProperty.setString(1, value);
updateProperty.setString(2, key);
if (updateProperty.executeUpdate() == 0) {
final PreparedStatement insertProperty = getPreparedStatement(INSERT_PROPERTY);
insertProperty.setString(1, key);
insertProperty.setString(2, value);
insertProperty.executeUpdate();
}
}
} catch (SQLException ex) {
@@ -336,18 +513,17 @@ public class CveDB {
LOGGER.trace("", ex);
}
final DependencyVersion detectedVersion = parseDependencyVersion(cpe);
final List<Vulnerability> vulnerabilities = new ArrayList<Vulnerability>();
final List<Vulnerability> vulnerabilities = new ArrayList<>();
PreparedStatement ps = null;
ResultSet rs = null;
try {
ps = getConnection().prepareStatement(statementBundle.getString("SELECT_CVE_FROM_SOFTWARE"));
final PreparedStatement ps = getPreparedStatement(SELECT_CVE_FROM_SOFTWARE);
ps.setString(1, cpe.getVendor());
ps.setString(2, cpe.getProduct());
rs = ps.executeQuery();
String currentCVE = "";
final Map<String, Boolean> vulnSoftware = new HashMap<String, Boolean>();
final Map<String, Boolean> vulnSoftware = new HashMap<>();
while (rs.next()) {
final String cveId = rs.getString(1);
if (!currentCVE.equals(cveId)) { //check for match and add
@@ -377,7 +553,6 @@ public class CveDB {
throw new DatabaseException("Exception retrieving vulnerability for " + cpeStr, ex);
} finally {
DBUtils.closeResultSet(rs);
DBUtils.closeStatement(ps);
}
return vulnerabilities;
}
@@ -390,16 +565,13 @@ public class CveDB {
* @throws DatabaseException if an exception occurs
*/
public synchronized Vulnerability getVulnerability(String cve) throws DatabaseException {
PreparedStatement psV = null;
PreparedStatement psR = null;
PreparedStatement psS = null;
ResultSet rsV = null;
ResultSet rsR = null;
ResultSet rsS = null;
Vulnerability vuln = null;
try {
psV = getConnection().prepareStatement(statementBundle.getString("SELECT_VULNERABILITY"));
final PreparedStatement psV = getPreparedStatement(SELECT_VULNERABILITY);
psV.setString(1, cve);
rsV = psV.executeQuery();
if (rsV.next()) {
@@ -423,13 +595,14 @@ public class CveDB {
vuln.setCvssIntegrityImpact(rsV.getString(9));
vuln.setCvssAvailabilityImpact(rsV.getString(10));
psR = getConnection().prepareStatement(statementBundle.getString("SELECT_REFERENCES"));
final PreparedStatement psR = getPreparedStatement(SELECT_REFERENCES);
psR.setInt(1, cveId);
rsR = psR.executeQuery();
while (rsR.next()) {
vuln.addReference(rsR.getString(1), rsR.getString(2), rsR.getString(3));
}
psS = getConnection().prepareStatement(statementBundle.getString("SELECT_SOFTWARE"));
final PreparedStatement psS = getPreparedStatement(SELECT_SOFTWARE);
psS.setInt(1, cveId);
rsS = psS.executeQuery();
while (rsS.next()) {
@@ -448,9 +621,6 @@ public class CveDB {
DBUtils.closeResultSet(rsV);
DBUtils.closeResultSet(rsR);
DBUtils.closeResultSet(rsS);
DBUtils.closeStatement(psV);
DBUtils.closeStatement(psR);
DBUtils.closeStatement(psS);
}
return vuln;
}
@@ -463,52 +633,31 @@ public class CveDB {
* @throws DatabaseException is thrown if the database
*/
public synchronized void updateVulnerability(Vulnerability vuln) throws DatabaseException {
PreparedStatement selectVulnerabilityId = null;
PreparedStatement deleteVulnerability = null;
PreparedStatement deleteReferences = null;
PreparedStatement deleteSoftware = null;
PreparedStatement updateVulnerability = null;
PreparedStatement insertVulnerability = null;
PreparedStatement insertReference = null;
PreparedStatement selectCpeId = null;
PreparedStatement insertCpe = null;
PreparedStatement insertSoftware = null;
try {
selectVulnerabilityId = getConnection().prepareStatement(statementBundle.getString("SELECT_VULNERABILITY_ID"));
deleteVulnerability = getConnection().prepareStatement(statementBundle.getString("DELETE_VULNERABILITY"));
deleteReferences = getConnection().prepareStatement(statementBundle.getString("DELETE_REFERENCE"));
deleteSoftware = getConnection().prepareStatement(statementBundle.getString("DELETE_SOFTWARE"));
updateVulnerability = getConnection().prepareStatement(statementBundle.getString("UPDATE_VULNERABILITY"));
final String[] ids = {"id"};
insertVulnerability = getConnection().prepareStatement(statementBundle.getString("INSERT_VULNERABILITY"),
//Statement.RETURN_GENERATED_KEYS);
ids);
insertReference = getConnection().prepareStatement(statementBundle.getString("INSERT_REFERENCE"));
selectCpeId = getConnection().prepareStatement(statementBundle.getString("SELECT_CPE_ID"));
insertCpe = getConnection().prepareStatement(statementBundle.getString("INSERT_CPE"),
//Statement.RETURN_GENERATED_KEYS);
ids);
insertSoftware = getConnection().prepareStatement(statementBundle.getString("INSERT_SOFTWARE"));
int vulnerabilityId = 0;
final PreparedStatement selectVulnerabilityId = getPreparedStatement(SELECT_VULNERABILITY_ID);
selectVulnerabilityId.setString(1, vuln.getName());
ResultSet rs = selectVulnerabilityId.executeQuery();
if (rs.next()) {
vulnerabilityId = rs.getInt(1);
// first delete any existing vulnerability info; we don't know what was updated, so this is slower but simpler.
deleteReferences.setInt(1, vulnerabilityId);
deleteReferences.execute();
final PreparedStatement deleteReference = getPreparedStatement(DELETE_REFERENCE);
deleteReference.setInt(1, vulnerabilityId);
deleteReference.execute();
final PreparedStatement deleteSoftware = getPreparedStatement(DELETE_SOFTWARE);
deleteSoftware.setInt(1, vulnerabilityId);
deleteSoftware.execute();
}
DBUtils.closeResultSet(rs);
rs = null;
if (vulnerabilityId != 0) {
if (vuln.getDescription().contains("** REJECT **")) {
final PreparedStatement deleteVulnerability = getPreparedStatement(DELETE_VULNERABILITY);
deleteVulnerability.setInt(1, vulnerabilityId);
deleteVulnerability.executeUpdate();
} else {
final PreparedStatement updateVulnerability = getPreparedStatement(UPDATE_VULNERABILITY);
updateVulnerability.setString(1, vuln.getDescription());
updateVulnerability.setString(2, vuln.getCwe());
updateVulnerability.setFloat(3, vuln.getCvssScore());
@@ -522,6 +671,7 @@ public class CveDB {
updateVulnerability.executeUpdate();
}
} else {
final PreparedStatement insertVulnerability = getPreparedStatement(INSERT_VULNERABILITY);
insertVulnerability.setString(1, vuln.getName());
insertVulnerability.setString(2, vuln.getDescription());
insertVulnerability.setString(3, vuln.getCwe());
@@ -542,29 +692,22 @@ public class CveDB {
throw new DatabaseException(msg, ex);
} finally {
DBUtils.closeResultSet(rs);
rs = null;
}
}
final PreparedStatement insertReference = getPreparedStatement(INSERT_REFERENCE);
for (Reference r : vuln.getReferences()) {
insertReference.setInt(1, vulnerabilityId);
insertReference.setString(2, r.getName());
insertReference.setString(3, r.getUrl());
insertReference.setString(4, r.getSource());
if (batchSupported) {
insertReference.addBatch();
} else {
insertReference.execute();
}
}
if (batchSupported) {
insertReference.executeBatch();
insertReference.execute();
}
final PreparedStatement insertSoftware = getPreparedStatement(INSERT_SOFTWARE);
for (VulnerableSoftware s : vuln.getVulnerableSoftware()) {
int cpeProductId = 0;
final PreparedStatement selectCpeId = getPreparedStatement(SELECT_CPE_ID);
selectCpeId.setString(1, s.getName());
try {
rs = selectCpeId.executeQuery();
@@ -575,10 +718,10 @@ public class CveDB {
throw new DatabaseException("Unable to get primary key for new cpe: " + s.getName(), ex);
} finally {
DBUtils.closeResultSet(rs);
rs = null;
}
if (cpeProductId == 0) {
final PreparedStatement insertCpe = getPreparedStatement(INSERT_CPE);
insertCpe.setString(1, s.getName());
insertCpe.setString(2, s.getVendor());
insertCpe.setString(3, s.getProduct());
@@ -597,39 +740,22 @@ public class CveDB {
} else {
insertSoftware.setString(3, s.getPreviousVersion());
}
if (batchSupported) {
insertSoftware.addBatch();
} else {
try {
insertSoftware.execute();
} catch (SQLException ex) {
if (ex.getMessage().contains("Duplicate entry")) {
final String msg = String.format("Duplicate software key identified in '%s:%s'", vuln.getName(), s.getName());
LOGGER.debug(msg, ex);
} else {
throw ex;
}
try {
insertSoftware.execute();
} catch (SQLException ex) {
if (ex.getMessage().contains("Duplicate entry")) {
final String msg = String.format("Duplicate software key identified in '%s:%s'", vuln.getName(), s.getName());
LOGGER.info(msg, ex);
} else {
throw ex;
}
}
}
if (batchSupported) {
insertSoftware.executeBatch();
}
} catch (SQLException ex) {
final String msg = String.format("Error updating '%s'", vuln.getName());
LOGGER.debug(msg, ex);
throw new DatabaseException(msg, ex);
} finally {
DBUtils.closeStatement(selectVulnerabilityId);
DBUtils.closeStatement(deleteReferences);
DBUtils.closeStatement(deleteSoftware);
DBUtils.closeStatement(updateVulnerability);
DBUtils.closeStatement(deleteVulnerability);
DBUtils.closeStatement(insertVulnerability);
DBUtils.closeStatement(insertReference);
DBUtils.closeStatement(selectCpeId);
DBUtils.closeStatement(insertCpe);
DBUtils.closeStatement(insertSoftware);
}
}
@@ -639,17 +765,16 @@ public class CveDB {
* @return <code>true</code> if data exists; otherwise <code>false</code>
*/
public synchronized boolean dataExists() {
Statement cs = null;
ResultSet rs = null;
try {
cs = conn.createStatement();
rs = cs.executeQuery("SELECT COUNT(*) records FROM cpeEntry");
final PreparedStatement cs = getPreparedStatement(COUNT_CPE);
rs = cs.executeQuery();
if (rs.next()) {
if (rs.getInt(1) > 0) {
return true;
}
}
} catch (SQLException ex) {
} catch (Exception ex) {
String dd;
try {
dd = Settings.getDataDirectory().getAbsolutePath();
@@ -664,7 +789,6 @@ public class CveDB {
LOGGER.debug("", ex);
} finally {
DBUtils.closeResultSet(rs);
DBUtils.closeStatement(cs);
}
return false;
}
@@ -675,17 +799,14 @@ public class CveDB {
* ensure orphan entries are removed.
*/
public synchronized void cleanupDatabase() {
PreparedStatement ps = null;
try {
ps = getConnection().prepareStatement(statementBundle.getString("CLEANUP_ORPHANS"));
final PreparedStatement ps = getPreparedStatement(CLEANUP_ORPHANS);
if (ps != null) {
ps.executeUpdate();
}
} catch (SQLException ex) {
LOGGER.error("An unexpected SQL Exception occurred; please see the verbose log for more details.");
LOGGER.debug("", ex);
} finally {
DBUtils.closeStatement(ps);
}
}
@@ -703,12 +824,12 @@ public class CveDB {
* analyzed
* @return true if the identified version is affected, otherwise false
*/
Entry<String, Boolean> getMatchingSoftware(Map<String, Boolean> vulnerableSoftware, String vendor, String product,
protected Entry<String, Boolean> getMatchingSoftware(Map<String, Boolean> vulnerableSoftware, String vendor, String product,
DependencyVersion identifiedVersion) {
final boolean isVersionTwoADifferentProduct = "apache".equals(vendor) && "struts".equals(product);
final Set<String> majorVersionsAffectingAllPrevious = new HashSet<String>();
final Set<String> majorVersionsAffectingAllPrevious = new HashSet<>();
final boolean matchesAnyPrevious = identifiedVersion == null || "-".equals(identifiedVersion.toString());
String majorVersionMatch = null;
for (Entry<String, Boolean> entry : vulnerableSoftware.entrySet()) {
@@ -737,12 +858,12 @@ public class CveDB {
if (!entry.getValue()) {
final DependencyVersion v = parseDependencyVersion(entry.getKey());
//this can't dereference a null 'majorVersionMatch' as canSkipVersions accounts for this.
if (canSkipVersions && !majorVersionMatch.equals(v.getVersionParts().get(0))) {
if (canSkipVersions && majorVersionMatch != null && !majorVersionMatch.equals(v.getVersionParts().get(0))) {
continue;
}
//this can't dereference a null 'identifiedVersion' because if it was null we would have exited
//in the above loop or just after loop (if matchesAnyPrevious return null).
if (identifiedVersion.equals(v)) {
if (identifiedVersion != null && identifiedVersion.equals(v)) {
return entry;
}
}
@@ -751,12 +872,12 @@ public class CveDB {
if (entry.getValue()) {
final DependencyVersion v = parseDependencyVersion(entry.getKey());
//this can't dereference a null 'majorVersionMatch' as canSkipVersions accounts for this.
if (canSkipVersions && !majorVersionMatch.equals(v.getVersionParts().get(0))) {
if (canSkipVersions && majorVersionMatch != null && !majorVersionMatch.equals(v.getVersionParts().get(0))) {
continue;
}
//this can't dereference a null 'identifiedVersion' because if it was null we would have exited
//in the above loop or just after loop (if matchesAnyPrevious return null).
if (entry.getValue() && identifiedVersion.compareTo(v) <= 0) {
if (entry.getValue() && identifiedVersion != null && identifiedVersion.compareTo(v) <= 0) {
if (!(isVersionTwoADifferentProduct && !identifiedVersion.getVersionParts().get(0).equals(v.getVersionParts().get(0)))) {
return entry;
}
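The null guards added above (majorVersionMatch != null, identifiedVersion != null) keep the matching loop safe even though the earlier exits should make those values non-null in practice. For orientation, here is a simplified, hypothetical sketch of the underlying "affects this version and all previous versions" check, including the special case where a different major version is treated as a different product (as the code does for apache:struts); it does not use the project's DependencyVersion class:

import java.util.Arrays;
import java.util.List;
import java.util.Map;

final class PreviousVersionMatchExample {

    /** Parses a dotted version string into numeric parts, e.g. "2.1.2" becomes [2, 1, 2]. */
    static List<Integer> parse(String version) {
        final String[] parts = version.split("\\.");
        final Integer[] numbers = new Integer[parts.length];
        for (int i = 0; i < parts.length; i++) {
            numbers[i] = Integer.parseInt(parts[i]);
        }
        return Arrays.asList(numbers);
    }

    /** Compares two parsed versions part by part, padding the shorter one with zeros. */
    static int compare(List<Integer> a, List<Integer> b) {
        for (int i = 0; i < Math.max(a.size(), b.size()); i++) {
            final int left = i < a.size() ? a.get(i) : 0;
            final int right = i < b.size() ? b.get(i) : 0;
            if (left != right) {
                return Integer.compare(left, right);
            }
        }
        return 0;
    }

    /**
     * Returns true if the identified version matches a vulnerable entry exactly, or is less than
     * or equal to an entry flagged "includes all previous versions" - unless major versions are
     * treated as separate products and the major versions differ.
     */
    static boolean affected(String identified, Map<String, Boolean> vulnerable, boolean majorIsDifferentProduct) {
        final List<Integer> id = parse(identified);
        for (Map.Entry<String, Boolean> entry : vulnerable.entrySet()) {
            final List<Integer> candidate = parse(entry.getKey());
            if (compare(id, candidate) == 0) {
                return true;
            }
            if (entry.getValue() && compare(id, candidate) <= 0
                    && !(majorIsDifferentProduct && !id.get(0).equals(candidate.get(0)))) {
                return true;
            }
        }
        return false;
    }
}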
@@ -815,7 +936,7 @@ public class CveDB {
public synchronized void deleteUnusedCpe() {
PreparedStatement ps = null;
try {
ps = getConnection().prepareStatement(statementBundle.getString("DELETE_UNUSED_DICT_CPE"));
ps = connection.prepareStatement(statementBundle.getString("DELETE_UNUSED_DICT_CPE"));
ps.executeUpdate();
} catch (SQLException ex) {
LOGGER.error("Unable to delete CPE dictionary entries", ex);
@@ -837,7 +958,7 @@ public class CveDB {
public synchronized void addCpe(String cpe, String vendor, String product) {
PreparedStatement ps = null;
try {
ps = getConnection().prepareStatement(statementBundle.getString("ADD_DICT_CPE"));
ps = connection.prepareStatement(statementBundle.getString("ADD_DICT_CPE"));
ps.setString(1, cpe);
ps.setString(2, vendor);
ps.setString(3, product);

View File

@@ -17,13 +17,14 @@
*/
package org.owasp.dependencycheck.data.nvdcve;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.TreeMap;
import javax.annotation.concurrent.ThreadSafe;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.owasp.dependencycheck.data.update.nvd.NvdCveInfo;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.slf4j.Logger;
@@ -31,9 +32,11 @@ import org.slf4j.LoggerFactory;
/**
* This is a wrapper around a set of properties that are stored in the database.
* This class is safe to be accessed from multiple threads in parallel.
*
* @author Jeremy Long
*/
@ThreadSafe
public class DatabaseProperties {
/**
@@ -41,21 +44,24 @@ public class DatabaseProperties {
*/
private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseProperties.class);
/**
* Modified key word, used as a key to store information about the modified file (i.e. the containing the last 8 days of
* updates)..
* Modified key word, used as a key to store information about the modified
* file (i.e. the file containing the last 8 days of updates).
*/
public static final String MODIFIED = "Modified";
/**
* The properties file key for the last checked field - used to store the last check time of the Modified NVD CVE xml file.
* The properties file key for the last checked field - used to store the
* last check time of the Modified NVD CVE xml file.
*/
public static final String LAST_CHECKED = "NVD CVE Checked";
/**
* The properties file key for the last updated field - used to store the last updated time of the Modified NVD CVE xml file.
* The properties file key for the last updated field - used to store the
* last updated time of the Modified NVD CVE xml file.
*/
public static final String LAST_UPDATED = "NVD CVE Modified";
/**
* Stores the last updated time for each of the NVD CVE files. These timestamps should be updated if we process the modified
* file within 7 days of the last update.
* Stores the last updated time for each of the NVD CVE files. These
* timestamps should be updated if we process the modified file within 7
* days of the last update.
*/
public static final String LAST_UPDATED_BASE = "NVD CVE ";
/**
@@ -121,7 +127,8 @@ public class DatabaseProperties {
}
/**
* Returns the property value for the given key. If the key is not contained in the underlying properties null is returned.
* Returns the property value for the given key. If the key is not contained
* in the underlying properties null is returned.
*
* @param key the property key
* @return the value of the property
@@ -131,8 +138,8 @@ public class DatabaseProperties {
}
/**
* Returns the property value for the given key. If the key is not contained in the underlying properties the default value is
* returned.
* Returns the property value for the given key. If the key is not contained
* in the underlying properties the default value is returned.
*
* @param key the property key
* @param defaultValue the default value
@@ -152,22 +159,26 @@ public class DatabaseProperties {
}
/**
* Returns a map of the meta data from the database properties. This primarily contains timestamps of when the NVD CVE
* information was last updated.
* Returns a map of the meta data from the database properties. This
* primarily contains timestamps of when the NVD CVE information was last
* updated.
*
* @return a map of the database meta data
*/
public Map<String, String> getMetaData() {
final Map<String, String> map = new TreeMap<String, String>();
final Map<String, String> map = new TreeMap<>();
for (Entry<Object, Object> entry : properties.entrySet()) {
final String key = (String) entry.getKey();
if (!"version".equals(key)) {
if (key.startsWith("NVD CVE ")) {
try {
final long epoch = Long.parseLong((String) entry.getValue());
final Date date = new Date(epoch);
final DateFormat format = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
final String formatted = format.format(date);
final DateTime date = new DateTime(epoch);
final DateTimeFormatter format = DateTimeFormat.forPattern("dd/MM/yyyy HH:mm:ss");
final String formatted = format.print(date);
// final Date date = new Date(epoch);
// final DateFormat format = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
// final String formatted = format.format(date);
map.put(key, formatted);
} catch (Throwable ex) { //deliberately being broad in this catch clause
LOGGER.debug("Unable to parse timestamp from DB", ex);

View File

@@ -75,7 +75,7 @@ public final class DriverLoader {
*/
public static Driver load(String className, String pathToDriver) throws DriverLoadException {
final URLClassLoader parent = (URLClassLoader) ClassLoader.getSystemClassLoader();
final List<URL> urls = new ArrayList<URL>();
final List<URL> urls = new ArrayList<>();
final String[] paths = pathToDriver.split(File.pathSeparator);
for (String path : paths) {
final File file = new File(path);
@@ -129,19 +129,7 @@ public final class DriverLoader {
//using the DriverShim to get around the fact that the DriverManager won't register a driver not in the base class path
DriverManager.registerDriver(shim);
return shim;
} catch (ClassNotFoundException ex) {
final String msg = String.format("Unable to load database driver '%s'", className);
LOGGER.debug(msg, ex);
throw new DriverLoadException(msg, ex);
} catch (InstantiationException ex) {
final String msg = String.format("Unable to load database driver '%s'", className);
LOGGER.debug(msg, ex);
throw new DriverLoadException(msg, ex);
} catch (IllegalAccessException ex) {
final String msg = String.format("Unable to load database driver '%s'", className);
LOGGER.debug(msg, ex);
throw new DriverLoadException(msg, ex);
} catch (SQLException ex) {
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | SQLException ex) {
final String msg = String.format("Unable to load database driver '%s'", className);
LOGGER.debug(msg, ex);
throw new DriverLoadException(msg, ex);

View File

@@ -115,7 +115,6 @@ class DriverShim implements Driver {
* @throws SQLFeatureNotSupportedException thrown if the feature is not supported
* @see java.sql.Driver#getParentLogger()
*/
@Override
public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
//return driver.getParentLogger();
Method m = null;
@@ -127,11 +126,7 @@ class DriverShim implements Driver {
if (m != null) {
try {
return (java.util.logging.Logger) m.invoke(m);
} catch (IllegalAccessException ex) {
LOGGER.trace("", ex);
} catch (IllegalArgumentException ex) {
LOGGER.trace("", ex);
} catch (InvocationTargetException ex) {
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
LOGGER.trace("", ex);
}
}

View File

@@ -1,88 +0,0 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Jeremy Long
*/
public abstract class BaseUpdater {
/**
* Static logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(BaseUpdater.class);
/**
* Information about the timestamps and URLs for data that needs to be updated.
*/
private DatabaseProperties properties;
/**
* Reference to the Cve Database.
*/
private CveDB cveDB = null;
protected CveDB getCveDB() {
return cveDB;
}
protected DatabaseProperties getProperties() {
return properties;
}
/**
* Closes the CVE and CPE data stores.
*/
protected void closeDataStores() {
if (cveDB != null) {
try {
cveDB.close();
cveDB = null;
properties = null;
} catch (Throwable ignore) {
LOGGER.trace("Error closing the database", ignore);
}
}
}
/**
* Opens the data store.
*
* @throws UpdateException thrown if a data store cannot be opened
*/
protected final void openDataStores() throws UpdateException {
if (cveDB != null) {
return;
}
try {
cveDB = new CveDB();
cveDB.open();
properties = cveDB.getDatabaseProperties();
} catch (DatabaseException ex) {
closeDataStores();
LOGGER.debug("Database Exception opening databases", ex);
throw new UpdateException("Error updating the database, please see the log file for more details.");
}
}
}

View File

@@ -16,20 +16,14 @@
* Copyright (c) 2015 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update;
/*
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import java.util.zip.GZIPInputStream;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.apache.commons.io.FileUtils;
import static org.owasp.dependencycheck.data.nvdcve.DatabaseProperties.LAST_CPE_UPDATE;
import org.owasp.dependencycheck.data.update.cpe.CPEHandler;
import org.owasp.dependencycheck.data.update.cpe.Cpe;
@@ -37,11 +31,13 @@ import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.utils.DateUtil;
import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.ExtractionUtil;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.XmlUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
*/
/**
*
* This class is currently unused and if enabled will likely not work on MySQL
@@ -53,158 +49,114 @@ import org.xml.sax.SAXException;
* this class is not currently used. The code is being kept as a future update
* may utilize more data from the CPE XML files.
*
* @deprecated the CPE updater is not currently used.
* @author Jeremy Long
*/
public class CpeUpdater extends BaseUpdater implements CachedWebDataSource {
/**
* Static logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(CpeUpdater.class);
@Override
public void update() throws UpdateException {
try {
openDataStores();
if (updateNeeded()) {
LOGGER.info("Updating the Common Platform Enumeration (CPE)");
final File xml = downloadCpe();
final List<Cpe> cpes = processXML(xml);
getCveDB().deleteUnusedCpe();
for (Cpe cpe : cpes) {
getCveDB().addCpe(cpe.getValue(), cpe.getVendor(), cpe.getProduct());
}
final long now = System.currentTimeMillis();
getProperties().save(LAST_CPE_UPDATE, Long.toString(now));
LOGGER.info("CPE update complete");
}
} finally {
closeDataStores();
}
}
/**
* Downloads the CPE XML file.
*
* @return the file reference to the CPE.xml file
* @throws UpdateException thrown if there is an issue downloading the XML
* file
*/
private File downloadCpe() throws UpdateException {
File xml;
final URL url;
try {
url = new URL(Settings.getString(Settings.KEYS.CPE_URL));
xml = File.createTempFile("cpe", ".xml", Settings.getTempDirectory());
Downloader.fetchFile(url, xml);
if (url.toExternalForm().endsWith(".xml.gz")) {
extractGzip(xml);
}
} catch (MalformedURLException ex) {
throw new UpdateException("Invalid CPE URL", ex);
} catch (DownloadFailedException ex) {
throw new UpdateException("Unable to download CPE XML file", ex);
} catch (IOException ex) {
throw new UpdateException("Unable to create temporary file to download CPE", ex);
}
return xml;
}
/**
* Parses the CPE XML file to return a list of CPE entries.
*
* @param xml the CPE data file
* @return the list of CPE entries
* @throws UpdateException thrown if there is an issue with parsing the XML
* file
*/
private List<Cpe> processXML(final File xml) throws UpdateException {
try {
final SAXParserFactory factory = SAXParserFactory.newInstance();
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
final SAXParser saxParser = factory.newSAXParser();
final CPEHandler handler = new CPEHandler();
saxParser.parse(xml, handler);
return handler.getData();
} catch (ParserConfigurationException ex) {
throw new UpdateException("Unable to parse CPE XML file due to SAX Parser Issue", ex);
} catch (SAXException ex) {
throw new UpdateException("Unable to parse CPE XML file due to SAX Parser Exception", ex);
} catch (IOException ex) {
throw new UpdateException("Unable to parse CPE XML file due to IO Failure", ex);
}
}
/**
* Checks to find the last time the CPE data was refreshed and if it needs
* to be updated.
*
* @return true if the CPE data should be refreshed
*/
private boolean updateNeeded() {
final long now = System.currentTimeMillis();
final int days = Settings.getInt(Settings.KEYS.CPE_MODIFIED_VALID_FOR_DAYS, 30);
long timestamp = 0;
final String ts = getProperties().getProperty(LAST_CPE_UPDATE);
if (ts != null && ts.matches("^[0-9]+$")) {
timestamp = Long.parseLong(ts);
}
return !DateUtil.withinDateRange(timestamp, now, days);
}
/**
* Extracts the file contained in a gzip archive. The extracted file is
* placed in the exact same path as the file specified.
*
* @param file the archive file
* @throws FileNotFoundException thrown if the file does not exist
* @throws IOException thrown if there is an error extracting the file.
*/
private void extractGzip(File file) throws FileNotFoundException, IOException {
//TODO - move this to a util class as it is duplicative of (copy of) code in the DownloadTask
final String originalPath = file.getPath();
final File gzip = new File(originalPath + ".gz");
if (gzip.isFile() && !gzip.delete()) {
LOGGER.debug("Failed to delete intial temporary file {}", gzip.toString());
gzip.deleteOnExit();
}
if (!file.renameTo(gzip)) {
throw new IOException("Unable to rename '" + file.getPath() + "'");
}
final File newfile = new File(originalPath);
final byte[] buffer = new byte[4096];
GZIPInputStream cin = null;
FileOutputStream out = null;
try {
cin = new GZIPInputStream(new FileInputStream(gzip));
out = new FileOutputStream(newfile);
int len;
while ((len = cin.read(buffer)) > 0) {
out.write(buffer, 0, len);
}
} finally {
if (cin != null) {
try {
cin.close();
} catch (IOException ex) {
LOGGER.trace("ignore", ex);
}
}
if (out != null) {
try {
out.close();
} catch (IOException ex) {
LOGGER.trace("ignore", ex);
}
}
if (gzip.isFile() && !FileUtils.deleteQuietly(gzip)) {
LOGGER.debug("Failed to delete temporary file {}", gzip.toString());
gzip.deleteOnExit();
}
}
}
@Deprecated
public class CpeUpdater { //extends BaseUpdater implements CachedWebDataSource {
//
// /**
// * Static logger.
// */
// private static final Logger LOGGER = LoggerFactory.getLogger(CpeUpdater.class);
//
// @Override
// public void update() throws UpdateException {
// /*
// //the following could be used if this were ever used.
// try {
// if (!Settings.getBoolean(Settings.KEYS.UPDATE_NVDCVE_ENABLED, true)) {
// return;
// }
// } catch (InvalidSettingException ex) {
// LOGGER.trace("invalid setting UPDATE_NVDCVE_ENABLED", ex);
// }
// */
//
// try {
// openDataStores();
// if (updateNeeded()) {
// LOGGER.info("Updating the Common Platform Enumeration (CPE)");
// final File xml = downloadCpe();
// final List<Cpe> cpes = processXML(xml);
// getCveDB().deleteUnusedCpe();
// for (Cpe cpe : cpes) {
// getCveDB().addCpe(cpe.getValue(), cpe.getVendor(), cpe.getProduct());
// }
// final long now = System.currentTimeMillis();
// getProperties().save(LAST_CPE_UPDATE, Long.toString(now));
// LOGGER.info("CPE update complete");
// }
// } finally {
// closeDataStores();
// }
// }
//
// /**
// * Downloads the CPE XML file.
// *
// * @return the file reference to the CPE.xml file
// * @throws UpdateException thrown if there is an issue downloading the XML
// * file
// */
// private File downloadCpe() throws UpdateException {
// File xml;
// final URL url;
// try {
// url = new URL(Settings.getString(Settings.KEYS.CPE_URL));
// xml = File.createTempFile("cpe", ".xml", Settings.getTempDirectory());
// Downloader.fetchFile(url, xml);
// if (url.toExternalForm().endsWith(".xml.gz")) {
// ExtractionUtil.extractGzip(xml);
// }
//
// } catch (MalformedURLException ex) {
// throw new UpdateException("Invalid CPE URL", ex);
// } catch (DownloadFailedException ex) {
// throw new UpdateException("Unable to download CPE XML file", ex);
// } catch (IOException ex) {
// throw new UpdateException("Unable to create temporary file to download CPE", ex);
// }
// return xml;
// }
//
// /**
// * Parses the CPE XML file to return a list of CPE entries.
// *
// * @param xml the CPE data file
// * @return the list of CPE entries
// * @throws UpdateException thrown if there is an issue with parsing the XML
// * file
// */
// private List<Cpe> processXML(final File xml) throws UpdateException {
// try {
// final SAXParser saxParser = XmlUtils.buildSecureSaxParser();
// final CPEHandler handler = new CPEHandler();
// saxParser.parse(xml, handler);
// return handler.getData();
// } catch (ParserConfigurationException ex) {
// throw new UpdateException("Unable to parse CPE XML file due to SAX Parser Issue", ex);
// } catch (SAXException ex) {
// throw new UpdateException("Unable to parse CPE XML file due to SAX Parser Exception", ex);
// } catch (IOException ex) {
// throw new UpdateException("Unable to parse CPE XML file due to IO Failure", ex);
// }
// }
//
// /**
// * Checks to find the last time the CPE data was refreshed and if it needs
// * to be updated.
// *
// * @return true if the CPE data should be refreshed
// */
// private boolean updateNeeded() {
// final long now = System.currentTimeMillis();
// final int days = Settings.getInt(Settings.KEYS.CPE_MODIFIED_VALID_FOR_DAYS, 30);
// long timestamp = 0;
// final String ts = getProperties().getProperty(LAST_CPE_UPDATE);
// if (ts != null && ts.matches("^[0-9]+$")) {
// timestamp = Long.parseLong(ts);
// }
// return !DateUtil.withinDateRange(timestamp, now, days);
// }
}

View File

@@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory;
/**
* Checks the gh-pages dependency-check site to determine the current released
* version number. If the released version number is greater then the running
* version number. If the released version number is greater than the running
* version number a warning is printed recommending that an upgrade be
* performed.
*
@@ -57,11 +57,6 @@ public class EngineVersionCheck implements CachedWebDataSource {
* The property key indicating when the last version check occurred.
*/
public static final String CURRENT_ENGINE_RELEASE = "CurrentEngineRelease";
/**
* Reference to the Cve Database.
*/
private CveDB cveDB = null;
/**
* The version retrieved from the database properties or web to check
* against.
@@ -98,11 +93,21 @@ public class EngineVersionCheck implements CachedWebDataSource {
*/
@Override
public void update() throws UpdateException {
try {
if (Settings.getBoolean(Settings.KEYS.AUTO_UPDATE)) {
openDatabase();
try (CveDB db = CveDB.getInstance()) {
final boolean autoupdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE, true);
final boolean enabled = Settings.getBoolean(Settings.KEYS.UPDATE_VERSION_CHECK_ENABLED, true);
final String original = Settings.getString(Settings.KEYS.CVE_ORIGINAL_MODIFIED_20_URL);
final String current = Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL);
/*
* Only update if auto-update is enabled, the engine check is
* enabled, and the NVD CVE URLs have not been modified (i.e. the
* user has not configured them to point to an internal source).
*/
if (enabled && autoupdate && original != null && original.equals(current)) {
LOGGER.debug("Begin Engine Version Check");
final DatabaseProperties properties = cveDB.getDatabaseProperties();
final DatabaseProperties properties = db.getDatabaseProperties();
final long lastChecked = Long.parseLong(properties.getProperty(ENGINE_VERSION_CHECKED_ON, "0"));
final long now = System.currentTimeMillis();
updateToVersion = properties.getProperty(CURRENT_ENGINE_RELEASE, "");
@@ -121,8 +126,6 @@ public class EngineVersionCheck implements CachedWebDataSource {
throw new UpdateException("Error occurred updating database properties.");
} catch (InvalidSettingException ex) {
LOGGER.debug("Unable to determine if autoupdate is enabled", ex);
} finally {
closeDatabase();
}
}
@@ -172,33 +175,6 @@ public class EngineVersionCheck implements CachedWebDataSource {
return false;
}
/**
* Opens the CVE and CPE data stores.
*
* @throws DatabaseException thrown if a data store cannot be opened
*/
protected final void openDatabase() throws DatabaseException {
if (cveDB != null) {
return;
}
cveDB = new CveDB();
cveDB.open();
}
/**
* Closes the CVE and CPE data stores.
*/
protected void closeDatabase() {
if (cveDB != null) {
try {
cveDB.close();
cveDB = null;
} catch (Throwable ignore) {
LOGGER.trace("Error closing the cveDB", ignore);
}
}
}
/**
* Retrieves the current released version number from the github
* documentation site.

View File

@@ -17,14 +17,26 @@
*/
package org.owasp.dependencycheck.data.update;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.net.MalformedURLException;
import java.util.Calendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.net.URL;
import java.nio.channels.FileLock;
import java.util.Date;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
@@ -36,6 +48,7 @@ import org.owasp.dependencycheck.data.update.nvd.NvdCveInfo;
import org.owasp.dependencycheck.data.update.nvd.ProcessTask;
import org.owasp.dependencycheck.data.update.nvd.UpdateableNvdCve;
import org.owasp.dependencycheck.utils.DateUtil;
import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
@@ -47,40 +60,96 @@ import org.slf4j.LoggerFactory;
*
* @author Jeremy Long
*/
public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
public class NvdCveUpdater implements CachedWebDataSource {
/**
* The logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(NvdCveUpdater.class);
/**
* The max thread pool size to use when downloading files.
* The thread pool size to use for CPU-intensive tasks.
*/
public static final int MAX_THREAD_POOL_SIZE = Settings.getInt(Settings.KEYS.MAX_DOWNLOAD_THREAD_POOL_SIZE, 3);
private static final int PROCESSING_THREAD_POOL_SIZE = Runtime.getRuntime().availableProcessors();
/**
* The thread pool size to use when downloading files.
*/
private static final int DOWNLOAD_THREAD_POOL_SIZE = Math.round(1.5f * Runtime.getRuntime().availableProcessors());
/**
* ExecutorService for CPU-intense processing tasks.
*/
private ExecutorService processingExecutorService = null;
/**
* ExecutorService for tasks that involve blocking activities and are not
* very CPU-intensive, e.g. downloading files.
*/
private ExecutorService downloadExecutorService = null;
/**
* Reference to the DAO.
*/
private CveDB cveDb = null;
/**
* The properties obtained from the database.
*/
private DatabaseProperties dbProperties = null;
/**
* Downloads the latest NVD CVE XML file from the web and imports it into
* the current CVE Database.
* the current CVE Database. A lock on a file is obtained in an attempt to
prevent more than one thread/JVM from updating the database at the same
time. This method may sleep up to 5 minutes.
*
* @throws UpdateException is thrown if there is an error updating the
* database
*/
@Override
public void update() throws UpdateException {
public synchronized void update() throws UpdateException {
if (isUpdateConfiguredFalse()) {
return;
}
FileLock lock = null;
RandomAccessFile ulFile = null;
File lockFile = null;
try {
openDataStores();
boolean autoUpdate = true;
try {
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
} catch (InvalidSettingException ex) {
LOGGER.debug("Invalid setting for auto-update; using true.");
if (ConnectionFactory.isH2Connection()) {
final File dir = Settings.getDataDirectory();
lockFile = new File(dir, "odc.update.lock");
if (lockFile.isFile() && getFileAge(lockFile) > 5 && !lockFile.delete()) {
LOGGER.warn("An old db update lock file was found but the system was unable to delete the file. Consider manually deleting " + lockFile.getAbsolutePath());
}
int ctr = 0;
do {
try {
if (!lockFile.exists() && lockFile.createNewFile()) {
ulFile = new RandomAccessFile(lockFile, "rw");
lock = ulFile.getChannel().lock();
}
} catch (IOException ex) {
LOGGER.trace("Expected error as another thread has likely locked the file", ex);
}
if (lock == null || !lock.isValid()) {
try {
LOGGER.debug(String.format("Sleeping thread %s for 5 seconds because we could not obtain the update lock.", Thread.currentThread().getName()));
Thread.sleep(5000);
} catch (InterruptedException ex) {
LOGGER.trace("ignorable error, sleep was interrupted.", ex);
}
}
} while (++ctr < 60 && (lock == null || !lock.isValid()));
if (lock == null || !lock.isValid()) {
throw new UpdateException("Unable to obtain the update lock, skipping the database update. Skippinig the database update.");
}
}
if (autoUpdate && checkUpdate()) {
initializeExecutorServices();
cveDb = CveDB.getInstance();
dbProperties = cveDb.getDatabaseProperties();
if (checkUpdate()) {
final UpdateableNvdCve updateable = getUpdatesNeeded();
if (updateable.isUpdateNeeded()) {
performUpdate(updateable);
}
getProperties().save(DatabaseProperties.LAST_CHECKED, Long.toString(System.currentTimeMillis()));
dbProperties.save(DatabaseProperties.LAST_CHECKED, Long.toString(System.currentTimeMillis()));
}
} catch (MalformedURLException ex) {
throw new UpdateException("NVD CVE properties files contain an invalid URL, unable to update the data to use the most current data.", ex);
@@ -92,8 +161,88 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
"If you are behind a proxy you may need to configure dependency-check to use the proxy.");
}
throw new UpdateException("Unable to download the NVD CVE data.", ex);
} catch (DatabaseException ex) {
throw new UpdateException("Database Exception, unable to update the data to use the most current data.", ex);
} catch (IOException ex) {
throw new UpdateException("Database Exception", ex);
} finally {
closeDataStores();
shutdownExecutorServices();
cveDb.close();
if (lock != null) {
try {
lock.release();
} catch (IOException ex) {
LOGGER.trace("Ignorable exception", ex);
}
}
if (ulFile != null) {
try {
ulFile.close();
} catch (IOException ex) {
LOGGER.trace("Ignorable exception", ex);
}
}
if (lockFile != null && lockFile.isFile() && !lockFile.delete()) {
LOGGER.error("Lock file '{}' was unable to be deleted. Please manually delete this file.", lockFile.toString());
}
}
}
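The update() method above serializes database updates across JVMs by taking an exclusive lock on a marker file, retrying for several minutes before giving up, and always releasing the lock and deleting the file in the finally block. A condensed sketch of that acquire-with-retry idea using only the standard library (the file name, retry count, and sleep interval are illustrative):

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileLock;

final class UpdateLockExample {

    /** Tries to obtain an exclusive lock on the file, sleeping between attempts; returns null on failure. */
    static FileLock acquire(File lockFile, int maxAttempts) throws IOException {
        final RandomAccessFile raf = new RandomAccessFile(lockFile, "rw");
        for (int attempt = 0; attempt < maxAttempts; attempt++) {
            final FileLock lock = raf.getChannel().tryLock();
            if (lock != null && lock.isValid()) {
                return lock; //caller must release() the lock and close the channel when done
            }
            try {
                Thread.sleep(5000); //another process likely holds the lock; wait and retry
            } catch (InterruptedException ex) {
                Thread.currentThread().interrupt();
                break;
            }
        }
        raf.close();
        return null;
    }

    public static void main(String[] args) throws IOException {
        final File lockFile = new File("odc.update.lock"); //illustrative path
        final FileLock lock = acquire(lockFile, 60);
        if (lock != null) {
            try {
                //perform the update while holding the lock
            } finally {
                lock.release();
                lock.channel().close();
            }
        }
    }
}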
/**
* Checks if the system is configured NOT to update.
*
* @return false if the system is configured to perform an update; otherwise
* true
*/
private boolean isUpdateConfiguredFalse() {
try {
if (!Settings.getBoolean(Settings.KEYS.UPDATE_NVDCVE_ENABLED, true)) {
return true;
}
} catch (InvalidSettingException ex) {
LOGGER.trace("invalid setting UPDATE_NVDCVE_ENABLED", ex);
}
boolean autoUpdate = true;
try {
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
} catch (InvalidSettingException ex) {
LOGGER.debug("Invalid setting for auto-update; using true.");
}
return !autoUpdate;
}
/**
* Returns the age of the file in minutes.
*
* @param file the file whose age to calculate
* @return the age of the file in minutes
*/
private long getFileAge(File file) {
final Date d = new Date();
final long modified = file.lastModified();
return (d.getTime() - modified) / 1000 / 60;
}
/**
* Initialize the executor services for download and processing of the NVD
* CVE XML data.
*/
protected void initializeExecutorServices() {
processingExecutorService = Executors.newFixedThreadPool(PROCESSING_THREAD_POOL_SIZE);
downloadExecutorService = Executors.newFixedThreadPool(DOWNLOAD_THREAD_POOL_SIZE);
LOGGER.debug("#download threads: {}", DOWNLOAD_THREAD_POOL_SIZE);
LOGGER.debug("#processing threads: {}", PROCESSING_THREAD_POOL_SIZE);
}
/**
* Shutdown and cleanup of resources used by the executor services.
*/
private void shutdownExecutorServices() {
if (processingExecutorService != null) {
processingExecutorService.shutdownNow();
}
if (downloadExecutorService != null) {
downloadExecutorService.shutdownNow();
}
}
@@ -115,7 +264,7 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
if (dataExists() && 0 < validForHours) {
// ms Valid = valid (hours) x 60 min/hour x 60 sec/min x 1000 ms/sec
final long msValid = validForHours * 60L * 60L * 1000L;
final long lastChecked = Long.parseLong(getProperties().getProperty(DatabaseProperties.LAST_CHECKED, "0"));
final long lastChecked = Long.parseLong(dbProperties.getProperty(DatabaseProperties.LAST_CHECKED, "0"));
final long now = System.currentTimeMillis();
proceed = (now - lastChecked) > msValid;
if (!proceed) {
@@ -133,17 +282,10 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
* @return true if the database contains data
*/
private boolean dataExists() {
CveDB cve = null;
try {
cve = new CveDB();
cve.open();
try (CveDB cve = CveDB.getInstance()) {
return cve.dataExists();
} catch (DatabaseException ex) {
return false;
} finally {
if (cve != null) {
cve.close();
}
}
}
@@ -170,41 +312,28 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
LOGGER.info("NVD CVE requires several updates; this could take a couple of minutes.");
}
final int poolSize = (MAX_THREAD_POOL_SIZE < maxUpdates) ? MAX_THREAD_POOL_SIZE : maxUpdates;
final ExecutorService downloadExecutors = Executors.newFixedThreadPool(poolSize);
final ExecutorService processExecutor = Executors.newSingleThreadExecutor();
final Set<Future<Future<ProcessTask>>> downloadFutures = new HashSet<Future<Future<ProcessTask>>>(maxUpdates);
final Set<Future<Future<ProcessTask>>> downloadFutures = new HashSet<>(maxUpdates);
for (NvdCveInfo cve : updateable) {
if (cve.getNeedsUpdate()) {
final DownloadTask call = new DownloadTask(cve, processExecutor, getCveDB(), Settings.getInstance());
downloadFutures.add(downloadExecutors.submit(call));
final DownloadTask call = new DownloadTask(cve, processingExecutorService, cveDb, Settings.getInstance());
downloadFutures.add(downloadExecutorService.submit(call));
}
}
downloadExecutors.shutdown();
//next, move the future future processTasks to just future processTasks
final Set<Future<ProcessTask>> processFutures = new HashSet<Future<ProcessTask>>(maxUpdates);
final Set<Future<ProcessTask>> processFutures = new HashSet<>(maxUpdates);
for (Future<Future<ProcessTask>> future : downloadFutures) {
Future<ProcessTask> task = null;
Future<ProcessTask> task;
try {
task = future.get();
} catch (InterruptedException ex) {
downloadExecutors.shutdownNow();
processExecutor.shutdownNow();
LOGGER.debug("Thread was interrupted during download", ex);
throw new UpdateException("The download was interrupted", ex);
} catch (ExecutionException ex) {
downloadExecutors.shutdownNow();
processExecutor.shutdownNow();
LOGGER.debug("Thread was interrupted during download execution", ex);
throw new UpdateException("The execution of the download was interrupted", ex);
}
if (task == null) {
downloadExecutors.shutdownNow();
processExecutor.shutdownNow();
LOGGER.debug("Thread was interrupted during download");
throw new UpdateException("The download was interrupted; unable to complete the update");
} else {
@@ -219,22 +348,18 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
throw task.getException();
}
} catch (InterruptedException ex) {
processExecutor.shutdownNow();
LOGGER.debug("Thread was interrupted during processing", ex);
throw new UpdateException(ex);
} catch (ExecutionException ex) {
processExecutor.shutdownNow();
LOGGER.debug("Execution Exception during process", ex);
throw new UpdateException(ex);
} finally {
processExecutor.shutdown();
}
}
if (maxUpdates >= 1) { //ensure the modified file date gets written (we may not have actually updated it)
getProperties().save(updateable.get(MODIFIED));
dbProperties.save(updateable.get(MODIFIED));
LOGGER.info("Begin database maintenance.");
getCveDB().cleanupDatabase();
cveDb.cleanupDatabase();
LOGGER.info("End database maintenance.");
}
}
@@ -254,7 +379,8 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
* updated properties file
*/
protected final UpdateableNvdCve getUpdatesNeeded() throws MalformedURLException, DownloadFailedException, UpdateException {
UpdateableNvdCve updates = null;
LOGGER.info("starting getUpdatesNeeded() ...");
UpdateableNvdCve updates;
try {
updates = retrieveCurrentTimestampsFromWeb();
} catch (InvalidDataException ex) {
@@ -269,14 +395,24 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
if (updates == null) {
throw new DownloadFailedException("Unable to retrieve the timestamps of the currently published NVD CVE data");
}
if (!getProperties().isEmpty()) {
if (dbProperties != null && !dbProperties.isEmpty()) {
try {
final long lastUpdated = Long.parseLong(getProperties().getProperty(DatabaseProperties.LAST_UPDATED, "0"));
final int startYear = Settings.getInt(Settings.KEYS.CVE_START_YEAR, 2002);
final int endYear = Calendar.getInstance().get(Calendar.YEAR);
boolean needsFullUpdate = false;
for (int y = startYear; y <= endYear; y++) {
final long val = Long.parseLong(dbProperties.getProperty(DatabaseProperties.LAST_UPDATED_BASE + y, "0"));
if (val == 0) {
needsFullUpdate = true;
}
}
final long lastUpdated = Long.parseLong(dbProperties.getProperty(DatabaseProperties.LAST_UPDATED, "0"));
final long now = System.currentTimeMillis();
final int days = Settings.getInt(Settings.KEYS.CVE_MODIFIED_VALID_FOR_DAYS, 7);
if (lastUpdated == updates.getTimeStamp(MODIFIED)) {
if (!needsFullUpdate && lastUpdated == updates.getTimeStamp(MODIFIED)) {
updates.clear(); //we don't need to update anything.
} else if (DateUtil.withinDateRange(lastUpdated, now, days)) {
} else if (!needsFullUpdate && DateUtil.withinDateRange(lastUpdated, now, days)) {
for (NvdCveInfo entry : updates) {
if (MODIFIED.equals(entry.getId())) {
entry.setNeedsUpdate(true);
@@ -291,7 +427,7 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
} else {
long currentTimestamp = 0;
try {
currentTimestamp = Long.parseLong(getProperties().getProperty(DatabaseProperties.LAST_UPDATED_BASE
currentTimestamp = Long.parseLong(dbProperties.getProperty(DatabaseProperties.LAST_UPDATED_BASE
+ entry.getId(), "0"));
} catch (NumberFormatException ex) {
LOGGER.debug("Error parsing '{}' '{}' from nvdcve.lastupdated",
@@ -326,20 +462,101 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
private UpdateableNvdCve retrieveCurrentTimestampsFromWeb()
throws MalformedURLException, DownloadFailedException, InvalidDataException, InvalidSettingException {
final UpdateableNvdCve updates = new UpdateableNvdCve();
updates.add(MODIFIED, Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL),
Settings.getString(Settings.KEYS.CVE_MODIFIED_12_URL),
false);
final int start = Settings.getInt(Settings.KEYS.CVE_START_YEAR);
final int end = Calendar.getInstance().get(Calendar.YEAR);
final Map<String, Long> lastModifiedDates = retrieveLastModifiedDates(start, end);
final UpdateableNvdCve updates = new UpdateableNvdCve();
final String baseUrl20 = Settings.getString(Settings.KEYS.CVE_SCHEMA_2_0);
final String baseUrl12 = Settings.getString(Settings.KEYS.CVE_SCHEMA_1_2);
for (int i = start; i <= end; i++) {
updates.add(Integer.toString(i), String.format(baseUrl20, i),
String.format(baseUrl12, i),
true);
final String url = String.format(baseUrl20, i);
updates.add(Integer.toString(i), url, String.format(baseUrl12, i),
lastModifiedDates.get(url), true);
}
final String url = Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL);
updates.add(MODIFIED, url, Settings.getString(Settings.KEYS.CVE_MODIFIED_12_URL),
lastModifiedDates.get(url), false);
return updates;
}
/**
* Retrieves the timestamps from the NVD CVE meta data file.
*
* @param startYear the first year whose item to check for the timestamp
* @param endYear the last year whose item to check for the timestamp
* @return the timestamps from the currently published NVD CVE downloads
* page
* @throws MalformedURLException thrown if the URL for the NVD CVE meta data
* is incorrect.
* @throws DownloadFailedException thrown if there is an error downloading
* the NVD CVE meta data file
*/
private Map<String, Long> retrieveLastModifiedDates(int startYear, int endYear)
throws MalformedURLException, DownloadFailedException {
final Set<String> urls = new HashSet<>();
final String baseUrl20 = Settings.getString(Settings.KEYS.CVE_SCHEMA_2_0);
for (int i = startYear; i <= endYear; i++) {
final String url = String.format(baseUrl20, i);
urls.add(url);
}
urls.add(Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL));
final Map<String, Future<Long>> timestampFutures = new HashMap<>();
for (String url : urls) {
final TimestampRetriever timestampRetriever = new TimestampRetriever(url);
final Future<Long> future = downloadExecutorService.submit(timestampRetriever);
timestampFutures.put(url, future);
}
final Map<String, Long> lastModifiedDates = new HashMap<>();
for (String url : urls) {
final Future<Long> timestampFuture = timestampFutures.get(url);
final long timestamp;
try {
timestamp = timestampFuture.get(60, TimeUnit.SECONDS);
} catch (InterruptedException | ExecutionException | TimeoutException e) {
throw new DownloadFailedException(e);
}
lastModifiedDates.put(url, timestamp);
}
return lastModifiedDates;
}
/**
* Retrieves the last modified timestamp from a NVD CVE meta data file.
*/
private static class TimestampRetriever implements Callable<Long> {
/**
* The URL to obtain the timestamp from.
*/
private final String url;
/**
* Instantiates a new timestamp retriever object.
*
* @param url the URL to hit
*/
TimestampRetriever(String url) {
this.url = url;
}
@Override
public Long call() throws Exception {
LOGGER.debug("Checking for updates from: {}", url);
try {
Settings.initialize();
return Downloader.getLastModified(new URL(url));
} finally {
Settings.cleanup(false);
}
}
}
}
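Downloader.getLastModified(URL) used by TimestampRetriever above is a project helper whose implementation is not shown here; as a rough standard-library equivalent, the Last-Modified header of each data URL could be read as below (the use of a HEAD request and the class name are assumptions of the sketch, not the helper's actual behavior):

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

final class LastModifiedExample {

    /** Returns the Last-Modified timestamp (epoch millis) of a URL, or 0 if the server omits it. */
    static long lastModified(String url) throws IOException {
        final HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        try {
            conn.setRequestMethod("HEAD"); //no need to download the body just for a timestamp
            return conn.getLastModified();
        } finally {
            conn.disconnect();
        }
    }
}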

View File

@@ -30,7 +30,7 @@ import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
* A SAX Handler that will parse the CPE XML and load it into the databse.
* A SAX Handler that will parse the CPE XML and load it into the database.
*
* @author Jeremy Long
*/
@@ -60,7 +60,7 @@ public class CPEHandler extends DefaultHandler {
/**
* The list of CPE values.
*/
private final List<Cpe> data = new ArrayList<Cpe>();
private final List<Cpe> data = new ArrayList<>();
/**
* Returns the list of CPE values.
@@ -154,35 +154,10 @@ public class CPEHandler extends DefaultHandler {
public void endElement(String uri, String localName, String qName) throws SAXException {
current.setNode(qName);
if (current.isSchemaVersionNode() && !CURRENT_SCHEMA_VERSION.equals(nodeText.toString())) {
throw new SAXException("ERROR: Unexpecgted CPE Schema Version, expected: "
throw new SAXException("ERROR: Unexpected CPE Schema Version, expected: "
+ CURRENT_SCHEMA_VERSION + ", file is: " + nodeText);
}
// } else if (current.isCpeItemNode()) {
// //do nothing
// } else if (current.isTitleNode()) {
// //do nothing
// } else if (current.isCpeListNode()) {
// //do nothing
// } else if (current.isMetaNode()) {
// //do nothing
// } else if (current.isNotesNode()) {
// //do nothing
// } else if (current.isNoteNode()) {
// //do nothing
// } else if (current.isCheckNode()) {
// //do nothing
// } else if (current.isGeneratorNode()) {
// //do nothing
// } else if (current.isProductNameNode()) {
// //do nothing
// } else if (current.isProductVersionNode()) {
// //do nothing
// else if (current.isTimestampNode()) {
// //do nothing
// } else {
// throw new SAXException("ERROR STATE: Unexpected qName '" + qName + "'");
// }
}
// <editor-fold defaultstate="collapsed" desc="The Element Class that maintains state information about the current node">
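CPEHandler above is a standard SAX DefaultHandler: the parser streams the dictionary and fires callbacks, and the handler accumulates state as it goes. A minimal sketch of how such a handler is driven (ElementCounter and the file name are illustrative, not DependencyCheck code):

import java.io.File;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;

public class SaxHandlerSketch {

    /** Counts start-element events instead of building Cpe objects. */
    static class ElementCounter extends DefaultHandler {
        private int count;

        @Override
        public void startElement(String uri, String localName, String qName, Attributes attributes) {
            count++;
        }

        int getCount() {
            return count;
        }
    }

    public static void main(String[] args) throws Exception {
        final SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
        final ElementCounter handler = new ElementCounter();
        parser.parse(new File("official-cpe-dictionary_v2.2.xml"), handler);
        System.out.println("elements seen: " + handler.getCount());
    }
}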

View File

@@ -17,6 +17,7 @@
*/
package org.owasp.dependencycheck.data.update.cpe;
import org.apache.commons.lang3.StringUtils;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import org.owasp.dependencycheck.data.update.exception.InvalidDataException;
@@ -36,7 +37,8 @@ public class Cpe {
*/
public Cpe(String value) throws UnsupportedEncodingException, InvalidDataException {
this.value = value;
final String[] data = value.substring(7).split(":");
final String valueWithoutPrefix = value.substring(7);
final String[] data = StringUtils.split(valueWithoutPrefix, ':');
if (data.length >= 2) {
vendor = URLDecoder.decode(data[0].replace("+", "%2B"), "UTF-8");
product = URLDecoder.decode(data[1].replace("+", "%2B"), "UTF-8");
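The replace("+", "%2B") before URLDecoder.decode above is deliberate: URLDecoder treats a bare '+' as an encoded space, so plus signs in a CPE component would silently turn into spaces without the escape. A small sketch of the difference (the sample strings are illustrative):

import java.net.URLDecoder;

public class CpeDecodeSketch {
    public static void main(String[] args) throws Exception {
        // '+' decodes to a space by default...
        System.out.println(URLDecoder.decode("libstdc++", "UTF-8"));                      // prints "libstdc  "
        // ...so escape literal plus signs first to keep them
        System.out.println(URLDecoder.decode("libstdc++".replace("+", "%2B"), "UTF-8"));  // prints "libstdc++"
        // percent-encoded sequences still decode normally
        System.out.println(URLDecoder.decode("internet%5finformation_services", "UTF-8")); // prints "internet_information_services"
    }
}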

View File

@@ -19,20 +19,17 @@ package org.owasp.dependencycheck.data.update.nvd;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.zip.GZIPInputStream;
import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.ExtractionUtil;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -48,6 +45,30 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
* The Logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(DownloadTask.class);
/**
* The CVE DB to use when processing the files.
*/
private final CveDB cveDB;
/**
* The processor service to pass the results of the download to.
*/
private final ExecutorService processorService;
/**
* The NVD CVE Meta Data.
*/
private NvdCveInfo nvdCveInfo;
/**
* A reference to the global settings object.
*/
private final Settings settings;
/**
* the first of the two NVD CVE files to download.
*/
private File first;
/**
* the second of the two NVD CVE files to download.
*/
private File second;
/**
* Simple constructor for the callable download task.
@@ -79,22 +100,6 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
this.second = file2;
}
/**
* The CVE DB to use when processing the files.
*/
private final CveDB cveDB;
/**
* The processor service to pass the results of the download to.
*/
private final ExecutorService processorService;
/**
* The NVD CVE Meta Data.
*/
private NvdCveInfo nvdCveInfo;
/**
* A reference to the global settings object.
*/
private final Settings settings;
/**
* Get the value of nvdCveInfo.
@@ -113,10 +118,6 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
public void setNvdCveInfo(NvdCveInfo nvdCveInfo) {
this.nvdCveInfo = nvdCveInfo;
}
/**
* a file.
*/
private File first;
/**
* Get the value of first.
@@ -135,10 +136,6 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
public void setFirst(File first) {
this.first = first;
}
/**
* a file.
*/
private File second;
/**
* Get the value of second.
@@ -179,10 +176,10 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
return null;
}
if (url1.toExternalForm().endsWith(".xml.gz") && !isXml(first)) {
extractGzip(first);
ExtractionUtil.extractGzip(first);
}
if (url2.toExternalForm().endsWith(".xml.gz") && !isXml(second)) {
extractGzip(second);
ExtractionUtil.extractGzip(second);
}
LOGGER.info("Download Complete for NVD CVE - {} ({} ms)", nvdCveInfo.getId(),
@@ -226,87 +223,19 @@ public class DownloadTask implements Callable<Future<ProcessTask>> {
if (file == null || !file.isFile()) {
return false;
}
InputStream is = null;
try {
is = new FileInputStream(file);
try (InputStream is = new FileInputStream(file)) {
final byte[] buf = new byte[5];
int read = 0;
try {
read = is.read(buf);
} catch (IOException ex) {
return false;
}
int read;
read = is.read(buf);
return read == 5
&& buf[0] == '<'
&& (buf[1] == '?')
&& (buf[2] == 'x' || buf[2] == 'X')
&& (buf[3] == 'm' || buf[3] == 'M')
&& (buf[4] == 'l' || buf[4] == 'L');
} catch (FileNotFoundException ex) {
} catch (IOException ex) {
LOGGER.debug("Error checking if file is xml", ex);
return false;
} finally {
if (is != null) {
try {
is.close();
} catch (IOException ex) {
LOGGER.debug("Error closing stream", ex);
}
}
}
}
/**
* Extracts the file contained in a gzip archive. The extracted file is
* placed in the exact same path as the file specified.
*
* @param file the archive file
* @throws FileNotFoundException thrown if the file does not exist
* @throws IOException thrown if there is an error extracting the file.
*/
private void extractGzip(File file) throws FileNotFoundException, IOException {
final String originalPath = file.getPath();
final File gzip = new File(originalPath + ".gz");
if (gzip.isFile() && !gzip.delete()) {
LOGGER.debug("Failed to delete initial temporary file when extracting 'gz' {}", gzip.toString());
gzip.deleteOnExit();
}
if (!file.renameTo(gzip)) {
throw new IOException("Unable to rename '" + file.getPath() + "'");
}
final File newfile = new File(originalPath);
final byte[] buffer = new byte[4096];
GZIPInputStream cin = null;
FileOutputStream out = null;
try {
cin = new GZIPInputStream(new FileInputStream(gzip));
out = new FileOutputStream(newfile);
int len;
while ((len = cin.read(buffer)) > 0) {
out.write(buffer, 0, len);
}
} finally {
if (cin != null) {
try {
cin.close();
} catch (IOException ex) {
LOGGER.trace("ignore", ex);
}
}
if (out != null) {
try {
out.close();
} catch (IOException ex) {
LOGGER.trace("ignore", ex);
}
}
if (gzip.isFile() && !FileUtils.deleteQuietly(gzip)) {
LOGGER.debug("Failed to delete temporary file when extracting 'gz' {}", gzip.toString());
gzip.deleteOnExit();
}
}
}
}
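The extractGzip method removed above (its work now delegated to ExtractionUtil.extractGzip) is mostly stream plumbing; with try-with-resources, the same cleanup the isXml change adopts, it shrinks considerably. A sketch along those lines, assuming nothing about ExtractionUtil's actual implementation:

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.zip.GZIPInputStream;

public class GzipExtractSketch {

    /** Decompresses a .gz archive into the given target file. */
    static void extractGzip(File gzip, File target) throws IOException {
        final byte[] buffer = new byte[4096];
        try (GZIPInputStream in = new GZIPInputStream(new FileInputStream(gzip));
                OutputStream out = new FileOutputStream(target)) {
            int len;
            while ((len = in.read(buffer)) > 0) {
                out.write(buffer, 0, len);
            }
        } // both streams are closed here, even if an IOException was thrown
    }

    public static void main(String[] args) throws IOException {
        extractGzip(new File("nvdcve-2.0-2017.xml.gz"), new File("nvdcve-2.0-2017.xml"));
    }
}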

View File

@@ -93,7 +93,7 @@ public class NvdCve12Handler extends DefaultHandler {
skip = "1".equals(reject);
if (!skip) {
vulnerability = attributes.getValue("name");
software = new ArrayList<VulnerableSoftware>();
software = new ArrayList<>();
} else {
vulnerability = null;
software = null;
@@ -132,7 +132,7 @@ public class NvdCve12Handler extends DefaultHandler {
if (!CURRENT_SCHEMA_VERSION.equals(nvdVer)) {
throw new SAXNotSupportedException("Schema version " + nvdVer + " is not supported");
}
vulnerabilities = new HashMap<String, List<VulnerableSoftware>>();
vulnerabilities = new HashMap<>();
}
}

View File

@@ -33,6 +33,8 @@ import org.xml.sax.SAXException;
import org.xml.sax.SAXNotSupportedException;
import org.xml.sax.helpers.DefaultHandler;
import static org.owasp.dependencycheck.data.update.nvd.NvdCve20Handler.AttributeValues.*;
/**
* A SAX Handler that will parse the NVD CVE XML (schema version 2.0).
*
@@ -48,6 +50,19 @@ public class NvdCve20Handler extends DefaultHandler {
* the current supported schema version.
*/
private static final String CURRENT_SCHEMA_VERSION = "2.0";
/**
* a possible attribute value of the {@link AttributeValues#XML_LANG}
* attribute
*/
private static final String EN = "en";
/**
* the prefix of the node text of a CPE
*/
private static final String CPE_NODE_TEXT_PREFIX = "cpe:/a:";
/**
* the node text of an entry marked for deletion
*/
private static final String REJECT_NODE_TEXT = "** REJECT **";
/**
* the current element.
*/
@@ -73,6 +88,21 @@ public class NvdCve20Handler extends DefaultHandler {
*/
private int totalNumberOfEntries;
/**
* The total number of application entries parsed.
*/
private int totalNumberOfApplicationEntries;
/**
* the cve database.
*/
private CveDB cveDB;
/**
* A list of CVE entries and associated VulnerableSoftware entries that
* contain previous entries.
*/
private Map<String, List<VulnerableSoftware>> prevVersionVulnMap;
/**
* Get the value of totalNumberOfEntries.
*
@@ -81,10 +111,6 @@ public class NvdCve20Handler extends DefaultHandler {
public int getTotalNumberOfEntries() {
return totalNumberOfEntries;
}
/**
* The total number of application entries parsed.
*/
private int totalNumberOfApplicationEntries;
/**
* Get the value of totalNumberOfApplicationEntries.
@@ -101,30 +127,30 @@ public class NvdCve20Handler extends DefaultHandler {
if (current.isEntryNode()) {
hasApplicationCpe = false;
vulnerability = new Vulnerability();
vulnerability.setName(attributes.getValue("id"));
vulnerability.setName(attributes.getValue(ID));
} else if (current.isVulnProductNode()) {
nodeText = new StringBuilder(100);
} else if (current.isVulnReferencesNode()) {
final String lang = attributes.getValue("xml:lang");
if ("en".equals(lang)) {
final String lang = attributes.getValue(XML_LANG);
if (EN.equals(lang)) {
reference = new Reference();
} else {
reference = null;
}
} else if (reference != null && current.isVulnReferenceNode()) {
reference.setUrl(attributes.getValue("href"));
reference.setUrl(attributes.getValue(HREF));
nodeText = new StringBuilder(130);
} else if (reference != null && current.isVulnSourceNode()) {
nodeText = new StringBuilder(30);
} else if (current.isVulnSummaryNode()) {
nodeText = new StringBuilder(500);
} else if (current.isNVDNode()) {
final String nvdVer = attributes.getValue("nvd_xml_version");
final String nvdVer = attributes.getValue(NVD_XML_VERSION);
if (!CURRENT_SCHEMA_VERSION.equals(nvdVer)) {
throw new SAXNotSupportedException("Schema version " + nvdVer + " is not supported");
}
} else if (current.isVulnCWENode()) {
vulnerability.setCwe(attributes.getValue("id"));
vulnerability.setCwe(attributes.getValue(ID));
} else if (current.isCVSSScoreNode()) {
nodeText = new StringBuilder(5);
} else if (current.isCVSSAccessVectorNode()) {
@@ -158,9 +184,7 @@ public class NvdCve20Handler extends DefaultHandler {
totalNumberOfApplicationEntries += 1;
try {
saveEntry(vulnerability);
} catch (DatabaseException ex) {
throw new SAXException(ex);
} catch (CorruptIndexException ex) {
} catch (DatabaseException | CorruptIndexException ex) {
throw new SAXException(ex);
} catch (IOException ex) {
throw new SAXException(ex);
@@ -196,7 +220,7 @@ public class NvdCve20Handler extends DefaultHandler {
nodeText = null;
} else if (current.isVulnProductNode()) {
final String cpe = nodeText.toString();
if (cpe.startsWith("cpe:/a:")) {
if (cpe.startsWith(CPE_NODE_TEXT_PREFIX)) {
hasApplicationCpe = true;
vulnerability.addVulnerableSoftware(cpe);
}
@@ -212,16 +236,12 @@ public class NvdCve20Handler extends DefaultHandler {
nodeText = null;
} else if (current.isVulnSummaryNode()) {
vulnerability.setDescription(nodeText.toString());
if (nodeText.indexOf("** REJECT **") >= 0) {
if (nodeText.indexOf(REJECT_NODE_TEXT) >= 0) {
hasApplicationCpe = true; //ensure we process this to delete the vuln
}
nodeText = null;
}
}
/**
* the cve database.
*/
private CveDB cveDB;
/**
* Sets the cveDB.
@@ -231,15 +251,12 @@ public class NvdCve20Handler extends DefaultHandler {
public void setCveDB(CveDB db) {
cveDB = db;
}
/**
* A list of CVE entries and associated VulnerableSoftware entries that contain previous entries.
*/
private Map<String, List<VulnerableSoftware>> prevVersionVulnMap;
/**
* Sets the prevVersionVulnMap.
*
* @param map the map of vulnerable software with previous versions being vulnerable
* @param map the map of vulnerable software with previous versions being
* vulnerable
*/
public void setPrevVersionVulnMap(Map<String, List<VulnerableSoftware>> map) {
prevVersionVulnMap = map;
@@ -249,7 +266,8 @@ public class NvdCve20Handler extends DefaultHandler {
* Saves a vulnerability to the CVE Database.
*
* @param vuln the vulnerability to store in the database
* @throws DatabaseException thrown if there is an error writing to the database
* @throws DatabaseException thrown if there is an error writing to the
* database
* @throws CorruptIndexException is thrown if the CPE Index is corrupt
* @throws IOException thrown if there is an IOException with the CPE Index
*/
@@ -268,7 +286,8 @@ public class NvdCve20Handler extends DefaultHandler {
// <editor-fold defaultstate="collapsed" desc="The Element Class that maintains state information about the current node">
/**
* A simple class to maintain information about the current element while parsing the NVD CVE XML.
* A simple class to maintain information about the current element while
* parsing the NVD CVE XML.
*/
protected static class Element {
@@ -491,4 +510,28 @@ public class NvdCve20Handler extends DefaultHandler {
}
}
// </editor-fold>
/**
* A simple class to maintain information about the attribute values
* encountered while parsing the NVD CVE XML.
*/
protected static class AttributeValues {
/**
* An attribute in the NVD CVE Schema 2.0
*/
protected static final String ID = "id";
/**
* An attribute in the NVD CVE Schema 2.0
*/
protected static final String XML_LANG = "xml:lang";
/**
* An attribute in the NVD CVE Schema 2.0
*/
protected static final String HREF = "href";
/**
* An attribute in the NVD CVE Schema 2.0
*/
protected static final String NVD_XML_VERSION = "nvd_xml_version";
}
}
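The new AttributeValues class plus the static import at the top of the file replaces scattered string literals ("id", "xml:lang", "href", "nvd_xml_version") with named constants while keeping call sites short. A minimal sketch of the same idiom outside this handler (the package, AttrNames, and the printing handler below are illustrative only):

package sketch;

import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;

import static sketch.AttrNames.ID;
import static sketch.AttrNames.XML_LANG;

/** Holds the attribute names in one place. */
class AttrNames {
    static final String ID = "id";
    static final String XML_LANG = "xml:lang";
}

/** Reads attributes through the named constants, as NvdCve20Handler now does. */
public class ConstantsIdiomSketch extends DefaultHandler {
    @Override
    public void startElement(String uri, String localName, String qName, Attributes attributes) {
        System.out.println(qName + " id=" + attributes.getValue(ID)
                + " lang=" + attributes.getValue(XML_LANG));
    }
}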

View File

@@ -18,7 +18,6 @@
package org.owasp.dependencycheck.data.update.nvd;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
@@ -26,13 +25,13 @@ import java.util.Map;
import java.util.concurrent.Callable;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.XmlUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
@@ -142,9 +141,7 @@ public class ProcessTask implements Callable<ProcessTask> {
protected void importXML(File file, File oldVersion) throws ParserConfigurationException,
SAXException, IOException, SQLException, DatabaseException, ClassNotFoundException {
final SAXParserFactory factory = SAXParserFactory.newInstance();
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
final SAXParser saxParser = factory.newSAXParser();
final SAXParser saxParser = XmlUtils.buildSecureSaxParser();
final NvdCve12Handler cve12Handler = new NvdCve12Handler();
saxParser.parse(oldVersion, cve12Handler);
@@ -169,19 +166,7 @@ public class ProcessTask implements Callable<ProcessTask> {
importXML(filePair.getFirst(), filePair.getSecond());
cveDB.commit();
properties.save(filePair.getNvdCveInfo());
} catch (FileNotFoundException ex) {
throw new UpdateException(ex);
} catch (ParserConfigurationException ex) {
throw new UpdateException(ex);
} catch (SAXException ex) {
throw new UpdateException(ex);
} catch (IOException ex) {
throw new UpdateException(ex);
} catch (SQLException ex) {
throw new UpdateException(ex);
} catch (DatabaseException ex) {
throw new UpdateException(ex);
} catch (ClassNotFoundException ex) {
} catch (ParserConfigurationException | SAXException | SQLException | DatabaseException | ClassNotFoundException | IOException ex) {
throw new UpdateException(ex);
} finally {
filePair.cleanup();
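XmlUtils.buildSecureSaxParser replaces the inline factory configuration removed above. Its body is not shown in this diff, but a helper in that spirit, reusing the disallow-doctype-decl feature from the removed line and switching off external entities, might look roughly like this (a sketch, not the project's implementation):

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.SAXException;

public final class SecureSaxSketch {

    private SecureSaxSketch() {
    }

    /** Builds a SAX parser hardened against DOCTYPE/XXE tricks in downloaded XML. */
    public static SAXParser buildSecureSaxParser() throws ParserConfigurationException, SAXException {
        final SAXParserFactory factory = SAXParserFactory.newInstance();
        // reject inline DTDs outright (the feature the removed line was setting)
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        // and refuse to resolve external entities as a second layer
        factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        return factory.newSAXParser();
    }
}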

View File

@@ -17,16 +17,10 @@
*/
package org.owasp.dependencycheck.data.update.nvd;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Contains a collection of updateable NvdCveInfo objects. This is used to determine which files need to be downloaded and
@@ -36,14 +30,10 @@ import org.slf4j.LoggerFactory;
*/
public class UpdateableNvdCve implements Iterable<NvdCveInfo>, Iterator<NvdCveInfo> {
/**
* A reference to the logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(UpdateableNvdCve.class);
/**
* A collection of sources of data.
*/
private final Map<String, NvdCveInfo> collection = new TreeMap<String, NvdCveInfo>();
private final Map<String, NvdCveInfo> collection = new TreeMap<>();
/**
* Returns the collection of NvdCveInfo objects. This method is mainly used for testing.
@@ -74,31 +64,16 @@ public class UpdateableNvdCve implements Iterable<NvdCveInfo>, Iterator<NvdCveIn
* @param id the key for the item to be added
* @param url the URL to download the item
* @param oldUrl the URL for the old version of the item (the NVD CVE old schema still contains useful data we need).
* @throws MalformedURLException thrown if the URL provided is invalid
* @throws DownloadFailedException thrown if the download fails.
*/
public void add(String id, String url, String oldUrl) throws MalformedURLException, DownloadFailedException {
add(id, url, oldUrl, false);
}
/**
* Adds a new entry of updateable information to the contained collection.
*
* @param id the key for the item to be added
* @param url the URL to download the item
* @param oldUrl the URL for the old version of the item (the NVD CVE old schema still contains useful data we need).
* @param timestamp the last modified date of the downloaded item
* @param needsUpdate whether or not the data needs to be updated
* @throws MalformedURLException thrown if the URL provided is invalid
* @throws DownloadFailedException thrown if the download fails.
*/
public void add(String id, String url, String oldUrl, boolean needsUpdate) throws MalformedURLException, DownloadFailedException {
public void add(String id, String url, String oldUrl, long timestamp, boolean needsUpdate) {
final NvdCveInfo item = new NvdCveInfo();
item.setNeedsUpdate(needsUpdate); //the others default to true; to make life easier later this should default to false.
item.setId(id);
item.setUrl(url);
item.setOldSchemaVersionUrl(oldUrl);
LOGGER.debug("Checking for updates from: {}", url);
item.setTimestamp(Downloader.getLastModified(new URL(url)));
item.setTimestamp(timestamp);
collection.put(id, item);
}
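With the download removed from add(...), callers now pass the timestamp in and the method no longer declares checked exceptions, as the updater code earlier in this diff shows. A minimal usage sketch of the new signature and the Iterable contract (the URLs and timestamps below are made up):

import org.owasp.dependencycheck.data.update.nvd.NvdCveInfo;
import org.owasp.dependencycheck.data.update.nvd.UpdateableNvdCve;

public class UpdateableNvdCveUsageSketch {
    public static void main(String[] args) {
        final UpdateableNvdCve updates = new UpdateableNvdCve();
        final long now = System.currentTimeMillis();
        // a yearly feed, marked as needing an update
        updates.add("2017", "https://example.org/nvdcve-2.0-2017.xml.gz",
                "https://example.org/nvdcve-1.2-2017.xml.gz", now, true);
        // the modified feed, added with needsUpdate set to false
        updates.add("modified", "https://example.org/nvdcve-2.0-Modified.xml.gz",
                "https://example.org/nvdcve-1.2-Modified.xml.gz", now, false);
        for (NvdCveInfo info : updates) {
            System.out.println("queued: " + info.getId());
        }
    }
}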

Some files were not shown because too many files have changed in this diff.