Compare commits


769 Commits

Author SHA1 Message Date
Jeremy Long
d2a8645dd4 expanded test cases to include additional dependencies 2017-04-29 09:39:30 -04:00
Jeremy Long
4543835a0d reset threadcount to zero as the plugin is not yet threadsafe 2017-04-29 09:33:52 -04:00
Jeremy Long
c0f41c461b reverted the threadsafe flag as the plugin is not threadsafe 2017-04-29 09:32:58 -04:00
Jeremy Long
116ef264e1 updated failing test case to expect the correct exception 2017-04-26 08:59:53 -04:00
Jeremy Long
1371dacdaa expanded test case to identify/fix threading issue 2017-04-26 08:50:39 -04:00
Jeremy Long
d252d0f29f fixed threading issue 2017-04-26 08:50:06 -04:00
Jeremy Long
3786f6ebc7 removed un-needed code from invoker build scripts 2017-04-26 07:30:33 -04:00
Jeremy Long
6813427867 updated invoker plugin to use 2 threads 2017-04-26 07:30:01 -04:00
Jeremy Long
f94cf106a6 re-route invoker logs so the results can be viewed in travis 2017-04-25 08:01:09 -04:00
Jeremy Long
a67e421a5d nop 2017-04-23 08:44:24 -04:00
Jeremy Long
865db1b6c3 nop 2017-04-23 08:04:54 -04:00
Jeremy Long
31d7379a39 minor updates and added documentation 2017-04-23 07:22:53 -04:00
Jeremy Long
f473e63a61 added test case and added locking mechanism so only one update can run at any given time 2017-04-23 07:05:31 -04:00
Jeremy Long
238a96184a Merge branch 'master' into issue690_threadsafe 2017-04-12 10:28:16 -04:00
Jeremy Long
44ddad8101 updated for maven thread safety 2017-04-12 10:24:51 -04:00
Jeremy Long
afa47f7dfc Merge pull request #695 from jeremylong/fix_coverity_finding
Fix coverity findings
2017-04-12 10:24:24 -04:00
Jeremy Long
f289bcd285 fixed false positive per issue #691 2017-04-04 10:03:19 -04:00
Jeremy Long
c7adb1bb65 fix for issue #684 2017-04-04 09:58:19 -04:00
Jeremy Long
4bbc5e27b5 updated 2017-04-02 13:24:07 -04:00
Jeremy Long
c877ade004 updated 2017-04-02 13:22:35 -04:00
Jeremy Long
ebd8996ad5 fixed typo 2017-04-02 13:12:45 -04:00
Jeremy Long
f31313d021 added PR template 2017-04-02 13:08:58 -04:00
Jeremy Long
6936dac9b4 updated template 2017-04-02 13:08:42 -04:00
Jeremy Long
4b2f6832fe added contributing guidelines 2017-04-02 12:58:17 -04:00
Jeremy Long
35d0f21c47 fix codacy issues 2017-04-01 10:02:24 -04:00
Jeremy Long
3066d286c5 added logo no text 2017-04-01 09:58:32 -04:00
Jeremy Long
18564e8e86 fixed merge issue 2017-04-01 08:33:13 -04:00
Jeremy Long
832cbabc7d added bh arsenal badges 2017-03-31 17:28:20 -04:00
Jeremy Long
8b764d5e17 added bh arsenal badges 2017-03-31 17:24:48 -04:00
Jeremy Long
e2a1a59543 fixed issues related to making the cveDb a singleton 2017-03-31 06:58:37 -04:00
Jeremy Long
cedb8d3db1 Merge pull request #689 from jwilk/mailto
Fix mailto URIs
2017-03-25 09:13:11 -04:00
Jeremy Long
539bd754df fixed merge 2017-03-25 09:10:41 -04:00
Jeremy Long
109f5c22e9 initial fix for CveDB singleton 2017-03-25 09:06:34 -04:00
Jeremy Long
a23d127c62 initial fix for CveDB singleton 2017-03-25 09:05:51 -04:00
Jakub Wilk
6825304100 fix mailto URIs
As per RFC 6068, there should be no slashes after "mailto:".
2017-03-24 16:09:23 +01:00
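
(For clarity on the fix above: RFC 6068 defines a mailto URI as the scheme followed directly by the address, with no "//" authority component, so a link written as mailto://user@example.org should instead read mailto:user@example.org; the address here is purely illustrative.)
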
Jeremy Long
947499726a initial attempt 2017-03-15 07:36:28 -04:00
Jeremy Long
97b2e1a4da added documentation per issue https://github.com/jeremylong/dependency-check-gradle/issues/38 2017-03-14 09:06:17 -04:00
Jeremy Long
3bb6553111 Merge pull request #681 from jeremylong/java7_updates_and_cleanup
Java7 updates and cleanup
2017-03-12 19:35:12 -04:00
Jeremy Long
371dba948d checkstyle corrections 2017-03-12 18:03:27 -04:00
Jeremy Long
675349c06f fixed broken test case 2017-03-12 15:59:23 -04:00
Jeremy Long
7a88981aa4 updated to use try with resources 2017-03-12 13:22:27 -05:00
Jeremy Long
626f6c3de2 updated to use IOUtils to copy between streams 2017-03-12 13:21:59 -04:00
Jeremy Long
5540397456 Merge pull request #680 from jeremylong/cvedb
Cvedb
2017-03-11 14:45:49 -05:00
Jeremy Long
69c6dd40a1 fixed synchronization on local variable 2017-03-11 14:24:46 -05:00
Jeremy Long
5ed6e838fc spelling corrections 2017-03-11 14:15:24 -05:00
Jeremy Long
1d32a6012a fixed possible NPE 2017-03-11 13:28:21 -05:00
Jeremy Long
b157049a7e use try with resources 2017-03-11 13:27:40 -05:00
Jeremy Long
8ea6b08a0a use try with resources 2017-03-11 13:26:56 -05:00
Jeremy Long
8856ff04ec code cleanup and java 7 exception handling improvements 2017-03-11 12:46:58 -05:00
Jeremy Long
8bfbd11a51 added test cases 2017-03-11 12:46:06 -05:00
Jeremy Long
abd843d281 simplified conditional 2017-03-11 11:11:31 -05:00
Jeremy Long
c54f9b1144 fixed throws in finally and converted to try with resources 2017-03-11 11:11:02 -05:00
Jeremy Long
318f3e14dd removed unused code for batching 2017-03-11 11:10:21 -05:00
Jeremy Long
46f227e92e updated and added test cases 2017-03-11 11:09:31 -05:00
Jeremy Long
a7b6f37503 suppressed another false positive 2017-03-10 16:52:32 -05:00
Jeremy Long
a61bba2f72 code cleanup 2017-03-10 16:40:22 -05:00
Jeremy Long
dfc6d952bd codacy cleanup 2017-03-10 15:38:00 -05:00
Jeremy Long
046f4605f9 java7 updates and cleanup 2017-03-10 15:30:48 -05:00
Jeremy Long
32590ab7ff Merge branch 'master' of github.com:jeremylong/DependencyCheck into cvedb 2017-03-10 14:00:53 -05:00
Jeremy Long
efeb084e57 added suppression rule for jcore per issue #679 2017-03-10 06:51:53 -05:00
Jeremy Long
03ec3142c3 updated threadpool size 2017-03-07 06:37:21 -05:00
Jeremy Long
679df936e7 changed CveDB to a singleton 2017-03-07 05:49:12 -05:00
Jeremy Long
5ed5764ab5 Merge branch 'stefanneuhaus-misc_performance_tweaking_and_cleanup' 2017-03-04 14:29:47 -05:00
Jeremy Long
d588092727 Merge branch 'misc_performance_tweaking_and_cleanup' of https://github.com/stefanneuhaus/DependencyCheck into stefanneuhaus-misc_performance_tweaking_and_cleanup 2017-03-04 14:20:01 -05:00
Jeremy Long
295ba0679d Merge branch 'aikebah-master' 2017-03-04 14:02:40 -05:00
Jeremy Long
bcdf26c88d Merge branch 'master' of https://github.com/aikebah/DependencyCheck into aikebah-master 2017-03-04 13:51:28 -05:00
Jeremy Long
d6e092bfa2 Merge pull request #676 from jwilk/spelling
Fix typos
2017-03-04 13:50:09 -05:00
Jeremy Long
388c1b5af1 java 7 updates 2017-03-04 13:47:53 -05:00
Jakub Wilk
717aea9a03 fix typos 2017-03-02 23:07:35 +01:00
Stefan Neuhaus
4951ee5a62 Cleanup: Codacy conformance 2017-02-28 08:23:57 +01:00
Jeremy Long
666150cf7f updated per issue #672 2017-02-27 08:35:07 -05:00
Jeremy Long
d8290c0c45 Merge pull request #674 from jeremylong/Prakhash-reportmodifier1
Prakhash reportmodifier1
2017-02-26 10:02:01 -05:00
Jeremy Long
e363e8109b added suppression notes 2017-02-26 09:16:53 -05:00
Jeremy Long
b228d08843 removed typo 2017-02-26 09:16:16 -05:00
Jeremy Long
3e08437808 updated to work with new schema 2017-02-26 07:52:02 -05:00
Jeremy Long
e0d5651b75 updated to add notes 2017-02-26 07:50:35 -05:00
Jeremy Long
59e29b7afe Merge branch 'notes' into Prakhash-reportmodifier1 2017-02-25 16:09:21 -05:00
Jeremy Long
d180208e34 interim 2017-02-25 16:08:44 -05:00
Jeremy Long
0ce1ef596c Merge branch 'reportmodifier1' of https://github.com/Prakhash/DependencyCheck into Prakhash-reportmodifier1 2017-02-25 15:55:12 -05:00
Jeremy Long
5f7486f851 updates to 673 2017-02-25 15:53:12 -05:00
Jeremy Long
03559fd106 added more suppression rules for false positives 2017-02-25 06:31:34 -05:00
Jeremy Long
d08357a1c2 fixed typo 2017-02-24 07:10:27 -05:00
Jeremy Long
c1cb87ebde Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-02-24 06:30:11 -05:00
Jeremy Long
82fd1cf4d7 additional fp added 2017-02-24 06:30:02 -05:00
Prakhash
a87391e609 formatting issues reported by Codacy are fixed 2017-02-24 14:54:45 +05:30
Prakhash
3071cfd7be formatting issues reported by Codacy are done 2017-02-24 14:43:46 +05:30
Prakhash
583c2d34d3 schema changes are added with global schema 2017-02-24 14:23:56 +05:30
Prakhash
c9640fbf04 schema file is modified with notes attribute 2017-02-24 12:15:21 +05:30
Prakhash
192d1de944 namespace changes are reverted to the original 2017-02-24 12:06:51 +05:30
Prakhash
aa0314c840 report is modified with the notes element 2017-02-24 11:03:10 +05:30
Hans Aikema
0171b859c6 Merge branch 'master' of https://github.com/jeremylong/DependencyCheck 2017-02-23 11:22:25 +01:00
Jeremy Long
d267e14b73 Merge pull request #666 from colezlaw/grokassembly
Updated GrokAssembly and added config
2017-02-22 06:32:45 -05:00
Hans Aikema
79e63f4067 Merge remote-tracking branch 'upstream/master' 2017-02-21 17:35:30 +01:00
Hans Aikema
72d7af5291 Enable running DependencyCheck on Maven 3.0 2017-02-21 17:31:46 +01:00
Stefan Neuhaus
0e313d1910 Fix issue 2017-02-21 17:06:02 +01:00
Stefan Neuhaus
6841f9a009 Fix typo in directory name 2017-02-21 16:17:36 +01:00
Stefan Neuhaus
caeec68999 Refactor CveDB
- make class thread-safe and declare so (also DatabaseProperties)
- prepared statements represented by enum: performance gain, eases experiments when tuning for performance
- minor changes/cleanup/code style
2017-02-21 14:32:24 +01:00
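
As a rough illustration of the "prepared statements represented by enum" idea mentioned above (the statement names and SQL below are hypothetical, not the project's actual schema):

```java
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.EnumMap;
import java.util.Map;

// Sketch only: each enum constant carries its SQL, and all statements are
// prepared once per connection and reused, which avoids re-parsing and makes
// it easy to swap or tune individual queries.
enum CveDbStatement {
    SELECT_CPE_ENTRIES("SELECT cpe FROM cpeEntry WHERE vendor = ? AND product = ?"), // hypothetical SQL
    SELECT_REFERENCES("SELECT source, name, url FROM reference WHERE cveid = ?");    // hypothetical SQL

    private final String sql;

    CveDbStatement(String sql) {
        this.sql = sql;
    }

    String sql() {
        return sql;
    }

    /** Prepares every statement up front; callers look them up by enum constant. */
    static Map<CveDbStatement, PreparedStatement> prepareAll(Connection conn) throws SQLException {
        final Map<CveDbStatement, PreparedStatement> statements = new EnumMap<>(CveDbStatement.class);
        for (CveDbStatement key : values()) {
            statements.put(key, conn.prepareStatement(key.sql()));
        }
        return statements;
    }
}
```
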
Stefan Neuhaus
541915a5a7 Minor improvements in NvdCve20Handler
- use addAll() instead of iterating and adding single instances for VulnerableSoftware
- define String constants for certain XML attributes: code style, could facilitate JVM optimizations
2017-02-21 14:30:22 +01:00
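
The addAll() change noted above is the standard Java Collections bulk-add idiom; a generic sketch with placeholder names:

```java
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class VulnerableSoftwareCollector {
    private final Set<String> vulnerableSoftware = new HashSet<>();

    // Before: iterate and add single instances.
    void addEach(List<String> entries) {
        for (String entry : entries) {
            vulnerableSoftware.add(entry);
        }
    }

    // After: one bulk operation expresses the same intent more directly.
    void addBulk(List<String> entries) {
        vulnerableSoftware.addAll(entries);
    }
}
```
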
Stefan Neuhaus
cb75ab8cca Use Apache StringUtils.split(String, char) instead of String.split(String)
String.split() uses a regex pattern for splitting. As we simply need to split on a single fixed char, using the Apache StringUtils is preferable.
2017-02-21 14:23:13 +01:00
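
To make the rationale above concrete, a small comparison; it assumes Apache Commons Lang is on the classpath (shown here as org.apache.commons.lang3.StringUtils, though the project may have used the older commons-lang package):

```java
import org.apache.commons.lang3.StringUtils;

public class SplitComparison {
    public static void main(String[] args) {
        String cpe = "cpe:/a:apache:commons-lang:3.4"; // illustrative value

        // String.split takes a regular-expression pattern, even though only a
        // single fixed character is needed here.
        String[] viaRegex = cpe.split(":");

        // StringUtils.split(String, char) scans for the literal character and
        // also collapses adjacent separators.
        String[] viaChar = StringUtils.split(cpe, ':');

        System.out.println(viaRegex.length + " tokens vs " + viaChar.length + " tokens");
    }
}
```
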
Stefan Neuhaus
0f3845b16d cleanup: remove unused return value 2017-02-21 14:12:12 +01:00
Stefan Neuhaus
dd7128095e add license information to dbStatements_oracle.properties 2017-02-21 14:02:34 +01:00
Jeremy Long
1367be510c correct fix for issue #660; correctly handle organization from the pom 2017-02-21 07:02:05 -05:00
Jeremy Long
2ea0eb3c64 correct fix for issue #660; correctly handle organization from the pom 2017-02-21 06:40:02 -05:00
Jeremy Long
a5990ea6f3 update to #657 to allow sorted vulnerable software in reports; also, sorting an array list is faster than building a treeset 2017-02-21 06:38:31 -05:00
colezlaw
67921f5f3d Updated GrokAssembly and added config 2017-02-20 15:35:52 -05:00
Jeremy Long
d31e0453bd fix for #660 2017-02-20 07:01:05 -05:00
stevespringett
ae21424a30 Closes #664 2017-02-18 21:23:19 -06:00
Jeremy Long
3577949425 codacy recommended updates 2017-02-17 19:03:53 -05:00
Jeremy Long
0d72471502 fixed synchronization per coverity 2017-02-17 18:00:40 -05:00
Jeremy Long
17590a6d38 re-ordered badges 2017-02-17 17:58:36 -05:00
Jeremy Long
d9dcc8cc2d fixed UTF-8 BOM bug 2017-02-17 17:18:10 -05:00
Jeremy Long
df1ee5e8c6 reverted dependency-tree to resolve bug 2017-02-17 17:17:54 -05:00
Jeremy Long
3c68ebece7 plugin/dependency upgrades 2017-02-17 14:35:51 -05:00
Jeremy Long
c9e8e6cf0e codacy recommended updates 2017-02-17 14:20:43 -05:00
Jeremy Long
36945fb84d added codacy badge 2017-02-17 13:05:12 -05:00
Jeremy Long
960a2e27ab formatting and codacy recommended updates 2017-02-17 12:59:17 -05:00
Jeremy Long
71724461a9 Merge branch 'stefanneuhaus-accelerate-db-update_parallelize-fetching-lastmodification-timestamps' 2017-02-17 12:14:02 -05:00
Jeremy Long
ae5a95bfb3 merge #662 2017-02-17 12:13:45 -05:00
Jeremy Long
d6c9fea354 formatting and codacy recommended updates 2017-02-17 12:03:11 -05:00
Jeremy Long
d6f1351f6b Merge pull request #657 from stefanneuhaus/accelerate-db-update_get-rid-of-treeset
Accelerate CVE DB update: replace TreeSets in Vulnerability by HashSets
2017-02-17 10:48:46 -05:00
Jeremy Long
373488adb4 codacy recommended updates 2017-02-17 10:31:25 -05:00
Stefan Neuhaus
59401cc9f8 cleanup/code style 2017-02-16 20:55:26 +01:00
Stefan Neuhaus
eca0e7a852 Fix integration test 2017-02-16 20:53:48 +01:00
Stefan Neuhaus
563dc24854 Parallelize retrieval of last modification timestamps 2017-02-16 08:59:09 +01:00
Stefan Neuhaus
3a70e25983 Refactoring: Move retrieval of last modified timestamps from UpdateableNvdCve to NvdCveUpdater
- UpdateableNvdCve is from its nature more like a simple value object
- Facilitates performance optimization for retrieval of last modification timestamps
2017-02-16 08:58:50 +01:00
Stefan Neuhaus
a9fc6bf02c cleanup: remove unused stuff 2017-02-16 08:58:36 +01:00
Stefan Neuhaus
cd4f09dc86 NvdCveUpdater: Refactor thread pool concept
- Make thread pools members of the class to facilitate reuse
- Increase default max download thread pool size from 3 to 50 (should be fine for mostly blocking tasks like downloading)
2017-02-16 08:58:14 +01:00
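
A rough sketch of the thread-pool shape described above: the pool is held as a field so it can be reused, and the size of 50 mirrors the commit's stated default for mostly-blocking download work. Class and method names are illustrative, not the project's actual API.

```java
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

class NvdDownloadPool {
    // Member field rather than a locally created pool, so repeated update runs reuse it.
    private final ExecutorService downloadExecutor = Executors.newFixedThreadPool(50);

    <T> List<Future<T>> downloadAll(List<Callable<T>> tasks) throws InterruptedException {
        return downloadExecutor.invokeAll(tasks);
    }

    void shutdown() {
        downloadExecutor.shutdown();
    }
}
```
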
Jeremy Long
4193718571 upgrade to Java 7 2017-02-12 17:42:19 -05:00
Stefan Neuhaus
0464626e2b Accelerate CVE DB update
Vulnerability: switch vulnerableSoftware and references from expensive TreeSet to HashSet
2017-02-11 20:46:28 +01:00
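
The data-structure switch above trades per-insert ordering for cheaper adds; a minimal illustration (field and class names are placeholders):

```java
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;

class VulnerabilitySketch {
    // Before: TreeSet keeps elements sorted, paying a comparison-based O(log n)
    // cost (plus comparator work) on every insert during the DB update.
    private final Set<String> referencesSorted = new TreeSet<>();

    // After: HashSet inserts in amortized O(1); any ordering needed for reports
    // can be applied once at read time instead of on every add.
    private final Set<String> references = new HashSet<>();
}
```
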
Jeremy Long
a0198e34e7 snapshot version 2017-02-09 06:04:02 -05:00
Jeremy Long
0b329bd40e added test case 2017-02-07 19:41:06 -05:00
Jeremy Long
3d33f24f09 Merge pull request #655 from suhand/master
Minor spelling fixes
2017-02-07 19:40:39 -05:00
Jeremy Long
886c02fad2 add configuration to remove FP based on parent-group/artifact from spring-boot 2017-02-07 06:24:34 -05:00
Jeremy Long
3a11504153 updated to prevent bundling of dependencies within WAR files 2017-02-07 06:22:55 -05:00
Jeremy Long
3a082ae00a minor update to #617 2017-02-07 06:06:53 -05:00
Suhan Dharmasuriya
780201845b Minor spelling fixes 2017-02-07 12:00:17 +05:30
Jeremy Long
0e0a4bb0b4 expanded hint rules so that they can remove evidence 2017-02-04 09:20:47 -05:00
Jeremy Long
5333083a78 fixed bug that caused ODC to fail if an invalid assembly was scanned 2017-01-28 08:13:27 -05:00
Jeremy Long
b8c6c86330 snapshot version 2017-01-28 08:12:51 -05:00
Jeremy Long
e246757f47 version 1.4.5 2017-01-22 17:10:42 -05:00
Jeremy Long
4172300799 added license 2017-01-22 16:11:50 -05:00
Jeremy Long
f39f754b7b reapplied fix for issue #601 2017-01-22 08:10:14 -05:00
Jeremy Long
c59615f452 patch for issue #510 and #512 2017-01-22 08:01:40 -05:00
Jeremy Long
847bed2fa0 added manifest implementation-version 2017-01-22 07:42:11 -05:00
Jeremy Long
a9af15f6f8 checkstyle/pmd suggested corrections 2017-01-21 08:47:52 -05:00
Jeremy Long
92519ae955 updated notes 2017-01-21 08:09:48 -05:00
Jeremy Long
2d90aca1f2 minor code cleanup 2017-01-21 08:05:54 -05:00
Jeremy Long
f29ed38c34 Merge pull request #644 from oosterholt/master
Add troubling JAR file name to the exception when JAR reading errors occur
2017-01-21 06:21:18 -05:00
Rick Oosterholt
df8d4fd77c Minor change: When JAR reading errors occur, at least add the file name
to the exception. Without it, finding the troubling JAR is hard.
2017-01-18 13:52:17 +01:00
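
The change described above follows the common pattern of re-wrapping an I/O failure with the offending file's path; a hedged sketch, not the project's actual code:

```java
import java.io.File;
import java.io.IOException;
import java.util.jar.JarFile;

final class JarOpener {
    private JarOpener() {
    }

    static JarFile open(File file) throws IOException {
        try {
            return new JarFile(file);
        } catch (IOException ex) {
            // Include the file name so the troubling JAR can be identified from the log.
            throw new IOException("Unable to read JAR file: " + file.getAbsolutePath(), ex);
        }
    }
}
```
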
Jeremy Long
baa2e2c6ff updated archetype for new analyzers to be more complete 2017-01-15 12:18:01 -05:00
Jeremy Long
9d5769bb69 Merge branch 'issue575' 2017-01-15 11:19:37 -05:00
Jeremy Long
4cdfa804ee fixed accidental commit 2017-01-14 09:43:34 -05:00
Jeremy Long
523cd23b6b filter version numbers for issue #575 2017-01-14 09:41:34 -05:00
Jeremy Long
61866e9e76 updated source version 2017-01-14 08:55:20 -05:00
Jeremy Long
ff7fbdc98d updated year to speed test case 2017-01-14 07:34:35 -05:00
Jeremy Long
b625d642ea updated documentation for #635 2017-01-14 07:31:31 -05:00
Jeremy Long
8733a85ebb patch per issue#642 2017-01-13 06:53:26 -05:00
Jeremy Long
5ab5a7b72b tuned linguist language stats 2017-01-09 20:05:37 -05:00
Jeremy Long
3cb8b9fa9e Merge branch 'hgschmie-additional_analyzers' 2017-01-08 11:52:09 -05:00
Jeremy Long
429039bf1c documentation for issue #635 2017-01-08 11:37:50 -05:00
Jeremy Long
29d28c3408 fixed PR #635 to cover other interfaces 2017-01-08 11:23:52 -05:00
Jeremy Long
372d484440 Merge branch 'additional_analyzers' of https://github.com/hgschmie/DependencyCheck into hgschmie-additional_analyzers 2017-01-08 10:33:57 -05:00
Jeremy Long
eac47800a3 added documentation for PR #636 2017-01-08 08:55:29 -05:00
Jeremy Long
86a85db12b removed for now 2017-01-08 08:54:47 -05:00
Jeremy Long
4ab6cd278c updated documentation for PR #636 2017-01-08 08:51:56 -05:00
Jeremy Long
233a068c8b Merge pull request #636 from hgschmie/fail_on_any_vuln
adds a new flag 'failBuildOnAnyVulnerability'
2017-01-08 08:19:24 -05:00
Jeremy Long
d9f0ffa742 Merge pull request #634 from hgschmie/enable_disable
rework the enabled / disabled logic
2017-01-08 08:18:12 -05:00
Jeremy Long
8d63ee19ed fix for Jenkins integration, updates to commit f47c6b0 2017-01-08 07:55:35 -05:00
Jeremy Long
1fb74e1a27 Merge pull request #639 from dejan2609/java-6-compatibility
check code against Java 1.6 API signatures
2017-01-07 06:40:47 -05:00
dejan2609
c94ab6108c check code against Java 1.6 API signatures 2017-01-04 16:42:07 +01:00
Jeremy Long
bf285e19ab added site for archetype 2017-01-02 21:59:09 -05:00
Jeremy Long
b1ceca73e4 added plugin archetype to site 2017-01-02 21:48:04 -05:00
Jeremy Long
f3aca63b61 version upgrades and added enforcer for java version 2017-01-02 21:47:27 -05:00
Jeremy Long
fca107d287 added site distribution 2017-01-02 21:46:15 -05:00
Jeremy Long
64b6964fff checkstyle corrections 2017-01-02 21:45:49 -05:00
Jeremy Long
6af0842838 added logging 2017-01-02 21:45:21 -05:00
Jeremy Long
4c49adf1ba reduced code duplication 2017-01-02 21:44:59 -05:00
Jeremy Long
5f4e4fab56 reduced code duplication 2017-01-02 21:43:51 -05:00
Jeremy Long
146d7e3fbf reduced code duplication 2017-01-02 21:42:20 -05:00
Jeremy Long
4d22800747 fixed type 2017-01-02 21:40:57 -05:00
Jeremy Long
541a7f8180 removed unused code 2017-01-02 21:40:04 -05:00
Jeremy Long
f205cf79c9 Merge branch 'plugins' 2016-12-30 17:02:32 -05:00
Jeremy Long
d8bb6488b7 added archetype per #612 2016-12-30 17:01:09 -05:00
Jeremy Long
4324563c0a updated plugins path for #612 2016-12-30 16:42:37 -05:00
Jeremy Long
bad03660b1 added plugins directory per #612 2016-12-29 07:38:11 -05:00
Henning Schmiedehausen
20b1ff38f9 adds a new flag 'failBuildOnAnyVulnerability'
In our build system, we enable checkers based on boolean
values. Currently, the only way to enable failing the build on
vulnerabilities is by providing a numeric value (0-10) for another
property. This change adds a boolean switch that will fail the build
if any vulnerability is present (we have a strict "no vulnerabilities
in our builds" policy).
2016-12-28 17:24:26 -08:00
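
A sketch of how such a boolean switch could sit alongside the existing numeric CVSS threshold; the property and method names here are illustrative rather than the plugin's actual configuration:

```java
import java.util.List;

class BuildFailurePolicy {
    private final boolean failBuildOnAnyVulnerability; // new boolean switch
    private final float failBuildOnCvss;               // existing 0-10 threshold

    BuildFailurePolicy(boolean failBuildOnAnyVulnerability, float failBuildOnCvss) {
        this.failBuildOnAnyVulnerability = failBuildOnAnyVulnerability;
        this.failBuildOnCvss = failBuildOnCvss;
    }

    boolean shouldFailBuild(List<Float> cvssScores) {
        if (failBuildOnAnyVulnerability && !cvssScores.isEmpty()) {
            return true; // strict "no vulnerabilities in our builds" policy
        }
        for (float score : cvssScores) {
            if (score >= failBuildOnCvss) {
                return true;
            }
        }
        return false;
    }
}
```
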
Henning Schmiedehausen
def78a3cfd rework the enabled / disabled logic
If an analyzer is disabled from the configuration, it should not be
initialized (because some of them may actually fail during that process),
nor should the engine log in any way that those exist.

With these changes, it is possible for me to turn off unwanted
analyzers (e.g. Ruby analyzers for a java project) from the maven
plugin and not confuse my users with spurious misleading messages.
2016-12-28 16:39:25 -08:00
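
The reworked logic described above amounts to filtering analyzers before initialization rather than after; a simplified sketch with hypothetical interfaces:

```java
import java.util.ArrayList;
import java.util.List;

interface Analyzer {
    String getName();

    boolean isEnabled(); // hypothetical: derived from the user's configuration

    void initialize() throws Exception;
}

class AnalyzerLoader {
    /** Disabled analyzers are skipped entirely: never initialized, never logged as present. */
    static List<Analyzer> loadEnabled(List<Analyzer> discovered) throws Exception {
        final List<Analyzer> enabled = new ArrayList<>();
        for (Analyzer analyzer : discovered) {
            if (!analyzer.isEnabled()) {
                continue; // no initialization, no misleading "analyzer available" messages
            }
            analyzer.initialize();
            enabled.add(analyzer);
        }
        return enabled;
    }
}
```
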
Henning Schmiedehausen
a41158a716 adds maven configuration switches for more analyzers 2016-12-28 16:38:28 -08:00
Jeremy Long
63ad13ff7a added enabled properties per issue #612 2016-12-27 08:46:04 -05:00
Jeremy Long
dd92ec675f fixed error in tests 2016-12-27 08:45:42 -05:00
Jeremy Long
6e1512f7d9 added enabled setting (#612) and added additional checks to see if the update should occur (#631) 2016-12-27 08:45:01 -05:00
Jeremy Long
287b1df3fd added enabled settings for all analyzers per #612 2016-12-26 09:11:26 -05:00
Jeremy Long
38bf9b4ddb checkstyle recommendations 2016-12-22 07:32:04 -05:00
Jeremy Long
f9d3a9d8d8 Merge pull request #614 from stefanneuhaus/issue-613-fix-version-comparison
Fix handling of numerical versions
2016-12-22 06:58:26 -05:00
Jeremy Long
309a5d9bcb Merge branch 'issue630' 2016-12-22 06:57:04 -05:00
Jeremy Long
60e661d3a4 updated per issue #630 2016-12-22 06:55:26 -05:00
Jeremy Long
c33257d266 added synchronization - as this analyzer should only run synchronized 2016-12-22 06:53:35 -05:00
Jeremy Long
1dbc183567 added check for failure 2016-12-22 06:52:47 -05:00
Jeremy Long
bf258146da added test case for issue #629 and #517 2016-12-18 12:14:35 -05:00
Jeremy Long
bb927b447e updated so that the old suppression files could be processed 2016-12-18 12:12:57 -05:00
Jeremy Long
d91b4c3151 updated test case for performance of build 2016-12-18 12:12:10 -05:00
Jeremy Long
91dbb39f18 updated test for #630 2016-12-18 11:59:59 -05:00
Jeremy Long
35ae8fd660 updated test for #630 2016-12-18 11:59:30 -05:00
Jeremy Long
d854917090 changes for issue #630 2016-12-18 11:58:58 -05:00
Jeremy Long
32ebf6c8ed added phase to accommodate the fix for issue #630 2016-12-18 11:58:20 -05:00
Jeremy Long
edd4191d47 fix for #517 2016-12-16 06:29:42 -05:00
Jeremy Long
0cce49506a added validation 2016-12-10 19:58:05 -05:00
Jeremy Long
1c053469e9 fixed date format for test case 2016-12-10 19:50:09 -05:00
Jeremy Long
610e97ef7f jacks suggested change 2016-12-10 16:55:58 -05:00
Jeremy Long
5a678d2ccb removed test code 2016-12-10 16:55:38 -05:00
Jeremy Long
8db61a4d1e coverity suggested change 2016-12-10 16:42:32 -05:00
Jeremy Long
f47c6b07f4 jacks recommended change for thread safety 2016-12-05 22:41:15 -05:00
Jeremy Long
bd3af45db9 fixed code duplication 2016-12-04 16:18:01 -05:00
Jeremy Long
a271d422f6 moved similar code to a utility function to remove code duplication 2016-12-04 11:28:53 -05:00
Jeremy Long
4dd6dedaa4 hardening the XML parser per jacks.codiscope.com 2016-12-03 17:44:49 -05:00
Jeremy Long
10ee569096 fix proposed by Jacks - synchronizing SimpleDateFormat 2016-12-03 17:43:24 -05:00
Jeremy Long
1474855305 fix proposed by Jacks - synchronizing SimpleDateFormat 2016-12-03 17:41:32 -05:00
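
SimpleDateFormat keeps mutable parse state and is not thread-safe, so the fix referenced in the two commits above comes down to serializing access to a shared instance (or giving each thread its own). A minimal sketch of the synchronized variant; the date pattern is just an example:

```java
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

class TimestampParser {
    // Shared instance: access must be serialized because parse() mutates internal state.
    private final SimpleDateFormat format =
            new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US);

    Date parse(String value) throws ParseException {
        synchronized (format) {
            return format.parse(value);
        }
    }
}
```
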
Jeremy Long
0202bc11d4 null checking proposed by coverity 2016-12-03 17:39:57 -05:00
Stefan Neuhaus
e7072ea04c Count "0" as a positive integer 2016-12-03 22:50:20 +01:00
Jeremy Long
8f2c755f21 checkstyle correction 2016-12-03 16:23:53 -05:00
Jeremy Long
e513a79bd2 fixed issue #272 2016-12-03 15:07:33 -05:00
Jeremy Long
dd17f7393f snapshot version 2016-12-03 14:28:36 -05:00
Jeremy Long
32f38bf892 updated travis build script 2016-12-03 14:01:32 -05:00
Jeremy Long
d5c3eeaf28 Merge branch 'removeMavenEngine' 2016-12-03 13:48:03 -05:00
Jeremy Long
bfa67fcba7 fix #617 2016-12-03 13:46:25 -05:00
Jeremy Long
37a556dcc0 add integration test 2016-12-03 07:06:01 -05:00
Jeremy Long
fe61f298f0 Merge branch 'axel3rd-MavenMojosPurgeAndUpdateOnlyAggregator' 2016-12-03 06:56:01 -05:00
Jeremy Long
9786c9bf82 minor changes - planning on moving additional testing profile to an invoker test in the maven module per issue #618 2016-12-03 06:55:24 -05:00
Jeremy Long
668161081a moved the invoker plugin to a profile so that it does not execute on every build 2016-12-03 06:54:03 -05:00
Jeremy Long
4978f9dcba Merge branch 'MavenMojosPurgeAndUpdateOnlyAggregator' of https://github.com/axel3rd/DependencyCheck into axel3rd-MavenMojosPurgeAndUpdateOnlyAggregator 2016-11-22 19:57:27 -05:00
Jeremy Long
a6ca2e3895 Merge pull request #625 from axel3rd/MinorFixAndUTsWindowsSpaceDirectory
UTs on Windows when project path contains space & some exception review
2016-11-22 19:51:54 -05:00
Alix Lourme
6ecf55be91 UTs on Windows when project path contains space & some exception review 2016-11-22 23:33:40 +01:00
Jeremy Long
13bd63dac8 re-loading of properties/settings resolved by sharing the settings object amongst tasks 2016-11-22 16:40:57 -05:00
Jeremy Long
db5ff1bfca java mail - disputed CVE is considered a false positive 2016-11-22 16:38:45 -05:00
Jeremy Long
42f2385bb2 updated documentation for PR #619 2016-11-22 06:51:21 -05:00
Jeremy Long
e9556bbbf0 added analyzer initialization so that temp files get put in the correct location 2016-11-22 06:40:33 -05:00
Jeremy Long
316b936326 ensured resources are closed 2016-11-22 06:39:50 -05:00
Jeremy Long
6838b9b950 fixed logic for single pom entry in a jar 2016-11-22 06:21:30 -05:00
Jeremy Long
cdfe5d0c9a Merge pull request #619 from willowtreeapps/feature/fail-on-cvss
Adds a failOnCVSS command line option
2016-11-22 05:50:45 -05:00
Jeremy Long
1610f14c47 general code cleanup/fixes 2016-11-22 05:46:35 -05:00
Jeremy Long
85ab894b94 fixed the possible creation of two indexes 2016-11-20 06:49:28 -05:00
Alix Lourme
ddbca24f33 Maven mojos 'purge' & 'update-only' aggregator #618 2016-11-19 00:32:10 +01:00
Charlie Fairchild
6b9acac8c4 Minor Styling 2016-11-17 15:37:21 -05:00
Charlie Fairchild
2333bee5fd Adds a command line option for the CLI tool to pick what CVSS error to fail on 2016-11-16 11:25:21 -05:00
Jeremy Long
2ad08d2367 minor code cleanup 2016-11-13 16:33:39 -05:00
Stefan Neuhaus
1337686013 Fix handling of numerical versions 2016-11-13 19:37:29 +01:00
Jeremy Long
41041bfd18 updated documentation per issue #607 2016-11-12 11:21:40 -05:00
Jeremy Long
e693e53630 updated error message per issue #607 2016-11-12 11:19:48 -05:00
Jeremy Long
b99e13a337 added documentation to address issue #609 2016-11-12 11:03:25 -05:00
Jeremy Long
3bbc485968 fix index out of range exception per issue #611 2016-11-11 10:58:14 -05:00
Jeremy Long
e0b549e427 v1.4.4 2016-11-05 09:34:53 -04:00
Jeremy Long
75207169e3 resolved fp per #604 2016-11-05 06:29:43 -04:00
Jeremy Long
e07f568237 resolved false positive per #608 2016-11-05 06:23:06 -04:00
Jeremy Long
e2cd99d40d modified code for #606 2016-11-03 06:41:37 -04:00
Jeremy Long
27f2682a98 checkstyle corrections 2016-10-31 06:44:51 -04:00
Jeremy Long
34a2110e9a minor performance improvement 2016-10-31 06:29:32 -04:00
Jeremy Long
96ba51db4f updated so that all scanned dependencies are correctly kept in the dependency list 2016-10-31 06:29:08 -04:00
Jeremy Long
9c6053a60a fixed logging bug 2016-10-28 19:18:20 -04:00
Jeremy Long
358367ef9e updated documentation to resolve issues #523 and #561 2016-10-28 18:58:27 -04:00
Jeremy Long
a12bc44ecd moved hard-coded configuration to properties file and added some additional debugging 2016-10-28 08:44:43 -04:00
Jeremy Long
773ac019f8 coverity recommended changes 2016-10-23 07:20:24 -04:00
Jeremy Long
e751b7b814 checkstyle correction 2016-10-23 07:02:36 -04:00
Jeremy Long
824aa23b9b updated documentation to reflect that the gradle plugin automatically registers itself when the Java plugin is used 2016-10-23 06:18:50 -04:00
Jeremy Long
b7b97960a6 improvements to the vulnerability report per issue #599 2016-10-22 07:11:36 -04:00
Jeremy Long
40f0e907e1 typo fix per #603 2016-10-22 06:02:59 -04:00
Jeremy Long
5ff0dc885d Merge branch 'master' of github.com:jeremylong/DependencyCheck 2016-10-21 07:06:55 -04:00
Jeremy Long
e70a0ee238 corrected how project references are propagated when the same dependency is analyzed more than once 2016-10-21 07:06:47 -04:00
Jeremy Long
9338697079 fixed dctemp path from being the primary dependency 2016-10-21 07:05:21 -04:00
Jeremy Long
4018a4e1de Merge pull request #602 from spyhunter99/feature/601
#601 make the dependency vulnerability count easier to pull out of th…
2016-10-21 05:39:44 -04:00
Alex
e8788dd2a4 #601 make the dependency vulnerability count easier to pull out of the html 2016-10-18 20:08:43 -04:00
Jeremy Long
e70c2f2b05 fixed issue #570 - each instance of dependency-check will have its own temporary folder 2016-10-16 07:40:18 -04:00
Jeremy Long
5ed0583039 added new temp directory creation function 2016-10-16 07:36:38 -04:00
Jeremy Long
f76d7295f9 fixed generics warning 2016-10-16 07:33:09 -04:00
Jeremy Long
6e280c4958 suppressed warnings 2016-10-16 07:32:48 -04:00
Jeremy Long
48b4ef1944 updated duration reporting to be the same format 2016-10-16 07:32:05 -04:00
Jeremy Long
9150df964f fixed error handling 2016-10-16 07:31:17 -04:00
Jeremy Long
b2237394e1 updated duration reporting to be the same format 2016-10-16 07:30:01 -04:00
Jeremy Long
b3a0f7ad26 fixed generic warnings 2016-10-16 07:28:50 -04:00
Jeremy Long
782ba42abc fixed warning regarding no uid 2016-10-16 07:28:09 -04:00
Jeremy Long
74b93ce602 fixing PR #598 2016-10-14 13:47:39 -04:00
Jeremy Long
e907c40f17 Merge pull request #595 from bloihl/master
syncing documentation hints terminology for all sub-projects
2016-10-09 16:14:23 -04:00
bloihl
13a9dedb1e Merge remote-tracking branch 'upstream/master' 2016-10-09 12:54:12 -07:00
bloihl
b37698f245 syncing references to false negatives in documentation 2016-10-09 12:52:44 -07:00
Jeremy Long
d30d000346 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2016-10-09 15:39:09 -04:00
Jeremy Long
446239a5bd clearly document Maven 3.1 or higher is required 2016-10-09 15:39:02 -04:00
Jeremy Long
ac25aa795b Merge pull request #588 from wurstbrot/master
Add Dockerfile for dependency check
2016-10-09 15:06:44 -04:00
Jeremy Long
f117a9ded0 Merge pull request #594 from stefanneuhaus/parallelize-analyzers-aftermath
Parallelize analyzers aftermath
2016-10-09 15:01:16 -04:00
Stefan Neuhaus
947d38ccd2 Merge remote-tracking branch 'upstream/master' into parallelize-analyzers-aftermath
# Conflicts:
#	dependency-check-core/src/main/java/org/owasp/dependencycheck/AnalysisTask.java
#	dependency-check-core/src/main/java/org/owasp/dependencycheck/Engine.java
2016-10-09 17:13:39 +02:00
Jeremy Long
23f7996db8 checkstyle corrections 2016-10-09 11:00:28 -04:00
Stefan Neuhaus
9fdff51f26 Merge remote-tracking branch 'upstream/master' into parallelize-analyzers-aftermath 2016-10-09 16:08:46 +02:00
Stefan Neuhaus
9b43bf004a Cleanup
- shutdown() ExecutorService after task execution
- javadoc
- improve unit test coverage
2016-10-09 16:03:36 +02:00
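
The "shutdown() ExecutorService after task execution" item above is the usual pattern of releasing the pool in a finally block once the batch completes; a generic sketch:

```java
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

class AnalysisBatch {
    static <T> List<Future<T>> runAll(List<Callable<T>> tasks, int threads) throws InterruptedException {
        final ExecutorService executor = Executors.newFixedThreadPool(threads);
        try {
            return executor.invokeAll(tasks); // blocks until every task has completed
        } finally {
            executor.shutdown(); // always release the pool's threads when the batch is done
        }
    }
}
```
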
Jeremy Long
5d73faa1f0 updated sample report with the latest version 2016-10-09 08:11:53 -04:00
Jeremy Long
9e70279b31 updated presentation 2016-10-09 08:03:03 -04:00
Jeremy Long
9e671d1065 updated documentation per #556 2016-10-09 08:00:02 -04:00
Jeremy Long
7e2c4af0b3 Merge branch 'bloihl-master' 2016-10-09 07:13:47 -04:00
Jeremy Long
11f9092a65 fixed description 2016-10-09 07:13:35 -04:00
Jeremy Long
6017e5c217 Merge branch 'master' of https://github.com/bloihl/DependencyCheck into bloihl-master 2016-10-09 06:56:17 -04:00
Jeremy Long
b2149ff4b9 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2016-10-09 06:50:50 -04:00
Jeremy Long
1a5177c576 Merge branch 'stefanneuhaus-parallelize-analyzers' 2016-10-09 06:50:09 -04:00
Jeremy Long
7020c9931a Merge branch 'parallelize-analyzers' of https://github.com/stefanneuhaus/DependencyCheck into stefanneuhaus-parallelize-analyzers 2016-10-09 06:33:10 -04:00
Jeremy Long
9bc43e2e8e Merge pull request #590 from stefanneuhaus/cleanup
Cleanup
2016-10-08 22:07:49 -04:00
Jeremy Long
26a4e7451e Merge pull request #589 from pierre-ernst/master
Hardening
2016-10-08 22:04:07 -04:00
Stefan Neuhaus
3470d33bdc Fix build 2016-10-09 02:59:32 +02:00
Stefan Neuhaus
51c96894b4 Support parallelism for analyzers of the same type 2016-10-09 00:45:10 +02:00
Jeremy Long
7fc2be6a0a corrected checksum calculation so that files can be deleted shortly after the calculation is completed. 2016-10-08 18:05:55 -04:00
Jeremy Long
110c97bc15 ensuring no input stream is left open 2016-10-08 18:02:53 -04:00
Jeremy Long
8d51d8fa1f improved error reporting 2016-10-08 18:00:47 -04:00
Jeremy Long
4b02a567e0 improved error reporting 2016-10-08 18:00:18 -04:00
Stefan Neuhaus
5a939ec108 Provide proper error message in case the (default) property file is not available. Ran into this issue in combination with the Gradle daemon. 2016-10-08 20:00:43 +02:00
Stefan Neuhaus
d9c4480627 Fix typos 2016-10-08 19:40:04 +02:00
Jeremy Long
9388340e23 updated to resolve reported false negative: https://groups.google.com/forum/#!topic/dependency-check/LjnemiZKeZQ 2016-10-08 06:19:46 -04:00
pernst
2285d2ef4b first commit 2016-10-06 16:40:39 -04:00
Timo Pagel
f84aea0040 MOD: Use https over http and fetch current release 2016-10-06 19:38:22 +02:00
bloihl
452969cc92 Merge remote-tracking branch 'upstream/master' 2016-10-04 09:45:01 -07:00
Jeremy Long
128a600f18 fixed issue with cpeSort being null on first row if no CPE is present 2016-10-04 06:45:17 -04:00
Jeremy Long
7dd9a52e78 corrected false positive per issue #582 2016-10-04 06:20:34 -04:00
Jeremy Long
ff341b7228 corrected false positive per issue #582 2016-10-04 06:19:41 -04:00
bloihl
92a8b4ca85 Merge remote-tracking branch 'upstream/master' 2016-10-03 11:12:01 -07:00
bloihl
384199b28d fixed typo in exception and added documentation for hints schema 2016-10-03 09:52:58 -07:00
Jeremy Long
44edcabe15 fixed duplicate analysis identified in https://github.com/jeremylong/dependency-check-gradle/issues/19 2016-10-01 06:55:37 -04:00
Timo Pagel
1a5e9884fc Add usage for docker to the Readme 2016-09-23 12:26:17 +02:00
Timo Pagel
cda81315d2 Add Dockerfile with own user 2016-09-23 12:25:58 +02:00
Jeremy Long
d7100e54d1 made exitValue check more robust to cover possible future negative exit values 2016-09-21 14:21:50 -04:00
Jeremy Long
989caead9c Merge pull request #568 from xthk/bundler-return-code
fixed check for bundle-audit's return code
2016-09-21 14:07:09 -04:00
Jeremy Long
a9d3b627f1 Merge pull request #564 from awhitford/Upg20160918
Upgrades
2016-09-21 14:06:25 -04:00
Jeremy Long
99a1606df1 stopped writing the serialized dc data 2016-09-21 14:05:19 -04:00
Jeremy Long
6326513c63 improved suppression capability within the report 2016-09-21 14:04:21 -04:00
bloihl
f6cfae595a add false negatives General menu 2016-09-20 21:34:34 -07:00
bloihl
0794efcf41 add general hints document explaining false negatives 2016-09-20 21:01:27 -07:00
bloihl
b9ea82f2c1 adding hints documentation for user management of false negatives 2016-09-20 15:42:49 -07:00
bloihl
8b705b3370 update maven docs with hintsFile option 2016-09-20 15:41:26 -07:00
bloihl
c684607a4d update gradle docs with hintsFile option 2016-09-20 15:41:02 -07:00
bloihl
b00833c2de update ant docs with hintsFile option 2016-09-20 15:40:37 -07:00
bloihl
0ca6bc6ab6 exposing hints to maven through configuration using hintsFile 2016-09-20 12:42:35 -07:00
bloihl
60faddff9b exposing hints file through ant configuration as setHintsFile 2016-09-20 12:40:07 -07:00
bloihl
b35da8ad4b exposing the hints file to the CLI with new option "--hints" 2016-09-20 12:37:58 -07:00
Tilmann Haak
79887c148a fixed check for bundle-audit's return code 2016-09-20 13:43:28 +02:00
Bob Loihl
1ae3457ee6 Merge remote-tracking branch 'upstream/master'
Syncing with master project
2016-09-19 11:36:47 -07:00
Anthony Whitford
d2154c9d29 maven-plugin-annotations 3.5 released. 2016-09-18 23:00:50 -07:00
Anthony Whitford
40ede24a99 Upgraded plugins and dependencies. 2016-09-18 22:30:12 -07:00
Jeremy Long
5960ba919d removed slf4j binding as maven 3.1 no longer requires it, see issue #552 2016-09-16 12:32:24 -04:00
Jeremy Long
f6aaaa8815 updated pre-req per issue #560 2016-09-16 10:25:40 -04:00
Jeremy Long
6f1b20c936 updated report to be able to suppress by GAV and added help text 2016-09-16 10:14:48 -04:00
Jeremy Long
7734a50427 resolve issue #554 2016-09-10 07:20:49 -04:00
Jeremy Long
aef118d375 test and fix for version number matching per issue #558 2016-09-09 06:36:56 -04:00
bloihl
22cae71999 Merge pull request #1 from jeremylong/master
updating fork to latest
2016-09-07 13:49:31 -07:00
Jeremy Long
29d127303c snapshot version 2016-09-06 20:34:22 -04:00
Jeremy Long
5574f1c24f version 1.4.3 2016-09-06 07:04:34 -04:00
Jeremy Long
9457744571 using more robust check for windows os 2016-09-06 06:42:12 -04:00
Jeremy Long
19243c479c disabling batch support for mysql to fix issue #503 - more testing needs to be done 2016-09-06 06:36:08 -04:00
Jeremy Long
e868ce8328 cleaned up file deletion code slightly 2016-09-06 06:23:55 -04:00
Jeremy Long
ffa846c05a updated compareTo so that null values are handled properly 2016-09-06 05:48:12 -04:00
Jeremy Long
dde1791476 minor rewording of a log statement 2016-09-06 05:47:44 -04:00
Jeremy Long
45438a7f06 removed temporary test code 2016-09-05 06:46:06 -04:00
Jeremy Long
c980e77ea3 added assume to skip errors when mono is not installed 2016-09-04 20:50:14 -04:00
Jeremy Long
176d3ddefa temporary fix for issue #534 2016-09-04 19:09:08 -04:00
Jeremy Long
98d783d448 added todo for NPE reasons 2016-09-04 18:51:07 -04:00
Jeremy Long
bcd6634d8a fixed NPE issues 2016-09-04 18:41:58 -04:00
Jeremy Long
0b260cef2a removed duplicated test 2016-09-04 08:00:43 -04:00
Jeremy Long
6a68abbd67 fixed unit test on non-windows 2016-09-01 06:12:35 -04:00
Jeremy Long
9fcf23c802 coverity, checkstyle, pmd, and findbugs suggested corrections 2016-09-01 05:46:09 -04:00
Jeremy Long
5c2c08e051 suppressed false positive, see issue #540 2016-08-30 06:12:17 -04:00
Jeremy Long
1f254997e1 patch to resolve issue #547 2016-08-28 07:46:42 -04:00
Jeremy Long
4f95af0864 removed config 2016-08-27 13:52:05 -04:00
Jeremy Long
6ff39be9d2 initial config 2016-08-27 13:41:29 -04:00
Jeremy Long
6cf5a47971 re-added the check for https that was accidentally removed 2016-08-27 11:43:33 -04:00
Jeremy Long
56da53c700 update for issue #523 - removed specific algorithm list to support differences in JDKs (ibm); just setting the protocol resolves the issue 2016-08-27 07:26:59 -04:00
Jeremy Long
7091e10795 added coverity badge 2016-08-23 21:19:01 -04:00
Jeremy Long
34765c5741 coverity suggested corrections - removed dead local store 2016-08-23 19:24:25 -04:00
Jeremy Long
36c139872a coverity suggested corrections 2016-08-23 19:20:54 -04:00
Jeremy Long
1e77cec677 improved error reporting for issue #547 2016-08-23 19:12:04 -04:00
Jeremy Long
e95e3fb2d0 coverity suggested corrections 2016-08-21 18:40:28 -04:00
Jeremy Long
39c2234e38 coverity suggested corrections 2016-08-21 16:51:09 -04:00
Jeremy Long
f4fff5d9cb checkstyle and formatting updates 2016-08-21 15:59:47 -04:00
Jeremy Long
659785f972 checkstyle correction 2016-08-21 15:28:55 -04:00
Jeremy Long
85c04f6e3e checkstyle correction 2016-08-21 15:28:49 -04:00
Jeremy Long
bef117cbe8 coverity correction 2016-08-21 15:28:10 -04:00
Jeremy Long
46dd7cf86e checkstyle correction 2016-08-21 15:27:34 -04:00
Jeremy Long
9ed5a97267 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2016-08-21 14:41:08 -04:00
Jeremy Long
cc2da70db2 updated ignore list 2016-08-21 14:41:01 -04:00
Jeremy Long
cedd93e774 coverity suggested corrections 2016-08-21 14:40:07 -04:00
Jeremy Long
632e1692eb Merge pull request #541 from biancajiang/swift_support
cocoapods and swift package manager support
2016-08-21 08:03:52 -04:00
Jeremy Long
4861592d2a assume no NPE due to issue with mock and some versions of the JDK 2016-08-21 07:25:37 -04:00
Jeremy Long
22e6d4edf3 updated jdk used by travis 2016-08-21 07:24:54 -04:00
Jeremy Long
e9bd7ff72f Merge branch 'master' of github.com:jeremylong/DependencyCheck 2016-08-21 07:11:56 -04:00
Jeremy Long
e7228fb489 updated jdk used by travis 2016-08-21 07:11:17 -04:00
Jeremy Long
96c03a68f2 Merge pull request #545 from colezlaw/grokassemblyfix
Updated GrokAssembly to deal with non-UTF-8 chars in types
2016-08-20 12:24:21 -04:00
Jeremy Long
4f6f248421 reworked aggregation mojo to resolve issues #325, #386, and #531 2016-08-20 12:15:49 -04:00
Will Stranathan
a8f14c86fd Updated GrokAssembly to deal with non-UTF-8 chars in types 2016-08-20 09:34:15 -04:00
Jeremy Long
36de3d1e25 removed unnecessary stacktrace from logs per issue #544 2016-08-18 09:59:21 -04:00
Jeremy Long
48bc4570e1 Merge pull request #542 from bodewig/document_skip_and_scanConfigurations
document skipConfigurations and scanConfigurations for gradle plugin
2016-08-14 06:48:19 -04:00
Stefan Bodewig
94b272dbae document skipConfigurations and scanConfigurations
closes jeremylong/dependency-check-gradle/#12
2016-08-14 11:10:38 +02:00
bjiang
c093edf459 update copyright and javadoc 2016-08-12 17:12:12 -04:00
bjiang
0164feffcc Merge branch 'master' into swift_support 2016-08-12 16:35:12 -04:00
bjiang
8cd377b99f use value of specification-version as version from Manifest 2016-08-12 13:32:25 -04:00
bjiang
74282c8ac5 filter out version from jar filename for name 2016-08-12 13:15:29 -04:00
Jeremy Long
d2158e5e44 fixed typo 2016-08-11 21:12:47 -04:00
Jeremy Long
9ea16ad1d1 skipped patch for Java 1.6 & 1.7 if the JRE is at least 1.8 - see issue #523 2016-08-11 20:59:26 -04:00
Jeremy Long
45941adb71 fixed type per issue #533 2016-08-11 20:55:36 -04:00
Jeremy Long
c4d662fd2b patch for issue #536 2016-08-11 20:49:27 -04:00
Jeremy Long
d9ce3cda66 snapshot version 2016-08-11 20:09:34 -04:00
Jeremy Long
6bd7d6b078 version 1.4.2 2016-07-31 08:01:47 -04:00
Jeremy Long
84c6dd5dfa resolved gradle issue 14 - https://github.com/jeremylong/dependency-check-gradle/issues/14 2016-07-31 07:34:09 -04:00
Jeremy Long
71e7412f15 corrected example 2016-07-31 07:32:30 -04:00
Jeremy Long
d22c920b35 version 1.4.1 2016-07-30 06:52:48 -04:00
Jeremy Long
f7a0982ca0 checkstyle corrections 2016-07-29 06:12:40 -04:00
Jeremy Long
bed04150e1 reverted H2 upgrade due to issues with Jenkins and Java 6 compatibility 2016-07-27 06:23:56 -04:00
Jeremy Long
ba15de2218 improved error handling 2016-07-27 06:04:56 -04:00
Jeremy Long
e9ec89dc9c improved error handling 2016-07-27 06:04:08 -04:00
Jeremy Long
d09f75658c minor formatting correction 2016-07-24 08:47:27 -04:00
Jeremy Long
62f92db181 added issue template 2016-07-24 08:44:09 -04:00
Jeremy Long
27a98f4244 checkstyle corrections 2016-07-24 08:12:57 -04:00
Jeremy Long
f0a3482eda findbugs correction 2016-07-24 08:07:39 -04:00
Jeremy Long
5f76843c4a findbugs correction 2016-07-24 08:06:54 -04:00
Jeremy Long
c6ea92cff9 added links to the SBT plugin 2016-07-24 07:33:28 -04:00
Jeremy Long
c253308284 checkstyle corrections 2016-07-23 07:45:48 -04:00
Jeremy Long
9ae9c111e3 checkstyle corrections 2016-07-23 07:13:09 -04:00
Jeremy Long
4894372eee minor code quality issues corrected 2016-07-23 06:50:11 -04:00
Jeremy Long
7cf040653f upgraded h2 db version 2016-07-22 06:29:01 -04:00
Jeremy Long
034bd4dba0 testing fix to resolve connection issues with NVD 2016-07-19 07:04:24 -04:00
Jeremy Long
af12a2161c testing fix to resolve connection issues with NVD 2016-07-19 06:54:25 -04:00
Jeremy Long
57fcf6fde3 testing connection errors 2016-07-17 08:18:47 -04:00
Jeremy Long
c5757dc5f4 updates to resolve issue #215 2016-07-17 07:19:56 -04:00
Jeremy Long
6d5d5ceb7b Updated exception handling so that issue #215 can be resolved 2016-07-14 06:31:54 -04:00
bjiang
2fa8507d69 merge owasp 1.4.1 2016-07-12 16:22:05 -04:00
Jeremy Long
f23003ead3 fields can be final 2016-07-10 07:13:08 -04:00
Jeremy Long
c996f6b436 improved exception handling as part of resolution for #215 2016-07-10 07:12:43 -04:00
Jeremy Long
d2ee66a1c4 there was no need to extend IOException 2016-07-10 07:11:03 -04:00
Jeremy Long
26b0dd5ef5 updated javadoc 2016-07-10 06:56:26 -04:00
Jeremy Long
ad4149a259 updated documentation for PR #528 2016-07-10 06:27:40 -04:00
Jeremy Long
9611c3b478 Merge pull request #528 from felfert/master
Thanks for the PR!
2016-07-10 06:13:09 -04:00
Jeremy Long
cead88d221 reworked initialization exceptions as part of planned resolution for issue #215 2016-07-09 07:39:00 -04:00
Jeremy Long
c1e1a6bb4f cleaned up imports 2016-07-09 07:35:36 -04:00
Fritz Elfert
6212a5f740 Compatibility fixes for MariaDB JDBC driver 2016-07-08 22:27:10 +02:00
Jeremy Long
b3d9ea3c47 minor code reorg 2016-07-07 06:18:54 -04:00
Jeremy Long
cd51989354 Merge pull request #526 from nicolastrres/master
Updating gradle dependencyCheck documentation
2016-07-07 06:05:12 -04:00
nicolastrres
b705ae5f0c Updating gradle dependencyCheck documentation 2016-07-06 14:57:24 -03:00
Jeremy Long
13b53537fa incorrectly set quick query value during recheck - see issue #523 2016-07-06 06:48:10 -04:00
Jeremy Long
7d05aa6073 added logging for issue #523 2016-07-06 06:44:43 -04:00
Jeremy Long
85de173086 fixed StackOverflowError from issue #523 2016-07-06 06:32:57 -04:00
Jeremy Long
d264d804c8 patches and test case update for issue #522 2016-07-05 09:09:58 -04:00
Jeremy Long
8272da615e improved test cases to debug issue #522 2016-07-04 08:43:43 -04:00
Jeremy Long
857b993d51 ensured analyzers were correctly initialized and closed 2016-07-04 07:55:53 -04:00
Jeremy Long
a71edf584e additional testing added 2016-07-04 07:55:19 -04:00
Jeremy Long
461d7fec0e fixed typo 2016-07-04 07:54:57 -04:00
Jeremy Long
5e3da035dd resolved merge conflict with #525 2016-07-04 07:11:45 -04:00
Jeremy Long
ebb52995a5 converted hint analyzer to use an externalized configuration file to simplify the resolution of issue #522 2016-07-04 07:10:07 -04:00
Jeremy Long
519b82c620 minor cleanup of code/comments 2016-07-04 07:07:07 -04:00
Jeremy Long
84682d07c6 converted hint analyzer to use an externalized configuration file to simplify the resolution of issue #522 2016-07-04 07:06:17 -04:00
Jeremy Long
960eeb19af converted hint analyzer to use an externalized configuration file to simplify the resolution of issue #522 2016-07-04 07:05:31 -04:00
Hans Joachim Desserud
ab3920f8f1 Replace raw Iterator with for each 2016-07-02 16:29:32 +02:00
Hans Joachim Desserud
f5f5857897 Add missing @Overrides 2016-07-02 16:23:24 +02:00
Hans Joachim Desserud
1c400b410e Remove unused imports 2016-07-02 16:23:16 +02:00
Jeremy Long
cc751aa224 updated to skip custom scripts in executable scripts 2016-06-27 19:39:17 -04:00
Jeremy Long
c20892ee3e removed stack traces from build 2016-06-27 08:46:46 -04:00
Jeremy Long
32ab53c9e1 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2016-06-26 07:32:23 -04:00
Jeremy Long
d0a7d9eb42 added test for issue #454 to ensure fully executable jar 2016-06-26 07:32:10 -04:00
Jeremy Long
a1a9602509 added support for fully executable jar files per issue #454 2016-06-26 07:31:17 -04:00
Jeremy Long
cf97c89fe0 fully executable jar for testing resolution for issue #454 2016-06-26 07:30:39 -04:00
Jeremy Long
8895bc85ea Merge pull request #521 from awhitford/Upg20160624
Plugin and Dependency Upgrades
2016-06-25 06:53:49 -04:00
Anthony Whitford
1a9976c6ca commons-compress 1.12, maven-jar-plugin 3.0.2, maven-source-plugin 3.0.1, maven-javadoc-plugin 2.10.4. 2016-06-24 23:51:12 -07:00
Anthony Whitford
f47ebf6145 jMockit 1.24, jSoup 1.9.2. 2016-06-24 23:35:24 -07:00
Jeremy Long
0380715311 resolved issue #514 2016-06-24 07:09:10 -04:00
Jeremy Long
80ad16c7fa updated to correctly label groovy's dependency scope 2016-06-22 06:48:11 -04:00
Jeremy Long
e56e9035b6 updated to correctly label groovy's dependency scope 2016-06-22 06:38:10 -04:00
Jeremy Long
73f22d32d2 fixed typo 2016-06-22 06:34:50 -04:00
Jeremy Long
c3bc56eebc additional suppressions 2016-06-18 07:33:30 -04:00
Jeremy Long
35cc14815e added property to solve issue #500 2016-06-18 07:32:57 -04:00
Jeremy Long
9be91474f6 staging 2016-06-18 06:46:28 -04:00
Jeremy Long
adf949bf08 added logging of URL 2016-06-18 06:18:16 -04:00
Jeremy Long
c6bf41b8ba staging 2016-06-18 06:17:55 -04:00
Jeremy Long
bc656c6218 version 1.4.0 2016-06-16 06:49:24 -04:00
Jeremy Long
f46226d055 updated documentation 2016-06-16 06:49:07 -04:00
bjiang
00d4ee47de merge upstream 2016-06-15 13:54:49 -04:00
Jeremy Long
c5ffc21660 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2016-06-15 06:51:18 -04:00
Jeremy Long
d89b1fdc6a updated proxy configuration information 2016-06-15 06:51:05 -04:00
Jeremy Long
8324287bd6 updated proxy configuration information 2016-06-15 06:50:45 -04:00
Jeremy Long
6be161a546 updated experimental documentation 2016-06-15 06:19:06 -04:00
Jeremy Long
027350e1ba Merge pull request #516 from msrb/weightings-bug
Correctly apply weightings when searching for CPEs
2016-06-15 05:48:55 -04:00
Michal Srb
a2309e1c2e Correctly apply weightings when searching for CPEs 2016-06-14 21:34:04 +02:00
Jeremy Long
c34dc97bd4 updated snapshot version 2016-06-11 08:13:14 -04:00
Jeremy Long
7e8749146e updated documentation 2016-06-11 08:12:09 -04:00
Jeremy Long
8680ecd033 updated documentation for experimental analyzers 2016-06-08 06:40:07 -04:00
Jeremy Long
4e4417c7af checkstyle corrections 2016-06-06 18:45:39 -04:00
Jeremy Long
7909bbbbe9 corrected remaining merge conflicts that were missed earlier as they were in comments 2016-06-06 06:45:33 -04:00
Jeremy Long
6fd831e688 corrected JavaDoc links 2016-06-06 06:44:42 -04:00
Jeremy Long
59a4825c70 added license 2016-06-05 17:40:42 -04:00
Jeremy Long
1ba3681457 updated the ci 2016-06-05 17:32:57 -04:00
Jeremy Long
78becffb2e updated CI build status url 2016-06-05 17:29:29 -04:00
Jeremy Long
e7efd7070b Merge pull request #508 from albuch/h2-clean-orphans
Thanks for the PR!
2016-06-05 17:25:53 -04:00
Jeremy Long
ec6471e8c7 added notes for future enhancement 2016-06-05 17:17:38 -04:00
Jeremy Long
b01ae2c6d3 updated to speed-up the unit test 2016-06-05 17:16:43 -04:00
Jeremy Long
ef4a260615 fixed build issue with CveDB being closed before saving the property 2016-06-05 17:16:05 -04:00
Jeremy Long
f6b80630dd temporary travis debugging code 2016-06-05 08:30:22 -04:00
Jeremy Long
f43589589d fixed setup to call super 2016-06-05 08:04:45 -04:00
Jeremy Long
06b59cf79b initial 2016-06-05 07:49:59 -04:00
Jeremy Long
a2187205e0 only update last checked after updates were performed without errors 2016-06-05 06:45:13 -04:00
Jeremy Long
52f269a289 Merge branch 'jabbrwcky-batch-update' 2016-06-05 06:33:45 -04:00
Jeremy Long
310ca967a1 fixed compareTo in order to resolve issue #503 2016-06-05 06:32:49 -04:00
Jeremy Long
c4b423cb0f additional tests resources to fix issue #503 2016-06-05 06:32:11 -04:00
Alexander v. Buchholtz
8a6c940aaf Optimized CLEANUP_ORPHANS query for H2 1.4.x
Original query from dbStatements.properties writes millions of records from the subselect to the file system due to the MAX_MEMORY_ROWS setting: http://www.h2database.com/html/grammar.html?highlight=max_memory_rows&search=MAX_MEM#set_max_memory_rows
Database maintenance task therefore takes forever.
The new query (copied from postgresql) works way faster.
2016-06-04 23:36:43 +02:00
Jeremy Long
b295e927b7 resolved merge conflict 2016-06-04 09:09:57 -04:00
Jeremy Long
63d24737dd Merge pull request #506 from jabbrwcky/issue-503
Thanks for the test cases
2016-06-04 07:47:44 -04:00
Jeremy Long
60ce02ba28 improved logging to assist in resolving issue #503 2016-06-04 07:46:42 -04:00
Jeremy Long
95939ed66c added javadoc per checkstyle 2016-06-04 07:45:07 -04:00
Jeremy Long
7f609a35be added javadoc per checkstyle 2016-06-04 07:44:42 -04:00
Jeremy Long
f7b534f1ee checkstyle correction 2016-06-04 07:44:08 -04:00
Jeremy Long
cd5f9e2f13 findbugs correction 2016-06-04 07:42:58 -04:00
Jens Hausherr
e79da72711 Use batch update for references and vulnerable software if supported by DB. 2016-06-03 10:22:54 +02:00
Jens Hausherr
1ba081959b Accidentally dropped some imports 2016-06-03 10:09:28 +02:00
Jens Hausherr
578dc63652 Vulnerable Software: Compact toString() output; remove accessor calls for own properties 2016-06-03 09:54:25 +02:00
Jens Hausherr
fccd683b50 add toString() for Vulnerability 2016-06-03 09:52:35 +02:00
Jens Hausherr
f3d3a25856 Add more test cases 2016-06-03 09:50:28 +02:00
Jens Hausherr
6d70c92795 Add to String-Method to Reference 2016-06-03 09:41:48 +02:00
Jeremy Long
3c525d8e3a fixed issue #505 2016-06-02 19:30:38 -04:00
Jeremy Long
a6b47c7c43 clarified note 2016-06-02 19:23:51 -04:00
Jeremy Long
5b52f01f3d updated documentation for issue#498 2016-05-30 08:23:58 -04:00
Jeremy Long
d13bbd43f3 added experimental flag to force users to enable this and by doing so understand that these may not be as production ready as the Java analyzer (see issue #498) 2016-05-30 08:09:14 -04:00
Jeremy Long
0394d1a24f checkstyle correction - reduced method length 2016-05-30 07:59:53 -04:00
Jeremy Long
446222e127 removed unnecessary exclude 2016-05-30 07:59:18 -04:00
Jeremy Long
05d7aa898d minor reformatting to reduce line length (checkstyle) 2016-05-30 07:37:44 -04:00
Jeremy Long
73f7fc1d51 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2016-05-30 07:19:15 -04:00
Jeremy Long
f0262466d4 Merge pull request #504 from awhitford/Upg052816
Plugin updates
2016-05-30 07:19:17 -04:00
Anthony Whitford
1ecde9bbc1 maven-jar-plugin 3.0.0 released; maven-resources-plugin 3.0.0 released. 2016-05-28 08:27:39 -07:00
Jens Hausherr
ae5a766092 Limit split to fix #503 2016-05-27 15:07:59 +02:00
Jeremy Long
6a807bc002 checkstyle/findbugs corrections 2016-05-25 17:21:46 -04:00
Jeremy Long
c0384bb0ee Merge pull request #502 from xthk/master
Update initialize_mysql.sql
2016-05-25 06:12:52 -04:00
Tilmann H
2906b315b3 Update initialize_mysql.sql
lower cased "properties" in UPDATE statement
2016-05-25 11:36:09 +02:00
Jeremy Long
425fd65bd8 added more false positive suppressions 2016-05-21 07:09:08 -04:00
Jeremy Long
7d83362a85 removed stack trace from build when ruby and bundle-audit are not installed 2016-05-15 07:49:17 -04:00
Jeremy Long
0b26894112 checkstyle/pmd/findbugs correction(s) 2016-05-15 07:48:26 -04:00
Jeremy Long
17f810a720 implement issue #498 2016-05-15 07:30:38 -04:00
Jeremy Long
71ef8061f9 merge conflict resolved 2016-05-15 07:29:17 -04:00
Jeremy Long
353b17690f checkstyle/pmd/findbugs correction(s) 2016-05-15 07:22:52 -04:00
Jeremy Long
6790727260 ensured resources are properly closed 2016-05-15 07:02:18 -04:00
Jeremy Long
e129f7db85 Merge branch 'biancajiang-ruby_dependency' 2016-05-15 06:46:18 -04:00
Jeremy Long
ea942398e3 updated test case to use the correct parent class that allows for use of the database during testing 2016-05-15 06:45:57 -04:00
Jeremy Long
5ad72cae3f Merge branch 'ruby_dependency' of git://github.com/biancajiang/DependencyCheck into biancajiang-ruby_dependency 2016-05-14 09:45:26 -04:00
Jeremy Long
5f945bc696 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2016-05-14 07:21:07 -04:00
Jeremy Long
6f451736ba Add ability to flag analyzers as experimental so that they are not always enabled 2016-05-14 07:20:53 -04:00
Jeremy Long
30856f4a4f corrected doxia version 2016-05-14 07:19:12 -04:00
bjiang
413c71eb0a Merge branch 'ruby_dependency' into swift_support 2016-05-13 13:49:40 -04:00
bjiang
9d1408be20 do not use actual path for packagePath from compress case 2016-05-13 13:33:34 -04:00
Jeremy Long
f21f371751 Merge pull request #494 from erikerikson/master
Align documentation with current project name specification flag
2016-05-06 18:23:32 -04:00
bjiang
2b761279e4 Merge branch 'ruby_dependency' into swift_support 2016-05-06 17:59:28 -04:00
bjiang
d5e8f54214 fix RubyBundlerAnalyzer.accept 2016-05-06 17:55:21 -04:00
bjiang
83f83d4eee add RubyBundlerAnalyzerTest 2016-05-06 17:45:40 -04:00
bjiang
b0f4ab9ba5 cleanup & Rakefile support placeholder 2016-05-06 17:25:08 -04:00
bjiang
06dad8f79c javadoc 2016-05-06 17:22:39 -04:00
bjiang
83ab122ddf disable RubyBundlerAnalyzer if RubyBundleAuditAnalyzer 2016-05-06 16:42:39 -04:00
bjiang
8a42fe4ae1 javadoc 2016-05-06 16:19:59 -04:00
bjiang
94c6778b89 better javadoc 2016-05-06 16:14:16 -04:00
bjiang
c0e5973517 rename RubyBundlerAnalyzer and javadoc 2016-05-06 15:50:35 -04:00
bjiang
1e7bbfa7c1 bundle the same SWIFT package by different analyzers 2016-05-06 13:43:05 -04:00
bjiang
dc7245ff6e code cleanup 2016-05-06 12:55:59 -04:00
bjiang
ffaf7b40e9 merge from ruby_dependency 2016-05-06 10:31:17 -04:00
bjiang
4de3fb1f2a javadoc 2016-05-06 10:25:49 -04:00
bjiang
99355d993a code cleanup with more comments 2016-05-06 10:24:28 -04:00
bjiang
d25f6e813c new analyzer for Package.swift 2016-05-05 19:21:21 -04:00
bjiang
043f8e0523 cleanup 2016-05-03 15:45:08 -04:00
bjiang
5fcf2a2623 get authors field 2016-05-03 14:53:25 -04:00
Erik Erikson
ee77fccffd Align documentation with current project name specification flag
When using the "--app" flag, the following warning is produced:

 [WARN] The 'app' argument should no longer be used; use 'project' instead.

 This change updates the documentation from suggesting "--app" to "--project"
2016-05-03 10:31:00 -07:00
bjiang
f1422adf75 merge upstream 2016-05-03 13:12:05 -04:00
bjiang
189da08885 merge upstream 2016-05-03 13:05:56 -04:00
bjiang
c2b1742582 support cocoapods for swift 2016-05-03 12:41:39 -04:00
Jeremy Long
9e63ac6d5b Merge pull request #493 from awhitford/CommIO25
Commons-IO 2.5 upgrade
2016-05-02 19:26:52 -04:00
Jeremy Long
4d7ab8b187 Merge pull request #491 from mwieczorek/MSSQL_Support
MSSQL Support
2016-05-02 19:25:39 -04:00
Jeremy Long
4de9818bee original CVE used in test does not exist in the current default DB used for tests. 2016-05-01 20:16:30 -04:00
Jeremy Long
7a2e1fd221 updated bundle audit score to be more accurate 2016-05-01 15:39:12 -04:00
Jeremy Long
d0ca800a23 Merge branch 'geramirez-fix-cvss-for-bundle-audit' 2016-04-30 11:20:39 -04:00
Jeremy Long
35ffd56ea9 fixed compile issues in PR 2016-04-30 11:20:26 -04:00
Jeremy Long
84b992d3a1 Merge branch 'fix-cvss-for-bundle-audit' of git://github.com/geramirez/DependencyCheck into geramirez-fix-cvss-for-bundle-audit 2016-04-30 11:02:16 -04:00
Jeremy Long
9e46364759 updated test cases to track down build issue 2016-04-30 10:56:50 -04:00
Dave Goddard
0f37c2b59c Adding sinatra fixture
Signed-off-by: Gabriel Ramirez <gabriel.e.ramirez@gmail.com>
2016-04-29 16:17:51 -04:00
Michal Wieczorek
33852ea7e3 MSSQL Support 2016-04-27 23:35:05 +02:00
Anthony Whitford
4fbed1cdac Added Charset to avoid deprecated FileUtils methods. 2016-04-27 01:37:00 -07:00
Anthony Whitford
42c61ab457 commons-io 2.5 released; jsoup 1.9.1 released. 2016-04-27 01:22:20 -07:00
David Jahn
8c6b9f9c68 Fixed CVSS for Ruby.
This bug was discovered when scanning Ruby applications and getting back
a `-1` CVSS score. This turns out to be a problem with the bundle-audit CVE
database.

Our solution was to use the NVD database, which dependency-check uses, to
get the CVSS score for Ruby only if the criticality is missing from the
bundle-audit output. Keep in mind there are compilation errors with the
commit at the moment.

Fixes #485

Signed-off-by: Gabriel Ramirez <gabriel.e.ramirez@gmail.com>
2016-04-25 09:40:54 -04:00
Jeremy Long
abebecac4a updated parser and tests to revert to old suppression schema if new schema fails 2016-04-24 09:06:00 -04:00
Jeremy Long
87efe429da fixed broken schema 2016-04-24 09:05:26 -04:00
Jeremy Long
35128b0bd4 updated 2016-04-24 09:04:22 -04:00
Jeremy Long
186cb2270f ensure updated schema is published to the site 2016-04-24 07:25:32 -04:00
Jeremy Long
deda02f879 updated suppression schema to require a CPE, CVE, or CVSS Below per issue #488 2016-04-24 07:20:11 -04:00
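For context, the suppression rules referenced above are plain XML. A minimal sketch of a rule satisfying the "CPE, CVE, or CVSS Below" requirement might look like the following; the schema URL and the gav/cpe values are illustrative assumptions and are not taken from this changeset.
```xml
<?xml version="1.0" encoding="UTF-8"?>
<!-- Hypothetical suppression file; namespace and values are assumptions. -->
<suppressions xmlns="https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.1.xsd">
    <suppress>
        <notes>False positive reported against foo.jar</notes>
        <gav regex="true">org\.sample:foo:.*</gav>
        <!-- The updated schema requires at least one of: cpe, cve, or cvssBelow. -->
        <cpe>cpe:/a:apache:tomcat</cpe>
    </suppress>
</suppressions>
```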
Jeremy Long
bcc2478ef7 snapshot version 2016-04-24 07:17:42 -04:00
Jeremy Long
8d54654482 Merge pull request #487 from awhitford/DepUpg160416
Upgraded plugins and dependencies
2016-04-17 21:02:54 -04:00
Jeremy Long
08318107c1 Merge pull request #486 from awhitford/MavenWarnings
Maven warnings
2016-04-17 21:02:35 -04:00
Anthony Whitford
a5e77c85a6 Maven Site Plugin 3.5.1, Doxia 1.7.1, Ant 1.9.7, Maven 3.3.9. 2016-04-16 11:21:24 -07:00
Anthony Whitford
1e8d2aff75 Added code to avoid an unchecked cast warning. 2016-04-16 11:08:13 -07:00
Anthony Whitford
bc0a0f9902 Added missing serialVersionUID. 2016-04-16 11:07:19 -07:00
bjiang
da82f975e4 Add test for project url from pom.xml 2016-04-15 12:30:14 -04:00
bjiang
48af120db8 add project URL evidence from pom 2016-04-15 11:28:33 -04:00
Jeremy Long
8722eae766 version 1.3.6 2016-04-10 07:06:07 -04:00
Jeremy Long
53776936ca fix FP per issue #469 2016-04-09 11:27:08 -04:00
Jeremy Long
dca465b801 fixed minor warning about file encoding during build 2016-04-09 07:31:40 -04:00
Jeremy Long
43cd115dc7 Merge pull request #482 from awhitford/DepUpg-160406
Dependency Updates
2016-04-09 06:59:29 -04:00
Jeremy Long
e7ba08e52c updated log message to assist in debugging an issue 2016-04-09 06:51:00 -04:00
Jeremy Long
9df12e6ff2 updated log message to assist in debugging an issue 2016-04-09 06:49:44 -04:00
Jeremy Long
b5c7fb747c updated log message to assist in debugging an issue 2016-04-09 06:38:37 -04:00
Anthony Whitford
a40a4afe80 SLF4J 1.7.21 released; commons-compress 1.11 released. 2016-04-06 21:39:27 -07:00
bjiang
739f595f13 improve python package identification 2016-04-05 16:12:14 -04:00
bjiang
e07e892969 Merge branch 'master' into ruby_dependency 2016-04-05 14:47:17 -04:00
Jeremy Long
d4a6c58cc8 upgrade the transitive dependency commons-collections 2016-04-05 12:08:16 -04:00
Jeremy Long
d644431a4e Merge pull request #479 from awhitford/SLF4J1720LB117
SLF4J 1.7.20 and Logback 1.1.7 released.
2016-04-03 07:41:54 -04:00
bjiang
33bbb50b43 Ruby .gemspec analyzer must run before the bundler analyzer to get proper package grouping 2016-04-02 13:51:15 -04:00
bjiang
f89d7df305 improve vendor evidences for .gemspec analyzer 2016-04-02 13:49:50 -04:00
bjiang
3b02cd0e39 bundling same Ruby packages from .gemspec and bundler analyzers 2016-04-02 13:48:25 -04:00
bjiang
52cd50e0a8 keep delimiter space in array value 2016-04-01 14:08:41 -04:00
bjiang
996a970081 fix version evidence name 2016-04-01 14:07:03 -04:00
bjiang
6c0b65acd4 capture licenses and homepage in Ruby bundler analyzer 2016-04-01 13:30:36 -04:00
Anthony Whitford
f4df263dfe SLF4J 1.7.20 and Logback 1.1.7 released. 2016-03-30 21:03:51 -07:00
bjiang
8c659acc82 new Ruby bundler analyzer 2016-03-30 20:20:10 -04:00
bjiang
7aba2429af merge from upstream 2016-03-28 14:23:09 -04:00
bjiang
ab48d2c2ff multiple improvements 2016-03-28 14:06:30 -04:00
Jeremy Long
0b699d45bf Merge pull request #467 from colezlaw/python-init
Patch for jeremylong/DependencyCheck/#466
2016-03-25 19:35:06 -04:00
Jeremy Long
54beafa262 Merge pull request #475 from biancajiang/master
Fix test to skip the proper test case when bundle-audit is not available
2016-03-25 19:34:34 -04:00
Jeremy Long
531d4923eb Merge pull request #470 from MrBerg/suppress-osvdb
Make it possible to suppress vulnerabilities from OSVDB
2016-03-25 19:33:43 -04:00
Jeremy Long
b160a4d1dd Merge pull request #478 from swapnilsm/master
Added primary key to "software" table
2016-03-25 19:32:45 -04:00
Swapnil S. Mahajan
ca54daf456 Added primary key to "software" table
"software" is a bridge table so there should always be only one record for a pair of cpeEntryId and cveid.
2016-03-25 16:55:53 +05:30
bjiang
a22fc550b3 #472 fix test to only skip the proper test case. 2016-03-21 11:38:52 -04:00
Jeremy Long
0650d93953 Merge pull request #474 from awhitford/SLF4J1719
SLF4J 1.7.19 released.
2016-03-21 08:18:41 -04:00
Jeremy Long
5633258fa7 Update README.md 2016-03-21 08:16:06 -04:00
Jeremy Long
12278cda58 Update README.md
Fixed broken link to documentation.
2016-03-21 08:12:39 -04:00
Jeremy Long
84d1f08fda updated documentation for NVD urls to match what is hosted by NIST 2016-03-21 07:58:02 -04:00
Jeremy Long
c184292a57 Merge pull request #473 from biancajiang/master
Handle bundle-audit not available case and fix RubyBundleAuditAnalyzer test cases
2016-03-21 07:52:17 -04:00
Anthony Whitford
4cdfcb9f9d SLF4J 1.7.19 released. 2016-03-20 20:47:07 -04:00
bjiang
343a78917c Fixed #472. Disable RubyBundleAuditAnalyzer if exception during initialize.
changes:
1. disable self during initialize before bubbling exception
2. new test case RubyBundleAuditAnalyzerTest#testMissingBundleAudit()
2016-03-20 17:06:03 -04:00
bjiang
ff7d0fdb9d #472 first fix and improve RubyBundleAuditAnalyzerTest.java
Tests were failing because Gemfile.lock and Gemfile were missing.
The files were missing because the parent .gitignore excluded them.
Changes:
1. Force-added new test files and updated the test with more result
validation.
2. Added error logging from bundle-audit.
3. Placeholder for the bundle-audit install directory in the test
dependencycheck.properties.
2016-03-20 15:54:24 -04:00
Jonas Berg
db26b46be0 Make it possible to suppress vulnerabilities from OSVDB 2016-03-16 13:59:23 +02:00
Will Stranathan
d77a70c360 Patch for jeremylong/DependencyCheck/#466
This does two things:
1) Updates the PythonPackageAnalyzer to HIGH evidence for __init__.py
2) Removes evidence from the FileNameAnalyzer for __init__.py[co]?

TODO: The PythonPackageAnalyzer still needs to add evidence for
__init__.py[co] even though it won't be able to analyze its contents.
Also, it needs to work up the tree from __init__.py files to get the
parent folders (not sure why subfolders are not being inspected).
2016-03-12 15:09:43 -05:00
Jeremy Long
42f4ae65d1 Merge pull request #463 from chadjvw/master
Updated Oracle init script
2016-03-07 20:43:05 -05:00
Chad Van Wyhe
88daac31d2 Merge pull request #1 from chadjvw/oracle-init-fix
fixed trigger compilation and added version number
2016-03-07 12:59:02 -06:00
Chad Van Wyhe
ac04c173a8 fixed trigger compilation and added version number 2016-03-07 12:55:18 -06:00
Jeremy Long
8401494fbc Merge pull request #462 from thc202/issues-page-gradle-purge
Fix issues in Gradle's dependencyCheckPurge task site page
2016-03-06 18:54:03 -05:00
Jeremy Long
97af118cb9 Merge pull request #461 from thc202/broken-link-readme
Fix broken link in README.md file... Thanks!
2016-03-06 18:53:12 -05:00
thc202
091e6026bc Fix issues in Gradle's dependencyCheckPurge task site page
Fix broken link to dependencyCheckUpdate task page, remove repeated
closing character ']'.
Replace $H with # in the heading of the example.
2016-03-06 23:46:12 +00:00
thc202
c798ede7bf Fix broken link in README.md file
Correct the link to NOTICE.txt file, change from NOTICES.txt to
NOTICE.txt.
2016-03-06 23:46:04 +00:00
Jeremy Long
225851f067 Merge pull request #460 from awhitford/DepUpg160306
Dependency Upgrades
2016-03-06 18:01:03 -05:00
Jeremy Long
9dd65ecf70 Merge pull request #459 from awhitford/MPIR29
maven-project-info-reports-plugin 2.9 released.
2016-03-06 18:00:48 -05:00
Jeremy Long
1a9cc4b6be snapshot 2016-03-06 17:42:18 -05:00
Jeremy Long
a612f206bf version 1.3.5.1 2016-03-06 17:30:37 -05:00
Jeremy Long
e51031c62a fix bug in getLastProject for non-site executions 2016-03-06 17:28:40 -05:00
Anthony Whitford
e30c29ef50 SLF4J 1.7.18 released; Logback 1.1.6 released; jMockit 1.22 released. 2016-03-06 08:53:58 -08:00
Anthony Whitford
91ddcadbcd Removed maven-site-plugin from dependencyManagement. 2016-03-06 08:51:04 -08:00
Anthony Whitford
8c145860e5 maven-project-info-reports-plugin 2.9 released. 2016-03-06 08:35:09 -08:00
Jeremy Long
a19dd7687e v 1.3.6-SNAPSHOT 2016-03-05 16:13:29 -05:00
Jeremy Long
550d6ca083 v1.3.5 2016-03-05 16:08:59 -05:00
Jeremy Long
b425411357 doclint fixes 2016-03-05 13:18:42 -05:00
Jeremy Long
a1f0cf749d doclint fixes 2016-03-05 13:18:38 -05:00
Jeremy Long
22e0d1c74e doclint fixes 2016-03-05 13:18:37 -05:00
Jeremy Long
cdc07047aa doclint fixes 2016-03-05 13:18:37 -05:00
Jeremy Long
c832c2da28 doclint fixes 2016-03-05 13:18:37 -05:00
Jeremy Long
8daa713639 doclint fixes 2016-03-05 13:18:36 -05:00
Jeremy Long
e0a2966706 doclint fixes 2016-03-05 13:18:36 -05:00
Jeremy Long
354bfa14f9 doclint fixes 2016-03-05 13:18:35 -05:00
Jeremy Long
46b91702ba doclint fixes 2016-03-05 13:18:35 -05:00
Jeremy Long
de9516e368 doclint fixes 2016-03-05 13:18:35 -05:00
Jeremy Long
3924e07e5c doclint fixes 2016-03-05 13:18:34 -05:00
Jeremy Long
76bcbb5a7e doclint fixes 2016-03-05 13:18:34 -05:00
Jeremy Long
8022381d1c doclint fixes 2016-03-05 13:18:33 -05:00
Jeremy Long
feb1233081 doclint fixes 2016-03-05 13:18:33 -05:00
Jeremy Long
36eefd0836 doclint fixes 2016-03-05 13:18:32 -05:00
Jeremy Long
0e31e59759 doclint fixes 2016-03-05 13:18:32 -05:00
Jeremy Long
4a4c1e75da doclint fixes 2016-03-05 13:18:32 -05:00
Jeremy Long
b0bfd2292a doclint fixes 2016-03-05 13:18:31 -05:00
Jeremy Long
7214b24357 doclint fixes 2016-03-05 13:18:31 -05:00
Jeremy Long
24637f496f doclint fixes 2016-03-05 13:18:30 -05:00
Jeremy Long
d8ecde5265 doclint fixes 2016-03-05 13:18:30 -05:00
Jeremy Long
28840c6209 doclint fixes 2016-03-05 13:18:29 -05:00
Jeremy Long
1696213406 doclint fixes 2016-03-05 13:18:29 -05:00
Jeremy Long
6f315ac765 doclint fixes 2016-03-05 13:18:28 -05:00
Jeremy Long
a485307d92 doclint fixes 2016-03-05 13:18:28 -05:00
Jeremy Long
3d3b861ba0 doclint fixes 2016-03-05 13:18:28 -05:00
Jeremy Long
4b33ed25d5 doclint fixes 2016-03-05 13:18:27 -05:00
Jeremy Long
e264880c7b doclint fixes 2016-03-05 13:18:27 -05:00
Jeremy Long
ef8212701f doclint fixes 2016-03-05 13:18:26 -05:00
Jeremy Long
492157a502 doclint fixes 2016-03-05 13:18:26 -05:00
Jeremy Long
2605bc182e doclint fixes 2016-03-05 13:18:25 -05:00
Jeremy Long
fe8dfdd804 doclint fixes 2016-03-05 13:18:25 -05:00
Jeremy Long
bd917bc990 doclint fixes 2016-03-05 13:18:24 -05:00
Jeremy Long
c5c32f683f doclint fixes 2016-03-05 13:18:24 -05:00
Jeremy Long
5506e58c98 doclint fixes 2016-03-05 13:18:23 -05:00
Jeremy Long
5af2d49b18 doclint fixes 2016-03-05 13:18:23 -05:00
Jeremy Long
0fd35a4925 doclint fixes 2016-03-05 13:18:23 -05:00
Jeremy Long
7ed20b1244 doclint fixes 2016-03-05 13:18:22 -05:00
Jeremy Long
efa6a78255 doclint fixes 2016-03-05 13:18:22 -05:00
Jeremy Long
8b58df3b34 checkstyle/pmd/findbugs corrections 2016-03-05 07:07:53 -05:00
Jeremy Long
0d2a090e1f Merge pull request #456 from awhitford/Site35
Upgrade for Maven Site Plugin 3.5
2016-03-04 17:42:14 -05:00
Jeremy Long
7860d635a9 ensured deserialization is secure 2016-03-04 17:38:48 -05:00
Anthony Whitford
ba91c9fa9b Upgraded maven site plugin to 3.5, and doxia markdown module to 1.7. 2016-02-28 09:34:19 -08:00
Anthony Whitford
b3630e0d5e Upgraded the Fluido 1.5 skin and had to update site head for maven site plugin 3.5. See http://maven.apache.org/plugins/maven-site-plugin/examples/sitedescriptor.html#Inject_xhtml_into_head 2016-02-28 09:33:54 -08:00
Jeremy Long
f752285912 added test for parse manifest per issue #455 2016-02-27 07:14:27 -05:00
Jeremy Long
5a150d9b0e parsed additional entries in the manifest per issue #455 2016-02-27 07:13:57 -05:00
Jeremy Long
f0aa185832 added test dependency per issue #455 2016-02-27 07:12:30 -05:00
Jeremy Long
9592f058d4 add more false positives to the suppression list 2016-02-25 18:01:21 -05:00
Jeremy Long
f630794e22 added warning about site:stage with regards to the aggregate goal 2016-02-24 17:00:31 -05:00
Jeremy Long
93636e89c5 fixed broken hyperlinks 2016-02-23 20:54:24 -05:00
Jeremy Long
585002c25c resolution for issue #386 fixed the conditional so that execution occurred on the last non-skipped project in the reactor 2016-02-23 20:42:10 -05:00
Jeremy Long
412ccc1be1 per issue #429 updates will only occur if the database schema and expected schema match exactly 2016-02-21 08:38:29 -05:00
Jeremy Long
8b1306a36c per issue #429 non-h2 databases may be used as long as the database schema is of the same major version and greater than or equal to the expected version. 2016-02-21 08:11:29 -05:00
Jeremy Long
81026e8dca isolate the analyze method to try and resolve multiple threads hitting the Lucene query parsers at the same time per issue #388 2016-02-20 08:18:00 -05:00
Jeremy Long
dd440c8f9f resolve issue #451 2016-02-20 08:12:14 -05:00
Jeremy Long
76f3e4b27e Merge pull request #449 from christiangalsterer/i444
Support nonProxyHosts parameter in settings.xml #444
2016-02-17 19:09:41 -05:00
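For reference, nonProxyHosts is the standard Maven proxy setting; a minimal settings.xml fragment that a build relying on this change might use is sketched below. The host names and port are placeholders, and exactly how the plugin consumes the value is not shown in this changeset.
```xml
<!-- Fragment of ~/.m2/settings.xml; hosts and port are placeholders -->
<settings>
  <proxies>
    <proxy>
      <id>corporate</id>
      <active>true</active>
      <protocol>http</protocol>
      <host>proxy.example.com</host>
      <port>3128</port>
      <!-- Hosts listed here are contacted directly, bypassing the proxy -->
      <nonProxyHosts>localhost|*.internal.example.com</nonProxyHosts>
    </proxy>
  </proxies>
</settings>
```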
Jeremy Long
5f5d3fdb66 Merge pull request #447 from kaimago/master
Oracle DB Support
2016-02-17 19:06:13 -05:00
Jeremy Long
853c92b87d Merge pull request #448 from awhitford/UpgFeb6-16
Upgraded SLF4J to 1.7.14 and the maven-compiler-plugin to 3.5.
2016-02-17 19:03:37 -05:00
Anthony Whitford
00080f2abc SLF4J 1.7.16 released; logback 1.1.5 released. 2016-02-15 10:23:07 -08:00
Anthony Whitford
55414208a3 SLF4J 1.7.15 released; maven-compiler-plugin 3.5.1 released. 2016-02-10 00:34:26 -08:00
Christian Galsterer
5091499563 [i444] Support nonProxyHosts parameter in settings.xml 2016-02-09 18:01:36 +01:00
Anthony Whitford
944b54d920 Upgraded SLF4J to 1.7.14 and the maven-compiler-plugin to 3.5. 2016-02-06 12:14:19 -08:00
Christian Galsterer
d023b2b2ff [i444] Support nonProxyHosts parameter in settings.xml 2016-02-06 16:13:01 +01:00
Jeremy Long
b45f9f514b base test case handles settings initialization 2016-02-06 08:40:33 -05:00
Jeremy Long
239a9383e0 fix for issue #446 2016-02-06 08:30:06 -05:00
Jeremy Long
2190c0229c added check to see if the file is xml prior to unzipping it per issue #441 2016-02-06 08:11:24 -05:00
I003306
01ef14dc92 Oracle DB Support 2016-02-04 15:23:57 +01:00
Jeremy Long
7b0784843c updated copyright 2016-01-31 17:23:13 -05:00
Jeremy Long
6fc805369e snapshot version 2016-01-31 17:11:37 -05:00
Jeremy Long
9e29939cd3 version 1.3.4 2016-01-31 16:50:34 -05:00
Jeremy Long
d750abca22 resolved issue with new databases not being created correctly if there was an initial download of the NVD data. 2016-01-31 08:26:23 -05:00
Jeremy Long
31df2fa131 findbugs/checkstyle corrections 2016-01-30 08:57:40 -05:00
Jeremy Long
6355a29a7a updated version to ensure there are no issues in the jenkins plugin per issue #445 2016-01-30 08:07:33 -05:00
Jeremy Long
86a2b38340 Merge pull request #440 from awhitford/DepUpg20160110
Upgrades
2016-01-24 08:40:51 -05:00
Jeremy Long
9cb2b58557 initial fix for issue #445 2016-01-24 08:35:44 -05:00
Jeremy Long
2b0e2e8d0d corrected link per issue #443 2016-01-17 08:08:15 -05:00
Jeremy Long
cf46767196 resolution for issue #439 2016-01-16 07:39:48 -05:00
Anthony Whitford
ffc1034b5a findbugs-maven-plugin 3.0.3 released; JavaMail api 1.5.5 released; jMockit 1.21 released. 2016-01-10 10:01:33 -08:00
Jeremy Long
46bb19de9b suppression rules to resolve issues #437 and #438 2016-01-10 07:45:29 -05:00
Jeremy Long
70bc7a6d01 Merge pull request #435 from awhitford/Surefire-2.19.1
Upgraded Surefire to 2.19.1 release.
2016-01-10 07:01:08 -05:00
Anthony Whitford
3164505273 Upgraded Surefire to 2.19.1 release. 2016-01-05 08:12:32 -08:00
Jeremy Long
3d84fcd037 resolves issue #433 2016-01-03 09:18:35 -05:00
Jeremy Long
578fa32243 updated to honor noupdate flag for version check and removed some complexity 2016-01-03 09:14:08 -05:00
Jeremy Long
fc00b7d1cc resolves issues #426 2016-01-03 08:51:03 -05:00
Jeremy Long
d7351bd3e5 Merge pull request #432 from awhitford/CodeTweaks20151228
Code tweaks 2015-12-28
2016-01-03 08:33:16 -05:00
Anthony Whitford
e7224c8f05 StringBuilder allocation more precise. 2015-12-31 09:25:44 -08:00
Anthony Whitford
b97622f45b Variables may be final. 2015-12-28 13:15:24 -08:00
Anthony Whitford
0e15f3b703 Add missing final keyword to local variables. 2015-12-28 13:14:31 -08:00
Anthony Whitford
6604c0da89 Default StringBuilder size should be larger than default 16. 2015-12-28 13:14:04 -08:00
Anthony Whitford
e0b8be20b3 Variable suppressionRules may be final. 2015-12-28 13:13:33 -08:00
Anthony Whitford
46965d8c96 Iterable does not need qualifying, and collection may be final. 2015-12-28 13:13:00 -08:00
Anthony Whitford
66e92f00ee Variable may be final. 2015-12-28 13:12:10 -08:00
Anthony Whitford
4a137b4e8e Use StringBuilder instead of String += concatenation. 2015-12-28 13:11:36 -08:00
Anthony Whitford
9d5ff28098 Variables can be final and the exception was unused so can be removed. 2015-12-28 13:10:37 -08:00
Anthony Whitford
313b114da5 Variables can be final. 2015-12-28 13:09:17 -08:00
Anthony Whitford
1b6bfc6338 Variables can be final. 2015-12-28 13:08:37 -08:00
Anthony Whitford
49fd89f34a Let's use logging parameters for lazy evaluation. 2015-12-28 13:07:34 -08:00
Anthony Whitford
a2e862886e Rather than an explicit StringBuilder, why not simply an implicit one? 2015-12-28 13:06:44 -08:00
Anthony Whitford
62f6c7c5a9 Rather than using instanceOf, just add a specific catch for AnalysisException. 2015-12-28 13:05:45 -08:00
Jeremy Long
2294ed1ce1 Merge pull request #430 from awhitford/PluginUpdates20151227
Plugin updates
2015-12-28 06:12:54 -05:00
Jeremy Long
c8a1c6a318 fixed issue #431 - missing dependency 2015-12-28 06:11:57 -05:00
Anthony Whitford
600ed66d5b maven-clean-plugin 3.0.0 released; maven-source-plugin 2.4 released; maven-javadoc-plugin 2.10.3 released. 2015-12-27 11:45:52 -08:00
Jeremy Long
512b17555c updated documentation for encrypted passwords per issue #417 2015-12-26 12:55:41 -05:00
Jeremy Long
dc7849c9e8 added support for encrypted passwords per issue #417 2015-12-26 07:13:40 -05:00
Jeremy Long
6a99a51b91 Merge pull request #425 from awhitford/PluginUpdate20151220
maven-pmd-plugin 3.6 released.
2015-12-25 07:07:22 -05:00
Anthony Whitford
8c7fa022a0 maven-pmd-plugin 3.6 released. 2015-12-20 10:10:55 -08:00
Jeremy Long
cca694a580 logs from issue #138 indicate multiple modules are calling a non-threadsafe operation; as such, the mojo is being marked as threadSafe=false 2015-12-18 06:36:14 -05:00
Jeremy Long
3a7f95b9b1 spelling correction 2015-12-18 06:28:11 -05:00
Jeremy Long
3a84dc3962 fixed casing per issue #418 2015-12-18 06:05:01 -05:00
Jeremy Long
5961a96a4c Merge pull request #424 from amandel/patch-1
Fix casing of properties table name.
2015-12-18 05:55:04 -05:00
Jeremy Long
a22382505f Merge pull request #420 from awhitford/Issue419
Issue #419 - Avoiding a duplicate CPE Index Created message
2015-12-18 05:51:26 -05:00
Jeremy Long
5faef75415 Merge pull request #422 from edgedalmacio/patch-1
added tomcat suppressions
2015-12-18 05:49:16 -05:00
Jeremy Long
fed60907dc snapshot version 2015-12-18 05:48:37 -05:00
Andreas Mandel
ce7e360b70 Fix casing of properties table name. 2015-12-17 14:06:16 +01:00
Edge Dalmacio
0b3def38b8 added tomcat suppressions
tomcat-embed-el
tomcat-jdbc
tomcat-juli
2015-12-17 15:27:17 +08:00
Anthony Whitford
25a15dea8c Issue #419 - Avoiding a duplicate CPE Index Created message and resource leak. 2015-12-14 00:52:48 -08:00
Jeremy Long
e204971a6c version 1.3.3 2015-12-10 19:44:38 -05:00
Jeremy Long
d5b3a118bc minor site tweaks 2015-12-10 19:44:26 -05:00
Jeremy Long
3396cb2887 fix for issue #416 2015-12-10 18:33:31 -05:00
Jeremy Long
3c5beea218 1.3.3-SNAPSHOT 2015-12-02 09:17:28 -05:00
Jeremy Long
e544384dd5 1.3.3-SNAPSHOT 2015-12-02 05:46:28 -05:00
Jeremy Long
0e90f460f4 reverted change, using the undocumented SERIALIZED file lock mode 2015-12-02 05:46:06 -05:00
Jeremy Long
921efc4d2b updated documentation 2015-11-30 06:50:15 -05:00
390 changed files with 51642 additions and 17225 deletions

2
.gitattributes vendored Normal file

@@ -0,0 +1,2 @@
*.html linguist-documentation
(^|/)site/) linguist-documentation

34
.github/contributing.md vendored Normal file

@@ -0,0 +1,34 @@
# Contributing to OWASP dependency-check
## Reporting Bugs
- Ensure you're running the latest version of dependency-check.
- Ensure the bug has not [already been reported](https://github.com/jeremylong/DependencyCheck/issues).
- If you're unable to find an open issue addressing the problem, please [submit a new issue](https://github.com/jeremylong/DependencyCheck/issues/new).
- Please fill out the appropriate section of the bug report template provided. Please delete any sections not needed in the template.
## Reporting Vulnerabilities
- If you believe you have found a vulnerability in dependency-check itself (not that dependency-check found a vulnerability), please email jeremy.long@owasp.org.
## Asking Questions
- Your question may be answered by taking a look at the [documentation](https://jeremylong.github.io/DependencyCheck/).
- If you still have a question consider:
- posting to the [Google Group](https://groups.google.com/forum/#!forum/dependency-check)
- opening a [new issue](https://github.com/jeremylong/DependencyCheck/issues/new)
## Enhancement Requests
- Suggest changes by [submitting a new issue](https://github.com/jeremylong/DependencyCheck/issues/new) and begin coding.
## Contributing Code
- If you have written a new feature or have fixed a bug please open a new pull request with the patch.
- Ensure the PR description clearly describes the problem and solution. Include any related issue number(s) if applicable.
- Please ensure the PR passes the automated checks performed (travis-ci, codacy, etc.)
- Please consider adding test cases for any new functionality
## Thank you for your contributions
OWASP dependency-check team

20
.github/issue_template.md vendored Normal file

@@ -0,0 +1,20 @@
Please delete any un-needed section from the following issue template:
### Reporting Bugs/Errors
When reporting errors, 99% of the time log file output is required. Please post the log file as a [gist](https://gist.github.com/) and provide a link in the new issue.
### Reporting False Positives
When reporting a false positive please include:
- The location of the dependency (Maven GAV, URL to download the dependency, etc.)
- The CPE that is believed to be false positive
- Please report the CPE not the CVE
#### Example
False positive on library foo.jar - reported as cpe:/a:apache:tomcat:7.0
```xml
<dependency>
<groupId>org.sample</groupId>
<artifactId>foo</artifactId>
<version>1.0</version>
</dependency>
```

9
.github/pull_request_template.md vendored Normal file

@@ -0,0 +1,9 @@
## Fixes Issue #
## Description of Change
*Please add a description of the proposed change*
## Have test cases been added to cover the new functionality?
*yes/no*

2
.gitignore vendored

@@ -26,3 +26,5 @@ _site/**
.LCKpom.xml~ .LCKpom.xml~
#coverity #coverity
/cov-int/ /cov-int/
/dependency-check-core/nbproject/
cov-scan.bat

3
.travis.yml Normal file

@@ -0,0 +1,3 @@
language: java
jdk: oraclejdk7
script: mvn install -DreleaseTesting

14
Dockerfile Normal file

@@ -0,0 +1,14 @@
FROM java:8
MAINTAINER Timo Pagel <dependencycheckmaintainer@timo-pagel.de>
RUN wget -O /tmp/current.txt http://jeremylong.github.io/DependencyCheck/current.txt && current=$(cat /tmp/current.txt) && wget https://dl.bintray.com/jeremy-long/owasp/dependency-check-$current-release.zip && unzip dependency-check-$current-release.zip && mv dependency-check /usr/share/
RUN useradd -ms /bin/bash dockeruser && chown -R dockeruser:dockeruser /usr/share/dependency-check && mkdir /report && chown -R dockeruser:dockeruser /report
USER dockeruser
VOLUME "/src /usr/share/dependency-check/data /report"
WORKDIR /report
ENTRYPOINT ["/usr/share/dependency-check/bin/dependency-check.sh", "--scan", "/src"]

View File

@@ -1,4 +1,7 @@
[![Build Status](https://dependency-check.ci.cloudbees.com/buildStatus/icon?job=dependency-check)](https://dependency-check.ci.cloudbees.com/job/dependency-check/) [![Build Status](https://travis-ci.org/jeremylong/DependencyCheck.svg?branch=master)](https://travis-ci.org/jeremylong/DependencyCheck) [![Coverity Scan Build Status](https://scan.coverity.com/projects/1654/badge.svg)](https://scan.coverity.com/projects/dependencycheck) [![Codacy Badge](https://api.codacy.com/project/badge/Grade/6b6021d481dc41a888c5da0d9ecf9494)](https://www.codacy.com/app/jeremylong/DependencyCheck?utm_source=github.com&amp;utm_medium=referral&amp;utm_content=jeremylong/DependencyCheck&amp;utm_campaign=Badge_Grade) [![Apache 2.0 License](https://img.shields.io/badge/license-Apache%202-blue.svg)](https://www.apache.org/licenses/LICENSE-2.0.txt)
[![Black Hat Arsenal](https://www.toolswatch.org/badges/arsenal/2015.svg)](https://www.toolswatch.org/2015/06/black-hat-arsenal-usa-2015-speakers-lineup/) [![Black Hat Arsenal](https://www.toolswatch.org/badges/arsenal/2014.svg)](https://www.toolswatch.org/2014/06/black-hat-usa-2014-arsenal-tools-speaker-list/) [![Black Hat Arsenal](https://www.toolswatch.org/badges/arsenal/2013.svg)](https://www.toolswatch.org/2013/06/announcement-blackhat-arsenal-usa-2013-selected-tools/)
Dependency-Check Dependency-Check
================ ================
@@ -22,18 +25,18 @@ The latest CLI can be downloaded from bintray's
On *nix On *nix
``` ```
$ ./bin/dependency-check.sh -h $ ./bin/dependency-check.sh -h
$ ./bin/dependency-check.sh --app Testing --out . --scan [path to jar files to be scanned] $ ./bin/dependency-check.sh --project Testing --out . --scan [path to jar files to be scanned]
``` ```
On Windows On Windows
``` ```
> bin/dependency-check.bat -h > bin/dependency-check.bat -h
> bin/dependency-check.bat --app Testing --out . --scan [path to jar files to be scanned] > bin/dependency-check.bat --project Testing --out . --scan [path to jar files to be scanned]
``` ```
On Mac with [Homebrew](http://brew.sh) On Mac with [Homebrew](http://brew.sh)
``` ```
$ brew update && brew install dependency-check $ brew update && brew install dependency-check
$ dependency-check -h $ dependency-check -h
$ dependency-check --app Testing --out . --scan [path to jar files to be scanned] $ dependency-check --project Testing --out . --scan [path to jar files to be scanned]
``` ```
### Maven Plugin ### Maven Plugin
@@ -85,17 +88,48 @@ On *nix
``` ```
$ mvn install $ mvn install
$ ./dependency-check-cli/target/release/bin/dependency-check.sh -h $ ./dependency-check-cli/target/release/bin/dependency-check.sh -h
$ ./dependency-check-cli/target/release/bin/dependency-check.sh --app Testing --out . --scan ./src/test/resources $ ./dependency-check-cli/target/release/bin/dependency-check.sh --project Testing --out . --scan ./src/test/resources
``` ```
On Windows On Windows
``` ```
> mvn install > mvn install
> dependency-check-cli/target/release/bin/dependency-check.bat -h > dependency-check-cli/target/release/bin/dependency-check.bat -h
> dependency-check-cli/target/release/bin/dependency-check.bat --app Testing --out . --scan ./src/test/resources > dependency-check-cli/target/release/bin/dependency-check.bat --project Testing --out . --scan ./src/test/resources
``` ```
Then load the resulting 'DependencyCheck-Report.html' into your favorite browser. Then load the resulting 'DependencyCheck-Report.html' into your favorite browser.
### Docker
In the following example it is assumed that the source to be checked is in the current directory. Persistent data and report directories are used so that the container can be destroyed after each run, ensuring you always use the newest version.
```
# After the first run, feel free to change the owner of the directories to the owner of the created files and the permissions to 744
DATA_DIRECTORY=$HOME/OWASP-Dependency-Check/data
REPORT_DIRECTORY=$HOME/OWASP-Dependency-Check/reports
if [ ! -d $DATA_DIRECTORY ]; then
echo "Initially creating persistent directories"
mkdir -p $DATA_DIRECTORY
chmod -R 777 $DATA_DIRECTORY
mkdir -p $REPORT_DIRECTORY
chmod -R 777 $REPORT_DIRECTORY
fi
docker pull owasp/dependency-check # Make sure it is the latest version
docker run --rm \
--volume $(pwd):/src \
--volume $DATA_DIRECTORY:/usr/share/dependency-check/data \
--volume $REPORT_DIRECTORY:/report \
--name dependency-check \
owasp/dependency-check \
--suppression "/src/security/dependency-check-suppression.xml" \
--format "ALL" \
--project "My OWASP Dependency Check Project"
```
Mailing List Mailing List
------------ ------------
@@ -108,14 +142,14 @@ Archive: [google group](https://groups.google.com/forum/#!forum/dependency-check
Copyright & License Copyright & License
- -
Dependency-Check is Copyright (c) 2012-2015 Jeremy Long. All Rights Reserved. Dependency-Check is Copyright (c) 2012-2016 Jeremy Long. All Rights Reserved.
Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/LICENSE.txt) file for the full license. Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/LICENSE.txt) file for the full license.
Dependency-Check makes use of several other open source libraries. Please see the [NOTICE.txt] [notices] file for more information. Dependency-Check makes use of several other open source libraries. Please see the [NOTICE.txt][notices] file for more information.
[wiki]: https://github.com/jeremylong/DependencyCheck/wiki [wiki]: https://github.com/jeremylong/DependencyCheck/wiki
[subscribe]: mailto:dependency-check+subscribe@googlegroups.com [subscribe]: mailto:dependency-check+subscribe@googlegroups.com
[post]: mailto:dependency-check@googlegroups.com [post]: mailto:dependency-check@googlegroups.com
[notices]: https://github.com/jeremylong/DependencyCheck/blob/master/NOTICES.txt [notices]: https://github.com/jeremylong/DependencyCheck/blob/master/NOTICE.txt

View File

@@ -1,134 +1,25 @@
Dependency-Check-Gradle Dependency-Check Ant Task
========= =========
**Working in progress** Dependency-Check Ant Task can be used to check the project dependencies for published security vulnerabilities. The checks
performed are a "best effort" and as such, there could be false positives as well as false negatives. However,
vulnerabilities in 3rd party components is a well-known problem and is currently documented in the 2013 OWASP
Top 10 as [A9 - Using Components with Known Vulnerabilities](https://www.owasp.org/index.php/Top_10_2013-A9-Using_Components_with_Known_Vulnerabilities).
This is a DependencyCheck gradle plugin designed for project which use Gradle as build script. Documentation and links to production binary releases can be found on the [github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-ant/index.html).
Dependency-Check is a utility that attempts to detect publicly disclosed vulnerabilities contained within project dependencies. It does this by determining if there is a Common Platform Enumeration (CPE) identifier for a given dependency. If found, it will generate a report linking to the associated CVE entries. Mailing List
------------
========= Subscribe: [dependency-check+subscribe@googlegroups.com](mailto:dependency-check+subscribe@googlegroups.com)
## What's New Post: [dependency-check@googlegroups.com](mailto:dependency-check@googlegroups.com)
Current latest version is `0.0.8`
## Usage Copyright & License
-------------------
### Step 1, Apply dependency check gradle plugin Dependency-Check is Copyright (c) 2012-2014 Jeremy Long. All Rights Reserved.
Install from Maven central repo Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/LICENSE.txt) file for the full license.
```groovy Dependency-Check-Ant makes use of other open source libraries. Please see the [NOTICE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/dependency-check-ant/NOTICE.txt) file for more information.
buildscript {
repositories {
mavenCentral()
}
dependencies {
classpath 'org.owasp:dependency-check-gradle:1.3.2'
}
}
apply plugin: 'dependency-check-gradle'
```
### Step 2, Run gradle task
Once gradle plugin applied, run following gradle task to check dependencies:
```
gradle dependencyCheck --info
```
The reports will be generated automatically under `./reports` folder.
If your project includes multiple sub-projects, the report will be generated for each sub-project in different sub-directory.
## FAQ
> **Questions List:**
> - What if I'm behind a proxy?
> - What if my project includes multiple sub-project? How can I use this plugin for each of them including the root project?
> - How to customize the report directory?
### What if I'm behind a proxy?
Maybe you have to use proxy to access internet, in this case, you could configure proxy settings for this plugin:
```groovy
dependencyCheck {
proxy {
server = "127.0.0.1" // required, the server name or IP address of the proxy
port = 3128 // required, the port number of the proxy
// optional, the proxy server might require username
// username = "username"
// optional, the proxy server might require password
// password = "password"
}
}
```
In addition, if the proxy only allow HTTP `GET` or `POST` methods, you will find that the update process will always fail,
the root cause is that every time you run `dependencyCheck` task, it will try to query the latest timestamp to determine whether need to perform an update action,
and for performance reason the HTTP method it uses by default is `HEAD`, which probably is disabled or not supported by the proxy. To avoid this problem, you can simply change the HTTP method by below configuration:
```groovy
dependencyCheck {
quickQueryTimestamp = false // when set to false, it means use HTTP GET method to query timestamp. (default value is true)
}
```
### What if my project includes multiple sub-project? How can I use this plugin for each of them including the root project?
Try put 'apply plugin: "dependency-check"' inside the 'allprojects' or 'subprojects' if you'd like to check all sub-projects only, see below:
(1) For all projects including root project:
```groovy
buildscript {
repositories {
mavenCentral()
}
dependencies {
classpath "gradle.plugin.com.tools.security:dependency-check:0.0.8"
}
}
allprojects {
apply plugin: "dependency-check"
}
```
(2) For all sub-projects:
```groovy
buildscript {
repositories {
mavenCentral()
}
dependencies {
classpath "gradle.plugin.com.tools.security:dependency-check:0.0.8"
}
}
subprojects {
apply plugin: "dependency-check"
}
```
In this way, the dependency check will be executed for all projects (including root project) or just sub projects.
### How to customize the report directory?
By default, all reports will be placed under `./reports` folder, to change the default directory, just modify it in the configuration section like this:
```groovy
subprojects {
apply plugin: "dependency-check"
dependencyCheck {
outputDirectory = "./customized-path/security-report"
}
}
```

View File

@@ -20,7 +20,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<parent> <parent>
<groupId>org.owasp</groupId> <groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId> <artifactId>dependency-check-parent</artifactId>
<version>1.3.2</version> <version>1.4.6-SNAPSHOT</version>
</parent> </parent>
<artifactId>dependency-check-ant</artifactId> <artifactId>dependency-check-ant</artifactId>
@@ -256,6 +256,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId> <artifactId>maven-surefire-plugin</artifactId>
<configuration> <configuration>
<argLine>-Dfile.encoding=UTF-8</argLine>
<systemProperties> <systemProperties>
<property> <property>
<name>data.directory</name> <name>data.directory</name>
@@ -287,7 +288,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<version>${reporting.pmd-plugin.version}</version> <version>${reporting.pmd-plugin.version}</version>
<configuration> <configuration>
<targetJdk>1.6</targetJdk> <targetJdk>1.6</targetJdk>
<linkXref>true</linkXref> <linkXRef>true</linkXRef>
<sourceEncoding>utf-8</sourceEncoding> <sourceEncoding>utf-8</sourceEncoding>
<excludes> <excludes>
<exclude>**/generated/*.java</exclude> <exclude>**/generated/*.java</exclude>

View File

@@ -24,16 +24,21 @@ import org.slf4j.helpers.MarkerIgnoringBase;
import org.slf4j.helpers.MessageFormatter; import org.slf4j.helpers.MessageFormatter;
/** /**
* An instance of {@link org.slf4j.Logger} which simply calls the log method on the delegate Ant task. * An instance of {@link org.slf4j.Logger} which simply calls the log method on
* the delegate Ant task.
* *
* @author colezlaw * @author colezlaw
*/ */
public class AntLoggerAdapter extends MarkerIgnoringBase { public class AntLoggerAdapter extends MarkerIgnoringBase {
/**
* serialization UID.
*/
private static final long serialVersionUID = -1337;
/** /**
* A reference to the Ant task used for logging. * A reference to the Ant task used for logging.
*/ */
private Task task; private transient Task task;
/** /**
* Constructs an Ant Logger Adapter. * Constructs an Ant Logger Adapter.

View File

@@ -18,7 +18,6 @@
package org.owasp.dependencycheck.taskdefs; package org.owasp.dependencycheck.taskdefs;
import java.io.File; import java.io.File;
import java.io.IOException;
import java.util.List; import java.util.List;
import org.apache.tools.ant.BuildException; import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project; import org.apache.tools.ant.Project;
@@ -32,9 +31,12 @@ import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.data.nvdcve.CveDB; import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException; import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties; import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier; import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability; import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.exception.ExceptionCollection;
import org.owasp.dependencycheck.exception.ReportException;
import org.owasp.dependencycheck.reporting.ReportGenerator; import org.owasp.dependencycheck.reporting.ReportGenerator;
import org.owasp.dependencycheck.reporting.ReportGenerator.Format; import org.owasp.dependencycheck.reporting.ReportGenerator.Format;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
@@ -51,16 +53,157 @@ public class Check extends Update {
* System specific new line character. * System specific new line character.
*/ */
private static final String NEW_LINE = System.getProperty("line.separator", "\n").intern(); private static final String NEW_LINE = System.getProperty("line.separator", "\n").intern();
/**
* Whether the ruby gemspec analyzer should be enabled.
*/
private Boolean rubygemsAnalyzerEnabled;
/**
* Whether or not the Node.js Analyzer is enabled.
*/
private Boolean nodeAnalyzerEnabled;
/**
* Whether or not the Ruby Bundle Audit Analyzer is enabled.
*/
private Boolean bundleAuditAnalyzerEnabled;
/**
* Whether the CMake analyzer should be enabled.
*/
private Boolean cmakeAnalyzerEnabled;
/**
* Whether or not the Open SSL analyzer is enabled.
*/
private Boolean opensslAnalyzerEnabled;
/**
* Whether the python package analyzer should be enabled.
*/
private Boolean pyPackageAnalyzerEnabled;
/**
* Whether the python distribution analyzer should be enabled.
*/
private Boolean pyDistributionAnalyzerEnabled;
/**
* Whether or not the central analyzer is enabled.
*/
private Boolean centralAnalyzerEnabled;
/**
* Whether or not the nexus analyzer is enabled.
*/
private Boolean nexusAnalyzerEnabled;
/**
* The URL of a Nexus server's REST API end point
* (http://domain/nexus/service/local).
*/
private String nexusUrl;
/**
* Whether or not the defined proxy should be used when connecting to Nexus.
*/
private Boolean nexusUsesProxy;
/**
* Additional ZIP file extensions to analyze. This should be a
* comma-separated list of file extensions to treat like ZIP files.
*/
private String zipExtensions;
/**
* The path to Mono for .NET assembly analysis on non-windows systems.
*/
private String pathToMono;
/** /**
* Construct a new DependencyCheckTask. * The application name for the report.
*
* @deprecated use projectName instead.
*/ */
public Check() { @Deprecated
super(); private String applicationName = null;
// Call this before Dependency Check Core starts logging anything - this way, all SLF4J messages from /**
// core end up coming through this tasks logger * The name of the project being analyzed.
StaticLoggerBinder.getSingleton().setTask(this); */
} private String projectName = "dependency-check";
/**
* Specifies the destination directory for the generated Dependency-Check
* report.
*/
private String reportOutputDirectory = ".";
/**
* Specifies if the build should be failed if a CVSS score above a specified
* level is identified. The default is 11 which means since the CVSS scores
* are 0-10, by default the build will never fail and the CVSS score is set
* to 11. The valid range for the fail build on CVSS is 0 to 11, where
* anything above 10 will not cause the build to fail.
*/
private float failBuildOnCVSS = 11;
/**
* Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not
* recommended that this be turned to false. Default is true.
*/
private Boolean autoUpdate;
/**
* Whether only the update phase should be executed.
*
* @deprecated Use the update task instead
*/
@Deprecated
private boolean updateOnly = false;
/**
* The report format to be generated (HTML, XML, VULN, ALL). Default is
* HTML.
*/
private String reportFormat = "HTML";
/**
* The path to the suppression file.
*/
private String suppressionFile;
/**
* The path to the hints file.
*/
private String hintsFile;
/**
* flag indicating whether or not to show a summary of findings.
*/
private boolean showSummary = true;
/**
* Whether experimental analyzers are enabled.
*/
private Boolean enableExperimental;
/**
* Whether or not the Jar Analyzer is enabled.
*/
private Boolean jarAnalyzerEnabled;
/**
* Whether or not the Archive Analyzer is enabled.
*/
private Boolean archiveAnalyzerEnabled;
/**
* Whether or not the .NET Nuspec Analyzer is enabled.
*/
private Boolean nuspecAnalyzerEnabled;
/**
* Whether or not the PHP Composer Analyzer is enabled.
*/
private Boolean composerAnalyzerEnabled;
/**
* Whether or not the .NET Assembly Analyzer is enabled.
*/
private Boolean assemblyAnalyzerEnabled;
/**
* Whether the autoconf analyzer should be enabled.
*/
private Boolean autoconfAnalyzerEnabled;
/**
* Sets the path for the bundle-audit binary.
*/
private String bundleAuditPath;
/**
* Whether or not the CocoaPods Analyzer is enabled.
*/
private Boolean cocoapodsAnalyzerEnabled;
/**
* Whether or not the Swift package Analyzer is enabled.
*/
private Boolean swiftPackageManagerAnalyzerEnabled;
//The following code was copied Apache Ant PathConvert //The following code was copied Apache Ant PathConvert
//BEGIN COPY from org.apache.tools.ant.taskdefs.PathConvert //BEGIN COPY from org.apache.tools.ant.taskdefs.PathConvert
/** /**
@@ -68,9 +211,9 @@ public class Check extends Update {
*/ */
private Resources path = null; private Resources path = null;
/** /**
* Reference to path/fileset to convert * Reference to path/file set to convert
*/ */
private Reference refid = null; private Reference refId = null;
/** /**
* Add an arbitrary ResourceCollection. * Add an arbitrary ResourceCollection.
@@ -80,14 +223,14 @@ public class Check extends Update {
*/ */
public void add(ResourceCollection rc) { public void add(ResourceCollection rc) {
if (isReference()) { if (isReference()) {
throw new BuildException("Nested elements are not allowed when using the refid attribute."); throw new BuildException("Nested elements are not allowed when using the refId attribute.");
} }
getPath().add(rc); getPath().add(rc);
} }
/** /**
* Returns the path. If the path has not been initialized yet, this class is synchronized, and will instantiate the path * Returns the path. If the path has not been initialized yet, this class is
* object. * synchronized, and will instantiate the path object.
* *
* @return the path * @return the path
*/ */
@@ -100,49 +243,54 @@ public class Check extends Update {
} }
/** /**
* Learn whether the refid attribute of this element been set. * Learn whether the refId attribute of this element been set.
* *
* @return true if refid is valid. * @return true if refId is valid.
*/ */
public boolean isReference() { public boolean isReference() {
return refid != null; return refId != null;
} }
/** /**
* Add a reference to a Path, FileSet, DirSet, or FileList defined elsewhere. * Add a reference to a Path, FileSet, DirSet, or FileList defined
* elsewhere.
* *
* @param r the reference to a path, fileset, dirset or filelist. * @param r the reference to a path, fileset, dirset or filelist.
*/ */
public void setRefid(Reference r) { public synchronized void setRefId(Reference r) {
if (path != null) { if (path != null) {
throw new BuildException("Nested elements are not allowed when using the refid attribute."); throw new BuildException("Nested elements are not allowed when using the refId attribute.");
} }
refid = r; refId = r;
} }
/** /**
* If this is a reference, this method will add the referenced resource collection to the collection of paths. * If this is a reference, this method will add the referenced resource
* collection to the collection of paths.
* *
* @throws BuildException if the reference is not to a resource collection * @throws BuildException if the reference is not to a resource collection
*/ */
private void dealWithReferences() throws BuildException { private void dealWithReferences() throws BuildException {
if (isReference()) { if (isReference()) {
final Object o = refid.getReferencedObject(getProject()); final Object o = refId.getReferencedObject(getProject());
if (!(o instanceof ResourceCollection)) { if (!(o instanceof ResourceCollection)) {
throw new BuildException("refid '" + refid.getRefId() throw new BuildException("refId '" + refId.getRefId()
+ "' does not refer to a resource collection."); + "' does not refer to a resource collection.");
} }
getPath().add((ResourceCollection) o); getPath().add((ResourceCollection) o);
} }
} }
// END COPY from org.apache.tools.ant.taskdefs // END COPY from org.apache.tools.ant.taskdefs
/** /**
* The application name for the report. * Construct a new DependencyCheckTask.
*
* @deprecated use projectName instead.
*/ */
@Deprecated public Check() {
private String applicationName = null; super();
// Call this before Dependency Check Core starts logging anything - this way, all SLF4J messages from
// core end up coming through this tasks logger
StaticLoggerBinder.getSingleton().setTask(this);
}
/** /**
* Get the value of applicationName. * Get the value of applicationName.
@@ -166,10 +314,6 @@ public class Check extends Update {
public void setApplicationName(String applicationName) { public void setApplicationName(String applicationName) {
this.applicationName = applicationName; this.applicationName = applicationName;
} }
/**
* The name of the project being analyzed.
*/
private String projectName = "dependency-check";
/** /**
* Get the value of projectName. * Get the value of projectName.
@@ -195,11 +339,6 @@ public class Check extends Update {
this.projectName = projectName; this.projectName = projectName;
} }
/**
* Specifies the destination directory for the generated Dependency-Check report.
*/
private String reportOutputDirectory = ".";
/** /**
* Get the value of reportOutputDirectory. * Get the value of reportOutputDirectory.
* *
@@ -217,12 +356,6 @@ public class Check extends Update {
public void setReportOutputDirectory(String reportOutputDirectory) { public void setReportOutputDirectory(String reportOutputDirectory) {
this.reportOutputDirectory = reportOutputDirectory; this.reportOutputDirectory = reportOutputDirectory;
} }
/**
* Specifies if the build should be failed if a CVSS score above a specified level is identified. The default is 11 which
* means since the CVSS scores are 0-10, by default the build will never fail and the CVSS score is set to 11. The valid range
* for the fail build on CVSS is 0 to 11, where anything above 10 will not cause the build to fail.
*/
private float failBuildOnCVSS = 11;
/** /**
* Get the value of failBuildOnCVSS. * Get the value of failBuildOnCVSS.
@@ -241,11 +374,6 @@ public class Check extends Update {
public void setFailBuildOnCVSS(float failBuildOnCVSS) { public void setFailBuildOnCVSS(float failBuildOnCVSS) {
this.failBuildOnCVSS = failBuildOnCVSS; this.failBuildOnCVSS = failBuildOnCVSS;
} }
/**
* Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not recommended that this be turned to false. Default
* is true.
*/
private Boolean autoUpdate;
/** /**
* Get the value of autoUpdate. * Get the value of autoUpdate.
@@ -264,13 +392,6 @@ public class Check extends Update {
public void setAutoUpdate(Boolean autoUpdate) { public void setAutoUpdate(Boolean autoUpdate) {
this.autoUpdate = autoUpdate; this.autoUpdate = autoUpdate;
} }
/**
* Whether only the update phase should be executed.
*
* @deprecated Use the update task instead
*/
@Deprecated
private boolean updateOnly = false;
/** /**
* Get the value of updateOnly. * Get the value of updateOnly.
@@ -294,11 +415,6 @@ public class Check extends Update {
this.updateOnly = updateOnly; this.updateOnly = updateOnly;
} }
/**
* The report format to be generated (HTML, XML, VULN, ALL). Default is HTML.
*/
private String reportFormat = "HTML";
/** /**
* Get the value of reportFormat. * Get the value of reportFormat.
* *
@@ -316,10 +432,6 @@ public class Check extends Update {
public void setReportFormat(ReportFormats reportFormat) { public void setReportFormat(ReportFormats reportFormat) {
this.reportFormat = reportFormat.getValue(); this.reportFormat = reportFormat.getValue();
} }
/**
* The path to the suppression file.
*/
private String suppressionFile;
/** /**
* Get the value of suppressionFile. * Get the value of suppressionFile.
@@ -338,10 +450,24 @@ public class Check extends Update {
public void setSuppressionFile(String suppressionFile) { public void setSuppressionFile(String suppressionFile) {
this.suppressionFile = suppressionFile; this.suppressionFile = suppressionFile;
} }
/** /**
* flag indicating whether or not to show a summary of findings. * Get the value of hintsFile.
*
* @return the value of hintsFile
*/ */
private boolean showSummary = true; public String getHintsFile() {
return hintsFile;
}
/**
* Set the value of hintsFile.
*
* @param hintsFile new value of hintsFile
*/
public void setHintsFile(String hintsFile) {
this.hintsFile = hintsFile;
}
/** /**
* Get the value of showSummary. * Get the value of showSummary.
@@ -362,9 +488,22 @@ public class Check extends Update {
} }
/** /**
* Whether or not the Jar Analyzer is enabled. * Get the value of enableExperimental.
*
* @return the value of enableExperimental
*/ */
private Boolean jarAnalyzerEnabled; public Boolean isEnableExperimental() {
return enableExperimental;
}
/**
* Set the value of enableExperimental.
*
* @param enableExperimental new value of enableExperimental
*/
public void setEnableExperimental(Boolean enableExperimental) {
this.enableExperimental = enableExperimental;
}
/** /**
* Returns whether or not the analyzer is enabled. * Returns whether or not the analyzer is enabled.
@@ -383,10 +522,6 @@ public class Check extends Update {
public void setJarAnalyzerEnabled(Boolean jarAnalyzerEnabled) { public void setJarAnalyzerEnabled(Boolean jarAnalyzerEnabled) {
this.jarAnalyzerEnabled = jarAnalyzerEnabled; this.jarAnalyzerEnabled = jarAnalyzerEnabled;
} }
/**
* Whether or not the Archive Analyzer is enabled.
*/
private Boolean archiveAnalyzerEnabled;
/** /**
* Returns whether or not the analyzer is enabled. * Returns whether or not the analyzer is enabled.
@@ -396,10 +531,6 @@ public class Check extends Update {
public Boolean isArchiveAnalyzerEnabled() { public Boolean isArchiveAnalyzerEnabled() {
return archiveAnalyzerEnabled; return archiveAnalyzerEnabled;
} }
/**
* Whether or not the .NET Assembly Analyzer is enabled.
*/
private Boolean assemblyAnalyzerEnabled;
/** /**
* Sets whether or not the analyzer is enabled. * Sets whether or not the analyzer is enabled.
@@ -427,10 +558,6 @@ public class Check extends Update {
public void setAssemblyAnalyzerEnabled(Boolean assemblyAnalyzerEnabled) { public void setAssemblyAnalyzerEnabled(Boolean assemblyAnalyzerEnabled) {
this.assemblyAnalyzerEnabled = assemblyAnalyzerEnabled; this.assemblyAnalyzerEnabled = assemblyAnalyzerEnabled;
} }
/**
* Whether or not the .NET Nuspec Analyzer is enabled.
*/
private Boolean nuspecAnalyzerEnabled;
/** /**
* Returns whether or not the analyzer is enabled. * Returns whether or not the analyzer is enabled.
@@ -449,10 +576,6 @@ public class Check extends Update {
public void setNuspecAnalyzerEnabled(Boolean nuspecAnalyzerEnabled) { public void setNuspecAnalyzerEnabled(Boolean nuspecAnalyzerEnabled) {
this.nuspecAnalyzerEnabled = nuspecAnalyzerEnabled; this.nuspecAnalyzerEnabled = nuspecAnalyzerEnabled;
} }
/**
* Whether or not the PHP Composer Analyzer is enabled.
*/
private Boolean composerAnalyzerEnabled;
/** /**
* Get the value of composerAnalyzerEnabled. * Get the value of composerAnalyzerEnabled.
@@ -471,10 +594,6 @@ public class Check extends Update {
public void setComposerAnalyzerEnabled(Boolean composerAnalyzerEnabled) { public void setComposerAnalyzerEnabled(Boolean composerAnalyzerEnabled) {
this.composerAnalyzerEnabled = composerAnalyzerEnabled; this.composerAnalyzerEnabled = composerAnalyzerEnabled;
} }
/**
* Whether the autoconf analyzer should be enabled.
*/
private Boolean autoconfAnalyzerEnabled;
/** /**
* Get the value of autoconfAnalyzerEnabled. * Get the value of autoconfAnalyzerEnabled.
@@ -493,10 +612,6 @@ public class Check extends Update {
public void setAutoconfAnalyzerEnabled(Boolean autoconfAnalyzerEnabled) { public void setAutoconfAnalyzerEnabled(Boolean autoconfAnalyzerEnabled) {
this.autoconfAnalyzerEnabled = autoconfAnalyzerEnabled; this.autoconfAnalyzerEnabled = autoconfAnalyzerEnabled;
} }
/**
* Whether the CMake analyzer should be enabled.
*/
private Boolean cmakeAnalyzerEnabled;
/** /**
* Get the value of cmakeAnalyzerEnabled. * Get the value of cmakeAnalyzerEnabled.
@@ -515,10 +630,80 @@ public class Check extends Update {
public void setCMakeAnalyzerEnabled(Boolean cmakeAnalyzerEnabled) { public void setCMakeAnalyzerEnabled(Boolean cmakeAnalyzerEnabled) {
this.cmakeAnalyzerEnabled = cmakeAnalyzerEnabled; this.cmakeAnalyzerEnabled = cmakeAnalyzerEnabled;
} }
/** /**
* Whether or not the openssl analyzer is enabled. * Returns if the Bundle Audit Analyzer is enabled.
*
* @return if the Bundle Audit Analyzer is enabled.
*/ */
private Boolean opensslAnalyzerEnabled; public Boolean isBundleAuditAnalyzerEnabled() {
return bundleAuditAnalyzerEnabled;
}
/**
* Sets if the Bundle Audit Analyzer is enabled.
*
* @param bundleAuditAnalyzerEnabled whether or not the analyzer should be
* enabled
*/
public void setBundleAuditAnalyzerEnabled(Boolean bundleAuditAnalyzerEnabled) {
this.bundleAuditAnalyzerEnabled = bundleAuditAnalyzerEnabled;
}
/**
* Returns the path to the bundle audit executable.
*
* @return the path to the bundle audit executable
*/
public String getBundleAuditPath() {
return bundleAuditPath;
}
/**
* Sets the path to the bundle audit executable.
*
* @param bundleAuditPath the path to the bundle audit executable
*/
public void setBundleAuditPath(String bundleAuditPath) {
this.bundleAuditPath = bundleAuditPath;
}
/**
* Returns if the cocoapods analyzer is enabled.
*
* @return if the cocoapods analyzer is enabled
*/
public boolean isCocoapodsAnalyzerEnabled() {
return cocoapodsAnalyzerEnabled;
}
/**
* Sets whether or not the cocoapods analyzer is enabled.
*
* @param cocoapodsAnalyzerEnabled the state of the cocoapods analyzer
*/
public void setCocoapodsAnalyzerEnabled(Boolean cocoapodsAnalyzerEnabled) {
this.cocoapodsAnalyzerEnabled = cocoapodsAnalyzerEnabled;
}
/**
* Returns whether or not the Swift package Analyzer is enabled.
*
* @return whether or not the Swift package Analyzer is enabled
*/
public Boolean isSwiftPackageManagerAnalyzerEnabled() {
return swiftPackageManagerAnalyzerEnabled;
}
/**
* Sets the enabled state of the swift package manager analyzer.
*
* @param swiftPackageManagerAnalyzerEnabled the enabled state of the swift
* package manager
*/
public void setSwiftPackageManagerAnalyzerEnabled(Boolean swiftPackageManagerAnalyzerEnabled) {
this.swiftPackageManagerAnalyzerEnabled = swiftPackageManagerAnalyzerEnabled;
}
/**
 * Get the value of opensslAnalyzerEnabled.
@@ -537,10 +722,6 @@ public class Check extends Update {
public void setOpensslAnalyzerEnabled(Boolean opensslAnalyzerEnabled) {
this.opensslAnalyzerEnabled = opensslAnalyzerEnabled;
}
/**
 * Whether or not the Node.js Analyzer is enabled.
 */
private Boolean nodeAnalyzerEnabled;
/**
 * Get the value of nodeAnalyzerEnabled.
@@ -559,10 +740,6 @@ public class Check extends Update {
public void setNodeAnalyzerEnabled(Boolean nodeAnalyzerEnabled) {
this.nodeAnalyzerEnabled = nodeAnalyzerEnabled;
}
/**
 * Whether the ruby gemspec analyzer should be enabled.
 */
private Boolean rubygemsAnalyzerEnabled;
/**
 * Get the value of rubygemsAnalyzerEnabled.
@@ -581,10 +758,6 @@ public class Check extends Update {
public void setRubygemsAnalyzerEnabled(Boolean rubygemsAnalyzerEnabled) {
this.rubygemsAnalyzerEnabled = rubygemsAnalyzerEnabled;
}
/**
 * Whether the python package analyzer should be enabled.
 */
private Boolean pyPackageAnalyzerEnabled;
/**
 * Get the value of pyPackageAnalyzerEnabled.
@@ -604,11 +777,6 @@ public class Check extends Update {
this.pyPackageAnalyzerEnabled = pyPackageAnalyzerEnabled;
}
/**
 * Whether the python distribution analyzer should be enabled.
 */
private Boolean pyDistributionAnalyzerEnabled;
/**
 * Get the value of pyDistributionAnalyzerEnabled.
 *
@@ -621,17 +789,13 @@ public class Check extends Update {
/**
 * Set the value of pyDistributionAnalyzerEnabled.
 *
 * @param pyDistributionAnalyzerEnabled new value of
 * pyDistributionAnalyzerEnabled
 */
public void setPyDistributionAnalyzerEnabled(Boolean pyDistributionAnalyzerEnabled) {
this.pyDistributionAnalyzerEnabled = pyDistributionAnalyzerEnabled;
}
/**
 * Whether or not the central analyzer is enabled.
 */
private Boolean centralAnalyzerEnabled;
/**
 * Get the value of centralAnalyzerEnabled.
 *
@@ -650,11 +814,6 @@ public class Check extends Update {
this.centralAnalyzerEnabled = centralAnalyzerEnabled;
}
/**
 * Whether or not the nexus analyzer is enabled.
 */
private Boolean nexusAnalyzerEnabled;
/**
 * Get the value of nexusAnalyzerEnabled.
 *
@@ -673,11 +832,6 @@ public class Check extends Update {
this.nexusAnalyzerEnabled = nexusAnalyzerEnabled;
}
/**
 * The URL of a Nexus server's REST API end point (http://domain/nexus/service/local).
 */
private String nexusUrl;
/**
 * Get the value of nexusUrl.
 *
@@ -695,10 +849,6 @@ public class Check extends Update {
public void setNexusUrl(String nexusUrl) {
this.nexusUrl = nexusUrl;
}
/**
 * Whether or not the defined proxy should be used when connecting to Nexus.
 */
private Boolean nexusUsesProxy;
/**
 * Get the value of nexusUsesProxy.
@@ -718,12 +868,6 @@ public class Check extends Update {
this.nexusUsesProxy = nexusUsesProxy;
}
/**
 * Additional ZIP File extensions to add analyze. This should be a comma-separated list of file extensions to treat like ZIP
 * files.
 */
private String zipExtensions;
/**
 * Get the value of zipExtensions.
 *
@@ -742,11 +886,6 @@ public class Check extends Update {
this.zipExtensions = zipExtensions;
}
/**
 * The path to Mono for .NET assembly analysis on non-windows systems.
 */
private String pathToMono;
/**
 * Get the value of pathToMono.
 *
@@ -775,52 +914,62 @@ public class Check extends Update {
engine = new Engine(Check.class.getClassLoader());
if (isUpdateOnly()) {
log("Deprecated 'UpdateOnly' property set; please use the UpdateTask instead", Project.MSG_WARN);
try {
engine.doUpdates();
} catch (UpdateException ex) {
if (this.isFailOnError()) {
throw new BuildException(ex);
}
log(ex.getMessage(), Project.MSG_ERR);
}
} else {
for (Resource resource : getPath()) {
final FileProvider provider = resource.as(FileProvider.class);
if (provider != null) {
final File file = provider.getFile();
if (file != null && file.exists()) {
engine.scan(file);
}
}
}
try {
engine.analyzeDependencies();
} catch (ExceptionCollection ex) {
if (this.isFailOnError()) {
throw new BuildException(ex);
}
}
DatabaseProperties prop = null;
try (CveDB cve = CveDB.getInstance()) {
prop = cve.getDatabaseProperties();
} catch (DatabaseException ex) {
//TODO shouldn't this be a fatal exception
log("Unable to retrieve DB Properties", ex, Project.MSG_DEBUG);
}
final ReportGenerator reporter = new ReportGenerator(getProjectName(), engine.getDependencies(), engine.getAnalyzers(), prop);
reporter.generateReports(reportOutputDirectory, reportFormat);
if (this.failBuildOnCVSS <= 10) {
checkForFailure(engine.getDependencies());
}
if (this.showSummary) {
showSummary(engine.getDependencies());
}
}
} catch (DatabaseException ex) {
final String msg = "Unable to connect to the dependency-check database; analysis has stopped";
if (this.isFailOnError()) {
throw new BuildException(msg, ex);
}
log(msg, ex, Project.MSG_ERR);
} catch (ReportException ex) {
final String msg = "Unable to generate the dependency-check report";
if (this.isFailOnError()) {
throw new BuildException(msg, ex);
}
log(msg, ex, Project.MSG_ERR);
} finally {
Settings.cleanup(true);
if (engine != null) {
@@ -830,12 +979,13 @@ public class Check extends Update {
}
/**
 * Validate the configuration to ensure the parameters have been properly
 * configured/initialized.
 *
 * @throws BuildException if the task was not configured correctly.
 */
private void validateConfiguration() throws BuildException {
if (getPath() == null) {
throw new BuildException("No project dependencies have been defined to analyze.");
}
if (failBuildOnCVSS < 0 || failBuildOnCVSS > 11) {
@@ -844,8 +994,9 @@ public class Check extends Update {
}
/**
 * Takes the properties supplied and updates the dependency-check settings.
 * Additionally, this sets the system properties required to change the
 * proxy server, port, and connection timeout.
 *
 * @throws BuildException thrown when an invalid setting is configured.
 */
@@ -854,12 +1005,18 @@ public class Check extends Update {
super.populateSettings();
Settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, enableExperimental);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_JAR_ENABLED, jarAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, pyDistributionAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, pyPackageAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, rubygemsAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, opensslAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_CMAKE_ENABLED, cmakeAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, swiftPackageManagerAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, cocoapodsAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, bundleAuditAnalyzerEnabled);
Settings.setStringIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, bundleAuditPath);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, autoconfAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, composerAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, nodeAnalyzerEnabled);
@@ -875,11 +1032,12 @@ public class Check extends Update {
}
/**
 * Checks to see if a vulnerability has been identified with a CVSS score
 * that is above the threshold set in the configuration.
 *
 * @param dependencies the list of dependency objects
 * @throws BuildException thrown if a CVSS score is found that is higher
 * than the threshold set
 */
private void checkForFailure(List<Dependency> dependencies) throws BuildException {
final StringBuilder ids = new StringBuilder();
@@ -896,14 +1054,15 @@ public class Check extends Update {
}
if (ids.length() > 0) {
final String msg = String.format("%n%nDependency-Check Failure:%n"
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than '%.1f': %s%n"
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString());
throw new BuildException(msg);
}
}
/**
 * Generates a warning message listing a summary of dependencies and their
 * associated CPE and CVE entries.
 *
 * @param dependencies a list of dependency objects
 */
@@ -943,7 +1102,8 @@ public class Check extends Update {
}
/**
 * An enumeration of supported report formats: "ALL", "HTML", "XML", "VULN",
 * etc..
 */
public static class ReportFormats extends EnumeratedAttribute {


@@ -71,6 +71,36 @@ public class Purge extends Task {
this.dataDirectory = dataDirectory;
}
/**
* Indicates if dependency-check should fail the build if an exception
* occurs.
*/
private boolean failOnError = true;
/**
* Get the value of failOnError.
*
* @return the value of failOnError
*/
public boolean isFailOnError() {
return failOnError;
}
/**
* Set the value of failOnError.
*
* @param failOnError new value of failOnError
*/
public void setFailOnError(boolean failOnError) {
this.failOnError = failOnError;
}
/**
* Executes the dependency-check purge to delete the existing local copy of
* the NVD CVE data.
*
* @throws BuildException thrown if there is a problem deleting the file(s)
*/
@Override
public void execute() throws BuildException {
populateSettings();
@@ -81,38 +111,47 @@ public class Purge extends Task {
if (db.delete()) {
log("Database file purged; local copy of the NVD has been removed", Project.MSG_INFO);
} else {
final String msg = String.format("Unable to delete '%s'; please delete the file manually", db.getAbsolutePath());
if (this.failOnError) {
throw new BuildException(msg);
}
log(msg, Project.MSG_ERR);
}
} else {
final String msg = String.format("Unable to purge database; the database file does not exists: %s", db.getAbsolutePath());
if (this.failOnError) {
throw new BuildException(msg);
}
log(msg, Project.MSG_ERR);
}
} catch (IOException ex) {
final String msg = "Unable to delete the database";
if (this.failOnError) {
throw new BuildException(msg);
}
log(msg, Project.MSG_ERR);
} finally {
Settings.cleanup(true);
}
}
/**
 * Takes the properties supplied and updates the dependency-check settings.
 * Additionally, this sets the system properties required to change the
 * proxy server, port, and connection timeout.
 *
 * @throws BuildException thrown if the properties file cannot be read.
 */
protected void populateSettings() throws BuildException {
Settings.initialize();
try (InputStream taskProperties = this.getClass().getClassLoader().getResourceAsStream(PROPERTIES_FILE)) {
Settings.mergeProperties(taskProperties);
} catch (IOException ex) {
final String msg = "Unable to load the dependency-check ant task.properties file.";
if (this.failOnError) {
throw new BuildException(msg, ex);
}
log(msg, ex, Project.MSG_WARN);
}
if (dataDirectory != null) {
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);


@@ -18,19 +18,83 @@
package org.owasp.dependencycheck.taskdefs;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.impl.StaticLoggerBinder;
/**
 * An Ant task definition to execute dependency-check update. This will download
 * the latest data from the National Vulnerability Database (NVD) and store a
 * copy in the local database.
 *
 * @author Jeremy Long
 */
public class Update extends Purge {
/**
* The Proxy Server.
*/
private String proxyServer;
/**
* The Proxy Port.
*/
private String proxyPort;
/**
* The Proxy username.
*/
private String proxyUsername;
/**
* The Proxy password.
*/
private String proxyPassword;
/**
* The Connection Timeout.
*/
private String connectionTimeout;
/**
* The database driver name; such as org.h2.Driver.
*/
private String databaseDriverName;
/**
* The path to the database driver JAR file if it is not on the class path.
*/
private String databaseDriverPath;
/**
* The database connection string.
*/
private String connectionString;
/**
* The user name for connecting to the database.
*/
private String databaseUser;
/**
* The password to use when connecting to the database.
*/
private String databasePassword;
/**
* The url for the modified NVD CVE (1.2 schema).
*/
private String cveUrl12Modified;
/**
* Base Data Mirror URL for CVE 1.2.
*/
private String cveUrl12Base;
/**
* Data Mirror URL for CVE 2.0.
*/
private String cveUrl20Base;
/**
* The number of hours to wait before re-checking for updates.
*/
private Integer cveValidForHours;
/**
* The url for the modified NVD CVE (2.0 schema).
*/
private String cveUrl20Modified;
/**
 * Construct a new UpdateTask.
 */
@@ -41,11 +105,6 @@ public class Update extends Purge {
StaticLoggerBinder.getSingleton().setTask(this);
}
/**
* The Proxy Server.
*/
private String proxyServer;
/**
 * Get the value of proxyServer.
 *
@@ -64,11 +123,6 @@ public class Update extends Purge {
this.proxyServer = server;
}
/**
* The Proxy Port.
*/
private String proxyPort;
/**
 * Get the value of proxyPort.
 *
@@ -86,10 +140,6 @@ public class Update extends Purge {
public void setProxyPort(String proxyPort) {
this.proxyPort = proxyPort;
}
/**
* The Proxy username.
*/
private String proxyUsername;
/**
 * Get the value of proxyUsername.
@@ -108,10 +158,6 @@ public class Update extends Purge {
public void setProxyUsername(String proxyUsername) {
this.proxyUsername = proxyUsername;
}
/**
* The Proxy password.
*/
private String proxyPassword;
/**
 * Get the value of proxyPassword.
@@ -130,10 +176,6 @@ public class Update extends Purge {
public void setProxyPassword(String proxyPassword) {
this.proxyPassword = proxyPassword;
}
/**
* The Connection Timeout.
*/
private String connectionTimeout;
/**
 * Get the value of connectionTimeout.
@@ -152,10 +194,6 @@ public class Update extends Purge {
public void setConnectionTimeout(String connectionTimeout) {
this.connectionTimeout = connectionTimeout;
}
/**
* The database driver name; such as org.h2.Driver.
*/
private String databaseDriverName;
/**
 * Get the value of databaseDriverName.
@@ -175,11 +213,6 @@ public class Update extends Purge {
this.databaseDriverName = databaseDriverName;
}
/**
* The path to the database driver JAR file if it is not on the class path.
*/
private String databaseDriverPath;
/**
 * Get the value of databaseDriverPath.
 *
@@ -197,10 +230,6 @@ public class Update extends Purge {
public void setDatabaseDriverPath(String databaseDriverPath) {
this.databaseDriverPath = databaseDriverPath;
}
/**
* The database connection string.
*/
private String connectionString;
/**
 * Get the value of connectionString.
@@ -219,10 +248,6 @@ public class Update extends Purge {
public void setConnectionString(String connectionString) {
this.connectionString = connectionString;
}
/**
* The user name for connecting to the database.
*/
private String databaseUser;
/**
 * Get the value of databaseUser.
@@ -242,11 +267,6 @@ public class Update extends Purge {
this.databaseUser = databaseUser;
}
/**
* The password to use when connecting to the database.
*/
private String databasePassword;
/**
 * Get the value of databasePassword.
 *
@@ -265,11 +285,6 @@ public class Update extends Purge {
this.databasePassword = databasePassword;
}
/**
* The url for the modified NVD CVE (1.2 schema).
*/
private String cveUrl12Modified;
/**
 * Get the value of cveUrl12Modified.
 *
@@ -288,11 +303,6 @@ public class Update extends Purge {
this.cveUrl12Modified = cveUrl12Modified;
}
/**
* The url for the modified NVD CVE (2.0 schema).
*/
private String cveUrl20Modified;
/**
 * Get the value of cveUrl20Modified.
 *
@@ -311,11 +321,6 @@ public class Update extends Purge {
this.cveUrl20Modified = cveUrl20Modified;
}
/**
* Base Data Mirror URL for CVE 1.2.
*/
private String cveUrl12Base;
/**
 * Get the value of cveUrl12Base.
 *
@@ -334,11 +339,6 @@ public class Update extends Purge {
this.cveUrl12Base = cveUrl12Base;
}
/**
* Data Mirror URL for CVE 2.0.
*/
private String cveUrl20Base;
/**
 * Get the value of cveUrl20Base.
 *
@@ -357,11 +357,6 @@ public class Update extends Purge {
this.cveUrl20Base = cveUrl20Base;
}
/**
* The number of hours to wait before re-checking for updates.
*/
private Integer cveValidForHours;
/**
 * Get the value of cveValidForHours.
 *
@@ -381,10 +376,11 @@ public class Update extends Purge {
}
/**
 * Executes the update by initializing the settings, downloads the NVD XML
 * data, and then processes the data storing it in the local database.
 *
 * @throws BuildException thrown if a connection to the local database
 * cannot be made.
 */
@Override
public void execute() throws BuildException {
@@ -392,9 +388,20 @@ public class Update extends Purge {
Engine engine = null;
try {
engine = new Engine(Update.class.getClassLoader());
try {
engine.doUpdates();
} catch (UpdateException ex) {
if (this.isFailOnError()) {
throw new BuildException(ex);
}
log(ex.getMessage(), Project.MSG_ERR);
}
} catch (DatabaseException ex) {
final String msg = "Unable to connect to the dependency-check database; unable to update the NVD data";
if (this.isFailOnError()) {
throw new BuildException(msg, ex);
}
log(msg, Project.MSG_ERR);
} finally {
Settings.cleanup(true);
if (engine != null) {
@@ -404,8 +411,9 @@ public class Update extends Purge {
}
/**
 * Takes the properties supplied and updates the dependency-check settings.
 * Additionally, this sets the system properties required to change the
 * proxy server, port, and connection timeout.
 *
 * @throws BuildException thrown when an invalid setting is configured.
 */


@@ -23,18 +23,25 @@ import org.slf4j.ILoggerFactory;
import org.slf4j.spi.LoggerFactoryBinder;
/**
 * The binding of org.slf4j.LoggerFactory class with an actual instance of
 * org.slf4j.ILoggerFactory is performed using information returned by this
 * class.
 *
 * @author colezlaw
 */
//CSOFF: FinalClass
public class StaticLoggerBinder implements LoggerFactoryBinder {
//CSON: FinalClass
/**
 * The unique instance of this class
 *
 */
private static final StaticLoggerBinder SINGLETON = new StaticLoggerBinder();
/**
* Ant tasks have the log method we actually want to call. So we hang onto
* the task as a delegate
*/
private Task task = null;
/**
 * Return the singleton of this class.
@@ -45,11 +52,6 @@ public class StaticLoggerBinder implements LoggerFactoryBinder {
return SINGLETON;
}
/**
* Ant tasks have the log method we actually want to call. So we hang onto the task as a delegate
*/
private Task task = null;
/**
 * Set the Task which will this is to log through.
 *
@@ -61,16 +63,24 @@ public class StaticLoggerBinder implements LoggerFactoryBinder {
}
/**
 * Declare the version of the SLF4J API this implementation is compiled
 * against. The value of this field is usually modified with each release.
 */
// to avoid constant folding by the compiler, this field must *not* be final
//CSOFF: StaticVariableName
//CSOFF: VisibilityModifier
public static String REQUESTED_API_VERSION = "1.7.12"; // final
//CSON: VisibilityModifier
//CSON: StaticVariableName
/**
 * The logger factory class string.
 */
private static final String LOGGER_FACTORY_CLASS = AntLoggerFactory.class.getName();
/**
 * The ILoggerFactory instance returned by the {@link #getLoggerFactory}
 * method should always be the same object
 */
private ILoggerFactory loggerFactory;


@@ -1,2 +1,2 @@
# the path to the data directory
data.directory=data/3.0


@@ -2,7 +2,7 @@ Configuration
====================
The dependency-check-purge task deletes the local copy of the NVD. This task
should rarely be used, if ever. This is included as a convenience method in
the rare circumstance that the local H2 database becomes corrupt.
```xml
<target name="dependency-check-purge" description="Dependency-Check purge">
@@ -14,6 +14,7 @@ Configuration: dependency-check-purge Task
--------------------
The following properties can be set on the dependency-check-purge task.
Property | Description | Default Value
----------------------|------------------------------------------------------------------------|------------------
dataDirectory | Data directory that is used to store the local copy of the NVD | data
failOnError | Whether the build should fail if there is an error executing the purge | true
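For reference, a minimal purge target using these properties might look like the sketch below. It assumes the `dependency-check-purge` task has been registered through the taskdef described on the installation page; the attribute values are illustrative only.

```xml
<target name="dependency-check-purge" description="Dependency-Check purge">
    <!-- dataDirectory defaults to 'data' and failOnError to 'true'; both shown here only as examples -->
    <dependency-check-purge
        dataDirectory="${user.home}/dependency-check-data"
        failOnError="false"/>
</target>
```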


@@ -3,7 +3,7 @@ Configuration
The dependency-check-update task downloads and updates the local copy of the NVD.
There are several reasons that one may want to use this task; primarily, creating
an update that will be run only once a day or once every few days (but not greater
than 7 days) and then use the `autoUpdate="false"` setting on individual
dependency-check scans. See [Internet Access Required](https://jeremylong.github.io/DependencyCheck/data/index.html)
for more information on why this task would be used.
@@ -24,6 +24,7 @@ proxyPort | The Proxy Port. | &nbsp;
proxyUsername | Defines the proxy user name. | &nbsp;
proxyPassword | Defines the proxy password. | &nbsp;
connectionTimeout | The URL Connection Timeout. | &nbsp;
failOnError | Whether the build should fail if there is an error executing the update | true
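As a hedged sketch of the pattern described above (a scheduled update target, with `autoUpdate="false"` left to the individual scan targets), assuming the task is registered as `dependency-check-update` and using illustrative proxy values:

```xml
<target name="dependency-check-update" description="Update the local copy of the NVD">
    <!-- run this target from a daily job; scan targets can then set autoUpdate="false" -->
    <dependency-check-update
        proxyServer="proxy.example.org"
        proxyPort="8080"
        failOnError="false"/>
</target>
```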
Advanced Configuration
====================


@@ -27,22 +27,25 @@ the project's dependencies.
Configuration: dependency-check Task
--------------------
The following properties can be set on the dependency-check task.
Property | Description | Default Value
----------------------|----------------------------------------------------------------|-----------------
autoUpdate | Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not recommended that this be turned to false. | true
cveValidForHours | Sets the number of hours to wait before checking for new updates from the NVD | 4
failBuildOnCVSS | Specifies if the build should be failed if a CVSS score above a specified level is identified. The default is 11 which means since the CVSS scores are 0-10, by default the build will never fail. | 11
failOnError | Whether the build should fail if there is an error executing the dependency-check analysis | true
projectName | The name of the project being scanned. | Dependency-Check
reportFormat | The report format to be generated (HTML, XML, VULN, ALL). This configuration option has no affect if using this within the Site plugin unless the externalReport is set to true. | HTML
reportOutputDirectory | The location to write the report(s). Note, this is not used if generating the report as part of a `mvn site` build | 'target'
suppressionFile | The file path to the XML suppression file \- used to suppress [false positives](../general/suppression.html) | &nbsp;
hintsFile | The file path to the XML hints file \- used to resolve [false negatives](../general/hints.html) | &nbsp;
proxyServer | The Proxy Server; see the [proxy configuration](../data/proxy.html) page for more information. | &nbsp;
proxyPort | The Proxy Port. | &nbsp;
proxyUsername | Defines the proxy user name. | &nbsp;
proxyPassword | Defines the proxy password. | &nbsp;
connectionTimeout | The URL Connection Timeout. | &nbsp;
enableExperimental | Enable the [experimental analyzers](../analyzers/index.html). If not enabled the experimental analyzers (see below) will not be loaded or used. | false
Analyzer Configuration
====================
@@ -52,26 +55,30 @@ Note, that specific analyzers will automatically disable themselves if no file
types that they support are detected - so specifically disabling them may not
be needed.
Property | Description | Default Value
------------------------------|-----------------------------------------------------------------------------------|------------------
archiveAnalyzerEnabled | Sets whether the Archive Analyzer will be used. | true
zipExtensions | A comma-separated list of additional file extensions to be treated like a ZIP file, the contents will be extracted and analyzed. | &nbsp;
jarAnalyzer | Sets whether the Jar Analyzer will be used. | true
centralAnalyzerEnabled | Sets whether the Central Analyzer will be used. **Disabling this analyzer is not recommended as it could lead to false negatives (e.g. libraries that have vulnerabilities may not be reported correctly).** If this analyzer is being disabled there is a good chance you also want to disable the Nexus Analyzer (see below). | true
nexusAnalyzerEnabled | Sets whether Nexus Analyzer will be used. This analyzer is superceded by the Central Analyzer; however, you can configure this to run against a Nexus Pro installation. | true
nexusUrl | Defines the Nexus web service endpoint (example http://domain.enterprise/nexus/service/local/). If not set the Nexus Analyzer will be disabled. | &nbsp;
nexusUsesProxy | Whether or not the defined proxy should be used when connecting to Nexus. | true
pyDistributionAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Python Distribution Analyzer will be used. | true
pyPackageAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Python Package Analyzer will be used. | true
rubygemsAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Ruby Gemspec Analyzer will be used. | true
opensslAnalyzerEnabled | Sets whether the openssl Analyzer should be used. | true
cmakeAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) CMake Analyzer should be used. | true
autoconfAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) autoconf Analyzer should be used. | true
composerAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) PHP Composer Lock File Analyzer should be used. | true
nodeAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Node.js Analyzer should be used. | true
nuspecAnalyzerEnabled | Sets whether the .NET Nuget Nuspec Analyzer will be used. | true
cocoapodsAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Cocoapods Analyzer should be used. | true
bundleAuditAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Bundle Audit Analyzer should be used. | true
bundleAuditPath | Sets the path to the bundle audit executable; only used if bundle audit analyzer is enabled and experimental analyzers are enabled. | &nbsp;
swiftPackageManagerAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Swift Package Analyzer should be used. | true
assemblyAnalyzerEnabled | Sets whether the .NET Assembly Analyzer should be used. | true
pathToMono | The path to Mono for .NET assembly analysis on non-windows systems. | &nbsp;
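A sketch of how a few of these properties combine on the task itself; the attribute names come from the tables above, while the target name, paths, and chosen values are illustrative:

```xml
<target name="dependency-check" description="Dependency-Check Analysis">
    <dependency-check projectName="My Project"
                      reportOutputDirectory="${basedir}/reports"
                      reportFormat="HTML"
                      enableExperimental="true"
                      swiftPackageManagerAnalyzerEnabled="false"
                      bundleAuditAnalyzerEnabled="false">
        <!-- the dependencies to scan are supplied as a nested fileset -->
        <fileset dir="lib">
            <include name="**/*.jar"/>
        </fileset>
    </dependency-check>
</target>
```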
Advanced Configuration
====================


@@ -16,8 +16,11 @@ Installation
<property name="dependency-check.home" value="C:/tools/dependency-check-ant"/>
<path id="dependency-check.path">
<pathelement location="${dependency-check.home}/dependency-check-ant.jar"/>
<fileset dir="${dependency-check.home}/lib">
<include name="*.jar"/>
</fileset>
</path>
<taskdef resource="dependency-check-taskdefs.properties">
<classpath refid="dependency-check.path" />
</taskdef>
```
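With the taskdef above in place, a basic scan target can be declared as sketched here; the project name, report location, and fileset are examples rather than fixed values:

```xml
<target name="dependency-check" description="Dependency-Check Analysis">
    <dependency-check projectName="Hello World"
                      reportOutputDirectory="${basedir}"
                      reportFormat="ALL">
        <!-- scan every jar shipped with the application -->
        <fileset dir="lib">
            <include name="**/*.jar"/>
        </fileset>
    </dependency-check>
</target>
```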


@@ -27,7 +27,7 @@ Copyright (c) 2013 Jeremy Long. All Rights Reserved.
<item name="dependency-check" href="../index.html"/>
</breadcrumbs>
<menu name="Getting Started">
<item name="Installation" href="index.html"/>
<item name="Configuration" href="configuration.html"/>
</menu>
<menu ref="reports" />


@@ -31,7 +31,6 @@ import org.owasp.dependencycheck.utils.Settings;
import static org.junit.Assert.assertTrue;
/**
 *
 * @author Jeremy Long
@@ -65,15 +64,11 @@ public class DependencyCheckTaskTest {
@Test
public void testAddFileSet() throws Exception {
File report = new File("target/dependency-check-report.html");
if (report.exists() && !report.delete()) {
throw new Exception("Unable to delete 'target/DependencyCheck-Report.html' prior to test.");
}
buildFileRule.executeTarget("test.fileset");
assertTrue("DependencyCheck report was not generated", report.exists());
}
/**


@@ -61,11 +61,14 @@
<target name="failCVSS">
<dependency-check
applicationName="test failCVSS"
reportOutputDirectory="${project.build.directory}"
reportFormat="XML"
autoupdate="false"
failBuildOnCVSS="3">
<fileset dir="${project.build.directory}/test-classes/jars">
<include name="axis-1.4.jar"/>
</fileset>
</dependency-check>
</target>
</project>


@@ -5,7 +5,7 @@ performed are a "best effort" and as such, there could be false positives as wel
vulnerabilities in 3rd party components is a well-known problem and is currently documented in the 2013 OWASP
Top 10 as [A9 - Using Components with Known Vulnerabilities](https://www.owasp.org/index.php/Top_10_2013-A9-Using_Components_with_Known_Vulnerabilities).
Documentation and links to production binary releases can be found on the [github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-cli/index.html).
Mailing List
------------


@@ -20,7 +20,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<parent>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId>
<version>1.4.6-SNAPSHOT</version>
</parent>
<artifactId>dependency-check-cli</artifactId>
@@ -110,6 +110,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<argLine>-Dfile.encoding=UTF-8</argLine>
<systemProperties>
<property>
<name>cpe</name>
@@ -139,6 +140,8 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<binFileExtensions>
<unix>.sh</unix>
</binFileExtensions>
<configurationDirectory>plugins/*</configurationDirectory>
<includeConfigurationDirectoryInClasspath>true</includeConfigurationDirectoryInClasspath>
</configuration>
<executions>
<execution>
@@ -193,7 +196,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<version>${reporting.pmd-plugin.version}</version>
<configuration>
<targetJdk>1.6</targetJdk>
<linkXRef>true</linkXRef>
<sourceEncoding>utf-8</sourceEncoding>
<excludes>
<exclude>**/generated/*.java</exclude>


@@ -29,6 +29,13 @@
<outputDirectory>dependency-check/repo</outputDirectory>
<directory>${project.build.directory}/release/repo</directory>
</fileSet>
<fileSet>
<directory>.</directory>
<outputDirectory>dependency-check/plugins</outputDirectory>
<excludes>
<exclude>*/**</exclude>
</excludes>
</fileSet>
<fileSet>
<outputDirectory>dependency-check</outputDirectory>
<includes>
@@ -53,21 +60,4 @@
</includes>
</fileSet>
</fileSets>
<!--
<fileSets>
<fileSet>
<outputDirectory>/</outputDirectory>
<directory>${project.build.directory}</directory>
<includes>
<include>dependency-check*.jar</include>
</includes>
</fileSet>
</fileSets>
<dependencySets>
<dependencySet>
<outputDirectory>/lib</outputDirectory>
<scope>runtime</scope>
</dependencySet>
</dependencySets>
-->
</assembly>


@@ -19,6 +19,7 @@ package org.owasp.dependencycheck;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
import ch.qos.logback.classic.spi.ILoggingEvent;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -27,17 +28,21 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang.StringUtils;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.dependency.Dependency;
import org.apache.tools.ant.DirectoryScanner;
import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.reporting.ReportGenerator;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ch.qos.logback.core.FileAppender;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.exception.ExceptionCollection;
import org.owasp.dependencycheck.exception.ReportException;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.slf4j.impl.StaticLoggerBinder;
/**
@@ -58,21 +63,26 @@ public class App {
 * @param args the command line arguments
 */
public static void main(String[] args) {
int exitCode = 0;
try {
Settings.initialize();
final App app = new App();
exitCode = app.run(args);
LOGGER.debug("Exit code: " + exitCode);
} finally {
Settings.cleanup(true);
}
System.exit(exitCode);
}
/**
 * Main CLI entry-point into the application.
 *
 * @param args the command line arguments
 * @return the exit code to return
 */
public int run(String[] args) {
int exitCode = 0;
final CliParser cli = new CliParser();
try {
@@ -80,11 +90,11 @@ public class App {
} catch (FileNotFoundException ex) {
System.err.println(ex.getMessage());
cli.printHelp();
return -1;
} catch (ParseException ex) {
System.err.println(ex.getMessage());
cli.printHelp();
return -2;
}
if (cli.getVerboseLog() != null) {
@@ -94,8 +104,15 @@ public class App {
if (cli.isPurge()) {
if (cli.getConnectionString() != null) {
LOGGER.error("Unable to purge the database when using a non-default connection string");
exitCode = -3;
} else {
try {
populateSettings(cli);
} catch (InvalidSettingException ex) {
LOGGER.error(ex.getMessage());
LOGGER.debug("Error loading properties file", ex);
exitCode = -4;
}
File db;
try {
db = new File(Settings.getDataDirectory(), "dc.h2.db");
@@ -104,56 +121,115 @@ public class App {
LOGGER.info("Database file purged; local copy of the NVD has been removed");
} else {
LOGGER.error("Unable to delete '{}'; please delete the file manually", db.getAbsolutePath());
exitCode = -5;
}
} else {
LOGGER.error("Unable to purge database; the database file does not exists: {}", db.getAbsolutePath());
exitCode = -6;
}
} catch (IOException ex) {
LOGGER.error("Unable to delete the database");
exitCode = -7;
}
}
} else if (cli.isGetVersion()) {
cli.printVersionInfo();
} else if (cli.isUpdateOnly()) {
populateSettings(cli);
runUpdateOnly();
} else if (cli.isRunScan()) {
populateSettings(cli);
try { try {
runScan(cli.getReportDirectory(), cli.getReportFormat(), cli.getProjectName(), cli.getScanFiles(), populateSettings(cli);
cli.getExcludeList(), cli.getSymLinkDepth()); } catch (InvalidSettingException ex) {
LOGGER.error(ex.getMessage());
LOGGER.debug("Error loading properties file", ex);
exitCode = -4;
}
try {
runUpdateOnly();
} catch (UpdateException ex) {
LOGGER.error(ex.getMessage());
exitCode = -8;
} catch (DatabaseException ex) {
LOGGER.error(ex.getMessage());
exitCode = -9;
}
} else if (cli.isRunScan()) {
try {
populateSettings(cli);
} catch (InvalidSettingException ex) {
LOGGER.error(ex.getMessage());
LOGGER.debug("Error loading properties file", ex);
exitCode = -4;
}
try {
final String[] scanFiles = cli.getScanFiles();
if (scanFiles != null) {
exitCode = runScan(cli.getReportDirectory(), cli.getReportFormat(), cli.getProjectName(), scanFiles,
cli.getExcludeList(), cli.getSymLinkDepth(), cli.getFailOnCVSS());
} else {
LOGGER.error("No scan files configured");
}
} catch (InvalidScanPathException ex) { } catch (InvalidScanPathException ex) {
LOGGER.error("An invalid scan path was detected; unable to scan '//*' paths"); LOGGER.error("An invalid scan path was detected; unable to scan '//*' paths");
exitCode = -10;
} catch (DatabaseException ex) {
LOGGER.error(ex.getMessage());
exitCode = -11;
} catch (ReportException ex) {
LOGGER.error(ex.getMessage());
exitCode = -12;
} catch (ExceptionCollection ex) {
if (ex.isFatal()) {
exitCode = -13;
LOGGER.error("One or more fatal errors occurred");
} else {
exitCode = -14;
}
for (Throwable e : ex.getExceptions()) {
LOGGER.error(e.getMessage());
}
} }
} else { } else {
cli.printHelp(); cli.printHelp();
} }
return exitCode;
} }
/** /**
* Scans the specified directories and writes the dependency reports to the reportDirectory. * Scans the specified directories and writes the dependency reports to the
* reportDirectory.
* *
* @param reportDirectory the path to the directory where the reports will be written * @param reportDirectory the path to the directory where the reports will
* be written
* @param outputFormat the output format of the report * @param outputFormat the output format of the report
* @param applicationName the application name for the report * @param applicationName the application name for the report
* @param files the files/directories to scan * @param files the files/directories to scan
* @param excludes the patterns for files/directories to exclude * @param excludes the patterns for files/directories to exclude
* @param symLinkDepth the depth that symbolic links will be followed * @param symLinkDepth the depth that symbolic links will be followed
* @param cvssFailScore the score to fail on if a vulnerability is found
* @return the exit code if there was an error
* *
* @throws InvalidScanPathException thrown if the path to scan starts with "//" * @throws InvalidScanPathException thrown if the path to scan starts with
* "//"
* @throws ReportException thrown when the report cannot be generated
* @throws DatabaseException thrown when there is an error connecting to the
* database
* @throws ExceptionCollection thrown when an exception occurs during
* analysis; there may be multiple exceptions contained within the
* collection.
*/ */
private void runScan(String reportDirectory, String outputFormat, String applicationName, String[] files, private int runScan(String reportDirectory, String outputFormat, String applicationName, String[] files,
String[] excludes, int symLinkDepth) throws InvalidScanPathException { String[] excludes, int symLinkDepth, int cvssFailScore) throws InvalidScanPathException, DatabaseException,
ExceptionCollection, ReportException {
Engine engine = null; Engine engine = null;
int retCode = 0;
try { try {
engine = new Engine(); engine = new Engine();
final List<String> antStylePaths = new ArrayList<String>(); final List<String> antStylePaths = new ArrayList<>();
for (String file : files) { for (String file : files) {
final String antPath = ensureCanonicalPath(file); final String antPath = ensureCanonicalPath(file);
antStylePaths.add(antPath); antStylePaths.add(antPath);
} }
final Set<File> paths = new HashSet<File>(); final Set<File> paths = new HashSet<>();
for (String file : antStylePaths) { for (String file : antStylePaths) {
LOGGER.debug("Scanning {}", file); LOGGER.debug("Scanning {}", file);
final DirectoryScanner scanner = new DirectoryScanner(); final DirectoryScanner scanner = new DirectoryScanner();
@@ -175,8 +251,6 @@ public class App {
include = "**/*"; include = "**/*";
} }
} }
//LOGGER.debug("baseDir: {}", baseDir);
//LOGGER.debug("include: {}", include);
scanner.setBasedir(baseDir); scanner.setBasedir(baseDir);
final String[] includes = {include}; final String[] includes = {include};
scanner.setIncludes(includes); scanner.setIncludes(includes);
@@ -198,34 +272,52 @@ public class App {
} }
engine.scan(paths); engine.scan(paths);
engine.analyzeDependencies(); ExceptionCollection exCol = null;
try {
engine.analyzeDependencies();
} catch (ExceptionCollection ex) {
if (ex.isFatal()) {
throw ex;
}
exCol = ex;
}
final List<Dependency> dependencies = engine.getDependencies(); final List<Dependency> dependencies = engine.getDependencies();
DatabaseProperties prop = null; DatabaseProperties prop = null;
CveDB cve = null; try (CveDB cve = CveDB.getInstance()) {
try {
cve = new CveDB();
cve.open();
prop = cve.getDatabaseProperties(); prop = cve.getDatabaseProperties();
} catch (DatabaseException ex) { } catch (DatabaseException ex) {
//TODO shouldn't this be a fatal exception
LOGGER.debug("Unable to retrieve DB Properties", ex); LOGGER.debug("Unable to retrieve DB Properties", ex);
} finally {
if (cve != null) {
cve.close();
}
} }
final ReportGenerator report = new ReportGenerator(applicationName, dependencies, engine.getAnalyzers(), prop); final ReportGenerator report = new ReportGenerator(applicationName, dependencies, engine.getAnalyzers(), prop);
try { try {
report.generateReports(reportDirectory, outputFormat); report.generateReports(reportDirectory, outputFormat);
} catch (IOException ex) { } catch (ReportException ex) {
LOGGER.error("There was an IO error while attempting to generate the report."); if (exCol != null) {
LOGGER.debug("", ex); exCol.addException(ex);
} catch (Throwable ex) { throw exCol;
LOGGER.error("There was an error while attempting to generate the report."); } else {
LOGGER.debug("", ex); throw ex;
}
} }
} catch (DatabaseException ex) { if (exCol != null && exCol.getExceptions().size() > 0) {
LOGGER.error("Unable to connect to the dependency-check database; analysis has stopped"); throw exCol;
LOGGER.debug("", ex); }
//Set the exit code based on whether we found a high enough vulnerability
for (Dependency dep : dependencies) {
if (!dep.getVulnerabilities().isEmpty()) {
for (Vulnerability vuln : dep.getVulnerabilities()) {
LOGGER.debug("VULNERABILITY FOUND " + dep.getDisplayFileName());
if (vuln.getCvssScore() > cvssFailScore) {
retCode = 1;
}
}
}
}
return retCode;
} finally { } finally {
if (engine != null) { if (engine != null) {
engine.cleanup(); engine.cleanup();
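The loop above is what flips the return code to 1 when any vulnerability's CVSS score exceeds `cvssFailScore`. The following standalone sketch mirrors that decision with made-up scores (it is not the project's API) and shows why the default threshold of 11 can never fail a scan:

```java
import java.util.Arrays;
import java.util.List;

/**
 * Illustrative only: mirrors the threshold check performed in runScan.
 * CVSS scores range 0-10, so a threshold of 11 (the default) never trips.
 */
final class CvssGate {

    static int exitCodeFor(List<Double> cvssScores, double cvssFailScore) {
        for (double score : cvssScores) {
            if (score > cvssFailScore) {
                return 1; // same non-zero code the CLI returns when the gate trips
            }
        }
        return 0;
    }

    public static void main(String[] args) {
        final List<Double> scores = Arrays.asList(4.3, 9.8);
        System.out.println(exitCodeFor(scores, 7));  // 1 - 9.8 exceeds the threshold
        System.out.println(exitCodeFor(scores, 11)); // 0 - default threshold never fails
    }
}
```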
@@ -235,15 +327,16 @@ public class App {
/** /**
* Only executes the update phase of dependency-check. * Only executes the update phase of dependency-check.
*
* @throws UpdateException thrown if there is an error updating
* @throws DatabaseException thrown if a fatal error occurred and a
* connection to the database could not be established
*/ */
private void runUpdateOnly() { private void runUpdateOnly() throws UpdateException, DatabaseException {
Engine engine = null; Engine engine = null;
try { try {
engine = new Engine(); engine = new Engine();
engine.doUpdates(); engine.doUpdates();
} catch (DatabaseException ex) {
LOGGER.error("Unable to connect to the dependency-check database; analysis has stopped");
LOGGER.debug("", ex);
} finally { } finally {
if (engine != null) { if (engine != null) {
engine.cleanup(); engine.cleanup();
@@ -254,11 +347,13 @@ public class App {
/** /**
* Updates the global Settings. * Updates the global Settings.
* *
* @param cli a reference to the CLI Parser that contains the command line arguments used to set the corresponding settings in * @param cli a reference to the CLI Parser that contains the command line
* the core engine. * arguments used to set the corresponding settings in the core engine.
*
* @throws InvalidSettingException thrown when a user defined properties
* file is unable to be loaded.
*/ */
private void populateSettings(CliParser cli) { private void populateSettings(CliParser cli) throws InvalidSettingException {
final boolean autoUpdate = cli.isAutoUpdate(); final boolean autoUpdate = cli.isAutoUpdate();
final String connectionTimeout = cli.getConnectionTimeout(); final String connectionTimeout = cli.getConnectionTimeout();
final String proxyServer = cli.getProxyServer(); final String proxyServer = cli.getProxyServer();
@@ -268,6 +363,7 @@ public class App {
final String dataDirectory = cli.getDataDirectory(); final String dataDirectory = cli.getDataDirectory();
final File propertiesFile = cli.getPropertiesFile(); final File propertiesFile = cli.getPropertiesFile();
final String suppressionFile = cli.getSuppressionFile(); final String suppressionFile = cli.getSuppressionFile();
final String hintsFile = cli.getHintsFile();
final String nexusUrl = cli.getNexusUrl(); final String nexusUrl = cli.getNexusUrl();
final String databaseDriverName = cli.getDatabaseDriverName(); final String databaseDriverName = cli.getDatabaseDriverName();
final String databaseDriverPath = cli.getDatabaseDriverPath(); final String databaseDriverPath = cli.getDatabaseDriverPath();
@@ -281,16 +377,15 @@ public class App {
final String cveBase12 = cli.getBaseCve12Url(); final String cveBase12 = cli.getBaseCve12Url();
final String cveBase20 = cli.getBaseCve20Url(); final String cveBase20 = cli.getBaseCve20Url();
final Integer cveValidForHours = cli.getCveValidForHours(); final Integer cveValidForHours = cli.getCveValidForHours();
final boolean experimentalEnabled = cli.isExperimentalEnabled();
if (propertiesFile != null) { if (propertiesFile != null) {
try { try {
Settings.mergeProperties(propertiesFile); Settings.mergeProperties(propertiesFile);
} catch (FileNotFoundException ex) { } catch (FileNotFoundException ex) {
LOGGER.error("Unable to load properties file '{}'", propertiesFile.getPath()); throw new InvalidSettingException("Unable to find properties file '" + propertiesFile.getPath() + "'", ex);
LOGGER.debug("", ex);
} catch (IOException ex) { } catch (IOException ex) {
LOGGER.error("Unable to find properties file '{}'", propertiesFile.getPath()); throw new InvalidSettingException("Error reading properties file '" + propertiesFile.getPath() + "'", ex);
LOGGER.debug("", ex);
} }
} }
// We have to wait until we've merged the properties before attempting to set whether we use // We have to wait until we've merged the properties before attempting to set whether we use
@@ -316,9 +411,11 @@ public class App {
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPass); Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPass);
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout); Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile); Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
Settings.setIntIfNotNull(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours); Settings.setIntIfNotNull(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
//File Type Analyzer Settings //File Type Analyzer Settings
Settings.setBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, experimentalEnabled);
Settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED, !cli.isJarDisabled()); Settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED, !cli.isJarDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, !cli.isArchiveDisabled()); Settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, !cli.isArchiveDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, !cli.isPythonDistributionDisabled()); Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, !cli.isPythonDistributionDisabled());
@@ -331,6 +428,8 @@ public class App {
Settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, !cli.isOpenSSLDisabled()); Settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, !cli.isOpenSSLDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, !cli.isComposerDisabled()); Settings.setBoolean(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, !cli.isComposerDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, !cli.isNodeJsDisabled()); Settings.setBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, !cli.isNodeJsDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, !cli.isSwiftPackageAnalyzerDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, !cli.isCocoapodsAnalyzerDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, !cli.isRubyGemspecDisabled()); Settings.setBoolean(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, !cli.isRubyGemspecDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, !cli.isCentralDisabled()); Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, !cli.isCentralDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, !cli.isNexusDisabled()); Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, !cli.isNexusDisabled());
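populateSettings merges the optional properties file first and only afterwards applies the individual CLI values, so a flag that was actually supplied wins over the file. The sketch below illustrates that precedence using the Settings calls shown above; the temporary file and the `demo.only.key` property are purely illustrative, and the Settings class from dependency-check-utils is assumed to be on the classpath:

```java
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import org.owasp.dependencycheck.utils.Settings;

/**
 * Sketch of the precedence used by populateSettings: file values are merged
 * first, then non-empty CLI values overwrite them.
 */
public class SettingsPrecedenceDemo {

    public static void main(String[] args) throws IOException {
        Settings.initialize();
        try {
            final File props = File.createTempFile("dc-demo", ".properties");
            try (FileWriter out = new FileWriter(props)) {
                out.write("demo.only.key=value-from-file\n"); // hypothetical property name
            }
            Settings.mergeProperties(props);                                 // merged first
            Settings.setStringIfNotEmpty("demo.only.key", "value-from-cli"); // CLI value wins
            System.out.println(Settings.getString("demo.only.key", ""));     // value-from-cli
        } finally {
            Settings.cleanup(true);
        }
    }
}
```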
@@ -366,7 +465,7 @@ public class App {
encoder.setPattern("%d %C:%L%n%-5level - %msg%n"); encoder.setPattern("%d %C:%L%n%-5level - %msg%n");
encoder.setContext(context); encoder.setContext(context);
encoder.start(); encoder.start();
final FileAppender fa = new FileAppender(); final FileAppender<ILoggingEvent> fa = new FileAppender<>();
fa.setAppend(true); fa.setAppend(true);
fa.setEncoder(encoder); fa.setEncoder(encoder);
fa.setContext(context); fa.setContext(context);
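The verbose-log hunk above only shows part of the appender wiring. Below is a self-contained sketch of the same programmatic logback setup, attaching the now-parameterized `FileAppender<ILoggingEvent>` to the root logger; the log file name is a placeholder, the encoder type is assumed to be `PatternLayoutEncoder`, and logback-classic is assumed to be the bound SLF4J implementation, as in the CLI:

```java
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.FileAppender;
import org.slf4j.LoggerFactory;
import org.slf4j.impl.StaticLoggerBinder;

/**
 * Minimal programmatic verbose-log setup mirroring the wiring above.
 */
public class VerboseLogDemo {

    public static void main(String[] args) {
        final LoggerContext context = (LoggerContext) StaticLoggerBinder.getSingleton().getLoggerFactory();

        final PatternLayoutEncoder encoder = new PatternLayoutEncoder();
        encoder.setPattern("%d %C:%L%n%-5level - %msg%n"); // same pattern the CLI configures
        encoder.setContext(context);
        encoder.start();

        final FileAppender<ILoggingEvent> fa = new FileAppender<>();
        fa.setAppend(true);
        fa.setEncoder(encoder);
        fa.setContext(context);
        fa.setFile("verbose-demo.log"); // placeholder file name
        fa.start();

        final Logger root = context.getLogger(Logger.ROOT_LOGGER_NAME);
        root.setLevel(Level.DEBUG);
        root.addAppender(fa);

        LoggerFactory.getLogger(VerboseLogDemo.class).debug("verbose logging enabled");
    }
}
```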
@@ -384,15 +483,16 @@ public class App {
} }
/** /**
* Takes a path and resolves it to be a canonical &amp; absolute path. The caveats are that this method will take an Ant style * Takes a path and resolves it to be a canonical &amp; absolute path. The
* file selector path (../someDir/**\/*.jar) and convert it to an absolute/canonical path (at least to the left of the first * * caveats are that this method will take an Ant style file selector path
* or ?). * (../someDir/**\/*.jar) and convert it to an absolute/canonical path (at
* least to the left of the first * or ?).
* *
* @param path the path to canonicalize * @param path the path to canonicalize
* @return the canonical path * @return the canonical path
*/ */
protected String ensureCanonicalPath(String path) { protected String ensureCanonicalPath(String path) {
String basePath = null; String basePath;
String wildCards = null; String wildCards = null;
final String file = path.replace('\\', '/'); final String file = path.replace('\\', '/');
if (file.contains("*") || file.contains("?")) { if (file.contains("*") || file.contains("?")) {


@@ -58,7 +58,8 @@ public final class CliParser {
* Parses the arguments passed in and captures the results for later use. * Parses the arguments passed in and captures the results for later use.
* *
* @param args the command line arguments * @param args the command line arguments
* @throws FileNotFoundException is thrown when a 'file' argument does not point to a file that exists. * @throws FileNotFoundException is thrown when a 'file' argument does not
* point to a file that exists.
* @throws ParseException is thrown when a Parse Exception occurs. * @throws ParseException is thrown when a Parse Exception occurs.
*/ */
public void parse(String[] args) throws FileNotFoundException, ParseException { public void parse(String[] args) throws FileNotFoundException, ParseException {
@@ -85,9 +86,10 @@ public final class CliParser {
/** /**
* Validates that the command line arguments are valid. * Validates that the command line arguments are valid.
* *
* @throws FileNotFoundException if there is a file specified by either the SCAN or CPE command line arguments that does not * @throws FileNotFoundException if there is a file specified by either the
* exist. * SCAN or CPE command line arguments that does not exist.
* @throws ParseException is thrown if there is an exception parsing the command line. * @throws ParseException is thrown if there is an exception parsing the
* command line.
*/ */
private void validateArgs() throws FileNotFoundException, ParseException { private void validateArgs() throws FileNotFoundException, ParseException {
if (isUpdateOnly() || isRunScan()) { if (isUpdateOnly() || isRunScan()) {
@@ -141,12 +143,14 @@ public final class CliParser {
} }
/** /**
* Validates whether or not the path(s) points at a file that exists; if the path(s) does not point to an existing file a * Validates whether or not the path(s) points at a file that exists; if the
* FileNotFoundException is thrown. * path(s) does not point to an existing file a FileNotFoundException is
* thrown.
* *
* @param paths the paths to validate if they exist * @param paths the paths to validate if they exist
* @param optType the option being validated (e.g. scan, out, etc.) * @param optType the option being validated (e.g. scan, out, etc.)
* @throws FileNotFoundException is thrown if one of the paths being validated does not exist. * @throws FileNotFoundException is thrown if one of the paths being
* validated does not exist.
*/ */
private void validatePathExists(String[] paths, String optType) throws FileNotFoundException { private void validatePathExists(String[] paths, String optType) throws FileNotFoundException {
for (String path : paths) { for (String path : paths) {
@@ -155,12 +159,14 @@ public final class CliParser {
} }
/** /**
* Validates whether or not the path points at a file that exists; if the path does not point to an existing file a * Validates whether or not the path points at a file that exists; if the
* FileNotFoundException is thrown. * path does not point to an existing file a FileNotFoundException is
* thrown.
* *
* @param path the path to validate if it exists * @param path the path to validate if it exists
* @param argumentName the argument being validated (e.g. scan, out, etc.) * @param argumentName the argument being validated (e.g. scan, out, etc.)
* @throws FileNotFoundException is thrown if the path being validated does not exist. * @throws FileNotFoundException is thrown if the path being validated does
* not exist.
*/ */
private void validatePathExists(String path, String argumentName) throws FileNotFoundException { private void validatePathExists(String path, String argumentName) throws FileNotFoundException {
if (path == null) { if (path == null) {
@@ -181,22 +187,25 @@ public final class CliParser {
throw new FileNotFoundException(msg); throw new FileNotFoundException(msg);
} }
} }
} else { } else if (!f.exists()) {
if (!f.exists()) { isValid = false;
isValid = false; final String msg = String.format("Invalid '%s' argument: '%s'", argumentName, path);
final String msg = String.format("Invalid '%s' argument: '%s'", argumentName, path); throw new FileNotFoundException(msg);
throw new FileNotFoundException(msg);
}
} }
} else if (path.startsWith("//") || path.startsWith("\\\\")) { } else if (path.startsWith("//") || path.startsWith("\\\\")) {
isValid = false; isValid = false;
final String msg = String.format("Invalid '%s' argument: '%s'%nUnable to scan paths that start with '//'.", argumentName, path); final String msg = String.format("Invalid '%s' argument: '%s'%nUnable to scan paths that start with '//'.", argumentName, path);
throw new FileNotFoundException(msg); throw new FileNotFoundException(msg);
} else if ((path.endsWith("/*") && !path.endsWith("**/*")) || (path.endsWith("\\*") && path.endsWith("**\\*"))) {
final String msg = String.format("Possibly incorrect path '%s' from argument '%s' because it ends with a slash star; "
+ "dependency-check uses ant-style paths", path, argumentName);
LOGGER.warn(msg);
} }
} }
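The new warning above exists because Ant-style selectors treat `*` as a single path segment: `lib/*` only matches files directly under `lib`, while `lib/**/*` recurses. A quick DirectoryScanner sketch showing the difference; the `lib` directory layout is hypothetical:

```java
import org.apache.tools.ant.DirectoryScanner;

/**
 * Shows why a trailing "/*" is usually not what the user meant: with Ant-style
 * selectors "lib/*" matches only the top level of lib, while "lib/**/*" also
 * matches files in nested directories.
 */
public class AntIncludeDemo {

    private static int count(String include) {
        final DirectoryScanner scanner = new DirectoryScanner();
        scanner.setBasedir(".");
        scanner.setIncludes(new String[]{include});
        scanner.scan();
        return scanner.getIncludedFiles().length;
    }

    public static void main(String[] args) {
        System.out.println("lib/*    -> " + count("lib/*") + " files");
        System.out.println("lib/**/* -> " + count("lib/**/*") + " files");
    }
}
```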
/** /**
* Generates an Options collection that is used to parse the command line and to display the help message. * Generates an Options collection that is used to parse the command line
* and to display the help message.
* *
* @return the command line options used for parsing the command line * @return the command line options used for parsing the command line
*/ */
@@ -240,7 +249,7 @@ public final class CliParser {
final Option excludes = Option.builder().argName("pattern").hasArg().longOpt(ARGUMENT.EXCLUDE) final Option excludes = Option.builder().argName("pattern").hasArg().longOpt(ARGUMENT.EXCLUDE)
.desc("Specify and exclusion pattern. This option can be specified multiple times" .desc("Specify and exclusion pattern. This option can be specified multiple times"
+ " and it accepts Ant style excludsions.") + " and it accepts Ant style exclusions.")
.build(); .build();
final Option props = Option.builder(ARGUMENT.PROP_SHORT).argName("file").hasArg().longOpt(ARGUMENT.PROP) final Option props = Option.builder(ARGUMENT.PROP_SHORT).argName("file").hasArg().longOpt(ARGUMENT.PROP)
@@ -268,10 +277,23 @@ public final class CliParser {
.desc("The file path to the suppression XML file.") .desc("The file path to the suppression XML file.")
.build(); .build();
final Option hintsFile = Option.builder().argName("file").hasArg().longOpt(ARGUMENT.HINTS_FILE)
.desc("The file path to the hints XML file.")
.build();
final Option cveValidForHours = Option.builder().argName("hours").hasArg().longOpt(ARGUMENT.CVE_VALID_FOR_HOURS) final Option cveValidForHours = Option.builder().argName("hours").hasArg().longOpt(ARGUMENT.CVE_VALID_FOR_HOURS)
.desc("The number of hours to wait before checking for new updates from the NVD.") .desc("The number of hours to wait before checking for new updates from the NVD.")
.build(); .build();
final Option experimentalEnabled = Option.builder().longOpt(ARGUMENT.EXPERIMENTAL)
.desc("Enables the experimental analyzers.")
.build();
final Option failOnCVSS = Option.builder().argName("score").hasArg().longOpt(ARGUMENT.FAIL_ON_CVSS)
.desc("Specifies if the build should be failed if a CVSS score above a specified level is identified. "
+ "The default is 11; since the CVSS scores are 0-10, by default the build will never fail.")
.build();
//This is an option group because it can be specified more than once. //This is an option group because it can be specified more than once.
final OptionGroup og = new OptionGroup(); final OptionGroup og = new OptionGroup();
og.addOption(path); og.addOption(path);
@@ -292,12 +314,16 @@ public final class CliParser {
.addOption(props) .addOption(props)
.addOption(verboseLog) .addOption(verboseLog)
.addOption(suppressionFile) .addOption(suppressionFile)
.addOption(cveValidForHours); .addOption(hintsFile)
.addOption(cveValidForHours)
.addOption(experimentalEnabled)
.addOption(failOnCVSS);
} }
/** /**
* Adds the advanced command line options to the given options collection. These are split out for purposes of being able to * Adds the advanced command line options to the given options collection.
* display two different help messages. * These are split out for purposes of being able to display two different
* help messages.
* *
* @param options a collection of command line arguments * @param options a collection of command line arguments
* @throws IllegalArgumentException thrown if there is an exception * @throws IllegalArgumentException thrown if there is an exception
@@ -344,7 +370,7 @@ public final class CliParser {
final Option pathToMono = Option.builder().argName("path").hasArg().longOpt(ARGUMENT.PATH_TO_MONO) final Option pathToMono = Option.builder().argName("path").hasArg().longOpt(ARGUMENT.PATH_TO_MONO)
.desc("The path to Mono for .NET Assembly analysis on non-windows systems.") .desc("The path to Mono for .NET Assembly analysis on non-windows systems.")
.build(); .build();
final Option pathToBundleAudit = Option.builder().argName("path").hasArg() final Option pathToBundleAudit = Option.builder().argName("path").hasArg()
.longOpt(ARGUMENT.PATH_TO_BUNDLE_AUDIT) .longOpt(ARGUMENT.PATH_TO_BUNDLE_AUDIT)
.desc("The path to bundle-audit for Gem bundle analysis.").build(); .desc("The path to bundle-audit for Gem bundle analysis.").build();
@@ -411,6 +437,11 @@ public final class CliParser {
final Option disableCmakeAnalyzer = Option.builder().longOpt(ARGUMENT.DISABLE_CMAKE) final Option disableCmakeAnalyzer = Option.builder().longOpt(ARGUMENT.DISABLE_CMAKE)
.desc("Disable the Cmake Analyzer.").build(); .desc("Disable the Cmake Analyzer.").build();
final Option cocoapodsAnalyzerEnabled = Option.builder().longOpt(ARGUMENT.DISABLE_COCOAPODS)
.desc("Disable the CocoaPods Analyzer.").build();
final Option swiftPackageManagerAnalyzerEnabled = Option.builder().longOpt(ARGUMENT.DISABLE_SWIFT)
.desc("Disable the swift package Analyzer.").build();
final Option disableCentralAnalyzer = Option.builder().longOpt(ARGUMENT.DISABLE_CENTRAL) final Option disableCentralAnalyzer = Option.builder().longOpt(ARGUMENT.DISABLE_CENTRAL)
.desc("Disable the Central Analyzer. If this analyzer is disabled it is likely you also want to disable " .desc("Disable the Central Analyzer. If this analyzer is disabled it is likely you also want to disable "
+ "the Nexus Analyzer.").build(); + "the Nexus Analyzer.").build();
@@ -455,6 +486,8 @@ public final class CliParser {
.addOption(disableNuspecAnalyzer) .addOption(disableNuspecAnalyzer)
.addOption(disableCentralAnalyzer) .addOption(disableCentralAnalyzer)
.addOption(disableNexusAnalyzer) .addOption(disableNexusAnalyzer)
.addOption(cocoapodsAnalyzerEnabled)
.addOption(swiftPackageManagerAnalyzerEnabled)
.addOption(Option.builder().longOpt(ARGUMENT.DISABLE_NODE_JS) .addOption(Option.builder().longOpt(ARGUMENT.DISABLE_NODE_JS)
.desc("Disable the Node.js Package Analyzer.").build()) .desc("Disable the Node.js Package Analyzer.").build())
.addOption(nexusUrl) .addOption(nexusUrl)
@@ -466,8 +499,10 @@ public final class CliParser {
} }
/** /**
* Adds the deprecated command line options to the given options collection. These are split out for purposes of not including * Adds the deprecated command line options to the given options collection.
* them in the help message. We need to add the deprecated options so as not to break existing scripts. * These are split out for purposes of not including them in the help
* message. We need to add the deprecated options so as not to break
* existing scripts.
* *
* @param options a collection of command line arguments * @param options a collection of command line arguments
* @throws IllegalArgumentException thrown if there is an exception * @throws IllegalArgumentException thrown if there is an exception
@@ -514,7 +549,8 @@ public final class CliParser {
} }
/** /**
* Returns the symbolic link depth (how deeply symbolic links will be followed). * Returns the symbolic link depth (how deeply symbolic links will be
* followed).
* *
* @return the symbolic link depth * @return the symbolic link depth
*/ */
@@ -534,7 +570,8 @@ public final class CliParser {
/** /**
* Returns true if the disableJar command line argument was specified. * Returns true if the disableJar command line argument was specified.
* *
* @return true if the disableJar command line argument was specified; otherwise false * @return true if the disableJar command line argument was specified;
* otherwise false
*/ */
public boolean isJarDisabled() { public boolean isJarDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_JAR); return (line != null) && line.hasOption(ARGUMENT.DISABLE_JAR);
@@ -543,7 +580,8 @@ public final class CliParser {
/** /**
* Returns true if the disableArchive command line argument was specified. * Returns true if the disableArchive command line argument was specified.
* *
* @return true if the disableArchive command line argument was specified; otherwise false * @return true if the disableArchive command line argument was specified;
* otherwise false
*/ */
public boolean isArchiveDisabled() { public boolean isArchiveDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_ARCHIVE); return (line != null) && line.hasOption(ARGUMENT.DISABLE_ARCHIVE);
@@ -552,7 +590,8 @@ public final class CliParser {
/** /**
* Returns true if the disableNuspec command line argument was specified. * Returns true if the disableNuspec command line argument was specified.
* *
* @return true if the disableNuspec command line argument was specified; otherwise false * @return true if the disableNuspec command line argument was specified;
* otherwise false
*/ */
public boolean isNuspecDisabled() { public boolean isNuspecDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_NUSPEC); return (line != null) && line.hasOption(ARGUMENT.DISABLE_NUSPEC);
@@ -561,26 +600,29 @@ public final class CliParser {
/** /**
* Returns true if the disableAssembly command line argument was specified. * Returns true if the disableAssembly command line argument was specified.
* *
* @return true if the disableAssembly command line argument was specified; otherwise false * @return true if the disableAssembly command line argument was specified;
* otherwise false
*/ */
public boolean isAssemblyDisabled() { public boolean isAssemblyDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_ASSEMBLY); return (line != null) && line.hasOption(ARGUMENT.DISABLE_ASSEMBLY);
} }
/** /**
* Returns true if the disableBundleAudit command line argument was specified. * Returns true if the disableBundleAudit command line argument was
* specified.
* *
* @return true if the disableBundleAudit command line argument was specified; otherwise false * @return true if the disableBundleAudit command line argument was
* specified; otherwise false
*/ */
public boolean isBundleAuditDisabled() { public boolean isBundleAuditDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_BUNDLE_AUDIT); return (line != null) && line.hasOption(ARGUMENT.DISABLE_BUNDLE_AUDIT);
} }
/** /**
* Returns true if the disablePyDist command line argument was specified. * Returns true if the disablePyDist command line argument was specified.
* *
* @return true if the disablePyDist command line argument was specified; otherwise false * @return true if the disablePyDist command line argument was specified;
* otherwise false
*/ */
public boolean isPythonDistributionDisabled() { public boolean isPythonDistributionDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_PY_DIST); return (line != null) && line.hasOption(ARGUMENT.DISABLE_PY_DIST);
@@ -589,7 +631,8 @@ public final class CliParser {
/** /**
* Returns true if the disablePyPkg command line argument was specified. * Returns true if the disablePyPkg command line argument was specified.
* *
* @return true if the disablePyPkg command line argument was specified; otherwise false * @return true if the disablePyPkg command line argument was specified;
* otherwise false
*/ */
public boolean isPythonPackageDisabled() { public boolean isPythonPackageDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_PY_PKG); return (line != null) && line.hasOption(ARGUMENT.DISABLE_PY_PKG);
@@ -598,7 +641,8 @@ public final class CliParser {
/** /**
* Returns whether the Ruby gemspec analyzer is disabled. * Returns whether the Ruby gemspec analyzer is disabled.
* *
* @return true if the {@link ARGUMENT#DISABLE_RUBYGEMS} command line argument was specified; otherwise false * @return true if the {@link ARGUMENT#DISABLE_RUBYGEMS} command line
* argument was specified; otherwise false
*/ */
public boolean isRubyGemspecDisabled() { public boolean isRubyGemspecDisabled() {
return (null != line) && line.hasOption(ARGUMENT.DISABLE_RUBYGEMS); return (null != line) && line.hasOption(ARGUMENT.DISABLE_RUBYGEMS);
@@ -607,7 +651,8 @@ public final class CliParser {
/** /**
* Returns true if the disableCmake command line argument was specified. * Returns true if the disableCmake command line argument was specified.
* *
* @return true if the disableCmake command line argument was specified; otherwise false * @return true if the disableCmake command line argument was specified;
* otherwise false
*/ */
public boolean isCmakeDisabled() { public boolean isCmakeDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_CMAKE); return (line != null) && line.hasOption(ARGUMENT.DISABLE_CMAKE);
@@ -616,7 +661,8 @@ public final class CliParser {
/** /**
* Returns true if the disableAutoconf command line argument was specified. * Returns true if the disableAutoconf command line argument was specified.
* *
* @return true if the disableAutoconf command line argument was specified; otherwise false * @return true if the disableAutoconf command line argument was specified;
* otherwise false
*/ */
public boolean isAutoconfDisabled() { public boolean isAutoconfDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_AUTOCONF); return (line != null) && line.hasOption(ARGUMENT.DISABLE_AUTOCONF);
@@ -625,7 +671,8 @@ public final class CliParser {
/** /**
* Returns true if the disableComposer command line argument was specified. * Returns true if the disableComposer command line argument was specified.
* *
* @return true if the disableComposer command line argument was specified; otherwise false * @return true if the disableComposer command line argument was specified;
* otherwise false
*/ */
public boolean isComposerDisabled() { public boolean isComposerDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_COMPOSER); return (line != null) && line.hasOption(ARGUMENT.DISABLE_COMPOSER);
@@ -634,7 +681,8 @@ public final class CliParser {
/** /**
* Returns true if the disableNexus command line argument was specified. * Returns true if the disableNexus command line argument was specified.
* *
* @return true if the disableNexus command line argument was specified; otherwise false * @return true if the disableNexus command line argument was specified;
* otherwise false
*/ */
public boolean isNexusDisabled() { public boolean isNexusDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_NEXUS); return (line != null) && line.hasOption(ARGUMENT.DISABLE_NEXUS);
@@ -643,7 +691,8 @@ public final class CliParser {
/** /**
* Returns true if the disableOpenSSL command line argument was specified. * Returns true if the disableOpenSSL command line argument was specified.
* *
* @return true if the disableOpenSSL command line argument was specified; otherwise false * @return true if the disableOpenSSL command line argument was specified;
* otherwise false
*/ */
public boolean isOpenSSLDisabled() { public boolean isOpenSSLDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_OPENSSL); return (line != null) && line.hasOption(ARGUMENT.DISABLE_OPENSSL);
@@ -652,16 +701,40 @@ public final class CliParser {
/** /**
* Returns true if the disableNodeJS command line argument was specified. * Returns true if the disableNodeJS command line argument was specified.
* *
* @return true if the disableNodeJS command line argument was specified; otherwise false * @return true if the disableNodeJS command line argument was specified;
* otherwise false
*/ */
public boolean isNodeJsDisabled() { public boolean isNodeJsDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_NODE_JS); return (line != null) && line.hasOption(ARGUMENT.DISABLE_NODE_JS);
} }
/**
* Returns true if the disableCocoapodsAnalyzer command line argument was
* specified.
*
* @return true if the disableCocoapodsAnalyzer command line argument was
* specified; otherwise false
*/
public boolean isCocoapodsAnalyzerDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_COCOAPODS);
}
/**
* Returns true if the disableSwiftPackageManagerAnalyzer command line
* argument was specified.
*
* @return true if the disableSwiftPackageManagerAnalyzer command line
* argument was specified; otherwise false
*/
public boolean isSwiftPackageAnalyzerDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_SWIFT);
}
/** /**
* Returns true if the disableCentral command line argument was specified. * Returns true if the disableCentral command line argument was specified.
* *
* @return true if the disableCentral command line argument was specified; otherwise false * @return true if the disableCentral command line argument was specified;
* otherwise false
*/ */
public boolean isCentralDisabled() { public boolean isCentralDisabled() {
return (line != null) && line.hasOption(ARGUMENT.DISABLE_CENTRAL); return (line != null) && line.hasOption(ARGUMENT.DISABLE_CENTRAL);
@@ -670,7 +743,8 @@ public final class CliParser {
/** /**
* Returns the url to the nexus server if one was specified. * Returns the url to the nexus server if one was specified.
* *
* @return the url to the nexus server; if none was specified this will return null; * @return the url to the nexus server; if none was specified this will
* return null;
*/ */
public String getNexusUrl() { public String getNexusUrl() {
if (line == null || !line.hasOption(ARGUMENT.NEXUS_URL)) { if (line == null || !line.hasOption(ARGUMENT.NEXUS_URL)) {
@@ -681,9 +755,11 @@ public final class CliParser {
} }
/** /**
* Returns true if the Nexus Analyzer should use the configured proxy to connect to Nexus; otherwise false is returned. * Returns true if the Nexus Analyzer should use the configured proxy to
* connect to Nexus; otherwise false is returned.
* *
* @return true if the Nexus Analyzer should use the configured proxy to connect to Nexus; otherwise false * @return true if the Nexus Analyzer should use the configured proxy to
* connect to Nexus; otherwise false
*/ */
public boolean isNexusUsesProxy() { public boolean isNexusUsesProxy() {
// If they didn't specify whether Nexus needs to use the proxy, we should // If they didn't specify whether Nexus needs to use the proxy, we should
@@ -723,7 +799,8 @@ public final class CliParser {
} }
/** /**
* Retrieves the file command line parameter(s) specified for the 'scan' argument. * Retrieves the file command line parameter(s) specified for the 'scan'
* argument.
* *
* @return the file paths specified on the command line for scan * @return the file paths specified on the command line for scan
*/ */
@@ -732,7 +809,8 @@ public final class CliParser {
} }
/** /**
* Retrieves the list of excluded file patterns specified by the 'exclude' argument. * Retrieves the list of excluded file patterns specified by the 'exclude'
* argument.
* *
* @return the excluded file patterns * @return the excluded file patterns
*/ */
@@ -741,7 +819,8 @@ public final class CliParser {
} }
/** /**
* Returns the directory to write the reports to specified on the command line. * Returns the directory to write the reports to specified on the command
* line.
* *
* @return the path to the reports directory. * @return the path to the reports directory.
*/ */
@@ -750,7 +829,8 @@ public final class CliParser {
} }
/** /**
* Returns the path to Mono for .NET Assembly analysis on non-windows systems. * Returns the path to Mono for .NET Assembly analysis on non-windows
* systems.
* *
* @return the path to Mono * @return the path to Mono
*/ */
@@ -768,7 +848,8 @@ public final class CliParser {
} }
/** /**
* Returns the output format specified on the command line. Defaults to HTML if no format was specified. * Returns the output format specified on the command line. Defaults to HTML
* if no format was specified.
* *
* @return the output format name. * @return the output format name.
*/ */
@@ -921,6 +1002,15 @@ public final class CliParser {
return line.getOptionValue(ARGUMENT.SUPPRESSION_FILE); return line.getOptionValue(ARGUMENT.SUPPRESSION_FILE);
} }
/**
* Returns the path to the hints file.
*
* @return the path to the hints file
*/
public String getHintsFile() {
return line.getOptionValue(ARGUMENT.HINTS_FILE);
}
/** /**
* <p> * <p>
* Prints the manifest information to standard output.</p> * Prints the manifest information to standard output.</p>
@@ -929,15 +1019,17 @@ public final class CliParser {
*/ */
public void printVersionInfo() { public void printVersionInfo() {
final String version = String.format("%s version %s", final String version = String.format("%s version %s",
Settings.getString(Settings.KEYS.APPLICATION_VAME, "dependency-check"), Settings.getString(Settings.KEYS.APPLICATION_NAME, "dependency-check"),
Settings.getString(Settings.KEYS.APPLICATION_VERSION, "Unknown")); Settings.getString(Settings.KEYS.APPLICATION_VERSION, "Unknown"));
System.out.println(version); System.out.println(version);
} }
/** /**
* Checks if the auto update feature has been disabled. If it has been disabled via the command line this will return false. * Checks if the auto update feature has been disabled. If it has been
* disabled via the command line this will return false.
* *
* @return <code>true</code> if auto-update is allowed; otherwise <code>false</code> * @return <code>true</code> if auto-update is allowed; otherwise
* <code>false</code>
*/ */
public boolean isAutoUpdate() { public boolean isAutoUpdate() {
return line != null && !line.hasOption(ARGUMENT.DISABLE_AUTO_UPDATE); return line != null && !line.hasOption(ARGUMENT.DISABLE_AUTO_UPDATE);
@@ -946,7 +1038,8 @@ public final class CliParser {
/** /**
* Checks if the update only flag has been set. * Checks if the update only flag has been set.
* *
* @return <code>true</code> if the update only flag has been set; otherwise <code>false</code>. * @return <code>true</code> if the update only flag has been set; otherwise
* <code>false</code>.
*/ */
public boolean isUpdateOnly() { public boolean isUpdateOnly() {
return line != null && line.hasOption(ARGUMENT.UPDATE_ONLY); return line != null && line.hasOption(ARGUMENT.UPDATE_ONLY);
@@ -955,14 +1048,16 @@ public final class CliParser {
/** /**
* Checks if the purge NVD flag has been set. * Checks if the purge NVD flag has been set.
* *
* @return <code>true</code> if the purge nvd flag has been set; otherwise <code>false</code>. * @return <code>true</code> if the purge nvd flag has been set; otherwise
* <code>false</code>.
*/ */
public boolean isPurge() { public boolean isPurge() {
return line != null && line.hasOption(ARGUMENT.PURGE_NVD); return line != null && line.hasOption(ARGUMENT.PURGE_NVD);
} }
/** /**
* Returns the database driver name if specified; otherwise null is returned. * Returns the database driver name if specified; otherwise null is
* returned.
* *
* @return the database driver name if specified; otherwise null is returned * @return the database driver name if specified; otherwise null is returned
*/ */
@@ -971,7 +1066,8 @@ public final class CliParser {
} }
/** /**
* Returns the database driver path if specified; otherwise null is returned. * Returns the database driver path if specified; otherwise null is
* returned.
* *
* @return the database driver path if specified; otherwise null is returned * @return the database driver path if specified; otherwise null is returned
*/ */
@@ -980,34 +1076,41 @@ public final class CliParser {
} }
/** /**
* Returns the database connection string if specified; otherwise null is returned. * Returns the database connection string if specified; otherwise null is
* returned.
* *
* @return the database connection string if specified; otherwise null is returned * @return the database connection string if specified; otherwise null is
* returned
*/ */
public String getConnectionString() { public String getConnectionString() {
return line.getOptionValue(ARGUMENT.CONNECTION_STRING); return line.getOptionValue(ARGUMENT.CONNECTION_STRING);
} }
/** /**
* Returns the database user name if specified; otherwise null is * Returns the database user name if specified; otherwise null is
* returned.
* *
* @return the database user name if specified; otherwise null is * @return the database user name if specified; otherwise null is
* returned
*/ */
public String getDatabaseUser() { public String getDatabaseUser() {
return line.getOptionValue(ARGUMENT.DB_NAME); return line.getOptionValue(ARGUMENT.DB_NAME);
} }
/** /**
* Returns the database password if specified; otherwise null is * Returns the database password if specified; otherwise null is
* returned.
* *
* @return the database password if specified; otherwise null is * @return the database password if specified; otherwise null is
* returned
*/ */
public String getDatabasePassword() { public String getDatabasePassword() {
return line.getOptionValue(ARGUMENT.DB_PASSWORD); return line.getOptionValue(ARGUMENT.DB_PASSWORD);
} }
/** /**
* Returns the additional Extensions if specified; otherwise null is returned. * Returns the additional Extensions if specified; otherwise null is
* returned.
* *
* @return the additional Extensions; otherwise null is returned * @return the additional Extensions; otherwise null is returned
*/ */
@@ -1029,7 +1132,36 @@ public final class CliParser {
} }
/** /**
* A collection of static final strings that represent the possible command line arguments. * Returns true if the experimental analyzers are enabled.
*
* @return true if the experimental analyzers are enabled; otherwise false
*/
public boolean isExperimentalEnabled() {
return line.hasOption(ARGUMENT.EXPERIMENTAL);
}
/**
* Returns the CVSS value to fail on.
*
* @return the CVSS score passed on the command line; 11 if the argument was
* not set or could not be parsed as an integer
*/
public int getFailOnCVSS() {
if (line.hasOption(ARGUMENT.FAIL_ON_CVSS)) {
final String value = line.getOptionValue(ARGUMENT.FAIL_ON_CVSS);
try {
return Integer.parseInt(value);
} catch (NumberFormatException nfe) {
return 11;
}
} else {
return 11;
}
}
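A short sketch of how the new option surfaces through CliParser; the argument values are illustrative, `--scan` must point at an existing path because parse() validates it, and the class is assumed to sit in the same package as CliParser with commons-cli on the classpath:

```java
import java.io.FileNotFoundException;
import org.apache.commons.cli.ParseException;

/**
 * Sketch: reading the new failOnCVSS option through CliParser.
 */
public class FailOnCvssDemo {

    public static void main(String[] args) throws FileNotFoundException, ParseException {
        final CliParser cli = new CliParser();
        // "." exists, so the scan-path validation in parse() passes.
        cli.parse(new String[]{"--scan", ".", "--project", "demo", "--failOnCVSS", "7"});
        System.out.println("fail threshold: " + cli.getFailOnCVSS()); // 7
        // Omitting the flag, or passing a non-numeric value, falls back to 11,
        // which a 0-10 CVSS score can never exceed.
    }
}
```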
/**
* A collection of static final strings that represent the possible command
* line arguments.
*/ */
public static class ARGUMENT { public static class ARGUMENT {
@@ -1042,50 +1174,61 @@ public final class CliParser {
*/ */
public static final String SCAN_SHORT = "s"; public static final String SCAN_SHORT = "s";
/** /**
* The long CLI argument name specifying that the CPE/CVE/etc. data should not be automatically updated. * The long CLI argument name specifying that the CPE/CVE/etc. data
* should not be automatically updated.
*/ */
public static final String DISABLE_AUTO_UPDATE = "noupdate"; public static final String DISABLE_AUTO_UPDATE = "noupdate";
/** /**
* The short CLI argument name specifying that the CPE/CVE/etc. data should not be automatically updated. * The short CLI argument name specifying that the CPE/CVE/etc. data
* should not be automatically updated.
*/ */
public static final String DISABLE_AUTO_UPDATE_SHORT = "n"; public static final String DISABLE_AUTO_UPDATE_SHORT = "n";
/** /**
* The long CLI argument name specifying that only the update phase should be executed; no scan should be run. * The long CLI argument name specifying that only the update phase
* should be executed; no scan should be run.
*/ */
public static final String UPDATE_ONLY = "updateonly"; public static final String UPDATE_ONLY = "updateonly";
/** /**
* The long CLI argument name specifying that only the update phase should be executed; no scan should be run. * The long CLI argument name specifying that only the update phase
* should be executed; no scan should be run.
*/ */
public static final String PURGE_NVD = "purge"; public static final String PURGE_NVD = "purge";
/** /**
* The long CLI argument name specifying the directory to write the reports to. * The long CLI argument name specifying the directory to write the
* reports to.
*/ */
public static final String OUT = "out"; public static final String OUT = "out";
/** /**
* The short CLI argument name specifying the directory to write the reports to. * The short CLI argument name specifying the directory to write the
* reports to.
*/ */
public static final String OUT_SHORT = "o"; public static final String OUT_SHORT = "o";
/** /**
* The long CLI argument name specifying the output format to write the reports to. * The long CLI argument name specifying the output format to write the
* reports to.
*/ */
public static final String OUTPUT_FORMAT = "format"; public static final String OUTPUT_FORMAT = "format";
/** /**
* The short CLI argument name specifying the output format to write the reports to. * The short CLI argument name specifying the output format to write the
* reports to.
*/ */
public static final String OUTPUT_FORMAT_SHORT = "f"; public static final String OUTPUT_FORMAT_SHORT = "f";
/** /**
* The long CLI argument name specifying the name of the project to be scanned. * The long CLI argument name specifying the name of the project to be
* scanned.
*/ */
public static final String PROJECT = "project"; public static final String PROJECT = "project";
/** /**
* The long CLI argument name specifying the name of the application to be scanned. * The long CLI argument name specifying the name of the application to
* be scanned.
* *
* @deprecated project should be used instead * @deprecated project should be used instead
*/ */
@Deprecated @Deprecated
public static final String APP_NAME = "app"; public static final String APP_NAME = "app";
/** /**
* The short CLI argument name specifying the name of the application to be scanned. * The short CLI argument name specifying the name of the application to
* be scanned.
* *
* @deprecated project should be used instead * @deprecated project should be used instead
*/ */
@@ -1143,11 +1286,13 @@ public final class CliParser {
*/ */
public static final String CONNECTION_TIMEOUT = "connectiontimeout"; public static final String CONNECTION_TIMEOUT = "connectiontimeout";
/** /**
* The short CLI argument name for setting the location of an additional properties file. * The short CLI argument name for setting the location of an additional
* properties file.
*/ */
public static final String PROP_SHORT = "P"; public static final String PROP_SHORT = "P";
/** /**
* The CLI argument name for setting the location of an additional properties file. * The CLI argument name for setting the location of an additional
* properties file.
*/ */
public static final String PROP = "propertyfile"; public static final String PROP = "propertyfile";
/** /**
@@ -1171,7 +1316,8 @@ public final class CliParser {
*/ */
public static final String CVE_BASE_20 = "cveUrl20Base"; public static final String CVE_BASE_20 = "cveUrl20Base";
/** /**
* The short CLI argument name for setting the location of the data directory. * The short CLI argument name for setting the location of the data
* directory.
*/ */
public static final String DATA_DIRECTORY_SHORT = "d"; public static final String DATA_DIRECTORY_SHORT = "d";
/** /**
@@ -1179,20 +1325,28 @@ public final class CliParser {
*/ */
public static final String VERBOSE_LOG = "log"; public static final String VERBOSE_LOG = "log";
/** /**
* The short CLI argument name for setting the location of the data directory. * The short CLI argument name for setting the location of the data
* directory.
*/ */
public static final String VERBOSE_LOG_SHORT = "l"; public static final String VERBOSE_LOG_SHORT = "l";
/** /**
* The CLI argument name for setting the depth of symbolic links that will be followed. * The CLI argument name for setting the depth of symbolic links that
* will be followed.
*/ */
public static final String SYM_LINK_DEPTH = "symLink"; public static final String SYM_LINK_DEPTH = "symLink";
/** /**
* The CLI argument name for setting the location of the suppression file. * The CLI argument name for setting the location of the suppression
* file.
*/ */
public static final String SUPPRESSION_FILE = "suppression"; public static final String SUPPRESSION_FILE = "suppression";
/** /**
* The CLI argument name for setting the location of the suppression file. * The CLI argument name for setting the location of the hints file.
*/
public static final String HINTS_FILE = "hints";
/**
* The CLI argument name for setting the number of hours to wait before
* checking for new updates from the NVD.
*/ */
public static final String CVE_VALID_FOR_HOURS = "cveValidForHours"; public static final String CVE_VALID_FOR_HOURS = "cveValidForHours";
/** /**
@@ -1227,6 +1381,14 @@ public final class CliParser {
* Disables the Cmake Analyzer. * Disables the Cmake Analyzer.
*/ */
public static final String DISABLE_CMAKE = "disableCmake"; public static final String DISABLE_CMAKE = "disableCmake";
/**
* Disables the cocoapods analyzer.
*/
public static final String DISABLE_COCOAPODS = "disableCocoapodsAnalyzer";
/**
* Disables the swift package manager analyzer.
*/
public static final String DISABLE_SWIFT = "disableSwiftPackageManagerAnalyzer";
/** /**
* Disables the Assembly Analyzer. * Disables the Assembly Analyzer.
*/ */
@@ -1260,7 +1422,8 @@ public final class CliParser {
*/ */
public static final String NEXUS_URL = "nexus"; public static final String NEXUS_URL = "nexus";
/** /**
* Whether or not the defined proxy should be used when connecting to Nexus. * Whether or not the defined proxy should be used when connecting to
* Nexus.
*/ */
public static final String NEXUS_USES_PROXY = "nexusUsesProxy"; public static final String NEXUS_USES_PROXY = "nexusUsesProxy";
/** /**
@@ -1280,11 +1443,13 @@ public final class CliParser {
*/ */
public static final String DB_DRIVER = "dbDriverName"; public static final String DB_DRIVER = "dbDriverName";
/** /**
* The CLI argument name for setting the path to the database driver; in case it is not on the class path. * The CLI argument name for setting the path to the database driver; in
* case it is not on the class path.
*/ */
public static final String DB_DRIVER_PATH = "dbDriverPath"; public static final String DB_DRIVER_PATH = "dbDriverPath";
/** /**
* The CLI argument name for setting the path to mono for .NET Assembly analysis on non-windows systems. * The CLI argument name for setting the path to mono for .NET Assembly
* analysis on non-windows systems.
*/ */
public static final String PATH_TO_MONO = "mono"; public static final String PATH_TO_MONO = "mono";
/** /**
@@ -1296,8 +1461,17 @@ public final class CliParser {
*/ */
public static final String EXCLUDE = "exclude"; public static final String EXCLUDE = "exclude";
/** /**
* The CLI argument name for setting the path to bundle-audit for Ruby bundle analysis. * The CLI argument name for setting the path to bundle-audit for Ruby
* bundle analysis.
*/ */
public static final String PATH_TO_BUNDLE_AUDIT = "bundleAudit"; public static final String PATH_TO_BUNDLE_AUDIT = "bundleAudit";
/**
* The CLI argument to enable the experimental analyzers.
*/
private static final String EXPERIMENTAL = "enableExperimental";
/**
* The CLI argument to set the CVSS score threshold used to fail the build.
*/
private static final String FAIL_ON_CVSS = "failOnCVSS";
} }
} }


@@ -11,6 +11,7 @@ Short | Argument&nbsp;Name&nbsp;&nbsp; | Parameter | Description | Requir
| \-\-symLink | \<depth\> | The depth that symbolic links will be followed; the default is 0 meaning symbolic links will not be followed. | Optional | \-\-symLink | \<depth\> | The depth that symbolic links will be followed; the default is 0 meaning symbolic links will not be followed. | Optional
\-o | \-\-out | \<path\> | The folder to write reports to. This defaults to the current directory. If the format is not set to ALL one could specify a specific file name. | Optional \-o | \-\-out | \<path\> | The folder to write reports to. This defaults to the current directory. If the format is not set to ALL one could specify a specific file name. | Optional
\-f | \-\-format | \<format\> | The output format to write to (XML, HTML, VULN, ALL). The default is HTML. | Required \-f | \-\-format | \<format\> | The output format to write to (XML, HTML, VULN, ALL). The default is HTML. | Required
| \-\-failOnCVSS | \<score\> | If a score between 0 and 10 is set, the exit code from dependency-check will indicate whether a vulnerability with a CVSS score equal to or higher than the specified value was identified. | Optional
\-l | \-\-log | \<file\> | The file path to write verbose logging information. | Optional \-l | \-\-log | \<file\> | The file path to write verbose logging information. | Optional
\-n | \-\-noupdate | | Disables the automatic updating of the CPE data. | Optional \-n | \-\-noupdate | | Disables the automatic updating of the CPE data. | Optional
| \-\-suppression | \<file\> | The file path to the suppression XML file; used to suppress [false positives](../general/suppression.html). | Optional | \-\-suppression | \<file\> | The file path to the suppression XML file; used to suppress [false positives](../general/suppression.html). | Optional
@@ -18,7 +19,7 @@ Short | Argument&nbsp;Name&nbsp;&nbsp; | Parameter | Description | Requir
| \-\-advancedHelp | | Print the advanced help message. | Optional | \-\-advancedHelp | | Print the advanced help message. | Optional
\-v | \-\-version | | Print the version information. | Optional \-v | \-\-version | | Print the version information. | Optional
| \-\-cveValidForHours | \<hours\> | The number of hours to wait before checking for new updates from the NVD. The default is 4 hours. | Optional | \-\-cveValidForHours | \<hours\> | The number of hours to wait before checking for new updates from the NVD. The default is 4 hours. | Optional
| \-\-experimental | | Enable the [experimental analyzers](../analyzers/index.html). If not set the analyzers marked as experimental below will not be loaded or used. | Optional
Advanced Options Advanced Options
================ ================
@@ -30,18 +31,20 @@ Short | Argument&nbsp;Name&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; | Paramete
| \-\-cveUrl20Base | \<url\> | Base URL for each year's CVE 2.0, the %d will be replaced with the year | https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml.gz | \-\-cveUrl20Base | \<url\> | Base URL for each year's CVE 2.0, the %d will be replaced with the year | https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml.gz
\-P | \-\-propertyfile | \<file\> | Specifies a file that contains properties to use instead of application defaults. | &nbsp; \-P | \-\-propertyfile | \<file\> | Specifies a file that contains properties to use instead of application defaults. | &nbsp;
| \-\-updateonly | | If set only the update phase of dependency-check will be executed; no scan will be executed and no report will be generated. | &nbsp; | \-\-updateonly | | If set only the update phase of dependency-check will be executed; no scan will be executed and no report will be generated. | &nbsp;
| \-\-disablePyDist | | Sets whether the Python Distribution Analyzer will be used. | false | \-\-disablePyDist | | Sets whether the [experimental](../analyzers/index.html) Python Distribution Analyzer will be used. | false
| \-\-disablePyPkg | | Sets whether the Python Package Analyzer will be used. | false | \-\-disablePyPkg | | Sets whether the [experimental](../analyzers/index.html) Python Package Analyzer will be used. | false
| \-\-disableNodeJS | | Sets whether the Node.js Package Analyzer will be used. | false | \-\-disableNodeJS | | Sets whether the [experimental](../analyzers/index.html) Node.js Package Analyzer will be used. | false
| \-\-disableRubygems | | Sets whether the Ruby Gemspec Analyzer will be used. | false | \-\-disableRubygems | | Sets whether the [experimental](../analyzers/index.html) Ruby Gemspec Analyzer will be used. | false
| \-\-disableBundleAudit | | Sets whether the Ruby Bundler Audit Analyzer will be used. | false | \-\-disableBundleAudit | | Sets whether the [experimental](../analyzers/index.html) Ruby Bundler Audit Analyzer will be used. | false
| \-\-disableAutoconf | | Sets whether the Autoconf Analyzer will be used. | false | \-\-disableCocoapodsAnalyzer | | Sets whether the [experimental](../analyzers/index.html) Cocoapods Analyzer will be used. | false
| \-\-disableOpenSSL | | Sets whether the OpenSSL Analyzer will be used. | false | \-\-disableSwiftPackageManagerAnalyzer | | Sets whether the [experimental](../analyzers/index.html) Swift Package Manager Analyzer will be used. | false
| \-\-disableCmake | | Sets whether the Cmake Analyzer will be disabled. | false | \-\-disableAutoconf | | Sets whether the [experimental](../analyzers/index.html) Autoconf Analyzer will be used. | false
| \-\-disableArchive | | Sets whether the Archive Analyzer will be disabled. | false | \-\-disableOpenSSL | | Sets whether the OpenSSL Analyzer will be used. | false
| \-\-disableCmake | | Sets whether the [experimental](../analyzers/index.html) Cmake Analyzer will be disabled. | false
| \-\-disableArchive | | Sets whether the Archive Analyzer will be disabled. | false
| \-\-zipExtensions | \<strings\> | A comma-separated list of additional file extensions to be treated like a ZIP file; the contents will be extracted and analyzed. | &nbsp; | \-\-zipExtensions | \<strings\> | A comma-separated list of additional file extensions to be treated like a ZIP file; the contents will be extracted and analyzed. | &nbsp;
| \-\-disableJar | | Sets whether the Jar Analyzer will be disabled. | false | \-\-disableJar | | Sets whether the Jar Analyzer will be disabled. | false
| \-\-disableComposer | | Sets whether the PHP Composer Lock File Analyzer will be disabled. | false | \-\-disableComposer | | Sets whether the [experimental](../analyzers/index.html) PHP Composer Lock File Analyzer will be disabled. | false
| \-\-disableCentral | | Sets whether the Central Analyzer will be used. **Disabling this analyzer is not recommended as it could lead to false negatives (e.g. libraries that have vulnerabilities may not be reported correctly).** If this analyzer is being disabled there is a good chance you also want to disable the Nexus Analyzer. | false | \-\-disableCentral | | Sets whether the Central Analyzer will be used. **Disabling this analyzer is not recommended as it could lead to false negatives (e.g. libraries that have vulnerabilities may not be reported correctly).** If this analyzer is being disabled there is a good chance you also want to disable the Nexus Analyzer. | false
| \-\-disableNexus | | Sets whether the Nexus Analyzer will be used. Note, this has been superseded by the Central Analyzer. However, you can configure the Nexus URL to utilize an internally hosted Nexus Pro server. | false | \-\-disableNexus | | Sets whether the Nexus Analyzer will be used. Note, this has been superseded by the Central Analyzer. However, you can configure the Nexus URL to utilize an internally hosted Nexus Pro server. | false
| \-\-nexus | \<url\> | The url to the Nexus Server's web service end point (example: http://domain.enterprise/nexus/service/local/). If not set the Nexus Analyzer will be disabled. | &nbsp; | \-\-nexus | \<url\> | The url to the Nexus Server's web service end point (example: http://domain.enterprise/nexus/service/local/). If not set the Nexus Analyzer will be disabled. | &nbsp;
@@ -50,7 +53,7 @@ Short | Argument&nbsp;Name&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; | Paramete
| \-\-disableAssembly | | Sets whether or not the .NET Assembly Analyzer should be used. | false | \-\-disableAssembly | | Sets whether or not the .NET Assembly Analyzer should be used. | false
| \-\-mono | \<path\> | The path to Mono for .NET Assembly analysis on non-windows systems. | &nbsp; | \-\-mono | \<path\> | The path to Mono for .NET Assembly analysis on non-windows systems. | &nbsp;
| \-\-bundleAudit | | The path to the bundle-audit executable. | &nbsp; | \-\-bundleAudit | | The path to the bundle-audit executable. | &nbsp;
| \-\-proxyserver | \<server\> | The proxy server to use when downloading resources. | &nbsp; | \-\-proxyserver | \<server\> | The proxy server to use when downloading resources; see the [proxy configuration](../data/proxy.html) page for more information. | &nbsp;
| \-\-proxyport | \<port\> | The proxy port to use when downloading resources. | &nbsp; | \-\-proxyport | \<port\> | The proxy port to use when downloading resources. | &nbsp;
| \-\-connectiontimeout | \<timeout\> | The connection timeout (in milliseconds) to use when downloading resources. | &nbsp; | \-\-connectiontimeout | \<timeout\> | The connection timeout (in milliseconds) to use when downloading resources. | &nbsp;
| \-\-proxypass | \<pass\> | The proxy password to use when downloading resources. | &nbsp; | \-\-proxypass | \<pass\> | The proxy password to use when downloading resources. | &nbsp;


@@ -9,10 +9,7 @@ Installation & Usage
==================== ====================
Download the dependency-check command line tool [here](http://dl.bintray.com/jeremy-long/owasp/dependency-check-${project.version}-release.zip). Download the dependency-check command line tool [here](http://dl.bintray.com/jeremy-long/owasp/dependency-check-${project.version}-release.zip).
Extract the zip file to a location on your computer and put the 'bin' directory into the Extract the zip file to a location on your computer and put the 'bin' directory into the
path environment variable. On \*nix systems you will likely need to make the shell path environment variable.
script executable:
$ chmod +777 dependency-check.sh
#set( $H = '#' ) #set( $H = '#' )


@@ -17,10 +17,6 @@
*/ */
package org.owasp.dependencycheck; package org.owasp.dependencycheck;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import static org.junit.Assert.*; import static org.junit.Assert.*;
@@ -29,26 +25,6 @@ import static org.junit.Assert.*;
* @author jeremy * @author jeremy
*/ */
public class AppTest { public class AppTest {
public AppTest() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/** /**
* Test of ensureCanonicalPath method, of class App. * Test of ensureCanonicalPath method, of class App.
*/ */


@@ -17,17 +17,14 @@
*/ */
package org.owasp.dependencycheck; package org.owasp.dependencycheck;
import org.owasp.dependencycheck.CliParser;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.File; import java.io.File;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.io.PrintStream; import java.io.PrintStream;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.junit.After;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
@@ -48,14 +45,6 @@ public class CliParserTest {
Settings.cleanup(true); Settings.cleanup(true);
} }
@Before
public void setUp() throws Exception {
}
@After
public void tearDown() throws Exception {
}
/** /**
* Test of parse method, of class CliParser. * Test of parse method, of class CliParser.
* *
@@ -115,6 +104,63 @@ public class CliParserTest {
} }
/**
* Test of parse method with failOnCVSS without an argument
*
* @throws Exception thrown when an exception occurs.
*/
@Test
public void testParse_failOnCVSSNoArg() throws Exception {
String[] args = {"--failOnCVSS"};
CliParser instance = new CliParser();
try {
instance.parse(args);
} catch (ParseException ex) {
Assert.assertTrue(ex.getMessage().contains("Missing argument"));
}
Assert.assertFalse(instance.isGetVersion());
Assert.assertFalse(instance.isGetHelp());
Assert.assertFalse(instance.isRunScan());
}
/**
* Test of parse method with an invalid failOnCVSS argument. It should default to 11
*
* @throws Exception thrown when an exception occurs.
*/
@Test
public void testParse_failOnCVSSInvalidArgument() throws Exception {
String[] args = {"--failOnCVSS","bad"};
CliParser instance = new CliParser();
instance.parse(args);
Assert.assertEquals("Default should be 11", 11, instance.getFailOnCVSS());
Assert.assertFalse(instance.isGetVersion());
Assert.assertFalse(instance.isGetHelp());
Assert.assertFalse(instance.isRunScan());
}
/**
* Test of parse method with a valid failOnCVSS argument.
*
* @throws Exception thrown when an exception occurs.
*/
@Test
public void testParse_failOnCVSSValidArgument() throws Exception {
String[] args = {"--failOnCVSS","6"};
CliParser instance = new CliParser();
instance.parse(args);
Assert.assertEquals(6, instance.getFailOnCVSS());
Assert.assertFalse(instance.isGetVersion());
Assert.assertFalse(instance.isGetHelp());
Assert.assertFalse(instance.isRunScan());
}
/** /**
* Test of parse method with jar and cpe args, of class CliParser. * Test of parse method with jar and cpe args, of class CliParser.
* *
@@ -196,7 +242,7 @@ public class CliParserTest {
*/ */
@Test @Test
public void testParse_scan_withFileExists() throws Exception { public void testParse_scan_withFileExists() throws Exception {
File path = new File(this.getClass().getClassLoader().getResource("checkSumTest.file").getPath()); File path = new File(this.getClass().getClassLoader().getResource("checkSumTest.file").toURI().getPath());
String[] args = {"-scan", path.getCanonicalPath(), "-out", "./", "-app", "test"}; String[] args = {"-scan", path.getCanonicalPath(), "-out", "./", "-app", "test"};
CliParser instance = new CliParser(); CliParser instance = new CliParser();


@@ -20,7 +20,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<parent> <parent>
<groupId>org.owasp</groupId> <groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId> <artifactId>dependency-check-parent</artifactId>
<version>1.3.2</version> <version>1.4.6-SNAPSHOT</version>
</parent> </parent>
<artifactId>dependency-check-core</artifactId> <artifactId>dependency-check-core</artifactId>
@@ -83,9 +83,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</testResource> </testResource>
<testResource> <testResource>
<directory>${basedir}/src/test/resources</directory> <directory>${basedir}/src/test/resources</directory>
<excludes>
<exclude>**/mysql-connector-java-5.1.27-bin.jar</exclude>
</excludes>
<filtering>false</filtering> <filtering>false</filtering>
</testResource> </testResource>
</testResources> </testResources>
@@ -178,6 +175,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId> <artifactId>maven-surefire-plugin</artifactId>
<configuration> <configuration>
<argLine>-Dfile.encoding=UTF-8</argLine>
<systemProperties> <systemProperties>
<property> <property>
<name>data.directory</name> <name>data.directory</name>
@@ -246,7 +244,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<version>${reporting.pmd-plugin.version}</version> <version>${reporting.pmd-plugin.version}</version>
<configuration> <configuration>
<targetJdk>1.6</targetJdk> <targetJdk>1.6</targetJdk>
<linkXref>true</linkXref> <linkXRef>true</linkXRef>
<sourceEncoding>utf-8</sourceEncoding> <sourceEncoding>utf-8</sourceEncoding>
<excludes> <excludes>
<exclude>**/generated/*.java</exclude> <exclude>**/generated/*.java</exclude>
@@ -263,6 +261,10 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</reporting> </reporting>
<dependencies> <dependencies>
<!-- Note, to stay compatible with Jenkins installations only JARs compiled to 1.6 can be used --> <!-- Note, to stay compatible with Jenkins installations only JARs compiled to 1.6 can be used -->
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</dependency>
<dependency> <dependency>
<groupId>com.google.code.findbugs</groupId> <groupId>com.google.code.findbugs</groupId>
<artifactId>annotations</artifactId> <artifactId>annotations</artifactId>
@@ -454,6 +456,20 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<scope>test</scope> <scope>test</scope>
<optional>true</optional> <optional>true</optional>
</dependency> </dependency>
<dependency>
<groupId>xalan</groupId>
<artifactId>xalan</artifactId>
<version>2.7.0</version>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.thoughtworks.xstream</groupId>
<artifactId>xstream</artifactId>
<version>1.4.8</version>
<scope>test</scope>
<optional>true</optional>
</dependency>
</dependencies> </dependencies>
<profiles> <profiles>
<profile> <profile>
@@ -563,15 +579,19 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</plugins> </plugins>
</build> </build>
</profile> </profile>
<profile> <!--
<!-- The following profile adds additional The following profile adds additional dependencies that are only
dependencies that are only used during testing. used during testing.
Additionally, these are only added when using "allTests" to
make the build slightly faster in most cases. --> TODO move the following FP tests to a separate invoker test in the
maven plugin project. Add checks against the XML to validate that
these do not report FP.
-->
<!--profile>
<id>False Positive Tests</id> <id>False Positive Tests</id>
<activation> <activation>
<property> <property>
<name>allTests</name> <name>releaseTesting</name>
</property> </property>
</activation> </activation>
<dependencies> <dependencies>
@@ -582,13 +602,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<scope>test</scope> <scope>test</scope>
<optional>true</optional> <optional>true</optional>
</dependency> </dependency>
<dependency>
<groupId>com.thoughtworks.xstream</groupId>
<artifactId>xstream</artifactId>
<version>1.4.2</version>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency> <dependency>
<groupId>org.apache.ws.security</groupId> <groupId>org.apache.ws.security</groupId>
<artifactId>wss4j</artifactId> <artifactId>wss4j</artifactId>
@@ -659,13 +672,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<scope>test</scope> <scope>test</scope>
<optional>true</optional> <optional>true</optional>
</dependency> </dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
<version>3.2.12.RELEASE</version>
<scope>test</scope>
<optional>true</optional>
</dependency>
<dependency> <dependency>
<groupId>com.google.code.gson</groupId> <groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId> <artifactId>gson</artifactId>
@@ -723,6 +729,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<optional>true</optional> <optional>true</optional>
</dependency> </dependency>
</dependencies> </dependencies>
</profile> </profile-->
</profiles> </profiles>
</project> </project>


@@ -0,0 +1,129 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2016 Stefan Neuhaus. All Rights Reserved.
*/
package org.owasp.dependencycheck;
import org.owasp.dependencycheck.analyzer.Analyzer;
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.concurrent.Callable;
/**
* Task to support parallelism of dependency-check analysis. Analyzes a single
* {@link Dependency} by a specific {@link Analyzer}.
*
* @author Stefan Neuhaus
*/
public class AnalysisTask implements Callable<Void> {
/**
* Instance of the logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(AnalysisTask.class);
/**
* A reference to the analyzer.
*/
private final Analyzer analyzer;
/**
* The dependency to analyze.
*/
private final Dependency dependency;
/**
* A reference to the dependency-check engine.
*/
private final Engine engine;
/**
* The list of exceptions that may occur during analysis.
*/
private final List<Throwable> exceptions;
/**
* A reference to the global settings object.
*/
private final Settings settings;
/**
* Creates a new analysis task.
*
* @param analyzer a reference of the analyzer to execute
* @param dependency the dependency to analyze
* @param engine the dependency-check engine
* @param exceptions exceptions that occur during analysis will be added to
* this collection of exceptions
* @param settings a reference to the global settings object; this is
* necessary so that when the thread is started the dependencies have a
* correct reference to the global settings.
*/
AnalysisTask(Analyzer analyzer, Dependency dependency, Engine engine, List<Throwable> exceptions, Settings settings) {
this.analyzer = analyzer;
this.dependency = dependency;
this.engine = engine;
this.exceptions = exceptions;
this.settings = settings;
}
/**
* Executes the analysis task.
*
* @return null
*/
@Override
public Void call() {
try {
Settings.setInstance(settings);
if (shouldAnalyze()) {
LOGGER.debug("Begin Analysis of '{}' ({})", dependency.getActualFilePath(), analyzer.getName());
try {
analyzer.analyze(dependency, engine);
} catch (AnalysisException ex) {
LOGGER.warn("An error occurred while analyzing '{}' ({}).", dependency.getActualFilePath(), analyzer.getName());
LOGGER.debug("", ex);
exceptions.add(ex);
} catch (Throwable ex) {
LOGGER.warn("An unexpected error occurred during analysis of '{}' ({}): {}",
dependency.getActualFilePath(), analyzer.getName(), ex.getMessage());
LOGGER.debug("", ex);
exceptions.add(ex);
}
}
} finally {
Settings.cleanup(false);
}
return null;
}
/**
* Determines if the analyzer can analyze the given dependency.
*
* @return whether or not the analyzer can analyze the dependency
*/
protected boolean shouldAnalyze() {
if (analyzer instanceof FileTypeAnalyzer) {
final FileTypeAnalyzer fileTypeAnalyzer = (FileTypeAnalyzer) analyzer;
return fileTypeAnalyzer.accept(dependency.getActualFile());
}
return true;
}
}
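Because AnalysisTask is a Callable<Void>, a batch of tasks can be handed to an ExecutorService; the real wiring is in Engine.executeAnalysisTasks(), shown in the Engine diff further down. The following is a minimal, hypothetical driver for any list of Callable<Void> tasks, illustrative only and not code from this changeset; the class and method names are invented.

import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

// Hypothetical driver: runs the tasks on a pool sized to the available processors
// and surfaces any exception a task threw, mirroring the pattern used by the Engine below.
final class AnalysisTaskDriver {
    static void runAll(final List<? extends Callable<Void>> tasks) throws InterruptedException, ExecutionException {
        final ExecutorService pool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        try {
            // tasks that have not completed within 10 minutes are cancelled, as in the Engine below
            final List<Future<Void>> results = pool.invokeAll(tasks, 10, TimeUnit.MINUTES);
            for (Future<Void> result : results) {
                result.get(); // rethrows a task failure as an ExecutionException
            }
        } finally {
            pool.shutdown();
        }
    }
}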


@@ -21,7 +21,6 @@ import org.owasp.dependencycheck.analyzer.AnalysisPhase;
import org.owasp.dependencycheck.analyzer.Analyzer; import org.owasp.dependencycheck.analyzer.Analyzer;
import org.owasp.dependencycheck.analyzer.AnalyzerService; import org.owasp.dependencycheck.analyzer.AnalyzerService;
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer; import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory; import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory;
import org.owasp.dependencycheck.data.nvdcve.CveDB; import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException; import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
@@ -29,6 +28,8 @@ import org.owasp.dependencycheck.data.update.CachedWebDataSource;
import org.owasp.dependencycheck.data.update.UpdateService; import org.owasp.dependencycheck.data.update.UpdateService;
import org.owasp.dependencycheck.data.update.exception.UpdateException; import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.ExceptionCollection;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.exception.NoDataException; import org.owasp.dependencycheck.exception.NoDataException;
import org.owasp.dependencycheck.utils.InvalidSettingException; import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
@@ -37,18 +38,28 @@ import org.slf4j.LoggerFactory;
import java.io.File; import java.io.File;
import java.io.FileFilter; import java.io.FileFilter;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import java.util.EnumMap; import java.util.EnumMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
/** /**
* Scans files, directories, etc. for Dependencies. Analyzers are loaded and used to process the files found by the scan; if a * Scans files, directories, etc. for Dependencies. Analyzers are loaded and
* file is encountered and an Analyzer is associated with the file type then the file is turned into a dependency. * used to process the files found by the scan; if a file is encountered and an
* Analyzer is associated with the file type then the file is turned into a
* dependency.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@@ -57,21 +68,26 @@ public class Engine implements FileFilter {
/** /**
* The list of dependencies. * The list of dependencies.
*/ */
private List<Dependency> dependencies = new ArrayList<Dependency>(); private final List<Dependency> dependencies = Collections.synchronizedList(new ArrayList<Dependency>());
/** /**
* A Map of analyzers grouped by Analysis phase. * A Map of analyzers grouped by Analysis phase.
*/ */
private Map<AnalysisPhase, List<Analyzer>> analyzers = new EnumMap<AnalysisPhase, List<Analyzer>>(AnalysisPhase.class); private final Map<AnalysisPhase, List<Analyzer>> analyzers = new EnumMap<>(AnalysisPhase.class);
/** /**
* A Map of analyzers grouped by Analysis phase. * A Map of analyzers grouped by Analysis phase.
*/ */
private Set<FileTypeAnalyzer> fileTypeAnalyzers = new HashSet<FileTypeAnalyzer>(); private final Set<FileTypeAnalyzer> fileTypeAnalyzers = new HashSet<>();
/** /**
* The ClassLoader to use when dynamically loading Analyzer and Update services. * The ClassLoader to use when dynamically loading Analyzer and Update
* services.
*/ */
private ClassLoader serviceClassLoader = Thread.currentThread().getContextClassLoader(); private ClassLoader serviceClassLoader = Thread.currentThread().getContextClassLoader();
/**
* A reference to the database.
*/
private CveDB database = null;
/** /**
* The Logger for use throughout the class. * The Logger for use throughout the class.
*/ */
@@ -80,7 +96,8 @@ public class Engine implements FileFilter {
/** /**
* Creates a new Engine. * Creates a new Engine.
* *
* @throws DatabaseException thrown if there is an error connecting to the database * @throws DatabaseException thrown if there is an error connecting to the
* database
*/ */
public Engine() throws DatabaseException { public Engine() throws DatabaseException {
initializeEngine(); initializeEngine();
@@ -90,7 +107,8 @@ public class Engine implements FileFilter {
* Creates a new Engine. * Creates a new Engine.
* *
* @param serviceClassLoader a reference to the class loader being used * @param serviceClassLoader a reference to the class loader being used
* @throws DatabaseException thrown if there is an error connecting to the database * @throws DatabaseException thrown if there is an error connecting to the
* database
*/ */
public Engine(ClassLoader serviceClassLoader) throws DatabaseException { public Engine(ClassLoader serviceClassLoader) throws DatabaseException {
this.serviceClassLoader = serviceClassLoader; this.serviceClassLoader = serviceClassLoader;
@@ -98,9 +116,11 @@ public class Engine implements FileFilter {
} }
/** /**
* Creates a new Engine using the specified classloader to dynamically load Analyzer and Update services. * Creates a new Engine using the specified classloader to dynamically load
* Analyzer and Update services.
* *
* @throws DatabaseException thrown if there is an error connecting to the database * @throws DatabaseException thrown if there is an error connecting to the
* database
*/ */
protected final void initializeEngine() throws DatabaseException { protected final void initializeEngine() throws DatabaseException {
ConnectionFactory.initialize(); ConnectionFactory.initialize();
@@ -111,11 +131,16 @@ public class Engine implements FileFilter {
* Properly cleans up resources allocated during analysis. * Properly cleans up resources allocated during analysis.
*/ */
public void cleanup() { public void cleanup() {
if (database != null) {
database.close();
database = null;
}
ConnectionFactory.cleanup(); ConnectionFactory.cleanup();
} }
/** /**
* Loads the analyzers specified in the configuration file (or system properties). * Loads the analyzers specified in the configuration file (or system
* properties).
*/ */
private void loadAnalyzers() { private void loadAnalyzers() {
if (!analyzers.isEmpty()) { if (!analyzers.isEmpty()) {
@@ -126,9 +151,8 @@ public class Engine implements FileFilter {
} }
final AnalyzerService service = new AnalyzerService(serviceClassLoader); final AnalyzerService service = new AnalyzerService(serviceClassLoader);
final Iterator<Analyzer> iterator = service.getAnalyzers(); final List<Analyzer> iterator = service.getAnalyzers();
while (iterator.hasNext()) { for (Analyzer a : iterator) {
final Analyzer a = iterator.next();
analyzers.get(a.getAnalysisPhase()).add(a); analyzers.get(a.getAnalysisPhase()).add(a);
if (a instanceof FileTypeAnalyzer) { if (a instanceof FileTypeAnalyzer) {
this.fileTypeAnalyzers.add((FileTypeAnalyzer) a); this.fileTypeAnalyzers.add((FileTypeAnalyzer) a);
@@ -147,11 +171,17 @@ public class Engine implements FileFilter {
} }
/** /**
* Get the dependencies identified. * Get the dependencies identified. The returned list is a reference to the
* engine's synchronized list. <b>You must synchronize on the returned
* list</b> when you modify and iterate over it from multiple threads. E.g.
* this holds for analyzers supporting parallel processing during their
* analysis phase.
* *
* @return the dependencies identified * @return the dependencies identified
* @see Collections#synchronizedList(List)
* @see Analyzer#supportsParallelProcessing()
*/ */
public List<Dependency> getDependencies() { public synchronized List<Dependency> getDependencies() {
return dependencies; return dependencies;
} }
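The updated Javadoc above shifts the synchronization burden to callers that iterate the shared list from multiple threads. The sketch below is a hypothetical caller that honors that contract by holding the list's monitor for the whole iteration; the class and method names are invented and it is not code from this changeset.

import java.util.ArrayList;
import java.util.List;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Dependency;

// Illustrative only: iterate the shared, synchronized list while holding its monitor.
final class DependencyListingExample {
    static List<String> collectScannedPaths(final Engine engine) {
        final List<String> paths = new ArrayList<>();
        final List<Dependency> deps = engine.getDependencies();
        synchronized (deps) {
            // hold the list's monitor for the whole iteration, per the Javadoc above
            for (Dependency d : deps) {
                paths.add(d.getActualFilePath());
            }
        }
        return paths;
    }
}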
@@ -161,21 +191,40 @@ public class Engine implements FileFilter {
* @param dependencies the dependencies * @param dependencies the dependencies
*/ */
public void setDependencies(List<Dependency> dependencies) { public void setDependencies(List<Dependency> dependencies) {
this.dependencies = dependencies; synchronized (this.dependencies) {
this.dependencies.clear();
this.dependencies.addAll(dependencies);
}
} }
/** /**
* Scans an array of files or directories. If a directory is specified, it will be scanned recursively. Any dependencies * Scans an array of files or directories. If a directory is specified, it
* identified are added to the dependency collection. * will be scanned recursively. Any dependencies identified are added to the
* dependency collection.
* *
* @param paths an array of paths to files or directories to be analyzed * @param paths an array of paths to files or directories to be analyzed
* @return the list of dependencies scanned * @return the list of dependencies scanned
* @since v0.3.2.5 * @since v0.3.2.5
*/ */
public List<Dependency> scan(String[] paths) { public List<Dependency> scan(String[] paths) {
final List<Dependency> deps = new ArrayList<Dependency>(); return scan(paths, null);
}
/**
* Scans an array of files or directories. If a directory is specified, it
* will be scanned recursively. Any dependencies identified are added to the
* dependency collection.
*
* @param paths an array of paths to files or directories to be analyzed
* @param projectReference the name of the project or scope in which the
* dependency was identified
* @return the list of dependencies scanned
* @since v1.4.4
*/
public List<Dependency> scan(String[] paths, String projectReference) {
final List<Dependency> deps = new ArrayList<>();
for (String path : paths) { for (String path : paths) {
final List<Dependency> d = scan(path); final List<Dependency> d = scan(path, projectReference);
if (d != null) { if (d != null) {
deps.addAll(d); deps.addAll(d);
} }
@@ -184,29 +233,61 @@ public class Engine implements FileFilter {
} }
/** /**
* Scans a given file or directory. If a directory is specified, it will be scanned recursively. Any dependencies identified * Scans a given file or directory. If a directory is specified, it will be
* are added to the dependency collection. * scanned recursively. Any dependencies identified are added to the
* dependency collection.
* *
* @param path the path to a file or directory to be analyzed * @param path the path to a file or directory to be analyzed
* @return the list of dependencies scanned * @return the list of dependencies scanned
*/ */
public List<Dependency> scan(String path) { public List<Dependency> scan(String path) {
final File file = new File(path); return scan(path, null);
return scan(file);
} }
/** /**
* Scans an array of files or directories. If a directory is specified, it will be scanned recursively. Any dependencies * Scans a given file or directory. If a directory is specified, it will be
* identified are added to the dependency collection. * scanned recursively. Any dependencies identified are added to the
* dependency collection.
*
* @param path the path to a file or directory to be analyzed
* @param projectReference the name of the project or scope in which the
* dependency was identified
* @return the list of dependencies scanned
* @since v1.4.4
*/
public List<Dependency> scan(String path, String projectReference) {
final File file = new File(path);
return scan(file, projectReference);
}
/**
* Scans an array of files or directories. If a directory is specified, it
* will be scanned recursively. Any dependencies identified are added to the
* dependency collection.
* *
* @param files an array of paths to files or directories to be analyzed. * @param files an array of paths to files or directories to be analyzed.
* @return the list of dependencies * @return the list of dependencies
* @since v0.3.2.5 * @since v0.3.2.5
*/ */
public List<Dependency> scan(File[] files) { public List<Dependency> scan(File[] files) {
final List<Dependency> deps = new ArrayList<Dependency>(); return scan(files, null);
}
/**
* Scans an array of files or directories. If a directory is specified, it
* will be scanned recursively. Any dependencies identified are added to the
* dependency collection.
*
* @param files an array of paths to files or directories to be analyzed.
* @param projectReference the name of the project or scope in which the
* dependency was identified
* @return the list of dependencies
* @since v1.4.4
*/
public List<Dependency> scan(File[] files, String projectReference) {
final List<Dependency> deps = new ArrayList<>();
for (File file : files) { for (File file : files) {
final List<Dependency> d = scan(file); final List<Dependency> d = scan(file, projectReference);
if (d != null) { if (d != null) {
deps.addAll(d); deps.addAll(d);
} }
@@ -215,17 +296,33 @@ public class Engine implements FileFilter {
} }
/** /**
* Scans a collection of files or directories. If a directory is specified, it will be scanned recursively. Any dependencies * Scans a collection of files or directories. If a directory is specified,
* identified are added to the dependency collection. * it will be scanned recursively. Any dependencies identified are added to
* the dependency collection.
* *
* @param files a set of paths to files or directories to be analyzed * @param files a set of paths to files or directories to be analyzed
* @return the list of dependencies scanned * @return the list of dependencies scanned
* @since v0.3.2.5 * @since v0.3.2.5
*/ */
public List<Dependency> scan(Collection<File> files) { public List<Dependency> scan(Collection<File> files) {
final List<Dependency> deps = new ArrayList<Dependency>(); return scan(files, null);
}
/**
* Scans a collection of files or directories. If a directory is specified,
* it will be scanned recursively. Any dependencies identified are added to
* the dependency collection.
*
* @param files a set of paths to files or directories to be analyzed
* @param projectReference the name of the project or scope in which the
* dependency was identified
* @return the list of dependencies scanned
* @since v1.4.4
*/
public List<Dependency> scan(Collection<File> files, String projectReference) {
final List<Dependency> deps = new ArrayList<>();
for (File file : files) { for (File file : files) {
final List<Dependency> d = scan(file); final List<Dependency> d = scan(file, projectReference);
if (d != null) { if (d != null) {
deps.addAll(d); deps.addAll(d);
} }
@@ -234,21 +331,37 @@ public class Engine implements FileFilter {
} }
/** /**
* Scans a given file or directory. If a directory is specified, it will be scanned recursively. Any dependencies identified * Scans a given file or directory. If a directory is specified, it will be
* are added to the dependency collection. * scanned recursively. Any dependencies identified are added to the
* dependency collection.
* *
* @param file the path to a file or directory to be analyzed * @param file the path to a file or directory to be analyzed
* @return the list of dependencies scanned * @return the list of dependencies scanned
* @since v0.3.2.4 * @since v0.3.2.4
*/ */
public List<Dependency> scan(File file) { public List<Dependency> scan(File file) {
return scan(file, null);
}
/**
* Scans a given file or directory. If a directory is specified, it will be
* scanned recursively. Any dependencies identified are added to the
* dependency collection.
*
* @param file the path to a file or directory to be analyzed
* @param projectReference the name of the project or scope in which the
* dependency was identified
* @return the list of dependencies scanned
* @since v1.4.4
*/
public List<Dependency> scan(File file, String projectReference) {
if (file.exists()) { if (file.exists()) {
if (file.isDirectory()) { if (file.isDirectory()) {
return scanDirectory(file); return scanDirectory(file, projectReference);
} else { } else {
final Dependency d = scanFile(file); final Dependency d = scanFile(file, projectReference);
if (d != null) { if (d != null) {
final List<Dependency> deps = new ArrayList<Dependency>(); final List<Dependency> deps = new ArrayList<>();
deps.add(d); deps.add(d);
return deps; return deps;
} }
@@ -258,23 +371,38 @@ public class Engine implements FileFilter {
} }
/** /**
* Recursively scans files and directories. Any dependencies identified are added to the dependency collection. * Recursively scans files and directories. Any dependencies identified are
* added to the dependency collection.
* *
* @param dir the directory to scan * @param dir the directory to scan
* @return the list of Dependency objects scanned * @return the list of Dependency objects scanned
*/ */
protected List<Dependency> scanDirectory(File dir) { protected List<Dependency> scanDirectory(File dir) {
return scanDirectory(dir, null);
}
/**
* Recursively scans files and directories. Any dependencies identified are
* added to the dependency collection.
*
* @param dir the directory to scan
* @param projectReference the name of the project or scope in which the
* dependency was identified
* @return the list of Dependency objects scanned
* @since v1.4.4
*/
protected List<Dependency> scanDirectory(File dir, String projectReference) {
final File[] files = dir.listFiles(); final File[] files = dir.listFiles();
final List<Dependency> deps = new ArrayList<Dependency>(); final List<Dependency> deps = new ArrayList<>();
if (files != null) { if (files != null) {
for (File f : files) { for (File f : files) {
if (f.isDirectory()) { if (f.isDirectory()) {
final List<Dependency> d = scanDirectory(f); final List<Dependency> d = scanDirectory(f, projectReference);
if (d != null) { if (d != null) {
deps.addAll(d); deps.addAll(d);
} }
} else { } else {
final Dependency d = scanFile(f); final Dependency d = scanFile(f, projectReference);
deps.add(d); deps.add(d);
} }
} }
@@ -283,91 +411,117 @@ public class Engine implements FileFilter {
} }
/** /**
* Scans a specified file. If a dependency is identified it is added to the dependency collection. * Scans a specified file. If a dependency is identified it is added to the
* dependency collection.
* *
* @param file The file to scan * @param file The file to scan
* @return the scanned dependency * @return the scanned dependency
*/ */
protected Dependency scanFile(File file) { protected Dependency scanFile(File file) {
return scanFile(file, null);
}
/**
* Scans a specified file. If a dependency is identified it is added to the
* dependency collection.
*
* @param file The file to scan
* @param projectReference the name of the project or scope in which the
* dependency was identified
* @return the scanned dependency
* @since v1.4.4
*/
protected Dependency scanFile(File file, String projectReference) {
Dependency dependency = null; Dependency dependency = null;
if (file.isFile()) { if (file.isFile()) {
if (accept(file)) { if (accept(file)) {
dependency = new Dependency(file); dependency = new Dependency(file);
dependencies.add(dependency); if (projectReference != null) {
dependency.addProjectReference(projectReference);
}
final String sha1 = dependency.getSha1sum();
boolean found = false;
synchronized (dependencies) {
if (sha1 != null) {
for (Dependency existing : dependencies) {
if (sha1.equals(existing.getSha1sum())) {
found = true;
if (projectReference != null) {
existing.addProjectReference(projectReference);
}
if (existing.getActualFilePath() != null && dependency.getActualFilePath() != null
&& !existing.getActualFilePath().equals(dependency.getActualFilePath())) {
existing.addRelatedDependency(dependency);
} else {
dependency = existing;
}
break;
}
}
}
if (!found) {
dependencies.add(dependency);
}
}
} else {
LOGGER.debug("Path passed to scanFile(File) is not a file: {}. Skipping the file.", file);
} }
} else {
LOGGER.debug("Path passed to scanFile(File) is not a file: {}. Skipping the file.", file);
} }
return dependency; return dependency;
} }
/** /**
* Runs the analyzers against all of the dependencies. Since the mutable dependencies list is exposed via * Runs the analyzers against all of the dependencies. Since the mutable
* {@link #getDependencies()}, this method iterates over a copy of the dependencies list. Thus, the potential for * dependencies list is exposed via {@link #getDependencies()}, this method
* {@link java.util.ConcurrentModificationException}s is avoided, and analyzers may safely add or remove entries from the * iterates over a copy of the dependencies list. Thus, the potential for
* dependencies list. * {@link java.util.ConcurrentModificationException}s is avoided, and
* analyzers may safely add or remove entries from the dependencies list.
* <p>
* Every effort is made to complete analysis on the dependencies. In some
* cases an exception will occur in one part of the analysis without
* affecting the rest of the analysis. Any exception that occurs will
* be included in the thrown exception collection.
*
* @throws ExceptionCollection a collection of any exceptions that occurred
* during analysis
*/ */
public void analyzeDependencies() { public void analyzeDependencies() throws ExceptionCollection {
boolean autoUpdate = true; final List<Throwable> exceptions = Collections.synchronizedList(new ArrayList<Throwable>());
try {
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE); initializeAndUpdateDatabase(exceptions);
} catch (InvalidSettingException ex) {
LOGGER.debug("Invalid setting for auto-update; using true.");
}
if (autoUpdate) {
doUpdates();
}
//need to ensure that data exists //need to ensure that data exists
try { try {
ensureDataExists(); ensureDataExists();
} catch (NoDataException ex) { } catch (NoDataException ex) {
LOGGER.error("{}\n\nUnable to continue dependency-check analysis.", ex.getMessage()); throwFatalExceptionCollection("Unable to continue dependency-check analysis.", ex, exceptions);
LOGGER.debug("", ex);
return;
} catch (DatabaseException ex) {
LOGGER.error("{}\n\nUnable to continue dependency-check analysis.", ex.getMessage());
LOGGER.debug("", ex);
return;
} }
LOGGER.debug("\n----------------------------------------------------\nBEGIN ANALYSIS\n----------------------------------------------------"); LOGGER.debug("\n----------------------------------------------------\nBEGIN ANALYSIS\n----------------------------------------------------");
LOGGER.info("Analysis Starting"); LOGGER.info("Analysis Started");
final long analysisStart = System.currentTimeMillis(); final long analysisStart = System.currentTimeMillis();
// analysis phases // analysis phases
for (AnalysisPhase phase : AnalysisPhase.values()) { for (AnalysisPhase phase : AnalysisPhase.values()) {
final List<Analyzer> analyzerList = analyzers.get(phase); final List<Analyzer> analyzerList = analyzers.get(phase);
for (Analyzer a : analyzerList) { for (final Analyzer analyzer : analyzerList) {
a = initializeAnalyzer(a); final long analyzerStart = System.currentTimeMillis();
try {
initializeAnalyzer(analyzer);
} catch (InitializationException ex) {
exceptions.add(ex);
continue;
}
/* need to create a copy of the collection because some of the if (analyzer.isEnabled()) {
* analyzers may modify it. This prevents ConcurrentModificationExceptions. executeAnalysisTasks(analyzer, exceptions);
* This is okay for adds/deletes because it happens per analyzer.
*/ final long analyzerDurationMillis = System.currentTimeMillis() - analyzerStart;
LOGGER.debug("Begin Analyzer '{}'", a.getName()); final long analyzerDurationSeconds = TimeUnit.MILLISECONDS.toSeconds(analyzerDurationMillis);
final Set<Dependency> dependencySet = new HashSet<Dependency>(dependencies); LOGGER.info("Finished {} ({} seconds)", analyzer.getName(), analyzerDurationSeconds);
for (Dependency d : dependencySet) { } else {
boolean shouldAnalyze = true; LOGGER.debug("Skipping {} (not enabled)", analyzer.getName());
if (a instanceof FileTypeAnalyzer) {
final FileTypeAnalyzer fAnalyzer = (FileTypeAnalyzer) a;
shouldAnalyze = fAnalyzer.accept(d.getActualFile());
}
if (shouldAnalyze) {
LOGGER.debug("Begin Analysis of '{}'", d.getActualFilePath());
try {
a.analyze(d, this);
} catch (AnalysisException ex) {
LOGGER.warn("An error occurred while analyzing '{}'.", d.getActualFilePath());
LOGGER.debug("", ex);
} catch (Throwable ex) {
//final AnalysisException ax = new AnalysisException(axMsg, ex);
LOGGER.warn("An unexpected error occurred during analysis of '{}'", d.getActualFilePath());
LOGGER.debug("", ex);
}
}
} }
} }
} }
@@ -380,20 +534,134 @@ public class Engine implements FileFilter {
} }
LOGGER.debug("\n----------------------------------------------------\nEND ANALYSIS\n----------------------------------------------------"); LOGGER.debug("\n----------------------------------------------------\nEND ANALYSIS\n----------------------------------------------------");
LOGGER.info("Analysis Complete ({} ms)", System.currentTimeMillis() - analysisStart); final long analysisDurationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - analysisStart);
LOGGER.info("Analysis Complete ({} seconds)", analysisDurationSeconds);
if (exceptions.size() > 0) {
throw new ExceptionCollection("One or more exceptions occurred during dependency-check analysis", exceptions);
}
}
/**
* Performs any necessary updates and initializes the database.
*
* @param exceptions a collection to store non-fatal exceptions
* @throws ExceptionCollection thrown if fatal exceptions occur
*/
private void initializeAndUpdateDatabase(final List<Throwable> exceptions) throws ExceptionCollection {
boolean autoUpdate = true;
try {
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
} catch (InvalidSettingException ex) {
LOGGER.debug("Invalid setting for auto-update; using true.");
exceptions.add(ex);
}
if (autoUpdate) {
try {
database = CveDB.getInstance();
doUpdates();
} catch (UpdateException ex) {
exceptions.add(ex);
LOGGER.warn("Unable to update Cached Web DataSource, using local "
+ "data instead. Results may not include recent vulnerabilities.");
LOGGER.debug("Update Error", ex);
} catch (DatabaseException ex) {
throw new ExceptionCollection("Unable to connect to the database", ex);
}
} else {
try {
if (ConnectionFactory.isH2Connection() && !ConnectionFactory.h2DataFileExists()) {
throw new ExceptionCollection(new NoDataException("Autoupdate is disabled and the database does not exist"), true);
} else {
database = CveDB.getInstance();
}
} catch (IOException ex) {
throw new ExceptionCollection(new DatabaseException("Autoupdate is disabled and unable to connect to the database"), true);
} catch (DatabaseException ex) {
throwFatalExceptionCollection("Unable to connect to the dependency-check database.", ex, exceptions);
}
}
}
/**
* Executes the analyzer using multiple threads.
*
* @param exceptions a collection of exceptions that occurred during
* analysis
* @param analyzer the analyzer to execute
* @throws ExceptionCollection thrown if exceptions occurred during analysis
*/
protected void executeAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) throws ExceptionCollection {
LOGGER.debug("Starting {}", analyzer.getName());
final List<AnalysisTask> analysisTasks = getAnalysisTasks(analyzer, exceptions);
final ExecutorService executorService = getExecutorService(analyzer);
try {
final List<Future<Void>> results = executorService.invokeAll(analysisTasks, 10, TimeUnit.MINUTES);
// ensure there was no exception during execution
for (Future<Void> result : results) {
try {
result.get();
} catch (ExecutionException e) {
throwFatalExceptionCollection("Analysis task failed with a fatal exception.", e, exceptions);
} catch (CancellationException e) {
throwFatalExceptionCollection("Analysis task timed out.", e, exceptions);
}
}
} catch (InterruptedException e) {
throwFatalExceptionCollection("Analysis has been interrupted.", e, exceptions);
} finally {
executorService.shutdown();
}
}
/**
* Returns the analysis tasks for the dependencies.
*
* @param analyzer the analyzer to create tasks for
* @param exceptions the collection of exceptions to collect
* @return a collection of analysis tasks
*/
protected List<AnalysisTask> getAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) {
final List<AnalysisTask> result = new ArrayList<>();
synchronized (dependencies) {
for (final Dependency dependency : dependencies) {
final AnalysisTask task = new AnalysisTask(analyzer, dependency, this, exceptions, Settings.getInstance());
result.add(task);
}
}
return result;
}
/**
* Returns the executor service for a given analyzer.
*
* @param analyzer the analyzer to obtain an executor
* @return the executor service
*/
protected ExecutorService getExecutorService(Analyzer analyzer) {
if (analyzer.supportsParallelProcessing()) {
final int maximumNumberOfThreads = Runtime.getRuntime().availableProcessors();
LOGGER.debug("Parallel processing with up to {} threads: {}.", maximumNumberOfThreads, analyzer.getName());
return Executors.newFixedThreadPool(maximumNumberOfThreads);
} else {
LOGGER.debug("Parallel processing is not supported: {}.", analyzer.getName());
return Executors.newSingleThreadExecutor();
}
} }
/** /**
* Initializes the given analyzer. * Initializes the given analyzer.
* *
* @param analyzer the analyzer to initialize * @param analyzer the analyzer to initialize
* @return the initialized analyzer * @throws InitializationException thrown when there is a problem
* initializing the analyzer
*/ */
protected Analyzer initializeAnalyzer(Analyzer analyzer) { protected void initializeAnalyzer(Analyzer analyzer) throws InitializationException {
try { try {
LOGGER.debug("Initializing {}", analyzer.getName()); LOGGER.debug("Initializing {}", analyzer.getName());
analyzer.initialize(); analyzer.initialize();
} catch (Throwable ex) { } catch (InitializationException ex) {
LOGGER.error("Exception occurred initializing {}.", analyzer.getName()); LOGGER.error("Exception occurred initializing {}.", analyzer.getName());
LOGGER.debug("", ex); LOGGER.debug("", ex);
try { try {
@@ -401,8 +669,17 @@ public class Engine implements FileFilter {
} catch (Throwable ex1) { } catch (Throwable ex1) {
LOGGER.trace("", ex1); LOGGER.trace("", ex1);
} }
throw ex;
} catch (Throwable ex) {
LOGGER.error("Unexpected exception occurred initializing {}.", analyzer.getName());
LOGGER.debug("", ex);
try {
analyzer.close();
} catch (Throwable ex1) {
LOGGER.trace("", ex1);
}
throw new InitializationException("Unexpected Exception", ex);
} }
return analyzer;
} }
/** /**
@@ -420,33 +697,31 @@ public class Engine implements FileFilter {
} }
/** /**
* Cycles through the cached web data sources and calls update on all of them. * Cycles through the cached web data sources and calls update on all of
* them.
*
* @throws UpdateException thrown if the operation fails
*/ */
public void doUpdates() { public void doUpdates() throws UpdateException {
LOGGER.info("Checking for updates"); LOGGER.info("Checking for updates");
final long updateStart = System.currentTimeMillis(); final long updateStart = System.currentTimeMillis();
final UpdateService service = new UpdateService(serviceClassLoader); final UpdateService service = new UpdateService(serviceClassLoader);
final Iterator<CachedWebDataSource> iterator = service.getDataSources(); final Iterator<CachedWebDataSource> iterator = service.getDataSources();
while (iterator.hasNext()) { while (iterator.hasNext()) {
final CachedWebDataSource source = iterator.next(); final CachedWebDataSource source = iterator.next();
try { source.update();
source.update();
} catch (UpdateException ex) {
LOGGER.warn(
"Unable to update Cached Web DataSource, using local data instead. Results may not include recent vulnerabilities.");
LOGGER.debug("Unable to update details for {}", source.getClass().getName(), ex);
}
} }
LOGGER.info("Check for updates complete ({} ms)", System.currentTimeMillis() - updateStart); LOGGER.info("Check for updates complete ({} ms)", System.currentTimeMillis() - updateStart);
} }
/** /**
* Returns a full list of all of the analyzers. This is useful for reporting which analyzers were used. * Returns a full list of all of the analyzers. This is useful for reporting
* which analyzers were used.
* *
* @return a list of Analyzers * @return a list of Analyzers
*/ */
public List<Analyzer> getAnalyzers() { public List<Analyzer> getAnalyzers() {
final List<Analyzer> ret = new ArrayList<Analyzer>(); final List<Analyzer> ret = new ArrayList<>();
for (AnalysisPhase phase : AnalysisPhase.values()) { for (AnalysisPhase phase : AnalysisPhase.values()) {
final List<Analyzer> analyzerList = analyzers.get(phase); final List<Analyzer> analyzerList = analyzers.get(phase);
ret.addAll(analyzerList); ret.addAll(analyzerList);
@@ -458,7 +733,8 @@ public class Engine implements FileFilter {
* Checks all analyzers to see if an extension is supported. * Checks all analyzers to see if an extension is supported.
* *
* @param file a file extension * @param file a file extension
* @return true or false depending on whether or not the file extension is supported * @return true or false depending on whether or not the file extension is
* supported
*/ */
@Override @Override
public boolean accept(File file) { public boolean accept(File file) {
@@ -484,22 +760,40 @@ public class Engine implements FileFilter {
} }
/** /**
* Checks the CPE Index to ensure documents exist. If none exist, a NoDataException is thrown. * Adds a file type analyzer. This has been added solely to assist in unit
* testing the Engine.
*
* @param fta the file type analyzer to add
*/
protected void addFileTypeAnalyzer(FileTypeAnalyzer fta) {
this.fileTypeAnalyzers.add(fta);
}
/**
* Checks the CPE Index to ensure documents exist. If none exist, a
* NoDataException is thrown.
* *
* @throws NoDataException thrown if no data exists in the CPE Index * @throws NoDataException thrown if no data exists in the CPE Index
* @throws DatabaseException thrown if there is an exception opening the database
*/ */
private void ensureDataExists() throws NoDataException, DatabaseException { private void ensureDataExists() throws NoDataException {
final CveDB cve = new CveDB(); if (database == null || !database.dataExists()) {
try { throw new NoDataException("No documents exist");
cve.open();
if (!cve.dataExists()) {
throw new NoDataException("No documents exist");
}
} catch (DatabaseException ex) {
throw new NoDataException(ex.getMessage(), ex);
} finally {
cve.close();
} }
} }
/**
* Constructs and throws a fatal exception collection.
*
* @param message the exception message
* @param throwable the cause
* @param exceptions a collection of exceptions to include
* @throws ExceptionCollection a collection of exceptions that occurred
* during analysis
*/
private void throwFatalExceptionCollection(String message, Throwable throwable, List<Throwable> exceptions) throws ExceptionCollection {
LOGGER.error("{}\n\n{}", throwable.getMessage(), message);
LOGGER.debug("", throwable);
exceptions.add(throwable);
throw new ExceptionCollection(message, exceptions, true);
}
} }
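The new throwFatalExceptionCollection helper wraps analysis failures in an ExceptionCollection flagged as fatal, and non-fatal problems are collected the same way. A hedged sketch of the caller side, assuming an already-constructed Engine and an accessor such as getExceptions() for the wrapped throwables (only isFatal() appears in this diff):

    try {
        engine.analyzeDependencies();
    } catch (ExceptionCollection ex) {
        if (ex.isFatal()) {
            throw ex;                                   // nothing usable was produced
        }
        for (Throwable t : ex.getExceptions()) {        // accessor name assumed
            LOGGER.warn("Non-fatal analysis problem: {}", t.getMessage());
        }
    }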

View File

@@ -27,6 +27,8 @@ import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier; import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability; import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.exception.ExceptionCollection;
import org.owasp.dependencycheck.exception.ReportException;
import org.owasp.dependencycheck.exception.ScanAgentException; import org.owasp.dependencycheck.exception.ScanAgentException;
import org.owasp.dependencycheck.reporting.ReportGenerator; import org.owasp.dependencycheck.reporting.ReportGenerator;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
@@ -34,14 +36,16 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
/** /**
* This class provides a way to easily conduct a scan solely based on existing evidence metadata rather than collecting evidence * This class provides a way to easily conduct a scan solely based on existing
* from the files themselves. This class is based on the Ant task and Maven plugin with the exception that it takes a list of * evidence metadata rather than collecting evidence from the files themselves.
* dependencies that can be programmatically added from data in a spreadsheet, database or some other datasource and conduct a * This class is based on the Ant task and Maven plugin with the exception that
* scan based on this pre-defined evidence. * it takes a list of dependencies that can be programmatically added from data
* in a spreadsheet, database or some other datasource and conduct a scan based
* on this pre-defined evidence.
* *
* <h2>Example:</h2> * <h2>Example:</h2>
* <pre> * <pre>
* List<Dependency> dependencies = new ArrayList<Dependency>(); * List&lt;Dependency&gt; dependencies = new ArrayList&lt;Dependency&gt;();
* Dependency dependency = new Dependency(new File(FileUtils.getBitBucket())); * Dependency dependency = new Dependency(new File(FileUtils.getBitBucket()));
* dependency.getProductEvidence().addEvidence("my-datasource", "name", "Jetty", Confidence.HIGH); * dependency.getProductEvidence().addEvidence("my-datasource", "name", "Jetty", Confidence.HIGH);
* dependency.getVersionEvidence().addEvidence("my-datasource", "version", "5.1.10", Confidence.HIGH); * dependency.getVersionEvidence().addEvidence("my-datasource", "version", "5.1.10", Confidence.HIGH);
@@ -55,11 +59,12 @@ import org.slf4j.LoggerFactory;
* scan.execute(); * scan.execute();
* </pre> * </pre>
* *
* @author Steve Springett <steve.springett@owasp.org> * @author Steve Springett
*/ */
@SuppressWarnings("unused") @SuppressWarnings("unused")
public class DependencyCheckScanAgent { public class DependencyCheckScanAgent {
//<editor-fold defaultstate="collapsed" desc="private fields">
/** /**
* System specific new line character. * System specific new line character.
*/ */
@@ -72,6 +77,141 @@ public class DependencyCheckScanAgent {
* The application name for the report. * The application name for the report.
*/ */
private String applicationName = "Dependency-Check"; private String applicationName = "Dependency-Check";
/**
* The pre-determined dependencies to scan
*/
private List<Dependency> dependencies;
/**
* The location of the data directory that contains the local copy of the NVD CVE data.
*/
private String dataDirectory = null;
/**
* Specifies the destination directory for the generated Dependency-Check
* report.
*/
private String reportOutputDirectory;
/**
* Specifies whether the build should fail if a CVSS score above a specified
* level is identified. The default is 11; since CVSS scores range from 0 to
* 10, the build will never fail by default. The valid range for
* failBuildOnCVSS is 0 to 11, where any value above 10 will never cause the
* build to fail.
*/
private float failBuildOnCVSS = 11;
/**
* Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not
* recommended that this be set to false. Default is true.
*/
private boolean autoUpdate = true;
/**
* flag indicating whether or not to generate a report of findings.
*/
private boolean generateReport = true;
/**
* The report format to be generated (HTML, XML, VULN, ALL). This
* configuration option has no effect if using this within the Site plugin
* unless the externalReport is set to true. Default is HTML.
*/
private ReportGenerator.Format reportFormat = ReportGenerator.Format.HTML;
/**
* The Proxy Server.
*/
private String proxyServer;
/**
* The Proxy Port.
*/
private String proxyPort;
/**
* The Proxy username.
*/
private String proxyUsername;
/**
* The Proxy password.
*/
private String proxyPassword;
/**
* The Connection Timeout.
*/
private String connectionTimeout;
/**
* The file path used for verbose logging.
*/
private String logFile = null;
/**
* flag indicating whether or not to show a summary of findings.
*/
private boolean showSummary = true;
/**
* The path to the suppression file.
*/
private String suppressionFile;
/**
* The password to use when connecting to the database.
*/
private String databasePassword;
/**
* Whether or not the Maven Central analyzer is enabled.
*/
private boolean centralAnalyzerEnabled = true;
/**
* The URL of Maven Central.
*/
private String centralUrl;
/**
* Whether or not the nexus analyzer is enabled.
*/
private boolean nexusAnalyzerEnabled = true;
/**
* The URL of the Nexus server.
*/
private String nexusUrl;
/**
* Whether or not the defined proxy should be used when connecting to Nexus.
*/
private boolean nexusUsesProxy = true;
/**
* The database driver name, such as org.h2.Driver.
*/
private String databaseDriverName;
/**
* The path to the database driver JAR file if it is not on the class path.
*/
private String databaseDriverPath;
/**
* The database connection string.
*/
private String connectionString;
/**
* The user name for connecting to the database.
*/
private String databaseUser;
/**
* Additional ZIP file extensions to analyze. This should be a
* comma-separated list of file extensions to treat like ZIP files.
*/
private String zipExtensions;
/**
* The url for the modified NVD CVE (1.2 schema).
*/
private String cveUrl12Modified;
/**
* The url for the modified NVD CVE (2.0 schema).
*/
private String cveUrl20Modified;
/**
* Base Data Mirror URL for CVE 1.2.
*/
private String cveUrl12Base;
/**
* Data Mirror URL for CVE 2.0.
*/
private String cveUrl20Base;
/**
* The path to Mono for .NET assembly analysis on non-windows systems.
*/
private String pathToMono;
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="getters/setters">
/** /**
* Get the value of applicationName. * Get the value of applicationName.
@@ -91,11 +231,6 @@ public class DependencyCheckScanAgent {
this.applicationName = applicationName; this.applicationName = applicationName;
} }
/**
* The pre-determined dependencies to scan
*/
private List<Dependency> dependencies;
/** /**
* Returns a list of pre-determined dependencies. * Returns a list of pre-determined dependencies.
* *
@@ -114,11 +249,6 @@ public class DependencyCheckScanAgent {
this.dependencies = dependencies; this.dependencies = dependencies;
} }
/**
* The location of the data directory that contains
*/
private String dataDirectory = null;
/** /**
* Get the value of dataDirectory. * Get the value of dataDirectory.
* *
@@ -137,11 +267,6 @@ public class DependencyCheckScanAgent {
this.dataDirectory = dataDirectory; this.dataDirectory = dataDirectory;
} }
/**
* Specifies the destination directory for the generated Dependency-Check report.
*/
private String reportOutputDirectory;
/** /**
* Get the value of reportOutputDirectory. * Get the value of reportOutputDirectory.
* *
@@ -160,13 +285,6 @@ public class DependencyCheckScanAgent {
this.reportOutputDirectory = reportOutputDirectory; this.reportOutputDirectory = reportOutputDirectory;
} }
/**
* Specifies if the build should be failed if a CVSS score above a specified level is identified. The default is 11 which
* means since the CVSS scores are 0-10, by default the build will never fail and the CVSS score is set to 11. The valid range
* for the fail build on CVSS is 0 to 11, where anything above 10 will not cause the build to fail.
*/
private float failBuildOnCVSS = 11;
/** /**
* Get the value of failBuildOnCVSS. * Get the value of failBuildOnCVSS.
* *
@@ -185,12 +303,6 @@ public class DependencyCheckScanAgent {
this.failBuildOnCVSS = failBuildOnCVSS; this.failBuildOnCVSS = failBuildOnCVSS;
} }
/**
* Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not recommended that this be turned to false. Default
* is true.
*/
private boolean autoUpdate = true;
/** /**
* Get the value of autoUpdate. * Get the value of autoUpdate.
* *
@@ -209,11 +321,6 @@ public class DependencyCheckScanAgent {
this.autoUpdate = autoUpdate; this.autoUpdate = autoUpdate;
} }
/**
* flag indicating whether or not to generate a report of findings.
*/
private boolean generateReport = true;
/** /**
* Get the value of generateReport. * Get the value of generateReport.
* *
@@ -232,12 +339,6 @@ public class DependencyCheckScanAgent {
this.generateReport = generateReport; this.generateReport = generateReport;
} }
/**
* The report format to be generated (HTML, XML, VULN, ALL). This configuration option has no affect if using this within the
* Site plugin unless the externalReport is set to true. Default is HTML.
*/
private ReportGenerator.Format reportFormat = ReportGenerator.Format.HTML;
/** /**
* Get the value of reportFormat. * Get the value of reportFormat.
* *
@@ -256,11 +357,6 @@ public class DependencyCheckScanAgent {
this.reportFormat = reportFormat; this.reportFormat = reportFormat;
} }
/**
* The Proxy Server.
*/
private String proxyServer;
/** /**
* Get the value of proxyServer. * Get the value of proxyServer.
* *
@@ -283,7 +379,9 @@ public class DependencyCheckScanAgent {
* Get the value of proxyServer. * Get the value of proxyServer.
* *
* @return the value of proxyServer * @return the value of proxyServer
* @deprecated use {@link org.owasp.dependencycheck.agent.DependencyCheckScanAgent#getProxyServer()} instead * @deprecated use
* {@link org.owasp.dependencycheck.agent.DependencyCheckScanAgent#getProxyServer()}
* instead
*/ */
@Deprecated @Deprecated
public String getProxyUrl() { public String getProxyUrl() {
@@ -302,11 +400,6 @@ public class DependencyCheckScanAgent {
this.proxyServer = proxyUrl; this.proxyServer = proxyUrl;
} }
/**
* The Proxy Port.
*/
private String proxyPort;
/** /**
* Get the value of proxyPort. * Get the value of proxyPort.
* *
@@ -325,11 +418,6 @@ public class DependencyCheckScanAgent {
this.proxyPort = proxyPort; this.proxyPort = proxyPort;
} }
/**
* The Proxy username.
*/
private String proxyUsername;
/** /**
* Get the value of proxyUsername. * Get the value of proxyUsername.
* *
@@ -348,11 +436,6 @@ public class DependencyCheckScanAgent {
this.proxyUsername = proxyUsername; this.proxyUsername = proxyUsername;
} }
/**
* The Proxy password.
*/
private String proxyPassword;
/** /**
* Get the value of proxyPassword. * Get the value of proxyPassword.
* *
@@ -371,11 +454,6 @@ public class DependencyCheckScanAgent {
this.proxyPassword = proxyPassword; this.proxyPassword = proxyPassword;
} }
/**
* The Connection Timeout.
*/
private String connectionTimeout;
/** /**
* Get the value of connectionTimeout. * Get the value of connectionTimeout.
* *
@@ -394,11 +472,6 @@ public class DependencyCheckScanAgent {
this.connectionTimeout = connectionTimeout; this.connectionTimeout = connectionTimeout;
} }
/**
* The file path used for verbose logging.
*/
private String logFile = null;
/** /**
* Get the value of logFile. * Get the value of logFile.
* *
@@ -417,11 +490,6 @@ public class DependencyCheckScanAgent {
this.logFile = logFile; this.logFile = logFile;
} }
/**
* The path to the suppression file.
*/
private String suppressionFile;
/** /**
* Get the value of suppressionFile. * Get the value of suppressionFile.
* *
@@ -440,11 +508,6 @@ public class DependencyCheckScanAgent {
this.suppressionFile = suppressionFile; this.suppressionFile = suppressionFile;
} }
/**
* flag indicating whether or not to show a summary of findings.
*/
private boolean showSummary = true;
/** /**
* Get the value of showSummary. * Get the value of showSummary.
* *
@@ -463,11 +526,6 @@ public class DependencyCheckScanAgent {
this.showSummary = showSummary; this.showSummary = showSummary;
} }
/**
* Whether or not the Maven Central analyzer is enabled.
*/
private boolean centralAnalyzerEnabled = true;
/** /**
* Get the value of centralAnalyzerEnabled. * Get the value of centralAnalyzerEnabled.
* *
@@ -486,11 +544,6 @@ public class DependencyCheckScanAgent {
this.centralAnalyzerEnabled = centralAnalyzerEnabled; this.centralAnalyzerEnabled = centralAnalyzerEnabled;
} }
/**
* The URL of Maven Central.
*/
private String centralUrl;
/** /**
* Get the value of centralUrl. * Get the value of centralUrl.
* *
@@ -509,11 +562,6 @@ public class DependencyCheckScanAgent {
this.centralUrl = centralUrl; this.centralUrl = centralUrl;
} }
/**
* Whether or not the nexus analyzer is enabled.
*/
private boolean nexusAnalyzerEnabled = true;
/** /**
* Get the value of nexusAnalyzerEnabled. * Get the value of nexusAnalyzerEnabled.
* *
@@ -532,11 +580,6 @@ public class DependencyCheckScanAgent {
this.nexusAnalyzerEnabled = nexusAnalyzerEnabled; this.nexusAnalyzerEnabled = nexusAnalyzerEnabled;
} }
/**
* The URL of the Nexus server.
*/
private String nexusUrl;
/** /**
* Get the value of nexusUrl. * Get the value of nexusUrl.
* *
@@ -555,11 +598,6 @@ public class DependencyCheckScanAgent {
this.nexusUrl = nexusUrl; this.nexusUrl = nexusUrl;
} }
/**
* Whether or not the defined proxy should be used when connecting to Nexus.
*/
private boolean nexusUsesProxy = true;
/** /**
* Get the value of nexusUsesProxy. * Get the value of nexusUsesProxy.
* *
@@ -578,11 +616,6 @@ public class DependencyCheckScanAgent {
this.nexusUsesProxy = nexusUsesProxy; this.nexusUsesProxy = nexusUsesProxy;
} }
/**
* The database driver name; such as org.h2.Driver.
*/
private String databaseDriverName;
/** /**
* Get the value of databaseDriverName. * Get the value of databaseDriverName.
* *
@@ -601,11 +634,6 @@ public class DependencyCheckScanAgent {
this.databaseDriverName = databaseDriverName; this.databaseDriverName = databaseDriverName;
} }
/**
* The path to the database driver JAR file if it is not on the class path.
*/
private String databaseDriverPath;
/** /**
* Get the value of databaseDriverPath. * Get the value of databaseDriverPath.
* *
@@ -624,11 +652,6 @@ public class DependencyCheckScanAgent {
this.databaseDriverPath = databaseDriverPath; this.databaseDriverPath = databaseDriverPath;
} }
/**
* The database connection string.
*/
private String connectionString;
/** /**
* Get the value of connectionString. * Get the value of connectionString.
* *
@@ -647,11 +670,6 @@ public class DependencyCheckScanAgent {
this.connectionString = connectionString; this.connectionString = connectionString;
} }
/**
* The user name for connecting to the database.
*/
private String databaseUser;
/** /**
* Get the value of databaseUser. * Get the value of databaseUser.
* *
@@ -670,11 +688,6 @@ public class DependencyCheckScanAgent {
this.databaseUser = databaseUser; this.databaseUser = databaseUser;
} }
/**
* The password to use when connecting to the database.
*/
private String databasePassword;
/** /**
* Get the value of databasePassword. * Get the value of databasePassword.
* *
@@ -693,12 +706,6 @@ public class DependencyCheckScanAgent {
this.databasePassword = databasePassword; this.databasePassword = databasePassword;
} }
/**
* Additional ZIP File extensions to add analyze. This should be a comma-separated list of file extensions to treat like ZIP
* files.
*/
private String zipExtensions;
/** /**
* Get the value of zipExtensions. * Get the value of zipExtensions.
* *
@@ -717,11 +724,6 @@ public class DependencyCheckScanAgent {
this.zipExtensions = zipExtensions; this.zipExtensions = zipExtensions;
} }
/**
* The url for the modified NVD CVE (1.2 schema).
*/
private String cveUrl12Modified;
/** /**
* Get the value of cveUrl12Modified. * Get the value of cveUrl12Modified.
* *
@@ -740,11 +742,6 @@ public class DependencyCheckScanAgent {
this.cveUrl12Modified = cveUrl12Modified; this.cveUrl12Modified = cveUrl12Modified;
} }
/**
* The url for the modified NVD CVE (2.0 schema).
*/
private String cveUrl20Modified;
/** /**
* Get the value of cveUrl20Modified. * Get the value of cveUrl20Modified.
* *
@@ -763,11 +760,6 @@ public class DependencyCheckScanAgent {
this.cveUrl20Modified = cveUrl20Modified; this.cveUrl20Modified = cveUrl20Modified;
} }
/**
* Base Data Mirror URL for CVE 1.2.
*/
private String cveUrl12Base;
/** /**
* Get the value of cveUrl12Base. * Get the value of cveUrl12Base.
* *
@@ -786,11 +778,6 @@ public class DependencyCheckScanAgent {
this.cveUrl12Base = cveUrl12Base; this.cveUrl12Base = cveUrl12Base;
} }
/**
* Data Mirror URL for CVE 2.0.
*/
private String cveUrl20Base;
/** /**
* Get the value of cveUrl20Base. * Get the value of cveUrl20Base.
* *
@@ -809,11 +796,6 @@ public class DependencyCheckScanAgent {
this.cveUrl20Base = cveUrl20Base; this.cveUrl20Base = cveUrl20Base;
} }
/**
* The path to Mono for .NET assembly analysis on non-windows systems.
*/
private String pathToMono;
/** /**
* Get the value of pathToMono. * Get the value of pathToMono.
* *
@@ -831,16 +813,23 @@ public class DependencyCheckScanAgent {
public void setPathToMono(String pathToMono) { public void setPathToMono(String pathToMono) {
this.pathToMono = pathToMono; this.pathToMono = pathToMono;
} }
//</editor-fold>
/** /**
* Executes the Dependency-Check on the dependent libraries. * Executes the Dependency-Check on the dependent libraries.
* *
* @return the Engine used to scan the dependencies. * @return the Engine used to scan the dependencies.
* @throws org.owasp.dependencycheck.data.nvdcve.DatabaseException thrown if there is an exception connecting to the database * @throws ExceptionCollection a collection of one or more exceptions that
* occurred during analysis.
*/ */
private Engine executeDependencyCheck() throws DatabaseException { private Engine executeDependencyCheck() throws ExceptionCollection {
populateSettings(); populateSettings();
final Engine engine = new Engine(); final Engine engine;
try {
engine = new Engine();
} catch (DatabaseException ex) {
throw new ExceptionCollection(ex, true);
}
engine.setDependencies(this.dependencies); engine.setDependencies(this.dependencies);
engine.analyzeDependencies(); engine.analyzeDependencies();
return engine; return engine;
@@ -854,35 +843,25 @@ public class DependencyCheckScanAgent {
*/ */
private void generateExternalReports(Engine engine, File outDirectory) { private void generateExternalReports(Engine engine, File outDirectory) {
DatabaseProperties prop = null; DatabaseProperties prop = null;
CveDB cve = null; try (CveDB cve = CveDB.getInstance()) {
try {
cve = new CveDB();
cve.open();
prop = cve.getDatabaseProperties(); prop = cve.getDatabaseProperties();
} catch (DatabaseException ex) { } catch (DatabaseException ex) {
//TODO shouldn't this be a fatal exception
LOGGER.debug("Unable to retrieve DB Properties", ex); LOGGER.debug("Unable to retrieve DB Properties", ex);
} finally {
if (cve != null) {
cve.close();
}
} }
final ReportGenerator r = new ReportGenerator(this.applicationName, engine.getDependencies(), engine.getAnalyzers(), prop); final ReportGenerator r = new ReportGenerator(this.applicationName, engine.getDependencies(), engine.getAnalyzers(), prop);
try { try {
r.generateReports(outDirectory.getCanonicalPath(), this.reportFormat.name()); r.generateReports(outDirectory.getCanonicalPath(), this.reportFormat.name());
} catch (IOException ex) { } catch (IOException | ReportException ex) {
LOGGER.error( LOGGER.error("Unexpected exception occurred during analysis; please see the verbose error log for more details.");
"Unexpected exception occurred during analysis; please see the verbose error log for more details.");
LOGGER.debug("", ex);
} catch (Throwable ex) {
LOGGER.error(
"Unexpected exception occurred during analysis; please see the verbose error log for more details.");
LOGGER.debug("", ex); LOGGER.debug("", ex);
} }
} }
/** /**
* Takes the properties supplied and updates the dependency-check settings. Additionally, this sets the system properties * Takes the properties supplied and updates the dependency-check settings.
* required to change the proxy server, port, and connection timeout. * Additionally, this sets the system properties required to change the
* proxy server, port, and connection timeout.
*/ */
private void populateSettings() { private void populateSettings() {
Settings.initialize(); Settings.initialize();
@@ -925,7 +904,8 @@ public class DependencyCheckScanAgent {
* Executes the dependency-check and generates the report. * Executes the dependency-check and generates the report.
* *
* @return a reference to the engine used to perform the scan. * @return a reference to the engine used to perform the scan.
* @throws org.owasp.dependencycheck.exception.ScanAgentException thrown if there is an exception executing the scan. * @throws org.owasp.dependencycheck.exception.ScanAgentException thrown if
* there is an exception executing the scan.
*/ */
public Engine execute() throws ScanAgentException { public Engine execute() throws ScanAgentException {
Engine engine = null; Engine engine = null;
@@ -940,10 +920,12 @@ public class DependencyCheckScanAgent {
if (this.failBuildOnCVSS <= 10) { if (this.failBuildOnCVSS <= 10) {
checkForFailure(engine.getDependencies()); checkForFailure(engine.getDependencies());
} }
} catch (DatabaseException ex) { } catch (ExceptionCollection ex) {
LOGGER.error( if (ex.isFatal()) {
"Unable to connect to the dependency-check database; analysis has stopped"); LOGGER.error("A fatal exception occurred during analysis; analysis has stopped. Please see the debug log for more details.");
LOGGER.debug("", ex); LOGGER.debug("", ex);
}
throw new ScanAgentException("One or more exceptions occurred during analysis; please see the debug log for more details.", ex);
} finally { } finally {
Settings.cleanup(true); Settings.cleanup(true);
if (engine != null) { if (engine != null) {
@@ -954,11 +936,12 @@ public class DependencyCheckScanAgent {
} }
/** /**
* Checks to see if a vulnerability has been identified with a CVSS score that is above the threshold set in the * Checks to see if a vulnerability has been identified with a CVSS score
* configuration. * that is above the threshold set in the configuration.
* *
* @param dependencies the list of dependency objects * @param dependencies the list of dependency objects
* @throws org.owasp.dependencycheck.exception.ScanAgentException thrown if there is an exception executing the scan. * @throws org.owasp.dependencycheck.exception.ScanAgentException thrown if
* there is an exception executing the scan.
*/ */
private void checkForFailure(List<Dependency> dependencies) throws ScanAgentException { private void checkForFailure(List<Dependency> dependencies) throws ScanAgentException {
final StringBuilder ids = new StringBuilder(); final StringBuilder ids = new StringBuilder();
@@ -978,7 +961,7 @@ public class DependencyCheckScanAgent {
} }
if (ids.length() > 0) { if (ids.length() > 0) {
final String msg = String.format("%n%nDependency-Check Failure:%n" final String msg = String.format("%n%nDependency-Check Failure:%n"
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater then '%.1f': %s%n" + "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than '%.1f': %s%n"
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString()); + "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString());
throw new ScanAgentException(msg); throw new ScanAgentException(msg);
@@ -986,7 +969,8 @@ public class DependencyCheckScanAgent {
} }
/** /**
* Generates a warning message listing a summary of dependencies and their associated CPE and CVE entries. * Generates a warning message listing a summary of dependencies and their
* associated CPE and CVE entries.
* *
* @param dependencies a list of dependency objects * @param dependencies a list of dependency objects
*/ */
@@ -1023,5 +1007,4 @@ public class DependencyCheckScanAgent {
summary.toString()); summary.toString());
} }
} }
} }
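Pulling the class-level Javadoc example together with the new ExceptionCollection-aware execute(), a hedged end-to-end sketch of driving the scan agent from pre-built evidence; the evidence values and output path are illustrative, and the setter names are assumed to follow the fields shown above:

    List<Dependency> dependencies = new ArrayList<>();
    Dependency dependency = new Dependency(new File(FileUtils.getBitBucket()));
    dependency.getProductEvidence().addEvidence("my-datasource", "name", "Jetty", Confidence.HIGH);
    dependency.getVersionEvidence().addEvidence("my-datasource", "version", "5.1.10", Confidence.HIGH);
    dependencies.add(dependency);

    DependencyCheckScanAgent scan = new DependencyCheckScanAgent();
    scan.setDependencies(dependencies);
    scan.setReportFormat(ReportGenerator.Format.ALL);
    scan.setReportOutputDirectory(System.getProperty("user.home"));
    scan.setFailBuildOnCVSS(7.0f);          // fail when any finding scores 7.0 or higher
    try {
        scan.execute();
    } catch (ScanAgentException ex) {
        // raised for analysis failures or for findings above the CVSS threshold
        LOGGER.error(ex.getMessage());
    }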

View File

@@ -17,20 +17,125 @@
*/ */
package org.owasp.dependencycheck.analyzer; package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* Base class for analyzers that provides default implementations of
* initialize and close, as most analyzers do not need these methods.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public abstract class AbstractAnalyzer implements Analyzer { public abstract class AbstractAnalyzer implements Analyzer {
/** /**
* The initialize method does nothing for this Analyzer. * The logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractAnalyzer.class);
/**
* A flag indicating whether or not the analyzer is enabled.
*/
private volatile boolean enabled = true;
/**
* Get the value of enabled.
*
* @return the value of enabled
*/
@Override
public boolean isEnabled() {
return enabled;
}
/**
* Set the value of enabled.
*
* @param enabled new value of enabled
*/
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
protected abstract String getAnalyzerEnabledSettingKey();
/**
* Analyzes a given dependency. If the dependency is an archive, such as a
* WAR or EAR, the contents are extracted, scanned, and added to the list of
* dependencies within the engine.
*
* @param dependency the dependency to analyze
* @param engine the engine scanning
* @throws AnalysisException thrown if there is an analysis exception
*/
protected abstract void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException;
/**
* Initializes a given Analyzer. This will be skipped if the analyzer is
* disabled.
*
* @throws InitializationException thrown if there is an exception
*/
protected void initializeAnalyzer() throws InitializationException {
}
/**
* Closes a given Analyzer. This will be skipped if the analyzer is
* disabled.
* *
* @throws Exception thrown if there is an exception * @throws Exception thrown if there is an exception
*/ */
protected void closeAnalyzer() throws Exception {
// Intentionally empty; analyzers will override this if they must close a resource.
}
/**
* Analyzes a given dependency. If the dependency is an archive, such as a
* WAR or EAR, the contents are extracted, scanned, and added to the list of
* dependencies within the engine.
*
* @param dependency the dependency to analyze
* @param engine the engine scanning
* @throws AnalysisException thrown if there is an analysis exception
*/
@Override @Override
public void initialize() throws Exception { public final void analyze(Dependency dependency, Engine engine) throws AnalysisException {
//do nothing if (this.isEnabled()) {
analyzeDependency(dependency, engine);
}
}
/**
* Initializes the analyzer. Reads the analyzer's enabled setting and, if enabled, delegates to initializeAnalyzer().
*
* @throws InitializationException thrown if there is an exception
*/
@Override
public final void initialize() throws InitializationException {
final String key = getAnalyzerEnabledSettingKey();
try {
this.setEnabled(Settings.getBoolean(key, true));
} catch (InvalidSettingException ex) {
LOGGER.warn("Invalid setting for property '{}'", key);
LOGGER.debug("", ex);
}
if (isEnabled()) {
initializeAnalyzer();
} else {
LOGGER.debug("{} has been disabled", getName());
}
} }
/** /**
@@ -39,7 +144,19 @@ public abstract class AbstractAnalyzer implements Analyzer {
* @throws Exception thrown if there is an exception * @throws Exception thrown if there is an exception
*/ */
@Override @Override
public void close() throws Exception { public final void close() throws Exception {
//do nothing if (isEnabled()) {
closeAnalyzer();
}
}
/**
* The default is to support parallel processing.
*
* @return true
*/
@Override
public boolean supportsParallelProcessing() {
return true;
} }
} }
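With the enabled flag, initialize(), analyze(), and close() now final in the base class, a concrete analyzer only supplies the hooks below. A minimal hedged sketch; the class name, settings key, and analysis body are placeholders, while the overridden methods follow the contract shown above:

    public class ExampleAnalyzer extends AbstractAnalyzer {

        @Override
        public String getName() {
            return "Example Analyzer";
        }

        @Override
        public AnalysisPhase getAnalysisPhase() {
            return AnalysisPhase.INFORMATION_COLLECTION;
        }

        @Override
        protected String getAnalyzerEnabledSettingKey() {
            return "analyzer.example.enabled";          // placeholder settings key
        }

        @Override
        protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
            // collect evidence or identifiers for the dependency here
        }
    }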

View File

@@ -17,11 +17,6 @@
*/ */
package org.owasp.dependencycheck.analyzer; package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -30,24 +25,17 @@ import java.io.FileFilter;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet; import java.util.HashSet;
import java.util.Set; import java.util.Set;
import org.owasp.dependencycheck.exception.InitializationException;
/** /**
* The base FileTypeAnalyzer that all analyzers that have specific file types they analyze should extend. * The base FileTypeAnalyzer that all analyzers that have specific file types
* they analyze should extend.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implements FileTypeAnalyzer { public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implements FileTypeAnalyzer {
//<editor-fold defaultstate="collapsed" desc="Constructor"> //<editor-fold defaultstate="collapsed" desc="Field definitions, getters, and setters ">
/**
* Base constructor that all children must call. This checks the configuration to determine if the analyzer is enabled.
*/
public AbstractFileTypeAnalyzer() {
reset();
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Field definitions">
/** /**
* The logger. * The logger.
*/ */
@@ -58,7 +46,8 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
private boolean filesMatched = false; private boolean filesMatched = false;
/** /**
* Get the value of filesMatched. A flag indicating whether the scan included any file types this analyzer supports. * Get the value of filesMatched. A flag indicating whether the scan
* included any file types this analyzer supports.
* *
* @return the value of filesMatched * @return the value of filesMatched
*/ */
@@ -67,7 +56,8 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
} }
/** /**
* Set the value of filesMatched. A flag indicating whether the scan included any file types this analyzer supports. * Set the value of filesMatched. A flag indicating whether the scan
* included any file types this analyzer supports.
* *
* @param filesMatched new value of filesMatched * @param filesMatched new value of filesMatched
*/ */
@@ -75,122 +65,62 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
this.filesMatched = filesMatched; this.filesMatched = filesMatched;
} }
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Final implementations for the Analyzer interface">
/** /**
* A flag indicating whether or not the analyzer is enabled. * Initializes the analyzer.
*/
private boolean enabled = true;
/**
* Get the value of enabled.
* *
* @return the value of enabled * @throws InitializationException thrown if there is an exception during
* initialization
*/ */
public boolean isEnabled() { @Override
return enabled; protected final void initializeAnalyzer() throws InitializationException {
if (filesMatched) {
initializeFileTypeAnalyzer();
} else {
this.setEnabled(false);
}
} }
/** //</editor-fold>
* Set the value of enabled.
*
* @param enabled new value of enabled
*/
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Abstract methods children must implement"> //<editor-fold defaultstate="collapsed" desc="Abstract methods children must implement">
/** /**
* <p> * <p>
* Returns the {@link java.io.FileFilter} used to determine which files are to be analyzed. An example would be an analyzer * Returns the {@link java.io.FileFilter} used to determine which files are
* that inspected Java jar files. Implementors may use {@link org.owasp.dependencycheck.utils.FileFilterBuilder}.</p> * to be analyzed. An example would be an analyzer that inspected Java jar
* files. Implementors may use
* {@link org.owasp.dependencycheck.utils.FileFilterBuilder}.</p>
* <p>
* If the analyzer returns null it will not cause additional files to be
* analyzed, but will be executed against every file loaded.</p>
* *
* @return the file filter used to determine which files are to be analyzed * @return the file filter used to determine which files are to be analyzed
* <p/>
* <p>
* If the analyzer returns null it will not cause additional files to be analyzed, but will be executed against every file
* loaded.</p>
*/ */
protected abstract FileFilter getFileFilter(); protected abstract FileFilter getFileFilter();
/** /**
* Initializes the file type analyzer. * Initializes the file type analyzer.
* *
* @throws Exception thrown if there is an exception during initialization * @throws InitializationException thrown if there is an exception during
* initialization
*/ */
protected abstract void initializeFileTypeAnalyzer() throws Exception; protected abstract void initializeFileTypeAnalyzer() throws InitializationException;
//</editor-fold>
/** /**
* Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted, scanned, * Determines if the file can be analyzed by the analyzer.
* and added to the list of dependencies within the engine.
* *
* @param dependency the dependency to analyze * @param pathname the path to the file
* @param engine the engine scanning * @return true if the file can be analyzed by the given analyzer; otherwise
* @throws AnalysisException thrown if there is an analysis exception * false
*/ */
protected abstract void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException;
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
protected abstract String getAnalyzerEnabledSettingKey();
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Final implementations for the Analyzer interface">
/**
* Initializes the analyzer.
*
* @throws Exception thrown if there is an exception during initialization
*/
@Override
public final void initialize() throws Exception {
if (filesMatched) {
initializeFileTypeAnalyzer();
} else {
enabled = false;
}
}
/**
* Resets the enabled flag on the analyzer.
*/
@Override
public final void reset() {
final String key = getAnalyzerEnabledSettingKey();
try {
enabled = Settings.getBoolean(key, true);
} catch (InvalidSettingException ex) {
LOGGER.warn("Invalid setting for property '{}'", key);
LOGGER.debug("", ex);
LOGGER.warn("{} has been disabled", getName());
}
}
/**
* Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted, scanned,
* and added to the list of dependencies within the engine.
*
* @param dependency the dependency to analyze
* @param engine the engine scanning
* @throws AnalysisException thrown if there is an analysis exception
*/
@Override
public final void analyze(Dependency dependency, Engine engine) throws AnalysisException {
if (enabled) {
analyzeFileType(dependency, engine);
}
}
@Override @Override
public boolean accept(File pathname) { public boolean accept(File pathname) {
final FileFilter filter = getFileFilter(); final FileFilter filter = getFileFilter();
boolean accepted = false; boolean accepted = false;
if (null == filter) { if (null == filter) {
LOGGER.error("The '{}' analyzer is misconfigured and does not have a file filter; it will be disabled", getName()); LOGGER.error("The '{}' analyzer is misconfigured and does not have a file filter; it will be disabled", getName());
} else if (enabled) { } else if (this.isEnabled()) {
accepted = filter.accept(pathname); accepted = filter.accept(pathname);
if (accepted) { if (accepted) {
filesMatched = true; filesMatched = true;
@@ -199,13 +129,10 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
return accepted; return accepted;
} }
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Static utility methods">
/** /**
* <p> * <p>
* Utility method to help in the creation of the extensions set. This constructs a new Set that can be used in a final static * Utility method to help in the creation of the extensions set. This
* declaration.</p> * constructs a new Set that can be used in a final static declaration.</p>
* <p/>
* <p> * <p>
* This implementation was copied from * This implementation was copied from
* http://stackoverflow.com/questions/2041778/initialize-java-hashset-values-by-construction</p> * http://stackoverflow.com/questions/2041778/initialize-java-hashset-values-by-construction</p>
@@ -214,10 +141,8 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
* @return a Set of strings. * @return a Set of strings.
*/ */
protected static Set<String> newHashSet(String... strings) { protected static Set<String> newHashSet(String... strings) {
final Set<String> set = new HashSet<String>(strings.length); final Set<String> set = new HashSet<>(strings.length);
Collections.addAll(set, strings); Collections.addAll(set, strings);
return set; return set;
} }
//</editor-fold>
} }
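File type analyzers follow the same pattern plus a file filter; the final initializeAnalyzer() above only calls initializeFileTypeAnalyzer() when at least one matching file was scanned, otherwise the analyzer disables itself. A hedged sketch with a placeholder extension and settings key:

    public class ExampleFileAnalyzer extends AbstractFileTypeAnalyzer {

        private static final FileFilter FILTER =
                FileFilterBuilder.newInstance().addExtensions("example").build();

        @Override
        protected FileFilter getFileFilter() {
            return FILTER;
        }

        @Override
        protected void initializeFileTypeAnalyzer() throws InitializationException {
            // open shared resources; only reached when matching files were scanned
        }

        @Override
        protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
            // extract evidence from the matched file
        }

        @Override
        public String getName() {
            return "Example File Analyzer";
        }

        @Override
        public AnalysisPhase getAnalysisPhase() {
            return AnalysisPhase.INFORMATION_COLLECTION;
        }

        @Override
        protected String getAnalyzerEnabledSettingKey() {
            return "analyzer.examplefile.enabled";      // placeholder settings key
        }
    }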

View File

@@ -25,18 +25,21 @@ import java.net.URL;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.owasp.dependencycheck.suppression.SuppressionParseException; import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.suppression.SuppressionParser; import org.owasp.dependencycheck.xml.suppression.SuppressionParseException;
import org.owasp.dependencycheck.suppression.SuppressionRule; import org.owasp.dependencycheck.xml.suppression.SuppressionParser;
import org.owasp.dependencycheck.xml.suppression.SuppressionRule;
import org.owasp.dependencycheck.utils.DownloadFailedException; import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader; import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.FileUtils; import org.owasp.dependencycheck.utils.FileUtils;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
/** /**
* Abstract base suppression analyzer that contains methods for parsing the suppression xml file. * Abstract base suppression analyzer that contains methods for parsing the
* suppression xml file.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@@ -61,12 +64,15 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
/** /**
* The initialize method loads the suppression XML file. * The initialize method loads the suppression XML file.
* *
* @throws Exception thrown if there is an exception * @throws InitializationException thrown if there is an exception
*/ */
@Override @Override
public void initialize() throws Exception { public void initializeAnalyzer() throws InitializationException {
super.initialize(); try {
loadSuppressionData(); loadSuppressionData();
} catch (SuppressionParseException ex) {
throw new InitializationException("Error initializing the suppression analyzer", ex);
}
} }
/** /**
@@ -101,9 +107,10 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
final SuppressionParser parser = new SuppressionParser(); final SuppressionParser parser = new SuppressionParser();
File file = null; File file = null;
try { try {
rules = parser.parseSuppressionRules(this.getClass().getClassLoader().getResourceAsStream("dependencycheck-base-suppression.xml")); final InputStream in = this.getClass().getClassLoader().getResourceAsStream("dependencycheck-base-suppression.xml");
} catch (SuppressionParseException ex) { rules = parser.parseSuppressionRules(in);
LOGGER.debug("Unable to parse the base suppression data file", ex); } catch (SAXException ex) {
throw new SuppressionParseException("Unable to parse the base suppression data file", ex);
} }
final String suppressionFilePath = Settings.getString(Settings.KEYS.SUPPRESSION_FILE); final String suppressionFilePath = Settings.getString(Settings.KEYS.SUPPRESSION_FILE);
if (suppressionFilePath == null) { if (suppressionFilePath == null) {
@@ -123,29 +130,33 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
} }
} else { } else {
file = new File(suppressionFilePath); file = new File(suppressionFilePath);
if (!file.exists()) { if (!file.exists()) {
final InputStream suppressionsFromClasspath = this.getClass().getClassLoader().getResourceAsStream(suppressionFilePath); try (InputStream suppressionsFromClasspath = this.getClass().getClassLoader().getResourceAsStream(suppressionFilePath)) {
if (suppressionsFromClasspath != null) { if (suppressionsFromClasspath != null) {
deleteTempFile = true; deleteTempFile = true;
file = FileUtils.getTempFile("suppression", "xml"); file = FileUtils.getTempFile("suppression", "xml");
try { try {
org.apache.commons.io.FileUtils.copyInputStreamToFile(suppressionsFromClasspath, file); org.apache.commons.io.FileUtils.copyInputStreamToFile(suppressionsFromClasspath, file);
} catch (IOException ex) { } catch (IOException ex) {
throwSuppressionParseException("Unable to locate suppressions file in classpath", ex); throwSuppressionParseException("Unable to locate suppressions file in classpath", ex);
}
} }
} }
} }
} }
if (file != null) { if (file != null) {
if (!file.exists()) {
final String msg = String.format("Suppression file '%s' does not exists", file.getPath());
LOGGER.warn(msg);
throw new SuppressionParseException(msg);
}
try { try {
//rules = parser.parseSuppressionRules(file);
rules.addAll(parser.parseSuppressionRules(file)); rules.addAll(parser.parseSuppressionRules(file));
LOGGER.debug("{} suppression rules were loaded.", rules.size()); LOGGER.debug("{} suppression rules were loaded.", rules.size());
} catch (SuppressionParseException ex) { } catch (SuppressionParseException ex) {
LOGGER.warn("Unable to parse suppression xml file '{}'", file.getPath()); LOGGER.warn("Unable to parse suppression xml file '{}'", file.getPath());
LOGGER.warn(ex.getMessage()); LOGGER.warn(ex.getMessage());
LOGGER.debug("", ex);
throw ex; throw ex;
} }
} }
@@ -153,6 +164,8 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
throwSuppressionParseException("Unable to fetch the configured suppression file", ex); throwSuppressionParseException("Unable to fetch the configured suppression file", ex);
} catch (MalformedURLException ex) { } catch (MalformedURLException ex) {
throwSuppressionParseException("Configured suppression file has an invalid URL", ex); throwSuppressionParseException("Configured suppression file has an invalid URL", ex);
} catch (SuppressionParseException ex) {
throw ex;
} catch (IOException ex) { } catch (IOException ex) {
throwSuppressionParseException("Unable to create temp file for suppressions", ex); throwSuppressionParseException("Unable to create temp file for suppressions", ex);
} finally { } finally {
@@ -167,7 +180,8 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
* *
* @param message the exception message * @param message the exception message
* @param exception the cause of the exception * @param exception the cause of the exception
* @throws SuppressionParseException throws the generated SuppressionParseException * @throws SuppressionParseException throws the generated
* SuppressionParseException
*/ */
private void throwSuppressionParseException(String message, Exception exception) throws SuppressionParseException { private void throwSuppressionParseException(String message, Exception exception) throws SuppressionParseException {
LOGGER.warn(message); LOGGER.warn(message);
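The analyzer loads the bundled base suppression rules first and then, when Settings.KEYS.SUPPRESSION_FILE is set, a user-supplied file, URL, or classpath resource. A hedged configuration sketch; the path is illustrative and Settings.setString is assumed to be the counterpart of the Settings.getString call above:

    // Register a project-specific suppression file before the Engine is built;
    // a URL or classpath resource also works per the loading logic above.
    Settings.initialize();
    Settings.setString(Settings.KEYS.SUPPRESSION_FILE, "config/dependency-check-suppressions.xml");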

View File

@@ -29,13 +29,17 @@ public enum AnalysisPhase {
*/ */
INITIAL, INITIAL,
/** /**
* Pre information collection phase * Pre information collection phase.
*/ */
PRE_INFORMATION_COLLECTION, PRE_INFORMATION_COLLECTION,
/** /**
* Information collection phase. * Information collection phase.
*/ */
INFORMATION_COLLECTION, INFORMATION_COLLECTION,
/**
* Post information collection phase.
*/
POST_INFORMATION_COLLECTION,
/** /**
* Pre identifier analysis phase. * Pre identifier analysis phase.
*/ */

View File

@@ -20,24 +20,28 @@ package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.Engine; import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;
/** /**
* An interface that defines an Analyzer that is used to identify Dependencies. An analyzer will collect information * An interface that defines an Analyzer that is used to identify Dependencies.
* about the dependency in the form of Evidence. * An analyzer will collect information about the dependency in the form of
* Evidence.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public interface Analyzer { public interface Analyzer {
/** /**
* Analyzes the given dependency. The analysis could be anything from identifying an Identifier for the dependency, * Analyzes the given dependency. The analysis could be anything from
* to finding vulnerabilities, etc. Additionally, if the analyzer collects enough information to add a description * identifying an Identifier for the dependency, to finding vulnerabilities,
* or license information for the dependency it should be added. * etc. Additionally, if the analyzer collects enough information to add a
* description or license information for the dependency it should be added.
* *
* @param dependency a dependency to analyze. * @param dependency a dependency to analyze.
* @param engine the engine that is scanning the dependencies - this is useful if we need to check other * @param engine the engine that is scanning the dependencies - this is
* dependencies * useful if we need to check other dependencies
* @throws AnalysisException is thrown if there is an error analyzing the dependency file * @throws AnalysisException is thrown if there is an error analyzing the
* dependency file
*/ */
void analyze(Dependency dependency, Engine engine) throws AnalysisException; void analyze(Dependency dependency, Engine engine) throws AnalysisException;
@@ -56,16 +60,33 @@ public interface Analyzer {
AnalysisPhase getAnalysisPhase(); AnalysisPhase getAnalysisPhase();
/** /**
* The initialize method is called (once) prior to the analyze method being called on all of the dependencies. * The initialize method is called (once) prior to the analyze method being
* called on all of the dependencies.
* *
* @throws Exception is thrown if an exception occurs initializing the analyzer. * @throws InitializationException is thrown if an exception occurs
* initializing the analyzer.
*/ */
void initialize() throws Exception; void initialize() throws InitializationException;
/** /**
* The close method is called after all of the dependencies have been analyzed. * The close method is called after all of the dependencies have been
* analyzed.
* *
* @throws Exception is thrown if an exception occurs closing the analyzer. * @throws Exception is thrown if an exception occurs closing the analyzer.
*/ */
void close() throws Exception; void close() throws Exception;
/**
* Returns whether multiple instances of the same type of analyzer can run in parallel.
* Note that running analyzers of different types in parallel is not supported at all.
*
* @return {@code true} if the analyzer supports parallel processing, {@code false} otherwise
*/
boolean supportsParallelProcessing();
/**
* Get the value of enabled.
*
* @return the value of enabled
*/
boolean isEnabled();
} }
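AbstractAnalyzer defaults supportsParallelProcessing() to true, so an analyzer that touches shared, non-thread-safe state opts out by overriding it. A hedged sketch of such an override inside a hypothetical analyzer:

    // This analyzer mutates shared state (e.g. a single on-disk cache), so do
    // not let the engine run several instances of it in parallel.
    @Override
    public boolean supportsParallelProcessing() {
        return false;
    }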

View File

@@ -17,8 +17,13 @@
*/ */
package org.owasp.dependencycheck.analyzer; package org.owasp.dependencycheck.analyzer;
import java.util.ArrayList;
import java.util.Iterator; import java.util.Iterator;
import java.util.List;
import java.util.ServiceLoader; import java.util.ServiceLoader;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.LoggerFactory;
/** /**
* The Analyzer Service Loader. This class loads all services that implement * The Analyzer Service Loader. This class loads all services that implement
@@ -27,11 +32,15 @@ import java.util.ServiceLoader;
* @author Jeremy Long * @author Jeremy Long
*/ */
public class AnalyzerService { public class AnalyzerService {
/**
* The Logger for use throughout the class.
*/
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(AnalyzerService.class);
/** /**
* The service loader for analyzers. * The service loader for analyzers.
*/ */
private final ServiceLoader<Analyzer> loader; private final ServiceLoader<Analyzer> service;
/** /**
* Creates a new instance of AnalyzerService. * Creates a new instance of AnalyzerService.
@@ -39,15 +48,31 @@ public class AnalyzerService {
* @param classLoader the ClassLoader to use when dynamically loading Analyzer and Update services * @param classLoader the ClassLoader to use when dynamically loading Analyzer and Update services
*/ */
public AnalyzerService(ClassLoader classLoader) { public AnalyzerService(ClassLoader classLoader) {
loader = ServiceLoader.load(Analyzer.class, classLoader); service = ServiceLoader.load(Analyzer.class, classLoader);
} }
/** /**
* Returns an Iterator for all instances of the Analyzer interface. * Returns a list of all instances of the Analyzer interface.
* *
* @return an iterator of Analyzers. * @return a list of Analyzers.
*/ */
public Iterator<Analyzer> getAnalyzers() { public List<Analyzer> getAnalyzers() {
return loader.iterator(); final List<Analyzer> analyzers = new ArrayList<>();
final Iterator<Analyzer> iterator = service.iterator();
boolean experimentalEnabled = false;
try {
experimentalEnabled = Settings.getBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, false);
} catch (InvalidSettingException ex) {
LOGGER.error("invalid experimental setting", ex);
}
while (iterator.hasNext()) {
final Analyzer a = iterator.next();
if (!experimentalEnabled && a.getClass().isAnnotationPresent(Experimental.class)) {
continue;
}
LOGGER.debug("Loaded Analyzer {}", a.getName());
analyzers.add(a);
}
return analyzers;
} }
} }
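Analyzers are discovered through the standard java.util.ServiceLoader mechanism, so a new implementation is registered by listing its class name in META-INF/services/org.owasp.dependencycheck.analyzer.Analyzer on the classpath. Implementations annotated with @Experimental are skipped unless the experimental setting is enabled; a hedged sketch of turning them on, assuming Settings.setBoolean mirrors the getBoolean call above:

    // Opt in to analyzers annotated with @Experimental before the Engine loads them.
    Settings.initialize();
    Settings.setBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, true);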

View File

@@ -18,17 +18,14 @@
package org.owasp.dependencycheck.analyzer; package org.owasp.dependencycheck.analyzer;
import java.io.BufferedInputStream; import java.io.BufferedInputStream;
import java.io.Closeable;
import java.io.File; import java.io.File;
import java.io.FileFilter; import java.io.FileFilter;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.FileOutputStream; import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.Enumeration; import java.util.Enumeration;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
@@ -49,6 +46,7 @@ import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.analyzer.exception.ArchiveExtractionException; import org.owasp.dependencycheck.analyzer.exception.ArchiveExtractionException;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.FileFilterBuilder; import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.FileUtils; import org.owasp.dependencycheck.utils.FileUtils;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
@@ -58,8 +56,8 @@ import org.slf4j.LoggerFactory;
/** /**
* <p> * <p>
* An analyzer that extracts files from archives and ensures any supported files contained within the archive are added to the * An analyzer that extracts files from archives and ensures any supported files
* dependency list.</p> * contained within the archive are added to the dependency list.</p>
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@@ -70,7 +68,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/ */
private static final Logger LOGGER = LoggerFactory.getLogger(ArchiveAnalyzer.class); private static final Logger LOGGER = LoggerFactory.getLogger(ArchiveAnalyzer.class);
/** /**
* The count of directories created during analysis. This is used for creating temporary directories. * The count of directories created during analysis. This is used for
* creating temporary directories.
*/ */
private static int dirCount = 0; private static int dirCount = 0;
/** /**
@@ -78,7 +77,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/ */
private File tempFileLocation = null; private File tempFileLocation = null;
/** /**
* The max scan depth that the analyzer will recursively extract nested archives. * The max scan depth that the analyzer will recursively extract nested
* archives.
*/ */
private static final int MAX_SCAN_DEPTH = Settings.getInt("archive.scan.depth", 3); private static final int MAX_SCAN_DEPTH = Settings.getInt("archive.scan.depth", 3);
/** /**
@@ -98,43 +98,45 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* The set of things we can handle with Zip methods * The set of things we can handle with Zip methods
*/ */
private static final Set<String> ZIPPABLES = newHashSet("zip", "ear", "war", "jar", "sar", "apk", "nupkg"); private static final Set<String> KNOWN_ZIP_EXT = newHashSet("zip", "ear", "war", "jar", "sar", "apk", "nupkg");
/** /**
* The set of file extensions supported by this analyzer. Note for developers, any additions to this list will need to be * The set of file extensions supported by this analyzer. Note for
* explicitly handled in {@link #extractFiles(File, File, Engine)}. * developers, any additions to this list will need to be explicitly handled
* in {@link #extractFiles(File, File, Engine)}.
*/ */
private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz", "bz2", "tbz2"); private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz", "bz2", "tbz2");
/**
* Detects files with extensions to remove from the engine's collection of dependencies.
*/
private static final FileFilter REMOVE_FROM_ANALYSIS = FileFilterBuilder.newInstance().addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2")
.build();
static { static {
final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS); final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS);
if (additionalZipExt != null) { if (additionalZipExt != null) {
final String[] ext = additionalZipExt.split("\\s*,\\s*"); final String[] ext = additionalZipExt.split("\\s*,\\s*");
Collections.addAll(ZIPPABLES, ext); Collections.addAll(KNOWN_ZIP_EXT, ext);
} }
EXTENSIONS.addAll(ZIPPABLES); EXTENSIONS.addAll(KNOWN_ZIP_EXT);
} }
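
The static block above folds any extensions listed under Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS into the set of zip-like extensions, splitting the setting value on commas with optional surrounding whitespace. A quick standalone sketch of that split behaviour; the extension values here are made up for illustration:

    import java.util.Arrays;

    public class AdditionalZipExtSplitSketch {
        public static void main(String[] args) {
            // hypothetical setting value; "\\s*,\\s*" tolerates spaces around the commas
            final String additionalZipExt = "aar, whl ,xpi";
            final String[] ext = additionalZipExt.split("\\s*,\\s*");
            System.out.println(Arrays.toString(ext)); // prints [aar, whl, xpi]
        }
    }
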
/**
* Detects files with extensions to remove from the engine's collection of
* dependencies.
*/
private static final FileFilter REMOVE_FROM_ANALYSIS = FileFilterBuilder.newInstance()
.addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2").build();
/** /**
* The file filter used to filter supported files. * The file filter used to filter supported files.
*/ */
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build(); private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
@Override
protected FileFilter getFileFilter() {
return FILTER;
}
/** /**
* Detects files with .zip extension. * Detects files with .zip extension.
*/ */
private static final FileFilter ZIP_FILTER = FileFilterBuilder.newInstance().addExtensions("zip").build(); private static final FileFilter ZIP_FILTER = FileFilterBuilder.newInstance().addExtensions("zip").build();
@Override
protected FileFilter getFileFilter() {
return FILTER;
}
/** /**
* Returns the name of the analyzer. * Returns the name of the analyzer.
* *
@@ -157,7 +159,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
//</editor-fold> //</editor-fold>
/** /**
* Returns the key used in the properties file to reference the analyzer's enabled property. * Returns the key used in the properties file to reference the analyzer's
* enabled property.
* *
* @return the analyzer's enabled property setting key * @return the analyzer's enabled property setting key
*/ */
@@ -169,29 +172,39 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* The initialize method does nothing for this Analyzer. * The initialize method does nothing for this Analyzer.
* *
* @throws Exception is thrown if there is an exception deleting or creating temporary files * @throws InitializationException is thrown if there is an exception
* deleting or creating temporary files
*/ */
@Override @Override
public void initializeFileTypeAnalyzer() throws Exception { public void initializeFileTypeAnalyzer() throws InitializationException {
final File baseDir = Settings.getTempDirectory(); try {
tempFileLocation = File.createTempFile("check", "tmp", baseDir); final File baseDir = Settings.getTempDirectory();
if (!tempFileLocation.delete()) { tempFileLocation = File.createTempFile("check", "tmp", baseDir);
final String msg = String.format("Unable to delete temporary file '%s'.", tempFileLocation.getAbsolutePath()); if (!tempFileLocation.delete()) {
throw new AnalysisException(msg); setEnabled(false);
} final String msg = String.format("Unable to delete temporary file '%s'.", tempFileLocation.getAbsolutePath());
if (!tempFileLocation.mkdirs()) { throw new InitializationException(msg);
final String msg = String.format("Unable to create directory '%s'.", tempFileLocation.getAbsolutePath()); }
throw new AnalysisException(msg); if (!tempFileLocation.mkdirs()) {
setEnabled(false);
final String msg = String.format("Unable to create directory '%s'.", tempFileLocation.getAbsolutePath());
throw new InitializationException(msg);
}
} catch (IOException ex) {
setEnabled(false);
throw new InitializationException("Unable to create a temporary file", ex);
} }
} }
/** /**
* The close method deletes any temporary files and directories created during analysis. * The close method deletes any temporary files and directories created
* during analysis.
* *
* @throws Exception thrown if there is an exception deleting temporary files * @throws Exception thrown if there is an exception deleting temporary
* files
*/ */
@Override @Override
public void close() throws Exception { public void closeAnalyzer() throws Exception {
if (tempFileLocation != null && tempFileLocation.exists()) { if (tempFileLocation != null && tempFileLocation.exists()) {
LOGGER.debug("Attempting to delete temporary files"); LOGGER.debug("Attempting to delete temporary files");
final boolean success = FileUtils.delete(tempFileLocation); final boolean success = FileUtils.delete(tempFileLocation);
@@ -205,39 +218,71 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Analyzes a given dependency. If the dependency is an archive, such as a WAR or EAR, the contents are extracted, scanned, * Does not support parallel processing as it both modifies and iterates
* and added to the list of dependencies within the engine. * over the engine's list of dependencies.
*
* @return <code>true</code> if the analyzer supports parallel processing;
* otherwise <code>false</code>
* @see #analyzeDependency(Dependency, Engine)
* @see #findMoreDependencies(Engine, File)
*/
@Override
public boolean supportsParallelProcessing() {
return false;
}
/**
* Analyzes a given dependency. If the dependency is an archive, such as a
* WAR or EAR, the contents are extracted, scanned, and added to the list of
* dependencies within the engine.
* *
* @param dependency the dependency to analyze * @param dependency the dependency to analyze
* @param engine the engine scanning * @param engine the engine scanning
* @throws AnalysisException thrown if there is an analysis exception * @throws AnalysisException thrown if there is an analysis exception
*/ */
@Override @Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException { public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
final File f = new File(dependency.getActualFilePath()); final File f = new File(dependency.getActualFilePath());
final File tmpDir = getNextTempDirectory(); final File tmpDir = getNextTempDirectory();
extractFiles(f, tmpDir, engine); extractFiles(f, tmpDir, engine);
//make a copy //make a copy
final Set<Dependency> dependencySet = findMoreDependencies(engine, tmpDir); final List<Dependency> dependencySet = findMoreDependencies(engine, tmpDir);
if (!dependencySet.isEmpty()) {
for (Dependency d : dependencySet) {
//fix the dependency's display name and path
final String displayPath = String.format("%s%s",
dependency.getFilePath(),
d.getActualFilePath().substring(tmpDir.getAbsolutePath().length()));
final String displayName = String.format("%s: %s",
dependency.getFileName(),
d.getFileName());
d.setFilePath(displayPath);
d.setFileName(displayName);
//TODO - can we get more evidence from the parent? EAR contains module name, etc. if (dependencySet != null && !dependencySet.isEmpty()) {
//analyze the dependency (i.e. extract files) if it is a supported type. for (Dependency d : dependencySet) {
if (this.accept(d.getActualFile()) && scanDepth < MAX_SCAN_DEPTH) { if (d.getFilePath().startsWith(tmpDir.getAbsolutePath())) {
scanDepth += 1; //fix the dependency's display name and path
analyze(d, engine); final String displayPath = String.format("%s%s",
scanDepth -= 1; dependency.getFilePath(),
d.getActualFilePath().substring(tmpDir.getAbsolutePath().length()));
final String displayName = String.format("%s: %s",
dependency.getFileName(),
d.getFileName());
d.setFilePath(displayPath);
d.setFileName(displayName);
d.setProjectReferences(dependency.getProjectReferences());
//TODO - can we get more evidence from the parent? EAR contains module name, etc.
//analyze the dependency (i.e. extract files) if it is a supported type.
if (this.accept(d.getActualFile()) && scanDepth < MAX_SCAN_DEPTH) {
scanDepth += 1;
analyze(d, engine);
scanDepth -= 1;
}
} else {
for (Dependency sub : dependencySet) {
if (sub.getFilePath().startsWith(tmpDir.getAbsolutePath())) {
final String displayPath = String.format("%s%s",
dependency.getFilePath(),
sub.getActualFilePath().substring(tmpDir.getAbsolutePath().length()));
final String displayName = String.format("%s: %s",
dependency.getFileName(),
sub.getFileName());
sub.setFilePath(displayPath);
sub.setFileName(displayName);
}
}
} }
} }
} }
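
To make the renaming above concrete: for each dependency found inside an extracted archive, the patch builds a display path by appending the extracted file's path (relative to the temp directory) to the parent archive's path, and prefixes the file name with the parent's name. A small sketch with hypothetical paths, reusing the same String.format patterns as the code above:

    public class DisplayNameExample {
        public static void main(String[] args) {
            // hypothetical values: a WAR scanned at /scans/app.war, extracted to /tmp/check123
            final String parentFilePath = "/scans/app.war";
            final String parentFileName = "app.war";
            final String tmpDir = "/tmp/check123";
            final String extractedPath = "/tmp/check123/WEB-INF/lib/commons-io-2.4.jar";

            final String displayPath = String.format("%s%s",
                    parentFilePath, extractedPath.substring(tmpDir.length()));
            final String displayName = String.format("%s: %s", parentFileName, "commons-io-2.4.jar");

            System.out.println(displayPath); // /scans/app.war/WEB-INF/lib/commons-io-2.4.jar
            System.out.println(displayName); // app.war: commons-io-2.4.jar
        }
    }
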
@@ -249,7 +294,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* If a zip file was identified as a possible JAR, this method will add the zip to the list of dependencies. * If a zip file was identified as a possible JAR, this method will add the
* zip to the list of dependencies.
* *
* @param dependency the zip file * @param dependency the zip file
* @param engine the engine * @param engine the engine
@@ -257,34 +303,41 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/ */
private void addDisguisedJarsToDependencies(Dependency dependency, Engine engine) throws AnalysisException { private void addDisguisedJarsToDependencies(Dependency dependency, Engine engine) throws AnalysisException {
if (ZIP_FILTER.accept(dependency.getActualFile()) && isZipFileActuallyJarFile(dependency)) { if (ZIP_FILTER.accept(dependency.getActualFile()) && isZipFileActuallyJarFile(dependency)) {
final File tdir = getNextTempDirectory(); final File tempDir = getNextTempDirectory();
final String fileName = dependency.getFileName(); final String fileName = dependency.getFileName();
LOGGER.info("The zip file '{}' appears to be a JAR file, making a copy and analyzing it as a JAR.", fileName); LOGGER.info("The zip file '{}' appears to be a JAR file, making a copy and analyzing it as a JAR.", fileName);
final File tmpLoc = new File(tempDir, fileName.substring(0, fileName.length() - 3) + "jar");
final File tmpLoc = new File(tdir, fileName.substring(0, fileName.length() - 3) + "jar"); //store the archives sha1 and change it so that the engine doesn't think the zip and jar file are the same
// and add it is a related dependency.
final String archiveSha1 = dependency.getSha1sum();
try { try {
org.apache.commons.io.FileUtils.copyFile(tdir, tmpLoc); dependency.setSha1sum("");
final Set<Dependency> dependencySet = findMoreDependencies(engine, tmpLoc); org.apache.commons.io.FileUtils.copyFile(dependency.getActualFile(), tmpLoc);
if (!dependencySet.isEmpty()) { final List<Dependency> dependencySet = findMoreDependencies(engine, tmpLoc);
if (dependencySet.size() != 1) { if (dependencySet != null && !dependencySet.isEmpty()) {
LOGGER.info("Deep copy of ZIP to JAR file resulted in more than one dependency?");
}
for (Dependency d : dependencySet) { for (Dependency d : dependencySet) {
//fix the dependency's display name and path //fix the dependency's display name and path
d.setFilePath(dependency.getFilePath()); if (d.getActualFile().equals(tmpLoc)) {
d.setDisplayFileName(dependency.getFileName()); d.setFilePath(dependency.getFilePath());
d.setDisplayFileName(dependency.getFileName());
} else {
for (Dependency sub : d.getRelatedDependencies()) {
if (sub.getActualFile().equals(tmpLoc)) {
sub.setFilePath(dependency.getFilePath());
sub.setDisplayFileName(dependency.getFileName());
}
}
}
} }
} }
} catch (IOException ex) { } catch (IOException ex) {
LOGGER.debug("Unable to perform deep copy on '{}'", dependency.getActualFile().getPath(), ex); LOGGER.debug("Unable to perform deep copy on '{}'", dependency.getActualFile().getPath(), ex);
} finally {
dependency.setSha1sum(archiveSha1);
} }
} }
} }
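
The isZipFileActuallyJarFile check used above is defined outside this hunk. One common heuristic, shown here only as a sketch and not necessarily what dependency-check itself does, is to open the zip and look for a manifest or compiled class entries:

    import java.io.File;
    import java.io.IOException;
    import java.util.Enumeration;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipFile;

    public class JarHeuristic {
        // returns true if the zip contains a manifest or any .class entry
        static boolean looksLikeJar(File zip) throws IOException {
            try (ZipFile zf = new ZipFile(zip)) {
                final Enumeration<? extends ZipEntry> entries = zf.entries();
                while (entries.hasMoreElements()) {
                    final String name = entries.nextElement().getName();
                    if (name.endsWith(".class") || "META-INF/MANIFEST.MF".equals(name)) {
                        return true;
                    }
                }
            }
            return false;
        }
    }
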
/**
* An empty dependency set.
*/
private static final Set<Dependency> EMPTY_DEPENDENCY_SET = Collections.emptySet();
/** /**
* Scan the given file/folder, and return any new dependencies found. * Scan the given file/folder, and return any new dependencies found.
@@ -293,20 +346,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* @param file target of scanning * @param file target of scanning
* @return any dependencies that weren't known to the engine before * @return any dependencies that weren't known to the engine before
*/ */
private static Set<Dependency> findMoreDependencies(Engine engine, File file) { private static List<Dependency> findMoreDependencies(Engine engine, File file) {
final List<Dependency> before = new ArrayList<Dependency>(engine.getDependencies()); return engine.scan(file);
engine.scan(file);
final List<Dependency> after = engine.getDependencies();
final boolean sizeChanged = before.size() != after.size();
final Set<Dependency> newDependencies;
if (sizeChanged) {
//get the new dependencies
newDependencies = new HashSet<Dependency>(after);
newDependencies.removeAll(before);
} else {
newDependencies = EMPTY_DEPENDENCY_SET;
}
return newDependencies;
} }
/** /**
@@ -339,30 +380,49 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/ */
private void extractFiles(File archive, File destination, Engine engine) throws AnalysisException { private void extractFiles(File archive, File destination, Engine engine) throws AnalysisException {
if (archive != null && destination != null) { if (archive != null && destination != null) {
FileInputStream fis; String archiveExt = FileUtils.getFileExtension(archive.getName());
if (archiveExt == null) {
return;
}
archiveExt = archiveExt.toLowerCase();
final FileInputStream fis;
try { try {
fis = new FileInputStream(archive); fis = new FileInputStream(archive);
} catch (FileNotFoundException ex) { } catch (FileNotFoundException ex) {
LOGGER.debug("", ex); LOGGER.debug("", ex);
throw new AnalysisException("Archive file was not found.", ex); throw new AnalysisException("Archive file was not found.", ex);
} }
final String archiveExt = FileUtils.getFileExtension(archive.getName()).toLowerCase(); BufferedInputStream in = null;
ZipArchiveInputStream zin = null;
TarArchiveInputStream tin = null;
GzipCompressorInputStream gin = null;
BZip2CompressorInputStream bzin = null;
try { try {
if (ZIPPABLES.contains(archiveExt)) { if (KNOWN_ZIP_EXT.contains(archiveExt)) {
extractArchive(new ZipArchiveInputStream(new BufferedInputStream(fis)), destination, engine); in = new BufferedInputStream(fis);
ensureReadableJar(archiveExt, in);
zin = new ZipArchiveInputStream(in);
extractArchive(zin, destination, engine);
} else if ("tar".equals(archiveExt)) { } else if ("tar".equals(archiveExt)) {
extractArchive(new TarArchiveInputStream(new BufferedInputStream(fis)), destination, engine); in = new BufferedInputStream(fis);
tin = new TarArchiveInputStream(in);
extractArchive(tin, destination, engine);
} else if ("gz".equals(archiveExt) || "tgz".equals(archiveExt)) { } else if ("gz".equals(archiveExt) || "tgz".equals(archiveExt)) {
final String uncompressedName = GzipUtils.getUncompressedFilename(archive.getName()); final String uncompressedName = GzipUtils.getUncompressedFilename(archive.getName());
final File f = new File(destination, uncompressedName); final File f = new File(destination, uncompressedName);
if (engine.accept(f)) { if (engine.accept(f)) {
decompressFile(new GzipCompressorInputStream(new BufferedInputStream(fis)), f); in = new BufferedInputStream(fis);
gin = new GzipCompressorInputStream(in);
decompressFile(gin, f);
} }
} else if ("bz2".equals(archiveExt) || "tbz2".equals(archiveExt)) { } else if ("bz2".equals(archiveExt) || "tbz2".equals(archiveExt)) {
final String uncompressedName = BZip2Utils.getUncompressedFilename(archive.getName()); final String uncompressedName = BZip2Utils.getUncompressedFilename(archive.getName());
final File f = new File(destination, uncompressedName); final File f = new File(destination, uncompressedName);
if (engine.accept(f)) { if (engine.accept(f)) {
decompressFile(new BZip2CompressorInputStream(new BufferedInputStream(fis)), f); in = new BufferedInputStream(fis);
bzin = new BZip2CompressorInputStream(in);
decompressFile(bzin, f);
} }
} }
} catch (ArchiveExtractionException ex) { } catch (ArchiveExtractionException ex) {
@@ -372,7 +432,66 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.warn("Exception reading archive '{}'.", archive.getName()); LOGGER.warn("Exception reading archive '{}'.", archive.getName());
LOGGER.debug("", ex); LOGGER.debug("", ex);
} finally { } finally {
close(fis); //overly verbose and not needed... but keeping it anyway due to
//having issue with file handles being left open
FileUtils.close(fis);
FileUtils.close(in);
FileUtils.close(zin);
FileUtils.close(tin);
FileUtils.close(gin);
FileUtils.close(bzin);
}
}
}
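
For readers less familiar with Commons Compress, the dispatch above simply wraps the raw stream in the matching decorator: ZipArchiveInputStream for zip-like files, TarArchiveInputStream for tars, and the gzip/bzip2 compressor streams for .gz/.tgz and .bz2/.tbz2. Note the analyzer itself only gunzips a .tgz to a .tar and lets a later scan pick that file up; the sketch below instead streams both layers in one pass, purely to show the decorator pattern. The class name and paths are illustrative, and real code should also reject entry names that escape the destination directory:

    import java.io.BufferedInputStream;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import org.apache.commons.compress.archivers.ArchiveEntry;
    import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
    import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
    import org.apache.commons.compress.utils.IOUtils;

    public class TgzExtractSketch {
        public static void extract(File tgz, File destination) throws IOException {
            // gzip decompression wrapped by a tar archive reader
            try (TarArchiveInputStream tin = new TarArchiveInputStream(
                    new GzipCompressorInputStream(new BufferedInputStream(new FileInputStream(tgz))))) {
                ArchiveEntry entry;
                while ((entry = tin.getNextEntry()) != null) {
                    final File out = new File(destination, entry.getName());
                    if (entry.isDirectory()) {
                        out.mkdirs();
                    } else {
                        out.getParentFile().mkdirs();
                        try (FileOutputStream fos = new FileOutputStream(out)) {
                            IOUtils.copy(tin, fos);
                        }
                    }
                }
            }
        }
    }
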
/**
* Checks if the file being scanned is a JAR that begins with '#!/bin' which
* indicates it is a fully executable jar. If a fully executable JAR is
* identified the input stream will be advanced to the start of the actual
 * JAR file (skipping the script).
*
* @see
* <a href="http://docs.spring.io/spring-boot/docs/1.3.0.BUILD-SNAPSHOT/reference/htmlsingle/#deployment-install">Installing
* Spring Boot Applications</a>
* @param archiveExt the file extension
* @param in the input stream
* @throws IOException thrown if there is an error reading the stream
*/
private void ensureReadableJar(final String archiveExt, BufferedInputStream in) throws IOException {
if ("jar".equals(archiveExt) && in.markSupported()) {
in.mark(7);
final byte[] b = new byte[7];
final int read = in.read(b);
if (read == 7
&& b[0] == '#'
&& b[1] == '!'
&& b[2] == '/'
&& b[3] == 'b'
&& b[4] == 'i'
&& b[5] == 'n'
&& b[6] == '/') {
boolean stillLooking = true;
int chr;
int nxtChr;
while (stillLooking && (chr = in.read()) != -1) {
if (chr == '\n' || chr == '\r') {
in.mark(4);
if ((chr = in.read()) != -1) {
if (chr == 'P' && (chr = in.read()) != -1) {
if (chr == 'K' && (chr = in.read()) != -1) {
if ((chr == 3 || chr == 5 || chr == 7) && (nxtChr = in.read()) != -1) {
if (nxtChr == chr + 1) {
stillLooking = false;
in.reset();
}
}
}
}
}
}
}
} else {
in.reset();
} }
} }
} }
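
The scan above first checks for the literal '#!/bin/' prefix and then walks forward after each line break looking for a 'P','K' pair followed by one of the zip signature byte pairs (3,4), (5,6) or (7,8), at which point it resets to the mark so the ZipArchiveInputStream starts on the real archive. A minimal, hedged sketch of just the prefix detection; the class and file names are illustrative:

    import java.io.BufferedInputStream;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class LauncherScriptCheck {
        // returns true if the file starts with the "#!/bin/" marker that Spring Boot
        // prepends to fully executable JARs
        static boolean hasLauncherScript(String path) throws IOException {
            try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(path))) {
                final byte[] header = new byte[7];
                final int read = in.read(header);
                return read == 7 && Arrays.equals(header, "#!/bin/".getBytes(StandardCharsets.US_ASCII));
            }
        }

        public static void main(String[] args) throws IOException {
            System.out.println(hasLauncherScript(args[0]));
        }
    }
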
@@ -383,7 +502,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* @param input the archive to extract files from * @param input the archive to extract files from
* @param destination the location to write the files too * @param destination the location to write the files too
* @param engine the dependency-check engine * @param engine the dependency-check engine
* @throws ArchiveExtractionException thrown if there is an exception extracting files from the archive * @throws ArchiveExtractionException thrown if there is an exception
* extracting files from the archive
*/ */
private void extractArchive(ArchiveInputStream input, File destination, Engine engine) throws ArchiveExtractionException { private void extractArchive(ArchiveInputStream input, File destination, Engine engine) throws ArchiveExtractionException {
ArchiveEntry entry; ArchiveEntry entry;
@@ -399,10 +519,10 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
extractAcceptedFile(input, file); extractAcceptedFile(input, file);
} }
} }
} catch (Throwable ex) { } catch (IOException | AnalysisException ex) {
throw new ArchiveExtractionException(ex); throw new ArchiveExtractionException(ex);
} finally { } finally {
close(input); FileUtils.close(input);
} }
} }
@@ -415,14 +535,12 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/ */
private static void extractAcceptedFile(ArchiveInputStream input, File file) throws AnalysisException { private static void extractAcceptedFile(ArchiveInputStream input, File file) throws AnalysisException {
LOGGER.debug("Extracting '{}'", file.getPath()); LOGGER.debug("Extracting '{}'", file.getPath());
FileOutputStream fos = null; final File parent = file.getParentFile();
try { if (!parent.isDirectory() && !parent.mkdirs()) {
final File parent = file.getParentFile(); final String msg = String.format("Unable to build directory '%s'.", parent.getAbsolutePath());
if (!parent.isDirectory() && !parent.mkdirs()) { throw new AnalysisException(msg);
final String msg = String.format("Unable to build directory '%s'.", parent.getAbsolutePath()); }
throw new AnalysisException(msg); try (FileOutputStream fos = new FileOutputStream(file)) {
}
fos = new FileOutputStream(file);
IOUtils.copy(input, fos); IOUtils.copy(input, fos);
} catch (FileNotFoundException ex) { } catch (FileNotFoundException ex) {
LOGGER.debug("", ex); LOGGER.debug("", ex);
@@ -432,8 +550,6 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.debug("", ex); LOGGER.debug("", ex);
final String msg = String.format("IO Exception while parsing file '%s'.", file.getName()); final String msg = String.format("IO Exception while parsing file '%s'.", file.getName());
throw new AnalysisException(msg, ex); throw new AnalysisException(msg, ex);
} finally {
close(fos);
} }
} }
@@ -442,37 +558,16 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* *
* @param inputStream the compressed file * @param inputStream the compressed file
* @param outputFile the location to write the decompressed file * @param outputFile the location to write the decompressed file
* @throws ArchiveExtractionException thrown if there is an exception decompressing the file * @throws ArchiveExtractionException thrown if there is an exception
* decompressing the file
*/ */
private void decompressFile(CompressorInputStream inputStream, File outputFile) throws ArchiveExtractionException { private void decompressFile(CompressorInputStream inputStream, File outputFile) throws ArchiveExtractionException {
LOGGER.debug("Decompressing '{}'", outputFile.getPath()); LOGGER.debug("Decompressing '{}'", outputFile.getPath());
FileOutputStream out = null; try (FileOutputStream out = new FileOutputStream(outputFile)) {
try {
out = new FileOutputStream(outputFile);
IOUtils.copy(inputStream, out); IOUtils.copy(inputStream, out);
} catch (FileNotFoundException ex) {
LOGGER.debug("", ex);
throw new ArchiveExtractionException(ex);
} catch (IOException ex) { } catch (IOException ex) {
LOGGER.debug("", ex); LOGGER.debug("", ex);
throw new ArchiveExtractionException(ex); throw new ArchiveExtractionException(ex);
} finally {
close(out);
}
}
/**
* Close the given {@link Closeable} instance, ignoring nulls, and logging any thrown {@link IOException}.
*
* @param closeable to be closed
*/
private static void close(Closeable closeable) {
if (null != closeable) {
try {
closeable.close();
} catch (IOException ex) {
LOGGER.trace("", ex);
}
} }
} }
@@ -506,7 +601,6 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
} finally { } finally {
ZipFile.closeQuietly(zip); ZipFile.closeQuietly(zip);
} }
return isJar; return isJar;
} }
} }

View File: AssemblyAnalyzer.java

@@ -37,15 +37,19 @@ import org.w3c.dom.Document;
import org.xml.sax.SAXException; import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath; import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory; import javax.xml.xpath.XPathFactory;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import javax.xml.parsers.ParserConfigurationException;
import org.owasp.dependencycheck.exception.InitializationException;
import org.apache.commons.lang3.SystemUtils;
import org.owasp.dependencycheck.utils.XmlUtils;
/** /**
* Analyzer for getting company, product, and version information from a .NET assembly. * Analyzer for getting company, product, and version information from a .NET
* assembly.
* *
* @author colezlaw * @author colezlaw
* *
@@ -68,10 +72,6 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
* The temp value for GrokAssembly.exe * The temp value for GrokAssembly.exe
*/ */
private File grokAssemblyExe = null; private File grokAssemblyExe = null;
/**
* The DocumentBuilder for parsing the XML
*/
private DocumentBuilder builder;
/** /**
* Logger * Logger
*/ */
@@ -82,18 +82,19 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
* *
* @return the list of arguments to begin populating the ProcessBuilder * @return the list of arguments to begin populating the ProcessBuilder
*/ */
private List<String> buildArgumentList() { protected List<String> buildArgumentList() {
// Use file.separator as a wild guess as to whether this is Windows // Use file.separator as a wild guess as to whether this is Windows
final List<String> args = new ArrayList<String>(); final List<String> args = new ArrayList<>();
if (!"\\".equals(System.getProperty("file.separator"))) { if (!SystemUtils.IS_OS_WINDOWS) {
if (Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH) != null) { if (Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH) != null) {
args.add(Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH)); args.add(Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH));
} else { } else if (isInPath("mono")) {
args.add("mono"); args.add("mono");
} else {
return null;
} }
} }
args.add(grokAssemblyExe.getPath()); args.add(grokAssemblyExe.getPath());
return args; return args;
} }
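
The argument list built above is later handed to ProcessBuilder with the target assembly path appended, and GrokAssembly writes an XML document to stdout which the analyzer parses. A hedged sketch of that invocation pattern; the paths are placeholders, and a real caller should also drain stderr as the analyzer does:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;
    import java.util.ArrayList;
    import java.util.List;

    public class GrokInvocationSketch {
        public static void main(String[] args) throws IOException, InterruptedException {
            final List<String> cmd = new ArrayList<>();
            cmd.add("mono");                   // omitted on Windows
            cmd.add("/tmp/GKA12345.exe");      // extracted GrokAssembly.exe (placeholder path)
            cmd.add("/scans/SomeLibrary.dll"); // the assembly to inspect (placeholder path)

            final Process proc = new ProcessBuilder(cmd).start();
            try (BufferedReader out = new BufferedReader(
                    new InputStreamReader(proc.getInputStream(), StandardCharsets.UTF_8))) {
                String line;
                while ((line = out.readLine()) != null) {
                    System.out.println(line); // the XML result produced by GrokAssembly
                }
            }
            System.out.println("exit code: " + proc.waitFor());
        }
    }
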
@@ -105,7 +106,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException if anything goes sideways * @throws AnalysisException if anything goes sideways
*/ */
@Override @Override
public void analyzeFileType(Dependency dependency, Engine engine) public void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException { throws AnalysisException {
if (grokAssemblyExe == null) { if (grokAssemblyExe == null) {
LOGGER.warn("GrokAssembly didn't get deployed"); LOGGER.warn("GrokAssembly didn't get deployed");
@@ -113,11 +114,16 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
} }
final List<String> args = buildArgumentList(); final List<String> args = buildArgumentList();
if (args == null) {
LOGGER.warn("Assembly Analyzer was unable to execute");
return;
}
args.add(dependency.getActualFilePath()); args.add(dependency.getActualFilePath());
final ProcessBuilder pb = new ProcessBuilder(args); final ProcessBuilder pb = new ProcessBuilder(args);
Document doc = null; Document doc = null;
try { try {
final Process proc = pb.start(); final Process proc = pb.start();
final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
doc = builder.parse(proc.getInputStream()); doc = builder.parse(proc.getInputStream());
@@ -138,7 +144,9 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
dependency.getActualFilePath()); dependency.getActualFilePath());
return; return;
} else if (rc != 0) { } else if (rc != 0) {
LOGGER.warn("Return code {} from GrokAssembly", rc); LOGGER.debug("Return code {} from GrokAssembly; dependency-check is unable to analyze the library: {}",
rc, dependency.getActualFilePath());
return;
} }
final XPath xpath = XPathFactory.newInstance().newXPath(); final XPath xpath = XPathFactory.newInstance().newXPath();
@@ -167,86 +175,99 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
product, Confidence.HIGH)); product, Confidence.HIGH));
} }
} catch (IOException ioe) { } catch (ParserConfigurationException pce) {
throw new AnalysisException("Error initializing the assembly analyzer", pce);
} catch (IOException | XPathExpressionException ioe) {
throw new AnalysisException(ioe); throw new AnalysisException(ioe);
} catch (SAXException saxe) { } catch (SAXException saxe) {
throw new AnalysisException("Couldn't parse GrokAssembly result", saxe); LOGGER.error("----------------------------------------------------");
} catch (XPathExpressionException xpe) { LOGGER.error("Failed to read the Assembly Analyzer results. "
// This shouldn't happen + "On some systems mono-runtime and mono-devel need to be installed.");
throw new AnalysisException(xpe); LOGGER.error("----------------------------------------------------");
throw new AnalysisException("Couldn't parse Assembly Analyzer results (GrokAssembly)", saxe);
} }
// This shouldn't happen
} }
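
XmlUtils.buildSecureDocumentBuilder replaces the bare DocumentBuilderFactory in this change; its implementation is not part of this diff, but a typically hardened JAXP builder, shown as a sketch rather than the project's exact code, looks like this:

    import javax.xml.XMLConstants;
    import javax.xml.parsers.DocumentBuilder;
    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.parsers.ParserConfigurationException;

    public class SecureBuilderSketch {
        static DocumentBuilder newSecureBuilder() throws ParserConfigurationException {
            final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            // refuse DOCTYPE declarations entirely, which shuts down XXE and entity-expansion attacks
            factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
            factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
            factory.setXIncludeAware(false);
            factory.setExpandEntityReferences(false);
            return factory.newDocumentBuilder();
        }
    }
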
/** /**
* Initialize the analyzer. In this case, extract GrokAssembly.exe to a temporary location. * Initialize the analyzer. In this case, extract GrokAssembly.exe to a
* temporary location.
* *
* @throws Exception if anything goes wrong * @throws InitializationException thrown if anything goes wrong
*/ */
@Override @Override
public void initializeFileTypeAnalyzer() throws Exception { public void initializeFileTypeAnalyzer() throws InitializationException {
final File tempFile = File.createTempFile("GKA", ".exe", Settings.getTempDirectory()); final File tempFile;
FileOutputStream fos = null; final String cfg;
InputStream is = null;
try { try {
fos = new FileOutputStream(tempFile); tempFile = File.createTempFile("GKA", ".exe", Settings.getTempDirectory());
is = AssemblyAnalyzer.class.getClassLoader().getResourceAsStream("GrokAssembly.exe"); cfg = tempFile.getPath() + ".config";
} catch (IOException ex) {
setEnabled(false);
throw new InitializationException("Unable to create temporary file for the assembly analyzer", ex);
}
try (FileOutputStream fos = new FileOutputStream(tempFile);
InputStream is = AssemblyAnalyzer.class.getClassLoader().getResourceAsStream("GrokAssembly.exe");
FileOutputStream fosCfg = new FileOutputStream(cfg);
InputStream isCfg = AssemblyAnalyzer.class.getClassLoader().getResourceAsStream("GrokAssembly.exe.config")) {
IOUtils.copy(is, fos); IOUtils.copy(is, fos);
grokAssemblyExe = tempFile; grokAssemblyExe = tempFile;
// Set the temp file to get deleted when we're done
grokAssemblyExe.deleteOnExit();
LOGGER.debug("Extracted GrokAssembly.exe to {}", grokAssemblyExe.getPath()); LOGGER.debug("Extracted GrokAssembly.exe to {}", grokAssemblyExe.getPath());
IOUtils.copy(isCfg, fosCfg);
LOGGER.debug("Extracted GrokAssembly.exe.config to {}", cfg);
} catch (IOException ioe) { } catch (IOException ioe) {
this.setEnabled(false); this.setEnabled(false);
LOGGER.warn("Could not extract GrokAssembly.exe: {}", ioe.getMessage()); LOGGER.warn("Could not extract GrokAssembly.exe: {}", ioe.getMessage());
throw new AnalysisException("Could not extract GrokAssembly.exe", ioe); throw new InitializationException("Could not extract GrokAssembly.exe", ioe);
} finally {
if (fos != null) {
try {
fos.close();
} catch (Throwable e) {
LOGGER.debug("Error closing output stream");
}
}
if (is != null) {
try {
is.close();
} catch (Throwable e) {
LOGGER.debug("Error closing input stream");
}
}
} }
// Now, need to see if GrokAssembly actually runs from this location. // Now, need to see if GrokAssembly actually runs from this location.
final List<String> args = buildArgumentList(); final List<String> args = buildArgumentList();
//TODO this creates an "unreported" error - if someone doesn't look
// at the command output this could easily be missed (especially in an
// Ant or Maven build).
//
// We need to create a non-fatal warning error type that will
// get added to the report.
//TODO this idea needs to get replicated to the bundle audit analyzer.
if (args == null) {
setEnabled(false);
LOGGER.error("----------------------------------------------------");
LOGGER.error(".NET Assembly Analyzer could not be initialized and at least one "
+ "'exe' or 'dll' was scanned. The 'mono' executable could not be found on "
+ "the path; either disable the Assembly Analyzer or configure the path mono. "
+ "On some systems mono-runtime and mono-devel need to be installed.");
LOGGER.error("----------------------------------------------------");
return;
}
try { try {
final ProcessBuilder pb = new ProcessBuilder(args); final ProcessBuilder pb = new ProcessBuilder(args);
final Process p = pb.start(); final Process p = pb.start();
// Try evacuating the error stream // Try evacuating the error stream
IOUtils.copy(p.getErrorStream(), NullOutputStream.NULL_OUTPUT_STREAM); IOUtils.copy(p.getErrorStream(), NullOutputStream.NULL_OUTPUT_STREAM);
final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(p.getInputStream()); final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
final Document doc = builder.parse(p.getInputStream());
final XPath xpath = XPathFactory.newInstance().newXPath(); final XPath xpath = XPathFactory.newInstance().newXPath();
final String error = xpath.evaluate("/assembly/error", doc); final String error = xpath.evaluate("/assembly/error", doc);
if (p.waitFor() != 1 || error == null || error.isEmpty()) { if (p.waitFor() != 1 || error == null || error.isEmpty()) {
LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer, please see the log for more details."); LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer, please see the log for more details.");
LOGGER.debug("GrokAssembly.exe is not working properly"); LOGGER.debug("GrokAssembly.exe is not working properly");
grokAssemblyExe = null; grokAssemblyExe = null;
this.setEnabled(false); setEnabled(false);
throw new AnalysisException("Could not execute .NET AssemblyAnalyzer"); throw new InitializationException("Could not execute .NET AssemblyAnalyzer");
}
} catch (Throwable e) {
if (e instanceof AnalysisException) {
throw (AnalysisException) e;
} else {
LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer;\n"
+ "this can be ignored unless you are scanning .NET DLLs. Please see the log for more details.");
LOGGER.debug("Could not execute GrokAssembly {}", e.getMessage());
this.setEnabled(false);
throw new AnalysisException("An error occured with the .NET AssemblyAnalyzer", e);
} }
} catch (InitializationException e) {
setEnabled(false);
throw e;
} catch (IOException | ParserConfigurationException | SAXException | XPathExpressionException | InterruptedException e) {
LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer;\n"
+ "this can be ignored unless you are scanning .NET DLLs. Please see the log for more details.");
LOGGER.debug("Could not execute GrokAssembly {}", e.getMessage());
setEnabled(false);
throw new InitializationException("An error occurred with the .NET AssemblyAnalyzer", e);
} }
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
} }
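
The initializer above copies GrokAssembly.exe and GrokAssembly.exe.config from the classpath into the temp directory before test-running the tool. A minimal JDK-only sketch of that resource-to-temp-file step; the class and resource names are placeholders:

    import java.io.File;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.StandardCopyOption;

    public class ResourceExtractSketch {
        // copies a classpath resource to a temp file and schedules it for deletion on exit
        static File extractResource(String resourceName, String suffix) throws IOException {
            final File target = File.createTempFile("GKA", suffix);
            try (InputStream is = ResourceExtractSketch.class.getClassLoader().getResourceAsStream(resourceName)) {
                if (is == null) {
                    throw new IOException("Resource not found on the classpath: " + resourceName);
                }
                Files.copy(is, target.toPath(), StandardCopyOption.REPLACE_EXISTING);
            }
            target.deleteOnExit();
            return target;
        }
    }
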
/** /**
@@ -255,14 +276,15 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
* @throws Exception thrown if there is a problem closing the analyzer * @throws Exception thrown if there is a problem closing the analyzer
*/ */
@Override @Override
public void close() throws Exception { public void closeAnalyzer() throws Exception {
super.close();
try { try {
if (grokAssemblyExe != null && !grokAssemblyExe.delete()) { if (grokAssemblyExe != null && !grokAssemblyExe.delete()) {
LOGGER.debug("Unable to delete temporary GrokAssembly.exe; attempting delete on exit");
grokAssemblyExe.deleteOnExit(); grokAssemblyExe.deleteOnExit();
} }
} catch (SecurityException se) { } catch (SecurityException se) {
LOGGER.debug("Can't delete temporary GrokAssembly.exe"); LOGGER.debug("Can't delete temporary GrokAssembly.exe");
grokAssemblyExe.deleteOnExit();
} }
} }
@@ -298,7 +320,8 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Returns the key used in the properties file to reference the analyzer's enabled property. * Returns the key used in the properties file to reference the analyzer's
* enabled property.
* *
* @return the analyzer's enabled property setting key * @return the analyzer's enabled property setting key
*/ */
@@ -306,4 +329,27 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
protected String getAnalyzerEnabledSettingKey() { protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED; return Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED;
} }
/**
* Tests to see if a file is in the system path. <b>Note</b> - the current
* implementation only works on non-windows platforms. For purposes of the
* AssemblyAnalyzer this is okay as this is only needed on Mac/*nix.
*
* @param file the executable to look for
* @return <code>true</code> if the file exists; otherwise
* <code>false</code>
*/
private boolean isInPath(String file) {
final ProcessBuilder pb = new ProcessBuilder("which", file);
try {
final Process proc = pb.start();
final int retCode = proc.waitFor();
if (retCode == 0) {
return true;
}
} catch (IOException | InterruptedException ex) {
LOGGER.debug("Path search failed for " + file, ex);
}
return false;
}
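
As the javadoc above notes, the `which` probe is POSIX-only, which is adequate for locating mono. A hedged cross-platform variant, not part of the patch, would switch to the Windows `where` command:

    import java.io.IOException;

    public class PathProbe {
        static boolean isInPath(String executable) {
            // "where" on Windows, "which" everywhere else; exit code 0 means the executable was found
            final String probe = System.getProperty("os.name").toLowerCase().contains("win") ? "where" : "which";
            try {
                return new ProcessBuilder(probe, executable).start().waitFor() == 0;
            } catch (IOException | InterruptedException ex) {
                return false;
            }
        }

        public static void main(String[] args) {
            System.out.println(isInPath("mono"));
        }
    }
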
} }

View File: AutoconfAnalyzer.java

@@ -30,18 +30,21 @@ import org.owasp.dependencycheck.utils.UrlStringUtils;
import java.io.File; import java.io.File;
import java.io.FileFilter; import java.io.FileFilter;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.nio.charset.Charset;
import java.util.List;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.owasp.dependencycheck.exception.InitializationException;
/** /**
* Used to analyze Autoconf input files named configure.ac or configure.in. Files simply named "configure" are also analyzed, * Used to analyze Autoconf input files named configure.ac or configure.in.
* assuming they are generated by Autoconf, and contain certain special package descriptor variables. * Files simply named "configure" are also analyzed, assuming they are generated
* by Autoconf, and contain certain special package descriptor variables.
* *
* @author Dale Visser <dvisser@ida.org> * @author Dale Visser
* @see <a href="https://www.gnu.org/software/autoconf/">Autoconf - GNU Project - Free Software Foundation (FSF)</a> * @see <a href="https://www.gnu.org/software/autoconf/">Autoconf - GNU Project
* - Free Software Foundation (FSF)</a>
*/ */
@Experimental
public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer { public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
@@ -140,7 +143,8 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Returns the key used in the properties file to reference the analyzer's enabled property. * Returns the key used in the properties file to reference the analyzer's
* enabled property.
* *
* @return the analyzer's enabled property setting key * @return the analyzer's enabled property setting key
*/ */
@@ -150,7 +154,7 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
} }
@Override @Override
protected void analyzeFileType(Dependency dependency, Engine engine) protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException { throws AnalysisException {
final File actualFile = dependency.getActualFile(); final File actualFile = dependency.getActualFile();
final String name = actualFile.getName(); final String name = actualFile.getName();
@@ -172,11 +176,7 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
} }
} }
} else { } else {
// copy, alter and set in case some other thread is iterating over engine.getDependencies().remove(dependency);
final List<Dependency> dependencies = new ArrayList<Dependency>(
engine.getDependencies());
dependencies.remove(dependency);
engine.setDependencies(dependencies);
} }
} }
@@ -220,14 +220,12 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
*/ */
private String getFileContents(final File actualFile) private String getFileContents(final File actualFile)
throws AnalysisException { throws AnalysisException {
String contents = "";
try { try {
contents = FileUtils.readFileToString(actualFile).trim(); return FileUtils.readFileToString(actualFile, Charset.defaultCharset()).trim();
} catch (IOException e) { } catch (IOException e) {
throw new AnalysisException( throw new AnalysisException(
"Problem occurred while reading dependency file.", e); "Problem occurred while reading dependency file.", e);
} }
return contents;
} }
/** /**
@@ -270,10 +268,11 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* Initializes the file type analyzer. * Initializes the file type analyzer.
* *
* @throws Exception thrown if there is an exception during initialization * @throws InitializationException thrown if there is an exception during
* initialization
*/ */
@Override @Override
protected void initializeFileTypeAnalyzer() throws Exception { protected void initializeFileTypeAnalyzer() throws InitializationException {
// No initialization needed. // No initialization needed.
} }
} }

View File: CMakeAnalyzer.java

@@ -32,23 +32,29 @@ import org.slf4j.LoggerFactory;
import java.io.File; import java.io.File;
import java.io.FileFilter; import java.io.FileFilter;
import java.io.IOException; import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import java.security.MessageDigest; import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException; import java.security.NoSuchAlgorithmException;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.owasp.dependencycheck.exception.InitializationException;
/** /**
* <p> * <p>
* Used to analyze CMake build files, and collect information that can be used to determine the associated CPE.</p> * Used to analyze CMake build files, and collect information that can be used
* <p/> * to determine the associated CPE.</p>
* <p> * <p>
* Note: This analyzer catches straightforward invocations of the project command, plus some other observed patterns of version * Note: This analyzer catches straightforward invocations of the project
* inclusion in real CMake projects. Many projects make use of older versions of CMake and/or use custom "homebrew" ways to insert * command, plus some other observed patterns of version inclusion in real CMake
 * version information. Hopefully as the newer CMake call pattern grows in usage, this analyzer will allow more CPEs to be * projects. Many projects make use of older versions of CMake and/or use custom
 * "homebrew" ways to insert version information. Hopefully as the newer CMake
 * call pattern grows in usage, this analyzer will allow more CPEs to be
* identified.</p> * identified.</p>
* *
* @author Dale Visser <dvisser@ida.org> * @author Dale Visser
*/ */
@Experimental
public class CMakeAnalyzer extends AbstractFileTypeAnalyzer { public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
@@ -86,24 +92,10 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(".cmake") private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(".cmake")
.addFilenames("CMakeLists.txt").build(); .addFilenames("CMakeLists.txt").build();
/**
* A reference to SHA1 message digest.
*/
private static MessageDigest sha1 = null;
static {
try {
sha1 = MessageDigest.getInstance("SHA1");
} catch (NoSuchAlgorithmException e) {
LOGGER.error(e.getMessage());
}
}
/** /**
* Returns the name of the CMake analyzer. * Returns the name of the CMake analyzer.
* *
* @return the name of the analyzer * @return the name of the analyzer
*
*/ */
@Override @Override
public String getName() { public String getName() {
@@ -131,13 +123,19 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* No-op initializer implementation. * Initializes the analyzer.
* *
* @throws Exception never thrown * @throws InitializationException thrown if an exception occurs getting an
* instance of SHA1
*/ */
@Override @Override
protected void initializeFileTypeAnalyzer() throws Exception { protected void initializeFileTypeAnalyzer() throws InitializationException {
// Nothing to do here. try {
getSha1MessageDigest();
} catch (IllegalStateException ex) {
setEnabled(false);
throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
}
} }
/** /**
@@ -145,10 +143,11 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
* *
* @param dependency the dependency being analyzed * @param dependency the dependency being analyzed
* @param engine the engine being used to perform the scan * @param engine the engine being used to perform the scan
* @throws AnalysisException thrown if there is an unrecoverable error analyzing the dependency * @throws AnalysisException thrown if there is an unrecoverable error
* analyzing the dependency
*/ */
@Override @Override
protected void analyzeFileType(Dependency dependency, Engine engine) protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException { throws AnalysisException {
final File file = dependency.getActualFile(); final File file = dependency.getActualFile();
final String parentName = file.getParentFile().getName(); final String parentName = file.getParentFile().getName();
@@ -156,7 +155,7 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
dependency.setDisplayFileName(String.format("%s%c%s", parentName, File.separatorChar, name)); dependency.setDisplayFileName(String.format("%s%c%s", parentName, File.separatorChar, name));
String contents; String contents;
try { try {
contents = FileUtils.readFileToString(file).trim(); contents = FileUtils.readFileToString(file, Charset.defaultCharset()).trim();
} catch (IOException e) { } catch (IOException e) {
throw new AnalysisException( throw new AnalysisException(
"Problem occurred while reading dependency file.", e); "Problem occurred while reading dependency file.", e);
@@ -181,13 +180,17 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Extracts the version information from the contents. If more then one version is found additional dependencies are added to * Extracts the version information from the contents. If more then one
* the dependency list. * version is found additional dependencies are added to the dependency
* list.
* *
* @param dependency the dependency being analyzed * @param dependency the dependency being analyzed
* @param engine the dependency-check engine * @param engine the dependency-check engine
* @param contents the version information * @param contents the version information
*/ */
@edu.umd.cs.findbugs.annotations.SuppressFBWarnings(
value = "DM_DEFAULT_ENCODING",
justification = "Default encoding is only used if UTF-8 is not available")
private void analyzeSetVersionCommand(Dependency dependency, Engine engine, String contents) { private void analyzeSetVersionCommand(Dependency dependency, Engine engine, String contents) {
Dependency currentDep = dependency; Dependency currentDep = dependency;
@@ -212,8 +215,14 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
final String filePath = String.format("%s:%s", dependency.getFilePath(), product); final String filePath = String.format("%s:%s", dependency.getFilePath(), product);
currentDep.setFilePath(filePath); currentDep.setFilePath(filePath);
// prevents coalescing into the dependency provided by engine byte[] path;
currentDep.setSha1sum(Checksum.getHex(sha1.digest(filePath.getBytes()))); try {
path = filePath.getBytes("UTF-8");
} catch (UnsupportedEncodingException ex) {
path = filePath.getBytes();
}
final MessageDigest sha1 = getSha1MessageDigest();
currentDep.setSha1sum(Checksum.getHex(sha1.digest(path)));
engine.getDependencies().add(currentDep); engine.getDependencies().add(currentDep);
} }
final String source = currentDep.getDisplayFileName(); final String source = currentDep.getDisplayFileName();
@@ -229,4 +238,18 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
protected String getAnalyzerEnabledSettingKey() { protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_CMAKE_ENABLED; return Settings.KEYS.ANALYZER_CMAKE_ENABLED;
} }
/**
* Returns the sha1 message digest.
*
* @return the sha1 message digest
*/
private MessageDigest getSha1MessageDigest() {
try {
return MessageDigest.getInstance("SHA1");
} catch (NoSuchAlgorithmException e) {
LOGGER.error(e.getMessage());
throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
}
}
} }
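
Dropping the shared static MessageDigest (removed above) matters for the thread-safety work in this change set: MessageDigest instances are not thread-safe, so either creating one per call, as getSha1MessageDigest now does, or holding one per thread avoids corrupted hashes. A sketch of the per-thread alternative, offered only as a comparison:

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class Sha1PerThread {
        // each thread gets its own digest instance, so concurrent analyzers never share state
        private static final ThreadLocal<MessageDigest> SHA1 = new ThreadLocal<MessageDigest>() {
            @Override
            protected MessageDigest initialValue() {
                try {
                    return MessageDigest.getInstance("SHA1");
                } catch (NoSuchAlgorithmException e) {
                    throw new IllegalStateException("SHA1 not available", e);
                }
            }
        };

        static byte[] digest(byte[] input) {
            final MessageDigest md = SHA1.get();
            md.reset();
            return md.digest(input);
        }
    }
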

View File: CPEAnalyzer.java

@@ -25,6 +25,8 @@ import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.StringTokenizer; import java.util.StringTokenizer;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.builder.CompareToBuilder;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.queryparser.classic.ParseException; import org.apache.lucene.queryparser.classic.ParseException;
@@ -45,18 +47,21 @@ import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.dependency.EvidenceCollection; import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.dependency.Identifier; import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.VulnerableSoftware; import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.DependencyVersion; import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil; import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
/** /**
* CPEAnalyzer is a utility class that takes a project dependency and attempts to discern if there is an associated CPE. It uses * CPEAnalyzer is a utility class that takes a project dependency and attempts
* the evidence contained within the dependency to search the Lucene index. * to discern if there is an associated CPE. It uses the evidence contained
* within the dependency to search the Lucene index.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public class CPEAnalyzer implements Analyzer { public class CPEAnalyzer extends AbstractAnalyzer {
/** /**
* The Logger. * The Logger.
@@ -65,23 +70,26 @@ public class CPEAnalyzer implements Analyzer {
/** /**
* The maximum number of query results to return. * The maximum number of query results to return.
*/ */
static final int MAX_QUERY_RESULTS = 25; private static final int MAX_QUERY_RESULTS = 25;
/** /**
* The weighting boost to give terms when constructing the Lucene query. * The weighting boost to give terms when constructing the Lucene query.
*/ */
static final String WEIGHTING_BOOST = "^5"; private static final String WEIGHTING_BOOST = "^5";
/** /**
* A string representation of a regular expression defining characters utilized within the CPE Names. * A string representation of a regular expression defining characters
* utilized within the CPE Names.
*/ */
static final String CLEANSE_CHARACTER_RX = "[^A-Za-z0-9 ._-]"; private static final String CLEANSE_CHARACTER_RX = "[^A-Za-z0-9 ._-]";
/** /**
* A string representation of a regular expression used to remove all but alpha characters. * A string representation of a regular expression used to remove all but
* alpha characters.
*/ */
static final String CLEANSE_NONALPHA_RX = "[^A-Za-z]*"; private static final String CLEANSE_NONALPHA_RX = "[^A-Za-z]*";
/** /**
* The additional size to add to a new StringBuilder to account for extra data that will be written into the string. * The additional size to add to a new StringBuilder to account for extra
* data that will be written into the string.
*/ */
static final int STRING_BUILDER_BUFFER = 20; private static final int STRING_BUILDER_BUFFER = 20;
/** /**
* The CPE in memory index. * The CPE in memory index.
*/ */
@@ -117,34 +125,55 @@ public class CPEAnalyzer implements Analyzer {
} }
/** /**
* Creates the CPE Lucene Index. * The default is to support parallel processing.
* *
* @throws Exception is thrown if there is an issue opening the index. * @return false
*/ */
@Override @Override
public void initialize() throws Exception { public boolean supportsParallelProcessing() {
this.open(); return false;
}
/**
* Creates the CPE Lucene Index.
*
* @throws InitializationException is thrown if there is an issue opening
* the index.
*/
@Override
public void initializeAnalyzer() throws InitializationException {
try {
this.open();
} catch (IOException ex) {
LOGGER.debug("Exception initializing the Lucene Index", ex);
throw new InitializationException("An exception occurred initializing the Lucene Index", ex);
} catch (DatabaseException ex) {
LOGGER.debug("Exception accessing the database", ex);
throw new InitializationException("An exception occurred accessing the database", ex);
}
} }
/** /**
* Opens the data source. * Opens the data source.
* *
* @throws IOException when the Lucene directory to be queried does not exist or is corrupt. * @throws IOException when the Lucene directory to be queried does not
* @throws DatabaseException when the database throws an exception. This usually occurs when the database is in use by another * exist or is corrupt.
* process. * @throws DatabaseException when the database throws an exception. This
* usually occurs when the database is in use by another process.
*/ */
public void open() throws IOException, DatabaseException { public void open() throws IOException, DatabaseException {
cve = new CveDB(); if (!isOpen()) {
cve.open(); cve = CveDB.getInstance();
cpe = CpeMemoryIndex.getInstance(); cpe = CpeMemoryIndex.getInstance();
try { try {
LOGGER.info("Creating the CPE Index"); final long creationStart = System.currentTimeMillis();
final long creationStart = System.currentTimeMillis(); cpe.open(cve);
cpe.open(cve); final long creationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - creationStart);
LOGGER.info("CPE Index Created ({} ms)", System.currentTimeMillis() - creationStart); LOGGER.info("Created CPE Index ({} seconds)", creationSeconds);
} catch (IndexException ex) { } catch (IndexException ex) {
LOGGER.debug("IndexException", ex); LOGGER.debug("IndexException", ex);
throw new DatabaseException(ex); throw new DatabaseException(ex);
}
} }
} }
@@ -152,24 +181,30 @@ public class CPEAnalyzer implements Analyzer {
* Closes the data sources. * Closes the data sources.
*/ */
@Override @Override
public void close() { public void closeAnalyzer() {
if (cpe != null) {
cpe.close();
cpe = null;
}
if (cve != null) { if (cve != null) {
cve.close(); cve.close();
cve = null; cve = null;
} }
if (cpe != null) {
cpe.close();
cpe = null;
}
} }
/**
* Returns whether or not the analyzer is open.
*
* @return <code>true</code> if the analyzer is open
*/
public boolean isOpen() { public boolean isOpen() {
return cpe != null && cpe.isOpen(); return cpe != null && cpe.isOpen();
} }
/** /**
* Searches the data store of CPE entries, trying to identify the CPE for the given dependency based on the evidence contained * Searches the data store of CPE entries, trying to identify the CPE for
* within. The dependency passed in is updated with any identified CPE values. * the given dependency based on the evidence contained within. The
* dependency passed in is updated with any identified CPE values.
* *
* @param dependency the dependency to search for CPE entries on. * @param dependency the dependency to search for CPE entries on.
* @throws CorruptIndexException is thrown when the Lucene index is corrupt. * @throws CorruptIndexException is thrown when the Lucene index is corrupt.
@@ -177,7 +212,7 @@ public class CPEAnalyzer implements Analyzer {
* @throws ParseException is thrown when the Lucene query cannot be parsed. * @throws ParseException is thrown when the Lucene query cannot be parsed.
*/ */
protected void determineCPE(Dependency dependency) throws CorruptIndexException, IOException, ParseException { protected void determineCPE(Dependency dependency) throws CorruptIndexException, IOException, ParseException {
//TODO test dojo-war against this. we shold get dojo-toolkit:dojo-toolkit AND dojo-toolkit:toolkit //TODO test dojo-war against this. we should get dojo-toolkit:dojo-toolkit AND dojo-toolkit:toolkit
String vendors = ""; String vendors = "";
String products = ""; String products = "";
for (Confidence confidence : Confidence.values()) { for (Confidence confidence : Confidence.values()) {
@@ -190,8 +225,8 @@ public class CPEAnalyzer implements Analyzer {
LOGGER.debug("product search: {}", products); LOGGER.debug("product search: {}", products);
} }
if (!vendors.isEmpty() && !products.isEmpty()) { if (!vendors.isEmpty() && !products.isEmpty()) {
final List<IndexEntry> entries = searchCPE(vendors, products, dependency.getProductEvidence().getWeighting(), final List<IndexEntry> entries = searchCPE(vendors, products, dependency.getVendorEvidence().getWeighting(),
dependency.getVendorEvidence().getWeighting()); dependency.getProductEvidence().getWeighting());
if (entries == null) { if (entries == null) {
continue; continue;
} }
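
The vendor and product text gathered here feeds searchCPE below, which applies the "^5" WEIGHTING_BOOST to terms that also appear in the weighting sets. Purely as an illustration of what a boosted Lucene query of that shape can look like (the field names and terms are examples, not the analyzer's actual output):

    public class BoostedQuerySketch {
        public static void main(String[] args) {
            final String weightingBoost = "^5";
            // "struts" and "apache" are boosted because they also appear in the weighting sets
            final String query = "product:(struts" + weightingBoost + " framework) "
                    + "AND vendor:(apache" + weightingBoost + ")";
            System.out.println(query); // product:(struts^5 framework) AND vendor:(apache^5)
        }
    }
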
@@ -213,9 +248,10 @@ public class CPEAnalyzer implements Analyzer {
} }
/** /**
* Returns the text created by concatenating the text and the values from the EvidenceCollection (filtered for a specific * Returns the text created by concatenating the text and the values from
* confidence). This attempts to prevent duplicate terms from being added.<br/><br/> Note, if the evidence is longer than 200 * the EvidenceCollection (filtered for a specific confidence). This
* characters it will be truncated. * attempts to prevent duplicate terms from being added.<br/><br/> Note, if
* the evidence is longer than 200 characters it will be truncated.
* *
* @param text the base text. * @param text the base text.
* @param ec an EvidenceCollection * @param ec an EvidenceCollection
@@ -246,23 +282,25 @@ public class CPEAnalyzer implements Analyzer {
/** /**
* <p> * <p>
* Searches the Lucene CPE index to identify possible CPE entries associated with the supplied vendor, product, and * Searches the Lucene CPE index to identify possible CPE entries associated
* version.</p> * with the supplied vendor, product, and version.</p>
* *
* <p> * <p>
* If either the vendorWeightings or productWeightings lists have been populated this data is used to add weighting factors to * If either the vendorWeightings or productWeightings lists have been
* the search.</p> * populated this data is used to add weighting factors to the search.</p>
* *
* @param vendor the text used to search the vendor field * @param vendor the text used to search the vendor field
* @param product the text used to search the product field * @param product the text used to search the product field
* @param vendorWeightings a list of strings to use to add weighting factors to the vendor field * @param vendorWeightings a list of strings to use to add weighting factors
* @param productWeightings Adds a list of strings that will be used to add weighting factors to the product search * to the vendor field
* @param productWeightings Adds a list of strings that will be used to add
* weighting factors to the product search
* @return a list of possible CPE values * @return a list of possible CPE values
*/ */
protected List<IndexEntry> searchCPE(String vendor, String product, protected List<IndexEntry> searchCPE(String vendor, String product,
Set<String> vendorWeightings, Set<String> productWeightings) { Set<String> vendorWeightings, Set<String> productWeightings) {
final List<IndexEntry> ret = new ArrayList<IndexEntry>(MAX_QUERY_RESULTS); final List<IndexEntry> ret = new ArrayList<>(MAX_QUERY_RESULTS);
final String searchString = buildSearch(vendor, product, vendorWeightings, productWeightings); final String searchString = buildSearch(vendor, product, vendorWeightings, productWeightings);
if (searchString == null) { if (searchString == null) {
@@ -284,10 +322,10 @@ public class CPEAnalyzer implements Analyzer {
} }
return ret; return ret;
} catch (ParseException ex) { } catch (ParseException ex) {
LOGGER.warn("An error occured querying the CPE data. See the log for more details."); LOGGER.warn("An error occurred querying the CPE data. See the log for more details.");
LOGGER.info("Unable to parse: {}", searchString, ex); LOGGER.info("Unable to parse: {}", searchString, ex);
} catch (IOException ex) { } catch (IOException ex) {
LOGGER.warn("An error occured reading CPE data. See the log for more details."); LOGGER.warn("An error occurred reading CPE data. See the log for more details.");
LOGGER.info("IO Error with search string: {}", searchString, ex); LOGGER.info("IO Error with search string: {}", searchString, ex);
} }
return null; return null;
@@ -295,16 +333,20 @@ public class CPEAnalyzer implements Analyzer {
/** /**
* <p> * <p>
* Builds a Lucene search string by properly escaping data and constructing a valid search query.</p> * Builds a Lucene search string by properly escaping data and constructing
* a valid search query.</p>
* *
* <p> * <p>
* If either the possibleVendor or possibleProducts lists have been populated this data is used to add weighting factors to * If either the possibleVendor or possibleProducts lists have been
* the search string generated.</p> * populated this data is used to add weighting factors to the search string
* generated.</p>
* *
* @param vendor text to search the vendor field * @param vendor text to search the vendor field
* @param product text to search the product field * @param product text to search the product field
* @param vendorWeighting a list of strings to apply to the vendor to boost the terms weight * @param vendorWeighting a list of strings to apply to the vendor to boost
* @param productWeightings a list of strings to apply to the product to boost the terms weight * the terms weight
* @param productWeightings a list of strings to apply to the product to
* boost the terms weight
* @return the Lucene query * @return the Lucene query
*/ */
protected String buildSearch(String vendor, String product, protected String buildSearch(String vendor, String product,
@@ -325,13 +367,17 @@ public class CPEAnalyzer implements Analyzer {
} }
/** /**
* This method constructs a Lucene query for a given field. The searchText is split into separate words and if the word is * This method constructs a Lucene query for a given field. The searchText
* within the list of weighted words then an additional weighting is applied to the term as it is appended into the query. * is split into separate words and if the word is within the list of
* weighted words then an additional weighting is applied to the term as it
* is appended into the query.
* *
* @param sb a StringBuilder that the query text will be appended to. * @param sb a StringBuilder that the query text will be appended to.
* @param field the field within the Lucene index that the query is searching. * @param field the field within the Lucene index that the query is
* searching.
* @param searchText text used to construct the query. * @param searchText text used to construct the query.
* @param weightedText a list of terms that will be considered higher importance when searching. * @param weightedText a list of terms that will be considered higher
* importance when searching.
* @return if the append was successful. * @return if the append was successful.
*/ */
private boolean appendWeightedSearch(StringBuilder sb, String field, String searchText, Set<String> weightedText) { private boolean appendWeightedSearch(StringBuilder sb, String field, String searchText, Set<String> weightedText) {
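The Javadoc above says appendWeightedSearch splits the search text into words and boosts any word found in the weighting set; the method body falls outside this hunk. The following is only a rough sketch of that idea, assuming standard Lucene query syntax (term^2 for a boost); the field name, boost factor, and helper name are illustrative, not the project's code:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    // Hypothetical illustration of building a weighted field query string.
    public class WeightedQueryDemo {

        static String weightedField(String field, String searchText, Set<String> weightedText) {
            final StringBuilder sb = new StringBuilder(field).append(":(");
            for (String word : searchText.split("\\s+")) {
                if (sb.charAt(sb.length() - 1) != '(') {
                    sb.append(' ');
                }
                sb.append(word);
                if (weightedText.contains(word)) {
                    sb.append("^2"); // boost words found in the weighting set
                }
            }
            return sb.append(')').toString();
        }

        public static void main(String[] args) {
            final Set<String> productWeightings = new HashSet<>(Arrays.asList("struts"));
            // prints: product:(apache struts^2 core)
            System.out.println(weightedField("product", "apache struts core", productWeightings));
        }
    }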
@@ -377,7 +423,8 @@ public class CPEAnalyzer implements Analyzer {
} }
/** /**
* Removes characters from the input text that are not used within the CPE index. * Removes characters from the input text that are not used within the CPE
* index.
* *
* @param text is the text to remove the characters from. * @param text is the text to remove the characters from.
* @return the text having removed some characters. * @return the text having removed some characters.
@@ -387,7 +434,8 @@ public class CPEAnalyzer implements Analyzer {
} }
/** /**
* Compares two strings after lower casing them and removing the non-alpha characters. * Compares two strings after lower casing them and removing the non-alpha
* characters.
* *
* @param l string one to compare. * @param l string one to compare.
* @param r string two to compare. * @param r string two to compare.
@@ -404,8 +452,9 @@ public class CPEAnalyzer implements Analyzer {
} }
/** /**
* Ensures that the CPE Identified matches the dependency. This validates that the product, vendor, and version information * Ensures that the CPE Identified matches the dependency. This validates
* for the CPE are contained within the dependency's evidence. * that the product, vendor, and version information for the CPE are
* contained within the dependency's evidence.
* *
* @param entry a CPE entry. * @param entry a CPE entry.
* @param dependency the dependency that the CPE entries could be for. * @param dependency the dependency that the CPE entries could be for.
@@ -437,7 +486,7 @@ public class CPEAnalyzer implements Analyzer {
return false; return false;
} }
final String[] words = text.split("[\\s_-]"); final String[] words = text.split("[\\s_-]");
final List<String> list = new ArrayList<String>(); final List<String> list = new ArrayList<>();
String tempWord = null; String tempWord = null;
for (String word : words) { for (String word : words) {
/* /*
@@ -472,14 +521,16 @@ public class CPEAnalyzer implements Analyzer {
} }
/** /**
* Analyzes a dependency and attempts to determine if there are any CPE identifiers for this dependency. * Analyzes a dependency and attempts to determine if there are any CPE
* identifiers for this dependency.
* *
* @param dependency The Dependency to analyze. * @param dependency The Dependency to analyze.
* @param engine The analysis engine * @param engine The analysis engine
* @throws AnalysisException is thrown if there is an issue analyzing the dependency. * @throws AnalysisException is thrown if there is an issue analyzing the
* dependency.
*/ */
@Override @Override
public void analyze(Dependency dependency, Engine engine) throws AnalysisException { protected synchronized void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
try { try {
determineCPE(dependency); determineCPE(dependency);
} catch (CorruptIndexException ex) { } catch (CorruptIndexException ex) {
@@ -492,15 +543,19 @@ public class CPEAnalyzer implements Analyzer {
} }
/** /**
* Retrieves a list of CPE values from the CveDB based on the vendor and product passed in. The list is then validated to find * Retrieves a list of CPE values from the CveDB based on the vendor and
* only CPEs that are valid for the given dependency. It is possible that the CPE identified is a best effort "guess" based on * product passed in. The list is then validated to find only CPEs that are
* the vendor, product, and version information. * valid for the given dependency. It is possible that the CPE identified is
* a best effort "guess" based on the vendor, product, and version
* information.
* *
* @param dependency the Dependency being analyzed * @param dependency the Dependency being analyzed
* @param vendor the vendor for the CPE being analyzed * @param vendor the vendor for the CPE being analyzed
* @param product the product for the CPE being analyzed * @param product the product for the CPE being analyzed
* @param currentConfidence the current confidence being used during analysis * @param currentConfidence the current confidence being used during
* @return <code>true</code> if an identifier was added to the dependency; otherwise <code>false</code> * analysis
* @return <code>true</code> if an identifier was added to the dependency;
* otherwise <code>false</code>
* @throws UnsupportedEncodingException is thrown if UTF-8 is not supported * @throws UnsupportedEncodingException is thrown if UTF-8 is not supported
*/ */
protected boolean determineIdentifiers(Dependency dependency, String vendor, String product, protected boolean determineIdentifiers(Dependency dependency, String vendor, String product,
@@ -509,11 +564,12 @@ public class CPEAnalyzer implements Analyzer {
DependencyVersion bestGuess = new DependencyVersion("-"); DependencyVersion bestGuess = new DependencyVersion("-");
Confidence bestGuessConf = null; Confidence bestGuessConf = null;
boolean hasBroadMatch = false; boolean hasBroadMatch = false;
final List<IdentifierMatch> collected = new ArrayList<IdentifierMatch>(); final List<IdentifierMatch> collected = new ArrayList<>();
//TODO the following algorithm incorrectly identifies things as a lower version
// if there lower confidence evidence when the current (highest) version number
// is newer then anything in the NVD.
for (Confidence conf : Confidence.values()) { for (Confidence conf : Confidence.values()) {
// if (conf.compareTo(currentConfidence) > 0) {
// break;
// }
for (Evidence evidence : dependency.getVersionEvidence().iterator(conf)) { for (Evidence evidence : dependency.getVersionEvidence().iterator(conf)) {
final DependencyVersion evVer = DependencyVersionUtil.parseVersion(evidence.getValue()); final DependencyVersion evVer = DependencyVersionUtil.parseVersion(evidence.getValue());
if (evVer == null) { if (evVer == null) {
@@ -535,24 +591,22 @@ public class CPEAnalyzer implements Analyzer {
final String url = String.format(NVD_SEARCH_URL, URLEncoder.encode(vs.getName(), "UTF-8")); final String url = String.format(NVD_SEARCH_URL, URLEncoder.encode(vs.getName(), "UTF-8"));
final IdentifierMatch match = new IdentifierMatch("cpe", vs.getName(), url, IdentifierConfidence.EXACT_MATCH, conf); final IdentifierMatch match = new IdentifierMatch("cpe", vs.getName(), url, IdentifierConfidence.EXACT_MATCH, conf);
collected.add(match); collected.add(match);
} else {
//TODO the following isn't quite right is it? need to think about this guessing game a bit more. //TODO the following isn't quite right is it? need to think about this guessing game a bit more.
if (evVer.getVersionParts().size() <= dbVer.getVersionParts().size() } else if (evVer.getVersionParts().size() <= dbVer.getVersionParts().size()
&& evVer.matchesAtLeastThreeLevels(dbVer)) { && evVer.matchesAtLeastThreeLevels(dbVer)) {
if (bestGuessConf == null || bestGuessConf.compareTo(conf) > 0) { if (bestGuessConf == null || bestGuessConf.compareTo(conf) > 0) {
if (bestGuess.getVersionParts().size() < dbVer.getVersionParts().size()) { if (bestGuess.getVersionParts().size() < dbVer.getVersionParts().size()) {
bestGuess = dbVer; bestGuess = dbVer;
bestGuessConf = conf; bestGuessConf = conf;
}
} }
} }
} }
} }
if (bestGuessConf == null || bestGuessConf.compareTo(conf) > 0) { if ((bestGuessConf == null || bestGuessConf.compareTo(conf) > 0)
if (bestGuess.getVersionParts().size() < evVer.getVersionParts().size()) { && bestGuess.getVersionParts().size() < evVer.getVersionParts().size()) {
bestGuess = evVer; bestGuess = evVer;
bestGuessConf = conf; bestGuessConf = conf;
}
} }
} }
} }
@@ -562,10 +616,12 @@ public class CPEAnalyzer implements Analyzer {
final String cpeUrlName = String.format("cpe:/a:%s:%s", vendor, product); final String cpeUrlName = String.format("cpe:/a:%s:%s", vendor, product);
url = String.format(NVD_SEARCH_URL, URLEncoder.encode(cpeUrlName, "UTF-8")); url = String.format(NVD_SEARCH_URL, URLEncoder.encode(cpeUrlName, "UTF-8"));
} }
if (bestGuessConf == null) { if (bestGuessConf == null) {
bestGuessConf = Confidence.LOW; bestGuessConf = Confidence.LOW;
} }
final IdentifierMatch match = new IdentifierMatch("cpe", cpeName, url, IdentifierConfidence.BEST_GUESS, bestGuessConf); final IdentifierMatch match = new IdentifierMatch("cpe", cpeName, url, IdentifierConfidence.BEST_GUESS, bestGuessConf);
collected.add(match); collected.add(match);
Collections.sort(collected); Collections.sort(collected);
@@ -588,6 +644,18 @@ public class CPEAnalyzer implements Analyzer {
return identifierAdded; return identifierAdded;
} }
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_CPE_ENABLED;
}
/** /**
* The confidence whether the identifier is an exact match, or a best guess. * The confidence whether the identifier is an exact match, or a best guess.
*/ */
@@ -602,37 +670,50 @@ public class CPEAnalyzer implements Analyzer {
*/ */
BEST_GUESS, BEST_GUESS,
/** /**
* The entire vendor/product group must be added (without a guess at version) because there is a CVE with a VS that only * The entire vendor/product group must be added (without a guess at
* specifies vendor/product. * version) because there is a CVE with a VS that only specifies
* vendor/product.
*/ */
BROAD_MATCH BROAD_MATCH
} }
/** /**
* A simple object to hold an identifier and carry information about the confidence in the identifier. * A simple object to hold an identifier and carry information about the
* confidence in the identifier.
*/ */
private static class IdentifierMatch implements Comparable<IdentifierMatch> { private static class IdentifierMatch implements Comparable<IdentifierMatch> {
/**
* The confidence in the evidence used to identify this match.
*/
private Confidence evidenceConfidence;
/**
* The confidence whether this is an exact match, or a best guess.
*/
private IdentifierConfidence confidence;
/**
* The CPE identifier.
*/
private Identifier identifier;
/** /**
* Constructs an IdentifierMatch. * Constructs an IdentifierMatch.
* *
* @param type the type of identifier (such as CPE) * @param type the type of identifier (such as CPE)
* @param value the value of the identifier * @param value the value of the identifier
* @param url the URL of the identifier * @param url the URL of the identifier
* @param identifierConfidence the confidence in the identifier: best guess or exact match * @param identifierConfidence the confidence in the identifier: best
* @param evidenceConfidence the confidence of the evidence used to find the identifier * guess or exact match
* @param evidenceConfidence the confidence of the evidence used to find
* the identifier
*/ */
IdentifierMatch(String type, String value, String url, IdentifierConfidence identifierConfidence, Confidence evidenceConfidence) { IdentifierMatch(String type, String value, String url, IdentifierConfidence identifierConfidence, Confidence evidenceConfidence) {
this.identifier = new Identifier(type, value, url); this.identifier = new Identifier(type, value, url);
this.confidence = identifierConfidence; this.confidence = identifierConfidence;
this.evidenceConfidence = evidenceConfidence; this.evidenceConfidence = evidenceConfidence;
} }
//<editor-fold defaultstate="collapsed" desc="Property implementations: evidenceConfidence, confidence, identifier">
/**
* The confidence in the evidence used to identify this match.
*/
private Confidence evidenceConfidence;
//<editor-fold defaultstate="collapsed" desc="Property implementations: evidenceConfidence, confidence, identifier">
/** /**
* Get the value of evidenceConfidence * Get the value of evidenceConfidence
* *
@@ -650,10 +731,6 @@ public class CPEAnalyzer implements Analyzer {
public void setEvidenceConfidence(Confidence evidenceConfidence) { public void setEvidenceConfidence(Confidence evidenceConfidence) {
this.evidenceConfidence = evidenceConfidence; this.evidenceConfidence = evidenceConfidence;
} }
/**
* The confidence whether this is an exact match, or a best guess.
*/
private IdentifierConfidence confidence;
/** /**
* Get the value of confidence. * Get the value of confidence.
@@ -672,10 +749,6 @@ public class CPEAnalyzer implements Analyzer {
public void setConfidence(IdentifierConfidence confidence) { public void setConfidence(IdentifierConfidence confidence) {
this.confidence = confidence; this.confidence = confidence;
} }
/**
* The CPE identifier.
*/
private Identifier identifier;
/** /**
* Get the value of identifier. * Get the value of identifier.
@@ -743,29 +816,24 @@ public class CPEAnalyzer implements Analyzer {
if (this.confidence != other.confidence) { if (this.confidence != other.confidence) {
return false; return false;
} }
if (this.identifier != other.identifier && (this.identifier == null || !this.identifier.equals(other.identifier))) { return !(this.identifier != other.identifier && (this.identifier == null || !this.identifier.equals(other.identifier)));
return false;
}
return true;
} }
//</editor-fold> //</editor-fold>
/** /**
* Standard implementation of compareTo that compares identifier confidence, evidence confidence, and then the identifier. * Standard implementation of compareTo that compares identifier
* confidence, evidence confidence, and then the identifier.
* *
* @param o the IdentifierMatch to compare to * @param o the IdentifierMatch to compare to
* @return the natural ordering of IdentifierMatch * @return the natural ordering of IdentifierMatch
*/ */
@Override @Override
public int compareTo(IdentifierMatch o) { public int compareTo(IdentifierMatch o) {
int conf = this.confidence.compareTo(o.confidence); return new CompareToBuilder()
if (conf == 0) { .append(confidence, o.confidence)
conf = this.evidenceConfidence.compareTo(o.evidenceConfidence); .append(evidenceConfidence, o.evidenceConfidence)
if (conf == 0) { .append(identifier, o.identifier)
conf = identifier.compareTo(o.identifier); .toComparison();
}
}
return conf;
} }
} }
} }
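The rewritten compareTo above replaces a nested if chain with Apache Commons Lang's CompareToBuilder, comparing confidence, then evidenceConfidence, then identifier. A self-contained sketch of the same chained-comparison pattern; the RankedName class and its fields are hypothetical:

    import org.apache.commons.lang3.builder.CompareToBuilder;

    // Hypothetical value class showing the CompareToBuilder idiom used above.
    public class RankedName implements Comparable<RankedName> {

        private final int rank;
        private final String name;

        public RankedName(int rank, String name) {
            this.rank = rank;
            this.name = name;
        }

        @Override
        public int compareTo(RankedName o) {
            // compares rank first, then name, mirroring confidence -> evidenceConfidence -> identifier
            return new CompareToBuilder()
                    .append(rank, o.rank)
                    .append(name, o.name)
                    .toComparison();
        }
    }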

View File

@@ -33,8 +33,10 @@ import java.io.File;
import java.io.FileFilter; import java.io.FileFilter;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL; import java.net.URL;
import java.util.List; import java.util.List;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.DownloadFailedException; import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader; import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.FileFilterBuilder; import org.owasp.dependencycheck.utils.FileFilterBuilder;
@@ -42,8 +44,8 @@ import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
/** /**
* Analyzer which will attempt to locate a dependency, and the GAV information, by querying Central for the dependency's SHA-1 * Analyzer which will attempt to locate a dependency, and the GAV information,
* digest. * by querying Central for the dependency's SHA-1 digest.
* *
* @author colezlaw * @author colezlaw
*/ */
@@ -70,9 +72,10 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
private static final String SUPPORTED_EXTENSIONS = "jar"; private static final String SUPPORTED_EXTENSIONS = "jar";
/** /**
* The analyzer should be disabled if there are errors, so this is a flag to determine if such an error has occurred. * The analyzer should be disabled if there are errors, so this is a flag to
* determine if such an error has occurred.
*/ */
private boolean errorFlag = false; private volatile boolean errorFlag = false;
/** /**
* The searcher itself. * The searcher itself.
@@ -96,17 +99,18 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* Determines if this analyzer is enabled. * Determines if this analyzer is enabled.
* *
* @return <code>true</code> if the analyzer is enabled; otherwise <code>false</code> * @return <code>true</code> if the analyzer is enabled; otherwise
* <code>false</code>
*/ */
private boolean checkEnabled() { private boolean checkEnabled() {
boolean retval = false; boolean retVal = false;
try { try {
if (Settings.getBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED)) { if (Settings.getBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED)) {
if (!Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED) if (!Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)
|| NexusAnalyzer.DEFAULT_URL.equals(Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL))) { || NexusAnalyzer.DEFAULT_URL.equals(Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL))) {
LOGGER.debug("Enabling the Central analyzer"); LOGGER.debug("Enabling the Central analyzer");
retval = true; retVal = true;
} else { } else {
LOGGER.info("Nexus analyzer is enabled, disabling the Central Analyzer"); LOGGER.info("Nexus analyzer is enabled, disabling the Central Analyzer");
} }
@@ -116,22 +120,27 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
} catch (InvalidSettingException ise) { } catch (InvalidSettingException ise) {
LOGGER.warn("Invalid setting. Disabling the Central analyzer"); LOGGER.warn("Invalid setting. Disabling the Central analyzer");
} }
return retval; return retVal;
} }
/** /**
* Initializes the analyzer once before any analysis is performed. * Initializes the analyzer once before any analysis is performed.
* *
* @throws Exception if there's an error during initialization * @throws InitializationException if there's an error during initialization
*/ */
@Override @Override
public void initializeFileTypeAnalyzer() throws Exception { public void initializeFileTypeAnalyzer() throws InitializationException {
LOGGER.debug("Initializing Central analyzer"); LOGGER.debug("Initializing Central analyzer");
LOGGER.debug("Central analyzer enabled: {}", isEnabled()); LOGGER.debug("Central analyzer enabled: {}", isEnabled());
if (isEnabled()) { if (isEnabled()) {
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_CENTRAL_URL); final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_CENTRAL_URL);
LOGGER.debug("Central Analyzer URL: {}", searchUrl); LOGGER.debug("Central Analyzer URL: {}", searchUrl);
searcher = new CentralSearch(new URL(searchUrl)); try {
searcher = new CentralSearch(new URL(searchUrl));
} catch (MalformedURLException ex) {
setEnabled(false);
throw new InitializationException("The configured URL to Maven Central is malformed: " + searchUrl, ex);
}
} }
} }
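The initialization change above turns a malformed configured URL into a disabled analyzer plus an InitializationException instead of letting a raw exception escape. A generic sketch of the same validate-or-fail pattern using only the JDK; the helper name, message, and sample URL are illustrative:

    import java.net.MalformedURLException;
    import java.net.URL;

    // Illustrative URL validation; validateSearchUrl is a hypothetical helper.
    public final class UrlCheckDemo {

        static URL validateSearchUrl(String configured) {
            try {
                return new URL(configured);
            } catch (MalformedURLException ex) {
                // in the analyzer this is where setEnabled(false) runs before rethrowing
                throw new IllegalStateException("The configured URL is malformed: " + configured, ex);
            }
        }

        public static void main(String[] args) {
            System.out.println(validateSearchUrl("https://example.org/search"));
        }
    }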
@@ -146,7 +155,8 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Returns the key used in the properties file to reference the analyzer's enabled property. * Returns the key used in the properties file to reference the
* analyzer's enabled property.
* *
* @return the analyzer's enabled property setting key. * @return the analyzer's enabled property setting key.
*/ */
@@ -183,7 +193,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException when there's an exception during analysis * @throws AnalysisException when there's an exception during analysis
*/ */
@Override @Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException { public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (errorFlag || !isEnabled()) { if (errorFlag || !isEnabled()) {
return; return;
} }
@@ -219,7 +229,8 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.warn("Unable to download pom.xml for {} from Central; " LOGGER.warn("Unable to download pom.xml for {} from Central; "
+ "this could result in undetected CPE/CVEs.", dependency.getFileName()); + "this could result in undetected CPE/CVEs.", dependency.getFileName());
} finally { } finally {
if (pomFile != null && !FileUtils.deleteQuietly(pomFile)) { if (pomFile != null && pomFile.exists() && !FileUtils.deleteQuietly(pomFile)) {
LOGGER.debug("Failed to delete temporary pom file {}", pomFile.toString());
pomFile.deleteOnExit(); pomFile.deleteOnExit();
} }
} }
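The finally block above now checks that the temporary pom file still exists before calling deleteQuietly, logs a debug message when deletion fails, and falls back to deleteOnExit. A minimal sketch of that cleanup pattern with commons-io; the temp-file prefix is illustrative:

    import java.io.File;
    import java.io.IOException;
    import org.apache.commons.io.FileUtils;

    // Illustrative temp-file cleanup mirroring the finally block above.
    public class TempCleanupDemo {
        public static void main(String[] args) throws IOException {
            final File pomFile = File.createTempFile("pom", ".xml");
            try {
                // ... use the temporary file ...
            } finally {
                if (pomFile.exists() && !FileUtils.deleteQuietly(pomFile)) {
                    // could not delete now; ask the JVM to remove it on shutdown
                    pomFile.deleteOnExit();
                }
            }
        }
    }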

View File

@@ -0,0 +1,205 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2016 IBM Corporation. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
/**
* This analyzer is used to analyze Swift and Objective-C packages by collecting
* information from .podspec files. For the CocoaPods dependency manager, see
* https://cocoapods.org/.
*
* @author Bianca Jiang (https://twitter.com/biancajiang)
*/
@Experimental
public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
// private static final Logger LOGGER = LoggerFactory.getLogger(CocoaPodsAnalyzer.class);
/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "CocoaPods Package Analyzer";
/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
/**
* The file name to scan.
*/
public static final String PODSPEC = "podspec";
/**
* Filter that detects files named "*.podspec".
*/
private static final FileFilter PODSPEC_FILTER = FileFilterBuilder.newInstance().addExtensions(PODSPEC).build();
/**
* The capture group #1 is the block variable. e.g. "Pod::Spec.new do
* |spec|"
*/
private static final Pattern PODSPEC_BLOCK_PATTERN = Pattern.compile("Pod::Spec\\.new\\s+?do\\s+?\\|(.+?)\\|");
/**
* Returns the FileFilter
*
* @return the FileFilter
*/
@Override
protected FileFilter getFileFilter() {
return PODSPEC_FILTER;
}
@Override
protected void initializeFileTypeAnalyzer() {
// NO-OP
}
/**
* Returns the name of the analyzer.
*
* @return the name of the analyzer.
*/
@Override
public String getName() {
return ANALYZER_NAME;
}
/**
* Returns the phase that the analyzer is intended to run in.
*
* @return the phase that the analyzer is intended to run in.
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}
/**
* Returns the key used in the properties file to reference the analyzer's
* enabled property.
*
* @return the analyzer's enabled property setting key
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_COCOAPODS_ENABLED;
}
@Override
protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
String contents;
try {
contents = FileUtils.readFileToString(dependency.getActualFile(), Charset.defaultCharset());
} catch (IOException e) {
throw new AnalysisException(
"Problem occurred while reading dependency file.", e);
}
final Matcher matcher = PODSPEC_BLOCK_PATTERN.matcher(contents);
if (matcher.find()) {
contents = contents.substring(matcher.end());
final String blockVariable = matcher.group(1);
final EvidenceCollection vendor = dependency.getVendorEvidence();
final EvidenceCollection product = dependency.getProductEvidence();
final EvidenceCollection version = dependency.getVersionEvidence();
final String name = addStringEvidence(product, contents, blockVariable, "name", "name", Confidence.HIGHEST);
if (!name.isEmpty()) {
vendor.addEvidence(PODSPEC, "name_project", name, Confidence.HIGHEST);
}
addStringEvidence(product, contents, blockVariable, "summary", "summary", Confidence.HIGHEST);
addStringEvidence(vendor, contents, blockVariable, "author", "authors?", Confidence.HIGHEST);
addStringEvidence(vendor, contents, blockVariable, "homepage", "homepage", Confidence.HIGHEST);
addStringEvidence(vendor, contents, blockVariable, "license", "licen[cs]es?", Confidence.HIGHEST);
addStringEvidence(version, contents, blockVariable, "version", "version", Confidence.HIGHEST);
}
setPackagePath(dependency);
}
/**
* Extracts evidence from the contents and adds it to the given evidence
* collection.
*
* @param evidences the evidence collection to update
* @param contents the text to extract evidence from
* @param blockVariable the block variable within the content to search for
* @param field the name of the field being searched for
* @param fieldPattern the field pattern within the contents to search for
* @param confidence the confidence level of the evidence if found
* @return the string that was added as evidence
*/
private String addStringEvidence(EvidenceCollection evidences, String contents,
String blockVariable, String field, String fieldPattern, Confidence confidence) {
String value = "";
//capture array value between [ ]
final Matcher arrayMatcher = Pattern.compile(
String.format("\\s*?%s\\.%s\\s*?=\\s*?\\{\\s*?(.*?)\\s*?\\}", blockVariable, fieldPattern),
Pattern.CASE_INSENSITIVE).matcher(contents);
if (arrayMatcher.find()) {
value = arrayMatcher.group(1);
} else { //capture single value between quotes
final Matcher matcher = Pattern.compile(
String.format("\\s*?%s\\.%s\\s*?=\\s*?(['\"])(.*?)\\1", blockVariable, fieldPattern),
Pattern.CASE_INSENSITIVE).matcher(contents);
if (matcher.find()) {
value = matcher.group(2);
}
}
if (value.length() > 0) {
evidences.addEvidence(PODSPEC, field, value, confidence);
}
return value;
}
/**
* Sets the package path on the given dependency.
*
* @param dep the dependency to update
*/
private void setPackagePath(Dependency dep) {
final File file = new File(dep.getFilePath());
final String parent = file.getParent();
if (parent != null) {
dep.setPackagePath(parent);
}
}
}
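The two regular expressions above do most of the work in this analyzer: PODSPEC_BLOCK_PATTERN captures the Pod::Spec block variable, and addStringEvidence builds a per-field pattern around it to pull quoted values. The sketch below applies the same patterns (copied from the class) to a small invented podspec snippet:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Applies the podspec patterns shown above to an invented sample snippet.
    public class PodspecRegexDemo {
        public static void main(String[] args) {
            final String contents =
                    "Pod::Spec.new do |spec|\n"
                  + "  spec.name    = 'SamplePod'\n"
                  + "  spec.version = '1.2.3'\n"
                  + "end\n";

            final Matcher block = Pattern.compile("Pod::Spec\\.new\\s+?do\\s+?\\|(.+?)\\|").matcher(contents);
            if (block.find()) {
                final String blockVariable = block.group(1); // "spec"
                final Matcher name = Pattern.compile(
                        String.format("\\s*?%s\\.%s\\s*?=\\s*?(['\"])(.*?)\\1", blockVariable, "name"),
                        Pattern.CASE_INSENSITIVE).matcher(contents);
                if (name.find()) {
                    System.out.println(blockVariable + " -> " + name.group(2)); // prints: spec -> SamplePod
                }
            }
        }
    }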

View File

@@ -24,6 +24,7 @@ import org.owasp.dependencycheck.data.composer.ComposerException;
import org.owasp.dependencycheck.data.composer.ComposerLockParser; import org.owasp.dependencycheck.data.composer.ComposerLockParser;
import org.owasp.dependencycheck.dependency.Confidence; import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.Checksum; import org.owasp.dependencycheck.utils.Checksum;
import org.owasp.dependencycheck.utils.FileFilterBuilder; import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
@@ -32,39 +33,41 @@ import org.slf4j.LoggerFactory;
import java.io.FileFilter; import java.io.FileFilter;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.FileNotFoundException; import java.io.IOException;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.security.MessageDigest; import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/** /**
* Used to analyze a composer.lock file for a composer PHP app. * Used to analyze a composer.lock file for a composer PHP app.
* *
* @author colezlaw * @author colezlaw
*/ */
@Experimental
public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer { public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* The logger * The logger.
*/ */
private static final Logger LOGGER = LoggerFactory.getLogger(ComposerLockAnalyzer.class); private static final Logger LOGGER = LoggerFactory.getLogger(ComposerLockAnalyzer.class);
/** /**
* The analyzer name * The analyzer name.
*/ */
private static final String ANALYZER_NAME = "Composer.lock analyzer"; private static final String ANALYZER_NAME = "Composer.lock analyzer";
/** /**
* composer.json * composer.json.
*/ */
private static final String COMPOSER_LOCK = "composer.lock"; private static final String COMPOSER_LOCK = "composer.lock";
/** /**
* The FileFilter * The FileFilter.
*/ */
private static final FileFilter FILE_FILTER = FileFilterBuilder.newInstance().addFilenames(COMPOSER_LOCK).build(); private static final FileFilter FILE_FILTER = FileFilterBuilder.newInstance().addFilenames(COMPOSER_LOCK).build();
/** /**
* Returns the FileFilter * Returns the FileFilter.
* *
* @return the FileFilter * @return the FileFilter
*/ */
@@ -74,20 +77,21 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Initializes the analyzer * Initializes the analyzer.
* *
* @throws Exception * @throws InitializationException thrown if an exception occurs getting an
* instance of SHA1
*/ */
@Override @Override
protected void initializeFileTypeAnalyzer() throws Exception { protected void initializeFileTypeAnalyzer() throws InitializationException {
sha1 = MessageDigest.getInstance("SHA1"); try {
getSha1MessageDigest();
} catch (IllegalStateException ex) {
setEnabled(false);
throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
}
} }
/**
* The MessageDigest for calculating a new digest for the new dependencies added
*/
private MessageDigest sha1 = null;
/** /**
* Entry point for the analyzer. * Entry point for the analyzer.
* *
@@ -96,10 +100,8 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException if there's a failure during analysis * @throws AnalysisException if there's a failure during analysis
*/ */
@Override @Override
protected void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException { protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
FileInputStream fis = null; try (FileInputStream fis = new FileInputStream(dependency.getActualFile())) {
try {
fis = new FileInputStream(dependency.getActualFile());
final ComposerLockParser clp = new ComposerLockParser(fis); final ComposerLockParser clp = new ComposerLockParser(fis);
LOGGER.info("Checking composer.lock file {}", dependency.getActualFilePath()); LOGGER.info("Checking composer.lock file {}", dependency.getActualFilePath());
clp.process(); clp.process();
@@ -107,6 +109,7 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
final Dependency d = new Dependency(dependency.getActualFile()); final Dependency d = new Dependency(dependency.getActualFile());
d.setDisplayFileName(String.format("%s:%s/%s", dependency.getDisplayFileName(), dep.getGroup(), dep.getProject())); d.setDisplayFileName(String.format("%s:%s/%s", dependency.getDisplayFileName(), dep.getGroup(), dep.getProject()));
final String filePath = String.format("%s:%s/%s", dependency.getFilePath(), dep.getGroup(), dep.getProject()); final String filePath = String.format("%s:%s/%s", dependency.getFilePath(), dep.getGroup(), dep.getProject());
final MessageDigest sha1 = getSha1MessageDigest();
d.setFilePath(filePath); d.setFilePath(filePath);
d.setSha1sum(Checksum.getHex(sha1.digest(filePath.getBytes(Charset.defaultCharset())))); d.setSha1sum(Checksum.getHex(sha1.digest(filePath.getBytes(Charset.defaultCharset()))));
d.getVendorEvidence().addEvidence(COMPOSER_LOCK, "vendor", dep.getGroup(), Confidence.HIGHEST); d.getVendorEvidence().addEvidence(COMPOSER_LOCK, "vendor", dep.getGroup(), Confidence.HIGHEST);
@@ -115,18 +118,10 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.info("Adding dependency {}", d); LOGGER.info("Adding dependency {}", d);
engine.getDependencies().add(d); engine.getDependencies().add(d);
} }
} catch (FileNotFoundException fnfe) { } catch (IOException ex) {
LOGGER.warn("Error opening dependency {}", dependency.getActualFilePath()); LOGGER.warn("Error opening dependency {}", dependency.getActualFilePath());
} catch (ComposerException ce) { } catch (ComposerException ce) {
LOGGER.warn("Error parsing composer.json {}", dependency.getActualFilePath(), ce); LOGGER.warn("Error parsing composer.json {}", dependency.getActualFilePath(), ce);
} finally {
if (fis != null) {
try {
fis.close();
} catch (Exception e) {
LOGGER.debug("Unable to close file", e);
}
}
} }
} }
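The analyzeDependency change above replaces a manual finally/close with try-with-resources, so the stream is closed even when parsing throws. A minimal generic sketch of that idiom; the file path is illustrative:

    import java.io.FileInputStream;
    import java.io.IOException;

    // Generic try-with-resources sketch; the stream closes automatically on exit or exception.
    public class TryWithResourcesDemo {
        public static void main(String[] args) {
            try (FileInputStream fis = new FileInputStream("composer.lock")) {
                System.out.println("first byte: " + fis.read());
            } catch (IOException ex) {
                System.err.println("Error opening file: " + ex.getMessage());
            }
        }
    }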
@@ -159,4 +154,18 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
public AnalysisPhase getAnalysisPhase() { public AnalysisPhase getAnalysisPhase() {
return AnalysisPhase.INFORMATION_COLLECTION; return AnalysisPhase.INFORMATION_COLLECTION;
} }
/**
* Returns the sha1 message digest.
*
* @return the sha1 message digest
*/
private MessageDigest getSha1MessageDigest() {
try {
return MessageDigest.getInstance("SHA1");
} catch (NoSuchAlgorithmException e) {
LOGGER.error(e.getMessage());
throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
}
}
} }
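getSha1MessageDigest above wraps the checked NoSuchAlgorithmException in an IllegalStateException so callers get an unchecked failure. The sketch below shows the same lookup plus a hex digest of an invented file path; the toHex helper only stands in for the project's Checksum.getHex:

    import java.nio.charset.Charset;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    // Illustrative SHA-1 digest of a string, mirroring how the virtual dependency's path is hashed above.
    public class Sha1Demo {

        private static MessageDigest getSha1MessageDigest() {
            try {
                return MessageDigest.getInstance("SHA1");
            } catch (NoSuchAlgorithmException e) {
                throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
            }
        }

        private static String toHex(byte[] bytes) {
            final StringBuilder sb = new StringBuilder();
            for (byte b : bytes) {
                sb.append(String.format("%02x", b));
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            final String filePath = "composer.lock:vendor/project"; // invented example path
            final byte[] digest = getSha1MessageDigest().digest(filePath.getBytes(Charset.defaultCharset()));
            System.out.println(toHex(digest));
        }
    }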

View File

@@ -20,7 +20,8 @@ package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.Engine; import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.suppression.SuppressionRule; import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.xml.suppression.SuppressionRule;
/** /**
* The suppression analyzer processes an externally defined XML document that complies with the suppressions.xsd schema. * The suppression analyzer processes an externally defined XML document that complies with the suppressions.xsd schema.
@@ -62,7 +63,7 @@ public class CpeSuppressionAnalyzer extends AbstractSuppressionAnalyzer {
//</editor-fold> //</editor-fold>
@Override @Override
public void analyze(final Dependency dependency, final Engine engine) throws AnalysisException { protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (getRules() == null || getRules().size() <= 0) { if (getRules() == null || getRules().size() <= 0) {
return; return;
@@ -72,4 +73,15 @@ public class CpeSuppressionAnalyzer extends AbstractSuppressionAnalyzer {
rule.process(dependency); rule.process(dependency);
} }
} }
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_CPE_SUPPRESSION_ENABLED;
}
} }

View File

@@ -30,20 +30,24 @@ import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier; import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.utils.DependencyVersion; import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil; import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
/** /**
* <p> * <p>
* This analyzer ensures dependencies that should be grouped together, to remove excess noise from the report, are grouped. An * This analyzer ensures dependencies that should be grouped together, to remove
* example would be Spring, Spring Beans, Spring MVC, etc. If they are all for the same version and have the same relative path * excess noise from the report, are grouped. An example would be Spring, Spring
* then these should be grouped into a single dependency under the core/main library.</p> * Beans, Spring MVC, etc. If they are all for the same version and have the
* same relative path then these should be grouped into a single dependency
* under the core/main library.</p>
* <p> * <p>
* Note, this grouping only works on dependencies with identified CVE entries</p> * Note, this grouping only works on dependencies with identified CVE
* entries</p>
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Analyzer { public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
/** /**
* The Logger. * The Logger.
@@ -55,10 +59,23 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
* A pattern for obtaining the first part of a filename. * A pattern for obtaining the first part of a filename.
*/ */
private static final Pattern STARTING_TEXT_PATTERN = Pattern.compile("^[a-zA-Z0-9]*"); private static final Pattern STARTING_TEXT_PATTERN = Pattern.compile("^[a-zA-Z0-9]*");
/** /**
* a flag indicating if this analyzer has run. This analyzer only runs once. * a flag indicating if this analyzer has run. This analyzer only runs once.
*/ */
private boolean analyzed = false; private boolean analyzed = false;
/**
* Returns a flag indicating if this analyzer has run. This analyzer only
* runs once. Note this is currently only used in the unit tests.
*
* @return a flag indicating if this analyzer has run. This analyzer only
* runs once
*/
protected synchronized boolean getAnalyzed() {
return analyzed;
}
//</editor-fold> //</editor-fold>
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer"> //<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/** /**
@@ -68,7 +85,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
/** /**
* The phase that this analyzer is intended to run in. * The phase that this analyzer is intended to run in.
*/ */
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.PRE_FINDING_ANALYSIS; private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.FINAL;
/** /**
* Returns the name of the analyzer. * Returns the name of the analyzer.
@@ -92,18 +109,43 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
//</editor-fold> //</editor-fold>
/** /**
* Analyzes a set of dependencies. If they have been found to have the same base path and the same set of identifiers they are * Does not support parallel processing as it only runs once and then
* likely related. The related dependencies are bundled into a single reportable item. * operates on <em>all</em> dependencies.
*
* @return whether or not parallel processing is enabled
* @see #analyze(Dependency, Engine)
*/
@Override
public boolean supportsParallelProcessing() {
return false;
}
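supportsParallelProcessing() returning false, together with the synchronized analyzeDependency and the analyzed flag further down, makes this a run-once analyzer that operates on the whole dependency list in a single pass. A stripped-down sketch of that guard; the class and method names are hypothetical, not the project's API:

    // Hypothetical run-once worker illustrating the synchronized flag guard used by this analyzer.
    public class RunOnceWorker {

        private boolean analyzed = false;

        public synchronized void runOnce() {
            if (!analyzed) {
                analyzed = true;
                // ... operate on the full dependency list exactly once ...
                System.out.println("bundling pass executed");
            }
        }

        public static void main(String[] args) {
            final RunOnceWorker worker = new RunOnceWorker();
            worker.runOnce();
            worker.runOnce(); // second call is a no-op
        }
    }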
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_DEPENDENCY_BUNDLING_ENABLED;
}
/**
* Analyzes a set of dependencies. If they have been found to have the same
* base path and the same set of identifiers they are likely related. The
* related dependencies are bundled into a single reportable item.
* *
* @param ignore this analyzer ignores the dependency being analyzed * @param ignore this analyzer ignores the dependency being analyzed
* @param engine the engine that is scanning the dependencies * @param engine the engine that is scanning the dependencies
* @throws AnalysisException is thrown if there is an error reading the JAR file. * @throws AnalysisException is thrown if there is an error reading the JAR
* file.
*/ */
@Override @Override
public void analyze(Dependency ignore, Engine engine) throws AnalysisException { protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
if (!analyzed) { if (!analyzed) {
analyzed = true; analyzed = true;
final Set<Dependency> dependenciesToRemove = new HashSet<Dependency>(); final Set<Dependency> dependenciesToRemove = new HashSet<>();
final ListIterator<Dependency> mainIterator = engine.getDependencies().listIterator(); final ListIterator<Dependency> mainIterator = engine.getDependencies().listIterator();
//for (Dependency nextDependency : engine.getDependencies()) { //for (Dependency nextDependency : engine.getDependencies()) {
while (mainIterator.hasNext()) { while (mainIterator.hasNext()) {
@@ -112,13 +154,15 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex()); final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
while (subIterator.hasNext()) { while (subIterator.hasNext()) {
final Dependency nextDependency = subIterator.next(); final Dependency nextDependency = subIterator.next();
if (hashesMatch(dependency, nextDependency) && !containedInWar(dependency.getFilePath()) if (hashesMatch(dependency, nextDependency)) {
&& !containedInWar(nextDependency.getFilePath())) { if (!containedInWar(dependency.getFilePath())
if (firstPathIsShortest(dependency.getFilePath(), nextDependency.getFilePath())) { && !containedInWar(nextDependency.getFilePath())) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove); if (firstPathIsShortest(dependency.getFilePath(), nextDependency.getFilePath())) {
} else { mergeDependencies(dependency, nextDependency, dependenciesToRemove);
mergeDependencies(nextDependency, dependency, dependenciesToRemove); } else {
break; //since we merged into the next dependency - skip forward to the next in mainIterator mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
} }
} else if (isShadedJar(dependency, nextDependency)) { } else if (isShadedJar(dependency, nextDependency)) {
if (dependency.getFileName().toLowerCase().endsWith("pom.xml")) { if (dependency.getFileName().toLowerCase().endsWith("pom.xml")) {
@@ -131,6 +175,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
} }
} else if (cpeIdentifiersMatch(dependency, nextDependency) } else if (cpeIdentifiersMatch(dependency, nextDependency)
&& hasSameBasePath(dependency, nextDependency) && hasSameBasePath(dependency, nextDependency)
&& vulnCountMatches(dependency, nextDependency)
&& fileNameMatch(dependency, nextDependency)) { && fileNameMatch(dependency, nextDependency)) {
if (isCore(dependency, nextDependency)) { if (isCore(dependency, nextDependency)) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove); mergeDependencies(dependency, nextDependency, dependenciesToRemove);
@@ -152,10 +197,11 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
* Adds the relatedDependency to the dependency's related dependencies. * Adds the relatedDependency to the dependency's related dependencies.
* *
* @param dependency the main dependency * @param dependency the main dependency
* @param relatedDependency a collection of dependencies to be removed from the main analysis loop, this is the source of * @param relatedDependency a collection of dependencies to be removed from
* dependencies to remove * the main analysis loop, this is the source of dependencies to remove
* @param dependenciesToRemove a collection of dependencies that will be removed from the main analysis loop, this function * @param dependenciesToRemove a collection of dependencies that will be
* adds to this collection * removed from the main analysis loop, this function adds to this
* collection
*/ */
private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) { private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
dependency.addRelatedDependency(relatedDependency); dependency.addRelatedDependency(relatedDependency);
@@ -171,13 +217,19 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
} }
/** /**
* Attempts to trim a maven repo to a common base path. This is typically [drive]\[repo_location]\repository\[path1]\[path2]. * Attempts to trim a maven repo to a common base path. This is typically
* [drive]\[repo_location]\repository\[path1]\[path2].
* *
* @param path the path to trim * @param path the path to trim
* @return a string representing the base path. * @return a string representing the base path.
*/ */
private String getBaseRepoPath(final String path) { private String getBaseRepoPath(final String path) {
int pos = path.indexOf("repository" + File.separator) + 11; int pos;
if (path.contains("local-repo")) {
pos = path.indexOf("local-repo" + File.separator) + 11;
} else {
pos = path.indexOf("repository" + File.separator) + 11;
}
if (pos < 0) { if (pos < 0) {
return path; return path;
} }
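getBaseRepoPath now recognizes both "repository" and "local-repo" directory names when trimming a Maven repo path to a common base; the rest of the method is outside this hunk. The sketch below only reproduces the marker lookup shown above on an invented Unix-style path, and makes no claim about the trimming that follows:

    import java.io.File;

    // Illustrates the marker lookup shown above; the trimming after the index is omitted.
    public class RepoPathDemo {

        static int markerEnd(String path) {
            if (path.contains("local-repo")) {
                return path.indexOf("local-repo" + File.separator) + 11;
            }
            return path.indexOf("repository" + File.separator) + 11;
        }

        public static void main(String[] args) {
            // invented path; assumes a Unix-style separator so File.separator is "/"
            final String path = "/home/build/.m2/repository/org/example/demo/1.0/demo-1.0.jar";
            System.out.println(markerEnd(path)); // index just past "repository/"
        }
    }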
@@ -196,11 +248,13 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
} }
/** /**
* Returns true if the file names (and version if it exists) of the two dependencies are sufficiently similar. * Returns true if the file names (and version if it exists) of the two
* dependencies are sufficiently similar.
* *
* @param dependency1 a dependency to compare * @param dependency1 a dependency to compare
* @param dependency2 a dependency to compare * @param dependency2 a dependency to compare
* @return true if the identifiers in the two supplied dependencies are equal * @return true if the identifiers in the two supplied dependencies are
* equal
*/ */
private boolean fileNameMatch(Dependency dependency1, Dependency dependency2) { private boolean fileNameMatch(Dependency dependency1, Dependency dependency2) {
if (dependency1 == null || dependency1.getFileName() == null if (dependency1 == null || dependency1.getFileName() == null
@@ -228,11 +282,13 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
} }
/** /**
* Returns true if the CPE identifiers in the two supplied dependencies are equal. * Returns true if the CPE identifiers in the two supplied dependencies are
* equal.
* *
* @param dependency1 a dependency to compare * @param dependency1 a dependency to compare
* @param dependency2 a dependency to compare * @param dependency2 a dependency to compare
* @return true if the identifiers in the two supplied dependencies are equal * @return true if the identifiers in the two supplied dependencies are
* equal
*/ */
private boolean cpeIdentifiersMatch(Dependency dependency1, Dependency dependency2) { private boolean cpeIdentifiersMatch(Dependency dependency1, Dependency dependency2) {
if (dependency1 == null || dependency1.getIdentifiers() == null if (dependency1 == null || dependency1.getIdentifiers() == null
@@ -266,6 +322,19 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
return matches; return matches;
} }
/**
* Returns true if the two dependencies have the same vulnerability count.
*
* @param dependency1 a dependency to compare
* @param dependency2 a dependency to compare
* @return true if the two dependencies have the same vulnerability count
*/
private boolean vulnCountMatches(Dependency dependency1, Dependency dependency2) {
return dependency1.getVulnerabilities() != null && dependency2.getVulnerabilities() != null
&& dependency1.getVulnerabilities().size() == dependency2.getVulnerabilities().size();
}
/** /**
* Determines if the two dependencies have the same base path. * Determines if the two dependencies have the same base path.
* *
@@ -283,11 +352,14 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
String right = rFile.getParent(); String right = rFile.getParent();
if (left == null) { if (left == null) {
return right == null; return right == null;
} else if (right == null) {
return false;
} }
if (left.equalsIgnoreCase(right)) { if (left.equalsIgnoreCase(right)) {
return true; return true;
} }
if (left.matches(".*[/\\\\]repository[/\\\\].*") && right.matches(".*[/\\\\]repository[/\\\\].*")) {
if (left.matches(".*[/\\\\](repository|local-repo)[/\\\\].*") && right.matches(".*[/\\\\](repository|local-repo)[/\\\\].*")) {
left = getBaseRepoPath(left); left = getBaseRepoPath(left);
right = getBaseRepoPath(right); right = getBaseRepoPath(right);
} }
@@ -304,14 +376,15 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
} }
/** /**
* This is likely a very broken attempt at determining if the 'left' dependency is the 'core' library in comparison to the * This is likely a very broken attempt at determining if the 'left'
* 'right' library. * dependency is the 'core' library in comparison to the 'right' library.
* *
* @param left the dependency to test * @param left the dependency to test
* @param right the dependency to test against * @param right the dependency to test against
* @return a boolean indicating whether or not the left dependency should be considered the "core" version. * @return a boolean indicating whether or not the left dependency should be
* considered the "core" version.
*/ */
boolean isCore(Dependency left, Dependency right) { protected boolean isCore(Dependency left, Dependency right) {
final String leftName = left.getFileName().toLowerCase(); final String leftName = left.getFileName().toLowerCase();
final String rightName = right.getFileName().toLowerCase(); final String rightName = right.getFileName().toLowerCase();
@@ -324,10 +397,6 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
|| !rightName.contains("core") && leftName.contains("core") || !rightName.contains("core") && leftName.contains("core")
|| !rightName.contains("kernel") && leftName.contains("kernel")) { || !rightName.contains("kernel") && leftName.contains("kernel")) {
returnVal = true; returnVal = true;
// } else if (leftName.matches(".*struts2\\-core.*") && rightName.matches(".*xwork\\-core.*")) {
// returnVal = true;
// } else if (rightName.matches(".*struts2\\-core.*") && leftName.matches(".*xwork\\-core.*")) {
// returnVal = false;
} else { } else {
/* /*
* considered splitting the names up and comparing the components, * considered splitting the names up and comparing the components,
@@ -345,11 +414,13 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
} }
/** /**
* Compares the SHA1 hashes of two dependencies to determine if they are equal. * Compares the SHA1 hashes of two dependencies to determine if they are
* equal.
* *
* @param dependency1 a dependency object to compare * @param dependency1 a dependency object to compare
* @param dependency2 a dependency object to compare * @param dependency2 a dependency object to compare
* @return true if the sha1 hashes of the two dependencies match; otherwise false * @return true if the sha1 hashes of the two dependencies match; otherwise
* false
*/ */
private boolean hashesMatch(Dependency dependency1, Dependency dependency2) { private boolean hashesMatch(Dependency dependency1, Dependency dependency2) {
if (dependency1 == null || dependency2 == null || dependency1.getSha1sum() == null || dependency2.getSha1sum() == null) { if (dependency1 == null || dependency2 == null || dependency1.getSha1sum() == null || dependency2.getSha1sum() == null) {
@@ -359,12 +430,13 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
} }
/** /**
* Determines if the jar is shaded and the created pom.xml identified the same CPE as the jar - if so, the pom.xml dependency * Determines if the jar is shaded and the created pom.xml identified the
* should be removed. * same CPE as the jar - if so, the pom.xml dependency should be removed.
* *
* @param dependency a dependency to check * @param dependency a dependency to check
* @param nextDependency another dependency to check * @param nextDependency another dependency to check
* @return true if on of the dependencies is a pom.xml and the identifiers between the two collections match; otherwise false * @return true if on of the dependencies is a pom.xml and the identifiers
* between the two collections match; otherwise false
*/ */
private boolean isShadedJar(Dependency dependency, Dependency nextDependency) { private boolean isShadedJar(Dependency dependency, Dependency nextDependency) {
final String mainName = dependency.getFileName().toLowerCase(); final String mainName = dependency.getFileName().toLowerCase();
@@ -378,14 +450,18 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
} }
/** /**
* Determines which path is shortest; if path lengths are equal then we use compareTo of the string method to determine if the * Determines which path is shortest; if path lengths are equal then we use
* first path is smaller. * compareTo of the string method to determine if the first path is smaller.
* *
* @param left the first path to compare * @param left the first path to compare
* @param right the second path to compare * @param right the second path to compare
* @return <code>true</code> if the leftPath is the shortest; otherwise <code>false</code> * @return <code>true</code> if the leftPath is the shortest; otherwise
* <code>false</code>
*/ */
protected boolean firstPathIsShortest(String left, String right) { protected boolean firstPathIsShortest(String left, String right) {
if (left.contains("dctemp")) {
return false;
}
final String leftPath = left.replace('\\', '/'); final String leftPath = left.replace('\\', '/');
final String rightPath = right.replace('\\', '/'); final String rightPath = right.replace('\\', '/');
@@ -423,6 +499,7 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer implements Anal
* @return true if the path contains '.war\' or '.ear\'. * @return true if the path contains '.war\' or '.ear\'.
*/ */
private boolean containedInWar(String filePath) { private boolean containedInWar(String filePath) {
return filePath == null ? false : filePath.matches(".*\\.(ear|war)[\\\\/].*"); return filePath != null && filePath.matches(".*\\.(ear|war)[\\\\/].*");
} }
} }
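The hunk above widens the base-path check so that artifacts cached under a Maven-invoker style "local-repo" directory are normalized the same way as those under "repository". A minimal standalone sketch of the pattern's effect (not part of the commit; the sample paths are hypothetical):

    // Demonstrates the broadened base-path regex from the diff above.
    public class BasePathPatternDemo {
        public static void main(String[] args) {
            final String pattern = ".*[/\\\\](repository|local-repo)[/\\\\].*";
            // A dependency cached in a standard Maven local repository.
            System.out.println("/home/user/.m2/repository/junit/junit/4.12/junit-4.12.jar".matches(pattern)); // true
            // A dependency cached in an invoker-style local-repo directory (newly matched).
            System.out.println("/build/target/local-repo/junit/junit/4.12/junit-4.12.jar".matches(pattern));  // true
            // A path outside any repository layout.
            System.out.println("/opt/app/lib/junit-4.12.jar".matches(pattern));                               // false
        }
    }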

View File

@@ -0,0 +1,283 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.util.HashSet;
import java.util.Iterator;
import java.util.ListIterator;
import java.util.Set;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* This analyzer will merge dependencies, created from different sources, into a
* single dependency.</p>
*
* @author Jeremy Long
*/
public class DependencyMergingAnalyzer extends AbstractAnalyzer {
//<editor-fold defaultstate="collapsed" desc="Constants and Member Variables">
/**
* The Logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(DependencyMergingAnalyzer.class);
/**
* a flag indicating if this analyzer has run. This analyzer only runs once.
*/
private boolean analyzed = false;
/**
* Returns a flag indicating if this analyzer has run. This analyzer only
* runs once. Note this is currently only used in the unit tests.
*
* @return a flag indicating if this analyzer has run. This analyzer only
* runs once
*/
protected synchronized boolean getAnalyzed() {
return analyzed;
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "Dependency Merging Analyzer";
/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.POST_INFORMATION_COLLECTION;
/**
* Returns the name of the analyzer.
*
* @return the name of the analyzer.
*/
@Override
public String getName() {
return ANALYZER_NAME;
}
/**
* Returns the phase that the analyzer is intended to run in.
*
* @return the phase that the analyzer is intended to run in.
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}
/**
* Does not support parallel processing as it only runs once and then
* operates on <em>all</em> dependencies.
*
* @return whether or not parallel processing is enabled
* @see #analyze(Dependency, Engine)
*/
@Override
public boolean supportsParallelProcessing() {
return false;
}
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_DEPENDENCY_MERGING_ENABLED;
}
//</editor-fold>
/**
* Analyzes a set of dependencies. If they have been found to be the same
* dependency created by multiple FileTypeAnalyzers (e.g. a gemspec
* dependency and a dependency from the Bundle Audit Analyzer), the
* dependencies are merged into a single reportable item.
*
* @param ignore this analyzer ignores the dependency being analyzed
* @param engine the engine that is scanning the dependencies
* @throws AnalysisException is thrown if there is an error reading the JAR
* file.
*/
@Override
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
if (!analyzed) {
analyzed = true;
final Set<Dependency> dependenciesToRemove = new HashSet<>();
final ListIterator<Dependency> mainIterator = engine.getDependencies().listIterator();
//for (Dependency nextDependency : engine.getDependencies()) {
while (mainIterator.hasNext()) {
final Dependency dependency = mainIterator.next();
if (mainIterator.hasNext() && !dependenciesToRemove.contains(dependency)) {
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
while (subIterator.hasNext()) {
final Dependency nextDependency = subIterator.next();
Dependency main;
if ((main = getMainGemspecDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
} else if ((main = getMainSwiftDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
}
}
}
}
//removing dependencies here as ensuring correctness and avoiding ConcurrentUpdateExceptions
// was difficult because of the inner iterator.
engine.getDependencies().removeAll(dependenciesToRemove);
}
}
/**
* Adds the relatedDependency to the dependency's related dependencies.
*
* @param dependency the main dependency
* @param relatedDependency a collection of dependencies to be removed from
* the main analysis loop, this is the source of dependencies to remove
* @param dependenciesToRemove a collection of dependencies that will be
* removed from the main analysis loop, this function adds to this
* collection
*/
private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
LOGGER.debug("Merging '{}' into '{}'", relatedDependency.getFilePath(), dependency.getFilePath());
dependency.addRelatedDependency(relatedDependency);
dependency.getVendorEvidence().getEvidence().addAll(relatedDependency.getVendorEvidence().getEvidence());
dependency.getProductEvidence().getEvidence().addAll(relatedDependency.getProductEvidence().getEvidence());
dependency.getVersionEvidence().getEvidence().addAll(relatedDependency.getVersionEvidence().getEvidence());
final Iterator<Dependency> i = relatedDependency.getRelatedDependencies().iterator();
while (i.hasNext()) {
dependency.addRelatedDependency(i.next());
i.remove();
}
if (dependency.getSha1sum().equals(relatedDependency.getSha1sum())) {
dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
}
dependenciesToRemove.add(relatedDependency);
}
/**
* Bundling Ruby gems that are identified from different .gemspec files but
* denote the same package path. This happens when Ruby bundler installs an
* application's dependencies by running "bundle install".
*
* @param dependency1 dependency to compare
* @param dependency2 dependency to compare
* @return true if the dependencies being analyzed appear to be the
* same; otherwise false
*/
private boolean isSameRubyGem(Dependency dependency1, Dependency dependency2) {
if (dependency1 == null || dependency2 == null
|| !dependency1.getFileName().endsWith(".gemspec")
|| !dependency2.getFileName().endsWith(".gemspec")
|| dependency1.getPackagePath() == null
|| dependency2.getPackagePath() == null) {
return false;
}
return dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath());
}
/**
* Ruby gems installed by "bundle install" can have zero or more *.gemspec
* files, all of which have the same packagePath and should be grouped. If
* one of these gemspec is from <parent>/specifications/*.gemspec, because
* it is a stub with fully resolved gem meta-data created by Ruby bundler,
* this dependency should be the main one. Otherwise, use dependency2 as
* main.
*
* This method returns null if any dependency is not from *.gemspec, or the
* two do not have the same packagePath. In this case, they should not be
* grouped.
*
* @param dependency1 dependency to compare
* @param dependency2 dependency to compare
* @return the main dependency; or null if a gemspec is not included in the
* analysis
*/
private Dependency getMainGemspecDependency(Dependency dependency1, Dependency dependency2) {
if (isSameRubyGem(dependency1, dependency2)) {
final File lFile = dependency1.getActualFile();
final File left = lFile.getParentFile();
if (left != null && left.getName().equalsIgnoreCase("specifications")) {
return dependency1;
}
return dependency2;
}
return null;
}
/**
* Bundling same swift dependencies with the same packagePath but identified
* by different file type analyzers.
*
* @param dependency1 dependency to test
* @param dependency2 dependency to test
* @return <code>true</code> if the dependencies appear to be the same;
* otherwise <code>false</code>
*/
private boolean isSameSwiftPackage(Dependency dependency1, Dependency dependency2) {
if (dependency1 == null || dependency2 == null
|| (!dependency1.getFileName().endsWith(".podspec")
&& !dependency1.getFileName().equals("Package.swift"))
|| (!dependency2.getFileName().endsWith(".podspec")
&& !dependency2.getFileName().equals("Package.swift"))
|| dependency1.getPackagePath() == null
|| dependency2.getPackagePath() == null) {
return false;
}
return dependency1.getPackagePath().equalsIgnoreCase(dependency2.getPackagePath());
}
/**
* Determines which of the swift dependencies should be considered the
* primary.
*
* @param dependency1 the first swift dependency to compare
* @param dependency2 the second swift dependency to compare
* @return the primary swift dependency
*/
private Dependency getMainSwiftDependency(Dependency dependency1, Dependency dependency2) {
if (isSameSwiftPackage(dependency1, dependency2)) {
if (dependency1.getFileName().endsWith(".podspec")) {
return dependency1;
}
return dependency2;
}
return null;
}
}
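The getMainGemspecDependency method above prefers the gemspec whose parent directory is "specifications", since that stub carries the fully resolved gem metadata. A standalone sketch of that selection rule (not the analyzer's code; file paths are hypothetical):

    import java.io.File;

    // Mirrors the "prefer the specifications stub" rule described above.
    public class GemspecSelectionDemo {
        static File preferSpecificationsStub(File gemspec1, File gemspec2) {
            final File parent = gemspec1.getParentFile();
            if (parent != null && "specifications".equalsIgnoreCase(parent.getName())) {
                return gemspec1;
            }
            return gemspec2;
        }

        public static void main(String[] args) {
            final File stub = new File("/app/vendor/bundle/ruby/2.3.0/specifications/rails-4.2.0.gemspec");
            final File source = new File("/app/vendor/bundle/ruby/2.3.0/gems/rails-4.2.0/rails.gemspec");
            // Prints the stub path, the dependency that would be treated as the main one.
            System.out.println(preferSpecificationsStub(stub, source).getPath());
        }
    }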

View File

@@ -13,28 +13,22 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  *
- * Copyright (c) 2015 The OWASP Foundatio. All Rights Reserved.
+ * Copyright (c) 2016 Jeremy Long. All Rights Reserved.
  */
-package org.owasp.dependencycheck.data.update;
+package org.owasp.dependencycheck.analyzer;
-import org.junit.Test;
-import org.owasp.dependencycheck.BaseTest;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
 /**
+ * Annotation used to flag an analyzer as experimental.
  *
- * @author jeremy
+ * @author jeremy long
  */
-public class CpeUpdaterIntegrationTest extends BaseTest {
-    /**
-     * Test of update method, of class CpeUpdater.
-     */
-    @Test
-    public void testUpdate() throws Exception {
-        //commented out as the current code base does not utilize the CpeUpdater.
-        // CpeUpdater instance = new CpeUpdater();
-        // instance.update();
-    }
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface Experimental {
 }
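Because the new Experimental marker is retained at runtime and targets types, callers can detect experimental analyzers reflectively. A quick, self-contained illustration (not from the repository); the annotation definition is repeated so the sketch compiles on its own, and DemoAnalyzer is a made-up class:

    import java.lang.annotation.ElementType;
    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;
    import java.lang.annotation.Target;

    public class ExperimentalCheckDemo {
        // Same retention/target as the annotation introduced above.
        @Retention(RetentionPolicy.RUNTIME)
        @Target(ElementType.TYPE)
        @interface Experimental {
        }

        @Experimental
        static class DemoAnalyzer {
        }

        public static void main(String[] args) {
            // Prints true because the annotation is retained at runtime.
            System.out.println(DemoAnalyzer.class.isAnnotationPresent(Experimental.class));
        }
    }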

View File

@@ -34,11 +34,13 @@ import org.owasp.dependencycheck.dependency.Dependency;
 import org.owasp.dependencycheck.dependency.Identifier;
 import org.owasp.dependencycheck.dependency.VulnerableSoftware;
 import org.owasp.dependencycheck.utils.FileFilterBuilder;
+import org.owasp.dependencycheck.utils.Settings;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 /**
- * This analyzer attempts to remove some well known false positives - specifically regarding the java runtime.
+ * This analyzer attempts to remove some well known false positives -
+ * specifically regarding the java runtime.
  *
  * @author Jeremy Long
  */
@@ -83,17 +85,30 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
     public AnalysisPhase getAnalysisPhase() {
         return ANALYSIS_PHASE;
     }
+    /**
+     * <p>
+     * Returns the setting key to determine if the analyzer is enabled.</p>
+     *
+     * @return the key for the analyzer's enabled property
+     */
+    @Override
+    protected String getAnalyzerEnabledSettingKey() {
+        return Settings.KEYS.ANALYZER_FALSE_POSITIVE_ENABLED;
+    }
     //</editor-fold>
     /**
-     * Analyzes the dependencies and removes bad/incorrect CPE associations based on various heuristics.
+     * Analyzes the dependencies and removes bad/incorrect CPE associations
+     * based on various heuristics.
      *
      * @param dependency the dependency to analyze.
      * @param engine the engine that is scanning the dependencies
-     * @throws AnalysisException is thrown if there is an error reading the JAR file.
+     * @throws AnalysisException is thrown if there is an error reading the JAR
+     * file.
      */
     @Override
-    public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
+    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
         removeJreEntries(dependency);
         removeBadMatches(dependency);
         removeBadSpringMatches(dependency);
@@ -106,22 +121,23 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
     /**
      * Removes inaccurate matches on springframework CPEs.
      *
-     * @param dependency the dependency to test for and remove known inaccurate CPE matches
+     * @param dependency the dependency to test for and remove known inaccurate
+     * CPE matches
      */
     private void removeBadSpringMatches(Dependency dependency) {
         String mustContain = null;
         for (Identifier i : dependency.getIdentifiers()) {
-            if ("maven".contains(i.getType())) {
-                if (i.getValue() != null && i.getValue().startsWith("org.springframework.")) {
+            if ("maven".contains(i.getType())
+                    && i.getValue() != null && i.getValue().startsWith("org.springframework.")) {
                 final int endPoint = i.getValue().indexOf(':', 19);
                 if (endPoint >= 0) {
                     mustContain = i.getValue().substring(19, endPoint).toLowerCase();
                     break;
                 }
-                }
             }
         }
-        if (mustContain != null) {
+        if (mustContain
+                != null) {
             final Iterator<Identifier> itr = dependency.getIdentifiers().iterator();
             while (itr.hasNext()) {
                 final Identifier i = itr.next();
@@ -138,7 +154,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
     /**
      * <p>
-     * Intended to remove spurious CPE entries. By spurious we mean duplicate, less specific CPE entries.</p>
+     * Intended to remove spurious CPE entries. By spurious we mean duplicate,
+     * less specific CPE entries.</p>
      * <p>
      * Example:</p>
      * <code>
@@ -156,7 +173,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
      */
     @SuppressWarnings("null")
     private void removeSpuriousCPE(Dependency dependency) {
-        final List<Identifier> ids = new ArrayList<Identifier>(dependency.getIdentifiers());
+        final List<Identifier> ids = new ArrayList<>(dependency.getIdentifiers());
         Collections.sort(ids);
         final ListIterator<Identifier> mainItr = ids.listIterator();
         while (mainItr.hasNext()) {
@@ -189,10 +206,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
                 if (nextVersion.startsWith(currentVersion) || "-".equals(currentVersion)) {
                     dependency.getIdentifiers().remove(currentId);
                 }
-            } else {
-                if (currentVersion.startsWith(nextVersion) || "-".equals(nextVersion)) {
-                    dependency.getIdentifiers().remove(nextId);
-                }
+            } else if (currentVersion.startsWith(nextVersion) || "-".equals(nextVersion)) {
+                dependency.getIdentifiers().remove(nextId);
             }
         }
     }
@@ -200,7 +215,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
         }
     }
     /**
-     * Regex to identify core java libraries and a few other commonly misidentified ones.
+     * Regex to identify core java libraries and a few other commonly
+     * misidentified ones.
     */
     public static final Pattern CORE_JAVA = Pattern.compile("^cpe:/a:(sun|oracle|ibm):(j2[ems]e|"
             + "java(_platform_micro_edition|_runtime_environment|_se|virtual_machine|se_development_kit|fx)?|"
@@ -215,12 +231,14 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
     */
     public static final Pattern CORE_FILES = Pattern.compile("(^|/)((alt[-])?rt|jsse|jfxrt|jfr|jce|javaws|deploy|charsets)\\.jar$");
     /**
-     * Regex to identify core jsf java library files. This is currently incomplete.
+     * Regex to identify core jsf java library files. This is currently
+     * incomplete.
     */
     public static final Pattern CORE_JSF_FILES = Pattern.compile("(^|/)jsf[-][^/]*\\.jar$");
     /**
-     * Removes any CPE entries for the JDK/JRE unless the filename ends with rt.jar
+     * Removes any CPE entries for the JDK/JRE unless the filename ends with
+     * rt.jar
      *
      * @param dependency the dependency to remove JRE CPEs from
     */
@@ -264,8 +282,9 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
     }
     /**
-     * Removes bad CPE matches for a dependency. Unfortunately, right now these are hard-coded patches for specific problems
-     * identified when testing this on a LARGE volume of jar files.
+     * Removes bad CPE matches for a dependency. Unfortunately, right now these
+     * are hard-coded patches for specific problems identified when testing this
+     * on a LARGE volume of jar files.
      *
      * @param dependency the dependency to analyze
     */
@@ -340,7 +359,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
     }
     /**
-     * Removes CPE matches for the wrong version of a dependency. Currently, this only covers Axis 1 & 2.
+     * Removes CPE matches for the wrong version of a dependency. Currently,
+     * this only covers Axis 1 & 2.
      *
      * @param dependency the dependency to analyze
     */
@@ -373,8 +393,10 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
     }
     /**
-     * There are some known CPE entries, specifically regarding sun and oracle products due to the acquisition and changes in
-     * product names, that based on given evidence we can add the related CPE entries to ensure a complete list of CVE entries.
+     * There are some known CPE entries, specifically regarding sun and oracle
+     * products due to the acquisition and changes in product names, that based
+     * on given evidence we can add the related CPE entries to ensure a complete
+     * list of CVE entries.
      *
      * @param dependency the dependency being analyzed
     */
@@ -411,19 +433,21 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
     }
     /**
-     * Removes duplicate entries identified that are contained within JAR files. These occasionally crop up due to POM entries or
-     * other types of files (such as DLLs and EXEs) being contained within the JAR.
+     * Removes duplicate entries identified that are contained within JAR files.
+     * These occasionally crop up due to POM entries or other types of files
+     * (such as DLLs and EXEs) being contained within the JAR.
      *
      * @param dependency the dependency that might be a duplicate
     * @param engine the engine used to scan all dependencies
     */
-    private void removeDuplicativeEntriesFromJar(Dependency dependency, Engine engine) {
+    private synchronized void removeDuplicativeEntriesFromJar(Dependency dependency, Engine engine) {
         if (dependency.getFileName().toLowerCase().endsWith("pom.xml")
                 || DLL_EXE_FILTER.accept(dependency.getActualFile())) {
             String parentPath = dependency.getFilePath().toLowerCase();
             if (parentPath.contains(".jar")) {
                 parentPath = parentPath.substring(0, parentPath.indexOf(".jar") + 4);
-                final Dependency parent = findDependency(parentPath, engine.getDependencies());
+                final List<Dependency> dependencies = engine.getDependencies();
+                final Dependency parent = findDependency(parentPath, dependencies);
                 if (parent != null) {
                     boolean remove = false;
                     for (Identifier i : dependency.getIdentifiers()) {
@@ -440,16 +464,16 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
                         }
                     }
                     if (remove) {
-                        engine.getDependencies().remove(dependency);
+                        dependencies.remove(dependency);
                     }
                 }
             }
         }
     }
     /**
-     * Retrieves a given dependency, based on a given path, from a list of dependencies.
+     * Retrieves a given dependency, based on a given path, from a list of
+     * dependencies.
      *
      * @param dependencyPath the path of the dependency to return
     * @param dependencies the collection of dependencies to search
@@ -465,7 +489,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
     }
     /**
-     * Takes a full CPE and returns the CPE trimmed to include only vendor and product.
+     * Takes a full CPE and returns the CPE trimmed to include only vendor and
+     * product.
      *
     * @param value the CPE value to trim
    * @return a CPE value that only includes the vendor and product
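The CORE_FILES pattern shown in this file appears to be what drives the removal of JRE-related false positives described above. A self-contained check of that pattern (not part of the analyzer; the sample paths are hypothetical):

    import java.util.regex.Pattern;

    // Exercises the CORE_FILES regex copied from the diff above.
    public class CoreFilesPatternDemo {
        private static final Pattern CORE_FILES =
                Pattern.compile("(^|/)((alt[-])?rt|jsse|jfxrt|jfr|jce|javaws|deploy|charsets)\\.jar$");

        public static void main(String[] args) {
            System.out.println(CORE_FILES.matcher("/usr/lib/jvm/jre/lib/rt.jar").find());       // true
            System.out.println(CORE_FILES.matcher("/usr/lib/jvm/jre/lib/charsets.jar").find()); // true
            System.out.println(CORE_FILES.matcher("/opt/app/lib/commons-io-2.4.jar").find());   // false
        }
    }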

View File

@@ -18,13 +18,16 @@
 package org.owasp.dependencycheck.analyzer;
 import java.io.File;
 import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.filefilter.NameFileFilter;
 import org.owasp.dependencycheck.Engine;
 import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
 import org.owasp.dependencycheck.dependency.Confidence;
 import org.owasp.dependencycheck.dependency.Dependency;
 import org.owasp.dependencycheck.utils.DependencyVersion;
 import org.owasp.dependencycheck.utils.DependencyVersionUtil;
+import org.owasp.dependencycheck.utils.Settings;
 /**
  *
@@ -32,7 +35,7 @@ import org.owasp.dependencycheck.utils.DependencyVersionUtil;
  *
  * @author Jeremy Long
  */
-public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {
+public class FileNameAnalyzer extends AbstractAnalyzer {
     //<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
     /**
@@ -63,17 +66,38 @@ public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {
     public AnalysisPhase getAnalysisPhase() {
         return ANALYSIS_PHASE;
     }
+    /**
+     * <p>
+     * Returns the setting key to determine if the analyzer is enabled.</p>
+     *
+     * @return the key for the analyzer's enabled property
+     */
+    @Override
+    protected String getAnalyzerEnabledSettingKey() {
+        return Settings.KEYS.ANALYZER_FILE_NAME_ENABLED;
+    }
     //</editor-fold>
+    /**
+     * Python init files
+     */
+    //CSOFF: WhitespaceAfter
+    private static final NameFileFilter IGNORED_FILES = new NameFileFilter(new String[]{
+        "__init__.py",
+        "__init__.pyc",
+        "__init__.pyo",});
+    //CSON: WhitespaceAfter
     /**
      * Collects information about the file name.
      *
      * @param dependency the dependency to analyze.
      * @param engine the engine that is scanning the dependencies
-     * @throws AnalysisException is thrown if there is an error reading the JAR file.
+     * @throws AnalysisException is thrown if there is an error reading the JAR
+     * file.
     */
     @Override
-    public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
+    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
         //strip any path information that may get added by ArchiveAnalyzer, etc.
         final File f = dependency.getActualFile();
@@ -81,32 +105,27 @@ public class FileNameAnalyzer extends AbstractAnalyzer implements Analyzer {
         //add version evidence
         final DependencyVersion version = DependencyVersionUtil.parseVersion(fileName);
+        final String packageName = DependencyVersionUtil.parsePreVersion(fileName);
         if (version != null) {
             // If the version number is just a number like 2 or 23, reduce the confidence
             // a shade. This should hopefully correct for cases like log4j.jar or
             // struts2-core.jar
             if (version.getVersionParts() == null || version.getVersionParts().size() < 2) {
-                dependency.getVersionEvidence().addEvidence("file", "name",
+                dependency.getVersionEvidence().addEvidence("file", "version",
                         version.toString(), Confidence.MEDIUM);
             } else {
-                dependency.getVersionEvidence().addEvidence("file", "name",
+                dependency.getVersionEvidence().addEvidence("file", "version",
                         version.toString(), Confidence.HIGHEST);
             }
             dependency.getVersionEvidence().addEvidence("file", "name",
-                    fileName, Confidence.MEDIUM);
+                    packageName, Confidence.MEDIUM);
         }
-        //add as vendor and product evidence
-        if (fileName.contains("-")) {
+        if (!IGNORED_FILES.accept(f)) {
             dependency.getProductEvidence().addEvidence("file", "name",
-                    fileName, Confidence.HIGHEST);
+                    packageName, Confidence.HIGH);
             dependency.getVendorEvidence().addEvidence("file", "name",
-                    fileName, Confidence.HIGHEST);
-        } else {
-            dependency.getProductEvidence().addEvidence("file", "name",
-                    fileName, Confidence.HIGH);
-            dependency.getVendorEvidence().addEvidence("file", "name",
-                    fileName, Confidence.HIGH);
+                    packageName, Confidence.HIGH);
         }
     }
 }
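The new IGNORED_FILES filter above skips Python __init__ files when adding vendor/product evidence. A small standalone check of the commons-io NameFileFilter used for that (not from the repository; sample paths are hypothetical):

    import java.io.File;
    import org.apache.commons.io.filefilter.NameFileFilter;

    public class IgnoredFilesDemo {
        public static void main(String[] args) {
            final NameFileFilter ignored = new NameFileFilter(new String[]{
                "__init__.py", "__init__.pyc", "__init__.pyo"});
            // true: the file name matches, so evidence would be skipped.
            System.out.println(ignored.accept(new File("/src/pkg/__init__.py")));
            // false: a regular file name, so evidence is still added.
            System.out.println(ignored.accept(new File("/src/pkg/util.py")));
        }
    }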

View File

@@ -26,8 +26,4 @@ import java.io.FileFilter;
  */
 public interface FileTypeAnalyzer extends Analyzer, FileFilter {
-    /**
-     * Resets the analyzers state.
-     */
-    void reset();
 }

View File

@@ -17,21 +17,54 @@
  */
 package org.owasp.dependencycheck.analyzer;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URL;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Set;
+import java.util.regex.Pattern;
 import org.owasp.dependencycheck.Engine;
 import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
-import org.owasp.dependencycheck.dependency.Confidence;
 import org.owasp.dependencycheck.dependency.Dependency;
 import org.owasp.dependencycheck.dependency.Evidence;
+import org.owasp.dependencycheck.exception.InitializationException;
+import org.owasp.dependencycheck.xml.suppression.PropertyType;
+import org.owasp.dependencycheck.utils.DownloadFailedException;
+import org.owasp.dependencycheck.utils.Downloader;
+import org.owasp.dependencycheck.utils.FileUtils;
+import org.owasp.dependencycheck.utils.Settings;
+import org.owasp.dependencycheck.xml.hints.VendorDuplicatingHintRule;
+import org.owasp.dependencycheck.xml.hints.HintParseException;
+import org.owasp.dependencycheck.xml.hints.HintParser;
+import org.owasp.dependencycheck.xml.hints.HintRule;
+import org.owasp.dependencycheck.xml.hints.Hints;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.xml.sax.SAXException;
 /**
+ * This analyzer adds evidence to dependencies to enhance the accuracy of
+ * library identification.
  *
  * @author Jeremy Long
  */
-public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
+public class HintAnalyzer extends AbstractAnalyzer {
+    /**
+     * The Logger for use throughout the class
+     */
+    private static final Logger LOGGER = LoggerFactory.getLogger(HintAnalyzer.class);
+    /**
+     * The name of the hint rule file
+     */
+    private static final String HINT_RULE_FILE_NAME = "dependencycheck-base-hint.xml";
+    /**
+     * The collection of hints.
+     */
+    private Hints hints;
     //<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
     /**
@@ -62,115 +95,192 @@ public class HintAnalyzer extends AbstractAnalyzer implements Analyzer {
     public AnalysisPhase getAnalysisPhase() {
         return ANALYSIS_PHASE;
     }
+    /**
+     * <p>
+     * Returns the setting key to determine if the analyzer is enabled.</p>
+     *
+     * @return the key for the analyzer's enabled property
+     */
+    @Override
+    protected String getAnalyzerEnabledSettingKey() {
+        return Settings.KEYS.ANALYZER_HINT_ENABLED;
+    }
+    /**
+     * The initialize method does nothing for this Analyzer.
+     *
+     * @throws InitializationException thrown if there is an exception
+     */
+    @Override
+    public void initializeAnalyzer() throws InitializationException {
+        try {
+            loadHintRules();
+        } catch (HintParseException ex) {
+            LOGGER.debug("Unable to parse hint file", ex);
+            throw new InitializationException("Unable to parse the hint file", ex);
+        }
+    }
     //</editor-fold>
     /**
-     * The HintAnalyzer uses knowledge about a dependency to add additional information to help in identification of identifiers
-     * or vulnerabilities.
+     * The HintAnalyzer uses knowledge about a dependency to add additional
+     * information to help in identification of identifiers or vulnerabilities.
      *
      * @param dependency The dependency being analyzed
      * @param engine The scanning engine
-     * @throws AnalysisException is thrown if there is an exception analyzing the dependency.
+     * @throws AnalysisException is thrown if there is an exception analyzing
+     * the dependency.
     */
     @Override
-    public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
-        final Evidence springTest1 = new Evidence("Manifest",
-                "Implementation-Title",
-                "Spring Framework",
-                Confidence.HIGH);
-        final Evidence springTest2 = new Evidence("Manifest",
-                "Implementation-Title",
-                "org.springframework.core",
-                Confidence.HIGH);
-        final Evidence springTest3 = new Evidence("Manifest",
-                "Implementation-Title",
-                "spring-core",
-                Confidence.HIGH);
-        final Evidence springTest4 = new Evidence("jar",
-                "package name",
-                "springframework",
-                Confidence.LOW);
-        final Evidence springSecurityTest1 = new Evidence("Manifest",
-                "Bundle-Name",
-                "Spring Security Core",
-                Confidence.MEDIUM);
-        final Evidence springSecurityTest2 = new Evidence("pom",
-                "artifactid",
-                "spring-security-core",
-                Confidence.HIGH);
-        final Evidence symfony = new Evidence("composer.lock",
-                "vendor",
-                "symfony",
-                Confidence.HIGHEST);
-        final Evidence zendframeworkVendor = new Evidence("composer.lock",
-                "vendor",
-                "zendframework",
-                Confidence.HIGHEST);
-        final Evidence zendframeworkProduct = new Evidence("composer.lock",
-                "product",
-                "zendframework",
-                Confidence.HIGHEST);
-        //springsource/vware problem
-        final Set<Evidence> product = dependency.getProductEvidence().getEvidence();
-        final Set<Evidence> vendor = dependency.getVendorEvidence().getEvidence();
-        if (product.contains(springTest1) || product.contains(springTest2) || product.contains(springTest3)
-                || (dependency.getFileName().contains("spring") && product.contains(springTest4))) {
-            dependency.getProductEvidence().addEvidence("hint analyzer", "product", "springsource spring framework", Confidence.HIGH);
-            dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "SpringSource", Confidence.HIGH);
-            dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
-            dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "pivotal", Confidence.HIGH);
-        }
-        if (vendor.contains(springTest4)) {
-            dependency.getProductEvidence().addEvidence("hint analyzer", "product", "springsource_spring_framework", Confidence.HIGH);
-            dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
-            dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "pivotal", Confidence.HIGH);
-        }
-        if (product.contains(springSecurityTest1) || product.contains(springSecurityTest2)) {
-            dependency.getProductEvidence().addEvidence("hint analyzer", "product", "springsource_spring_security", Confidence.HIGH);
-            dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "SpringSource", Confidence.HIGH);
-            dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "vmware", Confidence.HIGH);
-        }
-        if (vendor.contains(symfony)) {
-            dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "sensiolabs", Confidence.HIGHEST);
-        }
-        if (vendor.contains(zendframeworkVendor)) {
-            dependency.getVendorEvidence().addEvidence("hint analyzer", "vendor", "zend", Confidence.HIGHEST);
-        }
-        if (product.contains(zendframeworkProduct)) {
-            dependency.getProductEvidence().addEvidence("hint analyzer", "vendor", "zend_framework", Confidence.HIGHEST);
-        }
-        //sun/oracle problem
+    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
+        for (HintRule hint : hints.getHintRules()) {
+            boolean matchFound = false;
+            for (Evidence given : hint.getGivenVendor()) {
+                if (dependency.getVendorEvidence().getEvidence().contains(given)) {
+                    matchFound = true;
+                    break;
+                }
+            }
+            if (!matchFound) {
+                for (Evidence given : hint.getGivenProduct()) {
+                    if (dependency.getProductEvidence().getEvidence().contains(given)) {
+                        matchFound = true;
+                        break;
+                    }
+                }
+            }
+            if (!matchFound) {
+                for (Evidence given : hint.getGivenVersion()) {
+                    if (dependency.getVersionEvidence().getEvidence().contains(given)) {
+                        matchFound = true;
+                        break;
+                    }
+                }
+            }
+            if (!matchFound) {
+                for (PropertyType pt : hint.getFilenames()) {
+                    if (pt.matches(dependency.getFileName())) {
+                        matchFound = true;
+                        break;
+                    }
+                }
+            }
+            if (matchFound) {
+                for (Evidence e : hint.getAddVendor()) {
+                    dependency.getVendorEvidence().addEvidence(e);
+                }
+                for (Evidence e : hint.getAddProduct()) {
+                    dependency.getProductEvidence().addEvidence(e);
+                }
+                for (Evidence e : hint.getAddVersion()) {
+                    dependency.getVersionEvidence().addEvidence(e);
+                }
+                for (Evidence e : hint.getRemoveVendor()) {
+                    if (dependency.getVendorEvidence().getEvidence().contains(e)) {
+                        dependency.getVendorEvidence().getEvidence().remove(e);
+                    }
+                }
+                for (Evidence e : hint.getRemoveProduct()) {
+                    if (dependency.getProductEvidence().getEvidence().contains(e)) {
+                        dependency.getProductEvidence().getEvidence().remove(e);
+                    }
+                }
+                for (Evidence e : hint.getRemoveVersion()) {
+                    if (dependency.getVersionEvidence().getEvidence().contains(e)) {
+                        dependency.getVersionEvidence().getEvidence().remove(e);
+                    }
+                }
+            }
+        }
         final Iterator<Evidence> itr = dependency.getVendorEvidence().iterator();
-        final List<Evidence> newEntries = new ArrayList<Evidence>();
+        final List<Evidence> newEntries = new ArrayList<>();
         while (itr.hasNext()) {
             final Evidence e = itr.next();
-            if ("sun".equalsIgnoreCase(e.getValue(false))) {
-                final Evidence newEvidence = new Evidence(e.getSource() + " (hint)", e.getName(), "oracle", e.getConfidence());
-                newEntries.add(newEvidence);
-            } else if ("oracle".equalsIgnoreCase(e.getValue(false))) {
-                final Evidence newEvidence = new Evidence(e.getSource() + " (hint)", e.getName(), "sun", e.getConfidence());
-                newEntries.add(newEvidence);
+            for (VendorDuplicatingHintRule dhr : hints.getVendorDuplicatingHintRules()) {
+                if (dhr.getValue().equalsIgnoreCase(e.getValue(false))) {
+                    newEntries.add(new Evidence(e.getSource() + " (hint)",
+                            e.getName(), dhr.getDuplicate(), e.getConfidence()));
+                }
             }
         }
         for (Evidence e : newEntries) {
             dependency.getVendorEvidence().addEvidence(e);
         }
     }
+    /**
+     * Loads the hint rules file.
+     *
+     * @throws HintParseException thrown if the XML cannot be parsed.
+     */
+    private void loadHintRules() throws HintParseException {
+        final HintParser parser = new HintParser();
+        File file = null;
+        try {
+            hints = parser.parseHints(this.getClass().getClassLoader().getResourceAsStream(HINT_RULE_FILE_NAME));
+        } catch (HintParseException | SAXException ex) {
+            LOGGER.error("Unable to parse the base hint data file");
+            LOGGER.debug("Unable to parse the base hint data file", ex);
+        }
+        final String filePath = Settings.getString(Settings.KEYS.HINTS_FILE);
+        if (filePath == null) {
+            return;
+        }
+        boolean deleteTempFile = false;
+        try {
+            final Pattern uriRx = Pattern.compile("^(https?|file)\\:.*", Pattern.CASE_INSENSITIVE);
+            if (uriRx.matcher(filePath).matches()) {
+                deleteTempFile = true;
+                file = FileUtils.getTempFile("hint", "xml");
+                final URL url = new URL(filePath);
+                try {
+                    Downloader.fetchFile(url, file, false);
+                } catch (DownloadFailedException ex) {
+                    Downloader.fetchFile(url, file, true);
+                }
+            } else {
+                file = new File(filePath);
+                if (!file.exists()) {
+                    try (InputStream fromClasspath = this.getClass().getClassLoader().getResourceAsStream(filePath)) {
+                        if (fromClasspath != null) {
+                            deleteTempFile = true;
+                            file = FileUtils.getTempFile("hint", "xml");
+                            try {
+                                org.apache.commons.io.FileUtils.copyInputStreamToFile(fromClasspath, file);
+                            } catch (IOException ex) {
+                                throw new HintParseException("Unable to locate hints file in classpath", ex);
+                            }
+                        }
+                    }
+                }
+            }
+            if (file != null) {
+                try {
+                    final Hints newHints = parser.parseHints(file);
+                    hints.getHintRules().addAll(newHints.getHintRules());
+                    hints.getVendorDuplicatingHintRules().addAll(newHints.getVendorDuplicatingHintRules());
+                    LOGGER.debug("{} hint rules were loaded.", hints.getHintRules().size());
+                    LOGGER.debug("{} duplicating hint rules were loaded.", hints.getVendorDuplicatingHintRules().size());
+                } catch (HintParseException ex) {
+                    LOGGER.warn("Unable to parse hint rule xml file '{}'", file.getPath());
+                    LOGGER.warn(ex.getMessage());
+                    LOGGER.debug("", ex);
+                    throw ex;
+                }
+            }
+        } catch (DownloadFailedException ex) {
+            throw new HintParseException("Unable to fetch the configured hint file", ex);
+        } catch (MalformedURLException ex) {
+            throw new HintParseException("Configured hint file has an invalid URL", ex);
+        } catch (IOException ex) {
+            throw new HintParseException("Unable to create temp file for hints", ex);
+        } finally {
+            if (deleteTempFile && file != null) {
+                FileUtils.delete(file);
+            }
+        }
+    }
 }
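The loadHintRules method above decides whether the configured hints file should be fetched with Downloader or read from disk by testing the location against a small regex. A standalone sketch of that check (not the analyzer's code; the sample values are hypothetical):

    import java.util.regex.Pattern;

    // Mirrors the URI-versus-path test performed before loading the hints file.
    public class HintsLocationDemo {
        private static final Pattern URI_RX = Pattern.compile("^(https?|file)\\:.*", Pattern.CASE_INSENSITIVE);

        public static void main(String[] args) {
            System.out.println(URI_RX.matcher("https://example.org/hints.xml").matches()); // true - would be downloaded
            System.out.println(URI_RX.matcher("conf/extra-hints.xml").matches());          // false - treated as a local file
        }
    }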

View File

@@ -23,10 +23,10 @@ import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Reader; import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Arrays;
import java.util.Enumeration; import java.util.Enumeration;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
@@ -35,6 +35,7 @@ import java.util.Map.Entry;
import java.util.Properties; import java.util.Properties;
import java.util.Set; import java.util.Set;
import java.util.StringTokenizer; import java.util.StringTokenizer;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.jar.Attributes; import java.util.jar.Attributes;
import java.util.jar.JarEntry; import java.util.jar.JarEntry;
import java.util.jar.JarFile; import java.util.jar.JarFile;
@@ -43,12 +44,14 @@ import java.util.regex.Pattern;
import java.util.zip.ZipEntry; import java.util.zip.ZipEntry;
import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.IOUtils;
import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.StringUtils;
import org.jsoup.Jsoup; import org.jsoup.Jsoup;
import org.owasp.dependencycheck.Engine; import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence; import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection; import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.FileFilterBuilder; import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.xml.pom.License; import org.owasp.dependencycheck.xml.pom.License;
import org.owasp.dependencycheck.xml.pom.PomUtils; import org.owasp.dependencycheck.xml.pom.PomUtils;
@@ -59,7 +62,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
/** /**
* Used to load a JAR file and collect information that can be used to determine the associated CPE. * Used to load a JAR file and collect information that can be used to determine
* the associated CPE.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@@ -71,15 +75,17 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
*/ */
private static final Logger LOGGER = LoggerFactory.getLogger(JarAnalyzer.class); private static final Logger LOGGER = LoggerFactory.getLogger(JarAnalyzer.class);
/** /**
* The count of directories created during analysis. This is used for creating temporary directories. * The count of directories created during analysis. This is used for
* creating temporary directories.
*/ */
private static int dirCount = 0; private static final AtomicInteger DIR_COUNT = new AtomicInteger(0);
/** /**
* The system independent newline character. * The system independent newline character.
*/ */
private static final String NEWLINE = System.getProperty("line.separator"); private static final String NEWLINE = System.getProperty("line.separator");
/** /**
* A list of values in the manifest to ignore as they only result in false positives. * A list of values in the manifest to ignore as they only result in false
* positives.
*/ */
private static final Set<String> IGNORE_VALUES = newHashSet( private static final Set<String> IGNORE_VALUES = newHashSet(
"Sun Java System Application Server"); "Sun Java System Application Server");
@@ -122,7 +128,8 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
"ipojo-extension", "ipojo-extension",
"eclipse-sourcereferences"); "eclipse-sourcereferences");
/** /**
* Deprecated Jar manifest attribute, that is, nonetheless, useful for analysis. * Deprecated Jar manifest attribute, that is, nonetheless, useful for
* analysis.
*/ */
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
private static final String IMPLEMENTATION_VENDOR_ID = Attributes.Name.IMPLEMENTATION_VENDOR_ID private static final String IMPLEMENTATION_VENDOR_ID = Attributes.Name.IMPLEMENTATION_VENDOR_ID
@@ -143,15 +150,6 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* A pattern to detect HTML within text. * A pattern to detect HTML within text.
*/ */
private static final Pattern HTML_DETECTION_PATTERN = Pattern.compile("\\<[a-z]+.*/?\\>", Pattern.CASE_INSENSITIVE); private static final Pattern HTML_DETECTION_PATTERN = Pattern.compile("\\<[a-z]+.*/?\\>", Pattern.CASE_INSENSITIVE);
//</editor-fold>
/**
* Constructs a new JarAnalyzer.
*/
public JarAnalyzer() {
}
//<editor-fold defaultstate="collapsed" desc="All standard implmentation details of Analyzer">
/** /**
* The name of the analyzer. * The name of the analyzer.
*/ */
@@ -170,6 +168,8 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
*/ */
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build(); private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="All standard implmentation details of Analyzer">
/** /**
* Returns the FileFilter. * Returns the FileFilter.
* *
@@ -202,7 +202,8 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
//</editor-fold> //</editor-fold>
/** /**
* Returns the key used in the properties file to reference the analyzer's enabled property. * Returns the key used in the properties file to reference the analyzer's
* enabled property.
* *
* @return the analyzer's enabled property setting key * @return the analyzer's enabled property setting key
*/ */
@@ -212,15 +213,16 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Loads a specified JAR file and collects information from the manifest and checksums to identify the correct CPE * Loads a specified JAR file and collects information from the manifest and
* information. * checksums to identify the correct CPE information.
* *
* @param dependency the dependency to analyze. * @param dependency the dependency to analyze.
* @param engine the engine that is scanning the dependencies * @param engine the engine that is scanning the dependencies
* @throws AnalysisException is thrown if there is an error reading the JAR file. * @throws AnalysisException is thrown if there is an error reading the JAR
* file.
*/ */
@Override @Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException { public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
try { try {
final List<ClassNameInformation> classNames = collectClassNames(dependency); final List<ClassNameInformation> classNames = collectClassNames(dependency);
final String fileName = dependency.getFileName().toLowerCase(); final String fileName = dependency.getFileName().toLowerCase();
@@ -236,64 +238,58 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
final boolean addPackagesAsEvidence = !(hasManifest && hasPOM); final boolean addPackagesAsEvidence = !(hasManifest && hasPOM);
analyzePackageNames(classNames, dependency, addPackagesAsEvidence); analyzePackageNames(classNames, dependency, addPackagesAsEvidence);
} catch (IOException ex) { } catch (IOException ex) {
throw new AnalysisException("Exception occurred reading the JAR file.", ex); throw new AnalysisException("Exception occurred reading the JAR file (" + dependency.getFileName() + ").", ex);
} }
} }
/** /**
* Attempts to find a pom.xml within the JAR file. If found it extracts information and adds it to the evidence. This will * Attempts to find a pom.xml within the JAR file. If found it extracts
* attempt to interpolate the strings contained within the pom.properties if one exists. * information and adds it to the evidence. This will attempt to interpolate
* the strings contained within the pom.properties if one exists.
* *
* @param dependency the dependency being analyzed * @param dependency the dependency being analyzed
* @param classes a collection of class name information * @param classes a collection of class name information
* @param engine the analysis engine, used to add additional dependencies * @param engine the analysis engine, used to add additional dependencies
* @throws AnalysisException is thrown if there is an exception parsing the pom * @throws AnalysisException is thrown if there is an exception parsing the
* pom
* @return whether or not evidence was added to the dependency * @return whether or not evidence was added to the dependency
*/ */
protected boolean analyzePOM(Dependency dependency, List<ClassNameInformation> classes, Engine engine) throws AnalysisException { protected boolean analyzePOM(Dependency dependency, List<ClassNameInformation> classes, Engine engine) throws AnalysisException {
boolean foundSomething = false; try (JarFile jar = new JarFile(dependency.getActualFilePath())) {
final JarFile jar; final List<String> pomEntries = retrievePomListing(jar);
try { if (pomEntries != null && pomEntries.size() <= 1) {
jar = new JarFile(dependency.getActualFilePath()); String path;
} catch (IOException ex) { File pomFile;
LOGGER.warn("Unable to read JarFile '{}'.", dependency.getActualFilePath()); Properties pomProperties = null;
LOGGER.trace("", ex); if (pomEntries.size() == 1) {
return false; path = pomEntries.get(0);
} pomFile = extractPom(path, jar);
List<String> pomEntries;
try {
pomEntries = retrievePomListing(jar);
} catch (IOException ex) {
LOGGER.warn("Unable to read Jar file entries in '{}'.", dependency.getActualFilePath());
LOGGER.trace("", ex);
return false;
}
File externalPom = null;
if (pomEntries.isEmpty()) {
final String pomPath = FilenameUtils.removeExtension(dependency.getActualFilePath()) + ".pom";
externalPom = new File(pomPath);
if (externalPom.isFile()) {
pomEntries.add(pomPath);
} else {
return false;
}
}
for (String path : pomEntries) {
LOGGER.debug("Reading pom entry: {}", path);
Properties pomProperties = null;
try {
if (externalPom == null) {
pomProperties = retrievePomProperties(path, jar); pomProperties = retrievePomProperties(path, jar);
} else {
path = FilenameUtils.removeExtension(dependency.getActualFilePath()) + ".pom";
pomFile = new File(path);
}
if (pomFile.isFile()) {
final Model pom = PomUtils.readPom(pomFile);
if (pom != null && pomProperties != null) {
pom.processProperties(pomProperties);
}
return pom != null && setPomEvidence(dependency, pom, classes);
} else {
return false;
} }
} catch (IOException ex) {
LOGGER.trace("ignore this, failed reading a non-existent pom.properties", ex);
} }
Model pom = null;
try { //reported possible null dereference on pomEntries is on a non-feasible path
if (pomEntries.size() > 1) { for (String path : pomEntries) {
//TODO - one of these is likely the pom for the main JAR we are analyzing
LOGGER.debug("Reading pom entry: {}", path);
try {
//extract POM to its own directory and add it as its own dependency //extract POM to its own directory and add it as its own dependency
final Dependency newDependency = new Dependency(); final Properties pomProperties = retrievePomProperties(path, jar);
pom = extractPom(path, jar, newDependency); final File pomFile = extractPom(path, jar);
final Model pom = PomUtils.readPom(pomFile);
pom.processProperties(pomProperties);
final String displayPath = String.format("%s%s%s", final String displayPath = String.format("%s%s%s",
dependency.getFilePath(), dependency.getFilePath(),
@@ -303,71 +299,60 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
dependency.getFileName(), dependency.getFileName(),
File.separator, File.separator,
path); path);
final Dependency newDependency = new Dependency();
newDependency.setActualFilePath(pomFile.getAbsolutePath());
newDependency.setFileName(displayName); newDependency.setFileName(displayName);
newDependency.setFilePath(displayPath); newDependency.setFilePath(displayPath);
pom.processProperties(pomProperties);
setPomEvidence(newDependency, pom, null); setPomEvidence(newDependency, pom, null);
engine.getDependencies().add(newDependency); engine.getDependencies().add(newDependency);
Collections.sort(engine.getDependencies()); } catch (AnalysisException ex) {
} else { LOGGER.warn("An error occurred while analyzing '{}'.", dependency.getActualFilePath());
if (externalPom == null) { LOGGER.trace("", ex);
pom = PomUtils.readPom(path, jar);
} else {
pom = PomUtils.readPom(externalPom);
}
pom.processProperties(pomProperties);
foundSomething |= setPomEvidence(dependency, pom, classes);
} }
} catch (AnalysisException ex) {
LOGGER.warn("An error occured while analyzing '{}'.", dependency.getActualFilePath());
LOGGER.trace("", ex);
} }
} catch (IOException ex) {
LOGGER.warn("Unable to read JarFile '{}'.", dependency.getActualFilePath());
LOGGER.trace("", ex);
} }
return foundSomething; return false;
} }
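The refactored analyzePOM above replaces the manual JarFile close with try-with-resources and walks the archive for embedded Maven POMs. A minimal, self-contained sketch of that pattern follows; the class name and the META-INF path check are illustrative assumptions, not code taken from this change.

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.Enumeration;
    import java.util.List;
    import java.util.jar.JarEntry;
    import java.util.jar.JarFile;

    // Sketch: list the pom.xml entries embedded in a JAR, letting
    // try-with-resources close the JarFile (the shape used by analyzePOM above).
    public final class PomEntryLister {
        private PomEntryLister() {
        }

        public static List<String> listPomEntries(String jarPath) throws IOException {
            final List<String> pomEntries = new ArrayList<>();
            try (JarFile jar = new JarFile(jarPath)) {
                final Enumeration<JarEntry> entries = jar.entries();
                while (entries.hasMoreElements()) {
                    final JarEntry entry = entries.nextElement();
                    final String name = entry.getName().toLowerCase();
                    // Maven places embedded POMs under META-INF/maven/<group>/<artifact>/pom.xml
                    if (!entry.isDirectory() && name.startsWith("meta-inf/") && name.endsWith("pom.xml")) {
                        pomEntries.add(entry.getName());
                    }
                }
            }
            return pomEntries;
        }
    }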
/** /**
* Given a path to a pom.xml within a JarFile, this method attempts to load a sibling pom.properties if one exists. * Given a path to a pom.xml within a JarFile, this method attempts to load
* a sibling pom.properties if one exists.
* *
* @param path the path to the pom.xml within the JarFile * @param path the path to the pom.xml within the JarFile
* @param jar the JarFile to load the pom.properties from * @param jar the JarFile to load the pom.properties from
* @return a Properties object or null if no pom.properties was found * @return a Properties object or null if no pom.properties was found
* @throws IOException thrown if there is an exception reading the pom.properties
*/ */
private Properties retrievePomProperties(String path, final JarFile jar) throws IOException { private Properties retrievePomProperties(String path, final JarFile jar) {
Properties pomProperties = null; Properties pomProperties = null;
        final String propPath = path.substring(0, path.length() - 7) + "pom.properties"; final String propPath = path.substring(0, path.length() - 7) + "pom.properties";
final ZipEntry propEntry = jar.getEntry(propPath); final ZipEntry propEntry = jar.getEntry(propPath);
if (propEntry != null) { if (propEntry != null) {
Reader reader = null; try (Reader reader = new InputStreamReader(jar.getInputStream(propEntry), "UTF-8")) {
try {
reader = new InputStreamReader(jar.getInputStream(propEntry), "UTF-8");
pomProperties = new Properties(); pomProperties = new Properties();
pomProperties.load(reader); pomProperties.load(reader);
LOGGER.debug("Read pom.properties: {}", propPath); LOGGER.debug("Read pom.properties: {}", propPath);
} finally { } catch (UnsupportedEncodingException ex) {
if (reader != null) { LOGGER.trace("UTF-8 is not supported", ex);
try { } catch (IOException ex) {
reader.close(); LOGGER.trace("Unable to read the POM properties", ex);
} catch (IOException ex) {
LOGGER.trace("close error", ex);
}
}
} }
} }
return pomProperties; return pomProperties;
} }
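retrievePomProperties now reads the sibling properties file through a try-with-resources reader and treats a missing or unreadable file as "no properties". A standalone sketch of the same idea, assuming the standard Maven layout in which pom.properties sits next to pom.xml; the class name is made up for illustration.

    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.io.Reader;
    import java.nio.charset.StandardCharsets;
    import java.util.Properties;
    import java.util.jar.JarFile;
    import java.util.zip.ZipEntry;

    // Sketch: load the pom.properties written next to an embedded pom.xml,
    // returning null when the entry is absent or cannot be read.
    public final class PomPropertiesLoader {
        private PomPropertiesLoader() {
        }

        public static Properties load(JarFile jar, String pomPath) {
            // e.g. "META-INF/maven/g/a/pom.xml" -> "META-INF/maven/g/a/pom.properties"
            final String propPath = pomPath.substring(0, pomPath.length() - "pom.xml".length()) + "pom.properties";
            final ZipEntry entry = jar.getEntry(propPath);
            if (entry == null) {
                return null;
            }
            try (Reader reader = new InputStreamReader(jar.getInputStream(entry), StandardCharsets.UTF_8)) {
                final Properties props = new Properties();
                props.load(reader);
                return props;
            } catch (IOException ex) {
                return null; // treat a missing/corrupt pom.properties as "no properties"
            }
        }
    }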
/** /**
* Searches a JarFile for pom.xml entries and returns a listing of these entries. * Searches a JarFile for pom.xml entries and returns a listing of these
* entries.
* *
* @param jar the JarFile to search * @param jar the JarFile to search
* @return a list of pom.xml entries * @return a list of pom.xml entries
* @throws IOException thrown if there is an exception reading a JarEntry * @throws IOException thrown if there is an exception reading a JarEntry
*/ */
private List<String> retrievePomListing(final JarFile jar) throws IOException { private List<String> retrievePomListing(final JarFile jar) throws IOException {
final List<String> pomEntries = new ArrayList<String>(); final List<String> pomEntries = new ArrayList<>();
final Enumeration<JarEntry> entries = jar.entries(); final Enumeration<JarEntry> entries = jar.entries();
while (entries.hasMoreElements()) { while (entries.hasMoreElements()) {
final JarEntry entry = entries.nextElement(); final JarEntry entry = entries.nextElement();
@@ -381,64 +366,29 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Retrieves the specified POM from a jar file and converts it to a Model. * Retrieves the specified POM from a jar.
* *
* @param path the path to the pom.xml file within the jar file * @param path the path to the pom.xml file within the jar file
* @param jar the jar file to extract the pom from * @param jar the jar file to extract the pom from
* @param dependency the dependency being analyzed * @return returns the POM file
* @return returns the POM object * @throws AnalysisException is thrown if there is an exception extracting
* @throws AnalysisException is thrown if there is an exception extracting or parsing the POM * the file
* {@link org.owasp.dependencycheck.xml.pom.Model} object
*/ */
private Model extractPom(String path, JarFile jar, Dependency dependency) throws AnalysisException { private File extractPom(String path, JarFile jar) throws AnalysisException {
InputStream input = null;
FileOutputStream fos = null;
final File tmpDir = getNextTempDirectory(); final File tmpDir = getNextTempDirectory();
final File file = new File(tmpDir, "pom.xml"); final File file = new File(tmpDir, "pom.xml");
try { final ZipEntry entry = jar.getEntry(path);
final ZipEntry entry = jar.getEntry(path); if (entry == null) {
input = jar.getInputStream(entry); throw new AnalysisException(String.format("Pom (%s) does not exist in %s", path, jar.getName()));
fos = new FileOutputStream(file); }
try (InputStream input = jar.getInputStream(entry);
FileOutputStream fos = new FileOutputStream(file)) {
IOUtils.copy(input, fos); IOUtils.copy(input, fos);
dependency.setActualFilePath(file.getAbsolutePath());
} catch (IOException ex) { } catch (IOException ex) {
LOGGER.warn("An error occurred reading '{}' from '{}'.", path, dependency.getFilePath()); LOGGER.warn("An error occurred reading '{}' from '{}'.", path, jar.getName());
LOGGER.error("", ex); LOGGER.error("", ex);
} finally {
closeStream(fos);
closeStream(input);
}
return PomUtils.readPom(file);
}
/**
* Silently closes an input stream ignoring errors.
*
* @param stream an input stream to close
*/
private void closeStream(InputStream stream) {
if (stream != null) {
try {
stream.close();
} catch (IOException ex) {
LOGGER.trace("", ex);
}
}
}
/**
* Silently closes an output stream ignoring errors.
*
* @param stream an output stream to close
*/
private void closeStream(OutputStream stream) {
if (stream != null) {
try {
stream.close();
} catch (IOException ex) {
LOGGER.trace("", ex);
}
} }
return file;
} }
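extractPom now guards against a missing entry and relies on try-with-resources to close both streams around IOUtils.copy. A hedged sketch of that extraction step, assuming Apache Commons IO is on the classpath; the class and method names are illustrative only.

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.util.jar.JarFile;
    import java.util.zip.ZipEntry;
    import org.apache.commons.io.IOUtils;

    // Sketch: extract a single entry from a JAR to a file on disk. A missing
    // entry is reported to the caller instead of producing a NullPointerException.
    public final class JarEntryExtractor {
        private JarEntryExtractor() {
        }

        public static File extract(JarFile jar, String entryPath, File targetDir) throws IOException {
            final ZipEntry entry = jar.getEntry(entryPath);
            if (entry == null) {
                throw new IOException(String.format("Entry %s does not exist in %s", entryPath, jar.getName()));
            }
            final File target = new File(targetDir, new File(entryPath).getName());
            try (InputStream in = jar.getInputStream(entry);
                    OutputStream out = new FileOutputStream(target)) {
                IOUtils.copy(in, out);
            }
            return target;
        }
    }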
/** /**
@@ -446,16 +396,17 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* *
* @param dependency the dependency to set data on * @param dependency the dependency to set data on
* @param pom the information from the pom * @param pom the information from the pom
* @param classes a collection of ClassNameInformation - containing data about the fully qualified class names within the JAR * @param classes a collection of ClassNameInformation - containing data
* file being analyzed * about the fully qualified class names within the JAR file being analyzed
* @return true if there was evidence within the pom that we could use; otherwise false * @return true if there was evidence within the pom that we could use;
* otherwise false
*/ */
public static boolean setPomEvidence(Dependency dependency, Model pom, List<ClassNameInformation> classes) { public static boolean setPomEvidence(Dependency dependency, Model pom, List<ClassNameInformation> classes) {
if (pom == null) {
return false;
}
boolean foundSomething = false; boolean foundSomething = false;
boolean addAsIdentifier = true; boolean addAsIdentifier = true;
if (pom == null) {
return foundSomething;
}
String groupid = pom.getGroupId(); String groupid = pom.getGroupId();
String parentGroupId = pom.getParentGroupId(); String parentGroupId = pom.getParentGroupId();
String artifactid = pom.getArtifactId(); String artifactid = pom.getArtifactId();
@@ -474,7 +425,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} }
final String originalGroupID = groupid; final String originalGroupID = groupid;
if (groupid.startsWith("org.") || groupid.startsWith("com.")) { if (groupid != null && (groupid.startsWith("org.") || groupid.startsWith("com."))) {
groupid = groupid.substring(4); groupid = groupid.substring(4);
} }
@@ -483,7 +434,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} }
final String originalArtifactID = artifactid; final String originalArtifactID = artifactid;
if (artifactid.startsWith("org.") || artifactid.startsWith("com.")) { if (artifactid != null && (artifactid.startsWith("org.") || artifactid.startsWith("com."))) {
artifactid = artifactid.substring(4); artifactid = artifactid.substring(4);
} }
@@ -545,6 +496,12 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
addMatchingValues(classes, org, dependency.getVendorEvidence()); addMatchingValues(classes, org, dependency.getVendorEvidence());
addMatchingValues(classes, org, dependency.getProductEvidence()); addMatchingValues(classes, org, dependency.getProductEvidence());
} }
// org name
final String orgUrl = pom.getOrganizationUrl();
if (orgUrl != null && !orgUrl.isEmpty()) {
dependency.getVendorEvidence().addEvidence("pom", "organization url", orgUrl, Confidence.MEDIUM);
dependency.getProductEvidence().addEvidence("pom", "organization url", orgUrl, Confidence.LOW);
}
//pom name //pom name
final String pomName = pom.getName(); final String pomName = pom.getName();
if (pomName if (pomName
@@ -565,22 +522,30 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
addMatchingValues(classes, trimmedDescription, dependency.getProductEvidence()); addMatchingValues(classes, trimmedDescription, dependency.getProductEvidence());
} }
final String projectURL = pom.getProjectURL();
if (projectURL != null && !projectURL.trim().isEmpty()) {
dependency.getVendorEvidence().addEvidence("pom", "url", projectURL, Confidence.HIGHEST);
}
extractLicense(pom, dependency); extractLicense(pom, dependency);
return foundSomething; return foundSomething;
} }
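The added null checks above protect the groupId/artifactId prefix trimming. A tiny sketch of that null-safe trim in isolation; the class name is invented for illustration.

    // Sketch: a coordinate such as "org.owasp.dependencycheck" contributes the
    // more searchable "owasp.dependencycheck" form, while a null value is left alone.
    public final class CoordinateTrimmer {
        private CoordinateTrimmer() {
        }

        public static String trimCommonPrefix(String coordinate) {
            if (coordinate != null && (coordinate.startsWith("org.") || coordinate.startsWith("com."))) {
                return coordinate.substring(4);
            }
            return coordinate;
        }
    }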
/** /**
* Analyzes the path information of the classes contained within the JarAnalyzer to try and determine possible vendor or * Analyzes the path information of the classes contained within the
* product names. If any are found they are stored in the packageVendor and packageProduct hashSets. * JarAnalyzer to try and determine possible vendor or product names. If any
* are found they are stored in the packageVendor and packageProduct
* hashSets.
* *
* @param classNames a list of class names * @param classNames a list of class names
* @param dependency a dependency to analyze * @param dependency a dependency to analyze
* @param addPackagesAsEvidence a flag indicating whether or not package names should be added as evidence. * @param addPackagesAsEvidence a flag indicating whether or not package
* names should be added as evidence.
*/ */
protected void analyzePackageNames(List<ClassNameInformation> classNames, protected void analyzePackageNames(List<ClassNameInformation> classNames,
Dependency dependency, boolean addPackagesAsEvidence) { Dependency dependency, boolean addPackagesAsEvidence) {
final Map<String, Integer> vendorIdentifiers = new HashMap<String, Integer>(); final Map<String, Integer> vendorIdentifiers = new HashMap<>();
final Map<String, Integer> productIdentifiers = new HashMap<String, Integer>(); final Map<String, Integer> productIdentifiers = new HashMap<>();
analyzeFullyQualifiedClassNames(classNames, vendorIdentifiers, productIdentifiers); analyzeFullyQualifiedClassNames(classNames, vendorIdentifiers, productIdentifiers);
final int classCount = classNames.size(); final int classCount = classNames.size();
@@ -610,11 +575,13 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* <p> * <p>
* Reads the manifest from the JAR file and collects the entries. Some vendorKey entries are:</p> * Reads the manifest from the JAR file and collects the entries. Some
* vendorKey entries are:</p>
* <ul><li>Implementation Title</li> * <ul><li>Implementation Title</li>
* <li>Implementation Version</li> <li>Implementation Vendor</li> * <li>Implementation Version</li> <li>Implementation Vendor</li>
* <li>Implementation VendorId</li> <li>Bundle Name</li> <li>Bundle Version</li> <li>Bundle Vendor</li> <li>Bundle * <li>Implementation VendorId</li> <li>Bundle Name</li> <li>Bundle
* Description</li> <li>Main Class</li> </ul> * Version</li> <li>Bundle Vendor</li> <li>Bundle Description</li> <li>Main
* Class</li> </ul>
* However, all but a handful of specific entries are read in. * However, all but a handful of specific entries are read in.
* *
* @param dependency A reference to the dependency * @param dependency A reference to the dependency
@@ -622,16 +589,12 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* @return whether evidence was identified parsing the manifest * @return whether evidence was identified parsing the manifest
* @throws IOException if there is an issue reading the JAR file * @throws IOException if there is an issue reading the JAR file
*/ */
protected boolean parseManifest(Dependency dependency, List<ClassNameInformation> classInformation) throws IOException { protected boolean parseManifest(Dependency dependency, List<ClassNameInformation> classInformation)
throws IOException {
boolean foundSomething = false; boolean foundSomething = false;
JarFile jar = null; try (JarFile jar = new JarFile(dependency.getActualFilePath())) {
try {
jar = new JarFile(dependency.getActualFilePath());
final Manifest manifest = jar.getManifest(); final Manifest manifest = jar.getManifest();
if (manifest == null) { if (manifest == null) {
//don't log this for javadoc or sources jar files
if (!dependency.getFileName().toLowerCase().endsWith("-sources.jar") if (!dependency.getFileName().toLowerCase().endsWith("-sources.jar")
&& !dependency.getFileName().toLowerCase().endsWith("-javadoc.jar") && !dependency.getFileName().toLowerCase().endsWith("-javadoc.jar")
&& !dependency.getFileName().toLowerCase().endsWith("-src.jar") && !dependency.getFileName().toLowerCase().endsWith("-src.jar")
@@ -641,17 +604,13 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} }
return false; return false;
} }
final Attributes atts = manifest.getMainAttributes();
final EvidenceCollection vendorEvidence = dependency.getVendorEvidence(); final EvidenceCollection vendorEvidence = dependency.getVendorEvidence();
final EvidenceCollection productEvidence = dependency.getProductEvidence(); final EvidenceCollection productEvidence = dependency.getProductEvidence();
final EvidenceCollection versionEvidence = dependency.getVersionEvidence(); final EvidenceCollection versionEvidence = dependency.getVersionEvidence();
String source = "Manifest";
final String source = "Manifest";
String specificationVersion = null; String specificationVersion = null;
boolean hasImplementationVersion = false; boolean hasImplementationVersion = false;
Attributes atts = manifest.getMainAttributes();
for (Entry<Object, Object> entry : atts.entrySet()) { for (Entry<Object, Object> entry : atts.entrySet()) {
String key = entry.getKey().toString(); String key = entry.getKey().toString();
String value = atts.getValue(key); String value = atts.getValue(key);
@@ -669,7 +628,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
foundSomething = true; foundSomething = true;
versionEvidence.addEvidence(source, key, value, Confidence.HIGH); versionEvidence.addEvidence(source, key, value, Confidence.HIGH);
} else if ("specification-version".equalsIgnoreCase(key)) { } else if ("specification-version".equalsIgnoreCase(key)) {
specificationVersion = key; specificationVersion = value;
} else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_VENDOR.toString())) { } else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_VENDOR.toString())) {
foundSomething = true; foundSomething = true;
vendorEvidence.addEvidence(source, key, value, Confidence.HIGH); vendorEvidence.addEvidence(source, key, value, Confidence.HIGH);
@@ -681,7 +640,6 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} else if (key.equalsIgnoreCase(BUNDLE_DESCRIPTION)) { } else if (key.equalsIgnoreCase(BUNDLE_DESCRIPTION)) {
foundSomething = true; foundSomething = true;
addDescription(dependency, value, "manifest", key); addDescription(dependency, value, "manifest", key);
//productEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
addMatchingValues(classInformation, value, productEvidence); addMatchingValues(classInformation, value, productEvidence);
} else if (key.equalsIgnoreCase(BUNDLE_NAME)) { } else if (key.equalsIgnoreCase(BUNDLE_NAME)) {
foundSomething = true; foundSomething = true;
@@ -689,25 +647,14 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
addMatchingValues(classInformation, value, productEvidence); addMatchingValues(classInformation, value, productEvidence);
// //the following caused false positives. // //the following caused false positives.
// } else if (key.equalsIgnoreCase(BUNDLE_VENDOR)) { // } else if (key.equalsIgnoreCase(BUNDLE_VENDOR)) {
// foundSomething = true;
// vendorEvidence.addEvidence(source, key, value, Confidence.HIGH);
// addMatchingValues(classInformation, value, vendorEvidence);
} else if (key.equalsIgnoreCase(BUNDLE_VERSION)) { } else if (key.equalsIgnoreCase(BUNDLE_VERSION)) {
foundSomething = true; foundSomething = true;
versionEvidence.addEvidence(source, key, value, Confidence.HIGH); versionEvidence.addEvidence(source, key, value, Confidence.HIGH);
} else if (key.equalsIgnoreCase(Attributes.Name.MAIN_CLASS.toString())) { } else if (key.equalsIgnoreCase(Attributes.Name.MAIN_CLASS.toString())) {
continue; continue;
//skipping main class as if this has important information to add //skipping main class as if this has important information to add it will be added during class name analysis...
// it will be added during class name analysis... if other fields
// have the information from the class name then they will get added...
// foundSomething = true;
// productEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
// vendorEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
// addMatchingValues(classInformation, value, vendorEvidence);
// addMatchingValues(classInformation, value, productEvidence);
} else { } else {
key = key.toLowerCase(); key = key.toLowerCase();
if (!IGNORE_KEYS.contains(key) if (!IGNORE_KEYS.contains(key)
&& !key.endsWith("jdk") && !key.endsWith("jdk")
&& !key.contains("lastmodified") && !key.contains("lastmodified")
@@ -719,21 +666,18 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
&& !value.trim().startsWith("scm:") && !value.trim().startsWith("scm:")
&& !isImportPackage(key, value) && !isImportPackage(key, value)
&& !isPackage(key, value)) { && !isPackage(key, value)) {
foundSomething = true; foundSomething = true;
if (key.contains("version")) { if (key.contains("version")) {
if (!key.contains("specification")) { if (!key.contains("specification")) {
//versionEvidence.addEvidence(source, key, value, Confidence.LOW);
//} else {
versionEvidence.addEvidence(source, key, value, Confidence.MEDIUM); versionEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
} }
} else if ("build-id".equals(key)) { } else if ("build-id".equals(key)) {
int pos = value.indexOf('('); int pos = value.indexOf('(');
if (pos >= 0) { if (pos > 0) {
value = value.substring(0, pos - 1); value = value.substring(0, pos - 1);
} }
pos = value.indexOf('['); pos = value.indexOf('[');
if (pos >= 0) { if (pos > 0) {
value = value.substring(0, pos - 1); value = value.substring(0, pos - 1);
} }
versionEvidence.addEvidence(source, key, value, Confidence.MEDIUM); versionEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
@@ -754,21 +698,19 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
addMatchingValues(classInformation, value, productEvidence); addMatchingValues(classInformation, value, productEvidence);
} else if (key.contains("license")) { } else if (key.contains("license")) {
addLicense(dependency, value); addLicense(dependency, value);
} else if (key.contains("description")) {
addDescription(dependency, value, "manifest", key);
} else { } else {
if (key.contains("description")) { productEvidence.addEvidence(source, key, value, Confidence.LOW);
addDescription(dependency, value, "manifest", key); vendorEvidence.addEvidence(source, key, value, Confidence.LOW);
} else { addMatchingValues(classInformation, value, vendorEvidence);
productEvidence.addEvidence(source, key, value, Confidence.LOW); addMatchingValues(classInformation, value, productEvidence);
vendorEvidence.addEvidence(source, key, value, Confidence.LOW); if (value.matches(".*\\d.*")) {
addMatchingValues(classInformation, value, vendorEvidence); final StringTokenizer tokenizer = new StringTokenizer(value, " ");
addMatchingValues(classInformation, value, productEvidence); while (tokenizer.hasMoreElements()) {
if (value.matches(".*\\d.*")) { final String s = tokenizer.nextToken();
final StringTokenizer tokenizer = new StringTokenizer(value, " "); if (s.matches("^[0-9.]+$")) {
while (tokenizer.hasMoreElements()) { versionEvidence.addEvidence(source, key, s, Confidence.LOW);
final String s = tokenizer.nextToken();
if (s.matches("^[0-9.]+$")) {
versionEvidence.addEvidence(source, key, s, Confidence.LOW);
}
} }
} }
} }
@@ -776,28 +718,52 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} }
} }
} }
for (Map.Entry<String, Attributes> item : manifest.getEntries().entrySet()) {
final String name = item.getKey();
source = "manifest: " + name;
atts = item.getValue();
for (Entry<Object, Object> entry : atts.entrySet()) {
final String key = entry.getKey().toString();
final String value = atts.getValue(key);
if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_TITLE.toString())) {
foundSomething = true;
productEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
addMatchingValues(classInformation, value, productEvidence);
} else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_VERSION.toString())) {
foundSomething = true;
versionEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
} else if (key.equalsIgnoreCase(Attributes.Name.IMPLEMENTATION_VENDOR.toString())) {
foundSomething = true;
vendorEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
addMatchingValues(classInformation, value, vendorEvidence);
} else if (key.equalsIgnoreCase(Attributes.Name.SPECIFICATION_TITLE.toString())) {
foundSomething = true;
productEvidence.addEvidence(source, key, value, Confidence.MEDIUM);
addMatchingValues(classInformation, value, productEvidence);
}
}
}
if (specificationVersion != null && !hasImplementationVersion) { if (specificationVersion != null && !hasImplementationVersion) {
foundSomething = true; foundSomething = true;
versionEvidence.addEvidence(source, "specificationn-version", specificationVersion, Confidence.HIGH); versionEvidence.addEvidence(source, "specification-version", specificationVersion, Confidence.HIGH);
}
} finally {
if (jar != null) {
jar.close();
} }
} }
return foundSomething; return foundSomething;
} }
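parseManifest now iterates the manifest's named entry sections in addition to the main attributes. A small standalone sketch of reading both with java.util.jar, printing values instead of building evidence; the names are illustrative.

    import java.io.IOException;
    import java.util.Map;
    import java.util.jar.Attributes;
    import java.util.jar.JarFile;
    import java.util.jar.Manifest;

    // Sketch: walk a JAR manifest the way parseManifest does, first the main
    // attributes and then the per-entry attribute sections.
    public final class ManifestDumper {
        private ManifestDumper() {
        }

        public static void dump(String jarPath) throws IOException {
            try (JarFile jar = new JarFile(jarPath)) {
                final Manifest manifest = jar.getManifest();
                if (manifest == null) {
                    return; // e.g. -sources or -javadoc JARs often have no manifest
                }
                for (Map.Entry<Object, Object> entry : manifest.getMainAttributes().entrySet()) {
                    System.out.printf("Manifest: %s=%s%n", entry.getKey(), entry.getValue());
                }
                for (Map.Entry<String, Attributes> section : manifest.getEntries().entrySet()) {
                    for (Map.Entry<Object, Object> entry : section.getValue().entrySet()) {
                        System.out.printf("manifest: %s: %s=%s%n", section.getKey(), entry.getKey(), entry.getValue());
                    }
                }
            }
        }
    }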
/** /**
* Adds a description to the given dependency. If the description contains one of the following strings beyond 100 characters, * Adds a description to the given dependency. If the description contains
* then the description used will be trimmed to that position: * one of the following strings beyond 100 characters, then the description
* <ul><li>"such as"</li><li>"like "</li><li>"will use "</li><li>"* uses "</li></ul> * used will be trimmed to that position:
* <ul><li>"such as"</li><li>"like "</li><li>"will use "</li><li>"* uses
* "</li></ul>
* *
* @param dependency a dependency * @param dependency a dependency
* @param description the description * @param description the description
* @param source the source of the evidence * @param source the source of the evidence
* @param key the "name" of the evidence * @param key the "name" of the evidence
* @return if the description is trimmed, the trimmed version is returned; otherwise the original description is returned * @return if the description is trimmed, the trimmed version is returned;
* otherwise the original description is returned
*/ */
public static String addDescription(Dependency dependency, String description, String source, String key) { public static String addDescription(Dependency dependency, String description, String source, String key) {
if (dependency.getDescription() == null) { if (dependency.getDescription() == null) {
@@ -835,10 +801,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} }
if (pos > 0) { if (pos > 0) {
final StringBuilder sb = new StringBuilder(pos + 3); desc = desc.substring(0, pos) + "...";
sb.append(desc.substring(0, pos));
sb.append("...");
desc = sb.toString();
} }
dependency.getProductEvidence().addEvidence(source, key, desc, Confidence.LOW); dependency.getProductEvidence().addEvidence(source, key, desc, Confidence.LOW);
dependency.getVendorEvidence().addEvidence(source, key, desc, Confidence.LOW); dependency.getVendorEvidence().addEvidence(source, key, desc, Confidence.LOW);
@@ -871,19 +834,27 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* Initializes the JarAnalyzer. * Initializes the JarAnalyzer.
* *
* @throws Exception is thrown if there is an exception creating a temporary directory * @throws InitializationException is thrown if there is an exception
* creating a temporary directory
*/ */
@Override @Override
public void initializeFileTypeAnalyzer() throws Exception { public void initializeFileTypeAnalyzer() throws InitializationException {
final File baseDir = Settings.getTempDirectory(); try {
tempFileLocation = File.createTempFile("check", "tmp", baseDir); final File baseDir = Settings.getTempDirectory();
if (!tempFileLocation.delete()) { tempFileLocation = File.createTempFile("check", "tmp", baseDir);
final String msg = String.format("Unable to delete temporary file '%s'.", tempFileLocation.getAbsolutePath()); if (!tempFileLocation.delete()) {
throw new AnalysisException(msg); final String msg = String.format("Unable to delete temporary file '%s'.", tempFileLocation.getAbsolutePath());
} setEnabled(false);
if (!tempFileLocation.mkdirs()) { throw new InitializationException(msg);
final String msg = String.format("Unable to create directory '%s'.", tempFileLocation.getAbsolutePath()); }
throw new AnalysisException(msg); if (!tempFileLocation.mkdirs()) {
final String msg = String.format("Unable to create directory '%s'.", tempFileLocation.getAbsolutePath());
setEnabled(false);
throw new InitializationException(msg);
}
} catch (IOException ex) {
setEnabled(false);
throw new InitializationException("Unable to create a temporary file", ex);
} }
} }
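The initializer above builds a unique working directory by creating a temporary file, deleting it, and re-creating the same path as a directory, wrapping failures in InitializationException. A generic sketch of that bootstrap, using plain IOException as a stand-in for the project's exception type.

    import java.io.File;
    import java.io.IOException;

    // Sketch: reserve a unique temp path via createTempFile, then turn it into
    // a working directory; any failure is surfaced as a checked exception.
    public final class TempWorkspace {
        private TempWorkspace() {
        }

        public static File create(File baseDir) throws IOException {
            final File location = File.createTempFile("check", "tmp", baseDir);
            if (!location.delete()) {
                throw new IOException("Unable to delete temporary file " + location.getAbsolutePath());
            }
            if (!location.mkdirs()) {
                throw new IOException("Unable to create directory " + location.getAbsolutePath());
            }
            return location;
        }
    }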
@@ -891,22 +862,27 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* Deletes any files extracted from the JAR during analysis. * Deletes any files extracted from the JAR during analysis.
*/ */
@Override @Override
public void close() { public void closeAnalyzer() {
if (tempFileLocation != null && tempFileLocation.exists()) { if (tempFileLocation != null && tempFileLocation.exists()) {
LOGGER.debug("Attempting to delete temporary files"); LOGGER.debug("Attempting to delete temporary files");
final boolean success = FileUtils.delete(tempFileLocation); final boolean success = FileUtils.delete(tempFileLocation);
if (!success) { if (!success && tempFileLocation.exists()) {
LOGGER.warn("Failed to delete some temporary files, see the log for more details"); final String[] l = tempFileLocation.list();
if (l != null && l.length > 0) {
LOGGER.warn("Failed to delete some temporary files, see the log for more details");
}
} }
} }
} }
/** /**
* Determines if the key value pair from the manifest is for an "import" type entry for package names. * Determines if the key value pair from the manifest is for an "import"
* type entry for package names.
* *
* @param key the key from the manifest * @param key the key from the manifest
* @param value the value from the manifest * @param value the value from the manifest
* @return true or false depending on if it is believed the entry is an "import" entry * @return true or false depending on if it is believed the entry is an
* "import" entry
*/ */
private boolean isImportPackage(String key, String value) { private boolean isImportPackage(String key, String value) {
final Pattern packageRx = Pattern.compile("^([a-zA-Z0-9_#\\$\\*\\.]+\\s*[,;]\\s*)+([a-zA-Z0-9_#\\$\\*\\.]+\\s*)?$"); final Pattern packageRx = Pattern.compile("^([a-zA-Z0-9_#\\$\\*\\.]+\\s*[,;]\\s*)+([a-zA-Z0-9_#\\$\\*\\.]+\\s*)?$");
@@ -915,17 +891,16 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Cycles through an enumeration of JarEntries, contained within the dependency, and returns a list of the class names. This * Cycles through an enumeration of JarEntries, contained within the
* does not include core Java package names (i.e. java.* or javax.*). * dependency, and returns a list of the class names. This does not include
* core Java package names (i.e. java.* or javax.*).
* *
* @param dependency the dependency being analyzed * @param dependency the dependency being analyzed
     * @return a list of fully qualified class names * @return a list of fully qualified class names
*/ */
private List<ClassNameInformation> collectClassNames(Dependency dependency) { private List<ClassNameInformation> collectClassNames(Dependency dependency) {
final List<ClassNameInformation> classNames = new ArrayList<ClassNameInformation>(); final List<ClassNameInformation> classNames = new ArrayList<>();
JarFile jar = null; try (JarFile jar = new JarFile(dependency.getActualFilePath())) {
try {
jar = new JarFile(dependency.getActualFilePath());
final Enumeration<JarEntry> entries = jar.entries(); final Enumeration<JarEntry> entries = jar.entries();
while (entries.hasMoreElements()) { while (entries.hasMoreElements()) {
final JarEntry entry = entries.nextElement(); final JarEntry entry = entries.nextElement();
@@ -939,25 +914,21 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} catch (IOException ex) { } catch (IOException ex) {
LOGGER.warn("Unable to open jar file '{}'.", dependency.getFileName()); LOGGER.warn("Unable to open jar file '{}'.", dependency.getFileName());
LOGGER.debug("", ex); LOGGER.debug("", ex);
} finally {
if (jar != null) {
try {
jar.close();
} catch (IOException ex) {
LOGGER.trace("", ex);
}
}
} }
return classNames; return classNames;
} }
/** /**
* Cycles through the list of class names and places the package levels 0-3 into the provided maps for vendor and product. * Cycles through the list of class names and places the package levels 0-3
* This is helpful when analyzing vendor/product as many times this is included in the package name. * into the provided maps for vendor and product. This is helpful when
* analyzing vendor/product as many times this is included in the package
* name.
* *
* @param classNames a list of class names * @param classNames a list of class names
* @param vendor HashMap of possible vendor names from package names (e.g. owasp) * @param vendor HashMap of possible vendor names from package names (e.g.
* @param product HashMap of possible product names from package names (e.g. dependencycheck) * owasp)
* @param product HashMap of possible product names from package names (e.g.
* dependencycheck)
*/ */
private void analyzeFullyQualifiedClassNames(List<ClassNameInformation> classNames, private void analyzeFullyQualifiedClassNames(List<ClassNameInformation> classNames,
Map<String, Integer> vendor, Map<String, Integer> product) { Map<String, Integer> vendor, Map<String, Integer> product) {
@@ -967,13 +938,11 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
if (list.size() == 2) { if (list.size() == 2) {
addEntry(product, list.get(1)); addEntry(product, list.get(1));
} } else if (list.size() == 3) {
if (list.size() == 3) {
addEntry(vendor, list.get(1)); addEntry(vendor, list.get(1));
addEntry(product, list.get(1)); addEntry(product, list.get(1));
addEntry(product, list.get(2)); addEntry(product, list.get(2));
} } else if (list.size() >= 4) {
if (list.size() >= 4) {
addEntry(vendor, list.get(1)); addEntry(vendor, list.get(1));
addEntry(vendor, list.get(2)); addEntry(vendor, list.get(2));
addEntry(product, list.get(1)); addEntry(product, list.get(1));
@@ -984,8 +953,9 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Adds an entry to the specified collection and sets the Integer (e.g. the count) to 1. If the entry already exists in the * Adds an entry to the specified collection and sets the Integer (e.g. the
* collection then the Integer is incremented by 1. * count) to 1. If the entry already exists in the collection then the
* Integer is incremented by 1.
* *
* @param collection a collection of strings and their occurrence count * @param collection a collection of strings and their occurrence count
* @param key the key to add to the collection * @param key the key to add to the collection
@@ -999,9 +969,10 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Cycles through the collection of class name information to see if parts of the package names are contained in the provided * Cycles through the collection of class name information to see if parts
     * value. If found, it will be added as the HIGHEST confidence evidence because we have more than one source corroborating the * of the package names are contained in the provided value. If found, it
* value. * will be added as the HIGHEST confidence evidence because we have more
     * than one source corroborating the value.
* *
* @param classes a collection of class name information * @param classes a collection of class name information
* @param value the value to check to see if it contains a package name * @param value the value to check to see if it contains a package name
@@ -1014,7 +985,9 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
final String text = value.toLowerCase(); final String text = value.toLowerCase();
for (ClassNameInformation cni : classes) { for (ClassNameInformation cni : classes) {
for (String key : cni.getPackageStructure()) { for (String key : cni.getPackageStructure()) {
                if (text.contains(key)) { //note, package structure elements are already lowercase. final Pattern p = Pattern.compile("\\b" + key + "\\b");
if (p.matcher(text).find()) {
//if (text.contains(key)) { //note, package structure elements are already lowercase.
evidence.addEvidence("jar", "package name", key, Confidence.HIGHEST); evidence.addEvidence("jar", "package name", key, Confidence.HIGHEST);
} }
} }
@@ -1022,7 +995,8 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} }
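The change above swaps a substring containment test for a word-boundary regex so short package fragments do not match inside longer words. A sketch of that check; Pattern.quote is an extra safeguard added here for illustration and is not part of the change itself.

    import java.util.regex.Pattern;

    // Sketch: whole-word matching, so "art" does not match inside "quartz"
    // but "core" still matches in "spring core". Quoting the key keeps any
    // regex metacharacters in it from breaking the expression.
    public final class WordMatch {
        private WordMatch() {
        }

        public static boolean containsWord(String text, String key) {
            final Pattern p = Pattern.compile("\\b" + Pattern.quote(key) + "\\b");
            return p.matcher(text).find();
        }
    }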
/** /**
* Simple check to see if the attribute from a manifest is just a package name. * Simple check to see if the attribute from a manifest is just a package
* name.
* *
* @param key the key of the value to check * @param key the key of the value to check
* @param value the value to check * @param value the value to check
@@ -1036,7 +1010,8 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Extracts the license information from the pom and adds it to the dependency. * Extracts the license information from the pom and adds it to the
* dependency.
* *
* @param pom the pom object * @param pom the pom object
* @param dependency the dependency to add license information too * @param dependency the dependency to add license information too
@@ -1081,12 +1056,24 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
*/ */
protected static class ClassNameInformation { protected static class ClassNameInformation {
/**
* The fully qualified class name.
*/
private String name;
/**
* Up to the first four levels of the package structure, excluding a
* leading "org" or "com".
*/
private final ArrayList<String> packageStructure = new ArrayList<>();
/** /**
* <p> * <p>
* Stores information about a given class name. This class will keep the fully qualified class name and a list of the * Stores information about a given class name. This class will keep the
* important parts of the package structure. Up to the first four levels of the package structure are stored, excluding a * fully qualified class name and a list of the important parts of the
* leading "org" or "com". Example:</p> * package structure. Up to the first four levels of the package
* <code>ClassNameInformation obj = new ClassNameInformation("org.owasp.dependencycheck.analyzer.JarAnalyzer"); * structure are stored, excluding a leading "org" or "com".
* Example:</p>
* <code>ClassNameInformation obj = new ClassNameInformation("org/owasp/dependencycheck/analyzer/JarAnalyzer");
* System.out.println(obj.getName()); * System.out.println(obj.getName());
* for (String p : obj.getPackageStructure()) * for (String p : obj.getPackageStructure())
* System.out.println(p); * System.out.println(p);
@@ -1104,7 +1091,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
ClassNameInformation(String className) { ClassNameInformation(String className) {
name = className; name = className;
if (name.contains("/")) { if (name.contains("/")) {
final String[] tmp = className.toLowerCase().split("/"); final String[] tmp = StringUtils.split(className.toLowerCase(), '/');
int start = 0; int start = 0;
int end = 3; int end = 3;
if ("com".equals(tmp[0]) || "org".equals(tmp[0])) { if ("com".equals(tmp[0]) || "org".equals(tmp[0])) {
@@ -1114,17 +1101,11 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
if (tmp.length <= end) { if (tmp.length <= end) {
end = tmp.length - 1; end = tmp.length - 1;
} }
for (int i = start; i <= end; i++) { packageStructure.addAll(Arrays.asList(tmp).subList(start, end + 1));
packageStructure.add(tmp[i]);
}
} else { } else {
packageStructure.add(name); packageStructure.add(name);
} }
} }
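The constructor above now splits the class path with StringUtils.split and copies a subList of package levels. A standalone sketch of that extraction, assuming Commons Lang 3 is available; the start/end adjustment for a leading org/com segment is inferred from the surrounding context rather than copied from the hidden part of the hunk.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import org.apache.commons.lang3.StringUtils;

    // Sketch: split the class path on '/', skip a leading "org"/"com", and keep
    // up to the next four segments, e.g. "org/owasp/dependencycheck/analyzer/JarAnalyzer"
    // yields [owasp, dependencycheck, analyzer, jaranalyzer].
    public final class PackageLevels {
        private PackageLevels() {
        }

        public static List<String> extract(String className) {
            final List<String> levels = new ArrayList<>();
            if (className.contains("/")) {
                final String[] parts = StringUtils.split(className.toLowerCase(), '/');
                int start = 0;
                int end = 3;
                if ("com".equals(parts[0]) || "org".equals(parts[0])) {
                    start = 1;
                    end = 4;
                }
                if (parts.length <= end) {
                    end = parts.length - 1;
                }
                levels.addAll(Arrays.asList(parts).subList(start, end + 1));
            } else {
                levels.add(className);
            }
            return levels;
        }
    }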
/**
* The fully qualified class name.
*/
private String name;
/** /**
* Get the value of name * Get the value of name
@@ -1143,10 +1124,6 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
public void setName(String name) { public void setName(String name) {
this.name = name; this.name = name;
} }
/**
* Up to the first four levels of the package structure, excluding a leading "org" or "com".
*/
private final ArrayList<String> packageStructure = new ArrayList<String>();
/** /**
* Get the value of packageStructure * Get the value of packageStructure
@@ -1165,7 +1142,7 @@ public class JarAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException thrown if unable to create temporary directory * @throws AnalysisException thrown if unable to create temporary directory
*/ */
private File getNextTempDirectory() throws AnalysisException { private File getNextTempDirectory() throws AnalysisException {
dirCount += 1; final int dirCount = DIR_COUNT.incrementAndGet();
final File directory = new File(tempFileLocation, String.valueOf(dirCount)); final File directory = new File(tempFileLocation, String.valueOf(dirCount));
//getting an exception for some directories not being able to be created; might be because the directory already exists? //getting an exception for some directories not being able to be created; might be because the directory already exists?
if (directory.exists()) { if (directory.exists()) {

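getNextTempDirectory above now draws its directory number from DIR_COUNT.incrementAndGet() instead of incrementing a plain field. A minimal sketch of that thread-safe counter idea; the class and method names are invented.

    import java.io.File;
    import java.util.concurrent.atomic.AtomicInteger;

    // Sketch: an AtomicInteger hands out unique sub-directory numbers even when
    // several analyses request a temp directory concurrently.
    public final class TempDirSequence {
        private static final AtomicInteger DIR_COUNT = new AtomicInteger(0);

        private TempDirSequence() {
        }

        public static File next(File base) {
            final int dirCount = DIR_COUNT.incrementAndGet();
            return new File(base, String.valueOf(dirCount));
        }
    }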

@@ -35,6 +35,7 @@ import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.net.URL; import java.net.URL;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.DownloadFailedException; import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader; import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.FileFilterBuilder; import org.owasp.dependencycheck.utils.FileFilterBuilder;
@@ -42,15 +43,18 @@ import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
/** /**
* Analyzer which will attempt to locate a dependency on a Nexus service by SHA-1 digest of the dependency. * Analyzer which will attempt to locate a dependency on a Nexus service by
* SHA-1 digest of the dependency.
* *
* There are two settings which govern this behavior: * There are two settings which govern this behavior:
* *
* <ul> * <ul>
* <li>{@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_NEXUS_ENABLED} determines whether this analyzer is even * <li>{@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_NEXUS_ENABLED}
* enabled. This can be overridden by setting the system property.</li> * determines whether this analyzer is even enabled. This can be overridden by
* <li>{@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_NEXUS_URL} the URL to a Nexus service to search by SHA-1. * setting the system property.</li>
* There is an expected <code>%s</code> in this where the SHA-1 will get entered.</li> * <li>{@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_NEXUS_URL}
* the URL to a Nexus service to search by SHA-1. There is an expected
* <code>%s</code> in this where the SHA-1 will get entered.</li>
* </ul> * </ul>
* *
* @author colezlaw * @author colezlaw
@@ -58,7 +62,8 @@ import org.owasp.dependencycheck.utils.Settings;
public class NexusAnalyzer extends AbstractFileTypeAnalyzer { public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* The default URL - this will be used by the CentralAnalyzer to determine whether to enable this. * The default URL - this will be used by the CentralAnalyzer to determine
* whether to enable this.
*/ */
public static final String DEFAULT_URL = "https://repository.sonatype.org/service/local/"; public static final String DEFAULT_URL = "https://repository.sonatype.org/service/local/";
@@ -95,7 +100,8 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* Determines if this analyzer is enabled * Determines if this analyzer is enabled
* *
* @return <code>true</code> if the analyzer is enabled; otherwise <code>false</code> * @return <code>true</code> if the analyzer is enabled; otherwise
* <code>false</code>
*/ */
private boolean checkEnabled() { private boolean checkEnabled() {
/* Enable this analyzer ONLY if the Nexus URL has been set to something /* Enable this analyzer ONLY if the Nexus URL has been set to something
@@ -131,26 +137,25 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* Initializes the analyzer once before any analysis is performed. * Initializes the analyzer once before any analysis is performed.
* *
* @throws Exception if there's an error during initialization * @throws InitializationException if there's an error during initialization
*/ */
@Override @Override
public void initializeFileTypeAnalyzer() throws Exception { public void initializeFileTypeAnalyzer() throws InitializationException {
LOGGER.debug("Initializing Nexus Analyzer"); LOGGER.debug("Initializing Nexus Analyzer");
LOGGER.debug("Nexus Analyzer enabled: {}", isEnabled()); LOGGER.debug("Nexus Analyzer enabled: {}", isEnabled());
if (isEnabled()) { if (isEnabled()) {
final boolean useProxy = useProxy();
final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL); final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL);
LOGGER.debug("Nexus Analyzer URL: {}", searchUrl); LOGGER.debug("Nexus Analyzer URL: {}", searchUrl);
try { try {
searcher = new NexusSearch(new URL(searchUrl)); searcher = new NexusSearch(new URL(searchUrl), useProxy);
if (!searcher.preflightRequest()) { if (!searcher.preflightRequest()) {
LOGGER.warn("There was an issue getting Nexus status. Disabling analyzer.");
setEnabled(false); setEnabled(false);
throw new InitializationException("There was an issue getting Nexus status. Disabling analyzer.");
} }
} catch (MalformedURLException mue) { } catch (MalformedURLException mue) {
// I know that initialize can throw an exception, but we'll
// just disable the analyzer if the URL isn't valid
LOGGER.warn("Property {} not a valid URL. Nexus Analyzer disabled", searchUrl);
setEnabled(false); setEnabled(false);
throw new InitializationException("Malformed URL to Nexus: " + searchUrl, mue);
} }
} }
} }
@@ -166,7 +171,8 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Returns the key used in the properties file to reference the analyzer's enabled property. * Returns the key used in the properties file to reference the analyzer's
* enabled property.
* *
* @return the analyzer's enabled property setting key * @return the analyzer's enabled property setting key
*/ */
@@ -208,7 +214,7 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException when there's an exception during analysis * @throws AnalysisException when there's an exception during analysis
*/ */
@Override @Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException { public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (!isEnabled()) { if (!isEnabled()) {
return; return;
} }
@@ -240,7 +246,8 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.warn("Unable to download pom.xml for {} from Nexus repository; " LOGGER.warn("Unable to download pom.xml for {} from Nexus repository; "
+ "this could result in undetected CPE/CVEs.", dependency.getFileName()); + "this could result in undetected CPE/CVEs.", dependency.getFileName());
} finally { } finally {
if (pomFile != null && !FileUtils.deleteQuietly(pomFile)) { if (pomFile != null && pomFile.exists() && !FileUtils.deleteQuietly(pomFile)) {
LOGGER.debug("Failed to delete temporary pom file {}", pomFile.toString());
pomFile.deleteOnExit(); pomFile.deleteOnExit();
} }
} }
@@ -257,4 +264,19 @@ public class NexusAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.debug("Could not connect to nexus repository", ioe); LOGGER.debug("Could not connect to nexus repository", ioe);
} }
} }
/**
* Determine if a proxy should be used.
*
* @return {@code true} if a proxy should be used
*/
public static boolean useProxy() {
try {
return Settings.getString(Settings.KEYS.PROXY_SERVER) != null
&& Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY);
} catch (InvalidSettingException ise) {
LOGGER.warn("Failed to parse proxy settings.", ise);
return false;
}
}
} }


@@ -38,13 +38,15 @@ import javax.json.JsonObject;
import javax.json.JsonReader; import javax.json.JsonReader;
import javax.json.JsonString; import javax.json.JsonString;
import javax.json.JsonValue; import javax.json.JsonValue;
import org.owasp.dependencycheck.exception.InitializationException;
/** /**
* Used to analyze Node Package Manager (npm) package.json files, and collect information that can be used to determine the * Used to analyze Node Package Manager (npm) package.json files, and collect
* associated CPE. * information that can be used to determine the associated CPE.
* *
* @author Dale Visser <dvisser@ida.org> * @author Dale Visser
*/ */
@Experimental
public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer { public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
@@ -83,7 +85,7 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
} }
@Override @Override
protected void initializeFileTypeAnalyzer() throws Exception { protected void initializeFileTypeAnalyzer() throws InitializationException {
// NO-OP // NO-OP
} }
@@ -108,7 +110,8 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Returns the key used in the properties file to reference the analyzer's enabled property. * Returns the key used in the properties file to reference the analyzer's
* enabled property.
* *
* @return the analyzer's enabled property setting key * @return the analyzer's enabled property setting key
*/ */
@@ -118,17 +121,9 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
} }
@Override @Override
protected void analyzeFileType(Dependency dependency, Engine engine) protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
throws AnalysisException {
final File file = dependency.getActualFile(); final File file = dependency.getActualFile();
JsonReader jsonReader; try (JsonReader jsonReader = Json.createReader(FileUtils.openInputStream(file))) {
try {
jsonReader = Json.createReader(FileUtils.openInputStream(file));
} catch (IOException e) {
throw new AnalysisException(
"Problem occurred while reading dependency file.", e);
}
try {
final JsonObject json = jsonReader.readObject(); final JsonObject json = jsonReader.readObject();
final EvidenceCollection productEvidence = dependency.getProductEvidence(); final EvidenceCollection productEvidence = dependency.getProductEvidence();
final EvidenceCollection vendorEvidence = dependency.getVendorEvidence(); final EvidenceCollection vendorEvidence = dependency.getVendorEvidence();
@@ -148,13 +143,14 @@ public class NodePackageAnalyzer extends AbstractFileTypeAnalyzer {
dependency.setDisplayFileName(String.format("%s/%s", file.getParentFile().getName(), file.getName())); dependency.setDisplayFileName(String.format("%s/%s", file.getParentFile().getName(), file.getName()));
} catch (JsonException e) { } catch (JsonException e) {
LOGGER.warn("Failed to parse package.json file.", e); LOGGER.warn("Failed to parse package.json file.", e);
} finally { } catch (IOException e) {
jsonReader.close(); throw new AnalysisException("Problem occurred while reading dependency file.", e);
} }
} }
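The NodePackageAnalyzer change reads package.json through a try-with-resources JsonReader. A self-contained sketch using the javax.json API; the file handling and the main method are illustrative only.

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import javax.json.Json;
    import javax.json.JsonObject;
    import javax.json.JsonReader;

    // Sketch: parse a package.json and read a couple of top-level fields,
    // closing the reader and stream via try-with-resources.
    public final class PackageJsonReader {
        private PackageJsonReader() {
        }

        public static JsonObject read(File packageJson) throws IOException {
            try (InputStream in = new FileInputStream(packageJson);
                    JsonReader reader = Json.createReader(in)) {
                return reader.readObject();
            }
        }

        public static void main(String[] args) throws IOException {
            final JsonObject json = read(new File(args[0]));
            System.out.println(json.getString("name", "") + " " + json.getString("version", ""));
        }
    }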
/** /**
* Adds information to an evidence collection from the node json configuration. * Adds information to an evidence collection from the node json
* configuration.
* *
* @param json information from node.js * @param json information from node.js
* @param collection a set of evidence about a dependency * @param collection a set of evidence about a dependency


@@ -33,7 +33,7 @@ import org.slf4j.LoggerFactory;
import java.io.FileFilter; import java.io.FileFilter;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.IOException; import org.owasp.dependencycheck.exception.InitializationException;
/** /**
* Analyzer which will parse a Nuspec file to gather module information. * Analyzer which will parse a Nuspec file to gather module information.
@@ -65,10 +65,10 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* Initializes the analyzer once before any analysis is performed. * Initializes the analyzer once before any analysis is performed.
* *
* @throws Exception if there's an error during initialization * @throws InitializationException if there's an error during initialization
*/ */
@Override @Override
public void initializeFileTypeAnalyzer() throws Exception { public void initializeFileTypeAnalyzer() throws InitializationException {
} }
/** /**
@@ -82,7 +82,8 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* Returns the key used in the properties file to reference the analyzer's enabled property. * Returns the key used in the properties file to reference the analyzer's
* enabled property.
* *
* @return the analyzer's enabled property setting key * @return the analyzer's enabled property setting key
*/ */
@@ -125,27 +126,15 @@ public class NuspecAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException when there's an exception during analysis * @throws AnalysisException when there's an exception during analysis
*/ */
@Override @Override
public void analyzeFileType(Dependency dependency, Engine engine) throws AnalysisException { public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
LOGGER.debug("Checking Nuspec file {}", dependency); LOGGER.debug("Checking Nuspec file {}", dependency);
try { try {
final NuspecParser parser = new XPathNuspecParser(); final NuspecParser parser = new XPathNuspecParser();
NugetPackage np = null; NugetPackage np = null;
FileInputStream fis = null; try (FileInputStream fis = new FileInputStream(dependency.getActualFilePath())) {
try {
fis = new FileInputStream(dependency.getActualFilePath());
np = parser.parse(fis); np = parser.parse(fis);
} catch (NuspecParseException ex) { } catch (NuspecParseException | FileNotFoundException ex) {
throw new AnalysisException(ex); throw new AnalysisException(ex);
} catch (FileNotFoundException ex) {
throw new AnalysisException(ex);
} finally {
if (fis != null) {
try {
fis.close();
} catch (IOException e) {
LOGGER.debug("Error closing input stream");
}
}
} }
if (np.getOwners() != null) { if (np.getOwners() != null) {

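The Nuspec change above collapses two catch blocks into a Java 7 multi-catch that re-wraps the failure. A generic sketch of the same style using the JDK XML parser; IllegalStateException stands in for the project's AnalysisException.

    import java.io.File;
    import java.io.IOException;
    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.parsers.ParserConfigurationException;
    import org.w3c.dom.Document;
    import org.xml.sax.SAXException;

    // Sketch: unrelated checked exceptions share one handler and are re-wrapped
    // in a single application exception.
    public final class MultiCatchExample {
        private MultiCatchExample() {
        }

        public static Document parse(File nuspec) {
            try {
                return DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(nuspec);
            } catch (ParserConfigurationException | SAXException | IOException ex) {
                throw new IllegalStateException("Unable to parse " + nuspec, ex);
            }
        }
    }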

@@ -27,19 +27,24 @@ import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier; import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability; import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.LoggerFactory;
/** /**
 * NvdCveAnalyzer is a utility class that takes a project dependency and attempts to discern if there are any associated * NvdCveAnalyzer is a utility class that takes a project dependency and
 * CVEs. It uses the identifiers found by other analyzers to look up the CVE data. * attempts to discern if there are any associated CVEs. It uses the
 * identifiers found by other analyzers to look up the CVE data.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public class NvdCveAnalyzer implements Analyzer { public class NvdCveAnalyzer extends AbstractAnalyzer {
/** /**
* The maximum number of query results to return. * The Logger for use throughout the class
*/ */
static final int MAX_QUERY_RESULTS = 100; private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(NvdCveAnalyzer.class);
/** /**
* The CVE Index. * The CVE Index.
*/ */
@@ -51,18 +56,18 @@ public class NvdCveAnalyzer implements Analyzer {
* @throws SQLException thrown when there is a SQL Exception * @throws SQLException thrown when there is a SQL Exception
* @throws IOException thrown when there is an IO Exception * @throws IOException thrown when there is an IO Exception
* @throws DatabaseException thrown when there is a database exceptions * @throws DatabaseException thrown when there is a database exceptions
* @throws ClassNotFoundException thrown if the h2 database driver cannot be loaded * @throws ClassNotFoundException thrown if the h2 database driver cannot be
* loaded
*/ */
public void open() throws SQLException, IOException, DatabaseException, ClassNotFoundException { public void open() throws SQLException, IOException, DatabaseException, ClassNotFoundException {
cveDB = new CveDB(); cveDB = CveDB.getInstance();
cveDB.open();
} }
/** /**
* Closes the data source. * Closes the data source.
*/ */
@Override @Override
public void close() { public void closeAnalyzer() {
cveDB.close(); cveDB.close();
cveDB = null; cveDB = null;
} }
@@ -77,27 +82,16 @@ public class NvdCveAnalyzer implements Analyzer {
} }
/** /**
* Ensures that the CVE Database is closed. * Analyzes a dependency and attempts to determine if there are any CPE
* * identifiers for this dependency.
* @throws Throwable when a throwable is thrown.
*/
@Override
protected void finalize() throws Throwable {
super.finalize();
if (isOpen()) {
close();
}
}
/**
* Analyzes a dependency and attempts to determine if there are any CPE identifiers for this dependency.
* *
* @param dependency The Dependency to analyze * @param dependency The Dependency to analyze
* @param engine The analysis engine * @param engine The analysis engine
* @throws AnalysisException is thrown if there is an issue analyzing the dependency * @throws AnalysisException thrown if there is an issue analyzing the
* dependency
*/ */
@Override @Override
public void analyze(Dependency dependency, Engine engine) throws AnalysisException { protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
for (Identifier id : dependency.getIdentifiers()) { for (Identifier id : dependency.getIdentifiers()) {
if ("cpe".equals(id.getType())) { if ("cpe".equals(id.getType())) {
try { try {
@@ -143,12 +137,38 @@ public class NvdCveAnalyzer implements Analyzer {
} }
/** /**
* Opens the database used to gather NVD CVE data. * <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
* *
* @throws Exception is thrown if there is an issue opening the index. * @return the key for the analyzer's enabled property
*/ */
@Override @Override
public void initialize() throws Exception { protected String getAnalyzerEnabledSettingKey() {
this.open(); return Settings.KEYS.ANALYZER_NVD_CVE_ENABLED;
}
/**
* Opens the database used to gather NVD CVE data.
*
* @throws InitializationException is thrown if there is an issue opening
* the index.
*/
@Override
public void initializeAnalyzer() throws InitializationException {
try {
this.open();
} catch (SQLException ex) {
LOGGER.debug("SQL Exception initializing NvdCveAnalyzer", ex);
throw new InitializationException(ex);
} catch (IOException ex) {
LOGGER.debug("IO Exception initializing NvdCveAnalyzer", ex);
throw new InitializationException(ex);
} catch (DatabaseException ex) {
LOGGER.debug("Database Exception initializing NvdCveAnalyzer", ex);
throw new InitializationException(ex);
} catch (ClassNotFoundException ex) {
LOGGER.debug("Exception initializing NvdCveAnalyzer", ex);
throw new InitializationException(ex);
}
} }
} }
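Editorial note: the key change above is `cveDB = CveDB.getInstance()` replacing `new CveDB()`, part of the thread-safety/singleton work in this commit range. The sketch below only illustrates the general usage-counted singleton pattern such a change implies; the class name (SharedDb) and its internals are hypothetical and do not reflect the real CveDB implementation, which is not shown in this diff.

    // Illustrative only: analyzers call getInstance()/close() instead of
    // constructing and tearing down their own database handle.
    public final class SharedDb {
        private static SharedDb instance = null;
        private static int users = 0;

        private SharedDb() {
            // open the underlying database connection here (hypothetical)
        }

        public static synchronized SharedDb getInstance() {
            if (instance == null) {
                instance = new SharedDb();
            }
            users++;
            return instance;
        }

        public void close() {
            releaseInstance();
        }

        private static synchronized void releaseInstance() {
            // only drop the shared handle once the last user has closed it
            if (--users <= 0) {
                users = 0;
                instance = null;
            }
        }
    }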

OpenSSLAnalyzer.java
@@ -28,16 +28,21 @@ import org.owasp.dependencycheck.utils.Settings;
 import java.io.File;
 import java.io.FileFilter;
 import java.io.IOException;
+import java.nio.charset.Charset;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import org.owasp.dependencycheck.exception.InitializationException;
 /**
  * Used to analyze OpenSSL source code present in the file system.
  *
- * @author Dale Visser <dvisser@ida.org>
+ * @author Dale Visser
  */
 public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
+    /**
+     * Hexadecimal.
+     */
     private static final int HEXADECIMAL = 16;
     /**
      * Filename to analyze. All other .h files get removed from consideration.
@@ -48,17 +53,47 @@ public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
      * Filter that detects files named "__init__.py".
      */
     private static final FileFilter OPENSSLV_FILTER = FileFilterBuilder.newInstance().addFilenames(OPENSSLV_H).build();
+    /**
+     * Open SSL Version number pattern.
+     */
     private static final Pattern VERSION_PATTERN = Pattern.compile(
             "define\\s+OPENSSL_VERSION_NUMBER\\s+0x([0-9a-zA-Z]{8})L", Pattern.DOTALL
             | Pattern.CASE_INSENSITIVE);
+    /**
+     * The offset of the major version number.
+     */
     private static final int MAJOR_OFFSET = 28;
+    /**
+     * The mask for the minor version number.
+     */
     private static final long MINOR_MASK = 0x0ff00000L;
+    /**
+     * The offset of the minor version number.
+     */
     private static final int MINOR_OFFSET = 20;
+    /**
+     * The max for the fix version.
+     */
     private static final long FIX_MASK = 0x000ff000L;
+    /**
+     * The offset for the fix version.
+     */
     private static final int FIX_OFFSET = 12;
+    /**
+     * The mask for the patch version.
+     */
     private static final long PATCH_MASK = 0x00000ff0L;
+    /**
+     * The offset for the patch version.
+     */
     private static final int PATCH_OFFSET = 4;
+    /**
+     * Number of letters.
+     */
     private static final int NUM_LETTERS = 26;
+    /**
+     * The status mask.
+     */
     private static final int STATUS_MASK = 0x0000000f;
     /**
@@ -67,7 +102,7 @@ public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
      * @param openSSLVersionConstant The open SSL version
      * @return the version of openssl
      */
-    static String getOpenSSLVersion(long openSSLVersionConstant) {
+    protected static String getOpenSSLVersion(long openSSLVersionConstant) {
         final long major = openSSLVersionConstant >>> MAJOR_OFFSET;
         final long minor = (openSSLVersionConstant & MINOR_MASK) >>> MINOR_OFFSET;
         final long fix = (openSSLVersionConstant & FIX_MASK) >>> FIX_OFFSET;
@@ -111,10 +146,10 @@ public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
     /**
      * No-op initializer implementation.
      *
-     * @throws Exception never thrown
+     * @throws InitializationException never thrown
      */
     @Override
-    protected void initializeFileTypeAnalyzer() throws Exception {
+    protected void initializeFileTypeAnalyzer() throws InitializationException {
         // Nothing to do here.
     }
@@ -123,10 +158,11 @@ public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
      *
      * @param dependency the dependency being analyzed
      * @param engine the engine being used to perform the scan
-     * @throws AnalysisException thrown if there is an unrecoverable error analyzing the dependency
+     * @throws AnalysisException thrown if there is an unrecoverable error
+     * analyzing the dependency
      */
     @Override
-    protected void analyzeFileType(Dependency dependency, Engine engine)
+    protected void analyzeDependency(Dependency dependency, Engine engine)
             throws AnalysisException {
         final File file = dependency.getActualFile();
         final String parentName = file.getParentFile().getName();
@@ -158,16 +194,19 @@ public class OpenSSLAnalyzer extends AbstractFileTypeAnalyzer {
      */
     private String getFileContents(final File actualFile)
             throws AnalysisException {
-        String contents;
         try {
-            contents = FileUtils.readFileToString(actualFile).trim();
+            return FileUtils.readFileToString(actualFile, Charset.defaultCharset()).trim();
         } catch (IOException e) {
             throw new AnalysisException(
                     "Problem occurred while reading dependency file.", e);
         }
-        return contents;
     }
+    /**
+     * Returns the setting for the analyzer enabled setting key.
+     *
+     * @return the setting for the analyzer enabled setting key
+     */
     @Override
     protected String getAnalyzerEnabledSettingKey() {
         return Settings.KEYS.ANALYZER_OPENSSL_ENABLED;
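Editorial note: the masks and offsets documented above follow the well-known OPENSSL_VERSION_NUMBER layout (major nibble, minor byte, fix byte, patch byte, status nibble). The standalone sketch below applies those same constants to the value 0x1000207fL, which corresponds to OpenSSL 1.0.2g; the exact string produced by the analyzer's getOpenSSLVersion() may be formatted differently.

    public class OpenSslVersionDecode {
        public static void main(String[] args) {
            final long v = 0x1000207fL;                   // OPENSSL_VERSION_NUMBER for 1.0.2g
            final long major = v >>> 28;                  // 1
            final long minor = (v & 0x0ff00000L) >>> 20;  // 0
            final long fix = (v & 0x000ff000L) >>> 12;    // 2
            final long patch = (v & 0x00000ff0L) >>> 4;   // 7 -> letter 'g'
            final int status = (int) (v & 0x0000000f);    // 0xf = release
            final char letter = patch == 0 ? ' ' : (char) ('a' + patch - 1);
            System.out.printf("%d.%d.%d%s (status 0x%x)%n",
                    major, minor, fix, patch == 0 ? "" : String.valueOf(letter), status);
            // prints: 1.0.2g (status 0xf)
        }
    }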

PythonDistributionAnalyzer.java
@@ -23,9 +23,10 @@ import java.io.FileFilter;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.FilenameFilter;
+import java.io.IOException;
+import java.io.InputStream;
 import org.apache.commons.io.filefilter.NameFileFilter;
 import org.apache.commons.io.filefilter.SuffixFileFilter;
-import org.apache.commons.io.input.AutoCloseInputStream;
 import org.apache.commons.lang3.StringUtils;
 import org.owasp.dependencycheck.Engine;
 import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
@@ -37,19 +38,23 @@ import org.slf4j.LoggerFactory;
 import javax.mail.MessagingException;
 import javax.mail.internet.InternetHeaders;
+import org.owasp.dependencycheck.exception.InitializationException;
 import org.owasp.dependencycheck.utils.ExtractionException;
 import org.owasp.dependencycheck.utils.ExtractionUtil;
 import org.owasp.dependencycheck.utils.FileFilterBuilder;
 import org.owasp.dependencycheck.utils.FileUtils;
 import org.owasp.dependencycheck.utils.Settings;
 import org.owasp.dependencycheck.utils.UrlStringUtils;
+import java.util.concurrent.atomic.AtomicInteger;
 /**
- * Used to analyze a Wheel or egg distribution files, or their contents in unzipped form, and collect information that can be used
- * to determine the associated CPE.
+ * Used to analyze a Wheel or egg distribution files, or their contents in
+ * unzipped form, and collect information that can be used to determine the
+ * associated CPE.
  *
- * @author Dale Visser <dvisser@ida.org>
+ * @author Dale Visser
  */
+@Experimental
 public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
     /**
@@ -69,9 +74,10 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
             .getLogger(PythonDistributionAnalyzer.class);
     /**
-     * The count of directories created during analysis. This is used for creating temporary directories.
+     * The count of directories created during analysis. This is used for
+     * creating temporary directories.
      */
-    private static int dirCount = 0;
+    private static final AtomicInteger DIR_COUNT = new AtomicInteger(0);
     /**
      * The name of the analyzer.
@@ -103,7 +109,8 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
     private File tempFileLocation;
     /**
-     * Filter that detects *.dist-info files (but doesn't verify they are directories.
+     * Filter that detects *.dist-info files (but doesn't verify they are
+     * directories.
      */
     private static final FilenameFilter DIST_INFO_FILTER = new SuffixFileFilter(
             ".dist-info");
@@ -163,7 +170,8 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
     }
     /**
-     * Returns the key used in the properties file to reference the analyzer's enabled property.
+     * Returns the key used in the properties file to reference the analyzer's
+     * enabled property.
      *
      * @return the analyzer's enabled property setting key
      */
@@ -173,7 +181,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
     }
     @Override
-    protected void analyzeFileType(Dependency dependency, Engine engine)
+    protected void analyzeDependency(Dependency dependency, Engine engine)
             throws AnalysisException {
         final File actualFile = dependency.getActualFile();
         if (WHL_FILTER.accept(actualFile)) {
@@ -205,7 +213,8 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
      * @param dependency the archive being scanned
      * @param folderFilter the filter to apply to the folder
      * @param metadataFilter the filter to apply to the meta data
-     * @throws AnalysisException thrown when there is a problem analyzing the dependency
+     * @throws AnalysisException thrown when there is a problem analyzing the
+     * dependency
      */
     private void collectMetadataFromArchiveFormat(Dependency dependency,
             FilenameFilter folderFilter, FilenameFilter metadataFilter)
@@ -220,32 +229,43 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
             throw new AnalysisException(ex);
         }
-        collectWheelMetadata(
-                dependency,
-                getMatchingFile(getMatchingFile(temp, folderFilter),
-                metadataFilter));
+        File matchingFile = getMatchingFile(temp, folderFilter);
+        if (matchingFile != null) {
+            matchingFile = getMatchingFile(matchingFile, metadataFilter);
+            if (matchingFile != null) {
+                collectWheelMetadata(dependency, matchingFile);
+            }
+        }
     }
     /**
      * Makes sure a usable temporary directory is available.
      *
-     * @throws Exception an AnalyzeException is thrown when the temp directory cannot be created
+     * @throws InitializationException an AnalyzeException is thrown when the
+     * temp directory cannot be created
      */
     @Override
-    protected void initializeFileTypeAnalyzer() throws Exception {
-        final File baseDir = Settings.getTempDirectory();
-        tempFileLocation = File.createTempFile("check", "tmp", baseDir);
-        if (!tempFileLocation.delete()) {
-            final String msg = String.format(
-                    "Unable to delete temporary file '%s'.",
-                    tempFileLocation.getAbsolutePath());
-            throw new AnalysisException(msg);
-        }
-        if (!tempFileLocation.mkdirs()) {
-            final String msg = String.format(
-                    "Unable to create directory '%s'.",
-                    tempFileLocation.getAbsolutePath());
-            throw new AnalysisException(msg);
+    protected void initializeFileTypeAnalyzer() throws InitializationException {
+        try {
+            final File baseDir = Settings.getTempDirectory();
+            tempFileLocation = File.createTempFile("check", "tmp", baseDir);
+            if (!tempFileLocation.delete()) {
+                setEnabled(false);
+                final String msg = String.format(
+                        "Unable to delete temporary file '%s'.",
+                        tempFileLocation.getAbsolutePath());
+                throw new InitializationException(msg);
+            }
+            if (!tempFileLocation.mkdirs()) {
+                setEnabled(false);
+                final String msg = String.format(
                        "Unable to create directory '%s'.",
+                        tempFileLocation.getAbsolutePath());
+                throw new InitializationException(msg);
+            }
+        } catch (IOException ex) {
+            setEnabled(false);
+            throw new InitializationException("Unable to create a temporary file", ex);
         }
     }
@@ -253,13 +273,15 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
      * Deletes any files extracted from the Wheel during analysis.
      */
     @Override
-    public void close() {
+    public void closeAnalyzer() {
         if (tempFileLocation != null && tempFileLocation.exists()) {
             LOGGER.debug("Attempting to delete temporary files");
             final boolean success = FileUtils.delete(tempFileLocation);
-            if (!success) {
-                LOGGER.warn(
-                        "Failed to delete some temporary files, see the log for more details");
+            if (!success && tempFileLocation.exists()) {
+                final String[] l = tempFileLocation.list();
+                if (l != null && l.length > 0) {
+                    LOGGER.warn("Failed to delete some temporary files, see the log for more details");
+                }
             }
         }
     }
@@ -311,7 +333,8 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
     }
     /**
-     * Returns a list of files that match the given filter, this does not recursively scan the directory.
+     * Returns a list of files that match the given filter, this does not
+     * recursively scan the directory.
      *
      * @param folder the folder to filter
     * @param filter the filter to apply to the files in the directory
@@ -337,20 +360,20 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
         if (null == manifest) {
             LOGGER.debug("Manifest file not found.");
         } else {
-            try {
-                result.load(new AutoCloseInputStream(new BufferedInputStream(
-                        new FileInputStream(manifest))));
-            } catch (MessagingException e) {
-                LOGGER.warn(e.getMessage(), e);
-            } catch (FileNotFoundException e) {
+            try (InputStream in = new BufferedInputStream(new FileInputStream(manifest))) {
+                result.load(in);
+            } catch (MessagingException | FileNotFoundException e) {
                 LOGGER.warn(e.getMessage(), e);
+            } catch (IOException ex) {
+                LOGGER.warn(ex.getMessage(), ex);
             }
         }
         return result;
     }
     /**
-     * Retrieves the next temporary destination directory for extracting an archive.
+     * Retrieves the next temporary destination directory for extracting an
+     * archive.
      *
      * @return a directory
      * @throws AnalysisException thrown if unable to create temporary directory
@@ -361,7 +384,7 @@ public class PythonDistributionAnalyzer extends AbstractFileTypeAnalyzer {
         // getting an exception for some directories not being able to be
         // created; might be because the directory already exists?
         do {
-            dirCount += 1;
+            final int dirCount = DIR_COUNT.incrementAndGet();
             directory = new File(tempFileLocation, String.valueOf(dirCount));
         } while (directory.exists());
         if (!directory.mkdirs()) {
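Editorial note: the change from a mutable static `dirCount` to an `AtomicInteger` is what makes the temporary-directory naming safe under concurrent analyses. The sketch below restates that pattern in isolation; the surrounding class and method names are hypothetical.

    import java.io.File;
    import java.io.IOException;
    import java.util.concurrent.atomic.AtomicInteger;

    public class TempDirSketch {
        private static final AtomicInteger DIR_COUNT = new AtomicInteger(0);

        // Returns a unique, freshly created sub-directory even when called
        // from several analysis threads at once.
        static File nextTempDirectory(File base) throws IOException {
            File dir;
            do {
                dir = new File(base, String.valueOf(DIR_COUNT.incrementAndGet()));
            } while (dir.exists());
            if (!dir.mkdirs()) {
                throw new IOException("Unable to create " + dir.getAbsolutePath());
            }
            return dir;
        }

        public static void main(String[] args) throws IOException {
            final File base = new File(System.getProperty("java.io.tmpdir"));
            System.out.println(nextTempDirectory(base));
            System.out.println(nextTempDirectory(base));
        }
    }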

PythonPackageAnalyzer.java
@@ -32,16 +32,18 @@ import org.owasp.dependencycheck.utils.UrlStringUtils;
 import java.io.File;
 import java.io.FileFilter;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
+import java.nio.charset.Charset;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import org.owasp.dependencycheck.exception.InitializationException;
 /**
- * Used to analyze a Python package, and collect information that can be used to determine the associated CPE.
+ * Used to analyze a Python package, and collect information that can be used to
+ * determine the associated CPE.
  *
- * @author Dale Visser <dvisser@ida.org>
+ * @author Dale Visser
 */
+@Experimental
 public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
     /**
@@ -141,10 +143,10 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
     /**
      * No-op initializer implementation.
      *
-     * @throws Exception never thrown
+     * @throws InitializationException never thrown
      */
     @Override
-    protected void initializeFileTypeAnalyzer() throws Exception {
+    protected void initializeFileTypeAnalyzer() throws InitializationException {
         // Nothing to do here.
     }
@@ -165,39 +167,38 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
      *
      * @param dependency the dependency being analyzed
      * @param engine the engine being used to perform the scan
-     * @throws AnalysisException thrown if there is an unrecoverable error analyzing the dependency
+     * @throws AnalysisException thrown if there is an unrecoverable error
+     * analyzing the dependency
     */
     @Override
-    protected void analyzeFileType(Dependency dependency, Engine engine)
+    protected void analyzeDependency(Dependency dependency, Engine engine)
             throws AnalysisException {
         final File file = dependency.getActualFile();
         final File parent = file.getParentFile();
         final String parentName = parent.getName();
-        boolean found = false;
         if (INIT_PY_FILTER.accept(file)) {
+            //by definition, the containing folder of __init__.py is considered the package, even the file is empty:
+            //"The __init__.py files are required to make Python treat the directories as containing packages"
+            //see section "6.4 Packages" from https://docs.python.org/2/tutorial/modules.html;
+            dependency.setDisplayFileName(parentName + "/__init__.py");
+            dependency.getProductEvidence().addEvidence(file.getName(),
+                    "PackageName", parentName, Confidence.HIGHEST);
             final File[] fileList = parent.listFiles(PY_FILTER);
             if (fileList != null) {
                 for (final File sourceFile : fileList) {
-                    found |= analyzeFileContents(dependency, sourceFile);
+                    analyzeFileContents(dependency, sourceFile);
                 }
             }
-        }
-        if (found) {
-            dependency.setDisplayFileName(parentName + "/__init__.py");
-            dependency.getProductEvidence().addEvidence(file.getName(),
-                    "PackageName", parentName, Confidence.MEDIUM);
         } else {
-            // copy, alter and set in case some other thread is iterating over
-            final List<Dependency> dependencies = new ArrayList<Dependency>(
-                    engine.getDependencies());
-            dependencies.remove(dependency);
-            engine.setDependencies(dependencies);
+            engine.getDependencies().remove(dependency);
         }
     }
     /**
-     * This should gather information from leading docstrings, file comments, and assignments to __version__, __title__,
-     * __summary__, __uri__, __url__, __home*page__, __author__, and their all caps equivalents.
+     * This should gather information from leading docstrings, file comments,
+     * and assignments to __version__, __title__, __summary__, __uri__, __url__,
+     * __home*page__, __author__, and their all caps equivalents.
      *
      * @param dependency the dependency being analyzed
      * @param file the file name to analyze
@@ -208,7 +209,7 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
             throws AnalysisException {
         String contents;
         try {
-            contents = FileUtils.readFileToString(file).trim();
+            contents = FileUtils.readFileToString(file, Charset.defaultCharset()).trim();
         } catch (IOException e) {
             throw new AnalysisException(
                     "Problem occurred while reading dependency file.", e);
@@ -288,7 +289,8 @@ public class PythonPackageAnalyzer extends AbstractFileTypeAnalyzer {
     }
     /**
-     * Gather evidence from a Python source file using the given string assignment regex pattern.
+     * Gather evidence from a Python source file using the given string
+     * assignment regex pattern.
      *
      * @param pattern to scan contents with
     * @param contents of Python source file
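Editorial note: the javadoc above describes gathering evidence from `__version__`-style assignments in Python source, but the analyzer's actual regex is not shown in this hunk. The following is a hedged, hypothetical illustration of how such a string-assignment pattern can be matched from Java; the pattern and names are assumptions, not the project's code.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class PyAssignmentSketch {
        // Hypothetical pattern: captures simple string assignments such as
        // __version__ = "1.2.3" or __TITLE__ = 'example'.
        private static final Pattern ASSIGN = Pattern.compile(
                "(?m)^\\s*__(version|title|summary|uri|url|author)__\\s*=\\s*(['\"])(.+?)\\2",
                Pattern.CASE_INSENSITIVE);

        public static void main(String[] args) {
            final String contents = "__title__ = 'example'\n__version__ = \"1.2.3\"\n";
            final Matcher m = ASSIGN.matcher(contents);
            while (m.find()) {
                System.out.println(m.group(1).toLowerCase() + " -> " + m.group(3));
            }
            // prints: title -> example
            //         version -> 1.2.3
        }
    }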

RubyBundleAuditAnalyzer.java
@@ -17,28 +17,45 @@
  */
 package org.owasp.dependencycheck.analyzer;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.UnsupportedEncodingException;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import org.apache.commons.io.FileUtils;
 import org.owasp.dependencycheck.Engine;
 import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
+import org.owasp.dependencycheck.data.nvdcve.CveDB;
+import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
 import org.owasp.dependencycheck.dependency.Confidence;
 import org.owasp.dependencycheck.dependency.Dependency;
 import org.owasp.dependencycheck.dependency.Reference;
 import org.owasp.dependencycheck.dependency.Vulnerability;
+import org.owasp.dependencycheck.exception.InitializationException;
 import org.owasp.dependencycheck.utils.FileFilterBuilder;
 import org.owasp.dependencycheck.utils.Settings;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import java.io.*;
-import java.util.*;
 /**
- * Used to analyze Ruby Bundler Gemspec.lock files utilizing the 3rd party bundle-audit tool.
+ * Used to analyze Ruby Bundler Gemspec.lock files utilizing the 3rd party
+ * bundle-audit tool.
  *
- * @author Dale Visser <dvisser@ida.org>
+ * @author Dale Visser
 */
+@Experimental
 public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
+    /**
+     * The logger.
+     */
     private static final Logger LOGGER = LoggerFactory.getLogger(RubyBundleAuditAnalyzer.class);
     /**
@@ -50,14 +67,32 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
      * The phase that this analyzer is intended to run in.
      */
     private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.PRE_INFORMATION_COLLECTION;
-    private static final FileFilter FILTER =
-            FileFilterBuilder.newInstance().addFilenames("Gemfile.lock").build();
+    /**
+     * The filter defining which files will be analyzed.
+     */
+    private static final FileFilter FILTER = FileFilterBuilder.newInstance().addFilenames("Gemfile.lock").build();
+    /**
+     * Name.
+     */
     public static final String NAME = "Name: ";
+    /**
+     * Version.
+     */
     public static final String VERSION = "Version: ";
+    /**
+     * Advisory.
+     */
     public static final String ADVISORY = "Advisory: ";
+    /**
+     * Criticality.
+     */
     public static final String CRITICALITY = "Criticality: ";
+    /**
+     * The DAL.
+     */
+    private CveDB cvedb;
     /**
      * @return a filter that accepts files named Gemfile.lock
      */
@@ -69,65 +104,118 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
     /**
      * Launch bundle-audit.
      *
+     * @param folder directory that contains bundle audit
      * @return a handle to the process
+     * @throws AnalysisException thrown when there is an issue launching bundle
+     * audit
     */
     private Process launchBundleAudit(File folder) throws AnalysisException {
         if (!folder.isDirectory()) {
             throw new AnalysisException(String.format("%s should have been a directory.", folder.getAbsolutePath()));
         }
-        final List<String> args = new ArrayList<String>();
+        final List<String> args = new ArrayList<>();
         final String bundleAuditPath = Settings.getString(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH);
-        args.add(null == bundleAuditPath ? "bundle-audit" : bundleAuditPath);
+        File bundleAudit = null;
+        if (bundleAuditPath != null) {
+            bundleAudit = new File(bundleAuditPath);
+            if (!bundleAudit.isFile()) {
+                LOGGER.warn("Supplied `bundleAudit` path is incorrect: " + bundleAuditPath);
+                bundleAudit = null;
+            }
+        }
+        args.add(bundleAudit != null && bundleAudit.isFile() ? bundleAudit.getAbsolutePath() : "bundle-audit");
         args.add("check");
         args.add("--verbose");
         final ProcessBuilder builder = new ProcessBuilder(args);
         builder.directory(folder);
         try {
+            LOGGER.info("Launching: " + args + " from " + folder);
             return builder.start();
         } catch (IOException ioe) {
-            throw new AnalysisException("bundle-audit failure", ioe);
+            throw new AnalysisException("bundle-audit initialization failure; this error can be ignored if you are not analyzing Ruby. "
+                    + "Otherwise ensure that bundle-audit is installed and the path to bundle audit is correctly specified", ioe);
         }
     }
     /**
-     * Initialize the analyzer. In this case, extract GrokAssembly.exe to a temporary location.
+     * Initialize the analyzer. In this case, extract GrokAssembly.exe to a
+     * temporary location.
      *
-     * @throws Exception if anything goes wrong
+     * @throws InitializationException if anything goes wrong
     */
     @Override
-    public void initializeFileTypeAnalyzer() throws Exception {
-        // Now, need to see if bundle-audit actually runs from this location.
-        Process process = launchBundleAudit(Settings.getTempDirectory());
-        int exitValue = process.waitFor();
-        if (0 == exitValue) {
-            LOGGER.warn("Unexpected exit code from bundle-audit process. Disabling {}: {}", ANALYZER_NAME, exitValue);
+    public void initializeFileTypeAnalyzer() throws InitializationException {
+        try {
+            cvedb = CveDB.getInstance();
+        } catch (DatabaseException ex) {
+            LOGGER.warn("Exception opening the database");
+            LOGGER.debug("error", ex);
             setEnabled(false);
-            throw new AnalysisException("Unexpected exit code from bundle-audit process.");
+            throw new InitializationException("Error connecting to the database", ex);
+        }
+        // Now, need to see if bundle-audit actually runs from this location.
+        Process process = null;
+        try {
+            process = launchBundleAudit(Settings.getTempDirectory());
+        } catch (AnalysisException ae) {
+            setEnabled(false);
+            final String msg = String.format("Exception from bundle-audit process: %s. Disabling %s", ae.getCause(), ANALYZER_NAME);
+            throw new InitializationException(msg, ae);
+        } catch (IOException ex) {
+            setEnabled(false);
+            throw new InitializationException("Unable to create temporary file, the Ruby Bundle Audit Analyzer will be disabled", ex);
+        }
+        final int exitValue;
+        try {
+            exitValue = process.waitFor();
+        } catch (InterruptedException ex) {
+            setEnabled(false);
+            final String msg = String.format("Bundle-audit process was interrupted. Disabling %s", ANALYZER_NAME);
+            throw new InitializationException(msg);
+        }
+        if (0 == exitValue) {
+            setEnabled(false);
+            final String msg = String.format("Unexpected exit code from bundle-audit process. Disabling %s: %s", ANALYZER_NAME, exitValue);
+            throw new InitializationException(msg);
         } else {
-            BufferedReader reader = null;
-            try {
-                reader = new BufferedReader(new InputStreamReader(process.getErrorStream(), "UTF-8"));
+            try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream(), "UTF-8"))) {
                 if (!reader.ready()) {
                     LOGGER.warn("Bundle-audit error stream unexpectedly not ready. Disabling " + ANALYZER_NAME);
                     setEnabled(false);
-                    throw new AnalysisException("Bundle-audit error stream unexpectedly not ready.");
+                    throw new InitializationException("Bundle-audit error stream unexpectedly not ready.");
                 } else {
                     final String line = reader.readLine();
-                    if (!line.contains("Errno::ENOENT")) {
+                    if (line == null || !line.contains("Errno::ENOENT")) {
                         LOGGER.warn("Unexpected bundle-audit output. Disabling {}: {}", ANALYZER_NAME, line);
                         setEnabled(false);
-                        throw new AnalysisException("Unexpected bundle-audit output.");
+                        throw new InitializationException("Unexpected bundle-audit output.");
                     }
                 }
-            } finally {
-                if (null != reader) {
-                    reader.close();
-                }
+            } catch (UnsupportedEncodingException ex) {
+                setEnabled(false);
+                throw new InitializationException("Unexpected bundle-audit encoding.", ex);
+            } catch (IOException ex) {
+                setEnabled(false);
+                throw new InitializationException("Unable to read bundle-audit output.", ex);
             }
         }
         if (isEnabled()) {
-            LOGGER.info(ANALYZER_NAME + " is enabled. It is necessary to manually run \"bundle-audit update\" " +
-                    "occasionally to keep its database up to date.");
+            LOGGER.info(ANALYZER_NAME + " is enabled. It is necessary to manually run \"bundle-audit update\" "
                    + "occasionally to keep its database up to date.");
+        }
+    }
+    /**
+     * Closes the data source.
+     */
+    @Override
+    public void closeAnalyzer() {
+        if (cvedb != null) {
+            cvedb.close();
+            cvedb = null;
         }
     }
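Editorial note: the initialization above probes whether bundle-audit can run at all by launching it from a temporary directory and inspecting its streams. The sketch below shows the same external-process pattern in isolation, reusing the `check --verbose` arguments from the diff; it assumes bundle-audit is on the PATH and is not the analyzer's actual code.

    import java.io.BufferedReader;
    import java.io.File;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class ProcessSketch {
        public static void main(String[] args) throws IOException, InterruptedException {
            // Mirrors the arguments built above; the working directory would
            // normally be the folder that holds Gemfile.lock.
            final ProcessBuilder builder = new ProcessBuilder(
                    Arrays.asList("bundle-audit", "check", "--verbose"));
            builder.directory(new File("."));
            final Process process = builder.start();
            try (BufferedReader out = new BufferedReader(
                    new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
                String line;
                while ((line = out.readLine()) != null) {
                    System.out.println(line);
                }
            }
            System.out.println("bundle-audit exit code: " + process.waitFor());
        }
    }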
@@ -152,7 +240,8 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
     }
     /**
-     * Returns the key used in the properties file to reference the analyzer's enabled property.
+     * Returns the key used in the properties file to reference the analyzer's
+     * enabled property.
      *
      * @return the analyzer's enabled property setting key
     */
@@ -162,61 +251,83 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
     }
     /**
-     * If {@link #analyzeFileType(Dependency, Engine)} is called, then we have successfully initialized, and it will
-     * be necessary to disable {@link RubyGemspecAnalyzer}.
+     * If {@link #analyzeDependency(Dependency, Engine)} is called, then we have
+     * successfully initialized, and it will be necessary to disable
+     * {@link RubyGemspecAnalyzer}.
     */
     private boolean needToDisableGemspecAnalyzer = true;
+    /**
+     * Determines if the analyzer can analyze the given file type.
+     *
+     * @param dependency the dependency to determine if it can analyze
+     * @param engine the dependency-check engine
+     * @throws AnalysisException thrown if there is an analysis exception.
+     */
     @Override
-    protected void analyzeFileType(Dependency dependency, Engine engine)
+    protected void analyzeDependency(Dependency dependency, Engine engine)
             throws AnalysisException {
         if (needToDisableGemspecAnalyzer) {
             boolean failed = true;
             final String className = RubyGemspecAnalyzer.class.getName();
             for (FileTypeAnalyzer analyzer : engine.getFileTypeAnalyzers()) {
-                if (analyzer instanceof RubyGemspecAnalyzer) {
+                if (analyzer instanceof RubyBundlerAnalyzer) {
+                    ((RubyBundlerAnalyzer) analyzer).setEnabled(false);
+                    LOGGER.info("Disabled " + RubyBundlerAnalyzer.class.getName() + " to avoid noisy duplicate results.");
+                } else if (analyzer instanceof RubyGemspecAnalyzer) {
                     ((RubyGemspecAnalyzer) analyzer).setEnabled(false);
                     LOGGER.info("Disabled " + className + " to avoid noisy duplicate results.");
                     failed = false;
                 }
             }
             if (failed) {
-                LOGGER.warn("Did not find" + className + '.');
+                LOGGER.warn("Did not find " + className + '.');
             }
             needToDisableGemspecAnalyzer = false;
         }
         final File parentFile = dependency.getActualFile().getParentFile();
         final Process process = launchBundleAudit(parentFile);
+        final int exitValue;
         try {
-            process.waitFor();
+            exitValue = process.waitFor();
         } catch (InterruptedException ie) {
             throw new AnalysisException("bundle-audit process interrupted", ie);
         }
-        BufferedReader rdr = null;
+        if (exitValue < 0 || exitValue > 1) {
+            final String msg = String.format("Unexpected exit code from bundle-audit process; exit code: %s", exitValue);
+            throw new AnalysisException(msg);
+        }
         try {
-            rdr = new BufferedReader(new InputStreamReader(process.getInputStream(), "UTF-8"));
-            processBundlerAuditOutput(dependency, engine, rdr);
-        } catch (IOException ioe) {
-            LOGGER.warn("bundle-audit failure", ioe);
-        } finally {
-            if (null != rdr) {
-                try {
-                    rdr.close();
-                } catch (IOException ioe) {
-                    LOGGER.warn("bundle-audit close failure", ioe);
+            try (BufferedReader errReader = new BufferedReader(new InputStreamReader(process.getErrorStream(), "UTF-8"))) {
+                while (errReader.ready()) {
+                    final String error = errReader.readLine();
+                    LOGGER.warn(error);
                 }
             }
+            try (BufferedReader rdr = new BufferedReader(new InputStreamReader(process.getInputStream(), "UTF-8"))) {
+                processBundlerAuditOutput(dependency, engine, rdr);
+            }
+        } catch (IOException ioe) {
+            LOGGER.warn("bundle-audit failure", ioe);
         }
     }
+    /**
+     * Processes the bundler audit output.
+     *
+     * @param original the dependency
+     * @param engine the dependency-check engine
+     * @param rdr the reader of the report
+     * @throws IOException thrown if the report cannot be read.
+     */
     private void processBundlerAuditOutput(Dependency original, Engine engine, BufferedReader rdr) throws IOException {
         final String parentName = original.getActualFile().getParentFile().getName();
         final String fileName = original.getFileName();
+        final String filePath = original.getFilePath();
         Dependency dependency = null;
         Vulnerability vulnerability = null;
         String gem = null;
-        final Map<String, Dependency> map = new HashMap<String, Dependency>();
+        final Map<String, Dependency> map = new HashMap<>();
         boolean appendToDescription = false;
         while (rdr.ready()) {
             final String nextLine = rdr.readLine();
@@ -226,12 +337,12 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
                 appendToDescription = false;
                 gem = nextLine.substring(NAME.length());
                 if (!map.containsKey(gem)) {
-                    map.put(gem, createDependencyForGem(engine, parentName, fileName, gem));
+                    map.put(gem, createDependencyForGem(engine, parentName, fileName, filePath, gem));
                 }
                 dependency = map.get(gem);
                 LOGGER.debug(String.format("bundle-audit (%s): %s", parentName, nextLine));
             } else if (nextLine.startsWith(VERSION)) {
-                vulnerability = createVulnerability(parentName, dependency, vulnerability, gem, nextLine);
+                vulnerability = createVulnerability(parentName, dependency, gem, nextLine);
             } else if (nextLine.startsWith(ADVISORY)) {
                 setVulnerabilityName(parentName, dependency, vulnerability, nextLine);
             } else if (nextLine.startsWith(CRITICALITY)) {
@@ -241,16 +352,24 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
             } else if (nextLine.startsWith("Description:")) {
                 appendToDescription = true;
                 if (null != vulnerability) {
-                    vulnerability.setDescription("*** Vulnerability obtained from bundle-audit verbose report. Title link may not work. CPE below is guessed. CVSS score is estimated (-1.0 indicates unknown). See link below for full details. *** ");
-                }
-            } else if (appendToDescription) {
-                if (null != vulnerability) {
-                    vulnerability.setDescription(vulnerability.getDescription() + nextLine + "\n");
+                    vulnerability.setDescription("*** Vulnerability obtained from bundle-audit verbose report. "
+                            + "Title link may not work. CPE below is guessed. CVSS score is estimated (-1.0 "
+                            + " indicates unknown). See link below for full details. *** ");
                 }
+            } else if (appendToDescription && null != vulnerability) {
+                vulnerability.setDescription(vulnerability.getDescription() + nextLine + "\n");
             }
         }
     }
+    /**
+     * Sets the vulnerability name.
+     *
+     * @param parentName the parent name
+     * @param dependency the dependency
+     * @param vulnerability the vulnerability
+     * @param nextLine the line to parse
+     */
     private void setVulnerabilityName(String parentName, Dependency dependency, Vulnerability vulnerability, String nextLine) {
         final String advisory = nextLine.substring((ADVISORY.length()));
         if (null != vulnerability) {
@@ -262,10 +381,17 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
         LOGGER.debug(String.format("bundle-audit (%s): %s", parentName, nextLine));
     }
+    /**
+     * Adds a reference to the vulnerability.
+     *
+     * @param parentName the parent name
+     * @param vulnerability the vulnerability
+     * @param nextLine the line to parse
+     */
     private void addReferenceToVulnerability(String parentName, Vulnerability vulnerability, String nextLine) {
         final String url = nextLine.substring(("URL: ").length());
         if (null != vulnerability) {
-            Reference ref = new Reference();
+            final Reference ref = new Reference();
             ref.setName(vulnerability.getName());
             ref.setSource("bundle-audit");
             ref.setUrl(url);
@@ -274,23 +400,48 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
         LOGGER.debug(String.format("bundle-audit (%s): %s", parentName, nextLine));
     }
+    /**
+     * Adds the criticality to the vulnerability
+     *
+     * @param parentName the parent name
+     * @param vulnerability the vulnerability
+     * @param nextLine the line to parse
+     */
     private void addCriticalityToVulnerability(String parentName, Vulnerability vulnerability, String nextLine) {
         if (null != vulnerability) {
             final String criticality = nextLine.substring(CRITICALITY.length()).trim();
-            if ("High".equals(criticality)) {
-                vulnerability.setCvssScore(8.5f);
-            } else if ("Medium".equals(criticality)) {
-                vulnerability.setCvssScore(5.5f);
-            } else if ("Low".equals(criticality)) {
-                vulnerability.setCvssScore(2.0f);
-            } else {
-                vulnerability.setCvssScore(-1.0f);
+            float score = -1.0f;
+            Vulnerability v = null;
+            try {
+                v = cvedb.getVulnerability(vulnerability.getName());
+            } catch (DatabaseException ex) {
+                LOGGER.debug("Unable to look up vulnerability {}", vulnerability.getName());
             }
+            if (v != null) {
+                score = v.getCvssScore();
+            } else if ("High".equalsIgnoreCase(criticality)) {
+                score = 8.5f;
+            } else if ("Medium".equalsIgnoreCase(criticality)) {
+                score = 5.5f;
+            } else if ("Low".equalsIgnoreCase(criticality)) {
+                score = 2.0f;
+            }
+            vulnerability.setCvssScore(score);
         }
         LOGGER.debug(String.format("bundle-audit (%s): %s", parentName, nextLine));
     }
-    private Vulnerability createVulnerability(String parentName, Dependency dependency, Vulnerability vulnerability, String gem, String nextLine) {
+    /**
+     * Creates a vulnerability.
+     *
+     * @param parentName the parent name
+     * @param dependency the dependency
+     * @param gem the gem name
+     * @param nextLine the line to parse
+     * @return the vulnerability
+     */
+    private Vulnerability createVulnerability(String parentName, Dependency dependency, String gem, String nextLine) {
+        Vulnerability vulnerability = null;
         if (null != dependency) {
             final String version = nextLine.substring(VERSION.length());
             dependency.getVersionEvidence().addEvidence(
@@ -313,13 +464,30 @@ public class RubyBundleAuditAnalyzer extends AbstractFileTypeAnalyzer {
         return vulnerability;
     }
-    private Dependency createDependencyForGem(Engine engine, String parentName, String fileName, String gem) throws IOException {
-        final File tempFile = File.createTempFile("Gemfile-" + gem, ".lock", Settings.getTempDirectory());
+    /**
+     * Creates the dependency based off of the gem.
+     *
+     * @param engine the engine used for scanning
+     * @param parentName the gem parent
+     * @param fileName the file name
+     * @param filePath the file path
+     * @param gem the gem name
+     * @return the dependency to add
+     * @throws IOException thrown if a temporary gem file could not be written
+     */
+    private Dependency createDependencyForGem(Engine engine, String parentName, String fileName, String filePath, String gem) throws IOException {
+        final File gemFile = new File(Settings.getTempDirectory(), gem + "_Gemfile.lock");
+        if (!gemFile.createNewFile()) {
+            throw new IOException("Unable to create temporary gem file");
+        }
         final String displayFileName = String.format("%s%c%s:%s", parentName, File.separatorChar, fileName, gem);
-        FileUtils.write(tempFile, displayFileName); // unique contents to avoid dependency bundling
-        final Dependency dependency = new Dependency(tempFile);
+        FileUtils.write(gemFile, displayFileName, Charset.defaultCharset()); // unique contents to avoid dependency bundling
+        final Dependency dependency = new Dependency(gemFile);
         dependency.getProductEvidence().addEvidence("bundler-audit", "Name", gem, Confidence.HIGHEST);
         dependency.setDisplayFileName(displayFileName);
+        dependency.setFileName(fileName);
+        dependency.setFilePath(filePath);
         engine.getDependencies().add(dependency);
         return dependency;
     }
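Editorial note: the parser above drives a small state machine off the line prefixes defined earlier ("Name: ", "Version: ", "Advisory: ", "Criticality: ", "URL: ", "Description:") in the verbose bundle-audit report. The sketch below replays the same prefix-driven approach over a canned sample; the sample text and advisory identifier are illustrative placeholders, not real audit output, and the loop structure is simplified relative to the analyzer.

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.StringReader;

    public class BundleAuditParseSketch {
        public static void main(String[] args) throws IOException {
            final String sample = "Name: somegem\n"
                    + "Version: 1.0.0\n"
                    + "Advisory: CVE-0000-0000\n"
                    + "Criticality: High\n"
                    + "URL: https://example.invalid/advisory\n"
                    + "Description:\n"
                    + "Example description line.\n";
            try (BufferedReader rdr = new BufferedReader(new StringReader(sample))) {
                String line;
                boolean inDescription = false;
                while ((line = rdr.readLine()) != null) {
                    if (line.startsWith("Name: ")) {
                        inDescription = false;
                        System.out.println("gem = " + line.substring("Name: ".length()));
                    } else if (line.startsWith("Advisory: ")) {
                        System.out.println("advisory = " + line.substring("Advisory: ".length()));
                    } else if (line.startsWith("Criticality: ")) {
                        System.out.println("criticality = " + line.substring("Criticality: ".length()));
                    } else if (line.startsWith("Description:")) {
                        inDescription = true;
                    } else if (inDescription) {
                        System.out.println("description += " + line);
                    }
                }
            }
        }
    }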

RubyBundlerAnalyzer.java (new file)
@@ -0,0 +1,141 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2016 IBM Corporation. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.io.FilenameFilter;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
/**
* This analyzer accepts the fully resolved .gemspec created by the Ruby bundler
* (http://bundler.io) for better evidence results. It also tries to resolve the
* dependency packagePath to where the gem is actually installed. Then during
* the {@link org.owasp.dependencycheck.analyzer.AnalysisPhase#PRE_FINDING_ANALYSIS}
* {@link DependencyMergingAnalyzer} will merge two .gemspec dependencies
* together if <code>Dependency.getPackagePath()</code> are the same.
*
* Ruby bundler creates new .gemspec files under a folder called
* "specifications" at deploy time, in addition to the original .gemspec files
* from source. The bundler generated .gemspec files always contain fully
* resolved attributes thus provide more accurate evidences, whereas the
* original .gemspec from source often contain variables for attributes that
* can't be used for evidences.
*
* Note this analyzer share the same
* {@link org.owasp.dependencycheck.utils.Settings.KEYS#ANALYZER_RUBY_GEMSPEC_ENABLED}
* as {@link RubyGemspecAnalyzer}, so it will enabled/disabled with
* {@link RubyGemspecAnalyzer}.
*
* @author Bianca Jiang (https://twitter.com/biancajiang)
*/
@Experimental
public class RubyBundlerAnalyzer extends RubyGemspecAnalyzer {
/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "Ruby Bundler Analyzer";
/**
* Folder name that contains .gemspec files created by "bundle install"
*/
private static final String SPECIFICATIONS = "specifications";
/**
* Folder name that contains the gems by "bundle install"
*/
private static final String GEMS = "gems";
/**
* Returns the name of the analyzer.
*
* @return the name of the analyzer.
*/
@Override
public String getName() {
return ANALYZER_NAME;
}
/**
* Only accept *.gemspec files generated by "bundle install --deployment"
* under "specifications" folder.
*
* @param pathname the path name to test
* @return true if the analyzer can process the given file; otherwise false
*/
@Override
public boolean accept(File pathname) {
boolean accepted = super.accept(pathname);
if (accepted) {
final File parentDir = pathname.getParentFile();
accepted = parentDir != null && parentDir.getName().equals(SPECIFICATIONS);
}
return accepted;
}
@Override
protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
super.analyzeDependency(dependency, engine);
//find the corresponding gem folder for this .gemspec stub by "bundle install --deployment"
final File gemspecFile = dependency.getActualFile();
final String gemFileName = gemspecFile.getName();
final String gemName = gemFileName.substring(0, gemFileName.lastIndexOf(".gemspec"));
final File specificationsDir = gemspecFile.getParentFile();
if (specificationsDir != null && specificationsDir.getName().equals(SPECIFICATIONS) && specificationsDir.exists()) {
final File parentDir = specificationsDir.getParentFile();
if (parentDir != null && parentDir.exists()) {
final File gemsDir = new File(parentDir, GEMS);
if (gemsDir.exists()) {
final File[] matchingFiles = gemsDir.listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.equals(gemName);
}
});
if (matchingFiles != null && matchingFiles.length > 0) {
final String gemPath = matchingFiles[0].getAbsolutePath();
if (dependency.getActualFilePath().equals(dependency.getFilePath())) {
if (gemPath != null) {
dependency.setPackagePath(gemPath);
}
} else {
//.gemspec's actualFilePath and filePath are different when it's from a compressed file
//in which case actualFilePath is the temp directory used by decompression.
//packagePath should use the filePath of the identified gem file in "gems" folder
final File gemspecStub = new File(dependency.getFilePath());
final File specDir = gemspecStub.getParentFile();
if (specDir != null && specDir.getName().equals(SPECIFICATIONS)) {
final File gemsDir2 = new File(specDir.getParentFile(), GEMS);
final File packageDir = new File(gemsDir2, gemName);
dependency.setPackagePath(packageDir.getAbsolutePath());
}
}
}
}
}
}
}
}
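Editorial note: the new analyzer assumes the layout produced by "bundle install --deployment", where a fully resolved .gemspec stub under a "specifications" folder corresponds to a sibling folder of the same name under "gems". The tiny sketch below only illustrates that path resolution; the directory prefix and the gem name (rails-4.2.0) are hypothetical examples, not values taken from the code above.

    import java.io.File;

    public class GemPathSketch {
        public static void main(String[] args) {
            // Hypothetical deployment layout:
            //   vendor/bundle/ruby/2.3.0/specifications/rails-4.2.0.gemspec
            //   vendor/bundle/ruby/2.3.0/gems/rails-4.2.0/
            final File gemspec = new File(
                    "vendor/bundle/ruby/2.3.0/specifications/rails-4.2.0.gemspec");
            final String name = gemspec.getName();
            final String gemName = name.substring(0, name.lastIndexOf(".gemspec"));
            final File gemsDir = new File(gemspec.getParentFile().getParentFile(), "gems");
            final File packagePath = new File(gemsDir, gemName);
            // On a Unix-style path this prints: vendor/bundle/ruby/2.3.0/gems/rails-4.2.0
            System.out.println(packagePath);
        }
    }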

RubyGemspecAnalyzer.java
@@ -17,28 +17,41 @@
*/ */
package org.owasp.dependencycheck.analyzer; package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.io.FileFilter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.Engine; import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence; import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection; import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.FileFilterBuilder; import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import java.io.FileFilter; import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/** /**
* Used to analyze Ruby Gem specifications and collect information that can be used to determine the associated CPE. * Used to analyze Ruby Gem specifications and collect information that can be
* Regular expressions are used to parse the well-defined Ruby syntax that forms the specification. * used to determine the associated CPE. Regular expressions are used to parse
* the well-defined Ruby syntax that forms the specification.
* *
* @author Dale Visser <dvisser@ida.org> * @author Dale Visser
*/ */
@Experimental
public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer { public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(RubyGemspecAnalyzer.class);
/** /**
* The name of the analyzer. * The name of the analyzer.
*/ */
@@ -49,15 +62,26 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
*/ */
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION; private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
/**
* The gemspec file extension.
*/
private static final String GEMSPEC = "gemspec"; private static final String GEMSPEC = "gemspec";
private static final FileFilter FILTER = /**
FileFilterBuilder.newInstance().addExtensions(GEMSPEC).addFilenames("Rakefile").build(); * The file filter containing the list of file extensions that can be
* analyzed.
private static final String EMAIL = "email"; */
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(GEMSPEC).build();
//TODO: support Rakefile
//= FileFilterBuilder.newInstance().addExtensions(GEMSPEC).addFilenames("Rakefile").build();
/** /**
* @return a filter that accepts files named Rakefile or matching the glob pattern, *.gemspec * The name of the version file.
*/
private static final String VERSION_FILE_NAME = "VERSION";
/**
* @return a filter that accepts files matching the glob pattern, *.gemspec
*/ */
@Override @Override
protected FileFilter getFileFilter() { protected FileFilter getFileFilter() {
@@ -65,7 +89,7 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
}

@Override
protected void initializeFileTypeAnalyzer() throws InitializationException {
    // NO-OP
}
@@ -90,7 +114,8 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
}

/**
 * Returns the key used in the properties file to reference the analyzer's
 * enabled property.
 *
 * @return the analyzer's enabled property setting key
 */
@@ -102,15 +127,14 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
/**
 * The capture group #1 is the block variable.
 */
private static final Pattern GEMSPEC_BLOCK_INIT = Pattern.compile("Gem::Specification\\.new\\s+?do\\s+?\\|(.+?)\\|");

@Override
protected void analyzeDependency(Dependency dependency, Engine engine)
        throws AnalysisException {
    String contents;
    try {
        contents = FileUtils.readFileToString(dependency.getActualFile(), Charset.defaultCharset());
    } catch (IOException e) {
        throw new AnalysisException(
                "Problem occurred while reading dependency file.", e);
@@ -119,43 +143,107 @@ public class RubyGemspecAnalyzer extends AbstractFileTypeAnalyzer {
    if (matcher.find()) {
        contents = contents.substring(matcher.end());
        final String blockVariable = matcher.group(1);

        final EvidenceCollection vendor = dependency.getVendorEvidence();
        final EvidenceCollection product = dependency.getProductEvidence();
        final String name = addStringEvidence(product, contents, blockVariable, "name", "name", Confidence.HIGHEST);
        if (!name.isEmpty()) {
            vendor.addEvidence(GEMSPEC, "name_project", name + "_project", Confidence.LOW);
        }
        addStringEvidence(product, contents, blockVariable, "summary", "summary", Confidence.LOW);

        addStringEvidence(vendor, contents, blockVariable, "author", "authors?", Confidence.HIGHEST);
        addStringEvidence(vendor, contents, blockVariable, "email", "emails?", Confidence.MEDIUM);
        addStringEvidence(vendor, contents, blockVariable, "homepage", "homepage", Confidence.HIGHEST);
        addStringEvidence(vendor, contents, blockVariable, "license", "licen[cs]es?", Confidence.HIGHEST);

        final String value = addStringEvidence(dependency.getVersionEvidence(), contents,
                blockVariable, "version", "version", Confidence.HIGHEST);
        if (value.length() < 1) {
            addEvidenceFromVersionFile(dependency.getActualFile(), dependency.getVersionEvidence());
        }
    }
    setPackagePath(dependency);
}
/**
* Adds the specified evidence to the given evidence collection.
*
* @param evidences the collection to add the evidence to
* @param contents the evidence contents
* @param blockVariable the variable
* @param field the field
* @param fieldPattern the field pattern
* @param confidence the confidence of the evidence
* @return the evidence string value added
*/
private String addStringEvidence(EvidenceCollection evidences, String contents,
        String blockVariable, String field, String fieldPattern, Confidence confidence) {
    String value = "";
    //capture array value between [ ]
    final Matcher arrayMatcher = Pattern.compile(
            String.format("\\s*?%s\\.%s\\s*?=\\s*?\\[(.*?)\\]", blockVariable, fieldPattern), Pattern.CASE_INSENSITIVE).matcher(contents);
    if (arrayMatcher.find()) {
        final String arrayValue = arrayMatcher.group(1);
        value = arrayValue.replaceAll("['\"]", "").trim(); //strip quotes
    } else { //capture single value between quotes
        final Matcher matcher = Pattern.compile(
                String.format("\\s*?%s\\.%s\\s*?=\\s*?(['\"])(.*?)\\1", blockVariable, fieldPattern), Pattern.CASE_INSENSITIVE).matcher(contents);
        if (matcher.find()) {
            value = matcher.group(2);
        }
    }
    if (value.length() > 0) {
        evidences.addEvidence(GEMSPEC, field, value, confidence);
    }
    return value;
}
/**
* Adds evidence from the version file.
*
* @param dependencyFile the dependency being analyzed
* @param versionEvidences the version evidence
*/
private void addEvidenceFromVersionFile(File dependencyFile, EvidenceCollection versionEvidences) {
final File parentDir = dependencyFile.getParentFile();
if (parentDir != null) {
final File[] matchingFiles = parentDir.listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.contains(VERSION_FILE_NAME);
}
});
if (matchingFiles == null) {
return;
}
for (File f : matchingFiles) {
try {
final List<String> lines = FileUtils.readLines(f, Charset.defaultCharset());
if (lines.size() == 1) { //TODO other checking?
final String value = lines.get(0).trim();
versionEvidences.addEvidence(GEMSPEC, "version", value, Confidence.HIGH);
}
} catch (IOException e) {
LOGGER.debug("Error reading gemspec", e);
}
}
}
}
/**
* Sets the package path on the dependency.
*
* @param dep the dependency to alter
*/
private void setPackagePath(Dependency dep) {
final File file = new File(dep.getFilePath());
final String parent = file.getParent();
if (parent != null) {
dep.setPackagePath(parent);
}
}
}
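To make the regex-based extraction above concrete, here is a minimal, self-contained sketch that applies the same block-variable pattern and the single-value field pattern to a tiny, invented gemspec string; the class name and sample contents are hypothetical and exist only for the demo.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class GemspecRegexDemo {
    public static void main(String[] args) {
        // Same block-variable pattern used by the analyzer above.
        final Pattern blockInit = Pattern.compile("Gem::Specification\\.new\\s+?do\\s+?\\|(.+?)\\|");
        final String contents = "Gem::Specification.new do |s|\n"
                + "  s.name    = 'rake'\n"
                + "  s.version = '12.0.0'\n"
                + "end\n";
        final Matcher matcher = blockInit.matcher(contents);
        if (matcher.find()) {
            final String blockVariable = matcher.group(1); // "s"
            final String rest = contents.substring(matcher.end());
            // Field pattern analogous to addStringEvidence's single-value branch.
            final Matcher name = Pattern.compile(
                    String.format("\\s*?%s\\.%s\\s*?=\\s*?(['\"])(.*?)\\1", blockVariable, "name"),
                    Pattern.CASE_INSENSITIVE).matcher(rest);
            if (name.find()) {
                System.out.println("name evidence: " + name.group(2)); // prints "rake"
            }
        }
    }
}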

View File

@@ -0,0 +1,192 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2016 IBM Corporation. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
/**
* This analyzer is used to analyze the SWIFT Package Manager
* (https://swift.org/package-manager/). It collects information about a package
* from Package.swift files.
*
* @author Bianca Jiang (https://twitter.com/biancajiang)
*/
@Experimental
public class SwiftPackageManagerAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "SWIFT Package Manager Analyzer";
/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
/**
* The file name to scan.
*/
public static final String SPM_FILE_NAME = "Package.swift";
/**
* Filter that detects files named "Package.swift".
*/
private static final FileFilter SPM_FILE_FILTER = FileFilterBuilder.newInstance().addFilenames(SPM_FILE_NAME).build();
/**
* The capture group #1 is the block variable. e.g. "import
* PackageDescription let package = Package( name: "Gloss" )"
*/
private static final Pattern SPM_BLOCK_PATTERN = Pattern.compile("let[^=]+=\\s*Package\\s*\\(\\s*([^)]*)\\s*\\)", Pattern.DOTALL);
/**
* Returns the FileFilter
*
* @return the FileFilter
*/
@Override
protected FileFilter getFileFilter() {
return SPM_FILE_FILTER;
}
@Override
protected void initializeFileTypeAnalyzer() {
// NO-OP
}
/**
* Returns the name of the analyzer.
*
* @return the name of the analyzer.
*/
@Override
public String getName() {
return ANALYZER_NAME;
}
/**
* Returns the phase that the analyzer is intended to run in.
*
* @return the phase that the analyzer is intended to run in.
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}
/**
* Returns the key used in the properties file to reference the analyzer's
* enabled property.
*
* @return the analyzer's enabled property setting key
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED;
}
@Override
protected void analyzeDependency(Dependency dependency, Engine engine)
throws AnalysisException {
String contents;
try {
contents = FileUtils.readFileToString(dependency.getActualFile(), Charset.defaultCharset());
} catch (IOException e) {
throw new AnalysisException(
"Problem occurred while reading dependency file.", e);
}
final Matcher matcher = SPM_BLOCK_PATTERN.matcher(contents);
if (matcher.find()) {
final String packageDescription = matcher.group(1);
if (packageDescription.isEmpty()) {
return;
}
final EvidenceCollection product = dependency.getProductEvidence();
final EvidenceCollection vendor = dependency.getVendorEvidence();
//SPM is currently under development for SWIFT 3. Its current metadata includes package name and dependencies.
//Future interesting metadata: version, license, homepage, author, summary, etc.
final String name = addStringEvidence(product, packageDescription, "name", "name", Confidence.HIGHEST);
if (name != null && !name.isEmpty()) {
vendor.addEvidence(SPM_FILE_NAME, "name_project", name, Confidence.HIGHEST);
}
}
setPackagePath(dependency);
}
/**
* Extracts evidence from the package description and adds it to the given
* evidence collection.
*
* @param evidences the evidence collection to update
* @param packageDescription the text to extract evidence from
* @param field the name of the field being searched for
* @param fieldPattern the field pattern within the contents to search for
* @param confidence the confidence level of the evidence if found
* @return the string that was added as evidence
*/
private String addStringEvidence(EvidenceCollection evidences,
String packageDescription, String field, String fieldPattern, Confidence confidence) {
String value = "";
final Matcher matcher = Pattern.compile(
String.format("%s *:\\s*\"([^\"]*)", fieldPattern), Pattern.DOTALL).matcher(packageDescription);
if (matcher.find()) {
value = matcher.group(1);
}
if (value != null) {
value = value.trim();
if (value.length() > 0) {
evidences.addEvidence(SPM_FILE_NAME, field, value, confidence);
}
}
return value;
}
/**
* Sets the package path on the given dependency.
*
* @param dep the dependency to update
*/
private void setPackagePath(Dependency dep) {
final File file = new File(dep.getFilePath());
final String parent = file.getParent();
if (parent != null) {
dep.setPackagePath(parent);
}
}
}
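As an illustration of the SPM_BLOCK_PATTERN and name-field extraction above, the following standalone sketch runs the same two regular expressions against a small, hypothetical Package.swift string (the sample manifest and class name are invented for the demo).

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SwiftPackageRegexDemo {
    public static void main(String[] args) {
        // Same block pattern as SwiftPackageManagerAnalyzer above.
        final Pattern block = Pattern.compile("let[^=]+=\\s*Package\\s*\\(\\s*([^)]*)\\s*\\)", Pattern.DOTALL);
        final String contents = "import PackageDescription\n"
                + "let package = Package(\n"
                + "    name: \"Gloss\"\n"
                + ")\n";
        final Matcher matcher = block.matcher(contents);
        if (matcher.find()) {
            final String packageDescription = matcher.group(1);
            // Field pattern analogous to the analyzer's addStringEvidence for "name".
            final Matcher name = Pattern.compile("name *:\\s*\"([^\"]*)", Pattern.DOTALL)
                    .matcher(packageDescription);
            if (name.find()) {
                System.out.println("name evidence: " + name.group(1)); // prints "Gloss"
            }
        }
    }
}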

View File

@@ -0,0 +1,167 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2017 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import java.util.Iterator;
import java.util.Objects;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This analyzer attempts to filter out erroneous version numbers collected.
* Initially, this will focus on JAR files that contain a POM version number
* that matches the file name - if identified all other version information will
* be removed.
*
* @author Jeremy Long
*/
public class VersionFilterAnalyzer extends AbstractAnalyzer {
//<editor-fold defaultstate="collapsed" desc="Constants">
/**
* Evidence source.
*/
private static final String FILE = "file";
/**
* Evidence source.
*/
private static final String POM = "pom";
/**
* Evidence source.
*/
private static final String NEXUS = "nexus";
/**
* Evidence source.
*/
private static final String CENTRAL = "central";
/**
* Evidence source.
*/
private static final String MANIFEST = "Manifest";
/**
* Evidence name.
*/
private static final String VERSION = "version";
/**
* Evidence name.
*/
private static final String IMPLEMENTATION_VERSION = "Implementation-Version";
/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "Version Filter Analyzer";
/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.POST_INFORMATION_COLLECTION;
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Standard implementation of Analyzer">
/**
* Returns the name of the analyzer.
*
* @return the name of the analyzer.
*/
@Override
public String getName() {
return ANALYZER_NAME;
}
/**
* Returns the phase that the analyzer is intended to run in.
*
* @return the phase that the analyzer is intended to run in.
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}
/**
* Returns the setting key to determine if the analyzer is enabled.
*
* @return the key for the analyzer's enabled property
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_VERSION_FILTER_ENABLED;
}
//</editor-fold>
/**
* The Logger for use throughout the class
*/
private static final Logger LOGGER = LoggerFactory.getLogger(VersionFilterAnalyzer.class);
/**
* The HintAnalyzer uses knowledge about a dependency to add additional
* information to help in identification of identifiers or vulnerabilities.
*
* @param dependency The dependency being analyzed
* @param engine The scanning engine
* @throws AnalysisException is thrown if there is an exception analyzing
* the dependency.
*/
@Override
protected synchronized void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
String fileVersion = null;
String pomVersion = null;
String manifestVersion = null;
for (Evidence e : dependency.getVersionEvidence()) {
if (FILE.equals(e.getSource()) && VERSION.equals(e.getName())) {
fileVersion = e.getValue(Boolean.FALSE);
} else if ((NEXUS.equals(e.getSource()) || CENTRAL.equals(e.getSource())
|| POM.equals(e.getSource())) && VERSION.equals(e.getName())) {
pomVersion = e.getValue(Boolean.FALSE);
} else if (MANIFEST.equals(e.getSource()) && IMPLEMENTATION_VERSION.equals(e.getName())) {
manifestVersion = e.getValue(Boolean.FALSE);
}
}
//ensure we have at least two not null
if (((fileVersion == null ? 0 : 1) + (pomVersion == null ? 0 : 1) + (manifestVersion == null ? 0 : 1)) > 1) {
final DependencyVersion dvFile = new DependencyVersion(fileVersion);
final DependencyVersion dvPom = new DependencyVersion(pomVersion);
final DependencyVersion dvManifest = new DependencyVersion(manifestVersion);
final boolean fileMatch = Objects.equals(dvFile, dvPom) || Objects.equals(dvFile, dvManifest);
final boolean manifestMatch = Objects.equals(dvManifest, dvPom) || Objects.equals(dvManifest, dvFile);
final boolean pomMatch = Objects.equals(dvPom, dvFile) || Objects.equals(dvPom, dvManifest);
if (fileMatch || manifestMatch || pomMatch) {
LOGGER.debug("filtering evidence from {}", dependency.getFileName());
final EvidenceCollection versionEvidence = dependency.getVersionEvidence();
final Iterator<Evidence> itr = versionEvidence.iterator();
while (itr.hasNext()) {
final Evidence e = itr.next();
if (!(pomMatch && VERSION.equals(e.getName())
&& (NEXUS.equals(e.getSource()) || CENTRAL.equals(e.getSource()) || POM.equals(e.getSource())))
&& !(fileMatch && VERSION.equals(e.getName()) && FILE.equals(e.getSource()))
&& !(manifestMatch && MANIFEST.equals(e.getSource()) && IMPLEMENTATION_VERSION.equals(e.getName()))) {
itr.remove();
}
}
}
}
}
}
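A minimal sketch of the agreement test used above, assuming plain strings stand in for the DependencyVersion objects the analyzer actually compares (the values shown are hypothetical).

import java.util.Objects;

public class VersionAgreementDemo {
    public static void main(String[] args) {
        // Hypothetical values standing in for the three evidence sources.
        final String fileVersion = "3.2.1";
        final String pomVersion = "3.2.1";
        final String manifestVersion = null;

        // Same "at least two sources present" guard as the analyzer above.
        final int present = (fileVersion == null ? 0 : 1)
                + (pomVersion == null ? 0 : 1)
                + (manifestVersion == null ? 0 : 1);
        if (present > 1) {
            // The analyzer compares normalized DependencyVersion instances; plain
            // string comparison is used here only to illustrate the agreement test.
            final boolean fileMatch = Objects.equals(fileVersion, pomVersion)
                    || Objects.equals(fileVersion, manifestVersion);
            System.out.println("file/pom agreement: " + fileMatch); // prints true
        }
    }
}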

View File

@@ -20,11 +20,13 @@ package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.xml.suppression.SuppressionRule;

/**
 * The suppression analyzer processes an externally defined XML document that
 * complies with the suppressions.xsd schema. Any identified Vulnerability
 * entries within the dependencies that match will be removed.
 *
 * @author Jeremy Long
 */
@@ -59,10 +61,29 @@ public class VulnerabilitySuppressionAnalyzer extends AbstractSuppressionAnalyze
public AnalysisPhase getAnalysisPhase() {
    return ANALYSIS_PHASE;
}

/**
 * <p>
 * Returns the setting key to determine if the analyzer is enabled.</p>
 *
 * @return the key for the analyzer's enabled property
 */
@Override
protected String getAnalyzerEnabledSettingKey() {
    return Settings.KEYS.ANALYZER_VULNERABILITY_SUPPRESSION_ENABLED;
}
//</editor-fold>

/**
 * Analyzes a dependency's vulnerabilities against the configured CVE
 * suppressions.
 *
 * @param dependency the dependency being analyzed
 * @param engine a reference to the engine orchestrating the analysis
 * @throws AnalysisException thrown if there is an error during analysis
 */
@Override
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
    if (getRules() == null || getRules().size() <= 0) {
        return;

View File

@@ -24,17 +24,20 @@ import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.owasp.dependencycheck.data.nexus.MavenArtifact;
import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.URLConnectionFactory;
import org.owasp.dependencycheck.utils.XmlUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
 * Class of methods to search Maven Central via Central.
@@ -51,7 +54,7 @@ public class CentralSearch {
/**
 * Whether to use the Proxy when making requests
 */
private final boolean useProxy;
/**
 * Used for logging.
@@ -61,8 +64,8 @@ public class CentralSearch {
/**
 * Creates a CentralSearch for the given repository URL.
 *
 * @param rootURL the URL of the repository on which searches should
 * execute. Only parameters are added to this (so it should end in /select)
 */
public CentralSearch(URL rootURL) {
    this.rootURL = rootURL;
@@ -76,18 +79,20 @@ public class CentralSearch {
}

/**
 * Searches the configured Central URL for the given sha1 hash. If the
 * artifact is found, a <code>MavenArtifact</code> is populated with the
 * GAV.
 *
 * @param sha1 the SHA-1 hash string for which to search
 * @return the populated Maven GAV.
 * @throws IOException if it's unable to connect to the specified repository
 * or if the specified artifact is not found.
 */
public List<MavenArtifact> searchSha1(String sha1) throws IOException {
    if (null == sha1 || !sha1.matches("^[0-9A-Fa-f]{40}$")) {
        throw new IllegalArgumentException("Invalid SHA1 format");
    }
    List<MavenArtifact> result = null;
    final URL url = new URL(rootURL + String.format("?q=1:\"%s\"&wt=xml", sha1));

    LOGGER.debug("Searching Central url {}", url);
@@ -108,15 +113,14 @@ public class CentralSearch {
if (conn.getResponseCode() == 200) {
    boolean missing = false;
    try {
        final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
        final Document doc = builder.parse(conn.getInputStream());
        final XPath xpath = XPathFactory.newInstance().newXPath();
        final String numFound = xpath.evaluate("/response/result/@numFound", doc);
        if ("0".equals(numFound)) {
            missing = true;
        } else {
            result = new ArrayList<>();
            final NodeList docs = (NodeList) xpath.evaluate("/response/result/doc", doc, XPathConstants.NODESET);
            for (int i = 0; i < docs.getLength(); i++) {
                final String g = xpath.evaluate("./str[@name='g']", docs.item(i));
                LOGGER.trace("GroupId: {}", g);
@@ -124,11 +128,11 @@ public class CentralSearch {
                final String a = xpath.evaluate("./str[@name='a']", docs.item(i));
                LOGGER.trace("ArtifactId: {}", a);
                final String v = xpath.evaluate("./str[@name='v']", docs.item(i));
                NodeList attributes = (NodeList) xpath.evaluate("./arr[@name='ec']/str", docs.item(i), XPathConstants.NODESET);
                boolean pomAvailable = false;
                boolean jarAvailable = false;
                for (int x = 0; x < attributes.getLength(); x++) {
                    final String tmp = xpath.evaluate(".", attributes.item(x));
                    if (".pom".equals(tmp)) {
                        pomAvailable = true;
                    } else if (".jar".equals(tmp)) {
@@ -136,24 +140,20 @@ public class CentralSearch {
                    }
                }
                attributes = (NodeList) xpath.evaluate("./arr[@name='tags']/str", docs.item(i), XPathConstants.NODESET);
                boolean useHTTPS = false;
                for (int x = 0; x < attributes.getLength(); x++) {
                    final String tmp = xpath.evaluate(".", attributes.item(x));
                    if ("https".equals(tmp)) {
                        useHTTPS = true;
                    }
                }
                LOGGER.trace("Version: {}", v);
                result.add(new MavenArtifact(g, a, v, jarAvailable, pomAvailable, useHTTPS));
            }
        }
    } catch (ParserConfigurationException | IOException | SAXException | XPathExpressionException e) {
        // Anything else is jacked up XML stuff that we really can't recover from well
        throw new IOException(e.getMessage(), e);
    }
@@ -162,10 +162,9 @@ public class CentralSearch {
    }
} else {
    LOGGER.debug("Could not connect to Central received response code: {} {}",
            conn.getResponseCode(), conn.getResponseMessage());
    throw new IOException("Could not connect to Central");
}
return result;
}
}
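The change above replaces direct DocumentBuilderFactory usage with XmlUtils.buildSecureDocumentBuilder(). The sketch below is an assumption about what such a hardened builder typically configures (it is not the helper's actual source): DTDs and external entities are disabled to prevent XXE when parsing responses.

import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

public final class SecureDocumentBuilderSketch {

    private SecureDocumentBuilderSketch() {
    }

    public static DocumentBuilder buildSecureDocumentBuilder() throws ParserConfigurationException {
        final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        // Disallow DTDs and external entities so untrusted XML cannot trigger XXE.
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        factory.setXIncludeAware(false);
        factory.setExpandEntityReferences(false);
        // JAXP 1.5+ properties; older parsers may reject these attributes.
        factory.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, "");
        factory.setAttribute(XMLConstants.ACCESS_EXTERNAL_SCHEMA, "");
        return factory.newDocumentBuilder();
    }
}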

View File

@@ -1,6 +1,6 @@
/**
 *
 * Contains classes related to searching Maven Central.<br><br>
 *
 * These are used to abstract Maven Central searching away from OWASP Dependency Check so they can be reused elsewhere.
 */

View File

@@ -18,7 +18,7 @@
package org.owasp.dependencycheck.data.composer;

/**
 * Represents a dependency (GAV, right now) from a Composer dependency.
 *
 * @author colezlaw
 */

View File

@@ -42,11 +42,6 @@ public class ComposerLockParser {
 */
private final JsonReader jsonReader;
/**
 * The List of ComposerDependencies found
 */
@@ -58,15 +53,14 @@ public class ComposerLockParser {
private static final Logger LOGGER = LoggerFactory.getLogger(ComposerLockParser.class);

/**
 * Creates a ComposerLockParser from a JsonReader and an InputStream.
 *
 * @param inputStream the InputStream to parse
 */
public ComposerLockParser(InputStream inputStream) {
    LOGGER.info("Creating a ComposerLockParser");
    this.jsonReader = Json.createReader(inputStream);
    this.composerDependencies = new ArrayList<>();
}
/**
@@ -87,7 +81,7 @@ public class ComposerLockParser {
final String group = groupName.substring(0, groupName.indexOf('/'));
final String project = groupName.substring(groupName.indexOf('/') + 1);
String version = pkg.getString("version");
// Some version numbers begin with v - which doesn't end up matching CPE's
if (version.startsWith("v")) {
    version = version.substring(1);
}
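For clarity, a tiny sketch of the group/project split and the "v" prefix normalization shown above, using a hypothetical composer.lock package entry.

public class ComposerVersionDemo {
    public static void main(String[] args) {
        // Hypothetical package entry such as "symfony/console" at "v3.2.4".
        final String groupName = "symfony/console";
        final String group = groupName.substring(0, groupName.indexOf('/'));
        final String project = groupName.substring(groupName.indexOf('/') + 1);
        String version = "v3.2.4";
        // Strip the leading "v" so the value lines up with CPE version strings.
        if (version.startsWith("v")) {
            version = version.substring(1);
        }
        System.out.println(group + ":" + project + ":" + version); // symfony:console:3.2.4
    }
}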

View File

@@ -38,7 +38,6 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.RAMDirectory;
import org.owasp.dependencycheck.data.lucene.LuceneUtils;
import org.owasp.dependencycheck.data.lucene.SearchFieldAnalyzer;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
@@ -48,8 +47,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An in memory lucene index that contains the vendor/product combinations from
 * the CPE (application) identifiers within the NVD CVE data.
 *
 * @author Jeremy Long
 */
@@ -63,21 +62,6 @@ public final class CpeMemoryIndex {
 * singleton instance.
 */
private static final CpeMemoryIndex INSTANCE = new CpeMemoryIndex();
/**
 * The in memory Lucene index.
 */
@@ -101,11 +85,30 @@ public final class CpeMemoryIndex {
/**
 * The search field analyzer for the product field.
 */
private SearchFieldAnalyzer productFieldAnalyzer;
/**
 * The search field analyzer for the vendor field.
 */
private SearchFieldAnalyzer vendorFieldAnalyzer;
/**
 * A flag indicating whether or not the index is open.
 */
private boolean openState = false;

/**
 * private constructor for singleton.
 */
private CpeMemoryIndex() {
}

/**
 * Gets the singleton instance of the CpeMemoryIndex.
 *
 * @return the instance of the CpeMemoryIndex
 */
public static CpeMemoryIndex getInstance() {
    return INSTANCE;
}

/**
 * Creates and loads data into an in memory index.
@@ -130,10 +133,6 @@ public final class CpeMemoryIndex {
        }
    }
}

/**
 * returns whether or not the index is open.
@@ -144,31 +143,20 @@ public final class CpeMemoryIndex {
    return openState;
}
/**
 * Creates an Analyzer for searching the CPE Index.
 *
 * @return the CPE Analyzer.
 */
private Analyzer createSearchingAnalyzer() {
    final Map<String, Analyzer> fieldAnalyzers = new HashMap<>();
    fieldAnalyzers.put(Fields.DOCUMENT_KEY, new KeywordAnalyzer());
    productFieldAnalyzer = new SearchFieldAnalyzer(LuceneUtils.CURRENT_VERSION);
    vendorFieldAnalyzer = new SearchFieldAnalyzer(LuceneUtils.CURRENT_VERSION);
    fieldAnalyzers.put(Fields.PRODUCT, productFieldAnalyzer);
    fieldAnalyzers.put(Fields.VENDOR, vendorFieldAnalyzer);
    return new PerFieldAnalyzerWrapper(new KeywordAnalyzer(), fieldAnalyzers);
}

/**
@@ -203,65 +191,47 @@ public final class CpeMemoryIndex {
 * @throws IndexException thrown if there is an issue creating the index
 */
private void buildIndex(CveDB cve) throws IndexException {
    try (Analyzer analyzer = createSearchingAnalyzer();
            IndexWriter indexWriter = new IndexWriter(index, new IndexWriterConfig(LuceneUtils.CURRENT_VERSION, analyzer))) {
        // Tip: reuse the Document and Fields for performance...
        // See "Re-use Document and Field instances" from
        // http://wiki.apache.org/lucene-java/ImproveIndexingSpeed
        final Document doc = new Document();
        final Field v = new TextField(Fields.VENDOR, Fields.VENDOR, Field.Store.YES);
        final Field p = new TextField(Fields.PRODUCT, Fields.PRODUCT, Field.Store.YES);
        doc.add(v);
        doc.add(p);

        final Set<Pair<String, String>> data = cve.getVendorProductList();
        for (Pair<String, String> pair : data) {
            if (pair.getLeft() != null && pair.getRight() != null) {
                v.setStringValue(pair.getLeft());
                p.setStringValue(pair.getRight());
                indexWriter.addDocument(doc);
                resetFieldAnalyzer();
            }
        }
        indexWriter.commit();
        indexWriter.close(true);
    } catch (DatabaseException ex) {
        LOGGER.debug("", ex);
        throw new IndexException("Error reading CPE data", ex);
    } catch (CorruptIndexException ex) {
        throw new IndexException("Unable to close an in-memory index", ex);
    } catch (IOException ex) {
        throw new IndexException("Unable to close an in-memory index", ex);
    }
}
/**
 * Resets the product and vendor field analyzers.
 */
private void resetFieldAnalyzer() {
    if (productFieldAnalyzer != null) {
        productFieldAnalyzer.clear();
    }
    if (vendorFieldAnalyzer != null) {
        vendorFieldAnalyzer.clear();
    }
}
@@ -272,13 +242,15 @@ public final class CpeMemoryIndex {
 * @param maxQueryResults the maximum number of documents to return
 * @return the TopDocs found by the search
 * @throws ParseException thrown when the searchString is invalid
 * @throws IOException is thrown if there is an issue with the underlying
 * Index
 */
public synchronized TopDocs search(String searchString, int maxQueryResults) throws ParseException, IOException {
    if (searchString == null || searchString.trim().isEmpty()) {
        throw new ParseException("Query is null or empty");
    }
    LOGGER.debug(searchString);
    resetFieldAnalyzer();
    final Query query = queryParser.parse(searchString);
    return search(query, maxQueryResults);
}
@@ -292,8 +264,8 @@ public final class CpeMemoryIndex {
 * @throws CorruptIndexException thrown if the Index is corrupt
 * @throws IOException thrown if there is an IOException
 */
public synchronized TopDocs search(Query query, int maxQueryResults) throws CorruptIndexException, IOException {
    resetFieldAnalyzer();
    return indexSearcher.search(query, maxQueryResults);
}

View File

@@ -20,6 +20,7 @@ package org.owasp.dependencycheck.data.cpe;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import org.apache.commons.lang3.StringUtils;
/**
 * A CPE entry containing the name, vendor, product, and version.
@@ -31,7 +32,7 @@ public class IndexEntry implements Serializable {
/**
 * the serial version uid.
 */
private static final long serialVersionUID = 8011924485946326934L;
/**
 * The vendor name.
 */
@@ -143,7 +144,8 @@ public class IndexEntry implements Serializable {
 */
public void parseName(String cpeName) throws UnsupportedEncodingException {
    if (cpeName != null && cpeName.length() > 7) {
        final String cpeNameWithoutPrefix = cpeName.substring(7);
        final String[] data = StringUtils.split(cpeNameWithoutPrefix, ':');
        if (data.length >= 1) {
            vendor = URLDecoder.decode(data[0].replace("+", "%2B"), "UTF-8");
            if (data.length >= 2) {
@@ -172,10 +174,7 @@ public class IndexEntry implements Serializable {
    if ((this.vendor == null) ? (other.vendor != null) : !this.vendor.equals(other.vendor)) {
        return false;
    }
    return !((this.product == null) ? (other.product != null) : !this.product.equals(other.product));
}

/**

View File

@@ -54,12 +54,10 @@ public final class CweDB {
 * @return a HashMap of CWE data
 */
private static Map<String, String> loadData() {
    final String filePath = "data/cwe.hashmap.serialized";
    try (InputStream input = CweDB.class.getClassLoader().getResourceAsStream(filePath);
            ObjectInputStream oin = new ObjectInputStream(input)) {
        @SuppressWarnings("unchecked")
        final Map<String, String> ret = (HashMap<String, String>) oin.readObject();
        return ret;
    } catch (ClassNotFoundException ex) {
@@ -68,14 +66,6 @@ public final class CweDB {
    } catch (IOException ex) {
        LOGGER.warn("Unable to load CWE data due to an IO Error. This should not be an issue.");
        LOGGER.debug("", ex);
    }
    return null;
}

View File

@@ -32,12 +32,12 @@ public class CweHandler extends DefaultHandler {
/**
 * a HashMap containing the CWE data.
 */
private final HashMap<String, String> cwe = new HashMap<>();

/**
 * Returns the HashMap of CWE entries (CWE-ID, Full CWE Name).
 *
 * @return a HashMap of CWE entries &lt;String, String&gt;
 */
public HashMap<String, String> getCwe() {
    return cwe;

View File

@@ -63,7 +63,7 @@ public abstract class AbstractTokenizingFilter extends TokenFilter {
 */
public AbstractTokenizingFilter(TokenStream stream) {
    super(stream);
    tokens = new LinkedList<>();
}

/**

View File

@@ -29,11 +29,15 @@ import org.apache.lucene.util.Version;
/**
 * <p>
 * A Lucene Analyzer that utilizes the WhitespaceTokenizer, WordDelimiterFilter,
 * LowerCaseFilter, and StopFilter. The intended purpose of this Analyzer is to
 * index the CPE fields vendor and product.</p>
 *
 * @author Jeremy Long
 * @deprecated the field analyzer should not be used, instead use the
 * SearchFieldAnalyzer so that the token analyzing filter is used.
 */
@Deprecated
public class FieldAnalyzer extends Analyzer {

    /**

View File

@@ -27,7 +27,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 * <p>
 * Takes a TokenStream and adds additional tokens by concatenating pairs of words.</p>
 * <p>
 * <b>Example:</b> "Spring Framework Core" -&gt; "Spring SpringFramework Framework FrameworkCore Core".</p>
 *
 * @author Jeremy Long
 */
@@ -71,7 +71,7 @@ public final class TokenPairConcatenatingFilter extends TokenFilter {
 */
public TokenPairConcatenatingFilter(TokenStream stream) {
    super(stream);
    words = new LinkedList<>();
}

/**
@@ -156,10 +156,7 @@ public final class TokenPairConcatenatingFilter extends TokenFilter {
    if ((this.previousWord == null) ? (other.previousWord != null) : !this.previousWord.equals(other.previousWord)) {
        return false;
    }
    return !(this.words != other.words && (this.words == null || !this.words.equals(other.words)));
}
}
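The pair-concatenation behavior documented for the filter above can be reproduced with a short, standalone sketch; the simple loop is only an illustration of the expansion, not the filter's actual incrementToken implementation.

import java.util.ArrayList;
import java.util.List;

public class TokenPairDemo {
    public static void main(String[] args) {
        // Reproduces the documented expansion:
        // "Spring Framework Core" -> "Spring SpringFramework Framework FrameworkCore Core"
        final String[] words = "Spring Framework Core".split("\\s+");
        final List<String> tokens = new ArrayList<>();
        for (int i = 0; i < words.length; i++) {
            tokens.add(words[i]);
            if (i + 1 < words.length) {
                tokens.add(words[i] + words[i + 1]);
            }
        }
        System.out.println(String.join(" ", tokens));
    }
}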

View File

@@ -31,15 +31,17 @@ import org.slf4j.LoggerFactory;
 * <p>
 * Takes a TokenStream and splits or adds tokens to correctly index version numbers.</p>
 * <p>
 * <b>Example:</b> "3.0.0.RELEASE" -&gt; "3 3.0 3.0.0 RELEASE 3.0.0.RELEASE".</p>
 *
 * @author Jeremy Long
 */
public final class UrlTokenizingFilter extends AbstractTokenizingFilter {

    /**
     * The logger.
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(UrlTokenizingFilter.class);

    /**
     * Constructs a new UrlTokenizingFilter.
     *
@@ -50,8 +52,8 @@ public final class UrlTokenizingFilter extends AbstractTokenizingFilter {
}

/**
 * Increments the underlying TokenStream and sets CharTermAttributes to construct an expanded set of tokens by concatenating
 * tokens with the previous token.
 *
 * @return whether or not we have hit the end of the TokenStream
 * @throws IOException is thrown when an IOException occurs

View File

@@ -22,15 +22,17 @@ import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.owasp.dependencycheck.utils.URLConnectionFactory;
import org.owasp.dependencycheck.utils.XmlUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
/**
 * Class of methods to search Nexus repositories.
@@ -47,7 +49,7 @@ public class NexusSearch {
/**
 * Whether to use the Proxy when making requests.
 */
private final boolean useProxy;
/**
 * Used for logging.
 */
@@ -56,32 +58,26 @@ public class NexusSearch {
/**
 * Creates a NexusSearch for the given repository URL.
 *
 * @param rootURL the root URL of the repository on which searches should
 * execute. full URL's are calculated relative to this URL, so it should end
 * with a /
 * @param useProxy flag indicating if the proxy settings should be used
 */
public NexusSearch(URL rootURL, boolean useProxy) {
    this.rootURL = rootURL;
    this.useProxy = useProxy;
    LOGGER.debug("Using proxy: {}", useProxy);
}

/**
 * Searches the configured Nexus repository for the given sha1 hash. If the
 * artifact is found, a <code>MavenArtifact</code> is populated with the
 * coordinate information.
 *
 * @param sha1 The SHA-1 hash string for which to search
 * @return the populated Maven coordinates
 * @throws IOException if it's unable to connect to the specified repository
 * or if the specified artifact is not found.
 */
public MavenArtifact searchSha1(String sha1) throws IOException {
    if (null == sha1 || !sha1.matches("^[0-9A-Fa-f]{40}$")) {
@@ -106,57 +102,58 @@ public class NexusSearch {
conn.addRequestProperty("Accept", "application/xml"); conn.addRequestProperty("Accept", "application/xml");
conn.connect(); conn.connect();
if (conn.getResponseCode() == 200) { switch (conn.getResponseCode()) {
try { case 200:
final DocumentBuilder builder = DocumentBuilderFactory try {
.newInstance().newDocumentBuilder(); final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
final Document doc = builder.parse(conn.getInputStream()); final Document doc = builder.parse(conn.getInputStream());
final XPath xpath = XPathFactory.newInstance().newXPath(); final XPath xpath = XPathFactory.newInstance().newXPath();
final String groupId = xpath final String groupId = xpath
.evaluate( .evaluate(
"/org.sonatype.nexus.rest.model.NexusArtifact/groupId", "/org.sonatype.nexus.rest.model.NexusArtifact/groupId",
doc); doc);
final String artifactId = xpath.evaluate( final String artifactId = xpath.evaluate(
"/org.sonatype.nexus.rest.model.NexusArtifact/artifactId", "/org.sonatype.nexus.rest.model.NexusArtifact/artifactId",
doc); doc);
final String version = xpath final String version = xpath
.evaluate( .evaluate(
"/org.sonatype.nexus.rest.model.NexusArtifact/version", "/org.sonatype.nexus.rest.model.NexusArtifact/version",
doc); doc);
final String link = xpath final String link = xpath
.evaluate( .evaluate(
"/org.sonatype.nexus.rest.model.NexusArtifact/artifactLink", "/org.sonatype.nexus.rest.model.NexusArtifact/artifactLink",
doc); doc);
final String pomLink = xpath final String pomLink = xpath
.evaluate( .evaluate(
"/org.sonatype.nexus.rest.model.NexusArtifact/pomLink", "/org.sonatype.nexus.rest.model.NexusArtifact/pomLink",
doc); doc);
final MavenArtifact ma = new MavenArtifact(groupId, artifactId, version); final MavenArtifact ma = new MavenArtifact(groupId, artifactId, version);
if (link != null && !link.isEmpty()) { if (link != null && !link.isEmpty()) {
ma.setArtifactUrl(link); ma.setArtifactUrl(link);
}
if (pomLink != null && !pomLink.isEmpty()) {
ma.setPomUrl(pomLink);
}
return ma;
} catch (ParserConfigurationException | IOException | SAXException | XPathExpressionException e) {
// Anything else is jacked-up XML stuff that we really can't recover
// from well
throw new IOException(e.getMessage(), e);
} }
if (pomLink != null && !pomLink.isEmpty()) { case 404:
ma.setPomUrl(pomLink); throw new FileNotFoundException("Artifact not found in Nexus");
} default:
return ma; LOGGER.debug("Could not connect to Nexus received response code: {} {}",
} catch (Throwable e) { conn.getResponseCode(), conn.getResponseMessage());
// Anything else is jacked-up XML stuff that we really can't recover throw new IOException("Could not connect to Nexus");
// from well
throw new IOException(e.getMessage(), e);
}
} else if (conn.getResponseCode() == 404) {
throw new FileNotFoundException("Artifact not found in Nexus");
} else {
LOGGER.debug("Could not connect to Nexus received response code: {} {}",
conn.getResponseCode(), conn.getResponseMessage());
throw new IOException("Could not connect to Nexus");
} }
} }
/**
 * Do a preflight request to see if the repository is actually working.
 *
 * @return whether the repository is listening and returns the /status URL
 * correctly
 */
public boolean preflightRequest() {
    HttpURLConnection conn;
@@ -169,13 +166,14 @@ public class NexusSearch {
LOGGER.warn("Expected 200 result from Nexus, got {}", conn.getResponseCode()); LOGGER.warn("Expected 200 result from Nexus, got {}", conn.getResponseCode());
return false; return false;
} }
final DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); final DocumentBuilder builder = XmlUtils.buildSecureDocumentBuilder();
final Document doc = builder.parse(conn.getInputStream()); final Document doc = builder.parse(conn.getInputStream());
if (!"status".equals(doc.getDocumentElement().getNodeName())) { if (!"status".equals(doc.getDocumentElement().getNodeName())) {
LOGGER.warn("Expected root node name of status, got {}", doc.getDocumentElement().getNodeName()); LOGGER.warn("Expected root node name of status, got {}", doc.getDocumentElement().getNodeName());
return false; return false;
} }
} catch (Throwable e) { } catch (IOException | ParserConfigurationException | SAXException e) {
return false; return false;
} }
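Both hunks above swap direct DocumentBuilderFactory usage for XmlUtils.buildSecureDocumentBuilder(). That helper is not shown in this diff, so the following is only a sketch of what such a hardened builder typically does; the feature names are the standard JAXP/Xerces ones, not values taken from the project. Disabling DTDs and external entities prevents XML External Entity (XXE) injection when parsing untrusted Nexus responses.

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

public final class SecureXmlSketch {

    private SecureXmlSketch() {
    }

    // Minimal sketch; the real XmlUtils.buildSecureDocumentBuilder() may differ.
    public static DocumentBuilder buildSecureDocumentBuilder() throws ParserConfigurationException {
        final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        // Refuse DOCTYPE declarations outright; this is the primary XXE defense.
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        // Belt and braces: no external general or parameter entities, no XInclude.
        factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        factory.setXIncludeAware(false);
        factory.setExpandEntityReferences(false);
        return factory.newDocumentBuilder();
    }
}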


@@ -1,5 +1,5 @@
 /**
- * Contains classes related to searching a Nexus repository.<br/><br/>
+ * Contains classes related to searching a Nexus repository.<br><br>
  *
  * These are used to abstract Nexus searching away from OWASP Dependency Check so they can be reused elsewhere.
  */


@@ -53,12 +53,6 @@ public class NugetPackage {
      */
     private String licenseUrl;
-    /**
-     * Creates an empty NugetPackage.
-     */
-    public NugetPackage() {
-    }
     /**
      * Sets the id.
      * @param id the id


@@ -17,13 +17,18 @@
  */
 package org.owasp.dependencycheck.data.nuget;
+import java.io.IOException;
 import java.io.InputStream;
-import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.xpath.XPath;
 import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpressionException;
 import javax.xml.xpath.XPathFactory;
+import org.owasp.dependencycheck.utils.XmlUtils;
 import org.w3c.dom.Document;
 import org.w3c.dom.Node;
+import org.xml.sax.SAXException;
 /**
  * Parse a Nuspec file using XPath.
@@ -36,7 +41,8 @@ public class XPathNuspecParser implements NuspecParser {
      * Gets the string value of a node or null if it's not present
      *
      * @param n the node to test
-     * @return the string content of the node, or null if the node itself is null
+     * @return the string content of the node, or null if the node itself is
+     * null
      */
     private String getOrNull(Node n) {
         if (n != null) {
@@ -56,7 +62,9 @@ public class XPathNuspecParser implements NuspecParser {
     @Override
     public NugetPackage parse(InputStream stream) throws NuspecParseException {
         try {
-            final Document d = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(stream);
+            final DocumentBuilder db = XmlUtils.buildSecureDocumentBuilder();
+            final Document d = db.parse(stream);
             final XPath xpath = XPathFactory.newInstance().newXPath();
             final NugetPackage nuspec = new NugetPackage();
@@ -74,7 +82,7 @@ public class XPathNuspecParser implements NuspecParser {
             nuspec.setLicenseUrl(getOrNull((Node) xpath.evaluate("/package/metadata/licenseUrl", d, XPathConstants.NODE)));
             nuspec.setTitle(getOrNull((Node) xpath.evaluate("/package/metadata/title", d, XPathConstants.NODE)));
             return nuspec;
-        } catch (Throwable e) {
+        } catch (ParserConfigurationException | SAXException | IOException | XPathExpressionException | NuspecParseException e) {
             throw new NuspecParseException("Unable to parse nuspec", e);
         }
     }
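For orientation, a hypothetical caller of the parser changed above might look like the sketch below. The file path and the accessor names on NugetPackage are assumptions; only the parse(InputStream) entry point and NuspecParseException are taken from the diff.

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.owasp.dependencycheck.data.nuget.NugetPackage;
import org.owasp.dependencycheck.data.nuget.NuspecParseException;
import org.owasp.dependencycheck.data.nuget.XPathNuspecParser;

public class NuspecParserSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical path to a .nuspec file extracted from a NuGet package.
        try (InputStream in = Files.newInputStream(Paths.get("example.nuspec"))) {
            final NugetPackage pkg = new XPathNuspecParser().parse(in);
            // Accessor names assumed to mirror the setters used by the parser.
            System.out.println(pkg.getId() + " " + pkg.getVersion());
        } catch (NuspecParseException ex) {
            // Thrown for malformed XML or a missing/invalid metadata section.
            System.err.println("Invalid nuspec: " + ex.getMessage());
        }
    }
}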


@@ -1,5 +1,5 @@
 /**
- * Contains classes related to parsing Nuget related files<br/><br/>
+ * Contains classes related to parsing Nuget related files<br><br>
  * These are used to abstract away Nuget-related handling from Dependency Check so they can be used elsewhere.
  */
 package org.owasp.dependencycheck.data.nuget;


@@ -20,7 +20,7 @@ package org.owasp.dependencycheck.data.nvdcve;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
-import java.sql.CallableStatement;
+import java.sql.PreparedStatement;
 import java.sql.Connection;
 import java.sql.Driver;
 import java.sql.DriverManager;
@@ -36,8 +36,10 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
/** /**
* Loads the configured database driver and returns the database connection. If the embedded H2 database is used obtaining a * Loads the configured database driver and returns the database connection. If
* connection will ensure the database file exists and that the appropriate table structure has been created. * the embedded H2 database is used obtaining a connection will ensure the
* database file exists and that the appropriate table structure has been
* created.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@@ -87,12 +89,13 @@ public final class ConnectionFactory {
     }
     /**
-     * Initializes the connection factory. Ensuring that the appropriate drivers are loaded and that a connection can be made
-     * successfully.
+     * Initializes the connection factory. Ensuring that the appropriate drivers
+     * are loaded and that a connection can be made successfully.
      *
-     * @throws DatabaseException thrown if we are unable to connect to the database
+     * @throws DatabaseException thrown if we are unable to connect to the
+     * database
      */
-    public static synchronized void initialize() throws DatabaseException {
+    public static void initialize() throws DatabaseException {
         //this only needs to be called once.
         if (connectionString != null) {
             return;
@@ -188,11 +191,12 @@ public final class ConnectionFactory {
     }
     /**
-     * Cleans up resources and unloads any registered database drivers. This needs to be called to ensure the driver is
-     * unregistered prior to the finalize method being called as during shutdown the class loader used to load the driver may be
-     * unloaded prior to the driver being de-registered.
+     * Cleans up resources and unloads any registered database drivers. This
+     * needs to be called to ensure the driver is unregistered prior to the
+     * finalize method being called as during shutdown the class loader used to
+     * load the driver may be unloaded prior to the driver being de-registered.
      */
-    public static synchronized void cleanup() {
+    public static void cleanup() {
         if (driver != null) {
             try {
                 DriverManager.deregisterDriver(driver);
@@ -210,10 +214,12 @@ public final class ConnectionFactory {
} }
/** /**
* Constructs a new database connection object per the database configuration. * Constructs a new database connection object per the database
* configuration.
* *
* @return a database connection object * @return a database connection object
* @throws DatabaseException thrown if there is an exception loading the database connection * @throws DatabaseException thrown if there is an exception loading the
* database connection
*/ */
public static Connection getConnection() throws DatabaseException { public static Connection getConnection() throws DatabaseException {
initialize(); initialize();
@@ -228,12 +234,14 @@ public final class ConnectionFactory {
     }
     /**
-     * Determines if the H2 database file exists. If it does not exist then the data structure will need to be created.
+     * Determines if the H2 database file exists. If it does not exist then the
+     * data structure will need to be created.
      *
      * @return true if the H2 database file does not exist; otherwise false
-     * @throws IOException thrown if the data directory does not exist and cannot be created
+     * @throws IOException thrown if the data directory does not exist and
+     * cannot be created
      */
-    private static boolean h2DataFileExists() throws IOException {
+    public static boolean h2DataFileExists() throws IOException {
         final File dir = Settings.getDataDirectory();
         final String fileName = Settings.getString(Settings.KEYS.DB_FILE_NAME);
         final File file = new File(dir, fileName);
@@ -241,7 +249,26 @@ public final class ConnectionFactory {
     }
     /**
-     * Creates the database structure (tables and indexes) to store the CVE data.
+     * Determines if the connection string is for an H2 database.
+     *
+     * @return true if the connection string is for an H2 database
+     */
+    public static boolean isH2Connection() {
+        String connStr;
+        try {
+            connStr = Settings.getConnectionString(
+                    Settings.KEYS.DB_CONNECTION_STRING,
+                    Settings.KEYS.DB_FILE_NAME);
+        } catch (IOException ex) {
+            LOGGER.debug("Unable to get connection string", ex);
+            return false;
+        }
+        return connStr.startsWith("jdbc:h2:file:");
+    }
+    /**
+     * Creates the database structure (tables and indexes) to store the CVE
+     * data.
      *
      * @param conn the database connection
      * @throws DatabaseException thrown if there is a Database Exception
@@ -271,15 +298,21 @@ public final class ConnectionFactory {
     }
     /**
-     * Updates the database schema by loading the upgrade script for the version specified. The intended use is that if the
-     * current schema version is 2.9 then we would call updateSchema(conn, "2.9"). This would load the upgrade_2.9.sql file and
-     * execute it against the database. The upgrade script must update the 'version' in the properties table.
+     * Updates the database schema by loading the upgrade script for the version
+     * specified. The intended use is that if the current schema version is 2.9
+     * then we would call updateSchema(conn, "2.9"). This would load the
+     * upgrade_2.9.sql file and execute it against the database. The upgrade
+     * script must update the 'version' in the properties table.
      *
      * @param conn the database connection object
-     * @param schema the current schema version that is being upgraded
-     * @throws DatabaseException thrown if there is an exception upgrading the database schema
+     * @param appExpectedVersion the schema version that the application expects
+     * @param currentDbVersion the current schema version of the database
+     * @throws DatabaseException thrown if there is an exception upgrading the
+     * database schema
      */
-    private static void updateSchema(Connection conn, String schema) throws DatabaseException {
+    private static void updateSchema(Connection conn, DependencyVersion appExpectedVersion, DependencyVersion currentDbVersion)
+            throws DatabaseException {
         final String databaseProductName;
         try {
             databaseProductName = conn.getMetaData().getDatabaseProductName();
@@ -291,7 +324,7 @@ public final class ConnectionFactory {
         InputStream is = null;
         String updateFile = null;
         try {
-            updateFile = String.format(DB_STRUCTURE_UPDATE_RESOURCE, schema);
+            updateFile = String.format(DB_STRUCTURE_UPDATE_RESOURCE, currentDbVersion.toString());
             is = ConnectionFactory.class.getClassLoader().getResourceAsStream(updateFile);
             if (is == null) {
                 throw new DatabaseException(String.format("Unable to load update file '%s'", updateFile));
@@ -303,7 +336,8 @@ public final class ConnectionFactory {
             statement = conn.createStatement();
             final boolean success = statement.execute(dbStructureUpdate);
             if (!success && statement.getUpdateCount() <= 0) {
-                throw new DatabaseException(String.format("Unable to upgrade the database schema to %s", schema));
+                throw new DatabaseException(String.format("Unable to upgrade the database schema to %s",
+                        currentDbVersion.toString()));
             }
         } catch (SQLException ex) {
             LOGGER.debug("", ex);
@@ -318,36 +352,57 @@ public final class ConnectionFactory {
                 IOUtils.closeQuietly(is);
             }
         } else {
-            LOGGER.error("The database schema must be upgraded to use this version of dependency-check. Please see {} for more information.", UPGRADE_HELP_URL);
-            throw new DatabaseException("Database schema is out of date");
+            final int e0 = Integer.parseInt(appExpectedVersion.getVersionParts().get(0));
+            final int c0 = Integer.parseInt(currentDbVersion.getVersionParts().get(0));
+            final int e1 = Integer.parseInt(appExpectedVersion.getVersionParts().get(1));
+            final int c1 = Integer.parseInt(currentDbVersion.getVersionParts().get(1));
+            if (e0 == c0 && e1 < c1) {
+                LOGGER.warn("A new version of dependency-check is available; consider upgrading");
+                Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, false);
+            } else if (e0 == c0 && e1 == c1) {
+                //do nothing - not sure how we got here, but just in case...
+            } else {
+                LOGGER.error("The database schema must be upgraded to use this version of dependency-check. Please see {} for more information.",
+                        UPGRADE_HELP_URL);
+                throw new DatabaseException("Database schema is out of date");
+            }
         }
     }
     /**
-     * Counter to ensure that calls to ensureSchemaVersion does not end up in an endless loop.
+     * Counter to ensure that calls to ensureSchemaVersion does not end up in an
+     * endless loop.
      */
     private static int callDepth = 0;
     /**
-     * Uses the provided connection to check the specified schema version within the database.
+     * Uses the provided connection to check the specified schema version within
+     * the database.
      *
      * @param conn the database connection object
-     * @throws DatabaseException thrown if the schema version is not compatible with this version of dependency-check
+     * @throws DatabaseException thrown if the schema version is not compatible
+     * with this version of dependency-check
      */
     private static void ensureSchemaVersion(Connection conn) throws DatabaseException {
         ResultSet rs = null;
-        CallableStatement cs = null;
+        PreparedStatement ps = null;
         try {
             //TODO convert this to use DatabaseProperties
-            cs = conn.prepareCall("SELECT value FROM properties WHERE id = 'version'");
-            rs = cs.executeQuery();
+            ps = conn.prepareStatement("SELECT value FROM properties WHERE id = 'version'");
+            rs = ps.executeQuery();
             if (rs.next()) {
-                final DependencyVersion current = DependencyVersionUtil.parseVersion(DB_SCHEMA_VERSION);
+                final DependencyVersion appDbVersion = DependencyVersionUtil.parseVersion(DB_SCHEMA_VERSION);
+                if (appDbVersion == null) {
+                    throw new DatabaseException("Invalid application database schema");
+                }
                 final DependencyVersion db = DependencyVersionUtil.parseVersion(rs.getString(1));
-                if (current.compareTo(db) > 0) {
-                    LOGGER.debug("Current Schema: " + DB_SCHEMA_VERSION);
-                    LOGGER.debug("DB Schema: " + rs.getString(1));
-                    updateSchema(conn, rs.getString(1));
+                if (db == null) {
+                    throw new DatabaseException("Invalid database schema");
+                }
+                if (appDbVersion.compareTo(db) > 0) {
+                    LOGGER.debug("Current Schema: {}", DB_SCHEMA_VERSION);
+                    LOGGER.debug("DB Schema: {}", rs.getString(1));
+                    updateSchema(conn, appDbVersion, db);
                     if (++callDepth < 10) {
                         ensureSchemaVersion(conn);
                     }
@@ -360,7 +415,7 @@ public final class ConnectionFactory {
             throw new DatabaseException("Unable to check the database schema version");
         } finally {
             DBUtils.closeResultSet(rs);
-            DBUtils.closeStatement(cs);
+            DBUtils.closeStatement(ps);
         }
     }
 }
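The updateSchema change above builds the upgrade script name from the current database version and loads it from the classpath. The sketch below isolates that lookup; the resource-name pattern is an assumption (the javadoc only mentions upgrade_2.9.sql), and the real code executes the script through a java.sql.Statement rather than returning its text.

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public final class UpgradeScriptSketch {

    // Assumed pattern; the actual DB_STRUCTURE_UPDATE_RESOURCE constant is not shown in the diff.
    private static final String DB_STRUCTURE_UPDATE_RESOURCE = "data/upgrade_%s.sql";

    private UpgradeScriptSketch() {
    }

    public static String loadUpgradeScript(String currentDbVersion) throws IOException {
        final String updateFile = String.format(DB_STRUCTURE_UPDATE_RESOURCE, currentDbVersion);
        try (InputStream is = UpgradeScriptSketch.class.getClassLoader().getResourceAsStream(updateFile)) {
            if (is == null) {
                throw new IOException("Unable to load update file " + updateFile);
            }
            // Read the script into a string; the production code then runs it via Statement.execute().
            final StringBuilder sb = new StringBuilder();
            final byte[] buffer = new byte[4096];
            int read;
            while ((read = is.read(buffer)) != -1) {
                sb.append(new String(buffer, 0, read, StandardCharsets.UTF_8));
            }
            return sb.toString();
        }
    }
}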


@@ -19,13 +19,12 @@ package org.owasp.dependencycheck.data.nvdcve;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
-import java.sql.CallableStatement;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
-import java.sql.Statement;
 import java.util.ArrayList;
+import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -36,6 +35,7 @@ import java.util.MissingResourceException;
 import java.util.Properties;
 import java.util.ResourceBundle;
 import java.util.Set;
+import javax.annotation.concurrent.ThreadSafe;
 import org.owasp.dependencycheck.data.cwe.CweDB;
 import org.owasp.dependencycheck.dependency.Reference;
 import org.owasp.dependencycheck.dependency.Vulnerability;
@@ -48,13 +48,27 @@ import org.owasp.dependencycheck.utils.Settings;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import static org.owasp.dependencycheck.data.nvdcve.CveDB.PreparedStatementCveDb.*;
 /**
- * The database holding information about the NVD CVE data.
+ * The database holding information about the NVD CVE data. This class is safe
+ * to be accessed from multiple threads in parallel, however internally only one
+ * connection will be used.
  *
  * @author Jeremy Long
  */
-public class CveDB {
+@ThreadSafe
+public final class CveDB implements AutoCloseable {
+    /**
+     * Singleton instance of the CveDB.
+     */
+    private static CveDB instance = null;
+    /**
+     * Track the number of current users of the CveDB; so that if someone is
+     * using database another user cannot close the connection on them.
+     */
+    private int usageCount = 0;
     /**
      * The logger.
      */
@@ -62,72 +76,204 @@ public class CveDB {
     /**
      * Database connection
      */
-    private Connection conn;
+    private Connection connection;
     /**
      * The bundle of statements used when accessing the database.
      */
-    private ResourceBundle statementBundle = null;
+    private ResourceBundle statementBundle;
+    /**
+     * Database properties object containing the 'properties' from the database
+     * table.
+     */
+    private DatabaseProperties databaseProperties;
+    /**
+     * The prepared statements.
+     */
+    private final EnumMap<PreparedStatementCveDb, PreparedStatement> preparedStatements = new EnumMap<>(PreparedStatementCveDb.class);
     /**
-     * Creates a new CveDB object and opens the database connection. Note, the connection must be closed by the caller by calling
-     * the close method.
-     *
-     * @throws DatabaseException thrown if there is an exception opening the database.
+     * The enum value names must match the keys of the statements in the
+     * statement bundles "dbStatements*.properties".
      */
-    public CveDB() throws DatabaseException {
-        super();
+    enum PreparedStatementCveDb {
+        /**
+         * Key for SQL Statement.
+         */
CLEANUP_ORPHANS,
/**
* Key for SQL Statement.
*/
COUNT_CPE,
/**
* Key for SQL Statement.
*/
DELETE_REFERENCE,
/**
* Key for SQL Statement.
*/
DELETE_SOFTWARE,
/**
* Key for SQL Statement.
*/
DELETE_VULNERABILITY,
/**
* Key for SQL Statement.
*/
INSERT_CPE,
/**
* Key for SQL Statement.
*/
INSERT_PROPERTY,
/**
* Key for SQL Statement.
*/
INSERT_REFERENCE,
/**
* Key for SQL Statement.
*/
INSERT_SOFTWARE,
/**
* Key for SQL Statement.
*/
INSERT_VULNERABILITY,
/**
* Key for SQL Statement.
*/
MERGE_PROPERTY,
/**
* Key for SQL Statement.
*/
SELECT_CPE_ENTRIES,
/**
* Key for SQL Statement.
*/
SELECT_CPE_ID,
/**
* Key for SQL Statement.
*/
SELECT_CVE_FROM_SOFTWARE,
/**
* Key for SQL Statement.
*/
SELECT_PROPERTIES,
/**
* Key for SQL Statement.
*/
SELECT_REFERENCES,
/**
* Key for SQL Statement.
*/
SELECT_SOFTWARE,
/**
* Key for SQL Statement.
*/
SELECT_VENDOR_PRODUCT_LIST,
/**
* Key for SQL Statement.
*/
SELECT_VULNERABILITY,
/**
* Key for SQL Statement.
*/
SELECT_VULNERABILITY_ID,
/**
* Key for SQL Statement.
*/
UPDATE_PROPERTY,
/**
* Key for SQL Statement.
*/
UPDATE_VULNERABILITY
}
/**
* Gets the CveDB singleton object.
*
* @return the CveDB singleton
* @throws DatabaseException thrown if there is a database error
*/
public static synchronized CveDB getInstance() throws DatabaseException {
if (instance == null) {
instance = new CveDB();
}
if (!instance.isOpen()) {
instance.open();
}
instance.usageCount += 1;
return instance;
}
/**
* Creates a new CveDB object and opens the database connection. Note, the
* connection must be closed by the caller by calling the close method.
*
* @throws DatabaseException thrown if there is an exception opening the
* database.
*/
private CveDB() throws DatabaseException {
}
/**
* Tries to determine the product name of the database.
*
* @param conn the database connection
* @return the product name of the database if successful, {@code null} else
*/
private static String determineDatabaseProductName(Connection conn) {
-        try {
-            open();
-            try {
-                final String databaseProductName = conn.getMetaData().getDatabaseProductName();
-                LOGGER.debug("Database dialect: {}", databaseProductName);
-                final Locale dbDialect = new Locale(databaseProductName);
-                statementBundle = ResourceBundle.getBundle("data/dbStatements", dbDialect);
-            } catch (SQLException se) {
-                LOGGER.warn("Problem loading database specific dialect!", se);
-                statementBundle = ResourceBundle.getBundle("data/dbStatements");
-            }
-            databaseProperties = new DatabaseProperties(this);
-        } catch (DatabaseException ex) {
-            throw ex;
+        try {
+            final String databaseProductName = conn.getMetaData().getDatabaseProductName();
+            LOGGER.debug("Database product: {}", databaseProductName);
+            return databaseProductName;
+        } catch (SQLException se) {
+            LOGGER.warn("Problem determining database product!", se);
+            return null;
         }
     }
     /**
-     * Returns the database connection.
+     * Opens the database connection. If the database does not exist, it will
+     * create a new one.
      *
-     * @return the database connection
+     * @throws DatabaseException thrown if there is an error opening the
+     * database connection
      */
-    protected Connection getConnection() {
-        return conn;
-    }
-    /**
-     * Opens the database connection. If the database does not exist, it will create a new one.
-     *
-     * @throws DatabaseException thrown if there is an error opening the database connection
-     */
-    public final void open() throws DatabaseException {
-        if (!isOpen()) {
-            conn = ConnectionFactory.getConnection();
+    private synchronized void open() throws DatabaseException {
+        if (!instance.isOpen()) {
+            instance.connection = ConnectionFactory.getConnection();
+            final String databaseProductName = determineDatabaseProductName(instance.connection);
+            instance.statementBundle = databaseProductName != null
+                    ? ResourceBundle.getBundle("data/dbStatements", new Locale(databaseProductName))
+                    : ResourceBundle.getBundle("data/dbStatements");
+            instance.prepareStatements();
+            instance.databaseProperties = new DatabaseProperties(instance);
         }
     }
     /**
-     * Closes the DB4O database. Close should be called on this object when it is done being used.
+     * Closes the database connection. Close should be called on this object
+     * when it is done being used.
      */
-    public void close() {
-        if (conn != null) {
-            try {
-                conn.close();
-            } catch (SQLException ex) {
-                LOGGER.error("There was an error attempting to close the CveDB, see the log for more details.");
-                LOGGER.debug("", ex);
-            } catch (Throwable ex) {
-                LOGGER.error("There was an exception attempting to close the CveDB, see the log for more details.");
-                LOGGER.debug("", ex);
+    @Override
+    public synchronized void close() {
+        if (instance != null) {
+            instance.usageCount -= 1;
+            if (instance.usageCount <= 0 && instance.isOpen()) {
+                instance.usageCount = 0;
+                instance.closeStatements();
+                try {
+                    instance.connection.close();
+                } catch (SQLException ex) {
+                    LOGGER.error("There was an error attempting to close the CveDB, see the log for more details.");
+                    LOGGER.debug("", ex);
+                } catch (Throwable ex) {
+                    LOGGER.error("There was an exception attempting to close the CveDB, see the log for more details.");
+                    LOGGER.debug("", ex);
+                }
+                instance.statementBundle = null;
+                instance.preparedStatements.clear();
+                instance.databaseProperties = null;
+                instance.connection = null;
             }
-            conn = null;
         }
     }
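With CveDB now a reference-counted singleton that implements AutoCloseable, every getInstance() is expected to be balanced by a close(); try-with-resources does this naturally. The caller below is hypothetical, but getInstance(), dataExists() and close() are all shown in this diff.

import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;

public class CveDbUsageSketch {
    public static void main(String[] args) throws DatabaseException {
        // getInstance() opens the shared connection (or bumps the usage count when
        // it is already open); close() decrements the count and only closes the
        // underlying connection once the last user has released it.
        try (CveDB cveDb = CveDB.getInstance()) {
            System.out.println("NVD CVE data present: " + cveDb.dataExists());
        }
    }
}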
@@ -136,8 +282,54 @@ public class CveDB {
      *
      * @return whether the database connection is open or closed
      */
-    public boolean isOpen() {
-        return conn != null;
+    protected synchronized boolean isOpen() {
+        return connection != null;
}
/**
* Prepares all statements to be used.
*
* @throws DatabaseException thrown if there is an error preparing the
* statements
*/
private void prepareStatements() throws DatabaseException {
for (PreparedStatementCveDb key : values()) {
final String statementString = statementBundle.getString(key.name());
final PreparedStatement preparedStatement;
try {
if (key == INSERT_VULNERABILITY || key == INSERT_CPE) {
preparedStatement = connection.prepareStatement(statementString, new String[]{"id"});
} else {
preparedStatement = connection.prepareStatement(statementString);
}
} catch (SQLException exception) {
throw new DatabaseException(exception);
}
preparedStatements.put(key, preparedStatement);
}
}
/**
* Closes all prepared statements.
*/
private synchronized void closeStatements() {
for (PreparedStatement preparedStatement : preparedStatements.values()) {
DBUtils.closeStatement(preparedStatement);
}
}
/**
* Returns the specified prepared statement.
*
* @param key the prepared statement from {@link PreparedStatementCveDb} to
* return
* @return the prepared statement
* @throws SQLException thrown if a SQL Exception occurs
*/
private synchronized PreparedStatement getPreparedStatement(PreparedStatementCveDb key) throws SQLException {
final PreparedStatement preparedStatement = preparedStatements.get(key);
preparedStatement.clearParameters();
return preparedStatement;
     }
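prepareStatements() above requests generated keys for INSERT_CPE and INSERT_VULNERABILITY by passing new String[]{"id"}. The sketch below shows the standard JDBC pattern that relies on; the column layout in the SQL is an illustrative assumption, not the project's schema.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class GeneratedKeySketch {
    // Insert a row and return the database-generated primary key, the same
    // reason the statements above are prepared with new String[]{"id"}.
    static int insertAndReturnId(Connection conn, String cpe) throws SQLException {
        try (PreparedStatement ps = conn.prepareStatement(
                "INSERT INTO cpeEntry (cpe) VALUES (?)", new String[]{"id"})) {
            ps.setString(1, cpe);
            ps.executeUpdate();
            try (ResultSet keys = ps.getGeneratedKeys()) {
                return keys.next() ? keys.getInt(1) : 0;
            }
        }
    }
}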
     /**
@@ -145,10 +337,10 @@ public class CveDB {
      *
      * @throws SQLException thrown if a SQL Exception occurs
      */
-    public void commit() throws SQLException {
+    public synchronized void commit() throws SQLException {
         //temporary remove this as autocommit is on.
-        //if (conn != null) {
-        //    conn.commit();
+        //if (isOpen()) {
+        //    connection.commit();
         //}
     }
@@ -164,34 +356,41 @@ public class CveDB {
close(); close();
super.finalize(); super.finalize();
} }
/**
* Database properties object containing the 'properties' from the database table.
*/
private DatabaseProperties databaseProperties;
/** /**
* Get the value of databaseProperties. * Get the value of databaseProperties.
* *
* @return the value of databaseProperties * @return the value of databaseProperties
*/ */
public DatabaseProperties getDatabaseProperties() { public synchronized DatabaseProperties getDatabaseProperties() {
return databaseProperties; return databaseProperties;
} }
/** /**
* Searches the CPE entries in the database and retrieves all entries for a given vendor and product combination. The returned * Used within the unit tests to reload the database properties.
* list will include all versions of the product that are registered in the NVD CVE data. *
* @return the database properties
*/
protected synchronized DatabaseProperties reloadProperties() {
databaseProperties = new DatabaseProperties(this);
return databaseProperties;
}
/**
* Searches the CPE entries in the database and retrieves all entries for a
* given vendor and product combination. The returned list will include all
* versions of the product that are registered in the NVD CVE data.
* *
* @param vendor the identified vendor name of the dependency being analyzed * @param vendor the identified vendor name of the dependency being analyzed
* @param product the identified name of the product of the dependency being analyzed * @param product the identified name of the product of the dependency being
* analyzed
* @return a set of vulnerable software * @return a set of vulnerable software
*/ */
public Set<VulnerableSoftware> getCPEs(String vendor, String product) { public synchronized Set<VulnerableSoftware> getCPEs(String vendor, String product) {
final Set<VulnerableSoftware> cpe = new HashSet<VulnerableSoftware>(); final Set<VulnerableSoftware> cpe = new HashSet<>();
ResultSet rs = null; ResultSet rs = null;
PreparedStatement ps = null;
try { try {
ps = getConnection().prepareStatement(statementBundle.getString("SELECT_CPE_ENTRIES")); final PreparedStatement ps = getPreparedStatement(SELECT_CPE_ENTRIES);
ps.setString(1, vendor); ps.setString(1, vendor);
ps.setString(2, product); ps.setString(2, product);
rs = ps.executeQuery(); rs = ps.executeQuery();
@@ -206,7 +405,6 @@ public class CveDB {
             LOGGER.debug("", ex);
         } finally {
             DBUtils.closeResultSet(rs);
-            DBUtils.closeStatement(ps);
         }
         return cpe;
     }
@@ -215,24 +413,23 @@ public class CveDB {
* Returns the entire list of vendor/product combinations. * Returns the entire list of vendor/product combinations.
* *
* @return the entire list of vendor/product combinations * @return the entire list of vendor/product combinations
* @throws DatabaseException thrown when there is an error retrieving the data from the DB * @throws DatabaseException thrown when there is an error retrieving the
* data from the DB
*/ */
public Set<Pair<String, String>> getVendorProductList() throws DatabaseException { public synchronized Set<Pair<String, String>> getVendorProductList() throws DatabaseException {
final Set<Pair<String, String>> data = new HashSet<Pair<String, String>>(); final Set<Pair<String, String>> data = new HashSet<>();
ResultSet rs = null; ResultSet rs = null;
PreparedStatement ps = null;
try { try {
ps = getConnection().prepareStatement(statementBundle.getString("SELECT_VENDOR_PRODUCT_LIST")); final PreparedStatement ps = getPreparedStatement(SELECT_VENDOR_PRODUCT_LIST);
rs = ps.executeQuery(); rs = ps.executeQuery();
while (rs.next()) { while (rs.next()) {
data.add(new Pair<String, String>(rs.getString(1), rs.getString(2))); data.add(new Pair<>(rs.getString(1), rs.getString(2)));
} }
} catch (SQLException ex) { } catch (SQLException ex) {
final String msg = "An unexpected SQL Exception occurred; please see the verbose log for more details."; final String msg = "An unexpected SQL Exception occurred; please see the verbose log for more details.";
throw new DatabaseException(msg, ex); throw new DatabaseException(msg, ex);
} finally { } finally {
DBUtils.closeResultSet(rs); DBUtils.closeResultSet(rs);
DBUtils.closeStatement(ps);
} }
return data; return data;
} }
@@ -242,12 +439,11 @@ public class CveDB {
* *
* @return the properties from the database * @return the properties from the database
*/ */
Properties getProperties() { public synchronized Properties getProperties() {
final Properties prop = new Properties(); final Properties prop = new Properties();
PreparedStatement ps = null;
ResultSet rs = null; ResultSet rs = null;
try { try {
ps = getConnection().prepareStatement(statementBundle.getString("SELECT_PROPERTIES")); final PreparedStatement ps = getPreparedStatement(SELECT_PROPERTIES);
rs = ps.executeQuery(); rs = ps.executeQuery();
while (rs.next()) { while (rs.next()) {
prop.setProperty(rs.getString(1), rs.getString(2)); prop.setProperty(rs.getString(1), rs.getString(2));
@@ -256,7 +452,6 @@ public class CveDB {
LOGGER.error("An unexpected SQL Exception occurred; please see the verbose log for more details."); LOGGER.error("An unexpected SQL Exception occurred; please see the verbose log for more details.");
LOGGER.debug("", ex); LOGGER.debug("", ex);
} finally { } finally {
DBUtils.closeStatement(ps);
DBUtils.closeResultSet(rs); DBUtils.closeResultSet(rs);
} }
return prop; return prop;
@@ -268,34 +463,23 @@ public class CveDB {
* @param key the property key * @param key the property key
* @param value the property value * @param value the property value
*/ */
void saveProperty(String key, String value) { public synchronized void saveProperty(String key, String value) {
try { try {
try { try {
final PreparedStatement mergeProperty = getConnection().prepareStatement(statementBundle.getString("MERGE_PROPERTY")); final PreparedStatement mergeProperty = getPreparedStatement(MERGE_PROPERTY);
try { mergeProperty.setString(1, key);
mergeProperty.setString(1, key); mergeProperty.setString(2, value);
mergeProperty.setString(2, value); mergeProperty.executeUpdate();
mergeProperty.executeUpdate();
} finally {
DBUtils.closeStatement(mergeProperty);
}
} catch (MissingResourceException mre) { } catch (MissingResourceException mre) {
// No Merge statement, so doing an Update/Insert... // No Merge statement, so doing an Update/Insert...
PreparedStatement updateProperty = null; final PreparedStatement updateProperty = getPreparedStatement(UPDATE_PROPERTY);
PreparedStatement insertProperty = null; updateProperty.setString(1, value);
try { updateProperty.setString(2, key);
updateProperty = getConnection().prepareStatement(statementBundle.getString("UPDATE_PROPERTY")); if (updateProperty.executeUpdate() == 0) {
updateProperty.setString(1, value); final PreparedStatement insertProperty = getPreparedStatement(INSERT_PROPERTY);
updateProperty.setString(2, key); insertProperty.setString(1, key);
if (updateProperty.executeUpdate() == 0) { insertProperty.setString(2, value);
insertProperty = getConnection().prepareStatement(statementBundle.getString("INSERT_PROPERTY")); insertProperty.executeUpdate();
insertProperty.setString(1, key);
insertProperty.setString(2, value);
insertProperty.executeUpdate();
}
} finally {
DBUtils.closeStatement(updateProperty);
DBUtils.closeStatement(insertProperty);
} }
} }
} catch (SQLException ex) { } catch (SQLException ex) {
@@ -311,7 +495,7 @@ public class CveDB {
* @return a list of Vulnerabilities * @return a list of Vulnerabilities
* @throws DatabaseException thrown if there is an exception retrieving data * @throws DatabaseException thrown if there is an exception retrieving data
*/ */
public List<Vulnerability> getVulnerabilities(String cpeStr) throws DatabaseException { public synchronized List<Vulnerability> getVulnerabilities(String cpeStr) throws DatabaseException {
final VulnerableSoftware cpe = new VulnerableSoftware(); final VulnerableSoftware cpe = new VulnerableSoftware();
try { try {
cpe.parseName(cpeStr); cpe.parseName(cpeStr);
@@ -319,18 +503,17 @@ public class CveDB {
LOGGER.trace("", ex); LOGGER.trace("", ex);
} }
final DependencyVersion detectedVersion = parseDependencyVersion(cpe); final DependencyVersion detectedVersion = parseDependencyVersion(cpe);
final List<Vulnerability> vulnerabilities = new ArrayList<Vulnerability>(); final List<Vulnerability> vulnerabilities = new ArrayList<>();
PreparedStatement ps = null;
ResultSet rs = null; ResultSet rs = null;
try { try {
ps = getConnection().prepareStatement(statementBundle.getString("SELECT_CVE_FROM_SOFTWARE")); final PreparedStatement ps = getPreparedStatement(SELECT_CVE_FROM_SOFTWARE);
ps.setString(1, cpe.getVendor()); ps.setString(1, cpe.getVendor());
ps.setString(2, cpe.getProduct()); ps.setString(2, cpe.getProduct());
rs = ps.executeQuery(); rs = ps.executeQuery();
String currentCVE = ""; String currentCVE = "";
final Map<String, Boolean> vulnSoftware = new HashMap<String, Boolean>(); final Map<String, Boolean> vulnSoftware = new HashMap<>();
while (rs.next()) { while (rs.next()) {
final String cveId = rs.getString(1); final String cveId = rs.getString(1);
if (!currentCVE.equals(cveId)) { //check for match and add if (!currentCVE.equals(cveId)) { //check for match and add
@@ -360,7 +543,6 @@ public class CveDB {
throw new DatabaseException("Exception retrieving vulnerability for " + cpeStr, ex); throw new DatabaseException("Exception retrieving vulnerability for " + cpeStr, ex);
} finally { } finally {
DBUtils.closeResultSet(rs); DBUtils.closeResultSet(rs);
DBUtils.closeStatement(ps);
} }
return vulnerabilities; return vulnerabilities;
} }
@@ -372,16 +554,14 @@ public class CveDB {
* @return a vulnerability object * @return a vulnerability object
* @throws DatabaseException if an exception occurs * @throws DatabaseException if an exception occurs
*/ */
private Vulnerability getVulnerability(String cve) throws DatabaseException { public synchronized Vulnerability getVulnerability(String cve) throws DatabaseException {
PreparedStatement psV = null;
PreparedStatement psR = null;
PreparedStatement psS = null;
ResultSet rsV = null; ResultSet rsV = null;
ResultSet rsR = null; ResultSet rsR = null;
ResultSet rsS = null; ResultSet rsS = null;
Vulnerability vuln = null; Vulnerability vuln = null;
try { try {
psV = getConnection().prepareStatement(statementBundle.getString("SELECT_VULNERABILITY")); final PreparedStatement psV = getPreparedStatement(SELECT_VULNERABILITY);
psV.setString(1, cve); psV.setString(1, cve);
rsV = psV.executeQuery(); rsV = psV.executeQuery();
if (rsV.next()) { if (rsV.next()) {
@@ -405,13 +585,14 @@ public class CveDB {
vuln.setCvssIntegrityImpact(rsV.getString(9)); vuln.setCvssIntegrityImpact(rsV.getString(9));
vuln.setCvssAvailabilityImpact(rsV.getString(10)); vuln.setCvssAvailabilityImpact(rsV.getString(10));
psR = getConnection().prepareStatement(statementBundle.getString("SELECT_REFERENCES")); final PreparedStatement psR = getPreparedStatement(SELECT_REFERENCES);
psR.setInt(1, cveId); psR.setInt(1, cveId);
rsR = psR.executeQuery(); rsR = psR.executeQuery();
while (rsR.next()) { while (rsR.next()) {
vuln.addReference(rsR.getString(1), rsR.getString(2), rsR.getString(3)); vuln.addReference(rsR.getString(1), rsR.getString(2), rsR.getString(3));
} }
psS = getConnection().prepareStatement(statementBundle.getString("SELECT_SOFTWARE"));
final PreparedStatement psS = getPreparedStatement(SELECT_SOFTWARE);
psS.setInt(1, cveId); psS.setInt(1, cveId);
rsS = psS.executeQuery(); rsS = psS.executeQuery();
while (rsS.next()) { while (rsS.next()) {
@@ -430,65 +611,43 @@ public class CveDB {
DBUtils.closeResultSet(rsV); DBUtils.closeResultSet(rsV);
DBUtils.closeResultSet(rsR); DBUtils.closeResultSet(rsR);
DBUtils.closeResultSet(rsS); DBUtils.closeResultSet(rsS);
DBUtils.closeStatement(psV);
DBUtils.closeStatement(psR);
DBUtils.closeStatement(psS);
} }
return vuln; return vuln;
} }
/** /**
* Updates the vulnerability within the database. If the vulnerability does not exist it will be added. * Updates the vulnerability within the database. If the vulnerability does
* not exist it will be added.
* *
* @param vuln the vulnerability to add to the database * @param vuln the vulnerability to add to the database
* @throws DatabaseException is thrown if the database * @throws DatabaseException is thrown if the database
*/ */
public void updateVulnerability(Vulnerability vuln) throws DatabaseException { public synchronized void updateVulnerability(Vulnerability vuln) throws DatabaseException {
PreparedStatement selectVulnerabilityId = null;
PreparedStatement deleteVulnerability = null;
PreparedStatement deleteReferences = null;
PreparedStatement deleteSoftware = null;
PreparedStatement updateVulnerability = null;
PreparedStatement insertVulnerability = null;
PreparedStatement insertReference = null;
PreparedStatement selectCpeId = null;
PreparedStatement insertCpe = null;
PreparedStatement insertSoftware = null;
try { try {
selectVulnerabilityId = getConnection().prepareStatement(statementBundle.getString("SELECT_VULNERABILITY_ID"));
deleteVulnerability = getConnection().prepareStatement(statementBundle.getString("DELETE_VULNERABILITY"));
deleteReferences = getConnection().prepareStatement(statementBundle.getString("DELETE_REFERENCE"));
deleteSoftware = getConnection().prepareStatement(statementBundle.getString("DELETE_SOFTWARE"));
updateVulnerability = getConnection().prepareStatement(statementBundle.getString("UPDATE_VULNERABILITY"));
final String[] ids = {"id"};
insertVulnerability = getConnection().prepareStatement(statementBundle.getString("INSERT_VULNERABILITY"),
//Statement.RETURN_GENERATED_KEYS);
ids);
insertReference = getConnection().prepareStatement(statementBundle.getString("INSERT_REFERENCE"));
selectCpeId = getConnection().prepareStatement(statementBundle.getString("SELECT_CPE_ID"));
insertCpe = getConnection().prepareStatement(statementBundle.getString("INSERT_CPE"),
//Statement.RETURN_GENERATED_KEYS);
ids);
insertSoftware = getConnection().prepareStatement(statementBundle.getString("INSERT_SOFTWARE"));
int vulnerabilityId = 0; int vulnerabilityId = 0;
final PreparedStatement selectVulnerabilityId = getPreparedStatement(SELECT_VULNERABILITY_ID);
selectVulnerabilityId.setString(1, vuln.getName()); selectVulnerabilityId.setString(1, vuln.getName());
ResultSet rs = selectVulnerabilityId.executeQuery(); ResultSet rs = selectVulnerabilityId.executeQuery();
if (rs.next()) { if (rs.next()) {
vulnerabilityId = rs.getInt(1); vulnerabilityId = rs.getInt(1);
// first delete any existing vulnerability info. We don't know what was updated. yes, slower but atm easier. // first delete any existing vulnerability info. We don't know what was updated. yes, slower but atm easier.
deleteReferences.setInt(1, vulnerabilityId); final PreparedStatement deleteReference = getPreparedStatement(DELETE_REFERENCE);
deleteReferences.execute(); deleteReference.setInt(1, vulnerabilityId);
deleteReference.execute();
final PreparedStatement deleteSoftware = getPreparedStatement(DELETE_SOFTWARE);
deleteSoftware.setInt(1, vulnerabilityId); deleteSoftware.setInt(1, vulnerabilityId);
deleteSoftware.execute(); deleteSoftware.execute();
} }
DBUtils.closeResultSet(rs); DBUtils.closeResultSet(rs);
rs = null;
if (vulnerabilityId != 0) { if (vulnerabilityId != 0) {
if (vuln.getDescription().contains("** REJECT **")) { if (vuln.getDescription().contains("** REJECT **")) {
final PreparedStatement deleteVulnerability = getPreparedStatement(DELETE_VULNERABILITY);
deleteVulnerability.setInt(1, vulnerabilityId); deleteVulnerability.setInt(1, vulnerabilityId);
deleteVulnerability.executeUpdate(); deleteVulnerability.executeUpdate();
} else { } else {
final PreparedStatement updateVulnerability = getPreparedStatement(UPDATE_VULNERABILITY);
updateVulnerability.setString(1, vuln.getDescription()); updateVulnerability.setString(1, vuln.getDescription());
updateVulnerability.setString(2, vuln.getCwe()); updateVulnerability.setString(2, vuln.getCwe());
updateVulnerability.setFloat(3, vuln.getCvssScore()); updateVulnerability.setFloat(3, vuln.getCvssScore());
@@ -502,6 +661,7 @@ public class CveDB {
updateVulnerability.executeUpdate(); updateVulnerability.executeUpdate();
} }
} else { } else {
final PreparedStatement insertVulnerability = getPreparedStatement(INSERT_VULNERABILITY);
insertVulnerability.setString(1, vuln.getName()); insertVulnerability.setString(1, vuln.getName());
insertVulnerability.setString(2, vuln.getDescription()); insertVulnerability.setString(2, vuln.getDescription());
insertVulnerability.setString(3, vuln.getCwe()); insertVulnerability.setString(3, vuln.getCwe());
@@ -522,18 +682,22 @@ public class CveDB {
throw new DatabaseException(msg, ex); throw new DatabaseException(msg, ex);
} finally { } finally {
DBUtils.closeResultSet(rs); DBUtils.closeResultSet(rs);
rs = null;
} }
} }
insertReference.setInt(1, vulnerabilityId);
final PreparedStatement insertReference = getPreparedStatement(INSERT_REFERENCE);
for (Reference r : vuln.getReferences()) { for (Reference r : vuln.getReferences()) {
insertReference.setInt(1, vulnerabilityId);
insertReference.setString(2, r.getName()); insertReference.setString(2, r.getName());
insertReference.setString(3, r.getUrl()); insertReference.setString(3, r.getUrl());
insertReference.setString(4, r.getSource()); insertReference.setString(4, r.getSource());
insertReference.execute(); insertReference.execute();
} }
final PreparedStatement insertSoftware = getPreparedStatement(INSERT_SOFTWARE);
for (VulnerableSoftware s : vuln.getVulnerableSoftware()) { for (VulnerableSoftware s : vuln.getVulnerableSoftware()) {
int cpeProductId = 0; int cpeProductId = 0;
final PreparedStatement selectCpeId = getPreparedStatement(SELECT_CPE_ID);
selectCpeId.setString(1, s.getName()); selectCpeId.setString(1, s.getName());
try { try {
rs = selectCpeId.executeQuery(); rs = selectCpeId.executeQuery();
@@ -544,10 +708,10 @@ public class CveDB {
throw new DatabaseException("Unable to get primary key for new cpe: " + s.getName(), ex); throw new DatabaseException("Unable to get primary key for new cpe: " + s.getName(), ex);
} finally { } finally {
DBUtils.closeResultSet(rs); DBUtils.closeResultSet(rs);
rs = null;
} }
if (cpeProductId == 0) { if (cpeProductId == 0) {
final PreparedStatement insertCpe = getPreparedStatement(INSERT_CPE);
insertCpe.setString(1, s.getName()); insertCpe.setString(1, s.getName());
insertCpe.setString(2, s.getVendor()); insertCpe.setString(2, s.getVendor());
insertCpe.setString(3, s.getProduct()); insertCpe.setString(3, s.getProduct());
@@ -560,29 +724,28 @@ public class CveDB {
insertSoftware.setInt(1, vulnerabilityId); insertSoftware.setInt(1, vulnerabilityId);
insertSoftware.setInt(2, cpeProductId); insertSoftware.setInt(2, cpeProductId);
if (s.getPreviousVersion() == null) { if (s.getPreviousVersion() == null) {
insertSoftware.setNull(3, java.sql.Types.VARCHAR); insertSoftware.setNull(3, java.sql.Types.VARCHAR);
} else { } else {
insertSoftware.setString(3, s.getPreviousVersion()); insertSoftware.setString(3, s.getPreviousVersion());
} }
insertSoftware.execute(); try {
} insertSoftware.execute();
} catch (SQLException ex) {
if (ex.getMessage().contains("Duplicate entry")) {
final String msg = String.format("Duplicate software key identified in '%s:%s'", vuln.getName(), s.getName());
LOGGER.info(msg, ex);
} else {
throw ex;
}
}
}
} catch (SQLException ex) { } catch (SQLException ex) {
final String msg = String.format("Error updating '%s'", vuln.getName()); final String msg = String.format("Error updating '%s'", vuln.getName());
LOGGER.debug("", ex); LOGGER.debug(msg, ex);
throw new DatabaseException(msg, ex); throw new DatabaseException(msg, ex);
} finally {
DBUtils.closeStatement(selectVulnerabilityId);
DBUtils.closeStatement(deleteReferences);
DBUtils.closeStatement(deleteSoftware);
DBUtils.closeStatement(updateVulnerability);
DBUtils.closeStatement(deleteVulnerability);
DBUtils.closeStatement(insertVulnerability);
DBUtils.closeStatement(insertReference);
DBUtils.closeStatement(selectCpeId);
DBUtils.closeStatement(insertCpe);
DBUtils.closeStatement(insertSoftware);
} }
} }
@@ -591,18 +754,17 @@ public class CveDB {
      *
      * @return <code>true</code> if data exists; otherwise <code>false</code>
      */
-    public boolean dataExists() {
-        Statement cs = null;
+    public synchronized boolean dataExists() {
         ResultSet rs = null;
         try {
-            cs = conn.createStatement();
-            rs = cs.executeQuery("SELECT COUNT(*) records FROM cpeEntry");
+            final PreparedStatement cs = getPreparedStatement(COUNT_CPE);
+            rs = cs.executeQuery();
             if (rs.next()) {
                 if (rs.getInt(1) > 0) {
                     return true;
                 }
             }
-        } catch (SQLException ex) {
+        } catch (Exception ex) {
             String dd;
             try {
                 dd = Settings.getDataDirectory().getAbsolutePath();
@@ -613,50 +775,51 @@ public class CveDB {
                     + "If the problem persist try deleting the files in '{}' and running {} again. If the problem continues, please "
                     + "create a log file (see documentation at http://jeremylong.github.io/DependencyCheck/) and open a ticket at "
                     + "https://github.com/jeremylong/DependencyCheck/issues and include the log file.\n\n",
-                    dd, dd, Settings.getString(Settings.KEYS.APPLICATION_VAME));
+                    dd, dd, Settings.getString(Settings.KEYS.APPLICATION_NAME));
             LOGGER.debug("", ex);
         } finally {
             DBUtils.closeResultSet(rs);
-            DBUtils.closeStatement(cs);
         }
         return false;
     }
/** /**
* It is possible that orphaned rows may be generated during database updates. This should be called after all updates have * It is possible that orphaned rows may be generated during database
* been completed to ensure orphan entries are removed. * updates. This should be called after all updates have been completed to
* ensure orphan entries are removed.
*/ */
public void cleanupDatabase() { public synchronized void cleanupDatabase() {
PreparedStatement ps = null;
try { try {
ps = getConnection().prepareStatement(statementBundle.getString("CLEANUP_ORPHANS")); final PreparedStatement ps = getPreparedStatement(CLEANUP_ORPHANS);
if (ps != null) { if (ps != null) {
ps.executeUpdate(); ps.executeUpdate();
} }
} catch (SQLException ex) { } catch (SQLException ex) {
LOGGER.error("An unexpected SQL Exception occurred; please see the verbose log for more details."); LOGGER.error("An unexpected SQL Exception occurred; please see the verbose log for more details.");
LOGGER.debug("", ex); LOGGER.debug("", ex);
} finally {
DBUtils.closeStatement(ps);
} }
} }
/** /**
* Determines if the given identifiedVersion is affected by the given cpeId and previous version flag. A non-null, non-empty * Determines if the given identifiedVersion is affected by the given cpeId
* string passed to the previous version argument indicates that all previous versions are affected. * and previous version flag. A non-null, non-empty string passed to the
* previous version argument indicates that all previous versions are
* affected.
* *
* @param vendor the vendor of the dependency being analyzed * @param vendor the vendor of the dependency being analyzed
* @param product the product name of the dependency being analyzed * @param product the product name of the dependency being analyzed
* @param vulnerableSoftware a map of the vulnerable software with a boolean indicating if all previous versions are affected * @param vulnerableSoftware a map of the vulnerable software with a boolean
* @param identifiedVersion the identified version of the dependency being analyzed * indicating if all previous versions are affected
* @param identifiedVersion the identified version of the dependency being
* analyzed
* @return true if the identified version is affected, otherwise false * @return true if the identified version is affected, otherwise false
*/ */
Entry<String, Boolean> getMatchingSoftware(Map<String, Boolean> vulnerableSoftware, String vendor, String product, protected Entry<String, Boolean> getMatchingSoftware(Map<String, Boolean> vulnerableSoftware, String vendor, String product,
DependencyVersion identifiedVersion) { DependencyVersion identifiedVersion) {
final boolean isVersionTwoADifferentProduct = "apache".equals(vendor) && "struts".equals(product); final boolean isVersionTwoADifferentProduct = "apache".equals(vendor) && "struts".equals(product);
final Set<String> majorVersionsAffectingAllPrevious = new HashSet<String>(); final Set<String> majorVersionsAffectingAllPrevious = new HashSet<>();
final boolean matchesAnyPrevious = identifiedVersion == null || "-".equals(identifiedVersion.toString()); final boolean matchesAnyPrevious = identifiedVersion == null || "-".equals(identifiedVersion.toString());
String majorVersionMatch = null; String majorVersionMatch = null;
for (Entry<String, Boolean> entry : vulnerableSoftware.entrySet()) { for (Entry<String, Boolean> entry : vulnerableSoftware.entrySet()) {
@@ -685,12 +848,12 @@ public class CveDB {
             if (!entry.getValue()) {
                 final DependencyVersion v = parseDependencyVersion(entry.getKey());
                 //this can't dereference a null 'majorVersionMatch' as canSkipVersions accounts for this.
-                if (canSkipVersions && !majorVersionMatch.equals(v.getVersionParts().get(0))) {
+                if (canSkipVersions && majorVersionMatch != null && !majorVersionMatch.equals(v.getVersionParts().get(0))) {
                     continue;
                 }
                 //this can't dereference a null 'identifiedVersion' because if it was null we would have exited
                 //in the above loop or just after loop (if matchesAnyPrevious return null).
-                if (identifiedVersion.equals(v)) {
+                if (identifiedVersion != null && identifiedVersion.equals(v)) {
                     return entry;
                 }
             }
@@ -699,12 +862,12 @@ public class CveDB {
             if (entry.getValue()) {
                 final DependencyVersion v = parseDependencyVersion(entry.getKey());
                 //this can't dereference a null 'majorVersionMatch' as canSkipVersions accounts for this.
-                if (canSkipVersions && !majorVersionMatch.equals(v.getVersionParts().get(0))) {
+                if (canSkipVersions && majorVersionMatch != null && !majorVersionMatch.equals(v.getVersionParts().get(0))) {
                     continue;
                 }
                 //this can't dereference a null 'identifiedVersion' because if it was null we would have exited
                 //in the above loop or just after loop (if matchesAnyPrevious return null).
-                if (entry.getValue() && identifiedVersion.compareTo(v) <= 0) {
+                if (entry.getValue() && identifiedVersion != null && identifiedVersion.compareTo(v) <= 0) {
                     if (!(isVersionTwoADifferentProduct && !identifiedVersion.getVersionParts().get(0).equals(v.getVersionParts().get(0)))) {
                         return entry;
                     }
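A concrete walk-through of the previous-version flag handled above, with made-up values: a map value of Boolean.TRUE means the keyed version and everything before it are affected, while Boolean.FALSE means only that exact version matches.

import java.util.HashMap;
import java.util.Map;

public class MatchingSoftwareWalkthrough {
    public static void main(String[] args) {
        // Keys are CPE version strings; the Boolean is the "previous versions
        // are also affected" flag from the NVD data (illustrative values only).
        Map<String, Boolean> vulnerableSoftware = new HashMap<>();
        vulnerableSoftware.put("2.3.5", Boolean.FALSE); // only 2.3.5 itself
        vulnerableSoftware.put("2.4.1", Boolean.TRUE);  // 2.4.1 and everything before it

        // An identified version of 2.3.9 does not equal 2.3.5, but it is less
        // than or equal to 2.4.1 whose flag is TRUE, so getMatchingSoftware
        // above would report the dependency as affected via the 2.4.1 entry.
        System.out.println(vulnerableSoftware);
    }
}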
@@ -715,7 +878,8 @@ public class CveDB {
} }
/** /**
* Parses the version (including revision) from a CPE identifier. If no version is identified then a '-' is returned. * Parses the version (including revision) from a CPE identifier. If no
* version is identified then a '-' is returned.
* *
* @param cpeStr a cpe identifier * @param cpeStr a cpe identifier
* @return a dependency version * @return a dependency version
@@ -732,7 +896,8 @@ public class CveDB {
} }
/** /**
* Takes a CPE and parses out the version number. If no version is identified then a '-' is returned. * Takes a CPE and parses out the version number. If no version is
* identified then a '-' is returned.
* *
* @param cpe a cpe object * @param cpe a cpe object
* @return a dependency version * @return a dependency version
@@ -758,20 +923,21 @@ public class CveDB {
* *
* Deletes unused dictionary entries from the database. * Deletes unused dictionary entries from the database.
*/ */
public void deleteUnusedCpe() { public synchronized void deleteUnusedCpe() {
CallableStatement cs = null; PreparedStatement ps = null;
try { try {
cs = getConnection().prepareCall(statementBundle.getString("DELETE_UNUSED_DICT_CPE")); ps = connection.prepareStatement(statementBundle.getString("DELETE_UNUSED_DICT_CPE"));
cs.executeUpdate(); ps.executeUpdate();
} catch (SQLException ex) { } catch (SQLException ex) {
LOGGER.error("Unable to delete CPE dictionary entries", ex); LOGGER.error("Unable to delete CPE dictionary entries", ex);
} finally { } finally {
DBUtils.closeStatement(cs); DBUtils.closeStatement(ps);
} }
} }
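The change above replaces a CallableStatement with a plain PreparedStatement and synchronizes the method. A minimal standalone sketch of the same delete-via-prepared-statement pattern, using an in-memory H2 database and a made-up table/SQL in place of the project's ConnectionFactory and DELETE_UNUSED_DICT_CPE statement (assumes the H2 driver is on the classpath):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;
    import java.sql.Statement;

    public class DeleteUnusedDemo {
        public static void main(String[] args) throws SQLException {
            // In-memory database standing in for the CVE database.
            try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
                try (Statement st = conn.createStatement()) {
                    st.execute("CREATE TABLE cpeEntry (id INT, cpe VARCHAR(255))");
                    st.execute("INSERT INTO cpeEntry VALUES (1, 'cpe:/a:example:example:1.0')");
                    st.execute("INSERT INTO cpeEntry VALUES (2, 'not-a-cpe')");
                }
                // The DELETE itself runs through executeUpdate(), as in deleteUnusedCpe() above.
                try (PreparedStatement ps = conn.prepareStatement("DELETE FROM cpeEntry WHERE cpe NOT LIKE ?")) {
                    ps.setString(1, "cpe:%");
                    System.out.println("Removed " + ps.executeUpdate() + " unused entries");
                }
            }
        }
    }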
/** /**
* This method is only referenced in unused code and will likely break on MySQL if ever used due to the MERGE statement. * This method is only referenced in unused code and will likely break on
* MySQL if ever used due to the MERGE statement.
* *
* Merges CPE entries into the database. * Merges CPE entries into the database.
* *
@@ -779,10 +945,10 @@ public class CveDB {
* @param vendor the CPE vendor * @param vendor the CPE vendor
* @param product the CPE product * @param product the CPE product
*/ */
public void addCpe(String cpe, String vendor, String product) { public synchronized void addCpe(String cpe, String vendor, String product) {
PreparedStatement ps = null; PreparedStatement ps = null;
try { try {
ps = getConnection().prepareCall(statementBundle.getString("ADD_DICT_CPE")); ps = connection.prepareStatement(statementBundle.getString("ADD_DICT_CPE"));
ps.setString(1, cpe); ps.setString(1, cpe);
ps.setString(2, vendor); ps.setString(2, vendor);
ps.setString(3, product); ps.setString(3, product);

View File

@@ -17,13 +17,14 @@
*/ */
package org.owasp.dependencycheck.data.nvdcve; package org.owasp.dependencycheck.data.nvdcve;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Properties; import java.util.Properties;
import java.util.TreeMap; import java.util.TreeMap;
import javax.annotation.concurrent.ThreadSafe;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.owasp.dependencycheck.data.update.nvd.NvdCveInfo; import org.owasp.dependencycheck.data.update.nvd.NvdCveInfo;
import org.owasp.dependencycheck.data.update.exception.UpdateException; import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -31,9 +32,11 @@ import org.slf4j.LoggerFactory;
/** /**
* This is a wrapper around a set of properties that are stored in the database. * This is a wrapper around a set of properties that are stored in the database.
* This class is safe to be accessed from multiple threads in parallel.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@ThreadSafe
public class DatabaseProperties { public class DatabaseProperties {
/** /**
@@ -41,21 +44,24 @@ public class DatabaseProperties {
*/ */
private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseProperties.class); private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseProperties.class);
/** /**
* Modified key word, used as a key to store information about the modified file (i.e. the containing the last 8 days of * Modified key word, used as a key to store information about the modified
* updates).. * file (i.e., the file containing the last 8 days of updates).
*/ */
public static final String MODIFIED = "Modified"; public static final String MODIFIED = "Modified";
/** /**
* The properties file key for the last checked field - used to store the last check time of the Modified NVD CVE xml file. * The properties file key for the last checked field - used to store the
* last check time of the Modified NVD CVE xml file.
*/ */
public static final String LAST_CHECKED = "NVD CVE Checked"; public static final String LAST_CHECKED = "NVD CVE Checked";
/** /**
* The properties file key for the last updated field - used to store the last updated time of the Modified NVD CVE xml file. * The properties file key for the last updated field - used to store the
* last updated time of the Modified NVD CVE xml file.
*/ */
public static final String LAST_UPDATED = "NVD CVE Modified"; public static final String LAST_UPDATED = "NVD CVE Modified";
/** /**
* Stores the last updated time for each of the NVD CVE files. These timestamps should be updated if we process the modified * Stores the last updated time for each of the NVD CVE files. These
* file within 7 days of the last update. * timestamps should be updated if we process the modified file within 7
* days of the last update.
*/ */
public static final String LAST_UPDATED_BASE = "NVD CVE "; public static final String LAST_UPDATED_BASE = "NVD CVE ";
/** /**
@@ -70,11 +76,11 @@ public class DatabaseProperties {
/** /**
* A collection of properties about the data. * A collection of properties about the data.
*/ */
private Properties properties; private final Properties properties;
/** /**
* A reference to the database. * A reference to the database.
*/ */
private CveDB cveDB; private final CveDB cveDB;
/** /**
* Constructs a new data properties object. * Constructs a new data properties object.
@@ -83,13 +89,6 @@ public class DatabaseProperties {
*/ */
DatabaseProperties(CveDB cveDB) { DatabaseProperties(CveDB cveDB) {
this.cveDB = cveDB; this.cveDB = cveDB;
loadProperties();
}
/**
* Loads the properties from the database.
*/
private void loadProperties() {
this.properties = cveDB.getProperties(); this.properties = cveDB.getProperties();
} }
@@ -128,7 +127,8 @@ public class DatabaseProperties {
} }
/** /**
* Returns the property value for the given key. If the key is not contained in the underlying properties null is returned. * Returns the property value for the given key. If the key is not contained
* in the underlying properties null is returned.
* *
* @param key the property key * @param key the property key
* @return the value of the property * @return the value of the property
@@ -138,8 +138,8 @@ public class DatabaseProperties {
} }
/** /**
* Returns the property value for the given key. If the key is not contained in the underlying properties the default value is * Returns the property value for the given key. If the key is not contained
* returned. * in the underlying properties the default value is returned.
* *
* @param key the property key * @param key the property key
* @param defaultValue the default value * @param defaultValue the default value
@@ -159,22 +159,26 @@ public class DatabaseProperties {
} }
/** /**
* Returns a map of the meta data from the database properties. This primarily contains timestamps of when the NVD CVE * Returns a map of the meta data from the database properties. This
* information was last updated. * primarily contains timestamps of when the NVD CVE information was last
* updated.
* *
* @return a map of the database meta data * @return a map of the database meta data
*/ */
public Map<String, String> getMetaData() { public Map<String, String> getMetaData() {
final Map<String, String> map = new TreeMap<String, String>(); final Map<String, String> map = new TreeMap<>();
for (Entry<Object, Object> entry : properties.entrySet()) { for (Entry<Object, Object> entry : properties.entrySet()) {
final String key = (String) entry.getKey(); final String key = (String) entry.getKey();
if (!"version".equals(key)) { if (!"version".equals(key)) {
if (key.startsWith("NVD CVE ")) { if (key.startsWith("NVD CVE ")) {
try { try {
final long epoch = Long.parseLong((String) entry.getValue()); final long epoch = Long.parseLong((String) entry.getValue());
final Date date = new Date(epoch); final DateTime date = new DateTime(epoch);
final DateFormat format = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss"); final DateTimeFormatter format = DateTimeFormat.forPattern("dd/MM/yyyy HH:mm:ss");
final String formatted = format.format(date); final String formatted = format.print(date);
// final Date date = new Date(epoch);
// final DateFormat format = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
// final String formatted = format.format(date);
map.put(key, formatted); map.put(key, formatted);
} catch (Throwable ex) { //deliberately being broad in this catch clause } catch (Throwable ex) { //deliberately being broad in this catch clause
LOGGER.debug("Unable to parse timestamp from DB", ex); LOGGER.debug("Unable to parse timestamp from DB", ex);

View File

@@ -75,7 +75,7 @@ public final class DriverLoader {
*/ */
public static Driver load(String className, String pathToDriver) throws DriverLoadException { public static Driver load(String className, String pathToDriver) throws DriverLoadException {
final URLClassLoader parent = (URLClassLoader) ClassLoader.getSystemClassLoader(); final URLClassLoader parent = (URLClassLoader) ClassLoader.getSystemClassLoader();
final List<URL> urls = new ArrayList<URL>(); final List<URL> urls = new ArrayList<>();
final String[] paths = pathToDriver.split(File.pathSeparator); final String[] paths = pathToDriver.split(File.pathSeparator);
for (String path : paths) { for (String path : paths) {
final File file = new File(path); final File file = new File(path);
@@ -129,19 +129,7 @@ public final class DriverLoader {
//using the DriverShim to get around the fact that the DriverManager won't register a driver not in the base class path //using the DriverShim to get around the fact that the DriverManager won't register a driver not in the base class path
DriverManager.registerDriver(shim); DriverManager.registerDriver(shim);
return shim; return shim;
} catch (ClassNotFoundException ex) { } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | SQLException ex) {
final String msg = String.format("Unable to load database driver '%s'", className);
LOGGER.debug(msg, ex);
throw new DriverLoadException(msg, ex);
} catch (InstantiationException ex) {
final String msg = String.format("Unable to load database driver '%s'", className);
LOGGER.debug(msg, ex);
throw new DriverLoadException(msg, ex);
} catch (IllegalAccessException ex) {
final String msg = String.format("Unable to load database driver '%s'", className);
LOGGER.debug(msg, ex);
throw new DriverLoadException(msg, ex);
} catch (SQLException ex) {
final String msg = String.format("Unable to load database driver '%s'", className); final String msg = String.format("Unable to load database driver '%s'", className);
LOGGER.debug(msg, ex); LOGGER.debug(msg, ex);
throw new DriverLoadException(msg, ex); throw new DriverLoadException(msg, ex);
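The collapsed catch blocks above use the Java 7 multi-catch form, which removes the duplicated logging and rethrow bodies without changing behavior. A minimal sketch of the idiom with three sibling checked exceptions (the class name is arbitrary):

    public final class MultiCatchDemo {
        public static void main(String[] args) {
            final String className = args.length > 0 ? args[0] : "java.util.ArrayList";
            try {
                // Class.forName and newInstance throw three distinct checked exceptions;
                // one multi-catch handles them with a single body.
                final Object instance = Class.forName(className).newInstance();
                System.out.println("Loaded " + instance.getClass().getName());
            } catch (ClassNotFoundException | InstantiationException | IllegalAccessException ex) {
                System.out.println("Unable to load '" + className + "': " + ex);
            }
        }
    }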

View File

@@ -115,7 +115,6 @@ class DriverShim implements Driver {
* @throws SQLFeatureNotSupportedException thrown if the feature is not supported * @throws SQLFeatureNotSupportedException thrown if the feature is not supported
* @see java.sql.Driver#getParentLogger() * @see java.sql.Driver#getParentLogger()
*/ */
@Override
public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException { public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
//return driver.getParentLogger(); //return driver.getParentLogger();
Method m = null; Method m = null;
@@ -127,11 +126,7 @@ class DriverShim implements Driver {
if (m != null) { if (m != null) {
try { try {
return (java.util.logging.Logger) m.invoke(m); return (java.util.logging.Logger) m.invoke(m);
} catch (IllegalAccessException ex) { } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
LOGGER.trace("", ex);
} catch (IllegalArgumentException ex) {
LOGGER.trace("", ex);
} catch (InvocationTargetException ex) {
LOGGER.trace("", ex); LOGGER.trace("", ex);
} }
} }

View File

@@ -1,88 +0,0 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2015 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.data.update;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Jeremy Long
*/
public abstract class BaseUpdater {
/**
* Static logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(BaseUpdater.class);
/**
* Information about the timestamps and URLs for data that needs to be updated.
*/
private DatabaseProperties properties;
/**
* Reference to the Cve Database.
*/
private CveDB cveDB = null;
protected CveDB getCveDB() {
return cveDB;
}
protected DatabaseProperties getProperties() {
return properties;
}
/**
* Closes the CVE and CPE data stores.
*/
protected void closeDataStores() {
if (cveDB != null) {
try {
cveDB.close();
cveDB = null;
properties = null;
} catch (Throwable ignore) {
LOGGER.trace("Error closing the database", ignore);
}
}
}
/**
* Opens the data store.
*
* @throws UpdateException thrown if a data store cannot be opened
*/
protected final void openDataStores() throws UpdateException {
if (cveDB != null) {
return;
}
try {
cveDB = new CveDB();
cveDB.open();
properties = cveDB.getDatabaseProperties();
} catch (DatabaseException ex) {
closeDataStores();
LOGGER.debug("Database Exception opening databases", ex);
throw new UpdateException("Error updating the database, please see the log file for more details.");
}
}
}

View File

@@ -16,20 +16,14 @@
* Copyright (c) 2015 Jeremy Long. All Rights Reserved. * Copyright (c) 2015 Jeremy Long. All Rights Reserved.
*/ */
package org.owasp.dependencycheck.data.update; package org.owasp.dependencycheck.data.update;
/*
import java.io.File; import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.net.URL; import java.net.URL;
import java.util.List; import java.util.List;
import java.util.zip.GZIPInputStream;
import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.apache.commons.io.FileUtils;
import static org.owasp.dependencycheck.data.nvdcve.DatabaseProperties.LAST_CPE_UPDATE; import static org.owasp.dependencycheck.data.nvdcve.DatabaseProperties.LAST_CPE_UPDATE;
import org.owasp.dependencycheck.data.update.cpe.CPEHandler; import org.owasp.dependencycheck.data.update.cpe.CPEHandler;
import org.owasp.dependencycheck.data.update.cpe.Cpe; import org.owasp.dependencycheck.data.update.cpe.Cpe;
@@ -37,164 +31,132 @@ import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.utils.DateUtil; import org.owasp.dependencycheck.utils.DateUtil;
import org.owasp.dependencycheck.utils.DownloadFailedException; import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader; import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.ExtractionUtil;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.XmlUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException; import org.xml.sax.SAXException;
*/
/** /**
* *
* This class is currently unused and if enabled will likely not work on MySQL as the MERGE statement is used. * This class is currently unused and if enabled will likely not work on MySQL
* as the MERGE statement is used.
* *
* The CpeUpdater is designed to download the CPE data file from NIST and import the data into the database. However, as this * The CpeUpdater is designed to download the CPE data file from NIST and import
* currently adds no beneficial data, compared to what is in the CPE data contained in the CVE data files, this class is not * the data into the database. However, as this currently adds no beneficial
* currently used. The code is being kept as a future update may utilize more data from the CPE xml files. * data, compared to what is in the CPE data contained in the CVE data files,
* this class is not currently used. The code is being kept as a future update
* may utilize more data from the CPE XML files.
* *
* @deprecated the CPE updater is not currently used.
* @author Jeremy Long * @author Jeremy Long
*/ */
public class CpeUpdater extends BaseUpdater implements CachedWebDataSource { @Deprecated
public class CpeUpdater { //extends BaseUpdater implements CachedWebDataSource {
/** //
* Static logger. // /**
*/ // * Static logger.
private static final Logger LOGGER = LoggerFactory.getLogger(CpeUpdater.class); // */
// private static final Logger LOGGER = LoggerFactory.getLogger(CpeUpdater.class);
@Override //
public void update() throws UpdateException { // @Override
try { // public void update() throws UpdateException {
openDataStores(); // /*
if (updateNeeded()) { // //the following could be used if this were ever used.
LOGGER.info("Updating the Common Platform Enumeration (CPE)"); // try {
final File xml = downloadCpe(); // if (!Settings.getBoolean(Settings.KEYS.UPDATE_NVDCVE_ENABLED, true)) {
final List<Cpe> cpes = processXML(xml); // return;
getCveDB().deleteUnusedCpe(); // }
for (Cpe cpe : cpes) { // } catch (InvalidSettingException ex) {
getCveDB().addCpe(cpe.getValue(), cpe.getVendor(), cpe.getProduct()); // LOGGER.trace("invalid setting UPDATE_NVDCVE_ENABLED", ex);
} // }
final long now = System.currentTimeMillis(); // */
getProperties().save(LAST_CPE_UPDATE, Long.toString(now)); //
LOGGER.info("CPE update complete"); // try {
} // openDataStores();
} finally { // if (updateNeeded()) {
closeDataStores(); // LOGGER.info("Updating the Common Platform Enumeration (CPE)");
} // final File xml = downloadCpe();
} // final List<Cpe> cpes = processXML(xml);
// getCveDB().deleteUnusedCpe();
/** // for (Cpe cpe : cpes) {
* Downloads the CPE XML file. // getCveDB().addCpe(cpe.getValue(), cpe.getVendor(), cpe.getProduct());
* // }
* @return the file reference to the CPE.xml file // final long now = System.currentTimeMillis();
* @throws UpdateException thrown if there is an issue downloading the XML file // getProperties().save(LAST_CPE_UPDATE, Long.toString(now));
*/ // LOGGER.info("CPE update complete");
private File downloadCpe() throws UpdateException { // }
File xml; // } finally {
final URL url; // closeDataStores();
try { // }
url = new URL(Settings.getString(Settings.KEYS.CPE_URL)); // }
xml = File.createTempFile("cpe", ".xml", Settings.getTempDirectory()); //
Downloader.fetchFile(url, xml); // /**
if (url.toExternalForm().endsWith(".xml.gz")) { // * Downloads the CPE XML file.
extractGzip(xml); // *
} // * @return the file reference to the CPE.xml file
// * @throws UpdateException thrown if there is an issue downloading the XML
} catch (MalformedURLException ex) { // * file
throw new UpdateException("Invalid CPE URL", ex); // */
} catch (DownloadFailedException ex) { // private File downloadCpe() throws UpdateException {
throw new UpdateException("Unable to download CPE XML file", ex); // File xml;
} catch (IOException ex) { // final URL url;
throw new UpdateException("Unable to create temporary file to download CPE", ex); // try {
} // url = new URL(Settings.getString(Settings.KEYS.CPE_URL));
return xml; // xml = File.createTempFile("cpe", ".xml", Settings.getTempDirectory());
} // Downloader.fetchFile(url, xml);
// if (url.toExternalForm().endsWith(".xml.gz")) {
/** // ExtractionUtil.extractGzip(xml);
* Parses the CPE XML file to return a list of CPE entries. // }
* //
* @param xml the CPE data file // } catch (MalformedURLException ex) {
* @return the list of CPE entries // throw new UpdateException("Invalid CPE URL", ex);
* @throws UpdateException thrown if there is an issue with parsing the XML file // } catch (DownloadFailedException ex) {
*/ // throw new UpdateException("Unable to download CPE XML file", ex);
private List<Cpe> processXML(final File xml) throws UpdateException { // } catch (IOException ex) {
try { // throw new UpdateException("Unable to create temporary file to download CPE", ex);
final SAXParserFactory factory = SAXParserFactory.newInstance(); // }
final SAXParser saxParser = factory.newSAXParser(); // return xml;
final CPEHandler handler = new CPEHandler(); // }
saxParser.parse(xml, handler); //
return handler.getData(); // /**
} catch (ParserConfigurationException ex) { // * Parses the CPE XML file to return a list of CPE entries.
throw new UpdateException("Unable to parse CPE XML file due to SAX Parser Issue", ex); // *
} catch (SAXException ex) { // * @param xml the CPE data file
throw new UpdateException("Unable to parse CPE XML file due to SAX Parser Exception", ex); // * @return the list of CPE entries
} catch (IOException ex) { // * @throws UpdateException thrown if there is an issue with parsing the XML
throw new UpdateException("Unable to parse CPE XML file due to IO Failure", ex); // * file
} // */
} // private List<Cpe> processXML(final File xml) throws UpdateException {
// try {
/** // final SAXParser saxParser = XmlUtils.buildSecureSaxParser();
* Checks to find the last time the CPE data was refreshed and if it needs to be updated. // final CPEHandler handler = new CPEHandler();
* // saxParser.parse(xml, handler);
* @return true if the CPE data should be refreshed // return handler.getData();
*/ // } catch (ParserConfigurationException ex) {
private boolean updateNeeded() { // throw new UpdateException("Unable to parse CPE XML file due to SAX Parser Issue", ex);
final long now = System.currentTimeMillis(); // } catch (SAXException ex) {
final int days = Settings.getInt(Settings.KEYS.CPE_MODIFIED_VALID_FOR_DAYS, 30); // throw new UpdateException("Unable to parse CPE XML file due to SAX Parser Exception", ex);
long timestamp = 0; // } catch (IOException ex) {
final String ts = getProperties().getProperty(LAST_CPE_UPDATE); // throw new UpdateException("Unable to parse CPE XML file due to IO Failure", ex);
if (ts != null && ts.matches("^[0-9]+$")) { // }
timestamp = Long.parseLong(ts); // }
} //
return !DateUtil.withinDateRange(timestamp, now, days); // /**
} // * Checks to find the last time the CPE data was refreshed and if it needs
// * to be updated.
/** // *
* Extracts the file contained in a gzip archive. The extracted file is placed in the exact same path as the file specified. // * @return true if the CPE data should be refreshed
* // */
* @param file the archive file // private boolean updateNeeded() {
* @throws FileNotFoundException thrown if the file does not exist // final long now = System.currentTimeMillis();
* @throws IOException thrown if there is an error extracting the file. // final int days = Settings.getInt(Settings.KEYS.CPE_MODIFIED_VALID_FOR_DAYS, 30);
*/ // long timestamp = 0;
private void extractGzip(File file) throws FileNotFoundException, IOException { // final String ts = getProperties().getProperty(LAST_CPE_UPDATE);
//TODO - move this to a util class as it is duplicative of (copy of) code in the DownloadTask // if (ts != null && ts.matches("^[0-9]+$")) {
final String originalPath = file.getPath(); // timestamp = Long.parseLong(ts);
final File gzip = new File(originalPath + ".gz"); // }
if (gzip.isFile() && !gzip.delete()) { // return !DateUtil.withinDateRange(timestamp, now, days);
gzip.deleteOnExit(); // }
}
if (!file.renameTo(gzip)) {
throw new IOException("Unable to rename '" + file.getPath() + "'");
}
final File newfile = new File(originalPath);
final byte[] buffer = new byte[4096];
GZIPInputStream cin = null;
FileOutputStream out = null;
try {
cin = new GZIPInputStream(new FileInputStream(gzip));
out = new FileOutputStream(newfile);
int len;
while ((len = cin.read(buffer)) > 0) {
out.write(buffer, 0, len);
}
} finally {
if (cin != null) {
try {
cin.close();
} catch (IOException ex) {
LOGGER.trace("ignore", ex);
}
}
if (out != null) {
try {
out.close();
} catch (IOException ex) {
LOGGER.trace("ignore", ex);
}
}
if (gzip.isFile()) {
FileUtils.deleteQuietly(gzip);
}
}
}
} }
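The removed extractGzip helper (delegated to ExtractionUtil in the commented-out rewrite above) is the standard GZIPInputStream copy loop. A standalone sketch with hypothetical file names, using try-with-resources in place of the manual close calls:

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.util.zip.GZIPInputStream;

    public class GzipExtractDemo {

        /** Decompresses a .gz file into the given target file. */
        public static void extract(File gzip, File target) throws IOException {
            final byte[] buffer = new byte[4096];
            try (GZIPInputStream in = new GZIPInputStream(new FileInputStream(gzip));
                    FileOutputStream out = new FileOutputStream(target)) {
                int len;
                while ((len = in.read(buffer)) > 0) {
                    out.write(buffer, 0, len);
                }
            }
        }

        public static void main(String[] args) throws IOException {
            extract(new File("cpe-dictionary.xml.gz"), new File("cpe-dictionary.xml"));
        }
    }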

View File

@@ -28,6 +28,7 @@ import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.data.update.exception.UpdateException; import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.utils.DateUtil; import org.owasp.dependencycheck.utils.DateUtil;
import org.owasp.dependencycheck.utils.DependencyVersion; import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.URLConnectionFactory; import org.owasp.dependencycheck.utils.URLConnectionFactory;
import org.owasp.dependencycheck.utils.URLConnectionFailureException; import org.owasp.dependencycheck.utils.URLConnectionFailureException;
@@ -35,6 +36,10 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
/** /**
* Checks the gh-pages dependency-check site to determine the current released
* version number. If the released version number is greater than the running
* version number a warning is printed recommending that an upgrade be
* performed.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@@ -53,17 +58,14 @@ public class EngineVersionCheck implements CachedWebDataSource {
*/ */
public static final String CURRENT_ENGINE_RELEASE = "CurrentEngineRelease"; public static final String CURRENT_ENGINE_RELEASE = "CurrentEngineRelease";
/** /**
* Reference to the Cve Database. * The version retrieved from the database properties or web to check
*/ * against.
private CveDB cveDB = null;
/**
* The version retrieved from the database properties or web to check against.
*/ */
private String updateToVersion; private String updateToVersion;
/** /**
* Getter for updateToVersion - only used for testing. Represents the version retrieved from the database. * Getter for updateToVersion - only used for testing. Represents the
* version retrieved from the database.
* *
* @return the version to test * @return the version to test
*/ */
@@ -72,7 +74,8 @@ public class EngineVersionCheck implements CachedWebDataSource {
} }
/** /**
* Setter for updateToVersion - only used for testing. Represents the version retrieved from the database. * Setter for updateToVersion - only used for testing. Represents the
* version retrieved from the database.
* *
* @param version the version to test * @param version the version to test
*/ */
@@ -80,50 +83,69 @@ public class EngineVersionCheck implements CachedWebDataSource {
updateToVersion = version; updateToVersion = version;
} }
/**
* Downloads the current released version number and compares it to the
* running engine's version number. If the released version number is newer
* a warning is printed recommending an upgrade.
*
* @throws UpdateException thrown if the local database properties could not
* be updated
*/
@Override @Override
public void update() throws UpdateException { public void update() throws UpdateException {
try { try (CveDB db = CveDB.getInstance()) {
openDatabase(); final boolean autoupdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE, true);
LOGGER.debug("Begin Engine Version Check"); final boolean enabled = Settings.getBoolean(Settings.KEYS.UPDATE_VERSION_CHECK_ENABLED, true);
final DatabaseProperties properties = cveDB.getDatabaseProperties(); final String original = Settings.getString(Settings.KEYS.CVE_ORIGINAL_MODIFIED_20_URL);
final long lastChecked = Long.parseLong(properties.getProperty(ENGINE_VERSION_CHECKED_ON, "0")); final String current = Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL);
final long now = System.currentTimeMillis(); /*
updateToVersion = properties.getProperty(CURRENT_ENGINE_RELEASE, ""); * Only update if auto-update is enabled, the engine check is
final String currentVersion = Settings.getString(Settings.KEYS.APPLICATION_VERSION, "0.0.0"); * enabled, and the NVD CVE URLs have not been modified (i.e. the
LOGGER.debug("Last checked: {}", lastChecked); * user has not configured them to point to an internal source).
LOGGER.debug("Now: {}", now); */
LOGGER.debug("Current version: {}", currentVersion); if (enabled && autoupdate && original != null && original.equals(current)) {
final boolean updateNeeded = shouldUpdate(lastChecked, now, properties, currentVersion); LOGGER.debug("Begin Engine Version Check");
if (updateNeeded) {
LOGGER.warn("A new version of dependency-check is available. Consider updating to version {}.", final DatabaseProperties properties = db.getDatabaseProperties();
updateToVersion);
final long lastChecked = Long.parseLong(properties.getProperty(ENGINE_VERSION_CHECKED_ON, "0"));
final long now = System.currentTimeMillis();
updateToVersion = properties.getProperty(CURRENT_ENGINE_RELEASE, "");
final String currentVersion = Settings.getString(Settings.KEYS.APPLICATION_VERSION, "0.0.0");
LOGGER.debug("Last checked: {}", lastChecked);
LOGGER.debug("Now: {}", now);
LOGGER.debug("Current version: {}", currentVersion);
final boolean updateNeeded = shouldUpdate(lastChecked, now, properties, currentVersion);
if (updateNeeded) {
LOGGER.warn("A new version of dependency-check is available. Consider updating to version {}.",
updateToVersion);
}
} }
} catch (DatabaseException ex) { } catch (DatabaseException ex) {
LOGGER.debug("Database Exception opening databases to retrieve properties", ex); LOGGER.debug("Database Exception opening databases to retrieve properties", ex);
throw new UpdateException("Error occured updating database properties."); throw new UpdateException("Error occurred updating database properties.");
} finally { } catch (InvalidSettingException ex) {
closeDatabase(); LOGGER.debug("Unable to determine if autoupdate is enabled", ex);
} }
} }
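update() above pulls the last known release from the database properties and warns only when it is newer than the running engine; the decision ultimately rests on DependencyVersion.compareTo. A simplified, numeric-only stand-in for that comparison (the real class also handles suffixes and mixed formats):

    public class VersionCheckDemo {
        // Simplified stand-in for dependency-check's DependencyVersion comparison:
        // split on dots and compare numerically, segment by segment.
        static int compare(String a, String b) {
            final String[] pa = a.split("\\.");
            final String[] pb = b.split("\\.");
            final int len = Math.max(pa.length, pb.length);
            for (int i = 0; i < len; i++) {
                final int va = i < pa.length ? Integer.parseInt(pa[i]) : 0;
                final int vb = i < pb.length ? Integer.parseInt(pb[i]) : 0;
                if (va != vb) {
                    return Integer.compare(va, vb);
                }
            }
            return 0;
        }

        public static void main(String[] args) {
            final String running = "1.4.5";
            final String released = "1.4.6";
            if (compare(running, released) < 0) {
                System.out.println("A new version of dependency-check is available: " + released);
            }
        }
    }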
/** /**
* Determines if a new version of the dependency-check engine has been released. * Determines if a new version of the dependency-check engine has been
* released.
* *
* @param lastChecked the epoch time of the last version check * @param lastChecked the epoch time of the last version check
* @param now the current epoch time * @param now the current epoch time
* @param properties the database properties object * @param properties the database properties object
* @param currentVersion the current version of dependency-check * @param currentVersion the current version of dependency-check
* @return <code>true</code> if a newer version of the database has been released; otherwise <code>false</code> * @return <code>true</code> if a newer version of the database has been
* @throws UpdateException thrown if there is an error connecting to the github documentation site or accessing the local * released; otherwise <code>false</code>
* database. * @throws UpdateException thrown if there is an error connecting to the
* github documentation site or accessing the local database.
*/ */
protected boolean shouldUpdate(final long lastChecked, final long now, final DatabaseProperties properties, protected boolean shouldUpdate(final long lastChecked, final long now, final DatabaseProperties properties,
String currentVersion) throws UpdateException { String currentVersion) throws UpdateException {
//check every 30 days if we know there is an update, otherwise check every 7 days //check every 30 days if we know there is an update, otherwise check every 7 days
int checkRange = 30; final int checkRange = 30;
if (updateToVersion.isEmpty()) {
checkRange = 7;
}
if (!DateUtil.withinDateRange(lastChecked, now, checkRange)) { if (!DateUtil.withinDateRange(lastChecked, now, checkRange)) {
LOGGER.debug("Checking web for new version."); LOGGER.debug("Checking web for new version.");
final String currentRelease = getCurrentReleaseVersion(); final String currentRelease = getCurrentReleaseVersion();
@@ -133,14 +155,16 @@ public class EngineVersionCheck implements CachedWebDataSource {
updateToVersion = v.toString(); updateToVersion = v.toString();
if (!currentRelease.equals(updateToVersion)) { if (!currentRelease.equals(updateToVersion)) {
properties.save(CURRENT_ENGINE_RELEASE, updateToVersion); properties.save(CURRENT_ENGINE_RELEASE, updateToVersion);
} else {
properties.save(CURRENT_ENGINE_RELEASE, "");
} }
properties.save(ENGINE_VERSION_CHECKED_ON, Long.toString(now)); properties.save(ENGINE_VERSION_CHECKED_ON, Long.toString(now));
} }
} }
LOGGER.debug("Current Release: {}", updateToVersion); LOGGER.debug("Current Release: {}", updateToVersion);
} }
if (updateToVersion == null) {
LOGGER.debug("Unable to obtain current release");
return false;
}
final DependencyVersion running = new DependencyVersion(currentVersion); final DependencyVersion running = new DependencyVersion(currentVersion);
final DependencyVersion released = new DependencyVersion(updateToVersion); final DependencyVersion released = new DependencyVersion(updateToVersion);
if (running.compareTo(released) < 0) { if (running.compareTo(released) < 0) {
@@ -152,34 +176,8 @@ public class EngineVersionCheck implements CachedWebDataSource {
} }
/** /**
* Opens the CVE and CPE data stores. * Retrieves the current released version number from the github
* * documentation site.
* @throws DatabaseException thrown if a data store cannot be opened
*/
protected final void openDatabase() throws DatabaseException {
if (cveDB != null) {
return;
}
cveDB = new CveDB();
cveDB.open();
}
/**
* Closes the CVE and CPE data stores.
*/
protected void closeDatabase() {
if (cveDB != null) {
try {
cveDB.close();
cveDB = null;
} catch (Throwable ignore) {
LOGGER.trace("Error closing the cveDB", ignore);
}
}
}
/**
* Retrieves the current released version number from the github documentation site.
* *
* @return the current released version number * @return the current released version number
*/ */
@@ -198,11 +196,11 @@ public class EngineVersionCheck implements CachedWebDataSource {
return releaseVersion.trim(); return releaseVersion.trim();
} }
} catch (MalformedURLException ex) { } catch (MalformedURLException ex) {
LOGGER.debug("unable to retrieve current release version of dependency-check", ex); LOGGER.debug("Unable to retrieve current release version of dependency-check - malformed url?");
} catch (URLConnectionFailureException ex) { } catch (URLConnectionFailureException ex) {
LOGGER.debug("unable to retrieve current release version of dependency-check", ex); LOGGER.debug("Unable to retrieve current release version of dependency-check - connection failed");
} catch (IOException ex) { } catch (IOException ex) {
LOGGER.debug("unable to retrieve current release version of dependency-check", ex); LOGGER.debug("Unable to retrieve current release version of dependency-check - i/o exception");
} finally { } finally {
if (conn != null) { if (conn != null) {
conn.disconnect(); conn.disconnect();
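getCurrentReleaseVersion() above downloads a small text resource from the project's documentation site and returns the version string it contains, logging quietly on any failure. A self-contained sketch of that fetch using HttpURLConnection; the URL and the first-line-is-the-version assumption are illustrative, not necessarily the property value the project actually configures:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class CurrentReleaseDemo {
        public static void main(String[] args) {
            HttpURLConnection conn = null;
            try {
                // Hypothetical location of a bare version string.
                final URL url = new URL("https://example.org/dependency-check/current.txt");
                conn = (HttpURLConnection) url.openConnection();
                try (BufferedReader in = new BufferedReader(
                        new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
                    final String releaseVersion = in.readLine();
                    System.out.println(releaseVersion == null ? "no version returned" : releaseVersion.trim());
                }
            } catch (IOException ex) {
                System.out.println("Unable to retrieve current release version: " + ex.getMessage());
            } finally {
                if (conn != null) {
                    conn.disconnect();
                }
            }
        }
    }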

View File

@@ -17,14 +17,28 @@
*/ */
package org.owasp.dependencycheck.data.update; package org.owasp.dependencycheck.data.update;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.util.Calendar; import java.util.Calendar;
import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Map;
import java.util.Set; import java.util.Set;
import java.net.URL;
import java.nio.channels.FileLock;
import java.util.Date;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import java.util.concurrent.Future; import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties; import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import static org.owasp.dependencycheck.data.nvdcve.DatabaseProperties.MODIFIED; import static org.owasp.dependencycheck.data.nvdcve.DatabaseProperties.MODIFIED;
import org.owasp.dependencycheck.data.update.exception.InvalidDataException; import org.owasp.dependencycheck.data.update.exception.InvalidDataException;
@@ -34,6 +48,7 @@ import org.owasp.dependencycheck.data.update.nvd.NvdCveInfo;
import org.owasp.dependencycheck.data.update.nvd.ProcessTask; import org.owasp.dependencycheck.data.update.nvd.ProcessTask;
import org.owasp.dependencycheck.data.update.nvd.UpdateableNvdCve; import org.owasp.dependencycheck.data.update.nvd.UpdateableNvdCve;
import org.owasp.dependencycheck.utils.DateUtil; import org.owasp.dependencycheck.utils.DateUtil;
import org.owasp.dependencycheck.utils.Downloader;
import org.owasp.dependencycheck.utils.DownloadFailedException; import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.InvalidSettingException; import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
@@ -45,37 +60,99 @@ import org.slf4j.LoggerFactory;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource { public class NvdCveUpdater implements CachedWebDataSource {
/** /**
* The logger * The logger.
*/ */
private static final Logger LOGGER = LoggerFactory.getLogger(NvdCveUpdater.class); private static final Logger LOGGER = LoggerFactory.getLogger(NvdCveUpdater.class);
/** /**
* The max thread pool size to use when downloading files. * The thread pool size to use for CPU-intense tasks.
*/ */
public static final int MAX_THREAD_POOL_SIZE = Settings.getInt(Settings.KEYS.MAX_DOWNLOAD_THREAD_POOL_SIZE, 3); private static final int PROCESSING_THREAD_POOL_SIZE = Runtime.getRuntime().availableProcessors();
/**
* The thread pool size to use when downloading files.
*/
private static final int DOWNLOAD_THREAD_POOL_SIZE = Math.round(1.5f * Runtime.getRuntime().availableProcessors());
/**
* ExecutorService for CPU-intense processing tasks.
*/
private ExecutorService processingExecutorService = null;
/**
* ExecutorService for tasks that involve blocking activities and are not
* very CPU-intense, e.g. downloading files.
*/
private ExecutorService downloadExecutorService = null;
/** /**
* <p> * Reference to the DAO.
* Downloads the latest NVD CVE XML file from the web and imports it into the current CVE Database.</p> */
private CveDB cveDb = null;
/**
* The properties obtained from the database.
*/
private DatabaseProperties dbProperties = null;
/**
* Downloads the latest NVD CVE XML file from the web and imports it into
* the current CVE Database. A lock on a file is obtained in an attempt to
* prevent more than one thread/JVM from updating the database at the same
* time. This method may sleep up to 5 minutes.
* *
* @throws UpdateException is thrown if there is an error updating the database * @throws UpdateException is thrown if there is an error updating the
* database
*/ */
@Override @Override
public void update() throws UpdateException { public synchronized void update() throws UpdateException {
if (isUpdateConfiguredFalse()) {
return;
}
FileLock lock = null;
RandomAccessFile ulFile = null;
File lockFile = null;
try { try {
openDataStores(); if (ConnectionFactory.isH2Connection()) {
final File dir = Settings.getDataDirectory();
lockFile = new File(dir, "odc.update.lock");
if (lockFile.isFile() && getFileAge(lockFile) > 5 && !lockFile.delete()) {
LOGGER.warn("An old db update lock file was found but the system was unable to delete the file. Consider manually deleting " + lockFile.getAbsolutePath());
}
int ctr = 0;
do {
try {
if (!lockFile.exists() && lockFile.createNewFile()) {
ulFile = new RandomAccessFile(lockFile, "rw");
lock = ulFile.getChannel().lock();
}
} catch (IOException ex) {
LOGGER.trace("Expected error as another thread has likely locked the file", ex);
}
if (lock == null || !lock.isValid()) {
try {
LOGGER.debug(String.format("Sleeping thread %s for 5 seconds because we could not obtain the update lock.", Thread.currentThread().getName()));
Thread.sleep(5000);
} catch (InterruptedException ex) {
LOGGER.trace("ignorable error, sleep was interrupted.", ex);
}
}
} while (++ctr < 60 && (lock == null || !lock.isValid()));
if (lock == null || !lock.isValid()) {
throw new UpdateException("Unable to obtain the update lock, skipping the database update. Skippinig the database update.");
}
}
initializeExecutorServices();
cveDb = CveDB.getInstance();
dbProperties = cveDb.getDatabaseProperties();
if (checkUpdate()) { if (checkUpdate()) {
final UpdateableNvdCve updateable = getUpdatesNeeded(); final UpdateableNvdCve updateable = getUpdatesNeeded();
if (updateable.isUpdateNeeded()) { if (updateable.isUpdateNeeded()) {
performUpdate(updateable); performUpdate(updateable);
} }
dbProperties.save(DatabaseProperties.LAST_CHECKED, Long.toString(System.currentTimeMillis()));
} }
} catch (MalformedURLException ex) { } catch (MalformedURLException ex) {
LOGGER.warn( throw new UpdateException("NVD CVE properties files contain an invalid URL, unable to update the data to use the most current data.", ex);
"NVD CVE properties files contain an invalid URL, unable to update the data to use the most current data.");
LOGGER.debug("", ex);
} catch (DownloadFailedException ex) { } catch (DownloadFailedException ex) {
LOGGER.warn( LOGGER.warn(
"Unable to download the NVD CVE data; the results may not include the most recent CPE/CVEs from the NVD."); "Unable to download the NVD CVE data; the results may not include the most recent CPE/CVEs from the NVD.");
@@ -83,33 +160,114 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
LOGGER.info( LOGGER.info(
"If you are behind a proxy you may need to configure dependency-check to use the proxy."); "If you are behind a proxy you may need to configure dependency-check to use the proxy.");
} }
LOGGER.debug("", ex); throw new UpdateException("Unable to download the NVD CVE data.", ex);
} catch (DatabaseException ex) {
throw new UpdateException("Database Exception, unable to update the data to use the most current data.", ex);
} catch (IOException ex) {
throw new UpdateException("Database Exception", ex);
} finally { } finally {
closeDataStores(); shutdownExecutorServices();
cveDb.close();
if (lock != null) {
try {
lock.release();
} catch (IOException ex) {
LOGGER.trace("Ignorable exception", ex);
}
}
if (ulFile != null) {
try {
ulFile.close();
} catch (IOException ex) {
LOGGER.trace("Ignorable exception", ex);
}
}
if (lockFile != null) {
lockFile.delete();
}
} }
} }
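The new update() serializes H2 database updates across threads and JVMs by holding a java.nio FileLock for the duration of the update, retrying with short sleeps if another process already holds it. A minimal standalone sketch of that acquire/retry/release pattern, with a hypothetical lock-file name and shorter limits:

    import java.io.File;
    import java.io.IOException;
    import java.io.RandomAccessFile;
    import java.nio.channels.FileLock;

    public class UpdateLockDemo {
        public static void main(String[] args) throws IOException, InterruptedException {
            // Hypothetical lock file; the updater above uses "odc.update.lock" in the data directory.
            final File lockFile = new File("update.lock.demo");
            try (RandomAccessFile raf = new RandomAccessFile(lockFile, "rw")) {
                FileLock lock = null;
                int attempts = 0;
                // Retry a bounded number of times, sleeping between attempts, like the loop above.
                while ((lock == null || !lock.isValid()) && attempts++ < 5) {
                    lock = raf.getChannel().tryLock();
                    if (lock == null) {
                        Thread.sleep(1000);
                    }
                }
                if (lock == null || !lock.isValid()) {
                    System.out.println("Unable to obtain the update lock; skipping the update.");
                    return;
                }
                try {
                    System.out.println("Lock held; only this process updates the database now.");
                } finally {
                    lock.release();
                }
            } finally {
                // Best-effort cleanup of the demo lock file.
                lockFile.delete();
            }
        }
    }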
/** /**
* Checks if the NVD CVE XML files were last checked recently. As an optimization, we can avoid repetitive checks against the * Checks if the system is configured NOT to update.
* NVD. Setting CVE_CHECK_VALID_FOR_HOURS determines the duration since last check before checking again. A database property
* stores the timestamp of the last check.
* *
* @return true to proceed with the check, or false to skip. * @return false if the system is configured to perform an update; otherwise
* @throws UpdateException thrown when there is an issue checking for updates. * true
*/
private boolean isUpdateConfiguredFalse() {
try {
if (!Settings.getBoolean(Settings.KEYS.UPDATE_NVDCVE_ENABLED, true)) {
return true;
}
} catch (InvalidSettingException ex) {
LOGGER.trace("invalid setting UPDATE_NVDCVE_ENABLED", ex);
}
boolean autoUpdate = true;
try {
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
} catch (InvalidSettingException ex) {
LOGGER.debug("Invalid setting for auto-update; using true.");
}
return !autoUpdate;
}
/**
* Returns the age of the file in minutes.
*
* @param file the file to calculate the age
* @return the age of the file
*/
private long getFileAge(File file) {
final Date d = new Date();
final long modified = file.lastModified();
return (d.getTime() - modified) / 1000 / 60;
}
/**
* Initialize the executor services for download and processing of the NVD
* CVE XML data.
*/
protected void initializeExecutorServices() {
processingExecutorService = Executors.newFixedThreadPool(PROCESSING_THREAD_POOL_SIZE);
downloadExecutorService = Executors.newFixedThreadPool(DOWNLOAD_THREAD_POOL_SIZE);
LOGGER.debug("#download threads: {}", DOWNLOAD_THREAD_POOL_SIZE);
LOGGER.debug("#processing threads: {}", PROCESSING_THREAD_POOL_SIZE);
}
/**
* Shutdown and cleanup of resources used by the executor services.
*/
private void shutdownExecutorServices() {
if (processingExecutorService != null) {
processingExecutorService.shutdownNow();
}
if (downloadExecutorService != null) {
downloadExecutorService.shutdownNow();
}
}
/**
* Checks if the NVD CVE XML files were last checked recently. As an
* optimization, we can avoid repetitive checks against the NVD. Setting
* CVE_CHECK_VALID_FOR_HOURS determines the duration since last check before
* checking again. A database property stores the timestamp of the last
* check.
*
* @return true to proceed with the check, or false to skip
* @throws UpdateException thrown when there is an issue checking for
* updates
*/ */
private boolean checkUpdate() throws UpdateException { private boolean checkUpdate() throws UpdateException {
boolean proceed = true; boolean proceed = true;
// If the valid setting has not been specified, then we proceed to check... // If the valid setting has not been specified, then we proceed to check...
final int validForHours = Settings.getInt(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, 0); final int validForHours = Settings.getInt(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, 0);
if (0 < validForHours) { if (dataExists() && 0 < validForHours) {
// ms Valid = valid (hours) x 60 min/hour x 60 sec/min x 1000 ms/sec // ms Valid = valid (hours) x 60 min/hour x 60 sec/min x 1000 ms/sec
final long msValid = validForHours * 60L * 60L * 1000L; final long msValid = validForHours * 60L * 60L * 1000L;
final long lastChecked = Long.parseLong(getProperties().getProperty(DatabaseProperties.LAST_CHECKED, "0")); final long lastChecked = Long.parseLong(dbProperties.getProperty(DatabaseProperties.LAST_CHECKED, "0"));
final long now = System.currentTimeMillis(); final long now = System.currentTimeMillis();
proceed = (now - lastChecked) > msValid; proceed = (now - lastChecked) > msValid;
if (proceed) { if (!proceed) {
getProperties().save(DatabaseProperties.LAST_CHECKED, Long.toString(now));
} else {
LOGGER.info("Skipping NVD check since last check was within {} hours.", validForHours); LOGGER.info("Skipping NVD check since last check was within {} hours.", validForHours);
LOGGER.debug("Last NVD was at {}, and now {} is within {} ms.", LOGGER.debug("Last NVD was at {}, and now {} is within {} ms.",
lastChecked, now, msValid); lastChecked, now, msValid);
@@ -119,114 +277,110 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
} }
/** /**
* Downloads the latest NVD CVE XML file from the web and imports it into the current CVE Database. * Checks the CVE Index to ensure data exists and analysis can continue.
* *
* @param updateable a collection of NVD CVE data file references that need to be downloaded and processed to update the * @return true if the database contains data
* database
* @throws UpdateException is thrown if there is an error updating the database
*/ */
public void performUpdate(UpdateableNvdCve updateable) throws UpdateException { private boolean dataExists() {
int maxUpdates = 0; try (CveDB cve = CveDB.getInstance()) {
try { return cve.dataExists();
for (NvdCveInfo cve : updateable) { } catch (DatabaseException ex) {
if (cve.getNeedsUpdate()) { return false;
maxUpdates += 1;
}
}
if (maxUpdates <= 0) {
return;
}
if (maxUpdates > 3) {
LOGGER.info(
"NVD CVE requires several updates; this could take a couple of minutes.");
}
if (maxUpdates > 0) {
openDataStores();
}
final int poolSize = (MAX_THREAD_POOL_SIZE < maxUpdates) ? MAX_THREAD_POOL_SIZE : maxUpdates;
final ExecutorService downloadExecutors = Executors.newFixedThreadPool(poolSize);
final ExecutorService processExecutor = Executors.newSingleThreadExecutor();
final Set<Future<Future<ProcessTask>>> downloadFutures = new HashSet<Future<Future<ProcessTask>>>(maxUpdates);
for (NvdCveInfo cve : updateable) {
if (cve.getNeedsUpdate()) {
final DownloadTask call = new DownloadTask(cve, processExecutor, getCveDB(), Settings.getInstance());
downloadFutures.add(downloadExecutors.submit(call));
}
}
downloadExecutors.shutdown();
//next, move the future future processTasks to just future processTasks
final Set<Future<ProcessTask>> processFutures = new HashSet<Future<ProcessTask>>(maxUpdates);
for (Future<Future<ProcessTask>> future : downloadFutures) {
Future<ProcessTask> task = null;
try {
task = future.get();
} catch (InterruptedException ex) {
downloadExecutors.shutdownNow();
processExecutor.shutdownNow();
LOGGER.debug("Thread was interrupted during download", ex);
throw new UpdateException("The download was interrupted", ex);
} catch (ExecutionException ex) {
downloadExecutors.shutdownNow();
processExecutor.shutdownNow();
LOGGER.debug("Thread was interrupted during download execution", ex);
throw new UpdateException("The execution of the download was interrupted", ex);
}
if (task == null) {
downloadExecutors.shutdownNow();
processExecutor.shutdownNow();
LOGGER.debug("Thread was interrupted during download");
throw new UpdateException("The download was interrupted; unable to complete the update");
} else {
processFutures.add(task);
}
}
for (Future<ProcessTask> future : processFutures) {
try {
final ProcessTask task = future.get();
if (task.getException() != null) {
throw task.getException();
}
} catch (InterruptedException ex) {
processExecutor.shutdownNow();
LOGGER.debug("Thread was interrupted during processing", ex);
throw new UpdateException(ex);
} catch (ExecutionException ex) {
processExecutor.shutdownNow();
LOGGER.debug("Execution Exception during process", ex);
throw new UpdateException(ex);
} finally {
processExecutor.shutdown();
}
}
if (maxUpdates >= 1) { //ensure the modified file date gets written (we may not have actually updated it)
getProperties().save(updateable.get(MODIFIED));
LOGGER.info("Begin database maintenance.");
getCveDB().cleanupDatabase();
LOGGER.info("End database maintenance.");
}
} finally {
closeDataStores();
} }
} }
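dataExists() above obtains the CveDB through getInstance() inside try-with-resources, so the singleton must be AutoCloseable and tolerate being "closed" by each caller. One hypothetical shape for such a usage-counted singleton (illustration only; the project's actual CveDB internals are not shown in this diff):

    import java.util.concurrent.atomic.AtomicInteger;

    public final class SharedResourceDemo implements AutoCloseable {

        private static SharedResourceDemo instance;
        private final AtomicInteger users = new AtomicInteger(0);

        private SharedResourceDemo() {
            // open underlying resources (e.g. a database connection) here
        }

        public static synchronized SharedResourceDemo getInstance() {
            if (instance == null) {
                instance = new SharedResourceDemo();
            }
            instance.users.incrementAndGet();
            return instance;
        }

        public boolean dataExists() {
            return true; // placeholder query
        }

        @Override
        public void close() {
            synchronized (SharedResourceDemo.class) {
                if (users.decrementAndGet() <= 0) {
                    instance = null; // last user out: release underlying resources here
                }
            }
        }

        public static void main(String[] args) {
            try (SharedResourceDemo db = SharedResourceDemo.getInstance()) {
                System.out.println("data exists: " + db.dataExists());
            }
        }
    }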
/** /**
* Determines if the index needs to be updated. This is done by fetching the NVD CVE meta data and checking the last update * Downloads the latest NVD CVE XML file from the web and imports it into
* date. If the data needs to be refreshed this method will return the NvdCveUrl for the files that need to be updated. * the current CVE Database.
*
* @param updateable a collection of NVD CVE data file references that need
* to be downloaded and processed to update the database
* @throws UpdateException is thrown if there is an error updating the
* database
*/
private void performUpdate(UpdateableNvdCve updateable) throws UpdateException {
int maxUpdates = 0;
for (NvdCveInfo cve : updateable) {
if (cve.getNeedsUpdate()) {
maxUpdates += 1;
}
}
if (maxUpdates <= 0) {
return;
}
if (maxUpdates > 3) {
LOGGER.info("NVD CVE requires several updates; this could take a couple of minutes.");
}
final Set<Future<Future<ProcessTask>>> downloadFutures = new HashSet<>(maxUpdates);
for (NvdCveInfo cve : updateable) {
if (cve.getNeedsUpdate()) {
final DownloadTask call = new DownloadTask(cve, processingExecutorService, cveDb, Settings.getInstance());
downloadFutures.add(downloadExecutorService.submit(call));
}
}
//next, move the future future processTasks to just future processTasks
final Set<Future<ProcessTask>> processFutures = new HashSet<>(maxUpdates);
for (Future<Future<ProcessTask>> future : downloadFutures) {
Future<ProcessTask> task;
try {
task = future.get();
} catch (InterruptedException ex) {
LOGGER.debug("Thread was interrupted during download", ex);
throw new UpdateException("The download was interrupted", ex);
} catch (ExecutionException ex) {
LOGGER.debug("Thread was interrupted during download execution", ex);
throw new UpdateException("The execution of the download was interrupted", ex);
}
if (task == null) {
LOGGER.debug("Thread was interrupted during download");
throw new UpdateException("The download was interrupted; unable to complete the update");
} else {
processFutures.add(task);
}
}
for (Future<ProcessTask> future : processFutures) {
try {
final ProcessTask task = future.get();
if (task.getException() != null) {
throw task.getException();
}
} catch (InterruptedException ex) {
LOGGER.debug("Thread was interrupted during processing", ex);
throw new UpdateException(ex);
} catch (ExecutionException ex) {
LOGGER.debug("Execution Exception during process", ex);
throw new UpdateException(ex);
}
}
if (maxUpdates >= 1) { //ensure the modified file date gets written (we may not have actually updated it)
dbProperties.save(updateable.get(MODIFIED));
LOGGER.info("Begin database maintenance.");
cveDb.cleanupDatabase();
LOGGER.info("End database maintenance.");
}
}
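performUpdate above fans downloads out to an I/O-sized pool, each download task submits its processing work to a CPU-sized pool, and the caller first drains the download futures and then the processing futures. A compact, self-contained sketch of that Future-of-a-Future hand-off (task bodies are placeholders):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    public class DownloadProcessPipelineDemo {
        public static void main(String[] args) throws Exception {
            final int cpus = Runtime.getRuntime().availableProcessors();
            // I/O-bound pool for downloads, CPU-bound pool for processing, sized like the updater above.
            final ExecutorService downloadPool = Executors.newFixedThreadPool(Math.round(1.5f * cpus));
            final ExecutorService processPool = Executors.newFixedThreadPool(cpus);
            try {
                final List<Future<Future<String>>> downloads = new ArrayList<>();
                for (int i = 0; i < 4; i++) {
                    final int id = i;
                    downloads.add(downloadPool.submit(new Callable<Future<String>>() {
                        @Override
                        public Future<String> call() throws Exception {
                            Thread.sleep(100); // stand-in for downloading one NVD data file
                            return processPool.submit(new Callable<String>() {
                                @Override
                                public String call() {
                                    return "processed file " + id; // stand-in for parsing/importing
                                }
                            });
                        }
                    }));
                }
                // Drain the download futures first, then the processing futures, as performUpdate does.
                for (Future<Future<String>> download : downloads) {
                    System.out.println(download.get().get());
                }
            } finally {
                downloadPool.shutdownNow();
                processPool.shutdownNow();
            }
        }
    }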
/**
* Determines if the index needs to be updated. This is done by fetching the
* NVD CVE meta data and checking the last update date. If the data needs to
* be refreshed this method will return the NvdCveUrl for the files that
* need to be updated.
* *
* @return the collection of files that need to be updated * @return the collection of files that need to be updated
* @throws MalformedURLException is thrown if the URL for the NVD CVE Meta data is incorrect * @throws MalformedURLException is thrown if the URL for the NVD CVE Meta
* @throws DownloadFailedException is thrown if there is an error. downloading the NVD CVE download data file * data is incorrect
* @throws UpdateException Is thrown if there is an issue with the last updated properties file * @throws DownloadFailedException is thrown if there is an error
* downloading the NVD CVE download data file
* @throws UpdateException Is thrown if there is an issue with the last
* updated properties file
*/ */
     protected final UpdateableNvdCve getUpdatesNeeded() throws MalformedURLException, DownloadFailedException, UpdateException {
-        UpdateableNvdCve updates = null;
+        LOGGER.info("starting getUpdatesNeeded() ...");
+        UpdateableNvdCve updates;
         try {
             updates = retrieveCurrentTimestampsFromWeb();
         } catch (InvalidDataException ex) {
@@ -241,14 +395,24 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
         if (updates == null) {
             throw new DownloadFailedException("Unable to retrieve the timestamps of the currently published NVD CVE data");
         }
-        if (!getProperties().isEmpty()) {
+        if (dbProperties != null && !dbProperties.isEmpty()) {
             try {
-                final long lastUpdated = Long.parseLong(getProperties().getProperty(DatabaseProperties.LAST_UPDATED, "0"));
+                final int startYear = Settings.getInt(Settings.KEYS.CVE_START_YEAR, 2002);
+                final int endYear = Calendar.getInstance().get(Calendar.YEAR);
+                boolean needsFullUpdate = false;
+                for (int y = startYear; y <= endYear; y++) {
+                    final long val = Long.parseLong(dbProperties.getProperty(DatabaseProperties.LAST_UPDATED_BASE + y, "0"));
+                    if (val == 0) {
+                        needsFullUpdate = true;
+                    }
+                }
+                final long lastUpdated = Long.parseLong(dbProperties.getProperty(DatabaseProperties.LAST_UPDATED, "0"));
                 final long now = System.currentTimeMillis();
                 final int days = Settings.getInt(Settings.KEYS.CVE_MODIFIED_VALID_FOR_DAYS, 7);
-                if (lastUpdated == updates.getTimeStamp(MODIFIED)) {
+                if (!needsFullUpdate && lastUpdated == updates.getTimeStamp(MODIFIED)) {
                     updates.clear(); //we don't need to update anything.
-                } else if (DateUtil.withinDateRange(lastUpdated, now, days)) {
+                } else if (!needsFullUpdate && DateUtil.withinDateRange(lastUpdated, now, days)) {
                     for (NvdCveInfo entry : updates) {
                         if (MODIFIED.equals(entry.getId())) {
                             entry.setNeedsUpdate(true);
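
To make the new needsFullUpdate check concrete: a single missing per-year timestamp forces the full yearly feeds to be downloaded again, and only when every year has been recorded at least once may the updater fall back to the smaller "modified" feed (or skip the update entirely when the modified timestamp is unchanged). A minimal standalone sketch of that decision, using java.util.Properties and a made-up key prefix in place of DatabaseProperties.LAST_UPDATED_BASE:

import java.util.Properties;

public class FullUpdateCheckExample {

    public static void main(String[] args) {
        // Simulated database properties; the key prefix is a hypothetical stand-in
        // for DatabaseProperties.LAST_UPDATED_BASE.
        final String lastUpdatedBase = "lastupdated.";
        final Properties dbProps = new Properties();
        dbProps.setProperty(lastUpdatedBase + "2015", "1490000000000");
        dbProps.setProperty(lastUpdatedBase + "2016", "1490000000000");
        // No entry for 2017: getProperty(..., "0") falls back to "0" below.

        final int startYear = 2015;
        final int endYear = 2017;
        boolean needsFullUpdate = false;
        for (int y = startYear; y <= endYear; y++) {
            final long val = Long.parseLong(dbProps.getProperty(lastUpdatedBase + y, "0"));
            if (val == 0) {
                needsFullUpdate = true;
            }
        }
        // Prints true: 2017 has never been imported, so the modified-only shortcut is skipped.
        System.out.println("needsFullUpdate = " + needsFullUpdate);
    }
}
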
@@ -263,7 +427,7 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
                         } else {
                             long currentTimestamp = 0;
                             try {
-                                currentTimestamp = Long.parseLong(getProperties().getProperty(DatabaseProperties.LAST_UPDATED_BASE
+                                currentTimestamp = Long.parseLong(dbProperties.getProperty(DatabaseProperties.LAST_UPDATED_BASE
                                         + entry.getId(), "0"));
                             } catch (NumberFormatException ex) {
                                 LOGGER.debug("Error parsing '{}' '{}' from nvdcve.lastupdated",
@@ -287,29 +451,112 @@ public class NvdCveUpdater extends BaseUpdater implements CachedWebDataSource {
      * Retrieves the timestamps from the NVD CVE meta data file.
      *
      * @return the timestamp from the currently published nvdcve downloads page
-     * @throws MalformedURLException thrown if the URL for the NVD CCE Meta data is incorrect.
-     * @throws DownloadFailedException thrown if there is an error downloading the nvd cve meta data file
-     * @throws InvalidDataException thrown if there is an exception parsing the timestamps
+     * @throws MalformedURLException thrown if the URL for the NVD CVE Meta data
+     * is incorrect.
+     * @throws DownloadFailedException thrown if there is an error downloading
+     * the nvd cve meta data file
+     * @throws InvalidDataException thrown if there is an exception parsing the
+     * timestamps
      * @throws InvalidSettingException thrown if the settings are invalid
      */
     private UpdateableNvdCve retrieveCurrentTimestampsFromWeb()
             throws MalformedURLException, DownloadFailedException, InvalidDataException, InvalidSettingException {
-        final UpdateableNvdCve updates = new UpdateableNvdCve();
-        updates.add(MODIFIED, Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL),
-                Settings.getString(Settings.KEYS.CVE_MODIFIED_12_URL),
-                false);
         final int start = Settings.getInt(Settings.KEYS.CVE_START_YEAR);
         final int end = Calendar.getInstance().get(Calendar.YEAR);
+        final Map<String, Long> lastModifiedDates = retrieveLastModifiedDates(start, end);
+        final UpdateableNvdCve updates = new UpdateableNvdCve();
         final String baseUrl20 = Settings.getString(Settings.KEYS.CVE_SCHEMA_2_0);
         final String baseUrl12 = Settings.getString(Settings.KEYS.CVE_SCHEMA_1_2);
         for (int i = start; i <= end; i++) {
-            updates.add(Integer.toString(i), String.format(baseUrl20, i),
-                    String.format(baseUrl12, i),
-                    true);
+            final String url = String.format(baseUrl20, i);
+            updates.add(Integer.toString(i), url, String.format(baseUrl12, i),
+                    lastModifiedDates.get(url), true);
         }
+        final String url = Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL);
+        updates.add(MODIFIED, url, Settings.getString(Settings.KEYS.CVE_MODIFIED_12_URL),
+                lastModifiedDates.get(url), false);
         return updates;
     }
+    /**
+     * Retrieves the timestamps from the NVD CVE meta data file.
+     *
+     * @param startYear the first year whose item to check for the timestamp
+     * @param endYear the last year whose item to check for the timestamp
+     * @return the timestamps from the currently published NVD CVE downloads
+     * page
+     * @throws MalformedURLException thrown if the URL for the NVD CVE Meta data
+     * is incorrect.
+     * @throws DownloadFailedException thrown if there is an error downloading
+     * the NVD CVE meta data file
+     */
+    private Map<String, Long> retrieveLastModifiedDates(int startYear, int endYear)
+            throws MalformedURLException, DownloadFailedException {
+        final Set<String> urls = new HashSet<>();
+        final String baseUrl20 = Settings.getString(Settings.KEYS.CVE_SCHEMA_2_0);
+        for (int i = startYear; i <= endYear; i++) {
+            final String url = String.format(baseUrl20, i);
+            urls.add(url);
+        }
+        urls.add(Settings.getString(Settings.KEYS.CVE_MODIFIED_20_URL));
+        final Map<String, Future<Long>> timestampFutures = new HashMap<>();
+        for (String url : urls) {
+            final TimestampRetriever timestampRetriever = new TimestampRetriever(url);
+            final Future<Long> future = downloadExecutorService.submit(timestampRetriever);
+            timestampFutures.put(url, future);
+        }
+        final Map<String, Long> lastModifiedDates = new HashMap<>();
+        for (String url : urls) {
+            final Future<Long> timestampFuture = timestampFutures.get(url);
+            final long timestamp;
+            try {
+                timestamp = timestampFuture.get(60, TimeUnit.SECONDS);
+            } catch (InterruptedException | ExecutionException | TimeoutException e) {
+                throw new DownloadFailedException(e);
+            }
+            lastModifiedDates.put(url, timestamp);
+        }
+        return lastModifiedDates;
+    }
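
The values gathered above are the HTTP Last-Modified timestamps of the yearly 2.0-schema feeds plus the "modified" feed, fetched in parallel on the download executor with a 60-second cap per URL. The standalone sketch below shows roughly where such a timestamp comes from using plain HttpURLConnection; the URL is made up, and in the project it is the Downloader utility (with its proxy and timeout settings) that actually performs this lookup.

import java.net.HttpURLConnection;
import java.net.URL;

public class LastModifiedExample {

    /**
     * Returns the Last-Modified timestamp (milliseconds since the epoch) reported
     * by the server, or 0 if the header is absent.
     */
    static long lastModified(String location) throws Exception {
        final HttpURLConnection conn = (HttpURLConnection) new URL(location).openConnection();
        try {
            conn.setRequestMethod("HEAD"); // only the headers are needed
            conn.setConnectTimeout(10000);
            conn.setReadTimeout(10000);
            return conn.getLastModified();
        } finally {
            conn.disconnect();
        }
    }

    public static void main(String[] args) throws Exception {
        // Example URL only; the updater builds the real list from the CVE_SCHEMA_2_0 setting.
        System.out.println(lastModified("https://example.org/nvdcve-2.0-2017.xml.gz"));
    }
}
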
+    /**
+     * Retrieves the last modified timestamp from a NVD CVE meta data file.
+     */
+    private static class TimestampRetriever implements Callable<Long> {
+        /**
+         * The URL to obtain the timestamp from.
+         */
+        private final String url;
+        /**
+         * Instantiates a new timestamp retriever object.
+         *
+         * @param url the URL to hit
+         */
+        TimestampRetriever(String url) {
+            this.url = url;
+        }
+        @Override
+        public Long call() throws Exception {
+            LOGGER.debug("Checking for updates from: {}", url);
+            try {
+                Settings.initialize();
+                return Downloader.getLastModified(new URL(url));
+            } finally {
+                Settings.cleanup(false);
+            }
+        }
+    }
 }

View File

@@ -22,6 +22,7 @@ import java.util.ArrayList;
 import java.util.List;
 import org.owasp.dependencycheck.data.update.NvdCveUpdater;
 import org.owasp.dependencycheck.data.update.exception.InvalidDataException;
+import org.owasp.dependencycheck.utils.Settings;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xml.sax.Attributes;
@@ -29,7 +30,7 @@ import org.xml.sax.SAXException;
 import org.xml.sax.helpers.DefaultHandler;
 /**
- * A SAX Handler that will parse the CPE XML and load it into the databse.
+ * A SAX Handler that will parse the CPE XML and load it into the database.
  *
  * @author Jeremy Long
  */
@@ -40,13 +41,18 @@ public class CPEHandler extends DefaultHandler {
      */
     private static final String CURRENT_SCHEMA_VERSION = "2.3";
     /**
-     * The text content of the node being processed. This can be used during the end element event.
+     * The Starts with expression to filter CVE entries by CPE.
+     */
+    private static final String CPE_STARTS_WITH = Settings.getString(Settings.KEYS.CVE_CPE_STARTS_WITH_FILTER, "cpe:/a:");
+    /**
+     * The text content of the node being processed. This can be used during the
+     * end element event.
      */
     private StringBuilder nodeText = null;
     /**
      * A reference to the current element.
      */
-    private Element current = new Element();
+    private final Element current = new Element();
     /**
      * The logger.
      */
@@ -54,7 +60,7 @@ public class CPEHandler extends DefaultHandler {
     /**
      * The list of CPE values.
      */
-    private List<Cpe> data = new ArrayList<Cpe>();
+    private final List<Cpe> data = new ArrayList<>();
     /**
      * Returns the list of CPE values.
@@ -72,7 +78,8 @@ public class CPEHandler extends DefaultHandler {
      * @param localName the local name
      * @param qName the qualified name
      * @param attributes the attributes
-     * @throws SAXException thrown if there is an exception processing the element
+     * @throws SAXException thrown if there is an exception processing the
+     * element
      */
     @Override
     public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
@@ -82,7 +89,7 @@ public class CPEHandler extends DefaultHandler {
         final String temp = attributes.getValue("deprecated");
         final String value = attributes.getValue("name");
         final boolean delete = "true".equalsIgnoreCase(temp);
-        if (!delete && value.startsWith("cpe:/a:") && value.length() > 7) {
+        if (!delete && value.startsWith(CPE_STARTS_WITH) && value.length() > 7) {
             try {
                 final Cpe cpe = new Cpe(value);
                 data.add(cpe);
@@ -123,7 +130,8 @@ public class CPEHandler extends DefaultHandler {
      * @param ch the char array
      * @param start the start position of the data read
      * @param length the length of the data read
-     * @throws SAXException thrown if there is an exception processing the characters
+     * @throws SAXException thrown if there is an exception processing the
+     * characters
      */
     @Override
     public void characters(char[] ch, int start, int length) throws SAXException {
@@ -133,51 +141,29 @@ public class CPEHandler extends DefaultHandler {
     }
     /**
-     * Handles the end element event. Stores the CPE data in the Cve Database if the cpe item node is ending.
+     * Handles the end element event. Stores the CPE data in the Cve Database if
+     * the cpe item node is ending.
      *
      * @param uri the element's uri
      * @param localName the local name
      * @param qName the qualified name
-     * @throws SAXException thrown if there is an exception processing the element
+     * @throws SAXException thrown if there is an exception processing the
+     * element
      */
     @Override
     public void endElement(String uri, String localName, String qName) throws SAXException {
         current.setNode(qName);
         if (current.isSchemaVersionNode() && !CURRENT_SCHEMA_VERSION.equals(nodeText.toString())) {
-            throw new SAXException("ERROR: Unexpecgted CPE Schema Version, expected: "
+            throw new SAXException("ERROR: Unexpected CPE Schema Version, expected: "
                     + CURRENT_SCHEMA_VERSION + ", file is: " + nodeText);
         }
-// } else if (current.isCpeItemNode()) {
-// //do nothing
-// } else if (current.isTitleNode()) {
-// //do nothing
-// } else if (current.isCpeListNode()) {
-// //do nothing
-// } else if (current.isMetaNode()) {
-// //do nothing
-// } else if (current.isNotesNode()) {
-// //do nothing
-// } else if (current.isNoteNode()) {
-// //do nothing
-// } else if (current.isCheckNode()) {
-// //do nothing
-// } else if (current.isGeneratorNode()) {
-// //do nothing
-// } else if (current.isProductNameNode()) {
-// //do nothing
-// } else if (current.isProductVersionNode()) {
-// //do nothing
-// else if (current.isTimestampNode()) {
-// //do nothing
-// } else {
-// throw new SAXException("ERROR STATE: Unexpected qName '" + qName + "'");
-// }
     }
     // <editor-fold defaultstate="collapsed" desc="The Element Class that maintains state information about the current node">
     /**
-     * A simple class to maintain information about the current element while parsing the CPE XML.
+     * A simple class to maintain information about the current element while
+     * parsing the CPE XML.
      */
     protected static final class Element {
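
The new CPE_STARTS_WITH constant replaces the hard-coded "cpe:/a:" prefix in startElement, so the handler's application-only filter can be reconfigured through Settings.KEYS.CVE_CPE_STARTS_WITH_FILTER (note that the accompanying value.length() > 7 guard still assumes the seven-character default). A small self-contained sketch of the same accept/reject check on plain strings, with made-up CPE values:

import java.util.Arrays;
import java.util.List;

public class CpeFilterExample {

    public static void main(String[] args) {
        // Default value of the starts-with filter; operating-system and hardware
        // CPEs (cpe:/o:..., cpe:/h:...) are skipped by the handler.
        final String startsWith = "cpe:/a:";

        final List<String> candidates = Arrays.asList(
                "cpe:/a:apache:struts:2.3.16",   // kept: application CPE
                "cpe:/o:linux:linux_kernel:4.4", // skipped: operating system CPE
                "cpe:/a:");                      // skipped: nothing after the prefix

        for (String value : candidates) {
            final boolean accepted = value.startsWith(startsWith) && value.length() > 7;
            System.out.println(value + " -> " + (accepted ? "kept" : "skipped"));
        }
    }
}
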

View File

@@ -17,6 +17,7 @@
  */
 package org.owasp.dependencycheck.data.update.cpe;
+import org.apache.commons.lang3.StringUtils;
 import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
 import org.owasp.dependencycheck.data.update.exception.InvalidDataException;
@@ -36,7 +37,8 @@ public class Cpe {
      */
     public Cpe(String value) throws UnsupportedEncodingException, InvalidDataException {
         this.value = value;
-        final String[] data = value.substring(7).split(":");
+        final String valueWithoutPrefix = value.substring(7);
+        final String[] data = StringUtils.split(valueWithoutPrefix, ':');
         if (data.length >= 2) {
             vendor = URLDecoder.decode(data[0].replace("+", "%2B"), "UTF-8");
             product = URLDecoder.decode(data[1].replace("+", "%2B"), "UTF-8");
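
The constructor change above strips the seven-character "cpe:/a:" prefix and switches from String.split to Commons Lang's StringUtils.split before URL-decoding the vendor and product fields; literal "+" characters are first escaped to "%2B" so URLDecoder does not turn them into spaces. A worked example of that parsing as a standalone sketch (the CPE value is made up, and this is not the project's Cpe class itself):

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import org.apache.commons.lang3.StringUtils;

public class CpeParseExample {

    public static void main(String[] args) throws UnsupportedEncodingException {
        final String value = "cpe:/a:apache:camel%2Bextras:2.15.0";

        // Strip the "cpe:/a:" prefix (7 characters) and split on ':'.
        final String withoutPrefix = value.substring(7);
        final String[] data = StringUtils.split(withoutPrefix, ':');

        if (data.length >= 2) {
            // "+" is escaped to "%2B" first so URLDecoder does not turn it into a space.
            final String vendor = URLDecoder.decode(data[0].replace("+", "%2B"), "UTF-8");
            final String product = URLDecoder.decode(data[1].replace("+", "%2B"), "UTF-8");
            System.out.println(vendor + " / " + product); // prints "apache / camel+extras"
        }
    }
}
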

Some files were not shown because too many files have changed in this diff.