Compare commits


580 Commits

Author SHA1 Message Date
Šesták Vít
3e5c8f28c8 Added support for Java update version parsing. 2018-02-21 16:34:36 +01:00
Jeremy Long
e854c37605 fix for issue #1108 2018-02-19 06:27:47 -05:00
Jeremy Long
7229f3bde4 updated dependencies 2018-02-18 07:54:48 -05:00
Jeremy Long
b9cf393eb6 updated test case 2018-02-18 07:54:32 -05:00
Jeremy Long
4baf1687b5 Merge pull request #1101 from freedumbytes/windows
Thanks for the PR! Fix groovy script failure because of unescaped Windows File.separator.
2018-02-11 07:38:55 -05:00
Jene Jasper
8eaf6a9cf0 Fix groovy script failure caused by unescaped Windows File.separator.
[ERROR] gmavenplus-plugin:execute (add-dynamic-properties-clean) on project dependency-check-parent
 Error occurred while calling a method on a Groovy class from classpath.:
 InvocationTargetException: startup failed:
[ERROR] Script1.groovy: 2: unexpected char: '\' @ line 2, column 57.
[ERROR]              config = "file:///C:\dev\projects\DependencyCheck/src/main/config
2018-02-11 10:38:33 +01:00
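The error above occurs because a raw Windows path (single backslashes) is spliced into generated Groovy source, where sequences such as \d are illegal escapes. A minimal sketch of one way to avoid the problem by converting the path to a file: URI before embedding it; this only illustrates the idea and is not necessarily the exact fix applied in the PR:

    import java.io.File;

    public class ConfigUrlExample {
        public static void main(String[] args) {
            // A Windows path with single backslashes; splicing this raw string into
            // generated Groovy source produces illegal escape sequences such as \d.
            File configDir = new File("C:\\dev\\projects\\DependencyCheck\\src\\main\\config");

            // File.toURI() emits forward slashes, so the resulting URL is safe to embed.
            String configUrl = configDir.toURI().toString();
            System.out.println(configUrl);   // e.g. file:/C:/dev/projects/DependencyCheck/src/main/config
        }
    }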
Jeremy Long
3b294282f3 Merge pull request #1094 from jeremylong/restructure
Restructure code base
2018-02-08 06:01:08 -05:00
Jeremy Long
ccd08e0c9d Merge branch 'restructure' of github.com:jeremylong/DependencyCheck into restructure 2018-02-06 06:46:56 -05:00
Jeremy Long
62a5db6b8b renamed modules and fixed errors with various lifecycle stages 2018-02-06 06:41:24 -05:00
Jeremy Long
3736161e39 restructure config 2018-02-06 06:41:16 -05:00
Jeremy Long
92b4a74440 upgrade 2018-02-06 06:33:29 -05:00
Jeremy Long
f8c18ae270 patch for issue #1090 2018-02-05 07:11:23 -05:00
Jeremy Long
7ca02f06c6 fix exception message 2018-02-05 07:10:49 -05:00
Jeremy Long
61eeaca039 avoid null logging statements 2018-02-05 07:10:26 -05:00
Jeremy Long
03f6777197 added hint per issue #1085 2018-01-30 06:52:40 -05:00
Jeremy Long
09a5dd6aa9 snapshot version 2018-01-30 06:50:26 -05:00
Jeremy Long
32cde3b7d8 snapshot version 2018-01-30 06:50:12 -05:00
Jeremy Long
c25bfb98d2 updated for release 2018-01-29 07:07:48 -05:00
Jeremy Long
62d1b135fb version 3.1.1 2018-01-28 22:12:27 -05:00
Jeremy Long
dda33932e1 fixed merge 2018-01-27 09:17:30 -05:00
Jeremy Long
00a7197f12 reduced logging level 2018-01-27 08:53:02 -05:00
Jeremy Long
33817684cc added documentation 2018-01-27 08:52:19 -05:00
Jeremy Long
005036a272 minor update 2018-01-26 21:59:44 -05:00
Jeremy Long
3ba68d8a34 Merge branch 'master' into restructure 2018-01-26 07:24:16 -05:00
Jeremy Long
4bedacfdd2 fix per https://github.com/jeremylong/dependency-check-gradle/issues/69#issuecomment-360519698 2018-01-26 07:23:58 -05:00
Jeremy Long
41d4f1fcc2 upgrade 2018-01-26 07:23:13 -05:00
Jeremy Long
23cd3bfa93 renamed modules and fixed errors with various lifecycle stages 2018-01-25 06:54:01 -05:00
Jeremy Long
0a23f44052 restructure config 2018-01-23 07:23:34 -05:00
Jeremy Long
1c10ff54e7 cleanup unused config 2018-01-22 08:10:02 -05:00
Jeremy Long
b35f47e9cb prevent alpha/beta version upgrades 2018-01-22 08:07:11 -05:00
Jeremy Long
7c98f19d73 fix for #1072 2018-01-22 07:02:28 -05:00
Jeremy Long
2ef3fdcc4e null check and debugging code per issue #1071 2018-01-22 06:43:28 -05:00
Jeremy Long
40f70c257d updated logging 2018-01-21 09:35:13 -05:00
Jeremy Long
275d1bdbf9 improved logging to assist in resolving #1061 2018-01-21 09:12:36 -05:00
Jeremy Long
9cf3313f31 patch for issue #1060 2018-01-16 08:28:28 -05:00
Steve Springett
3e7e5688ef Corrected version 2018-01-15 22:36:41 -06:00
Steve Springett
90073f48c1 Fixed issue introduced in 5bbb386f8c where the refactored evidence collection was not being written to the XML report. This affected 3.0.0 - 3.1.0. 2018-01-15 22:34:00 -06:00
Steve Springett
71f9831a5c Merge remote-tracking branch 'origin/master' 2018-01-12 22:46:34 -06:00
Steve Springett
1781aadab0 Added support for cpeStartsWithFilter to scan agent 2018-01-12 22:46:25 -06:00
Jeremy Long
d9aa52befb patch for issue #1054 2018-01-12 07:28:18 -05:00
Jeremy Long
3abe415805 updates to resolve #1050 and #1051 2018-01-05 06:32:28 -05:00
Jeremy Long
560f7b6e24 fix #1053 2018-01-03 05:52:20 -05:00
Jeremy Long
5c2fe57252 snapshot version 2018-01-03 05:51:54 -05:00
Jeremy Long
2bd6895bf3 version 3.1.0 2018-01-02 07:12:39 -05:00
Jeremy Long
79deda799f javadoc fix 2018-01-02 06:55:32 -05:00
Jeremy Long
b86fa133c5 checkstyle updates 2018-01-02 06:17:58 -05:00
Jeremy Long
30622f9c4a fixed link 2018-01-02 06:17:46 -05:00
Jeremy Long
23159caf2b reducing potential FP due to import statements in the manifest 2018-01-02 06:01:35 -05:00
Jeremy Long
2cd5ced015 version upgrade 2018-01-02 05:54:13 -05:00
Jeremy Long
e9e7f095be Merge pull request #1048 from jeremylong/luceneUpgrade
Lucene upgrade, bug fixes, and general cleanup
2017-12-29 06:26:25 -05:00
Jeremy Long
43af96bb0f enhanced filter to support empty tokens 2017-12-29 05:58:44 -05:00
Jeremy Long
e8088c2bda resolve merge conflict 2017-12-29 05:46:48 -05:00
Jeremy Long
af303df965 general FP cleanup 2017-12-28 08:18:42 -05:00
Jeremy Long
48ff396e7e code cleanup per codacy 2017-12-28 06:25:36 -05:00
Jeremy Long
a85a0456bc minor cleanup 2017-12-28 06:14:01 -05:00
Jeremy Long
b5e7a54d35 Merge pull request #1047 from jeremylong/falseNegative
False negative on xmlsec
2017-12-27 09:28:48 -05:00
Jeremy Long
df031a1bd6 upgrade lucene, bug fixes, and general cleanup 2017-12-27 09:16:17 -05:00
Jeremy Long
f598857a83 object will never be null (coverity scan) 2017-12-24 08:01:47 -05:00
Jeremy Long
420f50b9bf Merge branch 'master' into falseNegative 2017-12-24 07:43:17 -05:00
Jeremy Long
40699fa1ac having issues with the dec 12th trusty update; using the previous image 2017-12-24 07:42:57 -05:00
Jeremy Long
64e44ad614 patch for #1043 2017-12-23 07:46:46 -05:00
Jeremy Long
c6f2bf66e6 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-12-22 06:52:03 -05:00
Jeremy Long
d27b47c0d5 Merge pull request #1044 from jeremylong/luceneQueryFix
Lucene query fix
2017-12-22 06:51:39 -05:00
Jeremy Long
fe2cbdd007 update 2017-12-22 06:51:00 -05:00
Jeremy Long
3afea40bbb increase build time 2017-12-22 06:47:59 -05:00
Jeremy Long
536914c3b7 fix possible NPE 2017-12-22 06:32:54 -05:00
Jeremy Long
302fe1ce05 Merge pull request #1034 from jeremylong/hook
updated lock file cleanup to be useable by other build plugins
2017-12-22 06:11:16 -05:00
Jeremy Long
121972ffd9 codacy cleanup 2017-12-22 06:10:22 -05:00
Jeremy Long
f58ee9896a Merge pull request #1036 from msrb/fp-vertx-config-kubernetes-configmap
Suppress FP: io.vertx:vertx-config-kubernetes-configmap
2017-12-21 06:00:49 -05:00
Jeremy Long
dd4d1495c1 cleaned up lucene query construction and added test cases 2017-12-21 05:58:39 -05:00
Jeremy Long
16a6a2d2d8 added a max length to limit query parse issues 2017-12-20 06:26:03 -05:00
Jeremy Long
b91d086340 updated FP list while working on #632 2017-12-20 06:25:34 -05:00
Michal Srb
0f25e53eda Suppress FP: io.vertx:vertx-config-kubernetes-configmap
Signed-off-by: Michal Srb <michal@redhat.com>
2017-12-17 14:13:07 +01:00
Jeremy Long
bb20129f0e updated lock file cleanup to be useable by other build plugins 2017-12-17 07:36:23 -05:00
Jeremy Long
9be1da7e12 updated per issue #1022 2017-12-16 08:31:40 -05:00
Jeremy Long
6636d7d143 fixed documentation issue per #1018 2017-12-16 08:17:24 -05:00
Jeremy Long
8619aadf16 Merge pull request #1030 from jeremylong/shutdownhook
Add Shutdown Hook to remove h2 lock file
2017-12-16 07:40:33 -05:00
Jeremy Long
3555733bbf updated console output per issue #1000 2017-12-12 07:09:16 -05:00
Jeremy Long
2d9ad67b14 add shutdown hook to resolve #1027 2017-12-12 06:57:43 -05:00
Jeremy Long
5fc2fcd7d8 patch for issue #943 2017-12-11 06:52:06 -05:00
Jeremy Long
2a0df96c5b updated documentation per issue #1018 2017-12-11 06:01:24 -05:00
Jeremy Long
4bfb7d341e Merge pull request #1028 from jeremylong/resolve_fp
Resolve fp
2017-12-10 13:35:49 -05:00
Jeremy Long
eed32bd4f6 Merge branch 'versions' into resolve_fp 2017-12-10 08:48:57 -05:00
Jeremy Long
8b35fe2683 Merge branch 'master' into versions 2017-12-10 08:47:31 -05:00
Jeremy Long
2db2235803 Merge branch 'master' into resolve_fp 2017-12-10 08:21:41 -05:00
Jeremy Long
05ed69193c Merge branch 'alefq-master' 2017-12-10 08:20:06 -05:00
Jeremy Long
7b561d559e update per issue #1025 2017-12-10 08:18:14 -05:00
Jeremy Long
1e72c49eb2 javadoc update 2017-12-09 08:14:21 -05:00
Jeremy Long
63a2874cec updated logging levels to reduce spamming the console 2017-12-09 07:51:06 -05:00
Jeremy Long
fd47ede9d6 resolve merge conflicts 2017-12-09 07:03:33 -05:00
Jeremy Long
c0a0636fe9 Merge pull request #1015 from jeremylong/versions
Version Upgrades
2017-12-09 06:47:48 -05:00
Jeremy Long
412b72540a resolve version matching for issue #997 2017-12-09 06:46:05 -05:00
Jeremy Long
a1012ded26 fixes for FP reported in issues #942, #944, #946, #947, #949, #951, #952, #964, #965, #999, #1003, and #1010 2017-12-08 06:12:39 -05:00
Jeremy Long
782a9dea7a updated per issue #950 - fp on url value 2017-12-05 05:49:45 -05:00
Jeremy Long
df4cc59efa ide suggested changes 2017-12-05 05:44:41 -05:00
Jeremy Long
d20c679528 Merge branch 'master' into versions 2017-12-03 11:41:02 -05:00
Jeremy Long
872a524c44 updated to use wildcard 2017-12-03 11:40:42 -05:00
Jeremy Long
5a0e597124 merge 2017-12-03 10:35:36 -05:00
Jeremy Long
2d984dda94 checkstyle update 2017-12-03 10:15:57 -05:00
Jeremy Long
dc1acc99da Merge branch 'suppressionFileWarn' of https://github.com/stboiss/DependencyCheck into stboiss-suppressionFileWarn 2017-12-03 10:04:23 -05:00
Jeremy Long
363399d95f Merge pull request #1006 from jeremylong/issue-993
Improve Node.js and NSP analyzers
2017-12-03 06:16:31 -05:00
Jeremy Long
d713e5d7d7 remove code duplication 2017-12-03 05:57:20 -05:00
Jeremy Long
c6363fde7a code cleanup, checkstyle, codacy, findbugs, etc. 2017-12-02 08:06:16 -05:00
Jeremy Long
eddffaae3d updated versions and include new enforcer rule to validate class file formats of dependencies 2017-12-02 06:54:40 -05:00
Ale Feltes Quenhan
b46d13fc18 Update CveDB.java
Remove unused variable.
2017-11-30 16:21:39 -03:00
Ale Feltes
482f8daaf3 Restore CVE start year
cve.startyear value is 2002.
2017-11-30 15:18:32 -03:00
Ale Feltes
334829604f Adds configurable batch insert for References and Vulnerabilities
Applies batch inserts for the reference and vulnerability tables, resolving
the slow one-by-one insert process for vulnerabilities that have many
associated references.
Feature is configurable through properties: database.batchinsert.enabled
and database.batchinsert.maxsize.
2017-11-30 14:08:45 -03:00
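A rough sketch of the batch-insert approach described in the commit above; the table and column names are hypothetical, while the settings mirror the database.batchinsert.enabled and database.batchinsert.maxsize properties named in the commit message:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;
    import java.util.List;

    public class BatchInsertSketch {
        // Hypothetical settings mirroring database.batchinsert.enabled / database.batchinsert.maxsize
        private final boolean batchEnabled = true;
        private final int maxBatchSize = 1000;

        /** Inserts references in batches instead of issuing one statement per row. */
        public void insertReferences(Connection conn, long cveId, List<String> urls) throws SQLException {
            final String sql = "INSERT INTO reference (cve_id, url) VALUES (?, ?)"; // hypothetical table
            try (PreparedStatement ps = conn.prepareStatement(sql)) {
                int pending = 0;
                for (String url : urls) {
                    ps.setLong(1, cveId);
                    ps.setString(2, url);
                    if (batchEnabled) {
                        ps.addBatch();
                        if (++pending >= maxBatchSize) {
                            ps.executeBatch();   // flush once the configured batch size is reached
                            pending = 0;
                        }
                    } else {
                        ps.executeUpdate();      // fall back to row-by-row inserts
                    }
                }
                if (pending > 0) {
                    ps.executeBatch();           // flush the remainder
                }
            }
        }
    }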
Ale Feltes
ebff547b6f Merge remote-tracking branch 'upstream/master' 2017-11-30 14:04:26 -03:00
Jeremy Long
4862811600 updated log statement per issue #1008 2017-11-28 06:34:22 -05:00
Jeremy Long
0a2bfcaed2 upgrades 2017-11-27 23:14:17 -05:00
Jeremy Long
a31dddf8ef updated config 2017-11-27 22:45:56 -05:00
Jeremy Long
19c223161d fix plugin configuration 2017-11-27 22:35:07 -05:00
Jeremy Long
ae128c38ec configured version updates 2017-11-27 22:29:25 -05:00
Jeremy Long
a7dddfa905 fixed incorrect name of shrinkwrap.json 2017-11-27 21:59:50 -05:00
Jeremy Long
c465bc9fc7 fixed incorrect parsing of license information 2017-11-27 21:46:33 -05:00
Jeremy Long
7bcde5d439 move non-test dependency version numbers to properties 2017-11-27 21:14:34 -05:00
stboiss
76d79186c7 small correction 2017-11-27 15:26:17 +01:00
a141757
d7606d0263 make Error on personal Suppression File to not fatal 2017-11-27 15:14:04 +01:00
Jeremy Long
72c121797f fixed test cases 2017-11-26 10:26:37 -05:00
Jeremy Long
f51edf52e7 updates for issue #991 2017-11-26 10:13:32 -05:00
Jeremy Long
eb023c0c99 updated to better support npm 2017-11-26 10:05:50 -05:00
Jeremy Long
0e3fa6645d due to removing the retired attribute from NodePackageAnalyzer we should increment the minor version 2017-11-26 09:08:36 -05:00
Jeremy Long
93f25abd99 checkstyle suggestions 2017-11-26 09:05:42 -05:00
Jeremy Long
f1631e9ff3 cleanup of code and added warning messages 2017-11-26 09:03:39 -05:00
Jeremy Long
8c4187967a Merge branch 'master' into issue-993 2017-11-25 11:52:33 -05:00
Jeremy Long
ddb60cab61 fix build warning 2017-11-25 11:52:10 -05:00
Jeremy Long
f7a72489d4 fix test case due to new CPE/CVEs 2017-11-25 11:48:34 -05:00
Jeremy Long
c58ec0ff8c overhaul node package and nsp analyzer 2017-11-25 11:14:29 -05:00
Jeremy Long
9e6cf2e6f3 overhaul node package and nsp analyzer 2017-11-25 11:13:16 -05:00
Jeremy Long
332bbe72aa overhaul node package and nsp analyzer 2017-11-25 11:13:02 -05:00
Jeremy Long
0b32d3b991 fixed javadoc 2017-11-25 11:09:46 -05:00
Jeremy Long
9e92e2f8da added test case resources 2017-11-25 10:06:47 -05:00
Jeremy Long
fb138364d4 Merge branch 'master' into issue-993 2017-11-20 07:10:42 -05:00
Jeremy Long
082ac5d229 suppressed some checkstyle findings 2017-11-20 07:10:29 -05:00
Jeremy Long
e18c32c5dc fixed build warning 2017-11-20 07:10:02 -05:00
Jeremy Long
5ebc2dc244 checkstyle corrections 2017-11-20 06:55:36 -05:00
Jeremy Long
e4b7f7aa8f update to ensure NodePackageAnalyzer will not run without a backing vulnerability analyzer 2017-11-20 06:46:25 -05:00
Jeremy Long
a754a8e6b4 Merge branch 'master' into issue-993 2017-11-19 08:33:26 -05:00
Jeremy Long
43621016cf Merge pull request #996 from jeremylong/uncPathcs-995
removed validation preventing UNC paths per issue #995
2017-11-19 08:32:50 -05:00
Jeremy Long
741fba51f5 updated as this would end up similar to #933 2017-11-19 08:29:19 -05:00
Jeremy Long
4a3f8c4f2a Merge branch 'master' into issue-993 2017-11-19 08:21:47 -05:00
Jeremy Long
14839cadf5 snapshot version 2017-11-19 08:21:26 -05:00
Jeremy Long
d560ca927e added per request #948 2017-11-19 07:50:03 -05:00
Jeremy Long
eacb4c9d62 fix issue #868 2017-11-19 07:37:18 -05:00
Jeremy Long
804f8e38da checkstyle suggested changes 2017-11-18 16:32:40 -05:00
Jeremy Long
7e1b6d0cc7 fixed test cases 2017-11-18 15:02:59 -05:00
Jeremy Long
3440edbfb6 fix generated hyperlinks 2017-11-18 13:30:14 -05:00
Jeremy Long
2a1186c4fa re-enable node analyzer for #993 2017-11-18 13:24:23 -05:00
Jeremy Long
af63b40307 removed validation preventing UNC paths per issue #995 2017-11-18 05:53:16 -05:00
Ale Feltes
38499898aa Added Batch insert for References and Vulnerabilities
Applies batch inserts for the reference and vulnerability tables,
resolving the slow one-by-one insert process for vulnerabilities
that have many associated references.
2017-11-17 10:36:15 -03:00
Jeremy Long
dea9fa1145 externalized central search query so future changes can be handled via a properties change - issue #978 2017-11-14 06:37:41 -05:00
Jeremy Long
49d14d1272 Merge pull request #983 from jeremylong/compare-cleanup
CompareTo() cleanup
2017-11-14 05:52:11 -05:00
Jeremy Long
f8bf9d4eb7 codacy suggested change 2017-11-13 06:46:29 -05:00
Jeremy Long
0536fa6c2a codacy suggested change 2017-11-13 06:42:18 -05:00
Jeremy Long
16977e4869 codacy suggested change 2017-11-13 06:38:34 -05:00
Jeremy Long
5cb1d93029 Merge branch 'master' into compare-cleanup 2017-11-13 06:15:44 -05:00
Jeremy Long
376fe38a02 version 3.0.2 2017-11-13 06:15:28 -05:00
Jeremy Long
fda21e3eff updated site to link to the archetype 2017-11-13 06:15:16 -05:00
Jeremy Long
6197660292 updated site to link to the archetype 2017-11-13 06:12:10 -05:00
Jeremy Long
f474276807 updates per issue #928 2017-11-12 08:55:57 -05:00
Jeremy Long
5c44ea19cc removed compareTo per issue #928 2017-11-12 08:26:20 -05:00
Jeremy Long
95331082d5 added default property value for netbeans 2017-11-12 08:10:28 -05:00
Jeremy Long
0a344912d3 fixed compareTo 2017-11-12 08:03:09 -05:00
Jeremy Long
7952df0883 fixed identifiers compareTo 2017-11-12 07:33:32 -05:00
Jeremy Long
02785f2a4a fixed test case 2017-11-12 07:14:05 -05:00
Jeremy Long
8ea104544c Merge branch 'master' into compare-cleanup 2017-11-12 07:04:07 -05:00
Jeremy Long
8428e96702 removed TreeSet to improve performance 2017-11-12 07:03:35 -05:00
Jeremy Long
3d11a36671 removed sorting of dependencies 2017-11-12 07:02:53 -05:00
Steve Springett
cc2b033e6d Merge remote-tracking branch 'origin/master' 2017-11-10 16:25:10 -06:00
Steve Springett
c0dfacbf6c URL encoding double quotes passed in to Maven Central search API #978 2017-11-10 16:24:50 -06:00
Jeremy Long
8ebaf055a1 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-11-10 06:24:39 -05:00
Jeremy Long
2431da4c6e fix for issue #809 2017-11-10 06:24:35 -05:00
Steve Springett
088566a2cf Adding enhancement (and test) that compensates for an invalid package.json (one without a name field) and automatically adds the name field with a value of "1" so that the analysis continues rather than fails. #975 2017-11-09 16:14:24 -06:00
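A hedged sketch of the compensation described in the commit above, using the javax.json API; the helper name and the copy-then-add approach are illustrative, not the analyzer's actual code:

    import java.util.Map;
    import javax.json.Json;
    import javax.json.JsonObject;
    import javax.json.JsonObjectBuilder;
    import javax.json.JsonValue;

    public class PackageJsonFixup {
        /** Returns the package.json object, adding a placeholder name if the field is missing. */
        static JsonObject withDefaultName(JsonObject packageJson) {
            if (packageJson.containsKey("name")) {
                return packageJson;
            }
            JsonObjectBuilder builder = Json.createObjectBuilder();
            for (Map.Entry<String, JsonValue> entry : packageJson.entrySet()) {
                builder.add(entry.getKey(), entry.getValue());   // copy the existing fields
            }
            builder.add("name", "1");                            // placeholder so analysis can continue
            return builder.build();
        }
    }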
Jeremy Long
210dd3f778 Merge pull request #970 from ka7/feature/spelling
spelling fixes
2017-11-08 05:51:01 -05:00
klemens
4776a542a7 spelling fixes 2017-11-07 21:45:36 +01:00
Jeremy Long
8ab6b77b56 merged #961 2017-11-05 06:27:55 -05:00
Jeremy Long
e92e3aa321 merged #961 2017-11-05 06:26:16 -05:00
Jeremy Long
9077c77908 minor formatting update 2017-11-05 06:24:25 -05:00
Steve Springett
01c7979231 Updated javadoc to reflect changes in 3.x. 2017-10-30 22:30:31 -05:00
Glen Vermeylen
7f01829de1 add nullcheck in scanDirectory() on result of scanFile() 2017-10-28 17:20:32 +02:00
Jeremy Long
ce7c07813b Merge pull request #958 from jeremylong/issue954
updates to resolve issue #954
2017-10-27 07:04:51 -04:00
Jeremy Long
c36348611b updates to resolve issue #954 2017-10-26 19:17:57 -04:00
Jeremy Long
9525ab449f Merge pull request #940 from jeremylong/addMd5
Add Md5 checksum to the composer and cmake analyzers
2017-10-25 07:06:27 -04:00
Jeremy Long
5b7a978f01 updated ignore list 2017-10-25 06:50:15 -04:00
Jeremy Long
988d1d5147 centralized string conversion to bytes 2017-10-25 06:47:48 -04:00
Jeremy Long
644f4ca6c2 Merge branch 'master' into addMd5 2017-10-25 06:13:30 -04:00
Jeremy Long
8ceaa04320 Merge pull request #939 from jeremylong/retiredAnalyzerUpdates
Update documentation and configuration for Retired Analyzers
2017-10-25 06:12:03 -04:00
Jeremy Long
a78f28ade6 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-10-24 07:06:30 -04:00
Jeremy Long
3a07795e39 added ruby and mono (see issue #953) 2017-10-24 06:53:15 -04:00
Jeremy Long
aab42547ad Merge pull request #941 from sdutry/unusedImports
removed unused imports
2017-10-23 06:16:01 -04:00
Stefaan Dutry
62ca5e890a removed unused imports 2017-10-22 22:57:55 +02:00
Jeremy Long
765bfa0e1d update per issue #933 2017-10-22 15:34:16 -04:00
Jeremy Long
5b7314e6d3 fix for issue #937 2017-10-22 14:41:06 -04:00
Jeremy Long
714b3d29b9 updated docker file 2017-10-22 11:48:13 -04:00
Jeremy Long
19fde6d667 version 3.0.1 2017-10-18 06:39:57 -04:00
Jeremy Long
f42d3bea5a removed unused import 2017-10-16 17:40:11 -04:00
Jeremy Long
60fd4f6311 reverted change to resolve issue #930 2017-10-16 06:55:45 -04:00
Jeremy Long
0b3a50f320 snapshot version 2017-10-16 06:23:56 -04:00
Jeremy Long
b8f938b81b updated suppression rule 2017-10-16 06:23:42 -04:00
Jeremy Long
23a6a726fe version 3.0.0 2017-10-15 08:20:48 -04:00
Jeremy Long
f4b11d8e44 fixed bug where an exception would cause the lock not to be released 2017-10-15 07:57:33 -04:00
Jeremy Long
c8bfdddd59 fix javadoc warning 2017-10-15 07:56:42 -04:00
Jeremy Long
f2d1819589 checkstyle suggestions 2017-10-14 07:51:41 -04:00
Jeremy Long
e2a97738e1 checkstyle suggested changes 2017-10-13 07:07:52 -04:00
Jeremy Long
2f6e40f123 Merge branch 'fabio-boldrini-master' 2017-10-13 06:33:36 -04:00
Jeremy Long
f6d301fd67 minor formatting update 2017-10-13 06:33:23 -04:00
Jeremy Long
c484edf7ae Merge branch 'master' of https://github.com/fabio-boldrini/DependencyCheck into fabio-boldrini-master 2017-10-13 06:28:46 -04:00
Jeremy Long
667e784d06 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-10-13 05:59:09 -04:00
Jeremy Long
20ff49f66c update for issue/pr #862 2017-10-13 05:58:56 -04:00
fabio.boldrini
2332c0fa5e On VulnerableSoftware, changed the compare implementation to support
versions in the format "3b", which is now split into "3" and "b". Previously the
versions "5.0.3a", "5.0.9" and "5.0.30" were not correctly comparable.
See VulnerableSoftwareTest.testVersionsWithLettersComparison.
This issue can cause a runtime exception during sorting.
2017-10-12 09:25:50 +02:00
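A simplified sketch of the comparison idea: split a segment such as "3b" into its numeric part and letter suffix so that "5.0.3a", "5.0.9" and "5.0.30" order correctly. This illustrates the technique only and is not the project's actual implementation:

    public class VersionSegmentCompare {

        /** Compares single version segments such as "3a", "9" or "30". */
        static int compareSegment(String a, String b) {
            int numA = numericPrefix(a);
            int numB = numericPrefix(b);
            if (numA != numB) {
                return Integer.compare(numA, numB);              // 3 < 9 < 30
            }
            // numeric parts are equal: fall back to the letter suffix ("" < "a" < "b")
            return a.replaceAll("^\\d+", "").compareTo(b.replaceAll("^\\d+", ""));
        }

        private static int numericPrefix(String segment) {
            String digits = segment.replaceAll("^(\\d*).*$", "$1");
            return digits.isEmpty() ? 0 : Integer.parseInt(digits);
        }

        public static void main(String[] args) {
            System.out.println(compareSegment("3a", "9"));       // negative: 5.0.3a < 5.0.9
            System.out.println(compareSegment("9", "30"));       // negative: 5.0.9 < 5.0.30
        }
    }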
Jeremy Long
fa05482e69 Merge pull request #921 from stefanneuhaus/fix-typo-and-make-public-method-private
Fix typo: Engine.isDataBseRequired()
2017-10-09 16:05:03 -04:00
Stefan Neuhaus
2ef4237adf Fix typo: Engine.isDataBseRequired()
Also make the method private.
2017-10-09 09:57:42 +02:00
Jeremy Long
79b7d74387 coverity suggested changes 2017-10-08 17:20:08 -04:00
Jeremy Long
b3d034a435 updated documentation per issue #834 2017-10-08 16:22:18 -04:00
Jeremy Long
f5ec0331eb Merge pull request #919 from stefanneuhaus/fix-typos
Fix typos
2017-10-08 16:07:21 -04:00
Jeremy Long
b4661d85f4 updated to resolve #871 and other FP 2017-10-08 15:32:55 -04:00
Jeremy Long
f15edfb806 fix issue #859 2017-10-08 15:10:18 -04:00
Stefan Neuhaus
143c8d151f Fix typos 2017-10-08 14:50:21 +02:00
Jeremy Long
01ff6d986c fix for issue #863 2017-10-08 08:24:42 -04:00
Jeremy Long
c153463471 update documentation per issue #878 2017-10-08 07:50:17 -04:00
Jeremy Long
e90444f012 minor update 2017-10-08 07:49:30 -04:00
Jeremy Long
55b9a42b62 several updates for FP 2017-10-08 07:48:10 -04:00
Jeremy Long
c51c772ff6 updated per issue #888 2017-10-08 07:28:55 -04:00
Jeremy Long
2507a56a3a fix issue #913 2017-10-08 06:31:42 -04:00
Jeremy Long
77b4372eff Merge branch 'aikebah-issue_704' 2017-10-08 06:29:20 -04:00
Jeremy Long
8a7066cda7 resolved merge conflicts 2017-10-06 06:33:43 -04:00
Jeremy Long
51d7618661 Merge branch '365farmnet-master' 2017-10-06 05:55:41 -04:00
Jeremy Long
4b0164cffb updated enforcer definition 2017-10-06 05:55:20 -04:00
Jeremy Long
d18a63635d Merge branch 'master' of https://github.com/365farmnet/DependencyCheck into 365farmnet-master 2017-10-06 05:46:57 -04:00
Jeremy Long
51cf98eb60 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-10-06 05:46:23 -04:00
Jeremy Long
4370dfcd5a allow integrated security 2017-10-06 05:46:16 -04:00
Jeremy Long
664f083071 Merge pull request #916 from stephengroat/patch-1
fix spelling
2017-10-06 05:36:43 -04:00
Jeremy Long
57e729512e Merge pull request #909 from stefanneuhaus/improve-centralanalyzer-stability
Improve centralanalyzer stability
2017-10-06 05:36:09 -04:00
Stephen
818b8b295f fix spelling 2017-10-05 08:53:33 -07:00
Stefan Neuhaus
c8dd241567 Merge remote-tracking branch 'upstream/master' into improve-centralanalyzer-stability 2017-10-04 20:56:05 +02:00
Stefan Neuhaus
ed49251310 Cleanup test 2017-10-04 20:54:02 +02:00
Stefan Neuhaus
98f9628e27 Fail analysis/build in case of recurring IOExceptions when connecting to MavenCentral 2017-10-04 20:48:16 +02:00
Stefan Neuhaus
bfbec1d0a6 Cleanup: Remove enabled flag (reuse flag from AbstractAnalyzer class) 2017-10-04 20:34:39 +02:00
Stefan Neuhaus
6ddc0bfa27 Add license information 2017-10-04 20:11:38 +02:00
Stefan Neuhaus
eacf3ac906 Please PMD 2017-10-04 15:04:53 +02:00
Stefan Neuhaus
6fc15984b8 Please PMD 2017-10-04 14:43:12 +02:00
Jeremy Long
cd875777e7 added hints from community feedback 2017-10-04 06:27:09 -04:00
Stefan Neuhaus
a38f8b447c Cleanup: remove dead code 2017-10-02 17:06:27 +02:00
Stefan Neuhaus
142eb41312 Cleanup: remove useless overwrite
As the default is "true" again, we do not need to overwrite it here. And even if we changed the default back to "false", then there would probably be a good reason why we would not want to overwrite it to "true" for this specific analyzer.
2017-10-02 17:05:55 +02:00
Stefan Neuhaus
1835355f4d Cleanup: formatting and typo 2017-10-02 17:03:20 +02:00
Stefan Neuhaus
3c3534e7da CentralAnalyzer: Implement retry for fetching MavenArtifacts due to sporadic issues 2017-10-02 17:01:16 +02:00
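The retry pattern itself is conventional; a hedged sketch follows, in which the retry count, delay, and the Call interface are assumptions rather than the analyzer's actual values:

    import java.io.IOException;

    public class RetrySketch {

        interface Call<T> {
            T execute() throws IOException;
        }

        /** Retries a flaky remote call a few times before letting the failure propagate. */
        static <T> T withRetries(int maxAttempts, long delayMillis, Call<T> call) throws IOException {
            IOException last = null;
            for (int attempt = 1; attempt <= maxAttempts; attempt++) {
                try {
                    return call.execute();
                } catch (IOException ex) {
                    last = ex;                       // remember the failure and try again
                    try {
                        Thread.sleep(delayMillis);   // brief pause between attempts
                    } catch (InterruptedException ie) {
                        Thread.currentThread().interrupt();
                        break;
                    }
                }
            }
            throw last != null ? last : new IOException("request interrupted before completion");
        }
    }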
Jeremy Long
27abb72df1 Merge branch 'brianf-nugetNameImprovements' 2017-10-02 06:59:23 -04:00
Jeremy Long
159b9006cc resolved merge conflicts 2017-10-02 06:59:12 -04:00
Jeremy Long
54ccd04c17 Merge branch 'brianf-dependencyNameImprovements' 2017-10-02 06:48:29 -04:00
Jeremy Long
cf2f2dc62d resolved merge conflicts 2017-10-02 06:47:41 -04:00
Hugo Hirsch
52b55434eb Fix build warning about wrong usage of maven prerequisites for non-maven plugin projects 2017-10-02 09:42:10 +02:00
Hugo Hirsch
f5e16ea6ee Add defaultGoal 2017-10-02 09:31:42 +02:00
brianf
16892d022f Nuspec tests and name normalization
added tests for the existing analyzer and normalized the name and set
the ecosystem.
2017-10-01 11:41:45 -04:00
Jeremy Long
fa377cfc05 Merge branch 'elenoir-hide_failure_msg_showSummary' 2017-09-30 07:54:42 -04:00
Jeremy Long
423216f1a3 resolved merge conflict 2017-09-30 07:54:33 -04:00
Jeremy Long
b6936bf805 Merge pull request #880 from elenoir/enhance_csv_report
Enhance csv report
2017-09-30 07:29:03 -04:00
Jeremy Long
3b019d173c resolve merge conflict and update test cases 2017-09-30 07:27:44 -04:00
Jeremy Long
1be196698d Merge pull request #877 from jeremylong/dependency-updates
Thread Safety Updates
2017-09-30 06:10:45 -04:00
Jeremy Long
cd018def91 checkstyle suggestions 2017-09-27 06:59:18 -04:00
Jeremy Long
9c0a166b7d bug fixes 2017-09-27 06:38:52 -04:00
Jeremy Long
516390827b removed redundant close 2017-09-27 05:40:35 -04:00
Jeremy Long
bb5b6b75b8 ensure CPE analyzer is closed so that the data can be refreshed in the lucene index 2017-09-27 05:26:26 -04:00
brianf
c33cc3f230 few more formatting fixes 2017-09-25 10:25:56 -04:00
brianf
4fc8dd59d2 cleanup from reviews. Mostly formatting 2017-09-25 10:18:56 -04:00
Jeremy Long
04dc5f8491 changes to resolve issues with multiple connections to the embedded H2 database 2017-09-25 06:55:22 -04:00
brianf
e0af41e439 cleanup 2017-09-22 13:47:08 -04:00
brianf
1564f11b89 Normalize Ruby analyzers 2017-09-21 21:44:49 -04:00
brianf
69323bf0a4 Normalize the Node Analyzers 2017-09-21 21:00:55 -04:00
brianf
6726101e36 Added Ecosystem to Java 2017-09-21 17:37:17 -04:00
brianf
9998cd0ccc Normalize CMake names 2017-09-21 16:35:14 -04:00
brianf
562269dd2b Normalized Python Package Name 2017-09-21 15:53:13 -04:00
brianf
a8b740a538 Normalized Python Dist names 2017-09-21 15:41:13 -04:00
brianf
7a74917b67 Standardized the Composer / PHP Names 2017-09-21 15:30:47 -04:00
brianf
4a95efefac Merge branch 'phpAddVersion' into dependencyNameImprovements 2017-09-21 15:22:43 -04:00
brianf
9b718490e3 Centralize the collection of name and version to be used for
synthesizing a displayName.

Fixed the swift/cocoapod analyzers to use the new model
2017-09-21 15:00:38 -04:00
brianf
4e745c9c89 Merge branch 'swiftDisplayFileName' into dependencyNameImprovements 2017-09-21 14:23:33 -04:00
brianf
4ac0a0e305 Removing Redundant dependency entry for composer.lock
Move all the log statements to debug
2017-09-19 09:52:32 -04:00
brianf
3b00b764ac Remove the redundant top level entry for composer.lock once the child
dependencies are processed.

This main entry is empty of evidence because everything is added into
the new dependencies.
2017-09-17 18:01:40 -04:00
Hans Aikema
8595f55eb3 Put Fields first in class as requested by codacy/pr automated review 2017-09-17 15:53:38 +02:00
Hans Aikema
67aa59c4b8 Implement issue #704 and enable multi-schema validation 2017-09-17 15:30:48 +02:00
Jeremy Long
165170e5d4 increased logging level in test 2017-09-15 18:57:08 -04:00
Jeremy Long
816e17a67b Merge pull request #885 from Piccirello/patch-1
Minor grammatical change in documentation
2017-09-15 17:51:52 -04:00
Jeremy Long
a98b946354 updated debug output for CI 2017-09-15 17:44:56 -04:00
Jeremy Long
a5c3ecf6de updated to look for the correct update string 2017-09-15 17:37:07 -04:00
Jeremy Long
c998bff178 removed old/invalid comment from copy/paste 2017-09-15 17:36:40 -04:00
Jeremy Long
620e518e92 updated invoker config as we no longer need to run setup projects first 2017-09-15 17:35:28 -04:00
Jeremy Long
1e96b43720 locking fixes for H2 updates 2017-09-15 17:34:46 -04:00
Tom Piccirello
58bf7ff670 than vs then 2017-09-15 16:45:36 -04:00
brianf
a0081318b6 Adding version to the composer.lock displayFileName
Changed output to debug
Added basic test for composer parsing, including the new version
2017-09-15 13:27:44 -04:00
brianf
9175b2624d Following the pattern of other analyzers and including the parent name
so the report doesn't list dozens of "package.swift" entries
2017-09-14 12:29:28 -04:00
Erik Lenoir
9db7012042 Branch showSummary on checkForFailure 2017-09-14 15:12:57 +02:00
Erik Lenoir
9a9cf826ab Add TU 2017-09-14 14:01:41 +02:00
Erik Lenoir
60c2e31cea Enhance CSV report, cf #809 2017-09-14 12:46:10 +02:00
Jeremy Long
cb6287eacc added debugging code as this only seems to fail on the CI 2017-09-13 08:11:45 -04:00
Jeremy Long
6182ac3307 reverted change in thread count as part of debugging 2017-09-13 07:20:56 -04:00
Jeremy Long
7ee7d2fa1c reduced thread count for debugging 2017-09-12 23:24:54 -04:00
Jeremy Long
322f0518f9 updated to assist in debugging on travis 2017-09-12 19:52:25 -04:00
Jeremy Long
4358952e17 checkstyle suggested cleanup 2017-09-12 08:55:57 -04:00
Jeremy Long
82a5b4ab12 updated templates for API changes in 3.0.0 2017-09-12 08:19:41 -04:00
Jeremy Long
e45a5a99c3 updated method names to better state what is happening 2017-09-11 12:55:08 -04:00
Jeremy Long
e5eb056324 updated suppressions based on changes to core 2017-09-10 18:06:58 -04:00
Jeremy Long
3a0a170904 removed invalid loopcount output 2017-09-10 08:46:53 -04:00
Jeremy Long
b05bb8a1ee updated to support changes for threadsafety 2017-09-10 08:40:01 -04:00
Jeremy Long
a4768386cc removed unused code 2017-09-10 08:39:33 -04:00
stevespringett
1bfd2d7ac1 Added support for retiring analyzers (disabled by default) and retired the NodePackageAnalyzer. 2017-09-09 21:50:17 -05:00
Jeremy Long
1548f9a4b2 refactored the dependency object to be threadsafe 2017-09-09 20:42:42 -04:00
Jeremy Long
61390b200d Merge branch 'threadsafe' into dependency-updates 2017-09-09 15:47:06 -04:00
Jeremy Long
df737539a5 Merge branch 'master' into threadsafe 2017-09-09 15:46:46 -04:00
Jeremy Long
5bbb386f8c evidence thread-safety updates 2017-09-09 15:42:12 -04:00
stevespringett
bbd59be1d6 Minor modification to XML schema and unit test. 2017-09-08 10:06:32 -05:00
stevespringett
1b1debdb30 Minor modifications to CSV, JSON, and XML reports to include Dependency.isVirtual and Vulnerability.Source 2017-09-08 00:58:27 -05:00
Jeremy Long
37eefc7f8b removed unused imports 2017-09-07 06:12:34 -04:00
Jeremy Long
325ed8e47c converted to integration test as updates from NVD are required for this test which slows down just a standard mvn package 2017-09-07 06:11:50 -04:00
Jeremy Long
33640ccfbb merged master to keep branch up to date 2017-09-07 06:08:06 -04:00
Jeremy Long
519d90e3d0 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-09-07 05:46:35 -04:00
Jeremy Long
417fda8c7c lgtm suggested changes 2017-09-07 05:46:21 -04:00
Jeremy Long
c31a56228b switch to openjdk to make travis work 2017-09-07 05:44:03 -04:00
stevespringett
c472608876 Added updateOnly and the loading of user-supplied properties files to scan agent 2017-09-05 22:52:00 -05:00
Jeremy Long
7f92109bde updated to support parallel processing 2017-09-04 08:54:48 -04:00
Jeremy Long
31fb9b0a20 removed synchronization 2017-09-04 08:52:45 -04:00
Jeremy Long
a967735e11 cached the external dependency view 2017-09-04 08:52:20 -04:00
Jeremy Long
d8f79fa51d added jprofiler logo to site 2017-09-04 07:41:23 -04:00
Jeremy Long
3bf69651fd reverted changes 2017-09-03 21:28:19 -04:00
Jeremy Long
ed22b6532f coverity suggested updates 2017-09-03 21:10:33 -04:00
Jeremy Long
c4ee53e147 updated init logic 2017-09-03 21:10:13 -04:00
Jeremy Long
cc256d5ef0 removed unused imports 2017-09-03 16:09:02 -04:00
Jeremy Long
f51a7371b7 removed comments 2017-09-03 16:07:24 -04:00
Jeremy Long
235df3e482 resolve issue #690 2017-09-03 16:02:37 -04:00
Jeremy Long
f36f4068f3 removed cleanup in favor of close from autoclosable 2017-09-03 14:34:39 -04:00
Jeremy Long
9b491fb286 update wait time on travis 2017-09-03 12:48:40 -04:00
Jeremy Long
dc41cb7674 increase wait time on travis 2017-09-03 12:34:36 -04:00
Jeremy Long
070f4edce1 oracle jdk 7 failing in travis 2017-09-03 11:36:04 -04:00
Jeremy Long
ab5de24518 continued work on threading and code improvement 2017-09-03 09:05:55 -04:00
Jeremy Long
795de6f9ea added test case 2017-09-02 06:35:17 -04:00
Jeremy Long
7eda83a434 fixed test cases 2017-08-31 07:01:43 -04:00
Jeremy Long
190fa55ace commented out invalid test case 2017-08-31 06:03:49 -04:00
Jeremy Long
ff7ebf405c fix test case 2017-08-31 05:48:34 -04:00
Jeremy Long
74a2326e0e Modified CveDB and Settings so that they are no longer singletons; first step in thread safety updates 2017-08-30 06:47:45 -04:00
Jeremy Long
c4b67a1db2 breaking api changes are being made, incrementing major version 2017-08-26 06:42:02 -04:00
Jeremy Long
ae50b01318 Merge pull request #858 from AndrewJCarr/master
#842 Honor skip configuration in reports
2017-08-26 06:37:46 -04:00
Andrew Carr
e203bc63e9 #842 Honor skip configuration in reports 2017-08-25 13:54:40 -05:00
Jeremy Long
f700b22358 snapshot version 2017-08-25 07:08:12 -04:00
Jeremy Long
dc1195f8b1 version 2.1.1 2017-08-25 06:42:39 -04:00
Jeremy Long
8f582c55d1 fixed typo per #855 2017-08-24 06:38:54 -04:00
Jeremy Long
e82bbbefe8 updated per issue #854 2017-08-24 06:35:36 -04:00
Jeremy Long
aa033cde4b updated per group discussion https://groups.google.com/forum/#!topic/dependency-check/GcOFleJz-nk 2017-08-24 06:32:10 -04:00
Jeremy Long
af02238f01 reverted change until threading issue is resolved 2017-08-23 06:42:11 -04:00
Jeremy Long
2421380d1d added hint per discussion here: https://groups.google.com/forum/#!topic/dependency-check/GcOFleJz-nk 2017-08-23 06:41:39 -04:00
Jeremy Long
c8a73afe84 Merge branch 'mealingr-catch_ioexception_parsing_manifest' 2017-08-23 06:15:40 -04:00
Jeremy Long
0f87dee1a0 Merge branch 'catch_ioexception_parsing_manifest' of https://github.com/mealingr/DependencyCheck into mealingr-catch_ioexception_parsing_manifest 2017-08-23 05:51:15 -04:00
Jeremy Long
5ff9814894 patch for issue #851 2017-08-23 05:43:47 -04:00
Richard Mealing
5d87dc2942 change to skip and remove macOS metadata and non-zip files 2017-08-21 13:51:45 +01:00
Jeremy Long
1049a18a15 temporarily suspend parallel processing 2017-08-20 11:22:35 -04:00
Jeremy Long
e07401dc55 fixed incorrect logging level 2017-08-20 11:21:56 -04:00
Jeremy Long
bcae8d2015 Merge pull request #839 from jeremylong/h2upgrade
H2 Upgrade
2017-08-20 11:02:17 -04:00
Jeremy Long
631c10f8b6 fix issue #849 2017-08-20 11:01:10 -04:00
Jeremy Long
a015cf4210 patch to fix issue #844 2017-08-19 17:20:36 -04:00
Jeremy Long
07f838ccf3 patch to fix issue #844 2017-08-19 17:18:48 -04:00
Jeremy Long
d06d561a55 initial addition of available version numbers 2017-08-19 16:57:32 -04:00
Jeremy Long
6567c971e1 updated test case 2017-08-19 16:56:36 -04:00
Jeremy Long
d6eac2b3c8 removed test from invoker setup 2017-08-19 16:56:13 -04:00
Jeremy Long
ec3aec6445 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-08-19 15:29:33 -04:00
Jeremy Long
a9449fe5ff initial version 2017-08-19 15:02:45 -04:00
Jeremy Long
d7b6988e2e Merge pull request #846 from mattnelson/check_populateSettings
Call populateSettings in CheckMojo#canGenerateReport to address NPE
2017-08-19 12:58:40 -04:00
Jeremy Long
c39e223f0f Merge pull request #847 from volphy/master
Fixed URL of TLS Failures page
2017-08-19 12:48:37 -04:00
Jeremy Long
06f59893af Merge pull request #840 from jeremylong/lucene-usage
Improve Lucene Usage
2017-08-19 12:48:03 -04:00
Richard Mealing
6d7f7d8e42 updated error message to be more representative 2017-08-18 13:44:53 +01:00
Richard Mealing
e7055c8a38 increased test robustness 2017-08-18 09:55:19 +01:00
Richard Mealing
ab2e5f31c8 catch IOExceptions when parsing jar manifest 2017-08-17 19:54:13 +01:00
Krzysztof Wilk
828ff5a1ec Fixed URL of TLS Failures page 2017-08-17 10:53:27 +02:00
Matt Nelson
60b1775e37 Call populateSettings in CheckMojo#canGenerateReport to address NPE 2017-08-15 15:12:29 -05:00
Jeremy Long
c0aca39d31 revert database name to dc.h2.db 2017-08-13 16:05:26 -04:00
Jeremy Long
bf5aafe455 improve usage of lucene, add stop words, code cleanup, etc. 2017-08-13 13:18:58 -04:00
Jeremy Long
fb2b3159e8 added suppression rule per issue #838 2017-08-13 12:37:17 -04:00
Jeremy Long
9ebbbe6a5b resolve incorrect error reporting per issue #837 2017-08-13 12:18:09 -04:00
Jeremy Long
593fddb1f9 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-08-13 10:47:50 -04:00
Jeremy Long
3ef80644f8 updated connection string for concurrency 2017-08-13 08:28:32 -04:00
Jeremy Long
d401a7e60a minor code formatting fix 2017-08-13 07:43:34 -04:00
Jeremy Long
1e269f2a2c externalized db lock 2017-08-13 07:41:35 -04:00
Steve Springett
333dc96d6f Merge pull request #836 from ktham/fix-nsp-analyzer
Fix NspAnalyzer
2017-08-08 14:15:40 -05:00
Kevin Tham
ade69168d0 Make fixes to NspAnalyzer to correctly parse package.json files
* parse `bundledDependencies` and `bundleDependencies` as a JsonArray
* parse `license` as a JsonObject for older libraries that used license objects
2017-08-08 11:33:50 -07:00
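A hedged sketch of tolerant parsing along the lines described above, using the javax.json API; the field handling shown is illustrative and may differ from the analyzer's actual logic:

    import javax.json.JsonArray;
    import javax.json.JsonObject;
    import javax.json.JsonString;
    import javax.json.JsonValue;

    public class PackageJsonFields {

        /** Reads `license`, which older packages wrote as an object rather than a string. */
        static String licenseOf(JsonObject pkg) {
            JsonValue license = pkg.get("license");
            if (license == null) {
                return null;
            }
            if (license.getValueType() == JsonValue.ValueType.OBJECT) {
                return ((JsonObject) license).getString("type", null);   // { "type": "MIT", ... }
            }
            if (license.getValueType() == JsonValue.ValueType.STRING) {
                return ((JsonString) license).getString();
            }
            return null;
        }

        /** Reads bundled dependency names, accepting either spelling of the key. */
        static JsonArray bundledDependencies(JsonObject pkg) {
            JsonValue value = pkg.containsKey("bundledDependencies")
                    ? pkg.get("bundledDependencies")
                    : pkg.get("bundleDependencies");
            return (value != null && value.getValueType() == JsonValue.ValueType.ARRAY)
                    ? (JsonArray) value : null;
        }
    }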
Kevin Tham
89c63ac5c9 Add NspAnalyzerTest.java and package.json files for testing 2017-08-08 11:30:54 -07:00
Jeremy Long
9fd8f1c898 initial upgrade of h2 2017-08-07 18:37:03 -04:00
Jeremy Long
b44862f713 updated log message, see https://github.com/jeremylong/dependency-check-gradle/issues/52 - original message was misleading 2017-08-05 07:53:40 -04:00
Jeremy Long
4da950f37c Merge pull request #830 from stefanneuhaus/documentation-fixes
Documentation fixes
2017-07-29 22:16:47 -04:00
Jeremy Long
10a8bf5356 Merge pull request #829 from stefanneuhaus/fix-postgres-save-properties
Fix save_properties on Postgres
2017-07-29 22:14:42 -04:00
Stefan Neuhaus
a06c6dda40 Also mention the Gradle plugin in the documentation “Using a Database Server” 2017-07-29 20:44:50 +02:00
Stefan Neuhaus
c6c194dddb Fix save_properties on Postgres
“CALL save_property(.)” does not work (tested with Postgres 9.6):
SQL Error [42601]: ERROR: syntax error at or near "CALL"
  Position: 1
  org.postgresql.util.PSQLException: ERROR: syntax error at or near "CALL"
  Position: 1
2017-07-29 19:49:32 +02:00
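For context, PostgreSQL (at least through 9.6) has no CALL statement; stored functions are invoked through SELECT. A hedged sketch of one way to branch on the database product, where the save_property function name comes from the commit and the surrounding code is illustrative:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;

    public class SavePropertySketch {
        static void saveProperty(Connection conn, String key, String value) throws SQLException {
            String product = conn.getMetaData().getDatabaseProductName();
            // H2 accepts CALL; PostgreSQL 9.x only supports invoking functions via SELECT.
            String sql = "PostgreSQL".equalsIgnoreCase(product)
                    ? "SELECT save_property(?, ?)"
                    : "CALL save_property(?, ?)";
            try (PreparedStatement ps = conn.prepareStatement(sql)) {
                ps.setString(1, key);
                ps.setString(2, value);
                ps.execute();
            }
        }
    }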
Stefan Neuhaus
a13d29b0cc Fix link and instructions on nist-data-mirror. 2017-07-29 16:27:55 +02:00
Stefan Neuhaus
1caca99e82 Prepend “analyzers” config group column to configuration table of the Gradle plugin 2017-07-29 14:59:40 +02:00
Stefan Neuhaus
0336fcb7a3 Prepend “proxy” config group column to configuration table of the Gradle plugin 2017-07-29 14:53:00 +02:00
Stefan Neuhaus
95c0d9b9a0 Remove unsupported Gradle proxy configuration setting “connectionTimeout” from documentation 2017-07-29 14:31:55 +02:00
Stefan Neuhaus
240d06d7e4 Reference the Gradle Plugin proxy configuration page in the more general Proxy configuration page 2017-07-29 14:12:33 +02:00
Jeremy Long
2753bb97c8 added synchronization per issue #785 2017-07-26 11:48:50 -04:00
Jeremy Long
9c744211d7 fixed JSON report to generate valid JSON 2017-07-26 09:59:44 -04:00
Jeremy Long
8a8d4fb994 updated temporary data directory logic as the previous implementation did not work correctly 2017-07-26 09:59:21 -04:00
Jeremy Long
d24dac26ea updated setup/teardown so that the settings are properly set 2017-07-25 13:07:16 -04:00
Jeremy Long
93088c2e9a updated error handling for issue #821 2017-07-25 13:01:55 -04:00
Jeremy Long
61ad90c939 corrected json report per issue #821 2017-07-25 10:30:54 -04:00
Jeremy Long
c849af19ed updated per issue #823 2017-07-25 09:42:33 -04:00
Jeremy Long
4f7ce49dea snapshot version 2017-07-23 17:01:38 -04:00
Jeremy Long
c94717bd1c updated README.md per issue #818 2017-07-23 17:01:27 -04:00
Jeremy Long
06cf39b59b cleanup, checkstyle, etc. 2017-07-23 08:34:40 -04:00
Jeremy Long
c3c52c2b2a Merge branch 'stevespringett-master' 2017-07-23 07:43:57 -04:00
Jeremy Long
b4dcd61f58 Merge branch 'master' of https://github.com/stevespringett/DependencyCheck into stevespringett-master 2017-07-23 07:26:55 -04:00
Jeremy Long
89b8f314d8 version 2.1.0 2017-07-23 07:26:37 -04:00
Jeremy Long
e975ba5199 resolve issue #818 2017-07-23 07:11:47 -04:00
Jeremy Long
12d74510cd Coverity suggested cleanup 2017-07-23 06:27:14 -04:00
Jeremy Long
176363492e checkstyle updates 2017-07-22 18:20:11 -04:00
stevespringett
b8edcaeaf8 Adds Maven support for specifying multiple filesets in a 'scanSet'. #773 2017-07-22 15:10:42 -05:00
Jeremy Long
db2a0abcb6 Merge pull request #819 from jeremylong/issue815
Fix Error Reporting in Maven Aggregate Goal
2017-07-22 07:58:23 -04:00
Jeremy Long
84c6320e49 Merge branch 'master' into issue815 2017-07-22 07:25:21 -04:00
Jeremy Long
cc2051b308 removed un-needed prebuild scripts 2017-07-22 07:24:50 -04:00
Jeremy Long
43d71e7665 upgraded invoker version so that the threadcount can be increased to reduce build time 2017-07-22 07:23:49 -04:00
Jeremy Long
37b9f49467 resolve issue #815 2017-07-22 07:22:37 -04:00
Jeremy Long
79d64a617d Merge pull request #817 from jeremylong/bundleaudit
Resolve issue #810
2017-07-22 06:46:50 -04:00
stevespringett
784a1393fc Added check for node_modules directory. Will skip if package.json is a node module. Fixes #797 2017-07-21 15:18:28 -05:00
Jeremy Long
af9bc9ec3e updated exception handling 2017-07-21 06:53:54 -04:00
Jeremy Long
f3580dece7 documentation update 2017-07-21 06:35:30 -04:00
Jeremy Long
0183457b7a resolve issue #810 2017-07-21 06:10:03 -04:00
Jeremy Long
60b8bde19a resolve false positive per issue #814 2017-07-20 06:32:51 -04:00
Jeremy Long
5e8b012a5d fixed regexes per issue #794 2017-07-20 06:23:12 -04:00
Jeremy Long
5703a44ab5 Merge pull request #798 from markrekveld/engine-modes
Adds engine execution modes to separate evidence collection from analysis. The default case is to use both evidence collection and analysis.
2017-07-19 06:28:05 -04:00
Jeremy Long
91b1d5cbde fixed issue #808 2017-07-17 06:40:39 -04:00
Jeremy Long
2ab92a940b updates to resolve issue #801 2017-07-16 13:15:23 -04:00
Jeremy Long
4ec8e3bbbb fix issue #806 2017-07-16 12:22:19 -04:00
Jeremy Long
ed56eb2ec1 updated test case per issue #792 2017-07-16 08:46:48 -04:00
Jeremy Long
d4c1a9ea08 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-07-16 08:27:00 -04:00
Jeremy Long
48947ca722 updated per issue #792 2017-07-16 08:26:52 -04:00
Jeremy Long
5d028ee9fe Merge pull request #800 from aikebah/issue-799
Fix issue #799 - Initialize exCol to prevent NPE
2017-07-14 06:59:41 -04:00
Jeremy Long
35b762bd75 Merge pull request #803 from stefanneuhaus/fix-postgres-usage_missing-resource-exception_merge-property
Fix MissingResourceException "MERGE_PROPERTY" for Postgres
2017-07-14 06:37:25 -04:00
Stefan Neuhaus
cbb10a1b1c In case of missing resources for prepared statements detect and clearly indicate this issue. 2017-07-13 21:22:15 +02:00
Stefan Neuhaus
239c5f2e46 Prevent NPE in case the CveDB.getInstance() failed. This NPE masked the actual cause thereby hampering issue analysis 2017-07-13 21:21:03 +02:00
Stefan Neuhaus
d7d5e0c757 Cleanup: Ease debugging connection problems: add cause to thrown exceptions 2017-07-13 21:18:27 +02:00
Stefan Neuhaus
fccac8cb85 Actual fix: the database product was reported as “PostgreSQL” by the driver. As the custom “Locale” used in the ResourceBundle is case-sensitive, the mixed-case properties file fails to be resolved (at least on case-sensitive file systems) 2017-07-13 21:16:46 +02:00
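A hedged sketch of the lookup problem described in the commit above; the resource naming scheme here is hypothetical:

    public class DbResourceName {
        /** Builds the vendor-specific resource name used to locate SQL statements. */
        static String statementResourceFor(String databaseProductName) {
            // The driver reports "PostgreSQL"; lowercasing it lets the resolved name match a
            // file such as dbStatements_postgresql.properties on case-sensitive file systems.
            return "dbStatements_" + databaseProductName.toLowerCase() + ".properties";
        }
    }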
Hans Aikema
6d2a6bbd3d Fix issue #799 - Initialize exCol to prevent NPE 2017-07-12 21:40:31 +02:00
Jeremy Long
4fc2abd183 false positive per issue #796 2017-07-12 07:24:30 -04:00
Mark Rekveld
b762d8e664 comment 2017-07-12 13:10:36 +02:00
Mark Rekveld
ccce1eea4b tests 2017-07-11 09:59:28 +02:00
Mark Rekveld
11ef55920e missing mode.phases 2017-07-10 18:14:42 +02:00
Mark Rekveld
7c0a7a0dd0 removed throws 2017-07-10 17:18:22 +02:00
Mark Rekveld
e6ec9d9aa3 Autocloseable 2017-07-10 16:58:08 +02:00
Mark Rekveld
1fe24a2e0c type 2017-07-10 16:36:41 +02:00
Mark Rekveld
f2aa3f12be javadocs + tests 2017-07-10 16:28:02 +02:00
Mark Rekveld
cb3cf79beb Engine execution modes 2017-07-10 16:28:02 +02:00
Jeremy Long
a27f390d37 snapshot version 2017-07-08 05:40:28 -04:00
Jeremy Long
d8107c1232 fixed distributionManagement id 2017-07-08 05:38:43 -04:00
Jeremy Long
9272bded7e version 2.0.1 2017-07-06 19:53:05 -04:00
Jeremy Long
6800029163 updated settings 2017-07-06 19:51:09 -04:00
Jeremy Long
64c824fedf Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-07-06 19:38:15 -04:00
Jeremy Long
8338668ab4 Merge pull request #789 from jeremylong/vladt-FixGrokAssemblyExeCreation
Minor updates to PR#787
2017-07-06 19:37:47 -04:00
Jeremy Long
eb244e0234 minor code quality cleanup per codacy 2017-07-06 06:55:16 -04:00
Jeremy Long
3ffb2d1312 removed un-needed checks in tearDown 2017-07-06 06:31:20 -04:00
Jeremy Long
d76832f761 updated tear down to call the correct close method for this test 2017-07-06 06:17:49 -04:00
Jeremy Long
d5503ff615 updated error reporting for non-existent files 2017-07-06 06:05:26 -04:00
Jeremy Long
87f327b095 suppressed unchecked warning on cast 2017-07-06 05:41:06 -04:00
Jeremy Long
756d39df9a Merge branch 'FixGrokAssemblyExeCreation' of https://github.com/vladt/DependencyCheck into vladt-FixGrokAssemblyExeCreation 2017-07-06 05:34:17 -04:00
Jeremy Long
fa4d8832d7 updated documentation to try and avoid issue like #783 in the future 2017-07-06 05:28:23 -04:00
Jeremy Long
9d0a5da783 fix issue #783 2017-07-06 05:27:37 -04:00
vladt
725f1e9759 Fixed the creation of the GrokAssembly.exe temp file and the cleanup of the temp config file. 2017-07-05 10:07:29 -04:00
Jeremy Long
55689fe911 snapshot version 2017-07-03 18:23:54 -04:00
Jeremy Long
de4c116271 updated release notes 2017-07-03 13:41:27 -04:00
Jeremy Long
10ebe49287 minor update to regex for #781 2017-07-03 11:41:51 -04:00
Jeremy Long
305db5f8b1 updated documentation per issue #782 2017-07-03 08:04:54 -04:00
Jeremy Long
6e2b82c446 resolve false positives per issue #781 2017-07-03 07:40:04 -04:00
Jeremy Long
db135f8e11 fix site configuration 2017-07-02 06:38:20 -04:00
Jeremy Long
c3b5d2f620 version 2.0.0 2017-07-01 08:09:38 -04:00
Jeremy Long
02052799ed fix issue #777 2017-07-01 07:34:39 -04:00
Jeremy Long
9f31c33938 updated sonarcloud 2017-06-25 06:20:52 -04:00
Jeremy Long
dd1cadf621 updated sonarcloud 2017-06-25 06:02:24 -04:00
Jeremy Long
5bc1c3f616 coverity suggested changes 2017-06-24 15:58:33 -04:00
Jeremy Long
c555f60f47 fix jacoco transmission to sonar 2017-06-24 15:58:11 -04:00
Jeremy Long
a4ea892f20 added jacoco coverage data to sonarcloud 2017-06-24 07:24:49 -04:00
Jeremy Long
2cb017cf83 updated codacy code coverage reporter to latest 2017-06-24 07:07:50 -04:00
Jeremy Long
ac830d5784 trimmed after success to a single JDK 2017-06-24 07:03:57 -04:00
Jeremy Long
1db9add9ff fix issue #774 2017-06-24 07:03:17 -04:00
Jeremy Long
9936b1339c fixed 2017-06-24 06:30:00 -04:00
Jeremy Long
7a373799cf added jdk8 matrix for sonarcloud 2017-06-24 06:24:31 -04:00
Jeremy Long
466562df41 updated travis config for sonar integration 2017-06-23 07:12:36 -04:00
Jeremy Long
b9e9c837c8 sonar, checkstyle, etc. suggested changes 2017-06-23 06:26:18 -04:00
Jeremy Long
9b289e619a checkstyle, pmd, sonar, etc. corrections 2017-06-22 21:07:41 -04:00
Jeremy Long
006b180a0c updated test case 2017-06-22 19:30:09 -04:00
Jeremy Long
abcb5c3a32 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-06-22 07:19:22 -04:00
Jeremy Long
0c0151e550 Merge branch 'pwhittlesea-issue-730' 2017-06-22 07:18:48 -04:00
Jeremy Long
dee1ccfd3e updates to allow old suppression file configuration 2017-06-22 07:18:14 -04:00
Jeremy Long
af2259d69e Merge pull request #769 from ragaller/ragaller-patch-1
Fixes wrong description of --failOnCVSS argument
2017-06-21 06:33:16 -04:00
Georg Ragaller
131caf0e3e Fixes wrong description of --failOnCVSS argument 2017-06-20 16:53:36 +02:00
Jeremy Long
3d5b86d96f minor formatting updates 2017-06-20 06:57:58 -04:00
Phillip Whittlesea
9a30c3d0cb Merge branch 'master' into issue-730 2017-06-19 21:44:25 +01:00
Jeremy Long
7545329db2 updated copyright... 2017-06-19 07:05:19 -04:00
Jeremy Long
066c331f96 Merge branch 'anderruiz-bootclasspath_fixes' 2017-06-19 06:42:04 -04:00
Jeremy Long
7ccfee73bc minor formatting update 2017-06-19 06:41:34 -04:00
Jeremy Long
3f7a9b92ec Merge branch 'bootclasspath_fixes' of https://github.com/anderruiz/DependencyCheck into anderruiz-bootclasspath_fixes 2017-06-19 06:30:45 -04:00
Jeremy Long
5d15c60c68 Merge branch 'stevespringett-master' 2017-06-19 06:20:22 -04:00
Jeremy Long
dacf493a94 fixed ci build/deploy 2017-06-19 06:13:33 -04:00
Jeremy Long
65ad53f59e trimmed build time tasks temporarily while debugging artifactory integration 2017-06-19 05:51:12 -04:00
Jeremy Long
b0cf555b6e fix encrypted setting 2017-06-18 20:56:33 -04:00
Jeremy Long
d0bfe114f6 fix encrypted setting 2017-06-18 20:26:35 -04:00
Jeremy Long
4f4e734eee fix encrypted setting 2017-06-18 16:15:50 -04:00
Jeremy Long
37ea0bf05b fix encrypted setting 2017-06-18 16:13:34 -04:00
Jeremy Long
31463597ef fix encrypted setting 2017-06-18 16:10:56 -04:00
Jeremy Long
8b2c6d6918 fix encrypted setting 2017-06-18 16:05:52 -04:00
Jeremy Long
9c52ffc48f fix encrypted setting 2017-06-18 16:03:08 -04:00
Jeremy Long
74dd1e6359 updated credentials 2017-06-18 15:48:08 -04:00
Jeremy Long
74fbaeefbf updated credentials 2017-06-18 15:32:52 -04:00
Jeremy Long
fec0878091 updated credentials 2017-06-18 14:57:35 -04:00
Jeremy Long
f257388108 corrected distribution management section to point to the correct repository 2017-06-18 14:34:16 -04:00
Jeremy Long
99828b5cb3 updated jfrog credentials 2017-06-18 14:26:33 -04:00
Jeremy Long
d56f452f31 continued debugging 2017-06-18 13:30:11 -04:00
Jeremy Long
9f52bf5dc9 fixed deploy script 2017-06-18 11:45:14 -04:00
Jeremy Long
aed980f79d fix build script for travis wait 2017-06-18 10:32:47 -04:00
Jeremy Long
f219cb69d4 increase build time 2017-06-18 10:19:25 -04:00
Jeremy Long
936830084e removed release profile 2017-06-18 07:36:00 -04:00
Jeremy Long
2e35c5bcab minor build cleanup 2017-06-18 07:01:13 -04:00
Jeremy Long
a13c6fcb25 updated deployment for staging and gh-pages 2017-06-18 06:36:38 -04:00
Jeremy Long
c748d59146 minor fix to javadoc 2017-06-18 06:25:04 -04:00
Jeremy Long
d1ac0de740 updates and add deploy 2017-06-17 21:19:23 -04:00
Ander Ruiz
0075a7e1ce Patch for bootclasspath loading 2017-06-13 09:10:39 +02:00
Ander Ruiz
091108a369 Minor trace patch 2017-06-13 09:10:13 +02:00
Phillip Whittlesea
0be494a211 Issue #730: Remove redundant method from Settings
Pull file loading from loadSuppressionData() to make it easier to read
Add test assertion to happy case Ant task test
2017-06-12 01:48:33 +01:00
Phillip Whittlesea
8021aaed4b Issue #730: Core tests for multiple suppression files
Added updates to Maven plugin documentation
Added upgrade notes to the README
2017-06-12 01:18:10 +01:00
Phillip Whittlesea
584fd2a47b Issue #730: Allow multiple suppression files in Maven
The core has been extended to handle multiple suppression files
Extended the Ant test to cover multiple suppression files
NOTE: This change is breaking for users of the Maven plugin
2017-06-11 23:30:02 +01:00
Phillip Whittlesea
237dbe7061 Issue #730: Allow multiple suppression files in Ant
The core has not been extended, but the Ant task is able to parse the files and pass them to the Settings singleton
NOTE: This change is breaking for users of the Ant Task
2017-06-11 19:08:59 +01:00
Phillip Whittlesea
ed214d05fa Issue #730: Add a test for suppression in an Ant task 2017-06-11 16:06:32 +01:00
Phillip Whittlesea
76218da8d1 Issue #730: Allow multiple args for CLI suppression
The core has not been extended, but the CLI is able to parse the files and pass them to the Settings singleton
This change to the CLI is backwards compatible
2017-06-11 15:05:24 +01:00
Phillip Whittlesea
869c9c0114 Issue #730: Add CLI test for single suppression file
Added @Before and @After for cleaning the singleton
Cleaned class to ensure I can add further tests easily

I would suggest that AppTest#testPopulateSettings() be split into tests which fail for a single reason.
I have avoided that ATM to minimise code I'm meddling with
2017-06-11 12:57:22 +01:00
Jeremy Long
c3c1869829 Merge branch 'COMINTO-754-error-resolving' 2017-06-10 07:51:56 -04:00
Jeremy Long
e2617b7434 added test and documentation 2017-06-10 07:51:07 -04:00
Jeremy Long
5607e1f179 add the ability to enable/disable the NSP analyzer and updated the site documentation 2017-06-07 07:00:14 -04:00
Johann Schmitz
23ad3d04b0 Issue #754: Allow exclusion of artifacts by type (regex) 2017-06-06 12:22:31 +02:00
Jeremy Long
d498c7c7b0 resolve conflicts 2017-06-04 20:54:17 -04:00
Jeremy Long
66dbcb98d2 updated report to support changes in PR #714 2017-06-04 20:51:27 -04:00
Jeremy Long
422418f396 minor codacy suggested changes 2017-06-04 08:13:59 -04:00
Jeremy Long
31ad7adadd fix issue #751 2017-06-04 08:03:11 -04:00
Jeremy Long
b3216effa4 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-06-04 07:50:22 -04:00
Jeremy Long
060cfd625e removed unused imports 2017-06-04 07:49:55 -04:00
Jeremy Long
38c0882a3a Merge pull request #755 from jeremylong/cleanup
Cleanup
2017-06-04 07:44:02 -04:00
Jeremy Long
a47d46914a made coverity scan executable 2017-06-04 07:07:36 -04:00
Jeremy Long
789a57b430 reverted version due to java 8 requirement 2017-06-04 06:52:09 -04:00
Jeremy Long
6b359a7138 codacy, checkstyle, upgrades, etc. 2017-06-04 06:41:30 -04:00
Jeremy Long
e9e7042923 Merge branch 'master' of github.com:jeremylong/DependencyCheck 2017-06-03 20:17:49 -04:00
Jeremy Long
1fff0db18c added CII badge 2017-06-03 20:17:33 -04:00
Jeremy Long
516129533e Merge pull request #753 from cainj13/fixRuntimeOmission
Fix #752 where skipping runtime-scoped maven artifacts also skipped c…
2017-06-03 12:07:01 -04:00
Jeremy Long
ca4da60dc1 fixed coverity reported unguarded read 2017-06-03 08:07:55 -04:00
Jeremy Long
c3ff5bac54 added coverity scans via cron 2017-06-03 07:23:57 -04:00
Jeremy Long
a07ab11f9f temporary change 2017-06-03 07:10:27 -04:00
Jeremy Long
bbf0b295ce attempting to get travis to run a coverity scan 2017-06-03 06:57:59 -04:00
Josh Cain
ed09242cb7 include checking for maven scope COMPILE_PLUS_RUNTIME on artifact omission predicate 2017-06-02 10:57:38 -05:00
Jeremy Long
52b2b4794e Merge branch 'stevespringett-master' of github.com:jeremylong/DependencyCheck into stevespringett-master 2017-06-02 06:45:47 -04:00
Jeremy Long
4293cce282 Merge branch 'master' of github.com:jeremylong/DependencyCheck into stevespringett-master 2017-06-02 06:44:13 -04:00
Jeremy Long
839d869137 attempting to get coverity to scan via the matrix only when a cron job occurs 2017-06-02 06:20:01 -04:00
Jeremy Long
a8add14255 incremented missed java6 reference 2017-06-02 05:47:21 -04:00
Jeremy Long
6ca6d4d71c Merge pull request #750 from jeremylong/710-pom-parse-error
710 pom parse error
2017-06-01 06:17:02 -04:00
Jeremy Long
ee1934f74b fix travis build to post jacoco coverage to codacy 2017-05-30 20:14:57 -04:00
Josh Cain
69f39d4dfe Fix #752 where skipping runtime-scoped maven artifacts also skipped compile-time artifacts 2017-05-30 14:58:47 -05:00
Jeremy Long
d355cab2f9 minor codacy requested fixes 2017-05-30 06:44:00 -04:00
Jeremy Long
217b08b571 fix wget 2017-05-29 12:04:06 -04:00
Jeremy Long
72aec26ede test cases for fix of issue #710 2017-05-29 08:21:40 -04:00
Jeremy Long
a076ce6e8e fix coverage results to codacy 2017-05-29 08:20:11 -04:00
Jeremy Long
ec448438e5 fix for issue #710 2017-05-29 08:19:22 -04:00
Jeremy Long
9777406460 should not have been in this branch 2017-05-29 07:21:20 -04:00
Jeremy Long
7956606876 test case for #737 2017-05-28 07:35:04 -04:00
Jeremy Long
04e0b95a8a this is purely a reporting module - no need to deploy 2017-05-27 13:03:51 -04:00
Jeremy Long
1b14c10085 checkstyle corrections 2017-05-27 12:06:49 -04:00
Jeremy Long
d5df4920c7 updated version 2017-05-27 12:06:34 -04:00
Jeremy Long
576e26144d updated version 2017-05-27 12:05:08 -04:00
Jeremy Long
e411c03e7f updates for issue #743 and #746 2017-05-27 11:10:14 -04:00
Jeremy Long
bf5f1df8a7 Merge pull request #744 from ysoftdevs/cvedb-cache
Added some caching for CveDB in order to speedup some scans
2017-05-26 07:37:57 -04:00
Jeremy Long
986ad0584d updated packaging 2017-05-25 07:50:10 -04:00
Jeremy Long
6d2ac67011 Merge pull request #745 from jeremylong/reportGeneration
Report Generation Cleanup
2017-05-24 19:28:08 -04:00
Jeremy Long
3e4d012a69 Merge branch 'master' into reportGeneration 2017-05-23 21:02:49 -04:00
Jeremy Long
1b84095c0e Merge branch 'master' into reportGeneration 2017-05-23 21:01:34 -04:00
Jeremy Long
c96ef88222 Moved report generation into the engine, cleaned up code, etc. 2017-05-23 21:00:40 -04:00
stevespringett
8206aa9bfd Added additional check when submitting an invalid payload to nsp. Corrected unit test. 2017-05-23 11:08:54 -05:00
Jeremy Long
dd4a1f2d56 updated for code coverage 2017-05-23 10:44:00 -05:00
Jeremy Long
b0f9935fcb updated to resolve issue #696 2017-05-23 10:44:00 -05:00
Jeremy Long
122c78648a updated code to better handle TLS errors 2017-05-21 18:04:26 -04:00
Jeremy Long
d457fd1452 fixed copyright 2017-05-21 07:45:27 -04:00
Jeremy Long
454a875593 Merge branch 'master' of https://github.com/stevespringett/DependencyCheck into stevespringett-master 2017-05-21 07:29:05 -04:00
stevespringett
9da95e592c Added NSP Analyzer Support 2017-04-26 00:40:15 -05:00
792 changed files with 18690 additions and 8361 deletions

4
.gitignore

@@ -1,6 +1,7 @@
*/target/** */target/**
# IntelliJ test run side-effects # IntelliJ test run side-effects
dependency-check-core/data/ dependency-check-core/data/
dependency-check-ant/data/
# Intellij project files # Intellij project files
*.iml *.iml
*.ipr *.ipr
@@ -15,6 +16,7 @@ maven-eclipse.xml
.pmd .pmd
# Netbeans configuration # Netbeans configuration
nb-configuration.xml nb-configuration.xml
**/nbproject/
/target/ /target/
#maven-shade-plugin generated pom #maven-shade-plugin generated pom
dependency-reduced-pom.xml dependency-reduced-pom.xml
@@ -27,4 +29,4 @@ _site/**
#coverity #coverity
/cov-int/ /cov-int/
/dependency-check-core/nbproject/ /dependency-check-core/nbproject/
cov-scan.bat cov-scan.bat

66
.travis.settings.xml

@@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8"?>
<settings xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.1.0 http://maven.apache.org/xsd/settings-1.1.0.xsd" xmlns="http://maven.apache.org/SETTINGS/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<servers>
<server>
<username>${env.JFROG_USER}</username>
<password>${env.JFROG_PASSWORD}</password>
<id>release</id>
</server>
<server>
<username>${env.JFROG_USER}</username>
<password>${env.JFROG_PASSWORD}</password>
<id>snapshot</id>
</server>
<server>
<username>${env.JFROG_USER}</username>
<password>${env.JFROG_PASSWORD}</password>
<id>plugins-release</id>
</server>
<server>
<username>${env.JFROG_USER}</username>
<password>${env.JFROG_PASSWORD}</password>
<id>plugins-snapshot</id>
</server>
</servers>
<profiles>
<profile>
<repositories>
<repository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>release</id>
<name>libs-release</name>
<url>https://dependencycheck.jfrog.io/dependencycheck/libs-release</url>
</repository>
<repository>
<snapshots />
<id>snapshot</id>
<name>libs-snapshot</name>
<url>https://dependencycheck.jfrog.io/dependencycheck/libs-snapshot</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>plugins-release</id>
<name>plugins-release</name>
<url>https://dependencycheck.jfrog.io/dependencycheck/plugins-release</url>
</pluginRepository>
<pluginRepository>
<snapshots />
<id>plugins-snapshot</id>
<name>plugins-snapshot</name>
<url>https://dependencycheck.jfrog.io/dependencycheck/plugins-snapshot</url>
</pluginRepository>
</pluginRepositories>
<id>artifactory</id>
</profile>
</profiles>
<activeProfiles>
<activeProfile>artifactory</activeProfile>
</activeProfiles>
</settings>

View File

@@ -1,13 +1,78 @@
language: java language: java
jdk: oraclejdk7 sudo: required
script: mvn install -DreleaseTesting group: deprecated-2017Q4
env: env:
global: global:
secure: ZUzhWfpXJw/oAeDlUkDFkEJMT0T7kCN3d7ah8urkL2B0KFfKOqQagkbXkgvDa1SYud8VdcnoGa69LfkEr5IrdqW7R4bEYZAiN5swm4Z0iO8t53szVspm2f+O9jQ44O/sfOfpfLxWUUuhdc7Vbrszp+tSszxdPmssWL+f5a/mfWs= - secure: "ZUzhWfpXJw/oAeDlUkDFkEJMT0T7kCN3d7ah8urkL2B0KFfKOqQagkbXkgvDa1SYud8VdcnoGa69LfkEr5IrdqW7R4bEYZAiN5swm4Z0iO8t53szVspm2f+O9jQ44O/sfOfpfLxWUUuhdc7Vbrszp+tSszxdPmssWL+f5a/mfWs="
- secure: "pmFymoI7qH0Kna3NkcHrqLiTVWKmrhwqA4Z9U6XLhWDQxcs5g94wCCKpGB6Lkz9mkvRxBRFpZZelnXJa9W9mnuVOMIa5tQfS5gBuaNXOe7AXXdc+Y2975OR9sSfvf16FxLFvNJILmZq+bpMLs+EXaQvjYQHW2O6OWZdLhAPVG6A="
- secure: "omj5HP2wKdegLYp8/a24Wsoryb92+XYWheEkxp7CzHGDJB1Y4SSr315n/na/mdgd7lr1Ac+m4stYfCrclG7be71xWs6ApF+6I5QSzplJ1fyIF5piHrmhgw6ymIf/HBdeevggJM8igD8agCOwEETYFKfPEj5wFWhNQfxYwANbpl0="
- secure: "FqPcda7a6rEvGVYEyWeaFP+mIhZeJ6FGSdHvVRlBL0H9I3bz6eZg50g6DH3yo1bkmTPQ94eXdDpoKihk9+CDLl0TS+Sg9W8HplG3B2U1/6Yi3vd0T8yjKZC7xf0VZO6t8AT9vpFvzQBRZe24n+6kDtp2OiBzawJhgU5t09zH6is="
- secure: "Bh5LAk8XQnJ885jc/Lli2fhPKDx0TNZRxcJMnNo96EgwOnD+Zhw+v3u/DMCgyyrRToM8Bkca/HktrlZaRTk2htsdKZZ3RHFMCXO0fXCgpcf+wkaSYDF/lnErpSJG3Lrz8ILxJPODsrGhjaIg2++79lwhsBYtpujc6UdxFhgpffc="
addons:
sonarcloud:
organization: "odc"
token:
secure: "YVDnYmonPug885Hmr2pLWBko+rQ+oKyTUA95ry0PGGyfgs0z6kPCjmWBDVm7K4GM7NOluldWb5gLMf0QXoHGstdp9L6fQCQElt8hZMOwJf+IR3bWjiG3VfVyyB3gJWBWlcJFM9NVyfICidwBH5ZiJ0+LXhKUgnNqarTh/YmNj9w="
cache:
directories:
- "$HOME/.m2/repository"
- "$HOME/.sonar/cache"
before_install: before_install:
- sudo apt-get install jq - sudo apt-get install jq
- wget -O ~/codacy-coverage-reporter-assembly-latest.jar $(curl https://api.github.com/repos/codacy/codacy-coverage-reporter/releases/latest | jq -r .assets[0].browser_download_url) - wget -O ~/codacy-coverage-reporter-assembly-latest.jar $(curl https://api.github.com/repos/codacy/codacy-coverage-reporter/releases/latest | jq -r .assets[0].browser_download_url)
matrix:
include:
- jdk: openjdk7
env:
- JDK="JDK7"
script:
- if [ ! -z "$TRAVIS_TAG" ]; then travis_wait 60 mvn install site site:stage -DreleaseTesting; else travis_wait 60 mvn install -DreleaseTesting; fi
- jdk: oraclejdk8
env:
- JDK="JDK8"
script:
- travis_wait 60 mvn install -DreleaseTesting
after_success: after_success:
- java -cp ~/codacy-coverage-reporter-assembly-latest.jar com.codacy.CodacyCoverageReporter -l Java -r build-reporting/target/site/jacoco-aggregate/jacoco.xml - if [ "$JDK" == "JDK8" ]; then
java -cp ~/codacy-coverage-reporter-assembly-latest.jar com.codacy.CodacyCoverageReporter -l Java -r build-reporting/target/coverage-reports/jacoco.xml;
mvn sonar:sonar -Dsonar.java.coveragePlugin=jacoco -Dsonar.jacoco.reportPath=build-reporting/target/jacoco.xml;
./coverity_scan.sh;
fi;
after_failure:
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/617-hierarchical-cross-deps/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/618-aggregator-purge/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/618-aggregator-update-only/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/629-jackson-dataformat/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/690-threadsafety/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/710-pom-parse-error/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/729-system-scope-resolved/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/729-system-scope-skipped/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/730-multiple-suppression-files/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/730-multiple-suppression-files-configs/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/815-broken-suppression-aggregate/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/846-site-plugin/build.log
- cat /home/travis/build/jeremylong/DependencyCheck/dependency-check-maven/target/it/false-positives/build.log
deploy:
- provider: script
script: mvn --settings .travis.settings.xml source:jar javadoc:jar package deploy -DskipTests=true
skip_cleanup: true
on:
branch: master
jdk: openjdk7
- provider: pages
skip_cleanup: true
local_dir: target/staging
github_token: $GITHUB_TOKEN
on:
tags: true
branch: master
jdk: openjdk7

View File

@@ -1,4 +1,4 @@
FROM java:8 FROM openjdk:8-jre-slim
MAINTAINER Timo Pagel <dependencycheckmaintainer@timo-pagel.de> MAINTAINER Timo Pagel <dependencycheckmaintainer@timo-pagel.de>
@@ -6,24 +6,31 @@ ENV user=dependencycheck
ENV version_url=https://jeremylong.github.io/DependencyCheck/current.txt ENV version_url=https://jeremylong.github.io/DependencyCheck/current.txt
ENV download_url=https://dl.bintray.com/jeremy-long/owasp ENV download_url=https://dl.bintray.com/jeremy-long/owasp
RUN wget -O /tmp/current.txt ${version_url} && \ RUN apt-get update && \
version=$(cat /tmp/current.txt) && \ apt-get install -y --no-install-recommends wget ruby mono-runtime && \
file="dependency-check-${version}-release.zip" && \ gem install bundle-audit && \
wget "$download_url/$file" && \ gem cleanup
unzip ${file} && \
rm ${file} && \
mv dependency-check /usr/share/
RUN useradd -ms /bin/bash ${user} && \
chown -R ${user}:${user} /usr/share/dependency-check && \
mkdir /report && \
chown -R ${user}:${user} /report
RUN wget -O /tmp/current.txt ${version_url} && \
version=$(cat /tmp/current.txt) && \
file="dependency-check-${version}-release.zip" && \
wget "$download_url/$file" && \
unzip ${file} && \
rm ${file} && \
mv dependency-check /usr/share/ && \
useradd -ms /bin/bash ${user} && \
chown -R ${user}:${user} /usr/share/dependency-check && \
mkdir /report && \
chown -R ${user}:${user} /report && \
apt-get remove --purge -y wget && \
apt-get autoremove -y && \
rm -rf /var/lib/apt/lists/* /tmp/*
USER ${user} USER ${user}
VOLUME ["/src" "/usr/share/dependency-check/data" "/report"] VOLUME ["/src" "/usr/share/dependency-check/data" "/report"]
WORKDIR /report WORKDIR /src
CMD ["--help"] CMD ["--help"]
ENTRYPOINT ["/usr/share/dependency-check/bin/dependency-check.sh"] ENTRYPOINT ["/usr/share/dependency-check/bin/dependency-check.sh"]

115
README.md

@@ -1,4 +1,4 @@
[![Build Status](https://travis-ci.org/jeremylong/DependencyCheck.svg?branch=master)](https://travis-ci.org/jeremylong/DependencyCheck) [![Coverity Scan Build Status](https://scan.coverity.com/projects/1654/badge.svg)](https://scan.coverity.com/projects/dependencycheck) [![Codacy Badge](https://api.codacy.com/project/badge/Grade/6b6021d481dc41a888c5da0d9ecf9494)](https://www.codacy.com/app/jeremylong/DependencyCheck?utm_source=github.com&amp;utm_medium=referral&amp;utm_content=jeremylong/DependencyCheck&amp;utm_campaign=Badge_Grade) [![Apache 2.0 License](https://img.shields.io/badge/license-Apache%202-blue.svg)](https://www.apache.org/licenses/LICENSE-2.0.txt) [![Build Status](https://travis-ci.org/jeremylong/DependencyCheck.svg?branch=master)](https://travis-ci.org/jeremylong/DependencyCheck) [![Coverity Scan Build Status](https://scan.coverity.com/projects/1654/badge.svg)](https://scan.coverity.com/projects/dependencycheck) [![Codacy Badge](https://api.codacy.com/project/badge/Grade/6b6021d481dc41a888c5da0d9ecf9494)](https://www.codacy.com/app/jeremylong/DependencyCheck?utm_source=github.com&amp;utm_medium=referral&amp;utm_content=jeremylong/DependencyCheck&amp;utm_campaign=Badge_Grade) [![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/843/badge)](https://bestpractices.coreinfrastructure.org/projects/843) [![Apache 2.0 License](https://img.shields.io/badge/license-Apache%202-blue.svg)](https://www.apache.org/licenses/LICENSE-2.0.txt)
[![Black Hat Arsenal](https://www.toolswatch.org/badges/arsenal/2015.svg)](https://www.toolswatch.org/2015/06/black-hat-arsenal-usa-2015-speakers-lineup/) [![Black Hat Arsenal](https://www.toolswatch.org/badges/arsenal/2014.svg)](https://www.toolswatch.org/2014/06/black-hat-usa-2014-arsenal-tools-speaker-list/) [![Black Hat Arsenal](https://www.toolswatch.org/badges/arsenal/2013.svg)](https://www.toolswatch.org/2013/06/announcement-blackhat-arsenal-usa-2013-selected-tools/) [![Black Hat Arsenal](https://www.toolswatch.org/badges/arsenal/2015.svg)](https://www.toolswatch.org/2015/06/black-hat-arsenal-usa-2015-speakers-lineup/) [![Black Hat Arsenal](https://www.toolswatch.org/badges/arsenal/2014.svg)](https://www.toolswatch.org/2014/06/black-hat-usa-2014-arsenal-tools-speaker-list/) [![Black Hat Arsenal](https://www.toolswatch.org/badges/arsenal/2013.svg)](https://www.toolswatch.org/2013/06/announcement-blackhat-arsenal-usa-2013-selected-tools/)
@@ -42,7 +42,10 @@ $ dependency-check --project Testing --out . --scan [path to jar files to be sca
### Maven Plugin ### Maven Plugin
More detailed instructions can be found on the [dependency-check-maven github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-maven). More detailed instructions can be found on the [dependency-check-maven github pages](http://jeremylong.github.io/DependencyCheck/dependency-check-maven).
The plugin can be configured using the following: By default, the plugin is tied to the `verify` phase (i.e. `mvn verify`). Alternatively,
one can directly invoke the plugin via `mvn org.owasp:dependency-check-maven:check`.
The dependency-check plugin can be configured using the following:
```xml ```xml
<project> <project>
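A minimal configuration along these lines binds the plugin's `check` goal into the build so that it runs during `mvn verify`; the version element and any further options are omitted, so treat this as a sketch rather than the exact snippet from the README:

```xml
<project>
  <build>
    <plugins>
      <plugin>
        <groupId>org.owasp</groupId>
        <artifactId>dependency-check-maven</artifactId>
        <executions>
          <execution>
            <goals>
              <!-- bound to the verify phase by default -->
              <goal>check</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
</project>
```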
@@ -97,7 +100,7 @@ On Windows
> .\dependency-check-cli\target\release\bin\dependency-check.bat --project Testing --out . --scan ./src/test/resources > .\dependency-check-cli\target\release\bin\dependency-check.bat --project Testing --out . --scan ./src/test/resources
``` ```
Then load the resulting 'DependencyCheck-Report.html' into your favorite browser. Then load the resulting 'dependency-check-report.html' into your favorite browser.
### Docker ### Docker
@@ -136,6 +139,110 @@ docker run --rm \
``` ```
Upgrade Notes
-------------
### Upgrading from **1.x.x** to **2.x.x**
Note that when upgrading from version 1.x.x, the following changes will need to be made to your configuration.
#### Suppression file
In order to support multiple suppression files, the mechanism for configuring suppression files has changed.
As such, users who have defined a suppression file in their configuration will need to update it.
See the examples below:
##### Ant
Old:
```xml
<dependency-check
failBuildOnCVSS="3"
suppressionFile="suppression.xml">
</dependency-check>
```
New:
```xml
<dependency-check
failBuildOnCVSS="3">
<suppressionFile path="suppression.xml" />
</dependency-check>
```
##### Maven
Old:
```xml
<plugin>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-maven</artifactId>
<configuration>
<suppressionFile>suppression.xml</suppressionFile>
</configuration>
</plugin>
```
New:
```xml
<plugin>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-maven</artifactId>
<configuration>
<suppressionFiles>
<suppressionFile>suppression.xml</suppressionFile>
</suppressionFiles>
</configuration>
</plugin>
```
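Because the new `suppressionFiles` element takes a list, more than one suppression file can be supplied; the file names below are placeholders:

```xml
<plugin>
  <groupId>org.owasp</groupId>
  <artifactId>dependency-check-maven</artifactId>
  <configuration>
    <suppressionFiles>
      <!-- each listed file is used to suppress false positives -->
      <suppressionFile>suppress-base.xml</suppressionFile>
      <suppressionFile>suppress-project.xml</suppressionFile>
    </suppressionFiles>
  </configuration>
</plugin>
```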
### Gradle
In addition to the changes to the suppression file, the task `dependencyCheck` has been
renamed to `dependencyCheckAnalyze`.
Old:
```groovy
buildscript {
repositories {
mavenLocal()
}
dependencies {
classpath 'org.owasp:dependency-check-gradle:2.0.1-SNAPSHOT'
}
}
apply plugin: 'org.owasp.dependencycheck'
dependencyCheck {
suppressionFile='path/to/suppression.xml'
}
check.dependsOn dependencyCheckAnalyze
```
New:
```groovy
buildscript {
repositories {
mavenLocal()
}
dependencies {
classpath 'org.owasp:dependency-check-gradle:2.0.1-SNAPSHOT'
}
}
apply plugin: 'org.owasp.dependencycheck'
dependencyCheck {
suppressionFiles = ['path/to/suppression1.xml', 'path/to/suppression2.xml']
}
check.dependsOn dependencyCheckAnalyze
```
Mailing List Mailing List
------------ ------------
@@ -148,7 +255,7 @@ Archive: [google group](https://groups.google.com/forum/#!forum/dependency-check
Copyright & License Copyright & License
- -
Dependency-Check is Copyright (c) 2012-2016 Jeremy Long. All Rights Reserved. Dependency-Check is Copyright (c) 2012-2017 Jeremy Long. All Rights Reserved.
Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/LICENSE.txt) file for the full license. Permission to modify and redistribute is granted under the terms of the Apache 2.0 license. See the [LICENSE.txt](https://raw.githubusercontent.com/jeremylong/DependencyCheck/master/LICENSE.txt) file for the full license.

61
RELEASE_NOTES.md

@@ -0,0 +1,61 @@
# Release Notes
Please see the [dependency-check google group](https://groups.google.com/forum/#!forum/dependency-check) for the release notes on versions not listed below.
## [Version 3.1.1](https://github.com/jeremylong/DependencyCheck/releases/tag/v3.1.1) (2018-01-29)
### Bug fixes
- Fixed the Central Analyzer to use the updated SHA1 query syntax.
- Reverted change that broke Maven 3.1.0 compatibility; Maven 3.1.0 and beyond is once again supported.
- False positive reduction.
- Minor documentation cleanup.
## [Version 3.1.0](https://github.com/jeremylong/DependencyCheck/releases/tag/v3.1.0) (2018-01-02)
### Enhancements
- Major enhancements to the Node and NSP analyzers - the analyzers are now considered
production ready and should be used in combination.
- Added a shutdown hook so that if the update process is interrupted while using an H2
database the lock files will be properly removed allowing future executions of ODC to
succeed.
- UNC paths can now be scanned using the CLI.
- Batch updates are now used, which may improve update speed when using an external DBMS
instead of the embedded H2.
- Upgraded Lucene to 5.5.5, the highest version that allows us to maintain Java 7 support.
### Bug fixes
- Fixed the CSV report output to correctly list all fields.
- Invalid suppression files will now break the build instead of causing ODC to
skip the usage of the suppression analyzer.
- Fixed bug in Lucene query where LARGE entries in the pom.xml or manifest caused
the query to break.
- General cleanup, false positive, and false negative reduction.
## [Version 3.0.2](https://github.com/jeremylong/DependencyCheck/releases/tag/v3.0.2) (2017-11-13)
### Bug fixes
- Updated the query format for the CentralAnalyzer; the old format caused the CentralAnalyzer to fail
## [Version 3.0.1](https://github.com/jeremylong/DependencyCheck/releases/tag/v3.0.1) (2017-10-20)
### Bug fixes
- Fixed a database connection issue that affected some usages.
## [Version 3.0.0](https://github.com/jeremylong/DependencyCheck/releases/tag/v3.0.0) (2017-10-16)
- Several bug fixes and false positive reduction
- The 2.x branch introduced several new false positives but also reduced the false negatives
- Java 9 compatibility update
- Stability issues with the Central Analyzer resolved
- This comes at a cost of a longer analysis time
- The CSV report now includes the GAV and CPE
- The Hint Analyzer now supports regular expressions
- If show summary is disabled and vulnerable libraries are found that fail the build, the details are no longer displayed in the console; only a note that vulnerable libraries were identified is shown
- Resolved issues with threading and multiple connections to the embedded H2 database
- This allows the Jenkins pipeline, Maven Plugin, etc. to safely run parallel executions of dependency-check

View File

@@ -20,7 +20,7 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<parent> <parent>
<groupId>org.owasp</groupId> <groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId> <artifactId>dependency-check-parent</artifactId>
<version>1.4.6-SNAPSHOT</version> <version>3.1.2-SNAPSHOT</version>
</parent> </parent>
<artifactId>dependency-check-ant</artifactId> <artifactId>dependency-check-ant</artifactId>
@@ -28,15 +28,6 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
<name>Dependency-Check Ant Task</name> <name>Dependency-Check Ant Task</name>
<description>dependency-check-ant is an Ant Task that uses dependency-check-core to detect publicly disclosed vulnerabilities associated with the project's dependencies. The task will generate a report listing the dependency, any identified Common Platform Enumeration (CPE) identifiers, and the associated Common Vulnerability and Exposure (CVE) entries.</description> <description>dependency-check-ant is an Ant Task that uses dependency-check-core to detect publicly disclosed vulnerabilities associated with the project's dependencies. The task will generate a report listing the dependency, any identified Common Platform Enumeration (CPE) identifiers, and the associated Common Vulnerability and Exposure (CVE) entries.</description>
<!-- begin copy from http://minds.coremedia.com/2012/09/11/problem-solved-deploy-multi-module-maven-project-site-as-github-pages/ -->
<distributionManagement>
<site>
<id>github-pages-site</id>
<name>Deployment through GitHub's site deployment plugin</name>
<url>${basedir}/../target/site/${project.version}/dependency-check-ant</url>
</site>
</distributionManagement>
<!-- end copy -->
<build> <build>
<resources> <resources>
<resource> <resource>
@@ -225,42 +216,6 @@ Copyright (c) 2013 - Jeremy Long. All Rights Reserved.
</plugin> </plugin>
</plugins> </plugins>
</build> </build>
<reporting>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>${reporting.checkstyle-plugin.version}</version>
<configuration>
<enableRulesSummary>false</enableRulesSummary>
<enableFilesSummary>false</enableFilesSummary>
<configLocation>${basedir}/../src/main/config/checkstyle-checks.xml</configLocation>
<headerLocation>${basedir}/../src/main/config/checkstyle-header.txt</headerLocation>
<suppressionsLocation>${basedir}/../src/main/config/checkstyle-suppressions.xml</suppressionsLocation>
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-pmd-plugin</artifactId>
<version>${reporting.pmd-plugin.version}</version>
<configuration>
<targetJdk>1.6</targetJdk>
<linkXRef>true</linkXRef>
<sourceEncoding>utf-8</sourceEncoding>
<excludes>
<exclude>**/generated/*.java</exclude>
</excludes>
<rulesets>
<ruleset>../src/main/config/dcrules.xml</ruleset>
<ruleset>/rulesets/java/basic.xml</ruleset>
<ruleset>/rulesets/java/imports.xml</ruleset>
<ruleset>/rulesets/java/unusedcode.xml</ruleset>
</rulesets>
</configuration>
</plugin>
</plugins>
</reporting>
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.owasp</groupId> <groupId>org.owasp</groupId>

View File

@@ -18,7 +18,10 @@
package org.owasp.dependencycheck.taskdefs; package org.owasp.dependencycheck.taskdefs;
import java.io.File; import java.io.File;
import java.util.ArrayList;
import java.util.List; import java.util.List;
import javax.annotation.concurrent.NotThreadSafe;
import org.apache.tools.ant.BuildException; import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project; import org.apache.tools.ant.Project;
import org.apache.tools.ant.types.EnumeratedAttribute; import org.apache.tools.ant.types.EnumeratedAttribute;
@@ -28,16 +31,13 @@ import org.apache.tools.ant.types.ResourceCollection;
import org.apache.tools.ant.types.resources.FileProvider; import org.apache.tools.ant.types.resources.FileProvider;
import org.apache.tools.ant.types.resources.Resources; import org.apache.tools.ant.types.resources.Resources;
import org.owasp.dependencycheck.Engine; import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException; import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.data.update.exception.UpdateException; import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier; import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability; import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.exception.ExceptionCollection; import org.owasp.dependencycheck.exception.ExceptionCollection;
import org.owasp.dependencycheck.exception.ReportException; import org.owasp.dependencycheck.exception.ReportException;
import org.owasp.dependencycheck.reporting.ReportGenerator;
import org.owasp.dependencycheck.reporting.ReportGenerator.Format; import org.owasp.dependencycheck.reporting.ReportGenerator.Format;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.impl.StaticLoggerBinder; import org.slf4j.impl.StaticLoggerBinder;
@@ -47,6 +47,7 @@ import org.slf4j.impl.StaticLoggerBinder;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@NotThreadSafe
public class Check extends Update { public class Check extends Update {
/** /**
@@ -61,6 +62,11 @@ public class Check extends Update {
* Whether or not the Node.js Analyzer is enabled. * Whether or not the Node.js Analyzer is enabled.
*/ */
private Boolean nodeAnalyzerEnabled; private Boolean nodeAnalyzerEnabled;
/**
* Whether or not the NSP Analyzer is enabled.
*/
private Boolean nspAnalyzerEnabled;
/** /**
* Whether or not the Ruby Bundle Audit Analyzer is enabled. * Whether or not the Ruby Bundle Audit Analyzer is enabled.
*/ */
@@ -146,14 +152,20 @@ public class Check extends Update {
private boolean updateOnly = false; private boolean updateOnly = false;
/** /**
* The report format to be generated (HTML, XML, VULN, CSV, JSON, ALL). Default is * The report format to be generated (HTML, XML, VULN, CSV, JSON, ALL).
* HTML. * Default is HTML.
*/ */
private String reportFormat = "HTML"; private String reportFormat = "HTML";
/** /**
* The path to the suppression file. * Suppression file path.
*/ */
private String suppressionFile; private String suppressionFile = null;
/**
* Suppression file paths.
*/
@SuppressWarnings("CanBeFinal")
private List<String> suppressionFiles = new ArrayList<>();
/** /**
* The path to the suppression file. * The path to the suppression file.
*/ */
@@ -166,6 +178,10 @@ public class Check extends Update {
* Whether experimental analyzers are enabled. * Whether experimental analyzers are enabled.
*/ */
private Boolean enableExperimental; private Boolean enableExperimental;
/**
* Whether retired analyzers are enabled.
*/
private Boolean enableRetired;
/** /**
* Whether or not the Jar Analyzer is enabled. * Whether or not the Jar Analyzer is enabled.
*/ */
@@ -228,6 +244,17 @@ public class Check extends Update {
getPath().add(rc); getPath().add(rc);
} }
/**
* Add a suppression file.
*
* This is called by Ant with the configured {@link SuppressionFile}.
*
* @param suppressionFile the suppression file to add.
*/
public void addConfiguredSuppressionFile(final SuppressionFile suppressionFile) {
suppressionFiles.add(suppressionFile.getPath());
}
/** /**
* Returns the path. If the path has not been initialized yet, this class is * Returns the path. If the path has not been initialized yet, this class is
* synchronized, and will instantiate the path object. * synchronized, and will instantiate the path object.
@@ -434,12 +461,12 @@ public class Check extends Update {
} }
/** /**
* Get the value of suppressionFile. * Gets suppression file paths.
* *
* @return the value of suppressionFile * @return the suppression files.
*/ */
public String getSuppressionFile() { public List<String> getSuppressionFiles() {
return suppressionFile; return suppressionFiles;
} }
/** /**
@@ -449,6 +476,7 @@ public class Check extends Update {
*/ */
public void setSuppressionFile(String suppressionFile) { public void setSuppressionFile(String suppressionFile) {
this.suppressionFile = suppressionFile; this.suppressionFile = suppressionFile;
suppressionFiles.add(suppressionFile);
} }
/** /**
@@ -505,6 +533,24 @@ public class Check extends Update {
this.enableExperimental = enableExperimental; this.enableExperimental = enableExperimental;
} }
/**
* Get the value of enableRetired.
*
* @return the value of enableRetired
*/
public Boolean isEnableRetired() {
return enableRetired;
}
/**
* Set the value of enableRetired.
*
* @param enableRetired new value of enableRetired
*/
public void setEnableRetired(Boolean enableRetired) {
this.enableRetired = enableRetired;
}
/** /**
* Returns whether or not the analyzer is enabled. * Returns whether or not the analyzer is enabled.
* *
@@ -741,6 +787,24 @@ public class Check extends Update {
this.nodeAnalyzerEnabled = nodeAnalyzerEnabled; this.nodeAnalyzerEnabled = nodeAnalyzerEnabled;
} }
/**
* Get the value of nspAnalyzerEnabled.
*
* @return the value of nspAnalyzerEnabled
*/
public Boolean isNspAnalyzerEnabled() {
return nspAnalyzerEnabled;
}
/**
* Set the value of nspAnalyzerEnabled.
*
* @param nspAnalyzerEnabled new value of nspAnalyzerEnabled
*/
public void setNspAnalyzerEnabled(Boolean nspAnalyzerEnabled) {
this.nspAnalyzerEnabled = nspAnalyzerEnabled;
}
/** /**
* Get the value of rubygemsAnalyzerEnabled. * Get the value of rubygemsAnalyzerEnabled.
* *
@@ -909,9 +973,7 @@ public class Check extends Update {
dealWithReferences(); dealWithReferences();
validateConfiguration(); validateConfiguration();
populateSettings(); populateSettings();
Engine engine = null; try (Engine engine = new Engine(Check.class.getClassLoader(), getSettings())) {
try {
engine = new Engine(Check.class.getClassLoader());
if (isUpdateOnly()) { if (isUpdateOnly()) {
log("Deprecated 'UpdateOnly' property set; please use the UpdateTask instead", Project.MSG_WARN); log("Deprecated 'UpdateOnly' property set; please use the UpdateTask instead", Project.MSG_WARN);
try { try {
@@ -940,16 +1002,7 @@ public class Check extends Update {
throw new BuildException(ex); throw new BuildException(ex);
} }
} }
DatabaseProperties prop = null; engine.writeReports(getProjectName(), new File(reportOutputDirectory), reportFormat);
try (CveDB cve = CveDB.getInstance()) {
prop = cve.getDatabaseProperties();
} catch (DatabaseException ex) {
//TODO shouldn't this be a fatal exception
log("Unable to retrieve DB Properties", ex, Project.MSG_DEBUG);
}
final ReportGenerator reporter = new ReportGenerator(getProjectName(), engine.getDependencies(), engine.getAnalyzers(), prop);
reporter.generateReports(reportOutputDirectory, reportFormat);
if (this.failBuildOnCVSS <= 10) { if (this.failBuildOnCVSS <= 10) {
checkForFailure(engine.getDependencies()); checkForFailure(engine.getDependencies());
@@ -970,11 +1023,6 @@ public class Check extends Update {
throw new BuildException(msg, ex); throw new BuildException(msg, ex);
} }
log(msg, ex, Project.MSG_ERR); log(msg, ex, Project.MSG_ERR);
} finally {
Settings.cleanup(true);
if (engine != null) {
engine.cleanup();
}
} }
} }
@@ -984,8 +1032,8 @@ public class Check extends Update {
* *
* @throws BuildException if the task was not configured correctly. * @throws BuildException if the task was not configured correctly.
*/ */
private void validateConfiguration() throws BuildException { private synchronized void validateConfiguration() throws BuildException {
if (getPath() == null) { if (path == null) {
throw new BuildException("No project dependencies have been defined to analyze."); throw new BuildException("No project dependencies have been defined to analyze.");
} }
if (failBuildOnCVSS < 0 || failBuildOnCVSS > 11) { if (failBuildOnCVSS < 0 || failBuildOnCVSS > 11) {
@@ -1003,32 +1051,34 @@ public class Check extends Update {
@Override @Override
protected void populateSettings() throws BuildException { protected void populateSettings() throws BuildException {
super.populateSettings(); super.populateSettings();
Settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate); getSettings().setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile); getSettings().setArrayIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFiles.toArray(new String[suppressionFiles.size()]));
Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile); getSettings().setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, enableExperimental); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, enableExperimental);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_JAR_ENABLED, jarAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_RETIRED_ENABLED, enableRetired);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, pyDistributionAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_JAR_ENABLED, jarAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, pyPackageAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, pyDistributionAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, rubygemsAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, pyPackageAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, opensslAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, rubygemsAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_CMAKE_ENABLED, cmakeAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, opensslAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, swiftPackageManagerAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_CMAKE_ENABLED, cmakeAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, cocoapodsAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, swiftPackageManagerAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, bundleAuditAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, cocoapodsAnalyzerEnabled);
Settings.setStringIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, bundleAuditPath); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, bundleAuditAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, autoconfAnalyzerEnabled); getSettings().setStringIfNotNull(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, bundleAuditPath);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, composerAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, autoconfAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, nodeAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, composerAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, nuspecAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, nodeAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NSP_PACKAGE_ENABLED, nspAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, nuspecAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, archiveAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, assemblyAnalyzerEnabled); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled);
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, archiveAnalyzerEnabled);
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, assemblyAnalyzerEnabled);
Settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions); getSettings().setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono); getSettings().setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
getSettings().setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
getSettings().setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
} }
/** /**
@@ -1039,7 +1089,7 @@ public class Check extends Update {
* @throws BuildException thrown if a CVSS score is found that is higher * @throws BuildException thrown if a CVSS score is found that is higher
* than the threshold set * than the threshold set
*/ */
private void checkForFailure(List<Dependency> dependencies) throws BuildException { private void checkForFailure(Dependency[] dependencies) throws BuildException {
final StringBuilder ids = new StringBuilder(); final StringBuilder ids = new StringBuilder();
for (Dependency d : dependencies) { for (Dependency d : dependencies) {
for (Vulnerability v : d.getVulnerabilities()) { for (Vulnerability v : d.getVulnerabilities()) {
@@ -1053,9 +1103,16 @@ public class Check extends Update {
} }
} }
if (ids.length() > 0) { if (ids.length() > 0) {
final String msg = String.format("%n%nDependency-Check Failure:%n" final String msg;
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than '%.1f': %s%n" if (showSummary) {
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString()); msg = String.format("%n%nDependency-Check Failure:%n"
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than or equal to '%.1f': %s%n"
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString());
} else {
msg = String.format("%n%nDependency-Check Failure:%n"
+ "One or more dependencies were identified with vulnerabilities.%n%n"
+ "See the dependency-check report for more details.%n%n");
}
throw new BuildException(msg); throw new BuildException(msg);
} }
} }
@@ -1066,12 +1123,12 @@ public class Check extends Update {
* *
* @param dependencies a list of dependency objects * @param dependencies a list of dependency objects
*/ */
private void showSummary(List<Dependency> dependencies) { private void showSummary(Dependency[] dependencies) {
final StringBuilder summary = new StringBuilder(); final StringBuilder summary = new StringBuilder();
for (Dependency d : dependencies) { for (Dependency d : dependencies) {
boolean firstEntry = true; boolean firstEntry = true;
final StringBuilder ids = new StringBuilder(); final StringBuilder ids = new StringBuilder();
for (Vulnerability v : d.getVulnerabilities()) { for (Vulnerability v : d.getVulnerabilities(true)) {
if (firstEntry) { if (firstEntry) {
firstEntry = false; firstEntry = false;
} else { } else {
@@ -1102,8 +1159,8 @@ public class Check extends Update {
} }
/** /**
* An enumeration of supported report formats: "ALL", "HTML", "XML", "CSV", "JSON", "VULN", * An enumeration of supported report formats: "ALL", "HTML", "XML", "CSV",
* etc.. * "JSON", "VULN", etc..
*/ */
public static class ReportFormats extends EnumeratedAttribute { public static class ReportFormats extends EnumeratedAttribute {

View File

@@ -37,21 +37,35 @@ public class Purge extends Task {
* The properties file location. * The properties file location.
*/ */
private static final String PROPERTIES_FILE = "task.properties"; private static final String PROPERTIES_FILE = "task.properties";
/**
* The configured settings.
*/
private Settings settings;
/**
* The location of the data directory that contains
*/
private String dataDirectory = null;
/**
* Indicates if dependency-check should fail the build if an exception
* occurs.
*/
private boolean failOnError = true;
/** /**
* Construct a new DependencyCheckTask. * Construct a new DependencyCheckTask.
*/ */
public Purge() { public Purge() {
super(); super();
// Call this before Dependency Check Core starts logging anything - this way, all SLF4J messages from // Call this before Dependency Check Core starts logging anything - this way, all SLF4J messages from
// core end up coming through this tasks logger // core end up coming through this tasks logger
StaticLoggerBinder.getSingleton().setTask(this); StaticLoggerBinder.getSingleton().setTask(this);
} }
/** public Settings getSettings() {
* The location of the data directory that contains return settings;
*/ }
private String dataDirectory = null;
/** /**
* Get the value of dataDirectory. * Get the value of dataDirectory.
@@ -71,12 +85,6 @@ public class Purge extends Task {
this.dataDirectory = dataDirectory; this.dataDirectory = dataDirectory;
} }
/**
* Indicates if dependency-check should fail the build if an exception
* occurs.
*/
private boolean failOnError = true;
/** /**
* Get the value of failOnError. * Get the value of failOnError.
* *
@@ -106,7 +114,7 @@ public class Purge extends Task {
populateSettings(); populateSettings();
File db; File db;
try { try {
db = new File(Settings.getDataDirectory(), "dc.h2.db"); db = new File(settings.getDataDirectory(), "dc.h2.db");
if (db.exists()) { if (db.exists()) {
if (db.delete()) { if (db.delete()) {
log("Database file purged; local copy of the NVD has been removed", Project.MSG_INFO); log("Database file purged; local copy of the NVD has been removed", Project.MSG_INFO);
@@ -118,7 +126,7 @@ public class Purge extends Task {
log(msg, Project.MSG_ERR); log(msg, Project.MSG_ERR);
} }
} else { } else {
final String msg = String.format("Unable to purge database; the database file does not exists: %s", db.getAbsolutePath()); final String msg = String.format("Unable to purge database; the database file does not exist: %s", db.getAbsolutePath());
if (this.failOnError) { if (this.failOnError) {
throw new BuildException(msg); throw new BuildException(msg);
} }
@@ -131,7 +139,7 @@ public class Purge extends Task {
} }
log(msg, Project.MSG_ERR); log(msg, Project.MSG_ERR);
} finally { } finally {
Settings.cleanup(true); settings.cleanup(true);
} }
} }
@@ -143,9 +151,9 @@ public class Purge extends Task {
* @throws BuildException thrown if the properties file cannot be read. * @throws BuildException thrown if the properties file cannot be read.
*/ */
protected void populateSettings() throws BuildException { protected void populateSettings() throws BuildException {
Settings.initialize(); settings = new Settings();
try (InputStream taskProperties = this.getClass().getClassLoader().getResourceAsStream(PROPERTIES_FILE)) { try (InputStream taskProperties = this.getClass().getClassLoader().getResourceAsStream(PROPERTIES_FILE)) {
Settings.mergeProperties(taskProperties); settings.mergeProperties(taskProperties);
} catch (IOException ex) { } catch (IOException ex) {
final String msg = "Unable to load the dependency-check ant task.properties file."; final String msg = "Unable to load the dependency-check ant task.properties file.";
if (this.failOnError) { if (this.failOnError) {
@@ -154,13 +162,13 @@ public class Purge extends Task {
log(msg, ex, Project.MSG_WARN); log(msg, ex, Project.MSG_WARN);
} }
if (dataDirectory != null) { if (dataDirectory != null) {
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory); settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
} else { } else {
final File jarPath = new File(Purge.class.getProtectionDomain().getCodeSource().getLocation().getPath()); final File jarPath = new File(Purge.class.getProtectionDomain().getCodeSource().getLocation().getPath());
final File base = jarPath.getParentFile(); final File base = jarPath.getParentFile();
final String sub = Settings.getString(Settings.KEYS.DATA_DIRECTORY); final String sub = settings.getString(Settings.KEYS.DATA_DIRECTORY);
final File dataDir = new File(base, sub); final File dataDir = new File(base, sub);
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath()); settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
} }
} }
} }

View File

@@ -0,0 +1,51 @@
/*
* This file is part of dependency-check-ant.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2017 The OWASP Foundation. All Rights Reserved.
*/
package org.owasp.dependencycheck.taskdefs;
/**
* Class : {@link SuppressionFile} Responsibility : Models a suppression file
* nested XML element where the simple content is its location.
*
* @author Phillip Whittlesea
*/
public class SuppressionFile {
/**
* The path to the suppression file.
*/
private String path;
/**
* Sets the path to the suppression file.
*
* @param path the path to the suppression file
*/
public void setPath(String path) {
this.path = path;
}
/**
* Gets the path to the suppression file.
*
* @return the path
*/
public String getPath() {
return path;
}
}

View File

@@ -385,9 +385,7 @@ public class Update extends Purge {
@Override @Override
public void execute() throws BuildException { public void execute() throws BuildException {
populateSettings(); populateSettings();
Engine engine = null; try (Engine engine = new Engine(Update.class.getClassLoader(), getSettings())) {
try {
engine = new Engine(Update.class.getClassLoader());
try { try {
engine.doUpdates(); engine.doUpdates();
} catch (UpdateException ex) { } catch (UpdateException ex) {
@@ -402,11 +400,6 @@ public class Update extends Purge {
throw new BuildException(msg, ex); throw new BuildException(msg, ex);
} }
log(msg, Project.MSG_ERR); log(msg, Project.MSG_ERR);
} finally {
Settings.cleanup(true);
if (engine != null) {
engine.cleanup();
}
} }
} }
@@ -420,23 +413,23 @@ public class Update extends Purge {
@Override @Override
protected void populateSettings() throws BuildException { protected void populateSettings() throws BuildException {
super.populateSettings(); super.populateSettings();
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer); getSettings().setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort); getSettings().setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername); getSettings().setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword); getSettings().setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout); getSettings().setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName); getSettings().setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath); getSettings().setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString); getSettings().setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser); getSettings().setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword); getSettings().setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified); getSettings().setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified); getSettings().setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base); getSettings().setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base); getSettings().setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
if (cveValidForHours != null) { if (cveValidForHours != null) {
if (cveValidForHours >= 0) { if (cveValidForHours >= 0) {
Settings.setInt(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours); getSettings().setInt(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
} else { } else {
throw new BuildException("Invalid setting: `cpeValidForHours` must be 0 or greater"); throw new BuildException("Invalid setting: `cpeValidForHours` must be 0 or greater");
} }

View File

@@ -48,7 +48,7 @@ public class StaticLoggerBinder implements LoggerFactoryBinder {
* *
* @return the StaticLoggerBinder singleton * @return the StaticLoggerBinder singleton
*/ */
public static final StaticLoggerBinder getSingleton() { public static StaticLoggerBinder getSingleton() {
return SINGLETON; return SINGLETON;
} }

View File

@@ -17,7 +17,7 @@ the project's dependencies.
<dependency-check projectname="Hello World" <dependency-check projectname="Hello World"
reportoutputdirectory="${basedir}" reportoutputdirectory="${basedir}"
reportformat="ALL"> reportformat="ALL">
<suppressionfile path="${basedir}/path/to/suppression.xml" />
<fileset dir="lib"> <fileset dir="lib">
<include name="**/*.jar"/> <include name="**/*.jar"/>
</fileset> </fileset>
@@ -33,19 +33,26 @@ Property | Description
----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------------- ----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------
autoUpdate | Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not recommended that this be turned to false. | true autoUpdate | Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not recommended that this be turned to false. | true
cveValidForHours | Sets the number of hours to wait before checking for new updates from the NVD | 4 cveValidForHours | Sets the number of hours to wait before checking for new updates from the NVD | 4
failBuildOnCVSS | Specifies if the build should be failed if a CVSS score above a specified level is identified. The default is 11; since CVSS scores range from 0 to 10, by default the build will never fail. | 11 failBuildOnCVSS | Specifies if the build should be failed if a CVSS score equal to or above a specified level is identified. The default is 11; since CVSS scores range from 0 to 10, by default the build will never fail. | 11
failOnError | Whether the build should fail if there is an error executing the dependency-check analysis | true failOnError | Whether the build should fail if there is an error executing the dependency-check analysis | true
projectName | The name of the project being scanned. | Dependency-Check projectName | The name of the project being scanned. | Dependency-Check
reportFormat | The report format to be generated (HTML, XML, CSV, JSON, VULN, ALL). This configuration option has no effect if using this within the Site plugin unless the externalReport is set to true. | HTML reportFormat | The report format to be generated (HTML, XML, CSV, JSON, VULN, ALL). This configuration option has no effect if using this within the Site plugin unless the externalReport is set to true. | HTML
reportOutputDirectory | The location to write the report(s). Note, this is not used if generating the report as part of a `mvn site` build | 'target' reportOutputDirectory | The location to write the report(s). Note, this is not used if generating the report as part of a `mvn site` build | 'target'
suppressionFile | The file path to the XML suppression file \- used to suppress [false positives](../general/suppression.html) | &nbsp;
hintsFile | The file path to the XML hints file \- used to resolve [false negatives](../general/hints.html) | &nbsp; hintsFile | The file path to the XML hints file \- used to resolve [false negatives](../general/hints.html) | &nbsp;
proxyServer | The Proxy Server; see the [proxy configuration](../data/proxy.html) page for more information. | &nbsp; proxyServer | The Proxy Server; see the [proxy configuration](../data/proxy.html) page for more information. | &nbsp;
proxyPort | The Proxy Port. | &nbsp; proxyPort | The Proxy Port. | &nbsp;
proxyUsername | Defines the proxy user name. | &nbsp; proxyUsername | Defines the proxy user name. | &nbsp;
proxyPassword | Defines the proxy password. | &nbsp; proxyPassword | Defines the proxy password. | &nbsp;
connectionTimeout | The URL Connection Timeout. | &nbsp; connectionTimeout | The URL Connection Timeout. | &nbsp;
enableExperimental | Enable the [experimental analyzers](../analyzers/index.html). If not enabled the experimental analyzers (see below) will not be loaded or used. | false enableExperimental | Enable the [experimental analyzers](../analyzers/index.html). If not enabled the experimental analyzers (see below) will not be loaded or used. | false
enableRetired | Enable the [retired analyzers](../analyzers/index.html). If not enabled the retired analyzers (see below) will not be loaded or used. | false
suppressionFile | The file path to the XML suppression file \- used to suppress [false positives](../general/suppression.html). | &nbsp;
The following nested elements can be set on the dependency-check task.
Element | Property | Description | Default Value
------------------|----------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------
suppressionFile | path | The file path to the XML suppression file \- used to suppress [false positives](../general/suppression.html). Element can be specified multiple times. | &nbsp;
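For example, the nested element can be repeated to apply several suppression files to one task; the paths below are placeholders:

```xml
<dependency-check projectname="Hello World"
                  reportoutputdirectory="${basedir}"
                  reportformat="ALL">
    <suppressionfile path="${basedir}/suppress-base.xml" />
    <suppressionfile path="${basedir}/suppress-project.xml" />
    <fileset dir="lib">
        <include name="**/*.jar"/>
    </fileset>
</dependency-check>
```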
Analyzer Configuration Analyzer Configuration
==================== ====================
@@ -55,23 +62,24 @@ Note that specific analyzers will automatically disable themselves if no file
types that they support are detected - so specifically disabling them may not types that they support are detected - so specifically disabling them may not
be needed. be needed.
Property | Description | Default Value
------------------------------|------------------------------------------------------------------------------------------------------------|------------------
archiveAnalyzerEnabled | Sets whether the Archive Analyzer will be used. | true
zipExtensions | A comma-separated list of additional file extensions to be treated like a ZIP file, the contents will be extracted and analyzed. | &nbsp;
jarAnalyzer | Sets whether the Jar Analyzer will be used. | true
centralAnalyzerEnabled | Sets whether the Central Analyzer will be used. **Disabling this analyzer is not recommended as it could lead to false negatives (e.g. libraries that have vulnerabilities may not be reported correctly).** If this analyzer is being disabled, there is a good chance you also want to disable the Nexus Analyzer (see below). | true
nexusAnalyzerEnabled | Sets whether the Nexus Analyzer will be used (requires Nexus Pro). This analyzer is superseded by the Central Analyzer; however, you can configure this to run against a Nexus Pro installation. | true
nexusUrl | Defines the Nexus web service endpoint (example http://domain.enterprise/nexus/service/local/). If not set the Nexus Analyzer will be disabled. | &nbsp;
nexusUsesProxy | Whether or not the defined proxy should be used when connecting to Nexus. | true
pyDistributionAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Python Distribution Analyzer will be used. | true
pyPackageAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Python Package Analyzer will be used. | true
rubygemsAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Ruby Gemspec Analyzer will be used. | true
opensslAnalyzerEnabled | Sets whether the OpenSSL Analyzer should be used. | true
cmakeAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) CMake Analyzer should be used. | true
autoconfAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) autoconf Analyzer should be used. | true
composerAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) PHP Composer Lock File Analyzer should be used. | true
nodeAnalyzerEnabled | Sets whether the [retired](../analyzers/index.html) Node.js Analyzer should be used. | true
nspAnalyzerEnabled | Sets whether the NSP Analyzer should be used. | true
nuspecAnalyzerEnabled | Sets whether the .NET Nuget Nuspec Analyzer will be used. | true
cocoapodsAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Cocoapods Analyzer should be used. | true
bundleAuditAnalyzerEnabled | Sets whether the [experimental](../analyzers/index.html) Bundle Audit Analyzer should be used. | true
@@ -92,8 +100,8 @@ cveUrl20Modified | URL for the modified CVE 2.0.
cveUrl12Base | Base URL for each year's CVE 1.2, the %d will be replaced with the year. | http://nvd.nist.gov/download/nvdcve-%d.xml
cveUrl20Base | Base URL for each year's CVE 2.0, the %d will be replaced with the year. | http://static.nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml
dataDirectory | Data directory that is used to store the local copy of the NVD. This should generally not be changed. | data
databaseDriverName | The name of the database driver. Example: org.h2.Driver. | &nbsp;
databaseDriverPath | The path to the database driver JAR file; only used if the driver is not in the class path. | &nbsp;
connectionString | The connection string used to connect to the database. | &nbsp;
databaseUser | The username used when connecting to the database. | &nbsp;
databasePassword | The password used when connecting to the database. | &nbsp;
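As an illustration of how a few of the analyzer and database properties above might be combined, here is a hedged sketch. Using these property names as task attributes is inferred from the tables above rather than taken from an official example, and the driver, connection string, credentials, and paths are placeholders.

    <target name="dependency-check-external-db">
        <dependency-check applicationName="My App"
                          reportOutputDirectory="${basedir}/target"
                          centralAnalyzerEnabled="false"
                          nexusAnalyzerEnabled="false"
                          databaseDriverName="org.h2.Driver"
                          connectionString="jdbc:h2:file:/opt/dependency-check/data/dc"
                          databaseUser="dcuser"
                          databasePassword="dcpass">
            <fileset dir="${basedir}/lib">
                <include name="**/*.jar"/>
            </fileset>
        </dependency-check>
    </target>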

View File

Binary image file (10 KiB before and after).

View File

@@ -21,13 +21,12 @@ import java.io.File;
import org.apache.tools.ant.BuildException; import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.BuildFileRule; import org.apache.tools.ant.BuildFileRule;
import org.junit.After; import org.apache.tools.ant.types.LogLevel;
import org.junit.Before; import org.junit.Before;
import org.junit.Rule; import org.junit.Rule;
import org.junit.Test; import org.junit.Test;
import org.junit.rules.ExpectedException; import org.junit.rules.ExpectedException;
import org.owasp.dependencycheck.BaseDBTestCase; import org.owasp.dependencycheck.BaseDBTestCase;
import org.owasp.dependencycheck.utils.Settings;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
@@ -35,7 +34,7 @@ import static org.junit.Assert.assertTrue;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public class DependencyCheckTaskTest { public class DependencyCheckTaskTest extends BaseDBTestCase {
@Rule @Rule
public BuildFileRule buildFileRule = new BuildFileRule(); public BuildFileRule buildFileRule = new BuildFileRule();
@@ -44,18 +43,11 @@ public class DependencyCheckTaskTest {
public ExpectedException expectedException = ExpectedException.none(); public ExpectedException expectedException = ExpectedException.none();
@Before @Before
@Override
public void setUp() throws Exception { public void setUp() throws Exception {
Settings.initialize(); super.setUp();
BaseDBTestCase.ensureDBExists();
final String buildFile = this.getClass().getClassLoader().getResource("build.xml").getPath(); final String buildFile = this.getClass().getClassLoader().getResource("build.xml").getPath();
buildFileRule.configureProject(buildFile); buildFileRule.configureProject(buildFile, LogLevel.VERBOSE.getLevel());
}
@After
public void tearDown() {
//no cleanup...
//executeTarget("cleanup");
Settings.cleanup(true);
} }
/** /**
@@ -65,7 +57,7 @@ public class DependencyCheckTaskTest {
public void testAddFileSet() throws Exception { public void testAddFileSet() throws Exception {
File report = new File("target/dependency-check-report.html"); File report = new File("target/dependency-check-report.html");
if (report.exists() && !report.delete()) { if (report.exists() && !report.delete()) {
throw new Exception("Unable to delete 'target/DependencyCheck-Report.html' prior to test."); throw new Exception("Unable to delete 'target/dependency-check-report.html' prior to test.");
} }
buildFileRule.executeTarget("test.fileset"); buildFileRule.executeTarget("test.fileset");
assertTrue("DependencyCheck report was not generated", report.exists()); assertTrue("DependencyCheck report was not generated", report.exists());
@@ -114,4 +106,66 @@ public class DependencyCheckTaskTest {
expectedException.expect(BuildException.class); expectedException.expect(BuildException.class);
buildFileRule.executeTarget("failCVSS"); buildFileRule.executeTarget("failCVSS");
} }
/**
* Test the DependencyCheckTask where a CVE is suppressed.
*/
@Test
public void testSuppressingCVE() {
// GIVEN an ant task with a vulnerability
final String antTaskName = "suppression";
// WHEN executing the ant task
buildFileRule.executeTarget(antTaskName);
System.out.println("----------------------------------------------------------");
System.out.println("----------------------------------------------------------");
System.out.println("----------------------------------------------------------");
System.out.println("----------------------------------------------------------");
System.out.println(buildFileRule.getError());
System.out.println("----------------------------------------------------------");
System.out.println("----------------------------------------------------------");
System.out.println(buildFileRule.getFullLog());
System.out.println("----------------------------------------------------------");
System.out.println("----------------------------------------------------------");
System.out.println("----------------------------------------------------------");
System.out.println("----------------------------------------------------------");
// THEN the ant task executed without error
final File report = new File("target/suppression-report.html");
assertTrue("Expected the DependencyCheck report to be generated", report.exists());
}
/**
* Test the DependencyCheckTask using the deprecated suppressionFile property.
*/
@Test
public void testSuppressingSingle() {
// GIVEN an ant task with a vulnerability using the legacy property
final String antTaskName = "suppression-single";
// WHEN executing the ant task
buildFileRule.executeTarget(antTaskName);
// THEN the ant task executed without error
final File report = new File("target/suppression-single-report.html");
assertTrue("Expected the DependencyCheck report to be generated", report.exists());
}
/**
* Test the DependencyCheckTask combining the deprecated suppressionFile property
* with a nested suppressionfile element.
*/
@Test
public void testSuppressingMultiple() {
// GIVEN an ant task with a vulnerability using multiple ways to configure the suppression file
final String antTaskName = "suppression-multiple";
// WHEN executing the ant task
buildFileRule.executeTarget(antTaskName);
// THEN the ant task executed without error
final File report = new File("target/suppression-multiple-report.html");
assertTrue("Expected the DependencyCheck report to be generated", report.exists());
}
} }

View File

@@ -71,4 +71,47 @@
</fileset> </fileset>
</dependency-check> </dependency-check>
</target> </target>
<target name="suppression">
<dependency-check
applicationName="test suppression"
reportOutputDirectory="${project.build.directory}/suppression-report.html"
autoupdate="false"
failBuildOnCVSS="3">
<suppressionfile path="${project.build.directory}/test-classes/test-suppression1.xml" />
<suppressionfile path="${project.build.directory}/test-classes/test-suppression2.xml" />
<fileset dir="${project.build.directory}/test-classes/jars">
<include name="axis-1.4.jar"/>
</fileset>
<filelist
dir="${project.build.directory}/test-classes/list"
files="jetty-6.1.0.jar,org.mortbay.jetty.jar"/>
</dependency-check>
</target>
<target name="suppression-single">
<dependency-check
applicationName="test suppression"
reportOutputDirectory="${project.build.directory}/suppression-single-report.html"
autoupdate="false"
failBuildOnCVSS="3"
suppressionFile="${project.build.directory}/test-classes/test-suppression.xml">
<fileset dir="${project.build.directory}/test-classes/jars">
<include name="axis-1.4.jar"/>
</fileset>
</dependency-check>
</target>
<target name="suppression-multiple">
<dependency-check
applicationName="test suppression"
reportOutputDirectory="${project.build.directory}/suppression-multiple-report.html"
autoupdate="false"
failBuildOnCVSS="3"
suppressionFile="${project.build.directory}/test-classes/test-suppression1.xml">
<suppressionfile path="${project.build.directory}/test-classes/test-suppression2.xml"/>
<fileset dir="${project.build.directory}/test-classes/jars">
<include name="axis-1.4.jar"/>
</fileset>
</dependency-check>
</target>
</project> </project>

View File

@@ -0,0 +1,48 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This file is part of dependency-check-core.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Copyright (c) 2017 The OWASP Foundation. All Rights Reserved.
-->
<suppressions xmlns="https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.1.xsd">
<suppress>
<notes><![CDATA[
file name: axis-1.4.jar
]]></notes>
<gav regex="true">^org\.apache\.axis:axis:.*$</gav>
<cpe>cpe:/a:apache:axis</cpe>
</suppress>
<suppress>
<notes><![CDATA[
file name: org.mortbay.jetty.jar
]]></notes>
<gav regex="true">^jetty:org\.mortbay\.jetty:.*$</gav>
<cpe>cpe:/a:jetty:jetty</cpe>
</suppress>
<suppress>
<notes><![CDATA[
file name: org.mortbay.jetty.jar
]]></notes>
<gav regex="true">^jetty:org\.mortbay\.jetty:.*$</gav>
<cpe>cpe:/a:mortbay:jetty</cpe>
</suppress>
<suppress>
<notes><![CDATA[
file name: org.mortbay.jetty.jar
]]></notes>
<gav regex="true">^jetty:org\.mortbay\.jetty:.*$</gav>
<cpe>cpe:/a:mortbay_jetty:jetty</cpe>
</suppress>
</suppressions>

View File

@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This file is part of dependency-check-core.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Copyright (c) 2017 The OWASP Foundation. All Rights Reserved.
-->
<suppressions xmlns="https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.1.xsd">
<suppress>
<notes><![CDATA[
file name: axis-1.4.jar
]]></notes>
<gav regex="true">^org\.apache\.axis:axis:.*$</gav>
<cpe>cpe:/a:apache:axis</cpe>
</suppress>
</suppressions>

View File

@@ -0,0 +1,41 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This file is part of dependency-check-core.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Copyright (c) 2017 The OWASP Foundation. All Rights Reserved.
-->
<suppressions xmlns="https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.1.xsd">
<suppress>
<notes><![CDATA[
file name: org.mortbay.jetty.jar
]]></notes>
<gav regex="true">^jetty:org\.mortbay\.jetty:.*$</gav>
<cpe>cpe:/a:jetty:jetty</cpe>
</suppress>
<suppress>
<notes><![CDATA[
file name: org.mortbay.jetty.jar
]]></notes>
<gav regex="true">^jetty:org\.mortbay\.jetty:.*$</gav>
<cpe>cpe:/a:mortbay:jetty</cpe>
</suppress>
<suppress>
<notes><![CDATA[
file name: org.mortbay.jetty.jar
]]></notes>
<gav regex="true">^jetty:org\.mortbay\.jetty:.*$</gav>
<cpe>cpe:/a:mortbay_jetty:jetty</cpe>
</suppress>
</suppressions>

View File

@@ -21,21 +21,12 @@ Copyright (c) 2017 Jeremy Long. All Rights Reserved.
<parent> <parent>
<groupId>org.owasp</groupId> <groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId> <artifactId>dependency-check-parent</artifactId>
<version>1.4.6-SNAPSHOT</version> <version>3.1.2-SNAPSHOT</version>
</parent> </parent>
<groupId>org.owasp</groupId> <groupId>org.owasp</groupId>
<artifactId>dependency-check-plugin</artifactId> <artifactId>dependency-check-plugin</artifactId>
<name>Dependency-Check Plugin Archetype</name> <name>Dependency-Check Plugin Archetype</name>
<packaging>jar</packaging> <packaging>jar</packaging>
<!-- begin copy from http://minds.coremedia.com/2012/09/11/problem-solved-deploy-multi-module-maven-project-site-as-github-pages/ -->
<distributionManagement>
<site>
<id>github-pages-site</id>
<name>Deployment through GitHub's site deployment plugin</name>
<url>${basedir}/../target/site/${project.version}/dependency-check-plugin</url>
</site>
</distributionManagement>
<!-- end copy -->
<build> <build>
<plugins> <plugins>
<plugin> <plugin>

View File

@@ -23,6 +23,7 @@ import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException; import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.Settings;
/** /**
* An OWASP dependency-check plug-in example. If you are not implementing a * An OWASP dependency-check plug-in example. If you are not implementing a
@@ -66,7 +67,7 @@ public class ${analyzerName} implements Analyzer, FileTypeAnalyzer {
@Override @Override
public void analyze(Dependency dependency, Engine engine) throws AnalysisException { public void analyze(Dependency dependency, Engine engine) throws AnalysisException {
if (enabled) { if (enabled) {
throw new UnsupportedOperationException("Not implemented yet."); //TODO implement analyze
} }
} }
@@ -91,15 +92,26 @@ public class ${analyzerName} implements Analyzer, FileTypeAnalyzer {
} }
/** /**
* The initialize method is called (once) prior to the analyze method being * The initialize method is called just after instantiation of the object.
* called on all of the dependencies.
* *
* @throws InitializationException is thrown if an exception occurs * @param settings a reference to the configured settings
* initializing the analyzer.
*/ */
@Override @Override
public void initialize() throws InitializationException { public void initialize(Settings settings) {
//TODO implement initialize
}
/**
* The prepare method is called once just prior to repeated calls to
* analyze.
*
* @param engine a reference to the engine
* @throws InitializationException thrown when the analyzer cannot be
* initialized
*/
@Override
public void prepare(Engine engine) throws InitializationException {
//TODO implement prepare
} }
/** /**
@@ -116,7 +128,7 @@ public class ${analyzerName} implements Analyzer, FileTypeAnalyzer {
/** /**
* Returns whether multiple instances of the same type of analyzer can run * Returns whether multiple instances of the same type of analyzer can run
* in parallel. If the analyzer does not support parallel processing it is * in parallel. If the analyzer does not support parallel processing it is
* generally best to also mark the analyze(Dependency,Engine) as synchronized. * generally best to also mark the analyze(Dependency,Engine) as synchronized.
* *
* @return {@code true} if the analyzer supports parallel processing, * @return {@code true} if the analyzer supports parallel processing,
* {@code false} else * {@code false} else

View File

@@ -30,25 +30,27 @@ import org.owasp.dependencycheck.utils.Settings;
*/ */
public class ${analyzerName}Test { public class ${analyzerName}Test {
Settings settings = null;
public ${analyzerName}Test() { public ${analyzerName}Test() {
} }
@BeforeClass @BeforeClass
public static void setUpClass() { public static void setUpClass() {
Settings.initialize();
} }
@AfterClass @AfterClass
public static void tearDownClass() { public static void tearDownClass() {
Settings.cleanup();
} }
@Before @Before
public void setUp() { public void setUp() {
settings = new Settings();
} }
@After @After
public void tearDown() { public void tearDown() {
settings.cleanup();
} }
/** /**
@@ -68,12 +70,14 @@ public class ${analyzerName}Test {
*/ */
@Test @Test
public void testAnalyze() throws Exception { public void testAnalyze() throws Exception {
//The engine is generally null for most analyzer test cases but can be instantiated if needed.
Engine engine = null;
${analyzerName} instance = new ${analyzerName}(); ${analyzerName} instance = new ${analyzerName}();
instance.initialize(); instance.initialize(settings);
instance.prepare(engine);
File file = new File(${analyzerName}.class.getClassLoader().getResource("test.file").toURI().getPath()); File file = new File(${analyzerName}.class.getClassLoader().getResource("test.file").toURI().getPath());
Dependency dependency = new Dependency(file); Dependency dependency = new Dependency(file);
//The engine is generally null for most analyzer test cases.
Engine engine = null;
//TODO uncomment the following line and add assertions against the dependency. //TODO uncomment the following line and add assertions against the dependency.
//instance.analyze(dependency, engine); //instance.analyze(dependency, engine);
@@ -107,7 +111,7 @@ public class ${analyzerName}Test {
@Test @Test
public void testInitialize() throws Exception { public void testInitialize() throws Exception {
${analyzerName} instance = new ${analyzerName}(); ${analyzerName} instance = new ${analyzerName}();
instance.initialize(); instance.initialize(settings);
} }
/** /**

View File

Binary image file (9.0 KiB before and after).

View File

@@ -20,19 +20,11 @@ Copyright (c) 2017 - Jeremy Long. All Rights Reserved.
<parent> <parent>
<groupId>org.owasp</groupId> <groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId> <artifactId>dependency-check-parent</artifactId>
<version>1.4.6-SNAPSHOT</version> <version>3.1.2-SNAPSHOT</version>
</parent> </parent>
<name>Dependency-Check Build-Reporting</name>
<artifactId>build-reporting</artifactId> <artifactId>build-reporting</artifactId>
<!-- begin copy from http://minds.coremedia.com/2012/09/11/problem-solved-deploy-multi-module-maven-project-site-as-github-pages/ --> <packaging>pom</packaging>
<distributionManagement>
<site>
<id>github-pages-site</id>
<name>Deployment through GitHub's site deployment plugin</name>
<url>${basedir}/../target/site/${project.version}/build-reporting</url>
</site>
</distributionManagement>
<!-- end copy -->
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.owasp</groupId> <groupId>org.owasp</groupId>
@@ -62,16 +54,58 @@ Copyright (c) 2017 - Jeremy Long. All Rights Reserved.
</dependencies> </dependencies>
<build> <build>
<plugins> <plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
<plugin> <plugin>
<groupId>org.jacoco</groupId> <groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId> <artifactId>jacoco-maven-plugin</artifactId>
<executions> <executions>
<execution>
<id>report-merge</id>
<phase>verify</phase>
<goals>
<goal>merge</goal>
</goals>
<configuration>
<fileSets>
<fileSet implementation="org.apache.maven.shared.model.fileset.FileSet">
<directory>${project.basedir}/../</directory>
<includes>
<include>utils/target/coverage-reports/*.exec</include>
<include>core/target/coverage-reports/*.exec</include>
<include>cli/target/coverage-reports/*.exec</include>
<include>ant/target/coverage-reports/*.exec</include>
<include>maven/target/coverage-reports/*.exec</include>
</includes>
</fileSet>
</fileSets>
</configuration>
</execution>
<execution> <execution>
<id>report-aggregate</id> <id>report-aggregate</id>
<phase>verify</phase> <phase>verify</phase>
<goals> <goals>
<goal>report-aggregate</goal> <goal>report-aggregate</goal>
</goals> </goals>
<configuration>
<outputDirectory>target/coverage-reports/</outputDirectory>
<dataFileIncludes>
<dataFileInclude>target/coverage-reports/jacoco-ut.exec</dataFileInclude>
<dataFileInclude>target/coverage-reports/jacoco-it.exec</dataFileInclude>
</dataFileIncludes>
</configuration>
</execution> </execution>
</executions> </executions>
</plugin> </plugin>

View File

@@ -20,7 +20,7 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<parent> <parent>
<groupId>org.owasp</groupId> <groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId> <artifactId>dependency-check-parent</artifactId>
<version>1.4.6-SNAPSHOT</version> <version>3.1.2-SNAPSHOT</version>
</parent> </parent>
<artifactId>dependency-check-cli</artifactId> <artifactId>dependency-check-cli</artifactId>
@@ -28,15 +28,6 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
<name>Dependency-Check Command Line</name> <name>Dependency-Check Command Line</name>
<description>dependency-check-cli is a command line tool that uses dependency-check-core to detect publicly disclosed vulnerabilities associated with the scanned project dependencies. The tool will generate a report listing the dependency, any identified Common Platform Enumeration (CPE) identifiers, and the associated Common Vulnerability and Exposure (CVE) entries.</description> <description>dependency-check-cli is a command line tool that uses dependency-check-core to detect publicly disclosed vulnerabilities associated with the scanned project dependencies. The tool will generate a report listing the dependency, any identified Common Platform Enumeration (CPE) identifiers, and the associated Common Vulnerability and Exposure (CVE) entries.</description>
<!-- begin copy from http://minds.coremedia.com/2012/09/11/problem-solved-deploy-multi-module-maven-project-site-as-github-pages/ -->
<distributionManagement>
<site>
<id>github-pages-site</id>
<name>Deployment through GitHub's site deployment plugin</name>
<url>${basedir}/../target/site/${project.version}/dependency-check-cli</url>
</site>
</distributionManagement>
<!-- end copy -->
<build> <build>
<finalName>dependency-check-${project.version}</finalName> <finalName>dependency-check-${project.version}</finalName>
<resources> <resources>
@@ -119,42 +110,6 @@ Copyright (c) 2012 - Jeremy Long. All Rights Reserved.
</plugin> </plugin>
</plugins> </plugins>
</build> </build>
<reporting>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>${reporting.checkstyle-plugin.version}</version>
<configuration>
<enableRulesSummary>false</enableRulesSummary>
<enableFilesSummary>false</enableFilesSummary>
<configLocation>${basedir}/../src/main/config/checkstyle-checks.xml</configLocation>
<headerLocation>${basedir}/../src/main/config/checkstyle-header.txt</headerLocation>
<suppressionsLocation>${basedir}/../src/main/config/checkstyle-suppressions.xml</suppressionsLocation>
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-pmd-plugin</artifactId>
<version>${reporting.pmd-plugin.version}</version>
<configuration>
<targetJdk>1.6</targetJdk>
<linkXRef>true</linkXRef>
<sourceEncoding>utf-8</sourceEncoding>
<excludes>
<exclude>**/generated/*.java</exclude>
</excludes>
<rulesets>
<ruleset>../src/main/config/dcrules.xml</ruleset>
<ruleset>/rulesets/java/basic.xml</ruleset>
<ruleset>/rulesets/java/imports.xml</ruleset>
<ruleset>/rulesets/java/unusedcode.xml</ruleset>
</rulesets>
</configuration>
</plugin>
</plugins>
</reporting>
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>commons-cli</groupId> <groupId>commons-cli</groupId>

View File

@@ -28,13 +28,10 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException; import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.apache.tools.ant.DirectoryScanner; import org.apache.tools.ant.DirectoryScanner;
import org.owasp.dependencycheck.dependency.Vulnerability; import org.owasp.dependencycheck.dependency.Vulnerability;
import org.owasp.dependencycheck.reporting.ReportGenerator;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -56,6 +53,10 @@ public class App {
* The logger. * The logger.
*/ */
private static final Logger LOGGER = LoggerFactory.getLogger(App.class); private static final Logger LOGGER = LoggerFactory.getLogger(App.class);
/**
* The configured settings.
*/
private Settings settings = null;
/** /**
* The main method for the application. * The main method for the application.
@@ -64,17 +65,28 @@ public class App {
*/ */
public static void main(String[] args) { public static void main(String[] args) {
int exitCode = 0; int exitCode = 0;
try { final App app = new App();
Settings.initialize(); exitCode = app.run(args);
final App app = new App(); LOGGER.debug("Exit code: {}", exitCode);
exitCode = app.run(args);
LOGGER.debug("Exit code: " + exitCode);
} finally {
Settings.cleanup(true);
}
System.exit(exitCode); System.exit(exitCode);
} }
/**
* Builds the App object.
*/
public App() {
settings = new Settings();
}
/**
* Builds the App object; this method is used for testing.
*
* @param settings the configured settings
*/
protected App(Settings settings) {
this.settings = settings;
}
/** /**
* Main CLI entry-point into the application. * Main CLI entry-point into the application.
* *
@@ -83,7 +95,7 @@ public class App {
*/ */
public int run(String[] args) { public int run(String[] args) {
int exitCode = 0; int exitCode = 0;
final CliParser cli = new CliParser(); final CliParser cli = new CliParser(settings);
try { try {
cli.parse(args); cli.parse(args);
@@ -112,10 +124,11 @@ public class App {
LOGGER.error(ex.getMessage()); LOGGER.error(ex.getMessage());
LOGGER.debug("Error loading properties file", ex); LOGGER.debug("Error loading properties file", ex);
exitCode = -4; exitCode = -4;
return exitCode;
} }
File db; File db;
try { try {
db = new File(Settings.getDataDirectory(), "dc.h2.db"); db = new File(settings.getDataDirectory(), settings.getString(Settings.KEYS.DB_FILE_NAME, "dc.h2.db"));
if (db.exists()) { if (db.exists()) {
if (db.delete()) { if (db.delete()) {
LOGGER.info("Database file purged; local copy of the NVD has been removed"); LOGGER.info("Database file purged; local copy of the NVD has been removed");
@@ -124,12 +137,14 @@ public class App {
exitCode = -5; exitCode = -5;
} }
} else { } else {
LOGGER.error("Unable to purge database; the database file does not exists: {}", db.getAbsolutePath()); LOGGER.error("Unable to purge database; the database file does not exist: {}", db.getAbsolutePath());
exitCode = -6; exitCode = -6;
} }
} catch (IOException ex) { } catch (IOException ex) {
LOGGER.error("Unable to delete the database"); LOGGER.error("Unable to delete the database");
exitCode = -7; exitCode = -7;
} finally {
settings.cleanup();
} }
} }
} else if (cli.isGetVersion()) { } else if (cli.isGetVersion()) {
@@ -141,6 +156,7 @@ public class App {
LOGGER.error(ex.getMessage()); LOGGER.error(ex.getMessage());
LOGGER.debug("Error loading properties file", ex); LOGGER.debug("Error loading properties file", ex);
exitCode = -4; exitCode = -4;
return exitCode;
} }
try { try {
runUpdateOnly(); runUpdateOnly();
@@ -150,6 +166,8 @@ public class App {
} catch (DatabaseException ex) { } catch (DatabaseException ex) {
LOGGER.error(ex.getMessage()); LOGGER.error(ex.getMessage());
exitCode = -9; exitCode = -9;
} finally {
settings.cleanup();
} }
} else if (cli.isRunScan()) { } else if (cli.isRunScan()) {
try { try {
@@ -158,6 +176,7 @@ public class App {
LOGGER.error(ex.getMessage()); LOGGER.error(ex.getMessage());
LOGGER.debug("Error loading properties file", ex); LOGGER.debug("Error loading properties file", ex);
exitCode = -4; exitCode = -4;
return exitCode;
} }
try { try {
final String[] scanFiles = cli.getScanFiles(); final String[] scanFiles = cli.getScanFiles();
@@ -184,8 +203,12 @@ public class App {
exitCode = -14; exitCode = -14;
} }
for (Throwable e : ex.getExceptions()) { for (Throwable e : ex.getExceptions()) {
LOGGER.error(e.getMessage()); if (e.getMessage() != null) {
LOGGER.error(e.getMessage());
}
} }
} finally {
settings.cleanup();
} }
} else { } else {
cli.printHelp(); cli.printHelp();
@@ -220,56 +243,11 @@ public class App {
String[] excludes, int symLinkDepth, int cvssFailScore) throws InvalidScanPathException, DatabaseException, String[] excludes, int symLinkDepth, int cvssFailScore) throws InvalidScanPathException, DatabaseException,
ExceptionCollection, ReportException { ExceptionCollection, ReportException {
Engine engine = null; Engine engine = null;
int retCode = 0;
try { try {
engine = new Engine(); final List<String> antStylePaths = getPaths(files);
final List<String> antStylePaths = new ArrayList<>(); final Set<File> paths = scanAntStylePaths(antStylePaths, symLinkDepth, excludes);
for (String file : files) {
final String antPath = ensureCanonicalPath(file);
antStylePaths.add(antPath);
}
final Set<File> paths = new HashSet<>(); engine = new Engine(settings);
for (String file : antStylePaths) {
LOGGER.debug("Scanning {}", file);
final DirectoryScanner scanner = new DirectoryScanner();
String include = file.replace('\\', '/');
File baseDir;
if (include.startsWith("//")) {
throw new InvalidScanPathException("Unable to scan paths specified by //");
} else {
final int pos = getLastFileSeparator(include);
final String tmpBase = include.substring(0, pos);
final String tmpInclude = include.substring(pos + 1);
if (tmpInclude.indexOf('*') >= 0 || tmpInclude.indexOf('?') >= 0
|| (new File(include)).isFile()) {
baseDir = new File(tmpBase);
include = tmpInclude;
} else {
baseDir = new File(tmpBase, tmpInclude);
include = "**/*";
}
}
scanner.setBasedir(baseDir);
final String[] includes = {include};
scanner.setIncludes(includes);
scanner.setMaxLevelsOfSymlinks(symLinkDepth);
if (symLinkDepth <= 0) {
scanner.setFollowSymlinks(false);
}
if (excludes != null && excludes.length > 0) {
scanner.addExcludes(excludes);
}
scanner.scan();
if (scanner.getIncludedFilesCount() > 0) {
for (String s : scanner.getIncludedFiles()) {
final File f = new File(baseDir, s);
LOGGER.debug("Found file {}", f.toString());
paths.add(f);
}
}
}
engine.scan(paths); engine.scan(paths);
ExceptionCollection exCol = null; ExceptionCollection exCol = null;
@@ -281,18 +259,9 @@ public class App {
} }
exCol = ex; exCol = ex;
} }
final List<Dependency> dependencies = engine.getDependencies();
DatabaseProperties prop = null;
try (CveDB cve = CveDB.getInstance()) {
prop = cve.getDatabaseProperties();
} catch (DatabaseException ex) {
//TODO shouldn't this be a fatal exception
LOGGER.debug("Unable to retrieve DB Properties", ex);
}
final ReportGenerator report = new ReportGenerator(applicationName, dependencies, engine.getAnalyzers(), prop);
try { try {
report.generateReports(reportDirectory, outputFormat); engine.writeReports(applicationName, new File(reportDirectory), outputFormat);
} catch (ReportException ex) { } catch (ReportException ex) {
if (exCol != null) { if (exCol != null) {
exCol.addException(ex); exCol.addException(ex);
@@ -301,28 +270,110 @@ public class App {
throw ex; throw ex;
} }
} }
if (exCol != null && exCol.getExceptions().size() > 0) { if (exCol != null && !exCol.getExceptions().isEmpty()) {
throw exCol; throw exCol;
} }
return determineReturnCode(engine, cvssFailScore);
} finally {
if (engine != null) {
engine.close();
}
}
}
//Set the exit code based on whether we found a high enough vulnerability /**
for (Dependency dep : dependencies) { * Determines the return code based on if one of the dependencies scanned
if (!dep.getVulnerabilities().isEmpty()) { * has a vulnerability with a CVSS score above the cvssFailScore.
for (Vulnerability vuln : dep.getVulnerabilities()) { *
LOGGER.debug("VULNERABILITY FOUND " + dep.getDisplayFileName()); * @param engine the engine used during analysis
if (vuln.getCvssScore() > cvssFailScore) { * @param cvssFailScore the max allowed CVSS score
retCode = 1; * @return returns <code>1</code> if a severe enough vulnerability is
} * identified; otherwise <code>0</code>
*/
private int determineReturnCode(Engine engine, int cvssFailScore) {
int retCode = 0;
//Set the exit code based on whether we found a high enough vulnerability
for (Dependency dep : engine.getDependencies()) {
if (!dep.getVulnerabilities().isEmpty()) {
for (Vulnerability vuln : dep.getVulnerabilities()) {
LOGGER.debug("VULNERABILITY FOUND {}", dep.getDisplayFileName());
if (vuln.getCvssScore() > cvssFailScore) {
retCode = 1;
} }
} }
} }
}
return retCode;
}
return retCode; /**
} finally { * Scans the give Ant Style paths and collects the actual files.
if (engine != null) { *
engine.cleanup(); * @param antStylePaths a list of ant style paths to scan for actual files
* @param symLinkDepth the depth to traverse symbolic links
* @param excludes an array of ant style excludes
* @return returns the set of identified files
* @throws InvalidScanPathException thrown when the scan path is invalid
*/
private Set<File> scanAntStylePaths(List<String> antStylePaths, int symLinkDepth, String[] excludes)
throws InvalidScanPathException {
final Set<File> paths = new HashSet<>();
for (String file : antStylePaths) {
LOGGER.debug("Scanning {}", file);
final DirectoryScanner scanner = new DirectoryScanner();
String include = file.replace('\\', '/');
File baseDir;
if (include.startsWith("//")) {
throw new InvalidScanPathException("Unable to scan paths specified by //");
} else {
final int pos = getLastFileSeparator(include);
final String tmpBase = include.substring(0, pos);
final String tmpInclude = include.substring(pos + 1);
if (tmpInclude.indexOf('*') >= 0 || tmpInclude.indexOf('?') >= 0
|| (new File(include)).isFile()) {
baseDir = new File(tmpBase);
include = tmpInclude;
} else {
baseDir = new File(tmpBase, tmpInclude);
include = "**/*";
}
}
scanner.setBasedir(baseDir);
final String[] includes = {include};
scanner.setIncludes(includes);
scanner.setMaxLevelsOfSymlinks(symLinkDepth);
if (symLinkDepth <= 0) {
scanner.setFollowSymlinks(false);
}
if (excludes != null && excludes.length > 0) {
scanner.addExcludes(excludes);
}
scanner.scan();
if (scanner.getIncludedFilesCount() > 0) {
for (String s : scanner.getIncludedFiles()) {
final File f = new File(baseDir, s);
LOGGER.debug("Found file {}", f.toString());
paths.add(f);
}
} }
} }
return paths;
}
/**
* Determines the ant style paths from the given array of files.
*
* @param files an array of file paths
* @return a list containing ant style paths
*/
private List<String> getPaths(String[] files) {
final List<String> antStylePaths = new ArrayList<>();
for (String file : files) {
final String antPath = ensureCanonicalPath(file);
antStylePaths.add(antPath);
}
return antStylePaths;
} }
/** /**
@@ -333,14 +384,8 @@ public class App {
* connection to the database could not be established * connection to the database could not be established
*/ */
private void runUpdateOnly() throws UpdateException, DatabaseException { private void runUpdateOnly() throws UpdateException, DatabaseException {
Engine engine = null; try (Engine engine = new Engine(settings)) {
try {
engine = new Engine();
engine.doUpdates(); engine.doUpdates();
} finally {
if (engine != null) {
engine.cleanup();
}
} }
} }
@@ -361,7 +406,7 @@ public class App {
final String proxyPass = cli.getProxyPassword(); final String proxyPass = cli.getProxyPassword();
final String dataDirectory = cli.getDataDirectory(); final String dataDirectory = cli.getDataDirectory();
final File propertiesFile = cli.getPropertiesFile(); final File propertiesFile = cli.getPropertiesFile();
final String suppressionFile = cli.getSuppressionFile(); final String[] suppressionFiles = cli.getSuppressionFiles();
final String hintsFile = cli.getHintsFile(); final String hintsFile = cli.getHintsFile();
final String nexusUrl = cli.getNexusUrl(); final String nexusUrl = cli.getNexusUrl();
final String databaseDriverName = cli.getDatabaseDriverName(); final String databaseDriverName = cli.getDatabaseDriverName();
@@ -378,10 +423,11 @@ public class App {
final Integer cveValidForHours = cli.getCveValidForHours(); final Integer cveValidForHours = cli.getCveValidForHours();
final Boolean autoUpdate = cli.isAutoUpdate(); final Boolean autoUpdate = cli.isAutoUpdate();
final Boolean experimentalEnabled = cli.isExperimentalEnabled(); final Boolean experimentalEnabled = cli.isExperimentalEnabled();
final Boolean retiredEnabled = cli.isRetiredEnabled();
if (propertiesFile != null) { if (propertiesFile != null) {
try { try {
Settings.mergeProperties(propertiesFile); settings.mergeProperties(propertiesFile);
} catch (FileNotFoundException ex) { } catch (FileNotFoundException ex) {
throw new InvalidSettingException("Unable to find properties file '" + propertiesFile.getPath() + "'", ex); throw new InvalidSettingException("Unable to find properties file '" + propertiesFile.getPath() + "'", ex);
} catch (IOException ex) { } catch (IOException ex) {
@@ -393,63 +439,66 @@ public class App {
// on the command line. This is true of other boolean values set below not using the setBooleanIfNotNull. // on the command line. This is true of other boolean values set below not using the setBooleanIfNotNull.
final boolean nexusUsesProxy = cli.isNexusUsesProxy(); final boolean nexusUsesProxy = cli.isNexusUsesProxy();
if (dataDirectory != null) { if (dataDirectory != null) {
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory); settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
} else if (System.getProperty("basedir") != null) { } else if (System.getProperty("basedir") != null) {
final File dataDir = new File(System.getProperty("basedir"), "data"); final File dataDir = new File(System.getProperty("basedir"), "data");
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath()); settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
} else { } else {
final File jarPath = new File(App.class.getProtectionDomain().getCodeSource().getLocation().getPath()); final File jarPath = new File(App.class.getProtectionDomain().getCodeSource().getLocation().getPath());
final File base = jarPath.getParentFile(); final File base = jarPath.getParentFile();
final String sub = Settings.getString(Settings.KEYS.DATA_DIRECTORY); final String sub = settings.getString(Settings.KEYS.DATA_DIRECTORY);
final File dataDir = new File(base, sub); final File dataDir = new File(base, sub);
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath()); settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
} }
Settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate); settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer); settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort); settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUser); settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUser);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPass); settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPass);
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout); settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile); settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile);
Settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, hintsFile); settings.setIntIfNotNull(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
Settings.setIntIfNotNull(Settings.KEYS.CVE_CHECK_VALID_FOR_HOURS, cveValidForHours);
settings.setArrayIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFiles);
//File Type Analyzer Settings //File Type Analyzer Settings
Settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, experimentalEnabled); settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, experimentalEnabled);
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RETIRED_ENABLED, retiredEnabled);
Settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED, !cli.isJarDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED, !cli.isJarDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, !cli.isArchiveDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, !cli.isArchiveDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, !cli.isPythonDistributionDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, !cli.isPythonDistributionDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, !cli.isPythonPackageDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, !cli.isPythonPackageDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, !cli.isAutoconfDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, !cli.isAutoconfDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_CMAKE_ENABLED, !cli.isCmakeDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_CMAKE_ENABLED, !cli.isCmakeDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, !cli.isNuspecDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, !cli.isNuspecDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, !cli.isAssemblyDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, !cli.isAssemblyDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, !cli.isBundleAuditDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, !cli.isBundleAuditDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, !cli.isOpenSSLDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED, !cli.isOpenSSLDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, !cli.isComposerDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED, !cli.isComposerDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, !cli.isNodeJsDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED, !cli.isNodeJsDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, !cli.isSwiftPackageAnalyzerDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_NSP_PACKAGE_ENABLED, !cli.isNspDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, !cli.isCocoapodsAnalyzerDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED, !cli.isSwiftPackageAnalyzerDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, !cli.isRubyGemspecDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED, !cli.isCocoapodsAnalyzerDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, !cli.isCentralDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED, !cli.isRubyGemspecDisabled());
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, !cli.isNexusDisabled()); settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, !cli.isCentralDisabled());
settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, !cli.isNexusDisabled());
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, cli.getPathToBundleAudit()); settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH, cli.getPathToBundleAudit());
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl); settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy); settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName); settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath); settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString); settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser); settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword); settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
Settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, additionalZipExtensions); settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, additionalZipExtensions);
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono); settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
if (cveBase12 != null && !cveBase12.isEmpty()) { if (cveBase12 != null && !cveBase12.isEmpty()) {
Settings.setString(Settings.KEYS.CVE_SCHEMA_1_2, cveBase12); settings.setString(Settings.KEYS.CVE_SCHEMA_1_2, cveBase12);
Settings.setString(Settings.KEYS.CVE_SCHEMA_2_0, cveBase20); settings.setString(Settings.KEYS.CVE_SCHEMA_2_0, cveBase20);
Settings.setString(Settings.KEYS.CVE_MODIFIED_12_URL, cveMod12); settings.setString(Settings.KEYS.CVE_MODIFIED_12_URL, cveMod12);
Settings.setString(Settings.KEYS.CVE_MODIFIED_20_URL, cveMod20); settings.setString(Settings.KEYS.CVE_MODIFIED_20_URL, cveMod20);
} }
} }

View File

@@ -53,6 +53,19 @@ public final class CliParser {
* Indicates whether the arguments are valid. * Indicates whether the arguments are valid.
*/ */
private boolean isValid = true; private boolean isValid = true;
/**
* The configured settings.
*/
private final Settings settings;
/**
* Constructs a new CLI Parser object with the configured settings.
*
* @param settings the configured settings
*/
public CliParser(Settings settings) {
this.settings = settings;
}
/** /**
* Parses the arguments passed in and captures the results for later use. * Parses the arguments passed in and captures the results for later use.
@@ -192,14 +205,13 @@ public final class CliParser {
final String msg = String.format("Invalid '%s' argument: '%s'", argumentName, path); final String msg = String.format("Invalid '%s' argument: '%s'", argumentName, path);
throw new FileNotFoundException(msg); throw new FileNotFoundException(msg);
} }
} else if (path.startsWith("//") || path.startsWith("\\\\")) { // } else if (path.startsWith("//") || path.startsWith("\\\\")) {
isValid = false; // isValid = false;
final String msg = String.format("Invalid '%s' argument: '%s'%nUnable to scan paths that start with '//'.", argumentName, path); // final String msg = String.format("Invalid '%s' argument: '%s'%nUnable to scan paths that start with '//'.", argumentName, path);
throw new FileNotFoundException(msg); // throw new FileNotFoundException(msg);
} else if ((path.endsWith("/*") && !path.endsWith("**/*")) || (path.endsWith("\\*") && path.endsWith("**\\*"))) { } else if ((path.endsWith("/*") && !path.endsWith("**/*")) || (path.endsWith("\\*") && path.endsWith("**\\*"))) {
final String msg = String.format("Possibly incorrect path '%s' from argument '%s' because it ends with a slash star; " LOGGER.warn("Possibly incorrect path '{}' from argument '{}' because it ends with a slash star; "
+ "dependency-check uses ant-style paths", path, argumentName); + "dependency-check uses ant-style paths", path, argumentName);
LOGGER.warn(msg);
} }
} }
@@ -222,10 +234,9 @@ public final class CliParser {
* Adds the standard command line options to the given options collection. * Adds the standard command line options to the given options collection.
* *
* @param options a collection of command line arguments * @param options a collection of command line arguments
* @throws IllegalArgumentException thrown if there is an exception
*/ */
@SuppressWarnings("static-access") @SuppressWarnings("static-access")
private void addStandardOptions(final Options options) throws IllegalArgumentException { private void addStandardOptions(final Options options) {
final Option help = new Option(ARGUMENT.HELP_SHORT, ARGUMENT.HELP, false, final Option help = new Option(ARGUMENT.HELP_SHORT, ARGUMENT.HELP, false,
"Print this message."); "Print this message.");
@@ -273,8 +284,9 @@ public final class CliParser {
.desc("Sets how deep nested symbolic links will be followed; 0 indicates symbolic links will not be followed.") .desc("Sets how deep nested symbolic links will be followed; 0 indicates symbolic links will not be followed.")
.build(); .build();
final Option suppressionFile = Option.builder().argName("file").hasArg().longOpt(ARGUMENT.SUPPRESSION_FILE) final Option suppressionFile = Option.builder().argName("file").hasArgs().longOpt(ARGUMENT.SUPPRESSION_FILES)
.desc("The file path to the suppression XML file.") .desc("The file path to the suppression XML file. This can be specified more then once to utilize multiple "
+ "suppression files")
.build(); .build();
final Option hintsFile = Option.builder().argName("file").hasArg().longOpt(ARGUMENT.HINTS_FILE) final Option hintsFile = Option.builder().argName("file").hasArg().longOpt(ARGUMENT.HINTS_FILE)
@@ -289,6 +301,10 @@ public final class CliParser {
.desc("Enables the experimental analyzers.") .desc("Enables the experimental analyzers.")
.build(); .build();
final Option retiredEnabled = Option.builder().longOpt(ARGUMENT.RETIRED)
.desc("Enables the experimental analyzers.")
.build();
final Option failOnCVSS = Option.builder().argName("score").hasArg().longOpt(ARGUMENT.FAIL_ON_CVSS) final Option failOnCVSS = Option.builder().argName("score").hasArg().longOpt(ARGUMENT.FAIL_ON_CVSS)
.desc("Specifies if the build should be failed if a CVSS score above a specified level is identified. " .desc("Specifies if the build should be failed if a CVSS score above a specified level is identified. "
+ "The default is 11; since the CVSS scores are 0-10, by default the build will never fail.") + "The default is 11; since the CVSS scores are 0-10, by default the build will never fail.")
@@ -317,6 +333,7 @@ public final class CliParser {
.addOption(hintsFile) .addOption(hintsFile)
.addOption(cveValidForHours) .addOption(cveValidForHours)
.addOption(experimentalEnabled) .addOption(experimentalEnabled)
.addOption(retiredEnabled)
.addOption(failOnCVSS); .addOption(failOnCVSS);
} }
@@ -326,10 +343,9 @@ public final class CliParser {
* help messages. * help messages.
* *
* @param options a collection of command line arguments * @param options a collection of command line arguments
* @throws IllegalArgumentException thrown if there is an exception
*/ */
@SuppressWarnings("static-access") @SuppressWarnings("static-access")
private void addAdvancedOptions(final Options options) throws IllegalArgumentException { private void addAdvancedOptions(final Options options) {
final Option cve12Base = Option.builder().argName("url").hasArg().longOpt(ARGUMENT.CVE_BASE_12) final Option cve12Base = Option.builder().argName("url").hasArg().longOpt(ARGUMENT.CVE_BASE_12)
.desc("Base URL for each years CVE 1.2, the %d will be replaced with the year. ") .desc("Base URL for each years CVE 1.2, the %d will be replaced with the year. ")
@@ -490,6 +506,8 @@ public final class CliParser {
.addOption(swiftPackageManagerAnalyzerEnabled) .addOption(swiftPackageManagerAnalyzerEnabled)
.addOption(Option.builder().longOpt(ARGUMENT.DISABLE_NODE_JS) .addOption(Option.builder().longOpt(ARGUMENT.DISABLE_NODE_JS)
.desc("Disable the Node.js Package Analyzer.").build()) .desc("Disable the Node.js Package Analyzer.").build())
.addOption(Option.builder().longOpt(ARGUMENT.DISABLE_NSP)
.desc("Disable the NSP Package Analyzer.").build())
.addOption(nexusUrl) .addOption(nexusUrl)
.addOption(nexusUsesProxy) .addOption(nexusUsesProxy)
.addOption(additionalZipExtensions) .addOption(additionalZipExtensions)
@@ -505,10 +523,9 @@ public final class CliParser {
* existing scripts. * existing scripts.
* *
* @param options a collection of command line arguments * @param options a collection of command line arguments
* @throws IllegalArgumentException thrown if there is an exception
*/ */
@SuppressWarnings({"static-access", "deprecation"}) @SuppressWarnings({"static-access", "deprecation"})
private void addDeprecatedOptions(final Options options) throws IllegalArgumentException { private void addDeprecatedOptions(final Options options) {
final Option proxyServer = Option.builder().argName("url").hasArg().longOpt(ARGUMENT.PROXY_URL) final Option proxyServer = Option.builder().argName("url").hasArg().longOpt(ARGUMENT.PROXY_URL)
.desc("The proxy url argument is deprecated, use proxyserver instead.") .desc("The proxy url argument is deprecated, use proxyserver instead.")
@@ -583,7 +600,7 @@ public final class CliParser {
private boolean hasDisableOption(String argument, String setting) { private boolean hasDisableOption(String argument, String setting) {
if (line == null || !line.hasOption(argument)) { if (line == null || !line.hasOption(argument)) {
try { try {
return !Settings.getBoolean(setting); return !settings.getBoolean(setting);
} catch (InvalidSettingException ise) { } catch (InvalidSettingException ise) {
LOGGER.warn("Invalid property setting '{}' defaulting to false", setting); LOGGER.warn("Invalid property setting '{}' defaulting to false", setting);
return false; return false;
@@ -734,6 +751,16 @@ public final class CliParser {
return hasDisableOption(ARGUMENT.DISABLE_NODE_JS, Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED); return hasDisableOption(ARGUMENT.DISABLE_NODE_JS, Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED);
} }
/**
* Returns true if the disableNSP command line argument was specified.
*
* @return true if the disableNSP command line argument was specified;
* otherwise false
*/
public boolean isNspDisabled() {
return hasDisableOption(ARGUMENT.DISABLE_NSP, Settings.KEYS.ANALYZER_NSP_PACKAGE_ENABLED);
}
/** /**
* Returns true if the disableCocoapodsAnalyzer command line argument was * Returns true if the disableCocoapodsAnalyzer command line argument was
* specified. * specified.
@@ -792,7 +819,7 @@ public final class CliParser {
// still honor the property if it's set. // still honor the property if it's set.
if (line == null || !line.hasOption(ARGUMENT.NEXUS_USES_PROXY)) { if (line == null || !line.hasOption(ARGUMENT.NEXUS_USES_PROXY)) {
try { try {
return Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY); return settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY);
} catch (InvalidSettingException ise) { } catch (InvalidSettingException ise) {
return true; return true;
} }
@@ -814,10 +841,10 @@ public final class CliParser {
final String helpMsg = String.format("%n%s" final String helpMsg = String.format("%n%s"
+ " can be used to identify if there are any known CVE vulnerabilities in libraries utilized by an application. " + " can be used to identify if there are any known CVE vulnerabilities in libraries utilized by an application. "
+ "%s will automatically update required data from the Internet, such as the CVE and CPE data files from nvd.nist.gov.%n%n", + "%s will automatically update required data from the Internet, such as the CVE and CPE data files from nvd.nist.gov.%n%n",
Settings.getString("application.name", "DependencyCheck"), settings.getString("application.name", "DependencyCheck"),
Settings.getString("application.name", "DependencyCheck")); settings.getString("application.name", "DependencyCheck"));
formatter.printHelp(Settings.getString("application.name", "DependencyCheck"), formatter.printHelp(settings.getString("application.name", "DependencyCheck"),
helpMsg, helpMsg,
options, options,
"", "",
@@ -893,7 +920,7 @@ public final class CliParser {
String name = line.getOptionValue(ARGUMENT.PROJECT); String name = line.getOptionValue(ARGUMENT.PROJECT);
if (name == null && appName != null) { if (name == null && appName != null) {
name = appName; name = appName;
LOGGER.warn("The '" + ARGUMENT.APP_NAME + "' argument should no longer be used; use '" + ARGUMENT.PROJECT + "' instead."); LOGGER.warn("The '{}' argument should no longer be used; use '{}' instead.", ARGUMENT.APP_NAME, ARGUMENT.PROJECT);
} }
return name; return name;
} }
@@ -1020,12 +1047,12 @@ public final class CliParser {
} }
/** /**
* Returns the path to the suppression file. * Returns the paths to the suppression files.
* *
* @return the path to the suppression file * @return the paths to the suppression files.
*/ */
public String getSuppressionFile() { public String[] getSuppressionFiles() {
return line.getOptionValue(ARGUMENT.SUPPRESSION_FILE); return line.getOptionValues(ARGUMENT.SUPPRESSION_FILES);
} }
/** /**
@@ -1045,8 +1072,8 @@ public final class CliParser {
*/ */
public void printVersionInfo() { public void printVersionInfo() {
final String version = String.format("%s version %s", final String version = String.format("%s version %s",
Settings.getString(Settings.KEYS.APPLICATION_NAME, "dependency-check"), settings.getString(Settings.KEYS.APPLICATION_NAME, "dependency-check"),
Settings.getString(Settings.KEYS.APPLICATION_VERSION, "Unknown")); settings.getString(Settings.KEYS.APPLICATION_VERSION, "Unknown"));
System.out.println(version); System.out.println(version);
} }
@@ -1166,6 +1193,15 @@ public final class CliParser {
return (line != null && line.hasOption(ARGUMENT.EXPERIMENTAL)) ? true : null; return (line != null && line.hasOption(ARGUMENT.EXPERIMENTAL)) ? true : null;
} }
/**
* Returns true if the retired analyzers are enabled.
*
* @return true if the retired analyzers are enabled; otherwise null
*/
public Boolean isRetiredEnabled() {
return (line != null && line.hasOption(ARGUMENT.RETIRED)) ? true : null;
}
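Because `isRetiredEnabled()` (like `isExperimentalEnabled()` above) returns a `Boolean` that is either `TRUE` or `null`, a caller can distinguish "flag supplied" from "not specified, keep the configured default". A hedged sketch of that pattern; the `analyzer.retired.enabled` key name and the `setBoolean` setter are illustrative assumptions, not confirmed by this excerpt:

```java
package org.owasp.dependencycheck;

import org.owasp.dependencycheck.utils.Settings;

public class RetiredFlagSketch {
    public static void main(String[] args) throws Exception {
        final Settings settings = new Settings();
        final CliParser cli = new CliParser(settings);
        cli.parse(new String[]{"--project", "demo", "--scan", ".", "--enableRetired"});

        // Tri-state: TRUE when --enableRetired was supplied, null when it was not.
        final Boolean retired = cli.isRetiredEnabled();
        if (retired != null) {
            // Only override the configured default when the flag is present.
            // The key name and setter below are illustrative assumptions.
            settings.setBoolean("analyzer.retired.enabled", retired);
        }
        settings.cleanup(true);
    }
}
```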
/** /**
* Returns the CVSS value to fail on. * Returns the CVSS value to fail on.
* *
@@ -1363,9 +1399,9 @@ public final class CliParser {
public static final String SYM_LINK_DEPTH = "symLink"; public static final String SYM_LINK_DEPTH = "symLink";
/** /**
* The CLI argument name for setting the location of the suppression * The CLI argument name for setting the location of the suppression
* file. * file(s).
*/ */
public static final String SUPPRESSION_FILE = "suppression"; public static final String SUPPRESSION_FILES = "suppression";
/** /**
* The CLI argument name for setting the location of the hint file. * The CLI argument name for setting the location of the hint file.
*/ */
@@ -1443,6 +1479,10 @@ public final class CliParser {
* Disables the Node.js Package Analyzer. * Disables the Node.js Package Analyzer.
*/ */
public static final String DISABLE_NODE_JS = "disableNodeJS"; public static final String DISABLE_NODE_JS = "disableNodeJS";
/**
* Disables the NSP Analyzer.
*/
public static final String DISABLE_NSP = "disableNSP";
/** /**
* The URL of the nexus server. * The URL of the nexus server.
*/ */
@@ -1495,6 +1535,10 @@ public final class CliParser {
* The CLI argument to enable the experimental analyzers. * The CLI argument to enable the experimental analyzers.
*/ */
private static final String EXPERIMENTAL = "enableExperimental"; private static final String EXPERIMENTAL = "enableExperimental";
/**
* The CLI argument to enable the retired analyzers.
*/
private static final String RETIRED = "enableRetired";
/** /**
* The CLI argument to enable the experimental analyzers. * The CLI argument to enable the experimental analyzers.
*/ */

View File

@@ -0,0 +1,69 @@
Command Line Arguments
======================
The following table lists the command line arguments:
Short | Argument&nbsp;Name&nbsp;&nbsp; | Parameter | Description | Requirement
-------|------------------------|-----------------|-------------|------------
| \-\-project | \<name\> | The name of the project being scanned. | Required
\-s | \-\-scan | \<path\> | The path to scan \- this option can be specified multiple times. It is also possible to specify Ant style paths (e.g. directory/**/*.jar). | Required
| \-\-exclude | \<pattern\> | The path patterns to exclude from the scan \- this option can be specified multiple times. This accepts Ant style path patterns (e.g. **/exclude/**). | Optional
| \-\-symLink | \<depth\> | The depth that symbolic links will be followed; the default is 0 meaning symbolic links will not be followed. | Optional
\-o | \-\-out | \<path\> | The folder to write reports to. This defaults to the current directory. If the format is not set to ALL one could specify a specific file name. | Optional
\-f | \-\-format | \<format\> | The output format to write to (XML, HTML, CSV, JSON, VULN, ALL). The default is HTML. | Required
| \-\-failOnCVSS | \<score\> | If the score is set between 0 and 10, the exit code from dependency-check will indicate whether a vulnerability with a CVSS score equal to or higher than the specified value was identified. | Optional
\-l | \-\-log | \<file\> | The file path to write verbose logging information. | Optional
\-n | \-\-noupdate | | Disables the automatic updating of the CPE data. | Optional
| \-\-suppression | \<files\> | The file paths to the suppression XML files; used to suppress [false positives](../general/suppression.html). This can be specified more than once to utilize multiple suppression files. | Optional
\-h | \-\-help | | Print the help message. | Optional
| \-\-advancedHelp | | Print the advanced help message. | Optional
\-v | \-\-version | | Print the version information. | Optional
| \-\-cveValidForHours | \<hours\> | The number of hours to wait before checking for new updates from the NVD. The default is 4 hours. | Optional
| \-\-enableExperimental | | Enable the [experimental analyzers](../analyzers/index.html). If not set the analyzers marked as experimental below will not be loaded or used. | Optional
| \-\-enableRetired | | Enable the [retired analyzers](../analyzers/index.html). If not set the analyzers marked as retired below will not be loaded or used. | Optional
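The flags above flow through `CliParser` and `App.populateSettings` into the `Settings` instance. A condensed, hedged sketch mirroring the new `AppTest` cases in this change set (the scan path and suppression file names are placeholders):

```java
package org.owasp.dependencycheck;

import org.owasp.dependencycheck.utils.Settings;

public class CliFlagsSketch {
    public static void main(String[] args) throws Exception {
        final Settings settings = new Settings();
        final CliParser cli = new CliParser(settings);
        // "." stands in for a real scan target; the suppression file names are placeholders.
        cli.parse(new String[]{
                "--project", "my-app",
                "--scan", ".",
                "--enableRetired",
                "--suppression", "first-file.xml", "another-file.xml"});

        new App(settings).populateSettings(cli);
        // The new AppTest cases assert that multiple suppression files are joined
        // into a single setting value, e.g. "first-file.xml,another-file.xml".
        System.out.println(settings.getString(Settings.KEYS.SUPPRESSION_FILE));
        settings.cleanup(true);
    }
}
```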
Advanced Options
================
Short | Argument&nbsp;Name&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; | Parameter | Description | Default&nbsp;Value
-------|------------------------|-----------------|----------------------------------------------------------------------------------|-------------------
| \-\-cveUrl12Modified | \<url\> | URL for the modified CVE 1.2 | https://nvd.nist.gov/download/nvdcve-Modified.xml.gz
| \-\-cveUrl20Modified | \<url\> | URL for the modified CVE 2.0 | https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-Modified.xml.gz
| \-\-cveUrl12Base | \<url\> | Base URL for each year's CVE 1.2, the %d will be replaced with the year | https://nvd.nist.gov/download/nvdcve-%d.xml.gz
| \-\-cveUrl20Base | \<url\> | Base URL for each year's CVE 2.0, the %d will be replaced with the year | https://nvd.nist.gov/feeds/xml/cve/nvdcve-2.0-%d.xml.gz
\-P | \-\-propertyfile | \<file\> | Specifies a file that contains properties to use instead of application defaults. | &nbsp;
| \-\-updateonly | | If set only the update phase of dependency-check will be executed; no scan will be executed and no report will be generated. | &nbsp;
| \-\-disablePyDist | | Sets whether the [experimental](../analyzers/index.html) Python Distribution Analyzer will be used. | false
| \-\-disablePyPkg | | Sets whether the [experimental](../analyzers/index.html) Python Package Analyzer will be used. | false
| \-\-disableNodeJS | | Sets whether the [retired](../analyzers/index.html) Node.js Package Analyzer will be used. | false
| \-\-disableNSP | | Sets whether the NSP Analyzer will be used. | false
| \-\-disableRubygems | | Sets whether the [experimental](../analyzers/index.html) Ruby Gemspec Analyzer will be used. | false
| \-\-disableBundleAudit | | Sets whether the [experimental](../analyzers/index.html) Ruby Bundler Audit Analyzer will be used. | false
| \-\-disableCocoapodsAnalyzer | | Sets whether the [experimental](../analyzers/index.html) Cocoapods Analyzer will be used. | false
| \-\-disableSwiftPackageManagerAnalyzer | | Sets whether the [experimental](../analyzers/index.html) Swift Package Manager Analyzer will be used. | false
| \-\-disableAutoconf | | Sets whether the [experimental](../analyzers/index.html) Autoconf Analyzer will be used. | false
| \-\-disableOpenSSL | | Sets whether the OpenSSL Analyzer will be used. | false
| \-\-disableCmake | | Sets whether the [experimental](../analyzers/index.html) Cmake Analyzer will be disabled. | false
| \-\-disableArchive | | Sets whether the Archive Analyzer will be disabled. | false
| \-\-zipExtensions | \<strings\> | A comma-separated list of additional file extensions to be treated like a ZIP file; the contents will be extracted and analyzed. | &nbsp;
| \-\-disableJar | | Sets whether the Jar Analyzer will be disabled. | false
| \-\-disableComposer | | Sets whether the [experimental](../analyzers/index.html) PHP Composer Lock File Analyzer will be disabled. | false
| \-\-disableCentral | | Sets whether the Central Analyzer will be used. **Disabling this analyzer is not recommended as it could lead to false negatives (e.g. libraries that have vulnerabilities may not be reported correctly).** If this analyzer is being disabled there is a good chance you also want to disable the Nexus Analyzer. | false
| \-\-disableNexus | | Sets whether the Nexus Analyzer will be used (requires Nexus Pro). Note, this has been superseded by the Central Analyzer. However, you can configure the Nexus URL to utilize an internally hosted Nexus Pro server. | false
| \-\-nexus | \<url\> | The url to the Nexus Server's web service end point (example: http://domain.enterprise/nexus/service/local/). If not set the Nexus Analyzer will be disabled. | &nbsp;
| \-\-nexusUsesProxy | \<true\|false\> | Whether or not the defined proxy should be used when connecting to Nexus. | true
| \-\-disableNuspec | | Sets whether or not the .NET Nuget Nuspec Analyzer will be used. | false
| \-\-disableAssembly | | Sets whether or not the .NET Assembly Analyzer should be used. | false
| \-\-mono | \<path\> | The path to Mono for .NET Assembly analysis on non-windows systems. | &nbsp;
| \-\-bundleAudit | | The path to the bundle-audit executable. | &nbsp;
| \-\-proxyserver | \<server\> | The proxy server to use when downloading resources; see the [proxy configuration](../data/proxy.html) page for more information. | &nbsp;
| \-\-proxyport | \<port\> | The proxy port to use when downloading resources. | &nbsp;
| \-\-connectiontimeout | \<timeout\> | The connection timeout (in milliseconds) to use when downloading resources. | &nbsp;
| \-\-proxypass | \<pass\> | The proxy password to use when downloading resources. | &nbsp;
| \-\-proxyuser | \<user\> | The proxy username to use when downloading resources. | &nbsp;
| \-\-connectionString | \<connStr\> | The connection string to the database. | &nbsp;
| \-\-dbDriverName | \<driver\> | The database driver name. | &nbsp;
| \-\-dbDriverPath | \<path\> | The path to the database driver; note, this does not need to be set unless the JAR is outside of the class path. | &nbsp;
| \-\-dbPassword | \<password\> | The password for connecting to the database. | &nbsp;
| \-\-dbUser | \<user\> | The username used to connect to the database. | &nbsp;
\-d | \-\-data | \<path\> | The location of the data directory used to store persistent data. This option should generally not be set. | &nbsp;
| \-\-purge | | Delete the local copy of the NVD. This is used to force a refresh of the data. | &nbsp;

View File

(Binary image file changed; before and after sizes are both 10 KiB.)

View File

@@ -13,27 +13,40 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
* *
* Copyright (c) 2017 The OWASP Foundatio. All Rights Reserved. * Copyright (c) 2017 The OWASP Foundation. All Rights Reserved.
*/ */
package org.owasp.dependencycheck; package org.owasp.dependencycheck;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.io.File; import java.io.File;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.net.URISyntaxException; import java.net.URISyntaxException;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.UnrecognizedOptionException; import org.apache.commons.cli.UnrecognizedOptionException;
import org.junit.Rule;
import org.junit.Test; import org.junit.Test;
import static org.junit.Assert.*; import org.junit.rules.ExpectedException;
import org.owasp.dependencycheck.utils.InvalidSettingException; import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.Settings.KEYS;
/** /**
* * Tests for the {@link App} class.
* @author jeremy
*/ */
public class AppTest { public class AppTest extends BaseTest {
/**
* Test rule for asserting exceptions and their contents.
*/
@Rule
public ExpectedException expectedException = ExpectedException.none();
/** /**
* Test of ensureCanonicalPath method, of class App. * Test of ensureCanonicalPath method, of class App.
@@ -41,7 +54,7 @@ public class AppTest {
@Test @Test
public void testEnsureCanonicalPath() { public void testEnsureCanonicalPath() {
String file = "../*.jar"; String file = "../*.jar";
App instance = new App(); App instance = new App(getSettings());
String result = instance.ensureCanonicalPath(file); String result = instance.ensureCanonicalPath(file);
assertFalse(result.contains("..")); assertFalse(result.contains(".."));
assertTrue(result.endsWith("*.jar")); assertTrue(result.endsWith("*.jar"));
@@ -52,20 +65,20 @@ public class AppTest {
assertTrue("result=" + result, result.endsWith(expResult)); assertTrue("result=" + result, result.endsWith(expResult));
} }
@Test(expected = UnrecognizedOptionException.class) /**
public void testPopulateSettingsException() throws FileNotFoundException, ParseException, InvalidSettingException, URISyntaxException { * Assert that boolean properties can be set on the CLI and parsed into the
String[] args = {"-invalidPROPERTY"}; * {@link Settings}.
assertTrue(testBooleanProperties(args, null)); *
} * @throws Exception the unexpected {@link Exception}.
*/
@Test @Test
public void testPopulateSettings() throws FileNotFoundException, ParseException, InvalidSettingException, URISyntaxException { public void testPopulateSettings() throws Exception {
File prop = new File(this.getClass().getClassLoader().getResource("sample.properties").toURI().getPath()); File prop = new File(this.getClass().getClassLoader().getResource("sample.properties").toURI().getPath());
String[] args = {"-P", prop.getAbsolutePath()}; String[] args = {"-P", prop.getAbsolutePath()};
Map<String, Boolean> expected = new HashMap<>(); Map<String, Boolean> expected = new HashMap<>();
expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE); expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.TRUE); expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.TRUE);
assertTrue(testBooleanProperties(args, expected)); assertTrue(testBooleanProperties(args, expected));
String[] args2 = {"-n"}; String[] args2 = {"-n"};
@@ -103,25 +116,79 @@ public class AppTest {
expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE); expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.FALSE); expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.FALSE);
assertTrue(testBooleanProperties(args8, expected)); assertTrue(testBooleanProperties(args8, expected));
}
/**
* Assert that an {@link UnrecognizedOptionException} is thrown when a
* property that is not supported is specified on the CLI.
*
* @throws Exception the unexpected {@link Exception}.
*/
@Test
public void testPopulateSettingsException() throws Exception {
String[] args = {"-invalidPROPERTY"};
expectedException.expect(UnrecognizedOptionException.class);
expectedException.expectMessage("Unrecognized option: -invalidPROPERTY");
testBooleanProperties(args, null);
}
/**
* Assert that a single suppression file can be set using the CLI.
*
* @throws Exception the unexpected {@link Exception}.
*/
@Test
public void testPopulatingSuppressionSettingsWithASingleFile() throws Exception {
// GIVEN CLI properties with the mandatory arguments
File prop = new File(this.getClass().getClassLoader().getResource("sample.properties").toURI().getPath());
// AND a single suppression file
String[] args = {"-P", prop.getAbsolutePath(), "--suppression", "another-file.xml"};
// WHEN parsing the CLI arguments
final CliParser cli = new CliParser(getSettings());
cli.parse(args);
final App classUnderTest = new App(getSettings());
classUnderTest.populateSettings(cli);
// THEN the suppression file is set in the settings for use in the application core
assertThat("Expected the suppression file to be set in the Settings", getSettings().getString(KEYS.SUPPRESSION_FILE), is("another-file.xml"));
}
/**
* Assert that multiple suppression files can be set using the CLI.
*
* @throws Exception the unexpected {@link Exception}.
*/
@Test
public void testPopulatingSuppressionSettingsWithMultipleFiles() throws Exception {
// GIVEN CLI properties with the mandatory arguments
File prop = new File(this.getClass().getClassLoader().getResource("sample.properties").toURI().getPath());
// AND multiple suppression files
String[] args = {"-P", prop.getAbsolutePath(), "--suppression", "first-file.xml", "another-file.xml"};
// WHEN parsing the CLI arguments
final CliParser cli = new CliParser(getSettings());
cli.parse(args);
final App classUnderTest = new App(getSettings());
classUnderTest.populateSettings(cli);
// THEN the suppression files are set in the settings for use in the application core
assertThat("Expected the suppression files to be set in the Settings with a separator", getSettings().getString(KEYS.SUPPRESSION_FILE), is("first-file.xml,another-file.xml"));
} }
private boolean testBooleanProperties(String[] args, Map<String, Boolean> expected) throws URISyntaxException, FileNotFoundException, ParseException, InvalidSettingException { private boolean testBooleanProperties(String[] args, Map<String, Boolean> expected) throws URISyntaxException, FileNotFoundException, ParseException, InvalidSettingException {
Settings.initialize(); this.reloadSettings();
try { final CliParser cli = new CliParser(getSettings());
final CliParser cli = new CliParser(); cli.parse(args);
cli.parse(args); App instance = new App(getSettings());
App instance = new App(); instance.populateSettings(cli);
instance.populateSettings(cli); boolean results = true;
boolean results = true; for (Map.Entry<String, Boolean> entry : expected.entrySet()) {
for (Map.Entry<String, Boolean> entry : expected.entrySet()) { results &= getSettings().getBoolean(entry.getKey()) == entry.getValue();
results &= Settings.getBoolean(entry.getKey()) == entry.getValue();
}
return results;
} finally {
Settings.cleanup();
} }
return results;
} }
} }

View File

@@ -0,0 +1,62 @@
/*
* Copyright 2014 OWASP.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.owasp.dependencycheck;
import org.junit.After;
import org.junit.Before;
import org.owasp.dependencycheck.utils.Settings;
/**
*
* @author Jeremy Long
*/
public class BaseTest {
/**
* The configured settings.
*/
private Settings settings;
/**
* Initialize the {@link Settings}.
*/
@Before
public void setUp() {
settings = new Settings();
}
/**
* Clean the {@link Settings}.
*/
@After
public void tearDown() {
settings.cleanup(true);
}
/**
* Returns the settings for the test cases.
*
* @return the configured settings
*/
protected Settings getSettings() {
return settings;
}
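/**
 * Re-initializes the {@link Settings} by running the tear-down and set-up callbacks again,
 * giving the test a fresh settings instance.
 */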
protected void reloadSettings() {
tearDown();
setUp();
}
}

View File

@@ -33,17 +33,7 @@ import org.owasp.dependencycheck.utils.Settings;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public class CliParserTest { public class CliParserTest extends BaseTest {
@BeforeClass
public static void setUpClass() throws Exception {
Settings.initialize();
}
@AfterClass
public static void tearDownClass() throws Exception {
Settings.cleanup(true);
}
/** /**
* Test of parse method, of class CliParser. * Test of parse method, of class CliParser.
@@ -59,7 +49,7 @@ public class CliParserTest {
ByteArrayOutputStream baos = new ByteArrayOutputStream(); ByteArrayOutputStream baos = new ByteArrayOutputStream();
System.setOut(new PrintStream(baos)); System.setOut(new PrintStream(baos));
CliParser instance = new CliParser(); CliParser instance = new CliParser(getSettings());
instance.parse(args); instance.parse(args);
Assert.assertFalse(instance.isGetVersion()); Assert.assertFalse(instance.isGetVersion());
@@ -78,7 +68,7 @@ public class CliParserTest {
String[] args = {"-help"}; String[] args = {"-help"};
PrintStream out = System.out; PrintStream out = System.out;
CliParser instance = new CliParser(); CliParser instance = new CliParser(getSettings());
instance.parse(args); instance.parse(args);
Assert.assertFalse(instance.isGetVersion()); Assert.assertFalse(instance.isGetVersion());
@@ -96,7 +86,7 @@ public class CliParserTest {
String[] args = {"-version"}; String[] args = {"-version"};
CliParser instance = new CliParser(); CliParser instance = new CliParser(getSettings());
instance.parse(args); instance.parse(args);
Assert.assertTrue(instance.isGetVersion()); Assert.assertTrue(instance.isGetVersion());
Assert.assertFalse(instance.isGetHelp()); Assert.assertFalse(instance.isGetHelp());
@@ -114,7 +104,7 @@ public class CliParserTest {
String[] args = {"--failOnCVSS"}; String[] args = {"--failOnCVSS"};
CliParser instance = new CliParser(); CliParser instance = new CliParser(getSettings());
try { try {
instance.parse(args); instance.parse(args);
} catch (ParseException ex) { } catch (ParseException ex) {
@@ -135,7 +125,7 @@ public class CliParserTest {
String[] args = {"--failOnCVSS","bad"}; String[] args = {"--failOnCVSS","bad"};
CliParser instance = new CliParser(); CliParser instance = new CliParser(getSettings());
instance.parse(args); instance.parse(args);
Assert.assertEquals("Default should be 11", 11, instance.getFailOnCVSS()); Assert.assertEquals("Default should be 11", 11, instance.getFailOnCVSS());
Assert.assertFalse(instance.isGetVersion()); Assert.assertFalse(instance.isGetVersion());
@@ -153,7 +143,7 @@ public class CliParserTest {
String[] args = {"--failOnCVSS","6"}; String[] args = {"--failOnCVSS","6"};
CliParser instance = new CliParser(); CliParser instance = new CliParser(getSettings());
instance.parse(args); instance.parse(args);
Assert.assertEquals(6, instance.getFailOnCVSS()); Assert.assertEquals(6, instance.getFailOnCVSS());
Assert.assertFalse(instance.isGetVersion()); Assert.assertFalse(instance.isGetVersion());
@@ -178,7 +168,7 @@ public class CliParserTest {
System.setOut(new PrintStream(baos_out)); System.setOut(new PrintStream(baos_out));
System.setErr(new PrintStream(baos_err)); System.setErr(new PrintStream(baos_err));
CliParser instance = new CliParser(); CliParser instance = new CliParser(getSettings());
try { try {
instance.parse(args); instance.parse(args);
@@ -200,7 +190,7 @@ public class CliParserTest {
String[] args = {"-scan"}; String[] args = {"-scan"};
CliParser instance = new CliParser(); CliParser instance = new CliParser(getSettings());
try { try {
instance.parse(args); instance.parse(args);
@@ -223,7 +213,7 @@ public class CliParserTest {
String[] args = {"-scan", "jar.that.does.not.exist", "-app", "test"}; String[] args = {"-scan", "jar.that.does.not.exist", "-app", "test"};
CliParser instance = new CliParser(); CliParser instance = new CliParser(getSettings());
try { try {
instance.parse(args); instance.parse(args);
} catch (FileNotFoundException ex) { } catch (FileNotFoundException ex) {
@@ -245,7 +235,7 @@ public class CliParserTest {
File path = new File(this.getClass().getClassLoader().getResource("checkSumTest.file").toURI().getPath()); File path = new File(this.getClass().getClassLoader().getResource("checkSumTest.file").toURI().getPath());
String[] args = {"-scan", path.getCanonicalPath(), "-out", "./", "-app", "test"}; String[] args = {"-scan", path.getCanonicalPath(), "-out", "./", "-app", "test"};
CliParser instance = new CliParser(); CliParser instance = new CliParser(getSettings());
instance.parse(args); instance.parse(args);
Assert.assertEquals(path.getCanonicalPath(), instance.getScanFiles()[0]); Assert.assertEquals(path.getCanonicalPath(), instance.getScanFiles()[0]);
@@ -267,7 +257,7 @@ public class CliParserTest {
ByteArrayOutputStream baos = new ByteArrayOutputStream(); ByteArrayOutputStream baos = new ByteArrayOutputStream();
System.setOut(new PrintStream(baos)); System.setOut(new PrintStream(baos));
CliParser instance = new CliParser(); CliParser instance = new CliParser(getSettings());
instance.printVersionInfo(); instance.printVersionInfo();
try { try {
baos.flush(); baos.flush();
@@ -296,7 +286,7 @@ public class CliParserTest {
ByteArrayOutputStream baos = new ByteArrayOutputStream(); ByteArrayOutputStream baos = new ByteArrayOutputStream();
System.setOut(new PrintStream(baos)); System.setOut(new PrintStream(baos));
CliParser instance = new CliParser(); CliParser instance = new CliParser(getSettings());
String[] args = {"-h"}; String[] args = {"-h"};
instance.parse(args); instance.parse(args);
instance.printHelp(); instance.printHelp();

View File

@@ -1,5 +1,5 @@
autoupdate=false autoupdate=false
somethingmadeup=test
analyzer.experimental.enabled=false analyzer.experimental.enabled=false
analyzer.jar.enabled=true analyzer.jar.enabled=true
analyzer.archive.enabled=true analyzer.archive.enabled=true

View File

@@ -20,7 +20,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<parent> <parent>
<groupId>org.owasp</groupId> <groupId>org.owasp</groupId>
<artifactId>dependency-check-parent</artifactId> <artifactId>dependency-check-parent</artifactId>
<version>1.4.6-SNAPSHOT</version> <version>3.1.2-SNAPSHOT</version>
</parent> </parent>
<artifactId>dependency-check-core</artifactId> <artifactId>dependency-check-core</artifactId>
@@ -28,15 +28,6 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<name>Dependency-Check Core</name> <name>Dependency-Check Core</name>
<description>dependency-check-core is the engine and reporting tool used to identify and report if there are any known, publicly disclosed vulnerabilities in the scanned project's dependencies. The engine extracts meta-data from the dependencies and uses this to do fuzzy key-word matching against the Common Platfrom Enumeration (CPE), if any CPE identifiers are found the associated Common Vulnerability and Exposure (CVE) entries are added to the generated report.</description> <description>dependency-check-core is the engine and reporting tool used to identify and report if there are any known, publicly disclosed vulnerabilities in the scanned project's dependencies. The engine extracts meta-data from the dependencies and uses this to do fuzzy key-word matching against the Common Platfrom Enumeration (CPE), if any CPE identifiers are found the associated Common Vulnerability and Exposure (CVE) entries are added to the generated report.</description>
<!-- begin copy from http://minds.coremedia.com/2012/09/11/problem-solved-deploy-multi-module-maven-project-site-as-github-pages/ -->
<distributionManagement>
<site>
<id>github-pages-site</id>
<name>Deployment through GitHub's site deployment plugin</name>
<url>${basedir}/../target/site/${project.version}/dependency-check-core</url>
</site>
</distributionManagement>
<!-- end copy -->
<build> <build>
<resources> <resources>
<resource> <resource>
@@ -99,6 +90,7 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<configuration> <configuration>
<outputDirectory>${project.build.directory}/test-classes</outputDirectory> <outputDirectory>${project.build.directory}/test-classes</outputDirectory>
<includeScope>test</includeScope> <includeScope>test</includeScope>
<excludeArtifactIds>dependency-check-utils</excludeArtifactIds>
</configuration> </configuration>
</execution> </execution>
</executions> </executions>
@@ -123,56 +115,11 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
</plugin> </plugin>
</plugins> </plugins>
</build> </build>
<reporting>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-report-plugin</artifactId>
<reportSets>
<reportSet>
<id>integration-tests</id>
<reports>
<report>report-only</report>
<report>failsafe-report-only</report>
</reports>
</reportSet>
</reportSets>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>${reporting.checkstyle-plugin.version}</version>
<configuration>
<enableRulesSummary>false</enableRulesSummary>
<enableFilesSummary>false</enableFilesSummary>
<configLocation>${basedir}/../src/main/config/checkstyle-checks.xml</configLocation>
<headerLocation>${basedir}/../src/main/config/checkstyle-header.txt</headerLocation>
<suppressionsLocation>${basedir}/../src/main/config/checkstyle-suppressions.xml</suppressionsLocation>
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-pmd-plugin</artifactId>
<version>${reporting.pmd-plugin.version}</version>
<configuration>
<targetJdk>1.6</targetJdk>
<linkXRef>true</linkXRef>
<sourceEncoding>utf-8</sourceEncoding>
<excludes>
<exclude>**/generated/*.java</exclude>
</excludes>
<rulesets>
<ruleset>../src/main/config/dcrules.xml</ruleset>
<ruleset>/rulesets/java/basic.xml</ruleset>
<ruleset>/rulesets/java/imports.xml</ruleset>
<ruleset>/rulesets/java/unusedcode.xml</ruleset>
</rulesets>
</configuration>
</plugin>
</plugins>
</reporting>
<dependencies> <dependencies>
<dependency>
<groupId>com.vdurmont</groupId>
<artifactId>semver4j</artifactId>
</dependency>
<!-- Note, to stay compatible with Jenkins installations only JARs compiled to 1.6 can be used --> <!-- Note, to stay compatible with Jenkins installations only JARs compiled to 1.6 can be used -->
<dependency> <dependency>
<groupId>joda-time</groupId> <groupId>joda-time</groupId>
@@ -257,7 +204,8 @@ Copyright (c) 2012 Jeremy Long. All Rights Reserved.
<groupId>com.google.code.gson</groupId> <groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId> <artifactId>gson</artifactId>
</dependency> </dependency>
<!-- The following dependencies are only used during testing --> <!-- The following dependencies are only used during testing
and must not be converted to a properties based version number -->
<dependency> <dependency>
<groupId>org.apache.maven.scm</groupId> <groupId>org.apache.maven.scm</groupId>
<artifactId>maven-scm-provider-cvsexe</artifactId> <artifactId>maven-scm-provider-cvsexe</artifactId>

View File

@@ -21,12 +21,12 @@ import org.owasp.dependencycheck.analyzer.Analyzer;
import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer; import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import java.util.List; import java.util.List;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import javax.annotation.concurrent.ThreadSafe;
/** /**
* Task to support parallelism of dependency-check analysis. Analyses a single * Task to support parallelism of dependency-check analysis. Analyses a single
@@ -34,6 +34,7 @@ import java.util.concurrent.Callable;
* *
* @author Stefan Neuhaus * @author Stefan Neuhaus
*/ */
@ThreadSafe
public class AnalysisTask implements Callable<Void> { public class AnalysisTask implements Callable<Void> {
/** /**
@@ -57,10 +58,6 @@ public class AnalysisTask implements Callable<Void> {
* The list of exceptions that may occur during analysis. * The list of exceptions that may occur during analysis.
*/ */
private final List<Throwable> exceptions; private final List<Throwable> exceptions;
/**
* A reference to the global settings object.
*/
private final Settings settings;
/** /**
* Creates a new analysis task. * Creates a new analysis task.
@@ -70,16 +67,12 @@ public class AnalysisTask implements Callable<Void> {
* @param engine the dependency-check engine * @param engine the dependency-check engine
* @param exceptions exceptions that occur during analysis will be added to * @param exceptions exceptions that occur during analysis will be added to
* this collection of exceptions * this collection of exceptions
* @param settings a reference to the global settings object; this is
* necessary so that when the thread is started the dependencies have a
* correct reference to the global settings.
*/ */
AnalysisTask(Analyzer analyzer, Dependency dependency, Engine engine, List<Throwable> exceptions, Settings settings) { AnalysisTask(Analyzer analyzer, Dependency dependency, Engine engine, List<Throwable> exceptions) {
this.analyzer = analyzer; this.analyzer = analyzer;
this.dependency = dependency; this.dependency = dependency;
this.engine = engine; this.engine = engine;
this.exceptions = exceptions; this.exceptions = exceptions;
this.settings = settings;
} }
/** /**
@@ -89,26 +82,20 @@ public class AnalysisTask implements Callable<Void> {
*/ */
@Override @Override
public Void call() { public Void call() {
try { if (shouldAnalyze()) {
Settings.setInstance(settings); LOGGER.debug("Begin Analysis of '{}' ({})", dependency.getActualFilePath(), analyzer.getName());
try {
if (shouldAnalyze()) { analyzer.analyze(dependency, engine);
LOGGER.debug("Begin Analysis of '{}' ({})", dependency.getActualFilePath(), analyzer.getName()); } catch (AnalysisException ex) {
try { LOGGER.warn("An error occurred while analyzing '{}' ({}).", dependency.getActualFilePath(), analyzer.getName());
analyzer.analyze(dependency, engine); LOGGER.debug("", ex);
} catch (AnalysisException ex) { exceptions.add(ex);
LOGGER.warn("An error occurred while analyzing '{}' ({}).", dependency.getActualFilePath(), analyzer.getName()); } catch (Throwable ex) {
LOGGER.debug("", ex); LOGGER.warn("An unexpected error occurred during analysis of '{}' ({}): {}",
exceptions.add(ex); dependency.getActualFilePath(), analyzer.getName(), ex.getMessage());
} catch (Throwable ex) { LOGGER.debug("", ex);
LOGGER.warn("An unexpected error occurred during analysis of '{}' ({}): {}", exceptions.add(ex);
dependency.getActualFilePath(), analyzer.getName(), ex.getMessage());
LOGGER.debug("", ex);
exceptions.add(ex);
}
} }
} finally {
Settings.cleanup(false);
} }
return null; return null;
} }
@@ -123,7 +110,6 @@ public class AnalysisTask implements Callable<Void> {
final FileTypeAnalyzer fileTypeAnalyzer = (FileTypeAnalyzer) analyzer; final FileTypeAnalyzer fileTypeAnalyzer = (FileTypeAnalyzer) analyzer;
return fileTypeAnalyzer.accept(dependency.getActualFile()); return fileTypeAnalyzer.accept(dependency.getActualFile());
} }
return true; return true;
} }
} }

View File

@@ -24,6 +24,7 @@ import org.owasp.dependencycheck.analyzer.FileTypeAnalyzer;
import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory; import org.owasp.dependencycheck.data.nvdcve.ConnectionFactory;
import org.owasp.dependencycheck.data.nvdcve.CveDB; import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException; import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties;
import org.owasp.dependencycheck.data.update.CachedWebDataSource; import org.owasp.dependencycheck.data.update.CachedWebDataSource;
import org.owasp.dependencycheck.data.update.UpdateService; import org.owasp.dependencycheck.data.update.UpdateService;
import org.owasp.dependencycheck.data.update.exception.UpdateException; import org.owasp.dependencycheck.data.update.exception.UpdateException;
@@ -31,6 +32,8 @@ import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.ExceptionCollection; import org.owasp.dependencycheck.exception.ExceptionCollection;
import org.owasp.dependencycheck.exception.InitializationException; import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.exception.NoDataException; import org.owasp.dependencycheck.exception.NoDataException;
import org.owasp.dependencycheck.exception.ReportException;
import org.owasp.dependencycheck.reporting.ReportGenerator;
import org.owasp.dependencycheck.utils.InvalidSettingException; import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -39,6 +42,7 @@ import org.slf4j.LoggerFactory;
import java.io.File; import java.io.File;
import java.io.FileFilter; import java.io.FileFilter;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
@@ -54,6 +58,13 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import java.util.concurrent.Future; import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import javax.annotation.concurrent.NotThreadSafe;
import org.owasp.dependencycheck.exception.H2DBLockException;
import org.owasp.dependencycheck.utils.H2DBLock;
//CSOFF: AvoidStarImport
import static org.owasp.dependencycheck.analyzer.AnalysisPhase.*;
//CSON: AvoidStarImport
/** /**
* Scans files, directories, etc. for Dependencies. Analyzers are loaded and * Scans files, directories, etc. for Dependencies. Analyzers are loaded and
@@ -63,12 +74,93 @@ import java.util.concurrent.TimeUnit;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public class Engine implements FileFilter { @NotThreadSafe
public class Engine implements FileFilter, AutoCloseable {
/**
* {@link Engine} execution modes.
*/
public enum Mode {
/**
* In evidence collection mode the {@link Engine} only collects evidence
* from the scan targets, and doesn't require a database.
*/
EVIDENCE_COLLECTION(
false,
INITIAL,
PRE_INFORMATION_COLLECTION,
INFORMATION_COLLECTION,
POST_INFORMATION_COLLECTION
),
/**
* In evidence processing mode the {@link Engine} processes the evidence
* collected using the {@link #EVIDENCE_COLLECTION} mode. Dependencies
* should be injected into the {@link Engine} using
* {@link Engine#setDependencies(List)}.
*/
EVIDENCE_PROCESSING(
true,
PRE_IDENTIFIER_ANALYSIS,
IDENTIFIER_ANALYSIS,
POST_IDENTIFIER_ANALYSIS,
PRE_FINDING_ANALYSIS,
FINDING_ANALYSIS,
POST_FINDING_ANALYSIS,
FINAL
),
/**
* In standalone mode the {@link Engine} will collect and process
* evidence in a single execution.
*/
STANDALONE(true, AnalysisPhase.values());
/**
* Whether the database is required in this mode.
*/
private final boolean databaseRequired;
/**
* The analysis phases included in the mode.
*/
private final AnalysisPhase[] phases;
/**
* Returns true if the database is required; otherwise false.
*
* @return whether or not the database is required
*/
private boolean isDatabaseRequired() {
return databaseRequired;
}
/**
* Returns the phases for this mode.
*
* @return the phases for this mode
*/
public AnalysisPhase[] getPhases() {
return phases;
}
/**
* Constructs a new mode.
*
* @param databaseRequired if the database is required for the mode
* @param phases the analysis phases to include in the mode
*/
Mode(boolean databaseRequired, AnalysisPhase... phases) {
this.databaseRequired = databaseRequired;
this.phases = phases;
}
}
/** /**
* The list of dependencies. * The list of dependencies.
*/ */
private final List<Dependency> dependencies = Collections.synchronizedList(new ArrayList<Dependency>()); private final List<Dependency> dependencies = Collections.synchronizedList(new ArrayList<Dependency>());
/**
* The external view of the dependency list.
*/
private Dependency[] dependenciesExternalView = null;
/** /**
* A Map of analyzers grouped by Analysis phase. * A Map of analyzers grouped by Analysis phase.
*/ */
@@ -79,11 +171,17 @@ public class Engine implements FileFilter {
*/ */
private final Set<FileTypeAnalyzer> fileTypeAnalyzers = new HashSet<>(); private final Set<FileTypeAnalyzer> fileTypeAnalyzers = new HashSet<>();
/**
* The engine execution mode indicating it will either collect evidence or
* process evidence or both.
*/
private final Mode mode;
/** /**
* The ClassLoader to use when dynamically loading Analyzer and Update * The ClassLoader to use when dynamically loading Analyzer and Update
* services. * services.
*/ */
private ClassLoader serviceClassLoader = Thread.currentThread().getContextClassLoader(); private final ClassLoader serviceClassLoader;
/** /**
* A reference to the database. * A reference to the database.
*/ */
@@ -92,26 +190,51 @@ public class Engine implements FileFilter {
* The Logger for use throughout the class. * The Logger for use throughout the class.
*/ */
private static final Logger LOGGER = LoggerFactory.getLogger(Engine.class); private static final Logger LOGGER = LoggerFactory.getLogger(Engine.class);
/**
* The configured settings.
*/
private final Settings settings;
/**
* Creates a new {@link Mode#STANDALONE} Engine.
*
* @param settings reference to the configured settings
*/
public Engine(Settings settings) {
this(Mode.STANDALONE, settings);
}
/** /**
* Creates a new Engine. * Creates a new Engine.
* *
* @throws DatabaseException thrown if there is an error connecting to the * @param mode the mode of operation
* database * @param settings reference to the configured settings
*/ */
public Engine() throws DatabaseException { public Engine(Mode mode, Settings settings) {
initializeEngine(); this(Thread.currentThread().getContextClassLoader(), mode, settings);
}
/**
* Creates a new {@link Mode#STANDALONE} Engine.
*
* @param serviceClassLoader a reference to the class loader being used
* @param settings reference to the configured settings
*/
public Engine(ClassLoader serviceClassLoader, Settings settings) {
this(serviceClassLoader, Mode.STANDALONE, settings);
} }
/** /**
* Creates a new Engine. * Creates a new Engine.
* *
* @param serviceClassLoader a reference to the class loader being used * @param serviceClassLoader a reference to the class loader being used
* @throws DatabaseException thrown if there is an error connecting to the * @param mode the mode of the engine
* database * @param settings reference to the configured settings
*/ */
public Engine(ClassLoader serviceClassLoader) throws DatabaseException { public Engine(ClassLoader serviceClassLoader, Mode mode, Settings settings) {
this.settings = settings;
this.serviceClassLoader = serviceClassLoader; this.serviceClassLoader = serviceClassLoader;
this.mode = mode;
initializeEngine(); initializeEngine();
} }
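The `Mode` enum and the new constructors above are what allow evidence collection to run without a database while evidence processing reuses the collected dependencies. A hedged sketch of driving the two modes; the `scan(...)` and `analyzeDependencies()` entry points are assumed from the existing `Engine` API and are not shown in this excerpt, while the constructors, `getDependencies()`, `setDependencies(List)` and `close()` are part of this change set:

```java
package org.owasp.dependencycheck;

import java.util.Arrays;
import java.util.List;

import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;

public class SplitModeSketch {
    public static void main(String[] args) throws Exception {
        final Settings settings = new Settings();
        final List<Dependency> collected;

        // Collect evidence only; Mode.EVIDENCE_COLLECTION does not require the database.
        try (Engine collector = new Engine(Engine.Mode.EVIDENCE_COLLECTION, settings)) {
            collector.scan(".");               // assumed entry point; "." is a placeholder path
            collector.analyzeDependencies();   // assumed entry point; runs only the collection phases
            collected = Arrays.asList(collector.getDependencies());
        }

        // Process the previously collected evidence; this mode requires the database.
        try (Engine processor = new Engine(Engine.Mode.EVIDENCE_PROCESSING, settings)) {
            processor.setDependencies(collected);
            processor.analyzeDependencies();   // assumed entry point; runs the processing phases
        }
        settings.cleanup(true);
    }
}
```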
@@ -122,20 +245,21 @@ public class Engine implements FileFilter {
* @throws DatabaseException thrown if there is an error connecting to the * @throws DatabaseException thrown if there is an error connecting to the
* database * database
*/ */
protected final void initializeEngine() throws DatabaseException { protected final void initializeEngine() {
ConnectionFactory.initialize();
loadAnalyzers(); loadAnalyzers();
} }
/** /**
* Properly cleans up resources allocated during analysis. * Properly cleans up resources allocated during analysis.
*/ */
public void cleanup() { @Override
if (database != null) { public void close() {
database.close(); if (mode.isDatabaseRequired()) {
database = null; if (database != null) {
database.close();
database = null;
}
} }
ConnectionFactory.cleanup();
} }
/** /**
@@ -146,13 +270,13 @@ public class Engine implements FileFilter {
if (!analyzers.isEmpty()) { if (!analyzers.isEmpty()) {
return; return;
} }
for (AnalysisPhase phase : AnalysisPhase.values()) { for (AnalysisPhase phase : mode.getPhases()) {
analyzers.put(phase, new ArrayList<Analyzer>()); analyzers.put(phase, new ArrayList<Analyzer>());
} }
final AnalyzerService service = new AnalyzerService(serviceClassLoader, settings);
final AnalyzerService service = new AnalyzerService(serviceClassLoader); final List<Analyzer> iterator = service.getAnalyzers(mode.getPhases());
final List<Analyzer> iterator = service.getAnalyzers();
for (Analyzer a : iterator) { for (Analyzer a : iterator) {
a.initialize(this.settings);
analyzers.get(a.getAnalysisPhase()).add(a); analyzers.get(a.getAnalysisPhase()).add(a);
if (a instanceof FileTypeAnalyzer) { if (a instanceof FileTypeAnalyzer) {
this.fileTypeAnalyzers.add((FileTypeAnalyzer) a); this.fileTypeAnalyzers.add((FileTypeAnalyzer) a);
@@ -171,18 +295,44 @@ public class Engine implements FileFilter {
} }
/** /**
* Get the dependencies identified. The returned list is a reference to the * Adds a dependency.
* engine's synchronized list. <b>You must synchronize on the returned *
* list</b> when you modify and iterate over it from multiple threads. E.g. * @param dependency the dependency to add
* this holds for analyzers supporting parallel processing during their */
* analysis phase. public synchronized void addDependency(Dependency dependency) {
dependencies.add(dependency);
dependenciesExternalView = null;
}
/**
* Sorts the dependency list.
*/
public synchronized void sortDependencies() {
//TODO - is this actually necessary?
// Collections.sort(dependencies);
// dependenciesExternalView = null;
}
/**
* Removes the dependency.
*
* @param dependency the dependency to remove.
*/
public synchronized void removeDependency(Dependency dependency) {
dependencies.remove(dependency);
dependenciesExternalView = null;
}
/**
* Returns a copy of the dependencies as an array.
* *
* @return the dependencies identified * @return the dependencies identified
* @see Collections#synchronizedList(List)
* @see Analyzer#supportsParallelProcessing()
*/ */
public synchronized List<Dependency> getDependencies() { public synchronized Dependency[] getDependencies() {
return dependencies; if (dependenciesExternalView == null) {
dependenciesExternalView = dependencies.toArray(new Dependency[dependencies.size()]);
}
return dependenciesExternalView;
} }
/** /**
@@ -190,11 +340,10 @@ public class Engine implements FileFilter {
* *
* @param dependencies the dependencies * @param dependencies the dependencies
*/ */
public void setDependencies(List<Dependency> dependencies) { public synchronized void setDependencies(List<Dependency> dependencies) {
synchronized (this.dependencies) { this.dependencies.clear();
this.dependencies.clear(); this.dependencies.addAll(dependencies);
this.dependencies.addAll(dependencies); dependenciesExternalView = null;
}
} }
/** /**
@@ -403,7 +552,9 @@ public class Engine implements FileFilter {
} }
} else { } else {
final Dependency d = scanFile(f, projectReference); final Dependency d = scanFile(f, projectReference);
deps.add(d); if (d != null) {
deps.add(d);
}
} }
} }
} }
@@ -431,7 +582,7 @@ public class Engine implements FileFilter {
* @return the scanned dependency * @return the scanned dependency
* @since v1.4.4 * @since v1.4.4
*/ */
protected Dependency scanFile(File file, String projectReference) { protected synchronized Dependency scanFile(File file, String projectReference) {
Dependency dependency = null; Dependency dependency = null;
if (file.isFile()) { if (file.isFile()) {
if (accept(file)) { if (accept(file)) {
@@ -441,31 +592,31 @@ public class Engine implements FileFilter {
} }
final String sha1 = dependency.getSha1sum(); final String sha1 = dependency.getSha1sum();
boolean found = false; boolean found = false;
synchronized (dependencies) {
if (sha1 != null) { if (sha1 != null) {
for (Dependency existing : dependencies) { for (Dependency existing : dependencies) {
if (sha1.equals(existing.getSha1sum())) { if (sha1.equals(existing.getSha1sum())) {
found = true; found = true;
if (projectReference != null) { if (projectReference != null) {
existing.addProjectReference(projectReference); existing.addProjectReference(projectReference);
}
if (existing.getActualFilePath() != null && dependency.getActualFilePath() != null
&& !existing.getActualFilePath().equals(dependency.getActualFilePath())) {
existing.addRelatedDependency(dependency);
} else {
dependency = existing;
}
break;
} }
if (existing.getActualFilePath() != null && dependency.getActualFilePath() != null
&& !existing.getActualFilePath().equals(dependency.getActualFilePath())) {
existing.addRelatedDependency(dependency);
} else {
dependency = existing;
}
break;
} }
} }
if (!found) {
dependencies.add(dependency);
}
} }
} else { if (!found) {
LOGGER.debug("Path passed to scanFile(File) is not a file: {}. Skipping the file.", file); dependencies.add(dependency);
dependenciesExternalView = null;
}
} }
} else {
LOGGER.debug("Path passed to scanFile(File) is not a file that can be scanned by dependency-check: {}. Skipping the file.", file);
} }
return dependency; return dependency;
} }
@@ -502,7 +653,7 @@ public class Engine implements FileFilter {
final long analysisStart = System.currentTimeMillis(); final long analysisStart = System.currentTimeMillis();
// analysis phases // analysis phases
for (AnalysisPhase phase : AnalysisPhase.values()) { for (AnalysisPhase phase : mode.getPhases()) {
final List<Analyzer> analyzerList = analyzers.get(phase); final List<Analyzer> analyzerList = analyzers.get(phase);
for (final Analyzer analyzer : analyzerList) { for (final Analyzer analyzer : analyzerList) {
@@ -511,7 +662,9 @@ public class Engine implements FileFilter {
initializeAnalyzer(analyzer); initializeAnalyzer(analyzer);
} catch (InitializationException ex) { } catch (InitializationException ex) {
exceptions.add(ex); exceptions.add(ex);
continue; if (ex.isFatal()) {
continue;
}
} }
if (analyzer.isEnabled()) { if (analyzer.isEnabled()) {
@@ -525,7 +678,7 @@ public class Engine implements FileFilter {
} }
} }
} }
for (AnalysisPhase phase : AnalysisPhase.values()) { for (AnalysisPhase phase : mode.getPhases()) {
final List<Analyzer> analyzerList = analyzers.get(phase); final List<Analyzer> analyzerList = analyzers.get(phase);
for (Analyzer a : analyzerList) { for (Analyzer a : analyzerList) {
@@ -548,17 +701,19 @@ public class Engine implements FileFilter {
* @throws ExceptionCollection thrown if fatal exceptions occur * @throws ExceptionCollection thrown if fatal exceptions occur
*/ */
private void initializeAndUpdateDatabase(final List<Throwable> exceptions) throws ExceptionCollection { private void initializeAndUpdateDatabase(final List<Throwable> exceptions) throws ExceptionCollection {
if (!mode.isDatabaseRequired()) {
return;
}
boolean autoUpdate = true; boolean autoUpdate = true;
try { try {
autoUpdate = Settings.getBoolean(Settings.KEYS.AUTO_UPDATE); autoUpdate = settings.getBoolean(Settings.KEYS.AUTO_UPDATE);
} catch (InvalidSettingException ex) { } catch (InvalidSettingException ex) {
LOGGER.debug("Invalid setting for auto-update; using true."); LOGGER.debug("Invalid setting for auto-update; using true.");
exceptions.add(ex); exceptions.add(ex);
} }
if (autoUpdate) { if (autoUpdate) {
try { try {
database = CveDB.getInstance(); doUpdates(true);
doUpdates();
} catch (UpdateException ex) { } catch (UpdateException ex) {
exceptions.add(ex); exceptions.add(ex);
LOGGER.warn("Unable to update Cached Web DataSource, using local " LOGGER.warn("Unable to update Cached Web DataSource, using local "
@@ -569,10 +724,10 @@ public class Engine implements FileFilter {
} }
} else { } else {
try { try {
if (ConnectionFactory.isH2Connection() && !ConnectionFactory.h2DataFileExists()) { if (ConnectionFactory.isH2Connection(settings) && !ConnectionFactory.h2DataFileExists(settings)) {
throw new ExceptionCollection(new NoDataException("Autoupdate is disabled and the database does not exist"), true); throw new ExceptionCollection(new NoDataException("Autoupdate is disabled and the database does not exist"), true);
} else { } else {
database = CveDB.getInstance(); openDatabase(true, true);
} }
} catch (IOException ex) { } catch (IOException ex) {
throw new ExceptionCollection(new DatabaseException("Autoupdate is disabled and unable to connect to the database"), true); throw new ExceptionCollection(new DatabaseException("Autoupdate is disabled and unable to connect to the database"), true);
@@ -605,10 +760,11 @@ public class Engine implements FileFilter {
} catch (ExecutionException e) { } catch (ExecutionException e) {
throwFatalExceptionCollection("Analysis task failed with a fatal exception.", e, exceptions); throwFatalExceptionCollection("Analysis task failed with a fatal exception.", e, exceptions);
} catch (CancellationException e) { } catch (CancellationException e) {
throwFatalExceptionCollection("Analysis task timed out.", e, exceptions); throwFatalExceptionCollection("Analysis task was cancelled.", e, exceptions);
} }
} }
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt();
throwFatalExceptionCollection("Analysis has been interrupted.", e, exceptions); throwFatalExceptionCollection("Analysis has been interrupted.", e, exceptions);
} finally { } finally {
executorService.shutdown(); executorService.shutdown();
@@ -622,13 +778,11 @@ public class Engine implements FileFilter {
* @param exceptions the collection of exceptions to collect * @param exceptions the collection of exceptions to collect
* @return a collection of analysis tasks * @return a collection of analysis tasks
*/ */
protected List<AnalysisTask> getAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) { protected synchronized List<AnalysisTask> getAnalysisTasks(Analyzer analyzer, List<Throwable> exceptions) {
final List<AnalysisTask> result = new ArrayList<>(); final List<AnalysisTask> result = new ArrayList<>();
synchronized (dependencies) { for (final Dependency dependency : dependencies) {
for (final Dependency dependency : dependencies) { final AnalysisTask task = new AnalysisTask(analyzer, dependency, this, exceptions);
final AnalysisTask task = new AnalysisTask(analyzer, dependency, this, exceptions, Settings.getInstance()); result.add(task);
result.add(task);
}
} }
return result; return result;
} }
@@ -653,21 +807,23 @@ public class Engine implements FileFilter {
/** /**
* Initializes the given analyzer. * Initializes the given analyzer.
* *
* @param analyzer the analyzer to initialize * @param analyzer the analyzer to prepare
* @throws InitializationException thrown when there is a problem * @throws InitializationException thrown when there is a problem
* initializing the analyzer * initializing the analyzer
*/ */
protected void initializeAnalyzer(Analyzer analyzer) throws InitializationException { protected void initializeAnalyzer(Analyzer analyzer) throws InitializationException {
try { try {
LOGGER.debug("Initializing {}", analyzer.getName()); LOGGER.debug("Initializing {}", analyzer.getName());
analyzer.initialize(); analyzer.prepare(this);
} catch (InitializationException ex) { } catch (InitializationException ex) {
LOGGER.error("Exception occurred initializing {}.", analyzer.getName()); LOGGER.error("Exception occurred initializing {}.", analyzer.getName());
LOGGER.debug("", ex); LOGGER.debug("", ex);
try { if (ex.isFatal()) {
analyzer.close(); try {
} catch (Throwable ex1) { analyzer.close();
LOGGER.trace("", ex1); } catch (Throwable ex1) {
LOGGER.trace("", ex1);
}
} }
throw ex; throw ex;
} catch (Throwable ex) { } catch (Throwable ex) {
@@ -703,15 +859,131 @@ public class Engine implements FileFilter {
* @throws UpdateException thrown if the operation fails * @throws UpdateException thrown if the operation fails
*/ */
public void doUpdates() throws UpdateException { public void doUpdates() throws UpdateException {
LOGGER.info("Checking for updates"); doUpdates(false);
final long updateStart = System.currentTimeMillis(); }
final UpdateService service = new UpdateService(serviceClassLoader);
final Iterator<CachedWebDataSource> iterator = service.getDataSources(); /**
while (iterator.hasNext()) { * Cycles through the cached web data sources and calls update on all of
final CachedWebDataSource source = iterator.next(); * them.
source.update(); *
* @param remainOpen whether or not the database connection should remain
* open
* @throws UpdateException thrown if the operation fails
*/
public void doUpdates(boolean remainOpen) throws UpdateException {
if (mode.isDatabaseRequired()) {
H2DBLock dblock = null;
try {
if (ConnectionFactory.isH2Connection(settings)) {
dblock = new H2DBLock(settings);
LOGGER.debug("locking for update");
dblock.lock();
}
openDatabase(false, false);
LOGGER.info("Checking for updates");
final long updateStart = System.currentTimeMillis();
final UpdateService service = new UpdateService(serviceClassLoader);
final Iterator<CachedWebDataSource> iterator = service.getDataSources();
while (iterator.hasNext()) {
final CachedWebDataSource source = iterator.next();
source.update(this);
}
database.close();
database = null;
LOGGER.info("Check for updates complete ({} ms)", System.currentTimeMillis() - updateStart);
if (remainOpen) {
openDatabase(true, false);
}
} catch (DatabaseException ex) {
throw new UpdateException(ex.getMessage(), ex.getCause());
} catch (H2DBLockException ex) {
throw new UpdateException("Unable to obtain an exclusive lock on the H2 database to perform updates", ex);
} finally {
if (dblock != null) {
dblock.release();
}
}
} else {
LOGGER.info("Skipping update check in evidence collection mode.");
} }
LOGGER.info("Check for updates complete ({} ms)", System.currentTimeMillis() - updateStart); }
/**
* <p>
* This method is only public for unit/integration testing. This method
* should not be called by any integration that uses
* dependency-check-core.</p>
* <p>
* Opens the database connection.</p>
*
* @throws DatabaseException if the database connection could not be created
*/
public void openDatabase() throws DatabaseException {
openDatabase(false, true);
}
/**
* <p>
* This method is only public for unit/integration testing. This method
* should not be called by any integration that uses
* dependency-check-core.</p>
* <p>
* Opens the database connection; if readOnly is true a copy of the database
* will be made.</p>
*
* @param readOnly whether or not the database connection should be readonly
* @param lockRequired whether or not a lock needs to be acquired when
* opening the database
* @throws DatabaseException if the database connection could not be created
*/
public void openDatabase(boolean readOnly, boolean lockRequired) throws DatabaseException {
if (mode.isDatabaseRequired() && database == null) {
//needed to apply any required schema changes
database = new CveDB(settings);
if (readOnly
&& ConnectionFactory.isH2Connection(settings)
&& settings.getString(Settings.KEYS.DB_CONNECTION_STRING).contains("file:%s")) {
H2DBLock lock = null;
try {
final File db = ConnectionFactory.getH2DataFile(settings);
if (db.isFile()) {
database.close();
if (lockRequired) {
lock = new H2DBLock(settings);
lock.lock();
}
LOGGER.debug("copying database");
final File temp = settings.getTempDirectory();
final File tempDB = new File(temp, db.getName());
Files.copy(db.toPath(), tempDB.toPath());
LOGGER.debug("copying complete '{}'", temp.toPath());
settings.setString(Settings.KEYS.DATA_DIRECTORY, temp.getPath());
final String connStr = settings.getString(Settings.KEYS.DB_CONNECTION_STRING);
if (!connStr.contains("ACCESS_MODE_DATA")) {
settings.setString(Settings.KEYS.DB_CONNECTION_STRING, connStr + "ACCESS_MODE_DATA=r");
}
database = new CveDB(settings);
}
} catch (IOException ex) {
throw new DatabaseException("Unable to open db in read only mode", ex);
} catch (H2DBLockException ex) {
throw new DatabaseException("Failed to obtain lock - unable to open db in read only mode", ex);
} finally {
if (lock != null) {
lock.release();
}
}
}
}
}
/**
* Returns a reference to the database.
*
* @return a reference to the database
*/
public CveDB getDatabase() {
return this.database;
} }
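The remainOpen flag makes an update-only integration straightforward. A minimal sketch using only members visible in this diff (Engine(Settings) appears in the scan-agent hunks further down); imports, logging, and the surrounding class are assumed:

// sketch: refresh the local NVD data and inspect the result, keeping the connection open
final Settings settings = new Settings();
Engine engine = null;
try {
    engine = new Engine(settings);
    engine.doUpdates(true);                       // true = re-open the database after the update
    final DatabaseProperties props = engine.getDatabase().getDatabaseProperties();
    // props now reflects the freshly updated data
} catch (DatabaseException | UpdateException ex) {
    // the update failed; the local copy of the NVD data is left untouched
} finally {
    if (engine != null) {
        engine.close();
    }
    settings.cleanup(true);
}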
/** /**
@@ -722,7 +994,7 @@ public class Engine implements FileFilter {
*/ */
public List<Analyzer> getAnalyzers() { public List<Analyzer> getAnalyzers() {
final List<Analyzer> ret = new ArrayList<>(); final List<Analyzer> ret = new ArrayList<>();
for (AnalysisPhase phase : AnalysisPhase.values()) { for (AnalysisPhase phase : mode.getPhases()) {
final List<Analyzer> analyzerList = analyzers.get(phase); final List<Analyzer> analyzerList = analyzers.get(phase);
ret.addAll(analyzerList); ret.addAll(analyzerList);
} }
@@ -759,6 +1031,24 @@ public class Engine implements FileFilter {
return this.fileTypeAnalyzers; return this.fileTypeAnalyzers;
} }
/**
* Returns the configured settings.
*
* @return the configured settings
*/
public Settings getSettings() {
return settings;
}
/**
* Returns the mode of the engine.
*
* @return the mode of the engine
*/
public Mode getMode() {
return mode;
}
/** /**
* Adds a file type analyzer. This has been added solely to assist in unit * Adds a file type analyzer. This has been added solely to assist in unit
* testing the Engine. * testing the Engine.
@@ -776,7 +1066,7 @@ public class Engine implements FileFilter {
* @throws NoDataException thrown if no data exists in the CPE Index * @throws NoDataException thrown if no data exists in the CPE Index
*/ */
private void ensureDataExists() throws NoDataException { private void ensureDataExists() throws NoDataException {
if (database == null || !database.dataExists()) { if (mode.isDatabaseRequired() && (database == null || !database.dataExists())) {
throw new NoDataException("No documents exist"); throw new NoDataException("No documents exist");
} }
} }
@@ -796,4 +1086,44 @@ public class Engine implements FileFilter {
exceptions.add(throwable); exceptions.add(throwable);
throw new ExceptionCollection(message, exceptions, true); throw new ExceptionCollection(message, exceptions, true);
} }
/**
* Writes the report to the given output directory.
*
* @param applicationName the name of the application/project
* @param groupId the Maven groupId
* @param artifactId the Maven artifactId
* @param version the Maven version
* @param outputDir the path to the output directory (can include the full
* file name if the format is not ALL)
* @param format the report format (ALL, HTML, CSV, JSON, etc.)
* @throws ReportException thrown if there is an error generating the report
*/
public synchronized void writeReports(String applicationName, String groupId, String artifactId,
String version, File outputDir, String format) throws ReportException {
if (mode == Mode.EVIDENCE_COLLECTION) {
throw new UnsupportedOperationException("Cannot generate report in evidence collection mode.");
}
final DatabaseProperties prop = database.getDatabaseProperties();
final ReportGenerator r = new ReportGenerator(applicationName, groupId, artifactId, version, dependencies, getAnalyzers(), prop, settings);
try {
r.write(outputDir.getAbsolutePath(), format);
} catch (ReportException ex) {
final String msg = String.format("Error generating the report for %s", applicationName);
throw new ReportException(msg, ex);
}
}
/**
* Writes the report to the given output directory.
*
* @param applicationName the name of the application/project
* @param outputDir the path to the output directory (can include the full
* file name if the format is not ALL)
* @param format the report format (ALL, HTML, CSV, JSON, etc.)
* @throws ReportException thrown if there is an error generating the report
*/
public void writeReports(String applicationName, File outputDir, String format) throws ReportException {
writeReports(applicationName, null, null, null, outputDir, format);
}
} }
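Together with the pre-existing scan(...) overloads (referenced via scanFile(File) earlier in this diff), the new writeReports(...) methods let an integration drive the whole flow without touching ReportGenerator directly. A rough sketch with placeholder application name, paths, and report format:

final Settings settings = new Settings();
Engine engine = null;
try {
    engine = new Engine(settings);
    engine.scan(new File("./lib"));                                   // hypothetical input directory
    engine.analyzeDependencies();
    engine.writeReports("My Application", new File("./reports"), "HTML");
} catch (DatabaseException | ExceptionCollection | ReportException ex) {
    // see the verbose/debug log for details
} finally {
    if (engine != null) {
        engine.close();
    }
    settings.cleanup(true);
}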


@@ -20,10 +20,10 @@ package org.owasp.dependencycheck.agent;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
import javax.annotation.concurrent.NotThreadSafe;
import org.owasp.dependencycheck.Engine; import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException; import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties; import org.owasp.dependencycheck.data.update.exception.UpdateException;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier; import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.Vulnerability; import org.owasp.dependencycheck.dependency.Vulnerability;
@@ -47,9 +47,9 @@ import org.slf4j.LoggerFactory;
* <pre> * <pre>
* List&lt;Dependency&gt; dependencies = new ArrayList&lt;Dependency&gt;(); * List&lt;Dependency&gt; dependencies = new ArrayList&lt;Dependency&gt;();
* Dependency dependency = new Dependency(new File(FileUtils.getBitBucket())); * Dependency dependency = new Dependency(new File(FileUtils.getBitBucket()));
* dependency.getProductEvidence().addEvidence("my-datasource", "name", "Jetty", Confidence.HIGH); * dependency.addEvidence(EvidenceType.PRODUCT, "my-datasource", "name", "Jetty", Confidence.HIGH);
* dependency.getVersionEvidence().addEvidence("my-datasource", "version", "5.1.10", Confidence.HIGH); * dependency.addEvidence(EvidenceType.VERSION, "my-datasource", "version", "5.1.10", Confidence.HIGH);
* dependency.getVendorEvidence().addEvidence("my-datasource", "vendor", "mortbay", Confidence.HIGH); * dependency.addEvidence(EvidenceType.VENDOR, "my-datasource", "vendor", "mortbay", Confidence.HIGH);
* dependencies.add(dependency); * dependencies.add(dependency);
* *
* DependencyCheckScanAgent scan = new DependencyCheckScanAgent(); * DependencyCheckScanAgent scan = new DependencyCheckScanAgent();
@@ -62,6 +62,7 @@ import org.slf4j.LoggerFactory;
* @author Steve Springett * @author Steve Springett
*/ */
@SuppressWarnings("unused") @SuppressWarnings("unused")
@NotThreadSafe
public class DependencyCheckScanAgent { public class DependencyCheckScanAgent {
//<editor-fold defaultstate="collapsed" desc="private fields"> //<editor-fold defaultstate="collapsed" desc="private fields">
@@ -103,6 +104,11 @@ public class DependencyCheckScanAgent {
* recommended that this be turned to false. Default is true. * recommended that this be turned to false. Default is true.
*/ */
private boolean autoUpdate = true; private boolean autoUpdate = true;
/**
* Sets whether the data directory should be updated without performing a
* scan. Default is false.
*/
private boolean updateOnly = false;
/** /**
* flag indicating whether or not to generate a report of findings. * flag indicating whether or not to generate a report of findings.
*/ */
@@ -149,6 +155,10 @@ public class DependencyCheckScanAgent {
* The password to use when connecting to the database. * The password to use when connecting to the database.
*/ */
private String databasePassword; private String databasePassword;
/**
* The starting string that identifies CPEs that are qualified to be imported.
*/
private String cpeStartsWithFilter;
/** /**
* Whether or not the Maven Central analyzer is enabled. * Whether or not the Maven Central analyzer is enabled.
*/ */
@@ -210,6 +220,16 @@ public class DependencyCheckScanAgent {
* The path to Mono for .NET assembly analysis on non-windows systems. * The path to Mono for .NET assembly analysis on non-windows systems.
*/ */
private String pathToMono; private String pathToMono;
/**
* The configured settings.
*/
private Settings settings;
/**
* The path to an optional dependency-check properties file. This will be used
* to side-load additional user-defined properties.
* {@link Settings#mergeProperties(String)}
*/
private String propertiesFilePath;
//</editor-fold> //</editor-fold>
//<editor-fold defaultstate="collapsed" desc="getters/setters"> //<editor-fold defaultstate="collapsed" desc="getters/setters">
@@ -321,6 +341,24 @@ public class DependencyCheckScanAgent {
this.autoUpdate = autoUpdate; this.autoUpdate = autoUpdate;
} }
/**
* Get the value of updateOnly.
*
* @return the value of updateOnly
*/
public boolean isUpdateOnly() {
return updateOnly;
}
/**
* Set the value of updateOnly.
*
* @param updateOnly new value of updateOnly
*/
public void setUpdateOnly(boolean updateOnly) {
this.updateOnly = updateOnly;
}
/** /**
* Get the value of generateReport. * Get the value of generateReport.
* *
@@ -526,6 +564,22 @@ public class DependencyCheckScanAgent {
this.showSummary = showSummary; this.showSummary = showSummary;
} }
/**
* Sets the starting string that identifies CPEs that are qualified to be imported.
* @param cpeStartsWithFilter filters CPEs based on this starting string (i.e. cpe:/a: )
*/
public void setCpeStartsWithFilter(String cpeStartsWithFilter) {
this.cpeStartsWithFilter = cpeStartsWithFilter;
}
/**
* Returns the starting string that identifies CPEs that are qualified to be imported.
* @return the CPE starting filter (i.e. cpe:/a: )
*/
public String getCpeStartsWithFilter() {
return cpeStartsWithFilter;
}
/** /**
* Get the value of centralAnalyzerEnabled. * Get the value of centralAnalyzerEnabled.
* *
@@ -813,10 +867,29 @@ public class DependencyCheckScanAgent {
public void setPathToMono(String pathToMono) { public void setPathToMono(String pathToMono) {
this.pathToMono = pathToMono; this.pathToMono = pathToMono;
} }
/**
* Get the value of propertiesFilePath.
*
* @return the value of propertiesFilePath
*/
public String getPropertiesFilePath() {
return propertiesFilePath;
}
/**
* Set the value of propertiesFilePath.
*
* @param propertiesFilePath new value of propertiesFilePath
*/
public void setPropertiesFilePath(String propertiesFilePath) {
this.propertiesFilePath = propertiesFilePath;
}
//</editor-fold> //</editor-fold>
/** /**
* Executes the Dependency-Check on the dependent libraries. * Executes the Dependency-Check on the dependent libraries. <b>Note</b>, the engine
* object returned from this method must be closed by calling `close()`.
* *
* @return the Engine used to scan the dependencies. * @return the Engine used to scan the dependencies.
* @throws ExceptionCollection a collection of one or more exceptions that * @throws ExceptionCollection a collection of one or more exceptions that
@@ -826,12 +899,22 @@ public class DependencyCheckScanAgent {
populateSettings(); populateSettings();
final Engine engine; final Engine engine;
try { try {
engine = new Engine(); engine = new Engine(settings);
} catch (DatabaseException ex) { } catch (DatabaseException ex) {
throw new ExceptionCollection(ex, true); throw new ExceptionCollection(ex, true);
} }
engine.setDependencies(this.dependencies); if (this.updateOnly) {
engine.analyzeDependencies(); try {
engine.doUpdates();
} catch (UpdateException ex) {
throw new ExceptionCollection("Unable to perform update", ex);
} finally {
engine.close();
}
} else {
engine.setDependencies(this.dependencies);
engine.analyzeDependencies();
}
return engine; return engine;
} }
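The updateOnly flag above means the agent can be used purely to refresh the local data directory. A hedged configuration sketch using only setters added in this diff; the execute() entry point and its exception contract are assumptions, as they are not part of the hunks shown here:

final DependencyCheckScanAgent agent = new DependencyCheckScanAgent();
agent.setUpdateOnly(true);                                     // refresh the NVD data, skip analysis and reporting
agent.setPropertiesFilePath("dependency-check.properties");    // hypothetical side-loaded settings file
agent.setCpeStartsWithFilter("cpe:/a:");                       // only import application CPEs
try {
    agent.execute();                                           // assumed entry point (not shown in this diff)
} catch (ScanAgentException ex) {
    // the update failed; see the debug log for details
}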
@@ -840,21 +923,15 @@ public class DependencyCheckScanAgent {
* *
* @param engine a dependency-check engine * @param engine a dependency-check engine
* @param outDirectory the directory to write the reports to * @param outDirectory the directory to write the reports to
* @throws ScanAgentException thrown if there is an error generating the
* report
*/ */
private void generateExternalReports(Engine engine, File outDirectory) { private void generateExternalReports(Engine engine, File outDirectory) throws ScanAgentException {
DatabaseProperties prop = null;
try (CveDB cve = CveDB.getInstance()) {
prop = cve.getDatabaseProperties();
} catch (DatabaseException ex) {
//TODO shouldn't this be a fatal exception
LOGGER.debug("Unable to retrieve DB Properties", ex);
}
final ReportGenerator r = new ReportGenerator(this.applicationName, engine.getDependencies(), engine.getAnalyzers(), prop);
try { try {
r.generateReports(outDirectory.getCanonicalPath(), this.reportFormat.name()); engine.writeReports(applicationName, outDirectory, this.reportFormat.name());
} catch (IOException | ReportException ex) { } catch (ReportException ex) {
LOGGER.error("Unexpected exception occurred during analysis; please see the verbose error log for more details."); LOGGER.debug("Unexpected exception occurred during analysis; please see the verbose error log for more details.", ex);
LOGGER.debug("", ex); throw new ScanAgentException("Error generating the report", ex);
} }
} }
@@ -864,40 +941,50 @@ public class DependencyCheckScanAgent {
* proxy server, port, and connection timeout. * proxy server, port, and connection timeout.
*/ */
private void populateSettings() { private void populateSettings() {
Settings.initialize(); settings = new Settings();
if (dataDirectory != null) { if (dataDirectory != null) {
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory); settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
} else { } else {
final File jarPath = new File(DependencyCheckScanAgent.class.getProtectionDomain().getCodeSource().getLocation().getPath()); final File jarPath = new File(DependencyCheckScanAgent.class.getProtectionDomain().getCodeSource().getLocation().getPath());
final File base = jarPath.getParentFile(); final File base = jarPath.getParentFile();
final String sub = Settings.getString(Settings.KEYS.DATA_DIRECTORY); final String sub = settings.getString(Settings.KEYS.DATA_DIRECTORY);
final File dataDir = new File(base, sub); final File dataDir = new File(base, sub);
Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath()); settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
}
if (propertiesFilePath != null) {
try {
settings.mergeProperties(propertiesFilePath);
LOGGER.info("Successfully loaded user-defined properties");
} catch (IOException e) {
LOGGER.error("Unable to merge user-defined properties", e);
LOGGER.error("Continuing execution");
}
} }
Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate); settings.setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer); settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, proxyServer);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort); settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, proxyPort);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername); settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, proxyUsername);
Settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword); settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, proxyPassword);
Settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout); settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
Settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile); settings.setStringIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled); settings.setStringIfNotEmpty(Settings.KEYS.CVE_CPE_STARTS_WITH_FILTER, cpeStartsWithFilter);
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_CENTRAL_URL, centralUrl); settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled);
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled); settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_CENTRAL_URL, centralUrl);
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl); settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled);
Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy); settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName); settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath); settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString); settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser); settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
Settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword); settings.setStringIfNotEmpty(Settings.KEYS.DB_USER, databaseUser);
Settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions); settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD, databasePassword);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified); settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified); settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base); settings.setStringIfNotEmpty(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
Settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base); settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
Settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono); settings.setStringIfNotEmpty(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
} }
/** /**
@@ -911,14 +998,16 @@ public class DependencyCheckScanAgent {
Engine engine = null; Engine engine = null;
try { try {
engine = executeDependencyCheck(); engine = executeDependencyCheck();
if (this.generateReport) { if (!this.updateOnly) {
generateExternalReports(engine, new File(this.reportOutputDirectory)); if (this.generateReport) {
} generateExternalReports(engine, new File(this.reportOutputDirectory));
if (this.showSummary) { }
showSummary(engine.getDependencies()); if (this.showSummary) {
} showSummary(engine.getDependencies());
if (this.failBuildOnCVSS <= 10) { }
checkForFailure(engine.getDependencies()); if (this.failBuildOnCVSS <= 10) {
checkForFailure(engine.getDependencies());
}
} }
} catch (ExceptionCollection ex) { } catch (ExceptionCollection ex) {
if (ex.isFatal()) { if (ex.isFatal()) {
@@ -927,9 +1016,9 @@ public class DependencyCheckScanAgent {
} }
throw new ScanAgentException("One or more exceptions occurred during analysis; please see the debug log for more details.", ex); throw new ScanAgentException("One or more exceptions occurred during analysis; please see the debug log for more details.", ex);
} finally { } finally {
Settings.cleanup(true); settings.cleanup(true);
if (engine != null) { if (engine != null) {
engine.cleanup(); engine.close();
} }
} }
return engine; return engine;
@@ -943,7 +1032,7 @@ public class DependencyCheckScanAgent {
* @throws org.owasp.dependencycheck.exception.ScanAgentException thrown if * @throws org.owasp.dependencycheck.exception.ScanAgentException thrown if
* there is an exception executing the scan. * there is an exception executing the scan.
*/ */
private void checkForFailure(List<Dependency> dependencies) throws ScanAgentException { private void checkForFailure(Dependency[] dependencies) throws ScanAgentException {
final StringBuilder ids = new StringBuilder(); final StringBuilder ids = new StringBuilder();
for (Dependency d : dependencies) { for (Dependency d : dependencies) {
boolean addName = true; boolean addName = true;
@@ -960,10 +1049,16 @@ public class DependencyCheckScanAgent {
} }
} }
if (ids.length() > 0) { if (ids.length() > 0) {
final String msg = String.format("%n%nDependency-Check Failure:%n" final String msg;
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than '%.1f': %s%n" if (showSummary) {
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString()); msg = String.format("%n%nDependency-Check Failure:%n"
+ "One or more dependencies were identified with vulnerabilities that have a CVSS score greater than or equal to '%.1f': %s%n"
+ "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString());
} else {
msg = String.format("%n%nDependency-Check Failure:%n"
+ "One or more dependencies were identified with vulnerabilities.%n%n"
+ "See the dependency-check report for more details.%n%n");
}
throw new ScanAgentException(msg); throw new ScanAgentException(msg);
} }
} }
@@ -974,12 +1069,12 @@ public class DependencyCheckScanAgent {
* *
* @param dependencies a list of dependency objects * @param dependencies a list of dependency objects
*/ */
private void showSummary(List<Dependency> dependencies) { private void showSummary(Dependency[] dependencies) {
final StringBuilder summary = new StringBuilder(); final StringBuilder summary = new StringBuilder();
for (Dependency d : dependencies) { for (Dependency d : dependencies) {
boolean firstEntry = true; boolean firstEntry = true;
final StringBuilder ids = new StringBuilder(); final StringBuilder ids = new StringBuilder();
for (Vulnerability v : d.getVulnerabilities()) { for (Vulnerability v : d.getVulnerabilities(true)) {
if (firstEntry) { if (firstEntry) {
firstEntry = false; firstEntry = false;
} else { } else {


@@ -23,15 +23,17 @@ import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException; import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.InvalidSettingException; import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import javax.annotation.concurrent.ThreadSafe;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
/** /**
* Base class for analyzers to avoid code duplication of initialize and close as * Base class for analyzers to avoid code duplication of prepare and close as
* most analyzers do not need these methods. * most analyzers do not need these methods.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@ThreadSafe
public abstract class AbstractAnalyzer implements Analyzer { public abstract class AbstractAnalyzer implements Analyzer {
/** /**
@@ -42,6 +44,10 @@ public abstract class AbstractAnalyzer implements Analyzer {
* A flag indicating whether or not the analyzer is enabled. * A flag indicating whether or not the analyzer is enabled.
*/ */
private volatile boolean enabled = true; private volatile boolean enabled = true;
/**
* The configured settings.
*/
private Settings settings;
/** /**
* Get the value of enabled. * Get the value of enabled.
@@ -63,41 +69,56 @@ public abstract class AbstractAnalyzer implements Analyzer {
} }
/** /**
* <p> * Returns the configured settings.
* Returns the setting key to determine if the analyzer is enabled.</p>
* *
* @return the key for the analyzer's enabled property * @return the configured settings
*/ */
protected abstract String getAnalyzerEnabledSettingKey(); protected Settings getSettings() {
return settings;
/**
* Analyzes a given dependency. If the dependency is an archive, such as a
* WAR or EAR, the contents are extracted, scanned, and added to the list of
* dependencies within the engine.
*
* @param dependency the dependency to analyze
* @param engine the engine scanning
* @throws AnalysisException thrown if there is an analysis exception
*/
protected abstract void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException;
/**
* Initializes a given Analyzer. This will be skipped if the analyzer is
* disabled.
*
* @throws InitializationException thrown if there is an exception
*/
protected void initializeAnalyzer() throws InitializationException {
} }
/** /**
* Closes a given Analyzer. This will be skipped if the analyzer is * Initializes the analyzer with the configured settings.
*
* @param settings the configured settings to use
*/
@Override
public void initialize(Settings settings) {
this.settings = settings;
final String key = getAnalyzerEnabledSettingKey();
try {
this.setEnabled(settings.getBoolean(key, true));
} catch (InvalidSettingException ex) {
final String msg = String.format("Invalid setting for property '%s'", key);
LOGGER.warn(msg);
LOGGER.debug(msg, ex);
}
}
/**
* Initialize the abstract analyzer.
*
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if there is an exception
*/
@Override
public final void prepare(Engine engine) throws InitializationException {
if (isEnabled()) {
prepareAnalyzer(engine);
} else {
LOGGER.debug("{} has been disabled", getName());
}
}
/**
* Prepares a given Analyzer. This will be skipped if the analyzer is
* disabled. * disabled.
* *
* @throws Exception thrown if there is an exception * @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if there is an exception
*/ */
protected void closeAnalyzer() throws Exception { protected void prepareAnalyzer(Engine engine) throws InitializationException {
// Intentionally empty, analyzer will override this if they must close a resource. // Intentionally empty, analyzer will override this if they must prepare anything.
} }
/** /**
@@ -117,26 +138,15 @@ public abstract class AbstractAnalyzer implements Analyzer {
} }
/** /**
* The initialize method does nothing for this Analyzer. * Analyzes a given dependency. If the dependency is an archive, such as a
* WAR or EAR, the contents are extracted, scanned, and added to the list of
* dependencies within the engine.
* *
* @throws InitializationException thrown if there is an exception * @param dependency the dependency to analyze
* @param engine the engine scanning
* @throws AnalysisException thrown if there is an analysis exception
*/ */
@Override protected abstract void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException;
public final void initialize() throws InitializationException {
final String key = getAnalyzerEnabledSettingKey();
try {
this.setEnabled(Settings.getBoolean(key, true));
} catch (InvalidSettingException ex) {
LOGGER.warn("Invalid setting for property '{}'", key);
LOGGER.debug("", ex);
}
if (isEnabled()) {
initializeAnalyzer();
} else {
LOGGER.debug("{} has been disabled", getName());
}
}
/** /**
* The close method does nothing for this Analyzer. * The close method does nothing for this Analyzer.
@@ -150,6 +160,16 @@ public abstract class AbstractAnalyzer implements Analyzer {
} }
} }
/**
* Closes a given Analyzer. This will be skipped if the analyzer is
* disabled.
*
* @throws Exception thrown if there is an exception
*/
protected void closeAnalyzer() throws Exception {
// Intentionally empty, analyzer will override this if they must close a resource.
}
/** /**
* The default is to support parallel processing. * The default is to support parallel processing.
* *
@@ -159,4 +179,13 @@ public abstract class AbstractAnalyzer implements Analyzer {
public boolean supportsParallelProcessing() { public boolean supportsParallelProcessing() {
return true; return true;
} }
/**
* <p>
* Returns the setting key to determine if the analyzer is enabled.</p>
*
* @return the key for the analyzer's enabled property
*/
protected abstract String getAnalyzerEnabledSettingKey();
} }
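With the lifecycle now split into initialize(Settings) and prepare(Engine), a concrete analyzer only overrides the hooks it needs. A minimal sketch; getName() and getAnalysisPhase() come from the Analyzer interface (not shown in these hunks), and the phase constant and settings key are illustrative assumptions:

public class MyCustomAnalyzer extends AbstractAnalyzer {

    @Override
    public String getName() {
        return "My Custom Analyzer";
    }

    @Override
    public AnalysisPhase getAnalysisPhase() {
        return AnalysisPhase.INFORMATION_COLLECTION;   // illustrative choice of phase
    }

    @Override
    protected String getAnalyzerEnabledSettingKey() {
        return "analyzer.mycustom.enabled";            // hypothetical settings key
    }

    @Override
    protected void prepareAnalyzer(Engine engine) throws InitializationException {
        // one-time setup; getSettings() is already populated because
        // initialize(Settings) runs before prepare(Engine)
    }

    @Override
    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
        // per-dependency work goes here
    }
}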


@@ -0,0 +1,125 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2017 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import java.util.HashSet;
import java.util.Set;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* This analyzer ensures dependencies that should be grouped together, to remove
* excess noise from the report, are grouped. An example would be Spring, Spring
* Beans, Spring MVC, etc. If they are all for the same version and have the
* same relative path then these should be grouped into a single dependency
* under the core/main library.</p>
* <p>
* Note, this grouping only works on dependencies with identified CVE
* entries</p>
*
* @author Jeremy Long
*/
@ThreadSafe
public abstract class AbstractDependencyComparingAnalyzer extends AbstractAnalyzer {
/**
* The Logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractDependencyComparingAnalyzer.class);
/**
* a flag indicating if this analyzer has run. This analyzer only runs once.
*/
private boolean analyzed = false;
/**
* Returns a flag indicating if this analyzer has run. This analyzer only
* runs once. Note this is currently only used in the unit tests.
*
* @return a flag indicating if this analyzer has run. This analyzer only
* runs once
*/
protected synchronized boolean getAnalyzed() {
return analyzed;
}
/**
* Does not support parallel processing as it only runs once and then
* operates on <em>all</em> dependencies.
*
* @return whether or not parallel processing is enabled
* @see #analyze(Dependency, Engine)
*/
@Override
public final boolean supportsParallelProcessing() {
return false;
}
/**
* Analyzes a set of dependencies. If they have been found to have the same
* base path and the same set of identifiers they are likely related. The
* related dependencies are bundled into a single reportable item.
*
* @param ignore this analyzer ignores the dependency being analyzed
* @param engine the engine that is scanning the dependencies
* @throws AnalysisException is thrown if there is an error reading the JAR
* file.
*/
@Override
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
if (!analyzed) {
analyzed = true;
final Set<Dependency> dependenciesToRemove = new HashSet<>();
final Dependency[] dependencies = engine.getDependencies();
if (dependencies.length < 2) {
return;
}
for (int x = 0; x < dependencies.length - 1; x++) {
final Dependency dependency = dependencies[x];
if (!dependenciesToRemove.contains(dependency)) {
for (int y = x + 1; y < dependencies.length; y++) {
final Dependency nextDependency = dependencies[y];
if (evaluateDependencies(dependency, nextDependency, dependenciesToRemove)) {
break;
}
}
}
}
for (Dependency d : dependenciesToRemove) {
engine.removeDependency(d);
}
}
}
/**
* Evaluates the two dependencies to determine whether they should be bundled together.
*
* @param dependency a dependency to compare
* @param nextDependency a dependency to compare
* @param dependenciesToRemove a set of dependencies that will be removed
* @return true if a dependency is removed; otherwise false
*/
protected abstract boolean evaluateDependencies(final Dependency dependency,
final Dependency nextDependency, final Set<Dependency> dependenciesToRemove);
}
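A concrete subclass only supplies the comparison; the single pass over the dependency array and the removal of merged items are handled above. A hedged sketch with purely illustrative matching criteria:

@Override
protected boolean evaluateDependencies(final Dependency dependency,
        final Dependency nextDependency, final Set<Dependency> dependenciesToRemove) {
    // illustrative criteria only: identical versions are treated as the same logical library
    if (dependency.getVersion() != null
            && dependency.getVersion().equals(nextDependency.getVersion())) {
        nextDependency.addRelatedDependency(dependency);   // report the pair as a single item
        dependenciesToRemove.add(dependency);              // removed from the engine after the pass
        return true;                                       // stop comparing the removed dependency (see the break above)
    }
    return false;
}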


@@ -25,6 +25,8 @@ import java.io.FileFilter;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet; import java.util.HashSet;
import java.util.Set; import java.util.Set;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.exception.InitializationException; import org.owasp.dependencycheck.exception.InitializationException;
/** /**
@@ -33,6 +35,7 @@ import org.owasp.dependencycheck.exception.InitializationException;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@ThreadSafe
public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implements FileTypeAnalyzer { public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implements FileTypeAnalyzer {
//<editor-fold defaultstate="collapsed" desc="Field definitions, getters, and setters "> //<editor-fold defaultstate="collapsed" desc="Field definitions, getters, and setters ">
@@ -45,16 +48,6 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
*/ */
private boolean filesMatched = false; private boolean filesMatched = false;
/**
* Get the value of filesMatched. A flag indicating whether the scan
* included any file types this analyzer supports.
*
* @return the value of filesMatched
*/
protected boolean isFilesMatched() {
return filesMatched;
}
/** /**
* Set the value of filesMatched. A flag indicating whether the scan * Set the value of filesMatched. A flag indicating whether the scan
* included any file types this analyzer supports. * included any file types this analyzer supports.
@@ -70,13 +63,14 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
/** /**
* Initializes the analyzer. * Initializes the analyzer.
* *
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if there is an exception during * @throws InitializationException thrown if there is an exception during
* initialization * initialization
*/ */
@Override @Override
protected final void initializeAnalyzer() throws InitializationException { protected final void prepareAnalyzer(Engine engine) throws InitializationException {
if (filesMatched) { if (filesMatched) {
initializeFileTypeAnalyzer(); prepareFileTypeAnalyzer(engine);
} else { } else {
this.setEnabled(false); this.setEnabled(false);
} }
@@ -99,12 +93,13 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
protected abstract FileFilter getFileFilter(); protected abstract FileFilter getFileFilter();
/** /**
* Initializes the file type analyzer. * Prepares the file type analyzer for dependency analysis.
* *
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if there is an exception during * @throws InitializationException thrown if there is an exception during
* initialization * initialization
*/ */
protected abstract void initializeFileTypeAnalyzer() throws InitializationException; protected abstract void prepareFileTypeAnalyzer(Engine engine) throws InitializationException;
//</editor-fold> //</editor-fold>
/** /**
@@ -135,7 +130,7 @@ public abstract class AbstractFileTypeAnalyzer extends AbstractAnalyzer implemen
* constructs a new Set that can be used in a final static declaration.</p> * constructs a new Set that can be used in a final static declaration.</p>
* <p> * <p>
* This implementation was copied from * This implementation was copied from
* http://stackoverflow.com/questions/2041778/initialize-java-hashset-values-by-construction</p> * http://stackoverflow.com/questions/2041778/prepare-java-hashset-values-by-construction</p>
* *
* @param strings a list of strings to add to the set. * @param strings a list of strings to add to the set.
* @return a Set of strings. * @return a Set of strings.


@@ -0,0 +1,291 @@
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2017 Steve Springett. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import javax.annotation.concurrent.ThreadSafe;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonString;
import javax.json.JsonValue;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.utils.Checksum;
/**
* An abstract NPM analyzer that contains common methods for concrete
* implementations.
*
* @author Steve Springett
*/
@ThreadSafe
public abstract class AbstractNpmAnalyzer extends AbstractFileTypeAnalyzer {
/**
* The logger.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractNpmAnalyzer.class);
/**
* A descriptor for the type of dependencies processed or added by this
* analyzer.
*/
public static final String NPM_DEPENDENCY_ECOSYSTEM = "npm";
/**
* The file name to scan.
*/
private static final String PACKAGE_JSON = "package.json";
/**
* Determines if the file can be analyzed by the analyzer.
*
* @param pathname the path to the file
* @return true if the file can be analyzed by the given analyzer; otherwise
* false
*/
@Override
public boolean accept(File pathname) {
boolean accept = super.accept(pathname);
if (accept) {
try {
accept |= shouldProcess(pathname);
} catch (AnalysisException ex) {
throw new RuntimeException(ex.getMessage(), ex.getCause());
}
}
return accept;
}
/**
* Determines if the path contains "/node_modules/" (i.e. it is a child
* module). This analyzer does not scan child modules.
*
* @param pathname the path to test
* @return <code>true</code> if the path does not contain "/node_modules/"
* @throws AnalysisException thrown if the canonical path cannot be obtained
* from the given file
*/
protected boolean shouldProcess(File pathname) throws AnalysisException {
try {
// Do not scan the node_modules directory
if (pathname.getCanonicalPath().contains(File.separator + "node_modules" + File.separator)) {
LOGGER.debug("Skipping analysis of node module: " + pathname.getCanonicalPath());
return false;
}
} catch (IOException ex) {
throw new AnalysisException("Unable to process dependency", ex);
}
return true;
}
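// e.g. (illustrative paths, Unix-style separators):
//   /project/package.json                          -> processed
//   /project/node_modules/left-pad/package.json    -> skipped (child module)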
/**
* Construct a dependency object.
*
* @param dependency the parent dependency
* @param name the name of the dependency to create
* @param version the version of the dependency to create
* @param scope the scope of the dependency being created
* @return the generated dependency
*/
protected Dependency createDependency(Dependency dependency, String name, String version, String scope) {
final Dependency nodeModule = new Dependency(new File(dependency.getActualFile() + "?" + name), true);
nodeModule.setEcosystem(NPM_DEPENDENCY_ECOSYSTEM);
//this is virtual - the sha1 is purely for the hyperlink in the final html report
nodeModule.setSha1sum(Checksum.getSHA1Checksum(String.format("%s:%s", name, version)));
nodeModule.setMd5sum(Checksum.getMD5Checksum(String.format("%s:%s", name, version)));
nodeModule.addEvidence(EvidenceType.PRODUCT, "package.json", "name", name, Confidence.HIGHEST);
nodeModule.addEvidence(EvidenceType.VENDOR, "package.json", "name", name, Confidence.HIGH);
nodeModule.addEvidence(EvidenceType.VERSION, "package.json", "version", version, Confidence.HIGHEST);
nodeModule.addProjectReference(dependency.getName() + ": " + scope);
nodeModule.setName(name);
nodeModule.setVersion(version);
nodeModule.addIdentifier("npm", String.format("%s:%s", name, version), null, Confidence.HIGHEST);
return nodeModule;
}
/**
* Processes a part of package.json (as defined by JsonArray) and updates the
* specified dependency with relevant info.
*
* @param engine the dependency-check engine
* @param dependency the Dependency to update
* @param jsonArray the jsonArray to parse
* @param depType the dependency type
*/
protected void processPackage(Engine engine, Dependency dependency, JsonArray jsonArray, String depType) {
final JsonObjectBuilder builder = Json.createObjectBuilder();
for (JsonString str : jsonArray.getValuesAs(JsonString.class)) {
builder.add(str.toString(), "");
}
final JsonObject jsonObject = builder.build();
processPackage(engine, dependency, jsonObject, depType);
}
/**
* Processes a part of package.json (as defined by JsonObject) and updates
* the specified dependency with relevant info.
*
* @param engine the dependency-check engine
* @param dependency the Dependency to update
* @param jsonObject the jsonObject to parse
* @param depType the dependency type
*/
protected void processPackage(Engine engine, Dependency dependency, JsonObject jsonObject, String depType) {
for (int i = 0; i < jsonObject.size(); i++) {
for (Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
final String name = entry.getKey();
String version = "";
if (entry.getValue() != null && entry.getValue().getValueType() == JsonValue.ValueType.STRING) {
version = ((JsonString) entry.getValue()).getString();
}
final Dependency existing = findDependency(engine, name, version);
if (existing == null) {
final Dependency nodeModule = createDependency(dependency, name, version, depType);
engine.addDependency(nodeModule);
} else {
existing.addProjectReference(dependency.getName() + ": " + depType);
}
}
}
}
/**
* Adds information to an evidence collection from the node json
* configuration.
*
* @param dep the dependency to add the evidence
* @param t the type of evidence to add
* @param json information from node.js
* @return the actual string set into evidence
* @param key the key to obtain the data from the json information
*/
private static String addToEvidence(Dependency dep, EvidenceType t, JsonObject json, String key) {
String evidenceStr = null;
if (json.containsKey(key)) {
final JsonValue value = json.get(key);
if (value instanceof JsonString) {
evidenceStr = ((JsonString) value).getString();
dep.addEvidence(t, PACKAGE_JSON, key, evidenceStr, Confidence.HIGHEST);
} else if (value instanceof JsonObject) {
final JsonObject jsonObject = (JsonObject) value;
for (final Map.Entry<String, JsonValue> entry : jsonObject.entrySet()) {
final String property = entry.getKey();
final JsonValue subValue = entry.getValue();
if (subValue instanceof JsonString) {
evidenceStr = ((JsonString) subValue).getString();
dep.addEvidence(t, PACKAGE_JSON,
String.format("%s.%s", key, property),
evidenceStr,
Confidence.HIGHEST);
} else {
LOGGER.warn("JSON sub-value not string as expected: {}", subValue);
}
}
} else {
LOGGER.warn("JSON value not string or JSON object as expected: {}", value);
}
}
return evidenceStr;
}
/**
* Locates the dependency from the list of dependencies that have been
* scanned by the engine.
*
* @param engine the dependency-check engine
* @param name the name of the dependency to find
* @param version the version of the dependency to find
* @return the identified dependency; otherwise null
*/
protected Dependency findDependency(Engine engine, String name, String version) {
for (Dependency d : engine.getDependencies()) {
if (NPM_DEPENDENCY_ECOSYSTEM.equals(d.getEcosystem()) && name.equals(d.getName()) && version != null && d.getVersion() != null) {
final String dependencyVersion = d.getVersion();
if (DependencyBundlingAnalyzer.npmVersionsMatch(version, dependencyVersion)) {
return d;
}
}
}
return null;
}
/**
* Collects evidence from the given JSON for the associated dependency.
*
* @param json the JSON that contains the evidence to collect
* @param dependency the dependency to add the evidence to
*/
public void gatherEvidence(final JsonObject json, Dependency dependency) {
if (json.containsKey("name")) {
final Object value = json.get("name");
if (value instanceof JsonString) {
final String valueString = ((JsonString) value).getString();
dependency.setName(valueString);
dependency.setPackagePath(valueString);
dependency.addEvidence(EvidenceType.PRODUCT, PACKAGE_JSON, "name", valueString, Confidence.HIGHEST);
dependency.addEvidence(EvidenceType.VENDOR, PACKAGE_JSON, "name", valueString, Confidence.HIGH);
} else {
LOGGER.warn("JSON value not string as expected: {}", value);
}
}
final String desc = addToEvidence(dependency, EvidenceType.PRODUCT, json, "description");
dependency.setDescription(desc);
addToEvidence(dependency, EvidenceType.VENDOR, json, "author");
final String version = addToEvidence(dependency, EvidenceType.VERSION, json, "version");
if (version != null) {
dependency.setVersion(version);
dependency.addIdentifier("npm", String.format("%s:%s", dependency.getName(), version), null, Confidence.HIGHEST);
}
// Adds the license if defined in package.json
if (json.containsKey("license")) {
final Object value = json.get("license");
if (value instanceof JsonString) {
dependency.setLicense(json.getString("license"));
} else if (value instanceof JsonArray) {
final JsonArray array = (JsonArray) value;
final StringBuilder sb = new StringBuilder();
boolean addComma = false;
for (int x = 0; x < array.size(); x++) {
if (!array.isNull(x)) {
if (addComma) {
sb.append(", ");
} else {
addComma = true;
}
sb.append(array.getString(x));
}
}
dependency.setLicense(sb.toString());
} else {
dependency.setLicense(json.getJsonObject("license").getString("type"));
}
}
}
}
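For a concrete NPM analyzer, gatherEvidence(...) is typically fed the parsed package.json. A small sketch of that hand-off, assuming the analyzer already holds the Dependency for the package.json file; imports for JsonReader, JsonException, and FileInputStream are assumed alongside those shown above:

// sketch: parse package.json and collect evidence from it
final File packageJson = dependency.getActualFile();
try (JsonReader reader = Json.createReader(new FileInputStream(packageJson))) {
    final JsonObject json = reader.readObject();
    gatherEvidence(json, dependency);     // sets name/version and adds product/vendor/version evidence
} catch (IOException | JsonException ex) {
    throw new AnalysisException("Unable to parse " + packageJson, ex);
}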


@@ -22,9 +22,14 @@ import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.net.URL; import java.net.URL;
import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException; import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.xml.suppression.SuppressionParseException; import org.owasp.dependencycheck.xml.suppression.SuppressionParseException;
import org.owasp.dependencycheck.xml.suppression.SuppressionParser; import org.owasp.dependencycheck.xml.suppression.SuppressionParser;
@@ -39,18 +44,31 @@ import org.xml.sax.SAXException;
/** /**
* Abstract base suppression analyzer that contains methods for parsing the * Abstract base suppression analyzer that contains methods for parsing the
* suppression xml file. * suppression XML file.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@ThreadSafe
public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer { public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
/** /**
* The Logger for use throughout the class * The Logger for use throughout the class.
*/ */
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractSuppressionAnalyzer.class); private static final Logger LOGGER = LoggerFactory.getLogger(AbstractSuppressionAnalyzer.class);
/**
* The list of suppression rules.
*/
private List<SuppressionRule> rules = new ArrayList<>();
/**
* Get the number of suppression rules.
*
* @return the number of suppression rules
*/
protected int getRuleCount() {
return rules.size();
}
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/** /**
* Returns a list of file EXTENSIONS supported by this analyzer. * Returns a list of file EXTENSIONS supported by this analyzer.
* *
@@ -60,86 +78,130 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
return null; return null;
} }
//</editor-fold>
/** /**
* The initialize method loads the suppression XML file. * The prepare method loads the suppression XML file.
* *
* @param engine a reference the dependency-check engine
* @throws InitializationException thrown if there is an exception * @throws InitializationException thrown if there is an exception
*/ */
@Override @Override
public void initializeAnalyzer() throws InitializationException { public synchronized void prepareAnalyzer(Engine engine) throws InitializationException {
try { if (rules.isEmpty()) {
loadSuppressionData(); try {
} catch (SuppressionParseException ex) { loadSuppressionBaseData();
throw new InitializationException("Error initializing the suppression analyzer", ex); } catch (SuppressionParseException ex) {
throw new InitializationException("Error initializing the suppression analyzer: " + ex.getLocalizedMessage(), ex, true);
}
try {
loadSuppressionData();
} catch (SuppressionParseException ex) {
throw new InitializationException("Warn initializing the suppression analyzer: " + ex.getLocalizedMessage(), ex, false);
}
}
}
@Override
protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
if (rules.isEmpty()) {
return;
}
for (final SuppressionRule rule : rules) {
rule.process(dependency);
} }
} }
/** /**
* The list of suppression rules * Loads all the suppression rules files configured in the {@link Settings}.
*/
private List<SuppressionRule> rules;
/**
* Get the value of rules.
*
* @return the value of rules
*/
public List<SuppressionRule> getRules() {
return rules;
}
/**
* Set the value of rules.
*
* @param rules new value of rules
*/
public void setRules(List<SuppressionRule> rules) {
this.rules = rules;
}
/**
* Loads the suppression rules file.
* *
* @throws SuppressionParseException thrown if the XML cannot be parsed. * @throws SuppressionParseException thrown if the XML cannot be parsed.
*/ */
private void loadSuppressionData() throws SuppressionParseException { private void loadSuppressionData() throws SuppressionParseException {
final List<SuppressionRule> ruleList = new ArrayList<>();
final SuppressionParser parser = new SuppressionParser(); final SuppressionParser parser = new SuppressionParser();
File file = null; final String[] suppressionFilePaths = getSettings().getArray(Settings.KEYS.SUPPRESSION_FILE);
final List<String> failedLoadingFiles = new ArrayList<>();
if (suppressionFilePaths != null && suppressionFilePaths.length > 0) {
// Load all the suppression file paths
for (final String suppressionFilePath : suppressionFilePaths) {
try {
ruleList.addAll(loadSuppressionFile(parser, suppressionFilePath));
} catch (SuppressionParseException ex) {
final String msg = String.format("Failed to load %s, caused by %s. ", suppressionFilePath, ex.getMessage());
failedLoadingFiles.add(msg);
}
}
}
LOGGER.debug("{} suppression rules were loaded.", ruleList.size());
rules.addAll(ruleList);
if (!failedLoadingFiles.isEmpty()) {
LOGGER.debug("{} suppression files failed to load.", failedLoadingFiles.size());
final StringBuilder sb = new StringBuilder();
for (String item : failedLoadingFiles) {
sb.append(item);
}
throw new SuppressionParseException(sb.toString());
}
}
/**
* Loads the base suppression rules file bundled with dependency-check.
*
* @throws SuppressionParseException thrown if the XML cannot be parsed.
*/
private void loadSuppressionBaseData() throws SuppressionParseException {
final SuppressionParser parser = new SuppressionParser();
List<SuppressionRule> ruleList;
try { try {
final InputStream in = this.getClass().getClassLoader().getResourceAsStream("dependencycheck-base-suppression.xml"); final InputStream in = FileUtils.getResourceAsStream("dependencycheck-base-suppression.xml");
rules = parser.parseSuppressionRules(in); ruleList = parser.parseSuppressionRules(in);
} catch (SAXException ex) { } catch (SAXException ex) {
throw new SuppressionParseException("Unable to parse the base suppression data file", ex); throw new SuppressionParseException("Unable to parse the base suppression data file", ex);
} }
final String suppressionFilePath = Settings.getString(Settings.KEYS.SUPPRESSION_FILE); rules.addAll(ruleList);
if (suppressionFilePath == null) { }
return;
} /**
* Load a single suppression rules file from the path provided using the
* parser provided.
*
* @param parser the parser to use for loading the file
* @param suppressionFilePath the path to load
* @return the list of loaded suppression rules
* @throws SuppressionParseException thrown if the suppression file cannot
* be loaded and parsed.
*/
private List<SuppressionRule> loadSuppressionFile(final SuppressionParser parser,
final String suppressionFilePath) throws SuppressionParseException {
LOGGER.debug("Loading suppression rules from '{}'", suppressionFilePath);
final List<SuppressionRule> list = new ArrayList<>();
File file = null;
boolean deleteTempFile = false; boolean deleteTempFile = false;
try { try {
final Pattern uriRx = Pattern.compile("^(https?|file)\\:.*", Pattern.CASE_INSENSITIVE); final Pattern uriRx = Pattern.compile("^(https?|file)\\:.*", Pattern.CASE_INSENSITIVE);
if (uriRx.matcher(suppressionFilePath).matches()) { if (uriRx.matcher(suppressionFilePath).matches()) {
deleteTempFile = true; deleteTempFile = true;
file = FileUtils.getTempFile("suppression", "xml"); file = getSettings().getTempFile("suppression", "xml");
final URL url = new URL(suppressionFilePath); final URL url = new URL(suppressionFilePath);
final Downloader downloader = new Downloader(getSettings());
try { try {
Downloader.fetchFile(url, file, false); downloader.fetchFile(url, file, false);
} catch (DownloadFailedException ex) { } catch (DownloadFailedException ex) {
Downloader.fetchFile(url, file, true); LOGGER.trace("Failed download - first attempt", ex);
downloader.fetchFile(url, file, true);
} }
} else { } else {
file = new File(suppressionFilePath); file = new File(suppressionFilePath);
if (!file.exists()) { if (!file.exists()) {
try (InputStream suppressionsFromClasspath = this.getClass().getClassLoader().getResourceAsStream(suppressionFilePath)) { try (InputStream suppressionsFromClasspath = FileUtils.getResourceAsStream(suppressionFilePath)) {
if (suppressionsFromClasspath != null) { if (suppressionsFromClasspath != null) {
deleteTempFile = true; deleteTempFile = true;
file = FileUtils.getTempFile("suppression", "xml"); file = getSettings().getTempFile("suppression", "xml");
try { try {
org.apache.commons.io.FileUtils.copyInputStreamToFile(suppressionsFromClasspath, file); org.apache.commons.io.FileUtils.copyInputStreamToFile(suppressionsFromClasspath, file);
} catch (IOException ex) { } catch (IOException ex) {
throwSuppressionParseException("Unable to locate suppressions file in classpath", ex); throwSuppressionParseException("Unable to locate suppressions file in classpath", ex, suppressionFilePath);
} }
} }
} }
@@ -147,13 +209,12 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
} }
if (file != null) { if (file != null) {
if (!file.exists()) { if (!file.exists()) {
final String msg = String.format("Suppression file '%s' does not exists", file.getPath()); final String msg = String.format("Suppression file '%s' does not exist", file.getPath());
LOGGER.warn(msg); LOGGER.warn(msg);
throw new SuppressionParseException(msg); throw new SuppressionParseException(msg);
} }
try { try {
rules.addAll(parser.parseSuppressionRules(file)); list.addAll(parser.parseSuppressionRules(file));
LOGGER.debug("{} suppression rules were loaded.", rules.size());
} catch (SuppressionParseException ex) { } catch (SuppressionParseException ex) {
LOGGER.warn("Unable to parse suppression xml file '{}'", file.getPath()); LOGGER.warn("Unable to parse suppression xml file '{}'", file.getPath());
LOGGER.warn(ex.getMessage()); LOGGER.warn(ex.getMessage());
@@ -161,18 +222,19 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
} }
} }
} catch (DownloadFailedException ex) { } catch (DownloadFailedException ex) {
throwSuppressionParseException("Unable to fetch the configured suppression file", ex); throwSuppressionParseException("Unable to fetch the configured suppression file", ex, suppressionFilePath);
} catch (MalformedURLException ex) { } catch (MalformedURLException ex) {
throwSuppressionParseException("Configured suppression file has an invalid URL", ex); throwSuppressionParseException("Configured suppression file has an invalid URL", ex, suppressionFilePath);
} catch (SuppressionParseException ex) { } catch (SuppressionParseException ex) {
throw ex; throw ex;
} catch (IOException ex) { } catch (IOException ex) {
throwSuppressionParseException("Unable to create temp file for suppressions", ex); throwSuppressionParseException("Unable to create temp file for suppressions", ex, suppressionFilePath);
} finally { } finally {
if (deleteTempFile && file != null) { if (deleteTempFile && file != null) {
FileUtils.delete(file); FileUtils.delete(file);
} }
} }
return list;
} }
/** /**
@@ -180,11 +242,12 @@ public abstract class AbstractSuppressionAnalyzer extends AbstractAnalyzer {
* *
* @param message the exception message * @param message the exception message
* @param exception the cause of the exception * @param exception the cause of the exception
* @param suppressionFilePath the path to the suppression file
* @throws SuppressionParseException throws the generated * @throws SuppressionParseException throws the generated
* SuppressionParseException * SuppressionParseException
*/ */
private void throwSuppressionParseException(String message, Exception exception) throws SuppressionParseException { private void throwSuppressionParseException(String message, Exception exception, String suppressionFilePath) throws SuppressionParseException {
LOGGER.warn(message); LOGGER.warn(String.format(message + "'%s'", suppressionFilePath));
LOGGER.debug("", exception); LOGGER.debug("", exception);
throw new SuppressionParseException(message, exception); throw new SuppressionParseException(message, exception);
} }
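Each configured suppression path above is classified with the ^(https?|file): pattern: matches are downloaded to a temporary file, while anything else is treated as a local file and, if that does not exist, as a classpath resource. A compact sketch of just that classification step (hypothetical class and method names, not the analyzer's API):

import java.util.regex.Pattern;

public final class SuppressionPathDemo {
    // Same case-insensitive pattern the analyzer uses to detect remote/URI sources.
    private static final Pattern URI_RX = Pattern.compile("^(https?|file)\\:.*", Pattern.CASE_INSENSITIVE);

    // Hypothetical classification mirroring loadSuppressionFile's branching.
    static String classify(String suppressionFilePath) {
        if (URI_RX.matcher(suppressionFilePath).matches()) {
            return "download to temp file";
        }
        return "local file, falling back to classpath resource";
    }

    public static void main(String[] args) {
        System.out.println(classify("https://example.org/suppressions.xml")); // download to temp file
        System.out.println(classify("config/suppressions.xml"));              // local file / classpath
    }
}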

View File

@@ -21,11 +21,22 @@ import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.exception.InitializationException; import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.Settings;
/** /**
* <p>
* An interface that defines an Analyzer that is used to identify Dependencies. * An interface that defines an Analyzer that is used to identify Dependencies.
* An analyzer will collect information about the dependency in the form of * An analyzer will collect information about the dependency in the form of
* Evidence. * Evidence.</p>
* <p>
* When the {@link org.owasp.dependencycheck.Engine} executes it will load the
* analyzers and call the methods in the following order:</p>
* <ol>
* <li>{@link #initialize(org.owasp.dependencycheck.utils.Settings)}</li>
* <li>{@link #prepare(org.owasp.dependencycheck.Engine)}</li>
* <li>{@link #analyze(org.owasp.dependencycheck.dependency.Dependency, org.owasp.dependencycheck.Engine)}</li>
* <li>{@link #close()}</li>
* </ol>
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@@ -60,13 +71,21 @@ public interface Analyzer {
AnalysisPhase getAnalysisPhase(); AnalysisPhase getAnalysisPhase();
/** /**
* The initialize method is called (once) prior to the analyze method being * Initializes the analyzer with the configured settings.
*
* @param settings the configured settings
*/
void initialize(Settings settings);
/**
* The prepare method is called (once) prior to the analyze method being
* called on all of the dependencies. * called on all of the dependencies.
* *
* @param engine a reference to the dependency-check engine
* @throws InitializationException is thrown if an exception occurs * @throws InitializationException is thrown if an exception occurs
* initializing the analyzer. * initializing the analyzer.
*/ */
void initialize() throws InitializationException; void prepare(Engine engine) throws InitializationException;
/** /**
* The close method is called after all of the dependencies have been * The close method is called after all of the dependencies have been
@@ -77,16 +96,20 @@ public interface Analyzer {
void close() throws Exception; void close() throws Exception;
/** /**
* Returns whether multiple instances of the same type of analyzer can run in parallel. * Returns whether multiple instances of the same type of analyzer can run
* Note that running analyzers of different types in parallel is not supported at all. * in parallel. Note that running analyzers of different types in parallel
* is not supported at all.
* *
* @return {@code true} if the analyzer supports parallel processing, {@code false} else * @return {@code true} if the analyzer supports parallel processing,
* {@code false} else
*/ */
boolean supportsParallelProcessing(); boolean supportsParallelProcessing();
/** /**
* Get the value of enabled. * Get the value of enabled.
* *
* @return the value of enabled * @return the value of enabled
*/ */
boolean isEnabled(); boolean isEnabled();
} }
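The new Javadoc above documents the order in which the Engine drives the lifecycle: initialize(Settings), then prepare(Engine), then analyze(...) per dependency, then close(). A minimal sketch of that call order, limited to methods shown in this diff and not a description of how the Engine actually schedules phases or parallel work:

import java.util.List;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.Analyzer;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;

public final class AnalyzerLifecycleSketch {
    // Illustrative only: the real Engine applies phases, enablement checks, and
    // optional parallelism around these calls.
    static void runLifecycle(Analyzer analyzer, Settings settings, Engine engine,
            List<Dependency> dependencies) throws Exception {
        analyzer.initialize(settings);       // 1. hand the analyzer its configuration
        analyzer.prepare(engine);            // 2. one-time preparation; may throw InitializationException
        if (analyzer.isEnabled()) {
            for (Dependency d : dependencies) {
                analyzer.analyze(d, engine); // 3. per-dependency analysis
            }
        }
        analyzer.close();                    // 4. release any resources acquired in prepare
    }
}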

View File

@@ -18,20 +18,25 @@
package org.owasp.dependencycheck.analyzer; package org.owasp.dependencycheck.analyzer;
import java.util.ArrayList; import java.util.ArrayList;
import org.slf4j.LoggerFactory;
import static java.util.Arrays.asList;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.ServiceLoader; import java.util.ServiceLoader;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.utils.InvalidSettingException; import org.owasp.dependencycheck.utils.InvalidSettingException;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.LoggerFactory;
/** /**
* The Analyzer Service Loader. This class loads all services that implement * The Analyzer Service Loader. This class loads all services that implement
* org.owasp.dependencycheck.analyzer.Analyzer. * {@link org.owasp.dependencycheck.analyzer.Analyzer}.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@ThreadSafe
public class AnalyzerService { public class AnalyzerService {
/** /**
* The Logger for use throughout the class. * The Logger for use throughout the class.
*/ */
@@ -41,14 +46,21 @@ public class AnalyzerService {
* The service loader for analyzers. * The service loader for analyzers.
*/ */
private final ServiceLoader<Analyzer> service; private final ServiceLoader<Analyzer> service;
/**
* The configured settings.
*/
private final Settings settings;
/** /**
* Creates a new instance of AnalyzerService. * Creates a new instance of AnalyzerService.
* *
* @param classLoader the ClassLoader to use when dynamically loading Analyzer and Update services * @param classLoader the ClassLoader to use when dynamically loading
* Analyzer and Update services
* @param settings the configured settings
*/ */
public AnalyzerService(ClassLoader classLoader) { public AnalyzerService(ClassLoader classLoader, Settings settings) {
service = ServiceLoader.load(Analyzer.class, classLoader); service = ServiceLoader.load(Analyzer.class, classLoader);
this.settings = settings;
} }
/** /**
@@ -57,19 +69,49 @@ public class AnalyzerService {
* @return a list of Analyzers. * @return a list of Analyzers.
*/ */
public List<Analyzer> getAnalyzers() { public List<Analyzer> getAnalyzers() {
return getAnalyzers(AnalysisPhase.values());
}
/**
* Returns a list of all instances of the Analyzer interface that are bound
* to one of the given phases.
*
* @param phases the phases to obtain analyzers for
* @return a list of Analyzers.
*/
public List<Analyzer> getAnalyzers(AnalysisPhase... phases) {
return getAnalyzers(asList(phases));
}
/**
* Returns a list of all instances of the Analyzer interface that are bound
* to one of the given phases.
*
* @param phases the phases to obtain analyzers for
* @return a list of Analyzers
*/
private List<Analyzer> getAnalyzers(List<AnalysisPhase> phases) {
final List<Analyzer> analyzers = new ArrayList<>(); final List<Analyzer> analyzers = new ArrayList<>();
final Iterator<Analyzer> iterator = service.iterator(); final Iterator<Analyzer> iterator = service.iterator();
boolean experimentalEnabled = false; boolean experimentalEnabled = false;
boolean retiredEnabled = false;
try { try {
experimentalEnabled = Settings.getBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, false); experimentalEnabled = settings.getBoolean(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, false);
retiredEnabled = settings.getBoolean(Settings.KEYS.ANALYZER_RETIRED_ENABLED, false);
} catch (InvalidSettingException ex) { } catch (InvalidSettingException ex) {
LOGGER.error("invalid experimental setting", ex); LOGGER.error("invalid experimental or retired setting", ex);
} }
while (iterator.hasNext()) { while (iterator.hasNext()) {
final Analyzer a = iterator.next(); final Analyzer a = iterator.next();
if (!phases.contains(a.getAnalysisPhase())) {
continue;
}
if (!experimentalEnabled && a.getClass().isAnnotationPresent(Experimental.class)) { if (!experimentalEnabled && a.getClass().isAnnotationPresent(Experimental.class)) {
continue; continue;
} }
if (!retiredEnabled && a.getClass().isAnnotationPresent(Retired.class)) {
continue;
}
LOGGER.debug("Loaded Analyzer {}", a.getName()); LOGGER.debug("Loaded Analyzer {}", a.getName());
analyzers.add(a); analyzers.add(a);
} }
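With the phase-filtering overloads added above, callers can request only the analyzers bound to particular phases. A short usage sketch (it assumes Settings has the no-argument constructor introduced with this restructure and that analyzer implementations are on the classpath for the ServiceLoader to find):

import java.util.List;
import org.owasp.dependencycheck.analyzer.AnalysisPhase;
import org.owasp.dependencycheck.analyzer.Analyzer;
import org.owasp.dependencycheck.analyzer.AnalyzerService;
import org.owasp.dependencycheck.utils.Settings;

public final class AnalyzerServiceDemo {
    public static void main(String[] args) {
        final Settings settings = new Settings();
        final AnalyzerService service =
                new AnalyzerService(Thread.currentThread().getContextClassLoader(), settings);
        // All analyzers, across every phase.
        final List<Analyzer> all = service.getAnalyzers();
        // Only analyzers bound to the INITIAL phase.
        final List<Analyzer> initial = service.getAnalyzers(AnalysisPhase.INITIAL);
        System.out.printf("loaded %d analyzers, %d in INITIAL%n", all.size(), initial.size());
    }
}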

View File

@@ -28,6 +28,8 @@ import java.util.Collections;
import java.util.Enumeration; import java.util.Enumeration;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.concurrent.ThreadSafe;
import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream; import org.apache.commons.compress.archivers.ArchiveInputStream;
@@ -61,6 +63,7 @@ import org.slf4j.LoggerFactory;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@ThreadSafe
public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer { public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
@@ -71,7 +74,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* The count of directories created during analysis. This is used for * The count of directories created during analysis. This is used for
* creating temporary directories. * creating temporary directories.
*/ */
private static int dirCount = 0; private static final AtomicInteger DIRECTORY_COUNT = new AtomicInteger(0);
/** /**
* The parent directory for the individual directories per archive. * The parent directory for the individual directories per archive.
*/ */
@@ -80,21 +83,11 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* The max scan depth that the analyzer will recursively extract nested * The max scan depth that the analyzer will recursively extract nested
* archives. * archives.
*/ */
private static final int MAX_SCAN_DEPTH = Settings.getInt("archive.scan.depth", 3); private int maxScanDepth;
/** /**
* Tracks the current scan/extraction depth for nested archives. * The file filter used to filter supported files.
*/ */
private int scanDepth = 0; private FileFilter fileFilter = null;
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "Archive Analyzer";
/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INITIAL;
/** /**
* The set of things we can handle with Zip methods * The set of things we can handle with Zip methods
*/ */
@@ -106,35 +99,41 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/ */
private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz", "bz2", "tbz2"); private static final Set<String> EXTENSIONS = newHashSet("tar", "gz", "tgz", "bz2", "tbz2");
static {
final String additionalZipExt = Settings.getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS);
if (additionalZipExt != null) {
final String[] ext = additionalZipExt.split("\\s*,\\s*");
Collections.addAll(KNOWN_ZIP_EXT, ext);
}
EXTENSIONS.addAll(KNOWN_ZIP_EXT);
}
/** /**
* Detects files with extensions to remove from the engine's collection of * Detects files with extensions to remove from the engine's collection of
* dependencies. * dependencies.
*/ */
private static final FileFilter REMOVE_FROM_ANALYSIS = FileFilterBuilder.newInstance() private static final FileFilter REMOVE_FROM_ANALYSIS = FileFilterBuilder.newInstance()
.addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2").build(); .addExtensions("zip", "tar", "gz", "tgz", "bz2", "tbz2").build();
/**
* The file filter used to filter supported files.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
/** /**
* Detects files with .zip extension. * Detects files with .zip extension.
*/ */
private static final FileFilter ZIP_FILTER = FileFilterBuilder.newInstance().addExtensions("zip").build(); private static final FileFilter ZIP_FILTER = FileFilterBuilder.newInstance().addExtensions("zip").build();
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/**
* The name of the analyzer.
*/
private static final String ANALYZER_NAME = "Archive Analyzer";
/**
* The phase that this analyzer is intended to run in.
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INITIAL;
/**
* Initializes the analyzer with the configured settings.
*
* @param settings the configured settings to use
*/
@Override
public void initialize(Settings settings) {
super.initialize(settings);
initializeSettings();
}
@Override @Override
protected FileFilter getFileFilter() { protected FileFilter getFileFilter() {
return FILTER; return fileFilter;
} }
/** /**
@@ -170,15 +169,16 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
} }
/** /**
* The initialize method does nothing for this Analyzer. * The prepare method creates the temporary directory used during extraction.
* *
* @param engine a reference to the dependency-check engine
* @throws InitializationException is thrown if there is an exception * @throws InitializationException is thrown if there is an exception
* deleting or creating temporary files * deleting or creating temporary files
*/ */
@Override @Override
public void initializeFileTypeAnalyzer() throws InitializationException { public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
try { try {
final File baseDir = Settings.getTempDirectory(); final File baseDir = getSettings().getTempDirectory();
tempFileLocation = File.createTempFile("check", "tmp", baseDir); tempFileLocation = File.createTempFile("check", "tmp", baseDir);
if (!tempFileLocation.delete()) { if (!tempFileLocation.delete()) {
setEnabled(false); setEnabled(false);
@@ -206,31 +206,18 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
@Override @Override
public void closeAnalyzer() throws Exception { public void closeAnalyzer() throws Exception {
if (tempFileLocation != null && tempFileLocation.exists()) { if (tempFileLocation != null && tempFileLocation.exists()) {
LOGGER.debug("Attempting to delete temporary files"); LOGGER.debug("Attempting to delete temporary files from `{}`", tempFileLocation.toString());
final boolean success = FileUtils.delete(tempFileLocation); final boolean success = FileUtils.delete(tempFileLocation);
if (!success && tempFileLocation.exists()) { if (!success && tempFileLocation.exists()) {
final String[] l = tempFileLocation.list(); final String[] l = tempFileLocation.list();
if (l != null && l.length > 0) { if (l != null && l.length > 0) {
LOGGER.warn("Failed to delete some temporary files, see the log for more details"); LOGGER.warn("Failed to delete the Archive Analyzer's temporary files from `{}`, "
+ "see the log for more details", tempFileLocation.toString());
} }
} }
} }
} }
/**
* Does not support parallel processing as it both modifies and iterates
* over the engine's list of dependencies.
*
* @return <code>true</code> if the analyzer supports parallel processing;
* otherwise <code>false</code>
* @see #analyzeDependency(Dependency, Engine)
* @see #findMoreDependencies(Engine, File)
*/
@Override
public boolean supportsParallelProcessing() {
return false;
}
/** /**
* Analyzes a given dependency. If the dependency is an archive, such as a * Analyzes a given dependency. If the dependency is an archive, such as a
* WAR or EAR, the contents are extracted, scanned, and added to the list of * WAR or EAR, the contents are extracted, scanned, and added to the list of
@@ -242,6 +229,22 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
*/ */
@Override @Override
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException { public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
extractAndAnalyze(dependency, engine, 0);
engine.sortDependencies();
}
/**
* Extracts the contents of the archive dependency and scans for additional
* dependencies.
*
* @param dependency the dependency being analyzed
* @param engine the engine doing the analysis
* @param scanDepth the current scan depth; extractAndAnalyze is recursive
* and will, by default, only go 3 levels deep
* @throws AnalysisException thrown if there is a problem analyzing the
* dependencies
*/
private void extractAndAnalyze(Dependency dependency, Engine engine, int scanDepth) throws AnalysisException {
final File f = new File(dependency.getActualFilePath()); final File f = new File(dependency.getActualFilePath());
final File tmpDir = getNextTempDirectory(); final File tmpDir = getNextTempDirectory();
extractFiles(f, tmpDir, engine); extractFiles(f, tmpDir, engine);
@@ -261,14 +264,12 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
d.getFileName()); d.getFileName());
d.setFilePath(displayPath); d.setFilePath(displayPath);
d.setFileName(displayName); d.setFileName(displayName);
d.setProjectReferences(dependency.getProjectReferences()); d.addAllProjectReferences(dependency.getProjectReferences());
//TODO - can we get more evidence from the parent? EAR contains module name, etc. //TODO - can we get more evidence from the parent? EAR contains module name, etc.
//analyze the dependency (i.e. extract files) if it is a supported type. //analyze the dependency (i.e. extract files) if it is a supported type.
if (this.accept(d.getActualFile()) && scanDepth < MAX_SCAN_DEPTH) { if (this.accept(d.getActualFile()) && scanDepth < maxScanDepth) {
scanDepth += 1; extractAndAnalyze(d, engine, scanDepth + 1);
analyze(d, engine);
scanDepth -= 1;
} }
} else { } else {
for (Dependency sub : dependencySet) { for (Dependency sub : dependencySet) {
@@ -288,9 +289,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
} }
if (REMOVE_FROM_ANALYSIS.accept(dependency.getActualFile())) { if (REMOVE_FROM_ANALYSIS.accept(dependency.getActualFile())) {
addDisguisedJarsToDependencies(dependency, engine); addDisguisedJarsToDependencies(dependency, engine);
engine.getDependencies().remove(dependency); engine.removeDependency(dependency);
} }
Collections.sort(engine.getDependencies());
} }
/** /**
@@ -357,8 +357,7 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException thrown if unable to create temporary directory * @throws AnalysisException thrown if unable to create temporary directory
*/ */
private File getNextTempDirectory() throws AnalysisException { private File getNextTempDirectory() throws AnalysisException {
dirCount += 1; final File directory = new File(tempFileLocation, String.valueOf(DIRECTORY_COUNT.incrementAndGet()));
final File directory = new File(tempFileLocation, String.valueOf(dirCount));
//getting an exception for some directories not being able to be created; might be because the directory already exists? //getting an exception for some directories not being able to be created; might be because the directory already exists?
if (directory.exists()) { if (directory.exists()) {
return getNextTempDirectory(); return getNextTempDirectory();
@@ -390,8 +389,9 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
try { try {
fis = new FileInputStream(archive); fis = new FileInputStream(archive);
} catch (FileNotFoundException ex) { } catch (FileNotFoundException ex) {
LOGGER.debug("", ex); final String msg = String.format("Error extracting file `%s`: %s", archive.getAbsolutePath(), ex.getMessage());
throw new AnalysisException("Archive file was not found.", ex); LOGGER.debug(msg, ex);
throw new AnalysisException(msg);
} }
BufferedInputStream in = null; BufferedInputStream in = null;
ZipArchiveInputStream zin = null; ZipArchiveInputStream zin = null;
@@ -473,6 +473,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
boolean stillLooking = true; boolean stillLooking = true;
int chr; int chr;
int nxtChr; int nxtChr;
//CSOFF: InnerAssignment
//CSOFF: NestedIfDepth
while (stillLooking && (chr = in.read()) != -1) { while (stillLooking && (chr = in.read()) != -1) {
if (chr == '\n' || chr == '\r') { if (chr == '\n' || chr == '\r') {
in.mark(4); in.mark(4);
@@ -490,6 +492,8 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
} }
} }
} }
//CSON: InnerAssignment
//CSON: NestedIfDepth
} else { } else {
in.reset(); in.reset();
} }
@@ -603,4 +607,19 @@ public class ArchiveAnalyzer extends AbstractFileTypeAnalyzer {
} }
return isJar; return isJar;
} }
/**
* Initializes settings used by the scanning functions of the archive
* analyzer.
*/
private void initializeSettings() {
maxScanDepth = getSettings().getInt("archive.scan.depth", 3);
final String additionalZipExt = getSettings().getString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS);
if (additionalZipExt != null) {
final String[] ext = additionalZipExt.split("\\s*,\\s*");
Collections.addAll(KNOWN_ZIP_EXT, ext);
}
EXTENSIONS.addAll(KNOWN_ZIP_EXT);
fileFilter = FileFilterBuilder.newInstance().addExtensions(EXTENSIONS).build();
}
} }
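The change above removes the shared scanDepth field and threads the depth through extractAndAnalyze as a parameter, so nested archives are unpacked recursively up to maxScanDepth levels without mutable state. A trimmed-down illustration of that depth handling (hypothetical class and helper names, not the analyzer's real extraction logic):

import java.io.File;

public final class ScanDepthDemo {
    private static final int MAX_SCAN_DEPTH = 3; // mirrors the default for archive.scan.depth

    // Hypothetical stand-in for extractAndAnalyze: recurse into nested archives,
    // passing scanDepth + 1 instead of mutating a shared counter.
    static void extractAndAnalyze(File archive, int scanDepth) {
        System.out.printf("analyzing %s at depth %d%n", archive.getName(), scanDepth);
        for (File nested : listNestedArchives(archive)) {
            if (scanDepth < MAX_SCAN_DEPTH) {
                extractAndAnalyze(nested, scanDepth + 1);
            }
        }
    }

    // Placeholder; the real analyzer extracts the archive and scans the temp directory.
    static File[] listNestedArchives(File archive) {
        return new File[0];
    }

    public static void main(String[] args) {
        extractAndAnalyze(new File("example.war"), 0);
    }
}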

View File

@@ -28,8 +28,8 @@ import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence; import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.utils.FileFilterBuilder; import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.FileUtils;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -42,9 +42,11 @@ import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory; import javax.xml.xpath.XPathFactory;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import javax.annotation.concurrent.ThreadSafe;
import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.ParserConfigurationException;
import org.owasp.dependencycheck.exception.InitializationException; import org.owasp.dependencycheck.exception.InitializationException;
import org.apache.commons.lang3.SystemUtils; import org.apache.commons.lang3.SystemUtils;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.utils.XmlUtils; import org.owasp.dependencycheck.utils.XmlUtils;
/** /**
@@ -54,6 +56,7 @@ import org.owasp.dependencycheck.utils.XmlUtils;
* @author colezlaw * @author colezlaw
* *
*/ */
@ThreadSafe
public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer { public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
@@ -72,6 +75,10 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
* The temp value for GrokAssembly.exe * The temp value for GrokAssembly.exe
*/ */
private File grokAssemblyExe = null; private File grokAssemblyExe = null;
/**
* The temp value for GrokAssembly.exe.config
*/
private File grokAssemblyConfig = null;
/** /**
* Logger * Logger
*/ */
@@ -86,8 +93,8 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
// Use file.separator as a wild guess as to whether this is Windows // Use file.separator as a wild guess as to whether this is Windows
final List<String> args = new ArrayList<>(); final List<String> args = new ArrayList<>();
if (!SystemUtils.IS_OS_WINDOWS) { if (!SystemUtils.IS_OS_WINDOWS) {
if (Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH) != null) { if (getSettings().getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH) != null) {
args.add(Settings.getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH)); args.add(getSettings().getString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH));
} else if (isInPath("mono")) { } else if (isInPath("mono")) {
args.add("mono"); args.add("mono");
} else { } else {
@@ -106,13 +113,16 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
* @throws AnalysisException if anything goes sideways * @throws AnalysisException if anything goes sideways
*/ */
@Override @Override
public void analyzeDependency(Dependency dependency, Engine engine) public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
throws AnalysisException { final File test = new File(dependency.getActualFilePath());
if (!test.isFile()) {
throw new AnalysisException(String.format("%s does not exist and cannot be analyzed by dependency-check",
dependency.getActualFilePath()));
}
if (grokAssemblyExe == null) { if (grokAssemblyExe == null) {
LOGGER.warn("GrokAssembly didn't get deployed"); LOGGER.warn("GrokAssembly didn't get deployed");
return; return;
} }
final List<String> args = buildArgumentList(); final List<String> args = buildArgumentList();
if (args == null) { if (args == null) {
LOGGER.warn("Assembly Analyzer was unable to execute"); LOGGER.warn("Assembly Analyzer was unable to execute");
@@ -137,6 +147,7 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
try { try {
rc = proc.waitFor(); rc = proc.waitFor();
} catch (InterruptedException ie) { } catch (InterruptedException ie) {
Thread.currentThread().interrupt();
return; return;
} }
if (rc == 3) { if (rc == 3) {
@@ -159,20 +170,17 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
final String version = xpath.evaluate("/assembly/version", doc); final String version = xpath.evaluate("/assembly/version", doc);
if (version != null) { if (version != null) {
dependency.getVersionEvidence().addEvidence(new Evidence("grokassembly", "version", dependency.addEvidence(EvidenceType.VERSION, "grokassembly", "version", version, Confidence.HIGHEST);
version, Confidence.HIGHEST));
} }
final String vendor = xpath.evaluate("/assembly/company", doc); final String vendor = xpath.evaluate("/assembly/company", doc);
if (vendor != null) { if (vendor != null) {
dependency.getVendorEvidence().addEvidence(new Evidence("grokassembly", "vendor", dependency.addEvidence(EvidenceType.VENDOR, "grokassembly", "vendor", vendor, Confidence.HIGH);
vendor, Confidence.HIGH));
} }
final String product = xpath.evaluate("/assembly/product", doc); final String product = xpath.evaluate("/assembly/product", doc);
if (product != null) { if (product != null) {
dependency.getProductEvidence().addEvidence(new Evidence("grokassembly", "product", dependency.addEvidence(EvidenceType.PRODUCT, "grokassembly", "product", product, Confidence.HIGH);
product, Confidence.HIGH));
} }
} catch (ParserConfigurationException pce) { } catch (ParserConfigurationException pce) {
@@ -186,36 +194,36 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.error("----------------------------------------------------"); LOGGER.error("----------------------------------------------------");
throw new AnalysisException("Couldn't parse Assembly Analyzer results (GrokAssembly)", saxe); throw new AnalysisException("Couldn't parse Assembly Analyzer results (GrokAssembly)", saxe);
} }
// This shouldn't happen
} }
/** /**
* Initialize the analyzer. In this case, extract GrokAssembly.exe to a * Initialize the analyzer. In this case, extract GrokAssembly.exe to a
* temporary location. * temporary location.
* *
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if anything goes wrong * @throws InitializationException thrown if anything goes wrong
*/ */
@Override @Override
public void initializeFileTypeAnalyzer() throws InitializationException { public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
final File tempFile; final File tempFile;
final String cfg; final File cfgFile;
try { try {
tempFile = File.createTempFile("GKA", ".exe", Settings.getTempDirectory()); tempFile = File.createTempFile("GKA", ".exe", getSettings().getTempDirectory());
cfg = tempFile.getPath() + ".config"; cfgFile = new File(tempFile.getPath() + ".config");
} catch (IOException ex) { } catch (IOException ex) {
setEnabled(false); setEnabled(false);
throw new InitializationException("Unable to create temporary file for the assembly analyzer", ex); throw new InitializationException("Unable to create temporary file for the assembly analyzer", ex);
} }
try (FileOutputStream fos = new FileOutputStream(tempFile); try (FileOutputStream fos = new FileOutputStream(tempFile);
InputStream is = AssemblyAnalyzer.class.getClassLoader().getResourceAsStream("GrokAssembly.exe"); InputStream is = FileUtils.getResourceAsStream("GrokAssembly.exe");
FileOutputStream fosCfg = new FileOutputStream(cfg); FileOutputStream fosCfg = new FileOutputStream(cfgFile);
InputStream isCfg = AssemblyAnalyzer.class.getClassLoader().getResourceAsStream("GrokAssembly.exe.config")) { InputStream isCfg = FileUtils.getResourceAsStream("GrokAssembly.exe.config")) {
IOUtils.copy(is, fos); IOUtils.copy(is, fos);
grokAssemblyExe = tempFile; grokAssemblyExe = tempFile;
LOGGER.debug("Extracted GrokAssembly.exe to {}", grokAssemblyExe.getPath()); LOGGER.debug("Extracted GrokAssembly.exe to {}", grokAssemblyExe.getPath());
IOUtils.copy(isCfg, fosCfg); IOUtils.copy(isCfg, fosCfg);
LOGGER.debug("Extracted GrokAssembly.exe.config to {}", cfg); grokAssemblyConfig = cfgFile;
LOGGER.debug("Extracted GrokAssembly.exe.config to {}", cfgFile);
} catch (IOException ioe) { } catch (IOException ioe) {
this.setEnabled(false); this.setEnabled(false);
LOGGER.warn("Could not extract GrokAssembly.exe: {}", ioe.getMessage()); LOGGER.warn("Could not extract GrokAssembly.exe: {}", ioe.getMessage());
@@ -286,6 +294,15 @@ public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.debug("Can't delete temporary GrokAssembly.exe"); LOGGER.debug("Can't delete temporary GrokAssembly.exe");
grokAssemblyExe.deleteOnExit(); grokAssemblyExe.deleteOnExit();
} }
try {
if (grokAssemblyConfig != null && !grokAssemblyConfig.delete()) {
LOGGER.debug("Unable to delete temporary GrokAssembly.exe.config; attempting delete on exit");
grokAssemblyConfig.deleteOnExit();
}
} catch (SecurityException se) {
LOGGER.debug("Can't delete temporary GrokAssembly.exe.config");
grokAssemblyConfig.deleteOnExit();
}
} }
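The evidence changes above move from the old per-collection calls to the typed addEvidence(EvidenceType, ...) API, but the values still come from GrokAssembly's XML output evaluated with XPath. A standalone sketch of that extraction step, using an inlined sample document rather than GrokAssembly's actual output:

import java.io.StringReader;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;

public final class GrokAssemblyXPathDemo {
    public static void main(String[] args) throws Exception {
        // Illustrative stand-in for GrokAssembly.exe output; the real schema may differ.
        final String xml = "<assembly><version>1.2.3</version>"
                + "<company>Example Corp</company><product>Example.dll</product></assembly>";
        final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                .parse(new InputSource(new StringReader(xml)));
        final XPath xpath = XPathFactory.newInstance().newXPath();
        // Same expressions the analyzer evaluates before adding VERSION/VENDOR/PRODUCT evidence.
        System.out.println("version: " + xpath.evaluate("/assembly/version", doc));
        System.out.println("vendor:  " + xpath.evaluate("/assembly/company", doc));
        System.out.println("product: " + xpath.evaluate("/assembly/product", doc));
    }
}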
/** /**

View File

@@ -22,7 +22,6 @@ import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence; import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceCollection;
import org.owasp.dependencycheck.utils.FileFilterBuilder; import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
import org.owasp.dependencycheck.utils.UrlStringUtils; import org.owasp.dependencycheck.utils.UrlStringUtils;
@@ -33,6 +32,7 @@ import java.io.IOException;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.exception.InitializationException; import org.owasp.dependencycheck.exception.InitializationException;
/** /**
@@ -168,15 +168,14 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
final String contents = getFileContents(actualFile); final String contents = getFileContents(actualFile);
if (!contents.isEmpty()) { if (!contents.isEmpty()) {
if (isOutputScript) { if (isOutputScript) {
extractConfigureScriptEvidence(dependency, name, extractConfigureScriptEvidence(dependency, name, contents);
contents);
} else { } else {
gatherEvidence(dependency, name, contents); gatherEvidence(dependency, name, contents);
} }
} }
} }
} else { } else {
engine.getDependencies().remove(dependency); engine.removeDependency(dependency);
} }
} }
@@ -195,17 +194,13 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
final String value = matcher.group(2); final String value = matcher.group(2);
if (!value.isEmpty()) { if (!value.isEmpty()) {
if (variable.endsWith("NAME")) { if (variable.endsWith("NAME")) {
dependency.getProductEvidence().addEvidence(name, variable, dependency.addEvidence(EvidenceType.PRODUCT, name, variable, value, Confidence.HIGHEST);
value, Confidence.HIGHEST);
} else if ("VERSION".equals(variable)) { } else if ("VERSION".equals(variable)) {
dependency.getVersionEvidence().addEvidence(name, variable, dependency.addEvidence(EvidenceType.VERSION, name, variable, value, Confidence.HIGHEST);
value, Confidence.HIGHEST);
} else if ("BUGREPORT".equals(variable)) { } else if ("BUGREPORT".equals(variable)) {
dependency.getVendorEvidence().addEvidence(name, variable, dependency.addEvidence(EvidenceType.VENDOR, name, variable, value, Confidence.HIGH);
value, Confidence.HIGH);
} else if ("URL".equals(variable)) { } else if ("URL".equals(variable)) {
dependency.getVendorEvidence().addEvidence(name, variable, dependency.addEvidence(EvidenceType.VENDOR, name, variable, value, Confidence.HIGH);
value, Confidence.HIGH);
} }
} }
} }
@@ -239,27 +234,19 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
String contents) { String contents) {
final Matcher matcher = AC_INIT_PATTERN.matcher(contents); final Matcher matcher = AC_INIT_PATTERN.matcher(contents);
if (matcher.find()) { if (matcher.find()) {
final EvidenceCollection productEvidence = dependency dependency.addEvidence(EvidenceType.PRODUCT, name, "Package", matcher.group(1), Confidence.HIGHEST);
.getProductEvidence(); dependency.addEvidence(EvidenceType.VERSION, name, "Package Version", matcher.group(2), Confidence.HIGHEST);
productEvidence.addEvidence(name, "Package", matcher.group(1),
Confidence.HIGHEST);
dependency.getVersionEvidence().addEvidence(name,
"Package Version", matcher.group(2), Confidence.HIGHEST);
final EvidenceCollection vendorEvidence = dependency
.getVendorEvidence();
if (null != matcher.group(3)) { if (null != matcher.group(3)) {
vendorEvidence.addEvidence(name, "Bug report address", dependency.addEvidence(EvidenceType.VENDOR, name, "Bug report address", matcher.group(4), Confidence.HIGH);
matcher.group(4), Confidence.HIGH);
} }
if (null != matcher.group(5)) { if (null != matcher.group(5)) {
productEvidence.addEvidence(name, "Tarname", matcher.group(6), dependency.addEvidence(EvidenceType.PRODUCT, name, "Tarname", matcher.group(6), Confidence.HIGH);
Confidence.HIGH);
} }
if (null != matcher.group(7)) { if (null != matcher.group(7)) {
final String url = matcher.group(8); final String url = matcher.group(8);
if (UrlStringUtils.isUrl(url)) { if (UrlStringUtils.isUrl(url)) {
vendorEvidence.addEvidence(name, "URL", url, dependency.addEvidence(EvidenceType.VENDOR, name, "URL", url, Confidence.HIGH);
Confidence.HIGH);
} }
} }
} }
@@ -268,11 +255,12 @@ public class AutoconfAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* Initializes the file type analyzer. * Initializes the file type analyzer.
* *
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if there is an exception during * @throws InitializationException thrown if there is an exception during
* initialization * initialization
*/ */
@Override @Override
protected void initializeFileTypeAnalyzer() throws InitializationException { protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
// No initialization needed. // No initialization needed.
} }
} }
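For context, the evidence above comes from Autoconf's AC_INIT macro, whose arguments carry the package name, version, and optionally a bug-report address and URL. A simplified, hypothetical extraction follows; this is not the analyzer's actual AC_INIT_PATTERN, which accepts more argument forms:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public final class AcInitDemo {
    // Hypothetical simplified pattern: AC_INIT([name], [version], [bug-report]).
    private static final Pattern AC_INIT = Pattern.compile(
            "AC_INIT\\(\\[([^\\]]+)\\],\\s*\\[([^\\]]+)\\](?:,\\s*\\[([^\\]]+)\\])?\\)");

    public static void main(String[] args) {
        final String configureAc = "AC_INIT([libexample], [2.4.1], [bugs@example.org])";
        final Matcher m = AC_INIT.matcher(configureAc);
        if (m.find()) {
            System.out.println("product: " + m.group(1)); // -> PRODUCT evidence
            System.out.println("version: " + m.group(2)); // -> VERSION evidence
            System.out.println("vendor:  " + m.group(3)); // -> VENDOR evidence (bug report)
        }
    }
}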

View File

@@ -32,12 +32,10 @@ import org.slf4j.LoggerFactory;
import java.io.File; import java.io.File;
import java.io.FileFilter; import java.io.FileFilter;
import java.io.IOException; import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.exception.InitializationException; import org.owasp.dependencycheck.exception.InitializationException;
/** /**
@@ -57,6 +55,12 @@ import org.owasp.dependencycheck.exception.InitializationException;
@Experimental @Experimental
public class CMakeAnalyzer extends AbstractFileTypeAnalyzer { public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
/**
* A descriptor for the type of dependencies processed or added by this
* analyzer.
*/
public static final String DEPENDENCY_ECOSYSTEM = "CMAKE";
/** /**
* The logger. * The logger.
*/ */
@@ -65,8 +69,7 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* Used when compiling file scanning regex patterns. * Used when compiling file scanning regex patterns.
*/ */
private static final int REGEX_OPTIONS = Pattern.DOTALL private static final int REGEX_OPTIONS = Pattern.DOTALL | Pattern.CASE_INSENSITIVE | Pattern.MULTILINE;
| Pattern.CASE_INSENSITIVE | Pattern.MULTILINE;
/** /**
* Regex to extract the product information. * Regex to extract the product information.
@@ -81,10 +84,8 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
* *
* Group 2: Version * Group 2: Version
*/ */
private static final Pattern SET_VERSION = Pattern private static final Pattern SET_VERSION = Pattern.compile(
.compile( "^ *set\\s*\\(\\s*(\\w+)_version\\s+\"?(\\d+(?:\\.\\d+)+)[\\s\"]?\\)", REGEX_OPTIONS);
"^ *set\\s*\\(\\s*(\\w+)_version\\s+\"?(\\d+(?:\\.\\d+)+)[\\s\"]?\\)",
REGEX_OPTIONS);
/** /**
* Detects files that can be analyzed. * Detects files that can be analyzed.
@@ -125,17 +126,13 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
/** /**
* Initializes the analyzer. * Initializes the analyzer.
* *
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if an exception occurs getting an * @throws InitializationException thrown if an exception occurs getting an
* instance of SHA1 * instance of SHA1
*/ */
@Override @Override
protected void initializeFileTypeAnalyzer() throws InitializationException { protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
try { //do nothing
getSha1MessageDigest();
} catch (IllegalStateException ex) {
setEnabled(false);
throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
}
} }
/** /**
@@ -147,12 +144,10 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
* analyzing the dependency * analyzing the dependency
*/ */
@Override @Override
protected void analyzeDependency(Dependency dependency, Engine engine) protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
throws AnalysisException { dependency.setEcosystem(DEPENDENCY_ECOSYSTEM);
final File file = dependency.getActualFile(); final File file = dependency.getActualFile();
final String parentName = file.getParentFile().getName();
final String name = file.getName(); final String name = file.getName();
dependency.setDisplayFileName(String.format("%s%c%s", parentName, File.separatorChar, name));
String contents; String contents;
try { try {
contents = FileUtils.readFileToString(file, Charset.defaultCharset()).trim(); contents = FileUtils.readFileToString(file, Charset.defaultCharset()).trim();
@@ -160,7 +155,6 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
throw new AnalysisException( throw new AnalysisException(
"Problem occurred while reading dependency file.", e); "Problem occurred while reading dependency file.", e);
} }
if (StringUtils.isNotBlank(contents)) { if (StringUtils.isNotBlank(contents)) {
final Matcher m = PROJECT.matcher(contents); final Matcher m = PROJECT.matcher(contents);
int count = 0; int count = 0;
@@ -170,9 +164,10 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
"Found project command match with %d groups: %s", "Found project command match with %d groups: %s",
m.groupCount(), m.group(0))); m.groupCount(), m.group(0)));
final String group = m.group(1); final String group = m.group(1);
LOGGER.debug("Group 1: " + group); LOGGER.debug("Group 1: {}", group);
dependency.getProductEvidence().addEvidence(name, "Project", dependency.addEvidence(EvidenceType.PRODUCT, name, "Project", group, Confidence.HIGH);
group, Confidence.HIGH); dependency.addEvidence(EvidenceType.VENDOR, name, "Project", group, Confidence.HIGH);
dependency.setName(group);
} }
LOGGER.debug("Found {} matches.", count); LOGGER.debug("Found {} matches.", count);
analyzeSetVersionCommand(dependency, engine, contents); analyzeSetVersionCommand(dependency, engine, contents);
@@ -188,9 +183,6 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
* @param engine the dependency-check engine * @param engine the dependency-check engine
* @param contents the version information * @param contents the version information
*/ */
@edu.umd.cs.findbugs.annotations.SuppressFBWarnings(
value = "DM_DEFAULT_ENCODING",
justification = "Default encoding is only used if UTF-8 is not available")
private void analyzeSetVersionCommand(Dependency dependency, Engine engine, String contents) { private void analyzeSetVersionCommand(Dependency dependency, Engine engine, String contents) {
Dependency currentDep = dependency; Dependency currentDep = dependency;
@@ -202,8 +194,8 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
m.groupCount(), m.group(0)); m.groupCount(), m.group(0));
String product = m.group(1); String product = m.group(1);
final String version = m.group(2); final String version = m.group(2);
LOGGER.debug("Group 1: " + product); LOGGER.debug("Group 1: {}", product);
LOGGER.debug("Group 2: " + version); LOGGER.debug("Group 2: {}", version);
final String aliasPrefix = "ALIASOF_"; final String aliasPrefix = "ALIASOF_";
if (product.startsWith(aliasPrefix)) { if (product.startsWith(aliasPrefix)) {
product = product.replaceFirst(aliasPrefix, ""); product = product.replaceFirst(aliasPrefix, "");
@@ -211,45 +203,26 @@ public class CMakeAnalyzer extends AbstractFileTypeAnalyzer {
if (count > 1) { if (count > 1) {
//TODO - refactor so we do not assign to the parameter (checkstyle) //TODO - refactor so we do not assign to the parameter (checkstyle)
currentDep = new Dependency(dependency.getActualFile()); currentDep = new Dependency(dependency.getActualFile());
currentDep.setDisplayFileName(String.format("%s:%s", dependency.getDisplayFileName(), product)); currentDep.setEcosystem(DEPENDENCY_ECOSYSTEM);
final String filePath = String.format("%s:%s", dependency.getFilePath(), product); final String filePath = String.format("%s:%s", dependency.getFilePath(), product);
currentDep.setFilePath(filePath); currentDep.setFilePath(filePath);
byte[] path; currentDep.setSha1sum(Checksum.getSHA1Checksum(filePath));
try { currentDep.setMd5sum(Checksum.getMD5Checksum(filePath));
path = filePath.getBytes("UTF-8"); engine.addDependency(currentDep);
} catch (UnsupportedEncodingException ex) {
path = filePath.getBytes();
}
final MessageDigest sha1 = getSha1MessageDigest();
currentDep.setSha1sum(Checksum.getHex(sha1.digest(path)));
engine.getDependencies().add(currentDep);
} }
final String source = currentDep.getDisplayFileName(); final String source = currentDep.getFileName();
currentDep.getProductEvidence().addEvidence(source, "Product", currentDep.addEvidence(EvidenceType.PRODUCT, source, "Product", product, Confidence.MEDIUM);
product, Confidence.MEDIUM); currentDep.addEvidence(EvidenceType.VENDOR, source, "Vendor", product, Confidence.MEDIUM);
currentDep.getVersionEvidence().addEvidence(source, "Version", currentDep.addEvidence(EvidenceType.VERSION, source, "Version", version, Confidence.MEDIUM);
version, Confidence.MEDIUM); currentDep.setName(product);
currentDep.setVersion(version);
} }
LOGGER.debug(String.format("Found %d matches.", count)); LOGGER.debug("Found {} matches.", count);
} }
@Override @Override
protected String getAnalyzerEnabledSettingKey() { protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_CMAKE_ENABLED; return Settings.KEYS.ANALYZER_CMAKE_ENABLED;
} }
/**
* Returns the sha1 message digest.
*
* @return the sha1 message digest
*/
private MessageDigest getSha1MessageDigest() {
try {
return MessageDigest.getInstance("SHA1");
} catch (NoSuchAlgorithmException e) {
LOGGER.error(e.getMessage());
throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
}
}
} }
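The SET_VERSION pattern shown above pairs a <name>_VERSION variable with its value, which feeds the product, vendor, and version evidence (and, for second and later matches, new dependencies). A standalone check of that same regular expression against a typical CMake line (the sample input is illustrative):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public final class CMakeSetVersionDemo {
    private static final int REGEX_OPTIONS =
            Pattern.DOTALL | Pattern.CASE_INSENSITIVE | Pattern.MULTILINE;

    // Same expression as the analyzer's SET_VERSION constant.
    private static final Pattern SET_VERSION = Pattern.compile(
            "^ *set\\s*\\(\\s*(\\w+)_version\\s+\"?(\\d+(?:\\.\\d+)+)[\\s\"]?\\)", REGEX_OPTIONS);

    public static void main(String[] args) {
        final String cmake = "set(OPENCV_VERSION \"3.1.0\")";
        final Matcher m = SET_VERSION.matcher(cmake);
        if (m.find()) {
            System.out.println("product: " + m.group(1)); // OPENCV -> PRODUCT/VENDOR evidence
            System.out.println("version: " + m.group(2)); // 3.1.0  -> VERSION evidence
        }
    }
}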

View File

@@ -21,12 +21,15 @@ import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.TimeUnit;
+import javax.annotation.concurrent.ThreadSafe;
import org.apache.commons.lang3.builder.CompareToBuilder;
+import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.queryparser.classic.ParseException;
@@ -39,12 +42,13 @@ import org.owasp.dependencycheck.data.cpe.Fields;
import org.owasp.dependencycheck.data.cpe.IndexEntry;
import org.owasp.dependencycheck.data.cpe.IndexException;
import org.owasp.dependencycheck.data.lucene.LuceneUtils;
+import org.owasp.dependencycheck.data.lucene.SearchFieldAnalyzer;
import org.owasp.dependencycheck.data.nvdcve.CveDB;
import org.owasp.dependencycheck.data.nvdcve.DatabaseException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
-import org.owasp.dependencycheck.dependency.EvidenceCollection;
+import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.exception.InitializationException;
@@ -61,6 +65,7 @@ import org.slf4j.LoggerFactory;
 *
 * @author Jeremy Long
 */
+@ThreadSafe
public class CPEAnalyzer extends AbstractAnalyzer {
    /**
@@ -77,9 +82,11 @@ public class CPEAnalyzer extends AbstractAnalyzer {
    private static final String WEIGHTING_BOOST = "^5";
    /**
     * A string representation of a regular expression defining characters
-     * utilized within the CPE Names.
+     * utilized within the CPE Names. Note, the :/ are included so URLs are
+     * passed into the Lucene query so that the specialized tokenizer can parse
+     * them.
     */
-    private static final String CLEANSE_CHARACTER_RX = "[^A-Za-z0-9 ._-]";
+    private static final String CLEANSE_CHARACTER_RX = "[^A-Za-z0-9 ._:/-]";
    /**
     * A string representation of a regular expression used to remove all but
     * alpha characters.
@@ -90,6 +97,10 @@ public class CPEAnalyzer extends AbstractAnalyzer {
     * data that will be written into the string.
     */
    private static final int STRING_BUILDER_BUFFER = 20;
+    /**
+     * The URL to perform a search of the NVD CVE data at NIST.
+     */
+    public static final String NVD_SEARCH_URL = "https://web.nvd.nist.gov/view/vuln/search-results?adv_search=true&cves=on&cpe_version=%s";
    /**
     * The CPE in memory index.
     */
@@ -98,11 +109,12 @@ public class CPEAnalyzer extends AbstractAnalyzer {
     * The CVE Database.
     */
    private CveDB cve;
    /**
-     * The URL to perform a search of the NVD CVE data at NIST.
+     * The list of ecosystems to skip during analysis. These are skipped because
+     * there is generally a more accurate vulnerability analyzer in the
+     * pipeline.
     */
-    public static final String NVD_SEARCH_URL = "https://web.nvd.nist.gov/view/vuln/search-results?adv_search=true&cves=on&cpe_version=%s";
+    private List<String> skipEcosystems;
    /**
     * Returns the name of this analyzer.
@@ -124,26 +136,18 @@ public class CPEAnalyzer extends AbstractAnalyzer {
        return AnalysisPhase.IDENTIFIER_ANALYSIS;
    }
-    /**
-     * The default is to support parallel processing.
-     *
-     * @return false
-     */
-    @Override
-    public boolean supportsParallelProcessing() {
-        return false;
-    }
    /**
     * Creates the CPE Lucene Index.
     *
+     * @param engine a reference to the dependency-check engine
     * @throws InitializationException is thrown if there is an issue opening
     * the index.
     */
    @Override
-    public void initializeAnalyzer() throws InitializationException {
+    public void prepareAnalyzer(Engine engine) throws InitializationException {
+        super.prepareAnalyzer(engine);
        try {
-            this.open();
+            this.open(engine.getDatabase());
        } catch (IOException ex) {
            LOGGER.debug("Exception initializing the Lucene Index", ex);
            throw new InitializationException("An exception occurred initializing the Lucene Index", ex);
@@ -151,29 +155,35 @@ public class CPEAnalyzer extends AbstractAnalyzer {
LOGGER.debug("Exception accessing the database", ex); LOGGER.debug("Exception accessing the database", ex);
throw new InitializationException("An exception occurred accessing the database", ex); throw new InitializationException("An exception occurred accessing the database", ex);
} }
final String[] tmp = engine.getSettings().getArray(Settings.KEYS.ECOSYSTEM_SKIP_CPEANALYZER);
if (tmp == null) {
skipEcosystems = new ArrayList<>();
} else {
LOGGER.info("Skipping CPE Analysis for {}", tmp);
skipEcosystems = Arrays.asList(tmp);
}
} }
    /**
     * Opens the data source.
     *
+     * @param cve a reference to the NVD CVE database
     * @throws IOException when the Lucene directory to be queried does not
     * exist or is corrupt.
     * @throws DatabaseException when the database throws an exception. This
     * usually occurs when the database is in use by another process.
     */
-    public void open() throws IOException, DatabaseException {
-        if (!isOpen()) {
-            cve = CveDB.getInstance();
-            cpe = CpeMemoryIndex.getInstance();
-            try {
-                final long creationStart = System.currentTimeMillis();
-                cpe.open(cve);
-                final long creationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - creationStart);
-                LOGGER.info("Created CPE Index ({} seconds)", creationSeconds);
-            } catch (IndexException ex) {
-                LOGGER.debug("IndexException", ex);
-                throw new DatabaseException(ex);
-            }
-        }
-    }
+    public void open(CveDB cve) throws IOException, DatabaseException {
+        this.cve = cve;
+        this.cpe = CpeMemoryIndex.getInstance();
+        try {
+            final long creationStart = System.currentTimeMillis();
+            cpe.open(cve);
+            final long creationSeconds = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - creationStart);
+            LOGGER.info("Created CPE Index ({} seconds)", creationSeconds);
+        } catch (IndexException ex) {
+            LOGGER.debug("IndexException", ex);
+            throw new DatabaseException(ex);
+        }
+    }
@@ -182,25 +192,12 @@ public class CPEAnalyzer extends AbstractAnalyzer {
*/ */
    @Override
    public void closeAnalyzer() {
-        if (cve != null) {
-            cve.close();
-            cve = null;
-        }
        if (cpe != null) {
            cpe.close();
            cpe = null;
        }
    }
-    /**
-     * Returns whether or not the analyzer is open.
-     *
-     * @return <code>true</code> if the analyzer is open
-     */
-    public boolean isOpen() {
-        return cpe != null && cpe.isOpen();
-    }
/** /**
* Searches the data store of CPE entries, trying to identify the CPE for * Searches the data store of CPE entries, trying to identify the CPE for
* the given dependency based on the evidence contained within. The * the given dependency based on the evidence contained within. The
@@ -212,21 +209,20 @@ public class CPEAnalyzer extends AbstractAnalyzer {
     * @throws ParseException is thrown when the Lucene query cannot be parsed.
     */
    protected void determineCPE(Dependency dependency) throws CorruptIndexException, IOException, ParseException {
-        //TODO test dojo-war against this. we should get dojo-toolkit:dojo-toolkit AND dojo-toolkit:toolkit
        String vendors = "";
        String products = "";
        for (Confidence confidence : Confidence.values()) {
-            if (dependency.getVendorEvidence().contains(confidence)) {
-                vendors = addEvidenceWithoutDuplicateTerms(vendors, dependency.getVendorEvidence(), confidence);
+            if (dependency.contains(EvidenceType.VENDOR, confidence)) {
+                vendors = addEvidenceWithoutDuplicateTerms(vendors, dependency.getIterator(EvidenceType.VENDOR, confidence));
                LOGGER.debug("vendor search: {}", vendors);
            }
-            if (dependency.getProductEvidence().contains(confidence)) {
-                products = addEvidenceWithoutDuplicateTerms(products, dependency.getProductEvidence(), confidence);
+            if (dependency.contains(EvidenceType.PRODUCT, confidence)) {
+                products = addEvidenceWithoutDuplicateTerms(products, dependency.getIterator(EvidenceType.PRODUCT, confidence));
                LOGGER.debug("product search: {}", products);
            }
            if (!vendors.isEmpty() && !products.isEmpty()) {
-                final List<IndexEntry> entries = searchCPE(vendors, products, dependency.getVendorEvidence().getWeighting(),
-                        dependency.getProductEvidence().getWeighting());
+                final List<IndexEntry> entries = searchCPE(vendors, products, dependency.getVendorWeightings(),
+                        dependency.getProductWeightings());
                if (entries == null) {
                    continue;
                }
@@ -248,30 +244,31 @@ public class CPEAnalyzer extends AbstractAnalyzer {
    }
    /**
+     * <p>
     * Returns the text created by concatenating the text and the values from
     * the EvidenceCollection (filtered for a specific confidence). This
-     * attempts to prevent duplicate terms from being added.<br/<br/> Note, if
-     * the evidence is longer then 200 characters it will be truncated.
+     * attempts to prevent duplicate terms from being added.</p>
+     * <p>
+     * Note, if the evidence is longer then 200 characters it will be
+     * truncated.</p>
     *
-     * @param text the base text.
-     * @param ec an EvidenceCollection
-     * @param confidenceFilter a Confidence level to filter the evidence by.
+     * @param text the base text
+     * @param evidence an iterable set of evidence to concatenate
     * @return the new evidence text
     */
-    private String addEvidenceWithoutDuplicateTerms(final String text, final EvidenceCollection ec, Confidence confidenceFilter) {
+    @SuppressWarnings("null")
+    protected String addEvidenceWithoutDuplicateTerms(final String text, final Iterable<Evidence> evidence) {
        final String txt = (text == null) ? "" : text;
-        final StringBuilder sb = new StringBuilder(txt.length() + (20 * ec.size()));
+        final StringBuilder sb = new StringBuilder(txt.length() * 2);
        sb.append(' ').append(txt).append(' ');
-        for (Evidence e : ec.iterator(confidenceFilter)) {
+        for (Evidence e : evidence) {
            String value = e.getValue();
-            //hack to get around the fact that lucene does a really good job of recognizing domains and not
-            // splitting them. TODO - put together a better lucene analyzer specific to the domain.
-            if (value.startsWith("http://")) {
-                value = value.substring(7).replaceAll("\\.", " ");
-            }
-            if (value.startsWith("https://")) {
-                value = value.substring(8).replaceAll("\\.", " ");
-            }
+            if (value.length() > 1000) {
+                value = value.substring(0, 1000);
+                final int pos = value.lastIndexOf(" ");
+                if (pos > 0) {
+                    value = value.substring(0, pos);
+                }
+            }
            if (sb.indexOf(" " + value + " ") < 0) {
                sb.append(value).append(' ');
@@ -381,7 +378,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
     * @return if the append was successful.
     */
    private boolean appendWeightedSearch(StringBuilder sb, String field, String searchText, Set<String> weightedText) {
-        sb.append(' ').append(field).append(":( ");
+        sb.append(field).append(":(");
        final String cleanText = cleanseText(searchText);
@@ -392,6 +389,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
        if (weightedText == null || weightedText.isEmpty()) {
            LuceneUtils.appendEscapedLuceneQuery(sb, cleanText);
        } else {
+            boolean addSpace = false;
            final StringTokenizer tokens = new StringTokenizer(cleanText);
            while (tokens.hasMoreElements()) {
                final String word = tokens.nextToken();
@@ -403,14 +401,20 @@ public class CPEAnalyzer extends AbstractAnalyzer {
                LuceneUtils.appendEscapedLuceneQuery(temp, word);
                temp.append(WEIGHTING_BOOST);
                if (!word.equalsIgnoreCase(weightedStr)) {
-                    temp.append(' ');
+                    if (temp.length() > 0) {
+                        temp.append(' ');
+                    }
                    LuceneUtils.appendEscapedLuceneQuery(temp, weightedStr);
                    temp.append(WEIGHTING_BOOST);
                }
                break;
            }
        }
-        sb.append(' ');
+        if (addSpace) {
+            sb.append(' ');
+        } else {
+            addSpace = true;
+        }
        if (temp == null) {
            LuceneUtils.appendEscapedLuceneQuery(sb, word);
        } else {
@@ -418,7 +422,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
            }
        }
    }
-    sb.append(" ) ");
+    sb.append(")");
    return true;
}
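Not part of the patch: a minimal, self-contained sketch of the query shape appendWeightedSearch builds, assuming a hypothetical buildWeightedField helper and skipping the LuceneUtils escaping step. Terms that also appear in the weighted evidence get the ^5 boost, and the whole group is wrapped in field:( ... ).

// Illustrative sketch only; mirrors the query format produced by appendWeightedSearch.
// The class and helper names here are hypothetical and not part of dependency-check.
import java.util.Set;
import java.util.StringTokenizer;

public class WeightedQuerySketch {
    static String buildWeightedField(String field, String searchText, Set<String> weighted) {
        final StringBuilder sb = new StringBuilder(field).append(":(");
        final StringTokenizer tokens = new StringTokenizer(searchText);
        boolean addSpace = false;
        while (tokens.hasMoreElements()) {
            final String word = tokens.nextToken();
            if (addSpace) {
                sb.append(' ');
            } else {
                addSpace = true;
            }
            sb.append(word);
            if (weighted.contains(word)) {
                sb.append("^5"); // boost terms that also appear in the weighted evidence
            }
        }
        return sb.append(")").toString();
    }

    public static void main(String[] args) {
        // prints: product:(spring^5 framework)
        System.out.println(buildWeightedField("product", "spring framework", Set.of("spring")));
    }
}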
@@ -465,8 +469,8 @@ public class CPEAnalyzer extends AbstractAnalyzer {
        //TODO - does this nullify some of the fuzzy matching that happens in the lucene search?
        // for instance CPE some-component and in the evidence we have SomeComponent.
-        if (collectionContainsString(dependency.getProductEvidence(), entry.getProduct())
-                && collectionContainsString(dependency.getVendorEvidence(), entry.getVendor())) {
+        if (collectionContainsString(dependency.getEvidence(EvidenceType.PRODUCT), entry.getProduct())
+                && collectionContainsString(dependency.getEvidence(EvidenceType.VENDOR), entry.getVendor())) {
            //&& collectionContainsVersion(dependency.getVersionEvidence(), entry.getVersion())
            isValid = true;
        }
@@ -476,11 +480,11 @@ public class CPEAnalyzer extends AbstractAnalyzer {
    /**
     * Used to determine if the EvidenceCollection contains a specific string.
     *
-     * @param ec an EvidenceCollection
+     * @param evidence an of evidence object to check
     * @param text the text to search for
     * @return whether or not the EvidenceCollection contains the string
     */
-    private boolean collectionContainsString(EvidenceCollection ec, String text) {
+    private boolean collectionContainsString(Set<Evidence> evidence, String text) {
        //TODO - likely need to change the split... not sure if this will work for CPE with special chars
        if (text == null) {
            return false;
@@ -488,7 +492,11 @@ public class CPEAnalyzer extends AbstractAnalyzer {
        final String[] words = text.split("[\\s_-]");
        final List<String> list = new ArrayList<>();
        String tempWord = null;
+        final CharArraySet stopWords = SearchFieldAnalyzer.getStopWords();
        for (String word : words) {
+            if (stopWords.contains(word)) {
+                continue;
+            }
            /*
            single letter words should be concatenated with the next word.
            so { "m", "core", "sample" } -> { "mcore", "sample" }
@@ -513,11 +521,24 @@ public class CPEAnalyzer extends AbstractAnalyzer {
        if (list.isEmpty()) {
            return false;
        }
-        boolean contains = true;
+        boolean isValid = true;
        for (String word : list) {
-            contains &= ec.containsUsedString(word);
+            boolean found = false;
+            for (Evidence e : evidence) {
+                if (e.getValue().toLowerCase().contains(word.toLowerCase())) {
+                    if ("http".equals(word) && e.getValue().contains("http:")) {
+                        continue;
+                    }
+                    found = true;
+                    break;
+                }
+            }
+            isValid &= found;
+            if (!isValid) {
+                break;
+            }
        }
-        return contains;
+        return isValid;
    }
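As a rough illustration of the containment test above (not from the patch): split the CPE term on whitespace, underscores, and hyphens, then require every token to appear, case-insensitively, in at least one piece of evidence. The real code additionally merges single-letter tokens with their neighbor and skips Lucene stop words via SearchFieldAnalyzer.getStopWords(); this sketch omits that.

// Simplified sketch of the evidence-containment check; names here are illustrative.
import java.util.List;

public class ContainsSketch {
    static boolean evidenceContainsAllTokens(List<String> evidenceValues, String text) {
        for (String token : text.split("[\\s_-]")) {
            boolean found = false;
            for (String value : evidenceValues) {
                if (value.toLowerCase().contains(token.toLowerCase())) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                return false; // one missing token is enough to reject the CPE candidate
            }
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(evidenceContainsAllTokens(
                List.of("Spring Framework", "org.springframework"), "spring-framework")); // true
    }
}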
/** /**
@@ -530,7 +551,10 @@ public class CPEAnalyzer extends AbstractAnalyzer {
     * dependency.
     */
    @Override
-    protected synchronized void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
+    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
+        if (skipEcosystems.contains(dependency.getEcosystem())) {
+            return;
+        }
        try {
            determineCPE(dependency);
        } catch (CorruptIndexException ex) {
@@ -561,6 +585,9 @@ public class CPEAnalyzer extends AbstractAnalyzer {
    protected boolean determineIdentifiers(Dependency dependency, String vendor, String product,
            Confidence currentConfidence) throws UnsupportedEncodingException {
        final Set<VulnerableSoftware> cpes = cve.getCPEs(vendor, product);
+        if (cpes.isEmpty()) {
+            return false;
+        }
        DependencyVersion bestGuess = new DependencyVersion("-");
        Confidence bestGuessConf = null;
        boolean hasBroadMatch = false;
@@ -570,7 +597,7 @@ public class CPEAnalyzer extends AbstractAnalyzer {
        // if there lower confidence evidence when the current (highest) version number
        // is newer then anything in the NVD.
        for (Confidence conf : Confidence.values()) {
-            for (Evidence evidence : dependency.getVersionEvidence().iterator(conf)) {
+            for (Evidence evidence : dependency.getIterator(EvidenceType.VERSION, conf)) {
                final DependencyVersion evVer = DependencyVersionUtil.parseVersion(evidence.getValue());
                if (evVer == null) {
                    continue;

View File

@@ -36,6 +36,8 @@ import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
+import javax.annotation.concurrent.ThreadSafe;
+import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.utils.DownloadFailedException;
import org.owasp.dependencycheck.utils.Downloader;
@@ -49,6 +51,7 @@ import org.owasp.dependencycheck.utils.Settings;
 *
 * @author colezlaw
 */
+@ThreadSafe
public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
    /**
@@ -72,28 +75,26 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
    private static final String SUPPORTED_EXTENSIONS = "jar";
    /**
-     * The analyzer should be disabled if there are errors, so this is a flag to
-     * determine if such an error has occurred.
+     * There may be temporary issues when connecting to MavenCentral. In order
+     * to compensate for 99% of the issues, we perform a retry before finally
+     * failing the analysis.
     */
-    private volatile boolean errorFlag = false;
+    private static final int NUMBER_OF_TRIES = 5;
    /**
     * The searcher itself.
     */
    private CentralSearch searcher;
-    /**
-     * Field indicating if the analyzer is enabled.
-     */
-    private final boolean enabled = checkEnabled();
    /**
-     * Determine whether to enable this analyzer or not.
+     * Initializes the analyzer with the configured settings.
     *
-     * @return whether the analyzer should be enabled
+     * @param settings the configured settings to use
     */
    @Override
-    public boolean isEnabled() {
-        return enabled;
+    public void initialize(Settings settings) {
+        super.initialize(settings);
+        setEnabled(checkEnabled());
    }
/** /**
@@ -106,9 +107,9 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
        boolean retVal = false;
        try {
-            if (Settings.getBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED)) {
-                if (!Settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)
-                        || NexusAnalyzer.DEFAULT_URL.equals(Settings.getString(Settings.KEYS.ANALYZER_NEXUS_URL))) {
+            if (getSettings().getBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED)) {
+                if (!getSettings().getBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED)
+                        || NexusAnalyzer.DEFAULT_URL.equals(getSettings().getString(Settings.KEYS.ANALYZER_NEXUS_URL))) {
                    LOGGER.debug("Enabling the Central analyzer");
                    retVal = true;
                } else {
@@ -126,20 +127,19 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
    /**
     * Initializes the analyzer once before any analysis is performed.
     *
+     * @param engine a reference to the dependency-check engine
     * @throws InitializationException if there's an error during initialization
     */
    @Override
-    public void initializeFileTypeAnalyzer() throws InitializationException {
+    public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
        LOGGER.debug("Initializing Central analyzer");
        LOGGER.debug("Central analyzer enabled: {}", isEnabled());
        if (isEnabled()) {
-            final String searchUrl = Settings.getString(Settings.KEYS.ANALYZER_CENTRAL_URL);
-            LOGGER.debug("Central Analyzer URL: {}", searchUrl);
            try {
-                searcher = new CentralSearch(new URL(searchUrl));
+                searcher = new CentralSearch(getSettings());
            } catch (MalformedURLException ex) {
                setEnabled(false);
-                throw new InitializationException("The configured URL to Maven Central is malformed: " + searchUrl, ex);
+                throw new InitializationException("The configured URL to Maven Central is malformed", ex);
            }
        }
    }
@@ -194,18 +194,14 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
     */
    @Override
    public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
-        if (errorFlag || !isEnabled()) {
-            return;
-        }
        try {
-            final List<MavenArtifact> mas = searcher.searchSha1(dependency.getSha1sum());
+            final List<MavenArtifact> mas = fetchMavenArtifacts(dependency);
            final Confidence confidence = mas.size() > 1 ? Confidence.HIGH : Confidence.HIGHEST;
            for (MavenArtifact ma : mas) {
                LOGGER.debug("Central analyzer found artifact ({}) for dependency ({})", ma, dependency.getFileName());
                dependency.addAsEvidence("central", ma, confidence);
                boolean pomAnalyzed = false;
-                for (Evidence e : dependency.getVendorEvidence()) {
+                for (Evidence e : dependency.getEvidence(EvidenceType.VENDOR)) {
                    if ("pom".equals(e.getSource())) {
                        pomAnalyzed = true;
                        break;
@@ -214,7 +210,7 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
                if (!pomAnalyzed && ma.getPomUrl() != null) {
                    File pomFile = null;
                    try {
-                        final File baseDir = Settings.getTempDirectory();
+                        final File baseDir = getSettings().getTempDirectory();
                        pomFile = File.createTempFile("pom", ".xml", baseDir);
                        if (!pomFile.delete()) {
                            LOGGER.warn("Unable to fetch pom.xml for {} from Central; "
@@ -222,7 +218,8 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
LOGGER.debug("Unable to delete temp file"); LOGGER.debug("Unable to delete temp file");
} }
LOGGER.debug("Downloading {}", ma.getPomUrl()); LOGGER.debug("Downloading {}", ma.getPomUrl());
Downloader.fetchFile(new URL(ma.getPomUrl()), pomFile); final Downloader downloader = new Downloader(getSettings());
downloader.fetchFile(new URL(ma.getPomUrl()), pomFile);
PomUtils.analyzePOM(dependency, pomFile); PomUtils.analyzePOM(dependency, pomFile);
} catch (DownloadFailedException ex) { } catch (DownloadFailedException ex) {
@@ -242,8 +239,61 @@ public class CentralAnalyzer extends AbstractFileTypeAnalyzer {
        } catch (FileNotFoundException fnfe) {
            LOGGER.debug("Artifact not found in repository: '{}", dependency.getFileName());
        } catch (IOException ioe) {
-            LOGGER.debug("Could not connect to Central search", ioe);
-            errorFlag = true;
+            final String message = "Could not connect to Central search. Analysis failed.";
+            LOGGER.error(message, ioe);
+            throw new AnalysisException(message, ioe);
        }
    }
/**
* Downloads the corresponding list of MavenArtifacts of the given
* dependency from MavenCentral.
* <p>
* As the connection to MavenCentral is known to be unreliable, we implement
* a simple retry logic in order to compensate for 99% of the issues.
*
* @param dependency the dependency to analyze
* @return the downloaded list of MavenArtifacts
* @throws FileNotFoundException if the specified artifact is not found
* @throws IOException if connecting to MavenCentral finally failed
*/
protected List<MavenArtifact> fetchMavenArtifacts(Dependency dependency) throws IOException {
IOException lastException = null;
long sleepingTimeBetweenRetriesInMillis = 1000;
int triesLeft = NUMBER_OF_TRIES;
while (triesLeft-- > 0) {
try {
return searcher.searchSha1(dependency.getSha1sum());
} catch (FileNotFoundException fnfe) {
// retry does not make sense, just throw the exception
throw fnfe;
} catch (IOException ioe) {
LOGGER.debug("Could not connect to Central search (tries left: {}): {}",
triesLeft, ioe.getMessage());
lastException = ioe;
if (triesLeft > 0) {
try {
Thread.sleep(sleepingTimeBetweenRetriesInMillis);
sleepingTimeBetweenRetriesInMillis *= 2;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}
}
final String message = "Finally failed connecting to Central search."
+ " Giving up after " + NUMBER_OF_TRIES + " tries.";
throw new IOException(message, lastException);
}
/**
* Method used by unit tests to setup the analyzer.
*
* @param searcher the Central Search object to use.
*/
protected void setCentralSearch(CentralSearch searcher) {
this.searcher = searcher;
}
} }
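The retry added above doubles its sleep between attempts before giving up. Below is a standalone sketch of the same retry-with-exponential-backoff pattern; the Fetcher interface and class name are illustrative only and not part of the CentralAnalyzer API.

// Minimal retry-with-exponential-backoff sketch mirroring fetchMavenArtifacts.
import java.io.IOException;

public class RetrySketch {
    interface Fetcher<T> {
        T fetch() throws IOException;
    }

    static <T> T withRetry(Fetcher<T> fetcher, int tries, long initialSleepMillis) throws IOException {
        IOException last = null;
        long sleep = initialSleepMillis;
        while (tries-- > 0) {
            try {
                return fetcher.fetch();
            } catch (IOException ex) {
                last = ex;
                if (tries > 0) {
                    try {
                        Thread.sleep(sleep);
                        sleep *= 2; // back off before the next attempt
                    } catch (InterruptedException ie) {
                        Thread.currentThread().interrupt();
                    }
                }
            }
        }
        throw new IOException("Giving up after retries", last);
    }
}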

View File

@@ -23,13 +23,14 @@ import java.io.IOException;
import java.nio.charset.Charset;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import javax.annotation.concurrent.ThreadSafe;
import org.apache.commons.io.FileUtils;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
-import org.owasp.dependencycheck.dependency.EvidenceCollection;
+import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.Settings;
@@ -41,8 +42,15 @@ import org.owasp.dependencycheck.utils.Settings;
 * @author Bianca Jiang (https://twitter.com/biancajiang)
 */
@Experimental
+@ThreadSafe
public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
+    /**
+     * A descriptor for the type of dependencies processed or added by this
+     * analyzer.
+     */
+    public static final String DEPENDENCY_ECOSYSTEM = "CocoaPod";
    /**
     * The logger.
     */
@@ -83,7 +91,7 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
    }
    @Override
-    protected void initializeFileTypeAnalyzer() {
+    protected void prepareFileTypeAnalyzer(Engine engine) {
        // NO-OP
    }
@@ -122,6 +130,7 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
    protected void analyzeDependency(Dependency dependency, Engine engine)
            throws AnalysisException {
+        dependency.setEcosystem(DEPENDENCY_ECOSYSTEM);
        String contents;
        try {
            contents = FileUtils.readFileToString(dependency.getActualFile(), Charset.defaultCharset());
@@ -134,21 +143,35 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
            contents = contents.substring(matcher.end());
            final String blockVariable = matcher.group(1);
-            final EvidenceCollection vendor = dependency.getVendorEvidence();
-            final EvidenceCollection product = dependency.getProductEvidence();
-            final EvidenceCollection version = dependency.getVersionEvidence();
-            final String name = addStringEvidence(product, contents, blockVariable, "name", "name", Confidence.HIGHEST);
+            final String name = determineEvidence(contents, blockVariable, "name");
            if (!name.isEmpty()) {
-                vendor.addEvidence(PODSPEC, "name_project", name, Confidence.HIGHEST);
+                dependency.addEvidence(EvidenceType.PRODUCT, PODSPEC, "name_project", name, Confidence.HIGHEST);
+                dependency.addEvidence(EvidenceType.VENDOR, PODSPEC, "name_project", name, Confidence.HIGHEST);
+                dependency.setName(name);
            }
-            addStringEvidence(product, contents, blockVariable, "summary", "summary", Confidence.HIGHEST);
-            addStringEvidence(vendor, contents, blockVariable, "author", "authors?", Confidence.HIGHEST);
-            addStringEvidence(vendor, contents, blockVariable, "homepage", "homepage", Confidence.HIGHEST);
-            addStringEvidence(vendor, contents, blockVariable, "license", "licen[cs]es?", Confidence.HIGHEST);
-            addStringEvidence(version, contents, blockVariable, "version", "version", Confidence.HIGHEST);
+            final String summary = determineEvidence(contents, blockVariable, "summary");
+            if (!summary.isEmpty()) {
+                dependency.addEvidence(EvidenceType.PRODUCT, PODSPEC, "summary", summary, Confidence.HIGHEST);
+            }
+            final String author = determineEvidence(contents, blockVariable, "authors?");
+            if (!author.isEmpty()) {
+                dependency.addEvidence(EvidenceType.VENDOR, PODSPEC, "author", author, Confidence.HIGHEST);
+            }
+            final String homepage = determineEvidence(contents, blockVariable, "homepage");
+            if (!homepage.isEmpty()) {
+                dependency.addEvidence(EvidenceType.VENDOR, PODSPEC, "homepage", homepage, Confidence.HIGHEST);
+            }
+            final String license = determineEvidence(contents, blockVariable, "licen[cs]es?");
+            if (!license.isEmpty()) {
+                dependency.setLicense(license);
+            }
+            final String version = determineEvidence(contents, blockVariable, "version");
+            if (!version.isEmpty()) {
+                dependency.addEvidence(EvidenceType.VERSION, PODSPEC, "version", version, Confidence.HIGHEST);
+                dependency.setVersion(version);
+            }
        }
        setPackagePath(dependency);
@@ -158,16 +181,12 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
     * Extracts evidence from the contents and adds it to the given evidence
     * collection.
     *
-     * @param evidences the evidence collection to update
     * @param contents the text to extract evidence from
     * @param blockVariable the block variable within the content to search for
-     * @param field the name of the field being searched for
     * @param fieldPattern the field pattern within the contents to search for
-     * @param confidence the confidence level of the evidence if found
-     * @return the string that was added as evidence
+     * @return the evidence
     */
-    private String addStringEvidence(EvidenceCollection evidences, String contents,
-            String blockVariable, String field, String fieldPattern, Confidence confidence) {
+    private String determineEvidence(String contents, String blockVariable, String fieldPattern) {
        String value = "";
        //capture array value between [ ]
@@ -184,9 +203,6 @@ public class CocoaPodsAnalyzer extends AbstractFileTypeAnalyzer {
                value = matcher.group(2);
            }
        }
-        if (value.length() > 0) {
-            evidences.addEvidence(PODSPEC, field, value, confidence);
-        }
        return value;
    }
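For context (not part of the diff): the podspec fields are pulled out with a regular expression keyed on the block variable. A minimal sketch under a simplified pattern follows; the class and helper names are hypothetical, and the real analyzer additionally handles array values between [ ] and the licen[cs]es? spelling variants.

// Simplified sketch of podspec field extraction.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PodspecSketch {
    static String extractField(String contents, String blockVariable, String fieldPattern) {
        final Pattern p = Pattern.compile(
                blockVariable + "\\." + fieldPattern + "\\s*=\\s*['\"](.*?)['\"]");
        final Matcher m = p.matcher(contents);
        return m.find() ? m.group(1) : "";
    }

    public static void main(String[] args) {
        final String podspec = "Pod::Spec.new do |s|\n  s.name = 'AFNetworking'\n  s.version = '3.2.1'\n";
        System.out.println(extractField(podspec, "s", "name"));    // AFNetworking
        System.out.println(extractField(podspec, "s", "version")); // 3.2.1
    }
}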

View File

@@ -34,9 +34,7 @@ import org.slf4j.LoggerFactory;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
-import java.nio.charset.Charset;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
+import org.owasp.dependencycheck.dependency.EvidenceType;
/** /**
* Used to analyze a composer.lock file for a composer PHP app. * Used to analyze a composer.lock file for a composer PHP app.
@@ -46,6 +44,12 @@ import java.security.NoSuchAlgorithmException;
@Experimental
public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
+    /**
+     * A descriptor for the type of dependencies processed or added by this
+     * analyzer.
+     */
+    public static final String DEPENDENCY_ECOSYSTEM = "Composer";
    /**
     * The logger.
     */
@@ -79,17 +83,13 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
    /**
     * Initializes the analyzer.
     *
+     * @param engine a reference to the dependency-check engine
     * @throws InitializationException thrown if an exception occurs getting an
     * instance of SHA1
     */
    @Override
-    protected void initializeFileTypeAnalyzer() throws InitializationException {
-        try {
-            getSha1MessageDigest();
-        } catch (IllegalStateException ex) {
-            setEnabled(false);
-            throw new InitializationException("Unable to create SHA1 MessageDigest", ex);
-        }
+    protected void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
+        // do nothing
    }
/** /**
@@ -103,20 +103,33 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
        try (FileInputStream fis = new FileInputStream(dependency.getActualFile())) {
            final ComposerLockParser clp = new ComposerLockParser(fis);
-            LOGGER.info("Checking composer.lock file {}", dependency.getActualFilePath());
+            LOGGER.debug("Checking composer.lock file {}", dependency.getActualFilePath());
            clp.process();
+            //if dependencies are found in the lock, then there is always an empty shell dependency left behind for the
+            //composer.lock. The first pass through, reuse the top level dependency, and add new ones for the rest.
+            boolean processedAtLeastOneDep = false;
            for (ComposerDependency dep : clp.getDependencies()) {
                final Dependency d = new Dependency(dependency.getActualFile());
-                d.setDisplayFileName(String.format("%s:%s/%s", dependency.getDisplayFileName(), dep.getGroup(), dep.getProject()));
-                final String filePath = String.format("%s:%s/%s", dependency.getFilePath(), dep.getGroup(), dep.getProject());
-                final MessageDigest sha1 = getSha1MessageDigest();
+                final String filePath = String.format("%s:%s/%s/%s", dependency.getFilePath(), dep.getGroup(), dep.getProject(), dep.getVersion());
+                d.setName(dep.getProject());
+                d.setVersion(dep.getVersion());
+                d.setEcosystem(DEPENDENCY_ECOSYSTEM);
                d.setFilePath(filePath);
-                d.setSha1sum(Checksum.getHex(sha1.digest(filePath.getBytes(Charset.defaultCharset()))));
-                d.getVendorEvidence().addEvidence(COMPOSER_LOCK, "vendor", dep.getGroup(), Confidence.HIGHEST);
-                d.getProductEvidence().addEvidence(COMPOSER_LOCK, "product", dep.getProject(), Confidence.HIGHEST);
-                d.getVersionEvidence().addEvidence(COMPOSER_LOCK, "version", dep.getVersion(), Confidence.HIGHEST);
-                LOGGER.info("Adding dependency {}", d);
-                engine.getDependencies().add(d);
+                d.setSha1sum(Checksum.getSHA1Checksum(filePath));
+                d.setMd5sum(Checksum.getMD5Checksum(filePath));
+                d.addEvidence(EvidenceType.VENDOR, COMPOSER_LOCK, "vendor", dep.getGroup(), Confidence.HIGHEST);
+                d.addEvidence(EvidenceType.PRODUCT, COMPOSER_LOCK, "product", dep.getProject(), Confidence.HIGHEST);
+                d.addEvidence(EvidenceType.VERSION, COMPOSER_LOCK, "version", dep.getVersion(), Confidence.HIGHEST);
+                LOGGER.debug("Adding dependency {}", d.getDisplayFileName());
+                engine.addDependency(d);
+                //make sure we only remove the main dependency if we went through this loop at least once.
+                processedAtLeastOneDep = true;
+            }
+            // remove the dependency at the end because it's referenced in the loop itself.
+            // double check the name to be sure we only remove the generic entry.
+            if (processedAtLeastOneDep && dependency.getDisplayFileName().equalsIgnoreCase("composer.lock")) {
+                LOGGER.debug("Removing main redundant dependency {}", dependency.getDisplayFileName());
+                engine.removeDependency(dependency);
            }
        } catch (IOException ex) {
            LOGGER.warn("Error opening dependency {}", dependency.getActualFilePath());
@@ -154,18 +167,4 @@ public class ComposerLockAnalyzer extends AbstractFileTypeAnalyzer {
    public AnalysisPhase getAnalysisPhase() {
        return AnalysisPhase.INFORMATION_COLLECTION;
    }
-    /**
-     * Returns the sha1 message digest.
-     *
-     * @return the sha1 message digest
-     */
-    private MessageDigest getSha1MessageDigest() {
-        try {
-            return MessageDigest.getInstance("SHA1");
-        } catch (NoSuchAlgorithmException e) {
-            LOGGER.error(e.getMessage());
-            throw new IllegalStateException("Failed to obtain the SHA1 message digest.", e);
-        }
-    }
}
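The synthetic file path built above is hashed to give each composer package a stable checksum. A rough sketch of what a helper like Checksum.getSHA1Checksum presumably does follows; the exact implementation is not shown in this diff, so treat it as an assumption.

// Hedged sketch: hash a string with SHA-1 and hex-encode it, approximating the
// Checksum.getSHA1Checksum(String) call used in the new ComposerLockAnalyzer code.
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class ChecksumSketch {
    static String sha1Hex(String value) throws NoSuchAlgorithmException {
        final MessageDigest md = MessageDigest.getInstance("SHA-1");
        final byte[] digest = md.digest(value.getBytes(StandardCharsets.UTF_8));
        final StringBuilder sb = new StringBuilder(digest.length * 2);
        for (byte b : digest) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        // e.g. composer.lock:monolog/monolog/1.23.0
        System.out.println(sha1Hex("composer.lock:monolog/monolog/1.23.0"));
    }
}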

View File

@@ -17,21 +17,19 @@
 */
package org.owasp.dependencycheck.analyzer;
-import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
-import org.owasp.dependencycheck.Engine;
-import org.owasp.dependencycheck.dependency.Dependency;
+import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.utils.Settings;
-import org.owasp.dependencycheck.xml.suppression.SuppressionRule;
/**
- * The suppression analyzer processes an externally defined XML document that complies with the suppressions.xsd schema.
- * Any identified CPE entries within the dependencies that match will be removed.
+ * The suppression analyzer processes an externally defined XML document that
+ * complies with the suppressions.xsd schema. Any identified CPE entries within
+ * the dependencies that match will be removed.
 *
 * @author Jeremy Long
 */
+@ThreadSafe
public class CpeSuppressionAnalyzer extends AbstractSuppressionAnalyzer {
-    //<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
    /**
     * The name of the analyzer.
     */
@@ -60,19 +58,6 @@ public class CpeSuppressionAnalyzer extends AbstractSuppressionAnalyzer {
    public AnalysisPhase getAnalysisPhase() {
        return ANALYSIS_PHASE;
    }
-    //</editor-fold>
-    @Override
-    protected void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
-        if (getRules() == null || getRules().size() <= 0) {
-            return;
-        }
-        for (final SuppressionRule rule : getRules()) {
-            rule.process(dependency);
-        }
-    }
/** /**
* <p> * <p>

View File

@@ -17,15 +17,14 @@
 */
package org.owasp.dependencycheck.analyzer;
+import com.vdurmont.semver4j.Semver;
+import com.vdurmont.semver4j.Semver.SemverType;
+import com.vdurmont.semver4j.SemverException;
import java.io.File;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.ListIterator;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.owasp.dependencycheck.Engine;
-import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
+import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.utils.DependencyVersion;
@@ -47,37 +46,19 @@ import org.slf4j.LoggerFactory;
 *
 * @author Jeremy Long
 */
-public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
+@ThreadSafe
+public class DependencyBundlingAnalyzer extends AbstractDependencyComparingAnalyzer {
    /**
     * The Logger.
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(DependencyBundlingAnalyzer.class);
-    //<editor-fold defaultstate="collapsed" desc="Constants and Member Variables">
    /**
     * A pattern for obtaining the first part of a filename.
     */
    private static final Pattern STARTING_TEXT_PATTERN = Pattern.compile("^[a-zA-Z0-9]*");
-    /**
-     * a flag indicating if this analyzer has run. This analyzer only runs once.
-     */
-    private boolean analyzed = false;
-    /**
-     * Returns a flag indicating if this analyzer has run. This analyzer only
-     * runs once. Note this is currently only used in the unit tests.
-     *
-     * @return a flag indicating if this analyzer has run. This analyzer only
-     * runs once
-     */
-    protected synchronized boolean getAnalyzed() {
-        return analyzed;
-    }
-    //</editor-fold>
-    //<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
    /**
     * The name of the analyzer.
     */
@@ -106,19 +87,6 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
    public AnalysisPhase getAnalysisPhase() {
        return ANALYSIS_PHASE;
    }
-    //</editor-fold>
-    /**
-     * Does not support parallel processing as it only runs once and then
-     * operates on <em>all</em> dependencies.
-     *
-     * @return whether or not parallel processing is enabled
-     * @see #analyze(Dependency, Engine)
-     */
-    @Override
-    public boolean supportsParallelProcessing() {
-        return false;
-    }
/** /**
* <p> * <p>
@@ -132,65 +100,56 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
    }
    /**
-     * Analyzes a set of dependencies. If they have been found to have the same
-     * base path and the same set of identifiers they are likely related. The
-     * related dependencies are bundled into a single reportable item.
+     * Evaluates the dependencies
     *
-     * @param ignore this analyzer ignores the dependency being analyzed
-     * @param engine the engine that is scanning the dependencies
-     * @throws AnalysisException is thrown if there is an error reading the JAR
-     * file.
+     * @param dependency a dependency to compare
+     * @param nextDependency a dependency to compare
+     * @param dependenciesToRemove a set of dependencies that will be removed
+     * @return true if a dependency is removed; otherwise false
     */
    @Override
-    protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException {
-        if (!analyzed) {
-            analyzed = true;
-            final Set<Dependency> dependenciesToRemove = new HashSet<>();
-            final ListIterator<Dependency> mainIterator = engine.getDependencies().listIterator();
-            //for (Dependency nextDependency : engine.getDependencies()) {
-            while (mainIterator.hasNext()) {
-                final Dependency dependency = mainIterator.next();
-                if (mainIterator.hasNext() && !dependenciesToRemove.contains(dependency)) {
-                    final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex());
-                    while (subIterator.hasNext()) {
-                        final Dependency nextDependency = subIterator.next();
-                        if (hashesMatch(dependency, nextDependency)) {
-                            if (!containedInWar(dependency.getFilePath())
-                                    && !containedInWar(nextDependency.getFilePath())) {
-                                if (firstPathIsShortest(dependency.getFilePath(), nextDependency.getFilePath())) {
-                                    mergeDependencies(dependency, nextDependency, dependenciesToRemove);
-                                } else {
-                                    mergeDependencies(nextDependency, dependency, dependenciesToRemove);
-                                    break; //since we merged into the next dependency - skip forward to the next in mainIterator
-                                }
-                            }
-                        } else if (isShadedJar(dependency, nextDependency)) {
-                            if (dependency.getFileName().toLowerCase().endsWith("pom.xml")) {
-                                mergeDependencies(nextDependency, dependency, dependenciesToRemove);
-                                nextDependency.getRelatedDependencies().remove(dependency);
-                                break;
-                            } else {
-                                mergeDependencies(dependency, nextDependency, dependenciesToRemove);
-                                dependency.getRelatedDependencies().remove(nextDependency);
-                            }
-                        } else if (cpeIdentifiersMatch(dependency, nextDependency)
-                                && hasSameBasePath(dependency, nextDependency)
-                                && vulnCountMatches(dependency, nextDependency)
-                                && fileNameMatch(dependency, nextDependency)) {
-                            if (isCore(dependency, nextDependency)) {
-                                mergeDependencies(dependency, nextDependency, dependenciesToRemove);
-                            } else {
-                                mergeDependencies(nextDependency, dependency, dependenciesToRemove);
-                                break; //since we merged into the next dependency - skip forward to the next in mainIterator
-                            }
-                        }
-                    }
-                }
-            }
-            //removing dependencies here as ensuring correctness and avoiding ConcurrentUpdateExceptions
-            // was difficult because of the inner iterator.
-            engine.getDependencies().removeAll(dependenciesToRemove);
-        }
-    }
+    protected boolean evaluateDependencies(final Dependency dependency, final Dependency nextDependency, final Set<Dependency> dependenciesToRemove) {
+        if (hashesMatch(dependency, nextDependency)) {
+            if (!containedInWar(dependency.getFilePath())
+                    && !containedInWar(nextDependency.getFilePath())) {
+                if (firstPathIsShortest(dependency.getFilePath(), nextDependency.getFilePath())) {
+                    mergeDependencies(dependency, nextDependency, dependenciesToRemove);
+                } else {
+                    mergeDependencies(nextDependency, dependency, dependenciesToRemove);
+                    return true; //since we merged into the next dependency - skip forward to the next in mainIterator
+                }
+            }
+        } else if (isShadedJar(dependency, nextDependency)) {
+            if (dependency.getFileName().toLowerCase().endsWith("pom.xml")) {
+                mergeDependencies(nextDependency, dependency, dependenciesToRemove);
+                nextDependency.removeRelatedDependencies(dependency);
+                return true;
+            } else {
+                mergeDependencies(dependency, nextDependency, dependenciesToRemove);
+                dependency.removeRelatedDependencies(nextDependency);
+            }
+        } else if (cpeIdentifiersMatch(dependency, nextDependency)
+                && hasSameBasePath(dependency, nextDependency)
+                && vulnCountMatches(dependency, nextDependency)
+                && fileNameMatch(dependency, nextDependency)) {
+            if (isCore(dependency, nextDependency)) {
+                mergeDependencies(dependency, nextDependency, dependenciesToRemove);
+            } else {
+                mergeDependencies(nextDependency, dependency, dependenciesToRemove);
+                return true; //since we merged into the next dependency - skip forward to the next in mainIterator
+            }
+        } else if (ecoSystemIs(AbstractNpmAnalyzer.NPM_DEPENDENCY_ECOSYSTEM, dependency, nextDependency)
+                && namesAreEqual(dependency, nextDependency)
+                && npmVersionsMatch(dependency.getVersion(), nextDependency.getVersion())) {
+            if (!dependency.isVirtual()) {
+                DependencyMergingAnalyzer.mergeDependencies(dependency, nextDependency, dependenciesToRemove);
+            } else {
+                DependencyMergingAnalyzer.mergeDependencies(nextDependency, dependency, dependenciesToRemove);
+                return true;
+            }
+        }
+        return false;
+    }
/** /**
@@ -203,17 +162,19 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
     * removed from the main analysis loop, this function adds to this
     * collection
     */
-    private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
+    public static void mergeDependencies(final Dependency dependency,
+            final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) {
        dependency.addRelatedDependency(relatedDependency);
-        final Iterator<Dependency> i = relatedDependency.getRelatedDependencies().iterator();
-        while (i.hasNext()) {
-            dependency.addRelatedDependency(i.next());
-            i.remove();
-        }
+        for (Dependency d : relatedDependency.getRelatedDependencies()) {
+            dependency.addRelatedDependency(d);
+            relatedDependency.removeRelatedDependencies(d);
+        }
        if (dependency.getSha1sum().equals(relatedDependency.getSha1sum())) {
            dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
        }
-        dependenciesToRemove.add(relatedDependency);
+        if (dependenciesToRemove != null) {
+            dependenciesToRemove.add(relatedDependency);
+        }
    }
/** /**
@@ -237,9 +198,10 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
        if (tmp <= 0) {
            return path;
        }
-        if (tmp > 0) {
-            pos = tmp + 1;
-        }
+        //below is always true
+        //if (tmp > 0) {
+        pos = tmp + 1;
+        //}
        tmp = path.indexOf(File.separator, pos);
        if (tmp > 0) {
            pos = tmp + 1;
@@ -439,6 +401,10 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
     * between the two collections match; otherwise false
     */
    private boolean isShadedJar(Dependency dependency, Dependency nextDependency) {
+        if (dependency == null || dependency.getFileName() == null
+                || nextDependency == null || nextDependency.getFileName() == null) {
+            return false;
+        }
        final String mainName = dependency.getFileName().toLowerCase();
        final String nextName = nextDependency.getFileName().toLowerCase();
        if (mainName.endsWith(".jar") && nextName.endsWith("pom.xml")) {
@@ -502,4 +468,108 @@ public class DependencyBundlingAnalyzer extends AbstractAnalyzer {
return filePath != null && filePath.matches(".*\\.(ear|war)[\\\\/].*"); return filePath != null && filePath.matches(".*\\.(ear|war)[\\\\/].*");
} }
/**
* Determine if the dependency ecosystem is equal in the given dependencies.
*
* @param ecoSystem the ecosystem to validate against
* @param dependency a dependency to compare
* @param nextDependency a dependency to compare
* @return true if the ecosystem is equal in both dependencies; otherwise
* false
*/
private boolean ecoSystemIs(String ecoSystem, Dependency dependency, Dependency nextDependency) {
return ecoSystem.equals(dependency.getEcosystem()) && ecoSystem.equals(nextDependency.getEcosystem());
}
/**
* Determine if the dependency name is equal in the given dependencies.
*
* @param dependency a dependency to compare
* @param nextDependency a dependency to compare
* @return true if the name is equal in both dependencies; otherwise false
*/
private boolean namesAreEqual(Dependency dependency, Dependency nextDependency) {
return dependency.getName() != null && dependency.getName().equals(nextDependency.getName());
}
/**
* Determine if the dependency version is equal in the given dependencies.
* This method attempts to evaluate version range checks.
*
* @param current a dependency version to compare
* @param next a dependency version to compare
* @return true if the version is equal in both dependencies; otherwise
* false
*/
public static boolean npmVersionsMatch(String current, String next) {
String left = current;
String right = next;
if (left == null || right == null) {
return false;
}
if (left.equals(right) || "*".equals(left) || "*".equals(right)) {
return true;
}
if (left.contains(" ")) { // we have a version string from package.json
if (right.contains(" ")) { // we can't evaluate this ">=1.5.4 <2.0.0" vs "2 || 3"
return false;
}
if (!right.matches("^\\d.*$")) {
right = stripLeadingNonNumeric(right);
if (right == null) {
return false;
}
}
try {
final Semver v = new Semver(right, SemverType.NPM);
return v.satisfies(left);
} catch (SemverException ex) {
LOGGER.trace("ignore", ex);
}
} else {
if (!left.matches("^\\d.*$")) {
left = stripLeadingNonNumeric(left);
if (left == null || left.isEmpty()) {
return false;
}
}
try {
Semver v = new Semver(left, SemverType.NPM);
if (!right.isEmpty() && v.satisfies(right)) {
return true;
}
if (!right.contains(" ")) {
left = current;
right = stripLeadingNonNumeric(right);
if (right != null) {
v = new Semver(right, SemverType.NPM);
return v.satisfies(left);
}
}
} catch (SemverException ex) {
LOGGER.trace("ignore", ex);
} catch (NullPointerException ex) {
LOGGER.error("SemVer comparison error: left:\"{}\", right:\"{}\"", left, right);
LOGGER.debug("SemVer comparison resulted in NPE", ex);
}
}
return false;
}
/**
* Strips leading non-numeric values from the start of the string. If no
* numbers are present this will return null.
*
* @param str the string to modify
* @return the string without leading non-numeric characters
*/
private static String stripLeadingNonNumeric(String str) {
for (int x = 0; x < str.length(); x++) {
if (Character.isDigit(str.codePointAt(x))) {
return str.substring(x);
}
}
return null;
}
} }
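A small usage sketch of the semver4j comparison that the new npmVersionsMatch relies on. The library calls (Semver, SemverType.NPM, satisfies) appear in the patch itself; the sample version strings below are made up for illustration.

// Illustrative use of semver4j NPM-style range matching, as in npmVersionsMatch.
import com.vdurmont.semver4j.Semver;
import com.vdurmont.semver4j.Semver.SemverType;

public class SemverSketch {
    public static void main(String[] args) {
        final Semver version = new Semver("1.5.7", SemverType.NPM);
        System.out.println(version.satisfies(">=1.5.4 <2.0.0")); // true
        System.out.println(version.satisfies("^2.0.0"));         // false
    }
}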

View File

@@ -18,13 +18,10 @@
package org.owasp.dependencycheck.analyzer;
import java.io.File;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.ListIterator;
import java.util.Set;
-import org.owasp.dependencycheck.Engine;
-import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency;
+import org.owasp.dependencycheck.dependency.Evidence;
+import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -36,31 +33,12 @@ import org.slf4j.LoggerFactory;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
public class DependencyMergingAnalyzer extends AbstractAnalyzer { public class DependencyMergingAnalyzer extends AbstractDependencyComparingAnalyzer {
//<editor-fold defaultstate="collapsed" desc="Constants and Member Variables">
/** /**
* The Logger. * The Logger.
*/ */
private static final Logger LOGGER = LoggerFactory.getLogger(DependencyMergingAnalyzer.class); private static final Logger LOGGER = LoggerFactory.getLogger(DependencyMergingAnalyzer.class);
/**
* a flag indicating if this analyzer has run. This analyzer only runs once.
*/
private boolean analyzed = false;
/**
* Returns a flag indicating if this analyzer has run. This analyzer only
* runs once. Note this is currently only used in the unit tests.
*
* @return a flag indicating if this analyzer has run. This analyzer only
* runs once
*/
protected synchronized boolean getAnalyzed() {
return analyzed;
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/** /**
* The name of the analyzer. * The name of the analyzer.
*/ */
@@ -90,18 +68,6 @@ public class DependencyMergingAnalyzer extends AbstractAnalyzer {
return ANALYSIS_PHASE; return ANALYSIS_PHASE;
} }
/**
* Does not support parallel processing as it only runs once and then
* operates on <em>all</em> dependencies.
*
* @return whether or not parallel processing is enabled
* @see #analyze(Dependency, Engine)
*/
@Override
public boolean supportsParallelProcessing() {
return false;
}
/** /**
* <p> * <p>
* Returns the setting key to determine if the analyzer is enabled.</p> * Returns the setting key to determine if the analyzer is enabled.</p>
@@ -112,55 +78,36 @@ public class DependencyMergingAnalyzer extends AbstractAnalyzer {
protected String getAnalyzerEnabledSettingKey() { protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_DEPENDENCY_MERGING_ENABLED; return Settings.KEYS.ANALYZER_DEPENDENCY_MERGING_ENABLED;
} }
//</editor-fold>
/** /**
* Analyzes a set of dependencies. If they have been found to be the same * Evaluates the dependencies
* dependency created by more multiple FileTypeAnalyzers (i.e. a gemspec
* dependency and a dependency from the Bundle Audit Analyzer. The
* dependencies are then merged into a single reportable item.
* *
* @param ignore this analyzer ignores the dependency being analyzed * @param dependency a dependency to compare
* @param engine the engine that is scanning the dependencies * @param nextDependency a dependency to compare
* @throws AnalysisException is thrown if there is an error reading the JAR * @param dependenciesToRemove a set of dependencies that will be removed
* file. * @return true if a dependency is removed; otherwise false
*/ */
@Override @Override
protected synchronized void analyzeDependency(Dependency ignore, Engine engine) throws AnalysisException { protected boolean evaluateDependencies(final Dependency dependency, final Dependency nextDependency, final Set<Dependency> dependenciesToRemove) {
if (!analyzed) { Dependency main;
analyzed = true; //CSOFF: InnerAssignment
final Set<Dependency> dependenciesToRemove = new HashSet<>(); if ((main = getMainGemspecDependency(dependency, nextDependency)) != null) {
final ListIterator<Dependency> mainIterator = engine.getDependencies().listIterator(); if (main == dependency) {
//for (Dependency nextDependency : engine.getDependencies()) { mergeDependencies(dependency, nextDependency, dependenciesToRemove);
while (mainIterator.hasNext()) { } else {
final Dependency dependency = mainIterator.next(); mergeDependencies(nextDependency, dependency, dependenciesToRemove);
if (mainIterator.hasNext() && !dependenciesToRemove.contains(dependency)) { return true; //since we merged into the next dependency - skip forward to the next in mainIterator
final ListIterator<Dependency> subIterator = engine.getDependencies().listIterator(mainIterator.nextIndex()); }
while (subIterator.hasNext()) { } else if ((main = getMainSwiftDependency(dependency, nextDependency)) != null) {
final Dependency nextDependency = subIterator.next(); if (main == dependency) {
Dependency main; mergeDependencies(dependency, nextDependency, dependenciesToRemove);
if ((main = getMainGemspecDependency(dependency, nextDependency)) != null) { } else {
if (main == dependency) { mergeDependencies(nextDependency, dependency, dependenciesToRemove);
mergeDependencies(dependency, nextDependency, dependenciesToRemove); return true; //since we merged into the next dependency - skip forward to the next in mainIterator
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
} else if ((main = getMainSwiftDependency(dependency, nextDependency)) != null) {
if (main == dependency) {
mergeDependencies(dependency, nextDependency, dependenciesToRemove);
} else {
mergeDependencies(nextDependency, dependency, dependenciesToRemove);
break; //since we merged into the next dependency - skip forward to the next in mainIterator
}
}
}
}
} }
//removing dependencies here as ensuring correctness and avoiding ConcurrentUpdateExceptions
// was difficult because of the inner iterator.
engine.getDependencies().removeAll(dependenciesToRemove);
} }
//CSON: InnerAssignment
return false;
} }
/** /**
@@ -173,22 +120,28 @@ public class DependencyMergingAnalyzer extends AbstractAnalyzer {
* removed from the main analysis loop, this function adds to this * removed from the main analysis loop, this function adds to this
* collection * collection
*/ */
private void mergeDependencies(final Dependency dependency, final Dependency relatedDependency, final Set<Dependency> dependenciesToRemove) { public static void mergeDependencies(final Dependency dependency, final Dependency relatedDependency,
final Set<Dependency> dependenciesToRemove) {
LOGGER.debug("Merging '{}' into '{}'", relatedDependency.getFilePath(), dependency.getFilePath()); LOGGER.debug("Merging '{}' into '{}'", relatedDependency.getFilePath(), dependency.getFilePath());
dependency.addRelatedDependency(relatedDependency); dependency.addRelatedDependency(relatedDependency);
dependency.getVendorEvidence().getEvidence().addAll(relatedDependency.getVendorEvidence().getEvidence()); for (Evidence e : relatedDependency.getEvidence(EvidenceType.VENDOR)) {
dependency.getProductEvidence().getEvidence().addAll(relatedDependency.getProductEvidence().getEvidence()); dependency.addEvidence(EvidenceType.VENDOR, e);
dependency.getVersionEvidence().getEvidence().addAll(relatedDependency.getVersionEvidence().getEvidence()); }
for (Evidence e : relatedDependency.getEvidence(EvidenceType.PRODUCT)) {
dependency.addEvidence(EvidenceType.PRODUCT, e);
}
for (Evidence e : relatedDependency.getEvidence(EvidenceType.VERSION)) {
dependency.addEvidence(EvidenceType.VERSION, e);
}
final Iterator<Dependency> i = relatedDependency.getRelatedDependencies().iterator(); for (Dependency d : relatedDependency.getRelatedDependencies()) {
while (i.hasNext()) { dependency.addRelatedDependency(d);
dependency.addRelatedDependency(i.next()); relatedDependency.removeRelatedDependencies(d);
i.remove();
} }
if (dependency.getSha1sum().equals(relatedDependency.getSha1sum())) { dependency.addAllProjectReferences(relatedDependency.getProjectReferences());
dependency.addAllProjectReferences(relatedDependency.getProjectReferences()); if (dependenciesToRemove != null) {
dependenciesToRemove.add(relatedDependency);
} }
dependenciesToRemove.add(relatedDependency);
} }
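Taken together, the restructure moves the pairwise iteration out of this class and into AbstractDependencyComparingAnalyzer. That base class is not part of this diff, so the following is only a rough sketch of the driving loop it appears to factor out, reconstructed from the analyzeDependency() body removed above; the method name compareAll is hypothetical.

// Rough sketch only - AbstractDependencyComparingAnalyzer is not shown in this diff.
// Mirrors the nested iteration removed from analyzeDependency() above.
protected void compareAll(Engine engine) {
    final Set<Dependency> dependenciesToRemove = new HashSet<>();
    final Dependency[] deps = engine.getDependencies();
    for (int i = 0; i < deps.length; i++) {
        if (dependenciesToRemove.contains(deps[i])) {
            continue; // already merged into another dependency
        }
        for (int j = i + 1; j < deps.length; j++) {
            // a true return means deps[i] was merged into deps[j];
            // skip forward to the next outer dependency
            if (evaluateDependencies(deps[i], deps[j], dependenciesToRemove)) {
                break;
            }
        }
    }
    // removal happens once, after iteration, to avoid concurrent modification
    for (Dependency d : dependenciesToRemove) {
        engine.removeDependency(d);
    }
}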
/** /**

View File

@@ -22,15 +22,18 @@ import java.io.UnsupportedEncodingException;
import java.net.URLEncoder; import java.net.URLEncoder;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.Iterator; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.ListIterator; import java.util.ListIterator;
import java.util.Set; import java.util.Set;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import javax.annotation.concurrent.ThreadSafe;
import org.owasp.dependencycheck.Engine; import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.Evidence;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.dependency.Identifier; import org.owasp.dependencycheck.dependency.Identifier;
import org.owasp.dependencycheck.dependency.VulnerableSoftware; import org.owasp.dependencycheck.dependency.VulnerableSoftware;
import org.owasp.dependencycheck.utils.FileFilterBuilder; import org.owasp.dependencycheck.utils.FileFilterBuilder;
@@ -44,17 +47,37 @@ import org.slf4j.LoggerFactory;
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@ThreadSafe
public class FalsePositiveAnalyzer extends AbstractAnalyzer { public class FalsePositiveAnalyzer extends AbstractAnalyzer {
/** /**
* The Logger. * The Logger.
*/ */
private static final Logger LOGGER = LoggerFactory.getLogger(FalsePositiveAnalyzer.class); private static final Logger LOGGER = LoggerFactory.getLogger(FalsePositiveAnalyzer.class);
/** /**
* The file filter used to find DLL and EXE. * The file filter used to find DLL and EXE.
*/ */
private static final FileFilter DLL_EXE_FILTER = FileFilterBuilder.newInstance().addExtensions("dll", "exe").build(); private static final FileFilter DLL_EXE_FILTER = FileFilterBuilder.newInstance().addExtensions("dll", "exe").build();
/**
* Regex to identify core java libraries and a few other commonly
* misidentified ones.
*/
public static final Pattern CORE_JAVA = Pattern.compile("^cpe:/a:(sun|oracle|ibm):(j2[ems]e|"
+ "java(_platform_micro_edition|_runtime_environment|_se|virtual_machine|se_development_kit|fx)?|"
+ "jdk|jre|jsse)($|:.*)");
/**
* Regex to identify core jsf libraries.
*/
public static final Pattern CORE_JAVA_JSF = Pattern.compile("^cpe:/a:(sun|oracle|ibm):jsf($|:.*)");
/**
* Regex to identify core java library files. This is currently incomplete.
*/
public static final Pattern CORE_FILES = Pattern.compile("(^|/)((alt[-])?rt|jsse|jfxrt|jfr|jce|javaws|deploy|charsets)\\.jar$");
/**
* Regex to identify core jsf java library files. This is currently
* incomplete.
*/
public static final Pattern CORE_JSF_FILES = Pattern.compile("(^|/)jsf[-][^/]*\\.jar$");
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer"> //<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/** /**
@@ -136,19 +159,19 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
} }
} }
} }
if (mustContain if (mustContain != null) {
!= null) { final Set<Identifier> removalSet = new HashSet<>();
final Iterator<Identifier> itr = dependency.getIdentifiers().iterator(); for (Identifier i : dependency.getIdentifiers()) {
while (itr.hasNext()) {
final Identifier i = itr.next();
if ("cpe".contains(i.getType()) if ("cpe".contains(i.getType())
&& i.getValue() != null && i.getValue() != null
&& i.getValue().startsWith("cpe:/a:springsource:") && i.getValue().startsWith("cpe:/a:springsource:")
&& !i.getValue().toLowerCase().contains(mustContain)) { && !i.getValue().toLowerCase().contains(mustContain)) {
itr.remove(); removalSet.add(i);
//dependency.getIdentifiers().remove(i);
} }
} }
for (Identifier i : removalSet) {
dependency.removeIdentifier(i);
}
} }
} }
@@ -199,42 +222,21 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
//how did we get here? //how did we get here?
LOGGER.debug("currentVersion and nextVersion are both null?"); LOGGER.debug("currentVersion and nextVersion are both null?");
} else if (currentVersion == null && nextVersion != null) { } else if (currentVersion == null && nextVersion != null) {
dependency.getIdentifiers().remove(currentId); dependency.removeIdentifier(currentId);
} else if (nextVersion == null && currentVersion != null) { } else if (nextVersion == null && currentVersion != null) {
dependency.getIdentifiers().remove(nextId); dependency.removeIdentifier(nextId);
} else if (currentVersion.length() < nextVersion.length()) { } else if (currentVersion.length() < nextVersion.length()) {
if (nextVersion.startsWith(currentVersion) || "-".equals(currentVersion)) { if (nextVersion.startsWith(currentVersion) || "-".equals(currentVersion)) {
dependency.getIdentifiers().remove(currentId); dependency.removeIdentifier(currentId);
} }
} else if (currentVersion.startsWith(nextVersion) || "-".equals(nextVersion)) { } else if (currentVersion.startsWith(nextVersion) || "-".equals(nextVersion)) {
dependency.getIdentifiers().remove(nextId); dependency.removeIdentifier(nextId);
} }
} }
} }
} }
} }
} }
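As a quick illustration of the prefix rule above (the extraction of currentVersion and nextVersion happens before this hunk, so the values here are hypothetical):

// Hypothetical values - only the comparison logic shown above is illustrated.
String currentVersion = "2.1";     // e.g. taken from cpe:/a:apache:struts:2.1
String nextVersion = "2.1.2";      // e.g. taken from cpe:/a:apache:struts:2.1.2
// currentVersion is shorter and a prefix of nextVersion, so the less specific
// identifier (currentId) is removed and the more specific CPE is kept.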
/**
* Regex to identify core java libraries and a few other commonly
* misidentified ones.
*/
public static final Pattern CORE_JAVA = Pattern.compile("^cpe:/a:(sun|oracle|ibm):(j2[ems]e|"
+ "java(_platform_micro_edition|_runtime_environment|_se|virtual_machine|se_development_kit|fx)?|"
+ "jdk|jre|jsse)($|:.*)");
/**
* Regex to identify core jsf libraries.
*/
public static final Pattern CORE_JAVA_JSF = Pattern.compile("^cpe:/a:(sun|oracle|ibm):jsf($|:.*)");
/**
* Regex to identify core java library files. This is currently incomplete.
*/
public static final Pattern CORE_FILES = Pattern.compile("(^|/)((alt[-])?rt|jsse|jfxrt|jfr|jce|javaws|deploy|charsets)\\.jar$");
/**
* Regex to identify core jsf java library files. This is currently
* incomplete.
*/
public static final Pattern CORE_JSF_FILES = Pattern.compile("(^|/)jsf[-][^/]*\\.jar$");
/** /**
* Removes any CPE entries for the JDK/JRE unless the filename ends with * Removes any CPE entries for the JDK/JRE unless the filename ends with
@@ -243,21 +245,22 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
* @param dependency the dependency to remove JRE CPEs from * @param dependency the dependency to remove JRE CPEs from
*/ */
private void removeJreEntries(Dependency dependency) { private void removeJreEntries(Dependency dependency) {
final Set<Identifier> identifiers = dependency.getIdentifiers(); final Set<Identifier> removalSet = new HashSet<>();
final Iterator<Identifier> itr = identifiers.iterator(); for (Identifier i : dependency.getIdentifiers()) {
while (itr.hasNext()) {
final Identifier i = itr.next();
final Matcher coreCPE = CORE_JAVA.matcher(i.getValue()); final Matcher coreCPE = CORE_JAVA.matcher(i.getValue());
final Matcher coreFiles = CORE_FILES.matcher(dependency.getFileName()); final Matcher coreFiles = CORE_FILES.matcher(dependency.getFileName());
if (coreCPE.matches() && !coreFiles.matches()) { if (coreCPE.matches() && !coreFiles.matches()) {
itr.remove(); removalSet.add(i);
} }
final Matcher coreJsfCPE = CORE_JAVA_JSF.matcher(i.getValue()); final Matcher coreJsfCPE = CORE_JAVA_JSF.matcher(i.getValue());
final Matcher coreJsfFiles = CORE_JSF_FILES.matcher(dependency.getFileName()); final Matcher coreJsfFiles = CORE_JSF_FILES.matcher(dependency.getFileName());
if (coreJsfCPE.matches() && !coreJsfFiles.matches()) { if (coreJsfCPE.matches() && !coreJsfFiles.matches()) {
itr.remove(); removalSet.add(i);
} }
} }
for (Identifier i : removalSet) {
dependency.removeIdentifier(i);
}
} }
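To see how these patterns interact in removeJreEntries, a small self-contained illustration (the file and CPE values are hypothetical; the expected results follow directly from the regular expressions defined above):

import java.util.regex.Pattern;

// Illustrative only: exercises the CORE_JAVA / CORE_FILES patterns defined above.
public class JreFalsePositiveSketch {
    public static void main(String[] args) {
        final Pattern coreJava = Pattern.compile("^cpe:/a:(sun|oracle|ibm):(j2[ems]e|"
                + "java(_platform_micro_edition|_runtime_environment|_se|virtual_machine|se_development_kit|fx)?|"
                + "jdk|jre|jsse)($|:.*)");
        final Pattern coreFiles = Pattern.compile("(^|/)((alt[-])?rt|jsse|jfxrt|jfr|jce|javaws|deploy|charsets)\\.jar$");

        // A JRE CPE attached to an ordinary jar is treated as a false positive...
        System.out.println(coreJava.matcher("cpe:/a:oracle:jre:1.8.0").matches()); // true
        System.out.println(coreFiles.matcher("commons-lang-2.6.jar").matches());   // false -> CPE removed

        // ...but the same CPE on an actual core runtime file is kept.
        System.out.println(coreFiles.matcher("rt.jar").matches());                 // true  -> CPE kept
    }
}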
/** /**
@@ -288,9 +291,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
* *
* @param dependency the dependency to analyze * @param dependency the dependency to analyze
*/ */
private void removeBadMatches(Dependency dependency) { protected void removeBadMatches(Dependency dependency) {
final Set<Identifier> identifiers = dependency.getIdentifiers();
final Iterator<Identifier> itr = identifiers.iterator();
/* TODO - can we utilize the pom's groupid and artifactId to filter??? most of /* TODO - can we utilize the pom's groupid and artifactId to filter??? most of
* these are due to low quality data. Other idea would be to say any CPE * these are due to low quality data. Other idea would be to say any CPE
@@ -299,8 +300,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
*/ */
//Set<Evidence> groupId = dependency.getVendorEvidence().getEvidence("pom", "groupid"); //Set<Evidence> groupId = dependency.getVendorEvidence().getEvidence("pom", "groupid");
//Set<Evidence> artifactId = dependency.getVendorEvidence().getEvidence("pom", "artifactid"); //Set<Evidence> artifactId = dependency.getVendorEvidence().getEvidence("pom", "artifactid");
while (itr.hasNext()) { for (Identifier i : dependency.getIdentifiers()) {
final Identifier i = itr.next();
//TODO move this startsWith expression to the base suppression file //TODO move this startsWith expression to the base suppression file
if ("cpe".equals(i.getType())) { if ("cpe".equals(i.getType())) {
if ((i.getValue().matches(".*c\\+\\+.*") if ((i.getValue().matches(".*c\\+\\+.*")
@@ -324,7 +324,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|| dependency.getFileName().toLowerCase().endsWith(".tgz") || dependency.getFileName().toLowerCase().endsWith(".tgz")
|| dependency.getFileName().toLowerCase().endsWith(".ear") || dependency.getFileName().toLowerCase().endsWith(".ear")
|| dependency.getFileName().toLowerCase().endsWith(".war"))) { || dependency.getFileName().toLowerCase().endsWith(".war"))) {
itr.remove(); //itr.remove();
dependency.removeIdentifier(i);
} else if ((i.getValue().startsWith("cpe:/a:jquery:jquery") } else if ((i.getValue().startsWith("cpe:/a:jquery:jquery")
|| i.getValue().startsWith("cpe:/a:prototypejs:prototype") || i.getValue().startsWith("cpe:/a:prototypejs:prototype")
|| i.getValue().startsWith("cpe:/a:yahoo:yui")) || i.getValue().startsWith("cpe:/a:yahoo:yui"))
@@ -332,7 +333,8 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|| dependency.getFileName().toLowerCase().endsWith("pom.xml") || dependency.getFileName().toLowerCase().endsWith("pom.xml")
|| dependency.getFileName().toLowerCase().endsWith(".dll") || dependency.getFileName().toLowerCase().endsWith(".dll")
|| dependency.getFileName().toLowerCase().endsWith(".exe"))) { || dependency.getFileName().toLowerCase().endsWith(".exe"))) {
itr.remove(); //itr.remove();
dependency.removeIdentifier(i);
} else if ((i.getValue().startsWith("cpe:/a:microsoft:excel") } else if ((i.getValue().startsWith("cpe:/a:microsoft:excel")
|| i.getValue().startsWith("cpe:/a:microsoft:word") || i.getValue().startsWith("cpe:/a:microsoft:word")
|| i.getValue().startsWith("cpe:/a:microsoft:visio") || i.getValue().startsWith("cpe:/a:microsoft:visio")
@@ -343,16 +345,36 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|| dependency.getFileName().toLowerCase().endsWith(".ear") || dependency.getFileName().toLowerCase().endsWith(".ear")
|| dependency.getFileName().toLowerCase().endsWith(".war") || dependency.getFileName().toLowerCase().endsWith(".war")
|| dependency.getFileName().toLowerCase().endsWith("pom.xml"))) { || dependency.getFileName().toLowerCase().endsWith("pom.xml"))) {
itr.remove(); //itr.remove();
dependency.removeIdentifier(i);
} else if (i.getValue().startsWith("cpe:/a:apache:maven") } else if (i.getValue().startsWith("cpe:/a:apache:maven")
&& !dependency.getFileName().toLowerCase().matches("maven-core-[\\d\\.]+\\.jar")) { && !dependency.getFileName().toLowerCase().matches("maven-core-[\\d\\.]+\\.jar")) {
itr.remove(); //itr.remove();
} else if (i.getValue().startsWith("cpe:/a:m-core:m-core") dependency.removeIdentifier(i);
&& !dependency.getEvidenceUsed().containsUsedString("m-core")) { } else if (i.getValue().startsWith("cpe:/a:m-core:m-core")) {
itr.remove(); boolean found = false;
for (Evidence e : dependency.getEvidence(EvidenceType.PRODUCT)) {
if ("m-core".equalsIgnoreCase(e.getValue())) {
found = true;
break;
}
}
if (!found) {
for (Evidence e : dependency.getEvidence(EvidenceType.VENDOR)) {
if ("m-core".equalsIgnoreCase(e.getValue())) {
found = true;
break;
}
}
}
if (!found) {
//itr.remove();
dependency.removeIdentifier(i);
}
} else if (i.getValue().startsWith("cpe:/a:jboss:jboss") } else if (i.getValue().startsWith("cpe:/a:jboss:jboss")
&& !dependency.getFileName().toLowerCase().matches("jboss-?[\\d\\.-]+(GA)?\\.jar")) { && !dependency.getFileName().toLowerCase().matches("jboss-?[\\d\\.-]+(GA)?\\.jar")) {
itr.remove(); //itr.remove();
dependency.removeIdentifier(i);
} }
} }
} }
@@ -365,31 +387,30 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
* @param dependency the dependency to analyze * @param dependency the dependency to analyze
*/ */
private void removeWrongVersionMatches(Dependency dependency) { private void removeWrongVersionMatches(Dependency dependency) {
final Set<Identifier> identifiers = dependency.getIdentifiers(); final Set<Identifier> identifiersToRemove = new HashSet<>();
final Iterator<Identifier> itr = identifiers.iterator();
final String fileName = dependency.getFileName(); final String fileName = dependency.getFileName();
if (fileName != null && fileName.contains("axis2")) { if (fileName != null && fileName.contains("axis2")) {
while (itr.hasNext()) { for (Identifier i : dependency.getIdentifiers()) {
final Identifier i = itr.next();
if ("cpe".equals(i.getType())) { if ("cpe".equals(i.getType())) {
final String cpe = i.getValue(); final String cpe = i.getValue();
if (cpe != null && (cpe.startsWith("cpe:/a:apache:axis:") || "cpe:/a:apache:axis".equals(cpe))) { if (cpe != null && (cpe.startsWith("cpe:/a:apache:axis:") || "cpe:/a:apache:axis".equals(cpe))) {
itr.remove(); identifiersToRemove.add(i);
} }
} }
} }
} else if (fileName != null && fileName.contains("axis")) { } else if (fileName != null && fileName.contains("axis")) {
while (itr.hasNext()) { for (Identifier i : dependency.getIdentifiers()) {
final Identifier i = itr.next();
if ("cpe".equals(i.getType())) { if ("cpe".equals(i.getType())) {
final String cpe = i.getValue(); final String cpe = i.getValue();
if (cpe != null && (cpe.startsWith("cpe:/a:apache:axis2:") || "cpe:/a:apache:axis2".equals(cpe))) { if (cpe != null && (cpe.startsWith("cpe:/a:apache:axis2:") || "cpe:/a:apache:axis2".equals(cpe))) {
itr.remove(); identifiersToRemove.add(i);
} }
} }
} }
} }
for (Identifier i : identifiersToRemove) {
dependency.removeIdentifier(i);
}
} }
/** /**
@@ -408,23 +429,38 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
|| identifier.getValue().startsWith("cpe:/a:oracle:opensso_enterprise:") || identifier.getValue().startsWith("cpe:/a:oracle:opensso_enterprise:")
|| identifier.getValue().startsWith("cpe:/a:sun:opensso_enterprise:") || identifier.getValue().startsWith("cpe:/a:sun:opensso_enterprise:")
|| identifier.getValue().startsWith("cpe:/a:sun:opensso:"))) { || identifier.getValue().startsWith("cpe:/a:sun:opensso:"))) {
final String newCpe = String.format("cpe:/a:sun:opensso_enterprise:%s", identifier.getValue().substring(22)); final String[] parts = identifier.getValue().split(":");
final String newCpe2 = String.format("cpe:/a:oracle:opensso_enterprise:%s", identifier.getValue().substring(22)); final int pos = parts[0].length() + parts[1].length() + parts[2].length() + parts[3].length() + 4;
final String newCpe3 = String.format("cpe:/a:sun:opensso:%s", identifier.getValue().substring(22)); final String newCpe = String.format("cpe:/a:sun:opensso_enterprise:%s", identifier.getValue().substring(pos));
final String newCpe4 = String.format("cpe:/a:oracle:opensso:%s", identifier.getValue().substring(22)); final String newCpe2 = String.format("cpe:/a:oracle:opensso_enterprise:%s", identifier.getValue().substring(pos));
final String newCpe3 = String.format("cpe:/a:sun:opensso:%s", identifier.getValue().substring(pos));
final String newCpe4 = String.format("cpe:/a:oracle:opensso:%s", identifier.getValue().substring(pos));
try { try {
dependency.addIdentifier("cpe", dependency.addIdentifier("cpe", newCpe,
newCpe, String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe, "UTF-8")),
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe, "UTF-8"))); identifier.getConfidence());
dependency.addIdentifier("cpe", dependency.addIdentifier("cpe", newCpe2,
newCpe2, String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe2, "UTF-8")),
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe2, "UTF-8"))); identifier.getConfidence());
dependency.addIdentifier("cpe", dependency.addIdentifier("cpe", newCpe3,
newCpe3, String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe3, "UTF-8")),
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe3, "UTF-8"))); identifier.getConfidence());
dependency.addIdentifier("cpe", dependency.addIdentifier("cpe", newCpe4,
newCpe4, String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe4, "UTF-8")),
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe4, "UTF-8"))); identifier.getConfidence());
} catch (UnsupportedEncodingException ex) {
LOGGER.debug("", ex);
}
}
if ("cpe".equals(identifier.getType()) && identifier.getValue() != null
&& identifier.getValue().startsWith("cpe:/a:apache:santuario_xml_security_for_java:")) {
final String[] parts = identifier.getValue().split(":");
final int pos = parts[0].length() + parts[1].length() + parts[2].length() + parts[3].length() + 4;
final String newCpe = String.format("cpe:/a:apache:xml_security_for_java:%s", identifier.getValue().substring(pos));
try {
dependency.addIdentifier("cpe", newCpe,
String.format(CPEAnalyzer.NVD_SEARCH_URL, URLEncoder.encode(newCpe, "UTF-8")),
identifier.getConfidence());
} catch (UnsupportedEncodingException ex) { } catch (UnsupportedEncodingException ex) {
LOGGER.debug("", ex); LOGGER.debug("", ex);
} }
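To make the offset arithmetic concrete (the CPE value below is hypothetical):

// Hypothetical CPE value - shows how 'pos' locates the version segment.
String value = "cpe:/a:sun:opensso:8.0";
String[] parts = value.split(":");                 // ["cpe", "/a", "sun", "opensso", "8.0"]
int pos = parts[0].length() + parts[1].length()
        + parts[2].length() + parts[3].length() + 4;  // 3 + 2 + 3 + 7 + 4 = 19
String version = value.substring(pos);             // "8.0"
// The version segment is then appended to each alternate vendor/product CPE.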
@@ -446,7 +482,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
String parentPath = dependency.getFilePath().toLowerCase(); String parentPath = dependency.getFilePath().toLowerCase();
if (parentPath.contains(".jar")) { if (parentPath.contains(".jar")) {
parentPath = parentPath.substring(0, parentPath.indexOf(".jar") + 4); parentPath = parentPath.substring(0, parentPath.indexOf(".jar") + 4);
final List<Dependency> dependencies = engine.getDependencies(); final Dependency[] dependencies = engine.getDependencies();
final Dependency parent = findDependency(parentPath, dependencies); final Dependency parent = findDependency(parentPath, dependencies);
if (parent != null) { if (parent != null) {
boolean remove = false; boolean remove = false;
@@ -464,7 +500,7 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
} }
} }
if (remove) { if (remove) {
dependencies.remove(dependency); engine.removeDependency(dependency);
} }
} }
} }
@@ -476,10 +512,10 @@ public class FalsePositiveAnalyzer extends AbstractAnalyzer {
* dependencies. * dependencies.
* *
* @param dependencyPath the path of the dependency to return * @param dependencyPath the path of the dependency to return
* @param dependencies the collection of dependencies to search * @param dependencies the array of dependencies to search
* @return the dependency object for the given path, otherwise null * @return the dependency object for the given path, otherwise null
*/ */
private Dependency findDependency(String dependencyPath, List<Dependency> dependencies) { private Dependency findDependency(String dependencyPath, Dependency[] dependencies) {
for (Dependency d : dependencies) { for (Dependency d : dependencies) {
if (d.getFilePath().equalsIgnoreCase(dependencyPath)) { if (d.getFilePath().equalsIgnoreCase(dependencyPath)) {
return d; return d;

View File

@@ -18,25 +18,46 @@
package org.owasp.dependencycheck.analyzer; package org.owasp.dependencycheck.analyzer;
import java.io.File; import java.io.File;
import javax.annotation.concurrent.ThreadSafe;
import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOCase;
import org.apache.commons.io.filefilter.NameFileFilter; import org.apache.commons.io.filefilter.NameFileFilter;
import org.owasp.dependencycheck.Engine; import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException; import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence; import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.utils.DependencyVersion; import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil; import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.Settings; import org.owasp.dependencycheck.utils.Settings;
/** /**
*
* Takes a dependency and analyzes the filename and determines the hashes. * Takes a dependency and analyzes the filename and determines the hashes.
* *
* @author Jeremy Long * @author Jeremy Long
*/ */
@ThreadSafe
public class FileNameAnalyzer extends AbstractAnalyzer { public class FileNameAnalyzer extends AbstractAnalyzer {
/**
* Python init files
*/
//CSOFF: WhitespaceAfter
private static final NameFileFilter IGNORED_FILES = new NameFileFilter(new String[]{
"__init__.py",
"__init__.pyc",
"__init__.pyo",
"composer.lock",
"configure.in",
"configure.ac",
"Gemfile.lock",
"METADATA",
"PKG-INFO",
"package.json",
"Package.swift",}, IOCase.INSENSITIVE);
//CSON: WhitespaceAfter
//<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer"> //<editor-fold defaultstate="collapsed" desc="All standard implementation details of Analyzer">
/** /**
* The name of the analyzer. * The name of the analyzer.
@@ -66,6 +87,7 @@ public class FileNameAnalyzer extends AbstractAnalyzer {
public AnalysisPhase getAnalysisPhase() { public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE; return ANALYSIS_PHASE;
} }
/** /**
* <p> * <p>
* Returns the setting key to determine if the analyzer is enabled.</p> * Returns the setting key to determine if the analyzer is enabled.</p>
@@ -78,16 +100,6 @@ public class FileNameAnalyzer extends AbstractAnalyzer {
} }
//</editor-fold> //</editor-fold>
/**
* Python init files
*/
//CSOFF: WhitespaceAfter
private static final NameFileFilter IGNORED_FILES = new NameFileFilter(new String[]{
"__init__.py",
"__init__.pyc",
"__init__.pyo",});
//CSON: WhitespaceAfter
/** /**
* Collects information about the file name. * Collects information about the file name.
* *
@@ -111,21 +123,16 @@ public class FileNameAnalyzer extends AbstractAnalyzer {
// a shade. This should hopefully correct for cases like log4j.jar or // a shade. This should hopefully correct for cases like log4j.jar or
// struts2-core.jar // struts2-core.jar
if (version.getVersionParts() == null || version.getVersionParts().size() < 2) { if (version.getVersionParts() == null || version.getVersionParts().size() < 2) {
dependency.getVersionEvidence().addEvidence("file", "version", dependency.addEvidence(EvidenceType.VERSION, "file", "version", version.toString(), Confidence.MEDIUM);
version.toString(), Confidence.MEDIUM);
} else { } else {
dependency.getVersionEvidence().addEvidence("file", "version", dependency.addEvidence(EvidenceType.VERSION, "file", "version", version.toString(), Confidence.HIGHEST);
version.toString(), Confidence.HIGHEST);
} }
dependency.getVersionEvidence().addEvidence("file", "name", dependency.addEvidence(EvidenceType.VERSION, "file", "name", packageName, Confidence.MEDIUM);
packageName, Confidence.MEDIUM);
} }
if (!IGNORED_FILES.accept(f)) { if (!IGNORED_FILES.accept(f)) {
dependency.getProductEvidence().addEvidence("file", "name", dependency.addEvidence(EvidenceType.PRODUCT, "file", "name", packageName, Confidence.HIGH);
packageName, Confidence.HIGH); dependency.addEvidence(EvidenceType.VENDOR, "file", "name", packageName, Confidence.HIGH);
dependency.getVendorEvidence().addEvidence("file", "name",
packageName, Confidence.HIGH);
} }
} }
} }
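For reference, a small self-contained sketch of the case-insensitive ignore list used above; it relies on the commons-io NameFileFilter already imported by the analyzer, and the file names are hypothetical.

import java.io.File;
import org.apache.commons.io.IOCase;
import org.apache.commons.io.filefilter.NameFileFilter;

// Illustrative only: mirrors a subset of the IGNORED_FILES filter defined above.
public class IgnoredFilesSketch {
    public static void main(String[] args) {
        final NameFileFilter ignored = new NameFileFilter(new String[]{
            "__init__.py", "package.json", "Gemfile.lock"}, IOCase.INSENSITIVE);

        // Ignored: no product/vendor evidence is taken from these file names.
        System.out.println(ignored.accept(new File("PACKAGE.JSON")));                  // true (case-insensitive)

        // Not ignored: the file name still contributes product/vendor/version evidence.
        System.out.println(ignored.accept(new File("spring-core-4.3.4.RELEASE.jar"))); // false
    }
}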

Some files were not shown because too many files have changed in this diff.